From acc659a3a3986d830af1fd61d64a4a11eb7f7c3b Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 2 Mar 2016 21:44:28 -0800 Subject: [PATCH 001/203] ENH: Add writer for Siemens CSA header Allows us to take a parsed CSA header and convert it back into a string. Useful for things like DICOM anonymization, or perhaps round tripping DICOM -> Nifti -> DICOM. --- nibabel/nicom/csareader.py | 110 ++++++++++++++++++++++++++ nibabel/nicom/tests/test_csareader.py | 11 +++ 2 files changed, 121 insertions(+) diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index 1764e2878c..b2b87b866f 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -2,6 +2,7 @@ ''' import numpy as np +import struct from .structreader import Unpacker from .utils import find_private_section @@ -29,6 +30,10 @@ class CSAReadError(CSAError): pass +class CSAWriteError(CSAError): + pass + + def get_csa_header(dcm_data, csa_type='image'): ''' Get CSA header information from DICOM header @@ -162,6 +167,96 @@ def read(csa_str): return csa_dict +def write(csa_header): + ''' Write string from CSA header `csa_header` + + Parameters + ---------- + csa_header : dict + header information as dict, where `header` has fields (at least) + ``type, n_tags, tags``. ``header['tags']`` is also a dictionary + with one key, value pair for each tag in the header. + + Returns + ------- + csa_str : str + byte string containing CSA header information + ''' + result = [] + if csa_header['type'] == 2: + result.append(b'SV10') + result.append(csa_header['unused0']) + if not 0 < csa_header['n_tags'] <= 128: + raise CSAWriteError('Number of tags `t` should be ' + '0 < t <= 128') + result.append(struct.pack('2I', + csa_header['n_tags'], + csa_header['check']) + ) + + # Build list of tags in correct order + tags = list(csa_header['tags'].items()) + tags.sort(key=lambda x: x[1]['tag_no']) + tag0_n_items = tags[0][1]['n_items'] + + # Add the information for each tag + for tag_name, tag_dict in tags: + vm = tag_dict['vm'] + vr = tag_dict['vr'] + n_items = tag_dict['n_items'] + assert n_items < 100 + result.append(struct.pack('64si4s3i', + make_nt_str(tag_name), + vm, + make_nt_str(vr), + tag_dict['syngodt'], + n_items, + tag_dict['last3']) + ) + + # Figure out the number of values for this tag + if vm == 0: + n_values = n_items + else: + n_values = vm + + # Add each item for this tag + for item_no in range(n_items): + # Figure out the item length + if item_no >= n_values or tag_dict['items'][item_no] == '': + item_len = 0 + else: + item = tag_dict['items'][item_no] + if not isinstance(item, str): + item = str(item) + item_nt_str = make_nt_str(item) + item_len = len(item_nt_str) + + # These values aren't actually preserved in the dict + # representation of the header. Best we can do is set the ones + # that determine the item length appropriately. 
+ x0, x1, x2, x3 = 0, 0, 0, 0 + if csa_header['type'] == 1: # CSA1 - odd length calculation + x0 = tag0_n_items + item_len + if item_len < 0 or (ptr + item_len) > csa_len: + if item_no < vm: + items.append('') + break + else: # CSA2 + x1 = item_len + result.append(struct.pack('4i', x0, x1, x2, x3)) + + if item_len == 0: + continue + + result.append(item_nt_str) + # go to 4 byte boundary + plus4 = item_len % 4 + if plus4 != 0: + result.append(b'\x00' * (4 - plus4)) + return b''.join(result) + + def get_scalar(csa_dict, tag_name): try: items = csa_dict['tags'][tag_name]['items'] @@ -259,3 +354,18 @@ def nt_str(s): if zero_pos == -1: return s return s[:zero_pos].decode('latin-1') + + +def make_nt_str(s): + ''' Create a null terminated byte string from a unicode object. + + Parameters + ---------- + s : unicode + + Returns + ------- + result : bytes + s encoded as latin-1 with a null char appended + ''' + return s.encode('latin-1') + b'\x00' diff --git a/nibabel/nicom/tests/test_csareader.py b/nibabel/nicom/tests/test_csareader.py index 1692aad622..ba644a09ff 100644 --- a/nibabel/nicom/tests/test_csareader.py +++ b/nibabel/nicom/tests/test_csareader.py @@ -136,3 +136,14 @@ def test_missing_csa_elem(): del dcm[csa_tag] hdr = csa.get_csa_header(dcm, 'image') assert hdr is None + + +def test_read_write_rt(): + # Try doing a read-write-read round trip and make sure the dictionary + # representation of the header is the same. We can't exactly reproduce the + # original string representation currently. + for csa_str in (CSA2_B0, CSA2_B1000): + csa_info = csa.read(csa_str) + new_csa_str = csa.write(csa_info) + new_csa_info = csa.read(new_csa_str) + assert csa_info == new_csa_info From 122a923dfb5b55f22487d4c3f072391f1dcc2afd Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Mon, 23 Mar 2020 20:01:44 -0700 Subject: [PATCH 002/203] CLN: Cleanup whitespace and formatting --- nibabel/nicom/csareader.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index b2b87b866f..98d06557f2 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -188,11 +188,11 @@ def write(csa_header): result.append(csa_header['unused0']) if not 0 < csa_header['n_tags'] <= 128: raise CSAWriteError('Number of tags `t` should be ' - '0 < t <= 128') + '0 < t <= 128') result.append(struct.pack('2I', csa_header['n_tags'], csa_header['check']) - ) + ) # Build list of tags in correct order tags = list(csa_header['tags'].items()) @@ -212,7 +212,7 @@ def write(csa_header): tag_dict['syngodt'], n_items, tag_dict['last3']) - ) + ) # Figure out the number of values for this tag if vm == 0: @@ -242,7 +242,7 @@ def write(csa_header): if item_no < vm: items.append('') break - else: # CSA2 + else: # CSA2 x1 = item_len result.append(struct.pack('4i', x0, x1, x2, x3)) From 58271684a8fd406874c9a4549b125601fc25e052 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 20 Apr 2023 09:53:06 -0400 Subject: [PATCH 003/203] ENH: Catch SVD failure and raise informative HeaderDataError --- nibabel/nifti1.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 9bb88e844c..8502ad4fa6 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -1098,7 +1098,10 @@ def set_qform(self, affine, code=None, strip_shears=True): # (a subtle requirement of the NIFTI format qform transform) # Transform below is polar decomposition, returning the closest # orthogonal matrix PR, to input R - P, S, Qs = 
npl.svd(R) + try: + P, S, Qs = npl.svd(R) + except np.linalg.LinAlgError as e: + raise HeaderDataError(f'Could not decompose affine:\n{affine}') from e PR = np.dot(P, Qs) if not strip_shears and not np.allclose(PR, R): raise HeaderDataError('Shears in affine and `strip_shears` is False') From 8bc1af450f92d3bb4105d11f89397b8e87c6b298 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 12 Dec 2023 09:36:36 -0500 Subject: [PATCH 004/203] DOC: Fix references in changelog --- Changelog | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog b/Changelog index 06cbf74fdf..cd3c2b005b 100644 --- a/Changelog +++ b/Changelog @@ -36,7 +36,7 @@ tested up to Python 3.12 and NumPy 1.26. New features ------------ * Add generic :class:`~nibabel.pointset.Pointset` and regularly spaced - :class:`~nibabel.pointset.NDGrid` data structures in preparation for coordinate + :class:`~nibabel.pointset.Grid` data structures in preparation for coordinate transformation and resampling (pr/1251) (CM, reviewed by Oscar Esteban) Enhancements @@ -44,7 +44,7 @@ Enhancements * Add :meth:`~nibabel.arrayproxy.ArrayProxy.copy` method to :class:`~nibabel.arrayproxy.ArrayProxy` (pr/1255) (CM, reviewed by Paul McCarthy) * Permit :meth:`~nibabel.xmlutils.XmlSerializable.to_xml` to pass keyword - arguments to :meth:`~xml.etree.ElementTree.ElementTree.tostring` (pr/1258) + arguments to :meth:`~xml.etree.ElementTree.tostring` (pr/1258) (CM) * Allow user expansion (e.g., ``~/...``) in strings passed to functions that accept paths (pr/1260) (Reinder Vos de Wael, reviewed by CM) @@ -54,7 +54,7 @@ Enhancements ``affine=None`` argument (pr/1253) (Blake Dewey, reviewed by CM) * Warn on invalid MINC2 spacing declarations, treat as missing (pr/1237) (Peter Suter, reviewed by CM) -* Refactor :func:`~nibabel.nicom.utils.find_private_element` for improved +* Refactor :func:`~nibabel.nicom.utils.find_private_section` for improved readability and maintainability (pr/1228) (MB, reviewed by CM) Bug fixes From c9e7795306f7dd6912d6502318129c1dc8056397 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 12 Dec 2023 09:37:39 -0500 Subject: [PATCH 005/203] MNT: Add tool for generating GitHub-friendly release notes --- tools/markdown_release_notes.py | 94 +++++++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100644 tools/markdown_release_notes.py diff --git a/tools/markdown_release_notes.py b/tools/markdown_release_notes.py new file mode 100644 index 0000000000..66e7876036 --- /dev/null +++ b/tools/markdown_release_notes.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +import re +import sys +from pathlib import Path + +CHANGELOG = Path(__file__).parent.parent / 'Changelog' + +# Match release lines like "5.2.0 (Monday 11 December 2023)" +RELEASE_REGEX = re.compile(r"""((?:\d+)\.(?:\d+)\.(?:\d+)) \(\w+ \d{1,2} \w+ \d{4}\)$""") + + +def main(): + version = sys.argv[1] + output = sys.argv[2] + if output == '-': + output = sys.stdout + else: + output = open(output, 'w') + + release_notes = [] + in_release_notes = False + + with open(CHANGELOG) as f: + for line in f: + match = RELEASE_REGEX.match(line) + if match: + if in_release_notes: + break + in_release_notes = match.group(1) == version + next(f) # Skip the underline + continue + + if in_release_notes: + release_notes.append(line) + + # Drop empty lines at start and end + while release_notes and not release_notes[0].strip(): + release_notes.pop(0) + while release_notes and not release_notes[-1].strip(): + release_notes.pop() + + # Join lines + 
release_notes = ''.join(release_notes) + + # Remove line breaks when they are followed by a space + release_notes = re.sub(r'\n +', ' ', release_notes) + + # Replace pr/ with # for GitHub + release_notes = re.sub(r'\(pr/(\d+)\)', r'(#\1)', release_notes) + + # Replace :mod:`package.X` with [package.X](...) + release_notes = re.sub( + r':mod:`nibabel\.(.*)`', + r'[nibabel.\1](https://nipy.org/nibabel/reference/nibabel.\1.html)', + release_notes, + ) + # Replace :class/func/attr:`package.module.X` with [package.module.X](...) + release_notes = re.sub( + r':(?:class|func|attr):`(nibabel\.\w*)(\.[\w.]*)?\.(\w+)`', + r'[\1\2.\3](https://nipy.org/nibabel/reference/\1.html#\1\2.\3)', + release_notes, + ) + release_notes = re.sub( + r':(?:class|func|attr):`~(nibabel\.\w*)(\.[\w.]*)?\.(\w+)`', + r'[\3](https://nipy.org/nibabel/reference/\1.html#\1\2.\3)', + release_notes, + ) + # Replace :meth:`package.module.class.X` with [package.module.class.X](...) + release_notes = re.sub( + r':meth:`(nibabel\.[\w.]*)\.(\w+)\.(\w+)`', + r'[\1.\2.\3](https://nipy.org/nibabel/reference/\1.html#\1.\2.\3)', + release_notes, + ) + release_notes = re.sub( + r':meth:`~(nibabel\.[\w.]*)\.(\w+)\.(\w+)`', + r'[\3](https://nipy.org/nibabel/reference/\1.html#\1.\2.\3)', + release_notes, + ) + + def python_doc(match): + module = match.group(1) + name = match.group(2) + return f'[{name}](https://docs.python.org/3/library/{module.lower()}.html#{module}.{name})' + + release_notes = re.sub(r':meth:`~([\w.]+)\.(\w+)`', python_doc, release_notes) + + output.write('## Release notes\n\n') + output.write(release_notes) + + output.close() + + +if __name__ == '__main__': + main() From 33363bfa49ce3b2417ed0d5b456a0b919571185d Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 12 Dec 2023 09:48:41 -0500 Subject: [PATCH 006/203] MNT: Avoid isort version with broken extras --- tox.ini | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index d91c136fc1..cc2b263cb1 100644 --- a/tox.ini +++ b/tox.ini @@ -141,7 +141,8 @@ labels = check deps = flake8 blue - isort[colors] + # Broken extras, remove when fix is released + isort[colors]!=5.13.1 skip_install = true commands = blue --check --diff --color nibabel @@ -153,7 +154,7 @@ description = Auto-apply style guide to the extent possible labels = pre-release deps = blue - isort[colors] + isort skip_install = true commands = blue nibabel From 773e3c40eebf072630abbc26a30d3ad67adf5e90 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 12 Dec 2023 10:54:07 -0500 Subject: [PATCH 007/203] DOC: Fix intersphinx mapping and reference type --- Changelog | 4 ++-- doc/source/conf.py | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Changelog b/Changelog index cd3c2b005b..10afc42df8 100644 --- a/Changelog +++ b/Changelog @@ -43,8 +43,8 @@ Enhancements ------------ * Add :meth:`~nibabel.arrayproxy.ArrayProxy.copy` method to :class:`~nibabel.arrayproxy.ArrayProxy` (pr/1255) (CM, reviewed by Paul McCarthy) -* Permit :meth:`~nibabel.xmlutils.XmlSerializable.to_xml` to pass keyword - arguments to :meth:`~xml.etree.ElementTree.tostring` (pr/1258) +* Permit :meth:`~nibabel.xmlutils.XmlSerializable.to_xml` methods to pass keyword + arguments to :func:`xml.etree.ElementTree.tostring` (pr/1258) (CM) * Allow user expansion (e.g., ``~/...``) in strings passed to functions that accept paths (pr/1260) (Reinder Vos de Wael, reviewed by CM) diff --git a/doc/source/conf.py b/doc/source/conf.py index 82fe25adac..175c6340bd 100644 --- 
a/doc/source/conf.py +++ b/doc/source/conf.py @@ -280,7 +280,12 @@ # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/3/': None} +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), + 'numpy': ('https://numpy.org/doc/stable', None), + 'scipy': ('https://docs.scipy.org/doc/scipy', None), + 'matplotlib': ('https://matplotlib.org/stable', None), +} # Config of plot_directive plot_include_source = True From f7b9bc4c89f9bfb9e31763e3b2d672016d6d8f33 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 12 Dec 2023 11:13:10 -0500 Subject: [PATCH 008/203] MNT: Advertise Python 3.12 support --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 50905dff56..9fec3975cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering", ] # Version from setuptools_scm From 46a765d162239e131c4db7d573f9bf9a05b3c3f1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 12 Dec 2023 21:55:20 -0500 Subject: [PATCH 009/203] FIX: Tolerate missing git Closes gh-1285. --- nibabel/pkg_info.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py index 7e816939d5..7232806a0a 100644 --- a/nibabel/pkg_info.py +++ b/nibabel/pkg_info.py @@ -1,6 +1,7 @@ from __future__ import annotations import sys +from contextlib import suppress from subprocess import run from packaging.version import Version @@ -102,14 +103,16 @@ def pkg_commit_hash(pkg_path: str | None = None) -> tuple[str, str]: ver = Version(__version__) if ver.local is not None and ver.local.startswith('g'): return 'installation', ver.local[1:8] - # maybe we are in a repository - proc = run( - ('git', 'rev-parse', '--short', 'HEAD'), - capture_output=True, - cwd=pkg_path, - ) - if proc.stdout: - return 'repository', proc.stdout.decode().strip() + # maybe we are in a repository, but consider that we may not have git + with suppress(FileNotFoundError): + proc = run( + ('git', 'rev-parse', '--short', 'HEAD'), + capture_output=True, + cwd=pkg_path, + ) + if proc.stdout: + return 'repository', proc.stdout.decode().strip() + return '(none found)', '' From 1ec84885bc40ea459252fb74e45945f25bd804f1 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 26 Dec 2023 00:12:12 +0100 Subject: [PATCH 010/203] MNT: Apply Repo-Review suggestions --- nibabel/_compression.py | 4 ++-- nibabel/benchmarks/bench_arrayproxy_slicing.py | 2 +- nibabel/cmdline/dicomfs.py | 2 +- nibabel/externals/conftest.py | 2 +- nibabel/minc2.py | 2 +- nibabel/parrec.py | 2 +- nibabel/pydicom_compat.py | 2 +- nibabel/spm99analyze.py | 2 +- nibabel/tmpdirs.py | 2 +- nibabel/xmlutils.py | 2 +- pyproject.toml | 4 +++- 11 files changed, 14 insertions(+), 12 deletions(-) diff --git a/nibabel/_compression.py b/nibabel/_compression.py index bf13895c80..75a5e3bbf4 100644 --- a/nibabel/_compression.py +++ b/nibabel/_compression.py @@ -17,7 +17,7 @@ from .optpkg import optional_package if ty.TYPE_CHECKING: # pragma: no cover - import indexed_gzip # type: ignore + import indexed_gzip # type: ignore[import-not-found] import pyzstd HAVE_INDEXED_GZIP = True @@ -40,7 +40,7 @@ if HAVE_INDEXED_GZIP: 
COMPRESSED_FILE_LIKES += (indexed_gzip.IndexedGzipFile,) COMPRESSION_ERRORS += (indexed_gzip.ZranError,) - from indexed_gzip import IndexedGzipFile # type: ignore + from indexed_gzip import IndexedGzipFile # type: ignore[import-not-found] else: IndexedGzipFile = gzip.GzipFile diff --git a/nibabel/benchmarks/bench_arrayproxy_slicing.py b/nibabel/benchmarks/bench_arrayproxy_slicing.py index 958923d7ea..dc9acfdedd 100644 --- a/nibabel/benchmarks/bench_arrayproxy_slicing.py +++ b/nibabel/benchmarks/bench_arrayproxy_slicing.py @@ -26,7 +26,7 @@ # if memory_profiler is installed, we get memory usage results try: - from memory_profiler import memory_usage # type: ignore + from memory_profiler import memory_usage # type: ignore[import-not-found] except ImportError: memory_usage = None diff --git a/nibabel/cmdline/dicomfs.py b/nibabel/cmdline/dicomfs.py index 85d7d8dcad..dec4011c51 100644 --- a/nibabel/cmdline/dicomfs.py +++ b/nibabel/cmdline/dicomfs.py @@ -25,7 +25,7 @@ class dummy_fuse: try: - import fuse # type: ignore + import fuse # type: ignore[import-not-found] uid = os.getuid() gid = os.getgid() diff --git a/nibabel/externals/conftest.py b/nibabel/externals/conftest.py index 33f88eb323..472f2f0296 100644 --- a/nibabel/externals/conftest.py +++ b/nibabel/externals/conftest.py @@ -6,7 +6,7 @@ import os from contextlib import contextmanager - @contextmanager # type: ignore + @contextmanager # type: ignore[no-redef] def _chdir(path): cwd = os.getcwd() os.chdir(path) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 3096ef9499..94e1be76e2 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -163,7 +163,7 @@ class Minc2Image(Minc1Image): def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): # Import of h5py might take awhile for MPI-enabled builds # So we are importing it here "on demand" - import h5py # type: ignore + import h5py # type: ignore[import-not-found] holder = file_map['image'] if holder.filename is None: diff --git a/nibabel/parrec.py b/nibabel/parrec.py index ec3fdea711..3a8a6030de 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -1338,7 +1338,7 @@ def from_filename( strict_sort=strict_sort, ) - load = from_filename # type: ignore + load = from_filename # type: ignore[assignment] load = PARRECImage.from_filename diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py index fae24e691c..d61c880117 100644 --- a/nibabel/pydicom_compat.py +++ b/nibabel/pydicom_compat.py @@ -42,7 +42,7 @@ if have_dicom: # Values not imported by default - import pydicom.values # type: ignore + import pydicom.values # type: ignore[import-not-found] from pydicom.dicomio import dcmread as read_file # noqa:F401 from pydicom.sequence import Sequence # noqa:F401 diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index 974f8609cf..c859d702f4 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -275,7 +275,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): contents = matf.read() if len(contents) == 0: return ret - import scipy.io as sio # type: ignore + import scipy.io as sio # type: ignore[import-not-found] mats = sio.loadmat(BytesIO(contents)) if 'mat' in mats: # this overrides a 'M', and includes any flip diff --git a/nibabel/tmpdirs.py b/nibabel/tmpdirs.py index 7fe47e6510..49d69d2bf2 100644 --- a/nibabel/tmpdirs.py +++ b/nibabel/tmpdirs.py @@ -15,7 +15,7 @@ from contextlib import chdir as _chdir except ImportError: # PY310 - @contextmanager # type: ignore + @contextmanager # type: ignore[no-redef] def 
_chdir(path): cwd = os.getcwd() os.chdir(path) diff --git a/nibabel/xmlutils.py b/nibabel/xmlutils.py index 4a5fb28979..d3a7a08309 100644 --- a/nibabel/xmlutils.py +++ b/nibabel/xmlutils.py @@ -32,7 +32,7 @@ def to_xml(self, enc='utf-8', **kwargs) -> bytes: Additional keyword arguments to :func:`xml.etree.ElementTree.tostring`. """ ele = self._to_xml_element() - return b'' if ele is None else tostring(ele, enc, **kwargs) + return tostring(ele, enc, **kwargs) class XmlBasedHeader(FileBasedHeader, XmlSerializable): diff --git a/pyproject.toml b/pyproject.toml index 9fec3975cc..14095b8f22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -111,7 +111,7 @@ __version_tuple__ = version_tuple = {version_tuple!r} [tool.blue] line_length = 99 -target-version = ["py37"] +target-version = ["py38"] force-exclude = """ ( _version.py @@ -130,6 +130,8 @@ python_version = "3.11" exclude = [ "/tests", ] +warn_unreachable = true +enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] [tool.codespell] skip = "*/data/*,./nibabel-data" From cff32bbcc2c32defe176aebb00150331a18ed3c3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 26 Dec 2023 07:34:24 -0600 Subject: [PATCH 011/203] MNT: Purge defunct nisext package --- .coveragerc | 4 +- Makefile | 21 +- nisext/__init__.py | 13 - nisext/py3builder.py | 38 --- nisext/sexts.py | 285 ------------------- nisext/testers.py | 523 ----------------------------------- nisext/tests/__init__.py | 1 - nisext/tests/test_sexts.py | 106 ------- nisext/tests/test_testers.py | 35 --- pyproject.toml | 2 +- 10 files changed, 4 insertions(+), 1024 deletions(-) delete mode 100644 nisext/__init__.py delete mode 100644 nisext/py3builder.py delete mode 100644 nisext/sexts.py delete mode 100644 nisext/testers.py delete mode 100644 nisext/tests/__init__.py delete mode 100644 nisext/tests/test_sexts.py delete mode 100644 nisext/tests/test_testers.py diff --git a/.coveragerc b/.coveragerc index 57747ec0d8..bcf28e09c2 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,7 +1,7 @@ [run] branch = True -source = nibabel, nisext -include = */nibabel/*, */nisext/* +source = nibabel +include = */nibabel/* omit = */externals/* */benchmarks/* diff --git a/Makefile b/Makefile index 7d4c6666ae..689ad6a75f 100644 --- a/Makefile +++ b/Makefile @@ -233,25 +233,6 @@ bdist_rpm: bdist_mpkg: $(PYTHON) tools/mpkg_wrapper.py setup.py install -# Check for files not installed -check-files: - $(PYTHON) -c 'from nisext.testers import check_files; check_files("nibabel")' - -# Print out info for possible install methods -check-version-info: - $(PYTHON) -c 'from nisext.testers import info_from_here; info_from_here("nibabel")' - -# Run tests from installed code -installed-tests: - $(PYTHON) -c 'from nisext.testers import tests_installed; tests_installed("nibabel")' - -# Run tests from packaged distributions -sdist-tests: - $(PYTHON) -c 'from nisext.testers import sdist_tests; sdist_tests("nibabel", doctests=False)' - -bdist-egg-tests: - $(PYTHON) -c 'from nisext.testers import bdist_egg_tests; bdist_egg_tests("nibabel", doctests=False, label="not script_test")' - sdist-venv: clean rm -rf dist venv unset PYTHONPATH && $(PYTHON) setup.py sdist --formats=zip @@ -260,7 +241,7 @@ sdist-venv: clean mkdir venv/tmp cd venv/tmp && unzip ../../dist/*.zip . venv/bin/activate && cd venv/tmp/nibabel* && python setup.py install - unset PYTHONPATH && . venv/bin/activate && cd venv && nosetests --with-doctest nibabel nisext + unset PYTHONPATH && . 
venv/bin/activate && cd venv && pytest --doctest-modules --doctest-plus --pyargs nibabel source-release: distclean $(PYTHON) -m compileall . diff --git a/nisext/__init__.py b/nisext/__init__.py deleted file mode 100644 index 6b19d7eb8e..0000000000 --- a/nisext/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# init for sext package -"""Setuptools extensions - -nibabel uses these routines, and houses them, and installs them. nipy-proper -and dipy use them. -""" - -import warnings - -warnings.warn( - """The nisext package is deprecated as of NiBabel 5.0 and will be fully -removed in NiBabel 6.0""" -) diff --git a/nisext/py3builder.py b/nisext/py3builder.py deleted file mode 100644 index 24bd298364..0000000000 --- a/nisext/py3builder.py +++ /dev/null @@ -1,38 +0,0 @@ -"""distutils utilities for porting to python 3 within 2-compatible tree""" - - -try: - from distutils.command.build_py import build_py_2to3 -except ImportError: - # 2.x - no parsing of code - from distutils.command.build_py import build_py -else: # Python 3 - # Command to also apply 2to3 to doctests - from distutils import log - - class build_py(build_py_2to3): - def run_2to3(self, files): - # Add doctest parsing; this stuff copied from distutils.utils in - # python 3.2 source - if not files: - return - fixer_names, options, explicit = (self.fixer_names, self.options, self.explicit) - # Make this class local, to delay import of 2to3 - from lib2to3.refactor import RefactoringTool, get_fixers_from_package - - class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw): - log.error(msg, *args) - - def log_message(self, msg, *args): - log.info(msg, *args) - - def log_debug(self, msg, *args): - log.debug(msg, *args) - - if fixer_names is None: - fixer_names = get_fixers_from_package('lib2to3.fixes') - r = DistutilsRefactoringTool(fixer_names, options=options) - r.refactor(files, write=True) - # Then doctests - r.refactor(files, write=True, doctests_only=True) diff --git a/nisext/sexts.py b/nisext/sexts.py deleted file mode 100644 index b206588dec..0000000000 --- a/nisext/sexts.py +++ /dev/null @@ -1,285 +0,0 @@ -"""Distutils / setuptools helpers""" - -import os -from configparser import ConfigParser -from distutils import log -from distutils.command.build_py import build_py -from distutils.command.install_scripts import install_scripts -from distutils.version import LooseVersion -from os.path import join as pjoin -from os.path import split as psplit -from os.path import splitext - - -def get_comrec_build(pkg_dir, build_cmd=build_py): - """Return extended build command class for recording commit - - The extended command tries to run git to find the current commit, getting - the empty string if it fails. It then writes the commit hash into a file - in the `pkg_dir` path, named ``COMMIT_INFO.txt``. - - In due course this information can be used by the package after it is - installed, to tell you what commit it was installed from if known. - - To make use of this system, you need a package with a COMMIT_INFO.txt file - - e.g. 
``myproject/COMMIT_INFO.txt`` - that might well look like this:: - - # This is an ini file that may contain information about the code state - [commit hash] - # The line below may contain a valid hash if it has been substituted during 'git archive' - archive_subst_hash=$Format:%h$ - # This line may be modified by the install process - install_hash= - - The COMMIT_INFO file above is also designed to be used with git substitution - - so you probably also want a ``.gitattributes`` file in the root directory - of your working tree that contains something like this:: - - myproject/COMMIT_INFO.txt export-subst - - That will cause the ``COMMIT_INFO.txt`` file to get filled in by ``git - archive`` - useful in case someone makes such an archive - for example with - via the github 'download source' button. - - Although all the above will work as is, you might consider having something - like a ``get_info()`` function in your package to display the commit - information at the terminal. See the ``pkg_info.py`` module in the nipy - package for an example. - """ - - class MyBuildPy(build_cmd): - """Subclass to write commit data into installation tree""" - - def run(self): - build_cmd.run(self) - import subprocess - - proc = subprocess.Popen( - 'git rev-parse --short HEAD', - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - shell=True, - ) - repo_commit, _ = proc.communicate() - # Fix for python 3 - repo_commit = str(repo_commit) - # We write the installation commit even if it's empty - cfg_parser = ConfigParser() - cfg_parser.read(pjoin(pkg_dir, 'COMMIT_INFO.txt')) - cfg_parser.set('commit hash', 'install_hash', repo_commit) - out_pth = pjoin(self.build_lib, pkg_dir, 'COMMIT_INFO.txt') - cfg_parser.write(open(out_pth, 'wt')) - - return MyBuildPy - - -def _add_append_key(in_dict, key, value): - """Helper for appending dependencies to setuptools args""" - # If in_dict[key] does not exist, create it - # If in_dict[key] is a string, make it len 1 list of strings - # Append value to in_dict[key] list - if key not in in_dict: - in_dict[key] = [] - elif isinstance(in_dict[key], str): - in_dict[key] = [in_dict[key]] - in_dict[key].append(value) - - -# Dependency checks -def package_check( - pkg_name, - version=None, - optional=False, - checker=LooseVersion, - version_getter=None, - messages=None, - setuptools_args=None, -): - """Check if package `pkg_name` is present and has good enough version - - Has two modes of operation. If `setuptools_args` is None (the default), - raise an error for missing non-optional dependencies and log warnings for - missing optional dependencies. If `setuptools_args` is a dict, then fill - ``install_requires`` key value with any missing non-optional dependencies, - and the ``extras_requires`` key value with optional dependencies. - - This allows us to work with and without setuptools. It also means we can - check for packages that have not been installed with setuptools to avoid - installing them again. - - Parameters - ---------- - pkg_name : str - name of package as imported into python - version : {None, str}, optional - minimum version of the package that we require. If None, we don't - check the version. Default is None - optional : bool or str, optional - If ``bool(optional)`` is False, raise error for absent package or wrong - version; otherwise warn. If ``setuptools_args`` is not None, and - ``bool(optional)`` is not False, then `optional` should be a string - giving the feature name for the ``extras_require`` argument to setup. 
- checker : callable, optional - callable with which to return comparable thing from version - string. Default is ``distutils.version.LooseVersion`` - version_getter : {None, callable}: - Callable that takes `pkg_name` as argument, and returns the - package version string - as in:: - - ``version = version_getter(pkg_name)`` - - If None, equivalent to:: - - mod = __import__(pkg_name); version = mod.__version__`` - messages : None or dict, optional - dictionary giving output messages - setuptools_args : None or dict - If None, raise errors / warnings for missing non-optional / optional - dependencies. If dict fill key values ``install_requires`` and - ``extras_require`` for non-optional and optional dependencies. - """ - setuptools_mode = not setuptools_args is None - optional_tf = bool(optional) - if version_getter is None: - - def version_getter(pkg_name): - mod = __import__(pkg_name) - return mod.__version__ - - if messages is None: - messages = {} - msgs = { - 'missing': 'Cannot import package "%s" - is it installed?', - 'missing opt': 'Missing optional package "%s"', - 'opt suffix': '; you may get run-time errors', - 'version too old': 'You have version %s of package "%s" but we need version >= %s', - } - msgs.update(messages) - status, have_version = _package_status(pkg_name, version, version_getter, checker) - if status == 'satisfied': - return - if not setuptools_mode: - if status == 'missing': - if not optional_tf: - raise RuntimeError(msgs['missing'] % pkg_name) - log.warn(msgs['missing opt'] % pkg_name + msgs['opt suffix']) - return - elif status == 'no-version': - raise RuntimeError(f'Cannot find version for {pkg_name}') - assert status == 'low-version' - if not optional_tf: - raise RuntimeError(msgs['version too old'] % (have_version, pkg_name, version)) - log.warn(msgs['version too old'] % (have_version, pkg_name, version) + msgs['opt suffix']) - return - # setuptools mode - if optional_tf and not isinstance(optional, str): - raise RuntimeError('Not-False optional arg should be string') - dependency = pkg_name - if version: - dependency += '>=' + version - if optional_tf: - if not 'extras_require' in setuptools_args: - setuptools_args['extras_require'] = {} - _add_append_key(setuptools_args['extras_require'], optional, dependency) - else: - _add_append_key(setuptools_args, 'install_requires', dependency) - - -def _package_status(pkg_name, version, version_getter, checker): - try: - __import__(pkg_name) - except ImportError: - return 'missing', None - if not version: - return 'satisfied', None - try: - have_version = version_getter(pkg_name) - except AttributeError: - return 'no-version', None - if checker(have_version) < checker(version): - return 'low-version', have_version - return 'satisfied', have_version - - -BAT_TEMPLATE = r"""@echo off -REM wrapper to use shebang first line of {FNAME} -set mypath=%~dp0 -set pyscript="%mypath%{FNAME}" -set /p line1=<%pyscript% -if "%line1:~0,2%" == "#!" (goto :goodstart) -echo First line of %pyscript% does not start with "#!" -exit /b 1 -:goodstart -set py_exe=%line1:~2% -call "%py_exe%" %pyscript% %* -""" - - -class install_scripts_bat(install_scripts): - """Make scripts executable on Windows - - Scripts are bare file names without extension on Unix, fitting (for example) - Debian rules. They identify as python scripts with the usual ``#!`` first - line. Unix recognizes and uses this first "shebang" line, but Windows does - not. 
So, on Windows only we add a ``.bat`` wrapper of name - ``bare_script_name.bat`` to call ``bare_script_name`` using the python - interpreter from the #! first line of the script. - - Notes - ----- - See discussion at - https://matthew-brett.github.io/pydagogue/installing_scripts.html and - example at git://github.com/matthew-brett/myscripter.git for more - background. - """ - - def run(self): - install_scripts.run(self) - if not os.name == 'nt': - return - for filepath in self.get_outputs(): - # If we can find an executable name in the #! top line of the script - # file, make .bat wrapper for script. - with open(filepath, 'rt') as fobj: - first_line = fobj.readline() - if not (first_line.startswith('#!') and 'python' in first_line.lower()): - log.info('No #!python executable found, skipping .bat wrapper') - continue - pth, fname = psplit(filepath) - froot, ext = splitext(fname) - bat_file = pjoin(pth, froot + '.bat') - bat_contents = BAT_TEMPLATE.replace('{FNAME}', fname) - log.info(f'Making {bat_file} wrapper for {filepath}') - if self.dry_run: - continue - with open(bat_file, 'wt') as fobj: - fobj.write(bat_contents) - - -class Bunch: - def __init__(self, vars): - for key, name in vars.items(): - if key.startswith('__'): - continue - self.__dict__[key] = name - - -def read_vars_from(ver_file): - """Read variables from Python text file - - Parameters - ---------- - ver_file : str - Filename of file to read - - Returns - ------- - info_vars : Bunch instance - Bunch object where variables read from `ver_file` appear as - attributes - """ - # Use exec for compabibility with Python 3 - ns = {} - with open(ver_file, 'rt') as fobj: - exec(fobj.read(), ns) - return Bunch(ns) diff --git a/nisext/testers.py b/nisext/testers.py deleted file mode 100644 index 07f71af696..0000000000 --- a/nisext/testers.py +++ /dev/null @@ -1,523 +0,0 @@ -"""Test package information in various install settings - -The routines here install the package from source directories, zips or eggs, and -check these installations by running tests, checking version information, -looking for files that were not copied over. - -The typical use for this module is as a Makefile target. 
For example, here are -the Makefile targets from nibabel:: - - # Check for files not installed - check-files: - $(PYTHON) -c 'from nisext.testers import check_files; check_files("nibabel")' - - # Print out info for possible install methods - check-version-info: - $(PYTHON) -c 'from nisext.testers import info_from_here; info_from_here("nibabel")' - - # Run tests from installed code - installed-tests: - $(PYTHON) -c 'from nisext.testers import tests_installed; tests_installed("nibabel")' - - # Run tests from installed code - sdist-tests: - $(PYTHON) -c 'from nisext.testers import sdist_tests; sdist_tests("nibabel")' - - # Run tests from binary egg - bdist-egg-tests: - $(PYTHON) -c 'from nisext.testers import bdist_egg_tests; bdist_egg_tests("nibabel")' -""" - - -import os -import re -import shutil -import sys -import tempfile -import zipfile -from glob import glob -from os.path import abspath -from os.path import join as pjoin -from subprocess import PIPE, Popen - -NEEDS_SHELL = os.name != 'nt' -PYTHON = sys.executable -HAVE_PUTENV = hasattr(os, 'putenv') - -PY_LIB_SDIR = 'pylib' - - -def back_tick(cmd, ret_err=False, as_str=True): - """Run command `cmd`, return stdout, or stdout, stderr if `ret_err` - - Roughly equivalent to ``check_output`` in Python 2.7 - - Parameters - ---------- - cmd : str - command to execute - ret_err : bool, optional - If True, return stderr in addition to stdout. If False, just return - stdout - as_str : bool, optional - Whether to decode outputs to unicode string on exit. - - Returns - ------- - out : str or tuple - If `ret_err` is False, return stripped string containing stdout from - `cmd`. If `ret_err` is True, return tuple of (stdout, stderr) where - ``stdout`` is the stripped stdout, and ``stderr`` is the stripped - stderr. - - Raises - ------ - RuntimeError - if command returns non-zero exit code. - """ - proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=NEEDS_SHELL) - out, err = proc.communicate() - retcode = proc.returncode - if retcode is None: - proc.terminate() - raise RuntimeError(cmd + ' process did not terminate') - if retcode != 0: - raise RuntimeError(cmd + ' process returned code %d' % retcode) - out = out.strip() - if as_str: - out = out.decode('latin-1') - if not ret_err: - return out - err = err.strip() - if as_str: - err = err.decode('latin-1') - return out, err - - -def run_mod_cmd(mod_name, pkg_path, cmd, script_dir=None, print_location=True): - """Run command in own process in anonymous path - - Parameters - ---------- - mod_name : str - Name of module to import - e.g. 'nibabel' - pkg_path : str - directory containing `mod_name` package. Typically that will be the - directory containing the e.g. 'nibabel' directory. - cmd : str - Python command to execute - script_dir : None or str, optional - script directory to prepend to PATH - print_location : bool, optional - Whether to print the location of the imported `mod_name` - - Returns - ------- - stdout : str - stdout as str - stderr : str - stderr as str - """ - if script_dir is None: - paths_add = '' - else: - if not HAVE_PUTENV: - raise RuntimeError('We cannot set environment variables') - # Need to add the python path for the scripts to pick up our package in - # their environment, because the scripts will get called via the shell - # (via `cmd`). Consider that PYTHONPATH may not be set. Because the - # command might run scripts via the shell, prepend script_dir to the - # system path also. 
- paths_add = r""" -os.environ['PATH'] = r'"{script_dir}"' + os.path.pathsep + os.environ['PATH'] -PYTHONPATH = os.environ.get('PYTHONPATH') -if PYTHONPATH is None: - os.environ['PYTHONPATH'] = r'"{pkg_path}"' -else: - os.environ['PYTHONPATH'] = r'"{pkg_path}"' + os.path.pathsep + PYTHONPATH -""".format( - **locals() - ) - if print_location: - p_loc = f'print({mod_name}.__file__);' - else: - p_loc = '' - cwd = os.getcwd() - tmpdir = tempfile.mkdtemp() - try: - os.chdir(tmpdir) - with open('script.py', 'wt') as fobj: - fobj.write( - r""" -import os -import sys -sys.path.insert(0, r"{pkg_path}") -{paths_add} -import {mod_name} -{p_loc} -{cmd}""".format( - **locals() - ) - ) - res = back_tick(f'{PYTHON} script.py', ret_err=True) - finally: - os.chdir(cwd) - shutil.rmtree(tmpdir) - return res - - -def zip_extract_all(fname, path=None): - """Extract all members from zipfile - - Deals with situation where the directory is stored in the zipfile as a name, - as well as files that have to go into this directory. - """ - zf = zipfile.ZipFile(fname) - members = zf.namelist() - # Remove members that are just bare directories - members = [m for m in members if not m.endswith('/')] - for zipinfo in members: - zf.extract(zipinfo, path, None) - - -def install_from_to(from_dir, to_dir, py_lib_sdir=PY_LIB_SDIR, bin_sdir='bin'): - """Install package in `from_dir` to standard location in `to_dir` - - Parameters - ---------- - from_dir : str - path containing files to install with ``python setup.py ...`` - to_dir : str - prefix path to which files will be installed, as in ``python setup.py - install --prefix=to_dir`` - py_lib_sdir : str, optional - subdirectory within `to_dir` to which library code will be installed - bin_sdir : str, optional - subdirectory within `to_dir` to which scripts will be installed - """ - site_pkgs_path = os.path.join(to_dir, py_lib_sdir) - py_lib_locs = f' --install-purelib={site_pkgs_path} ' f'--install-platlib={site_pkgs_path}' - pwd = os.path.abspath(os.getcwd()) - cmd = f'{PYTHON} setup.py --quiet install --prefix={to_dir} {py_lib_locs}' - try: - os.chdir(from_dir) - back_tick(cmd) - finally: - os.chdir(pwd) - - -def install_from_zip( - zip_fname, install_path, pkg_finder=None, py_lib_sdir=PY_LIB_SDIR, script_sdir='bin' -): - """Install package from zip file `zip_fname` - - Parameters - ---------- - zip_fname : str - filename of zip file containing package code - install_path : str - output prefix at which to install package - pkg_finder : None or callable, optional - If None, assume zip contains ``setup.py`` at the top level. Otherwise, - find directory containing ``setup.py`` with ``pth = - pkg_finder(unzip_path)`` where ``unzip_path`` is the path to which we - have unzipped the zip file contents. - py_lib_sdir : str, optional - subdirectory to which to write the library code from the package. Thus - if package called ``nibabel``, the written code will be in - ``//nibabel - script_sdir : str, optional - subdirectory to which we write the installed scripts. 
Thus scripts will - be written to ``/ - """ - unzip_path = tempfile.mkdtemp() - try: - # Zip may unpack module into current directory - zip_extract_all(zip_fname, unzip_path) - if pkg_finder is None: - from_path = unzip_path - else: - from_path = pkg_finder(unzip_path) - install_from_to(from_path, install_path, py_lib_sdir, script_sdir) - finally: - shutil.rmtree(unzip_path) - - -def contexts_print_info(mod_name, repo_path, install_path): - """Print result of get_info from different installation routes - - Runs installation from: - - * git archive zip file - * with setup.py install from repository directory - * just running code from repository directory - - and prints out result of get_info in each case. There will be many files - written into `install_path` that you may want to clean up somehow. - - Parameters - ---------- - mod_name : str - package name that will be installed, and tested - repo_path : str - path to location of git repository - install_path : str - path into which to install temporary installations - """ - site_pkgs_path = os.path.join(install_path, PY_LIB_SDIR) - # first test archive - pwd = os.path.abspath(os.getcwd()) - out_fname = pjoin(install_path, 'test.zip') - try: - os.chdir(repo_path) - back_tick(f'git archive --format zip -o {out_fname} HEAD') - finally: - os.chdir(pwd) - install_from_zip(out_fname, install_path, None) - cmd_str = f'print({mod_name}.get_info())' - print(run_mod_cmd(mod_name, site_pkgs_path, cmd_str)[0]) - # now test install into a directory from the repository - install_from_to(repo_path, install_path, PY_LIB_SDIR) - print(run_mod_cmd(mod_name, site_pkgs_path, cmd_str)[0]) - # test from development tree - print(run_mod_cmd(mod_name, repo_path, cmd_str)[0]) - - -def info_from_here(mod_name): - """Run info context checks starting in working directory - - Runs checks from current working directory, installing temporary - installations into a new temporary directory - - Parameters - ---------- - mod_name : str - package name that will be installed, and tested - """ - repo_path = os.path.abspath(os.getcwd()) - install_path = tempfile.mkdtemp() - try: - contexts_print_info(mod_name, repo_path, install_path) - finally: - shutil.rmtree(install_path) - - -def tests_installed(mod_name, source_path=None): - """Install from `source_path` into temporary directory; run tests - - Parameters - ---------- - mod_name : str - name of module - e.g. 'nibabel' - source_path : None or str - Path from which to install. If None, defaults to working directory - """ - if source_path is None: - source_path = os.path.abspath(os.getcwd()) - install_path = tempfile.mkdtemp() - site_pkgs_path = pjoin(install_path, PY_LIB_SDIR) - scripts_path = pjoin(install_path, 'bin') - try: - install_from_to(source_path, install_path, PY_LIB_SDIR, 'bin') - stdout, stderr = run_mod_cmd(mod_name, site_pkgs_path, mod_name + '.test()', scripts_path) - finally: - shutil.rmtree(install_path) - print(stdout) - print(stderr) - - -# Tell nose this is not a test -tests_installed.__test__ = False - - -def check_installed_files(repo_mod_path, install_mod_path): - """Check files in `repo_mod_path` are installed at `install_mod_path` - - At the moment, all this does is check that all the ``*.py`` files in - `repo_mod_path` are installed at `install_mod_path`. - - Parameters - ---------- - repo_mod_path : str - repository path containing package files, e.g. /nibabel> - install_mod_path : str - path at which package has been installed. This is the path where the - root package ``__init__.py`` lives. 
- - Return - ------ - uninstalled : list - list of files that should have been installed, but have not been - installed - """ - return missing_from(repo_mod_path, install_mod_path, filter=r'\.py$') - - -def missing_from(path0, path1, filter=None): - """Return filenames present in `path0` but not in `path1` - - Parameters - ---------- - path0 : str - path which contains all files of interest - path1 : str - path which should contain all files of interest - filter : None or str or regexp, optional - A successful result from ``filter.search(fname)`` means the file is of - interest. None means all files are of interest - - Returns - ------- - path1_missing : list - list of all files missing from `path1` that are in `path0` at the same - relative path. - """ - if not filter is None: - filter = re.compile(filter) - uninstalled = [] - # Walk directory tree to get py files - for dirpath, dirnames, filenames in os.walk(path0): - out_dirpath = dirpath.replace(path0, path1) - for fname in filenames: - if not filter is None and filter.search(fname) is None: - continue - equiv_fname = os.path.join(out_dirpath, fname) - if not os.path.isfile(equiv_fname): - uninstalled.append(pjoin(dirpath, fname)) - return uninstalled - - -def check_files(mod_name, repo_path=None, scripts_sdir='bin'): - """Print library and script files not picked up during install""" - if repo_path is None: - repo_path = abspath(os.getcwd()) - install_path = tempfile.mkdtemp() - repo_mod_path = pjoin(repo_path, mod_name) - installed_mod_path = pjoin(install_path, PY_LIB_SDIR, mod_name) - repo_bin = pjoin(repo_path, 'bin') - installed_bin = pjoin(install_path, 'bin') - try: - zip_fname = make_dist(repo_path, install_path, 'sdist --formats=zip', '*.zip') - pf = get_sdist_finder(mod_name) - install_from_zip(zip_fname, install_path, pf, PY_LIB_SDIR, scripts_sdir) - lib_misses = missing_from(repo_mod_path, installed_mod_path, r'\.py$') - script_misses = missing_from(repo_bin, installed_bin) - finally: - shutil.rmtree(install_path) - if lib_misses: - print('Missed library files: ', ', '.join(lib_misses)) - else: - print('You got all the library files') - if script_misses: - print('Missed script files: ', ', '.join(script_misses)) - else: - print('You got all the script files') - return len(lib_misses) > 0 or len(script_misses) > 0 - - -def get_sdist_finder(mod_name): - """Return function finding sdist source directory for `mod_name`""" - - def pf(pth): - pkg_dirs = glob(pjoin(pth, mod_name + '-*')) - if len(pkg_dirs) != 1: - raise OSError('There must be one and only one package dir') - return pkg_dirs[0] - - return pf - - -def sdist_tests(mod_name, repo_path=None, label='fast', doctests=True): - """Make sdist zip, install from it, and run tests""" - if repo_path is None: - repo_path = abspath(os.getcwd()) - install_path = tempfile.mkdtemp() - try: - zip_fname = make_dist(repo_path, install_path, 'sdist --formats=zip', '*.zip') - pf = get_sdist_finder(mod_name) - install_from_zip(zip_fname, install_path, pf, PY_LIB_SDIR, 'bin') - site_pkgs_path = pjoin(install_path, PY_LIB_SDIR) - script_path = pjoin(install_path, 'bin') - cmd = f"{mod_name}.test(label='{label}', doctests={doctests})" - stdout, stderr = run_mod_cmd(mod_name, site_pkgs_path, cmd, script_path) - finally: - shutil.rmtree(install_path) - print(stdout) - print(stderr) - - -sdist_tests.__test__ = False - - -def bdist_egg_tests(mod_name, repo_path=None, label='fast', doctests=True): - """Make bdist_egg, unzip it, and run tests from result - - We've got a problem here, because the 
egg does not contain the scripts, and - so, if we are testing the scripts with ``mod.test()``, we won't pick up the - scripts from the repository we are testing. - - So, you might need to add a label to the script tests, and use the `label` - parameter to indicate these should be skipped. As in: - - bdist_egg_tests('nibabel', None, label='not script_test') - """ - if repo_path is None: - repo_path = abspath(os.getcwd()) - install_path = tempfile.mkdtemp() - scripts_path = pjoin(install_path, 'bin') - try: - zip_fname = make_dist(repo_path, install_path, 'bdist_egg', '*.egg') - zip_extract_all(zip_fname, install_path) - cmd = f"{mod_name}.test(label='{label}', doctests={doctests})" - stdout, stderr = run_mod_cmd(mod_name, install_path, cmd, scripts_path) - finally: - shutil.rmtree(install_path) - print(stdout) - print(stderr) - - -bdist_egg_tests.__test__ = False - - -def make_dist(repo_path, out_dir, setup_params, zipglob): - """Create distutils distribution file - - Parameters - ---------- - repo_path : str - path to repository containing code and ``setup.py`` - out_dir : str - path to which to write new distribution file - setup_params: str - parameters to pass to ``setup.py`` to create distribution. - zipglob : str - glob identifying expected output file. - - Returns - ------- - out_fname : str - filename of generated distribution file - - Examples - -------- - Make, return a zipped sdist:: - - make_dist('/path/to/repo', '/tmp/path', 'sdist --formats=zip', '*.zip') - - Make, return a binary egg:: - - make_dist('/path/to/repo', '/tmp/path', 'bdist_egg', '*.egg') - """ - pwd = os.path.abspath(os.getcwd()) - try: - os.chdir(repo_path) - back_tick(f'{PYTHON} setup.py {setup_params} --dist-dir={out_dir}') - zips = glob(pjoin(out_dir, zipglob)) - if len(zips) != 1: - raise OSError( - f'There must be one and only one {zipglob} ' - f"file, but I found \"{': '.join(zips)}\"" - ) - finally: - os.chdir(pwd) - return zips[0] diff --git a/nisext/tests/__init__.py b/nisext/tests/__init__.py deleted file mode 100644 index af7d1d1dd2..0000000000 --- a/nisext/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Tests for nisext package diff --git a/nisext/tests/test_sexts.py b/nisext/tests/test_sexts.py deleted file mode 100644 index f262ec5685..0000000000 --- a/nisext/tests/test_sexts.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Tests for nisexts.sexts module -""" - -import sys -import types - -import pytest - -from ..sexts import package_check - -FAKE_NAME = 'nisext_improbable' -assert FAKE_NAME not in sys.modules -FAKE_MODULE = types.ModuleType('nisext_fake') - - -def test_package_check(): - # Try to use a required package - raise error - with pytest.raises(RuntimeError): - package_check(FAKE_NAME) - # Optional, log.warn - package_check(FAKE_NAME, optional=True) - # Can also pass a string - package_check(FAKE_NAME, optional='some-package') - try: - # Make a package - sys.modules[FAKE_NAME] = FAKE_MODULE - # Now it passes if we don't check the version - package_check(FAKE_NAME) - # A fake version - FAKE_MODULE.__version__ = '0.2' - package_check(FAKE_NAME, version='0.2') - # fails when version not good enough - with pytest.raises(RuntimeError): - package_check(FAKE_NAME, '0.3') - # Unless optional in which case log.warns - package_check(FAKE_NAME, version='0.3', optional=True) - # Might do custom version check - package_check(FAKE_NAME, version='0.2', version_getter=lambda x: '0.2') - finally: - del sys.modules[FAKE_NAME] - - -def test_package_check_setuptools(): - # If setuptools arg not None, missing package 
just adds it to arg - with pytest.raises(RuntimeError): - package_check(FAKE_NAME, setuptools_args=None) - - def pkg_chk_sta(*args, **kwargs): - st_args = {} - package_check(*args, setuptools_args=st_args, **kwargs) - return st_args - - assert pkg_chk_sta(FAKE_NAME) == {'install_requires': ['nisext_improbable']} - # Check that this gets appended to existing value - old_sta = {'install_requires': ['something']} - package_check(FAKE_NAME, setuptools_args=old_sta) - assert old_sta == {'install_requires': ['something', 'nisext_improbable']} - # That existing value as string gets converted to a list - old_sta = {'install_requires': 'something'} - package_check(FAKE_NAME, setuptools_args=old_sta) - assert old_sta == {'install_requires': ['something', 'nisext_improbable']} - # Optional, add to extras_require - assert pkg_chk_sta(FAKE_NAME, optional='something') == { - 'extras_require': {'something': ['nisext_improbable']} - } - # Check that this gets appended to existing value - old_sta = {'extras_require': {'something': ['amodule']}} - package_check(FAKE_NAME, optional='something', setuptools_args=old_sta) - assert old_sta == {'extras_require': {'something': ['amodule', 'nisext_improbable']}} - # That string gets converted to a list here too - old_sta = {'extras_require': {'something': 'amodule'}} - package_check(FAKE_NAME, optional='something', setuptools_args=old_sta) - assert old_sta == {'extras_require': {'something': ['amodule', 'nisext_improbable']}} - # But optional has to be a string if not empty and setuptools_args defined - with pytest.raises(RuntimeError): - package_check(FAKE_NAME, optional=True, setuptools_args={}) - try: - # Make a package - sys.modules[FAKE_NAME] = FAKE_MODULE - # No install_requires because we already have it - assert pkg_chk_sta(FAKE_NAME) == {} - # A fake version still works - FAKE_MODULE.__version__ = '0.2' - assert pkg_chk_sta(FAKE_NAME, version='0.2') == {} - # goes into install requires when version not good enough - exp_spec = [FAKE_NAME + '>=0.3'] - assert pkg_chk_sta(FAKE_NAME, version='0.3') == {'install_requires': exp_spec} - # Unless optional in which case goes into extras_require - package_check(FAKE_NAME, version='0.2', version_getter=lambda x: '0.2') - assert pkg_chk_sta(FAKE_NAME, version='0.3', optional='afeature') == { - 'extras_require': {'afeature': exp_spec} - } - # Might do custom version check - assert pkg_chk_sta(FAKE_NAME, version='0.2', version_getter=lambda x: '0.2') == {} - # If the version check fails, put into requires - bad_getter = lambda x: x.not_an_attribute - exp_spec = [FAKE_NAME + '>=0.2'] - assert pkg_chk_sta(FAKE_NAME, version='0.2', version_getter=bad_getter) == { - 'install_requires': exp_spec - } - # Likewise for optional dependency - assert pkg_chk_sta( - FAKE_NAME, version='0.2', optional='afeature', version_getter=bad_getter - ) == {'extras_require': {'afeature': [FAKE_NAME + '>=0.2']}} - finally: - del sys.modules[FAKE_NAME] diff --git a/nisext/tests/test_testers.py b/nisext/tests/test_testers.py deleted file mode 100644 index f81a40f1df..0000000000 --- a/nisext/tests/test_testers.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Tests for testers -""" - -import os -from os.path import dirname, pathsep - -import pytest - -from ..testers import PYTHON, back_tick, run_mod_cmd - - -def test_back_tick(): - cmd = f'{PYTHON} -c "print(\'Hello\')"' - assert back_tick(cmd) == 'Hello' - assert back_tick(cmd, ret_err=True) == ('Hello', '') - assert back_tick(cmd, True, False) == (b'Hello', b'') - cmd = f'{PYTHON} -c "raise ValueError()"' - 
with pytest.raises(RuntimeError): - back_tick(cmd) - - -def test_run_mod_cmd(): - mod = 'os' - mod_dir = dirname(os.__file__) - assert run_mod_cmd(mod, mod_dir, "print('Hello')", None, False) == ('Hello', '') - sout, serr = run_mod_cmd(mod, mod_dir, "print('Hello again')") - assert serr == '' - mod_file, out_str = [s.strip() for s in sout.split('\n')] - assert mod_file.startswith(mod_dir) - assert out_str == 'Hello again' - sout, serr = run_mod_cmd(mod, mod_dir, "print(os.environ['PATH'])", None, False) - assert serr == '' - sout2, serr = run_mod_cmd(mod, mod_dir, "print(os.environ['PATH'])", 'pth2', False) - assert serr == '' - assert sout2 == '"pth2"' + pathsep + sout diff --git a/pyproject.toml b/pyproject.toml index 14095b8f22..e92c465e0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,7 @@ exclude = [ ] [tool.hatch.build.targets.wheel] -packages = ["nibabel", "nisext"] +packages = ["nibabel"] exclude = [ # 56MB test file does not need to be installed everywhere "nibabel/nicom/tests/data/4d_multiframe_test.dcm", From e3ffb71891c616deebfe28bfe1f45dc67bb361ce Mon Sep 17 00:00:00 2001 From: Serge Koudoro Date: Wed, 17 Jan 2024 14:39:37 -0500 Subject: [PATCH 012/203] allow inhomogeneous array --- nibabel/streamlines/tests/test_tractogram.py | 44 ++++++++++++++------ nibabel/streamlines/tractogram.py | 20 +++++++-- 2 files changed, 48 insertions(+), 16 deletions(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 30294be438..09e3b910be 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -80,6 +80,7 @@ def make_dummy_streamline(nb_points): 'mean_curvature': np.array([1.11], dtype='f4'), 'mean_torsion': np.array([1.22], dtype='f4'), 'mean_colors': np.array([1, 0, 0], dtype='f4'), + 'clusters_labels': np.array([0, 1], dtype='i4'), } elif nb_points == 2: @@ -92,6 +93,7 @@ def make_dummy_streamline(nb_points): 'mean_curvature': np.array([2.11], dtype='f4'), 'mean_torsion': np.array([2.22], dtype='f4'), 'mean_colors': np.array([0, 1, 0], dtype='f4'), + 'clusters_labels': np.array([2, 3, 4], dtype='i4'), } elif nb_points == 5: @@ -104,6 +106,7 @@ def make_dummy_streamline(nb_points): 'mean_curvature': np.array([3.11], dtype='f4'), 'mean_torsion': np.array([3.22], dtype='f4'), 'mean_colors': np.array([0, 0, 1], dtype='f4'), + 'clusters_labels': np.array([5, 6, 7, 8], dtype='i4'), } return streamline, data_per_point, data_for_streamline @@ -119,6 +122,7 @@ def setup_module(): DATA['mean_curvature'] = [] DATA['mean_torsion'] = [] DATA['mean_colors'] = [] + DATA['clusters_labels'] = [] for nb_points in [1, 2, 5]: data = make_dummy_streamline(nb_points) streamline, data_per_point, data_for_streamline = data @@ -128,12 +132,14 @@ def setup_module(): DATA['mean_curvature'].append(data_for_streamline['mean_curvature']) DATA['mean_torsion'].append(data_for_streamline['mean_torsion']) DATA['mean_colors'].append(data_for_streamline['mean_colors']) + DATA['clusters_labels'].append(data_for_streamline['clusters_labels']) DATA['data_per_point'] = {'colors': DATA['colors'], 'fa': DATA['fa']} DATA['data_per_streamline'] = { 'mean_curvature': DATA['mean_curvature'], 'mean_torsion': DATA['mean_torsion'], 'mean_colors': DATA['mean_colors'], + 'clusters_labels': DATA['clusters_labels'], } DATA['empty_tractogram'] = Tractogram(affine_to_rasmm=np.eye(4)) @@ -154,6 +160,7 @@ def setup_module(): 'mean_curvature': lambda: (e for e in DATA['mean_curvature']), 'mean_torsion': lambda: (e for 
e in DATA['mean_torsion']), 'mean_colors': lambda: (e for e in DATA['mean_colors']), + 'clusters_labels': lambda: (e for e in DATA['clusters_labels']), } DATA['lazy_tractogram'] = LazyTractogram( @@ -214,7 +221,10 @@ def test_per_array_dict_creation(self): data_dict = PerArrayDict(nb_streamlines, data_per_streamline) assert data_dict.keys() == data_per_streamline.keys() for k in data_dict.keys(): - assert_array_equal(data_dict[k], data_per_streamline[k]) + if isinstance(data_dict[k], np.ndarray) and np.all( + data_dict[k].shape[0] == data_dict[k].shape + ): + assert_array_equal(data_dict[k], data_per_streamline[k]) del data_dict['mean_curvature'] assert len(data_dict) == len(data_per_streamline) - 1 @@ -224,7 +234,10 @@ def test_per_array_dict_creation(self): data_dict = PerArrayDict(nb_streamlines, data_per_streamline) assert data_dict.keys() == data_per_streamline.keys() for k in data_dict.keys(): - assert_array_equal(data_dict[k], data_per_streamline[k]) + if isinstance(data_dict[k], np.ndarray) and np.all( + data_dict[k].shape[0] == data_dict[k].shape + ): + assert_array_equal(data_dict[k], data_per_streamline[k]) del data_dict['mean_curvature'] assert len(data_dict) == len(data_per_streamline) - 1 @@ -234,7 +247,10 @@ def test_per_array_dict_creation(self): data_dict = PerArrayDict(nb_streamlines, **data_per_streamline) assert data_dict.keys() == data_per_streamline.keys() for k in data_dict.keys(): - assert_array_equal(data_dict[k], data_per_streamline[k]) + if isinstance(data_dict[k], np.ndarray) and np.all( + data_dict[k].shape[0] == data_dict[k].shape + ): + assert_array_equal(data_dict[k], data_per_streamline[k]) del data_dict['mean_curvature'] assert len(data_dict) == len(data_per_streamline) - 1 @@ -261,6 +277,7 @@ def test_extend(self): 'mean_curvature': 2 * np.array(DATA['mean_curvature']), 'mean_torsion': 3 * np.array(DATA['mean_torsion']), 'mean_colors': 4 * np.array(DATA['mean_colors']), + 'clusters_labels': 5 * np.array(DATA['clusters_labels'], dtype=object), } sdict2 = PerArrayDict(len(DATA['tractogram']), new_data) @@ -284,7 +301,8 @@ def test_extend(self): 'mean_curvature': 2 * np.array(DATA['mean_curvature']), 'mean_torsion': 3 * np.array(DATA['mean_torsion']), 'mean_colors': 4 * np.array(DATA['mean_colors']), - 'other': 5 * np.array(DATA['mean_colors']), + 'clusters_labels': 5 * np.array(DATA['clusters_labels'], dtype=object), + 'other': 6 * np.array(DATA['mean_colors']), } sdict2 = PerArrayDict(len(DATA['tractogram']), new_data) @@ -305,6 +323,7 @@ def test_extend(self): 'mean_curvature': 2 * np.array(DATA['mean_curvature']), 'mean_torsion': 3 * np.array(DATA['mean_torsion']), 'mean_colors': 4 * np.array(DATA['mean_torsion']), + 'clusters_labels': 5 * np.array(DATA['clusters_labels'], dtype=object), } sdict2 = PerArrayDict(len(DATA['tractogram']), new_data) with pytest.raises(ValueError): @@ -441,7 +460,10 @@ def test_lazydict_creation(self): assert is_lazy_dict(data_dict) assert data_dict.keys() == expected_keys for k in data_dict.keys(): - assert_array_equal(list(data_dict[k]), list(DATA['data_per_streamline'][k])) + if isinstance(data_dict[k], np.ndarray) and np.all( + data_dict[k].shape[0] == data_dict[k].shape + ): + assert_array_equal(list(data_dict[k]), list(DATA['data_per_streamline'][k])) assert len(data_dict) == len(DATA['data_per_streamline_func']) @@ -578,6 +600,7 @@ def test_tractogram_add_new_data(self): t.data_per_streamline['mean_curvature'] = DATA['mean_curvature'] t.data_per_streamline['mean_torsion'] = DATA['mean_torsion'] 
t.data_per_streamline['mean_colors'] = DATA['mean_colors'] + t.data_per_streamline['clusters_labels'] = DATA['clusters_labels'] assert_tractogram_equal(t, DATA['tractogram']) # Retrieve tractogram by their index. @@ -598,6 +621,7 @@ def test_tractogram_add_new_data(self): t.data_per_streamline['mean_curvature'] = DATA['mean_curvature'] t.data_per_streamline['mean_torsion'] = DATA['mean_torsion'] t.data_per_streamline['mean_colors'] = DATA['mean_colors'] + t.data_per_streamline['clusters_labels'] = DATA['clusters_labels'] assert_tractogram_equal(t, DATA['tractogram']) def test_tractogram_copy(self): @@ -647,14 +671,6 @@ def test_creating_invalid_tractogram(self): with pytest.raises(ValueError): Tractogram(streamlines=DATA['streamlines'], data_per_point={'scalars': scalars}) - # Inconsistent dimension for a data_per_streamline. - properties = [[1.11, 1.22], [2.11], [3.11, 3.22]] - - with pytest.raises(ValueError): - Tractogram( - streamlines=DATA['streamlines'], data_per_streamline={'properties': properties} - ) - # Too many dimension for a data_per_streamline. properties = [ np.array([[1.11], [1.22]], dtype='f4'), @@ -870,6 +886,7 @@ def test_lazy_tractogram_from_data_func(self): DATA['mean_curvature'], DATA['mean_torsion'], DATA['mean_colors'], + DATA['clusters_labels'], ] def _data_gen(): @@ -879,6 +896,7 @@ def _data_gen(): 'mean_curvature': d[3], 'mean_torsion': d[4], 'mean_colors': d[5], + 'clusters_labels': d[6], } yield TractogramItem(d[0], data_for_streamline, data_for_points) diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index 9e7c0f9af2..5a39b415a6 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -1,6 +1,7 @@ import copy import numbers -from collections.abc import MutableMapping +import types +from collections.abc import Iterable, MutableMapping from warnings import warn import numpy as np @@ -101,15 +102,28 @@ def __init__(self, n_rows=0, *args, **kwargs): super().__init__(*args, **kwargs) def __setitem__(self, key, value): - value = np.asarray(list(value)) + dtype = np.float64 + + if isinstance(value, types.GeneratorType): + value = list(value) + + if isinstance(value, np.ndarray): + dtype = value.dtype + elif not all(len(v) == len(value[0]) for v in value[1:]): + dtype = object + + value = np.asarray(value, dtype=dtype) if value.ndim == 1 and value.dtype != object: # Reshape without copy value.shape = (len(value), 1) - if value.ndim != 2: + if value.ndim != 2 and value.dtype != object: raise ValueError('data_per_streamline must be a 2D array.') + if value.dtype == object and not all(isinstance(v, Iterable) for v in value): + raise ValueError('data_per_streamline must be a 2D array') + # We make sure there is the right amount of values if 0 < self.n_rows != len(value): msg = f'The number of values ({len(value)}) should match n_elements ({self.n_rows}).' 
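The change above lets `data_per_streamline` carry ragged values (one variable-length row per streamline) by falling back to an object-dtype array instead of raising. Below is a minimal sketch of the new behavior, assuming a nibabel build that includes this patch; the `clusters_labels` key simply mirrors the ragged test data added above and is otherwise arbitrary.

    import numpy as np

    from nibabel.streamlines.tractogram import Tractogram

    # Two streamlines of different lengths
    streamlines = [
        np.array([[0.0, 0.0, 0.0]], dtype='f4'),
        np.array([[1.0, 0.0, 0.0], [1.0, 1.0, 0.0]], dtype='f4'),
    ]

    # Homogeneous per-streamline data: still coerced to a 2D array
    mean_curvature = [np.array([1.11], dtype='f4'), np.array([2.11], dtype='f4')]

    # Ragged per-streamline data: previously a ValueError, now stored as a
    # 1D object array with one variable-length entry per streamline
    clusters_labels = [np.array([0, 1], dtype='i4'), np.array([2, 3, 4], dtype='i4')]

    t = Tractogram(
        streamlines=streamlines,
        data_per_streamline={
            'mean_curvature': mean_curvature,
            'clusters_labels': clusters_labels,
        },
        affine_to_rasmm=np.eye(4),
    )

    print(t.data_per_streamline['mean_curvature'].shape)   # (2, 1)
    print(t.data_per_streamline['clusters_labels'].dtype)  # object

Falling back to object dtype keeps homogeneous inputs on the fast, validated 2D path, while only ragged inputs pay the cost of per-row Python objects; the "inconsistent dimension" test case removed above is exactly the input this now permits.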
From 6919b539401541fc5935e83610a0579e690ba79f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 22 Feb 2024 21:35:21 -0500 Subject: [PATCH 013/203] TEST: Accommodate pytest 8 changes --- nibabel/testing/__init__.py | 12 ++++++ nibabel/tests/test_image_api.py | 56 +++++++++------------------ nibabel/tests/test_image_load_save.py | 4 +- nibabel/tests/test_loadsave.py | 26 ++++++------- nibabel/tests/test_onetime.py | 4 +- nibabel/tests/test_orientations.py | 4 +- nibabel/tests/test_spatialimages.py | 12 +++--- 7 files changed, 56 insertions(+), 62 deletions(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 5baa5e2b86..21ecadf841 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -233,3 +233,15 @@ def expires(version): return lambda x: x return pytest.mark.xfail(raises=ExpiredDeprecationError) + + +def deprecated_to(version): + """Context manager to expect DeprecationWarnings until a given version""" + from packaging.version import Version + + from nibabel import __version__ as nbver + + if Version(nbver) < Version(version): + return pytest.deprecated_call() + + return nullcontext() diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index f1fc720716..86c04985f8 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -48,6 +48,7 @@ bytesio_filemap, bytesio_round_trip, clear_and_catch_warnings, + deprecated_to, expires, nullcontext, ) @@ -80,10 +81,6 @@ from .test_parrec import EXAMPLE_IMAGES as PARREC_EXAMPLE_IMAGES -def maybe_deprecated(meth_name): - return pytest.deprecated_call() if meth_name == 'get_data' else nullcontext() - - class GenericImageAPI(ValidateAPI): """General image validation API""" @@ -194,7 +191,7 @@ def validate_no_slicing(self, imaker, params): @expires('5.0.0') def validate_get_data_deprecated(self, imaker, params): img = imaker() - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): data = img.get_data() assert_array_equal(np.asanyarray(img.dataobj), data) @@ -246,14 +243,12 @@ def validate_data_interface(self, imaker, params): self._check_array_interface(imaker, meth_name) method = getattr(img, meth_name) # Data shape is same as image shape - with maybe_deprecated(meth_name): - assert img.shape == method().shape + assert img.shape == method().shape # Data ndim is same as image ndim - with maybe_deprecated(meth_name): - assert img.ndim == method().ndim + assert img.ndim == method().ndim # Values to get_data caching parameter must be 'fill' or # 'unchanged' - with maybe_deprecated(meth_name), pytest.raises(ValueError): + with pytest.raises(ValueError): method(caching='something') # dataobj is read only fake_data = np.zeros(img.shape, dtype=img.get_data_dtype()) @@ -277,13 +272,11 @@ def _check_proxy_interface(self, imaker, meth_name): assert not img.in_memory # Load with caching='unchanged' method = getattr(img, meth_name) - with maybe_deprecated(meth_name): - data = method(caching='unchanged') + data = method(caching='unchanged') # Still not cached assert not img.in_memory # Default load, does caching - with maybe_deprecated(meth_name): - data = method() + data = method() # Data now cached. in_memory is True if either of the get_data # or get_fdata caches are not-None assert img.in_memory @@ -295,36 +288,30 @@ def _check_proxy_interface(self, imaker, meth_name): # integers, but lets assume that's not true here. 
assert_array_equal(proxy_data, data) # Now caching='unchanged' does nothing, returns cached version - with maybe_deprecated(meth_name): - data_again = method(caching='unchanged') + data_again = method(caching='unchanged') assert data is data_again # caching='fill' does nothing because the cache is already full - with maybe_deprecated(meth_name): - data_yet_again = method(caching='fill') + data_yet_again = method(caching='fill') assert data is data_yet_again # changing array data does not change proxy data, or reloaded # data data[:] = 42 assert_array_equal(proxy_data, proxy_copy) assert_array_equal(np.asarray(img.dataobj), proxy_copy) - # It does change the result of get_data - with maybe_deprecated(meth_name): - assert_array_equal(method(), 42) + # It does change the result of get_fdata + assert_array_equal(method(), 42) # until we uncache img.uncache() # Which unsets in_memory assert not img.in_memory - with maybe_deprecated(meth_name): - assert_array_equal(method(), proxy_copy) + assert_array_equal(method(), proxy_copy) # Check caching='fill' does cache data img = imaker() method = getattr(img, meth_name) assert not img.in_memory - with maybe_deprecated(meth_name): - data = method(caching='fill') + data = method(caching='fill') assert img.in_memory - with maybe_deprecated(meth_name): - data_again = method() + data_again = method() assert data is data_again # Check that caching refreshes for new floating point type. img.uncache() @@ -368,8 +355,7 @@ def _check_array_caching(self, imaker, meth_name, caching): get_data_func = method if caching is None else partial(method, caching=caching) assert isinstance(img.dataobj, np.ndarray) assert img.in_memory - with maybe_deprecated(meth_name): - data = get_data_func() + data = get_data_func() # Returned data same object as underlying dataobj if using # old ``get_data`` method, or using newer ``get_fdata`` # method, where original array was float64. @@ -377,8 +363,7 @@ def _check_array_caching(self, imaker, meth_name, caching): dataobj_is_data = arr_dtype == np.float64 or method == img.get_data # Set something to the output array. data[:] = 42 - with maybe_deprecated(meth_name): - get_result_changed = np.all(get_data_func() == 42) + get_result_changed = np.all(get_data_func() == 42) assert get_result_changed == (dataobj_is_data or caching != 'unchanged') if dataobj_is_data: assert data is img.dataobj @@ -387,15 +372,13 @@ def _check_array_caching(self, imaker, meth_name, caching): assert_array_equal(np.asarray(img.dataobj), 42) # Uncache has no effect img.uncache() - with maybe_deprecated(meth_name): - assert_array_equal(get_data_func(), 42) + assert_array_equal(get_data_func(), 42) else: assert not data is img.dataobj assert not np.all(np.asarray(img.dataobj) == 42) # Uncache does have an effect img.uncache() - with maybe_deprecated(meth_name): - assert not np.all(get_data_func() == 42) + assert not np.all(get_data_func() == 42) # in_memory is always true for array images, regardless of # cache state. 
img.uncache() @@ -408,8 +391,7 @@ def _check_array_caching(self, imaker, meth_name, caching): if arr_dtype not in float_types: return for float_type in float_types: - with maybe_deprecated(meth_name): - data = get_data_func(dtype=float_type) + data = get_data_func(dtype=float_type) assert (data is img.dataobj) == (arr_dtype == float_type) def validate_shape(self, imaker, params): diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 962a2433bf..706a87f10f 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -40,7 +40,7 @@ from .. import spm99analyze as spm99 from ..optpkg import optional_package from ..spatialimages import SpatialImage -from ..testing import expires +from ..testing import deprecated_to, expires from ..tmpdirs import InTemporaryDirectory from ..volumeutils import native_code, swapped_code @@ -285,7 +285,7 @@ def test_filename_save(): @expires('5.0.0') def test_guessed_image_type(): # Test whether we can guess the image type from example files - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert nils.guessed_image_type(pjoin(DATA_PATH, 'example4d.nii.gz')) == Nifti1Image assert nils.guessed_image_type(pjoin(DATA_PATH, 'nifti1.hdr')) == Nifti1Pair assert nils.guessed_image_type(pjoin(DATA_PATH, 'example_nifti2.nii.gz')) == Nifti2Image diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 4071b09f72..401ed04535 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -21,7 +21,7 @@ from ..loadsave import _signature_matches_extension, load, read_img_data from ..openers import Opener from ..optpkg import optional_package -from ..testing import expires +from ..testing import deprecated_to, expires from ..tmpdirs import InTemporaryDirectory _, have_scipy, _ = optional_package('scipy') @@ -50,14 +50,14 @@ def test_read_img_data(): fpath = pathlib.Path(fpath) img = load(fpath) data = img.get_fdata() - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): data2 = read_img_data(img) assert_array_equal(data, data2) # These examples have null scaling - assert prefer=unscaled is the same dao = img.dataobj if hasattr(dao, 'slope') and hasattr(img.header, 'raw_data_from_fileobj'): assert (dao.slope, dao.inter) == (1, 0) - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(read_img_data(img, prefer='unscaled'), data) # Assert all caps filename works as well with TemporaryDirectory() as tmpdir: @@ -140,21 +140,21 @@ def test_read_img_data_nifti(): img = img_class(data, np.eye(4)) img.set_data_dtype(out_dtype) # No filemap => error - with pytest.deprecated_call(), pytest.raises(ImageFileError): + with deprecated_to('5.0.0'), pytest.raises(ImageFileError): read_img_data(img) # Make a filemap froot = f'an_image_{i}' img.file_map = img.filespec_to_file_map(froot) # Trying to read from this filemap will generate an error because # we are going to read from files that do not exist - with pytest.deprecated_call(), pytest.raises(OSError): + with deprecated_to('5.0.0'), pytest.raises(OSError): read_img_data(img) img.to_file_map() # Load - now the scaling and offset correctly applied img_fname = img.file_map['image'].filename img_back = load(img_fname) data_back = img_back.get_fdata() - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(data_back, read_img_data(img_back)) # This is the same as if we loaded the image and header separately hdr_fname = 
img.file_map['header'].filename if 'header' in img.file_map else img_fname @@ -166,16 +166,16 @@ def test_read_img_data_nifti(): # Unscaled is the same as returned from raw_data_from_fileobj with open(img_fname, 'rb') as fobj: unscaled_back = hdr_back.raw_data_from_fileobj(fobj) - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(unscaled_back, read_img_data(img_back, prefer='unscaled')) # If we futz with the scaling in the header, the result changes - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(data_back, read_img_data(img_back)) has_inter = hdr_back.has_data_intercept old_slope = hdr_back['scl_slope'] old_inter = hdr_back['scl_inter'] if has_inter else 0 est_unscaled = (data_back - old_inter) / old_slope - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): actual_unscaled = read_img_data(img_back, prefer='unscaled') assert_almost_equal(est_unscaled, actual_unscaled) img_back.header['scl_slope'] = 2.1 @@ -185,10 +185,10 @@ def test_read_img_data_nifti(): else: new_inter = 0 # scaled scaling comes from new parameters in header - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert np.allclose(actual_unscaled * 2.1 + new_inter, read_img_data(img_back)) # Unscaled array didn't change - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(actual_unscaled, read_img_data(img_back, prefer='unscaled')) # Check the offset too img.header.set_data_offset(1024) @@ -200,14 +200,14 @@ def test_read_img_data_nifti(): fobj.write(b'\x00\x00') img_back = load(img_fname) data_back = img_back.get_fdata() - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(data_back, read_img_data(img_back)) img_back.header.set_data_offset(1026) # Check we pick up new offset exp_offset = np.zeros((data.size,), data.dtype) + old_inter exp_offset[:-1] = np.ravel(data_back, order='F')[1:] exp_offset = np.reshape(exp_offset, shape, order='F') - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert_array_equal(exp_offset, read_img_data(img_back)) # Delete stuff that might hold onto file references del img, img_back, data_back diff --git a/nibabel/tests/test_onetime.py b/nibabel/tests/test_onetime.py index 426702fa43..b22a4ef3ec 100644 --- a/nibabel/tests/test_onetime.py +++ b/nibabel/tests/test_onetime.py @@ -1,12 +1,12 @@ import pytest from nibabel.onetime import auto_attr, setattr_on_read -from nibabel.testing import expires +from nibabel.testing import deprecated_to, expires @expires('5.0.0') def test_setattr_on_read(): - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): class MagicProp: @setattr_on_read diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 8821fac0e0..0094711e79 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -26,7 +26,7 @@ ornt2axcodes, ornt_transform, ) -from ..testing import expires +from ..testing import deprecated_to, expires IN_ARRS = [ np.eye(4), @@ -407,6 +407,6 @@ def test_inv_ornt_aff(): def test_flip_axis_deprecation(): a = np.arange(24).reshape((2, 3, 4)) axis = 1 - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): a_flipped = flip_axis(a, axis) assert_array_equal(a_flipped, np.flip(a, axis)) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 5cad23a22f..7157d5c459 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -18,7 +18,7 @@ from .. 
import load as top_load from ..imageclasses import spatial_axes_first from ..spatialimages import HeaderDataError, SpatialHeader, SpatialImage -from ..testing import bytesio_round_trip, expires, memmap_after_ufunc +from ..testing import bytesio_round_trip, deprecated_to, expires, memmap_after_ufunc from ..tmpdirs import InTemporaryDirectory @@ -368,7 +368,7 @@ def test_get_data(self): in_data = in_data_template.copy() img = img_klass(in_data, None) assert in_data is img.dataobj - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): out_data = img.get_data() assert in_data is out_data # and that uncache has no effect @@ -381,18 +381,18 @@ def test_get_data(self): rt_img = bytesio_round_trip(img) assert in_data is not rt_img.dataobj assert (rt_img.dataobj == in_data).all() - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): out_data = rt_img.get_data() assert (out_data == in_data).all() assert rt_img.dataobj is not out_data # cache - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert rt_img.get_data() is out_data out_data[:] = 42 rt_img.uncache() - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert rt_img.get_data() is not out_data - with pytest.deprecated_call(): + with deprecated_to('5.0.0'): assert (rt_img.get_data() == in_data).all() def test_slicer(self): From 511ca0b4e53e1b51c5dc24c6226739862183f559 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 22 Feb 2024 21:37:41 -0500 Subject: [PATCH 014/203] TYP: Update ignore comments --- nibabel/_compression.py | 2 +- nibabel/benchmarks/bench_arrayproxy_slicing.py | 2 +- nibabel/cmdline/dicomfs.py | 2 +- nibabel/minc2.py | 2 +- nibabel/spm99analyze.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/nibabel/_compression.py b/nibabel/_compression.py index 75a5e3bbf4..b7cfc8f49f 100644 --- a/nibabel/_compression.py +++ b/nibabel/_compression.py @@ -17,7 +17,7 @@ from .optpkg import optional_package if ty.TYPE_CHECKING: # pragma: no cover - import indexed_gzip # type: ignore[import-not-found] + import indexed_gzip # type: ignore[import] import pyzstd HAVE_INDEXED_GZIP = True diff --git a/nibabel/benchmarks/bench_arrayproxy_slicing.py b/nibabel/benchmarks/bench_arrayproxy_slicing.py index dc9acfdedd..305c5215e4 100644 --- a/nibabel/benchmarks/bench_arrayproxy_slicing.py +++ b/nibabel/benchmarks/bench_arrayproxy_slicing.py @@ -26,7 +26,7 @@ # if memory_profiler is installed, we get memory usage results try: - from memory_profiler import memory_usage # type: ignore[import-not-found] + from memory_profiler import memory_usage # type: ignore[import] except ImportError: memory_usage = None diff --git a/nibabel/cmdline/dicomfs.py b/nibabel/cmdline/dicomfs.py index dec4011c51..66ffb8adea 100644 --- a/nibabel/cmdline/dicomfs.py +++ b/nibabel/cmdline/dicomfs.py @@ -25,7 +25,7 @@ class dummy_fuse: try: - import fuse # type: ignore[import-not-found] + import fuse # type: ignore[import] uid = os.getuid() gid = os.getgid() diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 94e1be76e2..912b5d28ae 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -163,7 +163,7 @@ class Minc2Image(Minc1Image): def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): # Import of h5py might take awhile for MPI-enabled builds # So we are importing it here "on demand" - import h5py # type: ignore[import-not-found] + import h5py # type: ignore[import] holder = file_map['image'] if holder.filename is None: diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index 
c859d702f4..3465c57190 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -275,7 +275,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): contents = matf.read() if len(contents) == 0: return ret - import scipy.io as sio # type: ignore[import-not-found] + import scipy.io as sio # type: ignore[import] mats = sio.loadmat(BytesIO(contents)) if 'mat' in mats: # this overrides a 'M', and includes any flip From cff293645aa71361882ac4e300a124790d5d6f19 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 22 Feb 2024 22:26:03 -0500 Subject: [PATCH 015/203] TEST: Prepare tests to fail at 6.0 --- nibabel/gifti/tests/test_gifti.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index a2f8395cae..5cc2756c60 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -14,7 +14,7 @@ from ... import load from ...fileholders import FileHolder from ...nifti1 import data_type_codes -from ...testing import get_test_data +from ...testing import deprecated_to, expires, get_test_data from .. import ( GiftiCoordSystem, GiftiDataArray, @@ -275,27 +275,29 @@ def test_labeltable(): assert len(img.labeltable.labels) == 2 +@expires('6.0.0') def test_metadata(): md = GiftiMetaData(key='value') # Old initialization methods - with pytest.warns(DeprecationWarning) as w: + with deprecated_to('6.0.0'): nvpair = GiftiNVPairs('key', 'value') with pytest.warns(FutureWarning) as w: md2 = GiftiMetaData(nvpair=nvpair) assert len(w) == 1 - with pytest.warns(DeprecationWarning) as w: + with deprecated_to('6.0.0'): md3 = GiftiMetaData.from_dict({'key': 'value'}) assert md == md2 == md3 == {'key': 'value'} # .data as a list of NVPairs is going away - with pytest.warns(DeprecationWarning) as w: + with deprecated_to('6.0.0'): assert md.data[0].name == 'key' + with deprecated_to('6.0.0'): assert md.data[0].value == 'value' - assert len(w) == 2 +@expires('6.0.0') def test_metadata_list_interface(): md = GiftiMetaData(key='value') - with pytest.warns(DeprecationWarning): + with deprecated_to('6.0.0'): mdlist = md.data assert len(mdlist) == 1 assert mdlist[0].name == 'key' @@ -312,7 +314,7 @@ def test_metadata_list_interface(): assert md['foo'] == 'bar' # Append new NVPair - with pytest.warns(DeprecationWarning) as w: + with deprecated_to('6.0.0'): nvpair = GiftiNVPairs('key', 'value') mdlist.append(nvpair) assert len(mdlist) == 2 From 4b65364e6f255ab5a574c532a1b751265a8b48b1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 8 Feb 2024 08:58:57 -0500 Subject: [PATCH 016/203] DATA: Add dcm_qa_xa30 as submodule for test data --- .gitmodules | 3 +++ nibabel-data/dcm_qa_xa30 | 1 + 2 files changed, 4 insertions(+) create mode 160000 nibabel-data/dcm_qa_xa30 diff --git a/.gitmodules b/.gitmodules index cdcef650f1..20e97c2ebb 100644 --- a/.gitmodules +++ b/.gitmodules @@ -19,3 +19,6 @@ [submodule "nibabel-data/nitest-dicom"] path = nibabel-data/nitest-dicom url = https://github.com/effigies/nitest-dicom +[submodule "nibabel-data/dcm_qa_xa30"] + path = nibabel-data/dcm_qa_xa30 + url = https://github.com/neurolabusc/dcm_qa_xa30.git diff --git a/nibabel-data/dcm_qa_xa30 b/nibabel-data/dcm_qa_xa30 new file mode 160000 index 0000000000..89b2509218 --- /dev/null +++ b/nibabel-data/dcm_qa_xa30 @@ -0,0 +1 @@ +Subproject commit 89b2509218a6dd021c5d40ddaf2a017ac1bacafc From bc227ec4658f9f28e54e6861694ca14e97b229c1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: 
Thu, 22 Feb 2024 16:49:56 -0500 Subject: [PATCH 017/203] TEST: Add test for Siemens TRACE volume --- nibabel/nicom/tests/test_dicomwrappers.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 083357537e..5c29349362 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -35,6 +35,11 @@ DATA_FILE_EMPTY_ST = pjoin(IO_DATA_PATH, 'slicethickness_empty_string.dcm') DATA_FILE_4D_DERIVED = pjoin(get_nibabel_data(), 'nitest-dicom', '4d_multiframe_with_derived.dcm') DATA_FILE_CT = pjoin(get_nibabel_data(), 'nitest-dicom', 'siemens_ct_header_csa.dcm') +DATA_FILE_SIEMENS_TRACE = pjoin( + get_nibabel_data(), + 'dcm_qa_xa30', + 'In/20_DWI_dir80_AP/0001_1.3.12.2.1107.5.2.43.67093.2022071112140611403312307.dcm', +) # This affine from our converted image was shown to match our image spatially # with an image from SPM DICOM conversion. We checked the matching with SPM @@ -656,6 +661,13 @@ def test_data_derived_shape(self): with pytest.warns(UserWarning, match='Derived images found and removed'): assert dw.image_shape == (96, 96, 60, 33) + @dicom_test + @needs_nibabel_data('dcm_qa_xa30') + def test_data_trace(self): + # Test that a standalone trace volume is found and not dropped + dw = didw.wrapper_from_file(DATA_FILE_SIEMENS_TRACE) + assert dw.image_shape == (72, 72, 39, 1) + @dicom_test @needs_nibabel_data('nitest-dicom') def test_data_unreadable_private_headers(self): From 3f81a96b61106d218da51c0453de23c4e6669bf6 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 22 Feb 2024 16:50:05 -0500 Subject: [PATCH 018/203] FIX: Conditionally drop isotropic frames --- nibabel/nicom/dicomwrappers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 42d4b1413f..5ff4f33052 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -509,11 +509,14 @@ def image_shape(self): if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): # DWI image may include derived isotropic, ADC or trace volume try: - self.frames = pydicom.Sequence( + anisotropic = pydicom.Sequence( frame for frame in self.frames if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC' ) + # Image contains DWI volumes followed by derived images; remove derived images + if len(anisotropic) != 0: + self.frames = anisotropic except IndexError: # Sequence tag is found but missing items! raise WrapperError('Diffusion file missing information') From 79792de0bff76d0a98781c3910b31d6cda6f21d0 Mon Sep 17 00:00:00 2001 From: manifest-rules Date: Fri, 23 Feb 2024 09:57:36 +0000 Subject: [PATCH 019/203] TEST: Unit test for loading ASCII-encoded "flat" GIFTI data array. 
Currently failing --- nibabel/gifti/tests/data/ascii_flat_data.gii | 76 ++++++++++++++++++++ nibabel/gifti/tests/test_parse_gifti_fast.py | 15 +++- 2 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 nibabel/gifti/tests/data/ascii_flat_data.gii diff --git a/nibabel/gifti/tests/data/ascii_flat_data.gii b/nibabel/gifti/tests/data/ascii_flat_data.gii new file mode 100644 index 0000000000..26a73fba02 --- /dev/null +++ b/nibabel/gifti/tests/data/ascii_flat_data.gii @@ -0,0 +1,76 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1.000000 0.000000 0.000000 0.000000 0.000000 1.000000 0.000000 0.000000 0.000000 0.000000 1.000000 0.000000 0.000000 0.000000 0.000000 1.000000 + + 155.17539978 135.58103943 98.30715179 140.33973694 190.0491333 73.24776459 157.3598938 196.97969055 83.65809631 171.46174622 137.43661499 78.4709549 148.54592896 97.06752777 65.96373749 123.45701599 111.46841431 66.3571167 135.30892944 202.28720093 36.38148499 178.28155518 162.59469604 37.75128937 178.11087036 115.28820038 57.17986679 142.81582642 82.82115173 31.02205276 + + + + + + + + + + + + + 6402 17923 25602 14085 25602 17923 25602 14085 4483 17923 1602 14085 4483 25603 25602 25604 25602 25603 25602 25604 6402 25603 3525 25604 1123 17922 12168 25604 12168 17922 + + diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index f08bdd1b17..49f2729f37 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -39,9 +39,10 @@ DATA_FILE5 = pjoin(IO_DATA_PATH, 'base64bin.gii') DATA_FILE6 = pjoin(IO_DATA_PATH, 'rh.aparc.annot.gii') DATA_FILE7 = pjoin(IO_DATA_PATH, 'external.gii') +DATA_FILE8 = pjoin(IO_DATA_PATH, 'ascii_flat_data.gii') -datafiles = [DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6, DATA_FILE7] -numDA = [2, 1, 1, 1, 2, 1, 2] +datafiles = [DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6, DATA_FILE7, DATA_FILE8] +numDA = [2, 1, 1, 1, 2, 1, 2, 2] DATA_FILE1_darr1 = np.array( [ @@ -152,6 +153,10 @@ dtype=np.int32, ) +DATA_FILE8_darr1 = np.copy(DATA_FILE5_darr1) + +DATA_FILE8_darr2 = np.copy(DATA_FILE5_darr2) + def assert_default_types(loaded): default = loaded.__class__() @@ -448,3 +453,9 @@ def test_load_compressed(): img7 = load(fn) assert_array_almost_equal(img7.darrays[0].data, DATA_FILE7_darr1) assert_array_almost_equal(img7.darrays[1].data, DATA_FILE7_darr2) + + +def test_load_flat_ascii_data(): + img = load(DATA_FILE8) + assert_array_almost_equal(img.darrays[0].data, DATA_FILE8_darr1) + assert_array_almost_equal(img.darrays[1].data, DATA_FILE8_darr2) From 6ffeeacc158c51111691e91fbb2fbbc303f42cd8 Mon Sep 17 00:00:00 2001 From: manifest-rules Date: Fri, 23 Feb 2024 10:08:14 +0000 Subject: [PATCH 020/203] RF: Make sure that ASCII-encoded DataArrays are returned with expected shape --- nibabel/gifti/parse_gifti_fast.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nibabel/gifti/parse_gifti_fast.py b/nibabel/gifti/parse_gifti_fast.py index 7d8eacb825..af01dd544b 100644 --- a/nibabel/gifti/parse_gifti_fast.py +++ b/nibabel/gifti/parse_gifti_fast.py @@ -74,6 +74,10 @@ def read_data_block(darray, fname, data, mmap): # GIFTI_ENCODING_ASCII c = StringIO(data) da = np.loadtxt(c, dtype=dtype) + # Reshape to dims specified in GiftiDataArray attributes, but preserve + # existing behaviour of loading as 1D for arrays with a dimension of + # length 1 + da = da.reshape(darray.dims).squeeze() return da # independent of the 
endianness elif enclabel not in ('B64BIN', 'B64GZ', 'External'): return 0 From b46c82946d6bd88b73164904834567b12aadf935 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 23 Feb 2024 10:05:56 -0500 Subject: [PATCH 021/203] RF: Consistently apply data type, shape and index order in GIFTI data blocks --- nibabel/gifti/parse_gifti_fast.py | 70 +++++++++++++------------------ 1 file changed, 29 insertions(+), 41 deletions(-) diff --git a/nibabel/gifti/parse_gifti_fast.py b/nibabel/gifti/parse_gifti_fast.py index af01dd544b..ccd608324a 100644 --- a/nibabel/gifti/parse_gifti_fast.py +++ b/nibabel/gifti/parse_gifti_fast.py @@ -68,21 +68,21 @@ def read_data_block(darray, fname, data, mmap): if mmap is True: mmap = 'c' enclabel = gifti_encoding_codes.label[darray.encoding] - dtype = data_type_codes.type[darray.datatype] + if enclabel not in ('ASCII', 'B64BIN', 'B64GZ', 'External'): + raise GiftiParseError(f'Unknown encoding {darray.encoding}') + + # Encode the endianness in the dtype + byteorder = gifti_endian_codes.byteorder[darray.endian] + dtype = data_type_codes.dtype[darray.datatype].newbyteorder(byteorder) + + shape = tuple(darray.dims) + order = array_index_order_codes.npcode[darray.ind_ord] + + # GIFTI_ENCODING_ASCII if enclabel == 'ASCII': - # GIFTI_ENCODING_ASCII - c = StringIO(data) - da = np.loadtxt(c, dtype=dtype) - # Reshape to dims specified in GiftiDataArray attributes, but preserve - # existing behaviour of loading as 1D for arrays with a dimension of - # length 1 - da = da.reshape(darray.dims).squeeze() - return da # independent of the endianness - elif enclabel not in ('B64BIN', 'B64GZ', 'External'): - return 0 - - # GIFTI_ENCODING_EXTBIN + return np.loadtxt(StringIO(data), dtype=dtype, ndmin=1).reshape(shape, order=order) + # We assume that the external data file is raw uncompressed binary, with # the data type/endianness/ordering specified by the other DataArray # attributes @@ -98,12 +98,13 @@ def read_data_block(darray, fname, data, mmap): newarr = None if mmap: try: - newarr = np.memmap( + return np.memmap( ext_fname, dtype=dtype, mode=mmap, offset=darray.ext_offset, - shape=tuple(darray.dims), + shape=shape, + order=order, ) # If the memmap fails, we ignore the error and load the data into # memory below @@ -111,13 +112,12 @@ def read_data_block(darray, fname, data, mmap): pass # mmap=False or np.memmap failed if newarr is None: - # We can replace this with a call to np.fromfile in numpy>=1.17, - # as an "offset" parameter was added in that version. - with open(ext_fname, 'rb') as f: - f.seek(darray.ext_offset) - nbytes = np.prod(darray.dims) * dtype().itemsize - buff = f.read(nbytes) - newarr = np.frombuffer(buff, dtype=dtype) + return np.fromfile( + ext_fname, + dtype=dtype, + count=np.prod(darray.dims), + offset=darray.ext_offset, + ).reshape(shape, order=order) # Numpy arrays created from bytes objects are read-only. # Neither b64decode nor decompress will return bytearrays, and there @@ -125,26 +125,14 @@ def read_data_block(darray, fname, data, mmap): # there is not a simple way to avoid making copies. # If this becomes a problem, we should write a decoding interface with # a tunable chunk size. 
+ dec = base64.b64decode(data.encode('ascii')) + if enclabel == 'B64BIN': + buff = bytearray(dec) else: - dec = base64.b64decode(data.encode('ascii')) - if enclabel == 'B64BIN': - # GIFTI_ENCODING_B64BIN - buff = bytearray(dec) - else: - # GIFTI_ENCODING_B64GZ - buff = bytearray(zlib.decompress(dec)) - del dec - newarr = np.frombuffer(buff, dtype=dtype) - - sh = tuple(darray.dims) - if len(newarr.shape) != len(sh): - newarr = newarr.reshape(sh, order=array_index_order_codes.npcode[darray.ind_ord]) - - # check if we need to byteswap - required_byteorder = gifti_endian_codes.byteorder[darray.endian] - if required_byteorder in ('big', 'little') and required_byteorder != sys.byteorder: - newarr = newarr.byteswap() - return newarr + # GIFTI_ENCODING_B64GZ + buff = bytearray(zlib.decompress(dec)) + del dec + return np.frombuffer(buff, dtype=dtype).reshape(shape, order=order) def _str2int(in_str): From afbcc88d2c3ff83df3acadbff4741a790d2d5647 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 23 Feb 2024 10:08:22 -0500 Subject: [PATCH 022/203] TEST: Expect data arrays to be the advertised shapes --- nibabel/gifti/gifti.py | 2 +- nibabel/gifti/tests/test_parse_gifti_fast.py | 13 +++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 76bad4677a..7aba877309 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -745,7 +745,7 @@ def agg_data(self, intent_code=None): >>> triangles_2 = surf_img.agg_data('triangle') >>> triangles_3 = surf_img.agg_data(1009) # Numeric code for pointset >>> print(np.array2string(triangles)) - [0 1 2] + [[0 1 2]] >>> np.array_equal(triangles, triangles_2) True >>> np.array_equal(triangles, triangles_3) diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index 49f2729f37..f972425679 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -41,7 +41,16 @@ DATA_FILE7 = pjoin(IO_DATA_PATH, 'external.gii') DATA_FILE8 = pjoin(IO_DATA_PATH, 'ascii_flat_data.gii') -datafiles = [DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6, DATA_FILE7, DATA_FILE8] +datafiles = [ + DATA_FILE1, + DATA_FILE2, + DATA_FILE3, + DATA_FILE4, + DATA_FILE5, + DATA_FILE6, + DATA_FILE7, + DATA_FILE8, +] numDA = [2, 1, 1, 1, 2, 1, 2, 2] DATA_FILE1_darr1 = np.array( @@ -51,7 +60,7 @@ [-17.614349, -65.401642, 21.071466], ] ) -DATA_FILE1_darr2 = np.array([0, 1, 2]) +DATA_FILE1_darr2 = np.array([[0, 1, 2]]) DATA_FILE2_darr1 = np.array( [ From 8cc8f05e98f2be2e7cf2b6c68636c97e47099aff Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 23 Feb 2024 10:29:43 -0500 Subject: [PATCH 023/203] CI: Configure dependabot to update official actions in bulk --- .github/dependabot.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..6c9e83fcbf --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + groups: + actions-infrastructure: + patterns: + - "actions/*" From d641e44347caad6f52751b3d4f933cd11e8350d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Feb 2024 15:30:27 +0000 Subject: [PATCH 024/203] Build(deps): Bump the actions-infrastructure group with 3 updates Bumps the 
actions-infrastructure group with 3 updates: [actions/setup-python](https://github.com/actions/setup-python), [actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact). Updates `actions/setup-python` from 4 to 5 - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) Updates `actions/upload-artifact` from 3 to 4 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) Updates `actions/download-artifact` from 3 to 4 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions-infrastructure - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions-infrastructure - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions-infrastructure ... Signed-off-by: dependabot[bot] --- .github/workflows/test.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index fc9afdc218..ac78e7c9cd 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -44,7 +44,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3 - run: pip install --upgrade build twine @@ -54,12 +54,12 @@ jobs: - name: Build git archive run: mkdir archive && git archive -v -o archive/nibabel-archive.tgz HEAD - name: Upload sdist and wheel artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: dist path: dist/ - name: Upload git archive artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: archive path: archive/ @@ -73,17 +73,17 @@ jobs: steps: - name: Download sdist and wheel artifacts if: matrix.package != 'archive' - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: dist path: dist/ - name: Download git archive artifact if: matrix.package == 'archive' - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: archive path: archive/ - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3 - name: Display Python version @@ -147,7 +147,7 @@ jobs: submodules: recursive fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} @@ -167,7 +167,7 @@ jobs: with: files: cov.xml - name: Upload pytest test results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} path: test-results.xml @@ -183,7 +183,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3 - name: Display Python version @@ -204,7 +204,7 @@ jobs: id-token: 
write if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: dist path: dist/ From d752aeb0160951527cef73d67123b16287aea5e0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Feb 2024 15:30:31 +0000 Subject: [PATCH 025/203] Build(deps): Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index fc9afdc218..93ad63e177 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -162,7 +162,7 @@ jobs: run: tox c - name: Run tox run: tox -v --exit-and-dump-after 1200 - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 if: ${{ always() }} with: files: cov.xml From 6471a889dd9817ea671feacde882c77f20ecb895 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 23 Feb 2024 10:32:25 -0500 Subject: [PATCH 026/203] Update .github/workflows/test.yml --- .github/workflows/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 93ad63e177..520bd3d8a0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -166,6 +166,7 @@ jobs: if: ${{ always() }} with: files: cov.xml + token: ${{ secrets.CODECOV_TOKEN }} - name: Upload pytest test results uses: actions/upload-artifact@v3 with: From 42dea7a10842c03f4a1a70191b2091f2d7eee9f6 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 23 Feb 2024 10:53:02 -0500 Subject: [PATCH 027/203] Update .github/workflows/test.yml --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ac78e7c9cd..d9d644b871 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -169,7 +169,7 @@ jobs: - name: Upload pytest test results uses: actions/upload-artifact@v4 with: - name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} + name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.dependencies }}-${{ matrix.architecture }} path: test-results.xml if: ${{ always() }} From 75692191fc7763feea35ee2c439a04d42d357f9b Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Tue, 27 Feb 2024 09:19:58 -0500 Subject: [PATCH 028/203] Make "Calculated shape" more "correct" (do show shape) and informative --- nibabel/nicom/dicomwrappers.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 5ff4f33052..7e8f7201a8 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -565,8 +565,11 @@ def image_shape(self): ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] shape = (rows, cols) + tuple(ns_unique) n_vols = np.prod(shape[3:]) - if n_frames != n_vols * shape[2]: - raise WrapperError('Calculated 
shape does not match number of frames.') + n_frames_calc = n_vols * shape[2] + if n_frames != n_frames_calc: + raise WrapperError( + f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) of shape {shape} does not ' + f'match NumberOfFrames {n_frames}.') return tuple(shape) @one_time From d063b95a83bc2fba49d083a96235e60b3a0035c1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 28 Feb 2024 09:40:14 -0500 Subject: [PATCH 029/203] STY: blue/flake8 --- nibabel/nicom/dicomwrappers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 7e8f7201a8..a5ea550d87 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -568,8 +568,9 @@ def image_shape(self): n_frames_calc = n_vols * shape[2] if n_frames != n_frames_calc: raise WrapperError( - f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) of shape {shape} does not ' - f'match NumberOfFrames {n_frames}.') + f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' + f'of shape {shape} does not match NumberOfFrames {n_frames}.' + ) return tuple(shape) @one_time From e4facc17fbebeb92fa6fed600b9a349c6e373ee3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 4 Mar 2024 09:10:12 -0500 Subject: [PATCH 030/203] PIN: Temporarily pin pytest<8.1, pending scientific-python/pytest-doctestplus#239 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e92c465e0d..3cd81f93e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,7 +67,7 @@ doc = [ "tomli; python_version < '3.11'", ] test = [ - "pytest", + "pytest<8.1", # relax once pytest-doctestplus releases 1.2.0 "pytest-doctestplus", "pytest-cov", "pytest-httpserver", From 2bad8cce331976af3e8b42cecaed76bb075ee8b3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 5 Mar 2024 08:16:05 -0500 Subject: [PATCH 031/203] FIX: Use np.asarray instead of np.array(..., copy=False) --- nibabel/affines.py | 2 +- nibabel/casting.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index 05fdd7bb58..1478fd2dca 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -365,7 +365,7 @@ def rescale_affine(affine, shape, zooms, new_shape=None): A new affine transform with the specified voxel sizes """ - shape = np.array(shape, copy=False) + shape = np.asarray(shape) new_shape = np.array(new_shape if new_shape is not None else shape) s = voxel_sizes(affine) diff --git a/nibabel/casting.py b/nibabel/casting.py index f3e04f30f4..09015135f2 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -611,7 +611,7 @@ def int_abs(arr): >>> int_abs(np.array([-128, 127], dtype=np.float32)) array([128., 127.], dtype=float32) """ - arr = np.array(arr, copy=False) + arr = np.asarray(arr) dt = arr.dtype if dt.kind == 'u': return arr From 1d984adf83f41f328324af8eb917fec68e6dfbd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Mon, 11 Mar 2024 20:10:35 -0400 Subject: [PATCH 032/203] DOC: Update affiliation of jhlegarreta Update affiliation of jhlegarreta. 
--- .zenodo.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.zenodo.json b/.zenodo.json index 6cadd84a7a..a30467ebe0 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -270,7 +270,7 @@ "orcid": "0000-0003-1076-5122" }, { - "affiliation": "Universit\u00e9 de Sherbrooke", + "affiliation": "Brigham and Women's Hospital, Mass General Brigham/Harvard Medical School", "name": "Legarreta, Jon Haitz", "orcid": "0000-0002-9661-1396" }, From f23ca14310724897fb24f8061eeee2dc382cf2cc Mon Sep 17 00:00:00 2001 From: Joshua Newton Date: Fri, 22 Mar 2024 17:48:06 -0400 Subject: [PATCH 033/203] `casting.py`: Filter WSL1 + np.longdouble warning This commit filters the following warning: > UserWarning: Signature b'\x00\xd0\xcc\xcc\xcc\xcc\xcc\xcc\xfb\xbf\x00\x00\x00\x00\x00\x00' for > does not match any known type: falling back to type probe function. > This warnings [sic] indicates broken support for the dtype! > machar = _get_machar(dtype) To ensure that this warning is only filtered on WSL1, we try to detect WSL by checking for a WSL-specific string from the uname, which appears to be endorsed by WSL devs. (https://github.com/microsoft/WSL/issues/4555#issuecomment-700315063) I also tried checking the `WSL_INTEROP` and `WSL_DISTRO_NAME` environment variables as suggested in the above linked issues, but I preferred reusing the `platform` module that was already imported inside `casting.py`. There is perhaps a more thorough approach where we collect all raised warnings, test the collected warnings, etc. but I didn't want to overcomplicate things. --- nibabel/casting.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/nibabel/casting.py b/nibabel/casting.py index 09015135f2..ec86089576 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -6,7 +6,7 @@ from __future__ import annotations import warnings -from platform import machine, processor +from platform import machine, processor, uname import numpy as np @@ -274,7 +274,15 @@ def type_info(np_type): nexp=None, width=width, ) - info = np.finfo(dt) + # Mitigate warning from WSL1 when checking `np.longdouble` (#1309) + # src for '-Microsoft': https://github.com/microsoft/WSL/issues/4555#issuecomment-536862561 + with warnings.catch_warnings(): + if uname().release.endswith('-Microsoft'): + warnings.filterwarnings( + action='ignore', category=UserWarning, message='Signature.*numpy.longdouble' + ) + info = np.finfo(dt) + # Trust the standard IEEE types nmant, nexp = info.nmant, info.nexp ret = dict( From 50dd737089d46adc1bd5c0e7f97d137c10cb1166 Mon Sep 17 00:00:00 2001 From: Joshua Newton Date: Sat, 23 Mar 2024 13:41:02 -0400 Subject: [PATCH 034/203] `casting.py`: Remove `uname` check for WSL1 --- nibabel/casting.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/nibabel/casting.py b/nibabel/casting.py index ec86089576..77da57e406 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -6,7 +6,7 @@ from __future__ import annotations import warnings -from platform import machine, processor, uname +from platform import machine, processor import numpy as np @@ -275,12 +275,10 @@ def type_info(np_type): width=width, ) # Mitigate warning from WSL1 when checking `np.longdouble` (#1309) - # src for '-Microsoft': https://github.com/microsoft/WSL/issues/4555#issuecomment-536862561 with warnings.catch_warnings(): - if uname().release.endswith('-Microsoft'): - warnings.filterwarnings( - action='ignore', category=UserWarning, message='Signature.*numpy.longdouble' - ) + 
warnings.filterwarnings( + action='ignore', category=UserWarning, message='Signature.*numpy.longdouble' + ) info = np.finfo(dt) # Trust the standard IEEE types From 2978ee8ee45cf8c935b91a5a2e3268406f8f24f6 Mon Sep 17 00:00:00 2001 From: Joshua Newton Date: Sat, 23 Mar 2024 18:58:47 -0400 Subject: [PATCH 035/203] `.zenodo.json`: Add Joshua Newton --- .zenodo.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.zenodo.json b/.zenodo.json index a30467ebe0..553aba0548 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -391,6 +391,11 @@ }, { "name": "freec84" + }, + { + "affiliation": "Polytechnique Montréal, Montréal, CA", + "name": "Newton, Joshua", + "orcid": "0009-0005-6963-3812" } ], "keywords": [ From 733c0f36af71808185245617a156b3e7b4bd26a2 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 26 Dec 2023 14:00:58 +0100 Subject: [PATCH 036/203] =?UTF-8?q?MNT:=20blue/isort/flake8=20=E2=86=92=20?= =?UTF-8?q?ruff?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .pre-commit-config.yaml | 19 +++++++------------ pyproject.toml | 39 +++++++++++++++++++++++++-------------- tox.ini | 17 ++++++----------- 3 files changed, 38 insertions(+), 37 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2b620a6de3..ef2d891fbd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,19 +12,14 @@ repos: - id: check-case-conflict - id: check-merge-conflict - id: check-vcs-permalinks - - repo: https://github.com/grantjenks/blue - rev: v0.9.1 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.9 hooks: - - id: blue - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - - id: isort - - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - exclude: "^(doc|nisext|tools)/" + - id: ruff + args: [--fix, --show-fix, --exit-non-zero-on-fix] + exclude: = ["doc", "tools"] + - id: ruff-format + exclude: = ["doc", "tools"] - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.5.1 hooks: diff --git a/pyproject.toml b/pyproject.toml index 3cd81f93e5..515c35850b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,21 +109,32 @@ __version__ = version = {version!r} __version_tuple__ = version_tuple = {version_tuple!r} ''' -[tool.blue] -line_length = 99 -target-version = ["py38"] -force-exclude = """ -( - _version.py - | nibabel/externals/ - | versioneer.py -) -""" +[tool.ruff] +line-length = 99 +exclude = ["doc", "nibabel/externals", "tools", "version.py", "versioneer.py"] -[tool.isort] -profile = "black" -line_length = 99 -extend_skip = ["_version.py", "externals"] +[tool.ruff.lint] +select = ["F", "I", "Q"] +ignore = [ + # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", + "E111", + "E114", + "E117", + "D206", + "D300", + "Q000", + "Q001", + "Q002", + "Q003", + "COM812", + "COM819", + "ISC001", + "ISC002", +] + +[tool.ruff.format] +quote-style = "single" [tool.mypy] python_version = "3.11" diff --git a/tox.ini b/tox.ini index cc2b263cb1..4e9b220ce8 100644 --- a/tox.ini +++ b/tox.ini @@ -139,26 +139,21 @@ commands = description = Check our style guide labels = check deps = - flake8 - blue - # Broken extras, remove when fix is released - isort[colors]!=5.13.1 + ruff>=0.1.9 skip_install = true commands = - blue --check --diff --color nibabel - isort --check --diff --color nibabel - flake8 nibabel + ruff --diff nibabel + ruff format --diff nibabel [testenv:style-fix] description = Auto-apply 
style guide to the extent possible labels = pre-release deps = - blue - isort + ruff skip_install = true commands = - blue nibabel - isort nibabel + ruff --fix nibabel + ruff format nibabel [testenv:spellcheck] description = Check spelling From 39429f9708ede298088c1a9206fca83ef2b73b49 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 26 Dec 2023 15:17:14 +0100 Subject: [PATCH 037/203] MNT: run `ruff --fix` and `ruf format` Also fix remaining issues manually. --- doc/source/conf.py | 4 ++-- doc/tools/apigen.py | 1 - doc/tools/build_modref_templates.py | 2 +- nibabel/__init__.py | 6 +++--- nibabel/benchmarks/bench_arrayproxy_slicing.py | 3 --- nibabel/cifti2/__init__.py | 1 + nibabel/cifti2/tests/test_cifti2.py | 2 +- nibabel/cifti2/tests/test_cifti2io_header.py | 3 +-- nibabel/cmdline/diff.py | 3 --- nibabel/cmdline/tests/test_convert.py | 2 +- nibabel/cmdline/tests/test_roi.py | 3 +-- nibabel/cmdline/tests/test_stats.py | 3 --- nibabel/cmdline/tests/test_utils.py | 14 ++++++++++++-- nibabel/conftest.py | 2 +- nibabel/dft.py | 4 ++-- nibabel/ecat.py | 1 - nibabel/freesurfer/__init__.py | 2 ++ nibabel/freesurfer/tests/test_mghformat.py | 1 + nibabel/gifti/__init__.py | 2 ++ nibabel/gifti/tests/test_gifti.py | 13 ++++++------- nibabel/gifti/tests/test_parse_gifti_fast.py | 4 ++-- nibabel/info.py | 2 +- nibabel/nicom/tests/test_ascconv.py | 1 - nibabel/nicom/tests/test_csareader.py | 1 - nibabel/nicom/tests/test_dicomwrappers.py | 2 +- nibabel/openers.py | 2 +- nibabel/streamlines/__init__.py | 2 ++ nibabel/streamlines/tck.py | 1 - nibabel/streamlines/tests/test_array_sequence.py | 3 +-- nibabel/streamlines/tests/test_streamlines.py | 1 - nibabel/streamlines/tests/test_tck.py | 3 +-- nibabel/streamlines/tests/test_tractogram.py | 2 -- nibabel/streamlines/tests/test_tractogram_file.py | 1 - nibabel/streamlines/tests/test_trk.py | 2 +- nibabel/streamlines/trk.py | 5 +---- nibabel/testing/__init__.py | 3 +++ nibabel/tests/nibabel_data.py | 3 +-- nibabel/tests/scriptrunner.py | 3 +-- nibabel/tests/test_affines.py | 2 +- nibabel/tests/test_arraywriters.py | 6 +++--- nibabel/tests/test_brikhead.py | 2 +- nibabel/tests/test_data.py | 2 +- nibabel/tests/test_ecat.py | 1 - nibabel/tests/test_ecat_data.py | 2 +- nibabel/tests/test_floating.py | 3 --- nibabel/tests/test_funcs.py | 1 - nibabel/tests/test_image_load_save.py | 3 +-- nibabel/tests/test_image_types.py | 1 - nibabel/tests/test_imageclasses.py | 4 +--- nibabel/tests/test_init.py | 1 + nibabel/tests/test_minc1.py | 6 +----- nibabel/tests/test_minc2.py | 2 +- nibabel/tests/test_nibabel_data.py | 3 +-- nibabel/tests/test_nifti1.py | 1 - nibabel/tests/test_nifti2.py | 2 +- nibabel/tests/test_openers.py | 1 - nibabel/tests/test_orientations.py | 2 -- nibabel/tests/test_parrec.py | 5 ++--- nibabel/tests/test_parrec_data.py | 4 +--- nibabel/tests/test_pkg_info.py | 2 +- nibabel/tests/test_pointset.py | 3 --- nibabel/tests/test_quaternions.py | 7 ------- nibabel/tests/test_removalschedule.py | 1 - nibabel/tests/test_scripts.py | 5 ++--- nibabel/tests/test_spatialimages.py | 3 +-- nibabel/tests/test_testing.py | 4 ++-- nibabel/tests/test_wrapstruct.py | 9 --------- tools/make_tarball.py | 2 +- tools/markdown_release_notes.py | 2 +- tools/mpkg_wrapper.py | 2 +- 70 files changed, 79 insertions(+), 128 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 175c6340bd..e8999b7d2b 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -30,11 +30,11 @@ # Check for 
external Sphinx extensions we depend on try: - import numpydoc + import numpydoc # noqa: F401 except ImportError: raise RuntimeError('Need to install "numpydoc" package for doc build') try: - import texext + import texext # noqa: F401 except ImportError: raise RuntimeError('Need to install "texext" package for doc build') diff --git a/doc/tools/apigen.py b/doc/tools/apigen.py index 3167362643..a1279a3e98 100644 --- a/doc/tools/apigen.py +++ b/doc/tools/apigen.py @@ -405,7 +405,6 @@ def discover_modules(self): def write_modules_api(self, modules, outdir): # upper-level modules - main_module = modules[0].split('.')[0] ulms = [ '.'.join(m.split('.')[:2]) if m.count('.') >= 1 else m.split('.')[0] for m in modules ] diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index 11eae99741..0e82cf6bf8 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -38,7 +38,7 @@ def abort(error): try: __import__(package) - except ImportError as e: + except ImportError: abort('Can not import ' + package) module = sys.modules[package] diff --git a/nibabel/__init__.py b/nibabel/__init__.py index db427435ae..1cb7abf53f 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -7,6 +7,8 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# ruff: noqa: F401 + import os from .info import long_description as __doc__ @@ -39,12 +41,10 @@ # module imports from . import analyze as ana -from . import ecat, imagestats, mriutils +from . import ecat, imagestats, mriutils, orientations, streamlines, viewers from . import nifti1 as ni1 -from . import orientations from . import spm2analyze as spm2 from . import spm99analyze as spm99 -from . import streamlines, viewers # isort: split diff --git a/nibabel/benchmarks/bench_arrayproxy_slicing.py b/nibabel/benchmarks/bench_arrayproxy_slicing.py index 305c5215e4..3444cb8d8f 100644 --- a/nibabel/benchmarks/bench_arrayproxy_slicing.py +++ b/nibabel/benchmarks/bench_arrayproxy_slicing.py @@ -56,7 +56,6 @@ def bench_arrayproxy_slicing(): - print_git_title('\nArrayProxy gzip slicing') # each test is a tuple containing @@ -100,7 +99,6 @@ def fmt_sliceobj(sliceobj): return f"[{', '.join(slcstr)}]" with InTemporaryDirectory(): - print(f'Generating test data... ({int(round(np.prod(SHAPE) * 4 / 1048576.0))} MB)') data = np.array(np.random.random(SHAPE), dtype=np.float32) @@ -128,7 +126,6 @@ def fmt_sliceobj(sliceobj): seeds = [np.random.randint(0, 2**32) for s in SLICEOBJS] for ti, test in enumerate(tests): - label = get_test_label(test) have_igzip, keep_open, sliceobj = test seed = seeds[SLICEOBJS.index(sliceobj)] diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py index 9c6805f818..4a5cad7675 100644 --- a/nibabel/cifti2/__init__.py +++ b/nibabel/cifti2/__init__.py @@ -6,6 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# ruff: noqa: F401 """CIFTI-2 format IO .. 
currentmodule:: nibabel.cifti2 diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index bf287b8e03..d7fd0a0eda 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -7,7 +7,7 @@ import pytest from nibabel import cifti2 as ci -from nibabel.cifti2.cifti2 import Cifti2HeaderError, _float_01, _value_if_klass +from nibabel.cifti2.cifti2 import _float_01, _value_if_klass from nibabel.nifti2 import Nifti2Header from nibabel.tests.test_dataobj_images import TestDataobjAPI as _TDA from nibabel.tests.test_image_api import DtypeOverrideMixin, SerializeMixin diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 8d393686dd..92078a26d7 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -import io from os.path import dirname from os.path import join as pjoin @@ -38,7 +37,7 @@ def test_space_separated_affine(): - img = ci.Cifti2Image.from_filename(pjoin(NIBABEL_TEST_DATA, 'row_major.dconn.nii')) + _ = ci.Cifti2Image.from_filename(pjoin(NIBABEL_TEST_DATA, 'row_major.dconn.nii')) def test_read_nifti2(): diff --git a/nibabel/cmdline/diff.py b/nibabel/cmdline/diff.py index b409c7205d..d20a105e76 100755 --- a/nibabel/cmdline/diff.py +++ b/nibabel/cmdline/diff.py @@ -231,7 +231,6 @@ def get_data_diff(files, max_abs=0, max_rel=0, dtype=np.float64): diffs1 = [None] * (i + 1) for j, d2 in enumerate(data[i + 1 :], i + 1): - if d1.shape == d2.shape: abs_diff = np.abs(d1 - d2) mean_abs = (np.abs(d1) + np.abs(d2)) * 0.5 @@ -255,7 +254,6 @@ def get_data_diff(files, max_abs=0, max_rel=0, dtype=np.float64): max_rel_diff = 0 if np.any(candidates): - diff_rec = OrderedDict() # so that abs goes before relative diff_rec['abs'] = max_abs_diff.astype(dtype) @@ -268,7 +266,6 @@ def get_data_diff(files, max_abs=0, max_rel=0, dtype=np.float64): diffs1.append({'CMP': 'incompat'}) if any(diffs1): - diffs['DATA(diff %d:)' % (i + 1)] = diffs1 return diffs diff --git a/nibabel/cmdline/tests/test_convert.py b/nibabel/cmdline/tests/test_convert.py index 4605bc810d..021e6ea8ef 100644 --- a/nibabel/cmdline/tests/test_convert.py +++ b/nibabel/cmdline/tests/test_convert.py @@ -119,7 +119,7 @@ def test_convert_imgtype(tmp_path, ext, img_class): def test_convert_nifti_int_fail(tmp_path): infile = get_test_data(fname='anatomical.nii') - outfile = tmp_path / f'output.nii' + outfile = tmp_path / 'output.nii' orig = nib.load(infile) assert not outfile.exists() diff --git a/nibabel/cmdline/tests/test_roi.py b/nibabel/cmdline/tests/test_roi.py index ea3852b4da..d2baa80eeb 100644 --- a/nibabel/cmdline/tests/test_roi.py +++ b/nibabel/cmdline/tests/test_roi.py @@ -1,5 +1,4 @@ import os -import unittest from unittest import mock import numpy as np @@ -140,7 +139,7 @@ def test_entrypoint(capsys): # Check that we handle missing args as expected with mock.patch('sys.argv', ['nib-roi', '--help']): try: - retval = main() + main() except SystemExit: pass else: diff --git a/nibabel/cmdline/tests/test_stats.py b/nibabel/cmdline/tests/test_stats.py index 576a408bce..905114e31b 100644 --- a/nibabel/cmdline/tests/test_stats.py +++ b/nibabel/cmdline/tests/test_stats.py @@ -8,9 +8,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -import sys -from io import StringIO - import numpy as np from nibabel import Nifti1Image diff --git 
a/nibabel/cmdline/tests/test_utils.py b/nibabel/cmdline/tests/test_utils.py index 8143d648d9..0efb5ee0b9 100644 --- a/nibabel/cmdline/tests/test_utils.py +++ b/nibabel/cmdline/tests/test_utils.py @@ -12,8 +12,18 @@ import pytest import nibabel as nib -from nibabel.cmdline.diff import * -from nibabel.cmdline.utils import * +from nibabel.cmdline.diff import ( + display_diff, + get_data_diff, + get_data_hash_diff, + get_headers_diff, + main, +) +from nibabel.cmdline.utils import ( + ap, + safe_get, + table2string, +) from nibabel.testing import data_path diff --git a/nibabel/conftest.py b/nibabel/conftest.py index 5eba256fa5..a4f8b6de90 100644 --- a/nibabel/conftest.py +++ b/nibabel/conftest.py @@ -5,7 +5,7 @@ # Ignore warning requesting help with nicom with pytest.warns(UserWarning): - import nibabel.nicom + import nibabel.nicom # noqa :401 @pytest.fixture(scope='session', autouse=True) diff --git a/nibabel/dft.py b/nibabel/dft.py index ee34595b3f..aeb8accbb5 100644 --- a/nibabel/dft.py +++ b/nibabel/dft.py @@ -161,7 +161,7 @@ def as_nifti(self): data = numpy.ndarray( (len(self.storage_instances), self.rows, self.columns), dtype=numpy.int16 ) - for (i, si) in enumerate(self.storage_instances): + for i, si in enumerate(self.storage_instances): if i + 1 != si.instance_number: raise InstanceStackError(self, i, si) logger.info('reading %d/%d' % (i + 1, len(self.storage_instances))) @@ -243,7 +243,7 @@ def dicom(self): def _get_subdirs(base_dir, files_dict=None, followlinks=False): dirs = [] - for (dirpath, dirnames, filenames) in os.walk(base_dir, followlinks=followlinks): + for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=followlinks): abs_dir = os.path.realpath(dirpath) if abs_dir in dirs: raise CachingError(f'link cycle detected under {base_dir}') diff --git a/nibabel/ecat.py b/nibabel/ecat.py index 1db902d10a..85de9184b5 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -513,7 +513,6 @@ def read_subheaders(fileobj, mlist, endianness): class EcatSubHeader: - _subhdrdtype = subhdr_dtype _data_type_codes = data_type_codes diff --git a/nibabel/freesurfer/__init__.py b/nibabel/freesurfer/__init__.py index 806d19a272..48922285c9 100644 --- a/nibabel/freesurfer/__init__.py +++ b/nibabel/freesurfer/__init__.py @@ -1,6 +1,8 @@ """Reading functions for freesurfer files """ +# ruff: noqa: F401 + from .io import ( read_annot, read_geometry, diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 189f1a9dd7..d69587811b 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -460,6 +460,7 @@ def test_as_byteswapped(self): for endianness in (None,) + LITTLE_CODES: with pytest.raises(ValueError): hdr.as_byteswapped(endianness) + # Note that contents is not rechecked on swap / copy class DC(self.header_class): def check_fix(self, *args, **kwargs): diff --git a/nibabel/gifti/__init__.py b/nibabel/gifti/__init__.py index f54a1d2e54..d2a1e2da65 100644 --- a/nibabel/gifti/__init__.py +++ b/nibabel/gifti/__init__.py @@ -16,6 +16,8 @@ gifti """ +# ruff: noqa: F401 + from .gifti import ( GiftiCoordSystem, GiftiDataArray, diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 5cc2756c60..7e4c223971 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -2,12 +2,11 @@ """ import itertools import sys -import warnings from io import BytesIO import numpy as np import pytest -from numpy.testing import assert_array_almost_equal, 
assert_array_equal +from numpy.testing import assert_array_equal from nibabel.tmpdirs import InTemporaryDirectory @@ -329,7 +328,7 @@ def test_metadata_list_interface(): assert len(md) == 0 # Extension adds multiple keys - with pytest.warns(DeprecationWarning) as w: + with pytest.warns(DeprecationWarning) as _: foobar = GiftiNVPairs('foo', 'bar') mdlist.extend([nvpair, foobar]) assert len(mdlist) == 2 @@ -337,7 +336,7 @@ def test_metadata_list_interface(): assert md == {'key': 'value', 'foo': 'bar'} # Insertion updates list order, though we don't attempt to preserve it in the dict - with pytest.warns(DeprecationWarning) as w: + with pytest.warns(DeprecationWarning) as _: lastone = GiftiNVPairs('last', 'one') mdlist.insert(1, lastone) assert len(mdlist) == 3 @@ -360,14 +359,14 @@ def test_metadata_list_interface(): mypair.value = 'strings' assert 'completelynew' not in md assert md == {'foo': 'bar', 'last': 'one'} - # Check popping from the end (lastone inserted before foobar) - lastpair = mdlist.pop() + # Check popping from the end (last one inserted before foobar) + _ = mdlist.pop() assert len(mdlist) == 1 assert len(md) == 1 assert md == {'last': 'one'} # And let's remove an old pair with a new object - with pytest.warns(DeprecationWarning) as w: + with pytest.warns(DeprecationWarning) as _: lastoneagain = GiftiNVPairs('last', 'one') mdlist.remove(lastoneagain) assert len(mdlist) == 0 diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index f972425679..17258fbd30 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -447,13 +447,13 @@ def test_external_file_failure_cases(): shutil.copy(DATA_FILE7, '.') filename = pjoin(tmpdir, basename(DATA_FILE7)) with pytest.raises(GiftiParseError): - img = load(filename) + _ = load(filename) # load from in-memory xml string (parser requires it as bytes) with open(DATA_FILE7, 'rb') as f: xmldata = f.read() parser = GiftiImageParser() with pytest.raises(GiftiParseError): - img = parser.parse(xmldata) + _ = parser.parse(xmldata) def test_load_compressed(): diff --git a/nibabel/info.py b/nibabel/info.py index a608932fa8..d7873de211 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -108,4 +108,4 @@ .. _Digital Object Identifier: https://en.wikipedia.org/wiki/Digital_object_identifier .. _zenodo: https://zenodo.org -""" # noqa: E501 +""" # noqa: E501 diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py index cd27bc3192..cf40298c56 100644 --- a/nibabel/nicom/tests/test_ascconv.py +++ b/nibabel/nicom/tests/test_ascconv.py @@ -5,7 +5,6 @@ from os.path import dirname from os.path import join as pjoin -import numpy as np from numpy.testing import assert_array_almost_equal, assert_array_equal from .. 
import ascconv diff --git a/nibabel/nicom/tests/test_csareader.py b/nibabel/nicom/tests/test_csareader.py index 0fc559c7fc..ddb46a942a 100644 --- a/nibabel/nicom/tests/test_csareader.py +++ b/nibabel/nicom/tests/test_csareader.py @@ -1,7 +1,6 @@ """Testing Siemens CSA header reader """ import gzip -import sys from copy import deepcopy from os.path import join as pjoin diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 5c29349362..fa2dfc07c6 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -631,7 +631,7 @@ def test_image_position(self): def test_affine(self): # Make sure we find orientation/position/spacing info dw = didw.wrapper_from_file(DATA_FILE_4D) - aff = dw.affine + _ = dw.affine @dicom_test @pytest.mark.xfail(reason='Not packaged in install', raises=FileNotFoundError) diff --git a/nibabel/openers.py b/nibabel/openers.py index 90c7774d12..d69412fb85 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -86,7 +86,6 @@ def _gzip_open( mtime: int = 0, keep_open: bool = False, ) -> gzip.GzipFile: - if not HAVE_INDEXED_GZIP or mode != 'rb': gzip_file = DeterministicGzipFile(filename, mode, compresslevel, mtime=mtime) @@ -129,6 +128,7 @@ class Opener: passed to opening method when `fileish` is str. Change of defaults as for \*args """ + gz_def = (_gzip_open, ('mode', 'compresslevel', 'mtime', 'keep_open')) bz2_def = (BZ2File, ('mode', 'buffering', 'compresslevel')) zstd_def = (_zstd_open, ('mode', 'level_or_option', 'zstd_dict')) diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index f99f80e4e4..f3cbd2da59 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -1,5 +1,7 @@ """Multiformat-capable streamline format read / write interface """ +# ruff: noqa: F401 + import os import warnings diff --git a/nibabel/streamlines/tck.py b/nibabel/streamlines/tck.py index 43df2f87e0..358c579362 100644 --- a/nibabel/streamlines/tck.py +++ b/nibabel/streamlines/tck.py @@ -309,7 +309,6 @@ def _read_header(cls, fileobj): offset_data = 0 with Opener(fileobj) as f: - # Record start position start_position = f.tell() diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 0c8557fe50..a06b2c45d9 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -1,6 +1,5 @@ import itertools import os -import sys import tempfile import unittest @@ -220,7 +219,7 @@ def test_arraysequence_extend(self): seq.extend(data) # Extend after extracting some slice - working_slice = seq[:2] + _ = seq[:2] seq.extend(ArraySequence(new_data)) def test_arraysequence_getitem(self): diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 300397b2b4..f0bd9c7c49 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -1,5 +1,4 @@ import os -import tempfile import unittest import warnings from io import BytesIO diff --git a/nibabel/streamlines/tests/test_tck.py b/nibabel/streamlines/tests/test_tck.py index 3df7dd4f2d..6b4c163ed6 100644 --- a/nibabel/streamlines/tests/test_tck.py +++ b/nibabel/streamlines/tests/test_tck.py @@ -8,7 +8,6 @@ from numpy.testing import assert_array_equal from ...testing import data_path, error_warnings -from .. 
import tck as tck_module from ..array_sequence import ArraySequence from ..tck import TckFile from ..tractogram import Tractogram @@ -138,7 +137,7 @@ def test_load_file_with_wrong_information(self): # Simulate a TCK file with no `file` field. new_tck_file = tck_file.replace(b'\nfile: . 67', b'') - with pytest.warns(HeaderWarning, match="Missing 'file'") as w: + with pytest.warns(HeaderWarning, match="Missing 'file'") as _: tck = TckFile.load(BytesIO(new_tck_file)) assert_array_equal(tck.header['file'], '. 56') diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 09e3b910be..9159688548 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -1,6 +1,5 @@ import copy import operator -import sys import unittest import warnings from collections import defaultdict @@ -172,7 +171,6 @@ def setup_module(): def check_tractogram_item(tractogram_item, streamline, data_for_streamline={}, data_for_points={}): - assert_array_equal(tractogram_item.streamline, streamline) assert len(tractogram_item.data_for_streamline) == len(data_for_streamline) diff --git a/nibabel/streamlines/tests/test_tractogram_file.py b/nibabel/streamlines/tests/test_tractogram_file.py index 53a7fb662b..71e2326ecf 100644 --- a/nibabel/streamlines/tests/test_tractogram_file.py +++ b/nibabel/streamlines/tests/test_tractogram_file.py @@ -8,7 +8,6 @@ def test_subclassing_tractogram_file(): - # Missing 'save' method class DummyTractogramFile(TractogramFile): @classmethod diff --git a/nibabel/streamlines/tests/test_trk.py b/nibabel/streamlines/tests/test_trk.py index b8ff43620b..749bf3ed30 100644 --- a/nibabel/streamlines/tests/test_trk.py +++ b/nibabel/streamlines/tests/test_trk.py @@ -149,7 +149,7 @@ def test_load_file_with_wrong_information(self): # Simulate a TRK where `vox_to_ras` is invalid. trk_struct, trk_bytes = self.trk_with_bytes() trk_struct[Field.VOXEL_TO_RASMM] = np.diag([0, 0, 0, 1]) - with clear_and_catch_warnings(record=True, modules=[trk_module]) as w: + with clear_and_catch_warnings(record=True, modules=[trk_module]) as _: with pytest.raises(HeaderError): TrkFile.load(BytesIO(trk_bytes)) diff --git a/nibabel/streamlines/trk.py b/nibabel/streamlines/trk.py index 966b133d1f..0b11f5684e 100644 --- a/nibabel/streamlines/trk.py +++ b/nibabel/streamlines/trk.py @@ -366,7 +366,6 @@ def _read(): tractogram = LazyTractogram.from_data_func(_read) else: - # Speed up loading by guessing a suitable buffer size. 
with Opener(fileobj) as f: old_file_position = f.tell() @@ -773,6 +772,4 @@ def __str__(self): swap_yz: {swap_yz} swap_zx: {swap_zx} n_count: {NB_STREAMLINES} -hdr_size: {hdr_size}""".format( - **vars - ) +hdr_size: {hdr_size}""".format(**vars) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 21ecadf841..a3e98e064b 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -7,6 +7,9 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Utilities for testing""" + +# ruff: noqa: F401 + from __future__ import annotations import os diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 8d4652d79f..1f89c9c1a1 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -3,9 +3,8 @@ import unittest from os import environ, listdir -from os.path import dirname, exists, isdir +from os.path import dirname, exists, isdir, realpath from os.path import join as pjoin -from os.path import realpath def get_nibabel_data(): diff --git a/nibabel/tests/scriptrunner.py b/nibabel/tests/scriptrunner.py index 1ec2fcb486..1e8b1fdda2 100644 --- a/nibabel/tests/scriptrunner.py +++ b/nibabel/tests/scriptrunner.py @@ -14,9 +14,8 @@ """ import os import sys -from os.path import dirname, isdir, isfile +from os.path import dirname, isdir, isfile, pathsep, realpath from os.path import join as pjoin -from os.path import pathsep, realpath from subprocess import PIPE, Popen MY_PACKAGE = __package__ diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index 28f405e566..1d7ef1e6bf 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -225,7 +225,7 @@ def test_rescale_affine(): orig_shape = rng.randint(low=20, high=512, size=(3,)) orig_aff = np.eye(4) orig_aff[:3, :] = rng.normal(size=(3, 4)) - orig_zooms = voxel_sizes(orig_aff) + orig_zooms = voxel_sizes(orig_aff) # noqa: F841 orig_axcodes = aff2axcodes(orig_aff) orig_centroid = apply_affine(orig_aff, (orig_shape - 1) // 2) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 89e7ac6755..2fc9c32358 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -276,7 +276,7 @@ def test_slope_inter_castable(): for out_dtt in NUMERIC_TYPES: for klass in (ArrayWriter, SlopeArrayWriter, SlopeInterArrayWriter): arr = np.zeros((5,), dtype=in_dtt) - aw = klass(arr, out_dtt) # no error + _ = klass(arr, out_dtt) # no error # Test special case of none finite # This raises error for ArrayWriter, but not for the others arr = np.array([np.inf, np.nan, -np.inf]) @@ -285,8 +285,8 @@ def test_slope_inter_castable(): in_arr = arr.astype(in_dtt) with pytest.raises(WriterError): ArrayWriter(in_arr, out_dtt) - aw = SlopeArrayWriter(arr.astype(in_dtt), out_dtt) # no error - aw = SlopeInterArrayWriter(arr.astype(in_dtt), out_dtt) # no error + _ = SlopeArrayWriter(arr.astype(in_dtt), out_dtt) # no error + _ = SlopeInterArrayWriter(arr.astype(in_dtt), out_dtt) # no error for in_dtt, out_dtt, arr, slope_only, slope_inter, neither in ( (np.float32, np.float32, 1, True, True, True), (np.float64, np.float32, 1, True, True, True), diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index 5bf6e79cb9..31e0d0d47c 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -13,7 +13,7 @@ import pytest from numpy.testing import assert_array_equal -from .. import Nifti1Image, brikhead, load +from .. 
import Nifti1Image, brikhead from ..testing import assert_data_similar, data_path from .test_fileslice import slicer_samples diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index abcb3caaf2..3ccb4963ca 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -22,7 +22,7 @@ get_data_path, make_datasource, ) -from .test_environment import DATA_KEY, USER_KEY, with_environment +from .test_environment import DATA_KEY, USER_KEY, with_environment # noqa: F401 @pytest.fixture diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index 6a076cbc38..702913e14d 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## import os -import warnings from pathlib import Path from unittest import TestCase diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py index b7dbe4750a..23485ae92b 100644 --- a/nibabel/tests/test_ecat_data.py +++ b/nibabel/tests/test_ecat_data.py @@ -13,7 +13,7 @@ from os.path import join as pjoin import numpy as np -from numpy.testing import assert_almost_equal, assert_array_equal +from numpy.testing import assert_almost_equal from ..ecat import load from .nibabel_data import get_nibabel_data, needs_nibabel_data diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index 3e6e7f426b..c2ccd44039 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -1,10 +1,8 @@ """Test floating point deconstructions and floor methods """ import sys -from contextlib import nullcontext import numpy as np -import pytest from packaging.version import Version from ..casting import ( @@ -13,7 +11,6 @@ _check_nmant, ceil_exact, floor_exact, - floor_log2, have_binary128, longdouble_precision_improved, ok_floats, diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py index 10f6e90813..5e59bc63b6 100644 --- a/nibabel/tests/test_funcs.py +++ b/nibabel/tests/test_funcs.py @@ -58,7 +58,6 @@ def test_concat(): # Loop over every possible axis, including None (explicit and implied) for axis in list(range(-(dim - 2), (dim - 1))) + [None, '__default__']: - # Allow testing default vs. 
passing explicit param if axis == '__default__': np_concat_kwargs = dict(axis=-1) diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 706a87f10f..4e787f0d71 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -24,7 +24,6 @@ MGHImage, Minc1Image, Minc2Image, - Nifti1Header, Nifti1Image, Nifti1Pair, Nifti2Image, @@ -131,7 +130,7 @@ def test_save_load(): affine[:3, 3] = [3, 2, 1] img = ni1.Nifti1Image(data, affine) img.set_data_dtype(npt) - with InTemporaryDirectory() as pth: + with InTemporaryDirectory() as _: nifn = 'an_image.nii' sifn = 'another_image.img' ni1.save(img, nifn) diff --git a/nibabel/tests/test_image_types.py b/nibabel/tests/test_image_types.py index da2f93e21f..bc50c8417e 100644 --- a/nibabel/tests/test_image_types.py +++ b/nibabel/tests/test_image_types.py @@ -88,7 +88,6 @@ def check_img(img_path, img_klass, sniff_mode, sniff, expect_success, msg): irrelevant=b'a' * (sizeof_hdr - 1), # A too-small sniff, query bad_sniff=b'a' * sizeof_hdr, # Bad sniff, should fail ).items(): - for klass in img_klasses: if klass == expected_img_klass: # Class will load unless you pass a bad sniff, diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py index 74f05dc6e3..90424b7d34 100644 --- a/nibabel/tests/test_imageclasses.py +++ b/nibabel/tests/test_imageclasses.py @@ -1,15 +1,13 @@ """Testing imageclasses module """ -import warnings from os.path import dirname from os.path import join as pjoin import numpy as np -import pytest import nibabel as nib -from nibabel import imageclasses +from nibabel import imageclasses # noqa: F401 from nibabel.analyze import AnalyzeImage from nibabel.imageclasses import spatial_axes_first from nibabel.nifti1 import Nifti1Image diff --git a/nibabel/tests/test_init.py b/nibabel/tests/test_init.py index 2317a6397e..969b80b6fc 100644 --- a/nibabel/tests/test_init.py +++ b/nibabel/tests/test_init.py @@ -1,4 +1,5 @@ import pathlib +import unittest from unittest import mock import pytest diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index be4f0deb07..8f88bf802d 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -9,8 +9,6 @@ import bz2 import gzip -import types -import warnings from io import BytesIO from os.path import join as pjoin @@ -19,12 +17,10 @@ from numpy.testing import assert_array_equal from .. import Nifti1Image, load, minc1 -from ..deprecated import ModuleProxy -from ..deprecator import ExpiredDeprecationError from ..externals.netcdf import netcdf_file from ..minc1 import Minc1File, Minc1Image, MincHeader from ..optpkg import optional_package -from ..testing import assert_data_similar, clear_and_catch_warnings, data_path +from ..testing import assert_data_similar, data_path from ..tmpdirs import InTemporaryDirectory from . import test_spatialimages as tsi from .test_fileslice import slicer_samples diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py index e76cb05ce7..7ab29edfde 100644 --- a/nibabel/tests/test_minc2.py +++ b/nibabel/tests/test_minc2.py @@ -129,5 +129,5 @@ def test_bad_diminfo(): # File has a bad spacing field 'xspace' when it should be # `irregular`, `regular__` or absent (default to regular__). # We interpret an invalid spacing as absent, but warn. 
- with pytest.warns(UserWarning) as w: + with pytest.warns(UserWarning) as _: Minc2Image.from_filename(fname) diff --git a/nibabel/tests/test_nibabel_data.py b/nibabel/tests/test_nibabel_data.py index 1687589549..0c7116e9a0 100644 --- a/nibabel/tests/test_nibabel_data.py +++ b/nibabel/tests/test_nibabel_data.py @@ -2,9 +2,8 @@ """ import os -from os.path import dirname, isdir +from os.path import dirname, isdir, realpath from os.path import join as pjoin -from os.path import realpath from . import nibabel_data as nibd diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index c7c4d1d84b..a5b9427bc4 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -731,7 +731,6 @@ def unshear_44(affine): class TestNifti1SingleHeader(TestNifti1PairHeader): - header_class = Nifti1Header def test_empty(self): diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index 742ef148bf..a25e23b49d 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -13,7 +13,7 @@ from numpy.testing import assert_array_equal from .. import nifti2 -from ..nifti1 import Nifti1Extension, Nifti1Extensions, Nifti1Header, Nifti1PairHeader +from ..nifti1 import Nifti1Extension, Nifti1Header, Nifti1PairHeader from ..nifti2 import Nifti2Header, Nifti2Image, Nifti2Pair, Nifti2PairHeader from ..testing import data_path from . import test_nifti1 as tn1 diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index a228e66135..5c6a1643cc 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -19,7 +19,6 @@ import pytest from packaging.version import Version -from ..deprecator import ExpiredDeprecationError from ..openers import HAVE_INDEXED_GZIP, BZ2File, DeterministicGzipFile, ImageOpener, Opener from ..optpkg import optional_package from ..tmpdirs import InTemporaryDirectory diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 0094711e79..7e4a33e29f 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Testing for orientations module""" -import warnings import numpy as np import pytest @@ -185,7 +184,6 @@ def test_apply(): apply_orientation(a[:, :, 1], ornt) with pytest.raises(OrientationError): apply_orientation(a, [[0, 1], [np.nan, np.nan], [2, 1]]) - shape = np.array(a.shape) for ornt in ALL_ORNTS: t_arr = apply_orientation(a, ornt) assert_array_equal(a.shape, np.array(t_arr.shape)[np.array(ornt)[:, 0]]) diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 6035d47f8d..980a2f403f 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -285,8 +285,8 @@ def test_affine_regression(): # Test against checked affines from previous runs # Checked against Michael's data using some GUI tools # Data at http://psydata.ovgu.de/philips_achieva_testfiles/conversion2 - for basename, exp_affine in PREVIOUS_AFFINES.items(): - fname = pjoin(DATA_PATH, basename + '.PAR') + for basename_affine, exp_affine in PREVIOUS_AFFINES.items(): + fname = pjoin(DATA_PATH, basename_affine + '.PAR') with open(fname) as fobj: hdr = PARRECHeader.from_fileobj(fobj) assert_almost_equal(hdr.get_affine(), exp_affine) @@ -884,7 +884,6 @@ def test_dualTR(): def test_ADC_map(): # test reading an apparent diffusion coefficient map with open(ADC_PAR) as fobj: - # two truncation warnings expected because general_info indicates: # 
1.) multiple directions # 2.) multiple b-values diff --git a/nibabel/tests/test_parrec_data.py b/nibabel/tests/test_parrec_data.py index a437fafeda..2a52d97250 100644 --- a/nibabel/tests/test_parrec_data.py +++ b/nibabel/tests/test_parrec_data.py @@ -3,12 +3,10 @@ import unittest from glob import glob -from os.path import basename, exists +from os.path import basename, exists, splitext from os.path import join as pjoin -from os.path import splitext import numpy as np -import pytest from numpy.testing import assert_almost_equal from .. import load as top_load diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index dfe18c975a..1422bb3351 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -15,7 +15,7 @@ def test_pkg_info(): - nibabel.pkg_info.get_pkg_info - nibabel.pkg_info.pkg_commit_hash """ - info = nib.get_info() + _ = nib.get_info() def test_version(): diff --git a/nibabel/tests/test_pointset.py b/nibabel/tests/test_pointset.py index fb9a7c5c81..f4f0e4361b 100644 --- a/nibabel/tests/test_pointset.py +++ b/nibabel/tests/test_pointset.py @@ -1,15 +1,12 @@ from math import prod from pathlib import Path -from unittest import skipUnless import numpy as np import pytest from nibabel import pointset as ps from nibabel.affines import apply_affine -from nibabel.arrayproxy import ArrayProxy from nibabel.fileslice import strided_scalar -from nibabel.onetime import auto_attr from nibabel.optpkg import optional_package from nibabel.spatialimages import SpatialImage from nibabel.tests.nibabel_data import get_nibabel_data diff --git a/nibabel/tests/test_quaternions.py b/nibabel/tests/test_quaternions.py index fff7c5e040..ec882dd0b3 100644 --- a/nibabel/tests/test_quaternions.py +++ b/nibabel/tests/test_quaternions.py @@ -112,7 +112,6 @@ def test_fillpositive_simulated_error(dtype): # Permit 1 epsilon per value (default, but make explicit here) w2_thresh = 3 * np.finfo(dtype).eps - pos_error = neg_error = False for _ in range(50): xyz = norm(gen_vec(dtype)) @@ -186,12 +185,6 @@ def test_inverse(M, q): assert np.allclose(iM, iqM) -def test_eye(): - qi = nq.eye() - assert np.all([1, 0, 0, 0] == qi) - assert np.allclose(nq.quat2mat(qi), np.eye(3)) - - @pytest.mark.parametrize('vec', np.eye(3)) @pytest.mark.parametrize('M, q', eg_pairs) def test_qrotate(vec, M, q): diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 772d395fd4..7a56f3fb8b 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,4 +1,3 @@ -import unittest from unittest import mock import pytest diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index cc4bb468ad..455a994ae1 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -11,9 +11,8 @@ import sys import unittest from glob import glob -from os.path import abspath, basename, dirname, exists +from os.path import abspath, basename, dirname, exists, splitext from os.path import join as pjoin -from os.path import splitext import numpy as np import pytest @@ -197,7 +196,7 @@ def test_help(): # needs special treatment since depends on fuse module which # might not be available. 
try: - import fuse + import fuse # noqa: F401 except Exception: continue # do not test this one code, stdout, stderr = run_command([cmd, '--help']) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 7157d5c459..a5cab9e751 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -399,8 +399,7 @@ def test_slicer(self): img_klass = self.image_class in_data_template = np.arange(240, dtype=np.int16) base_affine = np.eye(4) - t_axis = None - for dshape in ((4, 5, 6, 2), (8, 5, 6)): # Time series # Volume + for dshape in ((4, 5, 6, 2), (8, 5, 6)): # Time series # Volume in_data = in_data_template.copy().reshape(dshape) img = img_klass(in_data, base_affine.copy()) diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index dee3ea3554..1ca1fb9b97 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -114,7 +114,7 @@ def test_warn_error(): with error_warnings(): with pytest.raises(UserWarning): warnings.warn('A test') - with error_warnings() as w: # w not used for anything + with error_warnings() as _: with pytest.raises(UserWarning): warnings.warn('A test') assert n_warns == len(warnings.filters) @@ -134,7 +134,7 @@ def test_warn_ignore(): with suppress_warnings(): warnings.warn('Here is a warning, you will not see it') warnings.warn('Nor this one', DeprecationWarning) - with suppress_warnings() as w: # w not used + with suppress_warnings() as _: warnings.warn('Here is a warning, you will not see it') warnings.warn('Nor this one', DeprecationWarning) assert n_warns == len(warnings.filters) diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index 10b4b3f22c..e18fb0210a 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -435,15 +435,6 @@ def test_copy(self): self._set_something_into_hdr(hdr2) assert hdr == hdr2 - def test_copy(self): - hdr = self.header_class() - hdr2 = hdr.copy() - assert hdr == hdr2 - self._set_something_into_hdr(hdr) - assert hdr != hdr2 - self._set_something_into_hdr(hdr2) - assert hdr == hdr2 - def test_checks(self): # Test header checks hdr_t = self.header_class() diff --git a/tools/make_tarball.py b/tools/make_tarball.py index 3cdad40d0b..b49a1f276a 100755 --- a/tools/make_tarball.py +++ b/tools/make_tarball.py @@ -5,7 +5,7 @@ import os import commands -from toollib import * +from toollib import c, cd tag = commands.getoutput('git describe') base_name = f'nibabel-{tag}' diff --git a/tools/markdown_release_notes.py b/tools/markdown_release_notes.py index 66e7876036..73bdbf7752 100644 --- a/tools/markdown_release_notes.py +++ b/tools/markdown_release_notes.py @@ -27,7 +27,7 @@ def main(): if in_release_notes: break in_release_notes = match.group(1) == version - next(f) # Skip the underline + next(f) # Skip the underline continue if in_release_notes: diff --git a/tools/mpkg_wrapper.py b/tools/mpkg_wrapper.py index 0a96156e4d..f5f059b28d 100644 --- a/tools/mpkg_wrapper.py +++ b/tools/mpkg_wrapper.py @@ -24,7 +24,7 @@ def main(): g = dict(globals()) g['__file__'] = sys.argv[0] g['__name__'] = '__main__' - execfile(sys.argv[0], g, g) + exec(open(sys.argv[0]).read(), g, g) if __name__ == '__main__': From 04dd1f4fd1a7491c91d1c3c1dfadeac8ade5aeaa Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 5 Mar 2024 17:24:14 +0100 Subject: [PATCH 038/203] =?UTF-8?q?MNT:=20ruff=200.1.9=20=E2=86=92=200.3.0?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .pre-commit-config.yaml | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ef2d891fbd..d35d287579 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ repos: - id: check-merge-conflict - id: check-vcs-permalinks - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.9 + rev: v0.3.0 hooks: - id: ruff args: [--fix, --show-fix, --exit-non-zero-on-fix] diff --git a/tox.ini b/tox.ini index 4e9b220ce8..53860445aa 100644 --- a/tox.ini +++ b/tox.ini @@ -139,7 +139,7 @@ commands = description = Check our style guide labels = check deps = - ruff>=0.1.9 + ruff>=0.3.0 skip_install = true commands = ruff --diff nibabel From 3ee9480d356198167c9c45854ecc489a7c186416 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 5 Mar 2024 16:51:02 +0100 Subject: [PATCH 039/203] MNT: run `ruff check --fix` and `ruff format` Also fix remaining issues manually. --- nibabel/_compression.py | 1 + nibabel/affines.py | 1 + nibabel/analyze.py | 1 + nibabel/arrayproxy.py | 15 +++++++-------- nibabel/arraywriters.py | 1 + nibabel/benchmarks/butils.py | 3 +-- nibabel/brikhead.py | 1 + nibabel/casting.py | 1 + nibabel/cifti2/cifti2.py | 1 + nibabel/cifti2/cifti2_axes.py | 1 + nibabel/cifti2/tests/test_cifti2.py | 4 ++-- nibabel/cifti2/tests/test_new_cifti2.py | 1 + nibabel/cmdline/__init__.py | 3 +-- nibabel/cmdline/diff.py | 6 +++--- nibabel/cmdline/parrec2nii.py | 3 +-- nibabel/cmdline/tck2trk.py | 1 + nibabel/cmdline/tests/test_parrec2nii.py | 4 ++-- nibabel/cmdline/utils.py | 1 - nibabel/data.py | 1 + nibabel/dataobj_images.py | 1 + nibabel/deprecated.py | 1 + nibabel/deprecator.py | 1 + nibabel/dft.py | 2 -- nibabel/ecat.py | 1 + nibabel/environment.py | 1 + nibabel/eulerangles.py | 1 + nibabel/filebasedimages.py | 1 + nibabel/fileholders.py | 1 + nibabel/filename_parser.py | 1 + nibabel/fileslice.py | 1 + nibabel/freesurfer/__init__.py | 3 +-- nibabel/freesurfer/io.py | 3 +-- nibabel/freesurfer/mghformat.py | 1 + nibabel/funcs.py | 1 + nibabel/gifti/gifti.py | 1 + nibabel/gifti/tests/test_gifti.py | 4 ++-- nibabel/imageclasses.py | 1 + nibabel/imageglobals.py | 1 + nibabel/imagestats.py | 1 + nibabel/loadsave.py | 1 + nibabel/minc1.py | 1 + nibabel/minc2.py | 1 + nibabel/nicom/__init__.py | 1 + nibabel/nicom/ascconv.py | 1 + nibabel/nicom/csareader.py | 4 ++-- nibabel/nicom/dwiparams.py | 1 + nibabel/nicom/tests/test_ascconv.py | 3 +-- nibabel/nicom/tests/test_csareader.py | 4 ++-- nibabel/nicom/tests/test_dicomreaders.py | 3 +-- nibabel/nicom/tests/test_dicomwrappers.py | 3 +-- nibabel/nicom/tests/test_dwiparams.py | 3 +-- nibabel/nicom/tests/test_structreader.py | 4 ++-- nibabel/nicom/tests/test_utils.py | 4 ++-- nibabel/nicom/utils.py | 3 +-- nibabel/nifti1.py | 1 + nibabel/nifti2.py | 1 + nibabel/onetime.py | 9 +++++---- nibabel/openers.py | 7 +++---- nibabel/optpkg.py | 1 + nibabel/orientations.py | 1 + nibabel/parrec.py | 1 + nibabel/pointset.py | 9 +++++---- nibabel/processing.py | 1 + nibabel/pydicom_compat.py | 1 + nibabel/quaternions.py | 1 + nibabel/rstutils.py | 1 + nibabel/spaces.py | 1 + nibabel/spatialimages.py | 16 ++++++---------- nibabel/spm2analyze.py | 1 + nibabel/spm99analyze.py | 1 + nibabel/streamlines/__init__.py | 3 +-- nibabel/streamlines/header.py | 3 +-- .../streamlines/tests/test_tractogram_file.py | 3 +-- 
nibabel/streamlines/tractogram_file.py | 4 ++-- nibabel/testing/helpers.py | 4 ++-- nibabel/testing/np_features.py | 4 ++-- nibabel/tests/data/check_parrec_reslice.py | 1 + nibabel/tests/data/gen_standard.py | 1 + nibabel/tests/nibabel_data.py | 3 +-- nibabel/tests/scriptrunner.py | 1 + nibabel/tests/test_api_validators.py | 4 ++-- nibabel/tests/test_arrayproxy.py | 3 +-- nibabel/tests/test_batteryrunners.py | 3 +-- nibabel/tests/test_casting.py | 4 ++-- nibabel/tests/test_data.py | 3 ++- nibabel/tests/test_dataobj_images.py | 3 +-- nibabel/tests/test_deprecated.py | 3 +-- nibabel/tests/test_deprecator.py | 3 +-- nibabel/tests/test_dft.py | 3 +-- nibabel/tests/test_diff.py | 3 +-- nibabel/tests/test_ecat_data.py | 3 +-- nibabel/tests/test_environment.py | 3 +-- nibabel/tests/test_filebasedimages.py | 3 +-- nibabel/tests/test_fileholders.py | 3 +-- nibabel/tests/test_filename_parser.py | 1 + nibabel/tests/test_files_interface.py | 3 +-- nibabel/tests/test_fileslice.py | 1 - nibabel/tests/test_fileutils.py | 4 +--- nibabel/tests/test_floating.py | 4 ++-- nibabel/tests/test_image_api.py | 1 - nibabel/tests/test_image_load_save.py | 1 + nibabel/tests/test_imageclasses.py | 3 +-- nibabel/tests/test_imageglobals.py | 4 ++-- nibabel/tests/test_loadsave.py | 3 +-- nibabel/tests/test_minc2_data.py | 3 +-- nibabel/tests/test_mriutils.py | 4 +--- nibabel/tests/test_nibabel_data.py | 3 +-- nibabel/tests/test_nifti1.py | 1 + nibabel/tests/test_nifti2.py | 1 + nibabel/tests/test_onetime.py | 2 -- nibabel/tests/test_openers.py | 1 + nibabel/tests/test_optpkg.py | 3 +-- nibabel/tests/test_orientations.py | 1 - nibabel/tests/test_parrec.py | 3 +-- nibabel/tests/test_parrec_data.py | 3 +-- nibabel/tests/test_pkg_info.py | 3 +-- nibabel/tests/test_processing.py | 3 +-- nibabel/tests/test_rstutils.py | 3 +-- nibabel/tests/test_spaces.py | 3 +-- nibabel/tests/test_spatialimages.py | 3 +-- nibabel/tests/test_testing.py | 3 +-- nibabel/tests/test_tripwire.py | 3 +-- nibabel/tests/test_wrapstruct.py | 1 + nibabel/tmpdirs.py | 1 + nibabel/tripwire.py | 1 + nibabel/viewers.py | 1 + nibabel/volumeutils.py | 9 +++++---- nibabel/wrapstruct.py | 1 + nibabel/xmlutils.py | 1 + tox.ini | 2 +- 130 files changed, 166 insertions(+), 161 deletions(-) diff --git a/nibabel/_compression.py b/nibabel/_compression.py index b7cfc8f49f..eeb66f36b4 100644 --- a/nibabel/_compression.py +++ b/nibabel/_compression.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Constants and types for dealing transparently with compression""" + from __future__ import annotations import bz2 diff --git a/nibabel/affines.py b/nibabel/affines.py index 1478fd2dca..4b6001dec0 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utility routines for working with points and affine transforms""" + from functools import reduce import numpy as np diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 20fdac055a..189f2e0a1a 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -81,6 +81,7 @@ can be loaded with and without a default flip, so the saved zoom will not constrain the affine. 
""" + from __future__ import annotations import numpy as np diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 57d8aa0f8b..4bf5bd4700 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -25,6 +25,7 @@ See :mod:`nibabel.tests.test_proxy_api` for proxy API conformance checks. """ + from __future__ import annotations import typing as ty @@ -74,21 +75,19 @@ class ArrayLike(ty.Protocol): shape: tuple[int, ...] @property - def ndim(self) -> int: - ... # pragma: no cover + def ndim(self) -> int: ... # pragma: no cover # If no dtype is passed, any dtype might be returned, depending on the array-like @ty.overload - def __array__(self, dtype: None = ..., /) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: - ... # pragma: no cover + def __array__( + self, dtype: None = ..., / + ) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # pragma: no cover # Any dtype might be passed, and *that* dtype must be returned @ty.overload - def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: - ... # pragma: no cover + def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... # pragma: no cover - def __getitem__(self, key, /) -> npt.NDArray: - ... # pragma: no cover + def __getitem__(self, key, /) -> npt.NDArray: ... # pragma: no cover class ArrayProxy(ArrayLike): diff --git a/nibabel/arraywriters.py b/nibabel/arraywriters.py index 751eb6ad1f..1f55263fc3 100644 --- a/nibabel/arraywriters.py +++ b/nibabel/arraywriters.py @@ -28,6 +28,7 @@ def __init__(self, array, out_dtype=None) something else to make sense of conversions between float and int, or between larger ints and smaller. """ + import numpy as np from .casting import best_float, floor_exact, int_abs, shared_range, type_info diff --git a/nibabel/benchmarks/butils.py b/nibabel/benchmarks/butils.py index 01d6931eba..13c255d1c1 100644 --- a/nibabel/benchmarks/butils.py +++ b/nibabel/benchmarks/butils.py @@ -1,5 +1,4 @@ -"""Benchmarking utilities -""" +"""Benchmarking utilities""" from .. import get_info diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 6694ff08a5..3a3cfd0871 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -26,6 +26,7 @@ am aware) always be >= 1. This permits sub-brick indexing common in AFNI programs (e.g., example4d+orig'[0]'). """ + import os import re from copy import deepcopy diff --git a/nibabel/casting.py b/nibabel/casting.py index 77da57e406..31e27d0e8c 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -3,6 +3,7 @@ Most routines work round some numpy oddities in floating point precision and casting. Others work round numpy casting to and from python ints """ + from __future__ import annotations import warnings diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 452bceb7ea..cb2e0cfaf4 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -16,6 +16,7 @@ http://www.nitrc.org/projects/cifti """ + import re from collections import OrderedDict from collections.abc import Iterable, MutableMapping, MutableSequence diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 6443a34fb5..af7c63beaa 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -118,6 +118,7 @@ ... 
bm_cortex))) """ + import abc from operator import xor diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index d7fd0a0eda..895b8f9597 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -1,5 +1,5 @@ -"""Testing CIFTI-2 objects -""" +"""Testing CIFTI-2 objects""" + import collections from xml.etree import ElementTree diff --git a/nibabel/cifti2/tests/test_new_cifti2.py b/nibabel/cifti2/tests/test_new_cifti2.py index 0f90b822da..4cf5502ad7 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -6,6 +6,7 @@ These functions are used in the tests to generate most CIFTI file types from scratch. """ + import numpy as np import pytest diff --git a/nibabel/cmdline/__init__.py b/nibabel/cmdline/__init__.py index 6478e5f261..f0744521bc 100644 --- a/nibabel/cmdline/__init__.py +++ b/nibabel/cmdline/__init__.py @@ -6,5 +6,4 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Functionality to be exposed in the command line -""" +"""Functionality to be exposed in the command line""" diff --git a/nibabel/cmdline/diff.py b/nibabel/cmdline/diff.py index d20a105e76..1231a778f4 100755 --- a/nibabel/cmdline/diff.py +++ b/nibabel/cmdline/diff.py @@ -246,9 +246,9 @@ def get_data_diff(files, max_abs=0, max_rel=0, dtype=np.float64): sub_thr = rel_diff <= max_rel # Since we operated on sub-selected values already, we need # to plug them back in - candidates[ - tuple(indexes[sub_thr] for indexes in np.where(candidates)) - ] = False + candidates[tuple(indexes[sub_thr] for indexes in np.where(candidates))] = ( + False + ) max_rel_diff = np.max(rel_diff) else: max_rel_diff = 0 diff --git a/nibabel/cmdline/parrec2nii.py b/nibabel/cmdline/parrec2nii.py index 9340626395..0ae6b3fb40 100644 --- a/nibabel/cmdline/parrec2nii.py +++ b/nibabel/cmdline/parrec2nii.py @@ -1,5 +1,4 @@ -"""Code for PAR/REC to NIfTI converter command -""" +"""Code for PAR/REC to NIfTI converter command""" import csv import os diff --git a/nibabel/cmdline/tck2trk.py b/nibabel/cmdline/tck2trk.py index d5d29ba430..a73540c446 100644 --- a/nibabel/cmdline/tck2trk.py +++ b/nibabel/cmdline/tck2trk.py @@ -1,6 +1,7 @@ """ Convert tractograms (TCK -> TRK). 
""" + import argparse import os diff --git a/nibabel/cmdline/tests/test_parrec2nii.py b/nibabel/cmdline/tests/test_parrec2nii.py index 017df9813a..ccedafb74b 100644 --- a/nibabel/cmdline/tests/test_parrec2nii.py +++ b/nibabel/cmdline/tests/test_parrec2nii.py @@ -1,5 +1,5 @@ -"""Tests for the parrec2nii exe code -""" +"""Tests for the parrec2nii exe code""" + from os.path import basename, isfile, join from unittest.mock import MagicMock, Mock, patch diff --git a/nibabel/cmdline/utils.py b/nibabel/cmdline/utils.py index 2149235704..d89cc5c964 100644 --- a/nibabel/cmdline/utils.py +++ b/nibabel/cmdline/utils.py @@ -10,7 +10,6 @@ Helper utilities to be used in cmdline applications """ - # global verbosity switch import re from io import StringIO diff --git a/nibabel/data.py b/nibabel/data.py index 7e2fe2af70..c49580d09b 100644 --- a/nibabel/data.py +++ b/nibabel/data.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utilities to find files from NIPY data packages""" + import configparser import glob import os diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index eaf341271e..a2ee691a16 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -7,6 +7,7 @@ * returns an array from ``numpy.asanyarray(obj)``; * has an attribute or property ``shape``. """ + from __future__ import annotations import typing as ty diff --git a/nibabel/deprecated.py b/nibabel/deprecated.py index 092370106e..b8c378cee3 100644 --- a/nibabel/deprecated.py +++ b/nibabel/deprecated.py @@ -1,4 +1,5 @@ """Module to help with deprecating objects and classes""" + from __future__ import annotations import typing as ty diff --git a/nibabel/deprecator.py b/nibabel/deprecator.py index 779fdb462d..b9912534d2 100644 --- a/nibabel/deprecator.py +++ b/nibabel/deprecator.py @@ -1,4 +1,5 @@ """Class for recording and reporting deprecations""" + from __future__ import annotations import functools diff --git a/nibabel/dft.py b/nibabel/dft.py index aeb8accbb5..d9e3359998 100644 --- a/nibabel/dft.py +++ b/nibabel/dft.py @@ -9,7 +9,6 @@ # Copyright (C) 2011 Christian Haselgrove """DICOM filesystem tools""" - import contextlib import getpass import logging @@ -44,7 +43,6 @@ class VolumeError(DFTError): class InstanceStackError(DFTError): - """bad series of instance numbers""" def __init__(self, series, i, si): diff --git a/nibabel/ecat.py b/nibabel/ecat.py index 85de9184b5..03a4c72b98 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -42,6 +42,7 @@ GPL and some of the header files are adapted from CTI files (called CTI code below). It's not clear what the licenses are for these files. """ + import warnings from numbers import Integral diff --git a/nibabel/environment.py b/nibabel/environment.py index 09aaa6320f..a828ccb865 100644 --- a/nibabel/environment.py +++ b/nibabel/environment.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Settings from the system environment relevant to NIPY""" + import os from os.path import join as pjoin diff --git a/nibabel/eulerangles.py b/nibabel/eulerangles.py index 13dc059644..b1d187e8c1 100644 --- a/nibabel/eulerangles.py +++ b/nibabel/eulerangles.py @@ -82,6 +82,7 @@ ``y``, followed by rotation around ``x``, is known (confusingly) as "xyz", pitch-roll-yaw, Cardan angles, or Tait-Bryan angles. 
""" + import math from functools import reduce diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 42760cccdf..4e0d06b64c 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Common interface for any image format--volume or surface, binary or xml""" + from __future__ import annotations import io diff --git a/nibabel/fileholders.py b/nibabel/fileholders.py index a27715350d..3db4c62a9e 100644 --- a/nibabel/fileholders.py +++ b/nibabel/fileholders.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Fileholder class""" + from __future__ import annotations import io diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index 92a2f4b1f5..bdbca6a383 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Create filename pairs, triplets etc, with expected extensions""" + from __future__ import annotations import os diff --git a/nibabel/fileslice.py b/nibabel/fileslice.py index fe7d6bba54..816f1cdaf6 100644 --- a/nibabel/fileslice.py +++ b/nibabel/fileslice.py @@ -1,4 +1,5 @@ """Utilities for getting array slices out of file-like objects""" + import operator from functools import reduce from mmap import mmap diff --git a/nibabel/freesurfer/__init__.py b/nibabel/freesurfer/__init__.py index 48922285c9..aa76eb2e89 100644 --- a/nibabel/freesurfer/__init__.py +++ b/nibabel/freesurfer/__init__.py @@ -1,5 +1,4 @@ -"""Reading functions for freesurfer files -""" +"""Reading functions for freesurfer files""" # ruff: noqa: F401 diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py index b4d6ef2a3a..74bc05fc31 100644 --- a/nibabel/freesurfer/io.py +++ b/nibabel/freesurfer/io.py @@ -1,5 +1,4 @@ -"""Read / write FreeSurfer geometry, morphometry, label, annotation formats -""" +"""Read / write FreeSurfer geometry, morphometry, label, annotation formats""" import getpass import time diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 5dd2660342..93abf7b407 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -10,6 +10,7 @@ Author: Krish Subramaniam """ + from os.path import splitext import numpy as np diff --git a/nibabel/funcs.py b/nibabel/funcs.py index f83ed68709..cda4a5d2ed 100644 --- a/nibabel/funcs.py +++ b/nibabel/funcs.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Processor functions for images""" + import numpy as np from .loadsave import load diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 7aba877309..7c5c3c4fb0 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -11,6 +11,7 @@ The Gifti specification was (at time of writing) available as a PDF download from http://www.nitrc.org/projects/gifti/ """ + from __future__ import annotations import base64 diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 7e4c223971..f27546afe7 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -1,5 +1,5 @@ -"""Testing gifti objects -""" +"""Testing gifti objects""" + import itertools import sys from io import BytesIO diff --git a/nibabel/imageclasses.py b/nibabel/imageclasses.py index b36131ed94..20cf1cac9c 100644 --- a/nibabel/imageclasses.py +++ 
b/nibabel/imageclasses.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Define supported image classes and names""" + from __future__ import annotations from .analyze import AnalyzeImage diff --git a/nibabel/imageglobals.py b/nibabel/imageglobals.py index 551719a7ee..81a1742809 100644 --- a/nibabel/imageglobals.py +++ b/nibabel/imageglobals.py @@ -23,6 +23,7 @@ Use ``logger.level = 1`` to see all messages. """ + import logging error_level = 40 diff --git a/nibabel/imagestats.py b/nibabel/imagestats.py index 38dc9d3f16..36fbddee0e 100644 --- a/nibabel/imagestats.py +++ b/nibabel/imagestats.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Functions for computing image statistics""" + import numpy as np from nibabel.imageclasses import spatial_axes_first diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 463a687975..159d9bae82 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -8,6 +8,7 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # module imports """Utilities to load and save image objects""" + from __future__ import annotations import os diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 5f8422bc23..d0b9fd5375 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Read MINC1 format images""" + from __future__ import annotations from numbers import Integral diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 912b5d28ae..161be5c111 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -25,6 +25,7 @@ mincstats my_funny.mnc """ + import warnings import numpy as np diff --git a/nibabel/nicom/__init__.py b/nibabel/nicom/__init__.py index 3a389db172..d15e0846ff 100644 --- a/nibabel/nicom/__init__.py +++ b/nibabel/nicom/__init__.py @@ -19,6 +19,7 @@ dwiparams structreader """ + import warnings warnings.warn( diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py index be6da9786c..0966de2a96 100644 --- a/nibabel/nicom/ascconv.py +++ b/nibabel/nicom/ascconv.py @@ -3,6 +3,7 @@ """ Parse the "ASCCONV" meta data format found in a variety of Siemens MR files. """ + import ast import re from collections import OrderedDict diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index 40f3f852d9..df379e0be8 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -1,5 +1,5 @@ -"""CSA header reader from SPM spec -""" +"""CSA header reader from SPM spec""" + import numpy as np from .structreader import Unpacker diff --git a/nibabel/nicom/dwiparams.py b/nibabel/nicom/dwiparams.py index cb0e501202..5930e96f91 100644 --- a/nibabel/nicom/dwiparams.py +++ b/nibabel/nicom/dwiparams.py @@ -18,6 +18,7 @@ B ~ (q_est . 
q_est.T) / norm(q_est) """ + import numpy as np import numpy.linalg as npl diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py index cf40298c56..afe5f05e13 100644 --- a/nibabel/nicom/tests/test_ascconv.py +++ b/nibabel/nicom/tests/test_ascconv.py @@ -1,5 +1,4 @@ -"""Testing Siemens "ASCCONV" parser -""" +"""Testing Siemens "ASCCONV" parser""" from collections import OrderedDict from os.path import dirname diff --git a/nibabel/nicom/tests/test_csareader.py b/nibabel/nicom/tests/test_csareader.py index ddb46a942a..f31f4a3935 100644 --- a/nibabel/nicom/tests/test_csareader.py +++ b/nibabel/nicom/tests/test_csareader.py @@ -1,5 +1,5 @@ -"""Testing Siemens CSA header reader -""" +"""Testing Siemens CSA header reader""" + import gzip from copy import deepcopy from os.path import join as pjoin diff --git a/nibabel/nicom/tests/test_dicomreaders.py b/nibabel/nicom/tests/test_dicomreaders.py index 17ea7430f2..d508343be1 100644 --- a/nibabel/nicom/tests/test_dicomreaders.py +++ b/nibabel/nicom/tests/test_dicomreaders.py @@ -1,5 +1,4 @@ -"""Testing reading DICOM files -""" +"""Testing reading DICOM files""" from os.path import join as pjoin diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index fa2dfc07c6..e96607df9e 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -1,5 +1,4 @@ -"""Testing DICOM wrappers -""" +"""Testing DICOM wrappers""" import gzip from copy import copy diff --git a/nibabel/nicom/tests/test_dwiparams.py b/nibabel/nicom/tests/test_dwiparams.py index 6e98b4af61..559c0a2143 100644 --- a/nibabel/nicom/tests/test_dwiparams.py +++ b/nibabel/nicom/tests/test_dwiparams.py @@ -1,5 +1,4 @@ -"""Testing diffusion parameter processing -""" +"""Testing diffusion parameter processing""" import numpy as np import pytest diff --git a/nibabel/nicom/tests/test_structreader.py b/nibabel/nicom/tests/test_structreader.py index 2d37bbc3ed..ccd2dd4f85 100644 --- a/nibabel/nicom/tests/test_structreader.py +++ b/nibabel/nicom/tests/test_structreader.py @@ -1,5 +1,5 @@ -"""Testing Siemens CSA header reader -""" +"""Testing Siemens CSA header reader""" + import struct import sys diff --git a/nibabel/nicom/tests/test_utils.py b/nibabel/nicom/tests/test_utils.py index ea3b999fad..4f0d7e68d5 100644 --- a/nibabel/nicom/tests/test_utils.py +++ b/nibabel/nicom/tests/test_utils.py @@ -1,5 +1,5 @@ -"""Testing nicom.utils module -""" +"""Testing nicom.utils module""" + import re from nibabel.optpkg import optional_package diff --git a/nibabel/nicom/utils.py b/nibabel/nicom/utils.py index 617ff2a28a..24f4afc2fe 100644 --- a/nibabel/nicom/utils.py +++ b/nibabel/nicom/utils.py @@ -1,5 +1,4 @@ -"""Utilities for working with DICOM datasets -""" +"""Utilities for working with DICOM datasets""" def find_private_section(dcm_data, group_no, creator): diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 4cf1e52748..d07e54de18 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -10,6 +10,7 @@ NIfTI1 format defined at http://nifti.nimh.nih.gov/nifti-1/ """ + from __future__ import annotations import warnings diff --git a/nibabel/nifti2.py b/nibabel/nifti2.py index 8d9b81e1f9..9c898b47ba 100644 --- a/nibabel/nifti2.py +++ b/nibabel/nifti2.py @@ -12,6 +12,7 @@ https://www.nitrc.org/forum/message.php?msg_id=3738 """ + import numpy as np from .analyze import AnalyzeHeader diff --git a/nibabel/onetime.py b/nibabel/onetime.py index e365e81f74..fa1b2f9927 100644 --- a/nibabel/onetime.py 
+++ b/nibabel/onetime.py @@ -18,6 +18,7 @@ [2] Python data model, https://docs.python.org/reference/datamodel.html """ + from __future__ import annotations import typing as ty @@ -136,12 +137,12 @@ def __init__(self, func: ty.Callable[[InstanceT], T]) -> None: @ty.overload def __get__( self, obj: None, objtype: type[InstanceT] | None = None - ) -> ty.Callable[[InstanceT], T]: - ... # pragma: no cover + ) -> ty.Callable[[InstanceT], T]: ... # pragma: no cover @ty.overload - def __get__(self, obj: InstanceT, objtype: type[InstanceT] | None = None) -> T: - ... # pragma: no cover + def __get__( + self, obj: InstanceT, objtype: type[InstanceT] | None = None + ) -> T: ... # pragma: no cover def __get__( self, obj: InstanceT | None, objtype: type[InstanceT] | None = None diff --git a/nibabel/openers.py b/nibabel/openers.py index d69412fb85..f84ccb7069 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Context manager openers for various fileobject types""" + from __future__ import annotations import gzip @@ -35,11 +36,9 @@ @ty.runtime_checkable class Fileish(ty.Protocol): - def read(self, size: int = -1, /) -> bytes: - ... # pragma: no cover + def read(self, size: int = -1, /) -> bytes: ... # pragma: no cover - def write(self, b: bytes, /) -> int | None: - ... # pragma: no cover + def write(self, b: bytes, /) -> int | None: ... # pragma: no cover class DeterministicGzipFile(gzip.GzipFile): diff --git a/nibabel/optpkg.py b/nibabel/optpkg.py index b59a89bb35..bfe6a629cc 100644 --- a/nibabel/optpkg.py +++ b/nibabel/optpkg.py @@ -1,4 +1,5 @@ """Routines to support optional packages""" + from __future__ import annotations import typing as ty diff --git a/nibabel/orientations.py b/nibabel/orientations.py index 075cbd4ffd..7265bf56f3 100644 --- a/nibabel/orientations.py +++ b/nibabel/orientations.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Utilities for calculating and applying affine orientations""" + import numpy as np import numpy.linalg as npl diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 3a8a6030de..d04f683d1d 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -121,6 +121,7 @@ utility via the option "--strict-sort". The dimension info can be exported to a CSV file by adding the option "--volume-info". """ + import re import warnings from collections import OrderedDict diff --git a/nibabel/pointset.py b/nibabel/pointset.py index 58fca148a8..e39a4d4187 100644 --- a/nibabel/pointset.py +++ b/nibabel/pointset.py @@ -17,6 +17,7 @@ adjacent points to be identified. A *triangular mesh* in particular uses triplets of adjacent vertices to describe faces. """ + from __future__ import annotations import math @@ -40,12 +41,12 @@ class CoordinateArray(ty.Protocol): shape: tuple[int, int] @ty.overload - def __array__(self, dtype: None = ..., /) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: - ... # pragma: no cover + def __array__( + self, dtype: None = ..., / + ) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # pragma: no cover @ty.overload - def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: - ... # pragma: no cover + def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... 
# pragma: no cover @dataclass diff --git a/nibabel/processing.py b/nibabel/processing.py index d634ce7086..6027575d47 100644 --- a/nibabel/processing.py +++ b/nibabel/processing.py @@ -16,6 +16,7 @@ Smoothing and resampling routines need scipy. """ + import numpy as np import numpy.linalg as npl diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py index d61c880117..76423b40a8 100644 --- a/nibabel/pydicom_compat.py +++ b/nibabel/pydicom_compat.py @@ -19,6 +19,7 @@ A deprecated copy is available here for backward compatibility. """ + from __future__ import annotations import warnings diff --git a/nibabel/quaternions.py b/nibabel/quaternions.py index d2fc3ac4ca..77cf8d2d3f 100644 --- a/nibabel/quaternions.py +++ b/nibabel/quaternions.py @@ -25,6 +25,7 @@ >>> vec = np.array([1, 2, 3]).reshape((3,1)) # column vector >>> tvec = np.dot(M, vec) """ + import math import numpy as np diff --git a/nibabel/rstutils.py b/nibabel/rstutils.py index 625a2af477..cb40633e54 100644 --- a/nibabel/rstutils.py +++ b/nibabel/rstutils.py @@ -2,6 +2,7 @@ * Make ReST table given array of values """ + import numpy as np diff --git a/nibabel/spaces.py b/nibabel/spaces.py index e5b87171df..d06a39b0ed 100644 --- a/nibabel/spaces.py +++ b/nibabel/spaces.py @@ -19,6 +19,7 @@ mapping), or * a length 2 sequence with the same information (shape, affine). """ + from itertools import product import numpy as np diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index bcc4336f73..185694cd72 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -129,6 +129,7 @@ >>> np.all(img3.get_fdata(dtype=np.float32) == data) True """ + from __future__ import annotations import io @@ -161,23 +162,18 @@ class HasDtype(ty.Protocol): - def get_data_dtype(self) -> np.dtype: - ... # pragma: no cover + def get_data_dtype(self) -> np.dtype: ... # pragma: no cover - def set_data_dtype(self, dtype: npt.DTypeLike) -> None: - ... # pragma: no cover + def set_data_dtype(self, dtype: npt.DTypeLike) -> None: ... # pragma: no cover @ty.runtime_checkable class SpatialProtocol(ty.Protocol): - def get_data_dtype(self) -> np.dtype: - ... # pragma: no cover + def get_data_dtype(self) -> np.dtype: ... # pragma: no cover - def get_data_shape(self) -> ty.Tuple[int, ...]: - ... # pragma: no cover + def get_data_shape(self) -> ty.Tuple[int, ...]: ... # pragma: no cover - def get_zooms(self) -> ty.Tuple[float, ...]: - ... # pragma: no cover + def get_zooms(self) -> ty.Tuple[float, ...]: ... # pragma: no cover class HeaderDataError(Exception): diff --git a/nibabel/spm2analyze.py b/nibabel/spm2analyze.py index f63785807c..9c4c544cf5 100644 --- a/nibabel/spm2analyze.py +++ b/nibabel/spm2analyze.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Read / write access to SPM2 version of analyze image format""" + import numpy as np from . 
import spm99analyze as spm99 # module import diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index 3465c57190..7be6c240d4 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Read / write access to SPM99 version of analyze image format""" + import warnings from io import BytesIO diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index f3cbd2da59..24a7e01469 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -1,5 +1,4 @@ -"""Multiformat-capable streamline format read / write interface -""" +"""Multiformat-capable streamline format read / write interface""" # ruff: noqa: F401 import os diff --git a/nibabel/streamlines/header.py b/nibabel/streamlines/header.py index 2aed10c62c..a3b52b0747 100644 --- a/nibabel/streamlines/header.py +++ b/nibabel/streamlines/header.py @@ -1,5 +1,4 @@ -"""Field class defining common header fields in tractogram files -""" +"""Field class defining common header fields in tractogram files""" class Field: diff --git a/nibabel/streamlines/tests/test_tractogram_file.py b/nibabel/streamlines/tests/test_tractogram_file.py index 71e2326ecf..6f764009f1 100644 --- a/nibabel/streamlines/tests/test_tractogram_file.py +++ b/nibabel/streamlines/tests/test_tractogram_file.py @@ -1,5 +1,4 @@ -"""Test tractogramFile base class -""" +"""Test tractogramFile base class""" import pytest diff --git a/nibabel/streamlines/tractogram_file.py b/nibabel/streamlines/tractogram_file.py index 2cec1ea9cb..557261e9a0 100644 --- a/nibabel/streamlines/tractogram_file.py +++ b/nibabel/streamlines/tractogram_file.py @@ -1,5 +1,5 @@ -"""Define abstract interface for Tractogram file classes -""" +"""Define abstract interface for Tractogram file classes""" + from abc import ABC, abstractmethod from .header import Field diff --git a/nibabel/testing/helpers.py b/nibabel/testing/helpers.py index 2f25a354d7..ae859d6572 100644 --- a/nibabel/testing/helpers.py +++ b/nibabel/testing/helpers.py @@ -1,5 +1,5 @@ -"""Helper functions for tests -""" +"""Helper functions for tests""" + from io import BytesIO import numpy as np diff --git a/nibabel/testing/np_features.py b/nibabel/testing/np_features.py index eeb783900a..226df64845 100644 --- a/nibabel/testing/np_features.py +++ b/nibabel/testing/np_features.py @@ -1,5 +1,5 @@ -"""Look for changes in numpy behavior over versions -""" +"""Look for changes in numpy behavior over versions""" + from functools import lru_cache import numpy as np diff --git a/nibabel/tests/data/check_parrec_reslice.py b/nibabel/tests/data/check_parrec_reslice.py index 8ade7f539c..244b4c3a64 100644 --- a/nibabel/tests/data/check_parrec_reslice.py +++ b/nibabel/tests/data/check_parrec_reslice.py @@ -21,6 +21,7 @@ The *_cor_SENSE* image has a higher RMS because the back of the phantom is out of the field of view. 
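The comparison amounts to resampling each image onto the reference grid
and summarizing the voxelwise difference. A sketch of that idea (not
necessarily this script's exact code; ``resample_from_to`` comes from
:mod:`nibabel.processing` and requires scipy, and the file names are
hypothetical)::

    import numpy as np
    import nibabel as nib
    from nibabel.processing import resample_from_to

    def rms_difference(img, reference):
        # Put ``img`` onto the reference grid, then compare voxelwise
        resliced = resample_from_to(img, reference)
        diff = resliced.get_fdata() - reference.get_fdata()
        return np.sqrt(np.mean(diff ** 2))

    fixed = nib.load('fixed.nii.gz')
    moving = nib.load('moving.nii.gz')
    print(rms_difference(moving, fixed))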
""" + import glob import numpy as np diff --git a/nibabel/tests/data/gen_standard.py b/nibabel/tests/data/gen_standard.py index 598726fe74..7fd05d936e 100644 --- a/nibabel/tests/data/gen_standard.py +++ b/nibabel/tests/data/gen_standard.py @@ -5,6 +5,7 @@ * standard.trk """ + import numpy as np import nibabel as nib diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 1f89c9c1a1..5919eba925 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -1,5 +1,4 @@ -"""Functions / decorators for finding / requiring nibabel-data directory -""" +"""Functions / decorators for finding / requiring nibabel-data directory""" import unittest from os import environ, listdir diff --git a/nibabel/tests/scriptrunner.py b/nibabel/tests/scriptrunner.py index 1e8b1fdda2..2f3de50791 100644 --- a/nibabel/tests/scriptrunner.py +++ b/nibabel/tests/scriptrunner.py @@ -12,6 +12,7 @@ assert_equal(code, 0) assert_equal(stdout, b'This script ran OK') """ + import os import sys from os.path import dirname, isdir, isfile, pathsep, realpath diff --git a/nibabel/tests/test_api_validators.py b/nibabel/tests/test_api_validators.py index 1d21092eef..a4e787465a 100644 --- a/nibabel/tests/test_api_validators.py +++ b/nibabel/tests/test_api_validators.py @@ -1,5 +1,5 @@ -"""Metaclass and class for validating instance APIs -""" +"""Metaclass and class for validating instance APIs""" + import os import pytest diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index a207e4ed6d..a79f63bc72 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -6,8 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Tests for arrayproxy module -""" +"""Tests for arrayproxy module""" import contextlib import gzip diff --git a/nibabel/tests/test_batteryrunners.py b/nibabel/tests/test_batteryrunners.py index 84590452ea..5cae764c8b 100644 --- a/nibabel/tests/test_batteryrunners.py +++ b/nibabel/tests/test_batteryrunners.py @@ -6,8 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Tests for BatteryRunner and Report objects -""" +"""Tests for BatteryRunner and Report objects""" import logging from io import StringIO diff --git a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index f345952aac..d4cf81515a 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -1,5 +1,5 @@ -"""Test casting utilities -""" +"""Test casting utilities""" + import os from platform import machine diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index 3ccb4963ca..cca8d0ba81 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for data module""" + import os import sys import tempfile @@ -26,7 +27,7 @@ @pytest.fixture -def with_nimd_env(request, with_environment): +def with_nimd_env(request): DATA_FUNCS = {} DATA_FUNCS['home_dir_func'] = nibd.get_nipy_user_dir DATA_FUNCS['sys_dir_func'] = nibd.get_nipy_system_dir diff --git a/nibabel/tests/test_dataobj_images.py b/nibabel/tests/test_dataobj_images.py index a1d2dbc9f1..877e407812 100644 --- a/nibabel/tests/test_dataobj_images.py +++ b/nibabel/tests/test_dataobj_images.py @@ -1,5 +1,4 @@ -"""Testing dataobj_images module -""" +"""Testing dataobj_images module""" import numpy as np diff --git a/nibabel/tests/test_deprecated.py b/nibabel/tests/test_deprecated.py index 2576eca3d9..f1c3d517c9 100644 --- a/nibabel/tests/test_deprecated.py +++ b/nibabel/tests/test_deprecated.py @@ -1,5 +1,4 @@ -"""Testing `deprecated` module -""" +"""Testing `deprecated` module""" import warnings diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index 833908af94..eedeec4852 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -1,5 +1,4 @@ -"""Testing deprecator module / Deprecator class -""" +"""Testing deprecator module / Deprecator class""" import sys import warnings diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py index f756600fd3..654af98279 100644 --- a/nibabel/tests/test_dft.py +++ b/nibabel/tests/test_dft.py @@ -1,5 +1,4 @@ -"""Testing dft -""" +"""Testing dft""" import os import sqlite3 diff --git a/nibabel/tests/test_diff.py b/nibabel/tests/test_diff.py index fee71d628b..798a7f7b30 100644 --- a/nibabel/tests/test_diff.py +++ b/nibabel/tests/test_diff.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Test diff -""" +"""Test diff""" from os.path import abspath, dirname from os.path import join as pjoin diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py index 23485ae92b..427645b92a 100644 --- a/nibabel/tests/test_ecat_data.py +++ b/nibabel/tests/test_ecat_data.py @@ -6,8 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Test we can correctly import example ECAT files -""" +"""Test we can correctly import example ECAT files""" import os from os.path import join as pjoin diff --git a/nibabel/tests/test_environment.py b/nibabel/tests/test_environment.py index afb6d36f84..aa58d9b8e0 100644 --- a/nibabel/tests/test_environment.py +++ b/nibabel/tests/test_environment.py @@ -1,5 +1,4 @@ -"""Testing environment settings -""" +"""Testing environment settings""" import os from os import environ as env diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py index 3aa1ae78c5..7d162c0917 100644 --- a/nibabel/tests/test_filebasedimages.py +++ b/nibabel/tests/test_filebasedimages.py @@ -1,5 +1,4 @@ -"""Testing filebasedimages module -""" +"""Testing filebasedimages module""" import warnings from itertools import product diff --git a/nibabel/tests/test_fileholders.py b/nibabel/tests/test_fileholders.py index 33b3f76e6f..83fe75aecc 100644 --- a/nibabel/tests/test_fileholders.py +++ b/nibabel/tests/test_fileholders.py @@ -1,5 +1,4 @@ -"""Testing fileholders -""" +"""Testing fileholders""" from io import BytesIO diff --git a/nibabel/tests/test_filename_parser.py b/nibabel/tests/test_filename_parser.py index 5d352f72dd..4e53cb2e5d 100644 --- a/nibabel/tests/test_filename_parser.py +++ b/nibabel/tests/test_filename_parser.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Tests for filename container""" + import pathlib import pytest diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py index 52557d353d..07e394eca4 100644 --- a/nibabel/tests/test_files_interface.py +++ b/nibabel/tests/test_files_interface.py @@ -6,8 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Testing filesets - a draft -""" +"""Testing filesets - a draft""" from io import BytesIO diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py index e9f65e45a2..355743b04e 100644 --- a/nibabel/tests/test_fileslice.py +++ b/nibabel/tests/test_fileslice.py @@ -1,6 +1,5 @@ """Test slicing of file-like objects""" - import time from functools import partial from io import BytesIO diff --git a/nibabel/tests/test_fileutils.py b/nibabel/tests/test_fileutils.py index 21c7676fce..bc202c6682 100644 --- a/nibabel/tests/test_fileutils.py +++ b/nibabel/tests/test_fileutils.py @@ -6,9 +6,7 @@ # copyright and license terms. # # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Testing fileutils module -""" - +"""Testing fileutils module""" import pytest diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index c2ccd44039..82c8e667a9 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -1,5 +1,5 @@ -"""Test floating point deconstructions and floor methods -""" +"""Test floating point deconstructions and floor methods""" + import sys import numpy as np diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 86c04985f8..5898762322 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -50,7 +50,6 @@ clear_and_catch_warnings, deprecated_to, expires, - nullcontext, ) from .. 
import ( diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 4e787f0d71..934698d9e6 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Tests for loader function""" + import logging import pathlib import shutil diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py index 90424b7d34..7b3add6cd0 100644 --- a/nibabel/tests/test_imageclasses.py +++ b/nibabel/tests/test_imageclasses.py @@ -1,5 +1,4 @@ -"""Testing imageclasses module -""" +"""Testing imageclasses module""" from os.path import dirname from os.path import join as pjoin diff --git a/nibabel/tests/test_imageglobals.py b/nibabel/tests/test_imageglobals.py index ac043d192b..9de72e87c6 100644 --- a/nibabel/tests/test_imageglobals.py +++ b/nibabel/tests/test_imageglobals.py @@ -6,8 +6,8 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Tests for imageglobals module -""" +"""Tests for imageglobals module""" + from .. import imageglobals as igs diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 401ed04535..d039263bd1 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -1,5 +1,4 @@ -"""Testing loadsave module -""" +"""Testing loadsave module""" import pathlib import shutil diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py index e96e716699..a5ea38a8a9 100644 --- a/nibabel/tests/test_minc2_data.py +++ b/nibabel/tests/test_minc2_data.py @@ -6,8 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Test we can correctly import example MINC2_PATH files -""" +"""Test we can correctly import example MINC2_PATH files""" import os from os.path import join as pjoin diff --git a/nibabel/tests/test_mriutils.py b/nibabel/tests/test_mriutils.py index 848579cee6..02b9da5482 100644 --- a/nibabel/tests/test_mriutils.py +++ b/nibabel/tests/test_mriutils.py @@ -6,9 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Testing mriutils module -""" - +"""Testing mriutils module""" import pytest from numpy.testing import assert_almost_equal diff --git a/nibabel/tests/test_nibabel_data.py b/nibabel/tests/test_nibabel_data.py index 0c7116e9a0..7e319ac3f5 100644 --- a/nibabel/tests/test_nibabel_data.py +++ b/nibabel/tests/test_nibabel_data.py @@ -1,5 +1,4 @@ -"""Tests for ``get_nibabel_data`` -""" +"""Tests for ``get_nibabel_data``""" import os from os.path import dirname, isdir, realpath diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index a5b9427bc4..5ee4fb3c15 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Tests for nifti reading package""" + import os import struct import unittest diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index a25e23b49d..01d44c1595 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Tests for nifti2 reading package""" + import os import numpy as np diff --git a/nibabel/tests/test_onetime.py b/nibabel/tests/test_onetime.py index b22a4ef3ec..4d72949271 100644 --- a/nibabel/tests/test_onetime.py +++ b/nibabel/tests/test_onetime.py @@ -1,5 +1,3 @@ -import pytest - from nibabel.onetime import auto_attr, setattr_on_read from nibabel.testing import deprecated_to, expires diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index 5c6a1643cc..15290d5ef9 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Test for openers module""" + import contextlib import hashlib import os diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index 7ffaa2f851..c243633a07 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -1,5 +1,4 @@ -"""Testing optpkg module -""" +"""Testing optpkg module""" import builtins import sys diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 7e4a33e29f..e7c32d7867 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Testing for orientations module""" - import numpy as np import pytest from numpy.testing import assert_array_equal diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 980a2f403f..a312c558a8 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -1,5 +1,4 @@ -"""Testing parrec module -""" +"""Testing parrec module""" from glob import glob from os.path import basename, dirname diff --git a/nibabel/tests/test_parrec_data.py b/nibabel/tests/test_parrec_data.py index 2a52d97250..02a1d5733a 100644 --- a/nibabel/tests/test_parrec_data.py +++ b/nibabel/tests/test_parrec_data.py @@ -1,5 +1,4 @@ -"""Test we can correctly import example PARREC files -""" +"""Test we can correctly import example PARREC files""" import unittest from glob import glob diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index 1422bb3351..c927b0fb9e 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -1,5 +1,4 @@ -"""Testing package info -""" 
+"""Testing package info""" import pytest diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index 27da6639c0..f1a4f0a909 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -6,8 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Testing processing module -""" +"""Testing processing module""" import logging from os.path import dirname diff --git a/nibabel/tests/test_rstutils.py b/nibabel/tests/test_rstutils.py index 847b7a4eee..eab1969857 100644 --- a/nibabel/tests/test_rstutils.py +++ b/nibabel/tests/test_rstutils.py @@ -1,5 +1,4 @@ -"""Test printable table -""" +"""Test printable table""" import numpy as np import pytest diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index dbfe533890..f5e467b2cc 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -1,5 +1,4 @@ -"""Tests for spaces module -""" +"""Tests for spaces module""" import numpy as np import numpy.linalg as npl diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index a5cab9e751..3d14dac18d 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -6,8 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""Testing spatialimages -""" +"""Testing spatialimages""" from io import BytesIO diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 1ca1fb9b97..6b84725218 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -1,5 +1,4 @@ -"""Tests for warnings context managers -""" +"""Tests for warnings context managers""" import os import sys diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py index f172d5c579..bcc81b5f5f 100644 --- a/nibabel/tests/test_tripwire.py +++ b/nibabel/tests/test_tripwire.py @@ -1,5 +1,4 @@ -"""Testing tripwire module -""" +"""Testing tripwire module""" import pytest diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index e18fb0210a..0eb906fee7 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -23,6 +23,7 @@ _field_recoders -> field_recoders """ + import logging from io import BytesIO, StringIO diff --git a/nibabel/tmpdirs.py b/nibabel/tmpdirs.py index 49d69d2bf2..9d67f6acb7 100644 --- a/nibabel/tmpdirs.py +++ b/nibabel/tmpdirs.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Contexts for *with* statement providing temporary directories""" + import os import tempfile from contextlib import contextmanager diff --git a/nibabel/tripwire.py b/nibabel/tripwire.py index fa45e73382..efe651fd93 100644 --- a/nibabel/tripwire.py +++ b/nibabel/tripwire.py @@ -1,4 +1,5 @@ """Class to raise error for missing modules or other misfortunes""" + from typing import Any diff --git a/nibabel/viewers.py b/nibabel/viewers.py index 60ebd3a256..1e927544ba 100644 --- a/nibabel/viewers.py +++ b/nibabel/viewers.py @@ -3,6 +3,7 @@ Includes version of OrthoSlicer3D code originally written by our own Paul Ivanov. 
""" + import weakref import numpy as np diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 90e5e5ff35..cf2437e621 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Utility functions for analyze-like formats""" + from __future__ import annotations import io @@ -1190,13 +1191,13 @@ def _ftype4scaled_finite( @ty.overload def finite_range( arr: npt.ArrayLike, check_nan: ty.Literal[False] = False -) -> tuple[Scalar, Scalar]: - ... # pragma: no cover +) -> tuple[Scalar, Scalar]: ... # pragma: no cover @ty.overload -def finite_range(arr: npt.ArrayLike, check_nan: ty.Literal[True]) -> tuple[Scalar, Scalar, bool]: - ... # pragma: no cover +def finite_range( + arr: npt.ArrayLike, check_nan: ty.Literal[True] +) -> tuple[Scalar, Scalar, bool]: ... # pragma: no cover def finite_range( diff --git a/nibabel/wrapstruct.py b/nibabel/wrapstruct.py index 6e236d7356..5ffe04bc78 100644 --- a/nibabel/wrapstruct.py +++ b/nibabel/wrapstruct.py @@ -109,6 +109,7 @@ nib.imageglobals.logger = logger """ + from __future__ import annotations import numpy as np diff --git a/nibabel/xmlutils.py b/nibabel/xmlutils.py index d3a7a08309..5049a76412 100644 --- a/nibabel/xmlutils.py +++ b/nibabel/xmlutils.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Thin layer around xml.etree.ElementTree, to abstract nibabel xml support""" + from io import BytesIO from xml.etree.ElementTree import Element, SubElement, tostring # noqa from xml.parsers.expat import ParserCreate diff --git a/tox.ini b/tox.ini index 53860445aa..2e6a2449e6 100644 --- a/tox.ini +++ b/tox.ini @@ -142,7 +142,7 @@ deps = ruff>=0.3.0 skip_install = true commands = - ruff --diff nibabel + ruff check --diff nibabel ruff format --diff nibabel [testenv:style-fix] From a6f2a61f16308d7a3dcb968e60b2ffce1f7cbc53 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:14:21 +0100 Subject: [PATCH 040/203] MNT: get rid of .flake8/.pep8speaks.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit These are made obsolete by teh flake8 → ruff shift. --- .flake8 | 9 --------- .pep8speaks.yml | 12 ------------ 2 files changed, 21 deletions(-) delete mode 100644 .flake8 delete mode 100644 .pep8speaks.yml diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 9fe631ac81..0000000000 --- a/.flake8 +++ /dev/null @@ -1,9 +0,0 @@ -[flake8] -max-line-length = 100 -extend-ignore = E203,E266,E402,E731 -exclude = - *test* - *sphinx* - nibabel/externals/* -per-file-ignores = - */__init__.py: F401 diff --git a/.pep8speaks.yml b/.pep8speaks.yml deleted file mode 100644 index 0a0d8c619f..0000000000 --- a/.pep8speaks.yml +++ /dev/null @@ -1,12 +0,0 @@ -scanner: - diff_only: True # Only show errors caused by the patch - linter: flake8 - -message: # Customize the comment made by the bot - opened: # Messages when a new PR is submitted - header: "Hello @{name}, thank you for submitting the Pull Request!" - footer: "To test for issues locally, `pip install flake8` and then run `flake8 nibabel`." - updated: # Messages when new commits are added to the PR - header: "Hello @{name}, Thank you for updating!" - footer: "To test for issues locally, `pip install flake8` and then run `flake8 nibabel`." - no_errors: "Cheers! There are no style issues detected in this Pull Request. 
:beers: " From ac29ed26d403791f5868ac10056136a5ce66ddd7 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:25:11 +0100 Subject: [PATCH 041/203] MNT: ignore F401 in __init__.py Enforce that in pyproject.toml instead of __init__.py itself. --- nibabel/__init__.py | 2 -- nibabel/cifti2/__init__.py | 1 - nibabel/freesurfer/__init__.py | 2 -- nibabel/gifti/__init__.py | 2 -- nibabel/parrec.py | 2 +- nibabel/streamlines/__init__.py | 1 - nibabel/testing/__init__.py | 2 -- pyproject.toml | 3 +++ 8 files changed, 4 insertions(+), 11 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 1cb7abf53f..aa90540b8f 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -7,8 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# ruff: noqa: F401 - import os from .info import long_description as __doc__ diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py index 4a5cad7675..9c6805f818 100644 --- a/nibabel/cifti2/__init__.py +++ b/nibabel/cifti2/__init__.py @@ -6,7 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# ruff: noqa: F401 """CIFTI-2 format IO .. currentmodule:: nibabel.cifti2 diff --git a/nibabel/freesurfer/__init__.py b/nibabel/freesurfer/__init__.py index aa76eb2e89..1ab3859756 100644 --- a/nibabel/freesurfer/__init__.py +++ b/nibabel/freesurfer/__init__.py @@ -1,7 +1,5 @@ """Reading functions for freesurfer files""" -# ruff: noqa: F401 - from .io import ( read_annot, read_geometry, diff --git a/nibabel/gifti/__init__.py b/nibabel/gifti/__init__.py index d2a1e2da65..f54a1d2e54 100644 --- a/nibabel/gifti/__init__.py +++ b/nibabel/gifti/__init__.py @@ -16,8 +16,6 @@ gifti """ -# ruff: noqa: F401 - from .gifti import ( GiftiCoordSystem, GiftiDataArray, diff --git a/nibabel/parrec.py b/nibabel/parrec.py index d04f683d1d..8b3ffb34a2 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -7,7 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # Disable line length checking for PAR fragments in module docstring -# flake8: noqa E501 +# noqa: E501 """Read images in PAR/REC format This is yet another MRI image format generated by Philips scanners. 
It is an diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index 24a7e01469..dd00a1e842 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -1,5 +1,4 @@ """Multiformat-capable streamline format read / write interface""" -# ruff: noqa: F401 import os import warnings diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index a3e98e064b..d335c9a8c6 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -8,8 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Utilities for testing""" -# ruff: noqa: F401 - from __future__ import annotations import os diff --git a/pyproject.toml b/pyproject.toml index 515c35850b..5df6d01896 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,6 +133,9 @@ ignore = [ "ISC002", ] +[tool.ruff.lint.per-file-ignores] +"__init__.py" = ["F401"] + [tool.ruff.format] quote-style = "single" From d3352aef6991f1df8013d6bdc67aca56288dd346 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:27:28 +0100 Subject: [PATCH 042/203] =?UTF-8?q?MNT:=20ruff=200.3.0=20=E2=86=92=200.3.4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d35d287579..354bd3da1d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ repos: - id: check-merge-conflict - id: check-vcs-permalinks - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.0 + rev: v0.3.4 hooks: - id: ruff args: [--fix, --show-fix, --exit-non-zero-on-fix] From f57f5cbc4bb8d62861ee0c00931c134e4a66e0d7 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:29:51 +0100 Subject: [PATCH 043/203] Update doc/tools/apigen.py Co-authored-by: Chris Markiewicz --- doc/tools/apigen.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/doc/tools/apigen.py b/doc/tools/apigen.py index a1279a3e98..336c81d8d8 100644 --- a/doc/tools/apigen.py +++ b/doc/tools/apigen.py @@ -405,9 +405,7 @@ def discover_modules(self): def write_modules_api(self, modules, outdir): # upper-level modules - ulms = [ - '.'.join(m.split('.')[:2]) if m.count('.') >= 1 else m.split('.')[0] for m in modules - ] + ulms = ['.'.join(m.split('.')[:2]) for m in modules] from collections import OrderedDict From 1684a9dada92558b44ce7995f2050f5111f1ec33 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:30:36 +0100 Subject: [PATCH 044/203] Update nibabel/cifti2/tests/test_cifti2io_header.py Co-authored-by: Chris Markiewicz --- nibabel/cifti2/tests/test_cifti2io_header.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 92078a26d7..1c37cfe0e7 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -37,7 +37,7 @@ def test_space_separated_affine(): - _ = ci.Cifti2Image.from_filename(pjoin(NIBABEL_TEST_DATA, 'row_major.dconn.nii')) + ci.Cifti2Image.from_filename(pjoin(NIBABEL_TEST_DATA, 'row_major.dconn.nii')) def test_read_nifti2(): From a8ba819a26a15d6be2ea5c2bb6d6eaaaf89cef93 Mon 
Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:32:21 +0100 Subject: [PATCH 045/203] Update nibabel/gifti/tests/test_gifti.py Co-authored-by: Chris Markiewicz --- nibabel/gifti/tests/test_gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index f27546afe7..88a2f31f8e 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -328,7 +328,7 @@ def test_metadata_list_interface(): assert len(md) == 0 # Extension adds multiple keys - with pytest.warns(DeprecationWarning) as _: + with deprecated_to('6.0'): foobar = GiftiNVPairs('foo', 'bar') mdlist.extend([nvpair, foobar]) assert len(mdlist) == 2 From d797ffe10431a4c62322d81495873bf01e277e72 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:32:37 +0100 Subject: [PATCH 046/203] Update nibabel/tests/test_pkg_info.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_pkg_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index c927b0fb9e..a39eac65b1 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -14,7 +14,7 @@ def test_pkg_info(): - nibabel.pkg_info.get_pkg_info - nibabel.pkg_info.pkg_commit_hash """ - _ = nib.get_info() + nib.get_info() def test_version(): From 59d6291ac98b1be6784b88291b9826220c1f7241 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:32:54 +0100 Subject: [PATCH 047/203] Update nibabel/gifti/tests/test_gifti.py Co-authored-by: Chris Markiewicz --- nibabel/gifti/tests/test_gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 88a2f31f8e..a4cf5bb485 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -336,7 +336,7 @@ def test_metadata_list_interface(): assert md == {'key': 'value', 'foo': 'bar'} # Insertion updates list order, though we don't attempt to preserve it in the dict - with pytest.warns(DeprecationWarning) as _: + with deprecated_to('6.0'): lastone = GiftiNVPairs('last', 'one') mdlist.insert(1, lastone) assert len(mdlist) == 3 From 6daadc82d4634b7c9d31cc65f7de288ef67328ad Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:33:16 +0100 Subject: [PATCH 048/203] Update nibabel/tests/test_spatialimages.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_spatialimages.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 3d14dac18d..baf470090b 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -398,7 +398,10 @@ def test_slicer(self): img_klass = self.image_class in_data_template = np.arange(240, dtype=np.int16) base_affine = np.eye(4) - for dshape in ((4, 5, 6, 2), (8, 5, 6)): # Time series # Volume + for dshape in ( + (4, 5, 6, 2), # Time series + (8, 5, 6), # Volume + ): in_data = in_data_template.copy().reshape(dshape) img = img_klass(in_data, base_affine.copy()) From 32d0109c3a26fed6ac49d91613bef7193f324aac Mon Sep 17 00:00:00 2001 From: 
Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:33:29 +0100 Subject: [PATCH 049/203] Update nibabel/tests/test_testing.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_testing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 6b84725218..c9f91eb849 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -133,7 +133,7 @@ def test_warn_ignore(): with suppress_warnings(): warnings.warn('Here is a warning, you will not see it') warnings.warn('Nor this one', DeprecationWarning) - with suppress_warnings() as _: + with suppress_warnings(): warnings.warn('Here is a warning, you will not see it') warnings.warn('Nor this one', DeprecationWarning) assert n_warns == len(warnings.filters) From 9104d2fcfbd914494d5d4626c5a5bad0a675d6d0 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:33:38 +0100 Subject: [PATCH 050/203] Update tox.ini Co-authored-by: Chris Markiewicz --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 2e6a2449e6..d0b8653457 100644 --- a/tox.ini +++ b/tox.ini @@ -152,7 +152,7 @@ deps = ruff skip_install = true commands = - ruff --fix nibabel + ruff check --fix nibabel ruff format nibabel [testenv:spellcheck] From 02918edececbef01d9d536c4452cb17c0fd35955 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:34:08 +0100 Subject: [PATCH 051/203] Update nibabel/tests/test_testing.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_testing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index c9f91eb849..04ba813d8b 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -113,7 +113,7 @@ def test_warn_error(): with error_warnings(): with pytest.raises(UserWarning): warnings.warn('A test') - with error_warnings() as _: + with error_warnings(): with pytest.raises(UserWarning): warnings.warn('A test') assert n_warns == len(warnings.filters) From eeab46f658d9cf754ea9aeda5e3836553e6139d3 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:53:36 +0100 Subject: [PATCH 052/203] Update nibabel/gifti/tests/test_gifti.py Co-authored-by: Chris Markiewicz --- nibabel/gifti/tests/test_gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index a4cf5bb485..6c867ad25b 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -360,7 +360,7 @@ def test_metadata_list_interface(): assert 'completelynew' not in md assert md == {'foo': 'bar', 'last': 'one'} # Check popping from the end (last one inserted before foobar) - _ = mdlist.pop() + mdlist.pop() assert len(mdlist) == 1 assert len(md) == 1 assert md == {'last': 'one'} From 46c84879dc2952b69508ad489927437ad1e471ab Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:53:51 +0100 Subject: [PATCH 053/203] Update nibabel/gifti/tests/test_gifti.py Co-authored-by: Chris Markiewicz --- nibabel/gifti/tests/test_gifti.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 6c867ad25b..1cead0d928 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -366,7 +366,7 @@ def test_metadata_list_interface(): assert md == {'last': 'one'} # And let's remove an old pair with a new object - with pytest.warns(DeprecationWarning) as _: + with deprecated_to('6.0'): lastoneagain = GiftiNVPairs('last', 'one') mdlist.remove(lastoneagain) assert len(mdlist) == 0 From dec3a2dba421db615aaa9c85cd53d002b4af5644 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:54:03 +0100 Subject: [PATCH 054/203] Update nibabel/gifti/tests/test_parse_gifti_fast.py Co-authored-by: Chris Markiewicz --- nibabel/gifti/tests/test_parse_gifti_fast.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index 17258fbd30..c562b90480 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -447,7 +447,7 @@ def test_external_file_failure_cases(): shutil.copy(DATA_FILE7, '.') filename = pjoin(tmpdir, basename(DATA_FILE7)) with pytest.raises(GiftiParseError): - _ = load(filename) + load(filename) # load from in-memory xml string (parser requires it as bytes) with open(DATA_FILE7, 'rb') as f: xmldata = f.read() From d8d3a4489c67b84e61b0d2aa190bb2b31b5d3a1e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:54:17 +0100 Subject: [PATCH 055/203] Update nibabel/gifti/tests/test_parse_gifti_fast.py Co-authored-by: Chris Markiewicz --- nibabel/gifti/tests/test_parse_gifti_fast.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index c562b90480..8cb7c96794 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -453,7 +453,7 @@ def test_external_file_failure_cases(): xmldata = f.read() parser = GiftiImageParser() with pytest.raises(GiftiParseError): - _ = parser.parse(xmldata) + parser.parse(xmldata) def test_load_compressed(): From de9f2b0a2a246b60cb6bcae8780df000a70cd59d Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:54:29 +0100 Subject: [PATCH 056/203] Update nibabel/nicom/tests/test_dicomwrappers.py Co-authored-by: Chris Markiewicz --- nibabel/nicom/tests/test_dicomwrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index e96607df9e..d14c35dcdb 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -630,7 +630,7 @@ def test_image_position(self): def test_affine(self): # Make sure we find orientation/position/spacing info dw = didw.wrapper_from_file(DATA_FILE_4D) - _ = dw.affine + dw.affine @dicom_test @pytest.mark.xfail(reason='Not packaged in install', raises=FileNotFoundError) From b7a5f5aa644ca645499a88438360d814ef377769 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:54:45 +0100 Subject: [PATCH 057/203] Update 
nibabel/streamlines/tests/test_tck.py Co-authored-by: Chris Markiewicz --- nibabel/streamlines/tests/test_tck.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tck.py b/nibabel/streamlines/tests/test_tck.py index 6b4c163ed6..083ab8e6e9 100644 --- a/nibabel/streamlines/tests/test_tck.py +++ b/nibabel/streamlines/tests/test_tck.py @@ -137,7 +137,7 @@ def test_load_file_with_wrong_information(self): # Simulate a TCK file with no `file` field. new_tck_file = tck_file.replace(b'\nfile: . 67', b'') - with pytest.warns(HeaderWarning, match="Missing 'file'") as _: + with pytest.warns(HeaderWarning, match="Missing 'file'"): tck = TckFile.load(BytesIO(new_tck_file)) assert_array_equal(tck.header['file'], '. 56') From a621d41987ae64f964fe71b800a59771981f4130 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:55:04 +0100 Subject: [PATCH 058/203] Update nibabel/streamlines/tests/test_trk.py Co-authored-by: Chris Markiewicz --- nibabel/streamlines/tests/test_trk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_trk.py b/nibabel/streamlines/tests/test_trk.py index 749bf3ed30..4cb6032c25 100644 --- a/nibabel/streamlines/tests/test_trk.py +++ b/nibabel/streamlines/tests/test_trk.py @@ -149,7 +149,7 @@ def test_load_file_with_wrong_information(self): # Simulate a TRK where `vox_to_ras` is invalid. trk_struct, trk_bytes = self.trk_with_bytes() trk_struct[Field.VOXEL_TO_RASMM] = np.diag([0, 0, 0, 1]) - with clear_and_catch_warnings(record=True, modules=[trk_module]) as _: + with clear_and_catch_warnings(modules=[trk_module]): with pytest.raises(HeaderError): TrkFile.load(BytesIO(trk_bytes)) From 37ff0ebe9dd035f99d28ded561928f58315fdb68 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:55:31 +0100 Subject: [PATCH 059/203] Update nibabel/tests/test_affines.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_affines.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index 1d7ef1e6bf..d4ea11821b 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -225,7 +225,6 @@ def test_rescale_affine(): orig_shape = rng.randint(low=20, high=512, size=(3,)) orig_aff = np.eye(4) orig_aff[:3, :] = rng.normal(size=(3, 4)) - orig_zooms = voxel_sizes(orig_aff) # noqa: F841 orig_axcodes = aff2axcodes(orig_aff) orig_centroid = apply_affine(orig_aff, (orig_shape - 1) // 2) From 36d36fbddcdb1c0cb4f3fc503452291ba90971a6 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:55:51 +0100 Subject: [PATCH 060/203] Update nibabel/tests/test_arraywriters.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_arraywriters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 2fc9c32358..25040e5eed 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -276,7 +276,7 @@ def test_slope_inter_castable(): for out_dtt in NUMERIC_TYPES: for klass in (ArrayWriter, SlopeArrayWriter, SlopeInterArrayWriter): arr = np.zeros((5,), dtype=in_dtt) - _ = klass(arr, out_dtt) # no error + klass(arr, out_dtt) # no error # Test special case of none finite # This raises error 
for ArrayWriter, but not for the others arr = np.array([np.inf, np.nan, -np.inf]) From 0922369b170a38215b9cc6d0d2ce69d668f579c1 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:56:13 +0100 Subject: [PATCH 061/203] Update nibabel/tests/test_arraywriters.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_arraywriters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 25040e5eed..4a853ecf5e 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -285,8 +285,8 @@ def test_slope_inter_castable(): in_arr = arr.astype(in_dtt) with pytest.raises(WriterError): ArrayWriter(in_arr, out_dtt) - _ = SlopeArrayWriter(arr.astype(in_dtt), out_dtt) # no error - _ = SlopeInterArrayWriter(arr.astype(in_dtt), out_dtt) # no error + SlopeArrayWriter(arr.astype(in_dtt), out_dtt) # no error + SlopeInterArrayWriter(arr.astype(in_dtt), out_dtt) # no error for in_dtt, out_dtt, arr, slope_only, slope_inter, neither in ( (np.float32, np.float32, 1, True, True, True), (np.float64, np.float32, 1, True, True, True), From 50177cc9e521716234510dab8c4bd48892c40b6a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:56:53 +0100 Subject: [PATCH 062/203] Update nibabel/tests/test_image_load_save.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_image_load_save.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 934698d9e6..0e5fd57d08 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -131,7 +131,7 @@ def test_save_load(): affine[:3, 3] = [3, 2, 1] img = ni1.Nifti1Image(data, affine) img.set_data_dtype(npt) - with InTemporaryDirectory() as _: + with InTemporaryDirectory(): nifn = 'an_image.nii' sifn = 'another_image.img' ni1.save(img, nifn) From 489b9d29795d33c46a4b2e0e079a22bb4a6e9a1e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:57:11 +0100 Subject: [PATCH 063/203] Update nibabel/tests/test_imageclasses.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_imageclasses.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py index 7b3add6cd0..90ef966d2d 100644 --- a/nibabel/tests/test_imageclasses.py +++ b/nibabel/tests/test_imageclasses.py @@ -6,7 +6,6 @@ import numpy as np import nibabel as nib -from nibabel import imageclasses # noqa: F401 from nibabel.analyze import AnalyzeImage from nibabel.imageclasses import spatial_axes_first from nibabel.nifti1 import Nifti1Image From 72c0ebf96f2081eee22bab5b167e12306a4693a3 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:57:41 +0100 Subject: [PATCH 064/203] Update nibabel/tests/test_minc2.py Co-authored-by: Chris Markiewicz --- nibabel/tests/test_minc2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py index 7ab29edfde..4c2973a728 100644 --- a/nibabel/tests/test_minc2.py +++ b/nibabel/tests/test_minc2.py @@ -129,5 +129,5 @@ def test_bad_diminfo(): # File has a bad spacing field 
'xspace' when it should be # `irregular`, `regular__` or absent (default to regular__). # We interpret an invalid spacing as absent, but warn. - with pytest.warns(UserWarning) as _: + with pytest.warns(UserWarning): Minc2Image.from_filename(fname) From 223fdc072ee22034c5388a824e350aafb5c8914a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 15:05:08 +0100 Subject: [PATCH 065/203] Put back argument, used by @pytest.fixture --- nibabel/tests/test_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index cca8d0ba81..5697752ea4 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -27,7 +27,7 @@ @pytest.fixture -def with_nimd_env(request): +def with_nimd_env(request, with_environment): # noqa: F811 DATA_FUNCS = {} DATA_FUNCS['home_dir_func'] = nibd.get_nipy_user_dir DATA_FUNCS['sys_dir_func'] = nibd.get_nipy_system_dir From 19e4a56f8e4d4f6e6e5460f08389c2ced5e44c16 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 15:08:24 +0100 Subject: [PATCH 066/203] MNT: ignore F401 in doc/source/conf.py Enforce that in pyproject.toml instead of conf.py itself. --- doc/source/conf.py | 4 ++-- pyproject.toml | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index e8999b7d2b..175c6340bd 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -30,11 +30,11 @@ # Check for external Sphinx extensions we depend on try: - import numpydoc # noqa: F401 + import numpydoc except ImportError: raise RuntimeError('Need to install "numpydoc" package for doc build') try: - import texext # noqa: F401 + import texext except ImportError: raise RuntimeError('Need to install "texext" package for doc build') diff --git a/pyproject.toml b/pyproject.toml index 5df6d01896..bf7b099031 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -135,6 +135,7 @@ ignore = [ [tool.ruff.lint.per-file-ignores] "__init__.py" = ["F401"] +"doc/source/conf.py" = ["F401"] [tool.ruff.format] quote-style = "single" From 066431d9bf5b6843514528ff5a6d81fbef4f8e9d Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 29 Mar 2024 15:10:55 +0100 Subject: [PATCH 067/203] MNT: Get rid of last `coding: utf-8` --- doc/source/conf.py | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 175c6340bd..f4ab16d2db 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## From 418188e34b18fcf5231b6a3c2a8e947608ea5aa3 Mon Sep 17 00:00:00 2001 From: Matthew Brett Date: Fri, 29 Mar 2024 17:13:39 +0000 Subject: [PATCH 068/203] DOC: fix typos for key kay -> key --- nibabel/analyze.py | 2 +- nibabel/dataobj_images.py | 2 +- nibabel/freesurfer/mghformat.py | 2 +- nibabel/spm99analyze.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 20fdac055a..bd3eaa8897 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -929,7 +929,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): Parameters ---------- file_map : dict - Mapping with (kay, 
value) pairs of (``file_type``, FileHolder + Mapping with (key, value) pairs of (``file_type``, FileHolder instance giving file-likes for each file needed for this image type. mmap : {True, False, 'c', 'r'}, optional, keyword only diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index eaf341271e..019d6b9551 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -437,7 +437,7 @@ def from_file_map( Parameters ---------- file_map : dict - Mapping with (kay, value) pairs of (``file_type``, FileHolder + Mapping with (key, value) pairs of (``file_type``, FileHolder instance giving file-likes for each file needed for this image type. mmap : {True, False, 'c', 'r'}, optional, keyword only diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 5dd2660342..4c4b854a3e 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -495,7 +495,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): Parameters ---------- file_map : dict - Mapping with (kay, value) pairs of (``file_type``, FileHolder + Mapping with (key, value) pairs of (``file_type``, FileHolder instance giving file-likes for each file needed for this image type. mmap : {True, False, 'c', 'r'}, optional, keyword only diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index 3465c57190..395a299c1a 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -240,7 +240,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): Parameters ---------- file_map : dict - Mapping with (kay, value) pairs of (``file_type``, FileHolder + Mapping with (key, value) pairs of (``file_type``, FileHolder instance giving file-likes for each file needed for this image type. mmap : {True, False, 'c', 'r'}, optional, keyword only From 9fa116b3ddfb8065421fd6a5a9320bf7bd1646e3 Mon Sep 17 00:00:00 2001 From: Sandro Date: Mon, 1 Apr 2024 15:59:57 +0200 Subject: [PATCH 069/203] Python 3.13: Account for dedented docstrings - Dedent docstrings in Python 3.13+ - Fix #1311 - Ref: https://github.com/python/cpython/issues/81283 --- nibabel/deprecator.py | 15 +++++++++++++++ nibabel/tests/test_deprecator.py | 15 ++++++++++----- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/nibabel/deprecator.py b/nibabel/deprecator.py index 779fdb462d..a80fa25692 100644 --- a/nibabel/deprecator.py +++ b/nibabel/deprecator.py @@ -3,8 +3,10 @@ import functools import re +import sys import typing as ty import warnings +from textwrap import dedent if ty.TYPE_CHECKING: # pragma: no cover T = ty.TypeVar('T') @@ -12,6 +14,15 @@ _LEADING_WHITE = re.compile(r'^(\s*)') + +def _dedent_docstring(docstring): + """Compatibility with Python 3.13+. + + xref: https://github.com/python/cpython/issues/81283 + """ + return '\n'.join([dedent(line) for line in docstring.split('\n')]) + + TESTSETUP = """ .. 
testsetup:: @@ -32,6 +43,10 @@ """ +if sys.version_info >= (3, 13): + TESTSETUP = _dedent_docstring(TESTSETUP) + TESTCLEANUP = _dedent_docstring(TESTCLEANUP) + class ExpiredDeprecationError(RuntimeError): """Error for expired deprecation diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index 833908af94..4303ff6737 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -14,6 +14,7 @@ Deprecator, ExpiredDeprecationError, _add_dep_doc, + _dedent_docstring, _ensure_cr, ) @@ -21,6 +22,14 @@ _OWN_MODULE = sys.modules[__name__] +func_docstring = ( + f'A docstring\n \n foo\n \n{indent(TESTSETUP, " ", lambda x: True)}' + f' Some text\n{indent(TESTCLEANUP, " ", lambda x: True)}' +) + +if sys.version_info >= (3, 13): + func_docstring = _dedent_docstring(func_docstring) + def test__ensure_cr(): # Make sure text ends with carriage return @@ -92,11 +101,7 @@ def test_dep_func(self): with pytest.deprecated_call() as w: assert func(1, 2) is None assert len(w) == 1 - assert ( - func.__doc__ - == f'A docstring\n \n foo\n \n{indent(TESTSETUP, " ", lambda x: True)}' - f' Some text\n{indent(TESTCLEANUP, " ", lambda x: True)}' - ) + assert func.__doc__ == func_docstring # Try some since and until versions func = dec('foo', '1.1')(func_no_doc) From f262e75361c4a737e4f6c534c2882b07b0d78fd7 Mon Sep 17 00:00:00 2001 From: Sandro Date: Tue, 2 Apr 2024 12:13:49 +0200 Subject: [PATCH 070/203] Update instructions for building docs The top level `Makefile` is outdated. This circumvents its use. --- doc/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/README.rst b/doc/README.rst index a19a3c1261..b2afd8ce16 100644 --- a/doc/README.rst +++ b/doc/README.rst @@ -6,4 +6,4 @@ To build the documentation, change to the root directory (containing ``setup.py``) and run:: pip install -r doc-requirements.txt - make html + make -C doc html From 9d1201396a6cf9714b96ed501408e048ae422754 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 14 Apr 2024 10:08:57 -0400 Subject: [PATCH 071/203] Update doc/README.rst --- doc/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/README.rst b/doc/README.rst index b2afd8ce16..d5fd9765e6 100644 --- a/doc/README.rst +++ b/doc/README.rst @@ -3,7 +3,7 @@ Nibabel documentation ##################### To build the documentation, change to the root directory (containing -``setup.py``) and run:: +``pyproject.toml``) and run:: pip install -r doc-requirements.txt make -C doc html From 568e37fb1e55a78d17978c12a269aa6e309e0e35 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 28 Apr 2024 21:17:16 -0400 Subject: [PATCH 072/203] TOX: Update dependencies for arm64 --- tox.ini | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tox.ini b/tox.ini index d0b8653457..2826623eac 100644 --- a/tox.ini +++ b/tox.ini @@ -43,14 +43,14 @@ DEPENDS = ARCH = x64: x64 x86: x86 + arm64: arm64 [testenv] description = Pytest with coverage labels = test install_command = python -I -m pip install -v \ - x64: --only-binary numpy,scipy,h5py,pillow \ - x86: --only-binary numpy,scipy,h5py,pillow,matplotlib \ + --only-binary numpy,scipy,h5py,pillow,matplotlib \ pre,dev: --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ {opts} {packages} pip_pre = @@ -91,11 +91,11 @@ deps = pre: numpy <2.0.dev0 dev: numpy >=2.0.dev0 # Scipy stopped producing win32 wheels at py310 - py3{8,9}-full-x86,x64: scipy >=1.6 + py3{8,9}-full-x86,x64,arm64: scipy 
>=1.6 # Matplotlib depends on scipy, so cannot be built for py310 on x86 py3{8,9}-full-x86,x64,arm64: matplotlib >=3.4 # h5py stopped producing win32 wheels at py39 py38-full-x86,x64,arm64: h5py >=2.10 full,pre,dev: pillow >=8.1 full,pre,dev: indexed_gzip >=1.4 full,pre,dev: pyzstd >=0.14.3 full,pre: pydicom >=2.1 dev: pydicom @ git+https://github.com/pydicom/pydicom.git@main From 8f2c039f2e3d9ccdb3af1a526e3ff1985819dabe Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 28 Apr 2024 21:27:04 -0400 Subject: [PATCH 073/203] CI: Add/distinguish macos-13-x64 and macos-14-arm64 runs --- .github/workflows/test.yml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a6eb39734f..3b79c87105 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -112,9 +112,9 @@ jobs: strategy: fail-fast: false matrix: - os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] + os: ['ubuntu-latest', 'windows-latest', 'macos-13', 'macos-latest'] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] - architecture: ['x64', 'x86'] + architecture: ['x64', 'x86', 'arm64'] dependencies: ['full', 'pre'] include: # Basic dependencies only - os: ubuntu-latest python-version: 3.8 dependencies: 'none' # Absolute minimum dependencies - os: ubuntu-latest python-version: 3.8 dependencies: 'min' # NumPy 2.0 - os: ubuntu-latest python-version: '3.12' dependencies: 'dev' exclude: + # x86 for Windows + Python<3.12 - os: ubuntu-latest architecture: x86 + - os: macos-13 + architecture: x86 - os: macos-latest architecture: x86 - python-version: '3.12' architecture: x86 + # arm64 is available for macos-14+ + - os: ubuntu-latest + architecture: arm64 + - os: windows-latest + architecture: arm64 + - os: macos-13 + architecture: arm64 + # x64 is not available for macos-14+ + - os: macos-latest + architecture: x64 + # Drop pre tests for macos-13 + - os: macos-13 + dependencies: pre env: DEPENDS: ${{ matrix.dependencies }} From 08dd20d4d66984f9704cee9c49c7275f16e5e86a Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 28 Apr 2024 21:46:02 -0400 Subject: [PATCH 074/203] TOX: Print durations to see slow tests --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 2826623eac..b9ac9557cb 100644 --- a/tox.ini +++ b/tox.ini @@ -106,6 +106,7 @@ commands = pytest --doctest-modules --doctest-plus \ --cov nibabel --cov-report xml:cov.xml \ --junitxml test-results.xml \ + --durations=20 --durations-min=1.0 \ --pyargs nibabel {posargs:-n auto} [testenv:install] From feda198d53028db570e32509761088eedf98231d Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 28 Apr 2024 21:53:08 -0400 Subject: [PATCH 075/203] CI: Run pre-release tests only on SPEC-0 supported Python --- .github/workflows/test.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3b79c87105..2b3d9f2494 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -152,6 +152,11 @@ jobs: # Drop pre tests for macos-13 - os: macos-13 dependencies: pre + # Drop pre tests for SPEC-0-unsupported Python versions + - python-version: '3.8' + dependencies: pre + - python-version: '3.9' + dependencies: pre env: DEPENDS: ${{ matrix.dependencies }} From 47ea032b541fccb512ecb44f9ddb9420cfacdd0a Mon Sep 17 00:00:00 2001 From: Guillaume Becq Date: Thu, 25 Apr 2024 16:33:18 +0200 Subject: [PATCH 076/203] Update OrthoSlicer3D._set_position in viewers.py Wrong indices into the original data led to a weird selection of voxels for unusual affine transforms and volumes. This bug also caused strange behavior with 
special acquisitions, for example small-animal (rodent) settings, leading to a wrong origin location (0, 0, 0) in image.orthoview(). --- nibabel/viewers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/viewers.py b/nibabel/viewers.py index 1e927544ba..e66a34149a 100644 --- a/nibabel/viewers.py +++ b/nibabel/viewers.py @@ -399,7 +399,8 @@ def _set_position(self, x, y, z, notify=True): # deal with slicing appropriately self._position[:3] = [x, y, z] idxs = np.dot(self._inv_affine, self._position)[:3] - for ii, (size, idx) in enumerate(zip(self._sizes, idxs)): + idxs_new_order = idxs[self._order] + for ii, (size, idx) in enumerate(zip(self._sizes, idxs_new_order)): self._data_idx[ii] = max(min(int(round(idx)), size - 1), 0) for ii in range(3): # sagittal: get to S/A From ab64f37c2d0cd3ab1160d99cfe4ba27874b69cc2 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 5 May 2024 13:12:32 +0200 Subject: [PATCH 077/203] STY: Apply ruff/flake8-implicit-str-concat rule ISC001 ISC001 Implicitly concatenated string literals on one line This rule is currently disabled because it conflicts with the formatter: https://github.com/astral-sh/ruff/issues/8272 --- nibabel/streamlines/__init__.py | 2 +- nibabel/volumeutils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index dd00a1e842..46b403b424 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -131,7 +131,7 @@ def save(tractogram, filename, **kwargs): warnings.warn(msg, ExtensionWarning) if kwargs: - msg = "A 'TractogramFile' object was provided, no need for" ' keyword arguments.' + msg = "A 'TractogramFile' object was provided, no need for keyword arguments." 
raise ValueError(msg) tractogram_file.save(filename) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index cf2437e621..379d654a35 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -441,7 +441,7 @@ def array_from_file( True """ if mmap not in (True, False, 'c', 'r', 'r+'): - raise ValueError("mmap value should be one of True, False, 'c', " "'r', 'r+'") + raise ValueError("mmap value should be one of True, False, 'c', 'r', 'r+'") in_dtype = np.dtype(in_dtype) # Get file-like object from Opener instance infile = getattr(infile, 'fobj', infile) From 1bd8c262c8ac1adb17eeb313456232488f721d83 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 21 May 2024 18:16:51 -0400 Subject: [PATCH 078/203] MNT: Fix ruff arg in pre-commit config --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 354bd3da1d..b348393a45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ repos: rev: v0.3.4 hooks: - id: ruff - args: [--fix, --show-fix, --exit-non-zero-on-fix] + args: [--fix, --show-fixes, --exit-non-zero-on-fix] exclude: = ["doc", "tools"] - id: ruff-format exclude: = ["doc", "tools"] From d571b92588447871fb8d869642d8053db44f1b74 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 21 May 2024 18:16:58 -0400 Subject: [PATCH 079/203] ENH: Add Nifti2 capabilities to nib-nifti-dx --- nibabel/cmdline/nifti_dx.py | 27 +++++++++++++++++++-------- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/nibabel/cmdline/nifti_dx.py b/nibabel/cmdline/nifti_dx.py index 103bbf2640..eb917a04b8 100644 --- a/nibabel/cmdline/nifti_dx.py +++ b/nibabel/cmdline/nifti_dx.py @@ -9,8 +9,7 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Print nifti diagnostics for header files""" -import sys -from optparse import OptionParser +from argparse import ArgumentParser import nibabel as nib @@ -21,15 +20,27 @@ def main(args=None): """Go go team""" - parser = OptionParser( - usage=f'{sys.argv[0]} [FILE ...]\n\n' + __doc__, version='%prog ' + nib.__version__ + parser = ArgumentParser(description=__doc__) + parser.add_argument('--version', action='version', version=f'%(prog)s {nib.__version__}') + parser.add_argument( + '-1', + '--nifti1', + dest='header_class', + action='store_const', + const=nib.Nifti1Header, + default=nib.Nifti1Header, ) - (opts, files) = parser.parse_args(args=args) + parser.add_argument( + '-2', '--nifti2', dest='header_class', action='store_const', const=nib.Nifti2Header + ) + parser.add_argument('files', nargs='*', metavar='FILE', help='Nifti file names') + + args = parser.parse_args(args=args) - for fname in files: + for fname in args.files: with nib.openers.ImageOpener(fname) as fobj: - hdr = fobj.read(nib.nifti1.header_dtype.itemsize) - result = nib.Nifti1Header.diagnose_binaryblock(hdr) + hdr = fobj.read(args.header_class.template_dtype.itemsize) + result = args.header_class.diagnose_binaryblock(hdr) if len(result): print(f'Picky header check output for "{fname}"\n') print(result + '\n') diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 455a994ae1..d97c99d051 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -202,7 +202,7 @@ def test_help(): code, stdout, stderr = run_command([cmd, '--help']) assert code == 0 assert_re_in(f'.*{cmd}', stdout) - assert_re_in('.*Usage', stdout) + assert_re_in('.*[uU]sage', 
stdout) # Some third party modules might like to announce some Deprecation # etc warnings, see e.g. https://travis-ci.org/nipy/nibabel/jobs/370353602 if 'warning' not in stderr.lower(): From 82c8588528d5a06fd0dfc99e3cbb83d5cc299e2b Mon Sep 17 00:00:00 2001 From: Sandro Date: Wed, 29 May 2024 00:20:34 +0200 Subject: [PATCH 080/203] Replace deprecated setup() and teardown() Those were compatibility functions for porting from nose. They are now deprecated and have been removed from pytest. This will make all tests compatible with pytest 8.x. --- nibabel/streamlines/tests/test_streamlines.py | 2 +- nibabel/tests/test_deprecated.py | 4 ++-- nibabel/tests/test_dft.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index f0bd9c7c49..53a43c393a 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -20,7 +20,7 @@ DATA = {} -def setup(): +def setup_module(): global DATA DATA['empty_filenames'] = [pjoin(data_path, 'empty' + ext) for ext in FORMATS.keys()] DATA['simple_filenames'] = [pjoin(data_path, 'simple' + ext) for ext in FORMATS.keys()] diff --git a/nibabel/tests/test_deprecated.py b/nibabel/tests/test_deprecated.py index f1c3d517c9..01636632e4 100644 --- a/nibabel/tests/test_deprecated.py +++ b/nibabel/tests/test_deprecated.py @@ -14,12 +14,12 @@ from nibabel.tests.test_deprecator import TestDeprecatorFunc as _TestDF -def setup(): +def setup_module(): # Hack nibabel version string pkg_info.cmp_pkg_version.__defaults__ = ('2.0',) -def teardown(): +def teardown_module(): # Hack nibabel version string back again pkg_info.cmp_pkg_version.__defaults__ = (pkg_info.__version__,) diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py index 654af98279..6c6695b16e 100644 --- a/nibabel/tests/test_dft.py +++ b/nibabel/tests/test_dft.py @@ -26,7 +26,7 @@ data_dir = pjoin(dirname(__file__), 'data') -def setUpModule(): +def setup_module(): if os.name == 'nt': raise unittest.SkipTest('FUSE not available for windows, skipping dft tests') if not have_dicom: From 95e7c156e0d115c222f4a7e9545f27edd8f6dced Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 19:43:01 -0400 Subject: [PATCH 081/203] RF: Prefer using `getlocale()` instead of `getdefaultlocale()` Prefer using `getlocale()` instead of `getdefaultlocale()`. Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/cmdline/dicomfs.py:40: DeprecationWarning: 'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15. Use setlocale(), getencoding() and getlocale() instead. 
encoding = locale.getdefaultlocale()[1] ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:164 --- nibabel/cmdline/dicomfs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cmdline/dicomfs.py b/nibabel/cmdline/dicomfs.py index 66ffb8adea..552bb09319 100644 --- a/nibabel/cmdline/dicomfs.py +++ b/nibabel/cmdline/dicomfs.py @@ -37,7 +37,7 @@ class dummy_fuse: import nibabel as nib import nibabel.dft as dft -encoding = locale.getdefaultlocale()[1] +encoding = locale.getlocale()[1] fuse.fuse_python_api = (0, 2) From 17809b067ddd22de438b9b49b116c2c496b7a752 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 19:51:43 -0400 Subject: [PATCH 082/203] RF: Prefer using `np.vstack` instead of `np.row_stack` Prefer using `np.vstack` instead of `np.row_stack`. Fixes: ``` nibabel/ecat.py: 3 warnings /home/runner/work/nibabel/nibabel/nibabel/ecat.py:393: DeprecationWarning: `row_stack` alias is deprecated. Use `np.vstack` directly. return np.row_stack(mlists) ``` and similar warnings. Raised for example at: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:186 Documentation: https://numpy.org/doc/1.26/reference/generated/numpy.row_stack.html This helps prepare for full NumPy 2.0 compatibility. Documentation: https://numpy.org/doc/stable/numpy_2_0_migration_guide.html#main-namespace --- nibabel/brikhead.py | 2 +- nibabel/ecat.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 3a3cfd0871..da8692efd3 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -391,7 +391,7 @@ def get_affine(self): # AFNI default is RAI- == LPS+ == DICOM order. We need to flip RA sign # to align with nibabel RAS+ system affine = np.asarray(self.info['IJK_TO_DICOM_REAL']).reshape(3, 4) - affine = np.row_stack((affine * [[-1], [-1], [1]], [0, 0, 0, 1])) + affine = np.vstack((affine * [[-1], [-1], [1]], [0, 0, 0, 1])) return affine def get_data_scaling(self): diff --git a/nibabel/ecat.py b/nibabel/ecat.py index 03a4c72b98..34ff06323c 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -390,7 +390,7 @@ def read_mlist(fileobj, endianness): mlist_index += n_rows if mlist_block_no <= 2: # should block_no in (1, 2) be an error? break - return np.row_stack(mlists) + return np.vstack(mlists) def get_frame_order(mlist): From 94e3e83752c58b1ae20a50e97c5ea9eed21abacf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 20:17:19 -0400 Subject: [PATCH 083/203] RF: Fix `ast` library type and attribute deprecation warnings Fix `ast` library type and attribute deprecation warnings. 
Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:177: DeprecationWarning: ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead if isinstance(value, ast.Num): /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:179: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead if isinstance(value, ast.Str): /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:180: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead return value.s /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:94: DeprecationWarning: Attribute n is deprecated and will be removed in Python 3.14; use value instead index = target.slice.n /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:182: DeprecationWarning: Attribute n is deprecated and will be removed in Python 3.14; use value instead return -value.operand.n ``` raised for example in: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:207 Documentation: https://docs.python.org/3/library/ast.html --- nibabel/nicom/ascconv.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py index 0966de2a96..8ec72fb3ec 100644 --- a/nibabel/nicom/ascconv.py +++ b/nibabel/nicom/ascconv.py @@ -91,7 +91,7 @@ def assign2atoms(assign_ast, default_class=int): prev_target_type = OrderedDict elif isinstance(target, ast.Subscript): if isinstance(target.slice, ast.Constant): # PY39 - index = target.slice.n + index = target.slice.value else: # PY38 index = target.slice.value.n atoms.append(Atom(target, prev_target_type, index)) @@ -174,12 +174,10 @@ def obj_from_atoms(atoms, namespace): def _get_value(assign): value = assign.value - if isinstance(value, ast.Num): - return value.n - if isinstance(value, ast.Str): - return value.s + if isinstance(value, ast.Constant): + return value.value if isinstance(value, ast.UnaryOp) and isinstance(value.op, ast.USub): - return -value.operand.n + return -value.operand.value raise AscconvParseError(f'Unexpected RHS of assignment: {value}') From d1235a6ef5ea31c5be784a6b5448b9e0d598014f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 20:04:02 -0400 Subject: [PATCH 084/203] RF: Remove unnecessary call to `asbytes` for `b`-prepended strings Remove unnecessary call to `asbytes` for `b`-prepended strings: strings prepended with `b` are already treated as bytes literals: - `TckFile.MAGIC_NUMBER` is b'mrtrix tracks' - `TrkFile.MAGIC_NUMBER` is b'TRACK' Documentation: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/streamlines/tests/test_streamlines.py:9: DeprecationWarning: `np.compat`, which was used during the Python 2 to 3 transition, is deprecated since 1.26.0, and will be removed from numpy.compat.py3k import asbytes ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:178 --- nibabel/streamlines/tests/test_streamlines.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 53a43c393a..857e64fec9 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -6,7 +6,6 @@ import numpy as np import pytest -from numpy.compat.py3k import 
asbytes import nibabel as nib from nibabel.testing import clear_and_catch_warnings, data_path, error_warnings @@ -95,7 +94,7 @@ def test_is_supported_detect_format(tmp_path): # Valid file without extension for tfile_cls in FORMATS.values(): f = BytesIO() - f.write(asbytes(tfile_cls.MAGIC_NUMBER)) + f.write(tfile_cls.MAGIC_NUMBER) f.seek(0, os.SEEK_SET) assert nib.streamlines.is_supported(f) assert nib.streamlines.detect_format(f) is tfile_cls @@ -104,7 +103,7 @@ def test_is_supported_detect_format(tmp_path): for tfile_cls in FORMATS.values(): fpath = tmp_path / 'test.txt' with open(fpath, 'w+b') as f: - f.write(asbytes(tfile_cls.MAGIC_NUMBER)) + f.write(tfile_cls.MAGIC_NUMBER) f.seek(0, os.SEEK_SET) assert nib.streamlines.is_supported(f) assert nib.streamlines.detect_format(f) is tfile_cls From 447ef576316d814138f7af33cee97dc6e23e5337 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 20:27:22 -0400 Subject: [PATCH 085/203] RF: Fix for `abc` library `Traversable` class module Fix for `abc` library `Traversable` class module: import from `importlib.resources.abc`. Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/testing/__init__.py:30: DeprecationWarning: 'importlib.abc.Traversable' is deprecated and slated for removal in Python 3.14 from importlib.abc import Traversable ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:157 Documentation: https://docs.python.org/3/library/importlib.resources.abc.html#importlib.resources.abc.Traversable --- nibabel/testing/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index d335c9a8c6..0ba82d6cb0 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -27,8 +27,8 @@ from .np_features import memmap_after_ufunc try: - from importlib.abc import Traversable from importlib.resources import as_file, files + from importlib.resources.abc import Traversable except ImportError: # PY38 from importlib_resources import as_file, files from importlib_resources.abc import Traversable From 7caef99068f88bafbf25f61b0e75b10770e28df4 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 27 Jun 2024 16:27:58 +0900 Subject: [PATCH 086/203] MNT: Update importlib_resources requirement to match 3.12 usage --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bf7b099031..4df5886d78 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ requires-python = ">=3.8" dependencies = [ "numpy >=1.20", "packaging >=17", - "importlib_resources >=1.3; python_version < '3.9'", + "importlib_resources >=5.12; python_version < '3.12'", ] classifiers = [ "Development Status :: 5 - Production/Stable", From 3a7cebaca9729b0b03c8dd4ba01ff1a62d39cb26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Thu, 27 Jun 2024 18:35:51 -0400 Subject: [PATCH 087/203] RF: Use `numpy.lib.scimath` to demonstrate warning context manager Use `numpy.lib.scimath` instead of deprecated `numpy.core.fromnumeric` in `clear_and_catch_warnings` context manager doctests. Take advantage of the commit to add an actual case that would raise a warning. Fixes: ``` nibabel/testing/__init__.py::nibabel.testing.clear_and_catch_warnings :1: DeprecationWarning: numpy.core is deprecated and has been renamed to numpy._core. 
The numpy._core namespace contains private NumPy internals and its use is discouraged, as NumPy internals can change without warning in any release. In practice, most real-world usage of numpy.core is to access functionality in the public NumPy API. If that is the case, use the public NumPy API. If not, you are using NumPy internals. If you would still like to access an internal attribute, use numpy._core.fromnumeric. ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9692730430/job/26746686623#step:7:195 --- nibabel/testing/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 0ba82d6cb0..992ef2ead4 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -150,9 +150,10 @@ class clear_and_catch_warnings(warnings.catch_warnings): Examples -------- >>> import warnings - >>> with clear_and_catch_warnings(modules=[np.core.fromnumeric]): + >>> with clear_and_catch_warnings(modules=[np.lib.scimath]): ... warnings.simplefilter('always') - ... # do something that raises a warning in np.core.fromnumeric + ... # do something that raises a warning in np.lib.scimath + ... _ = np.arccos(90) """ class_modules = () From 170b20c53a3c0c0bfae29ebd8c14638cfb9d192e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 5 Jul 2024 21:10:41 -0400 Subject: [PATCH 088/203] FIX: Use legacy numpy printing during doc builds/tests --- doc/source/conf.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/source/conf.py b/doc/source/conf.py index f4ab16d2db..4255ff1841 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -28,6 +28,10 @@ import tomli as tomllib # Check for external Sphinx extensions we depend on +try: + import numpy as np +except ImportError: + raise RuntimeError('Need to install "numpy" package for doc build') try: import numpydoc except ImportError: @@ -45,6 +49,11 @@ 'Need nibabel on Python PATH; consider "make htmldoc" from nibabel root directory' ) +from packaging.version import Version + +if Version(np.__version__) >= Version('1.22'): + np.set_printoptions(legacy='1.21') + # -- General configuration ---------------------------------------------------- # We load the nibabel release info into a dict by explicit execution From 65c3ca28a21b5aa15e0fac06e6b5a3faa0096857 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 7 Jul 2024 07:26:30 -0400 Subject: [PATCH 089/203] MNT: Update coverage config Remove ignored entry, add excludes for patterns that are unreachable or reasonable not to test. 
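As a rough illustration of the new exclusion patterns, lines like the following would be omitted from coverage reports (a minimal sketch with hypothetical names, not code from this repository):

```python
import typing as ty

if ty.TYPE_CHECKING:  # excluded: matches the TYPE_CHECKING pattern
    import numpy.typing as npt


class Reader(ty.Protocol):  # excluded: matches the Protocol pattern
    def read(self, size: int = -1) -> bytes: ...


class Image:
    def __repr__(self):  # excluded: matches the __repr__ pattern
        return 'Image()'

    def load(self):
        raise NotImplementedError  # excluded: matches the NotImplementedError pattern
```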
--- .coveragerc | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.coveragerc b/.coveragerc index bcf28e09c2..8e218461f5 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,9 +1,19 @@ [run] branch = True source = nibabel -include = */nibabel/* omit = */externals/* */benchmarks/* */tests/* nibabel/_version.py + +[report] +exclude_also = + def __repr__ + if (ty\.|typing\.)?TYPE_CHECKING: + class .*\((ty\.|typing\.)Protocol\): + @(ty\.|typing\.)overload + if 0: + if __name__ == .__main__.: + @(abc\.)?abstractmethod + raise NotImplementedError From 2306616a1fb0bf1752b8cd3ad12b19156e64c295 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 8 Jul 2024 11:29:52 -0400 Subject: [PATCH 090/203] MNT: Remove "pragma: no cover" from lines ignored by config --- nibabel/_compression.py | 2 +- nibabel/arrayproxy.py | 12 +++++------- nibabel/dataobj_images.py | 2 +- nibabel/deprecated.py | 2 +- nibabel/deprecator.py | 2 +- nibabel/filebasedimages.py | 14 +++++++------- nibabel/filename_parser.py | 2 +- nibabel/loadsave.py | 2 +- nibabel/onetime.py | 6 ++---- nibabel/openers.py | 7 +++---- nibabel/pointset.py | 8 +++----- nibabel/spatialimages.py | 15 ++++++--------- nibabel/volumeutils.py | 6 +++--- nibabel/xmlutils.py | 8 ++++---- 14 files changed, 39 insertions(+), 49 deletions(-) diff --git a/nibabel/_compression.py b/nibabel/_compression.py index eeb66f36b4..f697fa54cc 100644 --- a/nibabel/_compression.py +++ b/nibabel/_compression.py @@ -17,7 +17,7 @@ from .optpkg import optional_package -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import indexed_gzip # type: ignore[import] import pyzstd diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 4bf5bd4700..ed2310519e 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -57,7 +57,7 @@ KEEP_FILE_OPEN_DEFAULT = False -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt from typing_extensions import Self # PY310 @@ -75,19 +75,17 @@ class ArrayLike(ty.Protocol): shape: tuple[int, ...] @property - def ndim(self) -> int: ... # pragma: no cover + def ndim(self) -> int: ... # If no dtype is passed, any dtype might be returned, depending on the array-like @ty.overload - def __array__( - self, dtype: None = ..., / - ) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # pragma: no cover + def __array__(self, dtype: None = ..., /) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # Any dtype might be passed, and *that* dtype must be returned @ty.overload - def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... # pragma: no cover + def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... - def __getitem__(self, key, /) -> npt.NDArray: ... # pragma: no cover + def __getitem__(self, key, /) -> npt.NDArray: ... 
class ArrayProxy(ArrayLike): diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index e84ac8567a..6850599014 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -19,7 +19,7 @@ from .filebasedimages import FileBasedHeader, FileBasedImage from .fileholders import FileMap -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt from .filename_parser import FileSpec diff --git a/nibabel/deprecated.py b/nibabel/deprecated.py index b8c378cee3..15d3e53265 100644 --- a/nibabel/deprecated.py +++ b/nibabel/deprecated.py @@ -8,7 +8,7 @@ from .deprecator import Deprecator from .pkg_info import cmp_pkg_version -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: P = ty.ParamSpec('P') diff --git a/nibabel/deprecator.py b/nibabel/deprecator.py index 010b1be234..83118dd539 100644 --- a/nibabel/deprecator.py +++ b/nibabel/deprecator.py @@ -9,7 +9,7 @@ import warnings from textwrap import dedent -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: T = ty.TypeVar('T') P = ty.ParamSpec('P') diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 4e0d06b64c..c12644a2bd 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -20,7 +20,7 @@ from .filename_parser import TypesFilenamesError, _stringify_path, splitext_addext, types_filenames from .openers import ImageOpener -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from .filename_parser import ExtensionSpec, FileSpec FileSniff = ty.Tuple[bytes, str] @@ -54,13 +54,13 @@ def from_header(klass: type[HdrT], header: FileBasedHeader | ty.Mapping | None = @classmethod def from_fileobj(klass: type[HdrT], fileobj: io.IOBase) -> HdrT: - raise NotImplementedError # pragma: no cover + raise NotImplementedError def write_to(self, fileobj: io.IOBase) -> None: - raise NotImplementedError # pragma: no cover + raise NotImplementedError def __eq__(self, other: object) -> bool: - raise NotImplementedError # pragma: no cover + raise NotImplementedError def __ne__(self, other: object) -> bool: return not self == other @@ -251,7 +251,7 @@ def from_filename(klass: type[ImgT], filename: FileSpec) -> ImgT: @classmethod def from_file_map(klass: type[ImgT], file_map: FileMap) -> ImgT: - raise NotImplementedError # pragma: no cover + raise NotImplementedError @classmethod def filespec_to_file_map(klass, filespec: FileSpec) -> FileMap: @@ -308,7 +308,7 @@ def to_filename(self, filename: FileSpec, **kwargs) -> None: self.to_file_map(**kwargs) def to_file_map(self, file_map: FileMap | None = None, **kwargs) -> None: - raise NotImplementedError # pragma: no cover + raise NotImplementedError @classmethod def make_file_map(klass, mapping: ty.Mapping[str, str | io.IOBase] | None = None) -> FileMap: @@ -373,7 +373,7 @@ def from_image(klass: type[ImgT], img: FileBasedImage) -> ImgT: img : ``FileBasedImage`` instance Image, of our own class """ - raise NotImplementedError # pragma: no cover + raise NotImplementedError @classmethod def _sniff_meta_for( diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index bdbca6a383..d2c23ae6e4 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -14,7 +14,7 @@ import pathlib import typing as ty -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: FileSpec = str | os.PathLike[str] ExtensionSpec = tuple[str, str | None] diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 159d9bae82..e39aeceba3 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -26,7 
+26,7 @@ _compressed_suffixes = ('.gz', '.bz2', '.zst') -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from .filebasedimages import FileBasedImage from .filename_parser import FileSpec diff --git a/nibabel/onetime.py b/nibabel/onetime.py index fa1b2f9927..5018ba90c5 100644 --- a/nibabel/onetime.py +++ b/nibabel/onetime.py @@ -137,12 +137,10 @@ def __init__(self, func: ty.Callable[[InstanceT], T]) -> None: @ty.overload def __get__( self, obj: None, objtype: type[InstanceT] | None = None - ) -> ty.Callable[[InstanceT], T]: ... # pragma: no cover + ) -> ty.Callable[[InstanceT], T]: ... @ty.overload - def __get__( - self, obj: InstanceT, objtype: type[InstanceT] | None = None - ) -> T: ... # pragma: no cover + def __get__(self, obj: InstanceT, objtype: type[InstanceT] | None = None) -> T: ... def __get__( self, obj: InstanceT | None, objtype: type[InstanceT] | None = None diff --git a/nibabel/openers.py b/nibabel/openers.py index f84ccb7069..c3fa9a4783 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -18,7 +18,7 @@ from ._compression import HAVE_INDEXED_GZIP, IndexedGzipFile, pyzstd -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from types import TracebackType from _typeshed import WriteableBuffer @@ -36,9 +36,8 @@ @ty.runtime_checkable class Fileish(ty.Protocol): - def read(self, size: int = -1, /) -> bytes: ... # pragma: no cover - - def write(self, b: bytes, /) -> int | None: ... # pragma: no cover + def read(self, size: int = -1, /) -> bytes: ... + def write(self, b: bytes, /) -> int | None: ... class DeterministicGzipFile(gzip.GzipFile): diff --git a/nibabel/pointset.py b/nibabel/pointset.py index e39a4d4187..70a802480d 100644 --- a/nibabel/pointset.py +++ b/nibabel/pointset.py @@ -30,7 +30,7 @@ from nibabel.fileslice import strided_scalar from nibabel.spatialimages import SpatialImage -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from typing_extensions import Self _DType = ty.TypeVar('_DType', bound=np.dtype[ty.Any]) @@ -41,12 +41,10 @@ class CoordinateArray(ty.Protocol): shape: tuple[int, int] @ty.overload - def __array__( - self, dtype: None = ..., / - ) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # pragma: no cover + def __array__(self, dtype: None = ..., /) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... @ty.overload - def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... # pragma: no cover + def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... @dataclass diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index 185694cd72..96f8115a22 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -154,7 +154,7 @@ except ImportError: # PY38 from functools import lru_cache as cache -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt SpatialImgT = ty.TypeVar('SpatialImgT', bound='SpatialImage') @@ -162,18 +162,15 @@ class HasDtype(ty.Protocol): - def get_data_dtype(self) -> np.dtype: ... # pragma: no cover - - def set_data_dtype(self, dtype: npt.DTypeLike) -> None: ... # pragma: no cover + def get_data_dtype(self) -> np.dtype: ... + def set_data_dtype(self, dtype: npt.DTypeLike) -> None: ... @ty.runtime_checkable class SpatialProtocol(ty.Protocol): - def get_data_dtype(self) -> np.dtype: ... # pragma: no cover - - def get_data_shape(self) -> ty.Tuple[int, ...]: ... # pragma: no cover - - def get_zooms(self) -> ty.Tuple[float, ...]: ... # pragma: no cover + def get_data_dtype(self) -> np.dtype: ... 
+ def get_data_shape(self) -> ty.Tuple[int, ...]: ... + def get_zooms(self) -> ty.Tuple[float, ...]: ... class HeaderDataError(Exception): diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 379d654a35..29b954dbb3 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -24,7 +24,7 @@ from .casting import OK_FLOATS, shared_range from .externals.oset import OrderedSet -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt Scalar = np.number | float @@ -1191,13 +1191,13 @@ def _ftype4scaled_finite( @ty.overload def finite_range( arr: npt.ArrayLike, check_nan: ty.Literal[False] = False -) -> tuple[Scalar, Scalar]: ... # pragma: no cover +) -> tuple[Scalar, Scalar]: ... @ty.overload def finite_range( arr: npt.ArrayLike, check_nan: ty.Literal[True] -) -> tuple[Scalar, Scalar, bool]: ... # pragma: no cover +) -> tuple[Scalar, Scalar, bool]: ... def finite_range( diff --git a/nibabel/xmlutils.py b/nibabel/xmlutils.py index 5049a76412..5d079e1172 100644 --- a/nibabel/xmlutils.py +++ b/nibabel/xmlutils.py @@ -20,7 +20,7 @@ class XmlSerializable: def _to_xml_element(self) -> Element: """Output should be a xml.etree.ElementTree.Element""" - raise NotImplementedError # pragma: no cover + raise NotImplementedError def to_xml(self, enc='utf-8', **kwargs) -> bytes: r"""Generate an XML bytestring with a given encoding. @@ -109,10 +109,10 @@ def parse(self, string=None, fname=None, fptr=None): parser.ParseFile(fptr) def StartElementHandler(self, name, attrs): - raise NotImplementedError # pragma: no cover + raise NotImplementedError def EndElementHandler(self, name): - raise NotImplementedError # pragma: no cover + raise NotImplementedError def CharacterDataHandler(self, data): - raise NotImplementedError # pragma: no cover + raise NotImplementedError From 043c431ef46c5f6cd301a087bda2173a7972ab75 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 8 Jul 2024 11:40:54 -0400 Subject: [PATCH 091/203] MNT: Require coverage>=7.2 for exclude_also Remove outdated pytest version cap while we're here. 
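For context, `exclude_also` first appeared in coverage 7.2. Older versions only offer `exclude_lines`, which replaces the default exclusions rather than extending them, so the built-in pragma would have to be restated by hand. A rough sketch of the pre-7.2 equivalent, not part of this change:

```ini
[report]
exclude_lines =
    pragma: no cover
    def __repr__
    raise NotImplementedError
```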
--- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4df5886d78..ff5168f9c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,11 +67,12 @@ doc = [ "tomli; python_version < '3.11'", ] test = [ - "pytest<8.1", # relax once pytest-doctestplus releases 1.2.0 + "pytest", "pytest-doctestplus", "pytest-cov", "pytest-httpserver", "pytest-xdist", + "coverage>=7.2", ] # Remaining: Simpler to centralize in tox dev = ["tox"] From ee1c9c43900dc42d511d08a4302d4486c9258250 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 8 Jul 2024 11:42:23 -0400 Subject: [PATCH 092/203] MNT: Stop excluding tests from coverage --- .coveragerc | 1 - 1 file changed, 1 deletion(-) diff --git a/.coveragerc b/.coveragerc index 8e218461f5..f65ab1441f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -4,7 +4,6 @@ source = nibabel omit = */externals/* */benchmarks/* - */tests/* nibabel/_version.py [report] From 07db76b966020b26b636e5fd94b79b8b04b440ab Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 21:16:03 -0400 Subject: [PATCH 093/203] CI: Add 3.13-nogil build --- .github/workflows/test.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2b3d9f2494..2b453e890a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -125,9 +125,9 @@ jobs: - os: ubuntu-latest python-version: 3.8 dependencies: 'min' - # NumPy 2.0 + # NoGIL - os: ubuntu-latest - python-version: '3.12' + python-version: '3.13-dev' dependencies: 'dev' exclude: # x86 for Windows + Python<3.12 @@ -168,11 +168,18 @@ jobs: submodules: recursive fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} + if: "!endsWith(matrix.python-version, '-dev')" uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} allow-prereleases: true + - name: Set up Python ${{ matrix.python-version }} + if: endsWith(matrix.python-version, '-dev') + uses: deadsnakes/action@v3.1.0 + with: + python-version: ${{ matrix.python-version }} + nogil: true - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Install tox From 6efd41a7279de2488aa857518e3ab30e8a8ff6d4 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 21:17:24 -0400 Subject: [PATCH 094/203] TOX: Add a Python 3.13 environment --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index b9ac9557cb..02de7a7e08 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ envlist = # x64-only range py312-{full,pre}-x64 # Special environment for numpy 2.0-dev testing - py312-dev-x64 + py313-dev-x64 install doctest style @@ -31,6 +31,7 @@ python = 3.10: py310 3.11: py311 3.12: py312 + 3.13: py313 [gh-actions:env] DEPENDS = From cb73d1c6dfcd0e8ca93011125cf507c85987f1ad Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 21:26:42 -0400 Subject: [PATCH 095/203] TOX: Drop h5py and indexed_gzip dependencies for dev Allow pillow and matplotlib to be built from sdist in dev environments. 
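As background, tox applies these dependency and install-command lines per environment through factor conditions: a `dev:` prefix applies only to environments whose name contains the `dev` factor, and `!dev:` only to those that do not. A minimal sketch of the mechanism, with hypothetical package names:

```ini
[testenv]
deps =
    # installed in every environment
    pytest
    # only in environments whose name contains the 'dev' factor
    dev: somepkg-nightly
    # only in environments without the 'dev' factor
    !dev: somepkg
```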
--- tox.ini | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 02de7a7e08..5b4dcc0174 100644 --- a/tox.ini +++ b/tox.ini @@ -51,7 +51,8 @@ description = Pytest with coverage labels = test install_command = python -I -m pip install -v \ - --only-binary numpy,scipy,h5py,pillow,matplotlib \ + dev: --only-binary numpy,scipy,h5py \ + !dev: --only-binary numpy,scipy,h5py,pillow,matplotlib \ pre,dev: --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ {opts} {packages} pip_pre = @@ -90,15 +91,15 @@ deps = # Numpy 2.0 is a major breaking release; we cannot put much effort into # supporting until it's at least RC stable pre: numpy <2.0.dev0 - dev: numpy >=2.0.dev0 + dev: numpy >=2.1.dev0 # Scipy stopped producing win32 wheels at py310 py3{8,9}-full-x86,x64,arm64: scipy >=1.6 # Matplotlib depends on scipy, so cannot be built for py310 on x86 py3{8,9}-full-x86,x64,arm64: matplotlib >=3.4 # h5py stopped producing win32 wheels at py39 - py38-full-x86,x64,arm64: h5py >=2.10 + py38-full-x86,{full,pre}-{x64,arm64}: h5py >=2.10 full,pre,dev: pillow >=8.1 - full,pre,dev: indexed_gzip >=1.4 + full,pre: indexed_gzip >=1.4 full,pre,dev: pyzstd >=0.14.3 full,pre: pydicom >=2.1 dev: pydicom @ git+https://github.com/pydicom/pydicom.git@main From a14ead51ccb8ff3da9603e5ca0002857de18ae6d Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 22:01:49 -0400 Subject: [PATCH 096/203] CI: Run tox in debug to see what files are downloaded --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2b453e890a..05718dc1ff 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -189,7 +189,7 @@ jobs: - name: Show tox config run: tox c - name: Run tox - run: tox -v --exit-and-dump-after 1200 + run: tox -vv --exit-and-dump-after 1200 - uses: codecov/codecov-action@v4 if: ${{ always() }} with: From 880e13e3dcd30b077762e1c8b46ce76496bd28b8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 22:08:41 -0400 Subject: [PATCH 097/203] TOX: Add PYTHON_GIL=0 to py313 environments --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index 5b4dcc0174..5df35c8d38 100644 --- a/tox.ini +++ b/tox.ini @@ -71,6 +71,8 @@ pass_env = NO_COLOR CLICOLOR CLICOLOR_FORCE +set_env = + py313: PYTHON_GIL=0 extras = test deps = # General minimum dependencies: pin based on API usage From e0e50df3e8fb7a48fba207098ec446abf9d2efed Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 25 Jul 2024 11:34:36 -0400 Subject: [PATCH 098/203] RF: Replace OneTimeProperty/auto_attr with cached_property --- nibabel/onetime.py | 114 +++++++--------------------------- nibabel/tests/test_onetime.py | 40 ++++++++---- 2 files changed, 51 insertions(+), 103 deletions(-) diff --git a/nibabel/onetime.py b/nibabel/onetime.py index 5018ba90c5..f6d3633af3 100644 --- a/nibabel/onetime.py +++ b/nibabel/onetime.py @@ -1,9 +1,12 @@ """Descriptor support for NIPY -Utilities to support special Python descriptors [1,2], in particular the use of -a useful pattern for properties we call 'one time properties'. These are -object attributes which are declared as properties, but become regular -attributes once they've been read the first time. 
They can thus be evaluated +Utilities to support special Python descriptors [1,2], in particular +:func:`~functools.cached_property`, which has been available in the Python +standard library since Python 3.8. We currently maintain aliases from +earlier names for this descriptor, specifically `OneTimeProperty` and `auto_attr`. + +:func:`~functools.cached_property` creates properties that are computed once +and then stored as regular attributes. They can thus be evaluated later in the object's life cycle, but once evaluated they become normal, static attributes with no function call overhead on access or any other constraints. @@ -21,10 +24,7 @@ from __future__ import annotations -import typing as ty - -InstanceT = ty.TypeVar('InstanceT') -T = ty.TypeVar('T') +from functools import cached_property from nibabel.deprecated import deprecate_with_version @@ -34,22 +34,22 @@ class ResetMixin: - """A Mixin class to add a .reset() method to users of OneTimeProperty. + """A Mixin class to add a .reset() method to users of cached_property. - By default, auto attributes once computed, become static. If they happen + By default, cached properties, once computed, become static. If they happen to depend on other parts of an object and those parts change, their values may now be invalid. This class offers a .reset() method that users can call *explicitly* when they know the state of their objects may have changed and they want to ensure that *all* their special attributes should be invalidated. Once - reset() is called, all their auto attributes are reset to their - OneTimeProperty descriptors, and their accessor functions will be triggered - again. + reset() is called, all their cached properties are reset to their + :func:`~functools.cached_property` descriptors, + and their accessor functions will be triggered again. .. warning:: - If a class has a set of attributes that are OneTimeProperty, but that + If a class has a set of attributes that are cached_property, but that can be initialized from any one of them, do NOT use this mixin! For instance, UniformTimeSeries can be initialized with only sampling_rate and t0, sampling_interval and time are auto-computed. But if you were @@ -68,15 +68,15 @@ class ResetMixin: ... def __init__(self,x=1.0): ... self.x = x ... - ... @auto_attr + ... @cached_property ... def y(self): ... print('*** y computation executed ***') ... return self.x / 2.0 - ... >>> a = A(10) About to access y twice, the second time no computation is done: + >>> a.y *** y computation executed *** 5.0 @@ -84,17 +84,21 @@ class ResetMixin: 5.0 Changing x + >>> a.x = 20 a.y doesn't change to 10, since it is a static attribute: + >>> a.y 5.0 We now reset a, and this will then force all auto attributes to recompute the next time we access them: + >>> a.reset() About to access y twice again after reset(): + >>> a.y *** y computation executed *** 10.0 @@ -103,88 +107,18 @@ class ResetMixin: """ def reset(self) -> None: - """Reset all OneTimeProperty attributes that may have fired already.""" + """Reset all cached_property attributes that may have fired already.""" # To reset them, we simply remove them from the instance dict. At that # point, it's as if they had never been computed. On the next access, # the accessor function from the parent class will be called, simply # because that's how the python descriptor protocol works. 
for mname, mval in self.__class__.__dict__.items(): - if mname in self.__dict__ and isinstance(mval, OneTimeProperty): + if mname in self.__dict__ and isinstance(mval, cached_property): delattr(self, mname) -class OneTimeProperty(ty.Generic[T]): - """A descriptor to make special properties that become normal attributes. - - This is meant to be used mostly by the auto_attr decorator in this module. - """ - - def __init__(self, func: ty.Callable[[InstanceT], T]) -> None: - """Create a OneTimeProperty instance. - - Parameters - ---------- - func : method - - The method that will be called the first time to compute a value. - Afterwards, the method's name will be a standard attribute holding - the value of this computation. - """ - self.getter = func - self.name = func.__name__ - self.__doc__ = func.__doc__ - - @ty.overload - def __get__( - self, obj: None, objtype: type[InstanceT] | None = None - ) -> ty.Callable[[InstanceT], T]: ... - - @ty.overload - def __get__(self, obj: InstanceT, objtype: type[InstanceT] | None = None) -> T: ... - - def __get__( - self, obj: InstanceT | None, objtype: type[InstanceT] | None = None - ) -> T | ty.Callable[[InstanceT], T]: - """This will be called on attribute access on the class or instance.""" - if obj is None: - # Being called on the class, return the original function. This - # way, introspection works on the class. - return self.getter - - # Errors in the following line are errors in setting a OneTimeProperty - val = self.getter(obj) - - obj.__dict__[self.name] = val - return val - - -def auto_attr(func: ty.Callable[[InstanceT], T]) -> OneTimeProperty[T]: - """Decorator to create OneTimeProperty attributes. - - Parameters - ---------- - func : method - The method that will be called the first time to compute a value. - Afterwards, the method's name will be a standard attribute holding the - value of this computation. - - Examples - -------- - >>> class MagicProp: - ... @auto_attr - ... def a(self): - ... return 99 - ... - >>> x = MagicProp() - >>> 'a' in x.__dict__ - False - >>> x.a - 99 - >>> 'a' in x.__dict__ - True - """ - return OneTimeProperty(func) - +OneTimeProperty = cached_property +auto_attr = cached_property # ----------------------------------------------------------------------------- # Deprecated API diff --git a/nibabel/tests/test_onetime.py b/nibabel/tests/test_onetime.py index 4d72949271..d6b4579534 100644 --- a/nibabel/tests/test_onetime.py +++ b/nibabel/tests/test_onetime.py @@ -1,7 +1,22 @@ -from nibabel.onetime import auto_attr, setattr_on_read +from functools import cached_property + +from nibabel.onetime import ResetMixin, setattr_on_read from nibabel.testing import deprecated_to, expires +class A(ResetMixin): + @cached_property + def y(self): + return self.x / 2.0 + + @cached_property + def z(self): + return self.x / 3.0 + + def __init__(self, x=1.0): + self.x = x + + @expires('5.0.0') def test_setattr_on_read(): with deprecated_to('5.0.0'): @@ -19,15 +34,14 @@ def a(self): assert x.a is obj -def test_auto_attr(): - class MagicProp: - @auto_attr - def a(self): - return object() - - x = MagicProp() - assert 'a' not in x.__dict__ - obj = x.a - assert 'a' in x.__dict__ - # Each call to object() produces a unique object. Verify we get the same one every time. 
- assert x.a is obj +def test_ResetMixin(): + a = A(10) + assert 'y' not in a.__dict__ + assert a.y == 5 + assert 'y' in a.__dict__ + a.x = 20 + assert a.y == 5 + # Call reset and no error should be raised even though z was never accessed + a.reset() + assert 'y' not in a.__dict__ + assert a.y == 10 From c7c98f7dae9733e38892b70bfcd190610e21c5d0 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 25 Jul 2024 11:42:00 -0400 Subject: [PATCH 099/203] DOC: Use packaging.version.Version over LooseVersion --- doc/tools/build_modref_templates.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index 0e82cf6bf8..76cf9cdf39 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -9,7 +9,7 @@ import sys # version comparison -from distutils.version import LooseVersion as V +from packaging.version import Version as V from os.path import join as pjoin # local imports @@ -73,6 +73,8 @@ def abort(error): if re.match('^_version_(major|minor|micro|extra)', v) ] ) + + source_version = V(source_version) print('***', source_version) if source_version != installed_version: From b6eccc250cc56ddc1cb8a81b240f0bc0e3325436 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 25 Jul 2024 12:04:29 -0400 Subject: [PATCH 100/203] RF: nibabel.onetime.auto_attr -> functools.cached_property --- nibabel/nicom/dicomwrappers.py | 46 +++++++++++++++++----------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index a5ea550d87..2270ed3f05 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -14,12 +14,12 @@ import operator import warnings +from functools import cached_property import numpy as np from nibabel.optpkg import optional_package -from ..onetime import auto_attr as one_time from ..openers import ImageOpener from . import csareader as csar from .dwiparams import B2q, nearest_pos_semi_def, q2bg @@ -140,7 +140,7 @@ def __init__(self, dcm_data): """ self.dcm_data = dcm_data - @one_time + @cached_property def image_shape(self): """The array shape as it will be returned by ``get_data()``""" shape = (self.get('Rows'), self.get('Columns')) @@ -148,7 +148,7 @@ def image_shape(self): return None return shape - @one_time + @cached_property def image_orient_patient(self): """Note that this is _not_ LR flipped""" iop = self.get('ImageOrientationPatient') @@ -158,7 +158,7 @@ def image_orient_patient(self): iop = np.array(list(map(float, iop))) return np.array(iop).reshape(2, 3).T - @one_time + @cached_property def slice_normal(self): iop = self.image_orient_patient if iop is None: @@ -166,7 +166,7 @@ def slice_normal(self): # iop[:, 0] is column index cosine, iop[:, 1] is row index cosine return np.cross(iop[:, 1], iop[:, 0]) - @one_time + @cached_property def rotation_matrix(self): """Return rotation matrix between array indices and mm @@ -193,7 +193,7 @@ def rotation_matrix(self): raise WrapperPrecisionError('Rotation matrix not nearly orthogonal') return R - @one_time + @cached_property def voxel_sizes(self): """voxel sizes for array as returned by ``get_data()``""" # pix space gives (row_spacing, column_spacing). 
That is, the @@ -212,7 +212,7 @@ def voxel_sizes(self): pix_space = list(map(float, pix_space)) return tuple(pix_space + [zs]) - @one_time + @cached_property def image_position(self): """Return position of first voxel in data block @@ -231,7 +231,7 @@ def image_position(self): # Values are python Decimals in pydicom 0.9.7 return np.array(list(map(float, ipp))) - @one_time + @cached_property def slice_indicator(self): """A number that is higher for higher slices in Z @@ -246,12 +246,12 @@ def slice_indicator(self): return None return np.inner(ipp, s_norm) - @one_time + @cached_property def instance_number(self): """Just because we use this a lot for sorting""" return self.get('InstanceNumber') - @one_time + @cached_property def series_signature(self): """Signature for matching slices into series @@ -390,7 +390,7 @@ def _apply_scale_offset(self, data, scale, offset): return data + offset return data - @one_time + @cached_property def b_value(self): """Return b value for diffusion or None if not available""" q_vec = self.q_vector @@ -398,7 +398,7 @@ def b_value(self): return None return q2bg(q_vec)[0] - @one_time + @cached_property def b_vector(self): """Return b vector for diffusion or None if not available""" q_vec = self.q_vector @@ -469,7 +469,7 @@ def __init__(self, dcm_data): raise WrapperError('SharedFunctionalGroupsSequence is empty.') self._shape = None - @one_time + @cached_property def image_shape(self): """The array shape as it will be returned by ``get_data()`` @@ -573,7 +573,7 @@ def image_shape(self): ) return tuple(shape) - @one_time + @cached_property def image_orient_patient(self): """ Note that this is _not_ LR flipped @@ -590,7 +590,7 @@ def image_orient_patient(self): iop = np.array(list(map(float, iop))) return np.array(iop).reshape(2, 3).T - @one_time + @cached_property def voxel_sizes(self): """Get i, j, k voxel sizes""" try: @@ -610,7 +610,7 @@ def voxel_sizes(self): # Ensure values are float rather than Decimal return tuple(map(float, list(pix_space) + [zs])) - @one_time + @cached_property def image_position(self): try: ipp = self.shared.PlanePositionSequence[0].ImagePositionPatient @@ -623,7 +623,7 @@ def image_position(self): return None return np.array(list(map(float, ipp))) - @one_time + @cached_property def series_signature(self): signature = {} eq = operator.eq @@ -696,7 +696,7 @@ def __init__(self, dcm_data, csa_header=None): csa_header = {} self.csa_header = csa_header - @one_time + @cached_property def slice_normal(self): # The std_slice_normal comes from the cross product of the directions # in the ImageOrientationPatient @@ -720,7 +720,7 @@ def slice_normal(self): else: return std_slice_normal - @one_time + @cached_property def series_signature(self): """Add ICE dims from CSA header to signature""" signature = super().series_signature @@ -730,7 +730,7 @@ def series_signature(self): signature['ICE_Dims'] = (ice, operator.eq) return signature - @one_time + @cached_property def b_matrix(self): """Get DWI B matrix referring to voxel space @@ -767,7 +767,7 @@ def b_matrix(self): # semi-definite. 
return nearest_pos_semi_def(B_vox) - @one_time + @cached_property def q_vector(self): """Get DWI q vector referring to voxel space @@ -840,7 +840,7 @@ def __init__(self, dcm_data, csa_header=None, n_mosaic=None): self.n_mosaic = n_mosaic self.mosaic_size = int(np.ceil(np.sqrt(n_mosaic))) - @one_time + @cached_property def image_shape(self): """Return image shape as returned by ``get_data()``""" # reshape pixel slice array back from mosaic @@ -850,7 +850,7 @@ def image_shape(self): return None return (rows // self.mosaic_size, cols // self.mosaic_size, self.n_mosaic) - @one_time + @cached_property def image_position(self): """Return position of first voxel in data block From c49dff290f6113327eaa62bbd8aff4da924dd54a Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Tue, 18 Jun 2024 16:44:03 -0700 Subject: [PATCH 101/203] BF: Fix for 'split' (concatenated?) multiframe DICOM Can't just use number of frame indices to determine shape of data, as the actual frames could still be split into different files. Also can't assume a multiframe file is more than a single slice. --- nibabel/nicom/dicomwrappers.py | 34 ++++++++++++++++------------------ 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 2270ed3f05..894a0ed219 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -554,23 +554,20 @@ def image_shape(self): raise WrapperError('Missing information, cannot remove indices with confidence.') derived_dim_idx = dim_seq.index(derived_tag) frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) - # account for the 2 additional dimensions (row and column) not included - # in the indices - n_dim = frame_indices.shape[1] + 2 # Store frame indices self._frame_indices = frame_indices - if n_dim < 4: # 3D volume - return rows, cols, n_frames - # More than 3 dimensions + # Determine size of any extra-spatial dimensions ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] - shape = (rows, cols) + tuple(ns_unique) - n_vols = np.prod(shape[3:]) - n_frames_calc = n_vols * shape[2] - if n_frames != n_frames_calc: - raise WrapperError( - f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' - f'of shape {shape} does not match NumberOfFrames {n_frames}.' - ) + shape = (rows, cols) + tuple(x for i, x in enumerate(ns_unique) if i == 0 or x != 1) + n_dim = len(shape) + if n_dim > 3: + n_vols = np.prod(shape[3:]) + n_frames_calc = n_vols * shape[2] + if n_frames != n_frames_calc: + raise WrapperError( + f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' + f'of shape {shape} does not match NumberOfFrames {n_frames}.' 
+ ) return tuple(shape) @cached_property @@ -640,10 +637,11 @@ def get_data(self): raise WrapperError('No valid information for image shape') data = self.get_pixel_array() # Roll frames axis to last - data = data.transpose((1, 2, 0)) - # Sort frames with first index changing fastest, last slowest - sorted_indices = np.lexsort(self._frame_indices.T) - data = data[..., sorted_indices] + if len(data.shape) > 2: + data = data.transpose((1, 2, 0)) + # Sort frames with first index changing fastest, last slowest + sorted_indices = np.lexsort(self._frame_indices.T) + data = data[..., sorted_indices] data = data.reshape(shape, order='F') return self._scale_data(data) From 4063114c2bde09f34d88c1193a5fd20adc8c1932 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 15:29:26 -0700 Subject: [PATCH 102/203] BF+TST: Test and fix a bunch of multiframe fixes Corrects issue where order of slice indices was assumed to match the order needed to move along the direction of the slice normal, which resulted in slice orientation flips. Ignores indices that don't evenly divide data, and at the end will try to combine those indices (if needed) into a single tuple index. --- nibabel/nicom/dicomwrappers.py | 124 +++++++++++++++----- nibabel/nicom/tests/test_dicomwrappers.py | 132 ++++++++++++++++++---- 2 files changed, 203 insertions(+), 53 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 894a0ed219..c3f484a003 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -467,6 +467,25 @@ def __init__(self, dcm_data): self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0] except TypeError: raise WrapperError('SharedFunctionalGroupsSequence is empty.') + # Try to determine slice order and minimal image position patient + self._frame_slc_ord = self._ipp = None + try: + frame_ipps = [self.shared.PlanePositionSequence[0].ImagePositionPatient] + except AttributeError: + try: + frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] + except AttributeError: + frame_ipps = None + if frame_ipps is not None and all(ipp is not None for ipp in frame_ipps): + frame_ipps = [np.array(list(map(float, ipp))) for ipp in frame_ipps] + frame_slc_pos = [np.inner(ipp, self.slice_normal) for ipp in frame_ipps] + rnd_slc_pos = np.round(frame_slc_pos, 4) + uniq_slc_pos = np.unique(rnd_slc_pos) + pos_ord_map = { + val: order for val, order in zip(uniq_slc_pos, np.argsort(uniq_slc_pos)) + } + self._frame_slc_ord = [pos_ord_map[pos] for pos in rnd_slc_pos] + self._ipp = frame_ipps[np.argmin(frame_slc_pos)] self._shape = None @cached_property @@ -509,14 +528,16 @@ def image_shape(self): if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): # DWI image may include derived isotropic, ADC or trace volume try: - anisotropic = pydicom.Sequence( - frame - for frame in self.frames - if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC' - ) + aniso_frames = pydicom.Sequence() + aniso_slc_ord = [] + for slc_ord, frame in zip(self._frame_slc_ord, self.frames): + if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC': + aniso_frames.append(frame) + aniso_slc_ord.append(slc_ord) # Image contains DWI volumes followed by derived images; remove derived images - if len(anisotropic) != 0: - self.frames = anisotropic + if len(aniso_frames) != 0: + self.frames = aniso_frames + self._frame_slc_ord = aniso_slc_ord except IndexError: # Sequence tag is found but missing items! 
raise WrapperError('Diffusion file missing information') @@ -554,20 +575,70 @@ def image_shape(self): raise WrapperError('Missing information, cannot remove indices with confidence.') derived_dim_idx = dim_seq.index(derived_tag) frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) + # Determine the shape and which indices to use + shape = [rows, cols] + curr_parts = n_frames + frames_per_part = 1 + del_indices = {} + for row_idx, row in enumerate(frame_indices.T): + if curr_parts == 1: + break + unique = np.unique(row) + count = len(unique) + if count == 1: + continue + # Replace slice indices with order determined from slice positions along normal + if len(shape) == 2: + row = self._frame_slc_ord + frame_indices.T[row_idx, :] = row + unique = np.unique(row) + if len(unique) != count: + raise WrapperError("Number of slice indices and positions don't match") + new_parts, leftover = divmod(curr_parts, count) + allowed_val_counts = [new_parts * frames_per_part] + if len(shape) > 2: + # Except for the slice dim, having a unique value for each frame is valid + allowed_val_counts.append(n_frames) + if leftover != 0 or any( + np.count_nonzero(row == val) not in allowed_val_counts for val in unique + ): + if len(shape) == 2: + raise WrapperError('Missing slices from multiframe') + del_indices[row_idx] = count + continue + frames_per_part *= count + shape.append(count) + curr_parts = new_parts + if del_indices: + if curr_parts > 1: + ns_failed = [k for k, v in del_indices.items() if v != 1] + if len(ns_failed) > 1: + # If some indices weren't used yet but we still have unaccounted for + # partitions, try combining indices into single tuple and using that + tup_dtype = np.dtype(','.join(['I'] * len(ns_failed))) + row = [tuple(x for x in vals) for vals in frame_indices[:, ns_failed]] + row = np.array(row, dtype=tup_dtype) + frame_indices = np.delete(frame_indices, np.array(list(del_indices.keys())), axis=1) + if curr_parts > 1 and len(ns_failed) > 1: + unique = np.unique(row, axis=0) + count = len(unique) + new_parts, rem = divmod(curr_parts, count) + allowed_val_counts = [new_parts * frames_per_part, n_frames] + if rem == 0 and all( + np.count_nonzero(row == val) in allowed_val_counts for val in unique + ): + shape.append(count) + curr_parts = new_parts + ord_vals = np.argsort(unique) + order = {tuple(unique[i]): ord_vals[i] for i in range(count)} + ord_row = np.array([order[tuple(v)] for v in row]) + frame_indices = np.hstack( + [frame_indices, np.array(ord_row).reshape((n_frames, 1))] + ) + if curr_parts > 1: + raise WrapperError('Unable to determine sorting of final dimension(s)') # Store frame indices self._frame_indices = frame_indices - # Determine size of any extra-spatial dimensions - ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] - shape = (rows, cols) + tuple(x for i, x in enumerate(ns_unique) if i == 0 or x != 1) - n_dim = len(shape) - if n_dim > 3: - n_vols = np.prod(shape[3:]) - n_frames_calc = n_vols * shape[2] - if n_frames != n_frames_calc: - raise WrapperError( - f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' - f'of shape {shape} does not match NumberOfFrames {n_frames}.' 
- ) return tuple(shape) @cached_property @@ -607,18 +678,11 @@ def voxel_sizes(self): # Ensure values are float rather than Decimal return tuple(map(float, list(pix_space) + [zs])) - @cached_property + @property def image_position(self): - try: - ipp = self.shared.PlanePositionSequence[0].ImagePositionPatient - except AttributeError: - try: - ipp = self.frames[0].PlanePositionSequence[0].ImagePositionPatient - except AttributeError: - raise WrapperError('Cannot get image position from dicom') - if ipp is None: - return None - return np.array(list(map(float, ipp))) + if self._ipp is None: + raise WrapperError('Not enough information for image_position_patient') + return self._ipp @cached_property def series_signature(self): diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index d14c35dcdb..25a58d70e5 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -364,7 +364,7 @@ def test_decimal_rescale(): assert dw.get_data().dtype != np.dtype(object) -def fake_frames(seq_name, field_name, value_seq): +def fake_frames(seq_name, field_name, value_seq, frame_seq=None): """Make fake frames for multiframe testing Parameters @@ -375,6 +375,8 @@ def fake_frames(seq_name, field_name, value_seq): name of field within sequence value_seq : length N sequence sequence of values + frame_seq : length N list + previous result from this function to update Returns ------- @@ -386,19 +388,28 @@ def fake_frames(seq_name, field_name, value_seq): class Fake: pass - frames = [] - for value in value_seq: - fake_frame = Fake() + if frame_seq == None: + frame_seq = [Fake() for _ in range(len(value_seq))] + for value, fake_frame in zip(value_seq, frame_seq): fake_element = Fake() setattr(fake_element, field_name, value) setattr(fake_frame, seq_name, [fake_element]) - frames.append(fake_frame) - return frames + return frame_seq -def fake_shape_dependents(div_seq, sid_seq=None, sid_dim=None): +def fake_shape_dependents( + div_seq, + sid_seq=None, + sid_dim=None, + ipp_seq=None, + slice_dim=None, + flip_ipp_idx_corr=False, +): """Make a fake dictionary of data that ``image_shape`` is dependent on. + If you are providing the ``ipp_seq`` argument, they should be generated using + a slice normal aligned with the z-axis (i.e. iop == (0, 1, 0, 1, 0, 0)). + Parameters ---------- div_seq : list of tuples @@ -407,39 +418,86 @@ def fake_shape_dependents(div_seq, sid_seq=None, sid_dim=None): list of values to use for the `StackID` of each frame. 
sid_dim : int the index of the column in 'div_seq' to use as 'sid_seq' + ipp_seq : list of tuples + list of values to use for `ImagePositionPatient` for each frame + slice_dim : int + the index of the column in 'div_seq' corresponding to slices + flip_ipp_idx_corr : bool + generate ipp values so slice location is negatively correlated with slice index """ - class DimIdxSeqElem: + class PrintBase: + def __repr__(self): + attr_strs = [] + for attr in dir(self): + if attr[0].isupper(): + attr_strs.append(f'{attr}={getattr(self, attr)}') + return f"{self.__class__.__name__}({', '.join(attr_strs)})" + + class DimIdxSeqElem(PrintBase): def __init__(self, dip=(0, 0), fgp=None): self.DimensionIndexPointer = dip if fgp is not None: self.FunctionalGroupPointer = fgp - class FrmContSeqElem: + class FrmContSeqElem(PrintBase): def __init__(self, div, sid): self.DimensionIndexValues = div self.StackID = sid - class PerFrmFuncGrpSeqElem: - def __init__(self, div, sid): + class PlnPosSeqElem(PrintBase): + def __init__(self, ipp): + self.ImagePositionPatient = ipp + + class PlnOrientSeqElem(PrintBase): + def __init__(self, iop): + self.ImageOrientationPatient = iop + + class PerFrmFuncGrpSeqElem(PrintBase): + def __init__(self, div, sid, ipp, iop): self.FrameContentSequence = [FrmContSeqElem(div, sid)] + self.PlanePositionSequence = [PlnPosSeqElem(ipp)] + self.PlaneOrientationSequence = [PlnOrientSeqElem(iop)] # if no StackID values passed in then use the values at index 'sid_dim' in # the value for DimensionIndexValues for it + n_indices = len(div_seq[0]) if sid_seq is None: if sid_dim is None: sid_dim = 0 sid_seq = [div[sid_dim] for div in div_seq] - # create the DimensionIndexSequence + # Determine slice_dim and create per-slice ipp information + if slice_dim is None: + slice_dim = 1 if sid_dim == 0 else 0 num_of_frames = len(div_seq) - dim_idx_seq = [DimIdxSeqElem()] * num_of_frames + frame_slc_indices = np.array(div_seq)[:, slice_dim] + uniq_slc_indices = np.unique(frame_slc_indices) + n_slices = len(uniq_slc_indices) + assert num_of_frames % n_slices == 0 + iop_seq = [(0.0, 1.0, 0.0, 1.0, 0.0, 0.0) for _ in range(num_of_frames)] + if ipp_seq is None: + slc_locs = np.linspace(-1.0, 1.0, n_slices) + if flip_ipp_idx_corr: + slc_locs = slc_locs[::-1] + slc_idx_loc = { + div_idx: slc_locs[arr_idx] for arr_idx, div_idx in enumerate(np.sort(uniq_slc_indices)) + } + ipp_seq = [(-1.0, -1.0, slc_idx_loc[idx]) for idx in frame_slc_indices] + else: + assert flip_ipp_idx_corr is False # caller can flip it themselves + assert len(ipp_seq) == num_of_frames + # create the DimensionIndexSequence + dim_idx_seq = [DimIdxSeqElem()] * n_indices # add an entry for StackID into the DimensionIndexSequence if sid_dim is not None: sid_tag = pydicom.datadict.tag_for_keyword('StackID') fcs_tag = pydicom.datadict.tag_for_keyword('FrameContentSequence') dim_idx_seq[sid_dim] = DimIdxSeqElem(sid_tag, fcs_tag) # create the PerFrameFunctionalGroupsSequence - frames = [PerFrmFuncGrpSeqElem(div, sid) for div, sid in zip(div_seq, sid_seq)] + frames = [ + PerFrmFuncGrpSeqElem(div, sid, ipp, iop) + for div, sid, ipp, iop in zip(div_seq, sid_seq, ipp_seq, iop_seq) + ] return { 'NumberOfFrames': num_of_frames, 'DimensionIndexSequence': dim_idx_seq, @@ -480,7 +538,15 @@ def test_shape(self): # PerFrameFunctionalGroupsSequence does not match NumberOfFrames with pytest.raises(AssertionError): dw.image_shape - # check 3D shape when StackID index is 0 + # check 2D shape with StackID index is 0 + div_seq = ((1, 1),) + 
fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64) + # Check 2D shape with extraneous extra indices + div_seq = ((1, 1, 2),) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64) + # Check 3D shape when StackID index is 0 div_seq = ((1, 1), (1, 2), (1, 3), (1, 4)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 4) @@ -541,6 +607,18 @@ def test_shape(self): div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3) + # Test with combo indices, here with the last two needing to be combined into + # a single index corresponding to [(1, 1), (1, 1), (2, 1), (2, 1), (2, 2), (2, 2)] + div_seq = ( + (1, 1, 1, 1), + (1, 2, 1, 1), + (1, 1, 2, 1), + (1, 2, 2, 1), + (1, 1, 2, 2), + (1, 2, 2, 2), + ) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3) def test_iop(self): # Test Image orient patient for multiframe @@ -608,22 +686,30 @@ def test_image_position(self): with pytest.raises(didw.WrapperError): dw.image_position # Make a fake frame - fake_frame = fake_frames( - 'PlanePositionSequence', 'ImagePositionPatient', [[-2.0, 3.0, 7]] - )[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] + iop = [0, 1, 0, 1, 0, 0] + frames = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', [iop]) + frames = fake_frames( + 'PlanePositionSequence', 'ImagePositionPatient', [[-2.0, 3.0, 7]], frames + ) + fake_mf['SharedFunctionalGroupsSequence'] = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) fake_mf['SharedFunctionalGroupsSequence'] = [None] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_position - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] + fake_mf['PerFrameFunctionalGroupsSequence'] = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) # Check lists of Decimals work - fake_frame.PlanePositionSequence[0].ImagePositionPatient = [ + frames[0].PlanePositionSequence[0].ImagePositionPatient = [ Decimal(str(v)) for v in [-2, 3, 7] ] assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) assert MFW(fake_mf).image_position.dtype == float + # We should get minimum along slice normal with multiple frames + frames = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', [iop] * 2) + ipps = [[-2.0, 3.0, 7], [-2.0, 3.0, 6]] + frames = fake_frames('PlanePositionSequence', 'ImagePositionPatient', ipps, frames) + fake_mf['PerFrameFunctionalGroupsSequence'] = frames + assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 6]) @dicom_test @pytest.mark.xfail(reason='Not packaged in install', raises=FileNotFoundError) @@ -644,7 +730,7 @@ def test_data_real(self): if endian_codes[data.dtype.byteorder] == '>': data = data.byteswap() dat_str = data.tobytes() - assert sha1(dat_str).hexdigest() == '149323269b0af92baa7508e19ca315240f77fa8c' + assert sha1(dat_str).hexdigest() == 'dc011bb49682fb78f3cebacf965cb65cc9daba7d' @dicom_test def test_slicethickness_fallback(self): @@ -665,7 +751,7 @@ def test_data_derived_shape(self): def test_data_trace(self): # Test that a standalone trace volume is found and not dropped dw = didw.wrapper_from_file(DATA_FILE_SIEMENS_TRACE) - assert dw.image_shape == (72, 72, 39, 1) + assert dw.image_shape == (72, 72, 39) @dicom_test @needs_nibabel_data('nitest-dicom') From 
14c24ef7fc156d2a0bb760304e482cfde4694bc3 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 16:34:11 -0700 Subject: [PATCH 103/203] BF: Trim unneeded trailing indices from _frame_indices --- nibabel/nicom/dicomwrappers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index c3f484a003..eab0471ec4 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -581,11 +581,10 @@ def image_shape(self): frames_per_part = 1 del_indices = {} for row_idx, row in enumerate(frame_indices.T): - if curr_parts == 1: - break unique = np.unique(row) count = len(unique) - if count == 1: + if count == 1 or curr_parts == 1: + del_indices[row_idx] = count continue # Replace slice indices with order determined from slice positions along normal if len(shape) == 2: From 019f448c9924e352ed5503aae384b59918bb1d95 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 17:09:09 -0700 Subject: [PATCH 104/203] BF+TST: Fix 2D plus time case Explicitly use `InStackPositionNumber` to identify the slice dim, produce correct output for 2D + time data. --- nibabel/nicom/dicomwrappers.py | 17 +++++++++++------ nibabel/nicom/tests/test_dicomwrappers.py | 9 ++++++++- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index eab0471ec4..14041e631f 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -470,10 +470,10 @@ def __init__(self, dcm_data): # Try to determine slice order and minimal image position patient self._frame_slc_ord = self._ipp = None try: - frame_ipps = [self.shared.PlanePositionSequence[0].ImagePositionPatient] + frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] except AttributeError: try: - frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] + frame_ipps = [self.shared.PlanePositionSequence[0].ImagePositionPatient] except AttributeError: frame_ipps = None if frame_ipps is not None and all(ipp is not None for ipp in frame_ipps): @@ -575,19 +575,24 @@ def image_shape(self): raise WrapperError('Missing information, cannot remove indices with confidence.') derived_dim_idx = dim_seq.index(derived_tag) frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) + dim_seq.pop(derived_dim_idx) # Determine the shape and which indices to use shape = [rows, cols] curr_parts = n_frames frames_per_part = 1 del_indices = {} + stackpos_tag = pydicom.datadict.tag_for_keyword('InStackPositionNumber') + slice_dim_idx = dim_seq.index(stackpos_tag) for row_idx, row in enumerate(frame_indices.T): unique = np.unique(row) count = len(unique) - if count == 1 or curr_parts == 1: + if curr_parts == 1 or (count == 1 and row_idx != slice_dim_idx): del_indices[row_idx] = count continue # Replace slice indices with order determined from slice positions along normal - if len(shape) == 2: + if row_idx == slice_dim_idx: + if len(shape) > 2: + raise WrapperError('Non-singular index precedes the slice index') row = self._frame_slc_ord frame_indices.T[row_idx, :] = row unique = np.unique(row) @@ -595,13 +600,13 @@ def image_shape(self): raise WrapperError("Number of slice indices and positions don't match") new_parts, leftover = divmod(curr_parts, count) allowed_val_counts = [new_parts * frames_per_part] - if len(shape) > 2: + if row_idx != slice_dim_idx: # Except for the slice dim, having a unique value for each frame is valid 
allowed_val_counts.append(n_frames)
                 if leftover != 0 or any(
                     np.count_nonzero(row == val) not in allowed_val_counts for val in unique
                 ):
-                    if len(shape) == 2:
+                    if row_idx == slice_dim_idx:
                         raise WrapperError('Missing slices from multiframe')
                     del_indices[row_idx] = count
                     continue
diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index 0402421626..b50535a4bb 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -488,10 +488,13 @@ def __init__(self, div, sid, ipp, iop):
         assert len(ipp_seq) == num_of_frames
     # create the DimensionIndexSequence
     dim_idx_seq = [DimIdxSeqElem()] * n_indices
+    # Add entry for InStackPositionNumber to DimensionIndexSequence
+    fcs_tag = pydicom.datadict.tag_for_keyword('FrameContentSequence')
+    isp_tag = pydicom.datadict.tag_for_keyword('InStackPositionNumber')
+    dim_idx_seq[slice_dim] = DimIdxSeqElem(isp_tag, fcs_tag)
     # add an entry for StackID into the DimensionIndexSequence
     if sid_dim is not None:
         sid_tag = pydicom.datadict.tag_for_keyword('StackID')
-        fcs_tag = pydicom.datadict.tag_for_keyword('FrameContentSequence')
         dim_idx_seq[sid_dim] = DimIdxSeqElem(sid_tag, fcs_tag)
     # create the PerFrameFunctionalGroupsSequence
     frames = [
@@ -546,6 +549,10 @@ def test_shape(self):
         div_seq = ((1, 1, 2),)
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64)
+        # Check 2D plus time
+        div_seq = ((1, 1, 1), (1, 1, 2), (1, 1, 3))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        assert MFW(fake_mf).image_shape == (32, 64, 1, 3)
         # Check 3D shape when StackID index is 0
         div_seq = ((1, 1), (1, 2), (1, 3), (1, 4))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))

From 0215ce5db008f32e6001335f2b4d4f39d5a0a346 Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Wed, 24 Jul 2024 18:33:56 -0700
Subject: [PATCH 105/203] BF+TST: Handle case with extra-spatial index that is
 unique per frame

Not sure if this ever actually happens in real multiframe data, but it
does in non-multiframe data, and I can imagine that if a
DimensionIndexSequence element references a per-frame AcquisitionTime
then this could happen.
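To make the case above concrete, here is a small sketch with hypothetical
DimensionIndexValues (two slices, three volumes, and a time-like index that is
unique for every frame), showing why such an index cannot partition the frames
by value and must instead have its size inferred:

    import numpy as np

    # Hypothetical DimensionIndexValues for 6 frames: slice index in column 0,
    # a per-frame-unique (e.g. time-derived) index in column 1
    frame_indices = np.array([(1, 1), (2, 2), (1, 3), (2, 4), (1, 5), (2, 6)])
    n_frames = len(frame_indices)
    for col in frame_indices.T:
        n_unique = len(np.unique(col))
        if n_unique == n_frames:
            # Every frame has its own value, so this dimension's extent is
            # ambiguous and must be inferred from the remaining partitions
            print('ambiguous size, inferred later')
        else:
            print(f'splits {n_frames} frames into {n_unique} parts')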
--- nibabel/nicom/dicomwrappers.py | 25 +++++++++++++------ nibabel/nicom/tests/test_dicomwrappers.py | 29 +++++++++++++++++++++++ 2 files changed, 47 insertions(+), 7 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 14041e631f..3743878700 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -598,21 +598,32 @@ def image_shape(self): unique = np.unique(row) if len(unique) != count: raise WrapperError("Number of slice indices and positions don't match") + elif count == n_frames: + if shape[-1] == 'remaining': + raise WrapperError('At most one index have ambiguous size') + shape.append('remaining') + continue new_parts, leftover = divmod(curr_parts, count) - allowed_val_counts = [new_parts * frames_per_part] - if row_idx != slice_dim_idx: - # Except for the slice dim, having a unique value for each frame is valid - allowed_val_counts.append(n_frames) - if leftover != 0 or any( - np.count_nonzero(row == val) not in allowed_val_counts for val in unique - ): + expected = new_parts * frames_per_part + if leftover != 0 or any(np.count_nonzero(row == val) != expected for val in unique): if row_idx == slice_dim_idx: raise WrapperError('Missing slices from multiframe') del_indices[row_idx] = count continue + if shape[-1] == 'remaining': + shape[-1] = new_parts + frames_per_part *= shape[-1] + new_parts = 1 frames_per_part *= count shape.append(count) curr_parts = new_parts + if shape[-1] == 'remaining': + if curr_parts > 1: + shape[-1] = curr_parts + curr_parts = 1 + else: + del_indices[len(shape)] = 1 + shape = shape[:-1] if del_indices: if curr_parts > 1: ns_failed = [k for k, v in del_indices.items() if v != 1] diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 0402421626..b50535a4bb 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -626,6 +626,35 @@ def test_shape(self): ) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3) + # Test invalid 4D indices + div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 4)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape + div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 2)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape + # Time index that is unique to each frame + div_seq = ((1, 1, 1), (1, 2, 2), (1, 1, 3), (1, 2, 4), (1, 1, 5), (1, 2, 6)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3) + div_seq = ( + (1, 1, 1, 1), + (1, 2, 2, 1), + (1, 1, 3, 1), + (1, 2, 4, 1), + (1, 1, 5, 1), + (1, 2, 6, 1), + (1, 1, 7, 2), + (1, 2, 8, 2), + (1, 1, 9, 2), + (1, 2, 10, 2), + (1, 1, 11, 2), + (1, 2, 12, 2), + ) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3, 2) def test_iop(self): # Test Image orient patient for multiframe From 259483f1f5412e4e8deb919b800b72abddccd439 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 23:24:53 -0700 Subject: [PATCH 106/203] TST: Expand test coverage for multiframe dicom shape determination --- nibabel/nicom/tests/test_dicomwrappers.py | 33 ++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git 
a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index b50535a4bb..2168476bb4 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -473,7 +473,6 @@ def __init__(self, div, sid, ipp, iop): frame_slc_indices = np.array(div_seq)[:, slice_dim] uniq_slc_indices = np.unique(frame_slc_indices) n_slices = len(uniq_slc_indices) - assert num_of_frames % n_slices == 0 iop_seq = [(0.0, 1.0, 0.0, 1.0, 0.0, 0.0) for _ in range(num_of_frames)] if ipp_seq is None: slc_locs = np.linspace(-1.0, 1.0, n_slices) @@ -579,6 +578,17 @@ def test_shape(self): div_seq = ((1, 1, 0), (1, 2, 0), (1, 1, 3), (1, 2, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 2, 2) + # Check number of IPP vals match the number of slices or we raise + frames = fake_mf['PerFrameFunctionalGroupsSequence'] + for frame in frames[1:]: + frame.PlanePositionSequence = frames[0].PlanePositionSequence[:] + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape + # Check we raise on missing slices + div_seq = ((1, 1, 0), (1, 2, 0), (1, 1, 1)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # check 3D shape when there is no StackID index div_seq = ((1,), (2,), (3,), (4,)) sid_seq = (1, 1, 1, 1) @@ -614,6 +624,11 @@ def test_shape(self): div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3) + # Check non-singular dimension preceding slice dim raises + div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 3)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0, slice_dim=2)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # Test with combo indices, here with the last two needing to be combined into # a single index corresponding to [(1, 1), (1, 1), (2, 1), (2, 1), (2, 2), (2, 2)] div_seq = ( @@ -655,6 +670,22 @@ def test_shape(self): ) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3, 2) + # Check we only allow one extra spatial dimension with unique val per frame + div_seq = ( + (1, 1, 1, 6), + (1, 2, 2, 5), + (1, 1, 3, 4), + (1, 2, 4, 3), + (1, 1, 5, 2), + (1, 2, 6, 1), + ) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape + # Check that having unique value per frame works with single volume + div_seq = ((1, 1, 1), (1, 2, 2), (1, 3, 3)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64, 3) def test_iop(self): # Test Image orient patient for multiframe From 52c31052e4f22ff7f0a01883129584c6091e9ac9 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Thu, 25 Jul 2024 09:58:19 -0700 Subject: [PATCH 107/203] TST+CLN: More slice ordering testing, minor cleanup --- nibabel/nicom/tests/test_dicomwrappers.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 2168476bb4..e01759c86a 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -388,7 +388,7 @@ def fake_frames(seq_name, field_name, value_seq, frame_seq=None): class Fake: pass - if frame_seq == None: + if frame_seq is None: frame_seq = 
[Fake() for _ in range(len(value_seq))]
     for value, fake_frame in zip(value_seq, frame_seq):
         fake_element = Fake()
@@ -868,6 +868,11 @@ def test_data_fake(self):
         sorted_data = data[..., [3, 1, 2, 0]]
         fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2)
         assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1)
+        # Check slice sorting with negative index / IPP correlation
+        fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0, flip_ipp_idx_corr=True))
+        sorted_data = data[..., [0, 2, 1, 3]]
+        fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2)
+        assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1)
         # 5D!
         dim_idxs = [
             [1, 4, 2, 1],

From 629dbb52e14e813203d1f9c355de95399fd70dda Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Thu, 25 Jul 2024 10:05:32 -0700
Subject: [PATCH 108/203] DOC: Add some notes to the changelog

---
 Changelog | 27 +++++++++++++++++++++++++++
 1 file changed, 27 insertions(+)

diff --git a/Changelog b/Changelog
index 6892951256..24e89095f3 100644
--- a/Changelog
+++ b/Changelog
@@ -25,6 +25,33 @@ Eric Larson (EL), Demian Wassermann, Stephan Gerhard and Ross Markello (RM).
 References like "pr/298" refer to github pull request numbers.

+Upcoming release (To be determined)
+===================================
+
+New features
+------------
+
+Enhancements
+------------
+  * Ability to read data from many multiframe DICOM files that previously generated errors
+
+Bug fixes
+---------
+  * Fixed multiframe DICOM issue where data could be flipped along slice dimension relative to the
+    affine
+  * Fixed multiframe DICOM issue where ``image_position`` and the translation component in the
+    ``affine`` could be incorrect
+
+Documentation
+-------------
+
+Maintenance
+-----------
+
+API changes and deprecations
+----------------------------
+
+
 5.2.1 (Monday 26 February 2024)
 ===============================

From fd56bf4abe195da9d351d64345381231ce7f7038 Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Mon, 12 Aug 2024 15:08:26 -0700
Subject: [PATCH 109/203] BF+ENH: Fixes to DICOM scaling, make frame filtering
 explicit

Fixes how we handle DICOM scaling, particularly for Philips and
multi-frame files. For Philips data, scale factors without defined units
should be avoided, and a private tag should instead be used to make image
intensities comparable across series. For multi-frame DICOM, it is
possible to have different scale factors (potentially coming from
different tags) per frame. We also prefer scale factors from a
RealWorldValueMapping, provided they have defined units.

The base Wrapper class now has a few new attributes and methods to
support this functionality. In particular, there is an attribute
`scale_factors` that provides an array of slope/intercept pairs, and a
method `get_unscaled_data` that returns the reordered/reshaped data
without the scaling applied. A `vendor` attribute was also added to
better support vendor-specific implementation details.

For the MultiframeWrapper, I also added an attribute `frame_order` that
exposes the order used to sort the frames, and this order is used to
return the `scale_factors` sorted to match.

While implementing this I kept bumping into issues due to the (implicit)
frame filtering that was happening in the `image_shape` property, so I
made this filtering explicit and configurable and moved it into the
class initialization.
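A usage sketch of the reworked wrapper API described above (the file path and
StackID value are illustrative only; the filter classes and accessors are the
ones added by this patch):

    import pydicom
    from nibabel.nicom import dicomwrappers as didw

    dcm = pydicom.dcmread('multiframe.dcm')  # illustrative path
    # Keep a single stack and drop derived isotropic DWI frames
    filters = [didw.FilterMultiStack(keep_id=1), didw.FilterDwiIso()]
    wrp = didw.wrapper_from_data(dcm, frame_filters=filters)
    print(wrp.vendor, wrp.image_shape)
    unscaled = wrp.get_unscaled_data()  # reordered/reshaped, no scaling applied
    factors = wrp.scale_factors        # slope/intercept pairs, one row or one per frame
    data = wrp.get_data()              # the same data with scaling applied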
--- nibabel/nicom/dicomwrappers.py | 410 +++++++++++++++++----- nibabel/nicom/tests/test_dicomwrappers.py | 363 ++++++++++++++----- nibabel/nicom/utils.py | 54 +++ 3 files changed, 636 insertions(+), 191 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 3743878700..3842248fd5 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -13,6 +13,7 @@ """ import operator +import re import warnings from functools import cached_property @@ -23,6 +24,7 @@ from ..openers import ImageOpener from . import csareader as csar from .dwiparams import B2q, nearest_pos_semi_def, q2bg +from .utils import Vendor, find_private_section, vendor_from_private pydicom = optional_package('pydicom')[0] @@ -59,7 +61,7 @@ def wrapper_from_file(file_like, *args, **kwargs): return wrapper_from_data(dcm_data) -def wrapper_from_data(dcm_data): +def wrapper_from_data(dcm_data, frame_filters=None): """Create DICOM wrapper from DICOM data object Parameters @@ -68,6 +70,9 @@ def wrapper_from_data(dcm_data): Object allowing attribute access, with DICOM attributes. Probably a dataset as read by ``pydicom``. + frame_filters + Optionally override the `frame_filters` used to create a `MultiFrameWrapper` + Returns ------- dcm_w : ``dicomwrappers.Wrapper`` or subclass @@ -76,9 +81,8 @@ def wrapper_from_data(dcm_data): sop_class = dcm_data.get('SOPClassUID') # try to detect what type of dicom object to wrap if sop_class == '1.2.840.10008.5.1.4.1.1.4.1': # Enhanced MR Image Storage - # currently only Philips is using Enhanced Multiframe DICOM - return MultiframeWrapper(dcm_data) - # Check for Siemens DICOM format types + return MultiframeWrapper(dcm_data, frame_filters) + # Check for non-enhanced (legacy) Siemens DICOM format types # Only Siemens will have data for the CSA header try: csa = csar.get_csa_header(dcm_data) @@ -103,6 +107,7 @@ class Wrapper: Methods: * get_data() + * get_unscaled_data() * get_pixel_array() * is_same_series(other) * __getitem__ : return attributes from `dcm_data` @@ -120,6 +125,8 @@ class Wrapper: * image_position : sequence length 3 * slice_indicator : float * series_signature : tuple + * scale_factors : (N, 2) array + * vendor : Vendor """ is_csa = False @@ -136,10 +143,34 @@ def __init__(self, dcm_data): dcm_data : object object should allow 'get' and '__getitem__' access. Usually this will be a ``dicom.dataset.Dataset`` object resulting from reading a - DICOM file, but a dictionary should also work. + DICOM file. 
""" self.dcm_data = dcm_data + @cached_property + def vendor(self): + """The vendor of the instrument that produced the DICOM""" + # Look at manufacturer tag first + mfgr = self.get('Manufacturer') + if mfgr: + if re.search('Siemens', mfgr, re.IGNORECASE): + return Vendor.SIEMENS + if re.search('Philips', mfgr, re.IGNORECASE): + return Vendor.PHILIPS + if re.search('GE Medical', mfgr, re.IGNORECASE): + return Vendor.GE + # Next look at UID prefixes + for uid_src in ('StudyInstanceUID', 'SeriesInstanceUID', 'SOPInstanceUID'): + uid = str(self.get(uid_src)) + if uid.startswith(('1.3.12.2.1007.', '1.3.12.2.1107.')): + return Vendor.SIEMENS + if uid.startswith(('1.3.46', '1.3.12.2.1017')): + return Vendor.PHILIPS + if uid.startswith('1.2.840.113619'): + return Vendor.GE + # Finally look for vendor specific private blocks + return vendor_from_private(self.dcm_data) + @cached_property def image_shape(self): """The array shape as it will be returned by ``get_data()``""" @@ -315,14 +346,30 @@ def affine(self): return aff def get_pixel_array(self): - """Return unscaled pixel array from DICOM""" + """Return raw pixel array without reshaping or scaling + + Returns + ------- + data : array + array with raw pixel data from DICOM + """ data = self.dcm_data.get('pixel_array') if data is None: raise WrapperError('Cannot find data in DICOM') return data + def get_unscaled_data(self): + """Return pixel array that is potentially reshaped, but without any scaling + + Returns + ------- + data : array + array with raw pixel data from DICOM + """ + return self.get_pixel_array() + def get_data(self): - """Get scaled image data from DICOMs + """Get potentially scaled and reshaped image data from DICOMs We return the data as DICOM understands it, first dimension is rows, second dimension is columns @@ -333,7 +380,7 @@ def get_data(self): array with data as scaled from any scaling in the DICOM fields. 
""" - return self._scale_data(self.get_pixel_array()) + return self._scale_data(self.get_unscaled_data()) def is_same_series(self, other): """Return True if `other` appears to be in same series @@ -372,11 +419,86 @@ def is_same_series(self, other): return False return True + @cached_property + def scale_factors(self): + """Return (2, N) array of slope/intercept pairs""" + scaling = self._get_best_scale_factor(self.dcm_data) + if scaling is None: + if self.vendor == Vendor.PHILIPS: + warnings.warn( + 'Unable to find Philips private scale factor, cross-series comparisons may be invalid' + ) + scaling = (1, 0) + return np.array((scaling,)) + + def _get_rwv_scale_factor(self, dcm_data): + """Return the first set of 'real world' scale factors with defined units""" + rw_seq = dcm_data.get('RealWorldValueMappingSequence') + if rw_seq: + for rw_map in rw_seq: + try: + units = rw_map.MeasurementUnitsCodeSequence[0].CodeMeaning + except (AttributeError, IndexError): + continue + if units not in ('', 'no units', 'UNDEFINED'): + return ( + rw_map.get('RealWorldValueSlope', 1), + rw_map.get('RealWorldValueIntercept', 0), + ) + + def _get_legacy_scale_factor(self, dcm_data): + """Return scale factors from older 'Modality LUT' macro + + For Philips data we require RescaleType is defined and not set to 'normalized' + """ + pix_trans_seq = dcm_data.get('PixelValueTransformationSequence') + if pix_trans_seq is not None: + pix_trans = pix_trans_seq[0] + if self.vendor != Vendor.PHILIPS or pix_trans.get('RescaleType', 'US') not in ( + '', + 'US', + 'normalized', + ): + return (pix_trans.get('RescaleSlope', 1), pix_trans.get('RescaleIntercept', 0)) + if ( + dcm_data.get('RescaleSlope') is not None + or dcm_data.get('RescaleIntercept') is not None + ): + if self.vendor != Vendor.PHILIPS or dcm_data.get('RescaleType', 'US') not in ( + '', + 'US', + 'normalized', + ): + return (dcm_data.get('RescaleSlope', 1), dcm_data.get('RescaleIntercept', 0)) + + def _get_philips_scale_factor(self, dcm_data): + """Return scale factors from Philips private element + + If we don't have any other scale factors that are tied to real world units, then + this is the best scaling to use to enable cross-series comparisons + """ + offset = find_private_section(dcm_data, 0x2005, 'Philips MR Imaging DD 001') + priv_scale = None if offset is None else dcm_data.get((0x2005, offset + 0xE)) + if priv_scale is not None: + return (priv_scale.value, 0.0) + + def _get_best_scale_factor(self, dcm_data): + """Return the most appropriate scale factor found or None""" + scaling = self._get_rwv_scale_factor(dcm_data) + if scaling is not None: + return scaling + scaling = self._get_legacy_scale_factor(dcm_data) + if scaling is not None: + return scaling + if self.vendor == Vendor.PHILIPS: + scaling = self._get_philips_scale_factor(dcm_data) + if scaling is not None: + return scaling + def _scale_data(self, data): # depending on pydicom and dicom files, values might need casting from # Decimal to float - scale = float(self.get('RescaleSlope', 1)) - offset = float(self.get('RescaleIntercept', 0)) + scale, offset = self.scale_factors[0] return self._apply_scale_offset(data, scale, offset) def _apply_scale_offset(self, data, scale, offset): @@ -407,6 +529,71 @@ def b_vector(self): return q2bg(q_vec)[1] +class FrameFilter: + """Base class for defining how to filter out (ignore) frames from a multiframe file + + It is guaranteed that the `applies` method will on a dataset before the `keep` method + is called on any of the frames inside. 
+ """ + + def applies(self, dcm_wrp) -> bool: + """Returns true if the filter should be applied to a dataset""" + return True + + def keep(self, frame_data) -> bool: + """Return true if the frame should be kept""" + raise NotImplementedError + + +class FilterMultiStack(FrameFilter): + """Filter out all but one `StackID`""" + + def __init__(self, keep_id=None): + self._keep_id = keep_id + + def applies(self, dcm_wrp) -> bool: + first_fcs = dcm_wrp.frames[0].get('FrameContentSequence', (None,))[0] + if first_fcs is None or not hasattr(first_fcs, 'StackID'): + return False + stack_ids = {frame.FrameContentSequence[0].StackID for frame in dcm_wrp.frames} + if self._keep_id is not None: + if self._keep_id not in stack_ids: + raise WrapperError('Explicitly requested StackID not found') + self._selected = self._keep_id + if len(stack_ids) > 1: + if self._keep_id is None: + warnings.warn( + 'A multi-stack file was passed without an explicit filter, just using lowest StackID' + ) + self._selected = sorted(stack_ids)[0] + return True + return False + + def keep(self, frame) -> bool: + return frame.FrameContentSequence[0].StackID == self._selected + + +class FilterDwiIso(FrameFilter): + """Filter out derived ISOTROPIC frames from DWI series""" + + def applies(self, dcm_wrp) -> bool: + if not hasattr(dcm_wrp.frames[0], 'MRDiffusionSequence'): + return False + diff_dirs = { + f.MRDiffusionSequence[0].get('DiffusionDirectionality') for f in dcm_wrp.frames + } + if len(diff_dirs) > 1 and 'ISOTROPIC' in diff_dirs: + warnings.warn('Derived images found and removed') + return True + return False + + def keep(self, frame) -> bool: + return frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC' + + +DEFUALT_FRAME_FILTERS = (FilterMultiStack(), FilterDwiIso()) + + class MultiframeWrapper(Wrapper): """Wrapper for Enhanced MR Storage SOP Class @@ -436,17 +623,20 @@ class MultiframeWrapper(Wrapper): Methods ------- + vendor(self) + frame_order(self) image_shape(self) image_orient_patient(self) voxel_sizes(self) image_position(self) series_signature(self) + scale_factors(self) get_data(self) """ is_multiframe = True - def __init__(self, dcm_data): + def __init__(self, dcm_data, frame_filters=None): """Initializes MultiframeWrapper Parameters @@ -454,10 +644,13 @@ def __init__(self, dcm_data): dcm_data : object object should allow 'get' and '__getitem__' access. Usually this will be a ``dicom.dataset.Dataset`` object resulting from reading a - DICOM file, but a dictionary should also work. + DICOM file. + + frame_filters : Iterable of FrameFilter + defines which frames inside the dataset should be ignored. If None then + `dicomwrappers.DEFAULT_FRAME_FILTERS` will be used. 
""" Wrapper.__init__(self, dcm_data) - self.dcm_data = dcm_data self.frames = dcm_data.get('PerFrameFunctionalGroupsSequence') try: self.frames[0] @@ -467,8 +660,19 @@ def __init__(self, dcm_data): self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0] except TypeError: raise WrapperError('SharedFunctionalGroupsSequence is empty.') + # Apply frame filters one at a time in the order provided + if frame_filters is None: + frame_filters = DEFUALT_FRAME_FILTERS + frame_filters = [filt for filt in frame_filters if filt.applies(self)] + for filt in frame_filters: + self.frames = [f for f in self.frames if filt.keep(f)] + # Make sure there is only one StackID remaining + first_fcs = self.frames[0].get('FrameContentSequence', (None,))[0] + if first_fcs is not None and hasattr(first_fcs, 'StackID'): + if len({frame.FrameContentSequence[0].StackID for frame in self.frames}) > 1: + raise WrapperError('More than one StackID remains after filtering') # Try to determine slice order and minimal image position patient - self._frame_slc_ord = self._ipp = None + self._frame_slc_ord = self._ipp = self._slice_spacing = None try: frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] except AttributeError: @@ -485,8 +689,29 @@ def __init__(self, dcm_data): val: order for val, order in zip(uniq_slc_pos, np.argsort(uniq_slc_pos)) } self._frame_slc_ord = [pos_ord_map[pos] for pos in rnd_slc_pos] + if len(self._frame_slc_ord) > 1: + self._slice_spacing = ( + frame_slc_pos[self._frame_slc_ord[1]] - frame_slc_pos[self._frame_slc_ord[0]] + ) self._ipp = frame_ipps[np.argmin(frame_slc_pos)] - self._shape = None + self._frame_indices = None + + @cached_property + def vendor(self): + """The vendor of the instrument that produced the DICOM""" + vendor = super().vendor + if vendor is not None: + return vendor + vendor = vendor_from_private(self.shared) + if vendor is not None: + return vendor + return vendor_from_private(self.frames[0]) + + @cached_property + def frame_order(self): + if self._frame_indices is None: + _ = self.image_shape + return np.lexsort(self._frame_indices.T) @cached_property def image_shape(self): @@ -519,68 +744,20 @@ def image_shape(self): rows, cols = self.get('Rows'), self.get('Columns') if None in (rows, cols): raise WrapperError('Rows and/or Columns are empty.') - - # Check number of frames - first_frame = self.frames[0] - n_frames = self.get('NumberOfFrames') - # some Philips may have derived images appended - has_derived = False - if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): - # DWI image may include derived isotropic, ADC or trace volume - try: - aniso_frames = pydicom.Sequence() - aniso_slc_ord = [] - for slc_ord, frame in zip(self._frame_slc_ord, self.frames): - if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC': - aniso_frames.append(frame) - aniso_slc_ord.append(slc_ord) - # Image contains DWI volumes followed by derived images; remove derived images - if len(aniso_frames) != 0: - self.frames = aniso_frames - self._frame_slc_ord = aniso_slc_ord - except IndexError: - # Sequence tag is found but missing items! 
-                raise WrapperError('Diffusion file missing information')
-            except AttributeError:
-                # DiffusionDirectionality tag is not required
-                pass
-            else:
-                if n_frames != len(self.frames):
-                    warnings.warn('Derived images found and removed')
-                    n_frames = len(self.frames)
-                    has_derived = True
-
-        assert len(self.frames) == n_frames
-        frame_indices = np.array(
-            [frame.FrameContentSequence[0].DimensionIndexValues for frame in self.frames]
-        )
-        # Check that there is only one multiframe stack index
-        stack_ids = {frame.FrameContentSequence[0].StackID for frame in self.frames}
-        if len(stack_ids) > 1:
-            raise WrapperError(
-                'File contains more than one StackID. Cannot handle multi-stack files'
+        # Check number of frames, initialize array of frame indices
+        n_frames = len(self.frames)
+        try:
+            frame_indices = np.array(
+                [frame.FrameContentSequence[0].DimensionIndexValues for frame in self.frames]
             )
-        # Determine if one of the dimension indices refers to the stack id
-        dim_seq = [dim.DimensionIndexPointer for dim in self.get('DimensionIndexSequence')]
-        stackid_tag = pydicom.datadict.tag_for_keyword('StackID')
-        # remove the stack id axis if present
-        if stackid_tag in dim_seq:
-            stackid_dim_idx = dim_seq.index(stackid_tag)
-            frame_indices = np.delete(frame_indices, stackid_dim_idx, axis=1)
-            dim_seq.pop(stackid_dim_idx)
-        if has_derived:
-            # derived volume is included
-            derived_tag = pydicom.datadict.tag_for_keyword('DiffusionBValue')
-            if derived_tag not in dim_seq:
-                raise WrapperError('Missing information, cannot remove indices with confidence.')
-            derived_dim_idx = dim_seq.index(derived_tag)
-            frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1)
-            dim_seq.pop(derived_dim_idx)
+        except AttributeError:
+            raise WrapperError("Can't find frame 'DimensionIndexValues'")
         # Determine the shape and which indices to use
         shape = [rows, cols]
         curr_parts = n_frames
         frames_per_part = 1
         del_indices = {}
+        dim_seq = [dim.DimensionIndexPointer for dim in self.get('DimensionIndexSequence')]
         stackpos_tag = pydicom.datadict.tag_for_keyword('InStackPositionNumber')
         slice_dim_idx = dim_seq.index(stackpos_tag)
         for row_idx, row in enumerate(frame_indices.T):
@@ -684,12 +861,15 @@ def voxel_sizes(self):
         except AttributeError:
             raise WrapperError('Not enough data for pixel spacing')
         pix_space = pix_measures.PixelSpacing
-        try:
-            zs = pix_measures.SliceThickness
-        except AttributeError:
-            zs = self.get('SpacingBetweenSlices')
-        if zs is None:
-            raise WrapperError('Not enough data for slice thickness')
+        if self._slice_spacing is not None:
+            zs = self._slice_spacing
+        else:
+            try:
+                zs = pix_measures.SliceThickness
+            except AttributeError:
+                zs = self.get('SpacingBetweenSlices')
+            if zs is None:
+                raise WrapperError('Not enough data for slice thickness')
         # Ensure values are float rather than Decimal
         return tuple(map(float, list(pix_space) + [zs]))

@@ -710,27 +890,63 @@ def series_signature(self):
         signature['vox'] = (self.voxel_sizes, none_or_close)
         return signature

-    def get_data(self):
+    @cached_property
+    def scale_factors(self):
+        """Return `(N, 2)` array of slope/intercept pairs
+
+        If there is a single global scale factor then `N` will be one, otherwise it will
+        be the number of frames
+        """
+        # Look for shared / global RWV scale factor first
+        shared_scale = self._get_rwv_scale_factor(self.shared)
+        if shared_scale is not None:
+            return np.array([shared_scale])
+        shared_scale = self._get_rwv_scale_factor(self.dcm_data)
+        if shared_scale is not None:
+            return np.array([shared_scale])
+        # Try pulling out
best scale factors from each individual frame + frame_scales = [self._get_best_scale_factor(f) for f in self.frames] + if any(s is not None for s in frame_scales): + if any(s is None for s in frame_scales): + if self.vendor == Vendor.PHILIPS: + warnings.warn( + 'Unable to find Philips private scale factor, cross-series comparisons may be invalid' + ) + frame_scales = [s if s is not None else (1, 0) for s in frame_scales] + if all(s == frame_scales[0] for s in frame_scales[1:]): + return np.array([frame_scales[0]]) + return np.array(frame_scales)[self.frame_order] + # Finally look for shared non-RWV scale factors + shared_scale = self._get_best_scale_factor(self.shared) + if shared_scale is not None: + return np.array([shared_scale]) + shared_scale = self._get_best_scale_factor(self.dcm_data) + if shared_scale is None: + if self.vendor == Vendor.PHILIPS: + warnings.warn( + 'Unable to find Philips private scale factor, cross-series comparisons may be invalid' + ) + shared_scale = (1, 0) + return np.array([shared_scale]) + + def get_unscaled_data(self): shape = self.image_shape if shape is None: raise WrapperError('No valid information for image shape') data = self.get_pixel_array() - # Roll frames axis to last + # Roll frames axis to last and reorder if len(data.shape) > 2: - data = data.transpose((1, 2, 0)) - # Sort frames with first index changing fastest, last slowest - sorted_indices = np.lexsort(self._frame_indices.T) - data = data[..., sorted_indices] - data = data.reshape(shape, order='F') - return self._scale_data(data) + data = data.transpose((1, 2, 0))[..., self.frame_order] + return data.reshape(shape, order='F') def _scale_data(self, data): - pix_trans = getattr(self.frames[0], 'PixelValueTransformationSequence', None) - if pix_trans is None: - return super()._scale_data(data) - scale = float(pix_trans[0].RescaleSlope) - offset = float(pix_trans[0].RescaleIntercept) - return self._apply_scale_offset(data, scale, offset) + scale_factors = self.scale_factors + if scale_factors.shape[0] == 1: + scale, offset = scale_factors[0] + return self._apply_scale_offset(data, scale, offset) + orig_shape = data.shape + data = data.reshape(data.shape[:2] + (len(self.frames),)) + return (data * scale_factors[:, 0] + scale_factors[:, 1]).reshape(orig_shape) class SiemensWrapper(Wrapper): @@ -757,7 +973,7 @@ def __init__(self, dcm_data, csa_header=None): object should allow 'get' and '__getitem__' access. If `csa_header` is None, it should also be possible to extract a CSA header from `dcm_data`. Usually this will be a ``dicom.dataset.Dataset`` object - resulting from reading a DICOM file. A dict should also work. + resulting from reading a DICOM file. csa_header : None or mapping, optional mapping giving values for Siemens CSA image sub-header. If None, we try and read the CSA information from `dcm_data`. 
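+
+            As a sketch, a header parsed ahead of time can be injected
+            explicitly (``csareader.get_csa_header`` is the CSA reader in
+            ``nibabel.nicom``)::
+
+                hdr = csareader.get_csa_header(dcm_data, 'image')
+                dw = SiemensWrapper(dcm_data, csa_header=hdr)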
@@ -773,6 +989,11 @@ def __init__(self, dcm_data, csa_header=None):
             csa_header = {}
         self.csa_header = csa_header

+    @cached_property
+    def vendor(self):
+        """The vendor of the instrument that produced the DICOM"""
+        return Vendor.SIEMENS
+
     @cached_property
     def slice_normal(self):
         # The std_slice_normal comes from the cross product of the directions
@@ -964,7 +1185,7 @@ def image_position(self):
             Q = np.fliplr(iop) * pix_spacing
             return ipp + np.dot(Q, vox_trans_fixes[:, None]).ravel()

-    def get_data(self):
+    def get_unscaled_data(self):
         """Get scaled image data from DICOMs

         Resorts data block from mosaic to 3D
@@ -1007,8 +1228,7 @@ def get_data(self):
         # pool mosaic-generated dims
         v3 = v4.reshape((n_slice_rows, n_slice_cols, n_blocks))
         # delete any padding slices
-        v3 = v3[..., :n_mosaic]
-        return self._scale_data(v3)
+        return v3[..., :n_mosaic]


 def none_or_close(val1, val2, rtol=1e-5, atol=1e-6):
diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index e01759c86a..0556fc63cc 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -1,7 +1,7 @@
 """Testing DICOM wrappers"""

 import gzip
-from copy import copy
+from copy import deepcopy
 from decimal import Decimal
 from hashlib import sha1
 from os.path import dirname
@@ -11,6 +11,7 @@
 import numpy as np
 import pytest
 from numpy.testing import assert_array_almost_equal, assert_array_equal
+from pydicom.dataset import Dataset

 from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data
 from ...volumeutils import endian_codes
@@ -63,8 +64,8 @@ def test_wrappers():
     # test direct wrapper calls
     # first with empty or minimal data
     multi_minimal = {
-        'PerFrameFunctionalGroupsSequence': [None],
-        'SharedFunctionalGroupsSequence': [None],
+        'PerFrameFunctionalGroupsSequence': [Dataset()],
+        'SharedFunctionalGroupsSequence': [Dataset()],
     }
     for maker, args in (
         (didw.Wrapper, ({},)),
@@ -163,10 +164,10 @@ def test_wrapper_from_data():
     fake_data['SOPClassUID'] = '1.2.840.10008.5.1.4.1.1.4.1'
     with pytest.raises(didw.WrapperError):
         didw.wrapper_from_data(fake_data)
-    fake_data['PerFrameFunctionalGroupsSequence'] = [None]
+    fake_data['PerFrameFunctionalGroupsSequence'] = [Dataset()]
    with pytest.raises(didw.WrapperError):
        didw.wrapper_from_data(fake_data)
-    fake_data['SharedFunctionalGroupsSequence'] = [None]
+    fake_data['SharedFunctionalGroupsSequence'] = [Dataset()]
     # minimal set should now be met
     dw = didw.wrapper_from_data(fake_data)
     assert dw.is_multiframe
@@ -384,16 +385,17 @@ def fake_frames(seq_name, field_name, value_seq, frame_seq=None):
        each element in list is obj.<seq_name>[0].<field_name>
= value_seq[n] for n in range(N) """ - - class Fake: - pass - if frame_seq is None: - frame_seq = [Fake() for _ in range(len(value_seq))] + frame_seq = [Dataset() for _ in range(len(value_seq))] for value, fake_frame in zip(value_seq, frame_seq): - fake_element = Fake() + if value is None: + continue + if hasattr(fake_frame, seq_name): + fake_element = getattr(fake_frame, seq_name)[0] + else: + fake_element = Dataset() + setattr(fake_frame, seq_name, [fake_element]) setattr(fake_element, field_name, value) - setattr(fake_frame, seq_name, [fake_element]) return frame_seq @@ -434,27 +436,32 @@ def __repr__(self): attr_strs.append(f'{attr}={getattr(self, attr)}') return f"{self.__class__.__name__}({', '.join(attr_strs)})" - class DimIdxSeqElem(PrintBase): + class DimIdxSeqElem(Dataset): def __init__(self, dip=(0, 0), fgp=None): + super().__init__() self.DimensionIndexPointer = dip if fgp is not None: self.FunctionalGroupPointer = fgp - class FrmContSeqElem(PrintBase): + class FrmContSeqElem(Dataset): def __init__(self, div, sid): + super().__init__() self.DimensionIndexValues = div self.StackID = sid - class PlnPosSeqElem(PrintBase): + class PlnPosSeqElem(Dataset): def __init__(self, ipp): + super().__init__() self.ImagePositionPatient = ipp - class PlnOrientSeqElem(PrintBase): + class PlnOrientSeqElem(Dataset): def __init__(self, iop): + super().__init__() self.ImageOrientationPatient = iop - class PerFrmFuncGrpSeqElem(PrintBase): + class PerFrmFuncGrpSeqElem(Dataset): def __init__(self, div, sid, ipp, iop): + super().__init__() self.FrameContentSequence = [FrmContSeqElem(div, sid)] self.PlanePositionSequence = [PlnPosSeqElem(ipp)] self.PlaneOrientationSequence = [PlnOrientSeqElem(iop)] @@ -473,7 +480,7 @@ def __init__(self, div, sid, ipp, iop): frame_slc_indices = np.array(div_seq)[:, slice_dim] uniq_slc_indices = np.unique(frame_slc_indices) n_slices = len(uniq_slc_indices) - iop_seq = [(0.0, 1.0, 0.0, 1.0, 0.0, 0.0) for _ in range(num_of_frames)] + iop_seq = [[0.0, 1.0, 0.0, 1.0, 0.0, 0.0] for _ in range(num_of_frames)] if ipp_seq is None: slc_locs = np.linspace(-1.0, 1.0, n_slices) if flip_ipp_idx_corr: @@ -481,7 +488,7 @@ def __init__(self, div, sid, ipp, iop): slc_idx_loc = { div_idx: slc_locs[arr_idx] for arr_idx, div_idx in enumerate(np.sort(uniq_slc_indices)) } - ipp_seq = [(-1.0, -1.0, slc_idx_loc[idx]) for idx in frame_slc_indices] + ipp_seq = [[-1.0, -1.0, slc_idx_loc[idx]] for idx in frame_slc_indices] else: assert flip_ipp_idx_corr is False # caller can flip it themselves assert len(ipp_seq) == num_of_frames @@ -507,38 +514,37 @@ def __init__(self, div, sid, ipp, iop): } +class FakeDataset(Dataset): + pixel_array = None + + class TestMultiFrameWrapper(TestCase): # Test MultiframeWrapper - MINIMAL_MF = { - # Minimal contents of dcm_data for this wrapper - 'PerFrameFunctionalGroupsSequence': [None], - 'SharedFunctionalGroupsSequence': [None], - } + # Minimal contents of dcm_data for this wrapper + MINIMAL_MF = FakeDataset() + MINIMAL_MF.PerFrameFunctionalGroupsSequence = [Dataset()] + MINIMAL_MF.SharedFunctionalGroupsSequence = [Dataset()] WRAPCLASS = didw.MultiframeWrapper @dicom_test def test_shape(self): # Check the shape algorithm - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) # No rows, cols, raise WrapperError with pytest.raises(didw.WrapperError): dw.image_shape - fake_mf['Rows'] = 64 + fake_mf.Rows = 64 with pytest.raises(didw.WrapperError): dw.image_shape fake_mf.pop('Rows') - fake_mf['Columns'] = 64 + 
fake_mf.Columns = 64
         with pytest.raises(didw.WrapperError):
             dw.image_shape
-        fake_mf['Rows'] = 32
-        # Missing frame data, raise AssertionError
-        with pytest.raises(AssertionError):
-            dw.image_shape
-        fake_mf['NumberOfFrames'] = 4
-        # PerFrameFunctionalGroupsSequence does not match NumberOfFrames
-        with pytest.raises(AssertionError):
+        fake_mf.Rows = 32
+        # No frame data raises WrapperError
+        with pytest.raises(didw.WrapperError):
             dw.image_shape
         # check 2D shape with StackID index is 0
         div_seq = ((1, 1),)
@@ -556,11 +562,32 @@ def test_shape(self):
         div_seq = ((1, 1), (1, 2), (1, 3), (1, 4))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64, 4)
-        # Check stack number matching when StackID index is 0
+        # Check for warning when implicitly dropping stacks
         div_seq = ((1, 1), (1, 2), (1, 3), (2, 4))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        with pytest.warns(
+            UserWarning,
+            match='A multi-stack file was passed without an explicit filter, just using lowest StackID',
+        ):
+            assert MFW(fake_mf).image_shape == (32, 64, 3)
+        # No warning if we explicitly select that StackID to keep
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(1),)).image_shape == (32, 64, 3)
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(2),)).image_shape == (32, 64)
+        # Stack filtering is the same when StackID is not an index
+        div_seq = ((1,), (2,), (3,), (4,))
+        sid_seq = (1, 1, 1, 2)
+        fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq))
+        with pytest.warns(
+            UserWarning,
+            match='A multi-stack file was passed without an explicit filter, just using lowest StackID',
+        ):
+            assert MFW(fake_mf).image_shape == (32, 64, 3)
+        # No warning if we explicitly select that StackID to keep
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(1),)).image_shape == (32, 64, 3)
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(2),)).image_shape == (32, 64)
+        # Check for error when explicitly requested StackID is missing
         with pytest.raises(didw.WrapperError):
-            MFW(fake_mf).image_shape
+            MFW(fake_mf, frame_filters=(didw.FilterMultiStack(3),))
         # Make some fake frame data for 4D when StackID index is 0
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 3))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
@@ -568,8 +595,12 @@ def test_shape(self):
         # Check stack number matching for 4D when StackID index is 0
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (2, 2, 3))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
-        with pytest.raises(didw.WrapperError):
-            MFW(fake_mf).image_shape
+        with pytest.warns(
+            UserWarning,
+            match='A multi-stack file was passed without an explicit filter, just using lowest StackID',
+        ):
+            with pytest.raises(didw.WrapperError):
+                MFW(fake_mf).image_shape
         # Check indices can be non-contiguous when StackID index is 0
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 3), (1, 2, 3))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
@@ -579,7 +610,7 @@ def test_shape(self):
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64, 2, 2)
         # Check number of IPP vals match the number of slices or we raise
-        frames = fake_mf['PerFrameFunctionalGroupsSequence']
+        frames = fake_mf.PerFrameFunctionalGroupsSequence
         for frame in frames[1:]:
             frame.PlanePositionSequence = frames[0].PlanePositionSequence[:]
         with pytest.raises(didw.WrapperError):
@@ -594,12 +625,6 @@ def test_shape(self):
         sid_seq = (1, 1, 1, 1)
fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) assert MFW(fake_mf).image_shape == (32, 64, 4) - # check 3D stack number matching when there is no StackID index - div_seq = ((1,), (2,), (3,), (4,)) - sid_seq = (1, 1, 1, 2) - fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - with pytest.raises(didw.WrapperError): - MFW(fake_mf).image_shape # check 4D shape when there is no StackID index div_seq = ((1, 1), (2, 1), (1, 2), (2, 2), (1, 3), (2, 3)) sid_seq = (1, 1, 1, 1, 1, 1) @@ -609,8 +634,12 @@ def test_shape(self): div_seq = ((1, 1), (2, 1), (1, 2), (2, 2), (1, 3), (2, 3)) sid_seq = (1, 1, 1, 1, 1, 2) fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - with pytest.raises(didw.WrapperError): - MFW(fake_mf).image_shape + with pytest.warns( + UserWarning, + match='A multi-stack file was passed without an explicit filter, just using lowest StackID', + ): + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # check 3D shape when StackID index is 1 div_seq = ((1, 1), (2, 1), (3, 1), (4, 1)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) @@ -618,8 +647,11 @@ def test_shape(self): # Check stack number matching when StackID index is 1 div_seq = ((1, 1), (2, 1), (3, 2), (4, 1)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) - with pytest.raises(didw.WrapperError): - MFW(fake_mf).image_shape + with pytest.warns( + UserWarning, + match='A multi-stack file was passed without an explicit filter, just using lowest StackID', + ): + assert MFW(fake_mf).image_shape == (32, 64, 3) # Make some fake frame data for 4D when StackID index is 1 div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) @@ -689,7 +721,7 @@ def test_shape(self): def test_iop(self): # Test Image orient patient for multiframe - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) with pytest.raises(didw.WrapperError): @@ -698,56 +730,56 @@ def test_iop(self): fake_frame = fake_frames( 'PlaneOrientationSequence', 'ImageOrientationPatient', [[0, 1, 0, 1, 0, 0]] )[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] + fake_mf.SharedFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) - fake_mf['SharedFunctionalGroupsSequence'] = [None] + fake_mf.SharedFunctionalGroupsSequence = [Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_orient_patient - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] + fake_mf.PerFrameFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) def test_voxel_sizes(self): # Test voxel size calculation - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) with pytest.raises(didw.WrapperError): dw.voxel_sizes # Make a fake frame fake_frame = fake_frames('PixelMeasuresSequence', 'PixelSpacing', [[2.1, 3.2]])[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] + fake_mf.SharedFunctionalGroupsSequence = [fake_frame] # Still not enough, we lack information for slice distances with pytest.raises(didw.WrapperError): MFW(fake_mf).voxel_sizes # This can come from SpacingBetweenSlices or frame SliceThickness - fake_mf['SpacingBetweenSlices'] = 4.3 + fake_mf.SpacingBetweenSlices = 4.3 assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 4.3]) # If both, prefer SliceThickness 
fake_frame.PixelMeasuresSequence[0].SliceThickness = 5.4 assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Just SliceThickness is OK - del fake_mf['SpacingBetweenSlices'] + del fake_mf.SpacingBetweenSlices assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Removing shared leads to error again - fake_mf['SharedFunctionalGroupsSequence'] = [None] + fake_mf.SharedFunctionalGroupsSequence = [Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).voxel_sizes # Restoring to frames makes it work again - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] + fake_mf.PerFrameFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Decimals in any field are OK fake_frame = fake_frames( 'PixelMeasuresSequence', 'PixelSpacing', [[Decimal('2.1'), Decimal('3.2')]] )[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] - fake_mf['SpacingBetweenSlices'] = Decimal('4.3') + fake_mf.SharedFunctionalGroupsSequence = [fake_frame] + fake_mf.SpacingBetweenSlices = Decimal('4.3') assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 4.3]) fake_frame.PixelMeasuresSequence[0].SliceThickness = Decimal('5.4') assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) def test_image_position(self): # Test image_position property for multiframe - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) with pytest.raises(didw.WrapperError): @@ -758,12 +790,12 @@ def test_image_position(self): frames = fake_frames( 'PlanePositionSequence', 'ImagePositionPatient', [[-2.0, 3.0, 7]], frames ) - fake_mf['SharedFunctionalGroupsSequence'] = frames + fake_mf.SharedFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) - fake_mf['SharedFunctionalGroupsSequence'] = [None] + fake_mf.SharedFunctionalGroupsSequence = [Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_position - fake_mf['PerFrameFunctionalGroupsSequence'] = frames + fake_mf.PerFrameFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) # Check lists of Decimals work frames[0].PlanePositionSequence[0].ImagePositionPatient = [ @@ -775,7 +807,7 @@ def test_image_position(self): frames = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', [iop] * 2) ipps = [[-2.0, 3.0, 7], [-2.0, 3.0, 6]] frames = fake_frames('PlanePositionSequence', 'ImagePositionPatient', ipps, frames) - fake_mf['PerFrameFunctionalGroupsSequence'] = frames + fake_mf.PerFrameFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 6]) @dicom_test @@ -809,9 +841,9 @@ def test_slicethickness_fallback(self): def test_data_derived_shape(self): # Test 4D diffusion data with an additional trace volume included # Excludes the trace volume and generates the correct shape - dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) with pytest.warns(UserWarning, match='Derived images found and removed'): - assert dw.image_shape == (96, 96, 60, 33) + dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) + assert dw.image_shape == (96, 96, 60, 33) @dicom_test @needs_nibabel_data('dcm_qa_xa30') @@ -831,7 +863,7 @@ def test_data_unreadable_private_headers(self): @dicom_test def test_data_fake(self): # Test algorithm for get_data - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) # Fails - no shape @@ -843,8 +875,8 @@ def test_data_fake(self): with 
pytest.raises(didw.WrapperError): dw.get_data() # Make shape and indices - fake_mf['Rows'] = 2 - fake_mf['Columns'] = 3 + fake_mf.Rows = 2 + fake_mf.Columns = 3 dim_idxs = ((1, 1), (1, 2), (1, 3), (1, 4)) fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0)) assert MFW(fake_mf).image_shape == (2, 3, 4) @@ -854,24 +886,24 @@ def test_data_fake(self): # Add data - 3D data = np.arange(24).reshape((2, 3, 4)) # Frames dim is first for some reason - fake_mf['pixel_array'] = np.rollaxis(data, 2) + object.__setattr__(fake_mf, 'pixel_array', np.rollaxis(data, 2)) # Now it should work dw = MFW(fake_mf) assert_array_equal(dw.get_data(), data) # Test scaling works - fake_mf['RescaleSlope'] = 2.0 - fake_mf['RescaleIntercept'] = -1 + fake_mf.RescaleSlope = 2.0 + fake_mf.RescaleIntercept = -1 assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) # Check slice sorting dim_idxs = ((1, 4), (1, 2), (1, 3), (1, 1)) fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0)) sorted_data = data[..., [3, 1, 2, 0]] - fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2) + fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) # Check slice sorting with negative index / IPP correlation fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0, flip_ipp_idx_corr=True)) sorted_data = data[..., [0, 2, 1, 3]] - fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2) + fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) # 5D! dim_idxs = [ @@ -898,28 +930,167 @@ def test_data_fake(self): sorted_data = data.reshape(shape[:2] + (-1,), order='F') order = [11, 9, 10, 8, 3, 1, 2, 0, 15, 13, 14, 12, 7, 5, 6, 4] sorted_data = sorted_data[..., np.argsort(order)] - fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2) + fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) - def test__scale_data(self): + def test_scale_data(self): # Test data scaling - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) + fake_mf.Rows = 2 + fake_mf.Columns = 3 + fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)] MFW = self.WRAPCLASS - dw = MFW(fake_mf) - data = np.arange(24).reshape((2, 3, 4)) - assert_array_equal(data, dw._scale_data(data)) - fake_mf['RescaleSlope'] = 2.0 - fake_mf['RescaleIntercept'] = -1.0 - assert_array_equal(data * 2 - 1, dw._scale_data(data)) - fake_frame = fake_frames('PixelValueTransformationSequence', 'RescaleSlope', [3.0])[0] - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] - # Lacking RescaleIntercept -> Error - dw = MFW(fake_mf) - with pytest.raises(AttributeError): - dw._scale_data(data) - fake_frame.PixelValueTransformationSequence[0].RescaleIntercept = -2 - assert_array_equal(data * 3 - 2, dw._scale_data(data)) + data = np.arange(24).reshape((2, 3, 4), order='F') + assert_array_equal(data, MFW(fake_mf)._scale_data(data)) + # Test legacy top-level slope/intercept + fake_mf.RescaleSlope = 2.0 + fake_mf.RescaleIntercept = -1.0 + assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) + # RealWorldValueMapping takes precedence, but only with defined units + fake_mf.RealWorldValueMappingSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0 + fake_mf.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0 + assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) + fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = 
[Dataset()] + fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[0].CodeMeaning = '%' + assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data)) + fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[ + 0 + ].CodeMeaning = 'no units' + assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) + # Possible to have more than one RealWorldValueMapping, use first one with defined units + fake_mf.RealWorldValueMappingSequence.append(Dataset()) + fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueSlope = 15.0 + fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueIntercept = -3.0 + fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence[0].CodeMeaning = '%' + assert_array_equal(data * 15 - 3, MFW(fake_mf)._scale_data(data)) + # A global RWV scale takes precedence over per-frame PixelValueTransformation + div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + frames = fake_frames( + 'PixelValueTransformationSequence', + 'RescaleSlope', + [3.0, 3.0, 3.0, 3.0], + fake_mf.PerFrameFunctionalGroupsSequence, + ) + assert_array_equal(data * 15 - 3, MFW(fake_mf)._scale_data(data)) + # The per-frame PixelValueTransformation takes precedence over plain top-level slope / inter + delattr(fake_mf, 'RealWorldValueMappingSequence') + assert_array_equal(data * 3, MFW(fake_mf)._scale_data(data)) + for frame in frames: + frame.PixelValueTransformationSequence[0].RescaleIntercept = -2 + assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data)) # Decimals are OK - fake_frame.PixelValueTransformationSequence[0].RescaleSlope = Decimal('3') - fake_frame.PixelValueTransformationSequence[0].RescaleIntercept = Decimal('-2') - assert_array_equal(data * 3 - 2, dw._scale_data(data)) + for frame in frames: + frame.PixelValueTransformationSequence[0].RescaleSlope = Decimal('3') + frame.PixelValueTransformationSequence[0].RescaleIntercept = Decimal('-2') + assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data)) + # A per-frame RWV scaling takes precedence over per-frame PixelValueTransformation + for frame in frames: + frame.RealWorldValueMappingSequence = [Dataset()] + frame.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0 + frame.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0 + frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [Dataset()] + frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[ + 0 + ].CodeMeaning = '%' + assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data)) + # Test varying per-frame scale factors + for frame_idx, frame in enumerate(frames): + frame.RealWorldValueMappingSequence[0].RealWorldValueSlope = 2 * (frame_idx + 1) + frame.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -1 * (frame_idx + 1) + assert_array_equal( + data * np.array([2, 4, 6, 8]) + np.array([-1, -2, -3, -4]), + MFW(fake_mf)._scale_data(data), + ) + + def test_philips_scale_data(self): + fake_mf = deepcopy(self.MINIMAL_MF) + fake_mf.Manufacturer = 'Philips' + fake_mf.Rows = 2 + fake_mf.Columns = 3 + fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)] + MFW = self.WRAPCLASS + data = np.arange(24).reshape((2, 3, 4), order='F') + # Unlike other manufacturers, public scale factors from Philips without defined + # units should not be used. 
In lieu of this the private scale factor should be
+        # used, which should always be available (modulo deidentification). If we can't
+        # find any of these scale factors a warning is issued.
+        with pytest.warns(
+            UserWarning,
+            match='Unable to find Philips private scale factor, cross-series comparisons may be invalid',
+        ):
+            assert_array_equal(data, MFW(fake_mf)._scale_data(data))
+        fake_mf.RescaleSlope = 2.0
+        fake_mf.RescaleIntercept = -1.0
+        for rescale_type in (None, '', 'US', 'normalized'):
+            if rescale_type is not None:
+                fake_mf.RescaleType = rescale_type
+            with pytest.warns(
+                UserWarning,
+                match='Unable to find Philips private scale factor, cross-series comparisons may be invalid',
+            ):
+                assert_array_equal(data, MFW(fake_mf)._scale_data(data))
+        # Falling back to private scaling doesn't generate error
+        priv_block = fake_mf.private_block(0x2005, 'Philips MR Imaging DD 001', create=True)
+        priv_block.add_new(0xE, 'FL', 3.0)
+        assert_array_equal(data * 3.0, MFW(fake_mf)._scale_data(data))
+        # If the units are defined they take precedence over private scaling
+        fake_mf.RescaleType = 'mrad'
+        assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data))
+        # A RWV scale factor with defined units takes precedence
+        shared = Dataset()
+        fake_mf.SharedFunctionalGroupsSequence = [shared]
+        rwv_map = Dataset()
+        rwv_map.RealWorldValueSlope = 10.0
+        rwv_map.RealWorldValueIntercept = -5.0
+        rwv_map.MeasurementUnitsCodeSequence = [Dataset()]
+        rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
+        shared.RealWorldValueMappingSequence = [rwv_map]
+        assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data))
+        # Get rid of valid top-level scale factors, test per-frame scale factors
+        delattr(shared, 'RealWorldValueMappingSequence')
+        delattr(fake_mf, 'RescaleType')
+        del fake_mf[priv_block.get_tag(0xE)]
+        div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        # Simplest case is all frames have same (valid) scale factor
+        for frame in fake_mf.PerFrameFunctionalGroupsSequence:
+            pix_trans = Dataset()
+            pix_trans.RescaleSlope = 2.5
+            pix_trans.RescaleIntercept = -4
+            pix_trans.RescaleType = 'mrad'
+            frame.PixelValueTransformationSequence = [pix_trans]
+        assert_array_equal(data * 2.5 - 4, MFW(fake_mf)._scale_data(data))
+        # If some frames are missing valid scale factors we should get a warning
+        for frame in fake_mf.PerFrameFunctionalGroupsSequence[2:]:
+            delattr(frame.PixelValueTransformationSequence[0], 'RescaleType')
+        with pytest.warns(
+            UserWarning,
+            match='Unable to find Philips private scale factor, cross-series comparisons may be invalid',
+        ):
+            assert_array_equal(
+                data * np.array([2.5, 2.5, 1, 1]) + np.array([-4, -4, 0, 0]),
+                MFW(fake_mf)._scale_data(data),
+            )
+        # We can fall back to private scale factor on frame-by-frame basis
+        for frame in fake_mf.PerFrameFunctionalGroupsSequence:
+            priv_block = frame.private_block(0x2005, 'Philips MR Imaging DD 001', create=True)
+            priv_block.add_new(0xE, 'FL', 7.0)
+        assert_array_equal(
+            data * np.array([2.5, 2.5, 7, 7]) + np.array([-4, -4, 0, 0]),
+            MFW(fake_mf)._scale_data(data),
+        )
+        # Again RWV scale factors take precedence
+        for frame_idx, frame in enumerate(fake_mf.PerFrameFunctionalGroupsSequence):
+            rwv_map = Dataset()
+            rwv_map.RealWorldValueSlope = 14.0 - frame_idx
+            rwv_map.RealWorldValueIntercept = 5.0
+            rwv_map.MeasurementUnitsCodeSequence = [Dataset()]
+            rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
+            frame.RealWorldValueMappingSequence =
[rwv_map]
+        assert_array_equal(
+            data * np.array([14, 13, 12, 11]) + np.array([5, 5, 5, 5]),
+            MFW(fake_mf)._scale_data(data),
+        )
diff --git a/nibabel/nicom/utils.py b/nibabel/nicom/utils.py
index 24f4afc2fe..2c01c9d161 100644
--- a/nibabel/nicom/utils.py
+++ b/nibabel/nicom/utils.py
@@ -1,5 +1,7 @@
 """Utilities for working with DICOM datasets"""

+from enum import Enum
+

 def find_private_section(dcm_data, group_no, creator):
     """Return start element in group `group_no` given creator name `creator`
@@ -45,3 +47,55 @@ def find_private_section(dcm_data, group_no, creator):
             if match_func(val):
                 return elno * 0x100
     return None
+
+
+class Vendor(Enum):
+    SIEMENS = 1
+    GE = 2
+    PHILIPS = 3
+
+
+vendor_priv_sections = {
+    Vendor.SIEMENS: [
+        (0x9, 'SIEMENS SYNGO INDEX SERVICE'),
+        (0x19, 'SIEMENS MR HEADER'),
+        (0x21, 'SIEMENS MR SDR 01'),
+        (0x21, 'SIEMENS MR SDS 01'),
+        (0x21, 'SIEMENS MR SDI 02'),
+        (0x29, 'SIEMENS CSA HEADER'),
+        (0x29, 'SIEMENS MEDCOM HEADER2'),
+        (0x51, 'SIEMENS MR HEADER'),
+    ],
+    Vendor.PHILIPS: [
+        (0x2001, 'Philips Imaging DD 001'),
+        (0x2001, 'Philips Imaging DD 002'),
+        (0x2001, 'Philips Imaging DD 129'),
+        (0x2005, 'Philips MR Imaging DD 001'),
+        (0x2005, 'Philips MR Imaging DD 002'),
+        (0x2005, 'Philips MR Imaging DD 003'),
+        (0x2005, 'Philips MR Imaging DD 004'),
+        (0x2005, 'Philips MR Imaging DD 005'),
+        (0x2005, 'Philips MR Imaging DD 006'),
+        (0x2005, 'Philips MR Imaging DD 007'),
+    ],
+    Vendor.GE: [
+        (0x9, 'GEMS_IDEN_01'),
+        (0x19, 'GEMS_ACQU_01'),
+        (0x21, 'GEMS_RELA_01'),
+        (0x23, 'GEMS_STDY_01'),
+        (0x25, 'GEMS_SERS_01'),
+        (0x27, 'GEMS_IMAG_01'),
+        (0x29, 'GEMS_IMPS_01'),
+        (0x43, 'GEMS_PARM_01'),
+    ],
+}
+
+
+def vendor_from_private(dcm_data):
+    """Try to determine the vendor by looking for specific private tags"""
+    for vendor, priv_sections in vendor_priv_sections.items():
+        for priv_group, priv_creator in priv_sections:
+            if find_private_section(dcm_data, priv_group, priv_creator) is not None:
+                return vendor
From f0264abbb295e063ea8b66be36d56319a30b2ecb Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Mon, 12 Aug 2024 17:14:04 -0700
Subject: [PATCH 110/203] TST: Don't assume pydicom installed in
 test_dicomwrappers

---
 nibabel/nicom/tests/test_dicomwrappers.py | 84 +++++++++++++----------
 1 file changed, 48 insertions(+), 36 deletions(-)

diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index 0556fc63cc..55c27df50a 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -11,7 +11,6 @@
 import numpy as np
 import pytest
 from numpy.testing import assert_array_almost_equal, assert_array_equal
-from pydicom.dataset import Dataset

 from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data
 from ...volumeutils import endian_codes
@@ -64,8 +63,8 @@ def test_wrappers():
     # test direct wrapper calls
     # first with empty or minimal data
     multi_minimal = {
-        'PerFrameFunctionalGroupsSequence': [Dataset()],
-        'SharedFunctionalGroupsSequence': [Dataset()],
+        'PerFrameFunctionalGroupsSequence': [pydicom.Dataset()],
+        'SharedFunctionalGroupsSequence': [pydicom.Dataset()],
     }
     for maker, args in (
         (didw.Wrapper, ({},)),
@@ -164,10 +163,10 @@ def test_wrapper_from_data():
     fake_data['SOPClassUID'] = '1.2.840.10008.5.1.4.1.1.4.1'
     with pytest.raises(didw.WrapperError):
         didw.wrapper_from_data(fake_data)
-    fake_data['PerFrameFunctionalGroupsSequence'] = [Dataset()]
+
fake_data['PerFrameFunctionalGroupsSequence'] = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): didw.wrapper_from_data(fake_data) - fake_data['SharedFunctionalGroupsSequence'] = [Dataset()] + fake_data['SharedFunctionalGroupsSequence'] = [pydicom.Dataset()] # minimal set should now be met dw = didw.wrapper_from_data(fake_data) assert dw.is_multiframe @@ -386,14 +385,14 @@ def fake_frames(seq_name, field_name, value_seq, frame_seq=None): value_seq[n] for n in range(N) """ if frame_seq is None: - frame_seq = [Dataset() for _ in range(len(value_seq))] + frame_seq = [pydicom.Dataset() for _ in range(len(value_seq))] for value, fake_frame in zip(value_seq, frame_seq): if value is None: continue if hasattr(fake_frame, seq_name): fake_element = getattr(fake_frame, seq_name)[0] else: - fake_element = Dataset() + fake_element = pydicom.Dataset() setattr(fake_frame, seq_name, [fake_element]) setattr(fake_element, field_name, value) return frame_seq @@ -436,30 +435,30 @@ def __repr__(self): attr_strs.append(f'{attr}={getattr(self, attr)}') return f"{self.__class__.__name__}({', '.join(attr_strs)})" - class DimIdxSeqElem(Dataset): + class DimIdxSeqElem(pydicom.Dataset): def __init__(self, dip=(0, 0), fgp=None): super().__init__() self.DimensionIndexPointer = dip if fgp is not None: self.FunctionalGroupPointer = fgp - class FrmContSeqElem(Dataset): + class FrmContSeqElem(pydicom.Dataset): def __init__(self, div, sid): super().__init__() self.DimensionIndexValues = div self.StackID = sid - class PlnPosSeqElem(Dataset): + class PlnPosSeqElem(pydicom.Dataset): def __init__(self, ipp): super().__init__() self.ImagePositionPatient = ipp - class PlnOrientSeqElem(Dataset): + class PlnOrientSeqElem(pydicom.Dataset): def __init__(self, iop): super().__init__() self.ImageOrientationPatient = iop - class PerFrmFuncGrpSeqElem(Dataset): + class PerFrmFuncGrpSeqElem(pydicom.Dataset): def __init__(self, div, sid, ipp, iop): super().__init__() self.FrameContentSequence = [FrmContSeqElem(div, sid)] @@ -514,17 +513,21 @@ def __init__(self, div, sid, ipp, iop): } -class FakeDataset(Dataset): - pixel_array = None +if have_dicom: + + class FakeDataset(pydicom.Dataset): + pixel_array = None class TestMultiFrameWrapper(TestCase): # Test MultiframeWrapper - # Minimal contents of dcm_data for this wrapper - MINIMAL_MF = FakeDataset() - MINIMAL_MF.PerFrameFunctionalGroupsSequence = [Dataset()] - MINIMAL_MF.SharedFunctionalGroupsSequence = [Dataset()] - WRAPCLASS = didw.MultiframeWrapper + + if have_dicom: + # Minimal contents of dcm_data for this wrapper + MINIMAL_MF = FakeDataset() + MINIMAL_MF.PerFrameFunctionalGroupsSequence = [pydicom.Dataset()] + MINIMAL_MF.SharedFunctionalGroupsSequence = [pydicom.Dataset()] + WRAPCLASS = didw.MultiframeWrapper @dicom_test def test_shape(self): @@ -719,6 +722,7 @@ def test_shape(self): fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 3) + @dicom_test def test_iop(self): # Test Image orient patient for multiframe fake_mf = deepcopy(self.MINIMAL_MF) @@ -732,12 +736,13 @@ def test_iop(self): )[0] fake_mf.SharedFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) - fake_mf.SharedFunctionalGroupsSequence = [Dataset()] + fake_mf.SharedFunctionalGroupsSequence = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_orient_patient fake_mf.PerFrameFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 
1], [1, 0], [0, 0]]) + @dicom_test def test_voxel_sizes(self): # Test voxel size calculation fake_mf = deepcopy(self.MINIMAL_MF) @@ -761,7 +766,7 @@ def test_voxel_sizes(self): del fake_mf.SpacingBetweenSlices assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Removing shared leads to error again - fake_mf.SharedFunctionalGroupsSequence = [Dataset()] + fake_mf.SharedFunctionalGroupsSequence = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).voxel_sizes # Restoring to frames makes it work again @@ -777,6 +782,7 @@ def test_voxel_sizes(self): fake_frame.PixelMeasuresSequence[0].SliceThickness = Decimal('5.4') assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) + @dicom_test def test_image_position(self): # Test image_position property for multiframe fake_mf = deepcopy(self.MINIMAL_MF) @@ -792,7 +798,7 @@ def test_image_position(self): ) fake_mf.SharedFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) - fake_mf.SharedFunctionalGroupsSequence = [Dataset()] + fake_mf.SharedFunctionalGroupsSequence = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_position fake_mf.PerFrameFunctionalGroupsSequence = frames @@ -933,12 +939,13 @@ def test_data_fake(self): fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) + @dicom_test def test_scale_data(self): # Test data scaling fake_mf = deepcopy(self.MINIMAL_MF) fake_mf.Rows = 2 fake_mf.Columns = 3 - fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)] + fake_mf.PerFrameFunctionalGroupsSequence = [pydicom.Dataset() for _ in range(4)] MFW = self.WRAPCLASS data = np.arange(24).reshape((2, 3, 4), order='F') assert_array_equal(data, MFW(fake_mf)._scale_data(data)) @@ -947,11 +954,11 @@ def test_scale_data(self): fake_mf.RescaleIntercept = -1.0 assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) # RealWorldValueMapping takes precedence, but only with defined units - fake_mf.RealWorldValueMappingSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence = [pydicom.Dataset()] fake_mf.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0 fake_mf.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0 assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) - fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [pydicom.Dataset()] fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[0].CodeMeaning = '%' assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data)) fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[ @@ -959,10 +966,12 @@ def test_scale_data(self): ].CodeMeaning = 'no units' assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) # Possible to have more than one RealWorldValueMapping, use first one with defined units - fake_mf.RealWorldValueMappingSequence.append(Dataset()) + fake_mf.RealWorldValueMappingSequence.append(pydicom.Dataset()) fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueSlope = 15.0 fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueIntercept = -3.0 - fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence = [ + pydicom.Dataset() + ] fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence[0].CodeMeaning = '%' 
assert_array_equal(data * 15 - 3, MFW(fake_mf)._scale_data(data)) # A global RWV scale takes precedence over per-frame PixelValueTransformation @@ -988,10 +997,12 @@ def test_scale_data(self): assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data)) # A per-frame RWV scaling takes precedence over per-frame PixelValueTransformation for frame in frames: - frame.RealWorldValueMappingSequence = [Dataset()] + frame.RealWorldValueMappingSequence = [pydicom.Dataset()] frame.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0 frame.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0 - frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [Dataset()] + frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [ + pydicom.Dataset() + ] frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[ 0 ].CodeMeaning = '%' @@ -1005,12 +1016,13 @@ def test_scale_data(self): MFW(fake_mf)._scale_data(data), ) + @dicom_test def test_philips_scale_data(self): fake_mf = deepcopy(self.MINIMAL_MF) fake_mf.Manufacturer = 'Philips' fake_mf.Rows = 2 fake_mf.Columns = 3 - fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)] + fake_mf.PerFrameFunctionalGroupsSequence = [pydicom.Dataset() for _ in range(4)] MFW = self.WRAPCLASS data = np.arange(24).reshape((2, 3, 4), order='F') # Unlike other manufacturers, public scale factors from Philips without defined @@ -1040,12 +1052,12 @@ def test_philips_scale_data(self): fake_mf.RescaleType = 'mrad' assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) # A RWV scale factor with defined units takes precdence - shared = Dataset() + shared = pydicom.Dataset() fake_mf.SharedFunctionalGroupsSequence = [shared] - rwv_map = Dataset() + rwv_map = pydicom.Dataset() rwv_map.RealWorldValueSlope = 10.0 rwv_map.RealWorldValueIntercept = -5.0 - rwv_map.MeasurementUnitsCodeSequence = [Dataset()] + rwv_map.MeasurementUnitsCodeSequence = [pydicom.Dataset()] rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%' shared.RealWorldValueMappingSequence = [rwv_map] assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data)) @@ -1057,7 +1069,7 @@ def test_philips_scale_data(self): fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) # Simplest case is all frames have same (valid) scale factor for frame in fake_mf.PerFrameFunctionalGroupsSequence: - pix_trans = Dataset() + pix_trans = pydicom.Dataset() pix_trans.RescaleSlope = 2.5 pix_trans.RescaleIntercept = -4 pix_trans.RescaleType = 'mrad' @@ -1084,10 +1096,10 @@ def test_philips_scale_data(self): ) # Again RWV scale factors take precedence for frame_idx, frame in enumerate(fake_mf.PerFrameFunctionalGroupsSequence): - rwv_map = Dataset() + rwv_map = pydicom.Dataset() rwv_map.RealWorldValueSlope = 14.0 - frame_idx rwv_map.RealWorldValueIntercept = 5.0 - rwv_map.MeasurementUnitsCodeSequence = [Dataset()] + rwv_map.MeasurementUnitsCodeSequence = [pydicom.Dataset()] rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%' frame.RealWorldValueMappingSequence = [rwv_map] assert_array_equal( From 5203368461dbd720be6e776d52803a5ac81fe434 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 5 Sep 2024 14:07:31 -0400 Subject: [PATCH 111/203] fix: Update order of indices on mouseclick --- nibabel/viewers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/viewers.py b/nibabel/viewers.py index e66a34149a..0dc2f0dafc 100644 --- a/nibabel/viewers.py +++ b/nibabel/viewers.py @@ -492,10 +492,11 @@ def _on_mouse(self, 
event): x, y = event.xdata, event.ydata x = self._sizes[xax] - x if self._flips[xax] else x y = self._sizes[yax] - y if self._flips[yax] else y - idxs = [None, None, None, 1.0] + idxs = np.ones(4) idxs[xax] = x idxs[yax] = y idxs[ii] = self._data_idx[ii] + idxs[:3] = idxs[self._order] self._set_position(*np.dot(self._affine, idxs)[:3]) self._draw() From 4f36bc7a5591a4ac5ac416a9586a4ad8ec53148c Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 5 Sep 2024 14:20:26 -0400 Subject: [PATCH 112/203] test: Add regression test for rotated data --- nibabel/tests/test_viewers.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py index 53f4a32bdc..72d839c923 100644 --- a/nibabel/tests/test_viewers.py +++ b/nibabel/tests/test_viewers.py @@ -102,3 +102,35 @@ def test_viewer(): v2.link_to(v1) # shouldn't do anything v1.close() v2.close() + + +@needs_mpl +def test_viewer_nonRAS(): + data1 = np.random.rand(10, 20, 40) + data1[5, 10, :] = 0 + data1[5, :, 30] = 0 + data1[:, 10, 30] = 0 + # RSA affine + aff1 = np.array([[1, 0, 0, -5], [0, 0, 1, -30], [0, 1, 0, -10], [0, 0, 0, 1]]) + o1 = OrthoSlicer3D(data1, aff1) + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + + # Sagittal view: [0, I->S, P->A], so data is transposed, matching plot array + assert_array_equal(sag, data1[5, :, :]) + # Coronal view: [L->R, I->S, 0]. Data is not transposed, transpose to match plot array + assert_array_equal(cor, data1[:, :, 30].T) + # Axial view: [L->R, 0, P->A]. Data is not transposed, transpose to match plot array + assert_array_equal(axi, data1[:, 10, :].T) + + o1.set_position(1, 2, 3) # R, A, S coordinates + + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + + # Shift 1 right, 2 anterior, 3 superior + assert_array_equal(sag, data1[6, :, :]) + assert_array_equal(cor, data1[:, :, 32].T) + assert_array_equal(axi, data1[:, 13, :].T) From 032f6df03de1c3a39b22ebe88694b981ae0b000d Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 7 Sep 2024 12:53:06 -0400 Subject: [PATCH 113/203] Revert "ENH: Add writer for Siemens CSA header" --- nibabel/nicom/csareader.py | 110 -------------------------- nibabel/nicom/tests/test_csareader.py | 11 --- 2 files changed, 121 deletions(-) diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index dd081b22c2..df379e0be8 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -1,7 +1,6 @@ """CSA header reader from SPM spec""" import numpy as np -import struct from .structreader import Unpacker from .utils import find_private_section @@ -29,10 +28,6 @@ class CSAReadError(CSAError): pass -class CSAWriteError(CSAError): - pass - - def get_csa_header(dcm_data, csa_type='image'): """Get CSA header information from DICOM header @@ -166,96 +161,6 @@ def read(csa_str): return csa_dict -def write(csa_header): - ''' Write string from CSA header `csa_header` - - Parameters - ---------- - csa_header : dict - header information as dict, where `header` has fields (at least) - ``type, n_tags, tags``. ``header['tags']`` is also a dictionary - with one key, value pair for each tag in the header. 
- - Returns - ------- - csa_str : str - byte string containing CSA header information - ''' - result = [] - if csa_header['type'] == 2: - result.append(b'SV10') - result.append(csa_header['unused0']) - if not 0 < csa_header['n_tags'] <= 128: - raise CSAWriteError('Number of tags `t` should be ' - '0 < t <= 128') - result.append(struct.pack('2I', - csa_header['n_tags'], - csa_header['check']) - ) - - # Build list of tags in correct order - tags = list(csa_header['tags'].items()) - tags.sort(key=lambda x: x[1]['tag_no']) - tag0_n_items = tags[0][1]['n_items'] - - # Add the information for each tag - for tag_name, tag_dict in tags: - vm = tag_dict['vm'] - vr = tag_dict['vr'] - n_items = tag_dict['n_items'] - assert n_items < 100 - result.append(struct.pack('64si4s3i', - make_nt_str(tag_name), - vm, - make_nt_str(vr), - tag_dict['syngodt'], - n_items, - tag_dict['last3']) - ) - - # Figure out the number of values for this tag - if vm == 0: - n_values = n_items - else: - n_values = vm - - # Add each item for this tag - for item_no in range(n_items): - # Figure out the item length - if item_no >= n_values or tag_dict['items'][item_no] == '': - item_len = 0 - else: - item = tag_dict['items'][item_no] - if not isinstance(item, str): - item = str(item) - item_nt_str = make_nt_str(item) - item_len = len(item_nt_str) - - # These values aren't actually preserved in the dict - # representation of the header. Best we can do is set the ones - # that determine the item length appropriately. - x0, x1, x2, x3 = 0, 0, 0, 0 - if csa_header['type'] == 1: # CSA1 - odd length calculation - x0 = tag0_n_items + item_len - if item_len < 0 or (ptr + item_len) > csa_len: - if item_no < vm: - items.append('') - break - else: # CSA2 - x1 = item_len - result.append(struct.pack('4i', x0, x1, x2, x3)) - - if item_len == 0: - continue - - result.append(item_nt_str) - # go to 4 byte boundary - plus4 = item_len % 4 - if plus4 != 0: - result.append(b'\x00' * (4 - plus4)) - return b''.join(result) - - def get_scalar(csa_dict, tag_name): try: items = csa_dict['tags'][tag_name]['items'] @@ -353,18 +258,3 @@ def nt_str(s): if zero_pos == -1: return s return s[:zero_pos].decode('latin-1') - - -def make_nt_str(s): - ''' Create a null terminated byte string from a unicode object. - - Parameters - ---------- - s : unicode - - Returns - ------- - result : bytes - s encoded as latin-1 with a null char appended - ''' - return s.encode('latin-1') + b'\x00' diff --git a/nibabel/nicom/tests/test_csareader.py b/nibabel/nicom/tests/test_csareader.py index 67ae44ecbf..f31f4a3935 100644 --- a/nibabel/nicom/tests/test_csareader.py +++ b/nibabel/nicom/tests/test_csareader.py @@ -130,14 +130,3 @@ def test_missing_csa_elem(): del dcm[csa_tag] hdr = csa.get_csa_header(dcm, 'image') assert hdr is None - - -def test_read_write_rt(): - # Try doing a read-write-read round trip and make sure the dictionary - # representation of the header is the same. We can't exactly reproduce the - # original string representation currently. 
- for csa_str in (CSA2_B0, CSA2_B1000): - csa_info = csa.read(csa_str) - new_csa_str = csa.write(csa_info) - new_csa_info = csa.read(new_csa_str) - assert csa_info == new_csa_info From a70ab5417143806330f00b59fd9e28537b6ebe3e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 5 Sep 2024 14:30:03 -0400 Subject: [PATCH 114/203] TYP: Ignore overzealous warning for min/max with numpy scalars --- nibabel/volumeutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 29b954dbb3..c2387f0949 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -624,7 +624,7 @@ def array_to_file( # pre scale thresholds mn, mx = _dt_min_max(in_dtype, mn, mx) mn_out, mx_out = _dt_min_max(out_dtype) - pre_clips = max(mn, mn_out), min(mx, mx_out) + pre_clips = max(mn, mn_out), min(mx, mx_out) # type: ignore[type-var] return _write_data(data, fileobj, out_dtype, order, pre_clips=pre_clips) # In any case, we do not want to check for nans because we've already # disallowed scaling that generates nans From a1fff406a18313ff67f9ed6abd9fce58dbb65e59 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 5 Sep 2024 14:32:04 -0400 Subject: [PATCH 115/203] Update pre-commit config --- .pre-commit-config.yaml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b348393a45..4f49318eb0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ exclude: ".*/data/.*" repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.6.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -13,15 +13,18 @@ repos: - id: check-merge-conflict - id: check-vcs-permalinks - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.4 + rev: v0.6.4 hooks: - id: ruff - args: [--fix, --show-fixes, --exit-non-zero-on-fix] + args: [ --fix ] exclude: = ["doc", "tools"] - id: ruff-format exclude: = ["doc", "tools"] + - id: ruff + args: [ --select, ISC001, --fix ] + exclude: = ["doc", "tools"] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.5.1 + rev: v1.11.2 hooks: - id: mypy # Sync with project.optional-dependencies.typing @@ -36,7 +39,7 @@ repos: args: ["nibabel"] pass_filenames: false - repo: https://github.com/codespell-project/codespell - rev: v2.2.6 + rev: v2.3.0 hooks: - id: codespell additional_dependencies: From e2fe1903f73c4c58865af34fd2ab8781c58ab7e8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 7 Sep 2024 12:16:47 -0400 Subject: [PATCH 116/203] typ: Ignore Pointset.__rmatmul__/ndarray.__matmul__ inconsistency --- nibabel/pointset.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/nibabel/pointset.py b/nibabel/pointset.py index 70a802480d..889a8c70cd 100644 --- a/nibabel/pointset.py +++ b/nibabel/pointset.py @@ -101,7 +101,11 @@ def dim(self) -> int: """The dimensionality of the space the coordinates are in""" return self.coordinates.shape[1] - self.homogeneous - def __rmatmul__(self, affine: np.ndarray) -> Self: + # Use __rmatmul__ to prefer to compose affines. Mypy does not like that + # this conflicts with ndarray.__matmul__. We will need some more feedback + # on how this plays out for type-checking or code suggestions before we + # can do better than ignore. + def __rmatmul__(self, affine: np.ndarray) -> Self: # type: ignore[misc] """Apply an affine transformation to the pointset This will return a new pointset with an updated affine matrix only. 
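(A minimal sketch of the composition described above, assuming the ``Pointset`` constructor from ``nibabel.pointset`` accepts a coordinate array as its first argument:)

    import numpy as np
    from nibabel.pointset import Pointset

    pts = Pointset(np.zeros((5, 3)))  # five points at the origin, identity affine assumed
    shift = np.eye(4)
    shift[:3, 3] = [1, 2, 3]          # translate by (1, 2, 3)
    moved = shift @ pts               # ndarray.__matmul__ defers, Pointset.__rmatmul__ runs
    # `moved` is a new Pointset whose affine is the composition;
    # the coordinate array itself is untouched.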
From 7a502a3d052cc68ac3c4ae22b89447ff9c53d013 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 5 Jul 2024 17:16:05 -0400 Subject: [PATCH 117/203] MNT: Require typing_extensions for Python <3.13 --- pyproject.toml | 1 + tox.ini | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ff5168f9c6..34d9f7bb50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ dependencies = [ "numpy >=1.20", "packaging >=17", "importlib_resources >=5.12; python_version < '3.12'", + "typing_extensions >=4.6; python_version < '3.13'", ] classifiers = [ "Development Status :: 5 - Production/Stable", diff --git a/tox.ini b/tox.ini index 5df35c8d38..bd99d986c2 100644 --- a/tox.ini +++ b/tox.ini @@ -77,7 +77,8 @@ extras = test deps = # General minimum dependencies: pin based on API usage min: packaging ==17 - min: importlib_resources ==1.3; python_version < '3.9' + min: importlib_resources ==5.12; python_version < '3.12' + min: typing_extensions ==4.6; python_version < '3.13' # NEP29/SPEC0 + 1yr: Test on minor release series within the last 3 years # We're extending this to all optional dependencies # This only affects the range that we test on; numpy is the only non-optional From bb8b808622dad737acbe0e881423ad22a4849e38 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 5 Jul 2024 15:51:04 -0400 Subject: [PATCH 118/203] RF: Add generic NiftiExtension base class Nifti1Extension is a non-ideal base class for NIfTI extensions because it assumes that it is safe to store use a null transformation, and thus default to `bytes` objects. This makes it difficult to define its typing behavior in a way that allows subclasses to refine the type such that type-checkers understand it. This patch creates a generic `NiftiExtension` class that parameterizes the "runtime representation" type. Nifti1Extension subclasses with another parameter that defaults to `bytes`, allowing it to be subclassed in turn (preserving the Nifti1Extension -> Nifti1DicomExtension subclass relationship) while still emitting `bytes`. We could have simply made `Nifti1Extension` the base class, but the mangle/unmangle methods need some casts or ignore comments to type-check cleanly. This separation allows us to have a clean base class with the legacy hacks cordoned off into an subclass. --- nibabel/nifti1.py | 264 +++++++++++++++++++++-------------- nibabel/tests/test_nifti1.py | 6 +- 2 files changed, 166 insertions(+), 104 deletions(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index ecd94c10de..791bf3b1e5 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -13,11 +13,13 @@ from __future__ import annotations +import typing as ty import warnings from io import BytesIO import numpy as np import numpy.linalg as npl +from typing_extensions import TypeVar # PY312 from . 
import analyze # module import from .arrayproxy import get_obj_dtype @@ -31,7 +33,19 @@ from .spm99analyze import SpmAnalyzeHeader from .volumeutils import Recoder, endian_codes, make_dt_codes -pdcm, have_dicom, _ = optional_package('pydicom') +if ty.TYPE_CHECKING: + import pydicom as pdcm + + have_dicom = True + DicomDataset = pdcm.Dataset +else: + pdcm, have_dicom, _ = optional_package('pydicom') + if have_dicom: + DicomDataset = pdcm.Dataset + else: + DicomDataset = ty.Any + +T = TypeVar('T', default=bytes) # nifti1 flat header definition for Analyze-like first 348 bytes # first number in comments indicates offset in file header in bytes @@ -283,15 +297,19 @@ ) -class Nifti1Extension: - """Baseclass for NIfTI1 header extensions. +class NiftiExtension(ty.Generic[T]): + """Base class for NIfTI header extensions.""" - This class is sufficient to handle very simple text-based extensions, such - as `comment`. More sophisticated extensions should/will be supported by - dedicated subclasses. - """ + code: int + encoding: ty.Optional[str] = None + _content: bytes + _object: ty.Optional[T] = None - def __init__(self, code, content): + def __init__( + self, + code: ty.Union[int, str], + content: bytes, + ) -> None: """ Parameters ---------- @@ -299,94 +317,83 @@ def __init__(self, code, content): Canonical extension code as defined in the NIfTI standard, given either as integer or corresponding label (see :data:`~nibabel.nifti1.extension_codes`) - content : str - Extension content as read from the NIfTI file header. This content is - converted into a runtime representation. + content : bytes + Extension content as read from the NIfTI file header. This content may + be converted into a runtime representation. """ try: - self._code = extension_codes.code[code] + self.code = extension_codes.code[code] # type: ignore[assignment] except KeyError: - # XXX or fail or at least complain? - self._code = code - self._content = self._unmangle(content) + self.code = code # type: ignore[assignment] + self._content = content - def _unmangle(self, value): - """Convert the extension content into its runtime representation. + # Handle (de)serialization of extension content + # Subclasses may implement these methods to provide an alternative + # view of the extension content. If left unimplemented, the content + # must be bytes and is not modified. + def _mangle(self, obj: T) -> bytes: + raise NotImplementedError - The default implementation does nothing at all. + def _unmangle(self, content: bytes) -> T: + raise NotImplementedError - Parameters - ---------- - value : str - Extension content as read from file. + def _sync(self) -> None: + """Synchronize content with object. - Returns - ------- - The same object that was passed as `value`. - - Notes - ----- - Subclasses should reimplement this method to provide the desired - unmangling procedure and may return any type of object. + This permits the runtime representation to be modified in-place + and updates the bytes representation accordingly. """ - return value - - def _mangle(self, value): - """Convert the extension content into NIfTI file header representation. + if self._object is not None: + self._content = self._mangle(self._object) - The default implementation does nothing at all. - - Parameters - ---------- - value : str - Extension content in runtime form. 
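(To make the generic design concrete, here is a sketch of a subclass pinning the runtime type ``T`` by implementing the two hooks. ``CommentExtension`` is hypothetical and not part of this patch, though code 6 is the standard ``comment`` extension code.)

    class CommentExtension(NiftiExtension[str]):
        """Hypothetical text extension: T is fixed to str."""

        code = 6  # 'comment' in extension_codes

        def _mangle(self, obj: str) -> bytes:
            return obj.encode('utf-8')

        def _unmangle(self, content: bytes) -> str:
            return content.decode('utf-8')

    ext = CommentExtension(6, b'hello')
    ext.get_content()  # -> 'hello', decoded lazily on first access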
+ def __repr__(self) -> str: + try: + code = extension_codes.label[self.code] + except KeyError: + # deal with unknown codes + code = self.code + return f'{self.__class__.__name__}({code}, {self._content!r})' - Returns - ------- - str + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, self.__class__) + and self.code == other.code + and self.content == other.content + ) - Notes - ----- - Subclasses should reimplement this method to provide the desired - mangling procedure. - """ - return value + def __ne__(self, other): + return not self == other def get_code(self): """Return the canonical extension type code.""" - return self._code + return self.code - def get_content(self): - """Return the extension content in its runtime representation.""" + @property + def content(self) -> bytes: + """Return the extension content as raw bytes.""" + self._sync() return self._content - def get_sizeondisk(self): + def get_content(self) -> T: + """Return the extension content in its runtime representation. + + This method may return a different type for each extension type. + """ + if self._object is None: + self._object = self._unmangle(self._content) + return self._object + + def get_sizeondisk(self) -> int: """Return the size of the extension in the NIfTI file.""" + self._sync() # need raw value size plus 8 bytes for esize and ecode - size = len(self._mangle(self._content)) - size += 8 + size = len(self._content) + 8 # extensions size has to be a multiple of 16 bytes if size % 16 != 0: size += 16 - (size % 16) return size - def __repr__(self): - try: - code = extension_codes.label[self._code] - except KeyError: - # deal with unknown codes - code = self._code - - s = f"Nifti1Extension('{code}', '{self._content}')" - return s - - def __eq__(self, other): - return (self._code, self._content) == (other._code, other._content) - - def __ne__(self, other): - return not self == other - - def write_to(self, fileobj, byteswap): + def write_to(self, fileobj: ty.BinaryIO, byteswap: bool = False) -> None: """Write header extensions to fileobj Write starts at fileobj current file position. @@ -402,22 +409,74 @@ def write_to(self, fileobj, byteswap): ------- None """ + self._sync() extstart = fileobj.tell() rawsize = self.get_sizeondisk() # write esize and ecode first - extinfo = np.array((rawsize, self._code), dtype=np.int32) + extinfo = np.array((rawsize, self.code), dtype=np.int32) if byteswap: extinfo = extinfo.byteswap() fileobj.write(extinfo.tobytes()) # followed by the actual extension content # XXX if mangling upon load is implemented, it should be reverted here - fileobj.write(self._mangle(self._content)) + fileobj.write(self._content) # be nice and zero out remaining part of the extension till the # next 16 byte border fileobj.write(b'\x00' * (extstart + rawsize - fileobj.tell())) -class Nifti1DicomExtension(Nifti1Extension): +class Nifti1Extension(NiftiExtension[T]): + """Baseclass for NIfTI1 header extensions. + + This class is sufficient to handle very simple text-based extensions, such + as `comment`. More sophisticated extensions should/will be supported by + dedicated subclasses. + """ + + def _unmangle(self, value: bytes) -> T: + """Convert the extension content into its runtime representation. + + The default implementation does nothing at all. + + Parameters + ---------- + value : str + Extension content as read from file. + + Returns + ------- + The same object that was passed as `value`. 
+ + Notes + ----- + Subclasses should reimplement this method to provide the desired + unmangling procedure and may return any type of object. + """ + return value # type: ignore[return-value] + + def _mangle(self, value: T) -> bytes: + """Convert the extension content into NIfTI file header representation. + + The default implementation does nothing at all. + + Parameters + ---------- + value : str + Extension content in runtime form. + + Returns + ------- + str + + Notes + ----- + Subclasses should reimplement this method to provide the desired + mangling procedure. + """ + return value # type: ignore[return-value] + + +class Nifti1DicomExtension(Nifti1Extension[DicomDataset]): """NIfTI1 DICOM header extension This class is a thin wrapper around pydicom to read a binary DICOM @@ -427,7 +486,12 @@ class Nifti1DicomExtension(Nifti1Extension): header. """ - def __init__(self, code, content, parent_hdr=None): + def __init__( + self, + code: ty.Union[int, str], + content: ty.Union[bytes, DicomDataset, None] = None, + parent_hdr: ty.Optional[Nifti1Header] = None, + ) -> None: """ Parameters ---------- @@ -452,30 +516,30 @@ def __init__(self, code, content, parent_hdr=None): code should always be 2 for DICOM. """ - self._code = code - if parent_hdr: - self._is_little_endian = parent_hdr.endianness == '<' - else: - self._is_little_endian = True + self._is_little_endian = parent_hdr is None or parent_hdr.endianness == '<' + + bytes_content: bytes if isinstance(content, pdcm.dataset.Dataset): self._is_implicit_VR = False - self._raw_content = self._mangle(content) - self._content = content + self._object = content + bytes_content = self._mangle(content) elif isinstance(content, bytes): # Got a byte string - unmangle it - self._raw_content = content - self._is_implicit_VR = self._guess_implicit_VR() - ds = self._unmangle(content, self._is_implicit_VR, self._is_little_endian) - self._content = ds + self._is_implicit_VR = self._guess_implicit_VR(content) + self._object = self._unmangle(content) + bytes_content = content elif content is None: # initialize a new dicom dataset self._is_implicit_VR = False - self._content = pdcm.dataset.Dataset() + self._object = pdcm.dataset.Dataset() + bytes_content = self._mangle(self._object) else: raise TypeError( f'content must be either a bytestring or a pydicom Dataset. ' f'Got {content.__class__}' ) + super().__init__(code, bytes_content) - def _guess_implicit_VR(self): + @staticmethod + def _guess_implicit_VR(content) -> bool: """Try to guess DICOM syntax by checking for valid VRs. 
Without a DICOM Transfer Syntax, it's difficult to tell if Value @@ -483,19 +547,17 @@ def _guess_implicit_VR(self): This reads where the first VR would be and checks it against a list of valid VRs """ - potential_vr = self._raw_content[4:6].decode() - if potential_vr in pdcm.values.converters.keys(): - implicit_VR = False - else: - implicit_VR = True - return implicit_VR - - def _unmangle(self, value, is_implicit_VR=False, is_little_endian=True): - bio = BytesIO(value) - ds = pdcm.filereader.read_dataset(bio, is_implicit_VR, is_little_endian) - return ds + potential_vr = content[4:6].decode() + return potential_vr not in pdcm.values.converters.keys() + + def _unmangle(self, obj: bytes) -> DicomDataset: + return pdcm.filereader.read_dataset( + BytesIO(obj), + self._is_implicit_VR, + self._is_little_endian, + ) - def _mangle(self, dataset): + def _mangle(self, dataset: DicomDataset) -> bytes: bio = BytesIO() dio = pdcm.filebase.DicomFileLike(bio) dio.is_implicit_VR = self._is_implicit_VR diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 5ee4fb3c15..d1fa4afd0f 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -1339,7 +1339,7 @@ def test_nifti_dicom_extension(): dcmbytes_explicit = struct.pack('') # Big Endian Nifti1Header dcmext = Nifti1DicomExtension(2, dcmbytes_explicit_be, parent_hdr=hdr_be) assert dcmext.__class__ == Nifti1DicomExtension - assert dcmext._guess_implicit_VR() is False + assert dcmext._is_implicit_VR is False assert dcmext.get_code() == 2 assert dcmext.get_content().PatientID == 'NiPy' assert dcmext.get_content()[0x10, 0x20].value == 'NiPy' From 2e2a0e648d445247c6e35ed76fd5299c5a87c508 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 5 Jul 2024 16:19:39 -0400 Subject: [PATCH 119/203] ENH: Add .text and .json() accessors for ease --- nibabel/nifti1.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 791bf3b1e5..bab8031fea 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -13,6 +13,7 @@ from __future__ import annotations +import json import typing as ty import warnings from io import BytesIO @@ -368,16 +369,38 @@ def get_code(self): """Return the canonical extension type code.""" return self.code + # Canonical access to extension content + # Follows the lead of httpx.Response .content, .text and .json() + # properties/methods @property def content(self) -> bytes: """Return the extension content as raw bytes.""" self._sync() return self._content + @property + def text(self) -> str: + """Attempt to decode the extension content as text. + + The encoding is determined by the `encoding` attribute, which may be + set by the user or subclass. If not set, the default encoding is 'utf-8'. + """ + return self.content.decode(self.encoding or 'utf-8') + + def json(self) -> ty.Any: + """Attempt to decode the extension content as JSON. + + If the content is not valid JSON, a JSONDecodeError or UnicodeDecodeError + will be raised. + """ + return json.loads(self.content) + def get_content(self) -> T: """Return the extension content in its runtime representation. This method may return a different type for each extension type. + For simple use cases, consider using ``.content``, ``.text`` or ``.json()`` + instead. 
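(In use, following the pattern of the tests added two commits below; the byte payload here is illustrative:)

    ext = Nifti1Extension('comment', b'{"TE": 0.03}')
    ext.content   # -> b'{"TE": 0.03}'  (raw bytes)
    ext.text      # -> '{"TE": 0.03}'   (decoded with ext.encoding, defaulting to UTF-8)
    ext.json()    # -> {'TE': 0.03}     (parsed with json.loads)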
""" if self._object is None: self._object = self._unmangle(self._content) From e54fab9f77961c3a517ccbaa151e24dfd16d1bec Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 6 Jul 2024 13:07:18 -0400 Subject: [PATCH 120/203] TEST: Test content, text and json() access --- nibabel/tests/test_nifti1.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index d1fa4afd0f..23e71c8324 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -1224,6 +1224,32 @@ def test_ext_eq(): assert not ext == ext2 +def test_extension_content_access(): + ext = Nifti1Extension('comment', b'123') + # Unmangled content access + assert ext.get_content() == b'123' + + # Raw, text and JSON access + assert ext.content == b'123' + assert ext.text == '123' + assert ext.json() == 123 + + # Encoding can be set + ext.encoding = 'ascii' + assert ext.text == '123' + + # Test that encoding errors are caught + ascii_ext = Nifti1Extension('comment', 'hôpital'.encode('utf-8')) + ascii_ext.encoding = 'ascii' + with pytest.raises(UnicodeDecodeError): + ascii_ext.text + + json_ext = Nifti1Extension('unknown', b'{"a": 1}') + assert json_ext.content == b'{"a": 1}' + assert json_ext.text == '{"a": 1}' + assert json_ext.json() == {'a': 1} + + def test_extension_codes(): for k in extension_codes.keys(): Nifti1Extension(k, 'somevalue') From ef60adc24274f658820c8d69fdf58afa4282f7eb Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 7 Jul 2024 08:08:52 -0400 Subject: [PATCH 121/203] ENH: Add from_bytes method for subclasses with known codes --- nibabel/nifti1.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index bab8031fea..0fc92f3aaf 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -20,7 +20,7 @@ import numpy as np import numpy.linalg as npl -from typing_extensions import TypeVar # PY312 +from typing_extensions import Self, TypeVar # PY312 from . import analyze # module import from .arrayproxy import get_obj_dtype @@ -328,6 +328,12 @@ def __init__( self.code = code # type: ignore[assignment] self._content = content + @classmethod + def from_bytes(cls, content: bytes) -> Self: + if not hasattr(cls, 'code'): + raise NotImplementedError('from_bytes() requires a class attribute `code`') + return cls(cls.code, content) + # Handle (de)serialization of extension content # Subclasses may implement these methods to provide an alternative # view of the extension content. If left unimplemented, the content @@ -509,6 +515,8 @@ class Nifti1DicomExtension(Nifti1Extension[DicomDataset]): header. 
""" + code = 2 + def __init__( self, code: ty.Union[int, str], From 8b0e69959b9b87f3f833a62a738faa6b66dda278 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 7 Jul 2024 08:09:32 -0400 Subject: [PATCH 122/203] TYP: Annotate Cifti2Extension --- nibabel/cifti2/cifti2.py | 2 +- nibabel/cifti2/parse_cifti2.py | 12 ++++-------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index cb2e0cfaf4..b2b67978b7 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1570,7 +1570,7 @@ def to_file_map(self, file_map=None, dtype=None): self.update_headers() header = self._nifti_header - extension = Cifti2Extension(content=self.header.to_xml()) + extension = Cifti2Extension.from_bytes(self.header.to_xml()) header.extensions = Nifti1Extensions( ext for ext in header.extensions if not isinstance(ext, Cifti2Extension) ) diff --git a/nibabel/cifti2/parse_cifti2.py b/nibabel/cifti2/parse_cifti2.py index 48c2e06537..764e3ae203 100644 --- a/nibabel/cifti2/parse_cifti2.py +++ b/nibabel/cifti2/parse_cifti2.py @@ -40,19 +40,15 @@ ) -class Cifti2Extension(Nifti1Extension): +class Cifti2Extension(Nifti1Extension[Cifti2Header]): code = 32 - def __init__(self, code=None, content=None): - Nifti1Extension.__init__(self, code=code or self.code, content=content) - - def _unmangle(self, value): + def _unmangle(self, value: bytes) -> Cifti2Header: parser = Cifti2Parser() parser.parse(string=value) - self._content = parser.header - return self._content + return parser.header - def _mangle(self, value): + def _mangle(self, value: Cifti2Header) -> bytes: if not isinstance(value, Cifti2Header): raise ValueError('Can only mangle a Cifti2Header.') return value.to_xml() From 7237eba757039d5b8cbf9278ff2e33e4488f353b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 7 Sep 2024 12:04:22 -0400 Subject: [PATCH 123/203] rf: Allow extensions to be constructed from objects without serialization --- nibabel/nifti1.py | 77 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 58 insertions(+), 19 deletions(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 0fc92f3aaf..d93e4615cc 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -299,7 +299,25 @@ class NiftiExtension(ty.Generic[T]): - """Base class for NIfTI header extensions.""" + """Base class for NIfTI header extensions. + + This class provides access to the extension content in various forms. + For simple extensions that expose data as bytes, text or JSON, this class + is sufficient. More complex extensions should be implemented as subclasses + that provide custom serialization/deserialization methods. + + Efficiency note: + + This class assumes that the runtime representation of the extension content + is mutable. Once a runtime representation is set, it is cached and will be + serialized on any attempt to access the extension content as bytes, including + determining the size of the extension in the NIfTI file. + + If the runtime representation is never accessed, the raw bytes will be used + without modification. While avoiding unnecessary deserialization, if there + are bytestrings that do not produce a valid runtime representation, they will + be written as-is, and may cause errors downstream. 
+ """ code: int encoding: ty.Optional[str] = None @@ -309,7 +327,8 @@ class NiftiExtension(ty.Generic[T]): def __init__( self, code: ty.Union[int, str], - content: bytes, + content: bytes = b'', + object: ty.Optional[T] = None, ) -> None: """ Parameters @@ -318,21 +337,40 @@ def __init__( Canonical extension code as defined in the NIfTI standard, given either as integer or corresponding label (see :data:`~nibabel.nifti1.extension_codes`) - content : bytes - Extension content as read from the NIfTI file header. This content may - be converted into a runtime representation. + content : bytes, optional + Extension content as read from the NIfTI file header. + object : optional + Extension content in runtime form. """ try: self.code = extension_codes.code[code] # type: ignore[assignment] except KeyError: self.code = code # type: ignore[assignment] self._content = content + if object is not None: + self._object = object @classmethod def from_bytes(cls, content: bytes) -> Self: + """Create an extension from raw bytes. + + This constructor may only be used in extension classes with a class + attribute `code` to indicate the extension type. + """ if not hasattr(cls, 'code'): raise NotImplementedError('from_bytes() requires a class attribute `code`') - return cls(cls.code, content) + return cls(cls.code, content=content) + + @classmethod + def from_object(cls, obj: T) -> Self: + """Create an extension from a runtime object. + + This constructor may only be used in extension classes with a class + attribute `code` to indicate the extension type. + """ + if not hasattr(cls, 'code'): + raise NotImplementedError('from_object() requires a class attribute `code`') + return cls(cls.code, object=obj) # Handle (de)serialization of extension content # Subclasses may implement these methods to provide an alternative @@ -401,7 +439,7 @@ def json(self) -> ty.Any: """ return json.loads(self.content) - def get_content(self) -> T: + def get_object(self) -> T: """Return the extension content in its runtime representation. This method may return a different type for each extension type. 
@@ -412,15 +450,14 @@ def get_content(self) -> T: self._object = self._unmangle(self._content) return self._object + # Backwards compatibility + get_content = get_object + def get_sizeondisk(self) -> int: """Return the size of the extension in the NIfTI file.""" - self._sync() - # need raw value size plus 8 bytes for esize and ecode - size = len(self._content) + 8 - # extensions size has to be a multiple of 16 bytes - if size % 16 != 0: - size += 16 - (size % 16) - return size + # need raw value size plus 8 bytes for esize and ecode, rounded up to next 16 bytes + # Rounding C+8 up to M is done by (C+8 + (M-1)) // M * M + return (len(self.content) + 23) // 16 * 16 def write_to(self, fileobj: ty.BinaryIO, byteswap: bool = False) -> None: """Write header extensions to fileobj @@ -438,20 +475,20 @@ def write_to(self, fileobj: ty.BinaryIO, byteswap: bool = False) -> None: ------- None """ - self._sync() extstart = fileobj.tell() - rawsize = self.get_sizeondisk() + rawsize = self.get_sizeondisk() # Calls _sync() # write esize and ecode first extinfo = np.array((rawsize, self.code), dtype=np.int32) if byteswap: extinfo = extinfo.byteswap() fileobj.write(extinfo.tobytes()) - # followed by the actual extension content - # XXX if mangling upon load is implemented, it should be reverted here + # followed by the actual extension content, synced above fileobj.write(self._content) # be nice and zero out remaining part of the extension till the # next 16 byte border - fileobj.write(b'\x00' * (extstart + rawsize - fileobj.tell())) + pad = extstart + rawsize - fileobj.tell() + if pad: + fileobj.write(bytes(pad)) class Nifti1Extension(NiftiExtension[T]): @@ -462,6 +499,8 @@ class Nifti1Extension(NiftiExtension[T]): dedicated subclasses. """ + code = 0 # Default to unknown extension + def _unmangle(self, value: bytes) -> T: """Convert the extension content into its runtime representation. From a0231b1c5476550506fde114a9df305a5f4b8913 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 7 Sep 2024 12:08:59 -0400 Subject: [PATCH 124/203] rf: Construct DicomExtensions more simply --- nibabel/nifti1.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index d93e4615cc..da890a63ac 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -555,6 +555,8 @@ class Nifti1DicomExtension(Nifti1Extension[DicomDataset]): """ code = 2 + _is_implict_VR: bool = False + _is_little_endian: bool = True def __init__( self, @@ -586,27 +588,25 @@ def __init__( code should always be 2 for DICOM. """ - self._is_little_endian = parent_hdr is None or parent_hdr.endianness == '<' + if code != 2: + raise ValueError(f'code must be 2 for DICOM. 
Got {code}.') + + if content is None: + content = pdcm.Dataset() + + if parent_hdr is not None: + self._is_little_endian = parent_hdr.endianness == '<' - bytes_content: bytes if isinstance(content, pdcm.dataset.Dataset): - self._is_implicit_VR = False - self._object = content - bytes_content = self._mangle(content) + super().__init__(code, object=content) elif isinstance(content, bytes): # Got a byte string - unmangle it self._is_implicit_VR = self._guess_implicit_VR(content) - self._object = self._unmangle(content) - bytes_content = content - elif content is None: # initialize a new dicom dataset - self._is_implicit_VR = False - self._object = pdcm.dataset.Dataset() - bytes_content = self._mangle(self._object) + super().__init__(code, content=content) else: raise TypeError( f'content must be either a bytestring or a pydicom Dataset. ' f'Got {content.__class__}' ) - super().__init__(code, bytes_content) @staticmethod def _guess_implicit_VR(content) -> bool: From 1936d246835ac1fdf207ebe329f4880559fb8de9 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 11 Jun 2024 22:32:37 -0400 Subject: [PATCH 125/203] TEST: Test NiftiJSONExtension --- nibabel/tests/test_nifti1.py | 51 ++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 23e71c8324..79f1c84d68 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -30,6 +30,7 @@ Nifti1Image, Nifti1Pair, Nifti1PairHeader, + NiftiJSONExtension, data_type_codes, extension_codes, load, @@ -1414,6 +1415,56 @@ def test_nifti_dicom_extension(): Nifti1DicomExtension(2, 0) +def test_json_extension(tmp_path): + nim = load(image_file) + hdr = nim.header + exts_container = hdr.extensions + + # Test basic functionality + json_ext = NiftiJSONExtension('ignore', b'{"key": "value"}') + assert json_ext.get_content() == {'key': 'value'} + byte_content = json_ext._mangle(json_ext.get_content()) + assert byte_content == b'{"key": "value"}' + json_obj = json_ext._unmangle(byte_content) + assert json_obj == {'key': 'value'} + size = 16 * ((len(byte_content) + 7) // 16 + 1) + assert json_ext.get_sizeondisk() == size + + def ext_to_bytes(ext, byteswap=False): + bio = BytesIO() + ext.write_to(bio, byteswap) + return bio.getvalue() + + # Check serialization + bytestring = ext_to_bytes(json_ext) + assert bytestring[:8] == struct.pack('<2I', size, extension_codes['ignore']) + assert bytestring[8:].startswith(byte_content) + assert len(bytestring) == size + + # Save to file and read back + exts_container.append(json_ext) + nim.to_filename(tmp_path / 'test.nii') + + # We used ignore, so it comes back as a Nifti1Extension + rt_img = Nifti1Image.from_filename(tmp_path / 'test.nii') + assert len(rt_img.header.extensions) == 3 + rt_ext = rt_img.header.extensions[-1] + assert rt_ext.get_code() == extension_codes['ignore'] + assert rt_ext.get_content() == byte_content + + # MRS is currently the only JSON extension + json_ext._code = extension_codes['mrs'] + nim.to_filename(tmp_path / 'test.nii') + + # Check that the extension is read back as a NiftiJSONExtension + rt_img = Nifti1Image.from_filename(tmp_path / 'test.nii') + assert len(rt_img.header.extensions) == 3 + rt_ext = rt_img.header.extensions[-1] + assert rt_ext.get_code() == extension_codes['mrs'] + assert isinstance(rt_ext, NiftiJSONExtension) + assert rt_ext.get_content() == json_obj + + class TestNifti1General: """Test class to test nifti1 in general From 061fbf566673296cb7c10007c62c02297139f334 Mon Sep 
17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 7 Sep 2024 19:49:46 -0400 Subject: [PATCH 126/203] feat: Add current extension codes --- nibabel/nifti1.py | 15 +++++++++++ nibabel/tests/test_nifti1.py | 51 ------------------------------------ 2 files changed, 15 insertions(+), 51 deletions(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index da890a63ac..31fed2e63c 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -652,6 +652,21 @@ def _mangle(self, dataset: DicomDataset) -> bytes: (12, 'workflow_fwds', Nifti1Extension), (14, 'freesurfer', Nifti1Extension), (16, 'pypickle', Nifti1Extension), + (18, 'mind_ident', NiftiExtension), + (20, 'b_value', NiftiExtension), + (22, 'spherical_direction', NiftiExtension), + (24, 'dt_component', NiftiExtension), + (26, 'shc_degreeorder', NiftiExtension), + (28, 'voxbo', NiftiExtension), + (30, 'caret', NiftiExtension), + ## Defined in nibabel.cifti2.parse_cifti2 + # (32, 'cifti', Cifti2Extension), + (34, 'variable_frame_timing', NiftiExtension), + (36, 'unassigned', NiftiExtension), + (38, 'eval', NiftiExtension), + (40, 'matlab', NiftiExtension), + (42, 'quantiphyse', NiftiExtension), + (44, 'mrs', NiftiExtension[ty.Dict[str, ty.Any]]), ), fields=('code', 'label', 'handler'), ) diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 79f1c84d68..23e71c8324 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -30,7 +30,6 @@ Nifti1Image, Nifti1Pair, Nifti1PairHeader, - NiftiJSONExtension, data_type_codes, extension_codes, load, @@ -1415,56 +1414,6 @@ def test_nifti_dicom_extension(): Nifti1DicomExtension(2, 0) -def test_json_extension(tmp_path): - nim = load(image_file) - hdr = nim.header - exts_container = hdr.extensions - - # Test basic functionality - json_ext = NiftiJSONExtension('ignore', b'{"key": "value"}') - assert json_ext.get_content() == {'key': 'value'} - byte_content = json_ext._mangle(json_ext.get_content()) - assert byte_content == b'{"key": "value"}' - json_obj = json_ext._unmangle(byte_content) - assert json_obj == {'key': 'value'} - size = 16 * ((len(byte_content) + 7) // 16 + 1) - assert json_ext.get_sizeondisk() == size - - def ext_to_bytes(ext, byteswap=False): - bio = BytesIO() - ext.write_to(bio, byteswap) - return bio.getvalue() - - # Check serialization - bytestring = ext_to_bytes(json_ext) - assert bytestring[:8] == struct.pack('<2I', size, extension_codes['ignore']) - assert bytestring[8:].startswith(byte_content) - assert len(bytestring) == size - - # Save to file and read back - exts_container.append(json_ext) - nim.to_filename(tmp_path / 'test.nii') - - # We used ignore, so it comes back as a Nifti1Extension - rt_img = Nifti1Image.from_filename(tmp_path / 'test.nii') - assert len(rt_img.header.extensions) == 3 - rt_ext = rt_img.header.extensions[-1] - assert rt_ext.get_code() == extension_codes['ignore'] - assert rt_ext.get_content() == byte_content - - # MRS is currently the only JSON extension - json_ext._code = extension_codes['mrs'] - nim.to_filename(tmp_path / 'test.nii') - - # Check that the extension is read back as a NiftiJSONExtension - rt_img = Nifti1Image.from_filename(tmp_path / 'test.nii') - assert len(rt_img.header.extensions) == 3 - rt_ext = rt_img.header.extensions[-1] - assert rt_ext.get_code() == extension_codes['mrs'] - assert isinstance(rt_ext, NiftiJSONExtension) - assert rt_ext.get_content() == json_obj - - class TestNifti1General: """Test class to test nifti1 in general From 72a93c2d3d43cbf39faa633b972152bd6b23e139 Mon Sep 17 00:00:00 
2001 From: Chris Markiewicz Date: Sat, 7 Sep 2024 20:21:27 -0400 Subject: [PATCH 127/203] Update nibabel/nifti1.py --- nibabel/nifti1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 31fed2e63c..a22959dfd6 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -555,7 +555,7 @@ class Nifti1DicomExtension(Nifti1Extension[DicomDataset]): """ code = 2 - _is_implict_VR: bool = False + _is_implicit_VR: bool = False _is_little_endian: bool = True def __init__( From bb978c1c3dab40fc5fb12876059df526c85d33ad Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 8 Sep 2024 02:16:22 -0400 Subject: [PATCH 128/203] fix: Import from typing in Python 3.13 --- nibabel/nifti1.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index a22959dfd6..ee6cec53a7 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -14,13 +14,18 @@ from __future__ import annotations import json +import sys import typing as ty import warnings from io import BytesIO import numpy as np import numpy.linalg as npl -from typing_extensions import Self, TypeVar # PY312 + +if sys.version_info <= (3, 12): + from typing_extensions import Self, TypeVar # PY312 +else: + from typing import Self, TypeVar from . import analyze # module import from .arrayproxy import get_obj_dtype From 398488ec600d01a432f46a2d2e94523245b897f9 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 8 Sep 2024 02:42:24 -0400 Subject: [PATCH 129/203] Update nibabel/nifti1.py --- nibabel/nifti1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index ee6cec53a7..626d217527 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -22,7 +22,7 @@ import numpy as np import numpy.linalg as npl -if sys.version_info <= (3, 12): +if sys.version_info < (3, 13): from typing_extensions import Self, TypeVar # PY312 else: from typing import Self, TypeVar From 4d09e33b530bc7dab87d0492db2bc1489795318c Mon Sep 17 00:00:00 2001 From: Guillaume Becq Date: Tue, 10 Sep 2024 19:00:07 +0200 Subject: [PATCH 130/203] Add files via upload --- nibabel/viewers.py | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/nibabel/viewers.py b/nibabel/viewers.py index 0dc2f0dafc..07881eb695 100644 --- a/nibabel/viewers.py +++ b/nibabel/viewers.py @@ -103,7 +103,7 @@ def __init__(self, data, affine=None, axes=None, title=None): # | | | | # | | | | # +---------+ +---------+ - # A --> <-- R + # A --> R --> # ^ +---------+ +---------+ # | | | | | # | Axial | | Vol | @@ -111,7 +111,7 @@ def __init__(self, data, affine=None, axes=None, title=None): # | | | | # | | | | # +---------+ +---------+ - # <-- R <-- t --> + # R --> <-- t --> fig, axes = plt.subplots(2, 2) fig.set_size_inches((8, 8), forward=True) @@ -419,7 +419,7 @@ def _set_position(self, x, y, z, notify=True): # deal with crosshairs loc = self._data_idx[ii] if self._flips[ii]: - loc = self._sizes[ii] - loc + loc = self._sizes[ii] - 1 - loc loc = [loc] * 2 if ii == 0: self._crosshairs[2]['vert'].set_xdata(loc) @@ -468,12 +468,17 @@ def _on_scroll(self, event): dv *= 1.0 if event.button == 'up' else -1.0 dv *= -1 if self._flips[ii] else 1 val = self._data_idx[ii] + dv + if ii == 3: self._set_volume_index(val) else: - coords = [self._data_idx[k] for k in range(3)] + [1.0] + coords = [self._data_idx[k] for k in range(3)] coords[ii] = val - self._set_position(*np.dot(self._affine, coords)[:3]) + coords_ordered = 
[0, 0, 0, 1] + for k in range(3): + coords_ordered[self._order[k]] = coords[k] + position = np.dot(self._affine, coords_ordered)[:3] + self._set_position(*position) self._draw() def _on_mouse(self, event): @@ -488,18 +493,19 @@ def _on_mouse(self, event): self._set_volume_index(event.xdata) else: # translate click xdata/ydata to physical position - xax, yax = [[1, 2], [0, 2], [0, 1]][ii] + xax, yax = [[self._order[1], self._order[2]], + [self._order[0], self._order[2]], + [self._order[0], self._order[1]]][ii] x, y = event.xdata, event.ydata - x = self._sizes[xax] - x if self._flips[xax] else x - y = self._sizes[yax] - y if self._flips[yax] else y + x = self._sizes[xax] - x - 1 if self._flips[xax] else x + y = self._sizes[yax] - y - 1 if self._flips[yax] else y idxs = np.ones(4) idxs[xax] = x idxs[yax] = y - idxs[ii] = self._data_idx[ii] - idxs[:3] = idxs[self._order] - self._set_position(*np.dot(self._affine, idxs)[:3]) + idxs[self._order[ii]] = self._data_idx[ii] + self._set_position(*np.dot(self._affine, idxs)[:3]) self._draw() - + def _on_keypress(self, event): """Handle mpl keypress events""" if event.key is not None and 'escape' in event.key: From 6bfdcafe31c66d9e1f5e6329e09e3a332cd5c6c0 Mon Sep 17 00:00:00 2001 From: Guillaume Becq Date: Tue, 10 Sep 2024 19:01:42 +0200 Subject: [PATCH 131/203] Add files via upload --- nibabel/tests/test_viewers.py | 200 ++++++++++++++++++++++++++++++++++ 1 file changed, 200 insertions(+) diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py index 72d839c923..dff93926db 100644 --- a/nibabel/tests/test_viewers.py +++ b/nibabel/tests/test_viewers.py @@ -134,3 +134,203 @@ def test_viewer_nonRAS(): assert_array_equal(sag, data1[6, :, :]) assert_array_equal(cor, data1[:, :, 32].T) assert_array_equal(axi, data1[:, 13, :].T) + + + +@needs_mpl +def test_viewer_nonRAS_on_mouse(): + """ + test on_mouse selection on non RAS matrices + + """ + # This affine simulates an acquisition on a quadruped subject that is in a prone position. + # This corresponds to an acquisition with: + # - LR inverted on scanner x (i) + # - IS on scanner y (j) + # - PA on scanner z (k) + # This example enables to test also OrthoSlicer3D properties `_flips` and `_order`. 
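(The orientation claimed in this comment can be checked directly with nibabel's public ``aff2axcodes``; a quick sketch:)

    import numpy as np
    import nibabel as nib

    aff1 = np.array([[-1, 0, 0, 5], [0, 0, 1, -10], [0, 1, 0, -30], [0, 0, 0, 1]])
    nib.aff2axcodes(aff1)  # -> ('L', 'S', 'A'): i runs R->L, j runs I->S, k runs P->A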
+ + (I, J, K) = (10, 20, 40) + data1 = np.random.rand(I, J, K) + (i_target, j_target, k_target) = (2, 14, 12) + i1 = i_target - 2 + i2 = i_target + 2 + j1 = j_target - 3 + j2 = j_target + 3 + k1 = k_target - 4 + k2 = k_target + 4 + data1[i1: i2 + 1, j1: j2 + 1, k1: k2 + 1] = 0 + data1[i_target, j_target, k_target] = 1 + valp1 = 1.5 + valm1 = 0.5 + data1[i_target - 1, j_target, k_target] = valp1 # x flipped + data1[i_target + 1, j_target, k_target] = valm1 # x flipped + data1[i_target, j_target - 1, k_target] = valm1 + data1[i_target, j_target + 1, k_target] = valp1 + data1[i_target, j_target, k_target - 1] = valm1 + data1[i_target, j_target, k_target + 1] = valp1 + + aff1 = np.array([[-1, 0, 0, 5], + [0, 0, 1, -10], + [0, 1, 0, -30], + [0, 0, 0, 1]]) + + o1 = OrthoSlicer3D(data1, aff1) + + class Event: + def __init__(self): + self.name = "simulated mouse event" + self.button = 1 + + event = Event() + event.xdata = k_target + event.ydata = j_target + event.inaxes = o1._ims[0].axes + o1._on_mouse(event) + + event.inaxes = o1._ims[1].axes + event.xdata = (I - 1) - i_target # x flipped + event.ydata = j_target + o1._on_mouse(event) + + event.inaxes = o1._ims[2].axes + event.xdata = (I - 1) - i_target # x flipped + event.ydata = k_target + o1._on_mouse(event) + + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + + assert_array_equal(sag, data1[i_target, :, :]) # + assert_array_equal(cor, data1[::-1, :, k_target].T) # x flipped + assert_array_equal(axi, data1[::-1, j_target, :].T) # x flipped + return None + + +@needs_mpl +def test_viewer_nonRAS_on_scroll(): + """ + test scrolling on non RAS matrices + + """ + # This affine simulates an acquisition on a quadruped subject that is in a prone position. + # This corresponds to an acquisition with: + # - LR inverted on scanner x (i) + # - IS on scanner y (j) + # - PA on scanner z (k) + # This example enables to test also OrthoSlicer3D properties `_flips` and `_order`. 
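(Quick arithmetic check of the target's scanner coordinates used below; note that z comes out negative:)

    import numpy as np

    aff1 = np.array([[-1, 0, 0, 5], [0, 0, 1, -10], [0, 1, 0, -30], [0, 0, 0, 1]])
    aff1 @ [2, 14, 12, 1]  # -> [3, 2, -16, 1], i.e. x_t=3, y_t=2, z_t=-16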
+ + (I, J, K) = (10, 20, 40) + data1 = np.random.rand(I, J, K) + (i_target, j_target, k_target) = (2, 14, 12) + i1 = i_target - 2 + i2 = i_target + 2 + j1 = j_target - 3 + j2 = j_target + 3 + k1 = k_target - 4 + k2 = k_target + 4 + data1[i1: i2 + 1, j1: j2 + 1, k1: k2 + 1] = 0 + data1[i_target, j_target, k_target] = 1 + valp1 = 1.5 + valm1 = 0.5 + data1[i_target - 1, j_target, k_target] = valp1 # x flipped + data1[i_target + 1, j_target, k_target] = valm1 # x flipped + data1[i_target, j_target - 1, k_target] = valm1 + data1[i_target, j_target + 1, k_target] = valp1 + data1[i_target, j_target, k_target - 1] = valm1 + data1[i_target, j_target, k_target + 1] = valp1 + + aff1 = np.array([[-1, 0, 0, 5], + [0, 0, 1, -10], + [0, 1, 0, -30], + [0, 0, 0, 1]]) + + o1 = OrthoSlicer3D(data1, aff1) + + class Event: + def __init__(self): + self.name = "simulated mouse event" + self.button = None + self.key = None + + i_last = data1.shape[0] - 1 + + [x_t, y_t, z_t] = list(aff1.dot(np.array([i_target, j_target, k_target, 1]))[:3]) + # print(x_t, y_t, z_t) + # scanner positions are x_t=3, y_t=2, z_t=16 + + event = Event() + + # Sagittal plane - one scroll up + # x coordinate is flipped so index decrease by 1 + o1.set_position(x_t, y_t, z_t) + event.inaxes = o1._ims[0].axes + event.button = 'up' + o1._on_scroll(event) + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + assert_array_equal(sag, data1[i_target - 1, :, :]) + assert_array_equal(cor, data1[::-1, :, k_target].T) # ::-1 because the array is flipped in x + assert_array_equal(axi, data1[::-1, j_target, :].T) # ::-1 because the array is flipped in x + + # Sagittal plane - one scrolled down + o1.set_position(x_t, y_t, z_t) + event.button = 'down' + o1._on_scroll(event) + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + assert_array_equal(sag, data1[i_target + 1, :, :]) + assert_array_equal(cor, data1[::-1, :, k_target].T) + assert_array_equal(axi, data1[::-1, j_target, :].T) + + # Coronal plane - one scroll up + # y coordinate is increase by 1 + o1.set_position(x_t, y_t, z_t) + event.inaxes = o1._ims[1].axes + event.button = 'up' + o1._on_scroll(event) + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + assert_array_equal(sag, data1[i_target, :, :]) + assert_array_equal(cor, data1[::-1, :, k_target + 1].T) # ::-1 because the array is flipped in x + assert_array_equal(axi, data1[::-1, j_target, :].T) # ::-1 because the array is flipped in x + + # Coronal plane - one scrolled down + o1.set_position(x_t, y_t, z_t) + event.button = 'down' + o1._on_scroll(event) + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + assert_array_equal(sag, data1[i_target, :, :]) + assert_array_equal(cor, data1[::-1, :, k_target - 1].T) + assert_array_equal(axi, data1[::-1, j_target, :].T) + + # Axial plane - one scroll up + # y is increase by 1 + o1.set_position(x_t, y_t, z_t) + event.inaxes = o1._ims[2].axes + event.button = 'up' + o1._on_scroll(event) + sag = o1._ims[0].get_array() + cor = o1._ims[1].get_array() + axi = o1._ims[2].get_array() + assert_array_equal(sag, data1[i_target, :, :]) + assert_array_equal(cor, data1[::-1, :, k_target].T) # ::-1 because the array is flipped in x + assert_array_equal(axi, data1[::-1, j_target + 1, :].T) # ::-1 because the array is flipped in x + + # Axial plane - one scrolled down + o1.set_position(x_t, y_t, z_t) + event.button = 'down' + o1._on_scroll(event) + sag = 
o1._ims[0].get_array()
+    cor = o1._ims[1].get_array()
+    axi = o1._ims[2].get_array()
+    assert_array_equal(sag, data1[i_target, :, :])
+    assert_array_equal(cor, data1[::-1, :, k_target].T)
+    assert_array_equal(axi, data1[::-1, j_target - 1, :].T)
+    return None
\ No newline at end of file

From e1f28f382db87b5859e3e9dedeb9171a4b5d8621 Mon Sep 17 00:00:00 2001
From: Guillaume Becq
Date: Fri, 13 Sep 2024 13:59:54 +0200
Subject: [PATCH 132/203] BF: correct `_on_mouse` and `_on_scroll` methods for
 non-RAS matrices

---
 nibabel/viewers.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/nibabel/viewers.py b/nibabel/viewers.py
index 07881eb695..5181ace7bb 100644
--- a/nibabel/viewers.py
+++ b/nibabel/viewers.py
@@ -468,14 +468,14 @@ def _on_scroll(self, event):
         dv *= 1.0 if event.button == 'up' else -1.0
         dv *= -1 if self._flips[ii] else 1
         val = self._data_idx[ii] + dv
-
+
         if ii == 3:
             self._set_volume_index(val)
         else:
             coords = [self._data_idx[k] for k in range(3)]
             coords[ii] = val
             coords_ordered = [0, 0, 0, 1]
-            for k in range(3):
+            for k in range(3):
                 coords_ordered[self._order[k]] = coords[k]
             position = np.dot(self._affine, coords_ordered)[:3]
             self._set_position(*position)
@@ -493,9 +493,11 @@ def _on_mouse(self, event):
             self._set_volume_index(event.xdata)
         else:
             # translate click xdata/ydata to physical position
-            xax, yax = [[self._order[1], self._order[2]],
-                        [self._order[0], self._order[2]],
-                        [self._order[0], self._order[1]]][ii]
+            xax, yax = [
+                [self._order[1], self._order[2]],
+                [self._order[0], self._order[2]],
+                [self._order[0], self._order[1]],
+            ][ii]
             x, y = event.xdata, event.ydata
             x = self._sizes[xax] - x - 1 if self._flips[xax] else x
             y = self._sizes[yax] - y - 1 if self._flips[yax] else y
@@ -503,9 +505,9 @@ def _on_mouse(self, event):
             idxs[xax] = x
             idxs[yax] = y
             idxs[self._order[ii]] = self._data_idx[ii]
-        self._set_position(*np.dot(self._affine, idxs)[:3])
+            self._set_position(*np.dot(self._affine, idxs)[:3])
         self._draw()
-
+
     def _on_keypress(self, event):
         """Handle mpl keypress events"""
         if event.key is not None and 'escape' in event.key:

From 5d89d2fb6c9056dba8d15bbca1445eaa467a6eb6 Mon Sep 17 00:00:00 2001
From: Guillaume Becq
Date: Fri, 13 Sep 2024 14:00:55 +0200
Subject: [PATCH 133/203] test for viewers BF with non-RAS matrices

---
 nibabel/tests/test_viewers.py | 133 ++++++++++++++++------------------
 1 file changed, 64 insertions(+), 69 deletions(-)

diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py
index dff93926db..fa22d9021a 100644
--- a/nibabel/tests/test_viewers.py
+++ b/nibabel/tests/test_viewers.py
@@ -136,20 +136,19 @@ def test_viewer_nonRAS():
     assert_array_equal(axi, data1[:, 13, :].T)


-
 @needs_mpl
 def test_viewer_nonRAS_on_mouse():
     """
     test on_mouse selection on non RAS matrices
-
+
     """
-    # This affine simulates an acquisition on a quadruped subject that is in a prone position.
-    # This corresponds to an acquisition with:
+    # This affine simulates an acquisition on a quadruped subject that is in a prone position.
+    # This corresponds to an acquisition with:
     # - LR inverted on scanner x (i)
     # - IS on scanner y (j)
     # - PA on scanner z (k)
-    # This example enables to test also OrthoSlicer3D properties `_flips` and `_order`.
+    # This example enables to test also OrthoSlicer3D properties `_flips` and `_order`.
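(For reference while reading these reformatted tests, the index update implemented by the `_on_scroll` fix above reduces to this small sketch:)

    def next_index(idx, button, flipped):
        # minimal sketch of the scroll rule in OrthoSlicer3D._on_scroll
        dv = 1.0 if button == 'up' else -1.0
        dv *= -1 if flipped else 1
        return idx + dv

    next_index(2, 'up', flipped=True)    # -> 1.0: scrolling up lowers a flipped index
    next_index(14, 'up', flipped=False)  # -> 15.0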
+ (I, J, K) = (10, 20, 40) data1 = np.random.rand(I, J, K) (i_target, j_target, k_target) = (2, 14, 12) @@ -159,52 +158,49 @@ def test_viewer_nonRAS_on_mouse(): j2 = j_target + 3 k1 = k_target - 4 k2 = k_target + 4 - data1[i1: i2 + 1, j1: j2 + 1, k1: k2 + 1] = 0 + data1[i1 : i2 + 1, j1 : j2 + 1, k1 : k2 + 1] = 0 data1[i_target, j_target, k_target] = 1 valp1 = 1.5 valm1 = 0.5 - data1[i_target - 1, j_target, k_target] = valp1 # x flipped - data1[i_target + 1, j_target, k_target] = valm1 # x flipped + data1[i_target - 1, j_target, k_target] = valp1 # x flipped + data1[i_target + 1, j_target, k_target] = valm1 # x flipped data1[i_target, j_target - 1, k_target] = valm1 data1[i_target, j_target + 1, k_target] = valp1 data1[i_target, j_target, k_target - 1] = valm1 data1[i_target, j_target, k_target + 1] = valp1 - - aff1 = np.array([[-1, 0, 0, 5], - [0, 0, 1, -10], - [0, 1, 0, -30], - [0, 0, 0, 1]]) - + + aff1 = np.array([[-1, 0, 0, 5], [0, 0, 1, -10], [0, 1, 0, -30], [0, 0, 0, 1]]) + o1 = OrthoSlicer3D(data1, aff1) - - class Event: - def __init__(self): - self.name = "simulated mouse event" + + class Event: + def __init__(self): + self.name = 'simulated mouse event' self.button = 1 - + event = Event() event.xdata = k_target event.ydata = j_target event.inaxes = o1._ims[0].axes o1._on_mouse(event) - + event.inaxes = o1._ims[1].axes - event.xdata = (I - 1) - i_target # x flipped + event.xdata = (I - 1) - i_target # x flipped event.ydata = j_target o1._on_mouse(event) - + event.inaxes = o1._ims[2].axes - event.xdata = (I - 1) - i_target # x flipped + event.xdata = (I - 1) - i_target # x flipped event.ydata = k_target o1._on_mouse(event) - + sag = o1._ims[0].get_array() cor = o1._ims[1].get_array() axi = o1._ims[2].get_array() - - assert_array_equal(sag, data1[i_target, :, :]) # - assert_array_equal(cor, data1[::-1, :, k_target].T) # x flipped - assert_array_equal(axi, data1[::-1, j_target, :].T) # x flipped + + assert_array_equal(sag, data1[i_target, :, :]) # + assert_array_equal(cor, data1[::-1, :, k_target].T) # x flipped + assert_array_equal(axi, data1[::-1, j_target, :].T) # x flipped return None @@ -212,15 +208,15 @@ def __init__(self): def test_viewer_nonRAS_on_scroll(): """ test scrolling on non RAS matrices - + """ - # This affine simulates an acquisition on a quadruped subject that is in a prone position. - # This corresponds to an acquisition with: + # This affine simulates an acquisition on a quadruped subject that is in a prone position. + # This corresponds to an acquisition with: # - LR inverted on scanner x (i) # - IS on scanner y (j) # - PA on scanner z (k) # This example enables to test also OrthoSlicer3D properties `_flips` and `_order`. 
- + (I, J, K) = (10, 20, 40) data1 = np.random.rand(I, J, K) (i_target, j_target, k_target) = (2, 14, 12) @@ -230,40 +226,35 @@ def test_viewer_nonRAS_on_scroll(): j2 = j_target + 3 k1 = k_target - 4 k2 = k_target + 4 - data1[i1: i2 + 1, j1: j2 + 1, k1: k2 + 1] = 0 + data1[i1 : i2 + 1, j1 : j2 + 1, k1 : k2 + 1] = 0 data1[i_target, j_target, k_target] = 1 valp1 = 1.5 valm1 = 0.5 - data1[i_target - 1, j_target, k_target] = valp1 # x flipped - data1[i_target + 1, j_target, k_target] = valm1 # x flipped + data1[i_target - 1, j_target, k_target] = valp1 # x flipped + data1[i_target + 1, j_target, k_target] = valm1 # x flipped data1[i_target, j_target - 1, k_target] = valm1 data1[i_target, j_target + 1, k_target] = valp1 data1[i_target, j_target, k_target - 1] = valm1 data1[i_target, j_target, k_target + 1] = valp1 - - aff1 = np.array([[-1, 0, 0, 5], - [0, 0, 1, -10], - [0, 1, 0, -30], - [0, 0, 0, 1]]) - + + aff1 = np.array([[-1, 0, 0, 5], [0, 0, 1, -10], [0, 1, 0, -30], [0, 0, 0, 1]]) + o1 = OrthoSlicer3D(data1, aff1) - - class Event: - def __init__(self): - self.name = "simulated mouse event" + + class Event: + def __init__(self): + self.name = 'simulated mouse event' self.button = None self.key = None - - i_last = data1.shape[0] - 1 - + [x_t, y_t, z_t] = list(aff1.dot(np.array([i_target, j_target, k_target, 1]))[:3]) # print(x_t, y_t, z_t) # scanner positions are x_t=3, y_t=2, z_t=16 - + event = Event() - + # Sagittal plane - one scroll up - # x coordinate is flipped so index decrease by 1 + # x coordinate is flipped so index decrease by 1 o1.set_position(x_t, y_t, z_t) event.inaxes = o1._ims[0].axes event.button = 'up' @@ -272,10 +263,10 @@ def __init__(self): cor = o1._ims[1].get_array() axi = o1._ims[2].get_array() assert_array_equal(sag, data1[i_target - 1, :, :]) - assert_array_equal(cor, data1[::-1, :, k_target].T) # ::-1 because the array is flipped in x - assert_array_equal(axi, data1[::-1, j_target, :].T) # ::-1 because the array is flipped in x - - # Sagittal plane - one scrolled down + assert_array_equal(cor, data1[::-1, :, k_target].T) # ::-1 because the array is flipped in x + assert_array_equal(axi, data1[::-1, j_target, :].T) # ::-1 because the array is flipped in x + + # Sagittal plane - one scrolled down o1.set_position(x_t, y_t, z_t) event.button = 'down' o1._on_scroll(event) @@ -285,9 +276,9 @@ def __init__(self): assert_array_equal(sag, data1[i_target + 1, :, :]) assert_array_equal(cor, data1[::-1, :, k_target].T) assert_array_equal(axi, data1[::-1, j_target, :].T) - + # Coronal plane - one scroll up - # y coordinate is increase by 1 + # y coordinate is increase by 1 o1.set_position(x_t, y_t, z_t) event.inaxes = o1._ims[1].axes event.button = 'up' @@ -296,10 +287,12 @@ def __init__(self): cor = o1._ims[1].get_array() axi = o1._ims[2].get_array() assert_array_equal(sag, data1[i_target, :, :]) - assert_array_equal(cor, data1[::-1, :, k_target + 1].T) # ::-1 because the array is flipped in x - assert_array_equal(axi, data1[::-1, j_target, :].T) # ::-1 because the array is flipped in x - - # Coronal plane - one scrolled down + assert_array_equal( + cor, data1[::-1, :, k_target + 1].T + ) # ::-1 because the array is flipped in x + assert_array_equal(axi, data1[::-1, j_target, :].T) # ::-1 because the array is flipped in x + + # Coronal plane - one scrolled down o1.set_position(x_t, y_t, z_t) event.button = 'down' o1._on_scroll(event) @@ -309,9 +302,9 @@ def __init__(self): assert_array_equal(sag, data1[i_target, :, :]) assert_array_equal(cor, data1[::-1, :, k_target - 1].T) 
     assert_array_equal(axi, data1[::-1, j_target, :].T)
-
+
     # Axial plane - one scroll up
-    # y is increase by 1
+    # y is increase by 1
     o1.set_position(x_t, y_t, z_t)
     event.inaxes = o1._ims[2].axes
     event.button = 'up'
@@ -320,10 +313,12 @@ def __init__(self):
     cor = o1._ims[1].get_array()
     axi = o1._ims[2].get_array()
     assert_array_equal(sag, data1[i_target, :, :])
-    assert_array_equal(cor, data1[::-1, :, k_target].T) # ::-1 because the array is flipped in x
-    assert_array_equal(axi, data1[::-1, j_target + 1, :].T) # ::-1 because the array is flipped in x
-
-    # Axial plane - one scrolled down
+    assert_array_equal(cor, data1[::-1, :, k_target].T)  # ::-1 because the array is flipped in x
+    assert_array_equal(
+        axi, data1[::-1, j_target + 1, :].T
+    )  # ::-1 because the array is flipped in x
+
+    # Axial plane - one scrolled down
     o1.set_position(x_t, y_t, z_t)
     event.button = 'down'
     o1._on_scroll(event)
@@ -333,4 +328,4 @@ def __init__(self):
     assert_array_equal(sag, data1[i_target, :, :])
     assert_array_equal(cor, data1[::-1, :, k_target].T)
     assert_array_equal(axi, data1[::-1, j_target - 1, :].T)
-    return None
\ No newline at end of file
+    return None

From 9aaacfa6ee7ad548a83e2a8349d4c1b36078fe14 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:12:06 +0200
Subject: [PATCH 134/203] STY: Apply ruff/pyupgrade rule UP006

UP006 Use `type` instead of `Type` for type annotation
UP006 Use `tuple` instead of `ty.Tuple` for type annotation
---
 nibabel/gifti/gifti.py   | 4 ++--
 nibabel/spatialimages.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 7c5c3c4fb0..caee7c3500 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -18,7 +18,7 @@
 import sys
 import warnings
 from copy import copy
-from typing import Type, cast
+from typing import cast
 
 import numpy as np
 
@@ -598,7 +598,7 @@ class GiftiImage(xml.XmlSerializable, SerializableImage):
     # The parser will in due course be a GiftiImageParser, but we can't set
     # that now, because it would result in a circular import.  We set it after
     # the class has been defined, at the end of the class definition.
-    parser: Type[xml.XmlParser]
+    parser: type[xml.XmlParser]
 
     def __init__(
         self,
diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py
index 96f8115a22..f4d27791b2 100644
--- a/nibabel/spatialimages.py
+++ b/nibabel/spatialimages.py
@@ -169,8 +169,8 @@ def set_data_dtype(self, dtype: npt.DTypeLike) -> None: ...
 @ty.runtime_checkable
 class SpatialProtocol(ty.Protocol):
     def get_data_dtype(self) -> np.dtype: ...
-    def get_data_shape(self) -> ty.Tuple[int, ...]: ...
-    def get_zooms(self) -> ty.Tuple[float, ...]: ...
 
+    def get_data_shape(self) -> tuple[int, ...]: ...
+    def get_zooms(self) -> tuple[float, ...]: ...
 
 class HeaderDataError(Exception):
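For readers unfamiliar with rule UP006: it relies on PEP 585, under which the builtin containers work directly as generic annotations once `from __future__ import annotations` is in effect (as it is throughout nibabel), making `typing.Type` and `ty.Tuple` redundant. A minimal sketch of the pattern, with illustrative names that are not part of this patch:

    from __future__ import annotations


    def center_voxel(shape: tuple[int, ...]) -> tuple[int, ...]:
        # builtin `tuple`/`list`/`type` replace ty.Tuple/ty.List/ty.Type
        return tuple(s // 2 for s in shape)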
From 4c784a700578d69792724deea24f1633a9942b85 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:13:09 +0200
Subject: [PATCH 135/203] STY: Apply ruff/pyupgrade rule UP031

UP031 Use format specifiers instead of percent format
---
 nibabel/analyze.py              | 4 +++-
 nibabel/cmdline/diff.py         | 2 +-
 nibabel/cmdline/ls.py           | 2 +-
 nibabel/dft.py                  | 2 +-
 nibabel/freesurfer/mghformat.py | 4 +++-
 nibabel/nifti1.py               | 2 +-
 nibabel/tests/test_data.py      | 2 +-
 nibabel/tests/test_nifti1.py    | 2 +-
 8 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/nibabel/analyze.py b/nibabel/analyze.py
index e697181719..34597319d6 100644
--- a/nibabel/analyze.py
+++ b/nibabel/analyze.py
@@ -515,7 +515,9 @@ def data_to_fileobj(self, data, fileobj, rescale=True):
         data = np.asanyarray(data)
         shape = self.get_data_shape()
         if data.shape != shape:
-            raise HeaderDataError('Data should be shape (%s)' % ', '.join(str(s) for s in shape))
+            raise HeaderDataError(
+                'Data should be shape ({})'.format(', '.join(str(s) for s in shape))
+            )
         out_dtype = self.get_data_dtype()
         if rescale:
             try:
diff --git a/nibabel/cmdline/diff.py b/nibabel/cmdline/diff.py
index 1231a778f4..36760f7ebb 100755
--- a/nibabel/cmdline/diff.py
+++ b/nibabel/cmdline/diff.py
@@ -302,7 +302,7 @@ def display_diff(files, diff):
 
         for item in value:
             if isinstance(item, dict):
-                item_str = ', '.join('%s: %s' % i for i in item.items())
+                item_str = ', '.join('{}: {}'.format(*i) for i in item.items())
             elif item is None:
                 item_str = '-'
             else:
diff --git a/nibabel/cmdline/ls.py b/nibabel/cmdline/ls.py
index ff41afbd0a..f79c27f0c5 100755
--- a/nibabel/cmdline/ls.py
+++ b/nibabel/cmdline/ls.py
@@ -112,7 +112,7 @@ def proc_file(f, opts):
         and (h.has_data_slope or h.has_data_intercept)
         and not h.get_slope_inter() in ((1.0, 0.0), (None, None))
     ):
-        row += ['@l*%.3g+%.3g' % h.get_slope_inter()]
+        row += ['@l*{:.3g}+{:.3g}'.format(*h.get_slope_inter())]
     else:
         row += ['']
 
diff --git a/nibabel/dft.py b/nibabel/dft.py
index d9e3359998..e63c9c4796 100644
--- a/nibabel/dft.py
+++ b/nibabel/dft.py
@@ -231,7 +231,7 @@ def __getattribute__(self, name):
                         WHERE storage_instance = ?
                        ORDER BY directory, name"""
            c.execute(query, (self.uid,))
-            val = ['%s/%s' % tuple(row) for row in c]
+            val = ['{}/{}'.format(*tuple(row)) for row in c]
            self.files = val
        return val
 
diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py
index 533d235927..6efa67ffa8 100644
--- a/nibabel/freesurfer/mghformat.py
+++ b/nibabel/freesurfer/mghformat.py
@@ -570,7 +570,9 @@ def _write_data(self, mghfile, data, header):
         """
         shape = header.get_data_shape()
         if data.shape != shape:
-            raise HeaderDataError('Data should be shape (%s)' % ', '.join(str(s) for s in shape))
+            raise HeaderDataError(
+                'Data should be shape ({})'.format(', '.join(str(s) for s in shape))
+            )
         offset = header.get_data_offset()
         out_dtype = header.get_data_dtype()
         array_to_file(data, mghfile, out_dtype, offset)
diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py
index ecd94c10de..4788947315 100644
--- a/nibabel/nifti1.py
+++ b/nibabel/nifti1.py
@@ -552,7 +552,7 @@ def get_sizeondisk(self):
         return np.sum([e.get_sizeondisk() for e in self])
 
     def __repr__(self):
-        return 'Nifti1Extensions(%s)' % ', '.join(str(e) for e in self)
+        return 'Nifti1Extensions({})'.format(', '.join(str(e) for e in self))
 
     def write_to(self, fileobj, byteswap):
         """Write header extensions to fileobj
diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py
index 5697752ea4..511fa7f857 100644
--- a/nibabel/tests/test_data.py
+++ b/nibabel/tests/test_data.py
@@ -160,7 +160,7 @@ def test_data_path(with_nimd_env):
     tmpfile = pjoin(tmpdir, 'another_example.ini')
     with open(tmpfile, 'w') as fobj:
         fobj.write('[DATA]\n')
-        fobj.write('path = %s\n' % '/path/two')
+        fobj.write('path = {}\n'.format('/path/two'))
     assert get_data_path() == tst_list + ['/path/two'] + old_pth
 
diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index 5ee4fb3c15..819a270811 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -538,7 +538,7 @@ def test_slice_times(self):
         hdr.set_slice_duration(0.1)
         # We need a function to print out the Nones and floating point
         # values in a predictable way, for the tests below.
-        _stringer = lambda val: val is not None and '%2.1f' % val or None
+        _stringer = lambda val: val is not None and '{:2.1f}'.format(val) or None
         _print_me = lambda s: list(map(_stringer, s))
         # The following examples are from the nifti1.h documentation.
         hdr['slice_code'] = slice_order_codes['sequential increasing']
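Rule UP031 (and UP032, applied next) moves percent formatting toward `str.format` and f-strings. A minimal sketch of the progression, reusing the same message as the analyze.py hunk above:

    shape = (2, 3, 4)
    dims = ', '.join(str(s) for s in shape)

    msg = 'Data should be shape (%s)' % dims        # UP031 flags this
    msg = 'Data should be shape ({})'.format(dims)  # accepted fix
    msg = f'Data should be shape ({dims})'          # equivalent f-string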
From fe7d97c49faac5e2946dc320096d8a3f0d856e9f Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:13:50 +0200
Subject: [PATCH 136/203] STY: Apply ruff/pyupgrade rule UP032

UP032 Use f-string instead of `format` call
---
 nibabel/cmdline/dicomfs.py   | 4 +---
 nibabel/tests/test_nifti1.py | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/nibabel/cmdline/dicomfs.py b/nibabel/cmdline/dicomfs.py
index 552bb09319..afd994b151 100644
--- a/nibabel/cmdline/dicomfs.py
+++ b/nibabel/cmdline/dicomfs.py
@@ -193,9 +193,7 @@ def release(self, path, flags, fh):
 def get_opt_parser():
     # use module docstring for help output
     p = OptionParser(
-        usage='{} [OPTIONS] <DIRECTORY CONTAINING DICOMs> <mount point>'.format(
-            os.path.basename(sys.argv[0])
-        ),
+        usage=f'{os.path.basename(sys.argv[0])} [OPTIONS] <DIRECTORY CONTAINING DICOMs> <mount point>',
         version='%prog ' + nib.__version__,
     )
 
diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index 819a270811..5a04958587 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -538,7 +538,7 @@ def test_slice_times(self):
         hdr.set_slice_duration(0.1)
         # We need a function to print out the Nones and floating point
         # values in a predictable way, for the tests below.
-        _stringer = lambda val: val is not None and '{:2.1f}'.format(val) or None
+        _stringer = lambda val: val is not None and f'{val:2.1f}' or None
         _print_me = lambda s: list(map(_stringer, s))
         # The following examples are from the nifti1.h documentation.
         hdr['slice_code'] = slice_order_codes['sequential increasing']

From bf3e23e1d91ed68ea4b8eadba19bfc57ecc893ce Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:24:06 +0200
Subject: [PATCH 137/203] STY: Enforce ruff/pyupgrade rules (UP)

---
 pyproject.toml | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index ff5168f9c6..2840119c4f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -115,7 +115,12 @@ line-length = 99
 exclude = ["doc", "nibabel/externals", "tools", "version.py", "versioneer.py"]
 
 [tool.ruff.lint]
-select = ["F", "I", "Q"]
+select = [
+  "F",
+  "I",
+  "Q",
+  "UP",
+]
 ignore = [
   # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
   "W191",

From 7a23f67eb431330a7aca17a0eca9d4bae7be6d8e Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:19:42 +0200
Subject: [PATCH 138/203] STY: Apply ruff/Pylint rule PLE0101

PLE0101 Explicit return in `__init__`
---
 nibabel/openers.py | 2 +-
 nibabel/tmpdirs.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/openers.py b/nibabel/openers.py
index c3fa9a4783..9a306d4e47 100644
--- a/nibabel/openers.py
+++ b/nibabel/openers.py
@@ -68,7 +68,7 @@ def __init__(
                 raise TypeError('Must define either fileobj or filename')
             # Cast because GzipFile.myfileobj has type io.FileIO while open returns ty.IO
             fileobj = self.myfileobj = ty.cast(io.FileIO, open(filename, modestr))
-        return super().__init__(
+        super().__init__(
             filename='',
             mode=modestr,
             compresslevel=compresslevel,
diff --git a/nibabel/tmpdirs.py b/nibabel/tmpdirs.py
index 9d67f6acb7..2bcf9fdeba 100644
--- a/nibabel/tmpdirs.py
+++ b/nibabel/tmpdirs.py
@@ -54,7 +54,7 @@ def __init__(self, suffix='', prefix=tempfile.template, dir=None):
         >>> os.path.exists(tmpdir)
         False
         """
-        return super().__init__(suffix, prefix, dir)
+        super().__init__(suffix, prefix, dir)
 
 
 @contextmanager

From 747338cd86ea958de1f1b45e7d5d87ebe7d1a222 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:53:36 +0200
Subject: [PATCH 139/203] STY: Enforce ruff/Pylint rules, errors only (PLE)

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 2840119c4f..915ea9b815 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -118,6 +118,7 @@ exclude = ["doc", "nibabel/externals", "tools", "version.py", "versioneer.py"]
 select = [
   "F",
   "I",
+  "PLE",
   "Q",
   "UP",
 ]

From 930cc28a306d211e09228ca1ebef8966586b17e2 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:22:45 +0200
Subject: [PATCH 140/203] STY: Apply ruff/flake8-raise rule RSE102

RSE102 Unnecessary parentheses on raised exception
---
 nibabel/streamlines/tractogram_file.py | 6 +++---
 nibabel/tests/test_volumeutils.py      | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/nibabel/streamlines/tractogram_file.py b/nibabel/streamlines/tractogram_file.py
index 557261e9a0..65add3e2f2 100644
--- a/nibabel/streamlines/tractogram_file.py
+++ b/nibabel/streamlines/tractogram_file.py
@@ -74,7 +74,7 @@ def is_correct_format(cls, fileobj):
             Returns True if `fileobj` is in the right streamlines file format,
             otherwise returns False.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @classmethod
     def create_empty_header(cls):
@@ -101,7 +101,7 @@ def load(cls, fileobj, lazy_load=True):
             Returns an object containing tractogram data and header
             information.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @abstractmethod
     def save(self, fileobj):
@@ -113,4 +113,4 @@ def save(self, fileobj):
             If string, a filename; otherwise an open file-like object
             opened and ready to write.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py
index 07ca9a6baa..9d321f07e4 100644
--- a/nibabel/tests/test_volumeutils.py
+++ b/nibabel/tests/test_volumeutils.py
@@ -989,7 +989,7 @@ def test_seek_tell_logic():
 
     class BabyBio(BytesIO):
         def seek(self, *args):
-            raise OSError()
+            raise OSError
 
     bio = BabyBio()
     # Fresh fileobj, position 0, can't seek - error

From 47df196256e67a248abf664d84c681c54f0bd784 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:47:06 +0200
Subject: [PATCH 141/203] STY: Enforce ruff/flake8-raise rules (RSE)

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 2840119c4f..55e96d992c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -119,6 +119,7 @@ select = [
   "F",
   "I",
   "Q",
+  "RSE",
   "UP",
 ]
 ignore = [
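PLE0101 (patch 138 above) flags `return` with a value inside `__init__`. Since `__init__` must return None, `return super().__init__(...)` only works by accident (the superclass call itself returns None); dropping the `return` makes that explicit. A minimal sketch, with an illustrative class name only:

    class CountedList(list):
        def __init__(self, *args):
            # not: return super().__init__(*args)
            super().__init__(*args)
            self.initial_size = len(self)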
From aa1315277b5f2b8ff9cfda4e16b7ab98a57eecf4 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:05:30 +0200
Subject: [PATCH 142/203] STY: Apply ruff/flake8-bugbear rule B009

B009 Do not call `getattr` with a constant attribute value. It is not any
safer than normal property access.
---
 nibabel/tests/conftest.py | 2 +-
 nibabel/viewers.py        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/tests/conftest.py b/nibabel/tests/conftest.py
index 3cf54a34c5..fb13708450 100644
--- a/nibabel/tests/conftest.py
+++ b/nibabel/tests/conftest.py
@@ -6,7 +6,7 @@
 # Generate dynamic fixtures
 def pytest_generate_tests(metafunc):
     if 'supported_dtype' in metafunc.fixturenames:
-        if metafunc.cls is None or not getattr(metafunc.cls, 'image_class'):
+        if metafunc.cls is None or not metafunc.cls.image_class:
             raise pytest.UsageError(
                 'Attempting to use supported_dtype fixture outside an image test case'
             )
diff --git a/nibabel/viewers.py b/nibabel/viewers.py
index 0dc2f0dafc..4dd8a1c258 100644
--- a/nibabel/viewers.py
+++ b/nibabel/viewers.py
@@ -447,7 +447,7 @@ def _set_position(self, x, y, z, notify=True):
     # Matplotlib handlers ####################################################
     def _in_axis(self, event):
         """Return axis index if within one of our axes, else None"""
-        if getattr(event, 'inaxes') is None:
+        if event.inaxes is None:
             return None
         for ii, ax in enumerate(self._axes):
             if event.inaxes is ax:

From d6ea77beed3db1361c04165a054f4081cf9b8dd8 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:07:35 +0200
Subject: [PATCH 143/203] STY: Apply ruff/flake8-bugbear rule B015

B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend
`assert` or remove it.
---
 nibabel/gifti/tests/test_parse_gifti_fast.py | 4 ++--
 nibabel/tests/test_openers.py                | 6 +++---
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py
index 8cb7c96794..6ca54df038 100644
--- a/nibabel/gifti/tests/test_parse_gifti_fast.py
+++ b/nibabel/gifti/tests/test_parse_gifti_fast.py
@@ -241,7 +241,7 @@ def test_load_dataarray1():
         me = img.darrays[0].meta
         assert 'AnatomicalStructurePrimary' in me
         assert 'AnatomicalStructureSecondary' in me
-        me['AnatomicalStructurePrimary'] == 'CortexLeft'
+        assert me['AnatomicalStructurePrimary'] == 'CortexLeft'
         assert_array_almost_equal(img.darrays[0].coordsys.xform, np.eye(4, 4))
         assert xform_codes.niistring[img.darrays[0].coordsys.dataspace] == 'NIFTI_XFORM_TALAIRACH'
         assert xform_codes.niistring[img.darrays[0].coordsys.xformspace] == 'NIFTI_XFORM_TALAIRACH'
@@ -279,7 +279,7 @@ def test_load_dataarray4():
 def test_dataarray5():
     img5 = load(DATA_FILE5)
     for da in img5.darrays:
-        gifti_endian_codes.byteorder[da.endian] == 'little'
+        assert gifti_endian_codes.byteorder[da.endian] == 'little'
     assert_array_almost_equal(img5.darrays[0].data, DATA_FILE5_darr1)
     assert_array_almost_equal(img5.darrays[1].data, DATA_FILE5_darr2)
     # Round trip tested below
diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py
index 15290d5ef9..0b58794331 100644
--- a/nibabel/tests/test_openers.py
+++ b/nibabel/tests/test_openers.py
@@ -431,17 +431,17 @@ def test_DeterministicGzipFile_fileobj():
         with open('test.gz', 'wb') as fobj:
             with DeterministicGzipFile(filename='', mode='wb', fileobj=fobj) as gzobj:
                 gzobj.write(msg)
-        md5sum('test.gz') == ref_chksum
+        assert md5sum('test.gz') == ref_chksum
 
         with open('test.gz', 'wb') as fobj:
             with DeterministicGzipFile(fileobj=fobj, mode='wb') as gzobj:
                 gzobj.write(msg)
-        md5sum('test.gz') == ref_chksum
+        assert md5sum('test.gz') == ref_chksum
 
         with open('test.gz', 'wb') as fobj:
             with DeterministicGzipFile(filename='test.gz', mode='wb', fileobj=fobj) as gzobj:
                 gzobj.write(msg)
-        md5sum('test.gz') == ref_chksum
+        assert md5sum('test.gz') == ref_chksum
 
 
 def test_bitwise_determinism():

From f064b62e8045a60065b9a6ac48670a4def46af38 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:31:14 +0200
Subject: [PATCH 144/203] STY: Enforce ruff/flake8-bugbear rules (B)

---
 pyproject.toml | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 2840119c4f..ead2782b23 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -116,12 +116,23 @@ exclude = ["doc", "nibabel/externals", "tools", "version.py", "versioneer.py"]
 
 [tool.ruff.lint]
 select = [
+  "B",
  "F",
  "I",
  "Q",
  "UP",
 ]
 ignore = [
+  "B006",  # TODO: enable
+  "B008",  # TODO: enable
+  "B007",
+  "B011",
+  "B017",  # TODO: enable
+  "B018",
+  "B020",
+  "B023",  # TODO: enable
+  "B028",
+  "B904",
  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
  "W191",
  "E111",

From d53b64cee8ed919ad24ba40657eb1ea37833e364 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:58:30 +0200
Subject: [PATCH 145/203] STY: Apply ruff/flake8-comprehensions rule C406

C406 Unnecessary `list` literal (rewrite as a `dict` literal)
---
 nibabel/cifti2/tests/test_cifti2.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py
index 895b8f9597..1d9d5097c0 100644
--- a/nibabel/cifti2/tests/test_cifti2.py
+++ b/nibabel/cifti2/tests/test_cifti2.py
@@ -37,7 +37,7 @@ def test_cifti2_metadata():
     assert len(md) == 1
     assert list(iter(md)) == ['a']
     assert md['a'] == 'aval'
-    assert md.data == dict([('a', 'aval')])
+    assert md.data == {'a': 'aval'}
 
     with pytest.warns(FutureWarning):
         md = ci.Cifti2MetaData(metadata={'a': 'aval'})
@@ -57,7 +57,7 @@ def test_cifti2_metadata():
     md['a'] = 'aval'
     assert md['a'] == 'aval'
     assert len(md) == 1
-    assert md.data == dict([('a', 'aval')])
+    assert md.data == {'a': 'aval'}
 
     del md['a']
     assert len(md) == 0

From 9e007ece3aedff5e9518ba6e9ab95395bdabcfb6 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:59:16 +0200
Subject: [PATCH 146/203] STY: Apply ruff/flake8-comprehensions rule C413

C413 Unnecessary `list` call around `sorted()`
---
 nibabel/cifti2/tests/test_cifti2.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py
index 1d9d5097c0..6382dab9d6 100644
--- a/nibabel/cifti2/tests/test_cifti2.py
+++ b/nibabel/cifti2/tests/test_cifti2.py
@@ -392,7 +392,7 @@ def test_matrix():
     m[0] = mim_1
     assert list(m.mapped_indices) == [1]
     m.insert(0, mim_0)
-    assert list(sorted(m.mapped_indices)) == [0, 1]
+    assert sorted(m.mapped_indices) == [0, 1]
     assert h.number_of_mapped_indices == 2
     assert h.get_index_map(0) == mim_0
     assert h.get_index_map(1) == mim_1
From 102bbf7f750f443f6e13aee04bbffc764a67e6d4 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:01:44 +0200
Subject: [PATCH 147/203] STY: Apply ruff/flake8-comprehensions rule C416

C416 Unnecessary `dict` comprehension (rewrite using `dict()`)
---
 nibabel/brikhead.py            | 2 +-
 nibabel/nicom/dicomwrappers.py | 4 +---
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py
index da8692efd3..d187a6b34b 100644
--- a/nibabel/brikhead.py
+++ b/nibabel/brikhead.py
@@ -198,7 +198,7 @@ def parse_AFNI_header(fobj):
             return parse_AFNI_header(src)
     # unpack variables in HEAD file
     head = fobj.read().split('\n\n')
-    return {key: value for key, value in map(_unpack_var, head)}
+    return dict(map(_unpack_var, head))
 
 
 class AFNIArrayProxy(ArrayProxy):
diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py
index 3842248fd5..009880e496 100755
--- a/nibabel/nicom/dicomwrappers.py
+++ b/nibabel/nicom/dicomwrappers.py
@@ -685,9 +685,7 @@ def __init__(self, dcm_data, frame_filters=None):
             frame_slc_pos = [np.inner(ipp, self.slice_normal) for ipp in frame_ipps]
             rnd_slc_pos = np.round(frame_slc_pos, 4)
             uniq_slc_pos = np.unique(rnd_slc_pos)
-            pos_ord_map = {
-                val: order for val, order in zip(uniq_slc_pos, np.argsort(uniq_slc_pos))
-            }
+            pos_ord_map = dict(zip(uniq_slc_pos, np.argsort(uniq_slc_pos)))
             self._frame_slc_ord = [pos_ord_map[pos] for pos in rnd_slc_pos]
             if len(self._frame_slc_ord) > 1:
                 self._slice_spacing = (

From 102bbf7f750f443f6e13aee04bbffc764a67e6d4 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:01:44 +0200
Subject: [PATCH 148/203] STY: Apply ruff/flake8-comprehensions rule C419

C419 Unnecessary list comprehension
---
 nibabel/casting.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/casting.py b/nibabel/casting.py
index 31e27d0e8c..042a2f415d 100644
--- a/nibabel/casting.py
+++ b/nibabel/casting.py
@@ -764,7 +764,7 @@ def able_int_type(values):
     >>> able_int_type([-1, 1]) == np.int8
     True
     """
-    if any([v % 1 for v in values]):
+    if any(v % 1 for v in values):
         return None
     mn = min(values)
    mx = max(values)

From a28ce642ea707d0456579411f33049e8e2e0a9ab Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:36:57 +0200
Subject: [PATCH 149/203] STY: Enforce ruff/flake8-comprehensions rules (C4)

---
 pyproject.toml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index f7d116ea92..becc93366d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -117,6 +117,7 @@ exclude = ["doc", "nibabel/externals", "tools", "version.py", "versioneer.py"]
 [tool.ruff.lint]
 select = [
   "B",
+  "C4",
  "F",
  "I",
  "PLE",
  "Q",
  "RSE",
  "UP",
 ]
 ignore = [
@@ -135,6 +136,9 @@ ignore = [
  "B023",  # TODO: enable
  "B028",
  "B904",
+  "C401",
+  "C408",
+  "C416",
  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
  "W191",
  "E111",

From 2cfabbd30632b6e2231f061c997dc5be20611984 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:15:24 +0200
Subject: [PATCH 150/203] STY: Apply ruff/flake8-type-checking rule TCH001

TCH001 Move application import into a type-checking block
---
 nibabel/dataobj_images.py | 4 ++--
 nibabel/imageclasses.py   | 8 ++++++--
 nibabel/spatialimages.py  | 5 +++--
 3 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py
index 6850599014..565a228794 100644
--- a/nibabel/dataobj_images.py
+++ b/nibabel/dataobj_images.py
@@ -14,14 +14,14 @@
 
 import numpy as np
 
-from .arrayproxy import ArrayLike
 from .deprecated import deprecate_with_version
 from .filebasedimages import FileBasedHeader, FileBasedImage
-from .fileholders import FileMap
 
 if ty.TYPE_CHECKING:
     import numpy.typing as npt
 
+    from .arrayproxy import ArrayLike
+    from .fileholders import FileMap
     from .filename_parser import FileSpec
 
 ArrayImgT = ty.TypeVar('ArrayImgT', bound='DataobjImage')
diff --git a/nibabel/imageclasses.py b/nibabel/imageclasses.py
index 20cf1cac9c..66f984e268 100644
--- a/nibabel/imageclasses.py
+++ b/nibabel/imageclasses.py
@@ -10,11 +10,11 @@
 
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from .analyze import AnalyzeImage
 from .brikhead import AFNIImage
 from .cifti2 import Cifti2Image
-from .dataobj_images import DataobjImage
-from .filebasedimages import FileBasedImage
 from .freesurfer import MGHImage
 from .gifti import GiftiImage
 from .minc1 import Minc1Image
@@ -25,6 +25,10 @@
 from .spm2analyze import Spm2AnalyzeImage
 from .spm99analyze import Spm99AnalyzeImage
 
+if TYPE_CHECKING:
+    from .dataobj_images import DataobjImage
+    from .filebasedimages import FileBasedImage
+
 # Ordered by the load/save priority.
 all_image_classes: list[type[FileBasedImage]] = [
     Nifti1Pair,
diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py
index f4d27791b2..bd5ff8c11b 100644
--- a/nibabel/spatialimages.py
+++ b/nibabel/spatialimages.py
@@ -139,11 +139,9 @@
 
 import numpy as np
 
-from .arrayproxy import ArrayLike
 from .casting import sctypes_aliases
 from .dataobj_images import DataobjImage
 from .filebasedimages import FileBasedHeader, FileBasedImage
-from .fileholders import FileMap
 from .fileslice import canonical_slicers
 from .orientations import apply_orientation, inv_ornt_aff
 from .viewers import OrthoSlicer3D
@@ -157,6 +155,9 @@
 if ty.TYPE_CHECKING:
     import numpy.typing as npt
 
+    from .arrayproxy import ArrayLike
+    from .fileholders import FileMap
+
 SpatialImgT = ty.TypeVar('SpatialImgT', bound='SpatialImage')
 SpatialHdrT = ty.TypeVar('SpatialHdrT', bound='SpatialHeader')
 

From bb221918bb1c644d4e944fb3219d18cf7ad82fc3 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:16:18 +0200
Subject: [PATCH 151/203] STY: Apply ruff/flake8-type-checking rule TCH002

TCH002 Move third-party import into a type-checking block
---
 nibabel/testing/__init__.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py
index 992ef2ead4..f41c657f5f 100644
--- a/nibabel/testing/__init__.py
+++ b/nibabel/testing/__init__.py
@@ -26,12 +26,14 @@
 from .helpers import assert_data_similar, bytesio_filemap, bytesio_round_trip
 from .np_features import memmap_after_ufunc
 
+if ty.TYPE_CHECKING:
+    from importlib_resources.abc import Traversable
+
 try:
     from importlib.resources import as_file, files
     from importlib.resources.abc import Traversable
 except ImportError:  # PY38
     from importlib_resources import as_file, files
-    from importlib_resources.abc import Traversable
 
 
 def get_test_data(

From 0a27464e27682b48de188e4bf4e97b91c0c8fdd8 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:19:08 +0200
Subject: [PATCH 152/203] STY: Apply ruff/flake8-type-checking rule TCH003

TCH003 Move standard library import into a type-checking block
---
 nibabel/_compression.py     | 3 ++-
 nibabel/fileholders.py      | 4 +++-
 nibabel/optpkg.py           | 4 +++-
 nibabel/spatialimages.py    | 5 +++--
 nibabel/testing/__init__.py | 3 +--
 nibabel/volumeutils.py      | 3 ++-
 6 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/nibabel/_compression.py b/nibabel/_compression.py
index f697fa54cc..871be2629f 100644
--- a/nibabel/_compression.py
+++ b/nibabel/_compression.py
@@ -12,12 +12,13 @@
 
 import bz2
 import gzip
-import io
 import typing as ty
 
 from .optpkg import optional_package
 
 if ty.TYPE_CHECKING:
+    import io
+
     import indexed_gzip  # type: ignore[import]
     import pyzstd
 
diff --git a/nibabel/fileholders.py b/nibabel/fileholders.py
index 3db4c62a9e..df7c34af63 100644
--- a/nibabel/fileholders.py
+++ b/nibabel/fileholders.py
@@ -10,12 +10,14 @@
 
 from __future__ import annotations
 
-import io
 import typing as ty
 from copy import copy
 
 from .openers import ImageOpener
 
+if ty.TYPE_CHECKING:
+    import io
+
 
 class FileHolderError(Exception):
     pass
diff --git a/nibabel/optpkg.py b/nibabel/optpkg.py
index bfe6a629cc..90b8ded518 100644
--- a/nibabel/optpkg.py
+++ b/nibabel/optpkg.py
@@ -3,12 +3,14 @@
 from __future__ import annotations
 
 import typing as ty
-from types import ModuleType
 
 from packaging.version import Version
 
 from .tripwire import TripWire
 
+if ty.TYPE_CHECKING:
+    from types import ModuleType
+
 
 def _check_pkg_version(min_version: str | Version) -> ty.Callable[[ModuleType], bool]:
     min_ver = Version(min_version) if isinstance(min_version, str) else min_version
diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py
index bd5ff8c11b..ce8ee3c6e6 100644
--- a/nibabel/spatialimages.py
+++ b/nibabel/spatialimages.py
@@ -132,9 +132,7 @@
 
 from __future__ import annotations
 
-import io
 import typing as ty
-from collections.abc import Sequence
 from typing import Literal
 
 import numpy as np
@@ -153,6 +151,9 @@
     from functools import lru_cache as cache
 
 if ty.TYPE_CHECKING:
+    import io
+    from collections.abc import Sequence
+
     import numpy.typing as npt
 
     from .arrayproxy import ArrayLike
diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py
index f41c657f5f..be111747b2 100644
--- a/nibabel/testing/__init__.py
+++ b/nibabel/testing/__init__.py
@@ -27,11 +27,10 @@
 from .np_features import memmap_after_ufunc
 
 if ty.TYPE_CHECKING:
-    from importlib_resources.abc import Traversable
+    from importlib.resources.abc import Traversable
 
 try:
     from importlib.resources import as_file, files
-    from importlib.resources.abc import Traversable
 except ImportError:  # PY38
     from importlib_resources import as_file, files
 
diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py
index c2387f0949..6e43f79186 100644
--- a/nibabel/volumeutils.py
+++ b/nibabel/volumeutils.py
@@ -10,7 +10,6 @@
 
 from __future__ import annotations
 
-import io
 import sys
 import typing as ty
 import warnings
@@ -25,6 +24,8 @@
 from .externals.oset import OrderedSet
 
 if ty.TYPE_CHECKING:
+    import io
+
     import numpy.typing as npt
 
     Scalar = np.number | float

From 8ca899aa43c0b690dec4a04a44a723da831463d8 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:51:49 +0200
Subject: [PATCH 153/203] STY: Enforce ruff/flake8-type-checking rules (TCH)

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index f7d116ea92..d45c4e19fd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -122,6 +122,7 @@ select = [
  "PLE",
  "Q",
  "RSE",
+  "TCH",
  "UP",
 ]
 ignore = [
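The TCH rules (patches 150-153) move imports that are needed only for annotations into an `if TYPE_CHECKING:` block, so they cost nothing at runtime. This works because `from __future__ import annotations` defers annotation evaluation. A minimal sketch, with an illustrative function name only:

    from __future__ import annotations

    import typing as ty

    if ty.TYPE_CHECKING:
        import io  # seen by type checkers, never imported at runtime


    def read_header(fobj: io.BytesIO) -> bytes:
        return fobj.read(348)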
From 7af724bf5294257b315424297f0c9154259aaf92 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:52:04 +0200
Subject: [PATCH 154/203] STY: Apply ruff/flake8-pie rule PIE807

PIE807 Prefer `list` over useless lambda
---
 nibabel/streamlines/tests/test_tractogram.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py
index 9159688548..72b84fac6e 100644
--- a/nibabel/streamlines/tests/test_tractogram.py
+++ b/nibabel/streamlines/tests/test_tractogram.py
@@ -49,8 +49,8 @@ def make_fake_tractogram(
 ):
     """Make multiple streamlines according to provided requirements."""
     all_streamlines = []
-    all_data_per_point = defaultdict(lambda: [])
-    all_data_per_streamline = defaultdict(lambda: [])
+    all_data_per_point = defaultdict(list)
+    all_data_per_streamline = defaultdict(list)
     for nb_points in list_nb_points:
         data = make_fake_streamline(
             nb_points, data_per_point_shapes, data_for_streamline_shapes, rng

From b4fb300525adacf7167b07ccf89a04232e72c866 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:52:54 +0200
Subject: [PATCH 155/203] STY: Apply ruff/flake8-pie rule PIE808

PIE808 Unnecessary `start` argument in `range`
---
 nibabel/ecat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/ecat.py b/nibabel/ecat.py
index 34ff06323c..c4b55624f9 100644
--- a/nibabel/ecat.py
+++ b/nibabel/ecat.py
@@ -957,7 +957,7 @@ def to_file_map(self, file_map=None):
         hdr.write_to(hdrf)
 
         # Write every frames
-        for index in range(0, self.header['num_frames']):
+        for index in range(self.header['num_frames']):
             # Move to subheader offset
             frame_offset = subheaders._get_frame_offset(index) - 512
             imgf.seek(frame_offset)

From 576b74bd1ef5d0373cfe5d17bc8ce06f366bc9c0 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:39:54 +0200
Subject: [PATCH 156/203] STY: Enforce ruff/flake8-pie rules (PIE)

---
 pyproject.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index becc93366d..7f416c13ad 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -120,6 +120,7 @@ select = [
  "C4",
  "F",
  "I",
+  "PIE",
  "PLE",
  "Q",
  "RSE",
@@ -139,6 +140,7 @@ ignore = [
  "C401",
  "C408",
  "C416",
+  "PIE790",
  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
  "W191",
  "E111",

From d53b64cee8ed919ad24ba40657eb1ea37833e364 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 15:44:48 +0200
Subject: [PATCH 157/203] STY: Apply ruff/refurb rule FURB167

FURB167 Use of regular expression alias
---
 nibabel/nicom/ascconv.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py
index 8ec72fb3ec..6d72436039 100644
--- a/nibabel/nicom/ascconv.py
+++ b/nibabel/nicom/ascconv.py
@@ -10,7 +10,7 @@
 
 ASCCONV_RE = re.compile(
     r'### ASCCONV BEGIN((?:\s*[^=\s]+=[^=\s]+)*) ###\n(.*?)\n### ASCCONV END ###',
-    flags=re.M | re.S,
+    flags=re.MULTILINE | re.DOTALL,
 )
 

From 1abcdec867c54c3c58e2d8a7c0215a128e2c9f69 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 22:51:25 +0200
Subject: [PATCH 158/203] STY: Enforce ruff/refurb rules (FURB)

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index becc93366d..316abdecad 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -119,6 +119,7 @@ select = [
  "B",
  "C4",
  "F",
+  "FURB",
  "I",
  "PLE",
  "Q",

From 5cc97c6ab4746589fac78d84a7d5341c20f70cd1 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:49:32 +0200
Subject: [PATCH 159/203] STY: Apply ruff/flake8-pyi rule PYI034

PYI034 `__enter__` methods usually return `self` at runtime
---
 nibabel/openers.py | 3 ++-
 tox.ini            | 1 +
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/nibabel/openers.py b/nibabel/openers.py
index 9a306d4e47..35b10c20a4 100644
--- a/nibabel/openers.py
+++ b/nibabel/openers.py
@@ -22,6 +22,7 @@
     from types import TracebackType
 
     from _typeshed import WriteableBuffer
+    from typing_extensions import Self
 
     ModeRT = ty.Literal['r', 'rt']
     ModeRB = ty.Literal['rb']
@@ -246,7 +247,7 @@ def close_if_mine(self) -> None:
         if self.me_opened:
             self.close()
 
-    def __enter__(self) -> Opener:
+    def __enter__(self) -> Self:
         return self
 
     def __exit__(
diff --git a/tox.ini b/tox.ini
index 5df35c8d38..675526f944 100644
--- a/tox.ini
+++ b/tox.ini
@@ -181,6 +181,7 @@ deps =
     numpy
     pyzstd
     importlib_resources
+    typing_extensions
skip_install = true
commands =
    mypy nibabel

From df862cce6f9c90536aa0b44337822d64ce792326 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:41:32 +0200
Subject: [PATCH 160/203] STY: Enforce ruff/flake8-pyi rules (PYI)

---
 pyproject.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index fa3f881162..0dd49c847d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -123,6 +123,7 @@ select = [
  "I",
  "PIE",
  "PLE",
+  "PYI",
  "Q",
  "RSE",
  "TCH",
@@ -143,6 +144,7 @@ ignore = [
  "C408",
  "C416",
  "PIE790",
+  "PYI024",
  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
  "W191",
  "E111",

From 38fa63868dc6180641f806a063bfa54d85dcd33e Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:07:42 +0200
Subject: [PATCH 161/203] STY: Apply ruff/flynt rule FLY002

FLY002 Consider f-string instead of string join
---
 nibabel/batteryrunners.py         |  2 +-
 nibabel/gifti/tests/test_gifti.py | 14 +++++++-------
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/nibabel/batteryrunners.py b/nibabel/batteryrunners.py
index 30727f3962..860b9b993c 100644
--- a/nibabel/batteryrunners.py
+++ b/nibabel/batteryrunners.py
@@ -252,7 +252,7 @@ def __str__(self):
     def message(self):
         """formatted message string, including fix message if present"""
         if self.fix_msg:
-            return '; '.join((self.problem_msg, self.fix_msg))
+            return f'{self.problem_msg}; {self.fix_msg}'
         return self.problem_msg
 
     def log_raise(self, logger, error_level=40):
diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py
index 1cead0d928..97c929ac4c 100644
--- a/nibabel/gifti/tests/test_gifti.py
+++ b/nibabel/gifti/tests/test_gifti.py
@@ -423,13 +423,13 @@ def test_gifti_coord(capsys):
     gcs.xform = None
     gcs.print_summary()
     captured = capsys.readouterr()
-    assert captured.out == '\n'.join(
-        [
-            'Dataspace:  NIFTI_XFORM_UNKNOWN',
-            'XFormSpace:  NIFTI_XFORM_UNKNOWN',
-            'Affine Transformation Matrix: ',
-            ' None\n',
-        ]
+    assert (
+        captured.out
+        == """Dataspace:  NIFTI_XFORM_UNKNOWN
+XFormSpace:  NIFTI_XFORM_UNKNOWN
+Affine Transformation Matrix: 
+ None
+ """
     )
 
     gcs.to_xml()
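FLY002 (patch 161 above) rewrites `str.join` over a static sequence as an f-string, as in the batteryrunners.py hunk. A minimal sketch of the pattern, with illustrative message text:

    problem_msg = 'pixdim[0] (qfac) should be 1 or -1'
    fix_msg = 'setting qfac to 1'

    message = '; '.join((problem_msg, fix_msg))  # FLY002 flags this
    message = f'{problem_msg}; {fix_msg}'        # equivalent f-string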
From 1c8010bc3d51c031a393558192aa99b30782cc06 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 17:57:58 +0200
Subject: [PATCH 162/203] STY: Enforce ruff/flynt rules (FLY)

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 0dd49c847d..3e2ffa0b43 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -119,6 +119,7 @@ select = [
  "B",
  "C4",
  "F",
+  "FLY",
  "FURB",
  "I",

From 27baa683961cdfd42153d368c79ee3ea32ef4ab2 Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Mon, 23 Sep 2024 09:34:11 -0400
Subject: [PATCH 163/203] sty: Remove unnecessary trailing whitespace in
 summary

---
 nibabel/gifti/gifti.py            | 2 +-
 nibabel/gifti/tests/test_gifti.py | 7 ++++---
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index caee7c3500..c983a14dfd 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -374,7 +374,7 @@ def _to_xml_element(self):
     def print_summary(self):
         print('Dataspace: ', xform_codes.niistring[self.dataspace])
         print('XFormSpace: ', xform_codes.niistring[self.xformspace])
-        print('Affine Transformation Matrix: \n', self.xform)
+        print('Affine Transformation Matrix:\n', self.xform)
 
 
 def _data_tag_element(dataarray, encoding, dtype, ordering):
diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py
index 97c929ac4c..416faf3c84 100644
--- a/nibabel/gifti/tests/test_gifti.py
+++ b/nibabel/gifti/tests/test_gifti.py
@@ -425,11 +425,12 @@ def test_gifti_coord(capsys):
     captured = capsys.readouterr()
     assert (
         captured.out
-        == """Dataspace:  NIFTI_XFORM_UNKNOWN
+        == """\
+Dataspace:  NIFTI_XFORM_UNKNOWN
 XFormSpace:  NIFTI_XFORM_UNKNOWN
-Affine Transformation Matrix: 
+Affine Transformation Matrix:
  None
- """
+"""
     )
 
     gcs.to_xml()

From aeb7a8d2a627afc450618ae844101e1f8dfb98ce Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 15:56:07 +0200
Subject: [PATCH 164/203] STY: Apply ruff/Perflint rule PERF102

PERF102 When using only the keys of a dict use the `keys()` method
PERF102 When using only the values of a dict use the `values()` method
---
 nibabel/streamlines/tests/test_streamlines.py | 8 ++++----
 nibabel/testing/helpers.py                    | 2 +-
 nibabel/tests/test_analyze.py                 | 8 ++++----
 nibabel/tests/test_files_interface.py         | 4 ++--
 nibabel/tests/test_nifti1.py                  | 2 +-
 nibabel/tests/test_spm99analyze.py            | 4 ++--
 6 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py
index 857e64fec9..740b4c2616 100644
--- a/nibabel/streamlines/tests/test_streamlines.py
+++ b/nibabel/streamlines/tests/test_streamlines.py
@@ -207,7 +207,7 @@ def test_save_tractogram_file(self):
 
     def test_save_empty_file(self):
         tractogram = Tractogram(affine_to_rasmm=np.eye(4))
-        for ext, cls in FORMATS.items():
+        for ext in FORMATS:
             with InTemporaryDirectory():
                 filename = 'streamlines' + ext
                 nib.streamlines.save(tractogram, filename)
@@ -216,7 +216,7 @@ def test_save_empty_file(self):
 
     def test_save_simple_file(self):
         tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4))
-        for ext, cls in FORMATS.items():
+        for ext in FORMATS:
             with InTemporaryDirectory():
                 filename = 'streamlines' + ext
                 nib.streamlines.save(tractogram, filename)
@@ -262,7 +262,7 @@ def test_save_complex_file(self):
     def test_save_sliced_tractogram(self):
         tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4))
         original_tractogram = tractogram.copy()
-        for ext, cls in FORMATS.items():
+        for ext in FORMATS:
             with InTemporaryDirectory():
                 filename = 'streamlines' + ext
                 nib.streamlines.save(tractogram[::2], filename)
@@ -283,7 +283,7 @@ def test_save_from_generator(self):
         tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4))
 
         # Just to create a generator
-        for ext, _ in FORMATS.items():
+        for ext in FORMATS:
             filtered = (s for s in tractogram.streamlines if True)
             lazy_tractogram = LazyTractogram(lambda: filtered, affine_to_rasmm=np.eye(4))
 
diff --git a/nibabel/testing/helpers.py b/nibabel/testing/helpers.py
index ae859d6572..ad4bf258cd 100644
--- a/nibabel/testing/helpers.py
+++ b/nibabel/testing/helpers.py
@@ -14,7 +14,7 @@
 def bytesio_filemap(klass):
     """Return bytes io filemap for this image class `klass`"""
     file_map = klass.make_file_map()
-    for name, fileholder in file_map.items():
+    for fileholder in file_map.values():
         fileholder.fileobj = BytesIO()
         fileholder.pos = 0
     return file_map
diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py
index cb7b8d686d..d3c6211bfc 100644
--- a/nibabel/tests/test_analyze.py
+++ b/nibabel/tests/test_analyze.py
@@ -728,7 +728,7 @@ def test_data_hdr_cache(self):
         IC = self.image_class
         # save an image to a file map
         fm = IC.make_file_map()
-        for key, value in fm.items():
+        for key in fm:
             fm[key].fileobj = BytesIO()
         shape = (2, 3, 4)
         data = np.arange(24, dtype=np.int8).reshape(shape)
@@ -831,7 +831,7 @@ def test_header_updating(self):
         hdr = img.header
         hdr.set_zooms((4, 5, 6))
         # Save / reload using bytes IO objects
-        for key, value in img.file_map.items():
+        for value in img.file_map.values():
             value.fileobj = BytesIO()
         img.to_file_map()
         hdr_back = img.from_file_map(img.file_map).header
@@ -842,7 +842,7 @@ def test_header_updating(self):
         assert_array_equal(hdr.get_zooms(), (2, 3, 4))
         # Modify affine in-place?  Update on save.
         img.affine[0, 0] = 9
-        for key, value in img.file_map.items():
+        for value in img.file_map.values():
             value.fileobj = BytesIO()
         img.to_file_map()
         hdr_back = img.from_file_map(img.file_map).header
@@ -864,7 +864,7 @@ def test_pickle(self):
         assert_array_equal(img.get_fdata(), img2.get_fdata())
         assert img.header == img2.header
         # Save / reload using bytes IO objects
-        for key, value in img.file_map.items():
+        for value in img.file_map.values():
             value.fileobj = BytesIO()
         img.to_file_map()
         img_prox = img.from_file_map(img.file_map)
diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py
index 07e394eca4..b3562b6083 100644
--- a/nibabel/tests/test_files_interface.py
+++ b/nibabel/tests/test_files_interface.py
@@ -28,7 +28,7 @@ def test_files_spatialimages():
     ]
     for klass in klasses:
         file_map = klass.make_file_map()
-        for key, value in file_map.items():
+        for value in file_map.values():
             assert value.filename is None
             assert value.fileobj is None
             assert value.pos == 0
@@ -41,7 +41,7 @@ def test_files_spatialimages():
             img = klass(arr.astype(np.float32), aff)
         else:
             img = klass(arr, aff)
-        for key, value in img.file_map.items():
+        for value in img.file_map.values():
            assert value.filename is None
            assert value.fileobj is None
            assert value.pos == 0
diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index ec4b8674eb..52e38fded2 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -820,7 +820,7 @@ def _qform_rt(self, img):
         hdr['qform_code'] = 3
         hdr['sform_code'] = 4
         # Save / reload using bytes IO objects
-        for key, value in img.file_map.items():
+        for value in img.file_map.values():
             value.fileobj = BytesIO()
         img.to_file_map()
         return img.from_file_map(img.file_map)
diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py
index ada92d3b05..26098d8ede 100644
--- a/nibabel/tests/test_spm99analyze.py
+++ b/nibabel/tests/test_spm99analyze.py
@@ -423,7 +423,7 @@ def test_mat_read(self):
        aff = np.diag([2, 3, 4, 1])  # no LR flip in affine
        img = img_klass(arr, aff)
        fm = img.file_map
-        for key, value in fm.items():
+        for value in fm.values():
            value.fileobj = BytesIO()
        # Test round trip
        img.to_file_map()
@@ -475,7 +475,7 @@ def test_none_affine(self):
            img = img_klass(np.zeros((2, 3, 4)), None)
            aff = img.header.get_best_affine()
            # Save / reload using bytes IO objects
-            for key, value in img.file_map.items():
+            for value in img.file_map.values():
                value.fileobj = BytesIO()
            img.to_file_map()
            img_back = img.from_file_map(img.file_map)

From d6b6c3b1590d9644217923ab7cb1708eb8c694da Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:01:28 +0200
Subject: [PATCH 165/203] STY: Apply ruff/Perflint rule PERF401

PERF401 Use a list comprehension to create a transformed list
---
 nibabel/data.py                           | 3 +--
 nibabel/nicom/tests/test_dicomwrappers.py | 7 +++----
 nibabel/tests/test_euler.py               | 6 +-----
 nibabel/tests/test_filehandles.py         | 3 +--
 4 files changed, 6 insertions(+), 13 deletions(-)

diff --git a/nibabel/data.py b/nibabel/data.py
index c49580d09b..8ea056d8e7 100644
--- a/nibabel/data.py
+++ b/nibabel/data.py
@@ -87,8 +87,7 @@ def list_files(self, relative=True):
         for base, dirs, files in os.walk(self.base_path):
             if relative:
                 base = base[len(self.base_path) + 1 :]
-            for filename in files:
-                out_list.append(pjoin(base, filename))
+            out_list.extend(pjoin(base, filename) for filename in files)
         return out_list
 
 
diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index 55c27df50a..db3f667518 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -429,10 +429,9 @@ def fake_shape_dependents(
 
     class PrintBase:
         def __repr__(self):
-            attr_strs = []
-            for attr in dir(self):
-                if attr[0].isupper():
-                    attr_strs.append(f'{attr}={getattr(self, attr)}')
+            attr_strs = [
+                f'{attr}={getattr(self, attr)}' for attr in dir(self) if attr[0].isupper()
+            ]
             return f"{self.__class__.__name__}({', '.join(attr_strs)})"
 
     class DimIdxSeqElem(pydicom.Dataset):
diff --git a/nibabel/tests/test_euler.py b/nibabel/tests/test_euler.py
index b0c965c399..3cc07e8f5d 100644
--- a/nibabel/tests/test_euler.py
+++ b/nibabel/tests/test_euler.py
@@ -21,12 +21,8 @@
 FLOAT_EPS = np.finfo(np.float64).eps
 
 # Example rotations """
-eg_rots = []
 params = np.arange(-pi * 2, pi * 2.5, pi / 2)
-for x in params:
-    for y in params:
-        for z in params:
-            eg_rots.append((x, y, z))
+eg_rots = [(x, y, z) for x in params for y in params for z in params]
 
 
 def x_only(x):
diff --git a/nibabel/tests/test_filehandles.py b/nibabel/tests/test_filehandles.py
index 506a623758..93eb284dfb 100644
--- a/nibabel/tests/test_filehandles.py
+++ b/nibabel/tests/test_filehandles.py
@@ -33,8 +33,7 @@ def test_multiload():
         tmpdir = mkdtemp()
         fname = pjoin(tmpdir, 'test.img')
         save(img, fname)
-        for i in range(N):
-            imgs.append(load(fname))
+        imgs.extend(load(fname) for i in range(N))
     finally:
         del img, imgs
         shutil.rmtree(tmpdir)

From 0a5af04eb6e68946f0310dcedb3be36d79233655 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 22:49:52 +0200
Subject: [PATCH 166/203] STY: Enforce ruff/Perflint rules (PERF)

---
 pyproject.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index f45532e81f..0706e08764 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -122,6 +122,7 @@ select = [
  "F",
  "FURB",
  "I",
+  "PERF",
  "PIE",
  "PLE",
  "PYI",
@@ -144,6 +145,7 @@ ignore = [
  "C401",
  "C408",
  "C416",
+  "PERF203",
  "PIE790",
  "PYI024",
  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules

From 326addc5d98968a50f9cec8f58b8110557e448c0 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 24 Sep 2024 22:03:49 +0200
Subject: [PATCH 167/203] STY: Consistency

Co-authored-by: Chris Markiewicz
---
 nibabel/tests/test_analyze.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py
index d3c6211bfc..befc920f1e 100644
--- a/nibabel/tests/test_analyze.py
+++ b/nibabel/tests/test_analyze.py
@@ -728,8 +728,8 @@ def test_data_hdr_cache(self):
         IC = self.image_class
         # save an image to a file map
         fm = IC.make_file_map()
-        for key in fm:
-            fm[key].fileobj = BytesIO()
+        for value in fm.values():
+            value.fileobj = BytesIO()
         shape = (2, 3, 4)
         data = np.arange(24, dtype=np.int8).reshape(shape)
         affine = np.eye(4)

From 74c853f5d9afa19f97ccf529b83763b852ae5e55 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 24 Sep 2024 22:04:46 +0200
Subject: [PATCH 168/203] STY: Prefix unused loop control variable with an
 underscore

Co-authored-by: Chris Markiewicz
---
 nibabel/tests/test_filehandles.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/tests/test_filehandles.py b/nibabel/tests/test_filehandles.py
index 93eb284dfb..c985d35440 100644
--- a/nibabel/tests/test_filehandles.py
+++ b/nibabel/tests/test_filehandles.py
@@ -33,7 +33,7 @@ def test_multiload():
         tmpdir = mkdtemp()
         fname = pjoin(tmpdir, 'test.img')
         save(img, fname)
-        imgs.extend(load(fname) for i in range(N))
+        imgs.extend(load(fname) for _ in range(N))
     finally:
         del img, imgs
         shutil.rmtree(tmpdir)
From 25321329674bfde4ba45189ca67519a3a3e1246f Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 24 Sep 2024 19:56:00 -0400
Subject: [PATCH 169/203] sty: Apply UP007, UP012

This is safe since we use from __future__ import annotations.
---
 nibabel/nifti1.py            | 14 +++++++-------
 nibabel/tests/test_nifti1.py |  2 +-
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py
index 3ad0ec9389..180f67cca4 100644
--- a/nibabel/nifti1.py
+++ b/nibabel/nifti1.py
@@ -325,15 +325,15 @@ class NiftiExtension(ty.Generic[T]):
     """
 
     code: int
-    encoding: ty.Optional[str] = None
+    encoding: str | None = None
     _content: bytes
-    _object: ty.Optional[T] = None
+    _object: T | None = None
 
     def __init__(
         self,
-        code: ty.Union[int, str],
+        code: int | str,
         content: bytes = b'',
-        object: ty.Optional[T] = None,
+        object: T | None = None,
     ) -> None:
         """
         Parameters
@@ -565,9 +565,9 @@ class Nifti1DicomExtension(Nifti1Extension[DicomDataset]):
 
     def __init__(
         self,
-        code: ty.Union[int, str],
-        content: ty.Union[bytes, DicomDataset, None] = None,
-        parent_hdr: ty.Optional[Nifti1Header] = None,
+        code: int | str,
+        content: bytes | DicomDataset | None = None,
+        parent_hdr: Nifti1Header | None = None,
     ) -> None:
         """
         Parameters
diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index ec4b8674eb..a3626f5688 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -1239,7 +1239,7 @@ def test_extension_content_access():
     assert ext.text == '123'
 
     # Test that encoding errors are caught
-    ascii_ext = Nifti1Extension('comment', 'hôpital'.encode('utf-8'))
+    ascii_ext = Nifti1Extension('comment', 'hôpital'.encode())
     ascii_ext.encoding = 'ascii'
     with pytest.raises(UnicodeDecodeError):
         ascii_ext.text

From f4646182fe16e6af965b575a90499e28d6840f9a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 16:30:48 +0200
Subject: [PATCH 170/203] STY: Apply ruff/flake8-pytest-style rule PT006

PT006 Wrong type passed to first argument of `@pytest.mark.parametrize`;
expected `tuple`
---
 nibabel/cmdline/tests/test_convert.py | 6 +++---
 nibabel/cmdline/tests/test_roi.py     | 2 +-
 nibabel/tests/test_euler.py           | 4 ++--
 nibabel/tests/test_init.py            | 2 +-
 nibabel/tests/test_pkg_info.py        | 2 +-
 nibabel/tests/test_quaternions.py     | 10 +++++-----
 nibabel/tests/test_scaling.py         | 6 +++---
 nibabel/tests/test_spaces.py          | 2 +-
 nibabel/tests/test_testing.py         | 2 +-
 9 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/nibabel/cmdline/tests/test_convert.py b/nibabel/cmdline/tests/test_convert.py
index 021e6ea8ef..d500a717a3 100644
--- a/nibabel/cmdline/tests/test_convert.py
+++ b/nibabel/cmdline/tests/test_convert.py
@@ -71,7 +71,7 @@ def test_convert_dtype(tmp_path, data_dtype):
 
 
 @pytest.mark.parametrize(
-    'ext,img_class',
+    ('ext', 'img_class'),
     [
         ('mgh', nib.MGHImage),
         ('img', nib.Nifti1Pair),
@@ -94,7 +94,7 @@ def test_convert_by_extension(tmp_path, ext, img_class):
 
 
 @pytest.mark.parametrize(
-    'ext,img_class',
+    ('ext', 'img_class'),
     [
         ('mgh', nib.MGHImage),
         ('img', nib.Nifti1Pair),
@@ -141,7 +141,7 @@ def test_convert_nifti_int_fail(tmp_path):
 
 
 @pytest.mark.parametrize(
-    'orig_dtype,alias,expected_dtype',
+    ('orig_dtype', 'alias', 'expected_dtype'),
     [
         ('int64', 'mask', 'uint8'),
         ('int64', 'compat', 'int32'),
diff --git a/nibabel/cmdline/tests/test_roi.py b/nibabel/cmdline/tests/test_roi.py
index d2baa80eeb..19bdf29011 100644
--- a/nibabel/cmdline/tests/test_roi.py
+++ b/nibabel/cmdline/tests/test_roi.py
@@ -119,7 +119,7 @@ def test_nib_roi(tmp_path, inplace):
 
 
 @pytest.mark.parametrize(
-    'args, errmsg',
+    ('args', 'errmsg'),
     (
         (('-i', '1:1'), 'Cannot take zero-length slice'),
         (('-j', '1::2'), 'Downsampling is not supported'),
diff --git a/nibabel/tests/test_euler.py b/nibabel/tests/test_euler.py
index b0c965c399..1a781b8f14 100644
--- a/nibabel/tests/test_euler.py
+++ b/nibabel/tests/test_euler.py
@@ -123,7 +123,7 @@ def test_euler_mat_1():
     assert_array_equal(M, np.eye(3))
 
 
-@pytest.mark.parametrize('x, y, z', eg_rots)
+@pytest.mark.parametrize(('x', 'y', 'z'), eg_rots)
 def test_euler_mat_2(x, y, z):
     M1 = nea.euler2mat(z, y, x)
     M2 = sympy_euler(z, y, x)
@@ -176,7 +176,7 @@ def test_euler_instability():
     assert not np.allclose(M_e, M_e_back)
 
 
-@pytest.mark.parametrize('x, y, z', eg_rots)
+@pytest.mark.parametrize(('x', 'y', 'z'), eg_rots)
 def test_quats(x, y, z):
     M1 = nea.euler2mat(z, y, x)
     quatM = nq.mat2quat(M1)
diff --git a/nibabel/tests/test_init.py b/nibabel/tests/test_init.py
index 969b80b6fc..d54f55053b 100644
--- a/nibabel/tests/test_init.py
+++ b/nibabel/tests/test_init.py
@@ -13,7 +13,7 @@
 
 
 @pytest.mark.parametrize(
-    'verbose, v_args', [(-2, ['-qq']), (-1, ['-q']), (0, []), (1, ['-v']), (2, ['-vv'])]
+    ('verbose', 'v_args'), [(-2, ['-qq']), (-1, ['-q']), (0, []), (1, ['-v']), (2, ['-vv'])]
 )
 @pytest.mark.parametrize('doctests', (True, False))
 @pytest.mark.parametrize('coverage', (True, False))
diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py
index a39eac65b1..94ee903494 100644
--- a/nibabel/tests/test_pkg_info.py
+++ b/nibabel/tests/test_pkg_info.py
@@ -37,7 +37,7 @@ def test_cmp_pkg_version_0():
 
 
 @pytest.mark.parametrize(
-    'test_ver, pkg_ver, exp_out',
+    ('test_ver', 'pkg_ver', 'exp_out'),
     [
         ('1.0', '1.0', 0),
         ('1.0.0', '1.0', 0),
diff --git a/nibabel/tests/test_quaternions.py b/nibabel/tests/test_quaternions.py
index ec882dd0b3..a5ec89d948 100644
--- a/nibabel/tests/test_quaternions.py
+++ b/nibabel/tests/test_quaternions.py
@@ -146,7 +146,7 @@ def test_inverse_0():
     assert iq.dtype.kind == 'f'
 
 
-@pytest.mark.parametrize('M, q', eg_pairs)
+@pytest.mark.parametrize(('M', 'q'), eg_pairs)
 def test_inverse_1(M, q):
     iq = nq.inverse(q)
     iqM = nq.quat2mat(iq)
@@ -169,15 +169,15 @@ def test_norm():
     assert not nq.isunit(qi)
 
 
-@pytest.mark.parametrize('M1, q1', eg_pairs[0::4])
-@pytest.mark.parametrize('M2, q2', eg_pairs[1::4])
+@pytest.mark.parametrize(('M1', 'q1'), eg_pairs[0::4])
+@pytest.mark.parametrize(('M2', 'q2'), eg_pairs[1::4])
 def test_mult(M1, q1, M2, q2):
     # Test that quaternion * same as matrix *
     q21 = nq.mult(q2, q1)
     assert_array_almost_equal, M2 @ M1, nq.quat2mat(q21)
 
 
-@pytest.mark.parametrize('M, q', eg_pairs)
+@pytest.mark.parametrize(('M', 'q'), eg_pairs)
 def test_inverse(M, q):
     iq = nq.inverse(q)
     iqM = nq.quat2mat(iq)
@@ -186,7 +186,7 @@ def test_inverse(M, q):
 
 
 @pytest.mark.parametrize('vec', np.eye(3))
-@pytest.mark.parametrize('M, q', eg_pairs)
+@pytest.mark.parametrize(('M', 'q'), eg_pairs)
 def test_qrotate(vec, M, q):
     vdash = nq.rotate_vector(vec, q)
     vM = M @ vec
diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py
index f667b4164d..eae0b1702c 100644
--- a/nibabel/tests/test_scaling.py
+++ b/nibabel/tests/test_scaling.py
@@ -25,7 +25,7 @@
 
 
 @pytest.mark.parametrize(
-    'in_arr, res',
+    ('in_arr', 'res'),
     [
         ([[-1, 0, 1], [np.inf, np.nan, -np.inf]], (-1, 1)),
         (np.array([[-1, 0, 1], [np.inf, np.nan, -np.inf]]), (-1, 1)),
@@ -134,7 +134,7 @@ def test_a2f_nan2zero():
 
 
 @pytest.mark.parametrize(
-    'in_type, out_type',
+    ('in_type', 'out_type'),
     [
         (np.int16, np.int16),
         (np.int16, np.int8),
@@ -163,7 +163,7 @@ def test_array_file_scales(in_type, out_type):
 
 
 @pytest.mark.parametrize(
-    'category0, category1, overflow',
+    ('category0', 'category1', 'overflow'),
     [
         # Confirm that, for all ints and uints as input, and all possible outputs,
Confirm that, for all ints and uints as input, and all possible outputs, # for any simple way of doing the calculation, the result is near enough diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index f5e467b2cc..4722228a5b 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -125,7 +125,7 @@ def test_slice2volume(): @pytest.mark.parametrize( - 'index, axis', + ('index', 'axis'), [ [-1, 0], [0, -1], diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 04ba813d8b..ec147baa95 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -148,7 +148,7 @@ def f(): @pytest.mark.parametrize( - 'regex, entries', + ('regex', 'entries'), [ ['.*', ''], ['.*', ['any']], From bb1c08b44ceb923a850beb86f25576a1e4866c5b Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:35:25 +0200 Subject: [PATCH 171/203] STY: Apply ruff/flake8-pytest-style rule PT014 PT014 Duplicate of test case --- nibabel/tests/test_pkg_info.py | 2 -- nibabel/tests/test_scaling.py | 1 - 2 files changed, 3 deletions(-) diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index 94ee903494..1a9a06dc93 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -54,8 +54,6 @@ def test_cmp_pkg_version_0(): ('1.2.1rc1', '1.2.1', -1), ('1.2.1rc1', '1.2.1rc', 1), ('1.2.1rc', '1.2.1rc1', -1), - ('1.2.1rc1', '1.2.1rc', 1), - ('1.2.1rc', '1.2.1rc1', -1), ('1.2.1b', '1.2.1a', 1), ('1.2.1a', '1.2.1b', -1), ('1.2.0+1', '1.2', 1), diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index eae0b1702c..ccc379c256 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -36,7 +36,6 @@ ([[np.nan, -1, 2], [-2, np.nan, 1]], (-2, 2)), ([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)), ([[-np.inf, 2], [np.nan, 1]], (1, 2)), # good max case - ([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)), ([np.nan], (np.inf, -np.inf)), ([np.inf], (np.inf, -np.inf)), ([-np.inf], (np.inf, -np.inf)), From 30cba2ca39bc02a2da7f7411a178354046fd6cd2 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:37:46 +0200 Subject: [PATCH 172/203] STY: Apply ruff/flake8-pytest-style rule PT015 PT015 Assertion always fails, replace with `pytest.fail()` --- nibabel/cmdline/tests/test_roi.py | 2 +- nibabel/tests/test_removalschedule.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/cmdline/tests/test_roi.py b/nibabel/cmdline/tests/test_roi.py index 19bdf29011..5f538d53f4 100644 --- a/nibabel/cmdline/tests/test_roi.py +++ b/nibabel/cmdline/tests/test_roi.py @@ -143,7 +143,7 @@ def test_entrypoint(capsys): except SystemExit: pass else: - assert False, 'argparse exits on --help. If changing to another parser, update test.' + pytest.fail('argparse exits on --help. 
If changing to another parser, update test.') captured = capsys.readouterr() assert captured.out.startswith('usage: nib-roi') diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 7a56f3fb8b..d2bc7da2fc 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -125,7 +125,7 @@ def test_module_removal(): for module in _filter(MODULE_SCHEDULE): with pytest.raises(ImportError): __import__(module) - assert False, f'Time to remove {module}' + raise AssertionError(f'Time to remove {module}') def test_object_removal(): From e4a8d1c9f8e79dbd43b62cbc24dfeeb98abf27b3 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:43:42 +0200 Subject: [PATCH 173/203] STY: Apply ruff/flake8-pytest-style rule PT017 PT017 Found assertion on exception `err` in `except` block, use `pytest.raises()` instead --- nibabel/tests/test_tripwire.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py index bcc81b5f5f..6bc4e8533e 100644 --- a/nibabel/tests/test_tripwire.py +++ b/nibabel/tests/test_tripwire.py @@ -16,9 +16,6 @@ def test_tripwire(): with pytest.raises(TripWireError): silly_module_name.do_silly_thing # Check AttributeError can be checked too - try: + with pytest.raises(AttributeError) as err: silly_module_name.__wrapped__ - except TripWireError as err: - assert isinstance(err, AttributeError) - else: - raise RuntimeError('No error raised, but expected') + assert isinstance(err.value, AttributeError) From 341d6d79e35f328b4a6ab2ddd3aa2dc8b5416c2e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:45:31 +0200 Subject: [PATCH 174/203] STY: Apply ruff/flake8-pytest-style rule PT022 PT022 No teardown in fixture `db`, use `return` instead of `yield` --- nibabel/tests/test_dft.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py index 6c6695b16e..6155dda83c 100644 --- a/nibabel/tests/test_dft.py +++ b/nibabel/tests/test_dft.py @@ -58,7 +58,7 @@ def db(monkeypatch): and not modify the host filesystem.""" database = dft._DB(fname=':memory:') monkeypatch.setattr(dft, 'DB', database) - yield database + return database def test_init(db): From bb549fbc84643020b4159d07cf6abcc0fbc34a45 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:46:22 +0200 Subject: [PATCH 175/203] STY: Apply ruff/flake8-pytest-style rule PT027 PT027 Use `pytest.raises` instead of unittest-style `assertRaises` --- nibabel/streamlines/tests/test_streamlines.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 857e64fec9..359cbc5e1c 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -191,13 +191,13 @@ def test_save_tractogram_file(self): trk_file = trk.TrkFile(tractogram) # No need for keyword arguments. - with self.assertRaises(ValueError): + with pytest.raises(ValueError): nib.streamlines.save(trk_file, 'dummy.trk', header={}) # Wrong extension. 
with pytest.warns(ExtensionWarning, match='extension'): trk_file = trk.TrkFile(tractogram) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): nib.streamlines.save(trk_file, 'dummy.tck', header={}) with InTemporaryDirectory(): @@ -272,11 +272,11 @@ def test_save_sliced_tractogram(self): assert_tractogram_equal(tractogram, original_tractogram) def test_load_unknown_format(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): nib.streamlines.load('') def test_save_unknown_format(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): nib.streamlines.save(Tractogram(), '') def test_save_from_generator(self): From a7e1afdb0b292ae7de45bfadb3d9313b9341df70 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 17:44:19 +0200 Subject: [PATCH 176/203] STY: Enforce ruff/flake8-pytest-style rules (PT) --- pyproject.toml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index c973d3e0c2..22be5f917f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -125,6 +125,7 @@ select = [ "I", "PIE", "PLE", + "PT", "PYI", "Q", "RSE", @@ -146,6 +147,12 @@ ignore = [ "C408", "C416", "PIE790", + "PT004", + "PT007", + "PT011", + "PT012", + "PT017", + "PT018", "PYI024", # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules "W191", From 0e1dee31894e3034031ce0a251c5cfe73da5cdfc Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 24 Sep 2024 22:32:58 +0200 Subject: [PATCH 177/203] MNT: Drop test which verifies that TripWireError is an AttributeError Co-authored-by: Chris Markiewicz --- nibabel/tests/test_tripwire.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py index 6bc4e8533e..d7daefe0b1 100644 --- a/nibabel/tests/test_tripwire.py +++ b/nibabel/tests/test_tripwire.py @@ -18,4 +18,3 @@ def test_tripwire(): # Check AttributeError can be checked too with pytest.raises(AttributeError) as err: silly_module_name.__wrapped__ - assert isinstance(err.value, AttributeError) From e58e2ea40ed5c9d0d5bf613e86c789ea0689eedb Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 24 Sep 2024 22:34:04 +0200 Subject: [PATCH 178/203] MNT: Simplify try/except/else block Co-authored-by: Chris Markiewicz --- nibabel/cmdline/tests/test_roi.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/nibabel/cmdline/tests/test_roi.py b/nibabel/cmdline/tests/test_roi.py index 5f538d53f4..4692bbb038 100644 --- a/nibabel/cmdline/tests/test_roi.py +++ b/nibabel/cmdline/tests/test_roi.py @@ -138,12 +138,8 @@ def test_nib_roi_bad_slices(capsys, args, errmsg): def test_entrypoint(capsys): # Check that we handle missing args as expected with mock.patch('sys.argv', ['nib-roi', '--help']): - try: + with pytest.raises(SystemExit): main() - except SystemExit: - pass - else: - pytest.fail('argparse exits on --help. 
If changing to another parser, update test.') captured = capsys.readouterr() assert captured.out.startswith('usage: nib-roi') From 35124b7f45604d54fe90753c1f7119bddf9eb997 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:10:42 +0200 Subject: [PATCH 179/203] STY: Apply ruff/pygrep-hooks rule PGH004 PGH004 Do not add spaces between `noqa` and its colon --- nibabel/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/conftest.py b/nibabel/conftest.py index a4f8b6de90..b16a832f28 100644 --- a/nibabel/conftest.py +++ b/nibabel/conftest.py @@ -5,7 +5,7 @@ # Ignore warning requesting help with nicom with pytest.warns(UserWarning): - import nibabel.nicom # noqa :401 + import nibabel.nicom # noqa: F401 @pytest.fixture(scope='session', autouse=True) From f31bf2b95f975e5e03e5e50f88b0c65225f733e0 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:12:51 +0200 Subject: [PATCH 180/203] STY: Apply ruff/pygrep-hooks rule PGH004 PGH004 Use specific rule codes when using `noqa` --- nibabel/benchmarks/bench_array_to_file.py | 4 ++-- nibabel/benchmarks/bench_finite_range.py | 2 +- nibabel/xmlutils.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/benchmarks/bench_array_to_file.py b/nibabel/benchmarks/bench_array_to_file.py index c2bab7e95e..2af8b5677f 100644 --- a/nibabel/benchmarks/bench_array_to_file.py +++ b/nibabel/benchmarks/bench_array_to_file.py @@ -11,12 +11,12 @@ """ import sys -from io import BytesIO # NOQA +from io import BytesIO # noqa: F401 import numpy as np from numpy.testing import measure -from nibabel.volumeutils import array_to_file # NOQA +from nibabel.volumeutils import array_to_file # noqa: F401 from .butils import print_git_title diff --git a/nibabel/benchmarks/bench_finite_range.py b/nibabel/benchmarks/bench_finite_range.py index edd839ce61..957446884c 100644 --- a/nibabel/benchmarks/bench_finite_range.py +++ b/nibabel/benchmarks/bench_finite_range.py @@ -15,7 +15,7 @@ import numpy as np from numpy.testing import measure -from nibabel.volumeutils import finite_range # NOQA +from nibabel.volumeutils import finite_range # noqa: F401 from .butils import print_git_title diff --git a/nibabel/xmlutils.py b/nibabel/xmlutils.py index 5d079e1172..12fd30f225 100644 --- a/nibabel/xmlutils.py +++ b/nibabel/xmlutils.py @@ -9,7 +9,7 @@ """Thin layer around xml.etree.ElementTree, to abstract nibabel xml support""" from io import BytesIO -from xml.etree.ElementTree import Element, SubElement, tostring # noqa +from xml.etree.ElementTree import Element, SubElement, tostring # noqa: F401 from xml.parsers.expat import ParserCreate from .filebasedimages import FileBasedHeader From aea7fe7be420deaa8c93ea8d7711c7a77214eb92 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 22:53:46 +0200 Subject: [PATCH 181/203] STY: Enforce ruff/pygrep-hooks rules (PGH) --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 748dc12ce1..e865cd0097 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -124,6 +124,7 @@ select = [ "FURB", "I", "PERF", + "PGH", "PIE", "PLE", "PT", From 50e9231c2257e6bd6773f241e54815a6608d514b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 04:47:31 +0000 Subject: [PATCH 
182/203] Bump deadsnakes/action from 3.1.0 to 3.2.0 Bumps [deadsnakes/action](https://github.com/deadsnakes/action) from 3.1.0 to 3.2.0. - [Release notes](https://github.com/deadsnakes/action/releases) - [Commits](https://github.com/deadsnakes/action/compare/v3.1.0...v3.2.0) --- updated-dependencies: - dependency-name: deadsnakes/action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 05718dc1ff..9e5ddd5162 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -176,7 +176,7 @@ jobs: allow-prereleases: true - name: Set up Python ${{ matrix.python-version }} if: endsWith(matrix.python-version, '-dev') - uses: deadsnakes/action@v3.1.0 + uses: deadsnakes/action@v3.2.0 with: python-version: ${{ matrix.python-version }} nogil: true From afa13e717b8ef355224f2d45dfa834f5df481bf1 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 22 Sep 2024 15:49:34 +0200 Subject: [PATCH 183/203] STY: Apply ruff rule RUF100 RUF100 Unused `noqa` directive --- nibabel/casting.py | 8 ++++---- nibabel/info.py | 2 +- nibabel/parrec.py | 2 -- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/nibabel/casting.py b/nibabel/casting.py index 042a2f415d..b279325477 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -51,11 +51,11 @@ class CastingError(Exception): getattr(np, dtype) for dtype in ( 'int8', 'byte', 'int16', 'short', 'int32', 'intc', 'int_', 'int64', 'longlong', - 'uint8', 'ubyte', 'uint16', 'ushort', 'uint32', 'uintc', 'uint', 'uint64', 'ulonglong', # noqa: E501 - 'float16', 'half', 'float32', 'single', 'float64', 'double', 'float96', 'float128', 'longdouble', # noqa: E501 - 'complex64', 'csingle', 'complex128', 'cdouble', 'complex192', 'complex256', 'clongdouble', # noqa: E501 + 'uint8', 'ubyte', 'uint16', 'ushort', 'uint32', 'uintc', 'uint', 'uint64', 'ulonglong', + 'float16', 'half', 'float32', 'single', 'float64', 'double', 'float96', 'float128', 'longdouble', + 'complex64', 'csingle', 'complex128', 'cdouble', 'complex192', 'complex256', 'clongdouble', # other names of the built-in scalar types - 'int_', 'float_', 'complex_', 'bytes_', 'str_', 'bool_', 'datetime64', 'timedelta64', # noqa: E501 + 'int_', 'float_', 'complex_', 'bytes_', 'str_', 'bool_', 'datetime64', 'timedelta64', # other 'object_', 'void', ) diff --git a/nibabel/info.py b/nibabel/info.py index d7873de211..87727cab13 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -108,4 +108,4 @@ .. _Digital Object Identifier: https://en.wikipedia.org/wiki/Digital_object_identifier .. _zenodo: https://zenodo.org -""" # noqa: E501 +""" diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 8b3ffb34a2..0a2005835f 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -6,8 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# Disable line length checking for PAR fragments in module docstring -# noqa: E501 """Read images in PAR/REC format This is yet another MRI image format generated by Philips scanners. 
It is an
From 5ea47a7cc1258fe5fc7c2b9cdc0ece9bf8baeaec Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 22 Sep 2024 22:57:07 +0200
Subject: [PATCH 184/203] STY: Enforce ruff rules (RUF)

---
 pyproject.toml | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index e865cd0097..9b5815e332 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -131,18 +131,19 @@ select = [
   "PYI",
   "Q",
   "RSE",
+  "RUF",
   "TCH",
   "UP",
 ]
 ignore = [
-  "B006", # TODO: enable
-  "B008", # TODO: enable
+  "B006",  # TODO: enable
+  "B008",  # TODO: enable
   "B007",
   "B011",
-  "B017", # TODO: enable
+  "B017",  # TODO: enable
   "B018",
   "B020",
-  "B023", # TODO: enable
+  "B023",  # TODO: enable
   "B028",
   "B904",
   "C401",
   "C408",
   "C416",
   "PIE790",
@@ -157,6 +158,10 @@ ignore = [
   "PT017",
   "PT018",
   "PYI024",
+  "RUF005",
+  "RUF012",  # TODO: enable
+  "RUF015",
+  "RUF017",  # TODO: enable
   # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
   "W191",
   "E111",
From e52c4c8d338ec588d633ed2cd99a9bc62e14ba93 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 18:09:49 +0200
Subject: [PATCH 185/203] STY: Disable deprecated ruff rules

---
 pyproject.toml | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index e865cd0097..23827a9967 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -135,14 +135,14 @@ select = [
   "UP",
 ]
 ignore = [
-  "B006", # TODO: enable
-  "B008", # TODO: enable
+  "B006",  # TODO: enable
+  "B008",  # TODO: enable
   "B007",
   "B011",
-  "B017", # TODO: enable
+  "B017",  # TODO: enable
   "B018",
   "B020",
-  "B023", # TODO: enable
+  "B023",  # TODO: enable
   "B028",
   "B904",
   "C401",
@@ -150,13 +150,16 @@ ignore = [
   "C416",
   "PERF203",
   "PIE790",
-  "PT004",
+  "PT004",  # deprecated
+  "PT005",  # deprecated
   "PT007",
   "PT011",
   "PT012",
   "PT017",
   "PT018",
   "PYI024",
+  "UP027",  # deprecated
+  "UP038",  # https://github.com/astral-sh/ruff/issues/7871
   # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
   "W191",
   "E111",
From 9f28bc8b0c3e70665a7abdd4fa0fd20ee772acfe Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 18:47:08 +0200
Subject: [PATCH 186/203] STY: Apply ruff/pyupgrade preview rule UP031

UP031 Use format specifiers instead of percent format

Co-authored-by: Chris Markiewicz
---
 nibabel/analyze.py                |  6 +++---
 nibabel/cifti2/cifti2_axes.py     |  5 ++---
 nibabel/cmdline/dicomfs.py        | 12 ++++++------
 nibabel/cmdline/diff.py           |  4 ++--
 nibabel/cmdline/ls.py             | 10 +++++-----
 nibabel/dft.py                    |  2 +-
 nibabel/ecat.py                   |  4 ++--
 nibabel/fileslice.py              |  2 +-
 nibabel/freesurfer/io.py          |  8 ++++----
 nibabel/freesurfer/mghformat.py   |  2 +-
 nibabel/gifti/gifti.py            |  2 +-
 nibabel/gifti/parse_gifti_fast.py |  4 ++--
 nibabel/nicom/csareader.py        |  2 +-
 nibabel/nicom/dicomreaders.py     |  4 ++--
 nibabel/nifti1.py                 | 10 +++++-----
 nibabel/orientations.py           |  2 +-
 nibabel/spatialimages.py          |  2 +-
 nibabel/tests/test_funcs.py       |  2 +-
 18 files changed, 41 insertions(+), 42 deletions(-)

diff --git a/nibabel/analyze.py b/nibabel/analyze.py
index 34597319d6..d02363c792 100644
--- a/nibabel/analyze.py
+++ b/nibabel/analyze.py
@@ -699,7 +699,7 @@ def set_zooms(self, zooms):
         ndim = dims[0]
         zooms = np.asarray(zooms)
         if len(zooms) != ndim:
-            raise HeaderDataError('Expecting %d zoom values for ndim %d' % (ndim, ndim))
+            raise HeaderDataError(f'Expecting {ndim} zoom values for ndim {ndim}')
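         # (Illustrative aside on these UP031 rewrites, not from the original
         # commit: '%d' % x renders x as an integer, while f'{x}' falls back to
         # str(x), so the conversion keeps an explicit cast wherever that
         # distinction matters -- e.g. the nifti1.py hunk below becomes
         # f'vox offset {int(offset)} too low for single file nifti1'.)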
if np.any(zooms < 0): raise HeaderDataError('zooms must be positive') pixdims = hdr['pixdim'] @@ -818,11 +818,11 @@ def _chk_datatype(klass, hdr, fix=False): dtype = klass._data_type_codes.dtype[code] except KeyError: rep.problem_level = 40 - rep.problem_msg = 'data code %d not recognized' % code + rep.problem_msg = f'data code {code} not recognized' else: if dtype.itemsize == 0: rep.problem_level = 40 - rep.problem_msg = 'data code %d not supported' % code + rep.problem_msg = f'data code {code} not supported' else: return hdr, rep if fix: diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index af7c63beaa..32914be1b6 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -373,7 +373,7 @@ def from_mask(cls, mask, name='other', affine=None): else: raise ValueError( 'Mask should be either 1-dimensional (for surfaces) or ' - '3-dimensional (for volumes), not %i-dimensional' % mask.ndim + f'3-dimensional (for volumes), not {mask.ndim}-dimensional' ) @classmethod @@ -1519,7 +1519,6 @@ def get_element(self, index): index = self.size + index if index >= self.size or index < 0: raise IndexError( - 'index %i is out of range for SeriesAxis with size %i' - % (original_index, self.size) + f'index {original_index} is out of range for SeriesAxis with size {self.size}' ) return self.start + self.step * index diff --git a/nibabel/cmdline/dicomfs.py b/nibabel/cmdline/dicomfs.py index afd994b151..07aa51e2d3 100644 --- a/nibabel/cmdline/dicomfs.py +++ b/nibabel/cmdline/dicomfs.py @@ -51,7 +51,7 @@ def __init__(self, fno): self.direct_io = False def __str__(self): - return 'FileHandle(%d)' % self.fno + return f'FileHandle({self.fno})' class DICOMFS(fuse.Fuse): @@ -85,11 +85,11 @@ def get_paths(self): series_info += f'UID: {series.uid}\n' series_info += f'number: {series.number}\n' series_info += f'description: {series.description}\n' - series_info += 'rows: %d\n' % series.rows - series_info += 'columns: %d\n' % series.columns - series_info += 'bits allocated: %d\n' % series.bits_allocated - series_info += 'bits stored: %d\n' % series.bits_stored - series_info += 'storage instances: %d\n' % len(series.storage_instances) + series_info += f'rows: {series.rows}\n' + series_info += f'columns: {series.columns}\n' + series_info += f'bits allocated: {series.bits_allocated}\n' + series_info += f'bits stored: {series.bits_stored}\n' + series_info += f'storage instances: {len(series.storage_instances)}\n' d[series.number] = { 'INFO': series_info.encode('ascii', 'replace'), f'{series.number}.nii': (series.nifti_size, series.as_nifti), diff --git a/nibabel/cmdline/diff.py b/nibabel/cmdline/diff.py index 36760f7ebb..55f827e973 100755 --- a/nibabel/cmdline/diff.py +++ b/nibabel/cmdline/diff.py @@ -266,7 +266,7 @@ def get_data_diff(files, max_abs=0, max_rel=0, dtype=np.float64): diffs1.append({'CMP': 'incompat'}) if any(diffs1): - diffs['DATA(diff %d:)' % (i + 1)] = diffs1 + diffs[f'DATA(diff {i + 1}:)'] = diffs1 return diffs @@ -293,7 +293,7 @@ def display_diff(files, diff): output += field_width.format('Field/File') for i, f in enumerate(files, 1): - output += '%d:%s' % (i, filename_width.format(os.path.basename(f))) + output += f'{i}:{filename_width.format(os.path.basename(f))}' output += '\n' diff --git a/nibabel/cmdline/ls.py b/nibabel/cmdline/ls.py index f79c27f0c5..72fb227687 100755 --- a/nibabel/cmdline/ls.py +++ b/nibabel/cmdline/ls.py @@ -73,7 +73,7 @@ def get_opt_parser(): action='store_true', dest='all_counts', default=False, - help='Output all counts, even if 
number of unique values > %d' % MAX_UNIQUE, + help=f'Output all counts, even if number of unique values > {MAX_UNIQUE}', ), Option( '-z', @@ -117,7 +117,7 @@ def proc_file(f, opts): row += [''] if hasattr(h, 'extensions') and len(h.extensions): - row += ['@l#exts: %d' % len(h.extensions)] + row += [f'@l#exts: {len(h.extensions)}'] else: row += [''] @@ -166,16 +166,16 @@ def proc_file(f, opts): d = d.reshape(-1) if opts.stats: # just # of elements - row += ['@l[%d]' % np.prod(d.shape)] + row += [f'@l[{np.prod(d.shape)}]'] # stats row += [f'@l[{np.min(d):.2g}, {np.max(d):.2g}]' if len(d) else '-'] if opts.counts: items, inv = np.unique(d, return_inverse=True) if len(items) > 1000 and not opts.all_counts: - counts = _err('%d uniques. Use --all-counts' % len(items)) + counts = _err(f'{len(items)} uniques. Use --all-counts') else: freq = np.bincount(inv) - counts = ' '.join('%g:%d' % (i, f) for i, f in zip(items, freq)) + counts = ' '.join(f'{i:g}:{f}' for i, f in zip(items, freq)) row += ['@l' + counts] except OSError as e: verbose(2, f'Failed to obtain stats/counts -- {e}') diff --git a/nibabel/dft.py b/nibabel/dft.py index e63c9c4796..23108895b2 100644 --- a/nibabel/dft.py +++ b/nibabel/dft.py @@ -162,7 +162,7 @@ def as_nifti(self): for i, si in enumerate(self.storage_instances): if i + 1 != si.instance_number: raise InstanceStackError(self, i, si) - logger.info('reading %d/%d' % (i + 1, len(self.storage_instances))) + logger.info(f'reading {i + 1}/{len(self.storage_instances)}') d = self.storage_instances[i].dicom() data[i, :, :] = d.pixel_array diff --git a/nibabel/ecat.py b/nibabel/ecat.py index c4b55624f9..f634bcd8a6 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -309,14 +309,14 @@ def get_patient_orient(self): """ code = self._structarr['patient_orientation'].item() if code not in self._patient_orient_codes: - raise KeyError('Ecat Orientation CODE %d not recognized' % code) + raise KeyError(f'Ecat Orientation CODE {code} not recognized') return self._patient_orient_codes[code] def get_filetype(self): """Type of ECAT Matrix File from code stored in header""" code = self._structarr['file_type'].item() if code not in self._ft_codes: - raise KeyError('Ecat Filetype CODE %d not recognized' % code) + raise KeyError(f'Ecat Filetype CODE {code} not recognized') return self._ft_codes[code] @classmethod diff --git a/nibabel/fileslice.py b/nibabel/fileslice.py index 816f1cdaf6..91ed1f70a1 100644 --- a/nibabel/fileslice.py +++ b/nibabel/fileslice.py @@ -127,7 +127,7 @@ def canonical_slicers(sliceobj, shape, check_inds=True): if slicer < 0: slicer = dim_len + slicer elif check_inds and slicer >= dim_len: - raise ValueError('Integer index %d to large' % slicer) + raise ValueError(f'Integer index {slicer} too large') can_slicers.append(slicer) # Fill out any missing dimensions if n_real < n_dim: diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py index 74bc05fc31..31745df720 100644 --- a/nibabel/freesurfer/io.py +++ b/nibabel/freesurfer/io.py @@ -427,7 +427,7 @@ def _read_annot_ctab_old_format(fobj, n_entries): for i in range(n_entries): # structure name length + string name_length = np.fromfile(fobj, dt, 1)[0] - name = np.fromfile(fobj, '|S%d' % name_length, 1)[0] + name = np.fromfile(fobj, f'|S{name_length}', 1)[0] names.append(name) # read RGBT for this entry ctab[i, :4] = np.fromfile(fobj, dt, 4) @@ -471,7 +471,7 @@ def _read_annot_ctab_new_format(fobj, ctab_version): ctab = np.zeros((max_index, 5), dt) # orig_tab string length + string length = np.fromfile(fobj, dt, 1)[0] - 
np.fromfile(fobj, '|S%d' % length, 1)[0] # Orig table path + np.fromfile(fobj, f'|S{length}', 1)[0] # Orig table path # number of LUT entries present in the file entries_to_read = np.fromfile(fobj, dt, 1)[0] names = list() @@ -480,7 +480,7 @@ def _read_annot_ctab_new_format(fobj, ctab_version): idx = np.fromfile(fobj, dt, 1)[0] # structure name length + string name_length = np.fromfile(fobj, dt, 1)[0] - name = np.fromfile(fobj, '|S%d' % name_length, 1)[0] + name = np.fromfile(fobj, f'|S{name_length}', 1)[0] names.append(name) # RGBT ctab[idx, :4] = np.fromfile(fobj, dt, 4) @@ -525,7 +525,7 @@ def write(num, dtype=dt): def write_string(s): s = (s if isinstance(s, bytes) else s.encode()) + b'\x00' write(len(s)) - write(s, dtype='|S%d' % len(s)) + write(s, dtype=f'|S{len(s)}') # Generate annotation values for each ctab entry if fill_ctab: diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 6efa67ffa8..0adcb88e2c 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -281,7 +281,7 @@ def set_zooms(self, zooms): zooms = np.asarray(zooms) ndims = self._ndims() if len(zooms) > ndims: - raise HeaderDataError('Expecting %d zoom values' % ndims) + raise HeaderDataError(f'Expecting {ndims} zoom values') if np.any(zooms[:3] <= 0): raise HeaderDataError( f'Spatial (first three) zooms must be positive; got {tuple(zooms[:3])}' diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index c983a14dfd..76fcc4a451 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -522,7 +522,7 @@ def _to_xml_element(self): }, ) for di, dn in enumerate(self.dims): - data_array.attrib['Dim%d' % di] = str(dn) + data_array.attrib[f'Dim{di}'] = str(dn) if self.meta is not None: data_array.append(self.meta._to_xml_element()) diff --git a/nibabel/gifti/parse_gifti_fast.py b/nibabel/gifti/parse_gifti_fast.py index ccd608324a..5bcd8c8c32 100644 --- a/nibabel/gifti/parse_gifti_fast.py +++ b/nibabel/gifti/parse_gifti_fast.py @@ -284,8 +284,8 @@ def EndElementHandler(self, name): if name == 'GIFTI': if hasattr(self, 'expected_numDA') and self.expected_numDA != self.img.numDA: warnings.warn( - 'Actual # of data arrays does not match ' - '# expected: %d != %d.' % (self.expected_numDA, self.img.numDA) + 'Actual # of data arrays does not match # expected: ' + f'{self.expected_numDA} != {self.img.numDA}.' 
) # remove last element of the list self.fsm_state.pop() diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index df379e0be8..b98dae7403 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -179,7 +179,7 @@ def get_vector(csa_dict, tag_name, n): if len(items) == 0: return None if len(items) != n: - raise ValueError('Expecting %d vector' % n) + raise ValueError(f'Expecting {n} vector') return np.array(items) diff --git a/nibabel/nicom/dicomreaders.py b/nibabel/nicom/dicomreaders.py index 5892bb8db2..07362ee47d 100644 --- a/nibabel/nicom/dicomreaders.py +++ b/nibabel/nicom/dicomreaders.py @@ -131,7 +131,7 @@ def slices_to_series(wrappers): break else: # no match in current volume lists volume_lists.append([dw]) - print('We appear to have %d Series' % len(volume_lists)) + print(f'We appear to have {len(volume_lists)} Series') # second pass out_vol_lists = [] for vol_list in volume_lists: @@ -143,7 +143,7 @@ def slices_to_series(wrappers): out_vol_lists += _third_pass(vol_list) continue out_vol_lists.append(vol_list) - print('We have %d volumes after second pass' % len(out_vol_lists)) + print(f'We have {len(out_vol_lists)} volumes after second pass') # final pass check for vol_list in out_vol_lists: zs = [s.slice_indicator for s in vol_list] diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 180f67cca4..b9c78c81bc 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -1559,7 +1559,7 @@ def get_intent(self, code_repr='label'): else: raise TypeError('repr can be "label" or "code"') n_params = len(recoder.parameters[code]) if known_intent else 0 - params = (float(hdr['intent_p%d' % (i + 1)]) for i in range(n_params)) + params = (float(hdr[f'intent_p{i}']) for i in range(1, n_params + 1)) name = hdr['intent_name'].item().decode('latin-1') return label, tuple(params), name @@ -1632,8 +1632,8 @@ def set_intent(self, code, params=(), name='', allow_unknown=False): hdr['intent_name'] = name all_params = [0] * 3 all_params[: len(params)] = params[:] - for i, param in enumerate(all_params): - hdr['intent_p%d' % (i + 1)] = param + for i, param in enumerate(all_params, start=1): + hdr[f'intent_p{i}'] = param def get_slice_duration(self): """Get slice duration @@ -1911,7 +1911,7 @@ def _chk_offset(hdr, fix=False): return hdr, rep if magic == hdr.single_magic and offset < hdr.single_vox_offset: rep.problem_level = 40 - rep.problem_msg = 'vox offset %d too low for single file nifti1' % offset + rep.problem_msg = f'vox offset {int(offset)} too low for single file nifti1' if fix: hdr['vox_offset'] = hdr.single_vox_offset rep.fix_msg = f'setting to minimum value of {hdr.single_vox_offset}' @@ -1943,7 +1943,7 @@ def _chk_xform_code(klass, code_type, hdr, fix): if code in recoder.value_set(): return hdr, rep rep.problem_level = 30 - rep.problem_msg = '%s %d not valid' % (code_type, code) + rep.problem_msg = f'{code_type} {code} not valid' if fix: hdr[code_type] = 0 rep.fix_msg = 'setting to 0' diff --git a/nibabel/orientations.py b/nibabel/orientations.py index 7265bf56f3..12e414def9 100644 --- a/nibabel/orientations.py +++ b/nibabel/orientations.py @@ -124,7 +124,7 @@ def ornt_transform(start_ornt, end_ornt): result[start_in_idx, :] = [end_in_idx, flip] break else: - raise ValueError('Unable to find out axis %d in start_ornt' % end_out_idx) + raise ValueError(f'Unable to find out axis {end_out_idx} in start_ornt') return result diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index ce8ee3c6e6..19677c1a7d 100644 --- a/nibabel/spatialimages.py +++ 
b/nibabel/spatialimages.py
@@ -267,7 +267,7 @@ def set_zooms(self, zooms: Sequence[float]) -> None:
         shape = self.get_data_shape()
         ndim = len(shape)
         if len(zooms) != ndim:
-            raise HeaderDataError('Expecting %d zoom values for ndim %d' % (ndim, ndim))
+            raise HeaderDataError(f'Expecting {ndim} zoom values for ndim {ndim}')
         if any(z < 0 for z in zooms):
             raise HeaderDataError('zooms must be positive')
         self._zooms = zooms
diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py
index 5e59bc63b6..8666406168 100644
--- a/nibabel/tests/test_funcs.py
+++ b/nibabel/tests/test_funcs.py
@@ -23,7 +23,7 @@
 
 def _as_fname(img):
     global _counter
-    fname = 'img%3d.nii' % _counter
+    fname = f'img{_counter:3d}.nii'
     _counter = _counter + 1
     save(img, fname)
     return fname
From 95cc728dd0c49245373d928f73c263a7ca7f7813 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 20:03:23 +0200
Subject: [PATCH 187/203] MNT: Python 3 string formatting: %i → %d

Co-authored-by: Chris Markiewicz
---
 nibabel/freesurfer/io.py | 2 +-
 nibabel/gifti/util.py    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py
index 31745df720..5b3f6a3664 100644
--- a/nibabel/freesurfer/io.py
+++ b/nibabel/freesurfer/io.py
@@ -465,7 +465,7 @@ def _read_annot_ctab_new_format(fobj, ctab_version):
     dt = _ANNOT_DT
     # This code works with a file version == 2, nothing else
     if ctab_version != 2:
-        raise Exception('Unrecognised .annot file version (%i)', ctab_version)
+        raise Exception(f'Unrecognised .annot file version ({ctab_version})')
     # maximum LUT index present in the file
     max_index = np.fromfile(fobj, dt, 1)[0]
     ctab = np.zeros((max_index, 5), dt)
diff --git a/nibabel/gifti/util.py b/nibabel/gifti/util.py
index 9393292013..791f133022 100644
--- a/nibabel/gifti/util.py
+++ b/nibabel/gifti/util.py
@@ -10,7 +10,7 @@
 from ..volumeutils import Recoder
 
 # Translate dtype.kind char codes to XML text output strings
-KIND2FMT = {'i': '%i', 'u': '%i', 'f': '%10.6f', 'c': '%10.6f', 'V': ''}
+KIND2FMT = {'i': '%d', 'u': '%d', 'f': '%10.6f', 'c': '%10.6f', 'V': ''}
 
 array_index_order_codes = Recoder(
     (
From f31bf2b95f975e5e03e5e50f88b0c65225f733e0 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 18:49:36 +0200
Subject: [PATCH 188/203] STY: Apply ruff/refurb preview rule FURB145

FURB145 Prefer `copy` method over slicing

---
 nibabel/tests/test_nifti1.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index 8eae0410e9..f0029681b8 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -578,12 +578,12 @@ def test_slice_times(self):
         with pytest.raises(HeaderDataError):
             # all None
             hdr.set_slice_times((None,) * len(times))
-        n_mid_times = times[:]
+        n_mid_times = times.copy()
         n_mid_times[3] = None
         with pytest.raises(HeaderDataError):
             # None in middle
             hdr.set_slice_times(n_mid_times)
-        funny_times = times[:]
+        funny_times = times.copy()
         funny_times[3] = 0.05
         with pytest.raises(HeaderDataError):
             # can't get single slice duration
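An illustrative aside, not itself part of the patch series: FURB145 prefers
`.copy()` over `[:]` partly because slicing is not a copy for every
sequence-like type -- on a NumPy array it returns a view of the same buffer.
A minimal sketch of the difference:

    import numpy as np

    times = [0.0, 0.1, 0.2]
    assert times[:] is not times  # list slicing really does copy
    a = np.asarray(times)
    assert a[:].base is a         # ndarray slicing yields a view
    assert a.copy().base is None  # ndarray .copy() is an independent buffer

The `times` used in the tests just above are plain lists, where both
spellings copy, so that change is purely stylistic.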
From 4810cd78bd7d21b9e9f8754bb0a7bd4a86235c49 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 18:52:29 +0200
Subject: [PATCH 189/203] STY: Apply ruff/refurb preview rule FURB148

FURB148 `enumerate` index is unused, use `for x in y` instead

---
 nibabel/cifti2/tests/test_cifti2io_header.py |  2 +-
 nibabel/tests/test_round_trip.py             | 12 ++++++------
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py
index 1c37cfe0e7..ecdf0c69a7 100644
--- a/nibabel/cifti2/tests/test_cifti2io_header.py
+++ b/nibabel/cifti2/tests/test_cifti2io_header.py
@@ -72,7 +72,7 @@ def test_read_and_proxies():
 
 @needs_nibabel_data('nitest-cifti2')
 def test_version():
-    for i, dat in enumerate(datafiles):
+    for dat in datafiles:
         img = nib.load(dat)
         assert Version(img.header.version) == Version('2')
 
diff --git a/nibabel/tests/test_round_trip.py b/nibabel/tests/test_round_trip.py
index 07783fe550..6daf960aa4 100644
--- a/nibabel/tests/test_round_trip.py
+++ b/nibabel/tests/test_round_trip.py
@@ -108,15 +108,15 @@ def test_round_trip():
     iuint_types = [t for t in iuint_types if t in nifti_supported]
     f_types = [np.float32, np.float64]
     # Expanding standard deviations
-    for i, sd_10 in enumerate(sd_10s):
+    for sd_10 in sd_10s:
         sd = 10.0**sd_10
         V_in = rng.normal(0, sd, size=(N, 1))
-        for j, in_type in enumerate(f_types):
-            for k, out_type in enumerate(iuint_types):
+        for in_type in f_types:
+            for out_type in iuint_types:
                 check_arr(sd_10, V_in, in_type, out_type, scaling_type)
     # Spread integers across range
-    for i, sd in enumerate(np.linspace(0.05, 0.5, 5)):
-        for j, in_type in enumerate(iuint_types):
+    for sd in np.linspace(0.05, 0.5, 5):
+        for in_type in iuint_types:
             info = np.iinfo(in_type)
             mn, mx = info.min, info.max
             type_range = mx - mn
@@ -124,7 +124,7 @@ def test_round_trip():
             # float(sd) because type_range can be type 'long'
             width = type_range * float(sd)
             V_in = rng.normal(center, width, size=(N, 1))
-            for k, out_type in enumerate(iuint_types):
+            for out_type in iuint_types:
                 check_arr(sd, V_in, in_type, out_type, scaling_type)
 
From 02b7b0e308b594f730cd139448fbc3e9a0fc4b47 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 18:55:39 +0200
Subject: [PATCH 190/203] STY: Apply ruff/refurb preview rule FURB157

FURB157 Verbose expression in `Decimal` constructor

---
 nibabel/nicom/tests/test_dicomwrappers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index db3f667518..aefb35e892 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -991,8 +991,8 @@ def test_scale_data(self):
             assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data))
         # Decimals are OK
         for frame in frames:
-            frame.PixelValueTransformationSequence[0].RescaleSlope = Decimal('3')
-            frame.PixelValueTransformationSequence[0].RescaleIntercept = Decimal('-2')
+            frame.PixelValueTransformationSequence[0].RescaleSlope = Decimal(3)
+            frame.PixelValueTransformationSequence[0].RescaleIntercept = Decimal(-2)
             assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data))
         # A per-frame RWV scaling takes precedence over per-frame PixelValueTransformation
         for frame in frames:
From 8c2a501de8c7a1d278634f00320acbfb22355799 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 1 Oct 2024 18:56:25 +0200
Subject: [PATCH 191/203] STY: Apply ruff/refurb preview rule FURB192
FURB192 Prefer `min` over `sorted()` to compute the minimum value in a sequence --- nibabel/nicom/dicomwrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 009880e496..64b2b4a96d 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -565,7 +565,7 @@ def applies(self, dcm_wrp) -> bool: warnings.warn( 'A multi-stack file was passed without an explicit filter, just using lowest StackID' ) - self._selected = sorted(stack_ids)[0] + self._selected = min(stack_ids) return True return False From 73bae7e98c4d86492f266adfad38febf41107a4a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:59:16 +0200 Subject: [PATCH 192/203] STY: Apply ruff/flake8-comprehensions preview rule C409 C409 Unnecessary list comprehension passed to `tuple()` (rewrite as a generator) --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index a06b2c45d9..96e66b44c5 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -79,7 +79,7 @@ def test_creating_arraysequence_from_list(self): # List of ndarrays. N = 5 for ndim in range(1, N + 1): - common_shape = tuple([SEQ_DATA['rng'].randint(1, 10) for _ in range(ndim - 1)]) + common_shape = tuple(SEQ_DATA['rng'].randint(1, 10) for _ in range(ndim - 1)) data = generate_data(nb_arrays=5, common_shape=common_shape, rng=SEQ_DATA['rng']) check_arr_seq(ArraySequence(data), data) From b33bcde28337707fcd71dbddf69d8d1bc52a75ca Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 1 Oct 2024 19:00:30 +0200 Subject: [PATCH 193/203] STY: Apply ruff/flake8-comprehensions preview rule C419 C419 Unnecessary list comprehension --- nibabel/orientations.py | 2 +- nibabel/tests/test_volumeutils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/orientations.py b/nibabel/orientations.py index 12e414def9..b620fff02b 100644 --- a/nibabel/orientations.py +++ b/nibabel/orientations.py @@ -322,7 +322,7 @@ def axcodes2ornt(axcodes, labels=None): [ 2., 1.]]) """ labels = list(zip('LPI', 'RAS')) if labels is None else labels - allowed_labels = sum([list(L) for L in labels], []) + [None] + allowed_labels = sum((list(L) for L in labels), []) + [None] if len(allowed_labels) != len(set(allowed_labels)): raise ValueError(f'Duplicate labels in {allowed_labels}') if not set(axcodes).issubset(allowed_labels): diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 9d321f07e4..1bd44cbd0a 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -607,7 +607,7 @@ def test_a2f_nanpos(): def test_a2f_bad_scaling(): # Test that pathological scalers raise an error - NUMERICAL_TYPES = sum([sctypes[key] for key in ['int', 'uint', 'float', 'complex']], []) + NUMERICAL_TYPES = sum((sctypes[key] for key in ['int', 'uint', 'float', 'complex']), []) for in_type, out_type, slope, inter in itertools.product( NUMERICAL_TYPES, NUMERICAL_TYPES, From b8487cec305898d353c0fe10a814bc3bb87d6f80 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 2 Oct 2024 15:04:06 +0200 Subject: [PATCH 
194/203] MNT: Fix misspellings found by codespell --- nibabel/tests/test_casting.py | 2 +- nibabel/tests/test_proxy_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index d4cf81515a..c6c1ddb661 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -161,7 +161,7 @@ def test_floor_log2(): def test_able_int_type(): - # The integer type cabable of containing values + # The integer type capable of containing values for vals, exp_out in ( ([0, 1], np.uint8), ([0, 255], np.uint8), diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 421bc5bf47..ba0f784d59 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -25,7 +25,7 @@ * if you pass a header into the __init__, then modifying the original header will not affect the result of the array return. -These last are to allow the proxy to be re-used with different images. +These last are to allow the proxy to be reused with different images. """ import unittest From ec15839f8141745600e40ce1b737ba768d33d2fe Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 2 Oct 2024 19:05:00 +0200 Subject: [PATCH 195/203] MNT: better way to normalize sequences to lists and flatten Co-authored-by: Chris Markiewicz --- nibabel/orientations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/orientations.py b/nibabel/orientations.py index b620fff02b..f1cdd228be 100644 --- a/nibabel/orientations.py +++ b/nibabel/orientations.py @@ -322,7 +322,7 @@ def axcodes2ornt(axcodes, labels=None): [ 2., 1.]]) """ labels = list(zip('LPI', 'RAS')) if labels is None else labels - allowed_labels = sum((list(L) for L in labels), []) + [None] + allowed_labels = sum(map(list, labels), [None]) if len(allowed_labels) != len(set(allowed_labels)): raise ValueError(f'Duplicate labels in {allowed_labels}') if not set(axcodes).issubset(allowed_labels): From 7a733f6f54c9f382f28e468c1fab8d414b8fdae6 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 8 Oct 2024 10:36:30 -0400 Subject: [PATCH 196/203] DOC: Update changelog --- Changelog | 61 ++++++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 51 insertions(+), 10 deletions(-) diff --git a/Changelog b/Changelog index 24e89095f3..f72a6a8874 100644 --- a/Changelog +++ b/Changelog @@ -25,31 +25,72 @@ Eric Larson (EL), Demian Wassermann, Stephan Gerhard and Ross Markello (RM). References like "pr/298" refer to github pull request numbers. -Upcoming release (To be determined) -=================================== +5.3.0 (Tuesday 8 October 2024) +============================== + +This release primarily adds support for Python 3.13 and Numpy 2.0. + +NiBabel 6.0 will drop support for Numpy 1.x. New features ------------ +* Update NIfTI extension protocol to include ``.content : bytes``, ``.text : str`` and ``.json : dict`` + properties for accessing extension contents. Exceptions will be raised on ``.text`` and ``.json`` if + conversion fails. 
(pr/1336) (CM)
 
 Enhancements
 ------------
- * Ability to read data from many multiframe DICOM files that previously generated errors
+* Ability to read data from many multiframe DICOM files that previously generated errors (pr/1340)
+  (Brendan Moloney, reviewed by CM)
+* ``nib-nifti-dx`` now supports NIfTI-2 files with a ``--nifti2`` flag (pr/1323) (CM)
+* Update :mod:`nibabel.streamlines.tractogram` to support ragged arrays. (pr/1291)
+  (Serge Koudoro, reviewed by CM)
+* Filter numpy ``UserWarning`` on ``np.finfo(np.longdouble)``. This can occur on
+  Windows systems, but it's done in the context of checking for the problem that
+  is being warned against, so there's no need to be noisy. (pr/1310)
+  (Joshua Newton, reviewed by CM)
+* Improve error message for dicomwrapper errors in shape calculation (pr/1302)
+  (YOH, reviewed by CM)
+* Support "flat" ASCII-encoded GIFTI DataArrays (pr/1298) (PM, reviewed by CM)
 
 Bug fixes
 ---------
- * Fixed multiframe DICOM issue where data could be flipped along slice dimension relative to the
-   affine
- * Fixed multiframe DICOM issue where ``image_position`` and the translation component in the
-   ``affine`` could be incorrect
-
-Documentation
--------------
+* Fix location initialization/update in OrthoSlicer3D for permuted axes (pr/1319, pr/1350)
+  (Guillaume Becq, reviewed by CM)
+* Fix DICOM scaling, making frame filtering explicit (pr/1342) (Brendan Moloney, reviewed by CM)
+* Fixed multiframe DICOM issue where data could be flipped along slice dimension relative to the
+  affine (pr/1340) (Brendan Moloney, reviewed by CM)
+* Fixed multiframe DICOM issue where ``image_position`` and the translation component in the
+  ``affine`` could be incorrect (pr/1340) (Brendan Moloney, reviewed by CM)
 
 Maintenance
 -----------
+* Numpy 2.0 compatibility and addressing deprecations in numpy API
+  (pr/1304, pr/1330, pr/1331, pr/1334, pr/1337) (Jon Haitz Legarreta Gorroño, CM)
+* Python 3.13 compatibility (pr/1315) (Sandro from the Fedora Project, reviewed by CM)
+* Testing on Python 3.13 with free-threading (pr/1339) (CM)
+* Testing on ARM64 Mac OS runners (pr/1320) (CM)
+* Proactively address deprecations in coming Python versions (pr/1329, pr/1332, pr/1333)
+  (Jon Haitz Legarreta Gorroño, reviewed by CM)
+* Replace nose-era ``setup()`` and ``teardown()`` functions with pytest equivalents
+  (pr/1325) (Sandro from the Fedora Project, reviewed by Étienne Mollier and CM)
+* Transitioned from blue/isort/flake8 to `ruff `__. (pr/1289)
+  (Dimitri Papadopoulos, reviewed by CM)
+* Vetted and added various rules to the ruff configuration for auto-formatting and style
+  guide enforcement. (pr/1321, pr/1351, pr/1352, pr/1353, pr/1354, pr/1355, pr/1357, pr/1358,
+  pr/1359, pr/1360, pr/1361, pr/1362, pr/1363, pr/1364, pr/1368, pr/1369)
+  (Dimitri Papadopoulos, reviewed by CM)
+* Fixing typos when found. (pr/1313, pr/1370) (MB, Dimitri Papadopoulos)
+* Applied Repo-Review suggestions (Dimitri Papadopoulos, reviewed by CM)
 
 API changes and deprecations
 ----------------------------
+* Raise :class:`~nibabel.spatialimages.HeaderDataError` from
+  :func:`~nibabel.nifti1.Nifti1Header.set_qform` if the affine fails to decompose.
+  This would previously result in :class:`numpy.linalg.LinAlgError`. (pr/1227) (CM)
+* The :func:`nibabel.onetime.auto_attr` module can be replaced by :func:`functools.cached_property`
+  in all supported versions of Python. This alias may be removed in future versions. (pr/1341) (CM)
+* Removed the deprecated ``nisext`` (setuptools extensions) package.
(pr/1290) (CM, reviewed by MB) 5.2.1 (Monday 26 February 2024) From 607b5cad30119defc3e005c8f25cfc2bb2f505cb Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 8 Oct 2024 10:38:46 -0400 Subject: [PATCH 197/203] DOC: Update Zenodo contributors --- .zenodo.json | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index 553aba0548..250611d54d 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -25,6 +25,11 @@ "name": "Cipollini, Ben", "orcid": "0000-0002-7782-0790" }, + { + "affiliation": "CEA", + "name": "Papadopoulos Orfanos, Dimitri", + "orcid": "0000-0002-1242-8990" + }, { "name": "McCarthy, Paul" }, @@ -78,13 +83,11 @@ "orcid": "0000-0001-7159-1387" }, { - "name": "Wang, Hao-Ting", - "orcid": "0000-0003-4078-2038" + "name": "Moloney, Brendan" }, { - "affiliation": "CEA", - "name": "Papadopoulos Orfanos, Dimitri", - "orcid": "0000-0002-1242-8990" + "name": "Wang, Hao-Ting", + "orcid": "0000-0003-4078-2038" }, { "affiliation": "Harvard University - Psychology", @@ -123,9 +126,6 @@ { "name": "S\u00f3lon, Anibal" }, - { - "name": "Moloney, Brendan" - }, { "name": "Morency, F\u00e9lix C." }, @@ -177,6 +177,11 @@ { "name": "Van, Andrew" }, + { + "affiliation": "Brigham and Women's Hospital, Mass General Brigham/Harvard Medical School", + "name": "Legarreta, Jon Haitz", + "orcid": "0000-0002-9661-1396" + }, { "affiliation": "Google", "name": "Gorgolewski, Krzysztof J.", @@ -203,6 +208,9 @@ { "name": "Baker, Eric M." }, + { + "name": "Koudoro, Serge" + }, { "name": "Hayashi, Soichi" }, @@ -220,14 +228,14 @@ "name": "Esteban, Oscar", "orcid": "0000-0001-8435-6191" }, - { - "name": "Koudoro, Serge" - }, { "affiliation": "University College London", "name": "P\u00e9rez-Garc\u00eda, Fernando", "orcid": "0000-0001-9090-3024" }, + { + "name": "Becq, Guillaume" + }, { "name": "Dock\u00e8s, J\u00e9r\u00f4me" }, @@ -270,9 +278,9 @@ "orcid": "0000-0003-1076-5122" }, { - "affiliation": "Brigham and Women's Hospital, Mass General Brigham/Harvard Medical School", - "name": "Legarreta, Jon Haitz", - "orcid": "0000-0002-9661-1396" + "affiliation": "Polytechnique Montr\u00e9al, Montr\u00e9al, CA", + "name": "Newton, Joshua", + "orcid": "0009-0005-6963-3812" }, { "name": "Hahn, Kevin S." @@ -285,6 +293,9 @@ { "name": "Hinds, Oliver P." }, + { + "name": "Sandro" + }, { "name": "Fauber, Bennet" }, @@ -391,11 +402,6 @@ }, { "name": "freec84" - }, - { - "affiliation": "Polytechnique Montréal, Montréal, CA", - "name": "Newton, Joshua", - "orcid": "0009-0005-6963-3812" } ], "keywords": [ From 9bdbc42217321d78578c809b83b38f18102dea93 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 8 Oct 2024 10:44:29 -0400 Subject: [PATCH 198/203] DOC: Update mailmap and contributor list --- .mailmap | 1 + doc/source/index.rst | 3 +++ 2 files changed, 4 insertions(+) diff --git a/.mailmap b/.mailmap index 7b5dfa0d43..43932c865b 100644 --- a/.mailmap +++ b/.mailmap @@ -75,6 +75,7 @@ Oliver P. 
Hinds Or Duek Oscar Esteban Paul McCarthy +Paul McCarthy Reinder Vos de Wael Roberto Guidotti Roberto Guidotti diff --git a/doc/source/index.rst b/doc/source/index.rst index 72c731d25f..677e81b331 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -130,6 +130,9 @@ contributed code and discussion (in rough order of appearance): * Reinder Vos de Wael * Peter Suter * Blake Dewey +* Guillaume Becq +* Joshua Newton +* Sandro from the Fedora Project License reprise =============== From 5a32a60918be2f73f8345376c30495028bc59046 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 8 Oct 2024 10:45:37 -0400 Subject: [PATCH 199/203] DOC: Remove end year from copyright --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 4255ff1841..9811651223 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -102,7 +102,7 @@ # General information about the project. project = 'NiBabel' -copyright = f"2006-2023, {authors['name']} <{authors['email']}>" +copyright = f"2006, {authors['name']} <{authors['email']}>" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From fcc2957c2a71a645508c38aeada94620de100ce3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 8 Oct 2024 10:47:51 -0400 Subject: [PATCH 200/203] MNT: Update support matrix for Python and numpy --- .github/workflows/test.yml | 10 ++++----- doc/source/installation.rst | 20 +++++++++--------- pyproject.toml | 12 +++++------ tox.ini | 41 ++++++++++++++++++------------------- 4 files changed, 41 insertions(+), 42 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9e5ddd5162..a741a40714 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -113,17 +113,17 @@ jobs: fail-fast: false matrix: os: ['ubuntu-latest', 'windows-latest', 'macos-13', 'macos-latest'] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] architecture: ['x64', 'x86', 'arm64'] dependencies: ['full', 'pre'] include: # Basic dependencies only - os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 dependencies: 'none' # Absolute minimum dependencies - os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 dependencies: 'min' # NoGIL - os: ubuntu-latest @@ -153,10 +153,10 @@ jobs: - os: macos-13 dependencies: pre # Drop pre tests for SPEC-0-unsupported Python versions - - python-version: '3.8' - dependencies: pre - python-version: '3.9' dependencies: pre + - python-version: '3.10' + dependencies: pre env: DEPENDS: ${{ matrix.dependencies }} diff --git a/doc/source/installation.rst b/doc/source/installation.rst index 4f747e7feb..983968c50f 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -81,16 +81,16 @@ is for you. Requirements ------------ -.. check these against pyproject.toml - -* Python_ 3.8 or greater -* NumPy_ 1.20 or greater -* Packaging_ 17.0 or greater -* importlib-resources_ 1.3 or greater (or Python 3.9+) -* SciPy_ (optional, for full SPM-ANALYZE support) -* h5py_ (optional, for MINC2 support) -* PyDICOM_ 1.0.0 or greater (optional, for DICOM support) -* `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) +.. 
check these against pyproject.toml / tox.ini + +* Python_ 3.9 or greater +* NumPy_ 1.22 or greater +* Packaging_ 20.0 or greater +* importlib-resources_ 5.12 or greater (or Python 3.12+) +* SciPy_ 1.8 or greater (optional, for full SPM-ANALYZE support) +* h5py_ 3.5 or greater (optional, for MINC2 support) +* PyDICOM_ 2.3.0 or greater (optional, for DICOM support) +* `Python Imaging Library`_ 8.4 or greater (optional, for PNG conversion in DICOMFS) * pytest_ (optional, to run the tests) * sphinx_ (optional, to build the documentation) diff --git a/pyproject.toml b/pyproject.toml index 18883b90ec..b62c0048af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,10 +9,10 @@ authors = [{ name = "NiBabel developers", email = "neuroimaging@python.org" }] maintainers = [{ name = "Christopher Markiewicz" }] readme = "README.rst" license = { text = "MIT License" } -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ - "numpy >=1.20", - "packaging >=17", + "numpy >=1.22", + "packaging >=20", "importlib_resources >=5.12; python_version < '3.12'", "typing_extensions >=4.6; python_version < '3.13'", ] @@ -23,11 +23,11 @@ classifiers = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Scientific/Engineering", ] # Version from setuptools_scm @@ -53,7 +53,7 @@ parrec2nii = "nibabel.cmdline.parrec2nii:main" [project.optional-dependencies] all = ["nibabel[dicomfs,minc2,spm,zstd]"] # Features -dicom = ["pydicom >=1.0.0"] +dicom = ["pydicom >=2.3"] dicomfs = ["nibabel[dicom]", "pillow"] minc2 = ["h5py"] spm = ["scipy"] @@ -62,7 +62,7 @@ zstd = ["pyzstd >= 0.14.3"] # tox should use these with extras instead of duplicating doc = [ "sphinx", - "matplotlib>=1.5.3", + "matplotlib>=3.5", "numpydoc", "texext", "tomli; python_version < '3.11'", diff --git a/tox.ini b/tox.ini index 0e0f81a7ae..82c13debc6 100644 --- a/tox.ini +++ b/tox.ini @@ -7,14 +7,14 @@ requires = tox>=4 envlist = # No preinstallations - py3{8,9,10,11,12}-none + py3{9,10,11,12,13}-none # Minimum Python - py38-{min,full} + py39-{min,full} # x86 support range py3{9,10,11}-{full,pre}-{x86,x64} py3{9,10,11}-pre-{x86,x64} # x64-only range - py312-{full,pre}-x64 + py3{12,13}-{full,pre}-x64 # Special environment for numpy 2.0-dev testing py313-dev-x64 install @@ -26,7 +26,6 @@ skip_missing_interpreters = true # Configuration that allows us to split tests across GitHub runners effectively [gh-actions] python = - 3.8: py38 3.9: py39 3.10: py310 3.11: py311 @@ -76,35 +75,35 @@ set_env = extras = test deps = # General minimum dependencies: pin based on API usage - min: packaging ==17 + # matplotlib 3.5 requires packaging 20 + min: packaging ==20 min: importlib_resources ==5.12; python_version < '3.12' min: typing_extensions ==4.6; python_version < '3.13' # NEP29/SPEC0 + 1yr: Test on minor release series within the last 3 years # We're extending this to all optional dependencies # This only affects the range that we test on; numpy is the only non-optional # dependency, and will be the only one to affect pip environment resolution. 
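    # (Worked instance of that policy, assuming the October 2024 release date
    # of 5.3.0: numpy 1.22 shipped in December 2021, making it the oldest
    # minor series inside the three-year window -- hence the bumps below.)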
-    min: numpy ==1.20
-    min: h5py ==2.10
-    min: indexed_gzip ==1.4
-    min: matplotlib ==3.4
-    min: pillow ==8.1
-    min: pydicom ==2.1
-    min: pyzstd ==0.14.3
-    min: scipy ==1.6
+    min: numpy ==1.22
+    min: h5py ==3.5
+    min: indexed_gzip ==1.6
+    min: matplotlib ==3.5
+    min: pillow ==8.4
+    min: pydicom ==2.3
+    min: pyzstd ==0.15.2
+    min: scipy ==1.8
     # Numpy 2.0 is a major breaking release; we cannot put much effort into
     # supporting until it's at least RC stable
-    pre: numpy <2.0.dev0
     dev: numpy >=2.1.dev0
     # Scipy stopped producing win32 wheels at py310
-    py3{8,9}-full-x86,x64,arm64: scipy >=1.6
+    py39-full-x86,x64,arm64: scipy >=1.8
     # Matplotlib depends on scipy, so cannot be built for py310 on x86
-    py3{8,9}-full-x86,x64,arm64: matplotlib >=3.4
+    py39-full-x86,x64,arm64: matplotlib >=3.5
     # h5py stopped producing win32 wheels at py39
-    py38-full-x86,{full,pre}-{x64,arm64}: h5py >=2.10
-    full,pre,dev: pillow >=8.1
-    full,pre: indexed_gzip >=1.4
-    full,pre,dev: pyzstd >=0.14.3
-    full,pre: pydicom >=2.1
+    {full,pre}-{x64,arm64}: h5py >=3.5
+    full,pre,dev: pillow >=8.4
+    full,pre: indexed_gzip >=1.6
+    full,pre,dev: pyzstd >=0.15.2
+    full,pre: pydicom >=2.3
     dev: pydicom @ git+https://github.com/pydicom/pydicom.git@main
 commands =

From 1d93526980d3b9107c49d2788bc04da3cfaf89ce Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 8 Oct 2024 11:13:43 -0400
Subject: [PATCH 201/203] MNT: Remove workarounds used for Python 3.8 support

---
 nibabel/__init__.py              | 5 +----
 nibabel/conftest.py              | 7 ++-----
 nibabel/filebasedimages.py       | 2 +-
 nibabel/nicom/ascconv.py         | 5 +----
 nibabel/nifti1.py                | 2 +-
 nibabel/spatialimages.py         | 6 +-----
 nibabel/testing/__init__.py      | 6 +-----
 nibabel/testing/np_features.py   | 4 ++--
 nibabel/tests/test_arrayproxy.py | 8 +++++---
 nibabel/tests/test_init.py       | 6 +-----
 nibabel/tests/test_openers.py    | 5 +++--
 nibabel/volumeutils.py           | 2 +-
 12 files changed, 20 insertions(+), 38 deletions(-)

diff --git a/nibabel/__init__.py b/nibabel/__init__.py
index aa90540b8f..c389c603fc 100644
--- a/nibabel/__init__.py
+++ b/nibabel/__init__.py
@@ -170,10 +170,7 @@ def bench(label=None, verbose=1, extra_argv=None):
     code : ExitCode
         Returns the result of running the tests as a ``pytest.ExitCode`` enum
     """
-    try:
-        from importlib.resources import as_file, files
-    except ImportError:
-        from importlib_resources import as_file, files
+    from importlib.resources import as_file, files
 
     args = []
     if extra_argv is not None:
diff --git a/nibabel/conftest.py b/nibabel/conftest.py
index b16a832f28..1d7389e867 100644
--- a/nibabel/conftest.py
+++ b/nibabel/conftest.py
@@ -10,10 +10,7 @@
 
 @pytest.fixture(scope='session', autouse=True)
 def legacy_printoptions():
-    from packaging.version import Version
-
-    if Version(np.__version__) >= Version('1.22'):
-        np.set_printoptions(legacy='1.21')
+    np.set_printoptions(legacy='1.21')
 
 
 @pytest.fixture
@@ -24,7 +21,7 @@ def max_digits():
         orig_max_str_digits = sys.get_int_max_str_digits()
         yield sys.set_int_max_str_digits
         sys.set_int_max_str_digits(orig_max_str_digits)
-    except AttributeError:  # pragma: no cover
+    except AttributeError:  # PY310 # pragma: no cover
        # Nothing to do for versions of Python that lack these methods
        # They were added as DoS protection in Python 3.11 and backported to
        # some other versions.
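The recurring pattern this patch deletes is the guarded import of backports that a
Python 3.9 floor makes unnecessary. A minimal standalone sketch of the modernized
idioms, illustrative only and not part of the patch (the ``testing_data_dir``
helper is hypothetical)::

    from functools import cache
    from importlib.resources import files  # stdlib on Python >= 3.9

    @cache  # Python 3.9+ spelling of functools.lru_cache(maxsize=None)
    def testing_data_dir():
        # files() returns a Traversable anchored at the package directory,
        # so the importlib_resources backport is no longer required
        return files('nibabel') / 'tests' / 'data'

    print(testing_data_dir().is_dir())  # True in an installed checkout
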
diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py
index c12644a2bd..086e31f123 100644
--- a/nibabel/filebasedimages.py
+++ b/nibabel/filebasedimages.py
@@ -23,7 +23,7 @@
 if ty.TYPE_CHECKING:
     from .filename_parser import ExtensionSpec, FileSpec
 
-FileSniff = ty.Tuple[bytes, str]
+FileSniff = tuple[bytes, str]
 
 ImgT = ty.TypeVar('ImgT', bound='FileBasedImage')
 HdrT = ty.TypeVar('HdrT', bound='FileBasedHeader')
diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py
index 6d72436039..2eca5a1579 100644
--- a/nibabel/nicom/ascconv.py
+++ b/nibabel/nicom/ascconv.py
@@ -90,10 +90,7 @@ def assign2atoms(assign_ast, default_class=int):
             target = target.value
             prev_target_type = OrderedDict
         elif isinstance(target, ast.Subscript):
-            if isinstance(target.slice, ast.Constant):  # PY39
-                index = target.slice.value
-            else:  # PY38
-                index = target.slice.value.n
+            index = target.slice.value
             atoms.append(Atom(target, prev_target_type, index))
             target = target.value
             prev_target_type = list
diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py
index b9c78c81bc..f0bd91fc48 100644
--- a/nibabel/nifti1.py
+++ b/nibabel/nifti1.py
@@ -671,7 +671,7 @@ def _mangle(self, dataset: DicomDataset) -> bytes:
         (38, 'eval', NiftiExtension),
         (40, 'matlab', NiftiExtension),
         (42, 'quantiphyse', NiftiExtension),
-        (44, 'mrs', NiftiExtension[ty.Dict[str, ty.Any]]),
+        (44, 'mrs', NiftiExtension[dict[str, ty.Any]]),
     ),
     fields=('code', 'label', 'handler'),
 )
diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py
index 19677c1a7d..a8e8993597 100644
--- a/nibabel/spatialimages.py
+++ b/nibabel/spatialimages.py
@@ -133,6 +133,7 @@
 from __future__ import annotations
 
 import typing as ty
+from functools import cache
 from typing import Literal
 
 import numpy as np
@@ -145,11 +146,6 @@
 from .viewers import OrthoSlicer3D
 from .volumeutils import shape_zoom_affine
 
-try:
-    from functools import cache
-except ImportError:  # PY38
-    from functools import lru_cache as cache
-
 if ty.TYPE_CHECKING:
     import io
     from collections.abc import Sequence
diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py
index be111747b2..b42baf2955 100644
--- a/nibabel/testing/__init__.py
+++ b/nibabel/testing/__init__.py
@@ -17,6 +17,7 @@
 import unittest
 import warnings
 from contextlib import nullcontext
+from importlib.resources import as_file, files
 from itertools import zip_longest
 
 import numpy as np
@@ -29,11 +30,6 @@
 if ty.TYPE_CHECKING:
     from importlib.resources.abc import Traversable
 
-try:
-    from importlib.resources import as_file, files
-except ImportError:  # PY38
-    from importlib_resources import as_file, files
-
 
 def get_test_data(
     subdir: ty.Literal['gifti', 'nicom', 'externals'] | None = None,
diff --git a/nibabel/testing/np_features.py b/nibabel/testing/np_features.py
index 226df64845..dd21aac2c0 100644
--- a/nibabel/testing/np_features.py
+++ b/nibabel/testing/np_features.py
@@ -1,11 +1,11 @@
 """Look for changes in numpy behavior over versions"""
 
-from functools import lru_cache
+from functools import cache
 
 import numpy as np
 
 
-@lru_cache(maxsize=None)
+@cache
 def memmap_after_ufunc() -> bool:
     """Return True if ufuncs on memmap arrays always return memmap arrays
 
diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py
index a79f63bc72..65b9131905 100644
--- a/nibabel/tests/test_arrayproxy.py
+++ b/nibabel/tests/test_arrayproxy.py
@@ -482,9 +482,11 @@ def test_keep_file_open_true_false_invalid():
 
     for test in tests:
         filetype, kfo, have_igzip, exp_persist, exp_kfo = test
-        with InTemporaryDirectory(), mock.patch(
-            'nibabel.openers.ImageOpener', CountingImageOpener
-        ), patch_indexed_gzip(have_igzip):
+        with (
+            InTemporaryDirectory(),
+            mock.patch('nibabel.openers.ImageOpener', CountingImageOpener),
+            patch_indexed_gzip(have_igzip),
+        ):
             fname = f'testdata.{filetype}'
             # create the test data file
             if filetype == 'gz':
diff --git a/nibabel/tests/test_init.py b/nibabel/tests/test_init.py
index d54f55053b..d339c4e26b 100644
--- a/nibabel/tests/test_init.py
+++ b/nibabel/tests/test_init.py
@@ -1,14 +1,10 @@
 import pathlib
 import unittest
+from importlib.resources import files
 from unittest import mock
 
 import pytest
 
-try:
-    from importlib.resources import files
-except ImportError:
-    from importlib_resources import files
-
 import nibabel as nib
 
diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py
index 0b58794331..05d0e04cd0 100644
--- a/nibabel/tests/test_openers.py
+++ b/nibabel/tests/test_openers.py
@@ -121,8 +121,9 @@ def patch_indexed_gzip(state):
         values = (True, MockIndexedGzipFile)
     else:
         values = (False, GzipFile)
-    with mock.patch('nibabel.openers.HAVE_INDEXED_GZIP', values[0]), mock.patch(
-        'nibabel.openers.IndexedGzipFile', values[1], create=True
+    with (
+        mock.patch('nibabel.openers.HAVE_INDEXED_GZIP', values[0]),
+        mock.patch('nibabel.openers.IndexedGzipFile', values[1], create=True),
     ):
         yield
 
diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py
index 6e43f79186..d0ebb46a7b 100644
--- a/nibabel/volumeutils.py
+++ b/nibabel/volumeutils.py
@@ -235,7 +235,7 @@ def value_set(self, name: str | None = None) -> OrderedSet:
 endian_codes = Recoder(_endian_codes)
 
 
-class DtypeMapper(ty.Dict[ty.Hashable, ty.Hashable]):
+class DtypeMapper(dict[ty.Hashable, ty.Hashable]):
     """Specialized mapper for numpy dtypes
 
     We pass this mapper into the Recoder class to deal with numpy dtype

From 48dcb4702f8cea1f21fe1fe7a38ad80132715073 Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 8 Oct 2024 11:14:09 -0400
Subject: [PATCH 202/203] STY: ruff check --fix

---
 nibabel/tests/test_tripwire.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py
index d7daefe0b1..4bf91923f2 100644
--- a/nibabel/tests/test_tripwire.py
+++ b/nibabel/tests/test_tripwire.py
@@ -16,5 +16,5 @@ def test_tripwire():
     with pytest.raises(TripWireError):
         silly_module_name.do_silly_thing
     # Check AttributeError can be checked too
-    with pytest.raises(AttributeError) as err:
+    with pytest.raises(AttributeError):
         silly_module_name.__wrapped__

From 249986b169f7845c6ce8e19ac36546aef2763fd1 Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 8 Oct 2024 13:54:03 -0400
Subject: [PATCH 203/203] MNT: Update release notes translator

---
 tools/markdown_release_notes.py | 56 ++++++++++++++++++++++++++++++---
 1 file changed, 51 insertions(+), 5 deletions(-)

diff --git a/tools/markdown_release_notes.py b/tools/markdown_release_notes.py
index 73bdbf7752..cdae474f51 100644
--- a/tools/markdown_release_notes.py
+++ b/tools/markdown_release_notes.py
@@ -1,14 +1,53 @@
 #!/usr/bin/env python
 import re
 import sys
+from collections import defaultdict
+from functools import cache
+from operator import call
 from pathlib import Path
 
+from sphinx.ext.intersphinx import fetch_inventory
+
 CHANGELOG = Path(__file__).parent.parent / 'Changelog'
 
 # Match release lines like "5.2.0 (Monday 11 December 2023)"
 RELEASE_REGEX = re.compile(r"""((?:\d+)\.(?:\d+)\.(?:\d+)) \(\w+ \d{1,2} \w+ \d{4}\)$""")
 
 
+class MockConfig:
+    intersphinx_timeout: int | None = None
+    tls_verify = False
+    tls_cacerts: str | dict[str, str] | None = None
+    user_agent: str = ''
+
+
+@call
+class MockApp:
+    srcdir = ''
+    config = MockConfig()
+
+
+fetch_inv = cache(fetch_inventory)
+
+
+def get_intersphinx(obj):
+    module = obj.split('.', 1)[0]
+
+    registry = defaultdict(lambda: 'https://docs.python.org/3')
+    registry.update(
+        numpy='https://numpy.org/doc/stable',
+    )
+
+    base_url = registry[module]
+
+    inventory = fetch_inv(MockApp, '', f'{base_url}/objects.inv')
+    # Check py: first, then whatever
+    for objclass in sorted(inventory, key=lambda x: not x.startswith('py:')):
+        if obj in inventory[objclass]:
+            return f'{base_url}/{inventory[objclass][obj][2]}'
+    raise ValueError(f"Couldn't lookup {obj}")
+
+
 def main():
     version = sys.argv[1]
     output = sys.argv[2]
@@ -46,7 +85,7 @@ def main():
     release_notes = re.sub(r'\n +', ' ', release_notes)
 
     # Replace pr/ with # for GitHub
-    release_notes = re.sub(r'\(pr/(\d+)\)', r'(#\1)', release_notes)
+    release_notes = re.sub(r'pr/(\d+)', r'#\1', release_notes)
 
     # Replace :mod:`package.X` with [package.X](...)
     release_notes = re.sub(
@@ -76,6 +115,14 @@ def main():
         r'[\3](https://nipy.org/nibabel/reference/\1.html#\1.\2.\3)',
         release_notes,
     )
+    # Replace :<role>:`<target>` with intersphinx lookup
+    for ref in re.findall(r'(:[^:]*:`~?\w[\w.]+\w`)', release_notes):
+        objclass, tilde, module, obj = re.match(r':([^:]*):`(~?)([\w.]+)\.(\w+)`', ref).groups()
+        url = get_intersphinx(f'{module}.{obj}')
+        mdlink = f'[{"" if tilde else module + "."}{obj}]({url})'
+        release_notes = release_notes.replace(ref, mdlink)
+    # Replace RST links with Markdown links
+    release_notes = re.sub(r'`([^<`]*) <([^>]*)>`_+', r'[\1](\2)', release_notes)
 
     def python_doc(match):
         module = match.group(1)
@@ -84,10 +131,9 @@ def main():
 
     release_notes = re.sub(r':meth:`~([\w.]+)\.(\w+)`', python_doc, release_notes)
 
-    output.write('## Release notes\n\n')
-    output.write(release_notes)
-
-    output.close()
+    with output:
+        output.write('## Release notes\n\n')
+        output.write(release_notes)
 
 
 if __name__ == '__main__':
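
To make the new intersphinx pass concrete, here is a condensed, self-contained
sketch of the rewrite it performs. A canned dictionary stands in for the real
``fetch_inventory`` call, and the sample Changelog fragment is invented::

    import re

    notes = 'Fix scaling in :class:`~numpy.ndarray` wrappers (pr/1234)'
    # Canned lookup standing in for get_intersphinx() / objects.inv
    inventory = {
        'numpy.ndarray': 'https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html',
    }

    notes = re.sub(r'pr/(\d+)', r'#\1', notes)  # pr/NNNN -> #NNNN
    for ref in re.findall(r'(:[^:]*:`~?\w[\w.]+\w`)', notes):
        _, tilde, module, obj = re.match(r':([^:]*):`(~?)([\w.]+)\.(\w+)`', ref).groups()
        text = f'{"" if tilde else module + "."}{obj}'  # ~ drops the module prefix
        notes = notes.replace(ref, f'[{text}]({inventory[f"{module}.{obj}"]})')

    print(notes)
    # Fix scaling in [ndarray](https://numpy.org/...ndarray.html) wrappers (#1234)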