From 9ae20164078497f8148b304dc7804af234d62e3b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Sep 2021 09:06:40 +0200 Subject: [PATCH 0001/1078] Bump setuptools-scm from 6.0.1 to 6.3.1 (#827) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 6.0.1 to 6.3.1. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v6.0.1...v6.3.1) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 1c5f810fea..50b076fce9 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.16.3 numcodecs==0.8.1 msgpack-python==0.5.6 -setuptools-scm==6.0.1 +setuptools-scm==6.3.1 # test requirements pytest==6.2.5 From ad5d97afca14638f822e7ffd9efb205deb4b68aa Mon Sep 17 00:00:00 2001 From: jmoore Date: Thu, 9 Sep 2021 17:27:35 +0200 Subject: [PATCH 0002/1078] Add FUNDING.yml for NumFOCUS button on GitHub --- FUNDING.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 FUNDING.yml diff --git a/FUNDING.yml b/FUNDING.yml new file mode 100644 index 0000000000..a2faf1a285 --- /dev/null +++ b/FUNDING.yml @@ -0,0 +1,2 @@ +github: [numfocus] +custom: ['https://numfocus.org/donate-to-zarr'] From 5b0437537dc6e9067c9627857aacfc7e873439ac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Sep 2021 10:04:13 +0200 Subject: [PATCH 0003/1078] Bump setuptools-scm from 6.3.1 to 6.3.2 (#830) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 6.3.1 to 6.3.2. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v6.3.1...v6.3.2) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 50b076fce9..12ae808ce6 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.16.3 numcodecs==0.8.1 msgpack-python==0.5.6 -setuptools-scm==6.3.1 +setuptools-scm==6.3.2 # test requirements pytest==6.2.5 From 7ec8c640f81db2c32a829ea6d0ea44bdb8ce235d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 Sep 2021 09:34:00 +0200 Subject: [PATCH 0004/1078] Bump tox from 3.24.3 to 3.24.4 (#833) Bumps [tox](https://github.com/tox-dev/tox) from 3.24.3 to 3.24.4. 
- [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/master/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/3.24.3...3.24.4) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f4f1c59e90..e2d5da4a48 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -12,7 +12,7 @@ types-redis types-setuptools pymongo==3.12.0 # optional test requirements -tox==3.24.3 +tox==3.24.4 coverage flake8==3.9.2 pytest-cov==2.12.1 From be3d657484590adacaedf980a2951ddfa92b68b8 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Fri, 17 Sep 2021 21:01:05 +0200 Subject: [PATCH 0005/1078] Ignore None dim_separators in save_array (fix #831) (#832) * Ignore None dim_separators in save_array (fix #831) If a store does not have a `dimension_separator` set, then requesting a non-`None` value should not be an error. * Fix flake8 --- zarr/creation.py | 5 +++-- zarr/tests/test_convenience.py | 12 ++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/zarr/creation.py b/zarr/creation.py index 28f3b7b89d..0e2d2041ba 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -126,11 +126,12 @@ def create(shape, chunks=True, dtype=None, compressor='default', if dimension_separator is None: dimension_separator = getattr(store, "_dimension_separator", None) else: - if getattr(store, "_dimension_separator", None) != dimension_separator: + store_separator = getattr(store, "_dimension_separator", None) + if store_separator not in (None, dimension_separator): raise ValueError( f"Specified dimension_separator: {dimension_separator} " f"conflicts with store's separator: " f"{store_separator}") dimension_separator = normalize_dimension_separator(dimension_separator) # initialize array metadata diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index a5ac40e371..d2bd91038b 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -18,6 +18,7 @@ open_consolidated, save, save_group, + save_array, copy_all, ) from zarr.core import Array @@ -225,6 +226,17 @@ def test_consolidated_with_chunk_store(): chunk_store=chunk_store) +@pytest.mark.parametrize("options", ( + {"dimension_separator": "/"}, + {"dimension_separator": "."}, + {"dimension_separator": None}, +)) +def test_save_array_separator(tmpdir, options): + data = np.arange(6).reshape((3, 2)) + url = tmpdir.join("test.zarr") + save_array(url, data, **options) + + class TestCopyStore(unittest.TestCase): def setUp(self): From dca87fcbe96ff3be5d74e662815ecb5e67ce9b28 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Sun, 19 Sep 2021 10:22:14 -0400 Subject: [PATCH 0006/1078] N5FSStore (#793) * Drop skip_if_nested_chunks from test_storage.py * Add failing nested test * Make DirectoryStore dimension_separator aware * Migrate key logic to core rather than storage Previous tests (now commented out) used logic in the store classes to convert "0/0" keys into "0.0" keys, forcing the store to be aware of array details. This tries to swap the logic so that stores are responsible for passing dimension separator values down to the arrays only.
Since arrays can also get the dimension_separator value from a .zarray file they are now in charge. * Fix linting in new test * Extend the test suite for dim_sep * add n5fsstore and tests * slightly smarter kwarg interception * remove outdated unittest ref and fix the name of a test func * fix massive string block and fix default key_separator kwarg for FSStore * flake8 * promote n5store to toplevel import and fix examples in docstring * Try fsspec 2021.7 (see #802) * Revert "Try fsspec 2021.7 (see #802)" This reverts commit 68adca50b62441dabc6b3f48364fe3dcf35eeb69. * Add missing core tests for N5FSStore, and rchanges required for making them pass * tmp: debug * uncomment N5 chunk ordering test * more commented tests get uncommented * add dimension_separator to array metadata adaptor * Revert "tmp: debug" This reverts commit ee9cdbc3f18626005e39f36c4e28a8f11d3ed3e9. * Attempt failed: keeping '.' and switching * Revert "Attempt failed: keeping '.' and switching" This reverts commit 51b31094d7d31519d5db894153ad96b8944746f1. * regex: attempt failed due to slight diff in files * Revert "regex: attempt failed due to slight diff in files" This reverts commit 3daea7c534cc599830a60db7e14f2610fce793e9. * N5: use "." internally for dimension separation This allows N5 to detect the split between key and chunks and pre-process them (re-ordering and changing the separator). see: #773 #793 * move FSSpec import guard * remove os.path.sep concatenation in listdir that was erroring a test, and add a mea culpa docstring about the dimension_separator for n5 stores * resolve merge conflicts in favor of upstream * make listdir implementation for n5fsstore look more like fsstore's listdir, and add crucial lstrip * Update hexdigest tests for N5Stores to account for the presence of the dimension_separator keyword now present in metadata * Add tests for dimension_separator in array meta for N5Stores * N5FSStore: try to increase code coverage * Adds a test for the dimension_separator warning * uses the parent test_complex for listdir * "nocover" the import error since fsspec is ever present * flake8 * add chunk nesting test to N5FSStore test suite * make array_meta_key, group_meta_key, attrs_key private * N5FSStore: Remove ImportError test FSStore only throws ModuleNotFoundError on initialization rather than on import. Therefore N5FSStore does the same. If this *weren't* the case, then the import in zarr/init would need to test the import as well, which isn't the case. 
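For illustration, a rough sketch of the resulting behavior (the path and array shape here are hypothetical, not part of this change):

>>> import zarr
>>> store = zarr.N5FSStore('data/example.n5', auto_mkdir=True)
>>> z = zarr.zeros((10, 10, 10), chunks=(5, 5, 5), store=store, overwrite=True)
>>> z[...] = 42
>>> # the array reports "." as its dimension separator, but the chunk with
>>> # Zarr key "0.0.1" lands on disk at "1/0/0", since N5 stores chunks
>>> # with reversed dimension order and "/" separators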
Co-authored-by: jmoore Co-authored-by: Josh Moore --- zarr/__init__.py | 2 +- zarr/n5.py | 294 ++++++++++++++++++++++++++++++- zarr/storage.py | 19 ++- zarr/tests/test_core.py | 30 +++- zarr/tests/test_storage.py | 152 +++++++++++++++++-- 5 files changed, 466 insertions(+), 31 deletions(-) diff --git a/zarr/__init__.py b/zarr/__init__.py index 8079bab071..7558ce77de 100644 --- a/zarr/__init__.py +++ b/zarr/__init__.py @@ -9,7 +9,7 @@ zeros_like) from zarr.errors import CopyError, MetadataError from zarr.hierarchy import Group, group, open_group -from zarr.n5 import N5Store +from zarr.n5 import N5Store, N5FSStore from zarr.storage import (ABSStore, DBMStore, DictStore, DirectoryStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, NestedDirectoryStore, RedisStore, SQLiteStore, diff --git a/zarr/n5.py b/zarr/n5.py index 45e2cdda95..797558fa2d 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -11,7 +11,8 @@ from numcodecs.registry import get_codec, register_codec from .meta import ZARR_FORMAT, json_dumps, json_loads -from .storage import NestedDirectoryStore, _prog_ckey, _prog_number +from .storage import FSStore +from .storage import NestedDirectoryStore, _prog_ckey, _prog_number, normalize_storage_path from .storage import array_meta_key as zarr_array_meta_key from .storage import attrs_key as zarr_attrs_key from .storage import group_meta_key as zarr_group_meta_key @@ -281,12 +282,298 @@ def _contains_attrs(self, path): return len(attrs) > 0 +class N5FSStore(FSStore): + """Implementation of the N5 format (https://github.com/saalfeldlab/n5) using `fsspec`, + which allows storage on a variety of filesystems. Based on `zarr.N5Store`. + Parameters + ---------- + path : string + Location of directory to use as the root of the storage hierarchy. + normalize_keys : bool, optional + If True, all store keys will be normalized to use lower case characters + (e.g. 'foo' and 'FOO' will be treated as equivalent). This can be + useful to avoid potential discrepancies between case-sensitive and + case-insensitive file systems. Default value is False. + + Examples + -------- + Store a single array:: + + >>> import zarr + >>> store = zarr.N5FSStore('data/array.n5', auto_mkdir=True) + >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> z[...] = 42 + + Store a group:: + + >>> store = zarr.N5FSStore('data/group.n5', auto_mkdir=True) + >>> root = zarr.group(store=store, overwrite=True) + >>> foo = root.create_group('foo') + >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) + >>> bar[...] = 42 + + Notes + ----- + This is an experimental feature. + Safe to write in multiple threads or processes. + + Be advised that the `_dimension_separator` property of this store + (and arrays it creates) is ".", but chunks saved by this store will + in fact be "/" separated, as prescribed by the N5 format. + + This is counter-intuitive (to say the least), but not arbitrary. + Chunks in N5 format are stored with reversed dimension order + relative to Zarr chunks: a chunk of a 3D Zarr array would be stored + on a file system as `/0/1/2`, but in N5 the same chunk would be + stored as `/2/1/0`. Therefore, stores targeting N5 must intercept + chunk keys and flip the order of the dimensions before writing to + storage, and this procedure requires chunk keys with "." separated + dimensions, hence the Zarr arrays targeting N5 have the deceptive + "." dimension separator.
+ """ + _array_meta_key = 'attributes.json' + _group_meta_key = 'attributes.json' + _attrs_key = 'attributes.json' + + def __init__(self, *args, **kwargs): + if 'dimension_separator' in kwargs: + kwargs.pop('dimension_separator') + warnings.warn('Keyword argument `dimension_separator` will be ignored') + dimension_separator = "." + super().__init__(*args, dimension_separator=dimension_separator, **kwargs) + + def _swap_separator(self, key): + segments = list(key.split('/')) + if segments: + last_segment = segments[-1] + if _prog_ckey.match(last_segment): + coords = list(last_segment.split('.')) + last_segment = '/'.join(coords[::-1]) + segments = segments[:-1] + [last_segment] + key = '/'.join(segments) + return key + + def _normalize_key(self, key): + if is_chunk_key(key): + key = invert_chunk_coords(key) + + key = normalize_storage_path(key).lstrip("/") + if key: + *bits, end = key.split("/") + + if end not in (self._array_meta_key, self._group_meta_key, self._attrs_key): + end = end.replace(".", "/") + key = "/".join(bits + [end]) + return key.lower() if self.normalize_keys else key + + def __getitem__(self, key): + if key.endswith(zarr_group_meta_key): + + key = key.replace(zarr_group_meta_key, self._group_meta_key) + value = group_metadata_to_zarr(self._load_n5_attrs(key)) + + return json_dumps(value) + + elif key.endswith(zarr_array_meta_key): + + key = key.replace(zarr_array_meta_key, self._array_meta_key) + value = array_metadata_to_zarr(self._load_n5_attrs(key)) + + return json_dumps(value) + + elif key.endswith(zarr_attrs_key): + + key = key.replace(zarr_attrs_key, self._attrs_key) + value = attrs_to_zarr(self._load_n5_attrs(key)) + + if len(value) == 0: + raise KeyError(key) + else: + return json_dumps(value) + + elif is_chunk_key(key): + key = self._swap_separator(key) + + return super().__getitem__(key) + + def __setitem__(self, key, value): + if key.endswith(zarr_group_meta_key): + + key = key.replace(zarr_group_meta_key, self._group_meta_key) + + n5_attrs = self._load_n5_attrs(key) + n5_attrs.update(**group_metadata_to_n5(json_loads(value))) + + value = json_dumps(n5_attrs) + + elif key.endswith(zarr_array_meta_key): + + key = key.replace(zarr_array_meta_key, self._array_meta_key) + + n5_attrs = self._load_n5_attrs(key) + n5_attrs.update(**array_metadata_to_n5(json_loads(value))) + + value = json_dumps(n5_attrs) + + elif key.endswith(zarr_attrs_key): + + key = key.replace(zarr_attrs_key, self._attrs_key) + + n5_attrs = self._load_n5_attrs(key) + zarr_attrs = json_loads(value) + + for k in n5_keywords: + if k in zarr_attrs.keys(): + raise ValueError( + "Can not set attribute %s, this is a reserved N5 keyword" % k + ) + + # replace previous user attributes + for k in list(n5_attrs.keys()): + if k not in n5_keywords: + del n5_attrs[k] + + # add new user attributes + n5_attrs.update(**zarr_attrs) + + value = json_dumps(n5_attrs) + + elif is_chunk_key(key): + key = self._swap_separator(key) + + super().__setitem__(key, value) + + def __delitem__(self, key): + + if key.endswith(zarr_group_meta_key): # pragma: no cover + key = key.replace(zarr_group_meta_key, self._group_meta_key) + elif key.endswith(zarr_array_meta_key): # pragma: no cover + key = key.replace(zarr_array_meta_key, self._array_meta_key) + elif key.endswith(zarr_attrs_key): # pragma: no cover + key = key.replace(zarr_attrs_key, self._attrs_key) + elif is_chunk_key(key): + key = self._swap_separator(key) + + super().__delitem__(key) + + def __contains__(self, key): + if key.endswith(zarr_group_meta_key): + + key = 
key.replace(zarr_group_meta_key, self._group_meta_key) + if key not in self: + return False + # group if not a dataset (attributes do not contain 'dimensions') + return "dimensions" not in self._load_n5_attrs(key) + + elif key.endswith(zarr_array_meta_key): + + key = key.replace(zarr_array_meta_key, self._array_meta_key) + # array if attributes contain 'dimensions' + return "dimensions" in self._load_n5_attrs(key) + + elif key.endswith(zarr_attrs_key): + + key = key.replace(zarr_attrs_key, self._attrs_key) + return self._contains_attrs(key) + + elif is_chunk_key(key): + key = self._swap_separator(key) + + return super().__contains__(key) + + def __eq__(self, other): + return isinstance(other, N5FSStore) and self.path == other.path + + def listdir(self, path=None): + if path is not None: + path = invert_chunk_coords(path) + + # We can't use NestedDirectoryStore's listdir, as it requires + # array_meta_key to be present in array directories, which this store + # doesn't provide. + children = super().listdir(path=path) + if self._is_array(path): + + # replace n5 attribute file with respective zarr attribute files + children.remove(self._array_meta_key) + children.append(zarr_array_meta_key) + if self._contains_attrs(path): + children.append(zarr_attrs_key) + + # special handling of directories containing an array to map + # inverted nested chunk keys back to standard chunk keys + new_children = [] + root_path = self.dir_path(path) + for entry in children: + entry_path = os.path.join(root_path, entry) + if _prog_number.match(entry) and self.fs.isdir(entry_path): + for file_name in self.fs.find(entry_path): + file_path = os.path.join(root_path, file_name) + rel_path = file_path.split(root_path)[1] + new_child = rel_path.lstrip('/').replace('/', ".") + new_children.append(invert_chunk_coords(new_child)) + else: + new_children.append(entry) + return sorted(new_children) + + elif self._is_group(path): + + # replace n5 attribute file with respective zarr attribute files + children.remove(self._group_meta_key) + children.append(zarr_group_meta_key) + if self._contains_attrs(path): # pragma: no cover + children.append(zarr_attrs_key) + return sorted(children) + else: + return children + + def _load_n5_attrs(self, path): + try: + s = super().__getitem__(path) + return json_loads(s) + except KeyError: + return {} + + def _is_group(self, path): + + if path is None: + attrs_key = self._attrs_key + else: + attrs_key = os.path.join(path, self._attrs_key) + + n5_attrs = self._load_n5_attrs(attrs_key) + return len(n5_attrs) > 0 and "dimensions" not in n5_attrs + + def _is_array(self, path): + + if path is None: + attrs_key = self._attrs_key + else: + attrs_key = os.path.join(path, self._attrs_key) + + return "dimensions" in self._load_n5_attrs(attrs_key) + + def _contains_attrs(self, path): + + if path is None: + attrs_key = self._attrs_key + else: + if not path.endswith(self._attrs_key): + attrs_key = os.path.join(path, self._attrs_key) + else: # pragma: no cover + attrs_key = path + + attrs = attrs_to_zarr(self._load_n5_attrs(attrs_key)) + return len(attrs) > 0 + + def is_chunk_key(key): + rv = False segments = list(key.split('/')) if segments: last_segment = segments[-1] - return _prog_ckey.match(last_segment) - return False # pragma: no cover + rv = _prog_ckey.match(last_segment) + return rv def invert_chunk_coords(key): @@ -373,6 +660,7 @@ def array_metadata_to_zarr(array_metadata): array_metadata['fill_value'] = 0 # also if None was requested array_metadata['order'] = 'C' array_metadata['filters'] = [] 
+ array_metadata['dimension_separator'] = '.' compressor_config = array_metadata['compressor'] compressor_config = compressor_config_to_zarr(compressor_config) diff --git a/zarr/storage.py b/zarr/storage.py index 6ca6271dbf..395551687f 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1065,22 +1065,28 @@ class FSStore(MutableMapping): Separator placed between the dimensions of a chunk. storage_options : passed to the fsspec implementation """ + _array_meta_key = array_meta_key + _group_meta_key = group_meta_key + _attrs_key = attrs_key - _META_KEYS = (attrs_key, group_meta_key, array_meta_key) - - def __init__(self, url, normalize_keys=False, key_separator=None, + def __init__(self, url, normalize_keys=True, key_separator=None, mode='w', exceptions=(KeyError, PermissionError, IOError), dimension_separator=None, **storage_options): import fsspec self.normalize_keys = normalize_keys + + protocol, _ = fsspec.core.split_protocol(url) + # set auto_mkdir to True for local file system + if protocol in (None, "file") and not storage_options.get("auto_mkdir"): + storage_options["auto_mkdir"] = True + self.map = fsspec.get_mapper(url, **storage_options) self.fs = self.map.fs # for direct operations self.path = self.fs._strip_protocol(url) self.mode = mode self.exceptions = exceptions - # For backwards compatibility. Guaranteed to be non-None if key_separator is not None: dimension_separator = key_separator @@ -1091,7 +1097,6 @@ def __init__(self, url, normalize_keys=False, key_separator=None, # Pass attributes to array creation self._dimension_separator = dimension_separator - if self.fs.exists(self.path) and not self.fs.isdir(self.path): raise FSPathExistNotDir(url) @@ -1100,7 +1105,7 @@ def _normalize_key(self, key): if key: *bits, end = key.split('/') - if end not in FSStore._META_KEYS: + if end not in (self._array_meta_key, self._group_meta_key, self._attrs_key): end = end.replace('.', self.key_separator) key = '/'.join(bits + [end]) @@ -1178,7 +1183,7 @@ def listdir(self, path=None): if self.key_separator != "/": return children else: - if array_meta_key in children: + if self._array_meta_key in children: # special handling of directories containing an array to map nested chunk # keys back to standard chunk keys new_children = [] diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 9043a32a51..be2feffe8a 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -19,7 +19,7 @@ from zarr.core import Array from zarr.meta import json_loads -from zarr.n5 import N5Store, n5_keywords +from zarr.n5 import N5Store, N5FSStore, n5_keywords from zarr.storage import ( ABSStore, DBMStore, @@ -1984,12 +1984,12 @@ def test_compressors(self): def expected(self): return [ - 'c6b83adfad999fbd865057531d749d87cf138f58', - 'a3d6d187536ecc3a9dd6897df55d258e2f52f9c5', - 'ec2e008525ae09616dbc1d2408cbdb42532005c8', - 'b63f031031dcd5248785616edcb2d6fe68203c28', - '0cfc673215a8292a87f3c505e2402ce75243c601', - ] + '4e9cf910000506455f82a70938a272a3fce932e5', + 'f9d4cbf1402901f63dea7acf764d2546e4b6aa38', + '1d8199f5f7b70d61aa0d29cc375212c3df07d50a', + '874880f91aa6736825584509144afe6b06b0c05c', + 'e2258fedc74752196a8c8383db49e27193c995e2', + ] def test_hexdigest(self): found = [] @@ -2018,6 +2018,22 @@ def test_hexdigest(self): assert self.expected() == found +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestArrayWithN5FSStore(TestArrayWithN5Store): + + @staticmethod + def create_array(read_only=False, **kwargs): + path = mkdtemp() + atexit.register(shutil.rmtree, 
path) + store = N5FSStore(path) + cache_metadata = kwargs.pop('cache_metadata', True) + cache_attrs = kwargs.pop('cache_attrs', True) + kwargs.setdefault('compressor', Zlib(1)) + init_array(store, **kwargs) + return Array(store, read_only=read_only, cache_metadata=cache_metadata, + cache_attrs=cache_attrs) + + class TestArrayWithDBMStore(TestArray): @staticmethod diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 4296ee6364..1412ec2099 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -23,7 +23,7 @@ from zarr.meta import (ZARR_FORMAT, decode_array_metadata, decode_group_metadata, encode_array_metadata, encode_group_metadata) -from zarr.n5 import N5Store +from zarr.n5 import N5Store, N5FSStore from zarr.storage import (ABSStore, ConsolidatedMetadataStore, DBMStore, DictStore, DirectoryStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, NestedDirectoryStore, @@ -900,13 +900,20 @@ def mock_walker_no_slash(_path): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestFSStore(StoreTests): - def create_store(self, normalize_keys=False, dimension_separator="."): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) + def create_store(self, normalize_keys=False, + dimension_separator=".", + path=None, + **kwargs): + + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = FSStore( path, normalize_keys=normalize_keys, - dimension_separator=dimension_separator) + dimension_separator=dimension_separator, + **kwargs) return store def test_init_array(self): @@ -937,8 +944,9 @@ def test_dimension_separator(self): def test_complex(self): path1 = tempfile.mkdtemp() path2 = tempfile.mkdtemp() - store = FSStore("simplecache::file://" + path1, - simplecache={"same_names": True, "cache_storage": path2}) + store = self.create_store(path="simplecache::file://" + path1, + simplecache={"same_names": True, + "cache_storage": path2}) assert not store assert not os.listdir(path1) assert not os.listdir(path2) @@ -949,6 +957,20 @@ def test_complex(self): assert store["foo"] == b"hello" assert 'foo' in os.listdir(path2) + def test_deep_ndim(self): + import zarr + + store = self.create_store() + foo = zarr.open_group(store=store) + bar = foo.create_group("bar") + baz = bar.create_dataset("baz", + shape=(4, 4, 4), + chunks=(2, 2, 2), + dtype="i8") + baz[:] = 1 + assert set(store.listdir()) == set([".zgroup", "bar"]) + assert foo["bar"]["baz"][(0, 0, 0)] == 1 + def test_not_fsspec(self): import zarr path = tempfile.mkdtemp() @@ -979,10 +1001,10 @@ def test_create(self): def test_read_only(self): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - store = FSStore(path) + store = self.create_store(path=path) store['foo'] = b"bar" - store = FSStore(path, mode='r') + store = self.create_store(path=path, mode='r') with pytest.raises(PermissionError): store['foo'] = b"hex" @@ -1000,11 +1022,11 @@ def test_read_only(self): filepath = os.path.join(path, "foo") with pytest.raises(ValueError): - FSStore(filepath, mode='r') + self.create_store(path=filepath, mode='r') def test_eq(self): - store1 = FSStore("anypath") - store2 = FSStore("anypath") + store1 = self.create_store(path="anypath") + store2 = self.create_store(path="anypath") assert store1 == store2 @pytest.mark.usefixtures("s3") @@ -1187,7 +1209,7 @@ def test_value_error(self): class TestN5Store(TestNestedDirectoryStore): def create_store(self, normalize_keys=False): - path = tempfile.mkdtemp(suffix='.n5') + path = 
tempfile.mkdtemp() atexit.register(atexit_rmtree, path) store = N5Store(path, normalize_keys=normalize_keys) return store @@ -1228,6 +1250,7 @@ def test_init_array(self): assert default_compressor.get_config() == compressor_config # N5Store always has a fill value of 0 assert meta['fill_value'] == 0 + assert meta['dimension_separator'] == '.' def test_init_array_path(self): path = 'foo/bar' @@ -1297,6 +1320,109 @@ def test_filters(self): init_array(store, shape=1000, chunks=100, filters=filters) +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestN5FSStore(TestFSStore): + def create_store(self, normalize_keys=False, path=None, **kwargs): + + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + + store = N5FSStore(path, normalize_keys=normalize_keys, **kwargs) + return store + + def test_equal(self): + store_a = self.create_store() + store_b = N5FSStore(store_a.path) + assert store_a == store_b + + # This is copied wholesale from the N5Store tests. The same test could + # be run by making TestN5FSStore inherit from both TestFSStore and + # TestN5Store, but a direct copy is arguably more explicit. + def test_chunk_nesting(self): + store = self.create_store() + store['0.0'] = b'xxx' + assert '0.0' in store + assert b'xxx' == store['0.0'] + # assert b'xxx' == store['0/0'] + store['foo/10.20.30'] = b'yyy' + assert 'foo/10.20.30' in store + assert b'yyy' == store['foo/10.20.30'] + # N5 reverses axis order + assert b'yyy' == store['foo/30/20/10'] + store['42'] = b'zzz' + assert '42' in store + assert b'zzz' == store['42'] + + def test_init_array(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100) + + # check metadata + assert array_meta_key in store + meta = decode_array_metadata(store[array_meta_key]) + assert ZARR_FORMAT == meta['zarr_format'] + assert (1000,) == meta['shape'] + assert (100,) == meta['chunks'] + assert np.dtype(None) == meta['dtype'] + # N5Store wraps the actual compressor + compressor_config = meta['compressor']['compressor_config'] + assert default_compressor.get_config() == compressor_config + # N5Store always has a fill value of 0 + assert meta['fill_value'] == 0 + assert meta['dimension_separator'] == '.' 
+ + def test_init_array_path(self): + path = 'foo/bar' + store = self.create_store() + init_array(store, shape=1000, chunks=100, path=path) + + # check metadata + key = path + '/' + array_meta_key + assert key in store + meta = decode_array_metadata(store[key]) + assert ZARR_FORMAT == meta['zarr_format'] + assert (1000,) == meta['shape'] + assert (100,) == meta['chunks'] + assert np.dtype(None) == meta['dtype'] + # N5Store wraps the actual compressor + compressor_config = meta['compressor']['compressor_config'] + assert default_compressor.get_config() == compressor_config + # N5Store always has a fill value of 0 + assert meta['fill_value'] == 0 + + def test_init_array_compat(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100, compressor='none') + meta = decode_array_metadata(store[array_meta_key]) + # N5Store wraps the actual compressor + compressor_config = meta['compressor']['compressor_config'] + assert compressor_config is None + + def test_init_array_overwrite(self): + self._test_init_array_overwrite('C') + + def test_init_array_overwrite_path(self): + self._test_init_array_overwrite_path('C') + + def test_init_array_overwrite_chunk_store(self): + self._test_init_array_overwrite_chunk_store('C') + + def test_init_group_overwrite(self): + self._test_init_group_overwrite('C') + + def test_init_group_overwrite_path(self): + self._test_init_group_overwrite_path('C') + + def test_init_group_overwrite_chunk_store(self): + self._test_init_group_overwrite_chunk_store('C') + + def test_dimension_separator(self): + + with pytest.warns(UserWarning, match='dimension_separator'): + self.create_store(dimension_separator='/') + + @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestNestedFSStore(TestNestedDirectoryStore): From 5c0ef7ff56e957033bde3d6c57a485efb0490b52 Mon Sep 17 00:00:00 2001 From: jmoore Date: Sun, 19 Sep 2021 16:41:24 +0200 Subject: [PATCH 0007/1078] 2.10.0 changelog --- docs/release.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index cd782311d1..4d40c0a0b4 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,23 @@ Release notes Unreleased ---------- +.. _release_2.10.0: + +2.10.0 +------ + +Enhancements +~~~~~~~~~~~~ + +* Add N5FSStore. + By :user:`Davis Bennett `; :issue:`793`. + +Bug fixes +~~~~~~~~~ + +* Ignore None dim_separators in save_array. + By :user:`Josh Moore `; :issue:`831`. + .. _release_2.9.5: 2.9.5 From ce01a6d1c8169c73aaddda392ef504f948af71c7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Sep 2021 09:58:49 +0200 Subject: [PATCH 0008/1078] Bump pytest-doctestplus from 0.10.1 to 0.11.0 (#836) Bumps [pytest-doctestplus](https://github.com/astropy/pytest-doctestplus) from 0.10.1 to 0.11.0. - [Release notes](https://github.com/astropy/pytest-doctestplus/releases) - [Changelog](https://github.com/astropy/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/astropy/pytest-doctestplus/compare/v0.10.1...v0.11.0) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index e2d5da4a48..df6ce17455 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ tox==3.24.4 coverage flake8==3.9.2 pytest-cov==2.12.1 -pytest-doctestplus==0.10.1 +pytest-doctestplus==0.11.0 h5py==3.4.0 fsspec[s3]==2021.08.1 moto[server]>=1.3.14 From cbe371f5d399dfb14c960727333f257734461352 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 21 Sep 2021 21:01:40 +0200 Subject: [PATCH 0009/1078] Actions: add minimal.yml (close #820) (#835) * Actions: add minimal.yml (close #820) Rather than adding fsspec as a dependency to conda-forge, this tests a conda-forge style minimal build for each PR. * Add minimum requirements --- .github/workflows/minimal.yml | 28 ++++++++++++++++++++++++++++ environment.yml | 13 +++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 .github/workflows/minimal.yml create mode 100644 environment.yml diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml new file mode 100644 index 0000000000..b1705bdf98 --- /dev/null +++ b/.github/workflows/minimal.yml @@ -0,0 +1,28 @@ +# This workflow simulates the environment found during a conda-forge build +# and makes sure that Zarr can run without fsspec and other additional libraries. +name: Minimal installation + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + build: + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@master + with: + channels: conda-forge + environment-file: environment.yml + activate-environment: minimal + - name: Tests + shell: "bash -l {0}" + run: | + conda activate minimal + python -m pip install -e . + pytest -svx diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000000..b47dd9238b --- /dev/null +++ b/environment.yml @@ -0,0 +1,13 @@ +channels: + - conda-forge + - defaults +dependencies: + - wheel + - numcodecs >= 0.6.4 + - numpy >= 1.7 + - pip + - pip: + - asciitree + - fasteners + - pytest + - setuptools_scm From 78eb8b728e92cf5cbb6ff58d7da0d4a26c54a0ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Sep 2021 08:37:29 +0200 Subject: [PATCH 0010/1078] Bump fsspec[s3] from 2021.08.1 to 2021.9.0 (#838) Bumps [fsspec[s3]](https://github.com/intake/filesystem_spec) from 2021.08.1 to 2021.9.0. - [Release notes](https://github.com/intake/filesystem_spec/releases) - [Commits](https://github.com/intake/filesystem_spec/compare/2021.08.1...2021.09.0) --- updated-dependencies: - dependency-name: fsspec[s3] dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index df6ce17455..a16702d272 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,5 +18,5 @@ flake8==3.9.2 pytest-cov==2.12.1 pytest-doctestplus==0.11.0 h5py==3.4.0 -fsspec[s3]==2021.08.1 +fsspec[s3]==2021.9.0 moto[server]>=1.3.14 From e70c4b1f941aa6e5e7d8a136bb25d93b3a8d9b99 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Thu, 30 Sep 2021 03:25:21 -0400 Subject: [PATCH 0011/1078] set normalize_keys=False in fsstore constructor (#842) --- zarr/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zarr/storage.py b/zarr/storage.py index 395551687f..6ce2f88e1c 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1069,7 +1069,7 @@ class FSStore(MutableMapping): _group_meta_key = group_meta_key _attrs_key = attrs_key - def __init__(self, url, normalize_keys=True, key_separator=None, + def __init__(self, url, normalize_keys=False, key_separator=None, mode='w', exceptions=(KeyError, PermissionError, IOError), dimension_separator=None, From 0200365bf96fe829e2106b2888ea19d80d9e19ca Mon Sep 17 00:00:00 2001 From: jmoore Date: Thu, 30 Sep 2021 09:30:20 +0200 Subject: [PATCH 0012/1078] 2.10.1 changelog --- docs/release.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 4d40c0a0b4..d37516f8a6 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,17 @@ Release notes Unreleased ---------- +.. _release_2.10.1: + +2.10.1 +------ + +Bug fixes +~~~~~~~~~ + +* Fix regression by setting normalize_keys=False in fsstore constructor. + By :user:`Davis Bennett `; :issue:`842`. + .. _release_2.10.0: 2.10.0 From 4f8cb35ecd9a24f402a3a7a02d2efe177abaf5c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Oct 2021 08:00:34 +0200 Subject: [PATCH 0013/1078] Bump fsspec[s3] from 2021.9.0 to 2021.10.0 (#846) Bumps [fsspec[s3]](https://github.com/intake/filesystem_spec) from 2021.9.0 to 2021.10.0. - [Release notes](https://github.com/intake/filesystem_spec/releases) - [Commits](https://github.com/intake/filesystem_spec/compare/2021.09.0...2021.10.0) --- updated-dependencies: - dependency-name: fsspec[s3] dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a16702d272..10f5990837 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,5 +18,5 @@ flake8==3.9.2 pytest-cov==2.12.1 pytest-doctestplus==0.11.0 h5py==3.4.0 -fsspec[s3]==2021.9.0 +fsspec[s3]==2021.10.0 moto[server]>=1.3.14 From d1dc98728afc534e63bc43566c8cafcefbb624c0 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 19 Oct 2021 21:48:58 +0200 Subject: [PATCH 0014/1078] Handle legacy nested datasets (with tests) (#850) * Fix #840 * Add legacy tests (See #840) Each fixture (flat & nested) should have a version which does not include any new metadata. 
* Fix PEP8 issues * Try dropping editable installs see: https://github.com/pypa/pip/issues/10573 * Handle case of store being a dict * Exclude unexpected failure from code coverage * Add 2.10.2 release notes --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- docs/release.rst | 11 +++++ fixture/flat/.zarray | 3 +- fixture/flat_legacy/.zarray | 22 +++++++++ fixture/flat_legacy/0.0 | Bin 0 -> 48 bytes fixture/nested_legacy/.zarray | 22 +++++++++ fixture/nested_legacy/0/0 | Bin 0 -> 48 bytes zarr/core.py | 13 ++++- zarr/meta.py | 4 +- zarr/tests/test_dim_separator.py | 67 ++++++++++++++++++++------ 12 files changed, 128 insertions(+), 20 deletions(-) create mode 100644 fixture/flat_legacy/.zarray create mode 100644 fixture/flat_legacy/0.0 create mode 100644 fixture/nested_legacy/.zarray create mode 100644 fixture/nested_legacy/0/0 diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index b1705bdf98..0ce211cbde 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -24,5 +24,5 @@ jobs: shell: "bash -l {0}" run: | conda activate minimal - python -m pip install -e . + python -m pip install . pytest -svx diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index d9bc362d12..9261187caf 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -57,7 +57,7 @@ jobs: python -m pip install --upgrade pip python -m pip install -U pip setuptools wheel codecov line_profiler python -m pip install -rrequirements_dev_minimal.txt numpy${{ matrix.numpy_version}} -rrequirements_dev_optional.txt pymongo redis - python -m pip install -e . + python -m pip install . python -m pip freeze - name: Tests shell: "bash -l {0}" diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 0d746ced99..a4e20b7131 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -39,7 +39,7 @@ jobs: python -m pip install --upgrade pip python -m pip install -U pip setuptools wheel python -m pip install -r requirements_dev_numpy.txt -r requirements_dev_minimal.txt -r requirements_dev_optional.txt - python -m pip install -e . + python -m pip install . python -m pip freeze npm install -g azurite - name: Run Tests diff --git a/docs/release.rst b/docs/release.rst index d37516f8a6..6c814cbedb 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,17 @@ Release notes Unreleased ---------- +.. _release_2.10.2: + +2.10.2 +------ + +Bug fixes +~~~~~~~~~ + +* Fix NestedDirectoryStore datasets without dimension_separator metadata. + By :user:`Josh Moore `; :issue:`850`. + .. 
_release_2.10.1: 2.10.1 diff --git a/fixture/flat/.zarray b/fixture/flat/.zarray index 8ec79419da..f265bb0674 100644 --- a/fixture/flat/.zarray +++ b/fixture/flat/.zarray @@ -10,6 +10,7 @@ "id": "blosc", "shuffle": 1 }, + "dimension_separator": ".", "dtype": " MappingType[str, Any]: else: object_codec = None + dimension_separator = meta.get('dimension_separator', None) fill_value = decode_fill_value(meta['fill_value'], dtype, object_codec) meta = dict( zarr_format=meta['zarr_format'], @@ -57,8 +58,9 @@ def decode_array_metadata(s: Union[MappingType, str]) -> MappingType[str, Any]: fill_value=fill_value, order=meta['order'], filters=meta['filters'], - dimension_separator=meta.get('dimension_separator', '.'), ) + if dimension_separator: + meta['dimension_separator'] = dimension_separator except Exception as e: raise MetadataError('error decoding metadata: %s' % e) diff --git a/zarr/tests/test_dim_separator.py b/zarr/tests/test_dim_separator.py index 566745e665..5e17bbe279 100644 --- a/zarr/tests/test_dim_separator.py +++ b/zarr/tests/test_dim_separator.py @@ -12,8 +12,10 @@ needs_fsspec = pytest.mark.skipif(not have_fsspec, reason="needs fsspec") -@pytest.fixture(params=("static_nested", - "static_flat", +@pytest.fixture(params=("static_flat", + "static_flat_legacy", + "static_nested", + "static_nested_legacy", "directory_nested", "directory_flat", "directory_default", @@ -35,14 +37,16 @@ def dataset(tmpdir, request): if which.startswith("static"): project_root = pathlib.Path(zarr.__file__).resolve().parent.parent - if which.endswith("nested"): - static = project_root / "fixture/nested" - generator = NestedDirectoryStore - else: - static = project_root / "fixture/flat" - generator = DirectoryStore + suffix = which[len("static_"):] + static = project_root / "fixture" / suffix if not static.exists(): # pragma: no cover + + if "nested" in which: + generator = NestedDirectoryStore + else: + generator = DirectoryStore + # store the data - should be one-time operation s = generator(str(static)) a = zarr.open(store=s, mode="w", shape=(2, 2), dtype=" Date: Wed, 20 Oct 2021 06:53:16 +1100 Subject: [PATCH 0015/1078] Add support for fancy indexing on get/setitem (#725) * Fall back on .vindex when basic indexing fails Addresses #657 This matches NumPy behaviour in that basic, boolean, and vectorized integer (fancy) indexing are all accessible from `__{get,set}item__`. Users still have access to all the indexing methods if they want to be sure to use only basic indexing (integer + slices). * Fix basic selection test now with no IndexError * Fix basic_selection_2d test with no vindex error * Add specific test for fancy indexing fallback * Update get/setitem docstrings * Update tutorial.rst * PEP8 fix * Rename test array to z as in other tests * Add release note * Avoid mixing slicing and array indexing in setitem * Actually test for fancy index rather than try/except * Add check for 1D fancy index (no tuple) * Add tests for implicit fancy indexing, and getitem * Add expected blank line * Add strict test for make_slice_selection * Ensure make_slice_selection returns valid NumPy slices * Make pytest verbose to see what is failing in windows * Add 5 min per-test timeout * Use private self._shape when determining ndim self.shape is a property that hides a lot of computation, and, more importantly, it can be waiting for an update and so .ndim *cannot* be accessed during a reshape/append. See: https://github.com/zarr-developers/zarr-python/pull/725#issuecomment-854389816 This should prevent that behavior. 
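For illustration, a minimal sketch of the user-facing change (mirroring the examples added to the tutorial in this PR):

>>> import zarr
>>> import numpy as np
>>> z = zarr.array(np.arange(9).reshape(3, 3))
>>> z[[0, 2], [1, 2]]  # vectorized integer (fancy) indexing now works via []
array([1, 8])
>>> z[[0, 2], [1, 2]] = -1  # and likewise for __setitem__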
Co-authored-by: Josh Moore --- .github/workflows/windows-testing.yml | 2 +- docs/release.rst | 6 ++ docs/tutorial.rst | 12 +++- requirements_dev_optional.txt | 1 + zarr/core.py | 51 +++++++++++++---- zarr/indexing.py | 65 +++++++++++++++++++++- zarr/tests/test_indexing.py | 79 +++++++++++++++++++++++++++++-- 7 files changed, 196 insertions(+), 20 deletions(-) diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index a4e20b7131..5eaafefbf4 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -48,7 +48,7 @@ jobs: conda activate zarr-env mkdir ~/blob_emulator azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log & - pytest + pytest -sv --timeout=300 env: ZARR_TEST_ABS: 1 - name: Conda info diff --git a/docs/release.rst b/docs/release.rst index 6c814cbedb..0810946ee6 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,12 @@ Release notes Unreleased ---------- +Enhancements +~~~~~~~~~~~~ + +* array indexing with [] (getitem and setitem) now supports fancy indexing. + By :user:`Juan Nunez-Iglesias `; :issue:`725`. + .. _release_2.10.2: 2.10.2 diff --git a/docs/tutorial.rst b/docs/tutorial.rst index a3421608cc..68673f1295 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -509,7 +509,7 @@ e.g.:: [10, 11, 12, -2, 14]]) For convenience, coordinate indexing is also available via the ``vindex`` -property, e.g.:: +property, as well as the square bracket operator, e.g.:: >>> z.vindex[[0, 2], [1, 3]] array([-1, -2]) @@ -518,6 +518,16 @@ property, e.g.:: array([[ 0, -3, 2, 3, 4], [ 5, 6, 7, 8, 9], [10, 11, 12, -4, 14]]) + >>> z[[0, 2], [1, 3]] + array([-3, -4]) + +When the indexing arrays have different shapes, they are broadcast together. +That is, the following two calls are equivalent:: + + >>> z[1, [1, 3]] + array([5, 7]) + >>> z[[1, 1], [1, 3]] + array([5, 7]) Indexing with a mask array ~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 10f5990837..4ac0c6c294 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,6 +17,7 @@ coverage flake8==3.9.2 pytest-cov==2.12.1 pytest-doctestplus==0.11.0 +pytest-timeout==1.4.2 h5py==3.4.0 fsspec[s3]==2021.10.0 moto[server]>=1.3.14 diff --git a/zarr/core.py b/zarr/core.py index c1a1c341ff..f53c2b9b05 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -25,6 +25,7 @@ ensure_tuple, err_too_many_indices, is_contiguous_selection, + is_pure_fancy_indexing, is_scalar, pop_fields, ) @@ -351,7 +352,7 @@ def attrs(self): @property def ndim(self): """Number of dimensions.""" - return len(self.shape) + return len(self._shape) @property def _size(self): @@ -658,8 +659,20 @@ def __getitem__(self, selection): Slices with step > 1 are supported, but slices with negative step are not. Currently the implementation for __getitem__ is provided by - :func:`get_basic_selection`. For advanced ("fancy") indexing, see the methods - listed under See Also. + :func:`vindex` if the indexing is pure fancy indexing (ie a + broadcast-compatible tuple of integer array indices), or by + :func:`get_basic_selection` otherwise. + + Effectively, this means that the following indexing modes are supported: + + - integer indexing + - slice indexing + - mixed slice and integer indexing + - boolean indexing + - fancy indexing (vectorized list of integers) + + For specific indexing options including outer indexing, see the + methods listed under See Also.
See Also -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, set_orthogonal_selection, vindex, oindex, __setitem__ """ - - fields, selection = pop_fields(selection) - return self.get_basic_selection(selection, fields=fields) + fields, pure_selection = pop_fields(selection) + if is_pure_fancy_indexing(pure_selection, self.ndim): + result = self.vindex[selection] + else: + result = self.get_basic_selection(pure_selection, fields=fields) + return result def get_basic_selection(self, selection=Ellipsis, out=None, fields=None): """Retrieve data for an item or region of the array. @@ -1208,8 +1224,19 @@ def __setitem__(self, selection, value): Slices with step > 1 are supported, but slices with negative step are not. Currently the implementation for __setitem__ is provided by - :func:`set_basic_selection`, which means that only integers and slices are - supported within the selection. For advanced ("fancy") indexing, see the + :func:`vindex` if the indexing is pure fancy indexing (ie a + broadcast-compatible tuple of integer array indices), or by + :func:`set_basic_selection` otherwise. + + Effectively, this means that the following indexing modes are supported: + + - integer indexing + - slice indexing + - mixed slice and integer indexing + - boolean indexing + - fancy indexing (vectorized list of integers) + + For specific indexing options including outer indexing, see the methods listed under See Also. See Also -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, set_orthogonal_selection, vindex, oindex, __getitem__ """ - - fields, selection = pop_fields(selection) - self.set_basic_selection(selection, value, fields=fields) + fields, pure_selection = pop_fields(selection) + if is_pure_fancy_indexing(pure_selection, self.ndim): + self.vindex[selection] = value + else: + self.set_basic_selection(pure_selection, value, fields=fields) def set_basic_selection(self, selection, value, fields=None): """Modify data for an item or region of the array. diff --git a/zarr/indexing.py b/zarr/indexing.py index e58e7ba339..2e9f7c8c03 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -16,9 +16,23 @@ def is_integer(x): + """True if x is an integer (either pure Python or NumPy). + + Note that Python's bool is considered an integer too. + """ return isinstance(x, numbers.Integral) +def is_integer_list(x): + """True if x is a list of integers. + + This function assumes, but *does not check*, that all elements of the list + have the same type. Mixed type lists will result in other errors that will + bubble up anyway. + """ + return isinstance(x, list) and len(x) > 0 and is_integer(x[0]) + + def is_integer_array(x, ndim=None): t = hasattr(x, 'shape') and hasattr(x, 'dtype') and x.dtype.kind in 'ui' if ndim is not None: @@ -41,6 +55,49 @@ def is_scalar(value, dtype): return False +def is_pure_fancy_indexing(selection, ndim): + """Check whether a selection contains only scalars or integer array-likes. + + Parameters + ---------- + selection : tuple, slice, or scalar + A valid selection value for indexing into arrays. + + Returns + ------- + is_pure : bool + True if the selection is a pure fancy indexing expression (ie not mixed + with boolean or slices). + """ + if ndim == 1: + if is_integer_list(selection) or is_integer_array(selection): + return True + # if not, we go through the normal path below, because a 1-tuple + # of integers is also allowed.
+ no_slicing = ( + isinstance(selection, tuple) + and len(selection) == ndim + and not ( + any(isinstance(elem, slice) or elem is Ellipsis + for elem in selection) + ) + ) + return ( + no_slicing and + all( + is_integer(elem) + or is_integer_list(elem) + or is_integer_array(elem) + for elem in selection + ) and + any( + is_integer_list(elem) + or is_integer_array(elem) + for elem in selection + ) + ) + + def normalize_integer_selection(dim_sel, dim_len): # normalize type to int @@ -833,10 +890,14 @@ def make_slice_selection(selection): ls = [] for dim_selection in selection: if is_integer(dim_selection): - ls.append(slice(dim_selection, dim_selection + 1, 1)) + ls.append(slice(int(dim_selection), int(dim_selection) + 1, 1)) elif isinstance(dim_selection, np.ndarray): if len(dim_selection) == 1: - ls.append(slice(dim_selection[0], dim_selection[0] + 1, 1)) + ls.append( + slice( + int(dim_selection[0]), int(dim_selection[0]) + 1, 1 + ) + ) else: raise ArrayIndexError() else: diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index 8c534f8e4a..a58a309534 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -4,6 +4,7 @@ import zarr from zarr.indexing import ( + make_slice_selection, normalize_integer_selection, oindex, oindex_set, @@ -198,15 +199,15 @@ def test_get_basic_selection_1d(): for selection in basic_selections_1d: _test_get_basic_selection(a, z, selection) - bad_selections = basic_selections_1d_bad + [ - [0, 1], # fancy indexing - ] - for selection in bad_selections: + for selection in basic_selections_1d_bad: with pytest.raises(IndexError): z.get_basic_selection(selection) with pytest.raises(IndexError): z[selection] + with pytest.raises(IndexError): + z.get_basic_selection([1, 0]) + basic_selections_2d = [ # single row @@ -274,7 +275,6 @@ def test_get_basic_selection_2d(): bad_selections = basic_selections_2d_bad + [ # integer arrays [0, 1], - ([0, 1], [0, 1]), (slice(None), [0, 1]), ] for selection in bad_selections: @@ -282,6 +282,68 @@ def test_get_basic_selection_2d(): z.get_basic_selection(selection) with pytest.raises(IndexError): z[selection] + # check fallback on fancy indexing + fancy_selection = ([0, 1], [0, 1]) + np.testing.assert_array_equal(z[fancy_selection], [0, 11]) + + +def test_fancy_indexing_fallback_on_get_setitem(): + z = zarr.zeros((20, 20)) + z[[1, 2, 3], [1, 2, 3]] = 1 + np.testing.assert_array_equal( + z[:4, :4], + [ + [0, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + ], + ) + np.testing.assert_array_equal( + z[[1, 2, 3], [1, 2, 3]], 1 + ) + # test broadcasting + np.testing.assert_array_equal( + z[1, [1, 2, 3]], [1, 0, 0] + ) + # test 1D fancy indexing + z2 = zarr.zeros(5) + z2[[1, 2, 3]] = 1 + np.testing.assert_array_equal( + z2, [0, 1, 1, 1, 0] + ) + + +def test_fancy_indexing_doesnt_mix_with_slicing(): + z = zarr.zeros((20, 20)) + with pytest.raises(IndexError): + z[[1, 2, 3], :] = 2 + with pytest.raises(IndexError): + np.testing.assert_array_equal( + z[[1, 2, 3], :], 0 + ) + + +def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): + z2 = zarr.zeros((5, 5, 5)) + with pytest.raises(IndexError): + z2[[1, 2, 3], [1, 2, 3]] = 2 + with pytest.raises(IndexError): + np.testing.assert_array_equal( + z2[[1, 2, 3], [1, 2, 3]], 0 + ) + with pytest.raises(IndexError): + z2[[1, 2, 3]] = 2 + with pytest.raises(IndexError): + np.testing.assert_array_equal( + z2[[1, 2, 3]], 0 + ) + with pytest.raises(IndexError): + z2[..., [1, 2, 3]] = 2 + with pytest.raises(IndexError): + np.testing.assert_array_equal( + 
z2[..., [1, 2, 3]], 0 + ) def test_set_basic_selection_0d(): @@ -1373,3 +1435,10 @@ def test_PartialChunkIterator(selection, arr, expected): PCI = PartialChunkIterator(selection, arr.shape) results = list(PCI) assert results == expected + + +def test_slice_selection_uints(): + arr = np.arange(24).reshape((4, 6)) + idx = np.uint64(3) + slice_sel = make_slice_selection((idx,)) + assert arr[slice_sel].shape == (1, 6) From 831e687599da80b80ac3e97f1b0cef6f3f09b40f Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Tue, 19 Oct 2021 16:02:10 -0400 Subject: [PATCH 0016/1078] Optimize setitem with chunk equal to fill_value, round 2 (#738) * Consolidate encode/store in _chunk_setitem_nosync Matches how these lines are written in `_set_basic_selection_zd`. * Clear key-value pair if chunk is just fill value Add a simple check to see if the key-value pair is just being set with a chunk equal to the fill value. If so, simply delete the key-value pair instead of storing a chunk that only contains the fill value. The Array will behave the same externally. However this will cut down on the space required to store the Array. Also will make sure that copying one Array to another Array won't dramatically affect the storage size. * set empty chunk write behavior via array constructor * add rudimentary tests, np.equal -> np.array_equal * add test for chunk deletion * add flattening function * add kwarg for empty writes to array creators * fix check for chunk equality to fill value * flake8 * add None check to setitems * add write_empty_chunks to output of __getstate__ * flake8 * add partial decompress to __get_state__ * functionalize emptiness checks and key deletion * flake8 * add path for delitems, and add some failing tests * flake8 * add delitems method to FSStore, and correspondingly change zarr.Array behavior * add nested + empty writes test * set write_empty_chunks to True by default * rename chunk_is_empty method and clean up logic in _chunk_setitem * rename test * add test for flatten * initial support for using delitems api in chunk_setitems * flake8 * strip path separator that was screwing up store.listdir * change tests to check for empty writing behavior * bump fsspec and s3fs versions * delitems: only attempt to delete keys that exist * don't pass empty collections to self.map.delitems * flake8 * use main branch of fsspec until a new release is cut * add support for checking if a chunk is all nans in _chunk_is_empty * docstring tweaks * clean up empty_write tests * fix hexdigests for FSStore + empty writes, and remove redundant nchunks_initialized test * resolve merge conflicts in favor of master * set write_empty_chunks to True by default; put chunk emptiness checking in a function in util.py; optimize chunk emptiness checking * remove np.typing import * use public attribute in test_nchunks_initialized * remove duplicated logic in _chunk_setitems, instead using _chunk_delitems; clean up 0d empty writes; add coverage exemptions * expand 0d tests and nchunks_initialized tests to hit more parts of the write_empty_chunks logic * remove return type annotation for all_equal that was breaking CI * refactor write_empty_chunks tests by expanding the create_array logic in the base test class, remove standalone write_empty_chunks tests * correctly handle merge from upstream master * don't use os.path.join for constructing a chunk key; instead use _chunk_key method * complete removal of os.path.join calls * add coverage exemption to type error branch in all_equal * remove unreachable conditionals in n5 tests
--- docs/release.rst | 3 + zarr/core.py | 85 +++++++++++++++++--- zarr/creation.py | 27 +++++-- zarr/storage.py | 9 +++ zarr/tests/test_core.py | 161 +++++++++++++++++++++++++++++-------- zarr/tests/test_storage.py | 6 ++ zarr/tests/test_sync.py | 7 +- zarr/tests/test_util.py | 29 ++++++- zarr/util.py | 41 ++++++++++ 9 files changed, 314 insertions(+), 54 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 0810946ee6..e9c592a860 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -12,6 +12,9 @@ Enhancements * array indexing with [] (getitem and setitem) now supports fancy indexing. By :user:`Juan Nunez-Iglesias `; :issue:`725`. +* write_empty_chunks=False deletes chunks consisting of only fill_value. + By :user:`Davis Bennett `; :issue:`738`. + .. _release_2.10.2: 2.10.2 diff --git a/zarr/core.py b/zarr/core.py index f53c2b9b05..b9600467c1 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -32,6 +32,7 @@ from zarr.meta import decode_array_metadata, encode_array_metadata from zarr.storage import array_meta_key, attrs_key, getsize, listdir from zarr.util import ( + all_equal, InfoReporter, check_array_shape, human_readable_size, @@ -75,6 +76,14 @@ class Array: If True and while the chunk_store is a FSStore and the compresion used is Blosc, when getting data from the array chunks will be partially read and decompressed when possible. + write_empty_chunks : bool, optional + If True (default), all chunks will be stored regardless of their + contents. If False, each chunk is compared to the array's fill + value prior to storing. If a chunk is uniformly equal to the fill + value, then that chunk is not stored, and the store entry for + that chunk's key is deleted. This setting enables sparser storage, + as only chunks with non-fill-value data are stored, at the expense + of overhead associated with checking the data of each chunk. .. versionadded:: 2.7 @@ -107,6 +116,7 @@ class Array: info vindex oindex + write_empty_chunks Methods ------- @@ -139,6 +149,7 @@ def __init__( cache_metadata=True, cache_attrs=True, partial_decompress=False, + write_empty_chunks=True, ): # N.B., expect at this point store is fully initialized with all # configuration metadata fully specified and normalized @@ -155,6 +166,7 @@ def __init__( self._cache_metadata = cache_metadata self._is_view = False self._partial_decompress = partial_decompress + self._write_empty_chunks = write_empty_chunks # initialize metadata self._load_metadata() @@ -455,6 +467,13 @@ def vindex(self): :func:`set_mask_selection` for documentation and examples.""" return self._vindex + @property + def write_empty_chunks(self) -> bool: + """A Boolean, True if chunks composed of the array's fill value + will be stored. If False, such chunks will not be stored.
+ """ + return self._write_empty_chunks + def __eq__(self, other): return ( isinstance(other, Array) and @@ -1626,9 +1645,18 @@ def _set_basic_selection_zd(self, selection, value, fields=None): else: chunk[selection] = value - # encode and store - cdata = self._encode_chunk(chunk) - self.chunk_store[ckey] = cdata + # remove chunk if write_empty_chunks is false and it only contains the fill value + if (not self.write_empty_chunks) and all_equal(self.fill_value, chunk): + try: + del self.chunk_store[ckey] + return + except Exception: # pragma: no cover + # deleting failed, fallback to overwriting + pass + else: + # encode and store + cdata = self._encode_chunk(chunk) + self.chunk_store[ckey] = cdata def _set_basic_selection_nd(self, selection, value, fields=None): # implementation of __setitem__ for array with at least one dimension @@ -1896,11 +1924,38 @@ def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, out[out_select] = fill_value def _chunk_setitems(self, lchunk_coords, lchunk_selection, values, fields=None): - ckeys = [self._chunk_key(co) for co in lchunk_coords] - cdatas = [self._process_for_setitem(key, sel, val, fields=fields) - for key, sel, val in zip(ckeys, lchunk_selection, values)] - values = {k: v for k, v in zip(ckeys, cdatas)} - self.chunk_store.setitems(values) + ckeys = map(self._chunk_key, lchunk_coords) + cdatas = {key: self._process_for_setitem(key, sel, val, fields=fields) + for key, sel, val in zip(ckeys, lchunk_selection, values)} + to_store = {} + if not self.write_empty_chunks: + empty_chunks = {k: v for k, v in cdatas.items() if all_equal(self.fill_value, v)} + self._chunk_delitems(empty_chunks.keys()) + nonempty_keys = cdatas.keys() - empty_chunks.keys() + to_store = {k: self._encode_chunk(cdatas[k]) for k in nonempty_keys} + else: + to_store = {k: self._encode_chunk(v) for k, v in cdatas.items()} + self.chunk_store.setitems(to_store) + + def _chunk_delitems(self, ckeys): + if hasattr(self.store, "delitems"): + self.store.delitems(ckeys) + else: # pragma: no cover + # exempting this branch from coverage as there are no extant stores + # that will trigger this condition, but it's possible that they + # will be developed in the future. + tuple(map(self._chunk_delitem, ckeys)) + return None + + def _chunk_delitem(self, ckey): + """ + Attempt to delete the value associated with ckey. + """ + try: + del self.chunk_store[ckey] + return + except KeyError: + return def _chunk_setitem(self, chunk_coords, chunk_selection, value, fields=None): """Replace part or whole of a chunk. 
@@ -1931,8 +1986,12 @@ def _chunk_setitem(self, chunk_coords, chunk_selection, value, fields=None): def _chunk_setitem_nosync(self, chunk_coords, chunk_selection, value, fields=None): ckey = self._chunk_key(chunk_coords) cdata = self._process_for_setitem(ckey, chunk_selection, value, fields=fields) - # store - self.chunk_store[ckey] = cdata + + # attempt to delete chunk if it only contains the fill value + if (not self.write_empty_chunks) and all_equal(self.fill_value, cdata): + self._chunk_delitem(ckey) + else: + self.chunk_store[ckey] = self._encode_chunk(cdata) def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): if is_total_slice(chunk_selection, self._chunks) and not fields: @@ -1988,8 +2047,7 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): else: chunk[chunk_selection] = value - # encode chunk - return self._encode_chunk(chunk) + return chunk def _chunk_key(self, chunk_coords): return self._key_prefix + self._dimension_separator.join(map(str, chunk_coords)) @@ -2209,7 +2267,8 @@ def hexdigest(self, hashname="sha1"): def __getstate__(self): return (self._store, self._path, self._read_only, self._chunk_store, - self._synchronizer, self._cache_metadata, self._attrs.cache) + self._synchronizer, self._cache_metadata, self._attrs.cache, + self._partial_decompress, self._write_empty_chunks) def __setstate__(self, state): self.__init__(*state) diff --git a/zarr/creation.py b/zarr/creation.py index 0e2d2041ba..75ff1d0212 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -21,7 +21,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', fill_value=0, order='C', store=None, synchronizer=None, overwrite=False, path=None, chunk_store=None, filters=None, cache_metadata=True, cache_attrs=True, read_only=False, - object_codec=None, dimension_separator=None, **kwargs): + object_codec=None, dimension_separator=None, write_empty_chunks=True, **kwargs): """Create an array. Parameters @@ -71,6 +71,15 @@ def create(shape, chunks=True, dtype=None, compressor='default', dimension_separator : {'.', '/'}, optional Separator placed between the dimensions of a chunk. .. versionadded:: 2.8 + write_empty_chunks : bool, optional + If True (default), all chunks will be stored regardless of their + contents. If False, each chunk is compared to the array's fill + value prior to storing. If a chunk is uniformly equal to the fill + value, then that chunk is not be stored, and the store entry for + that chunk's key is deleted. This setting enables sparser storage, + as only chunks with non-fill-value data are stored, at the expense + of overhead associated with checking the data of each chunk. + Returns ------- @@ -142,7 +151,8 @@ def create(shape, chunks=True, dtype=None, compressor='default', # instantiate array z = Array(store, path=path, chunk_store=chunk_store, synchronizer=synchronizer, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, read_only=read_only) + cache_metadata=cache_metadata, cache_attrs=cache_attrs, read_only=read_only, + write_empty_chunks=write_empty_chunks) return z @@ -400,6 +410,7 @@ def open_array( chunk_store=None, storage_options=None, partial_decompress=False, + write_empty_chunks=True, **kwargs ): """Open an array using file-mode-like semantics. @@ -454,8 +465,14 @@ def open_array( If True and while the chunk_store is a FSStore and the compresion used is Blosc, when getting data from the array chunks will be partially read and decompressed when possible. - - .. 
versionadded:: 2.7 + write_empty_chunks : bool, optional + If True (default), all chunks will be stored regardless of their + contents. If False, each chunk is compared to the array's fill + value prior to storing. If a chunk is uniformly equal to the fill + value, then that chunk is not be stored, and the store entry for + that chunk's key is deleted. This setting enables sparser storage, + as only chunks with non-fill-value data are stored, at the expense + of overhead associated with checking the data of each chunk. Returns ------- @@ -545,7 +562,7 @@ def open_array( # instantiate array z = Array(store, read_only=read_only, synchronizer=synchronizer, cache_metadata=cache_metadata, cache_attrs=cache_attrs, path=path, - chunk_store=chunk_store) + chunk_store=chunk_store, write_empty_chunks=write_empty_chunks) return z diff --git a/zarr/storage.py b/zarr/storage.py index 6ce2f88e1c..92be9df0aa 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1154,6 +1154,15 @@ def __delitem__(self, key): else: del self.map[key] + def delitems(self, keys): + if self.mode == 'r': + raise ReadOnlyError() + # only remove the keys that exist in the store + nkeys = [self._normalize_key(key) for key in keys if key in self] + # rm errors if you pass an empty collection + if len(nkeys) > 0: + self.map.delitems(nkeys) + def __contains__(self, key): key = self._normalize_key(key) return key in self.map diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index be2feffe8a..4544a6cae9 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -86,9 +86,10 @@ def create_array(self, read_only=False, **kwargs): kwargs.setdefault('compressor', Zlib(level=1)) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_store_has_text_keys(self): # Initialize array @@ -939,7 +940,7 @@ def test_array_0d(self): # setup a = np.zeros(()) - z = self.create_array(shape=(), dtype=a.dtype, fill_value=0) + z = self.create_array(shape=(), dtype=a.dtype, fill_value=0, write_empty_chunks=False) # check properties assert a.ndim == z.ndim @@ -973,6 +974,8 @@ def test_array_0d(self): assert 42 == z[()] z[()] = 43 assert 43 == z[()] + z[()] = z.fill_value + assert z.fill_value == z[()] with pytest.raises(IndexError): z[0] = 42 with pytest.raises(IndexError): @@ -984,17 +987,47 @@ def test_array_0d(self): z.store.close() def test_nchunks_initialized(self): + for fill_value in (0, 1.0, np.nan): + if isinstance(fill_value, int): + dtype = 'int' + else: + dtype = 'float' + z = self.create_array(shape=100, + chunks=10, + fill_value=fill_value, + dtype=dtype, + write_empty_chunks=True) + + assert 0 == z.nchunks_initialized + # manually put something into the store to confuse matters + z.store['foo'] = b'bar' + assert 0 == z.nchunks_initialized + z[:] = 42 + assert 10 == z.nchunks_initialized + # manually remove the first chunk from the store + del z.chunk_store[z._chunk_key((0,))] + assert 9 == z.nchunks_initialized - z = self.create_array(shape=100, chunks=10) - assert 0 == z.nchunks_initialized - # manually put something into the store to confuse matters - z.store['foo'] = b'bar' - assert 0 == z.nchunks_initialized - z[:] = 42 - assert 10 == z.nchunks_initialized + if hasattr(z.store, 'close'): + z.store.close() - if 
hasattr(z.store, 'close'): - z.store.close() + # second round of similar tests with write_empty_chunks set to + # False + z = self.create_array(shape=100, + chunks=10, + fill_value=fill_value, + dtype=dtype, + write_empty_chunks=False) + z[:] = 42 + assert 10 == z.nchunks_initialized + # manually remove a chunk from the store + del z.chunk_store[z._chunk_key((0,))] + assert 9 == z.nchunks_initialized + z[:] = z.fill_value + assert 0 == z.nchunks_initialized + + if hasattr(z.store, 'close'): + z.store.close() def test_array_dtype_shape(self): @@ -1545,9 +1578,11 @@ def create_array(read_only=False, **kwargs): store = dict() cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, path='foo/bar', **kwargs) return Array(store, path='foo/bar', read_only=read_only, - cache_metadata=cache_metadata, cache_attrs=cache_attrs) + cache_metadata=cache_metadata, cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -1600,9 +1635,11 @@ def create_array(read_only=False, **kwargs): chunk_store = dict() cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, chunk_store=chunk_store, **kwargs) return Array(store, read_only=read_only, chunk_store=chunk_store, - cache_metadata=cache_metadata, cache_attrs=cache_attrs) + cache_metadata=cache_metadata, cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -1654,10 +1691,11 @@ def create_array(read_only=False, **kwargs): store = DirectoryStore(path) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): @@ -1685,9 +1723,10 @@ def create_array(self, read_only=False, **kwargs): kwargs.setdefault('compressor', Zlib(1)) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) @pytest.mark.xfail def test_nbytes_stored(self): @@ -1708,10 +1747,11 @@ def create_array(read_only=False, **kwargs): store = NestedDirectoryStore(path) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def expected(self): return [ @@ -1732,10 +1772,11 @@ def create_array(read_only=False, **kwargs): store = N5Store(path) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, 
**kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_array_0d(self): # test behaviour for array with 0 dimensions @@ -1802,6 +1843,40 @@ def test_array_1d_fill_value(self): z = self.create_array(shape=(nvalues,), chunks=100, dtype=dtype, fill_value=1) + def test_nchunks_initialized(self): + fill_value = 0 + dtype = 'int' + z = self.create_array(shape=100, + chunks=10, + fill_value=fill_value, + dtype=dtype, + write_empty_chunks=True) + + assert 0 == z.nchunks_initialized + # manually put something into the store to confuse matters + z.store['foo'] = b'bar' + assert 0 == z.nchunks_initialized + z[:] = 42 + assert 10 == z.nchunks_initialized + # manually remove a chunk from the store + del z.chunk_store[z._chunk_key((0,))] + assert 9 == z.nchunks_initialized + + # second round of similar tests with write_empty_chunks set to + # False + z = self.create_array(shape=100, + chunks=10, + fill_value=fill_value, + dtype=dtype, + write_empty_chunks=False) + z[:] = 42 + assert 10 == z.nchunks_initialized + # manually remove a chunk from the store + del z.chunk_store[z._chunk_key((0,))] + assert 9 == z.nchunks_initialized + z[:] = z.fill_value + assert 0 == z.nchunks_initialized + def test_array_order(self): # N5 only supports 'C' at the moment @@ -2029,9 +2104,10 @@ def create_array(read_only=False, **kwargs): cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) kwargs.setdefault('compressor', Zlib(1)) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) class TestArrayWithDBMStore(TestArray): @@ -2043,10 +2119,11 @@ def create_array(read_only=False, **kwargs): store = DBMStore(path, flag='n') cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_attrs=cache_attrs, - cache_metadata=cache_metadata) + cache_metadata=cache_metadata, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): pass # not implemented @@ -2062,10 +2139,11 @@ def create_array(read_only=False, **kwargs): store = DBMStore(path, flag='n', open=bsddb3.btopen) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): pass # not implemented @@ -2081,10 +2159,11 @@ def create_array(read_only=False, **kwargs): store = LMDBStore(path, buffers=True) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_store_has_bytes_values(self): pass 
# returns values as memoryviews/buffers instead of bytes @@ -2103,10 +2182,11 @@ def create_array(read_only=False, **kwargs): store = LMDBStore(path, buffers=False) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): pass # not implemented @@ -2122,10 +2202,11 @@ def create_array(read_only=False, **kwargs): store = SQLiteStore(path) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Zlib(1)) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): pass # not implemented @@ -2138,9 +2219,10 @@ def create_array(self, read_only=False, **kwargs): kwargs.setdefault('compressor', None) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -2174,9 +2256,10 @@ def create_array(self, read_only=False, **kwargs): kwargs.setdefault('compressor', compressor) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -2210,9 +2293,10 @@ def create_array(self, read_only=False, **kwargs): kwargs.setdefault('compressor', compressor) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -2253,9 +2337,10 @@ def create_array(self, read_only=False, **kwargs): kwargs.setdefault('compressor', compressor) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -2296,9 +2381,10 @@ def create_array(read_only=False, **kwargs): kwargs.setdefault('compressor', compressor) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, 
read_only=read_only, cache_attrs=cache_attrs, - cache_metadata=cache_metadata) + cache_metadata=cache_metadata, write_empty_chunks=write_empty_chunks) def test_hexdigest(self): # Check basic 1-D array @@ -2441,9 +2527,10 @@ def create_array(read_only=False, **kwargs): kwargs.setdefault('compressor', Zlib(1)) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): z = self.create_array(shape=1000, chunks=100) @@ -2460,9 +2547,10 @@ def create_array(read_only=False, **kwargs): kwargs.setdefault('compressor', Zlib(level=1)) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_cache_metadata(self): a1 = self.create_array(shape=100, chunks=10, dtype='i1', cache_metadata=False) @@ -2532,9 +2620,10 @@ def create_array(read_only=False, **kwargs): kwargs.setdefault('compressor', Zlib(level=1)) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_store_has_bytes_values(self): # skip as the cache has no control over how the store provides values @@ -2551,10 +2640,11 @@ def create_array(read_only=False, **kwargs): store = FSStore(path, key_separator=key_separator, auto_mkdir=True) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Blosc()) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def expected(self): return [ @@ -2602,6 +2692,7 @@ def create_array(read_only=False, **kwargs): store = FSStore(path) cache_metadata = kwargs.pop("cache_metadata", True) cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault("compressor", Blosc()) init_array(store, **kwargs) return Array( @@ -2610,6 +2701,7 @@ def create_array(read_only=False, **kwargs): cache_metadata=cache_metadata, cache_attrs=cache_attrs, partial_decompress=True, + write_empty_chunks=write_empty_chunks ) def test_hexdigest(self): @@ -2678,10 +2770,11 @@ def create_array(read_only=False, **kwargs): store = FSStore(path, key_separator=key_separator, auto_mkdir=True) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault('compressor', Blosc()) init_array(store, **kwargs) return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def expected(self): 
return [ @@ -2730,6 +2823,7 @@ def create_array(read_only=False, **kwargs): store = FSStore(path, key_separator=key_separator, auto_mkdir=True) cache_metadata = kwargs.pop("cache_metadata", True) cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) kwargs.setdefault("compressor", Blosc()) init_array(store, **kwargs) return Array( @@ -2738,6 +2832,7 @@ def create_array(read_only=False, **kwargs): cache_metadata=cache_metadata, cache_attrs=cache_attrs, partial_decompress=True, + write_empty_chunks=write_empty_chunks ) def expected(self): diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 1412ec2099..51bc9bf782 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1012,6 +1012,12 @@ def test_read_only(self): with pytest.raises(PermissionError): del store['foo'] + with pytest.raises(PermissionError): + store.delitems(['foo']) + + with pytest.raises(PermissionError): + store.setitems({'foo': b'baz'}) + with pytest.raises(PermissionError): store.clear() diff --git a/zarr/tests/test_sync.py b/zarr/tests/test_sync.py index 51b7fe0e10..274ce166be 100644 --- a/zarr/tests/test_sync.py +++ b/zarr/tests/test_sync.py @@ -99,10 +99,11 @@ def create_array(self, read_only=False, **kwargs): store = dict() cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) return Array(store, synchronizer=ThreadSynchronizer(), read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs) + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) # noinspection PyMethodMayBeStatic def create_pool(self): @@ -141,12 +142,14 @@ def create_array(self, read_only=False, **kwargs): store = DirectoryStore(path) cache_metadata = kwargs.pop('cache_metadata', False) cache_attrs = kwargs.pop('cache_attrs', False) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) init_array(store, **kwargs) sync_path = tempfile.mkdtemp() atexit.register(atexit_rmtree, sync_path) synchronizer = ProcessSynchronizer(sync_path) return Array(store, synchronizer=synchronizer, read_only=read_only, - cache_metadata=cache_metadata, cache_attrs=cache_attrs) + cache_metadata=cache_metadata, cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks) # noinspection PyMethodMayBeStatic def create_pool(self): diff --git a/zarr/tests/test_util.py b/zarr/tests/test_util.py index fa1f18fa63..a65b26bae8 100644 --- a/zarr/tests/test_util.py +++ b/zarr/tests/test_util.py @@ -4,7 +4,7 @@ import numpy as np import pytest -from zarr.util import (guess_chunks, human_readable_size, info_html_report, +from zarr.util import (all_equal, flatten, guess_chunks, human_readable_size, info_html_report, info_text_report, is_total_slice, normalize_chunks, normalize_dimension_separator, normalize_fill_value, normalize_order, @@ -211,3 +211,30 @@ def fail(x): for x in range(11, 15): pytest.raises(PermissionError, fail, x) + + +def test_flatten(): + assert list(flatten(['0', ['1', ['2', ['3', [4, ]]]]])) == ['0', '1', '2', '3', 4] + assert list(flatten('foo')) == ['f', 'o', 'o'] + assert list(flatten(['foo'])) == ['foo'] + + +def test_all_equal(): + assert all_equal(0, np.zeros((10, 10, 10))) + assert not all_equal(1, np.zeros((10, 10, 10))) + + assert all_equal(1, np.ones((10, 10, 10))) + assert not all_equal(1, 1 + np.ones((10, 10, 10))) + + assert all_equal(np.nan, np.array([np.nan, np.nan])) + assert not 
all_equal(np.nan, np.array([np.nan, 1.0])) + + assert all_equal({'a': -1}, np.array([{'a': -1}, {'a': -1}], dtype='object')) + assert not all_equal({'a': -1}, np.array([{'a': -1}, {'a': 2}], dtype='object')) + + assert all_equal(np.timedelta64(999, 'D'), np.array([999, 999], dtype='timedelta64[D]')) + assert not all_equal(np.timedelta64(999, 'D'), np.array([999, 998], dtype='timedelta64[D]')) + + # all_equal(None, *) always returns False + assert not all_equal(None, np.array([None, None])) + assert not all_equal(None, np.array([None, 10])) diff --git a/zarr/util.py b/zarr/util.py index 2a2250433c..d092ffe0de 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -9,6 +9,7 @@ import numpy as np from asciitree import BoxStyle, LeftAligned from asciitree.traversal import Traversal +from collections.abc import Iterable from numcodecs.compat import ensure_ndarray, ensure_text from numcodecs.registry import codec_registry from numcodecs.blosc import cbuffer_sizes, cbuffer_metainfo @@ -16,6 +17,14 @@ from typing import Any, Callable, Dict, Optional, Tuple, Union +def flatten(arg: Iterable) -> Iterable: + for element in arg: + if isinstance(element, Iterable) and not isinstance(element, (str, bytes)): + yield from flatten(element) + else: + yield element + + # codecs to use for object dtype convenience API object_codecs = { str.__name__: 'vlen-utf8', @@ -650,3 +659,35 @@ def retry_call(callabl: Callable, time.sleep(wait) else: raise + + +def all_equal(value: Any, array: Any): + """ + Test if all the elements of an array are equivalent to a value. + If `value` is None, then this function does not do any comparison and + returns False. + """ + + if value is None: + return False + if not value: + # if `value` is falsey, then just 1 truthy value in `array` + # is sufficient to return False. We assume here that np.any is + # optimized to return on the first truthy value in `array`. + try: + return not np.any(array) + except TypeError: # pragma: no cover + pass + if np.issubdtype(array.dtype, np.object_): + # we have to flatten the result of np.equal to handle outputs like + # [np.array([True,True]), True, True] + return all(flatten(np.equal(value, array, dtype=array.dtype))) + else: + # Numpy errors if you call np.isnan on custom dtypes, so ensure + # we are working with floats before calling isnan + if np.issubdtype(array.dtype, np.floating) and np.isnan(value): + return np.all(np.isnan(array)) + else: + # using == raises warnings from numpy deprecated pattern, but + # using np.equal() raises type errors for structured dtypes... + return np.all(value == array)
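The all_equal helper above is the emptiness check that _chunk_setitem and _chunk_setitems rely on; a small sketch of its behaviour, assuming the patched zarr.util module is importable, with the assertions mirroring the tests added in this commit:

    import numpy as np
    from zarr.util import all_equal

    # a falsey fill value short-circuits through np.any: one truthy element
    # is enough to prove the chunk is not uniformly equal to the fill value
    assert all_equal(0, np.zeros(5))
    assert not all_equal(0, np.array([0, 0, 1]))

    # NaN needs np.isnan, because nan != nan under ordinary comparison
    assert all_equal(np.nan, np.array([np.nan, np.nan]))

    # None disables the comparison, so such chunks are always written
    assert not all_equal(None, np.array([None, None]))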
From 52103012463e5e7e91dce4b9c7b73865c3c960df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Oct 2021 22:14:56 +0200 Subject: [PATCH 0017/1078] Bump fsspec[s3] from 2021.10.0 to 2021.10.1 (#849) Bumps [fsspec[s3]](https://github.com/intake/filesystem_spec) from 2021.10.0 to 2021.10.1. - [Release notes](https://github.com/intake/filesystem_spec/releases) - [Commits](https://github.com/intake/filesystem_spec/compare/2021.10.0...2021.10.1) --- updated-dependencies: - dependency-name: fsspec[s3] dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 4ac0c6c294..dbff0cd1cc 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,5 +19,5 @@ pytest-cov==2.12.1 pytest-doctestplus==0.11.0 pytest-timeout==1.4.2 h5py==3.4.0 -fsspec[s3]==2021.10.0 +fsspec[s3]==2021.10.1 moto[server]>=1.3.14 From 5efffad1c225f749ff63e5a7605b5fda178d9b01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Oct 2021 22:15:07 +0200 Subject: [PATCH 0018/1078] Bump pytest-cov from 2.12.1 to 3.0.0 (#845) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 2.12.1 to 3.0.0. - [Release notes](https://github.com/pytest-dev/pytest-cov/releases) - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v2.12.1...v3.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index dbff0cd1cc..15b280e2b2 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==3.12.0 tox==3.24.4 coverage flake8==3.9.2 -pytest-cov==2.12.1 +pytest-cov==3.0.0 pytest-doctestplus==0.11.0 pytest-timeout==1.4.2 h5py==3.4.0 From ecfbaf6a7c390ae33862390ee16008f2382468f4 Mon Sep 17 00:00:00 2001 From: Juan Nunez-Iglesias Date: Wed, 20 Oct 2021 17:29:22 +1100 Subject: [PATCH 0019/1078] Fix versionadded tag in zarr.core.Array docstring (#852) In #738, we accidentally moved the 2.7 versionadded tag of partial_decompress to the new write_empty_chunks argument. This commit restores it to its rightful place and adds a 2.11 versionadded tag to write_empty_chunks. --- zarr/core.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/zarr/core.py b/zarr/core.py index b9600467c1..6865a0694c 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -76,6 +76,9 @@ class Array: If True and while the chunk_store is a FSStore and the compresion used is Blosc, when getting data from the array chunks will be partially read and decompressed when possible. + + .. versionadded:: 2.7 + write_empty_chunks : bool, optional If True (default), all chunks will be stored regardless of their contents. If False, each chunk is compared to the array's fill @@ -85,7 +88,8 @@ class Array: as only chunks with non-fill-value data are stored, at the expense of overhead associated with checking the data of each chunk. - .. versionadded:: 2.7 + .. versionadded:: 2.11 + Attributes ---------- From 856c2d05efb4d052b6eea77248d381a2332959b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Oct 2021 11:20:44 +0200 Subject: [PATCH 0020/1078] Bump flake8 from 3.9.2 to 4.0.1 (#848) Bumps [flake8](https://github.com/pycqa/flake8) from 3.9.2 to 4.0.1.
- [Release notes](https://github.com/pycqa/flake8/releases) - [Commits](https://github.com/pycqa/flake8/compare/3.9.2...4.0.1) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 15b280e2b2..523008fc31 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -14,7 +14,7 @@ pymongo==3.12.0 # optional test requirements tox==3.24.4 coverage -flake8==3.9.2 +flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.11.0 pytest-timeout==1.4.2 From 182f5c53ba78d2a0f04147e0fabbb872c5c26e50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Oct 2021 21:20:20 +0200 Subject: [PATCH 0021/1078] Bump pytest-timeout from 1.4.2 to 2.0.1 (#851) Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 1.4.2 to 2.0.1. - [Release notes](https://github.com/pytest-dev/pytest-timeout/releases) - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/1.4.2...2.0.1) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 523008fc31..c32458b13a 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.11.0 -pytest-timeout==1.4.2 +pytest-timeout==2.0.1 h5py==3.4.0 fsspec[s3]==2021.10.1 moto[server]>=1.3.14 From d8ac8a7465f86f440b84ba35a5c0804dc48b8522 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Oct 2021 21:20:33 +0200 Subject: [PATCH 0022/1078] Bump pymongo from 3.12.0 to 3.12.1 (#854) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 3.12.0 to 3.12.1. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/3.12.1/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/3.12.0...3.12.1) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index c32458b13a..b26431be9b 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -10,7 +10,7 @@ azure-storage-blob==12.8.1 # pyup: ignore redis==3.5.3 types-redis types-setuptools -pymongo==3.12.0 +pymongo==3.12.1 # optional test requirements tox==3.24.4 coverage From 523dbb85cbad5a5fa3b74a2ec81ec7ec63123b64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Oct 2021 09:37:47 +0200 Subject: [PATCH 0023/1078] Bump numpy from 1.21.2 to 1.21.3 (#856) Bumps [numpy](https://github.com/numpy/numpy) from 1.21.2 to 1.21.3. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.21.2...v1.21.3) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 99d96aa7b0..47cb2ca320 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.21.2 +numpy==1.21.3 From 5c71212ccf5abe9f8a5ab7996c5ce3abbbbd61e2 Mon Sep 17 00:00:00 2001 From: "Gregory R. 
Lee" Date: Thu, 21 Oct 2021 11:23:57 -0400 Subject: [PATCH 0024/1078] Create a Base store class for Zarr Store (update) (#789) * fix conflicts * cleanup naming * zip move * fix erasability test * test for warning * please flake * remove uncovered lines * remove uncovered lines in tests * pragma no cover for exceptional case * minor docstring fixes add assert statements to test_capabilities * pep8 fix * avoid NumPy 1.21.0 due to https://github.com/numpy/numpy/issues/19325 * move Store class and some helper functions to zarr._storage.store update version in Store docstring * BUG: ABSStore should inherit from Store * pep8 fix * TST: make CustomMapping a subclass of Store TST: initialize stores with KVStore(dict()) instead of bare dict() * update version mentioned in Store docstring * update version mentioned in warning message * use Store._ensure_store in Attributes class ensures Attributes.store is a Store * TST: add Attributes test case ensuring store gets coerced to a Store * use Store._ensure_store in normalize_store_arg ensures open_array, etc can work when the user supplies a dict * TST: make sure high level creation functions also work when passed a dict for store * TST: add test case with group initialized from dict * TST: add test case with Array initialized from dict * change CustomMapping back to type object, not Store want to test the non-Store code path in _ensure_store * pep8 fixes * update/fix new hierarchy test case to complete code coverage * create a BaseStore parent for Store BaseStore does not have the listdir or rmdir methods cleaned up some type declerations, making sure mypy passes * flake8 * restore is_erasable check to rmdir function Otherwise the save_array doc example fails to write to a ZipStore Co-authored-by: Matthias Bussonnier Co-authored-by: Josh Moore Co-authored-by: jmoore --- docs/tutorial.rst | 12 +- mypy.ini | 2 +- pytest.ini | 2 + zarr/_storage/absstore.py | 4 +- zarr/_storage/store.py | 166 ++++++++++++++++++++ zarr/attrs.py | 3 +- zarr/convenience.py | 73 +++++---- zarr/core.py | 17 +- zarr/creation.py | 29 +--- zarr/hierarchy.py | 36 +++-- zarr/storage.py | 231 ++++++++++++++++----------- zarr/tests/test_attrs.py | 13 +- zarr/tests/test_convenience.py | 8 +- zarr/tests/test_core.py | 277 +++++++++++++-------------------- zarr/tests/test_creation.py | 71 ++++++--- zarr/tests/test_hierarchy.py | 164 ++++++++++--------- zarr/tests/test_storage.py | 259 ++++++++++++++++-------------- zarr/tests/test_sync.py | 5 +- zarr/tests/util.py | 5 +- 19 files changed, 794 insertions(+), 583 deletions(-) create mode 100644 zarr/_storage/store.py diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 68673f1295..18c232ae40 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -176,7 +176,7 @@ print some diagnostics, e.g.:: Read-only : False Compressor : Blosc(cname='zstd', clevel=3, shuffle=BITSHUFFLE, : blocksize=0) - Store type : builtins.dict + Store type : zarr.storage.KVStore No. bytes : 400000000 (381.5M) No. bytes stored : 3379344 (3.2M) Storage ratio : 118.4 @@ -268,7 +268,7 @@ Here is an example using a delta filter with the Blosc compressor:: Read-only : False Filter [0] : Delta(dtype='>> z[:] array([b'H', b'e', b'l', b'l', b'o', b' ', b'f', b'r', b'o', b'm', b' ', @@ -1274,7 +1276,7 @@ ratios, depending on the correlation structure within the data. E.g.:: Order : C Read-only : False Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0) - Store type : builtins.dict + Store type : zarr.storage.KVStore No. bytes : 400000000 (381.5M) No. 
--- docs/tutorial.rst | 12 +- mypy.ini | 2 +- pytest.ini | 2 + zarr/_storage/absstore.py | 4 +- zarr/_storage/store.py | 166 ++++++++++++++++++++ zarr/attrs.py | 3 +- zarr/convenience.py | 73 +++++---- zarr/core.py | 17 +- zarr/creation.py | 29 +--- zarr/hierarchy.py | 36 +++-- zarr/storage.py | 231 ++++++++++++++++----------- zarr/tests/test_attrs.py | 13 +- zarr/tests/test_convenience.py | 8 +- zarr/tests/test_core.py | 277 +++++++++++++-------------------- zarr/tests/test_creation.py | 71 ++++++--- zarr/tests/test_hierarchy.py | 164 ++++++++++--------- zarr/tests/test_storage.py | 259 ++++++++++++++++-------------- zarr/tests/test_sync.py | 5 +- zarr/tests/util.py | 5 +- 19 files changed, 794 insertions(+), 583 deletions(-) create mode 100644 zarr/_storage/store.py diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 68673f1295..18c232ae40 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -176,7 +176,7 @@ print some diagnostics, e.g.:: Read-only : False Compressor : Blosc(cname='zstd', clevel=3, shuffle=BITSHUFFLE, : blocksize=0) - Store type : builtins.dict + Store type : zarr.storage.KVStore No. bytes : 400000000 (381.5M) No. bytes stored : 3379344 (3.2M) Storage ratio : 118.4 @@ -268,7 +268,7 @@ Here is an example using a delta filter with the Blosc compressor:: Read-only : False Filter [0] : Delta(dtype='>> z[:] array([b'H', b'e', b'l', b'l', b'o', b' ', b'f', b'r', b'o', b'm', b' ', @@ -1274,7 +1276,7 @@ ratios, depending on the correlation structure within the data. E.g.:: Order : C Read-only : False Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0) - Store type : builtins.dict + Store type : zarr.storage.KVStore No. bytes : 400000000 (381.5M) No. bytes stored : 6696010 (6.4M) Storage ratio : 59.7 @@ -1288,7 +1290,7 @@ ratios, depending on the correlation structure within the data. E.g.:: Order : F Read-only : False Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0) - Store type : builtins.dict + Store type : zarr.storage.KVStore No. bytes : 400000000 (381.5M) No. bytes stored : 4684636 (4.5M) Storage ratio : 85.4 diff --git a/mypy.ini b/mypy.ini index cb3c188f47..7c1be49cd6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,4 +1,4 @@ [mypy] -python_version = 3.7 +python_version = 3.8 ignore_missing_imports = True follow_imports = silent diff --git a/pytest.ini b/pytest.ini index 61a0a99ab5..8e3c0adb22 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,4 +3,6 @@ doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS IGNORE_EXCEPTION_DETAIL addopts = --durations=10 filterwarnings = error::DeprecationWarning:zarr.* + error::UserWarning:zarr.* ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning + ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index 0dc5bf1892..01bfbd5039 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -1,16 +1,16 @@ """This module contains storage classes related to Azure Blob Storage (ABS)""" import warnings -from collections.abc import MutableMapping from numcodecs.compat import ensure_bytes from zarr.util import normalize_storage_path +from zarr._storage.store import Store __doctest_requires__ = { ('ABSStore', 'ABSStore.*'): ['azure.storage.blob'], } -class ABSStore(MutableMapping): +class ABSStore(Store): """Storage class using Azure Blob Storage (ABS). Parameters diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py new file mode 100644 index 0000000000..a779a4e26a --- /dev/null +++ b/zarr/_storage/store.py @@ -0,0 +1,166 @@ +from collections.abc import MutableMapping +from typing import Any, List, Optional, Union + +from zarr.util import normalize_storage_path + +# v2 store keys +array_meta_key = '.zarray' +group_meta_key = '.zgroup' +attrs_key = '.zattrs' + + +class BaseStore(MutableMapping): + """Abstract base class for store implementations. + + This is a thin wrapper over MutableMapping that provides methods to check + whether a store is readable, writeable, eraseable and or listable. + + Stores cannot be mutable mapping as they do have a couple of other + requirements that would break Liskov substitution principle (stores only + allow strings as keys, mutable mapping are more generic). + + Having no-op base method also helps simplifying store usage and do not need + to check the presence of attributes and methods, like `close()`. + + Stores can be used as context manager to make sure they close on exit. + + ..
added: 2.11.0 + + """ + + _readable = True + _writeable = True + _erasable = True + _listable = True + + def is_readable(self): + return self._readable + + def is_writeable(self): + return self._writeable + + def is_listable(self): + return self._listable + + def is_erasable(self): + return self._erasable + + def __enter__(self): + if not hasattr(self, "_open_count"): + self._open_count = 0 + self._open_count += 1 + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._open_count -= 1 + if self._open_count == 0: + self.close() + + def close(self) -> None: + """Do nothing by default""" + pass + + def rename(self, src_path: str, dst_path: str) -> None: + if not self.is_erasable(): + raise NotImplementedError( + f'{type(self)} is not erasable, cannot call "rename"' + ) # pragma: no cover + _rename_from_keys(self, src_path, dst_path) + + @staticmethod + def _ensure_store(store: Any): + """ + We want to make sure internally that zarr stores are always a class + with a specific interface derived from ``BaseStore``, which is slightly + different than ``MutableMapping``. + + We'll do this conversion in a few places automatically + """ + from zarr.storage import KVStore # avoid circular import + + if store is None: + return None + elif isinstance(store, BaseStore): + return store + elif isinstance(store, MutableMapping): + return KVStore(store) + else: + for attr in [ + "keys", + "values", + "get", + "__setitem__", + "__getitem__", + "__delitem__", + "__contains__", + ]: + if not hasattr(store, attr): + break + else: + return KVStore(store) + + raise ValueError( + "Starting with Zarr 2.11.0, stores must be subclasses of " + "BaseStore, if your store exposes the MutableMapping interface " + f"wrap it in Zarr.storage.KVStore. Got {store}" + ) + + +class Store(BaseStore): + """Abstract store class used by implementations following the Zarr v2 spec. + + Adds public `listdir`, `rename`, and `rmdir` methods on top of BaseStore. + + .. 
added: 2.11.0 + + """ + def listdir(self, path: str = "") -> List[str]: + path = normalize_storage_path(path) + return _listdir_from_keys(self, path) + + def rmdir(self, path: str = "") -> None: + if not self.is_erasable(): + raise NotImplementedError( + f'{type(self)} is not erasable, cannot call "rmdir"' + ) # pragma: no cover + path = normalize_storage_path(path) + _rmdir_from_keys(self, path) + + +def _path_to_prefix(path: Optional[str]) -> str: + # assume path already normalized + if path: + prefix = path + '/' + else: + prefix = '' + return prefix + + +def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None: + # assume path already normalized + src_prefix = _path_to_prefix(src_path) + dst_prefix = _path_to_prefix(dst_path) + for key in list(store.keys()): + if key.startswith(src_prefix): + new_key = dst_prefix + key.lstrip(src_prefix) + store[new_key] = store.pop(key) + + +def _rmdir_from_keys(store: Union[BaseStore, MutableMapping], path: Optional[str] = None) -> None: + # assume path already normalized + prefix = _path_to_prefix(path) + for key in list(store.keys()): + if key.startswith(prefix): + del store[key] + + +def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]: + # assume path already normalized + prefix = _path_to_prefix(path) + children = set() + for key in list(store.keys()): + if key.startswith(prefix) and len(key) > len(prefix): + suffix = key[len(prefix):] + child = suffix.split('/')[0] + children.add(child) + return sorted(children) diff --git a/zarr/attrs.py b/zarr/attrs.py index ea6b831608..ec01dbe04f 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -1,6 +1,7 @@ from collections.abc import MutableMapping from zarr.meta import parse_metadata +from zarr._storage.store import Store from zarr.util import json_dumps @@ -26,7 +27,7 @@ class Attributes(MutableMapping): def __init__(self, store, key='.zattrs', read_only=False, cache=True, synchronizer=None): - self.store = store + self.store = Store._ensure_store(store) self.key = key self.read_only = read_only self.cache = cache diff --git a/zarr/convenience.py b/zarr/convenience.py index 80cf7fffd4..18b59a77b2 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -3,7 +3,7 @@ import itertools import os import re -from collections.abc import Mapping +from collections.abc import Mapping, MutableMapping from zarr.core import Array from zarr.creation import array as _create_array @@ -13,17 +13,21 @@ from zarr.hierarchy import group as _create_group from zarr.hierarchy import open_group from zarr.meta import json_dumps, json_loads -from zarr.storage import contains_array, contains_group +from zarr.storage import contains_array, contains_group, BaseStore from zarr.util import TreeViewer, buffer_size, normalize_storage_path +from typing import Union + +StoreLike = Union[BaseStore, MutableMapping, str, None] + # noinspection PyShadowingBuiltins -def open(store=None, mode='a', **kwargs): +def open(store: StoreLike = None, mode: str = "a", **kwargs): """Convenience function to open a group or array using file-mode-like semantics. Parameters ---------- - store : MutableMapping or string, optional + store : Store or string, optional Store or path to directory in file system or name of zip file. 
mode : {'r', 'r+', 'a', 'w', 'w-'}, optional Persistence mode: 'r' means read only (must exist); 'r+' means @@ -76,27 +80,28 @@ def open(store=None, mode='a', **kwargs): clobber = mode == 'w' # we pass storage options explicitly, since normalize_store_arg might construct # a store if the input is a fsspec-compatible URL - store = normalize_store_arg(store, clobber=clobber, - storage_options=kwargs.pop("storage_options", {})) + _store: BaseStore = normalize_store_arg( + store, clobber=clobber, storage_options=kwargs.pop("storage_options", {}) + ) path = normalize_storage_path(path) if mode in {'w', 'w-', 'x'}: if 'shape' in kwargs: - return open_array(store, mode=mode, **kwargs) + return open_array(_store, mode=mode, **kwargs) else: - return open_group(store, mode=mode, **kwargs) + return open_group(_store, mode=mode, **kwargs) elif mode == "a": - if "shape" in kwargs or contains_array(store, path): - return open_array(store, mode=mode, **kwargs) + if "shape" in kwargs or contains_array(_store, path): + return open_array(_store, mode=mode, **kwargs) else: - return open_group(store, mode=mode, **kwargs) + return open_group(_store, mode=mode, **kwargs) else: - if contains_array(store, path): - return open_array(store, mode=mode, **kwargs) - elif contains_group(store, path): - return open_group(store, mode=mode, **kwargs) + if contains_array(_store, path): + return open_array(_store, mode=mode, **kwargs) + elif contains_group(_store, path): + return open_group(_store, mode=mode, **kwargs) else: raise PathNotFoundError(path) @@ -105,7 +110,7 @@ def _might_close(path): return isinstance(path, (str, os.PathLike)) -def save_array(store, arr, **kwargs): +def save_array(store: StoreLike, arr, **kwargs): """Convenience function to save a NumPy array to the local file system, following a similar API to the NumPy save() function. @@ -137,16 +142,16 @@ def save_array(store, arr, **kwargs): """ may_need_closing = _might_close(store) - store = normalize_store_arg(store, clobber=True) + _store: BaseStore = normalize_store_arg(store, clobber=True) try: - _create_array(arr, store=store, overwrite=True, **kwargs) + _create_array(arr, store=_store, overwrite=True, **kwargs) finally: - if may_need_closing and hasattr(store, 'close'): + if may_need_closing: # needed to ensure zip file records are written - store.close() + _store.close() -def save_group(store, *args, **kwargs): +def save_group(store: StoreLike, *args, **kwargs): """Convenience function to save several NumPy arrays to the local file system, following a similar API to the NumPy savez()/savez_compressed() functions. @@ -208,21 +213,21 @@ def save_group(store, *args, **kwargs): raise ValueError('at least one array must be provided') # handle polymorphic store arg may_need_closing = _might_close(store) - store = normalize_store_arg(store, clobber=True) + _store: BaseStore = normalize_store_arg(store, clobber=True) try: - grp = _create_group(store, overwrite=True) + grp = _create_group(_store, overwrite=True) for i, arr in enumerate(args): k = 'arr_{}'.format(i) grp.create_dataset(k, data=arr, overwrite=True) for k, arr in kwargs.items(): grp.create_dataset(k, data=arr, overwrite=True) finally: - if may_need_closing and hasattr(store, 'close'): + if may_need_closing: # needed to ensure zip file records are written - store.close() + _store.close() -def save(store, *args, **kwargs): +def save(store: StoreLike, *args, **kwargs): """Convenience function to save an array or group of arrays to the local file system. 
Parameters @@ -332,7 +337,7 @@ def __repr__(self): return r -def load(store): +def load(store: StoreLike): """Load data from an array or group into memory. Parameters @@ -358,11 +363,11 @@ def load(store): """ # handle polymorphic store arg - store = normalize_store_arg(store) - if contains_array(store, path=None): - return Array(store=store, path=None)[...] - elif contains_group(store, path=None): - grp = Group(store=store, path=None) + _store = normalize_store_arg(store) + if contains_array(_store, path=None): + return Array(store=_store, path=None)[...] + elif contains_group(_store, path=None): + grp = Group(store=_store, path=None) return LazyLoader(grp) @@ -1078,7 +1083,7 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None, return n_copied, n_skipped, n_bytes_copied -def consolidate_metadata(store, metadata_key='.zmetadata'): +def consolidate_metadata(store: StoreLike, metadata_key=".zmetadata"): """ Consolidate all metadata for groups and arrays within the given store into a single resource and put it under the given key. @@ -1129,7 +1134,7 @@ def is_zarr_key(key): return open_consolidated(store, metadata_key=metadata_key) -def open_consolidated(store, metadata_key='.zmetadata', mode='r+', **kwargs): +def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", **kwargs): """Open group using metadata previously consolidated into a single key. This is an optimised method for opening a Zarr group, where instead of diff --git a/zarr/core.py b/zarr/core.py index 6865a0694c..56b22ead8d 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -9,6 +9,8 @@ import numpy as np from numcodecs.compat import ensure_bytes, ensure_ndarray +from collections.abc import MutableMapping + from zarr.attrs import Attributes from zarr.codecs import AsType, get_codec from zarr.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError @@ -30,7 +32,7 @@ pop_fields, ) from zarr.meta import decode_array_metadata, encode_array_metadata -from zarr.storage import array_meta_key, attrs_key, getsize, listdir +from zarr.storage import array_meta_key, attrs_key, getsize, listdir, BaseStore from zarr.util import ( all_equal, InfoReporter, @@ -145,7 +147,7 @@ class Array: def __init__( self, - store, + store: BaseStore, path=None, read_only=False, chunk_store=None, @@ -158,6 +160,9 @@ def __init__( # N.B., expect at this point store is fully initialized with all # configuration metadata fully specified and normalized + store = BaseStore._ensure_store(store) + chunk_store = BaseStore._ensure_store(chunk_store) + self._store = store self._chunk_store = chunk_store self._path = normalize_storage_path(path) @@ -2113,7 +2118,7 @@ def _encode_chunk(self, chunk): cdata = chunk # ensure in-memory data is immutable and easy to compare - if isinstance(self.chunk_store, dict): + if isinstance(self.chunk_store, MutableMapping): cdata = ensure_bytes(cdata) return cdata @@ -2146,10 +2151,10 @@ def info(self): Order : C Read-only : False Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0) - Store type : builtins.dict + Store type : zarr.storage.KVStore No. bytes : 4000000 (3.8M) - No. bytes stored : ... - Storage ratio : ... + No. 
bytes stored : 320 + Storage ratio : 12500.0 Chunks initialized : 0/10 """ diff --git a/zarr/creation.py b/zarr/creation.py index 75ff1d0212..244a9b080c 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -1,4 +1,3 @@ -import os from warnings import warn import numpy as np @@ -10,10 +9,9 @@ ContainsArrayError, ContainsGroupError, ) -from zarr.n5 import N5Store -from zarr.storage import (DirectoryStore, ZipStore, contains_array, - contains_group, default_compressor, init_array, - normalize_storage_path, FSStore) +from zarr.storage import (contains_array, contains_group, default_compressor, + init_array, normalize_storage_path, + normalize_store_arg) from zarr.util import normalize_dimension_separator @@ -157,27 +155,6 @@ def create(shape, chunks=True, dtype=None, compressor='default', return z -def normalize_store_arg(store, clobber=False, storage_options=None, mode='w'): - if store is None: - return dict() - if isinstance(store, os.PathLike): - store = os.fspath(store) - if isinstance(store, str): - mode = mode if clobber else "r" - if "://" in store or "::" in store: - return FSStore(store, mode=mode, **(storage_options or {})) - elif storage_options: - raise ValueError("storage_options passed with non-fsspec path") - if store.endswith('.zip'): - return ZipStore(store, mode=mode) - elif store.endswith('.n5'): - return N5Store(store) - else: - return DirectoryStore(store) - else: - return store - - def _kwargs_compat(compressor, fill_value, kwargs): # to be compatible with h5py, as well as backwards-compatible with Zarr diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 87c2178e61..402b8dd976 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -15,11 +15,26 @@ ReadOnlyError, ) from zarr.meta import decode_group_metadata -from zarr.storage import (MemoryStore, attrs_key, contains_array, - contains_group, group_meta_key, init_group, listdir, - rename, rmdir) -from zarr.util import (InfoReporter, TreeViewer, is_valid_python_name, nolock, - normalize_shape, normalize_storage_path) +from zarr.storage import ( + BaseStore, + MemoryStore, + attrs_key, + contains_array, + contains_group, + group_meta_key, + init_group, + listdir, + rename, + rmdir, +) +from zarr.util import ( + InfoReporter, + TreeViewer, + is_valid_python_name, + nolock, + normalize_shape, + normalize_storage_path, +) class Group(MutableMapping): @@ -96,6 +111,8 @@ class Group(MutableMapping): def __init__(self, store, path=None, read_only=False, chunk_store=None, cache_attrs=True, synchronizer=None): + store: BaseStore = BaseStore._ensure_store(store) + chunk_store: BaseStore = BaseStore._ensure_store(chunk_store) self._store = store self._chunk_store = chunk_store self._path = normalize_storage_path(path) @@ -237,11 +254,8 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): - """If the underlying Store has a ``close`` method, call it.""" - try: - self.store.close() - except AttributeError: - pass + """Call the close method of the underlying Store.""" + self.store.close() def info_items(self): @@ -804,11 +818,13 @@ def create_dataset(self, name, **kwargs): """ + assert "mode" not in kwargs return self._write_op(self._create_dataset_nosync, name, **kwargs) def _create_dataset_nosync(self, name, data=None, **kwargs): + assert "mode" not in kwargs path = self._item_path(name) # determine synchronizer diff --git a/zarr/storage.py b/zarr/storage.py index 92be9df0aa..901011c9d2 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -31,7 +31,7 @@ from os import scandir from pickle import 
PicklingError from threading import Lock, RLock -from typing import Optional, Union, List, Tuple, Dict +from typing import Optional, Union, List, Tuple, Dict, Any import uuid import time @@ -57,6 +57,15 @@ normalize_shape, normalize_storage_path, retry_call) from zarr._storage.absstore import ABSStore # noqa: F401 +from zarr._storage.store import (_listdir_from_keys, + _path_to_prefix, + _rename_from_keys, + _rmdir_from_keys, + array_meta_key, + group_meta_key, + attrs_key, + BaseStore, + Store) __doctest_requires__ = { ('RedisStore', 'RedisStore.*'): ['redis'], @@ -65,9 +74,6 @@ } -array_meta_key = '.zarray' -group_meta_key = '.zgroup' -attrs_key = '.zattrs' try: # noinspection PyUnresolvedReferences from zarr.codecs import Blosc @@ -78,18 +84,11 @@ Path = Union[str, bytes, None] +# allow MutableMapping for backwards compatibility +StoreLike = Union[BaseStore, MutableMapping] -def _path_to_prefix(path: Optional[str]) -> str: - # assume path already normalized - if path: - prefix = path + '/' - else: - prefix = '' - return prefix - - -def contains_array(store: MutableMapping, path: Path = None) -> bool: +def contains_array(store: StoreLike, path: Path = None) -> bool: """Return True if the store contains an array at the given logical path.""" path = normalize_storage_path(path) prefix = _path_to_prefix(path) @@ -97,7 +96,7 @@ def contains_array(store: MutableMapping, path: Path = None) -> bool: return key in store -def contains_group(store: MutableMapping, path: Path = None) -> bool: +def contains_group(store: StoreLike, path: Path = None) -> bool: """Return True if the store contains a group at the given logical path.""" path = normalize_storage_path(path) prefix = _path_to_prefix(path) @@ -105,41 +104,47 @@ def contains_group(store: MutableMapping, path: Path = None) -> bool: return key in store -def _rmdir_from_keys(store: MutableMapping, path: Optional[str] = None) -> None: - # assume path already normalized - prefix = _path_to_prefix(path) - for key in list(store.keys()): - if key.startswith(prefix): - del store[key] +def normalize_store_arg(store: Any, clobber=False, storage_options=None, mode="w") -> BaseStore: + if store is None: + return BaseStore._ensure_store(dict()) + elif isinstance(store, os.PathLike): + store = os.fspath(store) + if isinstance(store, str): + mode = mode if clobber else "r" + if "://" in store or "::" in store: + return FSStore(store, mode=mode, **(storage_options or {})) + elif storage_options: + raise ValueError("storage_options passed with non-fsspec path") + if store.endswith('.zip'): + return ZipStore(store, mode=mode) + elif store.endswith('.n5'): + from zarr.n5 import N5Store + return N5Store(store) + else: + return DirectoryStore(store) + else: + if not isinstance(store, BaseStore) and isinstance(store, MutableMapping): + store = BaseStore._ensure_store(store) + return store -def rmdir(store, path: Path = None): +def rmdir(store: StoreLike, path: Path = None): """Remove all items under the given path. 
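By way of illustration, a sketch of what the relocated `normalize_store_arg` now
hands back for common inputs (paths are hypothetical; plain mappings are wrapped
so callers no longer need `hasattr` checks)::

    >>> from zarr.storage import normalize_store_arg
    >>> type(normalize_store_arg(None)).__name__       # in-memory default
    'KVStore'
    >>> type(normalize_store_arg({})).__name__         # bare mappings are wrapped
    'KVStore'
    >>> type(normalize_store_arg('data/example')).__name__
    'DirectoryStore'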
If `store` provides a `rmdir` method,
    this will be called, otherwise will fall back to implementation via the
-    `MutableMapping` interface."""
+    `Store` interface."""
     path = normalize_storage_path(path)
-    if hasattr(store, 'rmdir'):
+    if hasattr(store, "rmdir") and store.is_erasable():  # type: ignore
         # pass through
-        store.rmdir(path)
+        store.rmdir(path)  # type: ignore
     else:
         # slow version, delete one key at a time
         _rmdir_from_keys(store, path)
 
 
-def _rename_from_keys(store: MutableMapping, src_path: str, dst_path: str) -> None:
-    # assume path already normalized
-    src_prefix = _path_to_prefix(src_path)
-    dst_prefix = _path_to_prefix(dst_path)
-    for key in list(store.keys()):
-        if key.startswith(src_prefix):
-            new_key = dst_prefix + key.lstrip(src_prefix)
-            store[new_key] = store.pop(key)
-
-
-def rename(store, src_path: Path, dst_path: Path):
+def rename(store: BaseStore, src_path: Path, dst_path: Path):
     """Rename all items under the given path. If `store` provides a `rename`
     method, this will be called, otherwise will fall back to implementation via the
-    `MutableMapping` interface."""
+    `Store` interface."""
     src_path = normalize_storage_path(src_path)
     dst_path = normalize_storage_path(dst_path)
     if hasattr(store, 'rename'):
@@ -150,39 +155,32 @@ def rename(store, src_path: Path, dst_path: Path):
         _rename_from_keys(store, src_path, dst_path)
 
 
-def _listdir_from_keys(store: MutableMapping, path: Optional[str] = None) -> List[str]:
-    # assume path already normalized
-    prefix = _path_to_prefix(path)
-    children = set()
-    for key in list(store.keys()):
-        if key.startswith(prefix) and len(key) > len(prefix):
-            suffix = key[len(prefix):]
-            child = suffix.split('/')[0]
-            children.add(child)
-    return sorted(children)
-
-
-def listdir(store, path: Path = None):
+def listdir(store: BaseStore, path: Path = None):
     """Obtain a directory listing for the given path. If `store` provides a
     `listdir` method, this will be called, otherwise will fall back to
     implementation via the `MutableMapping` interface."""
     path = normalize_storage_path(path)
     if hasattr(store, 'listdir'):
         # pass through
-        return store.listdir(path)
+        return store.listdir(path)  # type: ignore
     else:
         # slow version, iterate through all keys
+        warnings.warn(
+            f"Store {store} has no `listdir` method. From zarr 2.9 onwards, "
+            "stores may want to inherit from `Store`.",
+            stacklevel=2,
+        )
         return _listdir_from_keys(store, path)
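As an aside, a short sketch of the two dispatch paths above, using a plain
dict (keys are hypothetical)::

    >>> from zarr.storage import listdir, KVStore
    >>> m = {'foo/a': b'1', 'foo/b': b'2'}   # a mapping with no listdir method
    >>> listdir(m, 'foo')                    # slow path; emits the warning above
    ['a', 'b']
    >>> listdir(KVStore(m), 'foo')           # Store subclasses provide listdir
    ['a', 'b']


-def getsize(store, path: Path = None) -> int:
+def getsize(store: BaseStore, path: Path = None) -> int:
     """Compute size of stored items for a given path.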
If `store` provides a `getsize` method, this will be called, otherwise will return -1.""" path = normalize_storage_path(path) if hasattr(store, 'getsize'): # pass through - return store.getsize(path) - elif isinstance(store, dict): + return store.getsize(path) # type: ignore + elif isinstance(store, MutableMapping): # compute from size of values if path in store: v = store[path] @@ -208,8 +206,8 @@ def getsize(store, path: Path = None) -> int: def _require_parent_group( path: Optional[str], - store: MutableMapping, - chunk_store: Optional[MutableMapping], + store: StoreLike, + chunk_store: Optional[StoreLike], overwrite: bool, ): # assume path is normalized @@ -225,7 +223,7 @@ def _require_parent_group( def init_array( - store: MutableMapping, + store: StoreLike, shape: Tuple[int, ...], chunks: Union[bool, int, Tuple[int, ...]] = True, dtype=None, @@ -234,7 +232,7 @@ def init_array( order: str = "C", overwrite: bool = False, path: Path = None, - chunk_store: MutableMapping = None, + chunk_store: StoreLike = None, filters=None, object_codec=None, dimension_separator=None, @@ -244,7 +242,7 @@ def init_array( Parameters ---------- - store : MutableMapping + store : Store A mapping that supports string keys and bytes-like values. shape : int or tuple of ints Array shape. @@ -263,7 +261,7 @@ def init_array( If True, erase all data in `store` prior to initialisation. path : string, bytes, optional Path under which array is stored. - chunk_store : MutableMapping, optional + chunk_store : Store, optional Separate storage for chunks. If not provided, `store` will be used for storage of both chunks and metadata. filters : sequence, optional @@ -277,8 +275,8 @@ def init_array( -------- Initialize an array store:: - >>> from zarr.storage import init_array - >>> store = dict() + >>> from zarr.storage import init_array, KVStore + >>> store = KVStore(dict()) >>> init_array(store, shape=(10000, 10000), chunks=(1000, 1000)) >>> sorted(store.keys()) ['.zarray'] @@ -311,7 +309,7 @@ def init_array( Initialize an array using a storage path:: - >>> store = dict() + >>> store = KVStore(dict()) >>> init_array(store, shape=100000000, chunks=1000000, dtype='i1', path='foo') >>> sorted(store.keys()) ['.zgroup', 'foo/.zarray'] @@ -456,23 +454,23 @@ def _init_array_metadata( def init_group( - store: MutableMapping, + store: StoreLike, overwrite: bool = False, path: Path = None, - chunk_store: MutableMapping = None, + chunk_store: StoreLike = None, ): """Initialize a group store. Note that this is a low-level function and there should be no need to call this directly from user code. Parameters ---------- - store : MutableMapping + store : Store A mapping that supports string keys and byte sequence values. overwrite : bool, optional If True, erase all data in `store` prior to initialisation. path : string, optional Path under which array is stored. - chunk_store : MutableMapping, optional + chunk_store : Store, optional Separate storage for chunks. If not provided, `store` will be used for storage of both chunks and metadata. 
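To make the new `StoreLike` flexibility concrete, a small sketch mirroring the
doctests above (data is hypothetical)::

    >>> from zarr.storage import init_group, contains_group, KVStore
    >>> store = KVStore(dict())
    >>> init_group(store, path='foo')
    >>> contains_group(store, 'foo')
    True
    >>> plain = dict()                       # a bare mapping is still accepted
    >>> init_group(plain)
    >>> sorted(plain.keys())
    ['.zgroup']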
@@ -491,10 +489,10 @@ def init_group(


 def _init_group_metadata(
-    store: MutableMapping,
+    store: StoreLike,
     overwrite: Optional[bool] = False,
     path: Optional[str] = None,
-    chunk_store: MutableMapping = None,
+    chunk_store: StoreLike = None,
 ):

     # guard conditions
@@ -526,7 +524,50 @@ def _dict_store_keys(d: Dict, prefix="", cls=dict):
             yield prefix + k


-class MemoryStore(MutableMapping):
+class KVStore(Store):
+    """
+    This provides a default implementation of a store interface around
+    a mutable mapping, to avoid having to test stores for presence of methods.
+
+    For most methods this should just be a pass-through to the underlying KV
+    store, which is likely to expose a MutableMapping interface.
+    """
+
+    def __init__(self, mutablemapping):
+        self._mutable_mapping = mutablemapping
+
+    def __getitem__(self, key):
+        return self._mutable_mapping[key]
+
+    def __setitem__(self, key, value):
+        self._mutable_mapping[key] = value
+
+    def __delitem__(self, key):
+        del self._mutable_mapping[key]
+
+    def get(self, key, default=None):
+        return self._mutable_mapping.get(key, default)
+
+    def values(self):
+        return self._mutable_mapping.values()
+
+    def __iter__(self):
+        return iter(self._mutable_mapping)
+
+    def __len__(self):
+        return len(self._mutable_mapping)
+
+    def __repr__(self):
+        return f"<{self.__class__.__name__}: \n{repr(self._mutable_mapping)}\n at {hex(id(self))}>"
+
+    def __eq__(self, other):
+        if isinstance(other, KVStore):
+            return self._mutable_mapping == other._mutable_mapping
+        else:
+            return NotImplemented
+
+
+class MemoryStore(Store):
     """Store class that uses a hierarchy of :class:`dict` objects, thus all data
     will be held in main memory.

@@ -544,7 +585,7 @@ class MemoryStore(MutableMapping):

         >>> z = zarr.zeros(100)
         >>> type(z.store)
-        <class 'dict'>
+        <class 'zarr.storage.KVStore'>

     Notes
     -----
@@ -730,7 +771,7 @@ def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)


-class DirectoryStore(MutableMapping):
+class DirectoryStore(Store):
     """Storage class using directories and files on a standard file system.

     Parameters
@@ -1041,7 +1082,7 @@ def atexit_rmglob(path,
             rmtree(p)


-class FSStore(MutableMapping):
+class FSStore(Store):
     """Wraps an fsspec.FSMap to give access to arbitrary filesystems

     Requires that ``fsspec`` is installed, as well as any additional
@@ -1352,7 +1393,7 @@ def __eq__(self, other):


 # noinspection PyPep8Naming
-class ZipStore(MutableMapping):
+class ZipStore(Store):
     """Storage class using a Zip file.

     Parameters
@@ -1444,6 +1485,8 @@ class also supports the context manager protocol, which ensures the ``close()``

     """

+    _erasable = False
+
     def __init__(self, path, compression=zipfile.ZIP_STORED,
                  allowZip64=True, mode='a',
                  dimension_separator=None):
@@ -1606,7 +1649,7 @@ def migrate_1to2(store):

     Parameters
     ----------
-    store : MutableMapping
+    store : Store
         Store to be migrated.

     Notes
@@ -1650,7 +1693,7 @@ def migrate_1to2(store):


 # noinspection PyShadowingBuiltins
-class DBMStore(MutableMapping):
+class DBMStore(Store):
     """Storage class using a DBM-style database.

     Parameters
@@ -1842,7 +1885,7 @@ def __contains__(self, key):
         return key in self.db


-class LMDBStore(MutableMapping):
+class LMDBStore(Store):
     """Storage class using LMDB. Requires the `lmdb `_ package to be installed.

@@ -2019,7 +2062,7 @@ def __len__(self):
         return self.db.stat()['entries']


-class LRUStoreCache(MutableMapping):
+class LRUStoreCache(Store):
     """Storage class that implements a least-recently-used (LRU) cache layer over
     some other store.
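A sketch of the intended usage (the wrapped `KVStore` here stands in for a
slow remote store)::

    >>> from zarr.storage import LRUStoreCache, KVStore
    >>> base = KVStore({'foo': b'bar'})
    >>> cache = LRUStoreCache(base, max_size=2**28)
    >>> cache['foo']          # first read is fetched from the base store
    b'bar'
    >>> cache['foo']          # repeat reads are served from the cache
    b'bar'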
Intended primarily for use with stores that can be slow to access, e.g., remote stores that require network communication to store and @@ -2027,7 +2070,7 @@ class LRUStoreCache(MutableMapping): Parameters ---------- - store : MutableMapping + store : Store The store containing the actual data to be cached. max_size : int The maximum size that the cache may grow to, in number of bytes. Provide `None` @@ -2056,14 +2099,14 @@ class LRUStoreCache(MutableMapping): """ - def __init__(self, store, max_size): - self._store = store + def __init__(self, store: Store, max_size: int): + self._store = Store._ensure_store(store) self._max_size = max_size self._current_size = 0 self._keys_cache = None self._contains_cache = None - self._listdir_cache = dict() - self._values_cache = OrderedDict() + self._listdir_cache: Dict[Path, Any] = dict() + self._values_cache: Dict[Path, Any] = OrderedDict() self._mutex = Lock() self.hits = self.misses = 0 @@ -2103,7 +2146,7 @@ def _keys(self): self._keys_cache = list(self._store.keys()) return self._keys_cache - def listdir(self, path=None): + def listdir(self, path: Path = None): with self._mutex: try: return self._listdir_cache[path] @@ -2112,7 +2155,7 @@ def listdir(self, path=None): self._listdir_cache[path] = listing return listing - def getsize(self, path=None): + def getsize(self, path=None) -> int: return getsize(self._store, path=path) def _pop_value(self): @@ -2129,7 +2172,7 @@ def _accommodate_value(self, value_size): v = self._pop_value() self._current_size -= buffer_size(v) - def _cache_value(self, key, value): + def _cache_value(self, key: Path, value): # cache a value value_size = buffer_size(value) # check size of the value against max size, as if the value itself exceeds max @@ -2201,7 +2244,7 @@ def __delitem__(self, key): self._invalidate_value(key) -class SQLiteStore(MutableMapping): +class SQLiteStore(Store): """Storage class using SQLite. Parameters @@ -2404,7 +2447,7 @@ def clear(self): ) -class MongoDBStore(MutableMapping): +class MongoDBStore(Store): """Storage class using MongoDB. .. note:: This is an experimental feature. @@ -2487,7 +2530,7 @@ def clear(self): self.collection.delete_many({}) -class RedisStore(MutableMapping): +class RedisStore(Store): """Storage class using Redis. .. note:: This is an experimental feature. @@ -2556,7 +2599,7 @@ def clear(self): del self[key] -class ConsolidatedMetadataStore(MutableMapping): +class ConsolidatedMetadataStore(Store): """A layer over other storage, where the metadata has been consolidated into a single key. @@ -2580,7 +2623,7 @@ class ConsolidatedMetadataStore(MutableMapping): Parameters ---------- - store: MutableMapping + store: Store Containing the zarr array. metadata_key: str The target in the store where all of the metadata are stored. 
We @@ -2592,8 +2635,8 @@ class ConsolidatedMetadataStore(MutableMapping): """ - def __init__(self, store, metadata_key='.zmetadata'): - self.store = store + def __init__(self, store: StoreLike, metadata_key=".zmetadata"): + self.store = Store._ensure_store(store) # retrieve consolidated metadata meta = json_loads(store[metadata_key]) @@ -2605,7 +2648,7 @@ def __init__(self, store, metadata_key='.zmetadata'): consolidated_format) # decode metadata - self.meta_store = meta['metadata'] + self.meta_store: Store = KVStore(meta["metadata"]) def __getitem__(self, key): return self.meta_store[key] diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index 2aced3abaa..b2de736d4a 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -1,21 +1,26 @@ import json -import unittest import pytest from zarr.attrs import Attributes from zarr.tests.util import CountingDict +from zarr.storage import KVStore -class TestAttributes(unittest.TestCase): +class TestAttributes(): def init_attributes(self, store, read_only=False, cache=True): return Attributes(store, key='attrs', read_only=read_only, cache=cache) - def test_storage(self): + @pytest.mark.parametrize('store_from_dict', [False, True]) + def test_storage(self, store_from_dict): - store = dict() + if store_from_dict: + store = dict() + else: + store = KVStore(dict()) a = Attributes(store=store, key='attrs') + assert isinstance(a.store, KVStore) assert 'foo' not in a assert 'bar' not in a assert dict() == a.asdict() diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index d2bd91038b..e5ccbd494d 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -24,8 +24,12 @@ from zarr.core import Array from zarr.errors import CopyError from zarr.hierarchy import Group, group -from zarr.storage import (ConsolidatedMetadataStore, MemoryStore, - atexit_rmtree, getsize) +from zarr.storage import ( + ConsolidatedMetadataStore, + MemoryStore, + atexit_rmtree, + getsize, +) def test_open_array(path_type): diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 4544a6cae9..2b44ac0574 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -24,11 +24,12 @@ ABSStore, DBMStore, DirectoryStore, + FSStore, + KVStore, LMDBStore, LRUStoreCache, NestedDirectoryStore, SQLiteStore, - FSStore, atexit_rmglob, atexit_rmtree, init_array, @@ -45,8 +46,8 @@ class TestArray(unittest.TestCase): def test_array_init(self): # normal initialization - store = dict() - init_array(store, shape=100, chunks=10, dtype=' end assert [] == list(z.islice(6, 5)) - if hasattr(z.store, 'close'): - z.store.close() + z.store.close() def test_iter(self): params = ( @@ -1491,8 +1417,7 @@ def test_iter(self): z[:] = a for expect, actual in zip_longest(a, z): assert_array_equal(expect, actual) - if hasattr(z.store, 'close'): - z.store.close() + z.store.close() def test_islice(self): params = ( @@ -1530,8 +1455,7 @@ def test_compressors(self): assert np.all(a[0:100] == 1) a[:] = 1 assert np.all(a[:] == 1) - if hasattr(a.store, 'close'): - a.store.close() + a.store.close() def test_endian(self): dtype = np.dtype('float32') @@ -1542,10 +1466,8 @@ def test_endian(self): a2[:] = 1 x2 = a2[:] assert_array_equal(x1, x2) - if hasattr(a1.store, 'close'): - a1.store.close() - if hasattr(a2.store, 'close'): - a2.store.close() + a1.store.close() + a2.store.close() def test_attributes(self): a = self.create_array(shape=10, chunks=10, dtype='i8') @@ -1559,8 +1481,7 @@ def test_attributes(self): attrs = 
json_loads(a.store[a.attrs.key]) assert 'foo' in attrs and attrs['foo'] == 'bar' assert 'bar' in attrs and attrs['bar'] == 'foo' - if hasattr(a.store, 'close'): - a.store.close() + a.store.close() def test_structured_with_object(self): a = self.create_array(fill_value=(0.0, None), @@ -1575,7 +1496,7 @@ class TestArrayWithPath(TestArray): @staticmethod def create_array(read_only=False, **kwargs): - store = dict() + store = KVStore(dict()) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) write_empty_chunks = kwargs.pop('write_empty_chunks', True) @@ -1584,6 +1505,9 @@ def create_array(read_only=False, **kwargs): cache_metadata=cache_metadata, cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + def test_nchunks_initialized(self): + pass + def test_hexdigest(self): # Check basic 1-D array z = self.create_array(shape=(1050,), chunks=100, dtype=' Date: Tue, 2 Nov 2021 13:09:50 -0400 Subject: [PATCH 0025/1078] Fix: N5 keywords now emit UserWarning instead of raising a ValueError (#860) * Changed n5 keywords to print a warning Changed N5 and N5FS Added test fixture to capture stdout Updated test making sure a warning is printed * flake8 * Change to UserWarning --- zarr/n5.py | 6 ++---- zarr/tests/test_core.py | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/zarr/n5.py b/zarr/n5.py index 797558fa2d..a49604d9af 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -128,7 +128,7 @@ def __setitem__(self, key, value): for k in n5_keywords: if k in zarr_attrs.keys(): - raise ValueError("Can not set attribute %s, this is a reserved N5 keyword" % k) + warnings.warn("attribute %s is a reserved N5 keyword" % k, UserWarning) # replace previous user attributes for k in list(n5_attrs.keys()): @@ -424,9 +424,7 @@ def __setitem__(self, key, value): for k in n5_keywords: if k in zarr_attrs.keys(): - raise ValueError( - "Can not set attribute %s, this is a reserved N5 keyword" % k - ) + warnings.warn("attribute %s is a reserved N5 keyword" % k, UserWarning) # replace previous user attributes for k in list(n5_attrs.keys()): diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 2b44ac0574..3a7801d02c 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -1960,7 +1960,7 @@ def test_structured_array_contain_object(self): def test_attrs_n5_keywords(self): z = self.create_array(shape=(1050,), chunks=100, dtype='i4') for k in n5_keywords: - with pytest.raises(ValueError): + with pytest.warns(UserWarning): z.attrs[k] = "" def test_compressors(self): From 440d0975af7950b8d6ca628767a9db659b66d2bc Mon Sep 17 00:00:00 2001 From: "Gregory R. 
Lee" Date: Thu, 4 Nov 2021 09:04:40 -0400 Subject: [PATCH 0026/1078] Move metadata handling to a class (updated) (#839) * fix conflicts * cleanup naming * zip move * fix erasability test * test for warning * please flake * remove uncovered lines * remove uncovered lines in tests * pragma no cover for exceptional case * minor docstring fixes add assert statements to test_capabilities * pep8 fix * avoid NumPy 1.21.0 due to https://github.com/numpy/numpy/issues/19325 * move Store class and some helper functions to zarr._storage.store update version in Store docstring * BUG: ABSStore should inherit from Store * pep8 fix * TST: make CustomMapping a subclass of Store TST: initialize stores with KVStore(dict()) instead of bare dict() * update version mentioned in Store docstring * update version mentioned in warning message * use Store._ensure_store in Attributes class ensures Attributes.store is a Store * TST: add Attributes test case ensuring store gets coerced to a Store * use Store._ensure_store in normalize_store_arg ensures open_array, etc can work when the user supplies a dict * TST: make sure high level creation functions also work when passed a dict for store * TST: add test case with group initialized from dict * TST: add test case with Array initialized from dict * change CustomMapping back to type object, not Store want to test the non-Store code path in _ensure_store * pep8 fixes * update/fix new hierarchy test case to complete code coverage * create a BaseStore parent for Store BaseStore does not have the listdir or rmdir methods cleaned up some type declerations, making sure mypy passes * Convert functions in zarr.meta to class methods This is done to ease transition to Zarr v3 support. When adding v3 support, we can override encode and decode methods to account for changes in the metadata format. The classmethods are also exported under the old function names for backwards compatibility. Co-authored-by: Matthias Bussonier * Add a _metadata_class attribute to the Store class Because existing functions allow coerce of dict to store, there are a lot of hasattr calls here. We can remove these checks if we start enforcing that the input MUST be a Store. 
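For instance, the pattern the updated tests exercise (a sketch; `Metadata2` is
the v2 implementation that every `BaseStore` now carries by default)::

    >>> from zarr.storage import KVStore, init_array, array_meta_key
    >>> store = KVStore(dict())
    >>> init_array(store, shape=(1000,), chunks=(100,))
    >>> meta = store._metadata_class.decode_array_metadata(store[array_meta_key])
    >>> meta['zarr_format']
    2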
Use of this _metadata_class will ease the transition to v3 * Use _metadata_class attribute in test_storage.py This will make it easier to reuse existing testing code when adding v3 support * remove unused imports use _metadata_class methods * remove unnecessary hasattr checks In these cases self._store was created using _ensure_store, so it will always have the attribute * test migrate1to2 with store=dict() and store=KVStore(dict()) * pep8: remove unused imports * misc * s * remove unused FLOAT_FILLS * mypy fixes * sync metadata methods with current master branch * flake8 * restore is_erasable check to rmdir function Otherwise the save_array doc example fails to write to a ZipStore Co-authored-by: Matthias Bussonnier Co-authored-by: Josh Moore Co-authored-by: jmoore --- zarr/_storage/store.py | 4 + zarr/attrs.py | 3 +- zarr/core.py | 5 +- zarr/hierarchy.py | 4 +- zarr/meta.py | 393 +++++++++++++++++++------------------ zarr/storage.py | 31 +-- zarr/tests/test_storage.py | 71 ++++--- 7 files changed, 265 insertions(+), 246 deletions(-) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index a779a4e26a..6f5bf78e28 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -1,6 +1,7 @@ from collections.abc import MutableMapping from typing import Any, List, Optional, Union +from zarr.meta import Metadata2 from zarr.util import normalize_storage_path # v2 store keys @@ -32,6 +33,8 @@ class BaseStore(MutableMapping): _writeable = True _erasable = True _listable = True + _store_version = 2 + _metadata_class = Metadata2 def is_readable(self): return self._readable @@ -114,6 +117,7 @@ class Store(BaseStore): .. added: 2.11.0 """ + def listdir(self, path: str = "") -> List[str]: path = normalize_storage_path(path) return _listdir_from_keys(self, path) diff --git a/zarr/attrs.py b/zarr/attrs.py index ec01dbe04f..eff1237db1 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -1,6 +1,5 @@ from collections.abc import MutableMapping -from zarr.meta import parse_metadata from zarr._storage.store import Store from zarr.util import json_dumps @@ -40,7 +39,7 @@ def _get_nosync(self): except KeyError: d = dict() else: - d = parse_metadata(data) + d = self.store._metadata_class.parse_metadata(data) return d def asdict(self): diff --git a/zarr/core.py b/zarr/core.py index 56b22ead8d..d366139423 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -31,7 +31,6 @@ is_scalar, pop_fields, ) -from zarr.meta import decode_array_metadata, encode_array_metadata from zarr.storage import array_meta_key, attrs_key, getsize, listdir, BaseStore from zarr.util import ( all_equal, @@ -210,7 +209,7 @@ def _load_metadata_nosync(self): else: # decode and store metadata as instance members - meta = decode_array_metadata(meta_bytes) + meta = self._store._metadata_class.decode_array_metadata(meta_bytes) self._meta = meta self._shape = meta['shape'] self._chunks = meta['chunks'] @@ -267,7 +266,7 @@ def _flush_metadata_nosync(self): compressor=compressor_config, fill_value=self._fill_value, order=self._order, filters=filters_config) mkey = self._key_prefix + array_meta_key - self._store[mkey] = encode_array_metadata(meta) + self._store[mkey] = self._store._metadata_class.encode_array_metadata(meta) @property def store(self): diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 402b8dd976..763a5f1631 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -14,7 +14,6 @@ GroupNotFoundError, ReadOnlyError, ) -from zarr.meta import decode_group_metadata from zarr.storage import ( BaseStore, MemoryStore, @@ -134,8 
+133,7 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, except KeyError: raise GroupNotFoundError(path) else: - meta = decode_group_metadata(meta_bytes) - self._meta = meta + self._meta = self._store._metadata_class.decode_group_metadata(meta_bytes) # setup attributes akey = self._key_prefix + attrs_key diff --git a/zarr/meta.py b/zarr/meta.py index 8a7d760c0f..c292b09a14 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -11,220 +11,231 @@ ZARR_FORMAT = 2 -def parse_metadata(s: Union[MappingType, str]) -> MappingType[str, Any]: +class Metadata2: + ZARR_FORMAT = ZARR_FORMAT - # Here we allow that a store may return an already-parsed metadata object, - # or a string of JSON that we will parse here. We allow for an already-parsed - # object to accommodate a consolidated metadata store, where all the metadata for - # all groups and arrays will already have been parsed from JSON. + @classmethod + def parse_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: - if isinstance(s, Mapping): - # assume metadata has already been parsed into a mapping object - meta = s + # Here we allow that a store may return an already-parsed metadata object, + # or a string of JSON that we will parse here. We allow for an already-parsed + # object to accommodate a consolidated metadata store, where all the metadata for + # all groups and arrays will already have been parsed from JSON. - else: - # assume metadata needs to be parsed as JSON - meta = json_loads(s) + if isinstance(s, Mapping): + # assume metadata has already been parsed into a mapping object + meta = s - return meta + else: + # assume metadata needs to be parsed as JSON + meta = json_loads(s) + return meta -def decode_array_metadata(s: Union[MappingType, str]) -> MappingType[str, Any]: - meta = parse_metadata(s) + @classmethod + def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + meta = cls.parse_metadata(s) - # check metadata format - zarr_format = meta.get('zarr_format', None) - if zarr_format != ZARR_FORMAT: - raise MetadataError('unsupported zarr format: %s' % zarr_format) + # check metadata format + zarr_format = meta.get("zarr_format", None) + if zarr_format != cls.ZARR_FORMAT: + raise MetadataError("unsupported zarr format: %s" % zarr_format) - # extract array metadata fields - try: - dtype = decode_dtype(meta['dtype']) + # extract array metadata fields + try: + dtype = cls.decode_dtype(meta["dtype"]) + if dtype.hasobject: + import numcodecs + object_codec = numcodecs.get_codec(meta['filters'][0]) + else: + object_codec = None + + dimension_separator = meta.get("dimension_separator", None) + fill_value = cls.decode_fill_value(meta['fill_value'], dtype, object_codec) + meta = dict( + zarr_format=meta["zarr_format"], + shape=tuple(meta["shape"]), + chunks=tuple(meta["chunks"]), + dtype=dtype, + compressor=meta["compressor"], + fill_value=fill_value, + order=meta["order"], + filters=meta["filters"], + ) + if dimension_separator: + meta['dimension_separator'] = dimension_separator + except Exception as e: + raise MetadataError("error decoding metadata") from e + else: + return meta + @classmethod + def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes: + dtype = meta["dtype"] + sdshape = () + if dtype.subdtype is not None: + dtype, sdshape = dtype.subdtype + + dimension_separator = meta.get("dimension_separator") if dtype.hasobject: import numcodecs object_codec = numcodecs.get_codec(meta['filters'][0]) else: object_codec = None - dimension_separator = 
meta.get('dimension_separator', None) - fill_value = decode_fill_value(meta['fill_value'], dtype, object_codec) meta = dict( - zarr_format=meta['zarr_format'], - shape=tuple(meta['shape']), - chunks=tuple(meta['chunks']), - dtype=dtype, - compressor=meta['compressor'], - fill_value=fill_value, - order=meta['order'], - filters=meta['filters'], + zarr_format=cls.ZARR_FORMAT, + shape=meta["shape"] + sdshape, + chunks=meta["chunks"], + dtype=cls.encode_dtype(dtype), + compressor=meta["compressor"], + fill_value=cls.encode_fill_value(meta["fill_value"], dtype, object_codec), + order=meta["order"], + filters=meta["filters"], ) if dimension_separator: meta['dimension_separator'] = dimension_separator - except Exception as e: - raise MetadataError('error decoding metadata: %s' % e) - else: - return meta + if dimension_separator: + meta["dimension_separator"] = dimension_separator + return json_dumps(meta) -def encode_array_metadata(meta: MappingType[str, Any]) -> bytes: - dtype = meta['dtype'] - sdshape = () - if dtype.subdtype is not None: - dtype, sdshape = dtype.subdtype - - dimension_separator = meta.get('dimension_separator') - if dtype.hasobject: - import numcodecs - object_codec = numcodecs.get_codec(meta['filters'][0]) - else: - object_codec = None - meta = dict( - zarr_format=ZARR_FORMAT, - shape=meta['shape'] + sdshape, - chunks=meta['chunks'], - dtype=encode_dtype(dtype), - compressor=meta['compressor'], - fill_value=encode_fill_value(meta['fill_value'], dtype, object_codec), - order=meta['order'], - filters=meta['filters'], - ) - - if dimension_separator: - meta['dimension_separator'] = dimension_separator - - return json_dumps(meta) - - -def encode_dtype(d: np.dtype): - if d.fields is None: - return d.str - else: - return d.descr - - -def _decode_dtype_descr(d) -> List[Any]: - # need to convert list of lists to list of tuples - if isinstance(d, list): - # recurse to handle nested structures - d = [(k[0], _decode_dtype_descr(k[1])) + tuple(k[2:]) for k in d] - return d - - -def decode_dtype(d) -> np.dtype: - d = _decode_dtype_descr(d) - return np.dtype(d) - - -def decode_group_metadata(s: Union[MappingType, str]) -> MappingType[str, Any]: - meta = parse_metadata(s) - - # check metadata format version - zarr_format = meta.get('zarr_format', None) - if zarr_format != ZARR_FORMAT: - raise MetadataError('unsupported zarr format: %s' % zarr_format) - - meta = dict(zarr_format=zarr_format) - return meta - - -# N.B., keep `meta` parameter as a placeholder for future -# noinspection PyUnusedLocal -def encode_group_metadata(meta=None) -> bytes: - meta = dict( - zarr_format=ZARR_FORMAT, - ) - return json_dumps(meta) - - -FLOAT_FILLS = { - 'NaN': np.nan, - 'Infinity': np.PINF, - '-Infinity': np.NINF -} - - -def decode_fill_value(v, dtype, object_codec=None): - # early out - if v is None: - return v - if dtype.kind == 'V' and dtype.hasobject: - if object_codec is None: - raise ValueError('missing object_codec for object array') - v = base64.standard_b64decode(v) - v = object_codec.decode(v) - v = np.array(v, dtype=dtype)[()] - return v - if dtype.kind == 'f': - if v == 'NaN': - return np.nan - elif v == 'Infinity': - return np.PINF - elif v == '-Infinity': - return np.NINF + @classmethod + def encode_dtype(cls, d: np.dtype): + if d.fields is None: + return d.str else: + return d.descr + + @classmethod + def _decode_dtype_descr(cls, d) -> List[Any]: + # need to convert list of lists to list of tuples + if isinstance(d, list): + # recurse to handle nested structures + d = [(k[0], 
cls._decode_dtype_descr(k[1])) + tuple(k[2:]) for k in d] + return d + + @classmethod + def decode_dtype(cls, d) -> np.dtype: + d = cls._decode_dtype_descr(d) + return np.dtype(d) + + @classmethod + def decode_group_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + meta = cls.parse_metadata(s) + + # check metadata format version + zarr_format = meta.get("zarr_format", None) + if zarr_format != cls.ZARR_FORMAT: + raise MetadataError("unsupported zarr format: %s" % zarr_format) + + meta = dict(zarr_format=zarr_format) + return meta + + # N.B., keep `meta` parameter as a placeholder for future + # noinspection PyUnusedLocal + @classmethod + def encode_group_metadata(cls, meta=None) -> bytes: + meta = dict(zarr_format=cls.ZARR_FORMAT) + return json_dumps(meta) + + @classmethod + def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> Any: + # early out + if v is None: + return v + if dtype.kind == 'V' and dtype.hasobject: + if object_codec is None: + raise ValueError('missing object_codec for object array') + v = base64.standard_b64decode(v) + v = object_codec.decode(v) + v = np.array(v, dtype=dtype)[()] + return v + if dtype.kind == "f": + if v == "NaN": + return np.nan + elif v == "Infinity": + return np.PINF + elif v == "-Infinity": + return np.NINF + else: + return np.array(v, dtype=dtype)[()] + elif dtype.kind in "c": + v = ( + cls.decode_fill_value(v[0], dtype.type().real.dtype), + cls.decode_fill_value(v[1], dtype.type().imag.dtype), + ) + v = v[0] + 1j * v[1] return np.array(v, dtype=dtype)[()] - elif dtype.kind in 'c': - v = (decode_fill_value(v[0], dtype.type().real.dtype), - decode_fill_value(v[1], dtype.type().imag.dtype)) - v = v[0] + 1j * v[1] - return np.array(v, dtype=dtype)[()] - elif dtype.kind == 'S': - # noinspection PyBroadException - try: + elif dtype.kind == "S": + # noinspection PyBroadException + try: + v = base64.standard_b64decode(v) + except Exception: + # be lenient, allow for other values that may have been used before base64 + # encoding and may work as fill values, e.g., the number 0 + pass + v = np.array(v, dtype=dtype)[()] + return v + elif dtype.kind == "V": v = base64.standard_b64decode(v) - except Exception: - # be lenient, allow for other values that may have been used before base64 - # encoding and may work as fill values, e.g., the number 0 - pass - v = np.array(v, dtype=dtype)[()] - return v - elif dtype.kind == 'V': - v = base64.standard_b64decode(v) - v = np.array(v, dtype=dtype.str).view(dtype)[()] - return v - elif dtype.kind == 'U': - # leave as-is - return v - else: - return np.array(v, dtype=dtype)[()] - - -def encode_fill_value(v: Any, dtype: np.dtype, object_codec: Any = None) -> Any: - # early out - if v is None: - return v - if dtype.kind == 'V' and dtype.hasobject: - if object_codec is None: - raise ValueError('missing object_codec for object array') - v = object_codec.encode(v) - v = str(base64.standard_b64encode(v), 'ascii') - return v - if dtype.kind == 'f': - if np.isnan(v): - return 'NaN' - elif np.isposinf(v): - return 'Infinity' - elif np.isneginf(v): - return '-Infinity' + v = np.array(v, dtype=dtype.str).view(dtype)[()] + return v + elif dtype.kind == "U": + # leave as-is + return v + else: + return np.array(v, dtype=dtype)[()] + + @classmethod + def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> Any: + # early out + if v is None: + return v + if dtype.kind == 'V' and dtype.hasobject: + if object_codec is None: + raise ValueError('missing object_codec for 
object array') + v = object_codec.encode(v) + v = str(base64.standard_b64encode(v), 'ascii') + return v + if dtype.kind == "f": + if np.isnan(v): + return "NaN" + elif np.isposinf(v): + return "Infinity" + elif np.isneginf(v): + return "-Infinity" + else: + return float(v) + elif dtype.kind in "ui": + return int(v) + elif dtype.kind == "b": + return bool(v) + elif dtype.kind in "c": + c = cast(np.complex128, np.dtype(complex).type()) + v = (cls.encode_fill_value(v.real, c.real.dtype, object_codec), + cls.encode_fill_value(v.imag, c.imag.dtype, object_codec)) + return v + elif dtype.kind in "SV": + v = str(base64.standard_b64encode(v), "ascii") + return v + elif dtype.kind == "U": + return v + elif dtype.kind in "mM": + return int(v.view("i8")) else: - return float(v) - elif dtype.kind in 'ui': - return int(v) - elif dtype.kind == 'b': - return bool(v) - elif dtype.kind in 'c': - c = cast(np.complex128, np.dtype(complex).type()) - v = (encode_fill_value(v.real, c.real.dtype, object_codec), - encode_fill_value(v.imag, c.imag.dtype, object_codec)) - return v - elif dtype.kind in 'SV': - v = str(base64.standard_b64encode(v), 'ascii') - return v - elif dtype.kind == 'U': - return v - elif dtype.kind in 'mM': - return int(v.view('i8')) - else: - return v + return v + + +# expose class methods for backwards compatibility +parse_metadata = Metadata2.parse_metadata +decode_array_metadata = Metadata2.decode_array_metadata +encode_array_metadata = Metadata2.encode_array_metadata +encode_dtype = Metadata2.encode_dtype +_decode_dtype_descr = Metadata2._decode_dtype_descr +decode_dtype = Metadata2.decode_dtype +decode_group_metadata = Metadata2.decode_group_metadata +encode_group_metadata = Metadata2.encode_group_metadata +decode_fill_value = Metadata2.decode_fill_value +encode_fill_value = Metadata2.encode_fill_value diff --git a/zarr/storage.py b/zarr/storage.py index 901011c9d2..7170eeaf23 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -231,8 +231,8 @@ def init_array( fill_value=None, order: str = "C", overwrite: bool = False, - path: Path = None, - chunk_store: StoreLike = None, + path: Optional[Path] = None, + chunk_store: Optional[StoreLike] = None, filters=None, object_codec=None, dimension_separator=None, @@ -357,7 +357,7 @@ def init_array( def _init_array_metadata( - store, + store: StoreLike, shape, chunks=None, dtype=None, @@ -366,7 +366,7 @@ def _init_array_metadata( order="C", overwrite=False, path: Optional[str] = None, - chunk_store=None, + chunk_store: Optional[StoreLike] = None, filters=None, object_codec=None, dimension_separator=None, @@ -446,7 +446,10 @@ def _init_array_metadata( order=order, filters=filters_config, dimension_separator=dimension_separator) key = _path_to_prefix(path) + array_meta_key - store[key] = encode_array_metadata(meta) + if hasattr(store, '_metadata_class'): + store[key] = store._metadata_class.encode_array_metadata(meta) # type: ignore + else: + store[key] = encode_array_metadata(meta) # backwards compatibility @@ -511,7 +514,10 @@ def _init_group_metadata( # be in future meta = dict() # type: ignore key = _path_to_prefix(path) + group_meta_key - store[key] = encode_group_metadata(meta) + if hasattr(store, '_metadata_class'): + store[key] = store._metadata_class.encode_group_metadata(meta) # type: ignore + else: + store[key] = encode_group_metadata(meta) def _dict_store_keys(d: Dict, prefix="", cls=dict): @@ -568,7 +574,7 @@ def __eq__(self, other): class MemoryStore(Store): - """Store class that uses a hierarchy of :class:`dict` objects, thus all data 
+ """Store class that uses a hierarchy of :class:`KVStore` objects, thus all data will be held in main memory. Examples @@ -581,7 +587,7 @@ class MemoryStore(Store): Note that the default class when creating an array is the built-in - :class:`dict` class, i.e.:: + :class:`KVStore` class, i.e.:: >>> z = zarr.zeros(100) >>> type(z.store) @@ -1685,7 +1691,10 @@ def migrate_1to2(store): del meta['compression_opts'] # store migrated metadata - store[array_meta_key] = encode_array_metadata(meta) + if hasattr(store, '_metadata_class'): + store[array_meta_key] = store._metadata_class.encode_array_metadata(meta) + else: + store[array_meta_key] = encode_array_metadata(meta) # migrate user attributes store[attrs_key] = store['attrs'] @@ -2099,8 +2108,8 @@ class LRUStoreCache(Store): """ - def __init__(self, store: Store, max_size: int): - self._store = Store._ensure_store(store) + def __init__(self, store: StoreLike, max_size: int): + self._store: BaseStore = BaseStore._ensure_store(store) self._max_size = max_size self._current_size = 0 self._keys_cache = None diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 889fa80043..3438e60691 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -20,9 +20,7 @@ from zarr.codecs import BZ2, AsType, Blosc, Zlib from zarr.errors import MetadataError from zarr.hierarchy import group -from zarr.meta import (ZARR_FORMAT, decode_array_metadata, - decode_group_metadata, encode_array_metadata, - encode_group_metadata) +from zarr.meta import ZARR_FORMAT, decode_array_metadata from zarr.n5 import N5Store, N5FSStore from zarr.storage import (ABSStore, ConsolidatedMetadataStore, DBMStore, DictStore, DirectoryStore, KVStore, LMDBStore, @@ -427,7 +425,7 @@ def test_init_array(self, dimension_separator_fixture): # check metadata assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -460,7 +458,7 @@ def test_init_group_overwrite_chunk_store(self): def _test_init_array_overwrite(self, order): # setup store = self.create_store() - store[array_meta_key] = encode_array_metadata( + store[array_meta_key] = store._metadata_class.encode_array_metadata( dict(shape=(2000,), chunks=(200,), dtype=np.dtype('u1'), @@ -482,7 +480,7 @@ def _test_init_array_overwrite(self, order): pass else: assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -498,7 +496,7 @@ def test_init_array_path(self): # check metadata key = path + '/' + array_meta_key assert key in store - meta = decode_array_metadata(store[key]) + meta = store._metadata_class.decode_array_metadata(store[key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -519,8 +517,8 @@ def _test_init_array_overwrite_path(self, order): fill_value=0, order=order, filters=None) - store[array_meta_key] = encode_array_metadata(meta) - store[path + '/' + array_meta_key] = encode_array_metadata(meta) + store[array_meta_key] = store._metadata_class.encode_array_metadata(meta) + store[path + '/' + array_meta_key] = store._metadata_class.encode_array_metadata(meta) # don't overwrite with pytest.raises(ValueError): @@ -537,7 
+535,7 @@ def _test_init_array_overwrite_path(self, order): assert array_meta_key not in store assert (path + '/' + array_meta_key) in store # should have been overwritten - meta = decode_array_metadata(store[path + '/' + array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[path + '/' + array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -549,7 +547,7 @@ def test_init_array_overwrite_group(self): # setup path = 'foo/bar' store = self.create_store() - store[path + '/' + group_meta_key] = encode_group_metadata() + store[path + '/' + group_meta_key] = store._metadata_class.encode_group_metadata() # don't overwrite with pytest.raises(ValueError): @@ -564,7 +562,7 @@ def test_init_array_overwrite_group(self): else: assert (path + '/' + group_meta_key) not in store assert (path + '/' + array_meta_key) in store - meta = decode_array_metadata(store[path + '/' + array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[path + '/' + array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -576,7 +574,7 @@ def _test_init_array_overwrite_chunk_store(self, order): # setup store = self.create_store() chunk_store = self.create_store() - store[array_meta_key] = encode_array_metadata( + store[array_meta_key] = store._metadata_class.encode_array_metadata( dict(shape=(2000,), chunks=(200,), dtype=np.dtype('u1'), @@ -600,7 +598,7 @@ def _test_init_array_overwrite_chunk_store(self, order): pass else: assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -614,7 +612,7 @@ def _test_init_array_overwrite_chunk_store(self, order): def test_init_array_compat(self): store = self.create_store() init_array(store, shape=1000, chunks=100, compressor='none') - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert meta['compressor'] is None store.close() @@ -625,7 +623,7 @@ def test_init_group(self): # check metadata assert group_meta_key in store - meta = decode_group_metadata(store[group_meta_key]) + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] store.close() @@ -633,7 +631,7 @@ def test_init_group(self): def _test_init_group_overwrite(self, order): # setup store = self.create_store() - store[array_meta_key] = encode_array_metadata( + store[array_meta_key] = store._metadata_class.encode_array_metadata( dict(shape=(2000,), chunks=(200,), dtype=np.dtype('u1'), @@ -655,7 +653,7 @@ def _test_init_group_overwrite(self, order): else: assert array_meta_key not in store assert group_meta_key in store - meta = decode_group_metadata(store[group_meta_key]) + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] # don't overwrite group @@ -675,8 +673,8 @@ def _test_init_group_overwrite_path(self, order): fill_value=0, order=order, filters=None) - store[array_meta_key] = encode_array_metadata(meta) - store[path + '/' + array_meta_key] = encode_array_metadata(meta) + store[array_meta_key] = store._metadata_class.encode_array_metadata(meta) + store[path + '/' + array_meta_key] = store._metadata_class.encode_array_metadata(meta) # 
don't overwrite with pytest.raises(ValueError): @@ -693,7 +691,7 @@ def _test_init_group_overwrite_path(self, order): assert (path + '/' + array_meta_key) not in store assert (path + '/' + group_meta_key) in store # should have been overwritten - meta = decode_group_metadata(store[path + '/' + group_meta_key]) + meta = store._metadata_class.decode_group_metadata(store[path + '/' + group_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] store.close() @@ -702,7 +700,7 @@ def _test_init_group_overwrite_chunk_store(self, order): # setup store = self.create_store() chunk_store = self.create_store() - store[array_meta_key] = encode_array_metadata( + store[array_meta_key] = store._metadata_class.encode_array_metadata( dict(shape=(2000,), chunks=(200,), dtype=np.dtype('u1'), @@ -726,7 +724,7 @@ def _test_init_group_overwrite_chunk_store(self, order): else: assert array_meta_key not in store assert group_meta_key in store - meta = decode_group_metadata(store[group_meta_key]) + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert 'foo' not in chunk_store assert 'baz' not in chunk_store @@ -941,7 +939,7 @@ def test_init_array(self): # check metadata assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -1191,7 +1189,7 @@ def test_init_array(self): # check metadata assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -1267,7 +1265,7 @@ def test_init_array(self): # check metadata assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -1287,7 +1285,7 @@ def test_init_array_path(self): # check metadata key = path + '/' + array_meta_key assert key in store - meta = decode_array_metadata(store[key]) + meta = store._metadata_class.decode_array_metadata(store[key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -1301,7 +1299,7 @@ def test_init_array_path(self): def test_init_array_compat(self): store = self.create_store() init_array(store, shape=1000, chunks=100, compressor='none') - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) # N5Store wraps the actual compressor compressor_config = meta['compressor']['compressor_config'] assert compressor_config is None @@ -1332,7 +1330,7 @@ def test_init_group(self): assert group_meta_key in store assert group_meta_key in store.listdir() assert group_meta_key in store.listdir('') - meta = decode_group_metadata(store[group_meta_key]) + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) assert ZARR_FORMAT == meta['zarr_format'] def test_filters(self): @@ -1387,7 +1385,7 @@ def test_init_array(self): # check metadata assert array_meta_key in store - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) assert ZARR_FORMAT == 
meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -1407,7 +1405,7 @@ def test_init_array_path(self): # check metadata key = path + '/' + array_meta_key assert key in store - meta = decode_array_metadata(store[key]) + meta = store._metadata_class.decode_array_metadata(store[key]) assert ZARR_FORMAT == meta['zarr_format'] assert (1000,) == meta['shape'] assert (100,) == meta['chunks'] @@ -1421,7 +1419,7 @@ def test_init_array_path(self): def test_init_array_compat(self): store = self.create_store() init_array(store, shape=1000, chunks=100, compressor='none') - meta = decode_array_metadata(store[array_meta_key]) + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) # N5Store wraps the actual compressor compressor_config = meta['compressor']['compressor_config'] assert compressor_config is None @@ -1929,14 +1927,15 @@ def test_getsize(): assert -1 == getsize(store) -def test_migrate_1to2(): +@pytest.mark.parametrize('dict_store', [False, True]) +def test_migrate_1to2(dict_store): from zarr import meta_v1 # N.B., version 1 did not support hierarchies, so we only have to be # concerned about migrating a single array at the root of the store # setup - store = KVStore(dict()) + store = dict() if dict_store else KVStore(dict()) meta = dict( shape=(100,), chunks=(10,), @@ -1974,7 +1973,7 @@ def test_migrate_1to2(): assert meta_migrated['compressor'] == Zlib(1).get_config() # check dict compression_opts - store = KVStore(dict()) + store = dict() if dict_store else KVStore(dict()) meta['compression'] = 'blosc' meta['compression_opts'] = dict(cname='lz4', clevel=5, shuffle=1) meta_json = meta_v1.encode_metadata(meta) @@ -1988,7 +1987,7 @@ def test_migrate_1to2(): Blosc(cname='lz4', clevel=5, shuffle=1).get_config()) # check 'none' compression is migrated to None (null in JSON) - store = KVStore(dict()) + store = dict() if dict_store else KVStore(dict()) meta['compression'] = 'none' meta_json = meta_v1.encode_metadata(meta) store['meta'] = meta_json From c95cac6060ccd0fe6a9903685b48c69f67102fe7 Mon Sep 17 00:00:00 2001 From: jmoore Date: Thu, 4 Nov 2021 14:08:34 +0100 Subject: [PATCH 0027/1078] 2.11.0a2 changelog --- docs/release.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index e9c592a860..6c7da97912 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -15,6 +15,18 @@ Enhancements * write_empty_chunks=False deletes chunks consisting of only fill_value. By :user:`Davis Bennett `; :issue:`738`. +* Move metadata handling to a class. + By :user:`Greggory Lee `; :issue:`839`. + +* Create a Base store class for Zarr Store. + By :user:`Greggory Lee `; :issue:`789`. + +Bug fixes +~~~~~~~~~ + +* N5 keywords now emit UserWarning instead of raising a ValueError. + By :user:`Boaz Mohar `; :issue:`860`. + .. _release_2.10.2: 2.10.2 From f7c186603c8752400908e2881c9bf4d550c6f301 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Nov 2021 11:25:17 +0100 Subject: [PATCH 0028/1078] Bump fsspec[s3] from 2021.10.1 to 2021.11.0 (#866) Bumps [fsspec[s3]](https://github.com/fsspec/filesystem_spec) from 2021.10.1 to 2021.11.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2021.10.1...2021.11.0) --- updated-dependencies: - dependency-name: fsspec[s3] dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index b26431be9b..c8082fdecf 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,5 +19,5 @@ pytest-cov==3.0.0 pytest-doctestplus==0.11.0 pytest-timeout==2.0.1 h5py==3.4.0 -fsspec[s3]==2021.10.1 +fsspec[s3]==2021.11.0 moto[server]>=1.3.14 From db2dcd80d41e485fa559c6a25cd601ab98e83879 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Nov 2021 11:25:36 +0100 Subject: [PATCH 0029/1078] Bump numpy from 1.21.3 to 1.21.4 (#865) Bumps [numpy](https://github.com/numpy/numpy) from 1.21.3 to 1.21.4. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.21.3...v1.21.4) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 47cb2ca320..30c31237f3 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.21.3 +numpy==1.21.4 From a92550b9ff9f4931353b4066486aaf9755895ce3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Nov 2021 13:33:15 +0100 Subject: [PATCH 0030/1078] Bump h5py from 3.4.0 to 3.5.0 (#855) Bumps [h5py](https://github.com/h5py/h5py) from 3.4.0 to 3.5.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.4.0...3.5.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index c8082fdecf..8787e7c149 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.11.0 pytest-timeout==2.0.1 -h5py==3.4.0 +h5py==3.5.0 fsspec[s3]==2021.11.0 moto[server]>=1.3.14 From 4e6e0b25555a475a488fbe6d616a7607a19f62bd Mon Sep 17 00:00:00 2001 From: Andrew Fulton Date: Mon, 15 Nov 2021 04:00:38 -0500 Subject: [PATCH 0031/1078] defines blocksize for array, updates hexdigest values (#867) --- zarr/tests/test_core.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 3a7801d02c..34f30d5abc 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -2630,7 +2630,7 @@ def create_array(read_only=False, **kwargs): cache_metadata = kwargs.pop("cache_metadata", True) cache_attrs = kwargs.pop("cache_attrs", True) write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault("compressor", Blosc()) + kwargs.setdefault("compressor", Blosc(blocksize=256)) init_array(store, **kwargs) return Array( store, @@ -2644,11 +2644,11 @@ def create_array(read_only=False, **kwargs): def test_hexdigest(self): # Check basic 1-D array z = self.create_array(shape=(1050,), chunks=100, dtype=" Date: Mon, 15 Nov 2021 04:01:20 -0500 Subject: [PATCH 0032/1078] blocks_to_decompress not used in read_part function (#861) --- zarr/util.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/zarr/util.py b/zarr/util.py index d092ffe0de..ae2ad19b28 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -605,12 +605,6 @@ def read_part(self, start, nitems): assert self.buff is not None if self.nblocks == 1: return - blocks_to_decompress = nitems / self.n_per_block - blocks_to_decompress = ( - blocks_to_decompress - if blocks_to_decompress == int(blocks_to_decompress) - else int(blocks_to_decompress + 1) - ) start_block = int(start / self.n_per_block) wanted_decompressed = 0 while wanted_decompressed < nitems: From 1e70a3c86002c1567302ea0ba4a14f242176d84e Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 15 Nov 2021 11:02:56 +0100 Subject: [PATCH 0033/1078] Activate Python 3.9 (#859) * Activate Python 3.9 * bump numcodecs to 0.9.1 * Remove numcodecs 0.6.4 pins * skip numpy 1.17 on py39 * update tox.ini * Exclude AND of py39 & numpy 1.17 --- .github/workflows/python-package.yml | 7 +++++-- .github/workflows/windows-testing.yml | 4 ++-- requirements_dev_minimal.txt | 2 +- tox.ini | 10 +++++----- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 9261187caf..3d53ac43ad 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,8 +15,11 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8] + python-version: [3.7, 3.8, 3.9] numpy_version: ['!=1.21.0', '==1.17.*'] + exclude: + - python-version: 3.9 + numpy_version: '==1.17.*' services: redis: image: redis @@ -47,7 +50,7 @@ jobs: - name: Create Conda environment with the rights deps shell: "bash -l {0}" run: | - conda create -n zarr-env python==${{matrix.python-version}} bsddb3 numcodecs==0.6.4 lmdb pip nodejs flake8 mypy + conda create -n zarr-env 
python==${{matrix.python-version}} bsddb3 numcodecs lmdb pip nodejs flake8 mypy conda activate zarr-env npm install -g azurite - name: Install dependencies diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 5eaafefbf4..af656aa88d 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -16,7 +16,7 @@ jobs: strategy: fail-fast: True matrix: - python-version: ["3.8"] + python-version: ["3.8", "3.9"] steps: - uses: actions/checkout@v2 with: @@ -31,7 +31,7 @@ jobs: - name: Create Conda environment with the rights deps shell: bash -l {0} run: | - conda create -n zarr-env python==${{matrix.python-version}} numcodecs==0.6.4 pip nodejs + conda create -n zarr-env python==${{matrix.python-version}} numcodecs pip nodejs - name: Install dependencies shell: bash -l {0} run: | diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 12ae808ce6..a1d50c8247 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,7 +1,7 @@ # library requirements asciitree==0.3.3 fasteners==0.16.3 -numcodecs==0.8.1 +numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.3.2 # test requirements diff --git a/tox.ini b/tox.ini index a9256a9979..9e0212cc5e 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py37-npy{117,latest}, py38, docs +envlist = py37-npy{117,latest}, py38, py39, docs [testenv] install_command = pip install --no-binary=numcodecs {opts} {packages} @@ -18,17 +18,17 @@ commands = # clear out any data files generated during tests python -c 'import glob; import shutil; import os; [(shutil.rmtree(d) if os.path.isdir(d) else os.remove(d) if os.path.isfile(d) else None) for d in glob.glob("./example*")]' # main unit test runner - py38: pytest -v --cov=zarr --cov-config=.coveragerc zarr + py{38,39}: pytest -v --cov=zarr --cov-config=.coveragerc zarr # don't collect coverage when running older numpy versions py37-npy117: pytest -v zarr # collect coverage and run doctests under py37 py37-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data # generate a coverage report - py37-npylatest,py38: coverage report -m + py37-npylatest,py38,py39: coverage report -m # run doctests in the tutorial and spec - py38: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst + py{38,39}: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst # pep8 checks - py38: flake8 zarr + py{38,39}: flake8 zarr # print environment for debugging pip freeze deps = From 72bd47a7c5e15150362f6751582c71f8c22340d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Nov 2021 10:01:13 +0100 Subject: [PATCH 0034/1078] Bump redis from 3.5.3 to 4.0.0 (#870) Bumps [redis](https://github.com/redis/redis-py) from 3.5.3 to 4.0.0. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/3.5.3...v4.0.0) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 8787e7c149..236b2603f9 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==3.5.3 +redis==4.0.0 types-redis types-setuptools pymongo==3.12.1 From 52eae2c09a96fc62a264d6ab697415bf7d41e555 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Nov 2021 07:54:08 +0100 Subject: [PATCH 0035/1078] Bump pytest-doctestplus from 0.11.0 to 0.11.1 (#872) Bumps [pytest-doctestplus](https://github.com/astropy/pytest-doctestplus) from 0.11.0 to 0.11.1. - [Release notes](https://github.com/astropy/pytest-doctestplus/releases) - [Changelog](https://github.com/astropy/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/astropy/pytest-doctestplus/compare/v0.11.0...v0.11.1) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 236b2603f9..50fbad0e88 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ tox==3.24.4 coverage flake8==4.0.1 pytest-cov==3.0.0 -pytest-doctestplus==0.11.0 +pytest-doctestplus==0.11.1 pytest-timeout==2.0.1 h5py==3.5.0 fsspec[s3]==2021.11.0 From 85c7360a602fd28dcc7657d504588ada15a9883f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Nov 2021 07:10:40 +0000 Subject: [PATCH 0036/1078] Bump redis from 4.0.0 to 4.0.1 (#875) --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 50fbad0e88..18ba7f4420 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.0.0 +redis==4.0.1 types-redis types-setuptools pymongo==3.12.1 From b9ca7b79dbe824346471232d5b61fa220bf0c102 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Nov 2021 08:11:22 +0100 Subject: [PATCH 0037/1078] Bump h5py from 3.5.0 to 3.6.0 (#873) Bumps [h5py](https://github.com/h5py/h5py) from 3.5.0 to 3.6.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.5.0...3.6.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 18ba7f4420..dde73663ab 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.11.1 pytest-timeout==2.0.1 -h5py==3.5.0 +h5py==3.6.0 fsspec[s3]==2021.11.0 moto[server]>=1.3.14 From 369f50280b5b0803d888148a523e65cb68c537df Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Fri, 19 Nov 2021 09:06:11 +0100 Subject: [PATCH 0038/1078] Pass dimension_separator on fixture generation (fix #858) (#871) * Pass dimension_separator on fixture generation (fix #858) Under some test conditions (conda-forge, Debian builds), the fixtures directory is not available and is then re-created. When dimension_separator is not passed to DirectoryStore, then no metadata is assigned to the file. For the "flat" (as opposed to "flat_legacy") fixture, this meant that the NestedDirectoryStore did not correct its behavior leading to a failure. see: - https://github.com/conda-forge/zarr-feedstock/pull/56 - https://github.com/zarr-developers/zarr-python/issues/858 * Revert "Pass dimension_separator on fixture generation (fix #858)" This reverts commit 6f79c56a3153f6f1a1d36a1a385b6af636a487d9. * Add a test for missing fixtures * Revert "Revert "Pass dimension_separator on fixture generation (fix #858)"" This reverts commit 568b1cf6175513a8c4916eaa09107e36854f53ad. --- .github/workflows/minimal.yml | 7 +++++++ zarr/tests/test_dim_separator.py | 10 +++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 0ce211cbde..90c9a59585 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -26,3 +26,10 @@ jobs: conda activate minimal python -m pip install . 
pytest -svx + - name: Fixture generation + shell: "bash -l {0}" + run: | + conda activate minimal + rm -rf fixture/ + pytest -svx zarr/tests/test_dim_separator.py zarr/tests/test_storage.py + # This simulates fixture-less tests in conda and debian packaging diff --git a/zarr/tests/test_dim_separator.py b/zarr/tests/test_dim_separator.py index 5e17bbe279..f439a4bd0c 100644 --- a/zarr/tests/test_dim_separator.py +++ b/zarr/tests/test_dim_separator.py @@ -2,6 +2,7 @@ import pytest from numpy.testing import assert_array_equal +from functools import partial import zarr from zarr.core import Array @@ -43,9 +44,16 @@ def dataset(tmpdir, request): if not static.exists(): # pragma: no cover if "nested" in which: + # No way to reproduce the nested_legacy file via code generator = NestedDirectoryStore else: - generator = DirectoryStore + if "legacy" in suffix: + # No dimension_separator metadata included + generator = DirectoryStore + else: + # Explicit dimension_separator metadata included + generator = partial(DirectoryStore, + dimension_separator=".") # store the data - should be one-time operation s = generator(str(static)) From 5b00f209bb435aece744b2ac58ff822b12b39476 Mon Sep 17 00:00:00 2001 From: jmoore Date: Tue, 16 Nov 2021 14:18:52 +0100 Subject: [PATCH 0039/1078] Update with changelog from 2.10.3 --- docs/release.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 6c7da97912..d0aed55665 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,12 +21,26 @@ Enhancements * Create a Base store class for Zarr Store. By :user:`Greggory Lee `; :issue:`789`. +.. _release_2.10.3: + +2.10.3 +------ + Bug fixes ~~~~~~~~~ * N5 keywords now emit UserWarning instead of raising a ValueError. By :user:`Boaz Mohar `; :issue:`860`. +* blocks_to_decompress not used in read_part function. + By :user:`Boaz Mohar `; :issue:`861`. + +* defines blocksize for array, updates hexdigest values. + By :user:`Andrew Fulton `; :issue:`867`. + +* Fix test failure on Debian and conda-forge builds. + By :user:`Josh Moore `; :issue:`871`. + .. _release_2.10.2: 2.10.2 From b9bef8e1c1b809fdc991447a4e80a1075a39eccd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Nov 2021 09:40:35 +0100 Subject: [PATCH 0040/1078] Bump redis from 4.0.1 to 4.0.2 (#878) Bumps [redis](https://github.com/redis/redis-py) from 4.0.1 to 4.0.2. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.0.1...v4.0.2) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index dde73663ab..520f772d97 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.0.1 +redis==4.0.2 types-redis types-setuptools pymongo==3.12.1 From f2feb5a5264e0b183019f1b3e3bf947b030bf1fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Nov 2021 14:55:17 +0100 Subject: [PATCH 0041/1078] Bump numcodecs from 0.8.1 to 0.9.0 (#811) * Bump numcodecs from 0.8.1 to 0.9.0 Bumps [numcodecs](https://github.com/zarr-developers/numcodecs) from 0.8.1 to 0.9.0. - [Release notes](https://github.com/zarr-developers/numcodecs/releases) - [Changelog](https://github.com/zarr-developers/numcodecs/blob/master/docs/release.rst) - [Commits](https://github.com/zarr-developers/numcodecs/compare/v0.8.1...v0.9.0) --- updated-dependencies: - dependency-name: numcodecs dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump to numcodecs 0.9.1 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jakirkham Co-authored-by: Josh Moore From e14b3718d2b781cc87456ce5909795a2fefd75af Mon Sep 17 00:00:00 2001 From: Matthias Bussonnier Date: Tue, 23 Nov 2021 06:51:09 -0800 Subject: [PATCH 0042/1078] Allow to update array fill_values (#665) * Revert "Remove the "Next release header, please revert after release" This reverts commit cb1e8a5fc0c327945e3d139fa68af32dce2db3ab. * Allow to update array fill_values closes #662 * Fix minor test issue * Fix linting error Co-authored-by: jmoore Co-authored-by: Josh Moore --- docs/release.rst | 2 ++ zarr/core.py | 5 +++++ zarr/tests/test_storage.py | 18 +++++++++++++++++- 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index d0aed55665..801dcbe877 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -9,6 +9,8 @@ Unreleased Enhancements ~~~~~~~~~~~~ +* Allow to assign array ``fill_values`` and update metadata accordingly. :issue:`662` + * array indexing with [] (getitem and setitem) now supports fancy indexing. By :user:`Juan Nunez-Iglesias `; :issue:`725`. 
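The changelog entry above mentions fancy indexing through ``[]`` (:issue:`725`); a minimal sketch of that behaviour, assuming an in-memory array::

    import numpy as np
    import zarr

    z = zarr.array(np.arange(10))
    # integer-array (fancy) indexing now works directly via __getitem__
    np.testing.assert_array_equal(z[[1, 3, 5]], [1, 3, 5])
    # ...and via __setitem__
    z[[1, 3, 5]] = 0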
diff --git a/zarr/core.py b/zarr/core.py index d366139423..029b3c1ae0 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -347,6 +347,11 @@ def fill_value(self): """A value used for uninitialized portions of the array.""" return self._fill_value + @fill_value.setter + def fill_value(self, new): + self._fill_value = new + self._flush_metadata_nosync() + @property def order(self): """A string indicating the order in which bytes are arranged within diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 3438e60691..0b1b00eb05 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -17,6 +17,7 @@ from numcodecs.compat import ensure_bytes +import zarr from zarr.codecs import BZ2, AsType, Blosc, Zlib from zarr.errors import MetadataError from zarr.hierarchy import group @@ -2187,4 +2188,19 @@ def test_read_write(self): with pytest.raises(PermissionError): cs['bar'] = 0 with pytest.raises(PermissionError): - cs['spam'] = 'eggs' + cs["spam"] = "eggs" + + +# standalone test we do not want to run on each store. + + +def test_fill_value_change(): + a = zarr.create((10, 10), dtype=int) + + assert a[0, 0] == 0 + + a.fill_value = 1 + + assert a[0, 0] == 1 + + assert json.loads(a.store[".zarray"])["fill_value"] == 1 From d9d460f2ade0cdaf323485096b1cd3d12208c033 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 25 Nov 2021 12:44:06 +0100 Subject: [PATCH 0043/1078] Fix typos found by codespell (#880) --- docs/contributing.rst | 2 +- docs/release.rst | 4 ++-- docs/spec/v1.rst | 2 +- docs/spec/v2.rst | 4 ++-- docs/tutorial.rst | 2 +- zarr/core.py | 6 +++--- zarr/creation.py | 4 ++-- zarr/n5.py | 9 +++++---- zarr/storage.py | 6 +++--- zarr/tests/test_dim_separator.py | 6 +++--- zarr/tests/test_storage.py | 8 ++++---- 11 files changed, 27 insertions(+), 26 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index da8b5b945f..d0019cdb60 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -152,7 +152,7 @@ the tests will be skipped. To install all optional dependencies, run:: $ pip install -r requirements_dev_optional.txt To also run the doctests within docstrings (requires optional -depencies to be installed), run:: +dependencies to be installed), run:: $ pytest -v --doctest-plus zarr diff --git a/docs/release.rst b/docs/release.rst index 801dcbe877..630f9de833 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -344,7 +344,7 @@ See `this link `; :issue:`537` -* Add typing informations to many of the core functions :issue:`589` +* Add typing information to many of the core functions :issue:`589` * Explicitly close stores during testing. By :user:`Elliott Sales de Andrade `; :issue:`442` diff --git a/docs/spec/v1.rst b/docs/spec/v1.rst index 18744b03b1..39fc6f30ce 100644 --- a/docs/spec/v1.rst +++ b/docs/spec/v1.rst @@ -150,7 +150,7 @@ and columns 4000-5000 and is stored under the key '2.4'; etc. There is no need for all chunks to be present within an array store. If a chunk is not present then it is considered to be in an -uninitialized state. An unitialized chunk MUST be treated as if it +uninitialized state. An uninitialized chunk MUST be treated as if it was uniformly filled with the value of the 'fill_value' field in the array metadata. If the 'fill_value' field is ``null`` then the contents of the chunk are undefined. 
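The spec passage above has uninitialized chunks read back as if uniformly filled with ``fill_value``; combined with the setter added in the fill_values patch earlier in this series (:issue:`662`), a minimal sketch mirroring the new test::

    import zarr

    a = zarr.create((10, 10), dtype=int)  # fill_value defaults to 0
    assert a[0, 0] == 0   # chunk never written, so it reads as fill_value
    a.fill_value = 1      # the new setter flushes the updated .zarray metadata
    assert a[0, 0] == 1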
diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index 8a1b58aeb3..ec73163c0d 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -81,7 +81,7 @@ filters The following keys MAY be present within the object: dimension_separator - If present, either the string ``"."`` or ``"/""`` definining the separator placed + If present, either the string ``"."`` or ``"/""`` defining the separator placed between the dimensions of a chunk. If the value is not set, then the default MUST be assumed to be ``"."``, leading to chunk keys of the form "0.0". Arrays defined with ``"/"`` as the dimension separator can be considered to have @@ -222,7 +222,7 @@ columns 4000-5000 and is stored under the key "2.4"; etc. There is no need for all chunks to be present within an array store. If a chunk is not present then it is considered to be in an uninitialized state. An -unitialized chunk MUST be treated as if it was uniformly filled with the value +uninitialized chunk MUST be treated as if it was uniformly filled with the value of the "fill_value" field in the array metadata. If the "fill_value" field is ``null`` then the contents of the chunk are undefined. diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 18c232ae40..693f389f09 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1297,7 +1297,7 @@ ratios, depending on the correlation structure within the data. E.g.:: Chunks initialized : 100/100 In the above example, Fortran order gives a better compression ratio. This is an -artifical example but illustrates the general point that changing the order of +artificial example but illustrates the general point that changing the order of bytes within chunks of an array may improve the compression ratio, depending on the structure of the data, the compression algorithm used, and which compression filters (e.g., byte-shuffle) have been applied. diff --git a/zarr/core.py b/zarr/core.py index 029b3c1ae0..0bbf535b30 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -74,7 +74,7 @@ class Array: operations. If False, user attributes are reloaded from the store prior to all attribute read operations. partial_decompress : bool, optional - If True and while the chunk_store is a FSStore and the compresion used + If True and while the chunk_store is a FSStore and the compression used is Blosc, when getting data from the array chunks will be partially read and decompressed when possible. 
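The ``partial_decompress`` flag documented in the hunk above only takes effect when the chunk store is an ``FSStore`` and the compressor is Blosc; a usage sketch, assuming a pre-existing Blosc-compressed array at the hypothetical URL ``s3://example-bucket/data.zarr``::

    import zarr

    # chunks overlapping the selection may then be read and decompressed
    # only partially; with other stores the flag is simply inert
    z = zarr.open_array("s3://example-bucket/data.zarr", mode="r",
                        partial_decompress=True)
    first_rows = z[:100]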
@@ -459,7 +459,7 @@ def nchunks_initialized(self): # count chunk keys return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) - # backwards compability + # backwards compatibility initialized = nchunks_initialized @property @@ -1108,7 +1108,7 @@ def get_mask_selection(self, selection, out=None, fields=None): >>> import numpy as np >>> z = zarr.array(np.arange(100).reshape(10, 10)) - Retrieve items by specifying a maks:: + Retrieve items by specifying a mask:: >>> sel = np.zeros_like(z, dtype=bool) >>> sel[1, 1] = True diff --git a/zarr/creation.py b/zarr/creation.py index 244a9b080c..aa1a4ac703 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -136,7 +136,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', store_separator = getattr(store, "_dimension_separator", None) if store_separator not in (None, dimension_separator): raise ValueError( - f"Specified dimension_separtor: {dimension_separator}" + f"Specified dimension_separator: {dimension_separator}" f"conflicts with store's separator: " f"{store_separator}") dimension_separator = normalize_dimension_separator(dimension_separator) @@ -439,7 +439,7 @@ def open_array( If using an fsspec URL to create the store, these will be passed to the backend implementation. Ignored otherwise. partial_decompress : bool, optional - If True and while the chunk_store is a FSStore and the compresion used + If True and while the chunk_store is a FSStore and the compression used is Blosc, when getting data from the array chunks will be partially read and decompressed when possible. write_empty_chunks : bool, optional diff --git a/zarr/n5.py b/zarr/n5.py index a49604d9af..242de1ddf8 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -38,7 +38,7 @@ class N5Store(NestedDirectoryStore): normalize_keys : bool, optional If True, all store keys will be normalized to use lower case characters (e.g. 'foo' and 'FOO' will be treated as equivalent). This can be - useful to avoid potential discrepancies between case-senstive and + useful to avoid potential discrepancies between case-sensitive and case-insensitive file system. Default value is False. Examples @@ -283,8 +283,9 @@ def _contains_attrs(self, path): class N5FSStore(FSStore): - """Implentation of the N5 format (https://github.com/saalfeldlab/n5) using `fsspec`, - which allows storage on a variety of filesystems. Based on `zarr.N5Store`. + """Implementation of the N5 format (https://github.com/saalfeldlab/n5) + using `fsspec`, which allows storage on a variety of filesystems. Based + on `zarr.N5Store`. Parameters ---------- path : string @@ -292,7 +293,7 @@ class N5FSStore(FSStore): normalize_keys : bool, optional If True, all store keys will be normalized to use lower case characters (e.g. 'foo' and 'FOO' will be treated as equivalent). This can be - useful to avoid potential discrepancies between case-senstive and + useful to avoid potential discrepancies between case-sensitive and case-insensitive file system. Default value is False. Examples diff --git a/zarr/storage.py b/zarr/storage.py index 7170eeaf23..ff9e7ecdd7 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -787,7 +787,7 @@ class DirectoryStore(Store): normalize_keys : bool, optional If True, all store keys will be normalized to use lower case characters (e.g. 'foo' and 'FOO' will be treated as equivalent). This can be - useful to avoid potential discrepancies between case-senstive and + useful to avoid potential discrepancies between case-sensitive and case-insensitive file system. Default value is False. 
dimension_separator : {'.', '/'}, optional Separator placed between the dimensions of a chunk. @@ -1291,7 +1291,7 @@ class TempStore(DirectoryStore): normalize_keys : bool, optional If True, all store keys will be normalized to use lower case characters (e.g. 'foo' and 'FOO' will be treated as equivalent). This can be - useful to avoid potential discrepancies between case-senstive and + useful to avoid potential discrepancies between case-sensitive and case-insensitive file system. Default value is False. dimension_separator : {'.', '/'}, optional Separator placed between the dimensions of a chunk. @@ -1321,7 +1321,7 @@ class NestedDirectoryStore(DirectoryStore): normalize_keys : bool, optional If True, all store keys will be normalized to use lower case characters (e.g. 'foo' and 'FOO' will be treated as equivalent). This can be - useful to avoid potential discrepancies between case-senstive and + useful to avoid potential discrepancies between case-sensitive and case-insensitive file system. Default value is False. dimension_separator : {'/'}, optional Separator placed between the dimensions of a chunk. diff --git a/zarr/tests/test_dim_separator.py b/zarr/tests/test_dim_separator.py index f439a4bd0c..c117ee9867 100644 --- a/zarr/tests/test_dim_separator.py +++ b/zarr/tests/test_dim_separator.py @@ -93,7 +93,7 @@ def verify(array, expect_failure=False): def test_open(dataset): """ - Use zarr.open to open the dataset fixture. Legacy nested datatsets + Use zarr.open to open the dataset fixture. Legacy nested datasets without the dimension_separator metadata are not expected to be openable. """ @@ -104,7 +104,7 @@ def test_open(dataset): @needs_fsspec def test_fsstore(dataset): """ - Use FSStore to open the dataset fixture. Legacy nested datatsets + Use FSStore to open the dataset fixture. Legacy nested datasets without the dimension_separator metadata are not expected to be openable. """ @@ -114,7 +114,7 @@ def test_fsstore(dataset): def test_directory(dataset): """ - Use DirectoryStore to open the dataset fixture. Legacy nested datatsets + Use DirectoryStore to open the dataset fixture. Legacy nested datasets without the dimension_separator metadata are not expected to be openable. 
""" diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 0b1b00eb05..81a4c9b7b1 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -751,7 +751,7 @@ def test_set_invalid_content(self): def setdel_hierarchy_checks(store): # these tests are for stores that are aware of hierarchy levels; this - # behaviour is not stricly required by Zarr but these tests are included + # behaviour is not strictly required by Zarr but these tests are included # to define behaviour of MemoryStore and DirectoryStore classes # check __setitem__ and __delitem__ blocked by leaf @@ -903,7 +903,7 @@ def test_listing_keys_no_slash(self): def mock_walker_no_slash(_path): yield from [ - # no trainling slash in first key + # no trailing slash in first key ('root_with_no_slash', ['d1', 'g1'], ['.zgroup']), ('root_with_no_slash/d1', [], ['.zarray']), ('root_with_no_slash/g1', [], ['.zgroup']) @@ -2149,7 +2149,7 @@ class TestConsolidatedMetadataStore: def test_bad_format(self): - # setup store with consolidated metdata + # setup store with consolidated metadata store = dict() consolidated = { # bad format version @@ -2163,7 +2163,7 @@ def test_bad_format(self): def test_read_write(self): - # setup store with consolidated metdata + # setup store with consolidated metadata store = dict() consolidated = { 'zarr_consolidated_format': 1, From 9d65b85fa5737a650caa8b19ec54b547cf4bafcf Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 08:41:36 +0100 Subject: [PATCH 0044/1078] Codespell configuration (#882) Add configuration to a newly created setup.cfg file. --- setup.cfg | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000000..28e25ed827 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,3 @@ +[codespell] +skip = ./.git +ignore-words-list = ba, ihs, kake, nd, noe, nwo, te From 862a44c8f02d6be4afa7bc68e8db967c05e2b7cd Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 08:45:44 +0100 Subject: [PATCH 0045/1078] Unnecessary comprehension (#883) --- zarr/core.py | 2 +- zarr/indexing.py | 10 +++++----- zarr/tests/test_core.py | 2 +- zarr/tests/test_util.py | 2 +- zarr/util.py | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/zarr/core.py b/zarr/core.py index 0bbf535b30..9126efa69b 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2070,7 +2070,7 @@ def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): if self._compressor: # only decode requested items if ( - all([x is not None for x in [start, nitems]]) + all(x is not None for x in [start, nitems]) and self._compressor.codec_id == "blosc" ) and hasattr(self._compressor, "decode_partial"): chunk = self._compressor.decode_partial(cdata, start, nitems) diff --git a/zarr/indexing.py b/zarr/indexing.py index 2e9f7c8c03..01406d1cfb 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -308,15 +308,15 @@ def is_positive_slice(s): def is_contiguous_selection(selection): selection = ensure_tuple(selection) - return all([ + return all( (is_integer_array(s) or is_contiguous_slice(s) or s == Ellipsis) for s in selection - ]) + ) def is_basic_selection(selection): selection = ensure_tuple(selection) - return all([is_integer(s) or is_positive_slice(s) for s in selection]) + return all(is_integer(s) or is_positive_slice(s) for s in selection) # noinspection 
PyProtectedMember @@ -671,8 +671,8 @@ def __setitem__(self, selection, value): def is_coordinate_selection(selection, array): return ( (len(selection) == len(array._shape)) and - all([is_integer(dim_sel) or is_integer_array(dim_sel) - for dim_sel in selection]) + all(is_integer(dim_sel) or is_integer_array(dim_sel) + for dim_sel in selection) ) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 34f30d5abc..6870559e0b 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -132,7 +132,7 @@ def test_store_has_bytes_values(self): z[:] = np.random.random(z.shape) # Check in-memory array only contains `bytes` - assert all([isinstance(v, bytes) for v in z.chunk_store.values()]) + assert all(isinstance(v, bytes) for v in z.chunk_store.values()) z.store.close() diff --git a/zarr/tests/test_util.py b/zarr/tests/test_util.py index a65b26bae8..efe8e66341 100644 --- a/zarr/tests/test_util.py +++ b/zarr/tests/test_util.py @@ -146,7 +146,7 @@ def test_guess_chunks(): assert isinstance(chunks, tuple) assert len(chunks) == len(shape) # doesn't make any sense to allow chunks to have zero length dimension - assert all([0 < c <= max(s, 1) for c, s in zip(chunks, shape)]) + assert all(0 < c <= max(s, 1) for c, s in zip(chunks, shape)) # ludicrous itemsize chunks = guess_chunks((1000000,), 40000000000) diff --git a/zarr/util.py b/zarr/util.py index ae2ad19b28..cd36ad9765 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -330,7 +330,7 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str: # don't allow path segments with just '.' or '..' segments = path.split('/') - if any([s in {'.', '..'} for s in segments]): + if any(s in {'.', '..'} for s in segments): raise ValueError("path containing '.' or '..' segment not allowed") else: From edced8eda4930672bfe6365390e9f209af3089eb Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 09:02:12 +0100 Subject: [PATCH 0046/1078] In Python 3, all classes implicitly inherit from object (#886) No need to inherit explicitly.
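In Python 3 the two class statements below are equivalent, which is why the explicit base can be dropped throughout the code base; a quick illustration::

    class WithBase(object):  # Python 2 style; the base is redundant in Python 3
        pass

    class WithoutBase:  # implicitly inherits from object
        pass

    # both have exactly the same method resolution order
    assert WithBase.__mro__[1:] == WithoutBase.__mro__[1:] == (object,)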
--- zarr/convenience.py | 2 +- zarr/indexing.py | 20 ++++++++++---------- zarr/sync.py | 4 ++-- zarr/tests/test_core.py | 2 +- zarr/tests/test_creation.py | 4 ++-- zarr/tests/test_hierarchy.py | 2 +- zarr/tests/test_storage.py | 2 +- zarr/tests/test_sync.py | 4 ++-- zarr/util.py | 8 ++++---- 9 files changed, 24 insertions(+), 24 deletions(-) diff --git a/zarr/convenience.py b/zarr/convenience.py index 18b59a77b2..60fa5fe176 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -428,7 +428,7 @@ def tree(grp, expand=False, level=None): return TreeViewer(grp, expand=expand, level=level) -class _LogWriter(object): +class _LogWriter: def __init__(self, log): self.log_func = None diff --git a/zarr/indexing.py b/zarr/indexing.py index 01406d1cfb..b226198aab 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -132,7 +132,7 @@ def normalize_integer_selection(dim_sel, dim_len): """ -class IntDimIndexer(object): +class IntDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): @@ -157,7 +157,7 @@ def ceildiv(a, b): return math.ceil(a / b) -class SliceDimIndexer(object): +class SliceDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): @@ -320,7 +320,7 @@ def is_basic_selection(selection): # noinspection PyProtectedMember -class BasicIndexer(object): +class BasicIndexer: def __init__(self, selection, array): @@ -361,7 +361,7 @@ def __iter__(self): yield ChunkProjection(chunk_coords, chunk_selection, out_selection) -class BoolArrayDimIndexer(object): +class BoolArrayDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): @@ -451,7 +451,7 @@ def boundscheck_indices(x, dim_len): raise BoundsCheckError(dim_len) -class IntArrayDimIndexer(object): +class IntArrayDimIndexer: """Integer array selection against a single dimension.""" def __init__(self, dim_sel, dim_len, dim_chunk_len, wraparound=True, boundscheck=True, @@ -579,7 +579,7 @@ def oindex_set(a, selection, value): # noinspection PyProtectedMember -class OrthogonalIndexer(object): +class OrthogonalIndexer: def __init__(self, selection, array): @@ -649,7 +649,7 @@ def __iter__(self): yield ChunkProjection(chunk_coords, chunk_selection, out_selection) -class OIndex(object): +class OIndex: def __init__(self, array): self.array = array @@ -686,7 +686,7 @@ def is_mask_selection(selection, array): # noinspection PyProtectedMember -class CoordinateIndexer(object): +class CoordinateIndexer: def __init__(self, selection, array): @@ -805,7 +805,7 @@ def __init__(self, selection, array): super().__init__(selection, array) -class VIndex(object): +class VIndex: def __init__(self, array): self.array = array @@ -905,7 +905,7 @@ def make_slice_selection(selection): return ls -class PartialChunkIterator(object): +class PartialChunkIterator: """Iterator to retrieve the specific coordinates of requested data from within a compressed chunk. diff --git a/zarr/sync.py b/zarr/sync.py index fa5d811a1e..a0938b30d0 100644 --- a/zarr/sync.py +++ b/zarr/sync.py @@ -5,7 +5,7 @@ import fasteners -class ThreadSynchronizer(object): +class ThreadSynchronizer: """Provides synchronization using thread locks.""" def __init__(self): @@ -24,7 +24,7 @@ def __setstate__(self, *args): self.__init__() -class ProcessSynchronizer(object): +class ProcessSynchronizer: """Provides synchronization using file locks via the `fasteners `_ package. 
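The synchronizer classes touched in the hunk above are typically handed to array creation; a minimal sketch of lock-protected writes, assuming default in-memory storage::

    import zarr
    from zarr.sync import ThreadSynchronizer

    synchronizer = ThreadSynchronizer()
    z = zarr.zeros((100,), chunks=(10,), dtype="i4",
                   synchronizer=synchronizer)
    z[:] = 42  # concurrent writers would take per-chunk thread locks here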
diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 6870559e0b..d457d50e3f 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -2423,7 +2423,7 @@ def test_structured_array_contain_object(self): # custom store, does not support getsize() -class CustomMapping(object): +class CustomMapping: def __init__(self): self.inner = KVStore(dict()) diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index 117bc338b6..0ec551ba4e 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -20,7 +20,7 @@ # something bcolz-like -class MockBcolzArray(object): +class MockBcolzArray: def __init__(self, data, chunklen): self.data = data @@ -34,7 +34,7 @@ def __getitem__(self, item): # something h5py-like -class MockH5pyDataset(object): +class MockH5pyDataset: def __init__(self, data, chunks): self.data = data diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index 1b4f96a73f..2830be8c38 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -128,7 +128,7 @@ def test_create_group(self): assert '/a/b/c' == g5.name # test non-str keys - class Foo(object): + class Foo: def __init__(self, s): self.s = s diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 81a4c9b7b1..510a38d3eb 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -94,7 +94,7 @@ def test_deprecated_listdir_nosotre(): listdir(store) -class StoreTests(object): +class StoreTests: """Abstract store tests.""" def create_store(self, **kwargs): # pragma: no cover diff --git a/zarr/tests/test_sync.py b/zarr/tests/test_sync.py index 700c23eb45..69fc0d7708 100644 --- a/zarr/tests/test_sync.py +++ b/zarr/tests/test_sync.py @@ -57,7 +57,7 @@ def _set_arange(arg): return i -class MixinArraySyncTests(object): +class MixinArraySyncTests: def test_parallel_setitem(self): n = 100 @@ -197,7 +197,7 @@ def _require_group(arg): return h.name -class MixinGroupSyncTests(object): +class MixinGroupSyncTests: def test_parallel_create_group(self): diff --git a/zarr/util.py b/zarr/util.py index cd36ad9765..04d350a68d 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -370,7 +370,7 @@ def info_html_report(items) -> str: return report -class InfoReporter(object): +class InfoReporter: def __init__(self, obj): self.obj = obj @@ -384,7 +384,7 @@ def _repr_html_(self): return info_html_report(items) -class TreeNode(object): +class TreeNode: def __init__(self, obj, depth=0, level=None): self.obj = obj @@ -468,7 +468,7 @@ def tree_widget(group, expand, level): return result -class TreeViewer(object): +class TreeViewer: def __init__(self, group, expand=False, level=None): @@ -541,7 +541,7 @@ def is_valid_python_name(name): return name.isidentifier() and not iskeyword(name) -class NoLock(object): +class NoLock: """A lock that doesn't lock.""" def __enter__(self): From 5f3db11b406b02769aa742a035ac63ed00ae81e4 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 09:06:20 +0100 Subject: [PATCH 0047/1078] Useless `return` (#884) --- zarr/core.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/zarr/core.py b/zarr/core.py index 9126efa69b..6f6b468e3b 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -1958,7 +1958,6 @@ def _chunk_delitems(self, ckeys): # that will trigger this condition, but it's possible that they # will be developed in the future. 
tuple(map(self._chunk_delitem, ckeys)) - return None def _chunk_delitem(self, ckey): """ @@ -1966,9 +1965,8 @@ def _chunk_delitem(self, ckey): """ try: del self.chunk_store[ckey] - return except KeyError: - return + pass def _chunk_setitem(self, chunk_coords, chunk_selection, value, fields=None): """Replace part or whole of a chunk. From 65eba133c70d3c20e45eb8b61e516e614bd44af2 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 09:11:06 +0100 Subject: [PATCH 0048/1078] Decorate a few methods with `@staticmethod` (#885) I have not changed the multiple occurrences under zarr/tests/test_*.py. --- zarr/_storage/absstore.py | 3 ++- zarr/n5.py | 9 ++++++--- zarr/storage.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index 01bfbd5039..f23b406e0b 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -76,7 +76,8 @@ def __init__(self, container=None, prefix='', account_name=None, account_key=Non self._account_name = account_name self._account_key = account_key - def _warn_deprecated(self, property_): + @staticmethod + def _warn_deprecated(property_): msg = ("The {} property is deprecated and will be removed in a future " "version. Get the property from 'ABSStore.client' instead.") warnings.warn(msg.format(property_), FutureWarning, stacklevel=3) diff --git a/zarr/n5.py b/zarr/n5.py index 242de1ddf8..2f98c2f963 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -343,7 +343,8 @@ def __init__(self, *args, **kwargs): dimension_separator = "." super().__init__(*args, dimension_separator=dimension_separator, **kwargs) - def _swap_separator(self, key): + @staticmethod + def _swap_separator(key): segments = list(key.split('/')) if segments: last_segment = segments[-1] @@ -898,7 +899,8 @@ def decode(self, chunk, out=None): return chunk - def _create_header(self, chunk): + @staticmethod + def _create_header(chunk): mode = struct.pack('>H', 0) num_dims = struct.pack('>H', len(chunk.shape)) @@ -909,7 +911,8 @@ def _create_header(self, chunk): return mode + num_dims + shape - def _read_header(self, chunk): + @staticmethod + def _read_header(chunk): num_dims = struct.unpack('>H', chunk[2:4])[0] shape = tuple( diff --git a/zarr/storage.py b/zarr/storage.py index ff9e7ecdd7..2c76938dc8 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -851,7 +851,8 @@ def __init__(self, path, normalize_keys=False, dimension_separator=None): def _normalize_key(self, key): return key.lower() if self.normalize_keys else key - def _fromfile(self, fn): + @staticmethod + def _fromfile(fn): """ Read data from a file Parameters ---------- ... with open(fn, 'rb') as f: return f.read() - def _tofile(self, a, fn): + @staticmethod + def _tofile(a, fn): """ Write data to a file Parameters From d65c0f27058855b97ddce66a7e50da191facec7f Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:36:37 +0100 Subject: [PATCH 0049/1078] Remove unnecessary literal (#891) --- zarr/tests/test_storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 510a38d3eb..da1b76e9c7 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -986,7 +986,7 @@ def test_deep_ndim(self): chunks=(2, 2, 2), dtype="i8") baz[:] = 1 - assert set(store.listdir()) == set([".zgroup", "bar"])
assert set(store.listdir()) == {".zgroup", "bar"} assert foo["bar"]["baz"][(0, 0, 0)] == 1 def test_not_fsspec(self): From 8ea708b38c8a3f298331baf805bbfceda567c22c Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:40:12 +0100 Subject: [PATCH 0050/1078] use an if `expression` instead of `and`/`or` (#888) --- zarr/storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zarr/storage.py b/zarr/storage.py index 2c76938dc8..b5da03701c 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -997,8 +997,8 @@ def dir_path(self, path=None): return dir_path def listdir(self, path=None): - return self._dimension_separator == "/" and \ - self._nested_listdir(path) or self._flat_listdir(path) + return self._nested_listdir(path) if self._dimension_separator == "/" else \ + self._flat_listdir(path) def _flat_listdir(self, path=None): dir_path = self.dir_path(path) From e6483f926f322496660847988f27e326bd54cea1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 08:36:40 +0100 Subject: [PATCH 0051/1078] Bump fsspec[s3] from 2021.11.0 to 2021.11.1 (#892) Bumps [fsspec[s3]](https://github.com/fsspec/filesystem_spec) from 2021.11.0 to 2021.11.1. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2021.11.0...2021.11.1) --- updated-dependencies: - dependency-name: fsspec[s3] dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 520f772d97..1ff4c2b052 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,5 +19,5 @@ pytest-cov==3.0.0 pytest-doctestplus==0.11.1 pytest-timeout==2.0.1 h5py==3.6.0 -fsspec[s3]==2021.11.0 +fsspec[s3]==2021.11.1 moto[server]>=1.3.14 From 70bd39419b20444087a5fc78c051b895f50c57a4 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 2 Dec 2021 09:50:06 +0100 Subject: [PATCH 0052/1078] Unnecessary `None` provided as default (#900) --- zarr/convenience.py | 4 ++-- zarr/creation.py | 4 ++-- zarr/tests/test_core.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/zarr/convenience.py b/zarr/convenience.py index 60fa5fe176..20afb496b7 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -75,7 +75,7 @@ def open(store: StoreLike = None, mode: str = "a", **kwargs): """ - path = kwargs.get('path', None) + path = kwargs.get('path') # handle polymorphic store arg clobber = mode == 'w' # we pass storage options explicitly, since normalize_store_arg might construct @@ -1179,7 +1179,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** from .storage import ConsolidatedMetadataStore # normalize parameters - store = normalize_store_arg(store, storage_options=kwargs.get("storage_options", None)) + store = normalize_store_arg(store, storage_options=kwargs.get("storage_options")) if mode not in {'r', 'r+'}: raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}" .format(mode)) diff --git a/zarr/creation.py b/zarr/creation.py index aa1a4ac703..64c5666adb 100644 --- 
a/zarr/creation.py +++ b/zarr/creation.py @@ -339,7 +339,7 @@ def array(data, **kwargs): data = np.asanyarray(data) # setup dtype - kw_dtype = kwargs.get('dtype', None) + kw_dtype = kwargs.get('dtype') if kw_dtype is None: kwargs['dtype'] = data.dtype else: @@ -348,7 +348,7 @@ def array(data, **kwargs): # setup shape and chunks data_shape, data_chunks = _get_shape_chunks(data) kwargs['shape'] = data_shape - kw_chunks = kwargs.get('chunks', None) + kw_chunks = kwargs.get('chunks') if kw_chunks is None: kwargs['chunks'] = data_chunks else: diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index d457d50e3f..7423132887 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -2305,7 +2305,7 @@ class TestArrayWithFilters(TestArray): @staticmethod def create_array(read_only=False, **kwargs): store = KVStore(dict()) - dtype = kwargs.get('dtype', None) + dtype = kwargs.get('dtype') filters = [ Delta(dtype=dtype), FixedScaleOffset(dtype=dtype, scale=1, offset=0), From f0677c2e051cad672e781c6bf63b5edfe57aaca4 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 2 Dec 2021 09:50:21 +0100 Subject: [PATCH 0053/1078] Unnecessary comprehension (#899) --- zarr/indexing.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/zarr/indexing.py b/zarr/indexing.py index b226198aab..3f0dff42ef 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -961,10 +961,8 @@ def __init__(self, selection, arr_shape): # any selection can not be out of the range of the chunk selection_shape = np.empty(self.arr_shape)[tuple(selection)].shape if any( - [ - selection_dim < 0 or selection_dim > arr_dim - for selection_dim, arr_dim in zip(selection_shape, self.arr_shape) - ] + selection_dim < 0 or selection_dim > arr_dim + for selection_dim, arr_dim in zip(selection_shape, self.arr_shape) ): raise IndexError( "a selection index is out of range for the dimension" From dc3f839a3f9bbb932a0d464d06a6da12bf191741 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 9 Dec 2021 22:29:48 -0800 Subject: [PATCH 0054/1078] Doctest seem to be stricter now, updating tostring() to tobytes() (#907) Co-authored-by: Matthias Bussonnier --- docs/tutorial.rst | 6 +++--- zarr/storage.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 693f389f09..a292892ebd 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -814,7 +814,7 @@ Here is an example using S3Map to read an array created previously:: array([b'H', b'e', b'l', b'l', b'o', b' ', b'f', b'r', b'o', b'm', b' ', b't', b'h', b'e', b' ', b'c', b'l', b'o', b'u', b'd', b'!'], dtype='|S1') - >>> z[:].tostring() + >>> z[:].tobytes() b'Hello from the cloud!' Zarr now also has a builtin storage backend for Azure Blob Storage. @@ -855,11 +855,11 @@ store. E.g.:: >>> z = root['foo/bar/baz'] >>> from timeit import timeit >>> # first data access is relatively slow, retrieved from store - ... timeit('print(z[:].tostring())', number=1, globals=globals()) # doctest: +SKIP + ... timeit('print(z[:].tobytes())', number=1, globals=globals()) # doctest: +SKIP b'Hello from the cloud!' 0.1081731989979744 >>> # second data access is faster, uses cache - ... timeit('print(z[:].tostring())', number=1, globals=globals()) # doctest: +SKIP + ... timeit('print(z[:].tobytes())', number=1, globals=globals()) # doctest: +SKIP b'Hello from the cloud!' 
0.0009490990014455747 diff --git a/zarr/storage.py b/zarr/storage.py index b5da03701c..fad0000e3a 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -2100,11 +2100,11 @@ class LRUStoreCache(Store): >>> z = root['foo/bar/baz'] # doctest: +REMOTE_DATA >>> from timeit import timeit >>> # first data access is relatively slow, retrieved from store - ... timeit('print(z[:].tostring())', number=1, globals=globals()) # doctest: +SKIP + ... timeit('print(z[:].tobytes())', number=1, globals=globals()) # doctest: +SKIP b'Hello from the cloud!' 0.1081731989979744 >>> # second data access is faster, uses cache - ... timeit('print(z[:].tostring())', number=1, globals=globals()) # doctest: +SKIP + ... timeit('print(z[:].tobytes())', number=1, globals=globals()) # doctest: +SKIP b'Hello from the cloud!' 0.0009490990014455747 From 1cbe3fbc84125b71f02ebfa6e8f686db756b0385 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Dec 2021 07:45:58 +0100 Subject: [PATCH 0055/1078] Bump pytest-doctestplus from 0.11.1 to 0.11.2 (#908) Bumps [pytest-doctestplus](https://github.com/astropy/pytest-doctestplus) from 0.11.1 to 0.11.2. - [Release notes](https://github.com/astropy/pytest-doctestplus/releases) - [Changelog](https://github.com/astropy/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/astropy/pytest-doctestplus/compare/v0.11.1...v0.11.2) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 1ff4c2b052..34e1f954bb 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ tox==3.24.4 coverage flake8==4.0.1 pytest-cov==3.0.0 -pytest-doctestplus==0.11.1 +pytest-doctestplus==0.11.2 pytest-timeout==2.0.1 h5py==3.6.0 fsspec[s3]==2021.11.1 From af0c36b2a612579db6d4ecc05388d9dcb2b3b1c2 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 10 Dec 2021 16:59:16 +0100 Subject: [PATCH 0056/1078] Unguarded next inside generator (#889) * Unguarded next inside generator Remove _fast_keys() and rewrite it in keys() with a simple loop. Might be faster, certainly simpler. * Update zarr/storage.py Co-authored-by: jakirkham * Make it faster by moving `if` out of the loop After all the name of the function is _keys_fast()! Co-authored-by: jakirkham --- zarr/storage.py | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/zarr/storage.py b/zarr/storage.py index fad0000e3a..1dc2cf32b3 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -965,23 +965,15 @@ def keys(self): @staticmethod def _keys_fast(path, walker=os.walk): - """ - - Faster logic on platform where the separator is `/` and using - `os.walk()` to decrease the number of stats.call. 
- - """ - it = iter(walker(path)) - d0, dirnames, filenames = next(it) - if d0.endswith('/'): - root_len = len(d0) - else: - root_len = len(d0)+1 - for f in filenames: - yield f - for dirpath, _, filenames in it: - for f in filenames: - yield dirpath[root_len:].replace('\\', '/')+'/'+f + for dirpath, _, filenames in walker(path): + dirpath = os.path.relpath(dirpath, path) + if dirpath == os.curdir: + for f in filenames: + yield f + else: + dirpath = dirpath.replace("\\", "/") + for f in filenames: + yield "/".join((dirpath, f)) def __iter__(self): return self.keys() From de7858ab9d70b868cf58eaf14a9c4eab16ecb263 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Dec 2021 11:48:48 +0100 Subject: [PATCH 0057/1078] Bump pytest-timeout from 2.0.1 to 2.0.2 (#910) Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 2.0.1 to 2.0.2. - [Release notes](https://github.com/pytest-dev/pytest-timeout/releases) - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/2.0.1...2.0.2) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 34e1f954bb..da300495d6 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.11.2 -pytest-timeout==2.0.1 +pytest-timeout==2.0.2 h5py==3.6.0 fsspec[s3]==2021.11.1 moto[server]>=1.3.14 From 4fb0f7e7f452fd436062d39ce67bef6ccd336420 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 17 Dec 2021 18:06:02 +0100 Subject: [PATCH 0058/1078] Add LGTM.com / DeepSource.io configuration files (#909) * Add LGTM.com configuration file The YAML configuration file for the LGTM static analysis tool can be either lgtm.yml or .lgtm.yml: https://help.semmle.com/lgtm-enterprise/user/help/lgtm.yml-configuration-file.html There is no need to integrate the LGTM tool in CI, as LGTM appears to be running on all repositories it has been run on once. The results currently appear here: https://lgtm.com/projects/g/zarr-developers/zarr-python * Add DeepSource.io configuration file The TOML configuration file is .deepsource.toml: https://deepsource.io/docs/concepts/#deepsourcetoml-file DeepSource.io analysis must be enabled by a repository owner. It doesn't look like it is possible to run DeepSource.io on each PR, rather the monitored branch is analysed periodically, after commits are pushed. 
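The ``_keys_fast`` rewrite above (#889) walks the directory tree once and always yields '/'-separated store keys. A minimal standalone sketch of the same logic follows; the ``keys_fast`` helper name and the sample layout are illustrative and not part of the patch::

    import os
    import tempfile

    def keys_fast(path, walker=os.walk):
        # One os.walk() pass; keys are relative to ``path`` and
        # '/'-separated even on Windows.
        for dirpath, _, filenames in walker(path):
            dirpath = os.path.relpath(dirpath, path)
            if dirpath == os.curdir:
                for f in filenames:
                    yield f
            else:
                dirpath = dirpath.replace("\\", "/")
                for f in filenames:
                    yield "/".join((dirpath, f))

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, "foo", "bar"))
    open(os.path.join(root, "foo", "bar", "0.0"), "w").close()
    print(sorted(keys_fast(root)))  # ['foo/bar/0.0']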
--- .deepsource.toml | 10 ++++++++++ .lgtm.yml | 7 +++++++ 2 files changed, 17 insertions(+) create mode 100644 .deepsource.toml create mode 100644 .lgtm.yml diff --git a/.deepsource.toml b/.deepsource.toml new file mode 100644 index 0000000000..5143b2739e --- /dev/null +++ b/.deepsource.toml @@ -0,0 +1,10 @@ +version = 1 + +test_patterns = ["zarr/tests/test_*.py"] + +[[analyzers]] +name = "python" +enabled = true + + [analyzers.meta] + runtime_version = "3.x.x" diff --git a/.lgtm.yml b/.lgtm.yml new file mode 100644 index 0000000000..35a0c32ef2 --- /dev/null +++ b/.lgtm.yml @@ -0,0 +1,7 @@ +# Config for LGTM.com static code analysis +# https://lgtm.com/projects/g/zarr-developers/zarr-python + +extraction: + python: + python_setup: + version: 3 From 1711b160251a2f2ca74cf238bc86ef45f0c4c08c Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 21 Dec 2021 11:30:27 +0100 Subject: [PATCH 0059/1078] Drop shortcut `fsspec[s3]` for dependency (#920) Use of the shortcut, rather than listing `fsspec` and `s3fs` separately, leads to an old version of s3fs being installed. (The benefit of the shortcut is that it prevents dependabot PRs from failing.) see: * https://github.com/fsspec/s3fs/issues/528 * https://github.com/zarr-developers/zarr-python/pull/914 --- requirements_dev_optional.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index da300495d6..59b9ae035b 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,5 +19,6 @@ pytest-cov==3.0.0 pytest-doctestplus==0.11.2 pytest-timeout==2.0.2 h5py==3.6.0 -fsspec[s3]==2021.11.1 +fsspec==2021.11.1 +s3fs==2021.11.1 moto[server]>=1.3.14 From dd8fa23c342c118aea7c0871e2219698130eac03 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 21 Dec 2021 12:29:58 +0100 Subject: [PATCH 0060/1078] Proper C-style formatting for integer (#913) Variable `port` is initialized as an integer, so it should be formatted with `%d`. Co-authored-by: Josh Moore --- zarr/tests/test_storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index da1b76e9c7..4bd38755bc 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1151,8 +1151,8 @@ def s3(request): pytest.importorskip("moto") port = 5555 - endpoint_uri = 'http://127.0.0.1:%s/' % port - proc = subprocess.Popen(shlex.split("moto_server s3 -p %s" % port), + endpoint_uri = 'http://127.0.0.1:%d/' % port + proc = subprocess.Popen(shlex.split("moto_server s3 -p %d" % port), stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL) timeout = 5 From 5f7e2b6508da8732b1fcd832e34e223f9e8fdf77 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Dec 2021 12:48:13 +0100 Subject: [PATCH 0061/1078] Bump numpy from 1.21.4 to 1.21.5 (#914) Bumps [numpy](https://github.com/numpy/numpy) from 1.21.4 to 1.21.5. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.21.4...v1.21.5) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ...
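The ``%s`` to ``%d`` change in #913 above is easy to demonstrate in isolation; this sketch is illustrative and not part of the patch::

    port = 5555
    # '%s' stringifies anything, which can silently mask type mistakes;
    # '%d' documents and enforces that an integer is expected.
    print('http://127.0.0.1:%d/' % port)   # http://127.0.0.1:5555/
    print("moto_server s3 -p %d" % port)   # moto_server s3 -p 5555
    # A non-integer now fails loudly instead of formatting quietly:
    # 'port: %d' % '5555'  ->  TypeError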
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 30c31237f3..b0d8c6eab8 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.21.4 +numpy==1.21.5 From b80c0c473277375f0013cb60d61d216c069377a3 Mon Sep 17 00:00:00 2001 From: Ray Bell Date: Tue, 21 Dec 2021 12:12:31 -0500 Subject: [PATCH 0062/1078] DOC: update create dev env (#921) --- docs/contributing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index d0019cdb60..c1401f9381 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -90,7 +90,7 @@ you have cloned the Zarr source code and your current working directory is the r the repository, you can do something like the following:: $ mkdir -p ~/pyenv/zarr-dev - $ virtualenv --no-site-packages --python=/usr/bin/python3.8 ~/pyenv/zarr-dev + $ python -m venv ~/pyenv/zarr-dev $ source ~/pyenv/zarr-dev/bin/activate $ pip install -r requirements_dev_minimal.txt -r requirements_dev_numpy.txt $ pip install -e . From 0972b30fa4d88f6611ce2b31ffba38d03569a0e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Jan 2022 08:59:40 +0100 Subject: [PATCH 0063/1078] Bump redis from 4.0.2 to 4.1.0 (#927) Bumps [redis](https://github.com/redis/redis-py) from 4.0.2 to 4.1.0. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.0.2...v4.1.0) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 59b9ae035b..4da59ee324 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.0.2 +redis==4.1.0 types-redis types-setuptools pymongo==3.12.1 From fbf943f8bdda07f1bfbf3d54e45977552fee00f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Jan 2022 08:59:52 +0100 Subject: [PATCH 0064/1078] Bump tox from 3.24.4 to 3.24.5 (#928) Bumps [tox](https://github.com/tox-dev/tox) from 3.24.4 to 3.24.5. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/master/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/3.24.4...3.24.5) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 4da59ee324..f76a6df1c5 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -12,7 +12,7 @@ types-redis types-setuptools pymongo==3.12.1 # optional test requirements -tox==3.24.4 +tox==3.24.5 coverage flake8==4.0.1 pytest-cov==3.0.0 From a64de4d554087d14431c56169ae29b96290fd615 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Jan 2022 09:00:04 +0100 Subject: [PATCH 0065/1078] Bump lmdb from 1.2.1 to 1.3.0 (#929) Bumps [lmdb](https://github.com/jnwatson/py-lmdb) from 1.2.1 to 1.3.0. - [Release notes](https://github.com/jnwatson/py-lmdb/releases) - [Changelog](https://github.com/jnwatson/py-lmdb/blob/py-lmdb_1.3.0/ChangeLog) - [Commits](https://github.com/jnwatson/py-lmdb/compare/py-lmdb_1.2.1...py-lmdb_1.3.0) --- updated-dependencies: - dependency-name: lmdb dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f76a6df1c5..ec732cd64b 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -1,6 +1,6 @@ # optional library requirements # bsddb3==6.2.6; sys_platform != 'win32' -lmdb==1.2.1; sys_platform != 'win32' +lmdb==1.3.0; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.1 # optional library requirements for services From 2d4802c6c96e2d64b5b6074ffad4ef2cca8bf27e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 08:29:50 +0100 Subject: [PATCH 0066/1078] Bump numpy from 1.21.5 to 1.22.0 (#932) Bumps [numpy](https://github.com/numpy/numpy) from 1.21.5 to 1.22.0. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.21.5...v1.22.0) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index b0d8c6eab8..66bebb955e 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. 
-numpy==1.21.5 +numpy==1.22.0 From 094eee210ac56b8ce85a34065b61e14337d65376 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 6 Jan 2022 02:44:54 -0800 Subject: [PATCH 0067/1078] fix consolidate_metadata with FSStore (#916) --- zarr/convenience.py | 2 +- zarr/tests/test_storage.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/zarr/convenience.py b/zarr/convenience.py index 20afb496b7..4d07968579 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -1117,7 +1117,7 @@ def consolidate_metadata(store: StoreLike, metadata_key=".zmetadata"): open_consolidated """ - store = normalize_store_arg(store) + store = normalize_store_arg(store, clobber=True) def is_zarr_key(key): return (key.endswith('.zarray') or key.endswith('.zgroup') or diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 4bd38755bc..eecf7cd36c 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -19,6 +19,7 @@ import zarr from zarr.codecs import BZ2, AsType, Blosc, Zlib +from zarr.convenience import consolidate_metadata from zarr.errors import MetadataError from zarr.hierarchy import group from zarr.meta import ZARR_FORMAT, decode_array_metadata @@ -1009,6 +1010,10 @@ def test_create(self): assert "data" in os.listdir(path1) assert ".zgroup" in os.listdir(path1) + # consolidated metadata (GH#915) + consolidate_metadata("file://" + path1) + assert ".zmetadata" in os.listdir(path1) + g = zarr.open_group("simplecache::file://" + path1, mode='r', storage_options={"cache_storage": path2, "same_names": True}) From 22ded1d656cb2034180cf8a3f23f82b51f79d8b4 Mon Sep 17 00:00:00 2001 From: orenwatson Date: Thu, 6 Jan 2022 06:51:34 -0500 Subject: [PATCH 0068/1078] Fix bug where the checksum of zipfiles is wrong (#930) * Fix bug where the checksum of zipfiles is wrong This bug is caused by an incorrect length being written to the file: ZipFile takes len() of the passed object to be its length in bytes, but it was being passed an ndarray, whose len() is the number of rows. The fix is to convert to bytes before passing to zipfile.writestr(). * add test and use a view instead of a copy Co-authored-by: jakirkham --- zarr/storage.py | 2 +- zarr/tests/test_storage.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/zarr/storage.py b/zarr/storage.py index 1dc2cf32b3..7f572d35ff 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1551,7 +1551,7 @@ def __getitem__(self, key): def __setitem__(self, key, value): if self.mode == 'r': raise ReadOnlyError() - value = ensure_contiguous_ndarray(value) + value = ensure_contiguous_ndarray(value).view("u1") with self.mutex: # writestr(key, value) writes with default permissions from # zipfile (600) that are too restrictive, build ZipInfo for diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index eecf7cd36c..e85973b524 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1554,6 +1554,13 @@ def test_permissions(self): assert perm == '0o40775' z.close() + def test_store_and_retrieve_ndarray(self): + store = ZipStore('data/store.zip') + x = np.array([[1, 2], [3, 4]]) + store['foo'] = x + y = np.frombuffer(store['foo'], dtype=x.dtype).reshape(x.shape) + assert np.array_equiv(y, x) + From 439a4558d521b6a0c3dd9750ae68904aed123937 Mon Sep 17 00:00:00 2001 From: "Mads R. B.
Kristensen" Date: Wed, 12 Jan 2022 20:37:28 +0100 Subject: [PATCH 0069/1078] minor doc fix (#937) --- zarr/creation.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/zarr/creation.py b/zarr/creation.py index 64c5666adb..d0dad231c4 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -68,7 +68,9 @@ def create(shape, chunks=True, dtype=None, compressor='default', A codec to encode object arrays, only needed if dtype=object. dimension_separator : {'.', '/'}, optional Separator placed between the dimensions of a chunk. + .. versionadded:: 2.8 + write_empty_chunks : bool, optional If True (default), all chunks will be stored regardless of their contents. If False, each chunk is compared to the array's fill From 3c4d1a5ceabedb90bd5865eaf4046d8cb7cae947 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 08:14:16 +0100 Subject: [PATCH 0070/1078] Bump fsspec from 2021.11.1 to 2022.1.0 (#935) * Bump fsspec from 2021.11.1 to 2022.1.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2021.11.1 to 2022.1.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2021.11.1...2022.01.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Also manually bump s3fs Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index ec732cd64b..ab1405beb2 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,6 +19,6 @@ pytest-cov==3.0.0 pytest-doctestplus==0.11.2 pytest-timeout==2.0.2 h5py==3.6.0 -fsspec==2021.11.1 -s3fs==2021.11.1 +fsspec==2022.1.0 +s3fs==2022.1.0 moto[server]>=1.3.14 From 90227efd3400527788b4ddee23afbe6106b1abb3 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Fri, 14 Jan 2022 23:27:34 -0800 Subject: [PATCH 0071/1078] Upgrade MongoDB in test env (#939) * Bump pymongo from 3.12.1 to 4.0.1 Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 3.12.1 to 4.0.1. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/4.0.1/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/3.12.1...4.0.1) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-major ... 
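The ZipStore fix above (#930) can be verified with a short round-trip adapted from the new regression test; the file path here is illustrative::

    import numpy as np
    from zarr.storage import ZipStore

    store = ZipStore('data/example.zip', mode='w')
    x = np.array([[1, 2], [3, 4]])
    # written through a flat 'u1' view, so ZipFile records the true byte length
    store['foo'] = x
    y = np.frombuffer(store['foo'], dtype=x.dtype).reshape(x.shape)
    assert np.array_equiv(y, x)
    store.close()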
Signed-off-by: dependabot[bot] * try bumping mongo service Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-package.yml | 2 +- requirements_dev_optional.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 3d53ac43ad..0c44de8b1f 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -32,7 +32,7 @@ jobs: ports: - 6379:6379 mongodb: - image: mongo:3.4.23 + image: mongo:4.4.11 ports: - 27017:27017 steps: diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index ab1405beb2..6b2d9bf510 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -10,7 +10,7 @@ azure-storage-blob==12.8.1 # pyup: ignore redis==4.1.0 types-redis types-setuptools -pymongo==3.12.1 +pymongo==4.0.1 # optional test requirements tox==3.24.5 coverage From f3e275918d91eb52dbbced613033b58ce2b60807 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jan 2022 09:14:28 +0100 Subject: [PATCH 0072/1078] Bump numpy from 1.22.0 to 1.22.1 (#940) Bumps [numpy](https://github.com/numpy/numpy) from 1.22.0 to 1.22.1. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.22.0...v1.22.1) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 66bebb955e..63c00071bd 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.22.0 +numpy==1.22.1 From d6c0d0dc12525b1ece0962193e6125433f635b31 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 10:40:44 +0100 Subject: [PATCH 0073/1078] Bump setuptools-scm from 6.3.2 to 6.4.0 (#944) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 6.3.2 to 6.4.0. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v6.3.2...v6.4.0) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index a1d50c8247..64a8f8a3bc 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.16.3 numcodecs==0.9.1 msgpack-python==0.5.6 -setuptools-scm==6.3.2 +setuptools-scm==6.4.0 # test requirements pytest==6.2.5 From e8fd1b66b481bafa8e721d478264f8f753e8b0ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 10:40:58 +0100 Subject: [PATCH 0074/1078] Bump fasteners from 0.16.3 to 0.17.2 (#943) Bumps [fasteners](https://github.com/harlowja/fasteners) from 0.16.3 to 0.17.2. - [Release notes](https://github.com/harlowja/fasteners/releases) - [Changelog](https://github.com/harlowja/fasteners/blob/master/CHANGELOG) - [Commits](https://github.com/harlowja/fasteners/compare/0.16.3...0.17.2) --- updated-dependencies: - dependency-name: fasteners dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 64a8f8a3bc..f1c64ebc04 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,6 +1,6 @@ # library requirements asciitree==0.3.3 -fasteners==0.16.3 +fasteners==0.17.2 numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.0 From 076933e48a00cd4ce51f52e446718ca4881d5481 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 10:41:18 +0100 Subject: [PATCH 0075/1078] Bump redis from 4.1.0 to 4.1.1 (#942) Bumps [redis](https://github.com/redis/redis-py) from 4.1.0 to 4.1.1. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.1.0...v4.1.1) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 6b2d9bf510..a0692122eb 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.1.0 +redis==4.1.1 types-redis types-setuptools pymongo==4.0.1 From ea07537e1ea9649578ecafaba93fa8229bf10edd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 08:44:44 +0100 Subject: [PATCH 0076/1078] Bump pytest-timeout from 2.0.2 to 2.1.0 (#945) Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 2.0.2 to 2.1.0. 
- [Release notes](https://github.com/pytest-dev/pytest-timeout/releases) - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/2.0.2...2.1.0) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a0692122eb..b7cfcc2235 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.11.2 -pytest-timeout==2.0.2 +pytest-timeout==2.1.0 h5py==3.6.0 fsspec==2022.1.0 s3fs==2022.1.0 From b62d61c6305affab23b85c264d6b0007c7c5aba9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 08:44:56 +0100 Subject: [PATCH 0077/1078] Bump setuptools-scm from 6.4.0 to 6.4.1 (#946) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 6.4.0 to 6.4.1. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v6.4.0...v6.4.1) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index f1c64ebc04..80615e0f27 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.17.2 numcodecs==0.9.1 msgpack-python==0.5.6 -setuptools-scm==6.4.0 +setuptools-scm==6.4.1 # test requirements pytest==6.2.5 From 21739ad12ba807b28534c814cd6b35d05775a575 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 23:04:37 -0800 Subject: [PATCH 0078/1078] Bump setuptools-scm from 6.4.1 to 6.4.2 (#948) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 6.4.1 to 6.4.2. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v6.4.1...v6.4.2) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 80615e0f27..9f7c8d2869 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.17.2 numcodecs==0.9.1 msgpack-python==0.5.6 -setuptools-scm==6.4.1 +setuptools-scm==6.4.2 # test requirements pytest==6.2.5 From e15001bbe756520df70a43e025537f1eef6e7228 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Jan 2022 09:39:58 +0100 Subject: [PATCH 0079/1078] Bump fasteners from 0.17.2 to 0.17.3 (#949) Bumps [fasteners](https://github.com/harlowja/fasteners) from 0.17.2 to 0.17.3. - [Release notes](https://github.com/harlowja/fasteners/releases) - [Changelog](https://github.com/harlowja/fasteners/blob/master/CHANGELOG) - [Commits](https://github.com/harlowja/fasteners/compare/0.17.2...0.17.3) --- updated-dependencies: - dependency-name: fasteners dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 9f7c8d2869..481e51f8fb 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,6 +1,6 @@ # library requirements asciitree==0.3.3 -fasteners==0.17.2 +fasteners==0.17.3 numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 From 24dae272ab136878dca2559184fe95dbfa2875c2 Mon Sep 17 00:00:00 2001 From: Gregory Lee Date: Fri, 4 Feb 2022 05:01:05 -0500 Subject: [PATCH 0080/1078] add release note regarding introduction of BaseStore and KVStore (#950) --- docs/release.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 630f9de833..d3c9072cf5 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,9 @@ Release notes Unreleased ---------- +This release of Zarr Python introduces a new ``BaseStore`` class that all provided store classes implemented in Zarr Python now inherit from. This is done as part of refactoring to enable future support of the Zarr version 3 spec. Existing third-party stores that are a MutableMapping (e.g. dict) can be converted to a new-style key/value store inheriting from ``BaseStore`` by passing them as the argument to the new ``zarr.storage.KVStore`` class. For backwards compatibility, various higher-level array creation and convenience functions still accept plain Python dicts or other mutable mappings for the ``store`` argument, but will internally convert these to a ``KVStore``. 
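A minimal sketch of the conversion described above, wrapping a plain dict in the new ``zarr.storage.KVStore`` (the array shape and values are illustrative)::

    import zarr
    from zarr.storage import KVStore

    data = {}                  # any MutableMapping works here
    store = KVStore(data)      # new-style store inheriting from BaseStore
    z = zarr.zeros((10, 10), chunks=(5, 5), store=store)
    z[:] = 42
    # the underlying mapping now holds the metadata and chunk keys
    print(sorted(data)[:2])    # ['.zarray', '0.0']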
+ + Enhancements ~~~~~~~~~~~~ From f461eb78fbb88187582cd9123d6ec7622d9abd26 Mon Sep 17 00:00:00 2001 From: Juan Nunez-Iglesias Date: Fri, 4 Feb 2022 21:07:53 +1100 Subject: [PATCH 0081/1078] Set write_empty_chunks to default to False (#853) * Set write_empty_chunks to default to False * Add release entry for write_empty_chunks default * add Empty chunks section to tutorial.rst * add benchmarky example * proper formatting of code block * Fix abstore deprecated strings * Also catch ValueError in all_equal The call to `np.any(array)` in zarr.util.all_equal triggers the following ValueError: ``` > return ufunc.reduce(obj, axis, dtype, out, **passkwargs) E ValueError: invalid literal for int() with base 10: 'baz' ``` Extending the catch block allows test_array_with_categorize_filter to pass, but it's unclear if this points to a deeper issue. * Add --timeout argument to all uses of pytest * Pin fasteners to 0.16.3 (see #952) Co-authored-by: Davis Vann Bennett Co-authored-by: Josh Moore Co-authored-by: jmoore --- .github/workflows/minimal.yml | 4 +- .github/workflows/python-package.yml | 2 +- docs/release.rst | 3 ++ docs/tutorial.rst | 69 ++++++++++++++++++++++++++++ environment.yml | 3 +- requirements_dev_minimal.txt | 2 +- setup.py | 2 +- tox.ini | 1 + windows_conda_dev.txt | 2 +- zarr/_storage/absstore.py | 10 ++++ zarr/core.py | 16 +++---- zarr/creation.py | 35 +++++++------- zarr/util.py | 2 +- 13 files changed, 119 insertions(+), 32 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 90c9a59585..ff68783d33 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -25,11 +25,11 @@ jobs: run: | conda activate minimal python -m pip install . - pytest -svx + pytest -svx --timeout=300 - name: Fixture generation shell: "bash -l {0}" run: | conda activate minimal rm -rf fixture/ - pytest -svx zarr/tests/test_dim_separator.py zarr/tests/test_storage.py + pytest -svx --timeout=300 zarr/tests/test_dim_separator.py zarr/tests/test_storage.py # This simulates fixture-less tests in conda and debian packaging diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 0c44de8b1f..e0d404b1a0 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -73,7 +73,7 @@ jobs: conda activate zarr-env mkdir ~/blob_emulator azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log & - pytest --cov=zarr --cov-config=.coveragerc --doctest-plus --cov-report xml --cov=./ + pytest --cov=zarr --cov-config=.coveragerc --doctest-plus --cov-report xml --cov=./ --timeout=300 - uses: codecov/codecov-action@v1 with: #token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos diff --git a/docs/release.rst b/docs/release.rst index d3c9072cf5..698261fabe 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -12,6 +12,9 @@ This release of Zarr Python introduces a new ``BaseStore`` class that all provid Enhancements ~~~~~~~~~~~~ +* write_empty_chunks defaults to False. + By :user:`Juan Nunez-Iglesias `; :issue:`853`. + * Allow to assign array ``fill_values`` and update metadata accordingly. :issue:`662` * array indexing with [] (getitem and setitem) now supports fancy indexing. 
diff --git a/docs/tutorial.rst b/docs/tutorial.rst index a292892ebd..906d5d9f08 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1302,6 +1302,75 @@ bytes within chunks of an array may improve the compression ratio, depending on the structure of the data, the compression algorithm used, and which compression filters (e.g., byte-shuffle) have been applied. +.. _tutorial_chunks_empty_chunks: + +Empty chunks +~~~~~~~~~~~~ + +As of version 2.11, it is possible to configure how Zarr handles the storage of +chunks that are "empty" (i.e., every element in the chunk is equal to the array's fill value). +When creating an array with ``write_empty_chunks=False`` (the default), +Zarr will check whether a chunk is empty before compression and storage. If a chunk is empty, +then Zarr does not store it, and instead deletes the chunk from storage +if the chunk had been previously stored. + +This optimization prevents storing redundant objects and can speed up reads, but the cost is +added computation during array writes, since the contents of +each chunk must be compared to the fill value, and these advantages are contingent on the content of the array. +If you know that your data will form chunks that are almost always non-empty, then there is no advantage to the optimization described above. +In this case, creating an array with ``write_empty_chunks=True`` will instruct Zarr to write every chunk without checking for emptiness. + +The following example illustrates the effect of the ``write_empty_chunks`` flag on +the time required to write an array with different values.:: + + >>> import zarr + >>> import numpy as np + >>> import time + >>> from tempfile import TemporaryDirectory + >>> def timed_write(write_empty_chunks): + ... """ + ... Measure the time required and number of objects created when writing + ... to a Zarr array with random ints or fill value. + ... """ + ... chunks = (8192,) + ... shape = (chunks[0] * 1024,) + ... data = np.random.randint(0, 255, shape) + ... dtype = 'uint8' + ... + ... with TemporaryDirectory() as store: + ... arr = zarr.open(store, + ... shape=shape, + ... chunks=chunks, + ... dtype=dtype, + ... write_empty_chunks=write_empty_chunks, + ... fill_value=0, + ... mode='w') + ... # initialize all chunks + ... arr[:] = 100 + ... result = [] + ... for value in (data, arr.fill_value): + ... start = time.time() + ... arr[:] = value + ... elapsed = time.time() - start + ... result.append((elapsed, arr.nchunks_initialized)) + ... + ... return result + >>> for write_empty_chunks in (True, False): + ... full, empty = timed_write(write_empty_chunks) + ... print(f'\nwrite_empty_chunks={write_empty_chunks}:\n\tRandom Data: {full[0]:.4f}s, {full[1]} objects stored\n\t Empty Data: {empty[0]:.4f}s, {empty[1]} objects stored\n') + + write_empty_chunks=True: + Random Data: 0.1252s, 1024 objects stored + Empty Data: 0.1060s, 1024 objects stored + + + write_empty_chunks=False: + Random Data: 0.1359s, 1024 objects stored + Empty Data: 0.0301s, 0 objects stored + +In this example, writing random data is slightly slower with ``write_empty_chunks=True``, +but writing empty data is substantially faster and generates far fewer objects in storage. + .. 
_tutorial_rechunking: Changing chunk shapes (rechunking) diff --git a/environment.yml b/environment.yml index b47dd9238b..066319d750 100644 --- a/environment.yml +++ b/environment.yml @@ -8,6 +8,7 @@ dependencies: - pip - pip: - asciitree - - fasteners + - fasteners == 0.16.3 - pytest + - pytest-timeout - setuptools_scm diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 481e51f8fb..0395c8dd79 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,6 +1,6 @@ # library requirements asciitree==0.3.3 -fasteners==0.17.3 +fasteners==0.16.3 numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 diff --git a/setup.py b/setup.py index a68c77a63f..4bc6943c1d 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ dependencies = [ 'asciitree', 'numpy>=1.7', - 'fasteners', + 'fasteners==0.16.3', 'numcodecs>=0.6.4', ] diff --git a/tox.ini b/tox.ini index 9e0212cc5e..3adc147dac 100644 --- a/tox.ini +++ b/tox.ini @@ -10,6 +10,7 @@ envlist = py37-npy{117,latest}, py38, py39, docs install_command = pip install --no-binary=numcodecs {opts} {packages} setenv = PYTHONHASHSEED = 42 + PYTEST_TIMEOUT = {env:PYTEST_TIMEOUT:300} passenv = ZARR_TEST_ABS ZARR_TEST_MONGO diff --git a/windows_conda_dev.txt b/windows_conda_dev.txt index 576674827d..8bdf5fb3da 100644 --- a/windows_conda_dev.txt +++ b/windows_conda_dev.txt @@ -1,5 +1,5 @@ coverage -fasteners +fasteners==0.16.3 flake8 monotonic msgpack-python diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index f23b406e0b..98ac6328b1 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -17,26 +17,36 @@ class ABSStore(Store): ---------- container : string The name of the ABS container to use. + .. deprecated:: Use ``client`` instead. + prefix : string Location of the "directory" to use as the root of the storage hierarchy within the container. + account_name : string The Azure blob storage account name. + .. deprecated:: 2.8.3 Use ``client`` instead. + account_key : string The Azure blob storage account access key. + .. deprecated:: 2.8.3 Use ``client`` instead. + blob_service_kwargs : dictionary Extra arguments to be passed into the azure blob client, for e.g. when using the emulator, pass in blob_service_kwargs={'is_emulated': True}. + .. deprecated:: 2.8.3 Use ``client`` instead. + dimension_separator : {'.', '/'}, optional Separator placed between the dimensions of a chunk. + client : azure.storage.blob.ContainerClient, optional And ``azure.storage.blob.ContainerClient`` to connect with. See `here `_ # noqa diff --git a/zarr/core.py b/zarr/core.py index 6f6b468e3b..e0fe4eb0e9 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -81,13 +81,13 @@ class Array: .. versionadded:: 2.7 write_empty_chunks : bool, optional - If True (default), all chunks will be stored regardless of their - contents. If False, each chunk is compared to the array's fill - value prior to storing. If a chunk is uniformly equal to the fill - value, then that chunk is not be stored, and the store entry for - that chunk's key is deleted. This setting enables sparser storage, - as only chunks with non-fill-value data are stored, at the expense - of overhead associated with checking the data of each chunk. + If True, all chunks will be stored regardless of their contents. If + False (default), each chunk is compared to the array's fill value prior + to storing. If a chunk is uniformly equal to the fill value, then that + chunk is not be stored, and the store entry for that chunk's key is + deleted. 
This setting enables sparser storage, as only chunks with + non-fill-value data are stored, at the expense of overhead associated + with checking the data of each chunk. .. versionadded:: 2.11 @@ -154,7 +154,7 @@ def __init__( cache_metadata=True, cache_attrs=True, partial_decompress=False, - write_empty_chunks=True, + write_empty_chunks=False, ): # N.B., expect at this point store is fully initialized with all # configuration metadata fully specified and normalized diff --git a/zarr/creation.py b/zarr/creation.py index d0dad231c4..7e7adcb157 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -19,7 +19,8 @@ def create(shape, chunks=True, dtype=None, compressor='default', fill_value=0, order='C', store=None, synchronizer=None, overwrite=False, path=None, chunk_store=None, filters=None, cache_metadata=True, cache_attrs=True, read_only=False, - object_codec=None, dimension_separator=None, write_empty_chunks=True, **kwargs): + object_codec=None, dimension_separator=None, + write_empty_chunks=False, **kwargs): """Create an array. Parameters @@ -72,13 +73,14 @@ def create(shape, chunks=True, dtype=None, compressor='default', .. versionadded:: 2.8 write_empty_chunks : bool, optional - If True (default), all chunks will be stored regardless of their - contents. If False, each chunk is compared to the array's fill - value prior to storing. If a chunk is uniformly equal to the fill - value, then that chunk is not be stored, and the store entry for - that chunk's key is deleted. This setting enables sparser storage, - as only chunks with non-fill-value data are stored, at the expense - of overhead associated with checking the data of each chunk. + If True, all chunks will be stored regardless of their contents. If + False (default), each chunk is compared to the array's fill value prior + to storing. If a chunk is uniformly equal to the fill value, then that + chunk is not be stored, and the store entry for that chunk's key is + deleted. This setting enables sparser storage, as only chunks with + non-fill-value data are stored, at the expense of overhead associated + with checking the data of each chunk. + .. versionadded:: 2.11 Returns @@ -389,7 +391,7 @@ def open_array( chunk_store=None, storage_options=None, partial_decompress=False, - write_empty_chunks=True, + write_empty_chunks=False, **kwargs ): """Open an array using file-mode-like semantics. @@ -445,13 +447,14 @@ def open_array( is Blosc, when getting data from the array chunks will be partially read and decompressed when possible. write_empty_chunks : bool, optional - If True (default), all chunks will be stored regardless of their - contents. If False, each chunk is compared to the array's fill - value prior to storing. If a chunk is uniformly equal to the fill - value, then that chunk is not be stored, and the store entry for - that chunk's key is deleted. This setting enables sparser storage, - as only chunks with non-fill-value data are stored, at the expense - of overhead associated with checking the data of each chunk. + If True, all chunks will be stored regardless of their contents. If + False (default), each chunk is compared to the array's fill value prior + to storing. If a chunk is uniformly equal to the fill value, then that + chunk is not be stored, and the store entry for that chunk's key is + deleted. This setting enables sparser storage, as only chunks with + non-fill-value data are stored, at the expense of overhead associated + with checking the data of each chunk. + .. 
versionadded:: 2.11 Returns ------- diff --git a/zarr/util.py b/zarr/util.py index 04d350a68d..9f5f04f525 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -670,7 +670,7 @@ def all_equal(value: Any, array: Any): # optimized to return on the first truthy value in `array`. try: return not np.any(array) - except TypeError: # pragma: no cover + except (TypeError, ValueError): # pragma: no cover pass if np.issubdtype(array.dtype, np.object_): # we have to flatten the result of np.equal to handle outputs like From 1c9e905ac81546fc2e5a80dfdf78a3609830f495 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Feb 2022 12:00:50 +0100 Subject: [PATCH 0082/1078] Bump numpy from 1.22.1 to 1.22.2 (#955) Bumps [numpy](https://github.com/numpy/numpy) from 1.22.1 to 1.22.2. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.22.1...v1.22.2) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 63c00071bd..bf26fa0530 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.22.1 +numpy==1.22.2 From 181ccc5609c7700484d067f44e39951797863ceb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Feb 2022 12:20:52 +0100 Subject: [PATCH 0083/1078] Bump redis from 4.1.1 to 4.1.2 (#953) Bumps [redis](https://github.com/redis/redis-py) from 4.1.1 to 4.1.2. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.1.1...v4.1.2) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... 
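The effect of the ``write_empty_chunks=False`` default documented above can be observed directly; a minimal sketch using an in-memory store::

    import zarr

    z = zarr.zeros((4, 4), chunks=(2, 2), store={})  # write_empty_chunks defaults to False
    z[:] = 0                        # every chunk equals the fill value
    print(z.nchunks_initialized)    # 0 -- no chunk objects are stored
    z[0, 0] = 1
    print(z.nchunks_initialized)    # 1 -- only the non-empty chunk is written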
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index b7cfcc2235..0609af2bca 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.1.1 +redis==4.1.2 types-redis types-setuptools pymongo==4.0.1 From bad4d589437c64e137d0446bab465a819e88ab89 Mon Sep 17 00:00:00 2001 From: Ray Bell Date: Fri, 4 Feb 2022 11:34:18 -0500 Subject: [PATCH 0084/1078] DOC: update pytest line (#923) * DOC: update pytest line * more python -m --- docs/contributing.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index c1401f9381..adadb323dc 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -97,7 +97,7 @@ the repository, you can do something like the following:: To verify that your development environment is working, you can run the unit tests:: - $ pytest -v zarr + $ python -m pytest -v zarr Creating a branch ~~~~~~~~~~~~~~~~~ @@ -144,7 +144,7 @@ spec. The simplest way to run the unit tests is to activate your development environment (see `creating a development environment`_ above) and invoke:: - $ pytest -v zarr + $ python -m pytest -v zarr Some tests require optional dependencies to be installed, otherwise the tests will be skipped. To install all optional dependencies, run:: @@ -154,7 +154,7 @@ the tests will be skipped. To install all optional dependencies, run:: To also run the doctests within docstrings (requires optional dependencies to be installed), run:: - $ pytest -v --doctest-plus zarr + $ python -m pytest -v --doctest-plus zarr To run the doctests within the tutorial and storage spec (requires optional dependencies to be installed), run:: @@ -186,7 +186,7 @@ All code must conform to the PEP8 standard. Regarding line length, lines up to 1 characters are allowed, although please try to keep under 90 wherever possible. Conformance can be checked by running:: - $ flake8 --max-line-length=100 zarr + $ python -m flake8 --max-line-length=100 zarr Test coverage ~~~~~~~~~~~~~ @@ -212,7 +212,7 @@ should run and pass as doctests under Python 3.8. To run doctests, activate your development environment, install optional requirements, and run:: - $ pytest -v --doctest-plus zarr + $ python -m pytest -v --doctest-plus zarr Zarr uses Sphinx for documentation, hosted on readthedocs.org. Documentation is written in the RestructuredText markup language (.rst files) in the ``docs`` folder. From 0da6f5ed74a1a19535ec79885a4678a4216e2154 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 11:15:52 +0100 Subject: [PATCH 0085/1078] Bump pytest from 6.2.5 to 7.0.0 (#957) Bumps [pytest](https://github.com/pytest-dev/pytest) from 6.2.5 to 7.0.0. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/6.2.5...7.0.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 0395c8dd79..9ee19f68d1 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 # test requirements -pytest==6.2.5 +pytest==7.0.0 From 2813231194caa37babf9d7a5c457661a66b6ff9f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 11:44:25 +0100 Subject: [PATCH 0086/1078] Bump fasteners from 0.16.3 to 0.17.3 (#958) * Bump fasteners from 0.16.3 to 0.17.3 Bumps [fasteners](https://github.com/harlowja/fasteners) from 0.16.3 to 0.17.3. - [Release notes](https://github.com/harlowja/fasteners/releases) - [Changelog](https://github.com/harlowja/fasteners/blob/master/CHANGELOG) - [Commits](https://github.com/harlowja/fasteners/compare/0.16.3...0.17.3) --- updated-dependencies: - dependency-name: fasteners dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Remove fasteners pin from setup.py * Remove fasteners pin from windows_conda_dev.txt * Remove fasteners pin from environment.yml Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore Co-authored-by: jmoore --- environment.yml | 2 +- requirements_dev_minimal.txt | 2 +- setup.py | 2 +- windows_conda_dev.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/environment.yml b/environment.yml index 066319d750..f601026d47 100644 --- a/environment.yml +++ b/environment.yml @@ -8,7 +8,7 @@ dependencies: - pip - pip: - asciitree - - fasteners == 0.16.3 + - fasteners - pytest - pytest-timeout - setuptools_scm diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 9ee19f68d1..bc3fd1f2c4 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,6 +1,6 @@ # library requirements asciitree==0.3.3 -fasteners==0.16.3 +fasteners==0.17.3 numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 diff --git a/setup.py b/setup.py index 4bc6943c1d..a68c77a63f 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ dependencies = [ 'asciitree', 'numpy>=1.7', - 'fasteners==0.16.3', + 'fasteners', 'numcodecs>=0.6.4', ] diff --git a/windows_conda_dev.txt b/windows_conda_dev.txt index 8bdf5fb3da..576674827d 100644 --- a/windows_conda_dev.txt +++ b/windows_conda_dev.txt @@ -1,5 +1,5 @@ coverage -fasteners==0.16.3 +fasteners flake8 monotonic msgpack-python From 2200fc71d1447c3d048fcb0fd2e6d677224c4363 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 7 Feb 2022 15:01:47 +0100 Subject: [PATCH 0087/1078] Release notes 2.11.0 (#959) * Release notes 2.11.0 * Rendering improvements for changelog * Remove duplicated entries from 2.10.3 * Apply suggestions from code review Co-authored-by: jakirkham Co-authored-by: jakirkham --- .github/workflows/minimal.yml | 2 +- docs/release.rst | 121 +++++++++++++++++++++++++++++----- 
2 files changed, 107 insertions(+), 16 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index ff68783d33..eb6ebd5d25 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -9,7 +9,7 @@ on: branches: [ master ] jobs: - build: + minimum_build: runs-on: ubuntu-latest steps: diff --git a/docs/release.rst b/docs/release.rst index 698261fabe..16c884efc7 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,28 +6,119 @@ Release notes Unreleased ---------- -This release of Zarr Python introduces a new ``BaseStore`` class that all provided store classes implemented in Zarr Python now inherit from. This is done as part of refactoring to enable future support of the Zarr version 3 spec. Existing third-party stores that are a MutableMapping (e.g. dict) can be converted to a new-style key/value store inheriting from ``BaseStore`` by passing them as the argument to the new ``zarr.storage.KVStore`` class. For backwards compatibility, various higher-level array creation and convenience functions still accept plain Python dicts or other mutable mappings for the ``store`` argument, but will internally convert these to a ``KVStore``. +.. _release_2.11.0: +2.11.0 +------ Enhancements ~~~~~~~~~~~~ -* write_empty_chunks defaults to False. - By :user:`Juan Nunez-Iglesias `; :issue:`853`. +* **Sparse changes with performance impact!** One of the advantages of the Zarr + format is that it is sparse, which means that chunks with no data (more + precisely, with data equal to the fill value, which is usually 0) don't need + to be written to disk at all. They will simply be assumed to be empty at read + time. However, until this release, the Zarr library would write these empty + chunks to disk anyway. This changes in this version: a small performance + penalty at write time leads to significant speedups at read time and in + filesystem operations in the case of sparse arrays. To revert to the old + behavior, pass the argument ``write_empty_chunks=True`` to the array creation + function. By :user:`Juan Nunez-Iglesias `; :issue:`853` and + :user:`Davis Bennett `; :issue:`738`. + +* **Fancy indexing**. Zarr arrays now support NumPy-style fancy indexing with + arrays of integer coordinates. This is equivalent to using zarr.Array.vindex. + Mixing slices and integer arrays is not supported. + By :user:`Juan Nunez-Iglesias `; :issue:`725`. -* Allow to assign array ``fill_values`` and update metadata accordingly. :issue:`662` +* **New base class**. This release of Zarr Python introduces a new + ``BaseStore`` class that all provided store classes implemented in Zarr + Python now inherit from. This is done as part of refactoring to enable future + support of the Zarr version 3 spec. Existing third-party stores that are a + MutableMapping (e.g. dict) can be converted to a new-style key/value store + inheriting from ``BaseStore`` by passing them as the argument to the new + ``zarr.storage.KVStore`` class. For backwards compatibility, various + higher-level array creation and convenience functions still accept plain + Python dicts or other mutable mappings for the ``store`` argument, but will + internally convert these to a ``KVStore``. + By :user:`Greggory Lee `; :issue:`839`, :issue:`789`, and :issue:`950`. -* array indexing with [] (getitem and setitem) now supports fancy indexing. - By :user:`Juan Nunez-Iglesias `; :issue:`725`. +* Allow to assign array ``fill_values`` and update metadata accordingly. + By :user:`Ryan Abernathey `, :issue:`662`. 
+ +* Allow updating array ``fill_values``. + By :user:`Matthias Bussonnier ` :issue:`665`. + +Bug fixes +~~~~~~~~~ + +* Fix bug where the checksum of zipfiles is wrong. + By :user:`Oren Watson ` :issue:`930`. + +* Fix consolidate_metadata with FSStore. + By :user:`Joe Hamman ` :issue:`916`. + +* Fix unguarded next inside generator. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`889`. + +Documentation +~~~~~~~~~~~~~ + +* Update docs on creating a dev env. + By :user:`Ray Bell ` :issue:`921`. + +* Update docs to use ``python -m pytest``. + By :user:`Ray Bell ` :issue:`923`. + +* Fix versionadded tag in zarr.core.Array docstring. + By :user:`Juan Nunez-Iglesias ` :issue:`852`. + +* Doctests seem to be stricter now, updating tostring() to tobytes(). + By :user:`John Kirkham ` :issue:`907`. + +* Minor doc fix. + By :user:`Mads R. B. Kristensen ` :issue:`937`. + +Maintenance +~~~~~~~~~~~ + +* Upgrade MongoDB in test env. + By :user:`Joe Hamman ` :issue:`939`. + +* Pass dimension_separator on fixture generation. + By :user:`Josh Moore ` :issue:`858`. + +* Activate Python 3.9 in GitHub Actions. + By :user:`Josh Moore ` :issue:`859`. + +* Drop shortcut ``fsspec[s3]`` for dependency. + By :user:`Josh Moore ` :issue:`920`. + +* And a swath of code-linting improvements by :user:`Dimitri Papadopoulos Orfanos `: + + - Unnecessary comprehension (:issue:`899`) + + - Unnecessary ``None`` provided as default (:issue:`900`) + + - Use an ``if`` expression instead of `and`/`or` (:issue:`888`) + + - Remove unnecessary literal (:issue:`891`) + + - Decorate a few methods with `@staticmethod` (:issue:`885`) + + - Drop unneeded ``return`` (:issue:`884`) + + - Drop explicit ``object`` inheritance from ``class``es (:issue:`886`) + + - Unnecessary comprehension (:issue:`883`) + + - Codespell configuration (:issue:`882`) -* write_empty_chunks=False deletes chunks consisting of only fill_value. - By :user:`Davis Bennett `; :issue:`738`. + - Fix typos found by codespell (:issue:`880`) -* Move metadata handling to a class. - By :user:`Greggory Lee `; :issue:`839`. + - Proper C-style formatting for integer (:issue:`913`) -* Create a Base store class for Zarr Store. - By :user:`Greggory Lee `; :issue:`789`. + - Add LGTM.com / DeepSource.io configuration files (:issue:`909`) .. _release_2.10.3: @@ -144,7 +235,7 @@ Maintenance ~~~~~~~~~~~ * Correct conda-forge deployment of Zarr. - By :user:`Josh Moore `; :issue:`XXX`. + By :user:`Josh Moore `; :issue:`819`. .. _release_2.9.0: @@ -346,8 +437,8 @@ This release of Zarr Python is the first release of Zarr to not support Python 3 `_ for some performance analysis showing order of magnitude faster response in some benchmark. -See `this link ` for the full list of closed and -merged PR tagged with the 2.6 milestone. +See `this link `_ +for the full list of closed and merged PRs tagged with the 2.6 milestone. * Add ability to partially read and decompress arrays, see :issue:`667`. It is only available to chunks stored using fsspec and using Blosc as a compressor. From a81de6bbcb9ece348b3b811d9b6d56648685579b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Feb 2022 08:20:21 +0100 Subject: [PATCH 0088/1078] Bump redis from 4.1.2 to 4.1.3 (#960) Bumps [redis](https://github.com/redis/redis-py) from 4.1.2 to 4.1.3. 
- [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.1.2...v4.1.3) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0609af2bca..0f9f6574df 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.1.2 +redis==4.1.3 types-redis types-setuptools pymongo==4.0.1 From ee4c59308199056d13e202941c4366212f938ce2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Feb 2022 15:53:11 -0800 Subject: [PATCH 0089/1078] Bump redis from 4.1.3 to 4.1.4 (#968) Bumps [redis](https://github.com/redis/redis-py) from 4.1.3 to 4.1.4. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.1.3...v4.1.4) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0f9f6574df..874cf66343 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.8.1 # pyup: ignore -redis==4.1.3 +redis==4.1.4 types-redis types-setuptools pymongo==4.0.1 From ebbeb5d739fbccbab1d045b129485663ff0e697c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Feb 2022 15:53:36 -0800 Subject: [PATCH 0090/1078] Bump pytest from 7.0.0 to 7.0.1 (#964) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.0.0 to 7.0.1. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.0.0...7.0.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index bc3fd1f2c4..85cf2f82cf 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 # test requirements -pytest==7.0.0 +pytest==7.0.1 From 79307fc71a3fa1549577045028f67db9609a52dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Feb 2022 11:53:02 -0600 Subject: [PATCH 0091/1078] Bump fsspec from 2022.1.0 to 2022.2.0 (#969) * Bump fsspec from 2022.1.0 to 2022.2.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.1.0 to 2022.2.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.01.0...2022.02.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Also bump s3fs Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 874cf66343..80c5bfa464 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,6 +19,6 @@ pytest-cov==3.0.0 pytest-doctestplus==0.11.2 pytest-timeout==2.1.0 h5py==3.6.0 -fsspec==2022.1.0 -s3fs==2022.1.0 +fsspec==2022.2.0 +s3fs==2022.2.0 moto[server]>=1.3.14 From 84d8f7cf6a3f8f398b272761f01b02bdebcbf9df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Mar 2022 10:48:56 -0600 Subject: [PATCH 0092/1078] Bump pytest-doctestplus from 0.11.2 to 0.12.0 (#973) Bumps [pytest-doctestplus](https://github.com/astropy/pytest-doctestplus) from 0.11.2 to 0.12.0. - [Release notes](https://github.com/astropy/pytest-doctestplus/releases) - [Changelog](https://github.com/astropy/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/astropy/pytest-doctestplus/compare/v0.11.2...v0.12.0) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 80c5bfa464..ab184b543f 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ tox==3.24.5 coverage flake8==4.0.1 pytest-cov==3.0.0 -pytest-doctestplus==0.11.2 +pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 h5py==3.6.0 fsspec==2022.2.0 From 999593961a305ac7b15616f9b7df1826f1acf8f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20K=C3=B6lling?= Date: Fri, 4 Mar 2022 18:10:00 +0100 Subject: [PATCH 0093/1078] remove `clobber` from `normalize_store_arg` to enable writes to consolidated FSStore groups (#976) --- docs/release.rst | 8 ++++++++ zarr/convenience.py | 11 +++++------ zarr/creation.py | 8 ++++---- zarr/hierarchy.py | 12 ++++++------ zarr/storage.py | 3 +-- zarr/tests/test_convenience.py | 4 ++++ zarr/tests/test_storage.py | 36 ++++++++++++++++++++++++++++++++++ 7 files changed, 64 insertions(+), 18 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 16c884efc7..c1ca0355bc 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,14 @@ Release notes Unreleased ---------- + +Bug fixes +~~~~~~~~~ + +* Removed `clobber` argument from `normalize_store_arg`. This makes it possible + to change data within an opened consolidated group using mode `"r+"` (i.e. region + writes). By :user:`Tobias Kölling ` :issue:`975`. + .. _release_2.11.0: 2.11.0 diff --git a/zarr/convenience.py b/zarr/convenience.py index 4d07968579..0cb20220f3 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -77,11 +77,10 @@ def open(store: StoreLike = None, mode: str = "a", **kwargs): path = kwargs.get('path') # handle polymorphic store arg - clobber = mode == 'w' # we pass storage options explicitly, since normalize_store_arg might construct # a store if the input is a fsspec-compatible URL _store: BaseStore = normalize_store_arg( - store, clobber=clobber, storage_options=kwargs.pop("storage_options", {}) + store, storage_options=kwargs.pop("storage_options", {}), mode=mode ) path = normalize_storage_path(path) @@ -142,7 +141,7 @@ def save_array(store: StoreLike, arr, **kwargs): """ may_need_closing = _might_close(store) - _store: BaseStore = normalize_store_arg(store, clobber=True) + _store: BaseStore = normalize_store_arg(store, mode="w") try: _create_array(arr, store=_store, overwrite=True, **kwargs) finally: @@ -213,7 +212,7 @@ def save_group(store: StoreLike, *args, **kwargs): raise ValueError('at least one array must be provided') # handle polymorphic store arg may_need_closing = _might_close(store) - _store: BaseStore = normalize_store_arg(store, clobber=True) + _store: BaseStore = normalize_store_arg(store, mode="w") try: grp = _create_group(_store, overwrite=True) for i, arr in enumerate(args): @@ -1117,7 +1116,7 @@ def consolidate_metadata(store: StoreLike, metadata_key=".zmetadata"): open_consolidated """ - store = normalize_store_arg(store, clobber=True) + store = normalize_store_arg(store, mode="w") def is_zarr_key(key): return (key.endswith('.zarray') or key.endswith('.zgroup') or @@ -1179,7 +1178,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** from .storage import ConsolidatedMetadataStore # normalize parameters - store = normalize_store_arg(store, storage_options=kwargs.get("storage_options")) + store = 
normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode) if mode not in {'r', 'r+'}: raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}" .format(mode)) diff --git a/zarr/creation.py b/zarr/creation.py index 7e7adcb157..9d6902a6e3 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -490,11 +490,11 @@ def open_array( # a : read/write if exists, create otherwise (default) # handle polymorphic store arg - clobber = (mode == 'w') - store = normalize_store_arg(store, clobber=clobber, storage_options=storage_options, mode=mode) + store = normalize_store_arg(store, storage_options=storage_options, mode=mode) if chunk_store is not None: - chunk_store = normalize_store_arg(chunk_store, clobber=clobber, - storage_options=storage_options) + chunk_store = normalize_store_arg(chunk_store, + storage_options=storage_options, + mode=mode) path = normalize_storage_path(path) # API compatibility with h5py diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 763a5f1631..31c9e2a8d2 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -1051,10 +1051,10 @@ def move(self, source, dest): self._write_op(self._move_nosync, source, dest) -def _normalize_store_arg(store, *, clobber=False, storage_options=None, mode=None): +def _normalize_store_arg(store, *, storage_options=None, mode="r"): if store is None: return MemoryStore() - return normalize_store_arg(store, clobber=clobber, + return normalize_store_arg(store, storage_options=storage_options, mode=mode) @@ -1164,13 +1164,13 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N """ # handle polymorphic store arg - clobber = mode != "r" store = _normalize_store_arg( - store, clobber=clobber, storage_options=storage_options, mode=mode + store, storage_options=storage_options, mode=mode ) if chunk_store is not None: - chunk_store = _normalize_store_arg(chunk_store, clobber=clobber, - storage_options=storage_options) + chunk_store = _normalize_store_arg(chunk_store, + storage_options=storage_options, + mode=mode) path = normalize_storage_path(path) # ensure store is initialized diff --git a/zarr/storage.py b/zarr/storage.py index 7f572d35ff..35e1fdb0a2 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -104,13 +104,12 @@ def contains_group(store: StoreLike, path: Path = None) -> bool: return key in store -def normalize_store_arg(store: Any, clobber=False, storage_options=None, mode="w") -> BaseStore: +def normalize_store_arg(store: Any, storage_options=None, mode="r") -> BaseStore: if store is None: return BaseStore._ensure_store(dict()) elif isinstance(store, os.PathLike): store = os.fspath(store) if isinstance(store, str): - mode = mode if clobber else "r" if "://" in store or "::" in store: return FSStore(store, mode=mode, **(storage_options or {})) elif storage_options: diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index e5ccbd494d..a6041b788e 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -175,6 +175,8 @@ def test_consolidate_metadata(): open_consolidated(store, mode='a') with pytest.raises(ValueError): open_consolidated(store, mode='w') + with pytest.raises(ValueError): + open_consolidated(store, mode='w-') # make sure keyword arguments are passed through without error open_consolidated(store, cache_attrs=True, synchronizer=None) @@ -224,6 +226,8 @@ def test_consolidated_with_chunk_store(): open_consolidated(store, mode='a', chunk_store=chunk_store) with pytest.raises(ValueError): 
open_consolidated(store, mode='w', chunk_store=chunk_store) + with pytest.raises(ValueError): + open_consolidated(store, mode='w-', chunk_store=chunk_store) # make sure keyword arguments are passed through without error open_consolidated(store, cache_attrs=True, synchronizer=None, diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index e85973b524..f39b1d8743 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1021,6 +1021,42 @@ def test_create(self): with pytest.raises(PermissionError): g.data[:] = 1 + @pytest.mark.parametrize('mode,allowed', [("r", False), ("r+", True)]) + def test_modify_consolidated(self, mode, allowed): + import zarr + url = "file://" + tempfile.mkdtemp() + + # create + root = zarr.open_group(url, mode="w") + root.zeros('baz', shape=(10000, 10000), chunks=(1000, 1000), dtype='i4') + zarr.consolidate_metadata(url) + + # reopen and modify + root = zarr.open_consolidated(url, mode=mode) + if allowed: + root["baz"][0, 0] = 7 + + root = zarr.open_consolidated(url, mode="r") + assert root["baz"][0, 0] == 7 + else: + with pytest.raises(zarr.errors.ReadOnlyError): + root["baz"][0, 0] = 7 + + @pytest.mark.parametrize('mode', ["r", "r+"]) + def test_modify_consolidated_metadata_raises(self, mode): + import zarr + url = "file://" + tempfile.mkdtemp() + + # create + root = zarr.open_group(url, mode="w") + root.zeros('baz', shape=(10000, 10000), chunks=(1000, 1000), dtype='i4') + zarr.consolidate_metadata(url) + + # reopen and modify + root = zarr.open_consolidated(url, mode=mode) + with pytest.raises(zarr.errors.ReadOnlyError): + root["baz"].resize(100, 100) + def test_read_only(self): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) From 6b5db73d0790d400b0df3c9ecf04040d07ededa3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 18:41:53 +0100 Subject: [PATCH 0094/1078] Bump pymongo from 4.0.1 to 4.0.2 (#979) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.0.1 to 4.0.2. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/4.0.2/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.0.1...4.0.2) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-patch ... 
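A note on #976 above: with `clobber` gone, writability is decided by the open mode alone, so a consolidated group opened with mode "r+" now accepts chunk writes while its consolidated metadata stays read-only. A minimal sketch mirroring the new tests (the directory name is illustrative)::

    import zarr

    root = zarr.open_group("example.zarr", mode="w")
    root.zeros("baz", shape=(100, 100), chunks=(10, 10), dtype="i4")
    zarr.consolidate_metadata("example.zarr")

    root = zarr.open_consolidated("example.zarr", mode="r+")
    root["baz"][0, 0] = 7           # region write: now allowed
    # root["baz"].resize(50, 50)    # metadata write: still raises ReadOnlyError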
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index ab184b543f..46ad0c04ee 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -10,7 +10,7 @@ azure-storage-blob==12.8.1 # pyup: ignore redis==4.1.4 types-redis types-setuptools -pymongo==4.0.1 +pymongo==4.0.2 # optional test requirements tox==3.24.5 coverage From 0fc6a1ea9b2158bd162cc3a1700737f4e1bdc27e Mon Sep 17 00:00:00 2001 From: Ben Jeffery Date: Mon, 7 Mar 2022 18:27:37 +0000 Subject: [PATCH 0095/1078] Fix indexing for scalar numpy values (#967) (#974) * Fix indexing for scalar numpy values (#967) * Fix linting errors * Remove whitespace from GH editor * Fix `W391 blank line at end of file` Co-authored-by: Josh Moore Co-authored-by: jmoore --- docs/release.rst | 7 +++++++ zarr/indexing.py | 5 ++++- zarr/tests/test_indexing.py | 9 +++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index c1ca0355bc..ef8a396c0f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,10 +6,17 @@ Release notes Unreleased ---------- +.. _release_2.11.1: + +2.11.1 +------ Bug fixes ~~~~~~~~~ +* Fix bug where indexing with a scalar numpy value returned a single-value array. + By :user:`Ben Jeffery ` :issue:`967`. + * Removed `clobber` argument from `normalize_store_arg`. This makes it possible to change data within an opened consolidated group using mode `"r+"` (i.e. region writes). By :user:`Tobias Kölling ` :issue:`975`. diff --git a/zarr/indexing.py b/zarr/indexing.py index 3f0dff42ef..1941766d85 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -34,7 +34,10 @@ def is_integer_list(x): def is_integer_array(x, ndim=None): - t = hasattr(x, 'shape') and hasattr(x, 'dtype') and x.dtype.kind in 'ui' + t = not np.isscalar(x) and \ + hasattr(x, 'shape') and \ + hasattr(x, 'dtype') and \ + x.dtype.kind in 'ui' if ndim is not None: t = t and len(x.shape) == ndim return t diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index a58a309534..74f0c9f7de 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -1,3 +1,4 @@ +import numpy import numpy as np import pytest from numpy.testing import assert_array_equal @@ -1442,3 +1443,11 @@ def test_slice_selection_uints(): idx = np.uint64(3) slice_sel = make_slice_selection((idx,)) assert arr[slice_sel].shape == (1, 6) + + +def test_numpy_int_indexing(): + a = np.arange(1050) + z = zarr.create(shape=1050, chunks=100, dtype=a.dtype) + z[:] = a + assert a[42] == z[42] + assert a[numpy.int64(42)] == z[numpy.int64(42)] From 594c441693220eb4302f60c21c6bb021426bfdaa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Mar 2022 08:33:27 -0600 Subject: [PATCH 0096/1078] Bump numpy from 1.22.2 to 1.22.3 (#981) Bumps [numpy](https://github.com/numpy/numpy) from 1.22.2 to 1.22.3. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.22.2...v1.22.3) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... 
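A note on #974 above: ``is_integer_array`` previously accepted zero-dimensional NumPy scalars (they have ``shape`` and ``dtype`` attributes), so indexing with one returned a single-element array; the added ``not np.isscalar(x)`` guard makes scalar NumPy indices behave like plain ints. Roughly::

    import numpy as np
    import zarr

    z = zarr.create(shape=1050, chunks=100, dtype="i8")
    z[:] = np.arange(1050)
    assert z[42] == z[np.int64(42)]  # both now return a scalar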
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index bf26fa0530..75acc051e7 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.22.2 +numpy==1.22.3 From b98c7995896de41b301e84b3b468785b09191139 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Mar 2022 19:25:38 +0100 Subject: [PATCH 0097/1078] Bump azure-storage-blob from 12.8.1 to 12.10.0 (#983) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.8.1 to 12.10.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.8.1...azure-storage-blob_12.10.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 46ad0c04ee..2ceb4bc321 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -6,7 +6,7 @@ ipytree==0.2.1 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.8.1 # pyup: ignore +azure-storage-blob==12.10.0 # pyup: ignore redis==4.1.4 types-redis types-setuptools From 9babfae7e1c2235e27eed1e0dec8bf51f2ae2dbb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Mar 2022 04:18:13 +0100 Subject: [PATCH 0098/1078] Bump pytest from 7.0.1 to 7.1.0 (#985) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.0.1 to 7.1.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.0.1...7.1.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 85cf2f82cf..75f40e22e9 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 # test requirements -pytest==7.0.1 +pytest==7.1.0 From 3f8a309c28e33f0b63407757c7696d8c4511119c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Mar 2022 09:01:29 -0500 Subject: [PATCH 0099/1078] Bump pytest from 7.1.0 to 7.1.1 (#989) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.0 to 7.1.1. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.1.0...7.1.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 75f40e22e9..6f0c07cfe0 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 # test requirements -pytest==7.1.0 +pytest==7.1.1 From 2c13b95a7b3a6e80d5da48fe57bc161463495c34 Mon Sep 17 00:00:00 2001 From: Gregory Lee Date: Wed, 23 Mar 2022 19:22:21 -0400 Subject: [PATCH 0100/1078] Implement Zarr V3 protocol (#898) * add v3 store classes Define the StoreV3 class and create v3 versions of most existing stores Add a test_storage_v3.py with test classes inheriting from their v2 counterparts. Only a subset of methods involving differences in v3 behavior were overridden. * add TODO comment to meta.py * fix flake8 errors * follow zarr v3 spec when dealing with extension data types * fixes to v3 dtype handling * flake8 cleanup * remove duplicate lines in Metadata2.encode_array_metadata * Fix fields in array metadata zarr_version should not be in the array metadata, only the base store metadata compressor should be absent when there is no compression * Fix encode/decode of codec metadata classmethods adapted from zarrita code * add missing level to Zlib in _decode_codec_metadata * add extensions entry to v3 array metadata * dimension_separator should not be in the array metadata for v3 * update Attributes, adding StoreV3 support avoid pytest error about missing fixture fix flake8 error related to zarr_version fixture * add StoreV3 support to core Array object * update hexdigests * handle additional codecs that were not implemented in zarrita update hexdigests * fix * fix hexdigests * fix indentation * add StoreV3 support to Group, open_group, etc. * add StoreV3 support to creation routines * Handle dimension_separator appropriately in open_array Specifically, we want to be able to infer the dimension_separator from the store if possible * TST: add tests for open_array and dimension_separator * only allow Codec not a simple str as compressor during array initialization * add StoreV3 support to most convenience routines consolidated metadata functions haven't been updated yet * set convenience routines default to zarr_version=None This will infer the version from the store if it is a BaseStore. 
Otherwise it will use 2 for backwards compatibility * adjust test: the dimension_separator key was removed from v3 metadata * add underscores to imported test classes in test_storage_v3.py; avoids these tests running a second time when this file is called * add underscore to imported TestArrayWithPath in test_core_v3.py; prevents this test class from being run a second time * refactor _valid_keys and add tests; test _ensure_store(None) * move KVStoreV3 logic from StoreV3.__eq__ to KVStoreV3.__eq__ * expand tests for _ensure_store * test exception for v2 store input to _get_hierarchy_metadata * test exception for init_array with path=None * remove unneeded checks from Attributes; the store can reject invalid v3 keys. _update_nosync calls _get_nosync which will add the 'attributes' key if missing * test __repr__ of LazyLoader * test load of individual array * Add simple test case for zarr.tree convenience method * add tests for copy_store with a V3 store class * test raising of exception on initialization with mismatched store and chunk_store protocol versions * add key validation on setitem in v3 stores; enable missing test_hierarchy for v3 stores. This required fixes to a number of the rename and rmdir methods for the V3 stores * Fix core V3 tests now that keys are validated on __setitem__ * pep8 in storage_v3 tests * flake8 in test_convenience.py * pep8 * fix test_attrs.py: validate_key requires attr key to start with meta/ or data/ in v3 * Fix SQLiteStore: changes to rmdir were intended for SQLiteStoreV3, not SQLiteStore * fix failing hierarchy test * update ZipStore tests to make sure they all run on V3 * add default rmdir implementation to all StoreV3 classes; without these, rmdir can be overridden by the other V2 class in the MRO * fix test_sync.py * all rmdir methods for StoreV3 classes need to remove associated metadata * avoid warning from test_entropy.py * pep8 fixes * greatly reduce code duplication in test_storage_v3.py; instead add a v3 code path to existing test methods in test_storage.py * remove redundant test_hexdigest methods; only need to define expected() for each class; reduce redundant code in test_core_v3.py * move test_core_v3.py functions back into test_core.py * typing fixes for mypy * can assume self.keys() exists since BaseStore inherits from MutableMapping * refactor rmdir methods for v3 and improve coverage * improve coverage of core.py * improve coverage of convenience.py * expand info tests; needed to also test with a size > 10**12 to improve coverage * Expand tests of Array.view * improve coverage of creation.py * improve coverage of hierarchy.py * improve coverage of meta.py * pep8 * skip FSStoreV3 test when fsspec not installed * test raising of PermissionError for setter on views * remove redundant check (_normalize_store_arg will already raise here) * improve coverage and fix bugs in normalize_store_arg * improve coverage of storage.py; remove redundant getsize methods * pep8 * fix StoreV3 tests * fix duplicate zarr_fsstore entry * fix rename * remove debug statements * fix typo * skip unavailable NumPy dtypes * pep8 * mypy fixes * remove redundant check (already done above) * remove KeyError check. 
list_prefix only returns keys that exist * coverage fixes * implemented ConsolidatedMetadataStoreV3 Parametrize test_consolidate_metadata: removes the need for a separate test_consolidated_with_chunk_store * expand ConsolidatedMetadataStoreV3 tests update _ensure_store to disallow mismatched Store versions * remove debug statement * fix tests: restore clobber=True * test error path in consolidate_metadata * add pragma: no cover for lines in test_meta.py that will only be visited on some architectures * flake8 fixes * flake8 * ENH: add ABSStoreV3 * flake8 * fix ABSStore.rmdir test coverage * always use / in path * remove remaining use of clobber argument in new tests * remove NestedDirectoryStoreV3 No need for this class as DirectoryStoreV3 with / chunk separator can be used instead * flake8 * remove rmdir_abs: rmdir method of ABSStore parent class in ABSStoreV3 * define meta_root and data_root variables These define the root path for metadata and data, respectively * move _valid_key_characters to be a StoreV3 class field * make _get_hierarchy_metadata strictly require 'zarr.json' Still use a default set of metadata in __init__ method of Group or Array classes. Add a _get_metadata_suffix helper that defaults to '.json' if metadata is not present. * ignore type checks for _get_metadata_suffix * remove unneeded if/else in Array and Hierarchy class __init__ default metadata already gets added by Metadata3.encode_hierarchy_metadata when meta=None * remove unused import * define DEFAULT_ZARR_VERSION so we can later more easily change from 2 to 3 * add test_get_hierarchy_metadata to test the v3 _get_hierarchy_metadata helper --- zarr/_storage/absstore.py | 56 +- zarr/_storage/store.py | 294 ++++++++- zarr/attrs.py | 61 +- zarr/convenience.py | 258 +++++--- zarr/core.py | 150 +++-- zarr/creation.py | 63 +- zarr/hierarchy.py | 254 ++++++-- zarr/meta.py | 342 ++++++++++- zarr/storage.py | 858 +++++++++++++++++++++++--- zarr/tests/data/store.zip | Bin 107 -> 343 bytes zarr/tests/test_attrs.py | 207 ++++--- zarr/tests/test_convenience.py | 406 +++++++++++-- zarr/tests/test_core.py | 1047 ++++++++++++++++++++++---------- zarr/tests/test_creation.py | 372 +++++++++--- zarr/tests/test_hierarchy.py | 663 +++++++++++++++++--- zarr/tests/test_indexing.py | 2 +- zarr/tests/test_info.py | 15 +- zarr/tests/test_meta.py | 123 +++- zarr/tests/test_storage.py | 1039 +++++++++++++++++++------------ zarr/tests/test_storage_v3.py | 513 ++++++++++++++++ zarr/tests/test_sync.py | 13 +- zarr/tests/util.py | 6 +- 22 files changed, 5383 insertions(+), 1359 deletions(-) create mode 100644 zarr/tests/test_storage_v3.py diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index 98ac6328b1..cc41018f9e 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -3,7 +3,7 @@ import warnings from numcodecs.compat import ensure_bytes from zarr.util import normalize_storage_path -from zarr._storage.store import Store +from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, Store, StoreV3 __doctest_requires__ = { ('ABSStore', 'ABSStore.*'): ['azure.storage.blob'], @@ -209,3 +209,57 @@ def getsize(self, path=None): def clear(self): self.rmdir() + + +class ABSStoreV3(ABSStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __eq__(self, other): + return ( + isinstance(other, ABSStoreV3) and + self.client == other.client and + self.prefix == other.prefix + ) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def 
rmdir(self, path=None): + + if not path: + # Currently allowing clear to delete everything as in v2 + + # If we disallow an empty path then we will need to modify + # TestABSStoreV3 to have the create_store method use a prefix. + ABSStore.rmdir(self, '') + return + + meta_dir = meta_root + path + meta_dir = meta_dir.rstrip('/') + ABSStore.rmdir(self, meta_dir) + + # remove data folder + data_dir = data_root + path + data_dir = data_dir.rstrip('/') + ABSStore.rmdir(self, data_dir) + + # remove metadata files + sfx = _get_metadata_suffix(self) + array_meta_file = meta_dir + '.array' + sfx + if array_meta_file in self: + del self[array_meta_file] + group_meta_file = meta_dir + '.group' + sfx + if group_meta_file in self: + del self[group_meta_file] + + # TODO: adapt the v2 getsize method to work for v3 + # For now, calling the generic keys-based _getsize + def getsize(self, path=None): + from zarr.storage import _getsize # avoid circular import + return _getsize(self, path) + + +ABSStoreV3.__doc__ = ABSStore.__doc__ diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 6f5bf78e28..d1ad930609 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -1,7 +1,9 @@ +import abc from collections.abc import MutableMapping -from typing import Any, List, Optional, Union +from string import ascii_letters, digits +from typing import Any, List, Mapping, Optional, Union -from zarr.meta import Metadata2 +from zarr.meta import Metadata2, Metadata3 from zarr.util import normalize_storage_path # v2 store keys @@ -9,6 +11,12 @@ group_meta_key = '.zgroup' attrs_key = '.zattrs' +# v3 paths +meta_root = 'meta/root/' +data_root = 'data/root/' + +DEFAULT_ZARR_VERSION = 2 + class BaseStore(MutableMapping): """Abstract base class for store implementations. @@ -84,6 +92,10 @@ def _ensure_store(store: Any): if store is None: return None elif isinstance(store, BaseStore): + if not store._store_version == 2: + raise ValueError( + f"cannot initialize a v2 store with a v{store._store_version} store" + ) return store elif isinstance(store, MutableMapping): return KVStore(store) @@ -131,6 +143,161 @@ def rmdir(self, path: str = "") -> None: _rmdir_from_keys(self, path) +class StoreV3(BaseStore): + _store_version = 3 + _metadata_class = Metadata3 + _valid_key_characters = set(ascii_letters + digits + "/.-_") + + def _valid_key(self, key: str) -> bool: + """ + Verify that a key conforms to the specification. + + A key is any string containing only character in the range a-z, A-Z, + 0-9, or in the set /.-_ it will return True if that's the case, False + otherwise. + """ + if not isinstance(key, str) or not key.isascii(): + return False + if set(key) - self._valid_key_characters: + return False + return True + + def _validate_key(self, key: str): + """ + Verify that a key conforms to the v3 specification. + + A key is any string containing only character in the range a-z, A-Z, + 0-9, or in the set /.-_ it will return True if that's the case, False + otherwise. + + In spec v3, keys can only start with the prefix meta/, data/ or be + exactly zarr.json and should not end with /. This should not be exposed + to the user, and is a store implementation detail, so this method will + raise a ValueError in that case. 
+ """ + if not self._valid_key(key): + raise ValueError( + f"Keys must be ascii strings and may only contain the " + f"characters {''.join(sorted(self._valid_key_characters))}" + ) + + if ( + not key.startswith("data/") + and (not key.startswith("meta/")) + and (not key == "zarr.json") + # TODO: Possibly allow key == ".zmetadata" too if we write a + # consolidated metadata spec corresponding to this? + ): + raise ValueError("keys starts with unexpected value: `{}`".format(key)) + + if key.endswith('/'): + raise ValueError("keys may not end in /") + + def list_prefix(self, prefix): + if prefix.startswith('/'): + raise ValueError("prefix must not begin with /") + # TODO: force prefix to end with /? + return [k for k in self.list() if k.startswith(prefix)] + + def erase(self, key): + self.__delitem__(key) + + def erase_prefix(self, prefix): + assert prefix.endswith("/") + + if prefix == "/": + all_keys = self.list() + else: + all_keys = self.list_prefix(prefix) + for key in all_keys: + self.erase(key) + + def list_dir(self, prefix): + """ + TODO: carefully test this with trailing/leading slashes + """ + if prefix: # allow prefix = "" ? + assert prefix.endswith("/") + + all_keys = self.list_prefix(prefix) + len_prefix = len(prefix) + keys = [] + prefixes = [] + for k in all_keys: + trail = k[len_prefix:] + if "/" not in trail: + keys.append(prefix + trail) + else: + prefixes.append(prefix + trail.split("/", maxsplit=1)[0] + "/") + return keys, list(set(prefixes)) + + def list(self): + return list(self.keys()) + + def __contains__(self, key): + return key in self.list() + + @abc.abstractmethod + def __setitem__(self, key, value): + """Set a value.""" + + @abc.abstractmethod + def __getitem__(self, key): + """Get a value.""" + + def clear(self): + """Remove all items from store.""" + self.erase_prefix("/") + + def __eq__(self, other): + return NotImplemented + + @staticmethod + def _ensure_store(store): + """ + We want to make sure internally that zarr stores are always a class + with a specific interface derived from ``Store``, which is slightly + different than ``MutableMapping``. + + We'll do this conversion in a few places automatically + """ + from zarr.storage import KVStoreV3 # avoid circular import + if store is None: + return None + elif isinstance(store, StoreV3): + return store + elif isinstance(store, Store): + raise ValueError( + f"cannot initialize a v3 store with a v{store._store_version} store" + ) + elif isinstance(store, MutableMapping): + return KVStoreV3(store) + else: + for attr in [ + "keys", + "values", + "get", + "__setitem__", + "__getitem__", + "__delitem__", + "__contains__", + ]: + if not hasattr(store, attr): + break + else: + return KVStoreV3(store) + + raise ValueError( + "v3 stores must be subclasses of StoreV3, " + "if your store exposes the MutableMapping interface wrap it in " + f"Zarr.storage.KVStoreV3. 
Got {store}" + ) + + +# allow MutableMapping for backwards compatibility +StoreLike = Union[BaseStore, MutableMapping] + + def _path_to_prefix(path: Optional[str]) -> str: # assume path already normalized if path: @@ -140,17 +307,68 @@ def _path_to_prefix(path: Optional[str]) -> str: return prefix +def _get_hierarchy_metadata(store: StoreV3) -> Mapping[str, Any]: + version = getattr(store, '_store_version', 2) + if version < 3: + raise ValueError("zarr.json hierarchy metadata not stored for " + f"zarr v{version} stores") + if 'zarr.json' not in store: + raise ValueError("zarr.json metadata not found in store") + return store._metadata_class.decode_hierarchy_metadata(store['zarr.json']) + + +def _get_metadata_suffix(store: StoreV3) -> str: + if 'zarr.json' in store: + return _get_hierarchy_metadata(store)['metadata_key_suffix'] + return '.json' + + +def _rename_metadata_v3(store: StoreV3, src_path: str, dst_path: str) -> bool: + """Rename source or group metadata file associated with src_path.""" + any_renamed = False + sfx = _get_metadata_suffix(store) + src_path = src_path.rstrip('/') + dst_path = dst_path.rstrip('/') + _src_array_json = meta_root + src_path + '.array' + sfx + if _src_array_json in store: + new_key = meta_root + dst_path + '.array' + sfx + store[new_key] = store.pop(_src_array_json) + any_renamed = True + _src_group_json = meta_root + src_path + '.group' + sfx + if _src_group_json in store: + new_key = meta_root + dst_path + '.group' + sfx + store[new_key] = store.pop(_src_group_json) + any_renamed = True + return any_renamed + + def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None: # assume path already normalized src_prefix = _path_to_prefix(src_path) dst_prefix = _path_to_prefix(dst_path) - for key in list(store.keys()): - if key.startswith(src_prefix): - new_key = dst_prefix + key.lstrip(src_prefix) - store[new_key] = store.pop(key) - - -def _rmdir_from_keys(store: Union[BaseStore, MutableMapping], path: Optional[str] = None) -> None: + version = getattr(store, '_store_version', 2) + if version == 2: + for key in list(store.keys()): + if key.startswith(src_prefix): + new_key = dst_prefix + key.lstrip(src_prefix) + store[new_key] = store.pop(key) + else: + any_renamed = False + for root_prefix in [meta_root, data_root]: + _src_prefix = root_prefix + src_prefix + _dst_prefix = root_prefix + dst_prefix + for key in store.list_prefix(_src_prefix): # type: ignore + new_key = _dst_prefix + key[len(_src_prefix):] + store[new_key] = store.pop(key) + any_renamed = True + any_meta_renamed = _rename_metadata_v3(store, src_path, dst_path) # type: ignore + any_renamed = any_meta_renamed or any_renamed + + if not any_renamed: + raise ValueError(f"no item {src_path} found to rename") + + +def _rmdir_from_keys(store: StoreLike, path: Optional[str] = None) -> None: # assume path already normalized prefix = _path_to_prefix(path) for key in list(store.keys()): @@ -158,6 +376,27 @@ def _rmdir_from_keys(store: Union[BaseStore, MutableMapping], path: Optional[str del store[key] +def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: + + meta_dir = meta_root + path + meta_dir = meta_dir.rstrip('/') + _rmdir_from_keys(store, meta_dir) + + # remove data folder + data_dir = data_root + path + data_dir = data_dir.rstrip('/') + _rmdir_from_keys(store, data_dir) + + # remove metadata files + sfx = _get_metadata_suffix(store) + array_meta_file = meta_dir + '.array' + sfx + if array_meta_file in store: + store.erase(array_meta_file) # type: ignore + 
group_meta_file = meta_dir + '.group' + sfx + if group_meta_file in store: + store.erase(group_meta_file) # type: ignore + + def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]: # assume path already normalized prefix = _path_to_prefix(path) @@ -168,3 +407,40 @@ def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str child = suffix.split('/')[0] children.add(child) return sorted(children) + + +def _prefix_to_array_key(store: StoreLike, prefix: str) -> str: + if getattr(store, "_store_version", 2) == 3: + if prefix: + sfx = _get_metadata_suffix(store) # type: ignore + key = meta_root + prefix.rstrip("/") + ".array" + sfx + else: + raise ValueError("prefix must be supplied to get a v3 array key") + else: + key = prefix + array_meta_key + return key + + +def _prefix_to_group_key(store: StoreLike, prefix: str) -> str: + if getattr(store, "_store_version", 2) == 3: + if prefix: + sfx = _get_metadata_suffix(store) # type: ignore + key = meta_root + prefix.rstrip('/') + ".group" + sfx + else: + raise ValueError("prefix must be supplied to get a v3 group key") + else: + key = prefix + group_meta_key + return key + + +def _prefix_to_attrs_key(store: StoreLike, prefix: str) -> str: + if getattr(store, "_store_version", 2) == 3: + # for v3, attributes are stored in the array metadata + sfx = _get_metadata_suffix(store) # type: ignore + if prefix: + key = meta_root + prefix.rstrip('/') + ".array" + sfx + else: + raise ValueError("prefix must be supplied to get a v3 array key") + else: + key = prefix + attrs_key + return key diff --git a/zarr/attrs.py b/zarr/attrs.py index eff1237db1..39683d45d9 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -1,6 +1,6 @@ from collections.abc import MutableMapping -from zarr._storage.store import Store +from zarr._storage.store import Store, StoreV3 from zarr.util import json_dumps @@ -26,7 +26,10 @@ class Attributes(MutableMapping): def __init__(self, store, key='.zattrs', read_only=False, cache=True, synchronizer=None): - self.store = Store._ensure_store(store) + + self._version = getattr(store, '_store_version', 2) + _Store = Store if self._version == 2 else StoreV3 + self.store = _Store._ensure_store(store) self.key = key self.read_only = read_only self.cache = cache @@ -38,6 +41,8 @@ def _get_nosync(self): data = self.store[self.key] except KeyError: d = dict() + if self._version > 2: + d['attributes'] = {} else: d = self.store._metadata_class.parse_metadata(data) return d @@ -47,6 +52,8 @@ def asdict(self): if self.cache and self._cached_asdict is not None: return self._cached_asdict d = self._get_nosync() + if self._version == 3: + d = d['attributes'] if self.cache: self._cached_asdict = d return d @@ -54,7 +61,10 @@ def asdict(self): def refresh(self): """Refresh cached attributes from the store.""" if self.cache: - self._cached_asdict = self._get_nosync() + if self._version == 2: + self._cached_asdict = self._get_nosync() + else: + self._cached_asdict = self._get_nosync()['attributes'] def __contains__(self, x): return x in self.asdict() @@ -84,7 +94,10 @@ def _setitem_nosync(self, item, value): d = self._get_nosync() # set key value - d[item] = value + if self._version == 2: + d[item] = value + else: + d['attributes'][item] = value # _put modified data self._put_nosync(d) @@ -98,7 +111,10 @@ def _delitem_nosync(self, key): d = self._get_nosync() # delete key value - del d[key] + if self._version == 2: + del d[key] + else: + del d['attributes'][key] # _put modified data self._put_nosync(d) @@ -106,12 
+122,34 @@ def _delitem_nosync(self, key): def put(self, d): """Overwrite all attributes with the key/value pairs in the provided dictionary `d` in a single operation.""" - self._write_op(self._put_nosync, d) + if self._version == 2: + self._write_op(self._put_nosync, d) + else: + self._write_op(self._put_nosync, dict(attributes=d)) def _put_nosync(self, d): - self.store[self.key] = json_dumps(d) - if self.cache: - self._cached_asdict = d + if self._version == 2: + self.store[self.key] = json_dumps(d) + if self.cache: + self._cached_asdict = d + else: + if self.key in self.store: + # Cannot write the attributes directly to JSON, but have to + # store it within the pre-existing attributes key of the v3 + # metadata. + + # Note: this changes the store.counter result in test_caching_on! + + meta = self.store._metadata_class.parse_metadata(self.store[self.key]) + if 'attributes' in meta and 'filters' in meta['attributes']: + # need to preserve any existing "filters" attribute + d['attributes']['filters'] = meta['attributes']['filters'] + meta['attributes'] = d['attributes'] + else: + meta = d + self.store[self.key] = json_dumps(meta) + if self.cache: + self._cached_asdict = d['attributes'] # noinspection PyMethodOverriding def update(self, *args, **kwargs): @@ -124,7 +162,10 @@ def _update_nosync(self, *args, **kwargs): d = self._get_nosync() # update - d.update(*args, **kwargs) + if self._version == 2: + d.update(*args, **kwargs) + else: + d['attributes'].update(*args, **kwargs) # _put modified data self._put_nosync(d) diff --git a/zarr/convenience.py b/zarr/convenience.py index 0cb20220f3..2cbc9bdf68 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -5,15 +5,18 @@ import re from collections.abc import Mapping, MutableMapping +from zarr._storage.store import data_root, meta_root from zarr.core import Array from zarr.creation import array as _create_array -from zarr.creation import normalize_store_arg, open_array +from zarr.creation import open_array from zarr.errors import CopyError, PathNotFoundError from zarr.hierarchy import Group from zarr.hierarchy import group as _create_group from zarr.hierarchy import open_group from zarr.meta import json_dumps, json_loads -from zarr.storage import contains_array, contains_group, BaseStore +from zarr.storage import (_get_metadata_suffix, contains_array, contains_group, + normalize_store_arg, BaseStore, ConsolidatedMetadataStore, + ConsolidatedMetadataStoreV3) from zarr.util import TreeViewer, buffer_size, normalize_storage_path from typing import Union @@ -21,8 +24,14 @@ StoreLike = Union[BaseStore, MutableMapping, str, None] +def _check_and_update_path(store: BaseStore, path): + if getattr(store, '_store_version', 2) > 2 and not path: + raise ValueError("path must be provided for v3 stores") + return normalize_storage_path(path) + + # noinspection PyShadowingBuiltins -def open(store: StoreLike = None, mode: str = "a", **kwargs): +def open(store: StoreLike = None, mode: str = "a", *, zarr_version=None, path=None, **kwargs): """Convenience function to open a group or array using file-mode-like semantics. Parameters @@ -34,6 +43,12 @@ def open(store: StoreLike = None, mode: str = "a", **kwargs): read/write (must exist); 'a' means read/write (create if doesn't exist); 'w' means create (overwrite if exists); 'w-' means create (fail if exists). + zarr_version : {2, 3, None}, optional + The zarr protocol version to use. The default value of None will attempt + to infer the version from `store` if possible, otherwise it will fall + back to 2. 
+ path : str or None, optional + The path within the store to open. **kwargs Additional parameters are passed through to :func:`zarr.creation.open_array` or :func:`zarr.hierarchy.open_group`. @@ -75,14 +90,16 @@ def open(store: StoreLike = None, mode: str = "a", **kwargs): """ - path = kwargs.get('path') # handle polymorphic store arg # we pass storage options explicitly, since normalize_store_arg might construct # a store if the input is a fsspec-compatible URL _store: BaseStore = normalize_store_arg( - store, storage_options=kwargs.pop("storage_options", {}), mode=mode + store, storage_options=kwargs.pop("storage_options", {}), mode=mode, + zarr_version=zarr_version, ) + # path = _check_and_update_path(_store, path) path = normalize_storage_path(path) + kwargs['path'] = path if mode in {'w', 'w-', 'x'}: if 'shape' in kwargs: @@ -109,7 +126,7 @@ def _might_close(path): return isinstance(path, (str, os.PathLike)) -def save_array(store: StoreLike, arr, **kwargs): +def save_array(store: StoreLike, arr, *, zarr_version=None, path=None, **kwargs): """Convenience function to save a NumPy array to the local file system, following a similar API to the NumPy save() function. @@ -119,6 +136,12 @@ def save_array(store: StoreLike, arr, **kwargs): Store or path to directory in file system or name of zip file. arr : ndarray NumPy array with data to save. + zarr_version : {2, 3, None}, optional + The zarr protocol version to use when saving. The default value of None + will attempt to infer the version from `store` if possible, otherwise + it will fall back to 2. + path : str or None, optional + The path within the store where the array will be saved. kwargs Passed through to :func:`create`, e.g., compressor. @@ -141,16 +164,18 @@ def save_array(store: StoreLike, arr, **kwargs): """ may_need_closing = _might_close(store) - _store: BaseStore = normalize_store_arg(store, mode="w") + _store: BaseStore = normalize_store_arg(store, mode="w", zarr_version=zarr_version) + path = _check_and_update_path(_store, path) try: - _create_array(arr, store=_store, overwrite=True, **kwargs) + _create_array(arr, store=_store, overwrite=True, zarr_version=zarr_version, path=path, + **kwargs) finally: if may_need_closing: # needed to ensure zip file records are written _store.close() -def save_group(store: StoreLike, *args, **kwargs): +def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): """Convenience function to save several NumPy arrays to the local file system, following a similar API to the NumPy savez()/savez_compressed() functions. @@ -160,6 +185,12 @@ def save_group(store: StoreLike, *args, **kwargs): Store or path to directory in file system or name of zip file. args : ndarray NumPy arrays with data to save. + zarr_version : {2, 3, None}, optional + The zarr protocol version to use when saving. The default value of None + will attempt to infer the version from `store` if possible, otherwise + it will fall back to 2. + path : str or None, optional + Path within the store where the group will be saved. kwargs NumPy arrays with data to save. 
@@ -212,21 +243,22 @@ def save_group(store: StoreLike, *args, **kwargs): raise ValueError('at least one array must be provided') # handle polymorphic store arg may_need_closing = _might_close(store) - _store: BaseStore = normalize_store_arg(store, mode="w") + _store: BaseStore = normalize_store_arg(store, mode="w", zarr_version=zarr_version) + path = _check_and_update_path(_store, path) try: - grp = _create_group(_store, overwrite=True) + grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version) for i, arr in enumerate(args): k = 'arr_{}'.format(i) - grp.create_dataset(k, data=arr, overwrite=True) + grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) for k, arr in kwargs.items(): - grp.create_dataset(k, data=arr, overwrite=True) + grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) finally: if may_need_closing: # needed to ensure zip file records are written _store.close() -def save(store: StoreLike, *args, **kwargs): +def save(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): """Convenience function to save an array or group of arrays to the local file system. Parameters @@ -235,6 +267,12 @@ def save(store: StoreLike, *args, **kwargs): Store or path to directory in file system or name of zip file. args : ndarray NumPy arrays with data to save. + zarr_version : {2, 3, None}, optional + The zarr protocol version to use when saving. The default value of None + will attempt to infer the version from `store` if possible, otherwise + it will fall back to 2. + path : str or None, optional + The path within the group where the arrays will be saved. kwargs NumPy arrays with data to save. @@ -301,9 +339,10 @@ def save(store: StoreLike, *args, **kwargs): if len(args) == 0 and len(kwargs) == 0: raise ValueError('at least one array must be provided') if len(args) == 1 and len(kwargs) == 0: - save_array(store, args[0]) + save_array(store, args[0], zarr_version=zarr_version, path=path) else: - save_group(store, *args, **kwargs) + save_group(store, *args, zarr_version=zarr_version, path=path, + **kwargs) class LazyLoader(Mapping): @@ -336,13 +375,19 @@ def __repr__(self): return r -def load(store: StoreLike): +def load(store: StoreLike, zarr_version=None, path=None): """Load data from an array or group into memory. Parameters ---------- store : MutableMapping or string Store or path to directory in file system or name of zip file. + zarr_version : {2, 3, None}, optional + The zarr protocol version to use when loading. The default value of + None will attempt to infer the version from `store` if possible, + otherwise it will fall back to 2. + path : str or None, optional + The path within the store from which to load. Returns ------- @@ -362,11 +407,12 @@ def load(store: StoreLike): """ # handle polymorphic store arg - _store = normalize_store_arg(store) - if contains_array(_store, path=None): - return Array(store=_store, path=None)[...] - elif contains_group(_store, path=None): - grp = Group(store=_store, path=None) + _store = normalize_store_arg(store, zarr_version=zarr_version) + path = _check_and_update_path(_store, path) + if contains_array(_store, path=path): + return Array(store=_store, path=path)[...] 
+    elif contains_group(_store, path=path):
+        grp = Group(store=_store, path=path)
         return LazyLoader(grp)


@@ -600,6 +646,16 @@ def copy_store(source, dest, source_path='', dest_path='', excludes=None,
     # setup counting variables
     n_copied = n_skipped = n_bytes_copied = 0

+    source_store_version = getattr(source, '_store_version', 2)
+    dest_store_version = getattr(dest, '_store_version', 2)
+    if source_store_version != dest_store_version:
+        raise ValueError("zarr stores must share the same protocol version")
+
+    if source_store_version > 2:
+        nchar_root = len(meta_root)
+        # code below assumes len(meta_root) == len(data_root)
+        assert len(data_root) == nchar_root
+
     # setup logging
     with _LogWriter(log) as log:

@@ -607,52 +663,63 @@ def copy_store(source, dest, source_path='', dest_path='', excludes=None,
         for source_key in sorted(source.keys()):

             # filter to keys under source path
-            if source_key.startswith(source_path):
+            if source_store_version == 2:
+                if not source_key.startswith(source_path):
+                    continue
+            elif source_store_version == 3:
+                # skip 'meta/root/' or 'data/root/' at start of source_key
+                if not source_key[nchar_root:].startswith(source_path):
+                    continue

-                # process excludes and includes
-                exclude = False
-                for prog in excludes:
+            # process excludes and includes
+            exclude = False
+            for prog in excludes:
+                if prog.search(source_key):
+                    exclude = True
+                    break
+            if exclude:
+                for prog in includes:
                     if prog.search(source_key):
-                        exclude = True
+                        exclude = False
                         break
-                if exclude:
-                    for prog in includes:
-                        if prog.search(source_key):
-                            exclude = False
-                            break
-                if exclude:
-                    continue
+            if exclude:
+                continue

-                # map key to destination path
+            # map key to destination path
+            if source_store_version == 2:
                 key_suffix = source_key[len(source_path):]
                 dest_key = dest_path + key_suffix
-
-                # create a descriptive label for this operation
-                descr = source_key
-                if dest_key != source_key:
-                    descr = descr + ' -> ' + dest_key
-
-                # decide what to do
-                do_copy = True
-                if if_exists != 'replace':
-                    if dest_key in dest:
-                        if if_exists == 'raise':
-                            raise CopyError('key {!r} exists in destination'
-                                            .format(dest_key))
-                        elif if_exists == 'skip':
-                            do_copy = False
-
-                # take action
-                if do_copy:
-                    log('copy {}'.format(descr))
-                    if not dry_run:
-                        data = source[source_key]
-                        n_bytes_copied += buffer_size(data)
-                        dest[dest_key] = data
-                    n_copied += 1
-                else:
-                    log('skip {}'.format(descr))
-                    n_skipped += 1
+            elif source_store_version == 3:
+                # nchar_root is length of 'meta/root/' or 'data/root/'
+                key_suffix = source_key[nchar_root + len(source_path):]
+                dest_key = source_key[:nchar_root] + dest_path + key_suffix
+
+            # create a descriptive label for this operation
+            descr = source_key
+            if dest_key != source_key:
+                descr = descr + ' -> ' + dest_key
+
+            # decide what to do
+            do_copy = True
+            if if_exists != 'replace':
+                if dest_key in dest:
+                    if if_exists == 'raise':
+                        raise CopyError('key {!r} exists in destination'
+                                        .format(dest_key))
+                    elif if_exists == 'skip':
+                        do_copy = False
+
+            # take action
+            if do_copy:
+                log('copy {}'.format(descr))
+                if not dry_run:
+                    data = source[source_key]
+                    n_bytes_copied += buffer_size(data)
+                    dest[dest_key] = data
+                n_copied += 1
+            else:
+                log('skip {}'.format(descr))
+                n_skipped += 1

     # log a final message with a summary of what happened
     _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied)

@@ -907,7 +974,15 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists,

         # copy attributes
         if not without_attrs:
-            ds.attrs.update(source.attrs)
+            if
dest_h5py and 'filters' in source.attrs: + # No filters key in v3 metadata so it was stored in the + # attributes instead. We cannot copy this key to + # HDF5 attrs, though! + source_attrs = source.attrs.asdict().copy() + source_attrs.pop('filters', None) + else: + source_attrs = source.attrs + ds.attrs.update(source_attrs) n_copied += 1 @@ -1063,6 +1138,8 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None, # setup counting variables n_copied = n_skipped = n_bytes_copied = 0 + zarr_version = getattr(source, '_version', 2) + # setup logging with _LogWriter(log) as log: @@ -1074,7 +1151,8 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None, n_copied += c n_skipped += s n_bytes_copied += b - dest.attrs.update(**source.attrs) + if zarr_version == 2: + dest.attrs.update(**source.attrs) # log a final message with a summary of what happened _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied) @@ -1082,7 +1160,7 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None, return n_copied, n_skipped, n_bytes_copied -def consolidate_metadata(store: StoreLike, metadata_key=".zmetadata"): +def consolidate_metadata(store: BaseStore, metadata_key=".zmetadata", *, path=''): """ Consolidate all metadata for groups and arrays within the given store into a single resource and put it under the given key. @@ -1105,6 +1183,9 @@ def consolidate_metadata(store: StoreLike, metadata_key=".zmetadata"): Store or path to directory in file system or name of zip file. metadata_key : str Key to put the consolidated metadata under. + path : str or None + Path corresponding to the group that is being consolidated. Not required + for zarr v2 stores. Returns ------- @@ -1118,9 +1199,29 @@ def consolidate_metadata(store: StoreLike, metadata_key=".zmetadata"): """ store = normalize_store_arg(store, mode="w") - def is_zarr_key(key): - return (key.endswith('.zarray') or key.endswith('.zgroup') or - key.endswith('.zattrs')) + version = store._store_version + + if version == 2: + + def is_zarr_key(key): + return (key.endswith('.zarray') or key.endswith('.zgroup') or + key.endswith('.zattrs')) + + else: + + sfx = _get_metadata_suffix(store) # type: ignore + + def is_zarr_key(key): + return (key.endswith('.array' + sfx) or key.endswith('.group' + sfx) or + key == 'zarr.json') + + # cannot create a group without a path in v3 + # so create /meta/root/consolidated group to store the metadata + if 'consolidated' not in store: + _create_group(store, path='consolidated') + if not metadata_key.startswith('meta/root/'): + metadata_key = 'meta/root/consolidated/' + metadata_key + # path = 'consolidated' out = { 'zarr_consolidated_format': 1, @@ -1130,7 +1231,7 @@ def is_zarr_key(key): } } store[metadata_key] = json_dumps(out) - return open_consolidated(store, metadata_key=metadata_key) + return open_consolidated(store, metadata_key=metadata_key, path=path) def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", **kwargs): @@ -1175,17 +1276,28 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** """ - from .storage import ConsolidatedMetadataStore - # normalize parameters store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode) if mode not in {'r', 'r+'}: raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}" .format(mode)) + path = kwargs.pop('path', None) + if store._store_version == 2: + ConsolidatedStoreClass = ConsolidatedMetadataStore + else: + 
ConsolidatedStoreClass = ConsolidatedMetadataStoreV3
+        # default is to store within 'consolidated' group on v3
+        if not metadata_key.startswith('meta/root/'):
+            metadata_key = 'meta/root/consolidated/' + metadata_key
+        if not path:
+            raise ValueError(
+                "path must be provided to open a Zarr 3.x consolidated store"
+            )
+
     # setup metadata store
-    meta_store = ConsolidatedMetadataStore(store, metadata_key=metadata_key)
+    meta_store = ConsolidatedStoreClass(store, metadata_key=metadata_key)

     # pass through
     chunk_store = kwargs.pop('chunk_store', None) or store
-    return open(store=meta_store, chunk_store=chunk_store, mode=mode, **kwargs)
+    return open(store=meta_store, chunk_store=chunk_store, mode=mode, path=path, **kwargs)
diff --git a/zarr/core.py b/zarr/core.py
index e0fe4eb0e9..5e2b4252aa 100644
--- a/zarr/core.py
+++ b/zarr/core.py
@@ -4,13 +4,14 @@
 import math
 import operator
 import re
+from collections.abc import MutableMapping
 from functools import reduce
+from typing import Any

 import numpy as np
 from numcodecs.compat import ensure_bytes, ensure_ndarray

-from collections.abc import MutableMapping
-
+from zarr._storage.store import _prefix_to_attrs_key
 from zarr.attrs import Attributes
 from zarr.codecs import AsType, get_codec
 from zarr.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError
@@ -31,7 +32,13 @@
     is_scalar,
     pop_fields,
 )
-from zarr.storage import array_meta_key, attrs_key, getsize, listdir, BaseStore
+from zarr.storage import (
+    _get_hierarchy_metadata,
+    _prefix_to_array_key,
+    getsize,
+    listdir,
+    normalize_store_arg,
+)
 from zarr.util import (
     all_equal,
     InfoReporter,
@@ -146,7 +153,7 @@ class Array:

     def __init__(
         self,
-        store: BaseStore,
+        store: Any,  # BaseStore not strictly required due to normalize_store_arg
         path=None,
         read_only=False,
         chunk_store=None,
@@ -155,12 +162,18 @@ def __init__(
         cache_attrs=True,
         partial_decompress=False,
         write_empty_chunks=False,
+        zarr_version=None,
     ):
         # N.B., expect at this point store is fully initialized with all
         # configuration metadata fully specified and normalized

-        store = BaseStore._ensure_store(store)
-        chunk_store = BaseStore._ensure_store(chunk_store)
+        store = normalize_store_arg(store, zarr_version=zarr_version)
+        if zarr_version is None:
+            zarr_version = store._store_version
+
+        if chunk_store is not None:
+            chunk_store = normalize_store_arg(chunk_store,
+                                              zarr_version=zarr_version)

         self._store = store
         self._chunk_store = chunk_store
@@ -175,12 +188,19 @@ def __init__(
         self._is_view = False
         self._partial_decompress = partial_decompress
         self._write_empty_chunks = write_empty_chunks
+        self._version = zarr_version
+
+        if self._version == 3:
+            self._data_key_prefix = 'data/root/' + self._key_prefix
+            self._data_path = 'data/root/' + self._path
+            self._hierarchy_metadata = _get_hierarchy_metadata(store=self._store)
+            self._metadata_key_suffix = self._hierarchy_metadata['metadata_key_suffix']

         # initialize metadata
         self._load_metadata()

         # initialize attributes
-        akey = self._key_prefix + attrs_key
+        akey = _prefix_to_attrs_key(self._store, self._key_prefix)
         self._attrs = Attributes(store, key=akey, read_only=read_only,
                                  synchronizer=synchronizer, cache=cache_attrs)

@@ -196,13 +216,13 @@ def _load_metadata(self):
         if self._synchronizer is None:
             self._load_metadata_nosync()
         else:
-            mkey = self._key_prefix + array_meta_key
+            mkey = _prefix_to_array_key(self._store, self._key_prefix)
             with self._synchronizer[mkey]:
                 self._load_metadata_nosync()

     def _load_metadata_nosync(self):
         try:
-            mkey = self._key_prefix + array_meta_key
+
mkey = _prefix_to_array_key(self._store, self._key_prefix) meta_bytes = self._store[mkey] except KeyError: raise ArrayNotFoundError(self._path) @@ -212,32 +232,47 @@ def _load_metadata_nosync(self): meta = self._store._metadata_class.decode_array_metadata(meta_bytes) self._meta = meta self._shape = meta['shape'] - self._chunks = meta['chunks'] - self._dtype = meta['dtype'] self._fill_value = meta['fill_value'] - self._order = meta['order'] - dimension_separator = meta.get('dimension_separator', None) - if dimension_separator is None: - try: - dimension_separator = self._store._dimension_separator - except (AttributeError, KeyError): - pass - - # Fallback for any stores which do not choose a default + if self._version == 2: + self._chunks = meta['chunks'] + self._dtype = meta['dtype'] + self._order = meta['order'] if dimension_separator is None: - dimension_separator = "." + try: + dimension_separator = self._store._dimension_separator + except (AttributeError, KeyError): + pass + + # Fallback for any stores which do not choose a default + if dimension_separator is None: + dimension_separator = "." + else: + self._chunks = meta['chunk_grid']['chunk_shape'] + self._dtype = meta['data_type'] + self._order = meta['chunk_memory_layout'] + chunk_separator = meta['chunk_grid']['separator'] + if dimension_separator is None: + dimension_separator = meta.get('dimension_separator', chunk_separator) + self._dimension_separator = dimension_separator # setup compressor - config = meta['compressor'] - if config is None: + compressor = meta.get('compressor', None) + if compressor is None: self._compressor = None + elif self._version == 2: + self._compressor = get_codec(compressor) else: - self._compressor = get_codec(config) + self._compressor = compressor # setup filters - filters = meta['filters'] + if self._version == 2: + filters = meta.get('filters', []) + else: + # TODO: storing filters under attributes for now since the v3 + # array metadata does not have a 'filters' attribute. 
+ filters = meta['attributes'].get('filters', []) if filters: filters = [get_codec(config) for config in filters] self._filters = filters @@ -262,10 +297,23 @@ def _flush_metadata_nosync(self): filters_config = [f.get_config() for f in self._filters] else: filters_config = None - meta = dict(shape=self._shape, chunks=self._chunks, dtype=self._dtype, - compressor=compressor_config, fill_value=self._fill_value, - order=self._order, filters=filters_config) - mkey = self._key_prefix + array_meta_key + _compressor = compressor_config if self._version == 2 else self._compressor + meta = dict(shape=self._shape, compressor=_compressor, + fill_value=self._fill_value, filters=filters_config) + if getattr(self._store, '_store_version', 2) == 2: + meta.update( + dict(chunks=self._chunks, dtype=self._dtype, order=self._order) + ) + else: + meta.update( + dict(chunk_grid=dict(type='regular', + chunk_shape=self._chunks, + separator=self._dimension_separator), + data_type=self._dtype, + chunk_memory_layout=self._order, + attributes=self.attrs.asdict()) + ) + mkey = _prefix_to_array_key(self._store, self._key_prefix) self._store[mkey] = self._store._metadata_class.encode_array_metadata(meta) @property @@ -453,11 +501,28 @@ def nchunks(self): def nchunks_initialized(self): """The number of chunks that have been initialized with some data.""" - # key pattern for chunk keys - prog = re.compile(r'\.'.join([r'\d+'] * min(1, self.ndim))) - # count chunk keys - return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) + if self._version == 3: + # # key pattern for chunk keys + # prog = re.compile(r'\.'.join([r'c\d+'] * min(1, self.ndim))) + # # get chunk keys, excluding the prefix + # members = self.chunk_store.list_prefix(self._data_path) + # members = [k.split(self._data_key_prefix)[1] for k in members] + # # count the chunk keys + # return sum(1 for k in members if prog.match(k)) + + # key pattern for chunk keys + prog = re.compile(self._data_key_prefix + r'c\d+') # TODO: ndim == 0 case? + # get chunk keys, excluding the prefix + members = self.chunk_store.list_prefix(self._data_path) + # count the chunk keys + return sum(1 for k in members if prog.match(k)) + else: + # key pattern for chunk keys + prog = re.compile(r'\.'.join([r'\d+'] * min(1, self.ndim))) + + # count chunk keys + return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) # backwards compatibility initialized = nchunks_initialized @@ -2061,7 +2126,15 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): return chunk def _chunk_key(self, chunk_coords): - return self._key_prefix + self._dimension_separator.join(map(str, chunk_coords)) + if self._version == 3: + # _chunk_key() corresponds to data_key(P, i, j, ...) example in the spec + # where P = self._key_prefix, i, j, ... = chunk_coords + # e.g. 
c0/2/3 for 3d array with chunk index (0, 2, 3) + # https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/core/v3.0.html#regular-grids + return ("data/root/" + self._key_prefix + + "c" + self._dimension_separator.join(map(str, chunk_coords))) + else: + return self._key_prefix + self._dimension_separator.join(map(str, chunk_coords)) def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): # decompress @@ -2242,7 +2315,8 @@ def digest(self, hashname="sha1"): for i in itertools.product(*[range(s) for s in self.cdata_shape]): h.update(self.chunk_store.get(self._chunk_key(i), b"")) - h.update(self.store.get(self._key_prefix + array_meta_key, b"")) + mkey = _prefix_to_array_key(self._store, self._key_prefix) + h.update(self.store.get(mkey, b"")) h.update(self.store.get(self.attrs.key, b"")) @@ -2279,7 +2353,7 @@ def hexdigest(self, hashname="sha1"): def __getstate__(self): return (self._store, self._path, self._read_only, self._chunk_store, self._synchronizer, self._cache_metadata, self._attrs.cache, - self._partial_decompress, self._write_empty_chunks) + self._partial_decompress, self._write_empty_chunks, self._version) def __setstate__(self, state): self.__init__(*state) @@ -2292,7 +2366,7 @@ def _synchronized_op(self, f, *args, **kwargs): else: # synchronize on the array - mkey = self._key_prefix + array_meta_key + mkey = _prefix_to_array_key(self._store, self._key_prefix) lock = self._synchronizer[mkey] with lock: @@ -2559,7 +2633,7 @@ def view(self, shape=None, chunks=None, dtype=None, if synchronizer is None: synchronizer = self._synchronizer a = Array(store=store, path=path, chunk_store=chunk_store, read_only=read_only, - synchronizer=synchronizer, cache_metadata=True) + synchronizer=synchronizer, cache_metadata=True, zarr_version=self._version) a._is_view = True # allow override of some properties diff --git a/zarr/creation.py b/zarr/creation.py index 9d6902a6e3..b8c40a859b 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -3,6 +3,7 @@ import numpy as np from numcodecs.registry import codec_registry +from zarr._storage.store import DEFAULT_ZARR_VERSION from zarr.core import Array from zarr.errors import ( ArrayNotFoundError, @@ -19,8 +20,8 @@ def create(shape, chunks=True, dtype=None, compressor='default', fill_value=0, order='C', store=None, synchronizer=None, overwrite=False, path=None, chunk_store=None, filters=None, cache_metadata=True, cache_attrs=True, read_only=False, - object_codec=None, dimension_separator=None, - write_empty_chunks=False, **kwargs): + object_codec=None, dimension_separator=None, write_empty_chunks=True, + *, zarr_version=None, **kwargs): """Create an array. Parameters @@ -80,8 +81,13 @@ def create(shape, chunks=True, dtype=None, compressor='default', deleted. This setting enables sparser storage, as only chunks with non-fill-value data are stored, at the expense of overhead associated with checking the data of each chunk. + .. versionadded:: 2.11 + zarr_version : {None, 2, 3}, optional + The zarr protocol version of the created array. If None, it will be + inferred from ``store`` or ``chunk_store`` if they are provided, + otherwise defaulting to 2. 
Returns ------- @@ -126,9 +132,12 @@ def create(shape, chunks=True, dtype=None, compressor='default', """ + if zarr_version is None and store is None: + zarr_version = getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) # handle polymorphic store arg - store = normalize_store_arg(store) + store = normalize_store_arg(store, zarr_version=zarr_version) + zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) # API compatibility with h5py compressor, fill_value = _kwargs_compat(compressor, fill_value, kwargs) @@ -145,6 +154,9 @@ def create(shape, chunks=True, dtype=None, compressor='default', f"{store_separator}") dimension_separator = normalize_dimension_separator(dimension_separator) + if zarr_version > 2 and path is None: + raise ValueError("path must be supplied to initialize a zarr v3 array") + # initialize array metadata init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, fill_value=fill_value, order=order, overwrite=overwrite, path=path, @@ -392,6 +404,9 @@ def open_array( storage_options=None, partial_decompress=False, write_empty_chunks=False, + *, + zarr_version=None, + dimension_separator=None, **kwargs ): """Open an array using file-mode-like semantics. @@ -454,8 +469,19 @@ def open_array( deleted. This setting enables sparser storage, as only chunks with non-fill-value data are stored, at the expense of overhead associated with checking the data of each chunk. + .. versionadded:: 2.11 + zarr_version : {None, 2, 3}, optional + The zarr protocol version of the array to be opened. If None, it will + be inferred from ``store`` or ``chunk_store`` if they are provided, + otherwise defaulting to 2. + dimension_separator : {None, '.', '/'}, optional + Can be used to specify whether the array is in a flat ('.') or nested + ('/') format. If None, the appropriate value will be read from `store` + when present. Otherwise, defaults to '.' when ``zarr_version == 2`` + and `/` otherwise. + Returns ------- z : zarr.core.Array @@ -489,12 +515,29 @@ def open_array( # w- or x : create, fail if exists # a : read/write if exists, create otherwise (default) + if zarr_version is None and store is None: + zarr_version = getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) + # handle polymorphic store arg - store = normalize_store_arg(store, storage_options=storage_options, mode=mode) + store = normalize_store_arg(store, storage_options=storage_options, + mode=mode, zarr_version=zarr_version) + zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) if chunk_store is not None: chunk_store = normalize_store_arg(chunk_store, storage_options=storage_options, - mode=mode) + mode=mode, + zarr_version=zarr_version) + + # respect the dimension separator specified in a store, if present + if dimension_separator is None: + if hasattr(store, '_dimension_separator'): + dimension_separator = store._dimension_separator + else: + dimension_separator = '.' if zarr_version == 2 else '/' + + if zarr_version == 3 and path is None: + path = 'array' # TODO: raise ValueError instead? 
+ path = normalize_storage_path(path) # API compatibility with h5py @@ -516,7 +559,8 @@ def open_array( init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, fill_value=fill_value, order=order, filters=filters, overwrite=True, path=path, - object_codec=object_codec, chunk_store=chunk_store) + object_codec=object_codec, chunk_store=chunk_store, + dimension_separator=dimension_separator) elif mode == 'a': if not contains_array(store, path=path): @@ -525,7 +569,8 @@ def open_array( init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, fill_value=fill_value, order=order, filters=filters, path=path, - object_codec=object_codec, chunk_store=chunk_store) + object_codec=object_codec, chunk_store=chunk_store, + dimension_separator=dimension_separator) elif mode in ['w-', 'x']: if contains_group(store, path=path): @@ -536,7 +581,8 @@ def open_array( init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, fill_value=fill_value, order=order, filters=filters, path=path, - object_codec=object_codec, chunk_store=chunk_store) + object_codec=object_codec, chunk_store=chunk_store, + dimension_separator=dimension_separator) # determine read only status read_only = mode == 'r' @@ -564,6 +610,7 @@ def _like_args(a, kwargs): kwargs.setdefault('compressor', a.compressor) kwargs.setdefault('order', a.order) kwargs.setdefault('filters', a.filters) + kwargs.setdefault('zarr_version', a._version) else: kwargs.setdefault('compressor', 'default') kwargs.setdefault('order', 'C') diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 31c9e2a8d2..0684be4a57 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -3,11 +3,11 @@ import numpy as np +from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, DEFAULT_ZARR_VERSION from zarr.attrs import Attributes from zarr.core import Array from zarr.creation import (array, create, empty, empty_like, full, full_like, - normalize_store_arg, ones, ones_like, zeros, - zeros_like) + ones, ones_like, zeros, zeros_like) from zarr.errors import ( ContainsArrayError, ContainsGroupError, @@ -15,14 +15,18 @@ ReadOnlyError, ) from zarr.storage import ( + _get_hierarchy_metadata, + _prefix_to_group_key, BaseStore, MemoryStore, + MemoryStoreV3, attrs_key, contains_array, contains_group, group_meta_key, init_group, listdir, + normalize_store_arg, rename, rmdir, ) @@ -109,9 +113,12 @@ class Group(MutableMapping): """ def __init__(self, store, path=None, read_only=False, chunk_store=None, - cache_attrs=True, synchronizer=None): - store: BaseStore = BaseStore._ensure_store(store) - chunk_store: BaseStore = BaseStore._ensure_store(chunk_store) + cache_attrs=True, synchronizer=None, zarr_version=None): + store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) + if zarr_version is None: + zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + if chunk_store is not None: + chunk_store: BaseStore = _normalize_store_arg(chunk_store, zarr_version=zarr_version) self._store = store self._chunk_store = chunk_store self._path = normalize_storage_path(path) @@ -121,6 +128,13 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, self._key_prefix = '' self._read_only = read_only self._synchronizer = synchronizer + self._version = zarr_version + + if self._version == 3: + self._data_key_prefix = data_root + self._key_prefix + self._data_path = data_root + self._path + self._hierarchy_metadata = _get_hierarchy_metadata(store=self._store) + 
self._metadata_key_suffix = _get_metadata_suffix(store=self._store)

         # guard conditions
         if contains_array(store, path=self._path):
@@ -128,15 +142,29 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None,

         # initialize metadata
         try:
-            mkey = self._key_prefix + group_meta_key
+            mkey = _prefix_to_group_key(self._store, self._key_prefix)
+            assert not mkey.endswith("root/.group")
             meta_bytes = store[mkey]
         except KeyError:
-            raise GroupNotFoundError(path)
+            if self._version == 2:
+                raise GroupNotFoundError(path)
+            else:
+                implicit_prefix = meta_root + self._key_prefix
+                if self._store.list_prefix(implicit_prefix):
+                    # implicit group does not have any metadata
+                    self._meta = None
+                else:
+                    raise GroupNotFoundError(path)
         else:
             self._meta = self._store._metadata_class.decode_group_metadata(meta_bytes)

         # setup attributes
-        akey = self._key_prefix + attrs_key
+        if self._version == 2:
+            akey = self._key_prefix + attrs_key
+        else:
+            # Note: mkey doesn't actually exist for implicit groups, but the
+            # object can still be created.
+            akey = mkey
         self._attrs = Attributes(store, key=akey, read_only=read_only,
                                  cache=cache_attrs, synchronizer=synchronizer)

@@ -227,11 +255,36 @@ def __iter__(self):
         quux

         """
-        for key in sorted(listdir(self._store, self._path)):
-            path = self._key_prefix + key
-            if (contains_array(self._store, path) or
-                    contains_group(self._store, path)):
-                yield key
+        if getattr(self._store, '_store_version', 2) == 2:
+            for key in sorted(listdir(self._store, self._path)):
+                path = self._key_prefix + key
+                if (contains_array(self._store, path) or
+                        contains_group(self._store, path)):
+                    yield key
+        else:
+            # TODO: Should this iterate over data folders and/or metadata
+            # folders and/or metadata files?
+
+            dir_path = meta_root + self._key_prefix
+            name_start = len(dir_path)
+            keys, prefixes = self._store.list_dir(dir_path)

+            # yield any groups or arrays
+            sfx = self._metadata_key_suffix
+            for key in keys:
+                len_suffix = len('.group') + len(sfx)  # same for .array
+                if key.endswith(('.group' + sfx, '.array' + sfx)):
+                    yield key[name_start:-len_suffix]
+
+            # also yield any implicit groups
+            for prefix in prefixes:
+                prefix = prefix.rstrip('/')
+                # only implicit if there is no .group.sfx file
+                if not prefix + '.group' + sfx in self._store:
+                    yield prefix[name_start:]
+
+            # Note: omit data/root/ to avoid duplicate listings
+            # any group in data/root/ must have an entry in meta/root/

     def __len__(self):
         """Number of members."""

@@ -325,7 +378,7 @@ def __contains__(self, item):
         """
         path = self._item_path(item)
         return contains_array(self._store, path) or \
-            contains_group(self._store, path)
+            contains_group(self._store, path, explicit_only=False)

     def __getitem__(self, item):
         """Obtain a group member.
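The v3 branch of ``__iter__`` above is pure key arithmetic over the ``meta/root/`` listing. The following self-contained sketch (hypothetical keys, no zarr required) shows the same logic in isolation: explicit members come from ``.array.json``/``.group.json`` entries, and a bare prefix with no sibling ``.group.json`` is reported as an implicit group:

    # what list_dir('meta/root/grp/') might return for a small hierarchy
    keys = ['a.array.json', 'sub.group.json']
    prefixes = ['sub/', 'implicit/']
    store_keys = {'meta/root/grp/a.array.json',
                  'meta/root/grp/sub.group.json'}

    sfx = '.json'  # the metadata_key_suffix from zarr.json
    members = []

    # explicit arrays and groups
    for key in keys:
        if key.endswith(('.group' + sfx, '.array' + sfx)):
            members.append(key[:-len('.group' + sfx)])  # same length for .array

    # implicit groups: a folder with no .group.json of its own
    for prefix in prefixes:
        prefix = prefix.rstrip('/')
        if 'meta/root/grp/' + prefix + '.group' + sfx not in store_keys:
            members.append(prefix)

    print(sorted(members))  # ['a', 'implicit', 'sub']
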
@@ -352,11 +405,21 @@ def __getitem__(self, item): if contains_array(self._store, path): return Array(self._store, read_only=self._read_only, path=path, chunk_store=self._chunk_store, - synchronizer=self._synchronizer, cache_attrs=self.attrs.cache) - elif contains_group(self._store, path): + synchronizer=self._synchronizer, cache_attrs=self.attrs.cache, + zarr_version=self._version) + elif contains_group(self._store, path, explicit_only=True): return Group(self._store, read_only=self._read_only, path=path, chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer) + synchronizer=self._synchronizer, zarr_version=self._version) + elif self._version == 3: + implicit_group = meta_root + path + '/' + # non-empty folder in the metadata path implies an implicit group + if self._store.list_prefix(implicit_group): + return Group(self._store, read_only=self._read_only, path=path, + chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, zarr_version=self._version) + else: + raise KeyError(item) else: raise KeyError(item) @@ -369,7 +432,7 @@ def __delitem__(self, item): def _delitem_nosync(self, item): path = self._item_path(item) if contains_array(self._store, path) or \ - contains_group(self._store, path): + contains_group(self._store, path, explicit_only=False): rmdir(self._store, path) else: raise KeyError(item) @@ -406,10 +469,23 @@ def group_keys(self): ['bar', 'foo'] """ - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_group(self._store, path): - yield key + if self._version == 2: + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + if contains_group(self._store, path): + yield key + else: + dir_name = meta_root + self._path + group_sfx = '.group' + self._metadata_key_suffix + for key in sorted(listdir(self._store, dir_name)): + if key.endswith(group_sfx): + key = key[:-len(group_sfx)] + path = self._key_prefix + key + if path.endswith(".array" + self._metadata_key_suffix): + # skip array keys + continue + if contains_group(self._store, path, explicit_only=False): + yield key def groups(self): """Return an iterator over (name, value) pairs for groups only. 
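The ``explicit_only`` flag used above is what keeps ``__contains__`` and ``__getitem__`` consistent for v3: an implicit group (a ``meta/root/...`` prefix with children but no ``.group.json`` of its own) is invisible to the default ``contains_group`` check, yet still reachable as a member. A hedged sketch, assuming the ``MemoryStoreV3`` class from this series is importable from ``zarr.storage``:

    import numpy as np
    import zarr
    from zarr.storage import MemoryStoreV3, contains_group

    store = MemoryStoreV3()
    g = zarr.group(store=store, path='grp', zarr_version=3)
    # writing grp/implicit/a makes 'implicit' an implicit group: it has
    # array metadata below it but no .group.json of its own
    g.create_dataset('implicit/a', data=np.zeros(4), chunks=(2,))

    print('implicit' in g)                                             # True
    print(contains_group(store, 'grp/implicit'))                       # False
    print(contains_group(store, 'grp/implicit', explicit_only=False))  # True
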
@@ -428,13 +504,38 @@ def groups(self): foo """ - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_group(self._store, path): - yield key, Group(self._store, path=path, read_only=self._read_only, - chunk_store=self._chunk_store, - cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer) + if self._version == 2: + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + if contains_group(self._store, path, explicit_only=False): + yield key, Group( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version) + + else: + dir_name = meta_root + self._path + group_sfx = '.group' + self._metadata_key_suffix + for key in sorted(listdir(self._store, dir_name)): + if key.endswith(group_sfx): + key = key[:-len(group_sfx)] + path = self._key_prefix + key + if path.endswith(".array" + self._metadata_key_suffix): + # skip array keys + continue + if contains_group(self._store, path, explicit_only=False): + yield key, Group( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version) def array_keys(self, recurse=False): """Return an iterator over member names for arrays only. @@ -491,14 +592,35 @@ def arrays(self, recurse=False): recurse=recurse) def _array_iter(self, keys_only, method, recurse): - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_array(self._store, path): - yield key if keys_only else (key, self[key]) - elif recurse and contains_group(self._store, path): - group = self[key] - for i in getattr(group, method)(recurse=recurse): - yield i + if self._version == 2: + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + assert not path.startswith("meta") + if contains_array(self._store, path): + _key = key.rstrip("/") + yield _key if keys_only else (_key, self[key]) + elif recurse and contains_group(self._store, path): + group = self[key] + for i in getattr(group, method)(recurse=recurse): + yield i + else: + dir_name = meta_root + self._path + array_sfx = '.array' + self._metadata_key_suffix + for key in sorted(listdir(self._store, dir_name)): + if key.endswith(array_sfx): + key = key[:-len(array_sfx)] + path = self._key_prefix + key + assert not path.startswith("meta") + if key.endswith('.group' + self._metadata_key_suffix): + # skip group metadata keys + continue + if contains_array(self._store, path): + _key = key.rstrip("/") + yield _key if keys_only else (_key, self[key]) + elif recurse and contains_group(self._store, path): + group = self[key] + for i in getattr(group, method)(recurse=recurse): + yield i def visitvalues(self, func): """Run ``func`` on each object. 
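Despite the heavier v3 branches, the public iteration API is unchanged between protocol versions; only the key arithmetic differs. A short sketch under the same assumptions as above:

    import numpy as np
    import zarr
    from zarr.storage import MemoryStoreV3

    g = zarr.group(store=MemoryStoreV3(), path='grp', zarr_version=3)
    g.create_group('foo')
    g.create_dataset('bar', data=np.arange(4), chunks=(2,))

    print(list(g.group_keys()))  # ['foo']
    print(list(g.array_keys()))  # ['bar']
    print(sorted(g))             # ['bar', 'foo']
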
@@ -707,7 +829,7 @@ def _create_group_nosync(self, name, overwrite=False): return Group(self._store, path=path, read_only=self._read_only, chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer) + synchronizer=self._synchronizer, zarr_version=self._version) def create_groups(self, *names, **kwargs): """Convenience method to create multiple groups in a single call.""" @@ -751,7 +873,7 @@ def _require_group_nosync(self, name, overwrite=False): return Group(self._store, path=path, read_only=self._read_only, chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer) + synchronizer=self._synchronizer, zarr_version=self._version) def require_groups(self, *names): """Convenience method to require multiple groups in a single call.""" @@ -1039,9 +1161,10 @@ def move(self, source, dest): # Check that source exists. if not (contains_array(self._store, source) or - contains_group(self._store, source)): + contains_group(self._store, source, explicit_only=False)): raise ValueError('The source, "%s", does not exist.' % source) - if contains_array(self._store, dest) or contains_group(self._store, dest): + if (contains_array(self._store, dest) or + contains_group(self._store, dest, explicit_only=False)): raise ValueError('The dest, "%s", already exists.' % dest) # Ensure groups needed for `dest` exist. @@ -1051,15 +1174,19 @@ def move(self, source, dest): self._write_op(self._move_nosync, source, dest) -def _normalize_store_arg(store, *, storage_options=None, mode="r"): +def _normalize_store_arg(store, *, storage_options=None, mode="r", + zarr_version=None): + if zarr_version is None: + zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) if store is None: - return MemoryStore() + return MemoryStore() if zarr_version == 2 else MemoryStoreV3() return normalize_store_arg(store, - storage_options=storage_options, mode=mode) + storage_options=storage_options, mode=mode, + zarr_version=zarr_version) def group(store=None, overwrite=False, chunk_store=None, - cache_attrs=True, synchronizer=None, path=None): + cache_attrs=True, synchronizer=None, path=None, *, zarr_version=None): """Create a group. Parameters @@ -1104,20 +1231,29 @@ def group(store=None, overwrite=False, chunk_store=None, """ # handle polymorphic store arg - store = _normalize_store_arg(store) + store = _normalize_store_arg(store, zarr_version=zarr_version) + if zarr_version is None: + zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + if zarr_version == 3 and path is None: + raise ValueError(f"path must be provided for a v{zarr_version} group") path = normalize_storage_path(path) - # require group - if overwrite or not contains_group(store): + if zarr_version == 2: + requires_init = overwrite or not contains_group(store) + elif zarr_version == 3: + requires_init = overwrite or not contains_group(store, path) + + if requires_init: init_group(store, overwrite=overwrite, chunk_store=chunk_store, path=path) return Group(store, read_only=False, chunk_store=chunk_store, - cache_attrs=cache_attrs, synchronizer=synchronizer, path=path) + cache_attrs=cache_attrs, synchronizer=synchronizer, path=path, + zarr_version=zarr_version) def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=None, - chunk_store=None, storage_options=None): + chunk_store=None, storage_options=None, *, zarr_version=None): """Open a group using file-mode-like semantics. 
Parameters

@@ -1165,12 +1301,23 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N

     # handle polymorphic store arg
     store = _normalize_store_arg(
-        store, storage_options=storage_options, mode=mode
-    )
+        store, storage_options=storage_options, mode=mode,
+        zarr_version=zarr_version)
+    if zarr_version is None:
+        zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)

     if chunk_store is not None:
         chunk_store = _normalize_store_arg(chunk_store,
                                            storage_options=storage_options,
                                            mode=mode)
+        if not getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) == zarr_version:
+            raise ValueError(
+                "zarr_version of store and chunk_store must match"
+            )
+
+    store_version = getattr(store, '_store_version', 2)
+    if store_version == 3 and path is None:
+        raise ValueError("path must be supplied to initialize a zarr v3 group")
+
     path = normalize_storage_path(path)

     # ensure store is initialized
@@ -1202,4 +1349,5 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N
     read_only = mode == 'r'

     return Group(store, read_only=read_only, cache_attrs=cache_attrs,
-                 synchronizer=synchronizer, path=path, chunk_store=chunk_store)
+                 synchronizer=synchronizer, path=path, chunk_store=chunk_store,
+                 zarr_version=zarr_version)
diff --git a/zarr/meta.py b/zarr/meta.py
index c292b09a14..bb4bae4199 100644
--- a/zarr/meta.py
+++ b/zarr/meta.py
@@ -1,14 +1,87 @@
 import base64
+import itertools
 from collections.abc import Mapping

+import numcodecs
 import numpy as np
+from numcodecs.abc import Codec

 from zarr.errors import MetadataError
 from zarr.util import json_dumps, json_loads

-from typing import cast, Union, Any, List, Mapping as MappingType
+from typing import cast, Union, Any, List, Mapping as MappingType, Optional

 ZARR_FORMAT = 2
+ZARR_FORMAT_v3 = 3
+
+# FLOAT_FILLS = {"NaN": np.nan, "Infinity": np.PINF, "-Infinity": np.NINF}
+
+_default_entry_point_metadata_v3 = {
+    "zarr_format": "https://purl.org/zarr/spec/protocol/core/3.0",
+    "metadata_encoding": "https://purl.org/zarr/spec/protocol/core/3.0",
+    "metadata_key_suffix": ".json",
+    "extensions": [],
+}
+
+_v3_core_types = set(
+    "".join(d) for d in itertools.product("<>", ("u", "i", "f"), ("2", "4", "8"))
+)
+_v3_core_types = {"bool", "i1", "u1"} | _v3_core_types
+
+# The set of complex types allowed ({"<c8", "<c16", ">c8", ">c16"})
+_v3_complex_types = set(
+    f"{end}c{_bytes}" for end, _bytes in itertools.product("<>", ("8", "16"))
+)
+
+# All dtype.str values corresponding to datetime64 and timedelta64
+# see: https://numpy.org/doc/stable/reference/arrays.datetime.html#datetime-units
+_date_units = ["Y", "M", "W", "D"]
+_time_units = ["h", "m", "s", "ms", "us", "μs", "ns", "ps", "fs", "as"]
+_v3_datetime_types = set(
+    f"{end}{kind}8[{unit}]"
+    for end, unit, kind in itertools.product("<>", _date_units + _time_units, ('m', 'M'))
+)
+
+
+def get_extended_dtype_info(dtype) -> dict:
+    if dtype.str in _v3_complex_types:
+        return dict(
+            extension="https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/extensions/complex-dtypes/v1.0.html",  # noqa
+            type=dtype.str,
+            fallback=None,
+        )
+    elif dtype.str == "|O":
+        return dict(
+            extension="TODO: object array protocol URL",  # noqa
+            type=dtype.str,
+            fallback=None,
+        )
+    elif dtype.str.startswith("|S"):
+        return dict(
+            extension="TODO: bytestring array protocol URL",  # noqa
+            type=dtype.str,
+            fallback=None,
+        )
+    elif dtype.str.startswith(("<U", ">U")):
+        return dict(
+            extension="TODO: unicode array protocol URL",  # noqa
+            type=dtype.str,
+            fallback=None,
)
+    elif dtype.str.startswith("|V"):
+        return dict(
+            extension="TODO: structured array protocol URL",  # noqa
+            type=dtype.descr,
+            fallback=None,
+        )
+    elif dtype.str in _v3_datetime_types:
+        return dict(
+            extension="https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/extensions/datetime-dtypes/v1.0.html",  # noqa
+            type=dtype.str,
+            fallback=None,
+        )
+    else:
+        raise ValueError(f"Unsupported dtype: {dtype}")


 class Metadata2:
@@ -46,12 +119,13 @@ def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, A
             dtype = cls.decode_dtype(meta["dtype"])
             if dtype.hasobject:
                 import numcodecs
-                object_codec = numcodecs.get_codec(meta['filters'][0])
+
+                object_codec = numcodecs.get_codec(meta["filters"][0])
             else:
                 object_codec = None

             dimension_separator = meta.get("dimension_separator", None)
-            fill_value = cls.decode_fill_value(meta['fill_value'], dtype, object_codec)
+            fill_value = cls.decode_fill_value(meta["fill_value"], dtype, object_codec)
             meta = dict(
                 zarr_format=meta["zarr_format"],
                 shape=tuple(meta["shape"]),
@@ -63,7 +137,7 @@ def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, A
                 filters=meta["filters"],
             )
             if dimension_separator:
-                meta['dimension_separator'] = dimension_separator
+                meta["dimension_separator"] = dimension_separator
         except Exception as e:
             raise MetadataError("error decoding metadata") from e
         else:
@@ -79,7 +153,8 @@ def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes:
         dimension_separator = meta.get("dimension_separator")
         if dtype.hasobject:
             import numcodecs
-            object_codec = numcodecs.get_codec(meta['filters'][0])
+
+            object_codec = numcodecs.get_codec(meta["filters"][0])
         else:
             object_codec = None

@@ -93,9 +168,6 @@ def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes:
             order=meta["order"],
             filters=meta["filters"],
         )
-        if dimension_separator:
-            meta['dimension_separator'] = dimension_separator
-
         if dimension_separator:
             meta["dimension_separator"] = dimension_separator

@@ -141,13 +213,15 @@ def encode_group_metadata(cls, meta=None) -> bytes:
         return json_dumps(meta)

     @classmethod
-    def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> Any:
+    def decode_fill_value(
+        cls, v: Any, dtype: np.dtype, object_codec: Any = None
+    ) -> Any:
         # early out
         if v is None:
             return v
-        if dtype.kind == 'V' and dtype.hasobject:
+        if dtype.kind == "V" and dtype.hasobject:
             if object_codec is None:
-                raise ValueError('missing object_codec for object array')
+                raise ValueError("missing object_codec for object array")
             v = base64.standard_b64decode(v)
             v = object_codec.decode(v)
             v = np.array(v, dtype=dtype)[()]
@@ -189,15 +263,17 @@ def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) ->
         return np.array(v, dtype=dtype)[()]

     @classmethod
-    def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> Any:
+    def encode_fill_value(
+        cls, v: Any, dtype: np.dtype, object_codec: Any = None
+    ) -> Any:
         # early out
         if v is None:
             return v
-        if dtype.kind == 'V' and dtype.hasobject:
+        if dtype.kind == "V" and dtype.hasobject:
             if object_codec is None:
-                raise ValueError('missing object_codec for object array')
+                raise ValueError("missing object_codec for object array")
             v = object_codec.encode(v)
-            v = str(base64.standard_b64encode(v), 'ascii')
+            v = str(base64.standard_b64encode(v), "ascii")
             return v
         if dtype.kind == "f":
             if np.isnan(v):
@@ -214,8 +290,10 @@ def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) ->
             return bool(v)
elif dtype.kind in "c":
             c = cast(np.complex128, np.dtype(complex).type())
-            v = (cls.encode_fill_value(v.real, c.real.dtype, object_codec),
-                 cls.encode_fill_value(v.imag, c.imag.dtype, object_codec))
+            v = (
+                cls.encode_fill_value(v.real, c.real.dtype, object_codec),
+                cls.encode_fill_value(v.imag, c.imag.dtype, object_codec),
+            )
             return v
         elif dtype.kind in "SV":
             v = str(base64.standard_b64encode(v), "ascii")
@@ -228,7 +306,235 @@ def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) ->
         return v


-# expose class methods for backwards compatibility
+class Metadata3(Metadata2):
+    ZARR_FORMAT = ZARR_FORMAT_v3
+
+    @classmethod
+    def decode_dtype(cls, d, validate=True):
+        if isinstance(d, dict):
+            # extract the type from the extension info
+            try:
+                d = d['type']
+            except KeyError:
+                raise KeyError(
+                    "Extended dtype info must provide a key named 'type'."
+                )
+        d = cls._decode_dtype_descr(d)
+        dtype = np.dtype(d)
+        if validate:
+            if dtype.str in (_v3_core_types | {"|b1", "|u1", "|i1"}):
+                # it is a core dtype of the v3 spec
+                pass
+            else:
+                # will raise if this is not a recognized extended dtype
+                get_extended_dtype_info(dtype)
+        return dtype
+
+    @classmethod
+    def encode_dtype(cls, d):
+        s = d.str
+        if s == "|b1":
+            return "bool"
+        elif s == "|u1":
+            return "u1"
+        elif s == "|i1":
+            return "i1"
+        elif s in _v3_core_types:
+            return Metadata2.encode_dtype(d)
+        else:
+            # Check if this dtype corresponds to a supported extension to
+            # the v3 protocol.
+            return get_extended_dtype_info(np.dtype(d))
+
+    @classmethod
+    def decode_group_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]:
+        meta = cls.parse_metadata(s)
+        # 1 / 0
+        # # check metadata format version
+        # zarr_format = meta.get("zarr_format", None)
+        # if zarr_format != cls.ZARR_FORMAT:
+        #     raise MetadataError("unsupported zarr format: %s" % zarr_format)
+
+        assert "attributes" in meta
+        # meta = dict(attributes=meta['attributes'])
+        return meta
+
+        # return json.loads(s)
+
+    @classmethod
+    def encode_group_metadata(cls, meta=None) -> bytes:
+        # The ZARR_FORMAT should not be in the group metadata, but in the
+        # entry point metadata instead
+        # meta = dict(zarr_format=cls.ZARR_FORMAT)
+        if meta is None:
+            meta = {"attributes": {}}
+        meta = dict(attributes=meta.get("attributes", {}))
+        return json_dumps(meta)
+
+    @classmethod
+    def encode_hierarchy_metadata(cls, meta=None) -> bytes:
+        if meta is None:
+            meta = _default_entry_point_metadata_v3
+        elif set(meta.keys()) != {
+            "zarr_format",
+            "metadata_encoding",
+            "metadata_key_suffix",
+            "extensions",
+        }:
+            raise ValueError(f"Unexpected keys in metadata. meta={meta}")
+        return json_dumps(meta)
+
+    @classmethod
+    def decode_hierarchy_metadata(
+        cls, s: Union[MappingType, str]
+    ) -> MappingType[str, Any]:
+        meta = cls.parse_metadata(s)
+        # check metadata format
+        # zarr_format = meta.get("zarr_format", None)
+        # if zarr_format != "https://purl.org/zarr/spec/protocol/core/3.0":
+        #     raise MetadataError("unsupported zarr format: %s" % zarr_format)
+        if set(meta.keys()) != {
+            "zarr_format",
+            "metadata_encoding",
+            "metadata_key_suffix",
+            "extensions",
+        }:
+            raise ValueError(f"Unexpected keys in metadata.
meta={meta}") + return meta + + @classmethod + def _encode_codec_metadata(cls, codec: Codec) -> Optional[Mapping]: + if codec is None: + return None + + # only support gzip for now + config = codec.get_config() + del config["id"] + uri = 'https://purl.org/zarr/spec/codec/' + if isinstance(codec, numcodecs.GZip): + uri = uri + "gzip/1.0" + elif isinstance(codec, numcodecs.Zlib): + uri = uri + "zlib/1.0" + elif isinstance(codec, numcodecs.Blosc): + uri = uri + "blosc/1.0" + elif isinstance(codec, numcodecs.BZ2): + uri = uri + "bz2/1.0" + elif isinstance(codec, numcodecs.LZ4): + uri = uri + "lz4/1.0" + elif isinstance(codec, numcodecs.LZMA): + uri = uri + "lzma/1.0" + meta = { + "codec": uri, + "configuration": config, + } + return meta + + @classmethod + def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]: + if meta is None: + return None + + uri = 'https://purl.org/zarr/spec/codec/' + conf = meta['configuration'] + if meta['codec'].startswith(uri + 'gzip/'): + codec = numcodecs.GZip(level=conf['level']) + elif meta['codec'].startswith(uri + 'zlib/'): + codec = numcodecs.Zlib(level=conf['level']) + elif meta['codec'].startswith(uri + 'blosc/'): + codec = numcodecs.Blosc(clevel=conf['clevel'], + shuffle=conf['shuffle'], + blocksize=conf['blocksize'], + cname=conf['cname']) + elif meta['codec'].startswith(uri + 'bz2/'): + codec = numcodecs.BZ2(level=conf['level']) + elif meta['codec'].startswith(uri + 'lz4/'): + codec = numcodecs.LZ4(acceleration=conf['acceleration']) + elif meta['codec'].startswith(uri + 'lzma/'): + codec = numcodecs.LZMA(format=conf['format'], + check=conf['check'], + preset=conf['preset'], + filters=conf['filters']) + else: + raise NotImplementedError + + return codec + + @classmethod + def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + meta = cls.parse_metadata(s) + + # extract array metadata fields + try: + dtype = cls.decode_dtype(meta["data_type"]) + if dtype.hasobject: + import numcodecs + + object_codec = numcodecs.get_codec(meta["attributes"]["filters"][0]) + else: + object_codec = None + fill_value = cls.decode_fill_value(meta["fill_value"], dtype, object_codec) + # TODO: remove dimension_separator? 
+ + compressor = cls._decode_codec_metadata(meta.get("compressor", None)) + extensions = meta.get("extensions", []) + meta = dict( + shape=tuple(meta["shape"]), + chunk_grid=dict( + type=meta["chunk_grid"]["type"], + chunk_shape=tuple(meta["chunk_grid"]["chunk_shape"]), + separator=meta["chunk_grid"]["separator"], + ), + data_type=dtype, + fill_value=fill_value, + chunk_memory_layout=meta["chunk_memory_layout"], + attributes=meta["attributes"], + extensions=extensions, + ) + # compressor field should be absent when there is no compression + if compressor: + meta['compressor'] = compressor + + except Exception as e: + raise MetadataError("error decoding metadata: %s" % e) + else: + return meta + + @classmethod + def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes: + dtype = meta["data_type"] + sdshape = () + if dtype.subdtype is not None: + dtype, sdshape = dtype.subdtype + dimension_separator = meta.get("dimension_separator") + if dtype.hasobject: + import numcodecs + + object_codec = numcodecs.get_codec(meta["attributes"]["filters"][0]) + else: + object_codec = None + + compressor = cls._encode_codec_metadata(meta.get("compressor", None)) + extensions = meta.get("extensions", []) + meta = dict( + shape=meta["shape"] + sdshape, + chunk_grid=dict( + type=meta["chunk_grid"]["type"], + chunk_shape=tuple(meta["chunk_grid"]["chunk_shape"]), + separator=meta["chunk_grid"]["separator"], + ), + data_type=cls.encode_dtype(dtype), + fill_value=encode_fill_value(meta["fill_value"], dtype, object_codec), + chunk_memory_layout=meta["chunk_memory_layout"], + attributes=meta.get("attributes", {}), + extensions=extensions, + ) + if compressor: + meta["compressor"] = compressor + if dimension_separator: + meta["dimension_separator"] = dimension_separator + return json_dumps(meta) + + parse_metadata = Metadata2.parse_metadata decode_array_metadata = Metadata2.decode_array_metadata encode_array_metadata = Metadata2.encode_array_metadata diff --git a/zarr/storage.py b/zarr/storage.py index 35e1fdb0a2..709bbba7ee 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -35,6 +35,7 @@ import uuid import time +from numcodecs.abc import Codec from numcodecs.compat import ( ensure_bytes, ensure_text, @@ -42,6 +43,7 @@ ) from numcodecs.registry import codec_registry +from zarr._storage.store import DEFAULT_ZARR_VERSION from zarr.errors import ( MetadataError, BadCompressorError, @@ -56,16 +58,25 @@ normalize_dtype, normalize_fill_value, normalize_order, normalize_shape, normalize_storage_path, retry_call) -from zarr._storage.absstore import ABSStore # noqa: F401 -from zarr._storage.store import (_listdir_from_keys, - _path_to_prefix, +from zarr._storage.absstore import ABSStore, ABSStoreV3 # noqa: F401 +from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 + _get_metadata_suffix, + _listdir_from_keys, _rename_from_keys, + _rename_metadata_v3, _rmdir_from_keys, + _rmdir_from_keys_v3, + _path_to_prefix, + _prefix_to_array_key, + _prefix_to_group_key, array_meta_key, - group_meta_key, attrs_key, + data_root, + group_meta_key, + meta_root, BaseStore, - Store) + Store, + StoreV3) __doctest_requires__ = { ('RedisStore', 'RedisStore.*'): ['redis'], @@ -92,39 +103,91 @@ def contains_array(store: StoreLike, path: Path = None) -> bool: """Return True if the store contains an array at the given logical path.""" path = normalize_storage_path(path) prefix = _path_to_prefix(path) - key = prefix + array_meta_key + key = _prefix_to_array_key(store, prefix) return key in store -def 
contains_group(store: StoreLike, path: Path = None) -> bool: +def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> bool: """Return True if the store contains a group at the given logical path.""" path = normalize_storage_path(path) prefix = _path_to_prefix(path) - key = prefix + group_meta_key - return key in store + key = _prefix_to_group_key(store, prefix) + store_version = getattr(store, '_store_version', 2) + if store_version == 2 or explicit_only: + return key in store + else: + if key in store: + return True + # for v3, need to also handle implicit groups + sfx = _get_metadata_suffix(store) # type: ignore + implicit_prefix = key.replace('.group' + sfx, '') + if not implicit_prefix.endswith('/'): + implicit_prefix += '/' + if store.list_prefix(implicit_prefix): # type: ignore + return True + return False -def normalize_store_arg(store: Any, storage_options=None, mode="r") -> BaseStore: + +def normalize_store_arg(store: Any, storage_options=None, mode="r", *, + zarr_version=None) -> BaseStore: + if zarr_version is None: + # default to v2 store for backward compatibility + zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + if zarr_version not in [2, 3]: + raise ValueError("zarr_version must be 2 or 3") if store is None: - return BaseStore._ensure_store(dict()) - elif isinstance(store, os.PathLike): - store = os.fspath(store) - if isinstance(store, str): - if "://" in store or "::" in store: - return FSStore(store, mode=mode, **(storage_options or {})) - elif storage_options: - raise ValueError("storage_options passed with non-fsspec path") - if store.endswith('.zip'): - return ZipStore(store, mode=mode) - elif store.endswith('.n5'): - from zarr.n5 import N5Store - return N5Store(store) + if zarr_version == 2: + store = KVStore(dict()) else: - return DirectoryStore(store) - else: - if not isinstance(store, BaseStore) and isinstance(store, MutableMapping): - store = BaseStore._ensure_store(store) + store = KVStoreV3(dict()) + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) return store + elif hasattr(store, '_store_version') and store._store_version != zarr_version: + raise ValueError( + f"store is a zarr v{store._store_version} store which conflicts " + f"with the specified zarr_version ({zarr_version})." 
+ ) + + if isinstance(store, os.PathLike): + store = os.fspath(store) + if isinstance(store, str): + if zarr_version == 2: + if "://" in store or "::" in store: + return FSStore(store, mode=mode, **(storage_options or {})) + elif storage_options: + raise ValueError("storage_options passed with non-fsspec path") + if store.endswith('.zip'): + return ZipStore(store, mode=mode) + elif store.endswith('.n5'): + from zarr.n5 import N5Store + return N5Store(store) + else: + return DirectoryStore(store) + elif zarr_version == 3: + if "://" in store or "::" in store: + store = FSStoreV3(store, mode=mode, **(storage_options or {})) + elif storage_options: + raise ValueError("storage_options passed with non-fsspec path") + elif store.endswith('.zip'): + store = ZipStoreV3(store, mode=mode) + elif store.endswith('.n5'): + raise NotImplementedError("N5Store not yet implemented for V3") + # return N5StoreV3(store) + else: + store = DirectoryStoreV3(store) + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + return store + elif zarr_version == 2: + store = Store._ensure_store(store) + elif zarr_version == 3: + store = StoreV3._ensure_store(store) + if 'zarr.json' not in store: + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + return store def rmdir(store: StoreLike, path: Path = None): @@ -132,15 +195,19 @@ def rmdir(store: StoreLike, path: Path = None): this will be called, otherwise will fall back to implementation via the `Store` interface.""" path = normalize_storage_path(path) + store_version = getattr(store, '_store_version', 2) if hasattr(store, "rmdir") and store.is_erasable(): # type: ignore # pass through store.rmdir(path) # type: ignore else: # slow version, delete one key at a time - _rmdir_from_keys(store, path) + if store_version == 2: + _rmdir_from_keys(store, path) + else: + _rmdir_from_keys_v3(store, path) # type: ignore -def rename(store: BaseStore, src_path: Path, dst_path: Path): +def rename(store: Store, src_path: Path, dst_path: Path): """Rename all items under the given path. If `store` provides a `rename` method, this will be called, otherwise will fall back to implementation via the `Store` interface.""" @@ -172,33 +239,45 @@ def listdir(store: BaseStore, path: Path = None): return _listdir_from_keys(store, path) +def _getsize(store: BaseStore, path: Path = None) -> int: + # compute from size of values + if path and path in store: + v = store[path] + size = buffer_size(v) + else: + path = '' if path is None else normalize_storage_path(path) + size = 0 + store_version = getattr(store, '_store_version', 2) + if store_version == 3: + members = store.list_prefix(data_root + path) # type: ignore + members += store.list_prefix(meta_root + path) # type: ignore + # members += ['zarr.json'] + else: + members = listdir(store, path) + prefix = _path_to_prefix(path) + members = [prefix + k for k in members] + for k in members: + try: + v = store[k] + except KeyError: + pass + else: + try: + size += buffer_size(v) + except TypeError: + return -1 + return size + + def getsize(store: BaseStore, path: Path = None) -> int: """Compute size of stored items for a given path. 
If `store` provides a `getsize` method, this will be called, otherwise will return -1.""" - path = normalize_storage_path(path) if hasattr(store, 'getsize'): # pass through + path = normalize_storage_path(path) return store.getsize(path) # type: ignore elif isinstance(store, MutableMapping): - # compute from size of values - if path in store: - v = store[path] - size = buffer_size(v) - else: - members = listdir(store, path) - prefix = _path_to_prefix(path) - size = 0 - for k in members: - try: - v = store[prefix + k] - except KeyError: - pass - else: - try: - size += buffer_size(v) - except TypeError: - return -1 - return size + return _getsize(store, path) else: return -1 @@ -345,8 +424,18 @@ def init_array( path = normalize_storage_path(path) # ensure parent group initialized - _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) + store_version = getattr(store, "_store_version", 2) + if store_version < 3: + _require_parent_group(path, store=store, chunk_store=chunk_store, + overwrite=overwrite) + + if store_version == 3 and 'zarr.json' not in store: + # initialize with default zarr.json entry level metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore + if not compressor: + # compatibility with legacy tests using compressor=[] + compressor = None _init_array_metadata(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, fill_value=fill_value, order=order, overwrite=overwrite, path=path, @@ -371,16 +460,50 @@ def _init_array_metadata( dimension_separator=None, ): + store_version = getattr(store, '_store_version', 2) + + path = normalize_storage_path(path) + # guard conditions if overwrite: - # attempt to delete any pre-existing items in store - rmdir(store, path) - if chunk_store is not None: - rmdir(chunk_store, path) - elif contains_array(store, path): - raise ContainsArrayError(path) - elif contains_group(store, path): - raise ContainsGroupError(path) + if store_version == 2: + # attempt to delete any pre-existing array in store + rmdir(store, path) + if chunk_store is not None: + rmdir(chunk_store, path) + else: + group_meta_key = _prefix_to_group_key(store, _path_to_prefix(path)) + array_meta_key = _prefix_to_array_key(store, _path_to_prefix(path)) + data_prefix = data_root + _path_to_prefix(path) + + # attempt to delete any pre-existing array in store + if array_meta_key in store: + store.erase(array_meta_key) # type: ignore + if group_meta_key in store: + store.erase(group_meta_key) # type: ignore + store.erase_prefix(data_prefix) # type: ignore + if chunk_store is not None: + chunk_store.erase_prefix(data_prefix) # type: ignore + + if '/' in path: + # path is a subfolder of an existing array, remove that array + parent_path = '/'.join(path.split('/')[:-1]) + sfx = _get_metadata_suffix(store) # type: ignore + array_key = meta_root + parent_path + '.array' + sfx + if array_key in store: + store.erase(array_key) # type: ignore + + if not overwrite: + if contains_array(store, path): + raise ContainsArrayError(path) + elif contains_group(store, path, explicit_only=False): + raise ContainsGroupError(path) + elif store_version == 3: + if '/' in path: + # cannot create an array within an existing array path + parent_path = '/'.join(path.split('/')[:-1]) + if contains_array(store, parent_path): + raise ContainsArrayError(path) # normalize metadata dtype, object_codec = normalize_dtype(dtype, object_codec) @@ -391,7 +514,7 @@ def _init_array_metadata( fill_value = 
normalize_fill_value(fill_value, dtype) # optional array metadata - if dimension_separator is None: + if dimension_separator is None and store_version == 2: dimension_separator = getattr(store, "_dimension_separator", None) dimension_separator = normalize_dimension_separator(dimension_separator) @@ -408,13 +531,21 @@ def _init_array_metadata( # obtain compressor config compressor_config = None if compressor: - try: - compressor_config = compressor.get_config() - except AttributeError as e: - raise BadCompressorError(compressor) from e + if store_version == 2: + try: + compressor_config = compressor.get_config() + except AttributeError as e: + raise BadCompressorError(compressor) from e + elif not isinstance(compressor, Codec): + raise ValueError("expected a numcodecs Codec for compressor") + # TODO: alternatively, could autoconvert str to a Codec + # e.g. 'zlib' -> numcodec.Zlib object + # compressor = numcodecs.get_codec({'id': compressor}) # obtain filters config if filters: + # TODO: filters was removed from the metadata in v3 + # raise error here if store_version > 2? filters_config = [f.get_config() for f in filters] else: filters_config = [] @@ -440,11 +571,31 @@ def _init_array_metadata( filters_config = None # type: ignore # initialize metadata - meta = dict(shape=shape, chunks=chunks, dtype=dtype, - compressor=compressor_config, fill_value=fill_value, - order=order, filters=filters_config, + # TODO: don't store redundant dimension_separator for v3? + _compressor = compressor_config if store_version == 2 else compressor + meta = dict(shape=shape, compressor=_compressor, + fill_value=fill_value, dimension_separator=dimension_separator) - key = _path_to_prefix(path) + array_meta_key + if store_version < 3: + meta.update(dict(chunks=chunks, dtype=dtype, order=order, + filters=filters_config)) + else: + if dimension_separator is None: + dimension_separator = "/" + if filters_config: + attributes = {'filters': filters_config} + else: + attributes = {} + meta.update( + dict(chunk_grid=dict(type="regular", + chunk_shape=chunks, + separator=dimension_separator), + chunk_memory_layout=order, + data_type=dtype, + attributes=attributes) + ) + + key = _prefix_to_array_key(store, _path_to_prefix(path)) if hasattr(store, '_metadata_class'): store[key] = store._metadata_class.encode_array_metadata(meta) # type: ignore else: @@ -481,14 +632,26 @@ def init_group( # normalize path path = normalize_storage_path(path) - # ensure parent group initialized - _require_parent_group(path, store=store, chunk_store=chunk_store, - overwrite=overwrite) + store_version = getattr(store, '_store_version', 2) + if store_version < 3: + # ensure parent group initialized + _require_parent_group(path, store=store, chunk_store=chunk_store, + overwrite=overwrite) + + if store_version == 3 and 'zarr.json' not in store: + # initialize with default zarr.json entry level metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore # initialise metadata _init_group_metadata(store=store, overwrite=overwrite, path=path, chunk_store=chunk_store) + if store_version == 3: + # TODO: Should initializing a v3 group also create a corresponding + # empty folder under data/root/? I think probably not until there + # is actual data written there. 
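
For reference, a rough sketch of the v3 array metadata document assembled above; the exact encoding is delegated to the store's `_metadata_class`, so treat the field set as indicative rather than exhaustive:

    import json
    from zarr.storage import KVStoreV3, init_array, meta_root

    store = KVStoreV3(dict())
    init_array(store, shape=(100,), chunks=(10,), path='arr')
    meta = json.loads(store[meta_root + 'arr.array.json'])
    assert meta['chunk_grid']['type'] == 'regular'
    assert meta['chunk_grid']['chunk_shape'] == [10]
    assert meta['chunk_grid']['separator'] == '/'    # default for v3
    assert meta['chunk_memory_layout'] == 'C'        # 'order' in v2
    assert meta['attributes'] == {}                  # any filters config lands here
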
+ pass + def _init_group_metadata( store: StoreLike, @@ -497,22 +660,51 @@ def _init_group_metadata( chunk_store: StoreLike = None, ): + store_version = getattr(store, '_store_version', 2) + path = normalize_storage_path(path) + # guard conditions if overwrite: - # attempt to delete any pre-existing items in store - rmdir(store, path) - if chunk_store is not None: - rmdir(chunk_store, path) - elif contains_array(store, path): - raise ContainsArrayError(path) - elif contains_group(store, path): - raise ContainsGroupError(path) + if store_version == 2: + # attempt to delete any pre-existing items in store + rmdir(store, path) + if chunk_store is not None: + rmdir(chunk_store, path) + else: + group_meta_key = _prefix_to_group_key(store, _path_to_prefix(path)) + array_meta_key = _prefix_to_array_key(store, _path_to_prefix(path)) + data_prefix = data_root + _path_to_prefix(path) + meta_prefix = meta_root + _path_to_prefix(path) + + # attempt to delete any pre-existing array in store + if array_meta_key in store: + store.erase(array_meta_key) # type: ignore + if group_meta_key in store: + store.erase(group_meta_key) # type: ignore + store.erase_prefix(data_prefix) # type: ignore + store.erase_prefix(meta_prefix) # type: ignore + if chunk_store is not None: + chunk_store.erase_prefix(data_prefix) # type: ignore + + if not overwrite: + if contains_array(store, path): + raise ContainsArrayError(path) + elif contains_group(store, path): + raise ContainsGroupError(path) + elif store_version == 3 and '/' in path: + # cannot create a group overlapping with an existing array name + parent_path = '/'.join(path.split('/')[:-1]) + if contains_array(store, parent_path): + raise ContainsArrayError(path) # initialize metadata # N.B., currently no metadata properties are needed, however there may # be in future - meta = dict() # type: ignore - key = _path_to_prefix(path) + group_meta_key + if store_version == 3: + meta = {'attributes': {}} # type: ignore + else: + meta = {} # type: ignore + key = _prefix_to_group_key(store, _path_to_prefix(path)) if hasattr(store, '_metadata_class'): store[key] = store._metadata_class.encode_group_metadata(meta) # type: ignore else: @@ -1132,14 +1324,17 @@ def __init__(self, url, normalize_keys=False, key_separator=None, dimension_separator = key_separator self.key_separator = dimension_separator - if self.key_separator is None: - self.key_separator = "." + self._default_key_separator() # Pass attributes to array creation self._dimension_separator = dimension_separator if self.fs.exists(self.path) and not self.fs.isdir(self.path): raise FSPathExistNotDir(url) + def _default_key_separator(self): + if self.key_separator is None: + self.key_separator = "." + def _normalize_key(self, key): key = normalize_storage_path(key).lstrip('/') if key: @@ -1886,6 +2081,10 @@ def __contains__(self, key): key = key.encode("ascii") return key in self.db + def rmdir(self, path: str = "") -> None: + path = normalize_storage_path(path) + _rmdir_from_keys(self, path) + class LMDBStore(Store): """Storage class using LMDB. 
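
The two group layouts side by side, as a sketch of what `init_group` writes under each protocol version:

    import json
    from zarr.storage import KVStore, KVStoreV3, init_group, meta_root

    v2 = KVStore(dict())
    init_group(v2, path='grp')
    assert 'grp/.zgroup' in v2                      # v2 marker key

    v3 = KVStoreV3(dict())
    init_group(v3, path='grp')
    doc = json.loads(v3[meta_root + 'grp.group.json'])
    assert doc['attributes'] == {}                  # v3 groups always carry attributes
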
Requires the `lmdb `_ @@ -2641,7 +2840,7 @@ def __init__(self, store: StoreLike, metadata_key=".zmetadata"): self.store = Store._ensure_store(store) # retrieve consolidated metadata - meta = json_loads(store[metadata_key]) + meta = json_loads(self.store[metadata_key]) # check format of consolidated metadata consolidated_format = meta.get('zarr_consolidated_format', None) @@ -2675,3 +2874,496 @@ def getsize(self, path): def listdir(self, path): return listdir(self.meta_store, path) + + +""" versions of stores following the v3 protocol """ + + +def _get_files_and_dirs_from_path(store, path): + path = normalize_storage_path(path) + + files = [] + # add array metadata file if present + array_key = _prefix_to_array_key(store, path) + if array_key in store: + files.append(os.path.join(store.path, array_key)) + + # add group metadata file if present + group_key = _prefix_to_group_key(store, path) + if group_key in store: + files.append(os.path.join(store.path, group_key)) + + dirs = [] + # add array and group folders if present + for d in [data_root + path, meta_root + path]: + dir_path = os.path.join(store.path, d) + if os.path.exists(dir_path): + dirs.append(dir_path) + return files, dirs + + +class RmdirV3(): + """Mixin class that can be used to ensure override of any existing v2 rmdir class.""" + + def rmdir(self, path: str = "") -> None: + path = normalize_storage_path(path) + _rmdir_from_keys_v3(self, path) # type: ignore + + +class KVStoreV3(RmdirV3, KVStore, StoreV3): + + def list(self): + return list(self._mutable_mapping.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def __eq__(self, other): + return ( + isinstance(other, KVStoreV3) and + self._mutable_mapping == other._mutable_mapping + ) + + +KVStoreV3.__doc__ = KVStore.__doc__ + + +class FSStoreV3(FSStore, StoreV3): + + # FSStoreV3 doesn't use this (FSStore uses it within _normalize_key) + _META_KEYS = () + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def _default_key_separator(self): + if self.key_separator is None: + self.key_separator = "/" + + def list(self): + return list(self.keys()) + + def _normalize_key(self, key): + key = normalize_storage_path(key).lstrip('/') + return key.lower() if self.normalize_keys else key + + def getsize(self, path=None): + size = 0 + if path is None or path == '': + # size of both the data and meta subdirs + dirs = [] + for d in ['data/root', 'meta/root']: + dir_path = os.path.join(self.path, d) + if os.path.exists(dir_path): + dirs.append(dir_path) + elif path in self: + # access individual element by full path + return buffer_size(self[path]) + else: + files, dirs = _get_files_and_dirs_from_path(self, path) + for file in files: + size += os.path.getsize(file) + for d in dirs: + size += self.fs.du(d, total=True, maxdepth=None) + return size + + def setitems(self, values): + if self.mode == 'r': + raise ReadOnlyError() + values = {self._normalize_key(key): val for key, val in values.items()} + + # initialize the /data/root/... folder corresponding to the array! 
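
For orientation, the fixed key namespace that all of these V3 classes manage (layout as established by this patch; the `foo`/`bar` names are illustrative):

    zarr.json                        entry-point hierarchy metadata
    meta/root/foo.group.json         group metadata for path 'foo'
    meta/root/foo/bar.array.json     array metadata for path 'foo/bar'
    data/root/foo/bar/c0/0           one chunk of that array ('/'-separated)

`StoreV3._validate_key` rejects keys outside these three prefixes.
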
+ # Note: zarr.tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails + # without this explicit creation of directories + subdirectories = set([os.path.dirname(v) for v in values.keys()]) + for subdirectory in subdirectories: + data_dir = os.path.join(self.path, subdirectory) + if not self.fs.exists(data_dir): + self.fs.mkdir(data_dir) + + self.map.setitems(values) + + def rmdir(self, path=None): + if self.mode == 'r': + raise ReadOnlyError() + if path: + for base in [meta_root, data_root]: + store_path = self.dir_path(base + path) + if self.fs.isdir(store_path): + self.fs.rm(store_path, recursive=True) + + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + else: + store_path = self.dir_path(path) + if self.fs.isdir(store_path): + self.fs.rm(store_path, recursive=True) + + +class MemoryStoreV3(MemoryStore, StoreV3): + + def __init__(self, root=None, cls=dict, dimension_separator=None): + if root is None: + self.root = cls() + else: + self.root = root + self.cls = cls + self.write_mutex = Lock() + self._dimension_separator = dimension_separator # TODO: modify for v3? + + def __eq__(self, other): + return ( + isinstance(other, MemoryStoreV3) and + self.root == other.root and + self.cls == other.cls + ) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def list(self): + return list(self.keys()) + + def getsize(self, path: Path = None): + return _getsize(self, path) + + def rename(self, src_path: Path, dst_path: Path): + src_path = normalize_storage_path(src_path) + dst_path = normalize_storage_path(dst_path) + + any_renamed = False + for base in [meta_root, data_root]: + if self.list_prefix(base + src_path): + src_parent, src_key = self._get_parent(base + src_path) + dst_parent, dst_key = self._require_parent(base + dst_path) + + dst_parent[dst_key] = src_parent.pop(src_key) + any_renamed = True + any_renamed = _rename_metadata_v3(self, src_path, dst_path) or any_renamed + if not any_renamed: + raise ValueError(f"no item {src_path} found to rename") + + def rmdir(self, path: Path = None): + path = normalize_storage_path(path) + if path: + for base in [meta_root, data_root]: + try: + parent, key = self._get_parent(base + path) + value = parent[key] + except KeyError: + continue + else: + if isinstance(value, self.cls): + del parent[key] + + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + else: + # clear out root + self.root = self.cls() + + +MemoryStoreV3.__doc__ = MemoryStore.__doc__ + + +class DirectoryStoreV3(DirectoryStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __eq__(self, other): + return ( + isinstance(other, DirectoryStoreV3) and + self.path == other.path + ) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def getsize(self, path: Path = None): + return _getsize(self, path) + + def rename(self, src_path, dst_path, metadata_key_suffix='.json'): + store_src_path = normalize_storage_path(src_path) + store_dst_path = normalize_storage_path(dst_path) + + dir_path = self.path + any_existed = False + for 
root_prefix in ['meta', 'data']:
+            src_path = os.path.join(dir_path, root_prefix, 'root', store_src_path)
+            if os.path.exists(src_path):
+                any_existed = True
+                dst_path = os.path.join(dir_path, root_prefix, 'root', store_dst_path)
+                os.renames(src_path, dst_path)
+
+        for suffix in ['.array' + metadata_key_suffix,
+                       '.group' + metadata_key_suffix]:
+            src_meta = os.path.join(dir_path, 'meta', 'root', store_src_path + suffix)
+            if os.path.exists(src_meta):
+                any_existed = True
+                dst_meta = os.path.join(dir_path, 'meta', 'root', store_dst_path + suffix)
+                dst_dir = os.path.dirname(dst_meta)
+                if not os.path.exists(dst_dir):
+                    os.makedirs(dst_dir)
+                os.rename(src_meta, dst_meta)
+        if not any_existed:
+            raise FileNotFoundError("nothing found at src_path")
+
+    def rmdir(self, path=None):
+        store_path = normalize_storage_path(path)
+        dir_path = self.path
+        if store_path:
+            for base in [meta_root, data_root]:
+                # resolve against the store root on every iteration; reusing
+                # dir_path here would nest the second base under the first
+                base_dir = os.path.join(dir_path, base + store_path)
+                if os.path.isdir(base_dir):
+                    shutil.rmtree(base_dir)
+
+            # remove any associated metadata files
+            sfx = _get_metadata_suffix(self)
+            meta_dir = (meta_root + path).rstrip('/')
+            array_meta_file = meta_dir + '.array' + sfx
+            self.pop(array_meta_file, None)
+            group_meta_file = meta_dir + '.group' + sfx
+            self.pop(group_meta_file, None)
+
+        elif os.path.isdir(dir_path):
+            shutil.rmtree(dir_path)
+
+
+DirectoryStoreV3.__doc__ = DirectoryStore.__doc__
+
+
+class ZipStoreV3(ZipStore, StoreV3):
+
+    def list(self):
+        return list(self.keys())
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, ZipStore) and
+            self.path == other.path and
+            self.compression == other.compression and
+            self.allowZip64 == other.allowZip64
+        )
+
+    def __setitem__(self, key, value):
+        self._validate_key(key)
+        super().__setitem__(key, value)
+
+    def getsize(self, path=None):
+        path = normalize_storage_path(path)
+        with self.mutex:
+            children = self.list_prefix(data_root + path)
+            children += self.list_prefix(meta_root + path)
+            if children:
+                size = 0
+                for name in children:
+                    info = self.zf.getinfo(name)
+                    size += info.compress_size
+                return size
+            elif path in self:
+                info = self.zf.getinfo(path)
+                return info.compress_size
+            else:
+                return 0
+
+
+ZipStoreV3.__doc__ = ZipStore.__doc__
+
+
+class RedisStoreV3(RmdirV3, RedisStore, StoreV3):
+
+    def list(self):
+        return list(self.keys())
+
+    def __setitem__(self, key, value):
+        self._validate_key(key)
+        super().__setitem__(key, value)
+
+
+RedisStoreV3.__doc__ = RedisStore.__doc__
+
+
+class MongoDBStoreV3(RmdirV3, MongoDBStore, StoreV3):
+
+    def list(self):
+        return list(self.keys())
+
+    def __setitem__(self, key, value):
+        self._validate_key(key)
+        super().__setitem__(key, value)
+
+
+MongoDBStoreV3.__doc__ = MongoDBStore.__doc__
+
+
+class DBMStoreV3(RmdirV3, DBMStore, StoreV3):
+
+    def list(self):
+        return list(self.keys())
+
+    def __setitem__(self, key, value):
+        self._validate_key(key)
+        super().__setitem__(key, value)
+
+
+DBMStoreV3.__doc__ = DBMStore.__doc__
+
+
+class LMDBStoreV3(RmdirV3, LMDBStore, StoreV3):
+
+    def list(self):
+        return list(self.keys())
+
+    def __setitem__(self, key, value):
+        self._validate_key(key)
+        super().__setitem__(key, value)
+
+
+LMDBStoreV3.__doc__ = LMDBStore.__doc__
+
+
+class SQLiteStoreV3(SQLiteStore, StoreV3):
+
+    def list(self):
+        return list(self.keys())
+
+    def getsize(self, path=None):
+        # TODO: why does the query below not work in this case?
+ # For now fall back to the default _getsize implementation + # size = 0 + # for _path in [data_root + path, meta_root + path]: + # c = self.cursor.execute( + # ''' + # SELECT COALESCE(SUM(LENGTH(v)), 0) FROM zarr + # WHERE k LIKE (? || "%") AND + # 0 == INSTR(LTRIM(SUBSTR(k, LENGTH(?) + 1), "/"), "/") + # ''', + # (_path, _path) + # ) + # for item_size, in c: + # size += item_size + # return size + + # fallback to default implementation for now + return _getsize(self, path) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def rmdir(self, path=None): + path = normalize_storage_path(path) + if path: + for base in [meta_root, data_root]: + with self.lock: + self.cursor.execute( + 'DELETE FROM zarr WHERE k LIKE (? || "/%")', (base + path,) + ) + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + else: + self.clear() + + +SQLiteStoreV3.__doc__ = SQLiteStore.__doc__ + + +class LRUStoreCacheV3(RmdirV3, LRUStoreCache, StoreV3): + + def __init__(self, store, max_size: int): + self._store = StoreV3._ensure_store(store) + self._max_size = max_size + self._current_size = 0 + self._keys_cache = None + self._contains_cache = None + self._listdir_cache: Dict[Path, Any] = dict() + self._values_cache: Dict[Path, Any] = OrderedDict() + self._mutex = Lock() + self.hits = self.misses = 0 + + def list(self): + return list(self.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + +LRUStoreCacheV3.__doc__ = LRUStoreCache.__doc__ + + +class ConsolidatedMetadataStoreV3(ConsolidatedMetadataStore, StoreV3): + """A layer over other storage, where the metadata has been consolidated into + a single key. + + The purpose of this class, is to be able to get all of the metadata for + a given array in a single read operation from the underlying storage. + See :func:`zarr.convenience.consolidate_metadata` for how to create this + single metadata key. + + This class loads from the one key, and stores the data in a dict, so that + accessing the keys no longer requires operations on the backend store. + + This class is read-only, and attempts to change the array metadata will + fail, but changing the data is possible. If the backend storage is changed + directly, then the metadata stored here could become obsolete, and + :func:`zarr.convenience.consolidate_metadata` should be called again and the class + re-invoked. The use case is for write once, read many times. + + .. note:: This is an experimental feature. + + Parameters + ---------- + store: Store + Containing the zarr array. + metadata_key: str + The target in the store where all of the metadata are stored. We + assume JSON encoding. 
+ + See Also + -------- + zarr.convenience.consolidate_metadata, zarr.convenience.open_consolidated + + """ + + def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zmetadata"): + self.store = StoreV3._ensure_store(store) + + # retrieve consolidated metadata + meta = json_loads(self.store[metadata_key]) + + # check format of consolidated metadata + consolidated_format = meta.get('zarr_consolidated_format', None) + if consolidated_format != 1: + raise MetadataError('unsupported zarr consolidated metadata format: %s' % + consolidated_format) + + # decode metadata + self.meta_store: Store = KVStoreV3(meta["metadata"]) + + def rmdir(self, key): + raise ReadOnlyError() + + # def __setitem__(self, key, value): + # raise ReadOnlyError() diff --git a/zarr/tests/data/store.zip b/zarr/tests/data/store.zip index a36fd675b34fb7f6bcea7c7e9d3f65e359106ee5..76ba856c6279354024e61645bb2da812b188599e 100644 GIT binary patch literal 343 zcmWIWW@Zs#00Dyx^^jFP|I3(xY!K!J;@s4dME#=t{1W}N{QRWEq5u@tKm~AssyZpL z3Qeh6<{Sl}Vh{!y4m6GtNPz$ogaQkLm@H6!fHxzP2s3WCL6tEq0ULzkQgp2_r63yQ rP#9Rks0U;sw1SX{+BTW*)W`zpP!Ui6yVLsB*Kha6<9lvXkY}ffO-PFS=m5L NMj$i<(kdVh0|51G4^aRB diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index b2de736d4a..dbbc19328a 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -2,24 +2,36 @@ import pytest +from zarr._storage.store import meta_root from zarr.attrs import Attributes -from zarr.tests.util import CountingDict -from zarr.storage import KVStore +from zarr.storage import KVStore, KVStoreV3 +from zarr.tests.util import CountingDict, CountingDictV3 + + +@pytest.fixture(params=[2, 3]) +def zarr_version(request): + return request.param + + +def _init_store(version): + """Use a plain dict() for v2, but KVStoreV3 otherwise.""" + if version == 2: + return dict() + return KVStoreV3(dict()) class TestAttributes(): - def init_attributes(self, store, read_only=False, cache=True): - return Attributes(store, key='attrs', read_only=read_only, cache=cache) + def init_attributes(self, store, read_only=False, cache=True, zarr_version=2): + root = '.z' if zarr_version == 2 else meta_root + return Attributes(store, key=root + 'attrs', read_only=read_only, cache=cache) - @pytest.mark.parametrize('store_from_dict', [False, True]) - def test_storage(self, store_from_dict): + def test_storage(self, zarr_version): - if store_from_dict: - store = dict() - else: - store = KVStore(dict()) - a = Attributes(store=store, key='attrs') + store = _init_store(zarr_version) + root = '.z' if zarr_version == 2 else meta_root + attrs_key = root + 'attrs' + a = Attributes(store=store, key=attrs_key) assert isinstance(a.store, KVStore) assert 'foo' not in a assert 'bar' not in a @@ -27,14 +39,17 @@ def test_storage(self, store_from_dict): a['foo'] = 'bar' a['baz'] = 42 - assert 'attrs' in store - assert isinstance(store['attrs'], bytes) - d = json.loads(str(store['attrs'], 'ascii')) + assert attrs_key in store + assert isinstance(store[attrs_key], bytes) + d = json.loads(str(store[attrs_key], 'ascii')) + if zarr_version == 3: + d = d['attributes'] assert dict(foo='bar', baz=42) == d - def test_get_set_del_contains(self): + def test_get_set_del_contains(self, zarr_version): - a = self.init_attributes(dict()) + store = _init_store(zarr_version) + a = self.init_attributes(store, zarr_version=zarr_version) assert 'foo' not in a a['foo'] = 'bar' a['baz'] = 42 @@ -48,9 +63,10 @@ def test_get_set_del_contains(self): # noinspection PyStatementEffect a['foo'] - def test_update_put(self): 
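
The key scheme these parametrized tests exercise, restated as a small sketch (the `attrs` key name is arbitrary):

    from zarr._storage.store import meta_root
    from zarr.attrs import Attributes
    from zarr.storage import KVStoreV3

    store = KVStoreV3(dict())
    a = Attributes(store, key=meta_root + 'attrs')   # v2 uses a '.zattrs'-style key
    a['foo'] = 'bar'
    assert meta_root + 'attrs' in store              # JSON body nests under 'attributes'
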
+ def test_update_put(self, zarr_version): - a = self.init_attributes(dict()) + store = _init_store(zarr_version) + a = self.init_attributes(store, zarr_version=zarr_version) assert 'foo' not in a assert 'bar' not in a assert 'baz' not in a @@ -65,9 +81,10 @@ def test_update_put(self): assert a['bar'] == 84 assert 'baz' not in a - def test_iterators(self): + def test_iterators(self, zarr_version): - a = self.init_attributes(dict()) + store = _init_store(zarr_version) + a = self.init_attributes(store, zarr_version=zarr_version) assert 0 == len(a) assert set() == set(a) assert set() == set(a.keys()) @@ -83,10 +100,15 @@ def test_iterators(self): assert {'bar', 42} == set(a.values()) assert {('foo', 'bar'), ('baz', 42)} == set(a.items()) - def test_read_only(self): - store = dict() - a = self.init_attributes(store, read_only=True) - store['attrs'] = json.dumps(dict(foo='bar', baz=42)).encode('ascii') + def test_read_only(self, zarr_version): + store = _init_store(zarr_version) + a = self.init_attributes(store, read_only=True, zarr_version=zarr_version) + if zarr_version == 2: + store['.zattrs'] = json.dumps(dict(foo='bar', baz=42)).encode('ascii') + else: + store['meta/root/attrs'] = json.dumps( + dict(attributes=dict(foo='bar', baz=42)) + ).encode('ascii') assert a['foo'] == 'bar' assert a['baz'] == 42 with pytest.raises(PermissionError): @@ -96,8 +118,9 @@ def test_read_only(self): with pytest.raises(PermissionError): a.update(foo='quux') - def test_key_completions(self): - a = self.init_attributes(dict()) + def test_key_completions(self, zarr_version): + store = _init_store(zarr_version) + a = self.init_attributes(store, zarr_version=zarr_version) d = a._ipython_key_completions_() assert 'foo' not in d assert '123' not in d @@ -112,113 +135,135 @@ def test_key_completions(self): assert 'asdf;' in d assert 'baz' not in d - def test_caching_on(self): + def test_caching_on(self, zarr_version): # caching is turned on by default # setup store - store = CountingDict() - assert 0 == store.counter['__getitem__', 'attrs'] - assert 0 == store.counter['__setitem__', 'attrs'] - store['attrs'] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') - assert 0 == store.counter['__getitem__', 'attrs'] - assert 1 == store.counter['__setitem__', 'attrs'] + store = CountingDict() if zarr_version == 2 else CountingDictV3() + attrs_key = '.zattrs' if zarr_version == 2 else 'meta/root/attrs' + assert 0 == store.counter['__getitem__', attrs_key] + assert 0 == store.counter['__setitem__', attrs_key] + if zarr_version == 2: + store[attrs_key] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') + else: + store[attrs_key] = json.dumps(dict(attributes=dict(foo='xxx', bar=42))).encode('ascii') + assert 0 == store.counter['__getitem__', attrs_key] + assert 1 == store.counter['__setitem__', attrs_key] # setup attributes - a = self.init_attributes(store) + a = self.init_attributes(store, zarr_version=zarr_version) # test __getitem__ causes all attributes to be cached assert a['foo'] == 'xxx' - assert 1 == store.counter['__getitem__', 'attrs'] + assert 1 == store.counter['__getitem__', attrs_key] assert a['bar'] == 42 - assert 1 == store.counter['__getitem__', 'attrs'] + assert 1 == store.counter['__getitem__', attrs_key] assert a['foo'] == 'xxx' - assert 1 == store.counter['__getitem__', 'attrs'] + assert 1 == store.counter['__getitem__', attrs_key] # test __setitem__ updates the cache a['foo'] = 'yyy' - assert 2 == store.counter['__getitem__', 'attrs'] - assert 2 == store.counter['__setitem__', 'attrs'] + get_cnt = 2 
if zarr_version == 2 else 3 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 2 == store.counter['__setitem__', attrs_key] assert a['foo'] == 'yyy' - assert 2 == store.counter['__getitem__', 'attrs'] - assert 2 == store.counter['__setitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 2 == store.counter['__setitem__', attrs_key] # test update() updates the cache a.update(foo='zzz', bar=84) - assert 3 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + get_cnt = 3 if zarr_version == 2 else 5 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] assert a['foo'] == 'zzz' assert a['bar'] == 84 - assert 3 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] # test __contains__ uses the cache assert 'foo' in a - assert 3 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] assert 'spam' not in a - assert 3 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] # test __delitem__ updates the cache del a['bar'] - assert 4 == store.counter['__getitem__', 'attrs'] - assert 4 == store.counter['__setitem__', 'attrs'] + get_cnt = 4 if zarr_version == 2 else 7 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 4 == store.counter['__setitem__', attrs_key] assert 'bar' not in a - assert 4 == store.counter['__getitem__', 'attrs'] - assert 4 == store.counter['__setitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 4 == store.counter['__setitem__', attrs_key] # test refresh() - store['attrs'] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') - assert 4 == store.counter['__getitem__', 'attrs'] + if zarr_version == 2: + store[attrs_key] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') + else: + store[attrs_key] = json.dumps(dict(attributes=dict(foo='xxx', bar=42))).encode('ascii') + assert get_cnt == store.counter['__getitem__', attrs_key] a.refresh() - assert 5 == store.counter['__getitem__', 'attrs'] + get_cnt = 5 if zarr_version == 2 else 8 + assert get_cnt == store.counter['__getitem__', attrs_key] assert a['foo'] == 'xxx' - assert 5 == store.counter['__getitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] assert a['bar'] == 42 - assert 5 == store.counter['__getitem__', 'attrs'] + assert get_cnt == store.counter['__getitem__', attrs_key] - def test_caching_off(self): + def test_caching_off(self, zarr_version): # setup store - store = CountingDict() - assert 0 == store.counter['__getitem__', 'attrs'] - assert 0 == store.counter['__setitem__', 'attrs'] - store['attrs'] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') - assert 0 == store.counter['__getitem__', 'attrs'] - assert 1 == store.counter['__setitem__', 'attrs'] + store = CountingDict() if zarr_version == 2 else CountingDictV3() + attrs_key = '.zattrs' if zarr_version == 2 else 'meta/root/attrs' + assert 0 == store.counter['__getitem__', attrs_key] + assert 0 == store.counter['__setitem__', attrs_key] + + if zarr_version == 2: + store[attrs_key] = 
json.dumps(dict(foo='xxx', bar=42)).encode('ascii') + else: + store[attrs_key] = json.dumps(dict(attributes=dict(foo='xxx', bar=42))).encode('ascii') + assert 0 == store.counter['__getitem__', attrs_key] + assert 1 == store.counter['__setitem__', attrs_key] # setup attributes - a = self.init_attributes(store, cache=False) + a = self.init_attributes(store, cache=False, zarr_version=zarr_version) # test __getitem__ assert a['foo'] == 'xxx' - assert 1 == store.counter['__getitem__', 'attrs'] + assert 1 == store.counter['__getitem__', attrs_key] assert a['bar'] == 42 - assert 2 == store.counter['__getitem__', 'attrs'] + assert 2 == store.counter['__getitem__', attrs_key] assert a['foo'] == 'xxx' - assert 3 == store.counter['__getitem__', 'attrs'] + assert 3 == store.counter['__getitem__', attrs_key] # test __setitem__ a['foo'] = 'yyy' - assert 4 == store.counter['__getitem__', 'attrs'] - assert 2 == store.counter['__setitem__', 'attrs'] + get_cnt = 4 if zarr_version == 2 else 5 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 2 == store.counter['__setitem__', attrs_key] assert a['foo'] == 'yyy' - assert 5 == store.counter['__getitem__', 'attrs'] - assert 2 == store.counter['__setitem__', 'attrs'] + get_cnt = 5 if zarr_version == 2 else 6 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 2 == store.counter['__setitem__', attrs_key] # test update() a.update(foo='zzz', bar=84) - assert 6 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + get_cnt = 6 if zarr_version == 2 else 8 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] assert a['foo'] == 'zzz' assert a['bar'] == 84 - assert 8 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + get_cnt = 8 if zarr_version == 2 else 10 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] # test __contains__ assert 'foo' in a - assert 9 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + get_cnt = 9 if zarr_version == 2 else 11 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] assert 'spam' not in a - assert 10 == store.counter['__getitem__', 'attrs'] - assert 3 == store.counter['__setitem__', 'attrs'] + get_cnt = 10 if zarr_version == 2 else 12 + assert get_cnt == store.counter['__getitem__', attrs_key] + assert 3 == store.counter['__setitem__', attrs_key] diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index a6041b788e..74c8d06fac 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -26,30 +26,54 @@ from zarr.hierarchy import Group, group from zarr.storage import ( ConsolidatedMetadataStore, + ConsolidatedMetadataStoreV3, + DirectoryStoreV3, + FSStoreV3, + KVStore, + KVStoreV3, MemoryStore, + MemoryStoreV3, + SQLiteStoreV3, atexit_rmtree, + data_root, + meta_root, getsize, ) +from zarr.tests.util import have_fsspec -def test_open_array(path_type): +def _init_creation_kwargs(zarr_version): + kwargs = {'zarr_version': zarr_version} + if zarr_version == 3: + kwargs['path'] = 'dataset' + return kwargs + + +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_open_array(path_type, zarr_version): store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) + kwargs = _init_creation_kwargs(zarr_version) # open array, 
create if doesn't exist - z = open(store, mode='a', shape=100) + z = open(store, mode='a', shape=100, **kwargs) assert isinstance(z, Array) assert z.shape == (100,) # open array, overwrite - z = open(store, mode='w', shape=200) + z = open(store, mode='w', shape=200, **kwargs) assert isinstance(z, Array) assert z.shape == (200,) + if zarr_version == 3: + # cannot open a v3 array without path + with pytest.raises(ValueError): + open(store, mode='w', shape=200, zarr_version=3) + # open array, read-only - z = open(store, mode='r') + z = open(store, mode='r', **kwargs) assert isinstance(z, Array) assert z.shape == (200,) assert z.read_only @@ -59,44 +83,83 @@ def test_open_array(path_type): open('doesnotexist', mode='r') -def test_open_group(path_type): +@pytest.mark.parametrize("zarr_version", [2, 3]) +def test_open_group(path_type, zarr_version): store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) + kwargs = _init_creation_kwargs(zarr_version) # open group, create if doesn't exist - g = open(store, mode='a') + g = open(store, mode='a', **kwargs) g.create_group('foo') assert isinstance(g, Group) assert 'foo' in g # open group, overwrite - g = open(store, mode='w') + g = open(store, mode='w', **kwargs) assert isinstance(g, Group) assert 'foo' not in g + if zarr_version == 3: + # cannot open a v3 group without path + with pytest.raises(ValueError): + open(store, mode='w', zarr_version=3) + # open group, read-only - g = open(store, mode='r') + g = open(store, mode='r', **kwargs) assert isinstance(g, Group) assert g.read_only -def test_save_errors(): +@pytest.mark.parametrize("zarr_version", [2, 3]) +def test_save_errors(zarr_version): with pytest.raises(ValueError): # no arrays provided - save_group('data/group.zarr') + save_group('data/group.zarr', zarr_version=zarr_version) + with pytest.raises(TypeError): + # no array provided + save_array('data/group.zarr', zarr_version=zarr_version) with pytest.raises(ValueError): # no arrays provided - save('data/group.zarr') + save('data/group.zarr', zarr_version=zarr_version) + + +def test_zarr_v3_save_multiple_unnamed(): + x = np.ones(8) + y = np.zeros(8) + store = KVStoreV3(dict()) + # no path provided + save_group(store, x, y, path='dataset', zarr_version=3) + # names become arr_{i} for unnamed *args + assert data_root + 'dataset/arr_0/c0' in store + assert data_root + 'dataset/arr_1/c0' in store + assert meta_root + 'dataset/arr_0.array.json' in store + assert meta_root + 'dataset/arr_1.array.json' in store -def test_lazy_loader(): +def test_zarr_v3_save_errors(): + x = np.ones(8) + with pytest.raises(ValueError): + # no path provided + save_group('data/group.zr3', x, zarr_version=3) + with pytest.raises(ValueError): + # no path provided + save_array('data/group.zr3', x, zarr_version=3) + with pytest.raises(ValueError): + # no path provided + save('data/group.zr3', x, zarr_version=3) + + +@pytest.mark.parametrize("zarr_version", [2, 3]) +def test_lazy_loader(zarr_version): foo = np.arange(100) bar = np.arange(100, 0, -1) - store = 'data/group.zarr' - save(store, foo=foo, bar=bar) - loader = load(store) + store = 'data/group.zarr' if zarr_version == 2 else 'data/group.zr3' + kwargs = _init_creation_kwargs(zarr_version) + save(store, foo=foo, bar=bar, **kwargs) + loader = load(store, **kwargs) assert 'foo' in loader assert 'bar' in loader assert 'baz' not in loader @@ -104,13 +167,58 @@ def test_lazy_loader(): assert sorted(loader) == ['bar', 'foo'] assert_array_equal(foo, loader['foo']) assert_array_equal(bar, 
loader['bar']) + assert 'LazyLoader: ' in repr(loader) -def test_consolidate_metadata(): +@pytest.mark.parametrize("zarr_version", [2, 3]) +def test_load_array(zarr_version): + foo = np.arange(100) + bar = np.arange(100, 0, -1) + store = 'data/group.zarr' if zarr_version == 2 else 'data/group.zr3' + kwargs = _init_creation_kwargs(zarr_version) + save(store, foo=foo, bar=bar, **kwargs) + + # can also load arrays directly into a numpy array + for array_name in ['foo', 'bar']: + array_path = 'dataset/' + array_name if zarr_version == 3 else array_name + array = load(store, path=array_path, zarr_version=zarr_version) + assert isinstance(array, np.ndarray) + if array_name == 'foo': + assert_array_equal(foo, array) + else: + assert_array_equal(bar, array) + + +@pytest.mark.parametrize("zarr_version", [2, 3]) +def test_tree(zarr_version): + kwargs = _init_creation_kwargs(zarr_version) + g1 = zarr.group(**kwargs) + g1.create_group('foo') + g3 = g1.create_group('bar') + g3.create_group('baz') + g5 = g3.create_group('qux') + g5.create_dataset('baz', shape=100, chunks=10) + assert repr(zarr.tree(g1)) == repr(g1.tree()) + assert str(zarr.tree(g1)) == str(g1.tree()) + + +# TODO: consolidated metadata currently only supported for v2 + +@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('with_chunk_store', [False, True], ids=['default', 'with_chunk_store']) +def test_consolidate_metadata(with_chunk_store, zarr_version): + + if zarr_version == 2: + MemoryStoreClass = MemoryStore + path = '' + else: + MemoryStoreClass = MemoryStoreV3 + path = 'dataset' # setup initial data - store = MemoryStore() - z = group(store) + store = MemoryStoreClass() + chunk_store = MemoryStoreClass() if with_chunk_store else None + z = group(store, chunk_store=chunk_store, path=path) z.create_group('g1') g2 = z.create_group('g2') g2.attrs['hello'] = 'world' @@ -121,20 +229,41 @@ def test_consolidate_metadata(): arr[:] = 1.0 assert 16 == arr.nchunks_initialized + if zarr_version == 3: + # error on v3 if path not provided + with pytest.raises(ValueError): + consolidate_metadata(store, path=None) + + with pytest.raises(ValueError): + consolidate_metadata(store, path='') + # perform consolidation - out = consolidate_metadata(store) + out = consolidate_metadata(store, path=path) assert isinstance(out, Group) - assert '.zmetadata' in store - for key in ['.zgroup', - 'g1/.zgroup', - 'g2/.zgroup', - 'g2/.zattrs', - 'g2/arr/.zarray', - 'g2/arr/.zattrs']: + assert ['g1', 'g2'] == list(out) + if zarr_version == 2: + assert isinstance(out._store, ConsolidatedMetadataStore) + assert '.zmetadata' in store + meta_keys = ['.zgroup', + 'g1/.zgroup', + 'g2/.zgroup', + 'g2/.zattrs', + 'g2/arr/.zarray', + 'g2/arr/.zattrs'] + else: + assert isinstance(out._store, ConsolidatedMetadataStoreV3) + assert 'meta/root/consolidated/.zmetadata' in store + meta_keys = ['zarr.json', + meta_root + 'dataset.group.json', + meta_root + 'dataset/g1.group.json', + meta_root + 'dataset/g2.group.json', + meta_root + 'dataset/g2/arr.array.json', + 'meta/root/consolidated.group.json'] + for key in meta_keys: del store[key] # open consolidated - z2 = open_consolidated(store) + z2 = open_consolidated(store, chunk_store=chunk_store, path=path) assert ['g1', 'g2'] == list(z2) assert 'world' == z2.g2.attrs['hello'] assert 1 == z2.g2.arr.attrs['data'] @@ -143,11 +272,18 @@ def test_consolidate_metadata(): assert 16 == z2.g2.arr.nchunks_initialized # tests del/write on the store - cmd = ConsolidatedMetadataStore(store) - with 
pytest.raises(PermissionError): - del cmd['.zgroup'] - with pytest.raises(PermissionError): - cmd['.zgroup'] = None + if zarr_version == 2: + cmd = ConsolidatedMetadataStore(store) + with pytest.raises(PermissionError): + del cmd['.zgroup'] + with pytest.raises(PermissionError): + cmd['.zgroup'] = None + else: + cmd = ConsolidatedMetadataStoreV3(store) + with pytest.raises(PermissionError): + del cmd[meta_root + 'dataset.group.json'] + with pytest.raises(PermissionError): + cmd[meta_root + 'dataset.group.json'] = None # test getsize on the store assert isinstance(getsize(cmd), Integral) @@ -172,14 +308,16 @@ def test_consolidate_metadata(): # test invalid modes with pytest.raises(ValueError): - open_consolidated(store, mode='a') + open_consolidated(store, chunk_store=chunk_store, mode='a', path=path) with pytest.raises(ValueError): - open_consolidated(store, mode='w') + open_consolidated(store, chunk_store=chunk_store, mode='w', path=path) with pytest.raises(ValueError): - open_consolidated(store, mode='w-') + open_consolidated(store, chunk_store=chunk_store, mode='w-', path=path) # make sure keyword arguments are passed through without error - open_consolidated(store, cache_attrs=True, synchronizer=None) + open_consolidated( + store, chunk_store=chunk_store, path=path, cache_attrs=True, synchronizer=None + ) def test_consolidated_with_chunk_store(): @@ -247,6 +385,8 @@ def test_save_array_separator(tmpdir, options): class TestCopyStore(unittest.TestCase): + _version = 2 + def setUp(self): source = dict() source['foo'] = b'xxx' @@ -254,9 +394,12 @@ def setUp(self): source['bar/qux'] = b'zzz' self.source = source + def _get_dest_store(self): + return dict() + def test_no_paths(self): source = self.source - dest = dict() + dest = self._get_dest_store() copy_store(source, dest) assert len(source) == len(dest) for key in source: @@ -266,7 +409,7 @@ def test_source_path(self): source = self.source # paths should be normalized for source_path in 'bar', 'bar/', '/bar', '/bar/': - dest = dict() + dest = self._get_dest_store() copy_store(source, dest, source_path=source_path) assert 2 == len(dest) for key in source: @@ -280,11 +423,14 @@ def test_dest_path(self): source = self.source # paths should be normalized for dest_path in 'new', 'new/', '/new', '/new/': - dest = dict() + dest = self._get_dest_store() copy_store(source, dest, dest_path=dest_path) assert len(source) == len(dest) for key in source: - dest_key = 'new/' + key + if self._version == 3: + dest_key = key[:10] + 'new/' + key[10:] + else: + dest_key = 'new/' + key assert source[key] == dest[dest_key] def test_source_dest_path(self): @@ -292,7 +438,7 @@ def test_source_dest_path(self): # paths should be normalized for source_path in 'bar', 'bar/', '/bar', '/bar/': for dest_path in 'new', 'new/', '/new', '/new/': - dest = dict() + dest = self._get_dest_store() copy_store(source, dest, source_path=source_path, dest_path=dest_path) assert 2 == len(dest) @@ -308,41 +454,44 @@ def test_excludes_includes(self): source = self.source # single excludes - dest = dict() + dest = self._get_dest_store() excludes = 'f.*' copy_store(source, dest, excludes=excludes) assert len(dest) == 2 - assert 'foo' not in dest + + root = '' if self._version == 2 else meta_root + assert root + 'foo' not in dest # multiple excludes - dest = dict() + dest = self._get_dest_store() excludes = 'b.z', '.*x' copy_store(source, dest, excludes=excludes) assert len(dest) == 1 - assert 'foo' in dest - assert 'bar/baz' not in dest - assert 'bar/qux' not in dest + assert root + 
'foo' in dest + assert root + 'bar/baz' not in dest + assert root + 'bar/qux' not in dest # excludes and includes - dest = dict() + dest = self._get_dest_store() excludes = 'b.*' includes = '.*x' copy_store(source, dest, excludes=excludes, includes=includes) assert len(dest) == 2 - assert 'foo' in dest - assert 'bar/baz' not in dest - assert 'bar/qux' in dest + assert root + 'foo' in dest + assert root + 'bar/baz' not in dest + assert root + 'bar/qux' in dest def test_dry_run(self): source = self.source - dest = dict() + dest = self._get_dest_store() copy_store(source, dest, dry_run=True) assert 0 == len(dest) def test_if_exists(self): source = self.source - dest = dict() - dest['bar/baz'] = b'mmm' + dest = self._get_dest_store() + root = '' if self._version == 2 else meta_root + dest[root + 'bar/baz'] = b'mmm' # default ('raise') with pytest.raises(CopyError): @@ -355,22 +504,43 @@ def test_if_exists(self): # skip copy_store(source, dest, if_exists='skip') assert 3 == len(dest) - assert dest['foo'] == b'xxx' - assert dest['bar/baz'] == b'mmm' - assert dest['bar/qux'] == b'zzz' + assert dest[root + 'foo'] == b'xxx' + assert dest[root + 'bar/baz'] == b'mmm' + assert dest[root + 'bar/qux'] == b'zzz' # replace copy_store(source, dest, if_exists='replace') assert 3 == len(dest) - assert dest['foo'] == b'xxx' - assert dest['bar/baz'] == b'yyy' - assert dest['bar/qux'] == b'zzz' + assert dest[root + 'foo'] == b'xxx' + assert dest[root + 'bar/baz'] == b'yyy' + assert dest[root + 'bar/qux'] == b'zzz' # invalid option with pytest.raises(ValueError): copy_store(source, dest, if_exists='foobar') +class TestCopyStoreV3(TestCopyStore): + + _version = 3 + + def setUp(self): + source = KVStoreV3(dict()) + source['meta/root/foo'] = b'xxx' + source['meta/root/bar/baz'] = b'yyy' + source['meta/root/bar/qux'] = b'zzz' + self.source = source + + def _get_dest_store(self): + return KVStoreV3(dict()) + + def test_mismatched_store_versions(self): + # cannot copy between stores of mixed Zarr versions + dest = KVStore(dict()) + with pytest.raises(ValueError): + copy_store(self.source, dest) + + def check_copied_array(original, copied, without_attrs=False, expect_props=None): @@ -419,7 +589,14 @@ def check_copied_array(original, copied, without_attrs=False, for k in original.attrs.keys(): assert k not in copied.attrs else: - assert sorted(original.attrs.items()) == sorted(copied.attrs.items()) + if dest_h5py and 'filters' in original.attrs: + # special case in v3 (storing filters metadata under attributes) + # we explicitly do not copy this info over to HDF5 + original_attrs = original.attrs.asdict().copy() + original_attrs.pop('filters') + else: + original_attrs = original.attrs + assert sorted(original_attrs.items()) == sorted(copied.attrs.items()) def check_copied_group(original, copied, without_attrs=False, expect_props=None, @@ -473,10 +650,32 @@ def test_copy_all(): dry_run=False, ) + assert 'subgroup' in destination_group assert destination_group.attrs["info"] == "group attrs" assert destination_group.subgroup.attrs["info"] == "sub attrs" +def test_copy_all_v3(): + """ + https://github.com/zarr-developers/zarr-python/issues/269 + + copy_all used to not copy attributes as `.keys()` + + """ + original_group = zarr.group(store=MemoryStoreV3(), path='group1', overwrite=True) + original_group.create_group("subgroup") + + destination_group = zarr.group(store=MemoryStoreV3(), path='group2', overwrite=True) + + # copy from memory to directory store + copy_all( + original_group, + destination_group, + dry_run=False, 
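
The v3 prefix arithmetic pinned down by `TestCopyStoreV3`, restated as a sketch using the stores from this patch:

    from zarr.convenience import copy_store
    from zarr.storage import KVStoreV3, meta_root

    src = KVStoreV3(dict())
    src[meta_root + 'foo'] = b'xxx'
    dst = KVStoreV3(dict())
    copy_store(src, dst, dest_path='new')
    # the dest_path prefix is spliced in after 'meta/root/' rather than prepended
    assert meta_root + 'new/foo' in dst
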
+ ) + assert 'subgroup' in destination_group + + class TestCopy: @pytest.fixture(params=[False, True], ids=['zarr', 'hdf5']) def source(self, request, tmpdir): @@ -719,3 +918,88 @@ def test_logging(self, source, dest, tmpdir): # bad option with pytest.raises(TypeError): copy(source['foo'], dest, dry_run=True, log=True) + + +class TestCopyV3(TestCopy): + + @pytest.fixture(params=['zarr', 'hdf5']) + def source(self, request, tmpdir): + def prep_source(source): + foo = source.create_group('foo') + foo.attrs['experiment'] = 'weird science' + baz = foo.create_dataset('bar/baz', data=np.arange(100), chunks=(50,)) + baz.attrs['units'] = 'metres' + if request.param == 'hdf5': + extra_kws = dict(compression='gzip', compression_opts=3, fillvalue=84, + shuffle=True, fletcher32=True) + else: + extra_kws = dict(compressor=Zlib(3), order='F', fill_value=42, filters=[Adler32()]) + source.create_dataset('spam', data=np.arange(100, 200).reshape(20, 5), + chunks=(10, 2), dtype='i2', **extra_kws) + return source + + if request.param == 'hdf5': + h5py = pytest.importorskip('h5py') + fn = tmpdir.join('source.h5') + with h5py.File(str(fn), mode='w') as h5f: + yield prep_source(h5f) + elif request.param == 'zarr': + yield prep_source(group(path='group1', zarr_version=3)) + + # Test with various destination StoreV3 types as TestCopyV3 covers rmdir + destinations = ['hdf5', 'zarr', 'zarr_kvstore', 'zarr_directorystore', 'zarr_sqlitestore'] + if have_fsspec: + destinations += ['zarr_fsstore'] + + @pytest.fixture(params=destinations) + def dest(self, request, tmpdir): + if request.param == 'hdf5': + h5py = pytest.importorskip('h5py') + fn = tmpdir.join('dest.h5') + with h5py.File(str(fn), mode='w') as h5f: + yield h5f + elif request.param == 'zarr': + yield group(path='group2', zarr_version=3) + elif request.param == 'zarr_kvstore': + store = KVStoreV3(dict()) + yield group(store, path='group2', zarr_version=3) + elif request.param == 'zarr_fsstore': + fn = tmpdir.join('dest.zr3') + store = FSStoreV3(str(fn), auto_mkdir=True) + yield group(store, path='group2', zarr_version=3) + elif request.param == 'zarr_directorystore': + fn = tmpdir.join('dest.zr3') + store = DirectoryStoreV3(str(fn)) + yield group(store, path='group2', zarr_version=3) + elif request.param == 'zarr_sqlitestore': + fn = tmpdir.join('dest.db') + store = SQLiteStoreV3(str(fn)) + yield group(store, path='group2', zarr_version=3) + + def test_copy_array_create_options(self, source, dest): + dest_h5py = dest.__module__.startswith('h5py.') + + # copy array, provide creation options + compressor = Zlib(9) + create_kws = dict(chunks=(10,)) + if dest_h5py: + create_kws.update(compression='gzip', compression_opts=9, + shuffle=True, fletcher32=True, fillvalue=42) + else: + # v3 case has no filters argument in zarr create_kws + create_kws.update(compressor=compressor, fill_value=42, order='F') + copy(source['foo/bar/baz'], dest, without_attrs=True, **create_kws) + check_copied_array(source['foo/bar/baz'], dest['baz'], + without_attrs=True, expect_props=create_kws) + + def test_copy_group_no_name(self, source, dest): + if source.__module__.startswith('h5py'): + with pytest.raises(TypeError): + copy(source, dest) + else: + # For v3, dest.name will be inferred from source.name + copy(source, dest) + check_copied_group(source, dest[source.name.lstrip('/')]) + + copy(source, dest, name='root') + check_copied_group(source, dest['root']) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 7423132887..08bda94ba2 100644 --- a/zarr/tests/test_core.py 
+++ b/zarr/tests/test_core.py
@@ -17,7 +17,13 @@
 from numpy.testing import assert_array_almost_equal, assert_array_equal
 from pkg_resources import parse_version
 
+from zarr._storage.store import (
+    _prefix_to_array_key,
+    _prefix_to_attrs_key,
+    _prefix_to_group_key
+)
 from zarr.core import Array
+from zarr.errors import ArrayNotFoundError, ContainsGroupError
 from zarr.meta import json_loads
 from zarr.n5 import N5Store, N5FSStore, n5_keywords
 from zarr.storage import (
@@ -30,10 +36,21 @@
     LRUStoreCache,
     NestedDirectoryStore,
     SQLiteStore,
+    ABSStoreV3,
+    DBMStoreV3,
+    DirectoryStoreV3,
+    FSStoreV3,
+    KVStoreV3,
+    LMDBStoreV3,
+    LRUStoreCacheV3,
+    SQLiteStoreV3,
+    StoreV3,
     atexit_rmglob,
     atexit_rmtree,
+    data_root,
     init_array,
     init_group,
+    meta_root,
 )
 from zarr.util import buffer_size
 from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec
@@ -43,6 +60,8 @@
 
 class TestArray(unittest.TestCase):
 
+    version = 2
+
     def test_array_init(self):
 
         # normal initialization
@@ -528,6 +547,8 @@ def test_setitem_data_not_shared(self):
         z.store.close()
 
     def expected(self):
+        # tests for array without path will not be run for v3 stores
+        assert self.version == 2
         return [
             "063b02ff8d9d3bab6da932ad5828b506ef0a6578",
             "f97b84dc9ffac807415f750100108764e837bb82",
@@ -1111,6 +1132,19 @@ def test_dtypes(self):
             assert_array_equal(a, z[:])
             z.store.close()
 
+        # unicode and bytestring dtypes
+        for dtype in ['S4', 'S6', 'U5', 'U6']:
+            n = 10
+            z = self.create_array(shape=n, chunks=3, dtype=dtype)
+            assert z.dtype == np.dtype(dtype)
+            if dtype.startswith('S'):
+                a = np.asarray([b'name'] * n, dtype=dtype)
+            else:
+                a = np.asarray(['§Æ¥¿é'] * n, dtype=dtype)
+            z[:] = a
+            assert np.all(a == z[:])
+            z.store.close()
+
         # check that datetime generic units are not allowed
         with pytest.raises(ValueError):
             self.create_array(shape=100, dtype='M8')
@@ -1180,7 +1214,6 @@ def test_object_arrays(self):
 
     def test_object_arrays_vlen_text(self):
         data = np.array(greetings * 1000, dtype=object)
-
         z = self.create_array(shape=data.shape, dtype=object, object_codec=VLenUTF8())
         z[0] = 'foo'
         assert z[0] == 'foo'
@@ -1474,11 +1507,17 @@ def test_attributes(self):
         a.attrs['foo'] = 'bar'
         assert a.attrs.key in a.store
         attrs = json_loads(a.store[a.attrs.key])
+        if self.version > 2:
+            # in v3, attributes are in a sub-dictionary of the metadata
+            attrs = attrs['attributes']
        assert 'foo' in attrs and attrs['foo'] == 'bar'
 
         a.attrs['bar'] = 'foo'
         assert a.attrs.key in a.store
         attrs = json_loads(a.store[a.attrs.key])
+        if self.version > 2:
+            # in v3, attributes are in a sub-dictionary of the metadata
+            attrs = attrs['attributes']
         assert 'foo' in attrs and attrs['foo'] == 'bar'
         assert 'bar' in attrs and attrs['bar'] == 'foo'
         a.store.close()
@@ -1508,28 +1547,14 @@ def create_array(read_only=False, **kwargs):
     def test_nchunks_initialized(self):
         pass
 
-    def test_hexdigest(self):
-        # Check basic 1-D array
-        z = self.create_array(shape=(1050,), chunks=100, dtype=' 2 and g1.store.is_erasable():
+            arr_path = g1.path + '/arr1'
+            sfx = _get_metadata_suffix(g1.store)
+            array_meta_file = meta_root + arr_path + '.array' + sfx
+            assert array_meta_file in g1.store
+            group_meta_file = meta_root + g2.path + '.group' + sfx
+            assert group_meta_file in g1.store
+
+            # rmdir on the array path should also remove the metadata file
+            g1.store.rmdir(arr_path)
+            assert array_meta_file not in g1.store
+            # rmdir on the group path should also remove its metadata file
+            g1.store.rmdir(g2.path)
+            assert group_meta_file not in g1.store
+
+    def _dataset_path(self, group, path):
+        path 
= path.rstrip('/') + absolute = path.startswith('/') + if absolute: + dataset_path = path + else: + dataset_path = '/'.join([group.path, path]) + dataset_path = dataset_path.lstrip('/') + dataset_name = '/' + dataset_path + return dataset_path, dataset_name + def test_create_dataset(self): g = self.create_group() # create as immediate child - d1 = g.create_dataset('foo', shape=1000, chunks=100) + dpath = 'foo' + d1 = g.create_dataset(dpath, shape=1000, chunks=100) + path, name = self._dataset_path(g, dpath) assert isinstance(d1, Array) assert (1000,) == d1.shape assert (100,) == d1.chunks - assert 'foo' == d1.path - assert '/foo' == d1.name + assert path == d1.path + assert name == d1.name assert g.store is d1.store # create as descendant - d2 = g.create_dataset('/a/b/c/', shape=2000, chunks=200, dtype='i1', + dpath = '/a/b/c/' + d2 = g.create_dataset(dpath, shape=2000, chunks=200, dtype='i1', compression='zlib', compression_opts=9, fill_value=42, order='F') + path, name = self._dataset_path(g, dpath) assert isinstance(d2, Array) assert (2000,) == d2.shape assert (200,) == d2.chunks @@ -234,20 +312,22 @@ def test_create_dataset(self): assert 9 == d2.compressor.level assert 42 == d2.fill_value assert 'F' == d2.order - assert 'a/b/c' == d2.path - assert '/a/b/c' == d2.name + assert path == d2.path + assert name == d2.name assert g.store is d2.store # create with data data = np.arange(3000, dtype='u2') - d3 = g.create_dataset('bar', data=data, chunks=300) + dpath = 'bar' + d3 = g.create_dataset(dpath, data=data, chunks=300) + path, name = self._dataset_path(g, dpath) assert isinstance(d3, Array) assert (3000,) == d3.shape assert (300,) == d3.chunks assert np.dtype('u2') == d3.dtype assert_array_equal(data, d3[:]) - assert 'bar' == d3.path - assert '/bar' == d3.name + assert path == d3.path + assert name == d3.name assert g.store is d3.store # compression arguments handling follows... 
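
The _dataset_path helper added above is what lets these assertions cover both v2 groups (whose root path is '') and v3 groups (which always carry an explicit path such as 'group'). A minimal standalone sketch of the same resolution logic; resolve_dataset_path and group_path are illustrative names, not part of the patch:

def resolve_dataset_path(group_path, path):
    # mirror the helper: drop a trailing '/', treat a leading '/' as an
    # absolute path, otherwise join the requested path onto the group's path
    path = path.rstrip('/')
    if path.startswith('/'):
        dataset_path = path
    else:
        dataset_path = '/'.join([group_path, path])
    dataset_path = dataset_path.lstrip('/')
    return dataset_path, '/' + dataset_path

assert resolve_dataset_path('', 'foo') == ('foo', '/foo')
assert resolve_dataset_path('group', 'foo') == ('group/foo', '/group/foo')
assert resolve_dataset_path('group', '/a/b/c/') == ('a/b/c', '/a/b/c')
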
@@ -290,25 +370,27 @@ def test_require_dataset(self): g = self.create_group() # create - d1 = g.require_dataset('foo', shape=1000, chunks=100, dtype='f4') + dpath = 'foo' + d1 = g.require_dataset(dpath, shape=1000, chunks=100, dtype='f4') d1[:] = np.arange(1000) + path, name = self._dataset_path(g, dpath) assert isinstance(d1, Array) assert (1000,) == d1.shape assert (100,) == d1.chunks assert np.dtype('f4') == d1.dtype - assert 'foo' == d1.path - assert '/foo' == d1.name + assert path == d1.path + assert name == d1.name assert g.store is d1.store assert_array_equal(np.arange(1000), d1[:]) # require - d2 = g.require_dataset('foo', shape=1000, chunks=100, dtype='f4') + d2 = g.require_dataset(dpath, shape=1000, chunks=100, dtype='f4') assert isinstance(d2, Array) assert (1000,) == d2.shape assert (100,) == d2.chunks assert np.dtype('f4') == d2.dtype - assert 'foo' == d2.path - assert '/foo' == d2.name + assert path == d2.path + assert name == d2.name assert g.store is d2.store assert_array_equal(np.arange(1000), d2[:]) assert d1 == d2 @@ -419,7 +501,12 @@ def test_getitem_contains_iterators(self): # setup g1 = self.create_group() g2 = g1.create_group('foo/bar') - d1 = g2.create_dataset('/a/b/c', shape=1000, chunks=100) + if g1._version == 2: + d1 = g2.create_dataset('/a/b/c', shape=1000, chunks=100) + else: + # v3: cannot create a dataset at the root by starting with / + # instead, need to create the dataset on g1 directly + d1 = g1.create_dataset('a/b/c', shape=1000, chunks=100) d1[:] = np.arange(1000) d2 = g1.create_dataset('foo/baz', shape=3000, chunks=300) d2[:] = np.arange(3000) @@ -428,7 +515,13 @@ def test_getitem_contains_iterators(self): assert isinstance(g1['foo'], Group) assert isinstance(g1['foo']['bar'], Group) assert isinstance(g1['foo/bar'], Group) - assert isinstance(g1['/foo/bar/'], Group) + if g1._version == 2: + assert isinstance(g1['/foo/bar/'], Group) + else: + # start or end with / raises KeyError + # TODO: should we fix allow stripping of these on v3? 
+ with pytest.raises(KeyError): + assert isinstance(g1['/foo/bar/'], Group) assert isinstance(g1['foo/baz'], Array) assert g2 == g1['foo/bar'] assert g1['foo']['bar'] == g1['foo/bar'] @@ -454,7 +547,9 @@ def test_getitem_contains_iterators(self): assert 'baz' not in g1 assert 'a/b/c/d' not in g1 assert 'a/z' not in g1 - assert 'quux' not in g1['foo'] + if g1._version == 2: + # TODO: handle implicit group for v3 spec + assert 'quux' not in g1['foo'] # test key errors with pytest.raises(KeyError): @@ -470,12 +565,19 @@ def test_getitem_contains_iterators(self): assert 1 == len(g1['a/b']) # test __iter__, keys() - # currently assumes sorted by key - assert ['a', 'foo'] == list(g1) - assert ['a', 'foo'] == list(g1.keys()) - assert ['bar', 'baz'] == list(g1['foo']) - assert ['bar', 'baz'] == list(g1['foo'].keys()) + if g1._version == 2: + # currently assumes sorted by key + assert ['a', 'foo'] == list(g1) + assert ['a', 'foo'] == list(g1.keys()) + assert ['bar', 'baz'] == list(g1['foo']) + assert ['bar', 'baz'] == list(g1['foo'].keys()) + else: + # v3 is not necessarily sorted by key + assert ['a', 'foo'] == sorted(list(g1)) + assert ['a', 'foo'] == sorted(list(g1.keys())) + assert ['bar', 'baz'] == sorted(list(g1['foo'])) + assert ['bar', 'baz'] == sorted(list(g1['foo'].keys())) assert [] == sorted(g1['foo/bar']) assert [] == sorted(g1['foo/bar'].keys()) @@ -484,6 +586,9 @@ def test_getitem_contains_iterators(self): items = list(g1.items()) values = list(g1.values()) + if g1._version == 3: + # v3 are not automatically sorted by key + items, values = zip(*sorted(zip(items, values), key=lambda x: x[0])) assert 'a' == items[0][0] assert g1['a'] == items[0][1] assert g1['a'] == values[0] @@ -493,6 +598,9 @@ def test_getitem_contains_iterators(self): items = list(g1['foo'].items()) values = list(g1['foo'].values()) + if g1._version == 3: + # v3 are not automatically sorted by key + items, values = zip(*sorted(zip(items, values), key=lambda x: x[0])) assert 'bar' == items[0][0] assert g1['foo']['bar'] == items[0][1] assert g1['foo']['bar'] == values[0] @@ -501,11 +609,16 @@ def test_getitem_contains_iterators(self): assert g1['foo']['baz'] == values[1] # test array_keys(), arrays(), group_keys(), groups() - # currently assumes sorted by key - assert ['a', 'foo'] == list(g1.group_keys()) groups = list(g1.groups()) arrays = list(g1.arrays()) + if g1._version == 2: + # currently assumes sorted by key + assert ['a', 'foo'] == list(g1.group_keys()) + else: + assert ['a', 'foo'] == sorted(list(g1.group_keys())) + groups = sorted(groups) + arrays = sorted(arrays) assert 'a' == groups[0][0] assert g1['a'] == groups[0][1] assert 'foo' == groups[1][0] @@ -517,6 +630,9 @@ def test_getitem_contains_iterators(self): assert ['baz'] == list(g1['foo'].array_keys()) groups = list(g1['foo'].groups()) arrays = list(g1['foo'].arrays()) + if g1._version == 3: + groups = sorted(groups) + arrays = sorted(arrays) assert 'bar' == groups[0][0] assert g1['foo']['bar'] == groups[0][1] assert 'baz' == arrays[0][0] @@ -537,21 +653,27 @@ def visitor4(name, obj): del items[:] g1.visitvalues(visitor2) - assert [ + expected_items = [ "a", "a/b", "a/b/c", "foo", "foo/bar", "foo/baz", - ] == items + ] + if g1._version == 3: + expected_items = [g1.path + '/' + i for i in expected_items] + assert expected_items == items del items[:] g1["foo"].visitvalues(visitor2) - assert [ + expected_items = [ "foo/bar", "foo/baz", - ] == items + ] + if g1._version == 3: + expected_items = [g1.path + '/' + i for i in expected_items] + assert 
expected_items == items del items[:] g1.visit(visitor3) @@ -627,6 +749,9 @@ def visitor0(val, *args): # noinspection PyUnusedLocal def visitor1(val, *args): name = getattr(val, "path", val) + if name.startswith('group/'): + # strip the group path for v3 + name = name[6:] if name == "a/b/c": return True @@ -762,9 +887,13 @@ def test_move(self): g2.move("bar", "/bar") assert "foo2" in g assert "foo2/bar" not in g - assert "bar" in g + if g2._version == 2: + # TODO: how to access element created outside of group.path in v3? + assert "bar" in g assert isinstance(g["foo2"], Group) - assert_array_equal(data, g["bar"]) + if g2._version == 2: + # TODO: how to access element created outside of group.path in v3? + assert_array_equal(data, g["bar"]) with pytest.raises(ValueError): g2.move("bar", "bar2") @@ -841,6 +970,9 @@ def test_paths(self): g1 = self.create_group() g2 = g1.create_group('foo/bar') + if g1._version == 3: + pytest.skip("TODO: update class for v3") + assert g1 == g1['/'] assert g1 == g1['//'] assert g1 == g1['///'] @@ -893,7 +1025,9 @@ def test_pickle(self): assert name == g2.name assert n == len(g2) assert keys == list(g2) - assert isinstance(g2['foo'], Group) + if g2._version == 2: + # TODO: handle implicit group for v3 + assert isinstance(g2['foo'], Group) assert isinstance(g2['foo/bar'], Array) g2.store.close() @@ -921,6 +1055,57 @@ def test_group_init_from_dict(chunk_dict): assert chunk_store is not g.chunk_store +# noinspection PyStatementEffect +class TestGroupV3(TestGroup, unittest.TestCase): + + @staticmethod + def create_store(): + # can be overridden in sub-classes + return KVStoreV3(dict()), None + + def create_group(self, store=None, path='group', read_only=False, + chunk_store=None, synchronizer=None): + # can be overridden in sub-classes + if store is None: + store, chunk_store = self.create_store() + init_group(store, path=path, chunk_store=chunk_store) + g = Group(store, path=path, read_only=read_only, + chunk_store=chunk_store, synchronizer=synchronizer) + return g + + def test_group_init_1(self): + store, chunk_store = self.create_store() + g = self.create_group(store, chunk_store=chunk_store) + assert store is g.store + if chunk_store is None: + assert store is g.chunk_store + else: + assert chunk_store is g.chunk_store + assert not g.read_only + # different path/name in v3 case + assert 'group' == g.path + assert '/group' == g.name + assert 'group' == g.basename + + assert isinstance(g.attrs, Attributes) + g.attrs['foo'] = 'bar' + assert g.attrs['foo'] == 'bar' + + assert isinstance(g.info, InfoReporter) + assert isinstance(repr(g.info), str) + assert isinstance(g.info._repr_html_(), str) + store.close() + + def test_group_init_errors_2(self): + store, chunk_store = self.create_store() + path = 'tmp' + init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) + # array blocks group + with pytest.raises(ValueError): + Group(store, path=path, chunk_store=chunk_store) + store.close() + + class TestGroupWithMemoryStore(TestGroup): @staticmethod @@ -928,6 +1113,14 @@ def create_store(): return MemoryStore(), None +# TODO: fix MemoryStoreV3 _get_parent, etc. 
+# # noinspection PyStatementEffect +# class TestGroupV3WithMemoryStore(TestGroupWithMemoryStore, TestGroupV3): + +# @staticmethod +# def create_store(): +# return MemoryStoreV3(), None + class TestGroupWithDirectoryStore(TestGroup): @staticmethod @@ -938,6 +1131,16 @@ def create_store(): return store, None +class TestGroupV3WithDirectoryStore(TestGroupWithDirectoryStore, TestGroupV3): + + @staticmethod + def create_store(): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = DirectoryStoreV3(path) + return store, None + + @skip_test_env_var("ZARR_TEST_ABS") class TestGroupWithABSStore(TestGroup): @@ -954,6 +1157,22 @@ def test_pickle(self): super().test_pickle() +@skip_test_env_var("ZARR_TEST_ABS") +class TestGroupWithABSStoreV3(TestGroupV3): + + @staticmethod + def create_store(): + container_client = abs_container() + store = ABSStoreV3(client=container_client) + store.rmdir() + return store, None + + @pytest.mark.skipif(sys.version_info < (3, 7), reason="attr not serializable in py36") + def test_pickle(self): + # internal attribute on ContainerClient isn't serializable for py36 and earlier + super().test_pickle() + + class TestGroupWithNestedDirectoryStore(TestGroup): @staticmethod @@ -982,10 +1201,45 @@ def test_round_trip_nd(self): f = open_group(store, mode='w') f.create_dataset(name, data=data, chunks=(5, 5, 5), compressor=None) + assert name in f h = open_group(store, mode='r') np.testing.assert_array_equal(h[name][:], data) +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestGroupV3WithFSStore(TestGroupWithFSStore, TestGroupV3): + + @staticmethod + def create_store(): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = FSStoreV3(path) + return store, None + + def test_round_trip_nd(self): + data = np.arange(1000).reshape(10, 10, 10) + name = 'raw' + + store, _ = self.create_store() + f = open_group(store, path='group', mode='w') + f.create_dataset(name, data=data, chunks=(5, 5, 5), + compressor=None) + h = open_group(store, path='group', mode='r') + np.testing.assert_array_equal(h[name][:], data) + + f = open_group(store, path='group2', mode='w') + + data_size = data.nbytes + group_meta_size = buffer_size(store[meta_root + 'group.group.json']) + group2_meta_size = buffer_size(store[meta_root + 'group2.group.json']) + array_meta_size = buffer_size(store[meta_root + 'group/raw.array.json']) + assert store.getsize() == data_size + group_meta_size + group2_meta_size + array_meta_size + # added case with path to complete coverage + assert store.getsize('group') == data_size + group_meta_size + array_meta_size + assert store.getsize('group2') == group2_meta_size + assert store.getsize('group/raw') == data_size + array_meta_size + + @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestGroupWithNestedFSStore(TestGroupWithFSStore): @@ -1009,6 +1263,29 @@ def test_inconsistent_dimension_separator(self): compressor=None, dimension_separator='.') +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestGroupV3WithNestedFSStore(TestGroupV3WithFSStore): + + @staticmethod + def create_store(): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = FSStoreV3(path, key_separator='/', auto_mkdir=True) + return store, None + + def test_inconsistent_dimension_separator(self): + data = np.arange(1000).reshape(10, 10, 10) + name = 'raw' + + store, _ = self.create_store() + f = open_group(store, path='group', mode='w') + + # cannot specify dimension_separator that 
conflicts with the store + with pytest.raises(ValueError): + f.create_dataset(name, data=data, chunks=(5, 5, 5), + compressor=None, dimension_separator='.') + + class TestGroupWithZipStore(TestGroup): @staticmethod @@ -1036,6 +1313,16 @@ def test_move(self): pass +class TestGroupV3WithZipStore(TestGroupWithZipStore, TestGroupV3): + + @staticmethod + def create_store(): + path = tempfile.mktemp(suffix='.zip') + atexit.register(os.remove, path) + store = ZipStoreV3(path) + return store, None + + class TestGroupWithDBMStore(TestGroup): @staticmethod @@ -1046,6 +1333,16 @@ def create_store(): return store, None +class TestGroupV3WithDBMStore(TestGroupWithDBMStore, TestGroupV3): + + @staticmethod + def create_store(): + path = tempfile.mktemp(suffix='.anydbm') + atexit.register(atexit_rmglob, path + '*') + store = DBMStoreV3(path, flag='n') + return store, None + + class TestGroupWithDBMStoreBerkeleyDB(TestGroup): @staticmethod @@ -1057,6 +1354,17 @@ def create_store(): return store, None +class TestGroupV3WithDBMStoreBerkeleyDB(TestGroupWithDBMStoreBerkeleyDB, TestGroupV3): + + @staticmethod + def create_store(): + bsddb3 = pytest.importorskip("bsddb3") + path = tempfile.mktemp(suffix='.dbm') + atexit.register(os.remove, path) + store = DBMStoreV3(path, flag='n', open=bsddb3.btopen) + return store, None + + class TestGroupWithLMDBStore(TestGroup): @staticmethod @@ -1068,6 +1376,17 @@ def create_store(): return store, None +class TestGroupV3WithLMDBStore(TestGroupWithLMDBStore, TestGroupV3): + + @staticmethod + def create_store(): + pytest.importorskip("lmdb") + path = tempfile.mktemp(suffix='.lmdb') + atexit.register(atexit_rmtree, path) + store = LMDBStoreV3(path) + return store, None + + class TestGroupWithSQLiteStore(TestGroup): def create_store(self): @@ -1078,6 +1397,16 @@ def create_store(self): return store, None +class TestGroupV3WithSQLiteStore(TestGroupWithSQLiteStore, TestGroupV3): + + def create_store(self): + pytest.importorskip("sqlite3") + path = tempfile.mktemp(suffix='.db') + atexit.register(atexit_rmtree, path) + store = SQLiteStoreV3(path) + return store, None + + class TestGroupWithChunkStore(TestGroup): @staticmethod @@ -1109,6 +1438,41 @@ def test_chunk_store(self): assert expect == actual +class TestGroupV3WithChunkStore(TestGroupWithChunkStore, TestGroupV3): + + @staticmethod + def create_store(): + return KVStoreV3(dict()), KVStoreV3(dict()) + + def test_chunk_store(self): + # setup + store, chunk_store = self.create_store() + path = 'group1' + g = self.create_group(store, path=path, chunk_store=chunk_store) + + # check attributes + assert store is g.store + assert chunk_store is g.chunk_store + + # create array + a = g.zeros('foo', shape=100, chunks=10) + assert store is a.store + assert chunk_store is a.chunk_store + a[:] = np.arange(100) + assert_array_equal(np.arange(100), a[:]) + + # check store keys + group_key = meta_root + path + '.group.json' + array_key = meta_root + path + '/foo' + '.array.json' + expect = sorted([group_key, array_key, 'zarr.json']) + actual = sorted(store.keys()) + assert expect == actual + expect = [data_root + path + '/foo/c' + str(i) for i in range(10)] + expect += ['zarr.json'] + actual = sorted(chunk_store.keys()) + assert expect == actual + + class TestGroupWithStoreCache(TestGroup): @staticmethod @@ -1117,44 +1481,78 @@ def create_store(): return store, None -def test_group(): +class TestGroupV3WithStoreCache(TestGroupWithStoreCache, TestGroupV3): + + @staticmethod + def create_store(): + store = LRUStoreCacheV3(dict(), 
max_size=None) + return store, None + + +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_group(zarr_version): # test the group() convenience function # basic usage - g = group() + if zarr_version == 2: + g = group() + assert '' == g.path + assert '/' == g.name + else: + g = group(path='group1', zarr_version=zarr_version) + with pytest.raises(ValueError): + # must supply path for v3 groups + group(zarr_version=3) + assert 'group1' == g.path + assert '/group1' == g.name assert isinstance(g, Group) - assert '' == g.path - assert '/' == g.name # usage with custom store - store = KVStore(dict()) - g = group(store=store) + if zarr_version == 2: + store = KVStore(dict()) + path = None + else: + store = KVStoreV3(dict()) + path = 'foo' + g = group(store=store, path=path) assert isinstance(g, Group) assert store is g.store # overwrite behaviour - store = KVStore(dict()) - init_array(store, shape=100, chunks=10) + if zarr_version == 2: + store = KVStore(dict()) + path = None + else: + store = KVStoreV3(dict()) + path = 'foo' + init_array(store, path=path, shape=100, chunks=10) with pytest.raises(ValueError): - group(store) - g = group(store, overwrite=True) + group(store, path=path) + g = group(store, path=path, overwrite=True) assert isinstance(g, Group) assert store is g.store -def test_open_group(): +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_open_group(zarr_version): # test the open_group() convenience function store = 'data/group.zarr' + expected_store_type = DirectoryStore if zarr_version == 2 else DirectoryStoreV3 + # mode == 'w' - g = open_group(store, mode='w') + path = None if zarr_version == 2 else 'group1' + g = open_group(store, path=path, mode='w', zarr_version=zarr_version) assert isinstance(g, Group) - assert isinstance(g.store, DirectoryStore) + assert isinstance(g.store, expected_store_type) assert 0 == len(g) g.create_groups('foo', 'bar') assert 2 == len(g) + # TODO: update the r, r+ test case here for zarr_version == 3 after + # open_array has StoreV3 support + # mode in 'r', 'r+' open_array('data/array.zarr', shape=100, chunks=10, mode='w') for mode in 'r', 'r+': @@ -1175,37 +1573,40 @@ def test_open_group(): # mode == 'a' shutil.rmtree(store) - g = open_group(store, mode='a') + g = open_group(store, path=path, mode='a', zarr_version=zarr_version) assert isinstance(g, Group) - assert isinstance(g.store, DirectoryStore) + assert isinstance(g.store, expected_store_type) assert 0 == len(g) g.create_groups('foo', 'bar') assert 2 == len(g) with pytest.raises(ValueError): - open_group('data/array.zarr', mode='a') + open_group('data/array.zarr', mode='a', zarr_version=zarr_version) # mode in 'w-', 'x' for mode in 'w-', 'x': shutil.rmtree(store) - g = open_group(store, mode=mode) + g = open_group(store, path=path, mode=mode, zarr_version=zarr_version) assert isinstance(g, Group) - assert isinstance(g.store, DirectoryStore) + assert isinstance(g.store, expected_store_type) assert 0 == len(g) g.create_groups('foo', 'bar') assert 2 == len(g) with pytest.raises(ValueError): - open_group(store, mode=mode) - with pytest.raises(ValueError): - open_group('data/array.zarr', mode=mode) + open_group(store, path=path, mode=mode, zarr_version=zarr_version) + if zarr_version == 2: + with pytest.raises(ValueError): + open_group('data/array.zarr', mode=mode) # open with path - g = open_group(store, path='foo/bar') + g = open_group(store, path='foo/bar', zarr_version=zarr_version) assert isinstance(g, Group) assert 'foo/bar' == g.path -def test_group_completions(): - g = 
group() +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_group_completions(zarr_version): + path = None if zarr_version == 2 else 'group1' + g = group(path=path, zarr_version=zarr_version) d = dir(g) assert 'foo' not in d assert 'bar' not in d @@ -1233,8 +1634,10 @@ def test_group_completions(): assert '456' not in d # not valid identifier -def test_group_key_completions(): - g = group() +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_group_key_completions(zarr_version): + path = None if zarr_version == 2 else 'group1' + g = group(path=path, zarr_version=zarr_version) d = dir(g) # noinspection PyProtectedMember k = g._ipython_key_completions_() @@ -1268,7 +1671,12 @@ def test_group_key_completions(): g.zeros('yyy', shape=100) g.zeros('zzz', shape=100) g.zeros('456', shape=100) - g.zeros('asdf;', shape=100) + if zarr_version == 2: + g.zeros('asdf;', shape=100) + else: + # cannot have ; in key name for v3 + with pytest.raises(ValueError): + g.zeros('asdf;', shape=100) d = dir(g) # noinspection PyProtectedMember @@ -1283,7 +1691,8 @@ def test_group_key_completions(): assert 'zzz' in d assert '123' not in d # not valid identifier assert '456' not in d # not valid identifier - assert 'asdf;' not in d # not valid identifier + if zarr_version == 2: + assert 'asdf;' not in d # not valid identifier assert 'foo' in k assert 'bar' in k @@ -1294,7 +1703,8 @@ def test_group_key_completions(): assert 'zzz' in k assert '123' in k assert '456' in k - assert 'asdf;' in k + if zarr_version == 2: + assert 'asdf;' in k def _check_tree(g, expect_bytes, expect_text): @@ -1308,9 +1718,11 @@ def _check_tree(g, expect_bytes, expect_text): isinstance(widget, ipytree.Tree) -def test_tree(): +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_tree(zarr_version): # setup - g1 = group() + path = None if zarr_version == 2 else 'group1' + g1 = group(path=path, zarr_version=zarr_version) g2 = g1.create_group('foo') g3 = g1.create_group('bar') g3.create_group('baz') @@ -1318,20 +1730,38 @@ def test_tree(): g5.create_dataset('baz', shape=100, chunks=10) # test root group - expect_bytes = textwrap.dedent("""\ - / - +-- bar - | +-- baz - | +-- quux - | +-- baz (100,) float64 - +-- foo""").encode() - expect_text = textwrap.dedent("""\ - / - ├── bar - │ ├── baz - │ └── quux - │ └── baz (100,) float64 - └── foo""") + if zarr_version == 2: + expect_bytes = textwrap.dedent("""\ + / + +-- bar + | +-- baz + | +-- quux + | +-- baz (100,) float64 + +-- foo""").encode() + expect_text = textwrap.dedent("""\ + / + ├── bar + │ ├── baz + │ └── quux + │ └── baz (100,) float64 + └── foo""") + else: + # Almost the same as for v2, but has a path name and the + # subgroups are not necessarily sorted alphabetically. 
+ expect_bytes = textwrap.dedent("""\ + group1 + +-- foo + +-- bar + +-- baz + +-- quux + +-- baz (100,) float64""").encode() + expect_text = textwrap.dedent("""\ + group1 + ├── foo + └── bar + ├── baz + └── quux + └── baz (100,) float64""") _check_tree(g1, expect_bytes, expect_text) # test different group @@ -1353,3 +1783,36 @@ def test_tree(): └── quux └── baz (100,) float64""") _check_tree(g3, expect_bytes, expect_text) + + +def test_group_mismatched_store_versions(): + store_v3 = KVStoreV3(dict()) + store_v2 = KVStore(dict()) + + # separate chunk store + chunk_store_v2 = KVStore(dict()) + chunk_store_v3 = KVStoreV3(dict()) + + init_group(store_v2, path='group1', chunk_store=chunk_store_v2) + init_group(store_v3, path='group1', chunk_store=chunk_store_v3) + + g1_v3 = Group(store_v3, path='group1', read_only=True, chunk_store=chunk_store_v3) + assert isinstance(g1_v3._store, KVStoreV3) + g1_v2 = Group(store_v2, path='group1', read_only=True, chunk_store=chunk_store_v2) + assert isinstance(g1_v2._store, KVStore) + + # store and chunk_store must have the same zarr protocol version + with pytest.raises(ValueError): + Group(store_v3, path='group1', read_only=False, chunk_store=chunk_store_v2) + with pytest.raises(ValueError): + Group(store_v2, path='group1', read_only=False, chunk_store=chunk_store_v3) + with pytest.raises(ValueError): + open_group(store_v2, path='group1', chunk_store=chunk_store_v3) + with pytest.raises(ValueError): + open_group(store_v3, path='group1', chunk_store=chunk_store_v2) + + # raises Value if read_only and path is not a pre-existing group + with pytest.raises(ValueError): + Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) + with pytest.raises(ValueError): + Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index 74f0c9f7de..524d335c9f 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -1442,7 +1442,7 @@ def test_slice_selection_uints(): arr = np.arange(24).reshape((4, 6)) idx = np.uint64(3) slice_sel = make_slice_selection((idx,)) - assert arr[slice_sel].shape == (1, 6) + assert arr[tuple(slice_sel)].shape == (1, 6) def test_numpy_int_indexing(): diff --git a/zarr/tests/test_info.py b/zarr/tests/test_info.py index 361490c0a8..434d19d1f7 100644 --- a/zarr/tests/test_info.py +++ b/zarr/tests/test_info.py @@ -1,15 +1,18 @@ import numcodecs +import pytest import zarr +from zarr.util import InfoReporter -def test_info(): +@pytest.mark.parametrize('array_size', [10, 15000]) +def test_info(array_size): # setup g = zarr.group(store=dict(), chunk_store=dict(), synchronizer=zarr.ThreadSynchronizer()) g.create_group('foo') - z = g.zeros('bar', shape=10, filters=[numcodecs.Adler32()]) + z = g.zeros('bar', shape=array_size, filters=[numcodecs.Adler32()]) # test group info items = g.info_items() @@ -20,6 +23,10 @@ def test_info(): ]) assert expected_keys == keys + # can also get a string representation of info via the info attribute + assert isinstance(g.info, InfoReporter) + assert "Type" in repr(g.info) + # test array info items = z.info_items() keys = sorted([k for k, _ in items]) @@ -29,3 +36,7 @@ def test_info(): 'No. 
bytes stored', 'Storage ratio', 'Chunks initialized', 'Name' ]) assert expected_keys == keys + + # can also get a string representation of info via the info attribute + assert isinstance(z.info, InfoReporter) + assert "Type" in repr(z.info) diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py index 5469921110..8acd634a13 100644 --- a/zarr/tests/test_meta.py +++ b/zarr/tests/test_meta.py @@ -1,4 +1,5 @@ import base64 +import copy import json import numpy as np @@ -8,7 +9,10 @@ from zarr.errors import MetadataError from zarr.meta import (ZARR_FORMAT, decode_array_metadata, decode_dtype, decode_group_metadata, encode_array_metadata, - encode_dtype, encode_fill_value, decode_fill_value) + encode_dtype, encode_fill_value, decode_fill_value, + get_extended_dtype_info, _v3_complex_types, + _v3_datetime_types, _default_entry_point_metadata_v3, + Metadata3) from zarr.util import normalize_dtype, normalize_fill_value @@ -260,6 +264,56 @@ def test_encode_decode_array_dtype_shape(): assert meta_dec['filters'] is None +def test_encode_decode_array_dtype_shape_v3(): + + meta = dict( + shape=(100,), + chunk_grid=dict(type='regular', + chunk_shape=(10,), + separator=('/')), + data_type=np.dtype('(10, 10)U4', '> 16) assert perm == '0o644' - info = z.getinfo('baz/') + info = z.getinfo(baz_key) perm = oct(info.external_attr >> 16) # only for posix platforms if os.name == 'posix': - assert perm == '0o40775' + if self.version == 2: + assert perm == '0o40775' + else: + # baz/ on v2, but baz on v3, so not a directory + assert perm == '0o644' z.close() def test_store_and_retrieve_ndarray(self): @@ -1609,8 +1783,8 @@ def create_store(self, dimension_separator=None): def test_context_manager(self): with self.create_store() as store: - store['foo'] = b'bar' - store['baz'] = b'qux' + store[self.root + 'foo'] = b'bar' + store[self.root + 'baz'] = b'qux' assert 2 == len(store) @@ -1669,8 +1843,8 @@ def create_store(self, **kwargs): def test_context_manager(self): with self.create_store() as store: - store['foo'] = b'bar' - store['baz'] = b'qux' + store[self.root + 'foo'] = b'bar' + store[self.root + 'baz'] = b'qux' assert 2 == len(store) @@ -1704,8 +1878,8 @@ def test_pickle(self): # setup store store = self.create_store() - store['foo'] = b'bar' - store['baz'] = b'quux' + store[self.root + 'foo'] = b'bar' + store[self.root + 'baz'] = b'quux' # round-trip through pickle with pytest.raises(PicklingError): @@ -1739,199 +1913,209 @@ def create_store(self, **kwargs): class TestLRUStoreCache(StoreTests): + CountingClass = CountingDict + LRUStoreClass = LRUStoreCache + def create_store(self, **kwargs): # wrapper therefore no dimension_separator argument skip_if_nested_chunks(**kwargs) - return LRUStoreCache(dict(), max_size=2**27) + return self.LRUStoreClass(dict(), max_size=2**27) def test_cache_values_no_max_size(self): # setup store - store = CountingDict() - store['foo'] = b'xxx' - store['bar'] = b'yyy' - assert 0 == store.counter['__getitem__', 'foo'] - assert 1 == store.counter['__setitem__', 'foo'] - assert 0 == store.counter['__getitem__', 'bar'] - assert 1 == store.counter['__setitem__', 'bar'] + store = self.CountingClass() + foo_key = self.root + 'foo' + bar_key = self.root + 'bar' + store[foo_key] = b'xxx' + store[bar_key] = b'yyy' + assert 0 == store.counter['__getitem__', foo_key] + assert 1 == store.counter['__setitem__', foo_key] + assert 0 == store.counter['__getitem__', bar_key] + assert 1 == store.counter['__setitem__', bar_key] # setup cache - cache = LRUStoreCache(store, max_size=None) + cache 
= self.LRUStoreClass(store, max_size=None) assert 0 == cache.hits assert 0 == cache.misses # test first __getitem__, cache miss - assert b'xxx' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] - assert 1 == store.counter['__setitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] + assert 1 == store.counter['__setitem__', foo_key] assert 0 == cache.hits assert 1 == cache.misses # test second __getitem__, cache hit - assert b'xxx' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] - assert 1 == store.counter['__setitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] + assert 1 == store.counter['__setitem__', foo_key] assert 1 == cache.hits assert 1 == cache.misses # test __setitem__, __getitem__ - cache['foo'] = b'zzz' - assert 1 == store.counter['__getitem__', 'foo'] - assert 2 == store.counter['__setitem__', 'foo'] + cache[foo_key] = b'zzz' + assert 1 == store.counter['__getitem__', foo_key] + assert 2 == store.counter['__setitem__', foo_key] # should be a cache hit - assert b'zzz' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] - assert 2 == store.counter['__setitem__', 'foo'] + assert b'zzz' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] + assert 2 == store.counter['__setitem__', foo_key] assert 2 == cache.hits assert 1 == cache.misses # manually invalidate all cached values cache.invalidate_values() - assert b'zzz' == cache['foo'] - assert 2 == store.counter['__getitem__', 'foo'] - assert 2 == store.counter['__setitem__', 'foo'] + assert b'zzz' == cache[foo_key] + assert 2 == store.counter['__getitem__', foo_key] + assert 2 == store.counter['__setitem__', foo_key] cache.invalidate() - assert b'zzz' == cache['foo'] - assert 3 == store.counter['__getitem__', 'foo'] - assert 2 == store.counter['__setitem__', 'foo'] + assert b'zzz' == cache[foo_key] + assert 3 == store.counter['__getitem__', foo_key] + assert 2 == store.counter['__setitem__', foo_key] # test __delitem__ - del cache['foo'] + del cache[foo_key] with pytest.raises(KeyError): # noinspection PyStatementEffect - cache['foo'] + cache[foo_key] with pytest.raises(KeyError): # noinspection PyStatementEffect - store['foo'] + store[foo_key] # verify other keys untouched - assert 0 == store.counter['__getitem__', 'bar'] - assert 1 == store.counter['__setitem__', 'bar'] + assert 0 == store.counter['__getitem__', bar_key] + assert 1 == store.counter['__setitem__', bar_key] def test_cache_values_with_max_size(self): # setup store - store = CountingDict() - store['foo'] = b'xxx' - store['bar'] = b'yyy' - assert 0 == store.counter['__getitem__', 'foo'] - assert 0 == store.counter['__getitem__', 'bar'] + store = self.CountingClass() + foo_key = self.root + 'foo' + bar_key = self.root + 'bar' + store[foo_key] = b'xxx' + store[bar_key] = b'yyy' + assert 0 == store.counter['__getitem__', foo_key] + assert 0 == store.counter['__getitem__', bar_key] # setup cache - can only hold one item - cache = LRUStoreCache(store, max_size=5) + cache = self.LRUStoreClass(store, max_size=5) assert 0 == cache.hits assert 0 == cache.misses # test first 'foo' __getitem__, cache miss - assert b'xxx' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] assert 0 == cache.hits assert 1 == cache.misses # test second 'foo' __getitem__, cache hit - assert b'xxx' == cache['foo'] - assert 1 == 
store.counter['__getitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] assert 1 == cache.hits assert 1 == cache.misses # test first 'bar' __getitem__, cache miss - assert b'yyy' == cache['bar'] - assert 1 == store.counter['__getitem__', 'bar'] + assert b'yyy' == cache[bar_key] + assert 1 == store.counter['__getitem__', bar_key] assert 1 == cache.hits assert 2 == cache.misses # test second 'bar' __getitem__, cache hit - assert b'yyy' == cache['bar'] - assert 1 == store.counter['__getitem__', 'bar'] + assert b'yyy' == cache[bar_key] + assert 1 == store.counter['__getitem__', bar_key] assert 2 == cache.hits assert 2 == cache.misses # test 'foo' __getitem__, should have been evicted, cache miss - assert b'xxx' == cache['foo'] - assert 2 == store.counter['__getitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 2 == store.counter['__getitem__', foo_key] assert 2 == cache.hits assert 3 == cache.misses # test 'bar' __getitem__, should have been evicted, cache miss - assert b'yyy' == cache['bar'] - assert 2 == store.counter['__getitem__', 'bar'] + assert b'yyy' == cache[bar_key] + assert 2 == store.counter['__getitem__', bar_key] assert 2 == cache.hits assert 4 == cache.misses # setup store - store = CountingDict() - store['foo'] = b'xxx' - store['bar'] = b'yyy' - assert 0 == store.counter['__getitem__', 'foo'] - assert 0 == store.counter['__getitem__', 'bar'] + store = self.CountingClass() + store[foo_key] = b'xxx' + store[bar_key] = b'yyy' + assert 0 == store.counter['__getitem__', foo_key] + assert 0 == store.counter['__getitem__', bar_key] # setup cache - can hold two items - cache = LRUStoreCache(store, max_size=6) + cache = self.LRUStoreClass(store, max_size=6) assert 0 == cache.hits assert 0 == cache.misses # test first 'foo' __getitem__, cache miss - assert b'xxx' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] assert 0 == cache.hits assert 1 == cache.misses # test second 'foo' __getitem__, cache hit - assert b'xxx' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] assert 1 == cache.hits assert 1 == cache.misses # test first 'bar' __getitem__, cache miss - assert b'yyy' == cache['bar'] - assert 1 == store.counter['__getitem__', 'bar'] + assert b'yyy' == cache[bar_key] + assert 1 == store.counter['__getitem__', bar_key] assert 1 == cache.hits assert 2 == cache.misses # test second 'bar' __getitem__, cache hit - assert b'yyy' == cache['bar'] - assert 1 == store.counter['__getitem__', 'bar'] + assert b'yyy' == cache[bar_key] + assert 1 == store.counter['__getitem__', bar_key] assert 2 == cache.hits assert 2 == cache.misses # test 'foo' __getitem__, should still be cached - assert b'xxx' == cache['foo'] - assert 1 == store.counter['__getitem__', 'foo'] + assert b'xxx' == cache[foo_key] + assert 1 == store.counter['__getitem__', foo_key] assert 3 == cache.hits assert 2 == cache.misses # test 'bar' __getitem__, should still be cached - assert b'yyy' == cache['bar'] - assert 1 == store.counter['__getitem__', 'bar'] + assert b'yyy' == cache[bar_key] + assert 1 == store.counter['__getitem__', bar_key] assert 4 == cache.hits assert 2 == cache.misses def test_cache_keys(self): # setup - store = CountingDict() - store['foo'] = b'xxx' - store['bar'] = b'yyy' - assert 0 == store.counter['__contains__', 'foo'] + store = self.CountingClass() + 
foo_key = self.root + 'foo' + bar_key = self.root + 'bar' + baz_key = self.root + 'baz' + store[foo_key] = b'xxx' + store[bar_key] = b'yyy' + assert 0 == store.counter['__contains__', foo_key] assert 0 == store.counter['__iter__'] assert 0 == store.counter['keys'] - cache = LRUStoreCache(store, max_size=None) + cache = self.LRUStoreClass(store, max_size=None) # keys should be cached on first call keys = sorted(cache.keys()) - assert keys == ['bar', 'foo'] + assert keys == [bar_key, foo_key] assert 1 == store.counter['keys'] # keys should now be cached assert keys == sorted(cache.keys()) assert 1 == store.counter['keys'] - assert 'foo' in cache - assert 0 == store.counter['__contains__', 'foo'] + assert foo_key in cache + assert 0 == store.counter['__contains__', foo_key] assert keys == sorted(cache) assert 0 == store.counter['__iter__'] assert 1 == store.counter['keys'] # cache should be cleared if store is modified - crude but simple for now - cache['baz'] = b'zzz' + cache[baz_key] = b'zzz' keys = sorted(cache.keys()) - assert keys == ['bar', 'baz', 'foo'] + assert keys == [bar_key, baz_key, foo_key] assert 2 == store.counter['keys'] # keys should now be cached assert keys == sorted(cache.keys()) @@ -1940,25 +2124,25 @@ def test_cache_keys(self): # manually invalidate keys cache.invalidate_keys() keys = sorted(cache.keys()) - assert keys == ['bar', 'baz', 'foo'] + assert keys == [bar_key, baz_key, foo_key] assert 3 == store.counter['keys'] - assert 0 == store.counter['__contains__', 'foo'] + assert 0 == store.counter['__contains__', foo_key] assert 0 == store.counter['__iter__'] cache.invalidate_keys() keys = sorted(cache) - assert keys == ['bar', 'baz', 'foo'] + assert keys == [bar_key, baz_key, foo_key] assert 4 == store.counter['keys'] - assert 0 == store.counter['__contains__', 'foo'] + assert 0 == store.counter['__contains__', foo_key] assert 0 == store.counter['__iter__'] cache.invalidate_keys() - assert 'foo' in cache + assert foo_key in cache assert 5 == store.counter['keys'] - assert 0 == store.counter['__contains__', 'foo'] + assert 0 == store.counter['__contains__', foo_key] assert 0 == store.counter['__iter__'] # check these would get counted if called directly - assert 'foo' in store - assert 1 == store.counter['__contains__', 'foo'] + assert foo_key in store + assert 1 == store.counter['__contains__', foo_key] assert keys == sorted(store) assert 1 == store.counter['__iter__'] @@ -2137,9 +2321,11 @@ def test_format_compatibility(): @skip_test_env_var("ZARR_TEST_ABS") class TestABSStore(StoreTests): + ABSStoreClass = ABSStore + def create_store(self, prefix=None, **kwargs): container_client = abs_container() - store = ABSStore( + store = self.ABSStoreClass( prefix=prefix, client=container_client, **kwargs, @@ -2149,7 +2335,9 @@ def create_store(self, prefix=None, **kwargs): def test_non_client_deprecated(self): with pytest.warns(FutureWarning, match='Providing'): - store = ABSStore("container", account_name="account_name", account_key="account_key") + store = self.ABSStoreClass( + "container", account_name="account_name", account_key="account_key" + ) for attr in ["container", "account_name", "account_key"]: with pytest.warns(FutureWarning, match=attr): @@ -2157,7 +2345,13 @@ def test_non_client_deprecated(self): assert result == attr def test_iterators_with_prefix(self): - for prefix in ['test_prefix', '/test_prefix', 'test_prefix/', 'test/prefix', '', None]: + prefixes = ['test_prefix', '/test_prefix', 'test_prefix/', 'test/prefix'] + + if self.version < 3: + # empty prefix 
not allowed in v3 + prefixes += ['', None] + + for prefix in prefixes: store = self.create_store(prefix=prefix) # test iterator methods on empty store @@ -2167,19 +2361,22 @@ def test_iterators_with_prefix(self): assert set() == set(store.values()) assert set() == set(store.items()) + prefix = meta_root if self.version > 2 else '' # setup some values - store['a'] = b'aaa' - store['b'] = b'bbb' - store['c/d'] = b'ddd' - store['c/e/f'] = b'fff' + store[prefix + 'a'] = b'aaa' + store[prefix + 'b'] = b'bbb' + store[prefix + 'c/d'] = b'ddd' + store[prefix + 'c/e/f'] = b'fff' # test iterators on store with data assert 4 == len(store) - assert {'a', 'b', 'c/d', 'c/e/f'} == set(store) - assert {'a', 'b', 'c/d', 'c/e/f'} == set(store.keys()) - assert {b'aaa', b'bbb', b'ddd', b'fff'} == set(store.values()) - assert ({('a', b'aaa'), ('b', b'bbb'), ('c/d', b'ddd'), ('c/e/f', b'fff')} == - set(store.items())) + keys = [prefix + 'a', prefix + 'b', prefix + 'c/d', prefix + 'c/e/f'] + values = [b'aaa', b'bbb', b'ddd', b'fff'] + items = [(k, v) for k, v in zip(keys, values)] + assert set(keys) == set(store) + assert set(keys) == set(store.keys()) + assert set(values) == set(store.values()) + assert set(items) == set(store.items()) def test_getsize(self): return super().test_getsize() @@ -2195,6 +2392,13 @@ def test_pickle(self): class TestConsolidatedMetadataStore: + version = 2 + ConsolidatedMetadataClass = ConsolidatedMetadataStore + + @property + def metadata_key(self): + return '.zmetadata' + def test_bad_format(self): # setup store with consolidated metadata @@ -2203,11 +2407,15 @@ def test_bad_format(self): # bad format version 'zarr_consolidated_format': 0, } - store['.zmetadata'] = json.dumps(consolidated).encode() + store[self.metadata_key] = json.dumps(consolidated).encode() # check appropriate error is raised with pytest.raises(MetadataError): - ConsolidatedMetadataStore(store) + self.ConsolidatedMetadataClass(store) + + def test_bad_store_version(self): + with pytest.raises(ValueError): + self.ConsolidatedMetadataClass(KVStoreV3(dict())) def test_read_write(self): @@ -2220,10 +2428,10 @@ def test_read_write(self): 'baz': 42, } } - store['.zmetadata'] = json.dumps(consolidated).encode() + store[self.metadata_key] = json.dumps(consolidated).encode() # create consolidated store - cs = ConsolidatedMetadataStore(store) + cs = self.ConsolidatedMetadataClass(store) # test __contains__, __getitem__ for key, value in consolidated['metadata'].items(): @@ -2252,3 +2460,24 @@ def test_fill_value_change(): assert a[0, 0] == 1 assert json.loads(a.store[".zarray"])["fill_value"] == 1 + + +def test_get_hierarchy_metadata_v2(): + # v2 stores do not have hierarchy metadata (i.e. 
zarr.json)
+    with pytest.raises(ValueError):
+        _get_hierarchy_metadata(KVStore(dict()))
+
+
+def test_normalize_store_arg(tmpdir):
+    with pytest.raises(ValueError):
+        normalize_store_arg(dict(), zarr_version=4)
+
+    for ext, Class in [('.zip', ZipStore), ('.n5', N5Store)]:
+        fn = tmpdir.join('store' + ext)
+        store = normalize_store_arg(str(fn), zarr_version=2, mode='w')
+        assert isinstance(store, Class)
+
+    if have_fsspec:
+        path = tempfile.mkdtemp()
+        store = normalize_store_arg("file://" + path, zarr_version=2, mode='w')
+        assert isinstance(store, FSStore)
diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py
new file mode 100644
index 0000000000..73fda1b758
--- /dev/null
+++ b/zarr/tests/test_storage_v3.py
@@ -0,0 +1,513 @@
+import array
+import atexit
+import copy
+import os
+import tempfile
+
+import numpy as np
+import pytest
+from zarr._storage.store import _get_hierarchy_metadata
+from zarr.meta import _default_entry_point_metadata_v3
+from zarr.storage import (ABSStoreV3, ConsolidatedMetadataStoreV3, DBMStoreV3,
+                          DirectoryStoreV3, FSStoreV3, KVStore, KVStoreV3,
+                          LMDBStoreV3, LRUStoreCacheV3, MemoryStoreV3,
+                          MongoDBStoreV3, RedisStoreV3, SQLiteStoreV3, StoreV3,
+                          ZipStoreV3, atexit_rmglob, atexit_rmtree, data_root,
+                          default_compressor, getsize, init_array, meta_root,
+                          normalize_store_arg)
+from zarr.tests.util import CountingDictV3, have_fsspec, skip_test_env_var
+
+# pytest will fail to run if the following fixtures aren't imported here
+from .test_storage import StoreTests as _StoreTests
+from .test_storage import TestABSStore as _TestABSStore
+from .test_storage import TestConsolidatedMetadataStore as _TestConsolidatedMetadataStore
+from .test_storage import TestDBMStore as _TestDBMStore
+from .test_storage import TestDBMStoreBerkeleyDB as _TestDBMStoreBerkeleyDB
+from .test_storage import TestDBMStoreDumb as _TestDBMStoreDumb
+from .test_storage import TestDBMStoreGnu as _TestDBMStoreGnu
+from .test_storage import TestDBMStoreNDBM as _TestDBMStoreNDBM
+from .test_storage import TestDirectoryStore as _TestDirectoryStore
+from .test_storage import TestFSStore as _TestFSStore
+from .test_storage import TestLMDBStore as _TestLMDBStore
+from .test_storage import TestLRUStoreCache as _TestLRUStoreCache
+from .test_storage import TestMemoryStore as _TestMemoryStore
+from .test_storage import TestSQLiteStore as _TestSQLiteStore
+from .test_storage import TestSQLiteStoreInMemory as _TestSQLiteStoreInMemory
+from .test_storage import TestZipStore as _TestZipStore
+from .test_storage import (dimension_separator_fixture, s3,  # noqa
+                           skip_if_nested_chunks)
+
+
+@pytest.fixture(params=[
+    (None, "/"),
+    (".", "."),
+    ("/", "/"),
+])
+def dimension_separator_fixture_v3(request):
+    return request.param
+
+
+class DummyStore():
+    # contains all methods expected of a MutableMapping
+
+    def keys(self):
+        """keys"""
+
+    def values(self):
+        """values"""
+
+    def get(self, value, default=None):
+        """get"""
+
+    def __setitem__(self, key, value):
+        """__setitem__"""
+
+    def __getitem__(self, key):
+        """__getitem__"""
+
+    def __delitem__(self, key):
+        """__delitem__"""
+
+    def __contains__(self, key):
+        """__contains__"""
+
+
+class InvalidDummyStore():
+    # does not contain expected methods of a MutableMapping
+
+    def keys(self):
+        """keys"""
+
+
+def test_ensure_store_v3():
+    class InvalidStore:
+        pass
+
+    with pytest.raises(ValueError):
+        StoreV3._ensure_store(InvalidStore())
+
+    # cannot initialize with a store from a different Zarr version
+    with pytest.raises(ValueError):
+        StoreV3._ensure_store(KVStore(dict()))
+
+    assert StoreV3._ensure_store(None) is None
+
+    # class with all methods of a MutableMapping will become a KVStoreV3
+    assert isinstance(StoreV3._ensure_store(DummyStore), KVStoreV3)
+
+    with pytest.raises(ValueError):
+        # does not have the methods expected of a MutableMapping
+        StoreV3._ensure_store(InvalidDummyStore)
+
+
+def test_valid_key():
+    store = KVStoreV3(dict())
+
+    # only ascii keys are valid
+    assert not store._valid_key(5)
+    assert not store._valid_key(2.8)
+
+    for key in store._valid_key_characters:
+        assert store._valid_key(key)
+
+    # other characters not in store._valid_key_characters are not allowed
+    assert not store._valid_key('*')
+    assert not store._valid_key('~')
+    assert not store._valid_key('^')
+
+
+def test_validate_key():
+    store = KVStoreV3(dict())
+
+    # zarr.json is a valid key
+    store._validate_key('zarr.json')
+    # but other keys not starting with meta/ or data/ are not
+    with pytest.raises(ValueError):
+        store._validate_key('zar.json')
+
+    # valid ascii keys
+    for valid in [meta_root + 'arr1.array.json',
+                  data_root + 'arr1.array.json',
+                  meta_root + 'subfolder/item_1-0.group.json']:
+        store._validate_key(valid)
+    # but otherwise valid keys cannot end in /
+    with pytest.raises(ValueError):
+        store._validate_key(valid + '/')
+
+    for invalid in [0, '*', '~', '^', '&']:
+        with pytest.raises(ValueError):
+            store._validate_key(invalid)
+
+
+class StoreV3Tests(_StoreTests):
+
+    version = 3
+    root = meta_root
+
+    def test_getsize(self):
+        # TODO: determine proper getsize() behavior for v3
+        # Currently returns the combined size of entries under
+        # meta/root/path and data/root/path.
+        # Any path not under meta/root/ or data/root/ (including zarr.json)
+        # returns size 0.
+
+        store = self.create_store()
+        if isinstance(store, dict) or hasattr(store, 'getsize'):
+            assert 0 == getsize(store, 'zarr.json')
+            store[meta_root + 'foo/a'] = b'x'
+            assert 1 == getsize(store)
+            assert 1 == getsize(store, 'foo')
+            store[meta_root + 'foo/b'] = b'x'
+            assert 2 == getsize(store, 'foo')
+            assert 1 == getsize(store, 'foo/b')
+            store[meta_root + 'bar/a'] = b'yy'
+            assert 2 == getsize(store, 'bar')
+            store[data_root + 'bar/a'] = b'zzz'
+            assert 5 == getsize(store, 'bar')
+            store[data_root + 'baz/a'] = b'zzz'
+            assert 3 == getsize(store, 'baz')
+            assert 10 == getsize(store)
+            store[data_root + 'quux'] = array.array('B', b'zzzz')
+            assert 14 == getsize(store)
+            assert 4 == getsize(store, 'quux')
+            store[data_root + 'spong'] = np.frombuffer(b'zzzzz', dtype='u1')
+            assert 19 == getsize(store)
+            assert 5 == getsize(store, 'spong')
+        store.close()
+
+    def test_init_array(self, dimension_separator_fixture_v3):
+
+        pass_dim_sep, want_dim_sep = dimension_separator_fixture_v3
+
+        store = self.create_store()
+        path = 'arr1'
+        init_array(store, path=path, shape=1000, chunks=100,
+                   dimension_separator=pass_dim_sep)
+
+        # check metadata
+        mkey = meta_root + path + '.array.json'
+        assert mkey in store
+        meta = store._metadata_class.decode_array_metadata(store[mkey])
+        assert (1000,) == meta['shape']
+        assert (100,) == meta['chunk_grid']['chunk_shape']
+        assert np.dtype(None) == meta['data_type']
+        assert default_compressor == meta['compressor']
+        assert meta['fill_value'] is None
+        # Missing MUST be assumed to be "/"
+        assert meta['chunk_grid']['separator'] == want_dim_sep
+        store.close()
+
+    def test_list_prefix(self):
+
+        store = self.create_store()
+        path = 'arr1'
+        init_array(store, path=path, shape=1000, chunks=100)
+
+        expected = [meta_root + 
'arr1.array.json', 'zarr.json'] + assert sorted(store.list_prefix('')) == expected + + expected = [meta_root + 'arr1.array.json'] + assert sorted(store.list_prefix(meta_root.rstrip('/'))) == expected + + # cannot start prefix with '/' + with pytest.raises(ValueError): + store.list_prefix(prefix='/' + meta_root.rstrip('/')) + + def test_equal(self): + store = self.create_store() + assert store == store + + def test_rename_nonexisting(self): + store = self.create_store() + if store.is_erasable(): + with pytest.raises(ValueError): + store.rename('a', 'b') + else: + with pytest.raises(NotImplementedError): + store.rename('a', 'b') + + +class TestMappingStoreV3(StoreV3Tests): + + def create_store(self, **kwargs): + return KVStoreV3(dict()) + + def test_set_invalid_content(self): + # Generic mappings support non-buffer types + pass + + +class TestMemoryStoreV3(_TestMemoryStore, StoreV3Tests): + + def create_store(self, **kwargs): + skip_if_nested_chunks(**kwargs) + return MemoryStoreV3(**kwargs) + + +class TestDirectoryStoreV3(_TestDirectoryStore, StoreV3Tests): + + def create_store(self, normalize_keys=False, **kwargs): + # For v3, don't have to skip if nested. + # skip_if_nested_chunks(**kwargs) + + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = DirectoryStoreV3(path, normalize_keys=normalize_keys, **kwargs) + return store + + def test_rename_nonexisting(self): + store = self.create_store() + with pytest.raises(FileNotFoundError): + store.rename(meta_root + 'a', meta_root + 'b') + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestFSStoreV3(_TestFSStore, StoreV3Tests): + + def create_store(self, normalize_keys=False, + dimension_separator=".", + path=None, + **kwargs): + + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + + store = FSStoreV3( + path, + normalize_keys=normalize_keys, + dimension_separator=dimension_separator, + **kwargs) + return store + + def test_init_array(self): + store = self.create_store() + path = 'arr1' + init_array(store, path=path, shape=1000, chunks=100) + + # check metadata + mkey = meta_root + path + '.array.json' + assert mkey in store + meta = store._metadata_class.decode_array_metadata(store[mkey]) + assert (1000,) == meta['shape'] + assert (100,) == meta['chunk_grid']['chunk_shape'] + assert np.dtype(None) == meta['data_type'] + assert meta['chunk_grid']['separator'] == "/" + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestFSStoreV3WithKeySeparator(StoreV3Tests): + + def create_store(self, normalize_keys=False, key_separator=".", **kwargs): + + # Since the user is passing key_separator, that will take priority. 
+ skip_if_nested_chunks(**kwargs) + + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + return FSStoreV3( + path, + normalize_keys=normalize_keys, + key_separator=key_separator) + + +# TODO: enable once N5StoreV3 has been implemented +# @pytest.mark.skipif(True, reason="N5StoreV3 not yet fully implemented") +# class TestN5StoreV3(_TestN5Store, TestDirectoryStoreV3, StoreV3Tests): + + +class TestZipStoreV3(_TestZipStore, StoreV3Tests): + + ZipStoreClass = ZipStoreV3 + + def create_store(self, **kwargs): + path = tempfile.mktemp(suffix='.zip') + atexit.register(os.remove, path) + store = ZipStoreV3(path, mode='w', **kwargs) + return store + + +class TestDBMStoreV3(_TestDBMStore, StoreV3Tests): + + def create_store(self, dimension_separator=None): + path = tempfile.mktemp(suffix='.anydbm') + atexit.register(atexit_rmglob, path + '*') + # create store using default dbm implementation + store = DBMStoreV3(path, flag='n', dimension_separator=dimension_separator) + return store + + +class TestDBMStoreV3Dumb(_TestDBMStoreDumb, StoreV3Tests): + + def create_store(self, **kwargs): + path = tempfile.mktemp(suffix='.dumbdbm') + atexit.register(atexit_rmglob, path + '*') + + import dbm.dumb as dumbdbm + store = DBMStoreV3(path, flag='n', open=dumbdbm.open, **kwargs) + return store + + +class TestDBMStoreV3Gnu(_TestDBMStoreGnu, StoreV3Tests): + + def create_store(self, **kwargs): + gdbm = pytest.importorskip("dbm.gnu") + path = tempfile.mktemp(suffix=".gdbm") # pragma: no cover + atexit.register(os.remove, path) # pragma: no cover + store = DBMStoreV3( + path, flag="n", open=gdbm.open, write_lock=False, **kwargs + ) # pragma: no cover + return store # pragma: no cover + + +class TestDBMStoreV3NDBM(_TestDBMStoreNDBM, StoreV3Tests): + + def create_store(self, **kwargs): + ndbm = pytest.importorskip("dbm.ndbm") + path = tempfile.mktemp(suffix=".ndbm") # pragma: no cover + atexit.register(atexit_rmglob, path + "*") # pragma: no cover + store = DBMStoreV3(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover + return store # pragma: no cover + + +class TestDBMStoreV3BerkeleyDB(_TestDBMStoreBerkeleyDB, StoreV3Tests): + + def create_store(self, **kwargs): + bsddb3 = pytest.importorskip("bsddb3") + path = tempfile.mktemp(suffix='.dbm') + atexit.register(os.remove, path) + store = DBMStoreV3(path, flag='n', open=bsddb3.btopen, write_lock=False, **kwargs) + return store + + +class TestLMDBStoreV3(_TestLMDBStore, StoreV3Tests): + + def create_store(self, **kwargs): + pytest.importorskip("lmdb") + path = tempfile.mktemp(suffix='.lmdb') + atexit.register(atexit_rmtree, path) + buffers = True + store = LMDBStoreV3(path, buffers=buffers, **kwargs) + return store + + +class TestSQLiteStoreV3(_TestSQLiteStore, StoreV3Tests): + + def create_store(self, **kwargs): + pytest.importorskip("sqlite3") + path = tempfile.mktemp(suffix='.db') + atexit.register(atexit_rmtree, path) + store = SQLiteStoreV3(path, **kwargs) + return store + + +class TestSQLiteStoreV3InMemory(_TestSQLiteStoreInMemory, StoreV3Tests): + + def create_store(self, **kwargs): + pytest.importorskip("sqlite3") + store = SQLiteStoreV3(':memory:', **kwargs) + return store + + +@skip_test_env_var("ZARR_TEST_MONGO") +class TestMongoDBStoreV3(StoreV3Tests): + + def create_store(self, **kwargs): + pytest.importorskip("pymongo") + store = MongoDBStoreV3(host='127.0.0.1', database='zarr_tests', + collection='zarr_tests', **kwargs) + # start with an empty store + store.clear() + return store + + +@skip_test_env_var("ZARR_TEST_REDIS") +class 
TestRedisStoreV3(StoreV3Tests):
+
+    def create_store(self, **kwargs):
+        # TODO: this is the default host for Redis on Travis,
+        # we probably want to generalize this though
+        pytest.importorskip("redis")
+        store = RedisStoreV3(host='localhost', port=6379, **kwargs)
+        # start with an empty store
+        store.clear()
+        return store
+
+
+class TestLRUStoreCacheV3(_TestLRUStoreCache, StoreV3Tests):
+
+    CountingClass = CountingDictV3
+    LRUStoreClass = LRUStoreCacheV3
+
+
+@skip_test_env_var("ZARR_TEST_ABS")
+class TestABSStoreV3(_TestABSStore, StoreV3Tests):
+
+    ABSStoreClass = ABSStoreV3
+
+
+def test_normalize_store_arg_v3(tmpdir):
+
+    fn = tmpdir.join('store.zip')
+    store = normalize_store_arg(str(fn), zarr_version=3, mode='w')
+    assert isinstance(store, ZipStoreV3)
+    assert 'zarr.json' in store
+
+    # can't pass storage_options to non-fsspec store
+    with pytest.raises(ValueError):
+        normalize_store_arg(str(fn), zarr_version=3, mode='w', storage_options={"some": "kwargs"})
+
+    if have_fsspec:
+        path = tempfile.mkdtemp()
+        store = normalize_store_arg("file://" + path, zarr_version=3, mode='w')
+        assert isinstance(store, FSStoreV3)
+        assert 'zarr.json' in store
+
+    fn = tmpdir.join('store.n5')
+    with pytest.raises(NotImplementedError):
+        normalize_store_arg(str(fn), zarr_version=3, mode='w')
+
+    # error on zarr_version=3 with a v2 store
+    with pytest.raises(ValueError):
+        normalize_store_arg(KVStore(dict()), zarr_version=3, mode='w')
+
+    # error on zarr_version=2 with a v3 store
+    with pytest.raises(ValueError):
+        normalize_store_arg(KVStoreV3(dict()), zarr_version=2, mode='w')
+
+
+class TestConsolidatedMetadataStoreV3(_TestConsolidatedMetadataStore):
+
+    version = 3
+    ConsolidatedMetadataClass = ConsolidatedMetadataStoreV3
+
+    @property
+    def metadata_key(self):
+        return meta_root + 'consolidated/.zmetadata'
+
+    def test_bad_store_version(self):
+        with pytest.raises(ValueError):
+            self.ConsolidatedMetadataClass(KVStore(dict()))
+
+
+def test_get_hierarchy_metadata():
+    store = KVStoreV3({})
+
+    # error raised if 'zarr.json' is not in the store
+    with pytest.raises(ValueError):
+        _get_hierarchy_metadata(store)
+
+    store['zarr.json'] = _default_entry_point_metadata_v3
+    assert _get_hierarchy_metadata(store) == _default_entry_point_metadata_v3
+
+    # ValueError if only a subset of keys are present
+    store['zarr.json'] = {'zarr_format': 'https://purl.org/zarr/spec/protocol/core/3.0'}
+    with pytest.raises(ValueError):
+        _get_hierarchy_metadata(store)
+
+    # ValueError if any unexpected keys are present
+    extra_metadata = copy.copy(_default_entry_point_metadata_v3)
+    extra_metadata['extra_key'] = 'value'
+    store['zarr.json'] = extra_metadata
+    with pytest.raises(ValueError):
+        _get_hierarchy_metadata(store)
diff --git a/zarr/tests/test_sync.py b/zarr/tests/test_sync.py
index 69fc0d7708..b2bd9e35bb 100644
--- a/zarr/tests/test_sync.py
+++ b/zarr/tests/test_sync.py
@@ -13,17 +13,18 @@
 from zarr.core import Array
 from zarr.hierarchy import Group
 from zarr.storage import (DirectoryStore, KVStore, atexit_rmtree, init_array,
-                          init_group)
+                          init_group, meta_root)
 from zarr.sync import ProcessSynchronizer, ThreadSynchronizer
-from zarr.tests.test_attrs import TestAttributes
+# zarr_version fixture must be imported although not used directly here
+from zarr.tests.test_attrs import TestAttributes, zarr_version  # noqa
 from zarr.tests.test_core import TestArray
 from zarr.tests.test_hierarchy import TestGroup


 class TestAttributesWithThreadSynchronizer(TestAttributes):

-    def init_attributes(self, store,
read_only=False, cache=True): - key = 'attrs' + def init_attributes(self, store, read_only=False, cache=True, zarr_version=zarr_version): + key = '.zattrs' if zarr_version == 2 else meta_root + 'attrs' synchronizer = ThreadSynchronizer() return Attributes(store, synchronizer=synchronizer, key=key, read_only=read_only, cache=cache) @@ -31,8 +32,8 @@ def init_attributes(self, store, read_only=False, cache=True): class TestAttributesProcessSynchronizer(TestAttributes): - def init_attributes(self, store, read_only=False, cache=True): - key = 'attrs' + def init_attributes(self, store, read_only=False, cache=True, zarr_version=zarr_version): + key = '.zattrs' if zarr_version == 2 else meta_root + 'attrs' sync_path = mkdtemp() atexit.register(shutil.rmtree, sync_path) synchronizer = ProcessSynchronizer(sync_path) diff --git a/zarr/tests/util.py b/zarr/tests/util.py index e0f11d72ad..bb4df90d1b 100644 --- a/zarr/tests/util.py +++ b/zarr/tests/util.py @@ -1,7 +1,7 @@ import collections import os -from zarr.storage import Store +from zarr.storage import Store, StoreV3 import pytest @@ -41,6 +41,10 @@ def __delitem__(self, key): del self.wrapped[key] +class CountingDictV3(CountingDict, StoreV3): + pass + + def skip_test_env_var(name): """ Checks for environment variables indicating whether tests requiring services should be run """ From 8d2b65173bcfcb72ec48630c612fed63fc9fca3b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Mar 2022 08:58:03 -0500 Subject: [PATCH 0101/1078] Bump redis from 4.1.4 to 4.2.0 (#992) Bumps [redis](https://github.com/redis/redis-py) from 4.1.4 to 4.2.0. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.1.4...v4.2.0) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 2ceb4bc321..8f674f1b3b 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.10.0 # pyup: ignore -redis==4.1.4 +redis==4.2.0 types-redis types-setuptools pymongo==4.0.2 From 3d5b2f545efbb29c6c5571e6ca782faed6dec775 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 23:48:52 -0700 Subject: [PATCH 0102/1078] Bump azure-storage-blob from 12.10.0 to 12.11.0 (#994) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.10.0 to 12.11.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.10.0...azure-storage-blob_12.11.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 8f674f1b3b..f7088a766b 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -6,7 +6,7 @@ ipytree==0.2.1 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.10.0 # pyup: ignore +azure-storage-blob==12.11.0 # pyup: ignore redis==4.2.0 types-redis types-setuptools From 3014be3add6ecc346a3b85859c9b2ec67de56204 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 11:20:11 -0700 Subject: [PATCH 0103/1078] Bump redis from 4.2.0 to 4.2.1 (#998) Bumps [redis](https://github.com/redis/redis-py) from 4.2.0 to 4.2.1. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.2.0...v4.2.1) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f7088a766b..ad267b3be1 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.11.0 # pyup: ignore -redis==4.2.0 +redis==4.2.1 types-redis types-setuptools pymongo==4.0.2 From b8565a99c37d5b8ac23ab722005b1af744deb506 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Apr 2022 00:15:42 -0700 Subject: [PATCH 0104/1078] Bump fsspec from 2022.2.0 to 2022.3.0 (#997) * Bump fsspec from 2022.2.0 to 2022.3.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.2.0 to 2022.3.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.02.0...2022.3.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Manual bump of s3fs see: #920 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jakirkham Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index ad267b3be1..eb2b94f8e2 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,6 +19,6 @@ pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 h5py==3.6.0 -fsspec==2022.2.0 -s3fs==2022.2.0 +fsspec==2022.3.0 +s3fs==2022.3.0 moto[server]>=1.3.14 From 6e80e97a7f3decc4c9576b27d626af3b3824f8f3 Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Mon, 4 Apr 2022 07:18:21 -0700 Subject: [PATCH 0105/1078] Revert change to default write_empty_chunks. (#1001) --- docs/release.rst | 8 ++++++++ docs/tutorial.rst | 4 ++-- zarr/core.py | 2 +- zarr/creation.py | 22 +++++++++++----------- 4 files changed, 22 insertions(+), 14 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index ef8a396c0f..13c2f20d2c 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,6 +6,14 @@ Release notes Unreleased ---------- +Bug fixes +~~~~~~~~~ + +* Changes the default value of ``write_empty_chunks`` to ``True`` to prevent + unanticipated data losses when the data types do not have a proper default + value when empty chunks are read back in. + By :user:`Vyas Ramasubramani `; :issue:`965`. + .. _release_2.11.1: 2.11.1 diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 906d5d9f08..53ddddb0b9 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1309,7 +1309,7 @@ Empty chunks As of version 2.11, it is possible to configure how Zarr handles the storage of chunks that are "empty" (i.e., every element in the chunk is equal to the array's fill value). -When creating an array with ``write_empty_chunks=False`` (the default), +When creating an array with ``write_empty_chunks=False``, Zarr will check whether a chunk is empty before compression and storage. If a chunk is empty, then Zarr does not store it, and instead deletes the chunk from storage if the chunk had been previously stored. @@ -1318,7 +1318,7 @@ This optimization prevents storing redundant objects and can speed up reads, but added computation during array writes, since the contents of each chunk must be compared to the fill value, and these advantages are contingent on the content of the array. If you know that your data will form chunks that are almost always non-empty, then there is no advantage to the optimization described above. -In this case, creating an array with ``write_empty_chunks=True`` will instruct Zarr to write every chunk without checking for emptiness. +In this case, creating an array with ``write_empty_chunks=True`` (the default) will instruct Zarr to write every chunk without checking for emptiness. 
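As a minimal behavioural sketch of the ``write_empty_chunks=False`` case (assuming
an in-memory array created with ``zarr.zeros``, so that the fill value is zero)::

    import zarr

    z = zarr.zeros((100,), chunks=(10,), write_empty_chunks=False)
    z[0:10] = 0   # chunk uniformly equals the fill value -> not stored
    z[10:20] = 1  # chunk contains non-fill data -> stored
    assert z.nchunks_initialized == 1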
The following example illustrates the effect of the ``write_empty_chunks`` flag on the time required to write an array with different values.::
diff --git a/zarr/core.py b/zarr/core.py
index 5e2b4252aa..e1e04bb8fa 100644
--- a/zarr/core.py
+++ b/zarr/core.py
@@ -161,7 +161,7 @@ def __init__(
         cache_metadata=True,
         cache_attrs=True,
         partial_decompress=False,
-        write_empty_chunks=False,
+        write_empty_chunks=True,
         zarr_version=None,
     ):
         # N.B., expect at this point store is fully initialized with all
diff --git a/zarr/creation.py b/zarr/creation.py
index b8c40a859b..e77f26b3e2 100644
--- a/zarr/creation.py
+++ b/zarr/creation.py
@@ -74,11 +74,11 @@ def create(shape, chunks=True, dtype=None, compressor='default',
         .. versionadded:: 2.8
     write_empty_chunks : bool, optional
-        If True, all chunks will be stored regardless of their contents. If
-        False (default), each chunk is compared to the array's fill value prior
-        to storing. If a chunk is uniformly equal to the fill value, then that
-        chunk is not be stored, and the store entry for that chunk's key is
-        deleted. This setting enables sparser storage, as only chunks with
+        If True (default), all chunks will be stored regardless of their
+        contents. If False, each chunk is compared to the array's fill value
+        prior to storing. If a chunk is uniformly equal to the fill value, then
+        that chunk is not stored, and the store entry for that chunk's key
+        is deleted. This setting enables sparser storage, as only chunks with
         non-fill-value data are stored, at the expense of overhead associated
         with checking the data of each chunk.
@@ -403,7 +403,7 @@ def open_array(
     chunk_store=None,
     storage_options=None,
     partial_decompress=False,
-    write_empty_chunks=False,
+    write_empty_chunks=True,
     *,
     zarr_version=None,
     dimension_separator=None,
@@ -462,11 +462,11 @@ def open_array(
         is Blosc, when getting data from the array chunks will be partially
         read and decompressed when possible.
     write_empty_chunks : bool, optional
-        If True, all chunks will be stored regardless of their contents. If
-        False (default), each chunk is compared to the array's fill value prior
-        to storing. If a chunk is uniformly equal to the fill value, then that
-        chunk is not be stored, and the store entry for that chunk's key is
-        deleted. This setting enables sparser storage, as only chunks with
+        If True (default), all chunks will be stored regardless of their
+        contents. If False, each chunk is compared to the array's fill value
+        prior to storing. If a chunk is uniformly equal to the fill value, then
+        that chunk is not stored, and the store entry for that chunk's key
+        is deleted. This setting enables sparser storage, as only chunks with
         non-fill-value data are stored, at the expense of overhead associated
         with checking the data of each chunk.
From 2c4b655049580064ac0304329938c339ca3a15a5 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 4 Apr 2022 21:37:44 -0700
Subject: [PATCH 0106/1078] Bump redis from 4.2.1 to 4.2.2 (#1003)

Bumps [redis](https://github.com/redis/redis-py) from 4.2.1 to 4.2.2.
- [Release notes](https://github.com/redis/redis-py/releases)
- [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES)
- [Commits](https://github.com/redis/redis-py/compare/v4.2.1...v4.2.2)

---
updated-dependencies:
- dependency-name: redis
  dependency-type: direct:development
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index eb2b94f8e2..0beb881fbb 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.11.0 # pyup: ignore -redis==4.2.1 +redis==4.2.2 types-redis types-setuptools pymongo==4.0.2 From 62e28c588f54cc95729ff1b26837fb83a33baf64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Apr 2022 22:03:29 -0700 Subject: [PATCH 0107/1078] Bump pymongo from 4.0.2 to 4.1.0 (#1004) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.0.2 to 4.1.0. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.0.2...4.1.0) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jakirkham --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0beb881fbb..537cb1b9fb 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -10,7 +10,7 @@ azure-storage-blob==12.11.0 # pyup: ignore redis==4.2.2 types-redis types-setuptools -pymongo==4.0.2 +pymongo==4.1.0 # optional test requirements tox==3.24.5 coverage From e806b44f34867e80205af54618048930829e265c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Apr 2022 01:52:24 -0700 Subject: [PATCH 0108/1078] Bump tox from 3.24.5 to 3.25.0 (#1008) Bumps [tox](https://github.com/tox-dev/tox) from 3.24.5 to 3.25.0. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/master/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/3.24.5...3.25.0) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 537cb1b9fb..28be1c2eaa 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -12,7 +12,7 @@ types-redis types-setuptools pymongo==4.1.0 # optional test requirements -tox==3.24.5 +tox==3.25.0 coverage flake8==4.0.1 pytest-cov==3.0.0 From 18abb190977eeb575c94dbb601fd709cfe27a990 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Apr 2022 22:55:23 -0700 Subject: [PATCH 0109/1078] Bump pymongo from 4.1.0 to 4.1.1 (#1009) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.1.0 to 4.1.1. 
- [Release notes](https://github.com/mongodb/mongo-python-driver/releases)
- [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst)
- [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.1.0...4.1.1)

---
updated-dependencies:
- dependency-name: pymongo
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements_dev_optional.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt
index 28be1c2eaa..3e496d90e1 100644
--- a/requirements_dev_optional.txt
+++ b/requirements_dev_optional.txt
@@ -10,7 +10,7 @@ azure-storage-blob==12.11.0  # pyup: ignore
 redis==4.2.2
 types-redis
 types-setuptools
-pymongo==4.1.0
+pymongo==4.1.1
 # optional test requirements
 tox==3.25.0
 coverage
From c7494a9e21df777f7b6f962ea5c41b803f7f418c Mon Sep 17 00:00:00 2001
From: Eric Prestat
Date: Fri, 15 Apr 2022 10:48:27 +0100
Subject: [PATCH 0110/1078] Add numpy encoder class for json.dumps (#933)

* Add numpy encoder class for json.dumps and add test

* Use numbers instead of numpy module

* Add entry to release note

Co-authored-by: Josh Moore
Co-authored-by: jakirkham
---
 docs/release.rst            |  3 +++
 zarr/tests/test_creation.py |  5 +++++
 zarr/tests/test_util.py     | 14 ++++++++++++--
 zarr/util.py                | 14 +++++++++++++-
 4 files changed, 33 insertions(+), 3 deletions(-)

diff --git a/docs/release.rst b/docs/release.rst
index 13c2f20d2c..7a5cf51db7 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -14,6 +14,9 @@ Bug fixes
   value when empty chunks are read back in.
   By :user:`Vyas Ramasubramani `; :issue:`965`.

+* Add number encoder for ``json.dumps`` to support numpy integers in
+  ``chunks`` arguments. By :user:`Eric Prestat ` :issue:`697`.
+
 .. _release_2.11.1:

 2.11.1
diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py
index cfab4f79ec..ee99bc7c9f 100644
--- a/zarr/tests/test_creation.py
+++ b/zarr/tests/test_creation.py
@@ -714,3 +714,8 @@ def test_create_read_only(zarr_version):
     assert z.read_only
     with pytest.raises(PermissionError):
         z[:] = 42
+
+
+def test_json_dumps_chunks_numpy_dtype():
+    z = zeros((10,), chunks=(np.int64(2),))
+    assert np.all(z[...]
== 0) diff --git a/zarr/tests/test_util.py b/zarr/tests/test_util.py index efe8e66341..e9e1786abe 100644 --- a/zarr/tests/test_util.py +++ b/zarr/tests/test_util.py @@ -4,8 +4,10 @@ import numpy as np import pytest -from zarr.util import (all_equal, flatten, guess_chunks, human_readable_size, info_html_report, - info_text_report, is_total_slice, normalize_chunks, +from zarr.core import Array +from zarr.util import (all_equal, flatten, guess_chunks, human_readable_size, + info_html_report, info_text_report, is_total_slice, + json_dumps, normalize_chunks, normalize_dimension_separator, normalize_fill_value, normalize_order, normalize_resize_args, normalize_shape, retry_call, @@ -238,3 +240,11 @@ def test_all_equal(): # all_equal(None, *) always returns False assert not all_equal(None, np.array([None, None])) assert not all_equal(None, np.array([None, 10])) + + +def test_json_dumps_numpy_dtype(): + assert json_dumps(np.int64(0)) == json_dumps(0) + assert json_dumps(np.float32(0)) == json_dumps(float(0)) + # Check that we raise the error of the superclass for unsupported object + with pytest.raises(TypeError): + json_dumps(Array) diff --git a/zarr/util.py b/zarr/util.py index 9f5f04f525..cc3bd50356 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -33,10 +33,22 @@ def flatten(arg: Iterable) -> Iterable: } +class NumberEncoder(json.JSONEncoder): + + def default(self, o): + # See json.JSONEncoder.default docstring for explanation + # This is necessary to encode numpy dtype + if isinstance(o, numbers.Integral): + return int(o) + if isinstance(o, numbers.Real): + return float(o) + return json.JSONEncoder.default(self, o) + + def json_dumps(o: Any) -> bytes: """Write JSON in a consistent, human-readable way.""" return json.dumps(o, indent=4, sort_keys=True, ensure_ascii=True, - separators=(',', ': ')).encode('ascii') + separators=(',', ': '), cls=NumberEncoder).encode('ascii') def json_loads(s: str) -> Dict[str, Any]: From 0aaf6d5318f1f491f96b7e503633c7812663f654 Mon Sep 17 00:00:00 2001 From: Gregory Lee Date: Wed, 20 Apr 2022 02:39:41 -0400 Subject: [PATCH 0111/1078] misc Zarr V3 bug fixes: open_group, open_consolidated and MemoryStoreV3 (#1006) * Fix missing zarr_version argument in open_group and open_consolidated * add missing KVStore import at top level * remove outdated comment * Fix implementation of MemoryStoreV3.rename and enable hierarchy tests * some cleanup in V3 group tests * pep8 fix * parameterize consolidated metadata tests to use string paths remove redundant test_consolidated_with_chunk_store test open_group with store and chunk_store specified as string paths --- zarr/__init__.py | 2 +- zarr/convenience.py | 4 +- zarr/hierarchy.py | 3 +- zarr/storage.py | 15 ++- zarr/tests/test_convenience.py | 183 ++++++++++++++------------------- zarr/tests/test_core.py | 6 +- zarr/tests/test_hierarchy.py | 122 +++++++++++++++------- 7 files changed, 186 insertions(+), 149 deletions(-) diff --git a/zarr/__init__.py b/zarr/__init__.py index 7558ce77de..8a906534d1 100644 --- a/zarr/__init__.py +++ b/zarr/__init__.py @@ -11,7 +11,7 @@ from zarr.hierarchy import Group, group, open_group from zarr.n5 import N5Store, N5FSStore from zarr.storage import (ABSStore, DBMStore, DictStore, DirectoryStore, - LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, + KVStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, NestedDirectoryStore, RedisStore, SQLiteStore, TempStore, ZipStore) from zarr.sync import ProcessSynchronizer, ThreadSynchronizer diff --git a/zarr/convenience.py b/zarr/convenience.py 
index 2cbc9bdf68..07d649a329 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -1277,7 +1277,9 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** """ # normalize parameters - store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode) + zarr_version = kwargs.get('zarr_version', None) + store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode, + zarr_version=zarr_version) if mode not in {'r', 'r+'}: raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}" .format(mode)) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 0684be4a57..c2ddf451f1 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -1308,7 +1308,8 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N if chunk_store is not None: chunk_store = _normalize_store_arg(chunk_store, storage_options=storage_options, - mode=mode) + mode=mode, + zarr_version=zarr_version) if not getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) == zarr_version: raise ValueError( "zarr_version of store and chunk_store must match" diff --git a/zarr/storage.py b/zarr/storage.py index 709bbba7ee..2a6b756d64 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -3045,7 +3045,20 @@ def rename(self, src_path: Path, dst_path: Path): src_parent, src_key = self._get_parent(base + src_path) dst_parent, dst_key = self._require_parent(base + dst_path) - dst_parent[dst_key] = src_parent.pop(src_key) + if src_key in src_parent: + dst_parent[dst_key] = src_parent.pop(src_key) + + if base == meta_root: + # check for and move corresponding metadata + sfx = _get_metadata_suffix(self) + src_meta = src_key + '.array' + sfx + if src_meta in src_parent: + dst_meta = dst_key + '.array' + sfx + dst_parent[dst_meta] = src_parent.pop(src_meta) + src_meta = src_key + '.group' + sfx + if src_meta in src_parent: + dst_meta = dst_key + '.group' + sfx + dst_parent[dst_meta] = src_parent.pop(src_meta) any_renamed = True any_renamed = _rename_metadata_v3(self, src_path, dst_path) or any_renamed if not any_renamed: diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 74c8d06fac..53fa447b48 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -202,23 +202,30 @@ def test_tree(zarr_version): assert str(zarr.tree(g1)) == str(g1.tree()) -# TODO: consolidated metadata currently only supported for v2 - @pytest.mark.parametrize('zarr_version', [2, 3]) @pytest.mark.parametrize('with_chunk_store', [False, True], ids=['default', 'with_chunk_store']) -def test_consolidate_metadata(with_chunk_store, zarr_version): - - if zarr_version == 2: - MemoryStoreClass = MemoryStore - path = '' - else: - MemoryStoreClass = MemoryStoreV3 - path = 'dataset' - +@pytest.mark.parametrize('stores_from_path', [False, True]) +def test_consolidate_metadata(with_chunk_store, zarr_version, stores_from_path): # setup initial data - store = MemoryStoreClass() - chunk_store = MemoryStoreClass() if with_chunk_store else None - z = group(store, chunk_store=chunk_store, path=path) + if stores_from_path: + store = tempfile.mkdtemp() + atexit.register(atexit_rmtree, store) + if with_chunk_store: + chunk_store = tempfile.mkdtemp() + atexit.register(atexit_rmtree, chunk_store) + else: + chunk_store = None + version_kwarg = {'zarr_version': zarr_version} + else: + if zarr_version == 2: + store = MemoryStore() + chunk_store = MemoryStore() if with_chunk_store else None + elif zarr_version == 3: + 
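+            # v3 keeps metadata under meta/root/ and chunk data under
+            # data/root/, and requires a non-empty path (set just below)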
store = MemoryStoreV3()
+            chunk_store = MemoryStoreV3() if with_chunk_store else None
+        version_kwarg = {}
+    path = 'dataset' if zarr_version == 3 else None
+    z = group(store, chunk_store=chunk_store, path=path, **version_kwarg)
     z.create_group('g1')
     g2 = z.create_group('g2')
     g2.attrs['hello'] = 'world'
@@ -229,41 +236,48 @@ def test_consolidate_metadata(with_chunk_store, zarr_version):
     arr[:] = 1.0
     assert 16 == arr.nchunks_initialized

+    if stores_from_path:
+        # get the actual store class for use with consolidate_metadata
+        store_class = z._store
+    else:
+        store_class = store
+
     if zarr_version == 3:
         # error on v3 if path not provided
         with pytest.raises(ValueError):
-            consolidate_metadata(store, path=None)
+            consolidate_metadata(store_class, path=None)

         with pytest.raises(ValueError):
-            consolidate_metadata(store, path='')
+            consolidate_metadata(store_class, path='')

     # perform consolidation
-    out = consolidate_metadata(store, path=path)
+    out = consolidate_metadata(store_class, path=path)
     assert isinstance(out, Group)
     assert ['g1', 'g2'] == list(out)
-    if zarr_version == 2:
-        assert isinstance(out._store, ConsolidatedMetadataStore)
-        assert '.zmetadata' in store
-        meta_keys = ['.zgroup',
-                     'g1/.zgroup',
-                     'g2/.zgroup',
-                     'g2/.zattrs',
-                     'g2/arr/.zarray',
-                     'g2/arr/.zattrs']
-    else:
-        assert isinstance(out._store, ConsolidatedMetadataStoreV3)
-        assert 'meta/root/consolidated/.zmetadata' in store
-        meta_keys = ['zarr.json',
-                     meta_root + 'dataset.group.json',
-                     meta_root + 'dataset/g1.group.json',
-                     meta_root + 'dataset/g2.group.json',
-                     meta_root + 'dataset/g2/arr.array.json',
-                     'meta/root/consolidated.group.json']
-    for key in meta_keys:
-        del store[key]
+    if not stores_from_path:
+        if zarr_version == 2:
+            assert isinstance(out._store, ConsolidatedMetadataStore)
+            assert '.zmetadata' in store
+            meta_keys = ['.zgroup',
+                         'g1/.zgroup',
+                         'g2/.zgroup',
+                         'g2/.zattrs',
+                         'g2/arr/.zarray',
+                         'g2/arr/.zattrs']
+        else:
+            assert isinstance(out._store, ConsolidatedMetadataStoreV3)
+            assert 'meta/root/consolidated/.zmetadata' in store
+            meta_keys = ['zarr.json',
+                         meta_root + 'dataset.group.json',
+                         meta_root + 'dataset/g1.group.json',
+                         meta_root + 'dataset/g2.group.json',
+                         meta_root + 'dataset/g2/arr.array.json',
+                         'meta/root/consolidated.group.json']
+        for key in meta_keys:
+            del store[key]

     # open consolidated
-    z2 = open_consolidated(store, chunk_store=chunk_store, path=path)
+    z2 = open_consolidated(store, chunk_store=chunk_store, path=path, **version_kwarg)
     assert ['g1', 'g2'] == list(z2)
     assert 'world' == z2.g2.attrs['hello']
     assert 1 == z2.g2.arr.attrs['data']
@@ -271,22 +285,32 @@ def test_consolidate_metadata(with_chunk_store, zarr_version):
     assert 16 == z2.g2.arr.nchunks
     assert 16 == z2.g2.arr.nchunks_initialized

-    # tests del/write on the store
-    if zarr_version == 2:
-        cmd = ConsolidatedMetadataStore(store)
-        with pytest.raises(PermissionError):
-            del cmd['.zgroup']
-        with pytest.raises(PermissionError):
-            cmd['.zgroup'] = None
+    if stores_from_path:
+        # path string is not a BaseStore subclass so cannot be used to
+        # initialize a ConsolidatedMetadataStore.
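+        # (a BaseStore instance would first have to be created from the
+        # path, e.g. via normalize_store_arg)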
+ if zarr_version == 2: + with pytest.raises(ValueError): + cmd = ConsolidatedMetadataStore(store) + elif zarr_version == 3: + with pytest.raises(ValueError): + cmd = ConsolidatedMetadataStoreV3(store) else: - cmd = ConsolidatedMetadataStoreV3(store) - with pytest.raises(PermissionError): - del cmd[meta_root + 'dataset.group.json'] - with pytest.raises(PermissionError): - cmd[meta_root + 'dataset.group.json'] = None + # tests del/write on the store + if zarr_version == 2: + cmd = ConsolidatedMetadataStore(store) + with pytest.raises(PermissionError): + del cmd['.zgroup'] + with pytest.raises(PermissionError): + cmd['.zgroup'] = None + else: + cmd = ConsolidatedMetadataStoreV3(store) + with pytest.raises(PermissionError): + del cmd[meta_root + 'dataset.group.json'] + with pytest.raises(PermissionError): + cmd[meta_root + 'dataset.group.json'] = None - # test getsize on the store - assert isinstance(getsize(cmd), Integral) + # test getsize on the store + assert isinstance(getsize(cmd), Integral) # test new metadata are not writeable with pytest.raises(PermissionError): @@ -316,62 +340,11 @@ def test_consolidate_metadata(with_chunk_store, zarr_version): # make sure keyword arguments are passed through without error open_consolidated( - store, chunk_store=chunk_store, path=path, cache_attrs=True, synchronizer=None + store, chunk_store=chunk_store, path=path, cache_attrs=True, synchronizer=None, + **version_kwarg, ) -def test_consolidated_with_chunk_store(): - # setup initial data - store = MemoryStore() - chunk_store = MemoryStore() - z = group(store, chunk_store=chunk_store) - z.create_group('g1') - g2 = z.create_group('g2') - g2.attrs['hello'] = 'world' - arr = g2.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8') - assert 16 == arr.nchunks - assert 0 == arr.nchunks_initialized - arr.attrs['data'] = 1 - arr[:] = 1.0 - assert 16 == arr.nchunks_initialized - - # perform consolidation - out = consolidate_metadata(store) - assert isinstance(out, Group) - assert '.zmetadata' in store - for key in ['.zgroup', - 'g1/.zgroup', - 'g2/.zgroup', - 'g2/.zattrs', - 'g2/arr/.zarray', - 'g2/arr/.zattrs']: - del store[key] - # open consolidated - z2 = open_consolidated(store, chunk_store=chunk_store) - assert ['g1', 'g2'] == list(z2) - assert 'world' == z2.g2.attrs['hello'] - assert 1 == z2.g2.arr.attrs['data'] - assert (z2.g2.arr[:] == 1.0).all() - assert 16 == z2.g2.arr.nchunks - assert 16 == z2.g2.arr.nchunks_initialized - - # test the data are writeable - z2.g2.arr[:] = 2 - assert (z2.g2.arr[:] == 2).all() - - # test invalid modes - with pytest.raises(ValueError): - open_consolidated(store, mode='a', chunk_store=chunk_store) - with pytest.raises(ValueError): - open_consolidated(store, mode='w', chunk_store=chunk_store) - with pytest.raises(ValueError): - open_consolidated(store, mode='w-', chunk_store=chunk_store) - - # make sure keyword arguments are passed through without error - open_consolidated(store, cache_attrs=True, synchronizer=None, - chunk_store=chunk_store) - - @pytest.mark.parametrize("options", ( {"dimension_separator": "/"}, {"dimension_separator": "."}, diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 08bda94ba2..09523dcd22 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -2720,8 +2720,8 @@ def test_array_init(self): assert isinstance(a, Array) assert (100,) == a.shape assert (10,) == a.chunks - assert path == a.path # TODO: should this include meta/root? - assert '/' + path == a.name # TODO: should this include meta/root? 
+        assert path == a.path
+        assert '/' + path == a.name
         assert 'bar' == a.basename
         assert store is a.store
         assert "968dccbbfc0139f703ead2fd1d503ad6e44db307" == a.hexdigest()
@@ -2772,7 +2772,7 @@ def test_nbytes_stored(self):
         z[:] = 42
         expect_nbytes_stored = sum(buffer_size(v) for k, v in z.store.items() if k != 'zarr.json')
         assert expect_nbytes_stored == z.nbytes_stored
-        assert z.nchunks_initialized == 10  # TODO: added temporarily for testing, can remove
+        assert z.nchunks_initialized == 10

         # mess with store
         if not isinstance(z.store, (LRUStoreCacheV3, FSStoreV3)):
diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py
index 69ab08254e..29fff7536a 100644
--- a/zarr/tests/test_hierarchy.py
+++ b/zarr/tests/test_hierarchy.py
@@ -28,7 +28,7 @@
                           NestedDirectoryStore, SQLiteStore, ZipStore,
                           array_meta_key, atexit_rmglob, atexit_rmtree,
                           data_root, group_meta_key, init_array,
                           init_group, meta_root)
-from zarr.storage import (ABSStoreV3, KVStoreV3, DirectoryStoreV3,  # MemoryStoreV3
+from zarr.storage import (ABSStoreV3, KVStoreV3, DirectoryStoreV3, MemoryStoreV3,
                           FSStoreV3, ZipStoreV3, DBMStoreV3, LMDBStoreV3,
                           SQLiteStoreV3, LRUStoreCacheV3)
 from zarr.util import InfoReporter, buffer_size
@@ -233,8 +233,14 @@ def test_require_group(self):
         # test path normalization
         if g1._version == 2:
-            # TODO: expected behavior for v3
             assert g1.require_group('quux') == g1.require_group('/quux/')
+        elif g1._version == 3:
+            # These are not equal in v3!
+            # 'quux' will be within the group:
+            #     meta/root/group/quux.group.json
+            # '/quux/' will be outside of the group at:
+            #     meta/root/quux.group.json
+            assert g1.require_group('quux') != g1.require_group('/quux/')

         # multi
         g6, g7 = g1.require_groups('y', 'z')
@@ -519,7 +525,7 @@ def test_getitem_contains_iterators(self):
             assert isinstance(g1['/foo/bar/'], Group)
         else:
             # start or end with / raises KeyError
-            # TODO: should we fix allow stripping of these on v3?
+            # TODO: should we allow stripping of these on v3?
             with pytest.raises(KeyError):
                 assert isinstance(g1['/foo/bar/'], Group)
         assert isinstance(g1['foo/baz'], Array)
@@ -547,9 +553,7 @@ def test_getitem_contains_iterators(self):
         assert 'baz' not in g1
         assert 'a/b/c/d' not in g1
         assert 'a/z' not in g1
-        if g1._version == 2:
-            # TODO: handle implicit group for v3 spec
-            assert 'quux' not in g1['foo']
+        assert 'quux' not in g1['foo']

         # test key errors
         with pytest.raises(KeyError):
@@ -888,12 +892,27 @@ def test_move(self):
         assert "foo2" in g
         assert "foo2/bar" not in g
         if g2._version == 2:
-            # TODO: how to access element created outside of group.path in v3?
             assert "bar" in g
+        else:
+            # The `g2.move` call above moved bar to meta/root/bar and
+            # data/root/bar. This is outside the `g` group located at
+            # /meta/root/group, so bar is no longer within `g`.
+            assert "bar" not in g
+            assert 'meta/root/bar.array.json' in g._store
+            if g._chunk_store:
+                assert 'data/root/bar/c0' in g._chunk_store
+            else:
+                assert 'data/root/bar/c0' in g._store

         assert isinstance(g["foo2"], Group)
         if g2._version == 2:
-            # TODO: how to access element created outside of group.path in v3?
             assert_array_equal(data, g["bar"])
+        else:
+            # TODO: How to access element created outside of group.path in v3?
+            # One option is to make a Hierarchy class representing the
+            # root. Currently Group requires specification of `path`,
+            # but the path of the root would be just '' which is not
+            # currently allowed.
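+            # Until then, the moved array is only reachable through the raw
+            # store keys asserted above.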
+            pass

         with pytest.raises(ValueError):
             g2.move("bar", "bar2")
@@ -970,22 +989,44 @@ def test_paths(self):
         g1 = self.create_group()
         g2 = g1.create_group('foo/bar')

-        if g1._version == 3:
-            pytest.skip("TODO: update class for v3")
-
-        assert g1 == g1['/']
-        assert g1 == g1['//']
-        assert g1 == g1['///']
-        assert g1 == g2['/']
-        assert g1 == g2['//']
-        assert g1 == g2['///']
-        assert g2 == g1['foo/bar']
-        assert g2 == g1['/foo/bar']
-        assert g2 == g1['foo/bar/']
-        assert g2 == g1['//foo/bar']
-        assert g2 == g1['//foo//bar//']
-        assert g2 == g1['///foo///bar///']
-        assert g2 == g2['/foo/bar']
+        if g1._version == 2:
+            assert g1 == g1['/']
+            assert g1 == g1['//']
+            assert g1 == g1['///']
+            assert g1 == g2['/']
+            assert g1 == g2['//']
+            assert g1 == g2['///']
+            assert g2 == g1['foo/bar']
+            assert g2 == g1['/foo/bar']
+            assert g2 == g1['foo/bar/']
+            assert g2 == g1['//foo/bar']
+            assert g2 == g1['//foo//bar//']
+            assert g2 == g1['///foo///bar///']
+            assert g2 == g2['/foo/bar']
+        else:
+            # the expected key format gives a match
+            assert g2 == g1['foo/bar']
+
+            # TODO: Should presence of a trailing slash raise KeyError?
+            # The spec says "the final character is not a / character"
+            # but we currently strip trailing '/' as done for v2.
+            assert g2 == g1['foo/bar/']
+
+            # double slash also currently works (spec doesn't mention this
+            # case, but have kept it for v2 behavior compatibility)
+            assert g2 == g1['foo//bar']
+
+            # v3: leading / implies we are at the root, not within a group,
+            # so these all raise KeyError
+            for path in ['/foo/bar', '//foo/bar', '//foo//bar//',
+                         '///foo///bar///']:
+                with pytest.raises(KeyError):
+                    g1[path]
+
+            # For v3 a prefix must be supplied
+            for path in ['/', '//', '///']:
+                with pytest.raises(ValueError):
+                    g2[path]

         with pytest.raises(ValueError):
             g1['.']
@@ -1025,9 +1066,7 @@ def test_pickle(self):
         assert name == g2.name
         assert n == len(g2)
         assert keys == list(g2)
-        if g2._version == 2:
-            # TODO: handle implicit group for v3
-            assert isinstance(g2['foo'], Group)
+        assert isinstance(g2['foo'], Group)
         assert isinstance(g2['foo/bar'], Array)

         g2.store.close()
@@ -1113,13 +1152,13 @@ def create_store():
             return MemoryStore(), None


-# TODO: fix MemoryStoreV3 _get_parent, etc.
-# # noinspection PyStatementEffect -# class TestGroupV3WithMemoryStore(TestGroupWithMemoryStore, TestGroupV3): +# noinspection PyStatementEffect +class TestGroupV3WithMemoryStore(TestGroupWithMemoryStore, TestGroupV3): + + @staticmethod + def create_store(): + return MemoryStoreV3(), None -# @staticmethod -# def create_store(): -# return MemoryStoreV3(), None class TestGroupWithDirectoryStore(TestGroup): @@ -1158,7 +1197,7 @@ def test_pickle(self): @skip_test_env_var("ZARR_TEST_ABS") -class TestGroupWithABSStoreV3(TestGroupV3): +class TestGroupV3WithABSStore(TestGroupV3): @staticmethod def create_store(): @@ -1550,9 +1589,6 @@ def test_open_group(zarr_version): g.create_groups('foo', 'bar') assert 2 == len(g) - # TODO: update the r, r+ test case here for zarr_version == 3 after - # open_array has StoreV3 support - # mode in 'r', 'r+' open_array('data/array.zarr', shape=100, chunks=10, mode='w') for mode in 'r', 'r+': @@ -1816,3 +1852,15 @@ def test_group_mismatched_store_versions(): Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) with pytest.raises(ValueError): Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) + + +@pytest.mark.parametrize('zarr_version', [2, 3]) +def test_open_group_from_paths(zarr_version): + """Verify zarr_version is applied to both the store and chunk_store.""" + store = tempfile.mkdtemp() + chunk_store = tempfile.mkdtemp() + atexit.register(atexit_rmtree, store) + atexit.register(atexit_rmtree, chunk_store) + path = 'g1' + g = open_group(store, path=path, chunk_store=chunk_store, zarr_version=zarr_version) + assert g._store._store_version == g._chunk_store._store_version == zarr_version From e381bf76261a1cc9c10613465ab39b968a24737b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Apr 2022 08:33:03 +0200 Subject: [PATCH 0112/1078] Bump pytest from 7.1.1 to 7.1.2 (#1013) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.1 to 7.1.2. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.1.1...7.1.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 6f0c07cfe0..66cc7c92c3 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.9.1 msgpack-python==0.5.6 setuptools-scm==6.4.2 # test requirements -pytest==7.1.1 +pytest==7.1.2 From 1dc03524840535e33b5859cc46ab1f848bd117a1 Mon Sep 17 00:00:00 2001 From: Shivank Chaudhary <81817735+Alt-Shivam@users.noreply.github.com> Date: Fri, 29 Apr 2022 17:33:58 +0530 Subject: [PATCH 0113/1078] Pre-commit configuration (#1015) * flake8 added * Create Pre-commit-hooks.yml --- .github/workflows/Pre-commit-hooks.yml | 32 ++++++++++++++++++++++++++ .pre-commit-config.yaml | 13 +++++++++++ 2 files changed, 45 insertions(+) create mode 100644 .github/workflows/Pre-commit-hooks.yml create mode 100644 .pre-commit-config.yaml diff --git a/.github/workflows/Pre-commit-hooks.yml b/.github/workflows/Pre-commit-hooks.yml new file mode 100644 index 0000000000..05ed509c53 --- /dev/null +++ b/.github/workflows/Pre-commit-hooks.yml @@ -0,0 +1,32 @@ +# This is a basic workflow to help you get started with Actions + +name: pre-commit + +# Controls when the workflow will run +on: + # Triggers the workflow pull request events but only for the master branch + pull_request: + push: + branches: [master] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # Using Ubuntu image with latest tag + pre-commit: + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v3 + #setting up Python v3.0.0 + - uses: actions/setup-python@v3.0.0 + #using pre-commit latest i.e v2.0.3 + - uses: pre-commit/action@v2.0.3 + #Running pre-commit for all files + - name: Pre-Commit-Run + run: | + pip install pre-commit + pre-commit run --all-files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..95c4260f52 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,13 @@ +default_stages: [commit, push] +default_language_version: + python: python3.8 +repos: + - repo: https://github.com/PyCQA/flake8 + rev: 3.8.2 + hooks: + - id: flake8 + args: [ + --max-line-length=100 + ] + exclude: ^(venv/|docs/) + types: ['python'] \ No newline at end of file From 07c850544fcb8428fb5256bc9d86d05b790a5277 Mon Sep 17 00:00:00 2001 From: Shivank Chaudhary <81817735+Alt-Shivam@users.noreply.github.com> Date: Mon, 2 May 2022 12:30:43 +0530 Subject: [PATCH 0114/1078] Added Yaml Check in Pre-commit (#1016) --- .pre-commit-config.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 95c4260f52..69828ad50d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,4 +10,8 @@ repos: --max-line-length=100 ] exclude: ^(venv/|docs/) - types: ['python'] \ No newline at end of file + types: ['python'] + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.3.0 + hooks: + - id: check-yaml \ No newline at end of file From 27cf31555975eaeb34e802bfdd59c11222f91597 Mon Sep 17 00:00:00 2001 From: Gregory Lee Date: Mon, 2 May 2022 03:32:59 -0400 Subject: [PATCH 0115/1078] Add ZARR_V3_API_AVAILABLE 
environment variable (#1007) * add ZARR_V3_API_AVAILABLE environment variable controls whether or not v3 stores are imported into the top-level zarr namespace * move StoreV3 implementations from storage.py to storage_v3.py * fix * move zarr/storage_v3.py to zarr/_storage/v3.py * Run Linux tests with ZARR_V3_API_AVAILABLE=1 defined * add # pragma nocover to environment-variable dependent branches * flake8 fix * fix outdated import path * fix mypy error --- .github/workflows/python-package.yml | 1 + zarr/__init__.py | 6 + zarr/_storage/store.py | 5 +- zarr/_storage/v3.py | 590 ++++++++++++++++++++++++++ zarr/convenience.py | 4 +- zarr/hierarchy.py | 2 +- zarr/storage.py | 598 ++------------------------- zarr/tests/test_attrs.py | 3 +- zarr/tests/test_convenience.py | 14 +- zarr/tests/test_core.py | 14 +- zarr/tests/test_creation.py | 3 +- zarr/tests/test_hierarchy.py | 6 +- zarr/tests/test_storage.py | 3 +- zarr/tests/test_storage_v3.py | 25 +- zarr/tests/util.py | 3 +- 15 files changed, 686 insertions(+), 591 deletions(-) create mode 100644 zarr/_storage/v3.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e0d404b1a0..91500317b3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -69,6 +69,7 @@ jobs: ZARR_TEST_ABS: 1 ZARR_TEST_MONGO: 1 ZARR_TEST_REDIS: 1 + ZARR_V3_API_AVAILABLE: 1 run: | conda activate zarr-env mkdir ~/blob_emulator diff --git a/zarr/__init__.py b/zarr/__init__.py index 8a906534d1..4d2c992dbf 100644 --- a/zarr/__init__.py +++ b/zarr/__init__.py @@ -10,6 +10,7 @@ from zarr.errors import CopyError, MetadataError from zarr.hierarchy import Group, group, open_group from zarr.n5 import N5Store, N5FSStore +from zarr._storage.store import v3_api_available from zarr.storage import (ABSStore, DBMStore, DictStore, DirectoryStore, KVStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, NestedDirectoryStore, RedisStore, SQLiteStore, @@ -19,3 +20,8 @@ # in case setuptools scm screw up and find version to be 0.0.0 assert not __version__.startswith("0.0.0") + +if v3_api_available: + from zarr._storage.v3 import (ABSStoreV3, DBMStoreV3, KVStoreV3, DirectoryStoreV3, + LMDBStoreV3, LRUStoreCacheV3, MemoryStoreV3, MongoDBStoreV3, + RedisStoreV3, SQLiteStoreV3, ZipStoreV3) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index d1ad930609..152c9abd6b 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -1,4 +1,5 @@ import abc +import os from collections.abc import MutableMapping from string import ascii_letters, digits from typing import Any, List, Mapping, Optional, Union @@ -17,6 +18,8 @@ DEFAULT_ZARR_VERSION = 2 +v3_api_available = os.environ.get('ZARR_V3_API_AVAILABLE', '0').lower() not in ['0', 'false'] + class BaseStore(MutableMapping): """Abstract base class for store implementations. 
@@ -261,7 +264,7 @@ def _ensure_store(store): We'll do this conversion in a few places automatically """ - from zarr.storage import KVStoreV3 # avoid circular import + from zarr._storage.v3 import KVStoreV3 # avoid circular import if store is None: return None elif isinstance(store, StoreV3): diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py new file mode 100644 index 0000000000..540b62ef7e --- /dev/null +++ b/zarr/_storage/v3.py @@ -0,0 +1,590 @@ +import os +import shutil +from collections import OrderedDict +from collections.abc import MutableMapping +from threading import Lock +from typing import Union, Dict, Any + +from zarr.errors import ( + MetadataError, + ReadOnlyError, +) +from zarr.util import (buffer_size, json_loads, normalize_storage_path) + +from zarr._storage.absstore import ABSStoreV3 # noqa: F401 +from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 + _get_metadata_suffix, + _listdir_from_keys, + _rename_from_keys, + _rename_metadata_v3, + _rmdir_from_keys, + _rmdir_from_keys_v3, + _path_to_prefix, + _prefix_to_array_key, + _prefix_to_group_key, + array_meta_key, + attrs_key, + data_root, + group_meta_key, + meta_root, + BaseStore, + Store, + StoreV3) +from zarr.storage import (DBMStore, ConsolidatedMetadataStore, DirectoryStore, FSStore, KVStore, + LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, RedisStore, + SQLiteStore, ZipStore, _getsize) + +__doctest_requires__ = { + ('RedisStore', 'RedisStore.*'): ['redis'], + ('MongoDBStore', 'MongoDBStore.*'): ['pymongo'], + ('LRUStoreCache', 'LRUStoreCache.*'): ['s3fs'], +} + + +try: + # noinspection PyUnresolvedReferences + from zarr.codecs import Blosc + default_compressor = Blosc() +except ImportError: # pragma: no cover + from zarr.codecs import Zlib + default_compressor = Zlib() + + +Path = Union[str, bytes, None] +# allow MutableMapping for backwards compatibility +StoreLike = Union[BaseStore, MutableMapping] + + +class RmdirV3(): + """Mixin class that can be used to ensure override of any existing v2 rmdir class.""" + + def rmdir(self, path: str = "") -> None: + path = normalize_storage_path(path) + _rmdir_from_keys_v3(self, path) # type: ignore + + +class KVStoreV3(RmdirV3, KVStore, StoreV3): + + def list(self): + return list(self._mutable_mapping.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def __eq__(self, other): + return ( + isinstance(other, KVStoreV3) and + self._mutable_mapping == other._mutable_mapping + ) + + +KVStoreV3.__doc__ = KVStore.__doc__ + + +def _get_files_and_dirs_from_path(store, path): + path = normalize_storage_path(path) + + files = [] + # add array metadata file if present + array_key = _prefix_to_array_key(store, path) + if array_key in store: + files.append(os.path.join(store.path, array_key)) + + # add group metadata file if present + group_key = _prefix_to_group_key(store, path) + if group_key in store: + files.append(os.path.join(store.path, group_key)) + + dirs = [] + # add array and group folders if present + for d in [data_root + path, meta_root + path]: + dir_path = os.path.join(store.path, d) + if os.path.exists(dir_path): + dirs.append(dir_path) + return files, dirs + + +class FSStoreV3(FSStore, StoreV3): + + # FSStoreV3 doesn't use this (FSStore uses it within _normalize_key) + _META_KEYS = () + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def _default_key_separator(self): + if self.key_separator is None: + self.key_separator = "/" + + def 
list(self): + return list(self.keys()) + + def _normalize_key(self, key): + key = normalize_storage_path(key).lstrip('/') + return key.lower() if self.normalize_keys else key + + def getsize(self, path=None): + size = 0 + if path is None or path == '': + # size of both the data and meta subdirs + dirs = [] + for d in ['data/root', 'meta/root']: + dir_path = os.path.join(self.path, d) + if os.path.exists(dir_path): + dirs.append(dir_path) + elif path in self: + # access individual element by full path + return buffer_size(self[path]) + else: + files, dirs = _get_files_and_dirs_from_path(self, path) + for file in files: + size += os.path.getsize(file) + for d in dirs: + size += self.fs.du(d, total=True, maxdepth=None) + return size + + def setitems(self, values): + if self.mode == 'r': + raise ReadOnlyError() + values = {self._normalize_key(key): val for key, val in values.items()} + + # initialize the /data/root/... folder corresponding to the array! + # Note: zarr.tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails + # without this explicit creation of directories + subdirectories = set([os.path.dirname(v) for v in values.keys()]) + for subdirectory in subdirectories: + data_dir = os.path.join(self.path, subdirectory) + if not self.fs.exists(data_dir): + self.fs.mkdir(data_dir) + + self.map.setitems(values) + + def rmdir(self, path=None): + if self.mode == 'r': + raise ReadOnlyError() + if path: + for base in [meta_root, data_root]: + store_path = self.dir_path(base + path) + if self.fs.isdir(store_path): + self.fs.rm(store_path, recursive=True) + + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + else: + store_path = self.dir_path(path) + if self.fs.isdir(store_path): + self.fs.rm(store_path, recursive=True) + + +class MemoryStoreV3(MemoryStore, StoreV3): + + def __init__(self, root=None, cls=dict, dimension_separator=None): + if root is None: + self.root = cls() + else: + self.root = root + self.cls = cls + self.write_mutex = Lock() + self._dimension_separator = dimension_separator # TODO: modify for v3? 
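+        # (v3 array metadata records the separator in its chunk_grid, so
+        # this store-level attribute may be redundant for v3)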
+ + def __eq__(self, other): + return ( + isinstance(other, MemoryStoreV3) and + self.root == other.root and + self.cls == other.cls + ) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def list(self): + return list(self.keys()) + + def getsize(self, path: Path = None): + return _getsize(self, path) + + def rename(self, src_path: Path, dst_path: Path): + src_path = normalize_storage_path(src_path) + dst_path = normalize_storage_path(dst_path) + + any_renamed = False + for base in [meta_root, data_root]: + if self.list_prefix(base + src_path): + src_parent, src_key = self._get_parent(base + src_path) + dst_parent, dst_key = self._require_parent(base + dst_path) + + if src_key in src_parent: + dst_parent[dst_key] = src_parent.pop(src_key) + + if base == meta_root: + # check for and move corresponding metadata + sfx = _get_metadata_suffix(self) + src_meta = src_key + '.array' + sfx + if src_meta in src_parent: + dst_meta = dst_key + '.array' + sfx + dst_parent[dst_meta] = src_parent.pop(src_meta) + src_meta = src_key + '.group' + sfx + if src_meta in src_parent: + dst_meta = dst_key + '.group' + sfx + dst_parent[dst_meta] = src_parent.pop(src_meta) + any_renamed = True + any_renamed = _rename_metadata_v3(self, src_path, dst_path) or any_renamed + if not any_renamed: + raise ValueError(f"no item {src_path} found to rename") + + def rmdir(self, path: Path = None): + path = normalize_storage_path(path) + if path: + for base in [meta_root, data_root]: + try: + parent, key = self._get_parent(base + path) + value = parent[key] + except KeyError: + continue + else: + if isinstance(value, self.cls): + del parent[key] + + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + else: + # clear out root + self.root = self.cls() + + +MemoryStoreV3.__doc__ = MemoryStore.__doc__ + + +class DirectoryStoreV3(DirectoryStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __eq__(self, other): + return ( + isinstance(other, DirectoryStoreV3) and + self.path == other.path + ) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def getsize(self, path: Path = None): + return _getsize(self, path) + + def rename(self, src_path, dst_path, metadata_key_suffix='.json'): + store_src_path = normalize_storage_path(src_path) + store_dst_path = normalize_storage_path(dst_path) + + dir_path = self.path + any_existed = False + for root_prefix in ['meta', 'data']: + src_path = os.path.join(dir_path, root_prefix, 'root', store_src_path) + if os.path.exists(src_path): + any_existed = True + dst_path = os.path.join(dir_path, root_prefix, 'root', store_dst_path) + os.renames(src_path, dst_path) + + for suffix in ['.array' + metadata_key_suffix, + '.group' + metadata_key_suffix]: + src_meta = os.path.join(dir_path, 'meta', 'root', store_src_path + suffix) + if os.path.exists(src_meta): + any_existed = True + dst_meta = os.path.join(dir_path, 'meta', 'root', store_dst_path + suffix) + dst_dir = os.path.dirname(dst_meta) + if not os.path.exists(dst_dir): + os.makedirs(dst_dir) + os.rename(src_meta, dst_meta) + if not any_existed: + raise FileNotFoundError("nothing found at src_path") + + def rmdir(self, path=None): + store_path = normalize_storage_path(path) + dir_path = self.path + if 
store_path: + for base in [meta_root, data_root]: + dir_path = os.path.join(dir_path, base + store_path) + if os.path.isdir(dir_path): + shutil.rmtree(dir_path) + + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + + elif os.path.isdir(dir_path): + shutil.rmtree(dir_path) + + +DirectoryStoreV3.__doc__ = DirectoryStore.__doc__ + + +class ZipStoreV3(ZipStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __eq__(self, other): + return ( + isinstance(other, ZipStore) and + self.path == other.path and + self.compression == other.compression and + self.allowZip64 == other.allowZip64 + ) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def getsize(self, path=None): + path = normalize_storage_path(path) + with self.mutex: + children = self.list_prefix(data_root + path) + children += self.list_prefix(meta_root + path) + print(f"path={path}, children={children}") + if children: + size = 0 + for name in children: + info = self.zf.getinfo(name) + size += info.compress_size + return size + elif path in self: + info = self.zf.getinfo(path) + return info.compress_size + else: + return 0 + + +ZipStoreV3.__doc__ = ZipStore.__doc__ + + +class RedisStoreV3(RmdirV3, RedisStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + +RedisStoreV3.__doc__ = RedisStore.__doc__ + + +class MongoDBStoreV3(RmdirV3, MongoDBStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + +MongoDBStoreV3.__doc__ = MongoDBStore.__doc__ + + +class DBMStoreV3(RmdirV3, DBMStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + +DBMStoreV3.__doc__ = DBMStore.__doc__ + + +class LMDBStoreV3(RmdirV3, LMDBStore, StoreV3): + + def list(self): + return list(self.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + +LMDBStoreV3.__doc__ = LMDBStore.__doc__ + + +class SQLiteStoreV3(SQLiteStore, StoreV3): + + def list(self): + return list(self.keys()) + + def getsize(self, path=None): + # TODO: why does the query below not work in this case? + # For now fall back to the default _getsize implementation + # size = 0 + # for _path in [data_root + path, meta_root + path]: + # c = self.cursor.execute( + # ''' + # SELECT COALESCE(SUM(LENGTH(v)), 0) FROM zarr + # WHERE k LIKE (? || "%") AND + # 0 == INSTR(LTRIM(SUBSTR(k, LENGTH(?) + 1), "/"), "/") + # ''', + # (_path, _path) + # ) + # for item_size, in c: + # size += item_size + # return size + + # fallback to default implementation for now + return _getsize(self, path) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + def rmdir(self, path=None): + path = normalize_storage_path(path) + if path: + for base in [meta_root, data_root]: + with self.lock: + self.cursor.execute( + 'DELETE FROM zarr WHERE k LIKE (? 
|| "/%")', (base + path,) + ) + # remove any associated metadata files + sfx = _get_metadata_suffix(self) + meta_dir = (meta_root + path).rstrip('/') + array_meta_file = meta_dir + '.array' + sfx + self.pop(array_meta_file, None) + group_meta_file = meta_dir + '.group' + sfx + self.pop(group_meta_file, None) + else: + self.clear() + + +SQLiteStoreV3.__doc__ = SQLiteStore.__doc__ + + +class LRUStoreCacheV3(RmdirV3, LRUStoreCache, StoreV3): + + def __init__(self, store, max_size: int): + self._store = StoreV3._ensure_store(store) + self._max_size = max_size + self._current_size = 0 + self._keys_cache = None + self._contains_cache = None + self._listdir_cache: Dict[Path, Any] = dict() + self._values_cache: Dict[Path, Any] = OrderedDict() + self._mutex = Lock() + self.hits = self.misses = 0 + + def list(self): + return list(self.keys()) + + def __setitem__(self, key, value): + self._validate_key(key) + super().__setitem__(key, value) + + +LRUStoreCacheV3.__doc__ = LRUStoreCache.__doc__ + + +class ConsolidatedMetadataStoreV3(ConsolidatedMetadataStore, StoreV3): + """A layer over other storage, where the metadata has been consolidated into + a single key. + + The purpose of this class, is to be able to get all of the metadata for + a given array in a single read operation from the underlying storage. + See :func:`zarr.convenience.consolidate_metadata` for how to create this + single metadata key. + + This class loads from the one key, and stores the data in a dict, so that + accessing the keys no longer requires operations on the backend store. + + This class is read-only, and attempts to change the array metadata will + fail, but changing the data is possible. If the backend storage is changed + directly, then the metadata stored here could become obsolete, and + :func:`zarr.convenience.consolidate_metadata` should be called again and the class + re-invoked. The use case is for write once, read many times. + + .. note:: This is an experimental feature. + + Parameters + ---------- + store: Store + Containing the zarr array. + metadata_key: str + The target in the store where all of the metadata are stored. We + assume JSON encoding. 
+ + See Also + -------- + zarr.convenience.consolidate_metadata, zarr.convenience.open_consolidated + + """ + + def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zmetadata"): + self.store = StoreV3._ensure_store(store) + + # retrieve consolidated metadata + meta = json_loads(self.store[metadata_key]) + + # check format of consolidated metadata + consolidated_format = meta.get('zarr_consolidated_format', None) + if consolidated_format != 1: + raise MetadataError('unsupported zarr consolidated metadata format: %s' % + consolidated_format) + + # decode metadata + self.meta_store: Store = KVStoreV3(meta["metadata"]) + + def rmdir(self, key): + raise ReadOnlyError() + + +def _normalize_store_arg_v3(store: Any, storage_options=None, mode="r") -> BaseStore: + # assume a v3 store when the store does not declare a version + zarr_version = getattr(store, '_store_version', 3) + if zarr_version != 3: + raise ValueError("store must be a version 3 store") + if store is None: + store = KVStoreV3(dict()) + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + return store + if isinstance(store, os.PathLike): + store = os.fspath(store) + if isinstance(store, str): + if "://" in store or "::" in store: + store = FSStoreV3(store, mode=mode, **(storage_options or {})) + elif storage_options: + raise ValueError("storage_options passed with non-fsspec path") + elif store.endswith('.zip'): + store = ZipStoreV3(store, mode=mode) + elif store.endswith('.n5'): + raise NotImplementedError("N5Store not yet implemented for V3") + # return N5StoreV3(store) + else: + store = DirectoryStoreV3(store) + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + return store + else: + store = StoreV3._ensure_store(store) + if 'zarr.json' not in store: + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + return store diff --git a/zarr/convenience.py b/zarr/convenience.py index 07d649a329..60e47dc339 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -15,8 +15,8 @@ from zarr.hierarchy import open_group from zarr.meta import json_dumps, json_loads from zarr.storage import (_get_metadata_suffix, contains_array, contains_group, - normalize_store_arg, BaseStore, ConsolidatedMetadataStore, - ConsolidatedMetadataStoreV3) + normalize_store_arg, BaseStore, ConsolidatedMetadataStore) +from zarr._storage.v3 import ConsolidatedMetadataStoreV3 from zarr.util import TreeViewer, buffer_size, normalize_storage_path from typing import Union diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index c2ddf451f1..d92af08ffb 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -19,7 +19,6 @@ _prefix_to_group_key, BaseStore, MemoryStore, - MemoryStoreV3, attrs_key, contains_array, contains_group, @@ -30,6 +29,7 @@ rename, rmdir, ) +from zarr._storage.v3 import MemoryStoreV3 from zarr.util import ( InfoReporter, TreeViewer, diff --git a/zarr/storage.py b/zarr/storage.py index 2a6b756d64..e8a65147aa 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -43,7 +43,6 @@ ) from numcodecs.registry import codec_registry -from zarr._storage.store import DEFAULT_ZARR_VERSION from zarr.errors import ( MetadataError, BadCompressorError, @@ -58,7 +57,7 @@ normalize_dtype, normalize_fill_value, normalize_order, normalize_shape, normalize_storage_path, retry_call) -from zarr._storage.absstore import ABSStore, ABSStoreV3 # noqa: F401 +from zarr._storage.absstore
import ABSStore # noqa: F401 from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 _get_metadata_suffix, _listdir_from_keys, @@ -74,9 +73,9 @@ data_root, group_meta_key, meta_root, + DEFAULT_ZARR_VERSION, BaseStore, - Store, - StoreV3) + Store) __doctest_requires__ = { ('RedisStore', 'RedisStore.*'): ['redis'], @@ -129,67 +128,48 @@ def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> b return False -def normalize_store_arg(store: Any, storage_options=None, mode="r", *, - zarr_version=None) -> BaseStore: - if zarr_version is None: - # default to v2 store for backward compatibility - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) - if zarr_version not in [2, 3]: - raise ValueError("zarr_version must be 2 or 3") +def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseStore: + # default to v2 store for backward compatibility + zarr_version = getattr(store, '_store_version', 2) + if zarr_version != 2: + raise ValueError("store must be a version 2 store") if store is None: - if zarr_version == 2: - store = KVStore(dict()) - else: - store = KVStoreV3(dict()) - # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + store = KVStore(dict()) return store - elif hasattr(store, '_store_version') and store._store_version != zarr_version: - raise ValueError( - f"store is a zarr v{store._store_version} store which conflicts " - f"with the specified zarr_version ({zarr_version})." - ) - if isinstance(store, os.PathLike): store = os.fspath(store) if isinstance(store, str): - if zarr_version == 2: - if "://" in store or "::" in store: - return FSStore(store, mode=mode, **(storage_options or {})) - elif storage_options: - raise ValueError("storage_options passed with non-fsspec path") - if store.endswith('.zip'): - return ZipStore(store, mode=mode) - elif store.endswith('.n5'): - from zarr.n5 import N5Store - return N5Store(store) - else: - return DirectoryStore(store) - elif zarr_version == 3: - if "://" in store or "::" in store: - store = FSStoreV3(store, mode=mode, **(storage_options or {})) - elif storage_options: - raise ValueError("storage_options passed with non-fsspec path") - elif store.endswith('.zip'): - store = ZipStoreV3(store, mode=mode) - elif store.endswith('.n5'): - raise NotImplementedError("N5Store not yet implemented for V3") - # return N5StoreV3(store) - else: - store = DirectoryStoreV3(store) - # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) - return store - elif zarr_version == 2: + if "://" in store or "::" in store: + return FSStore(store, mode=mode, **(storage_options or {})) + elif storage_options: + raise ValueError("storage_options passed with non-fsspec path") + if store.endswith('.zip'): + return ZipStore(store, mode=mode) + elif store.endswith('.n5'): + from zarr.n5 import N5Store + return N5Store(store) + else: + return DirectoryStore(store) + else: store = Store._ensure_store(store) - elif zarr_version == 3: - store = StoreV3._ensure_store(store) - if 'zarr.json' not in store: - # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) return store +def normalize_store_arg(store: Any, storage_options=None, mode="r", *, + zarr_version=None) -> BaseStore: + if zarr_version is None: + # default to v2 store for backward compatibility + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) + elif 
zarr_version not in [2, 3]: + raise ValueError("zarr_version must be either 2 or 3") + if zarr_version == 2: + normalize_store = _normalize_store_arg_v2 + elif zarr_version == 3: + from zarr._storage.v3 import _normalize_store_arg_v3 + normalize_store = _normalize_store_arg_v3 + return normalize_store(store, storage_options, mode) + + def rmdir(store: StoreLike, path: Path = None): """Remove all items under the given path. If `store` provides a `rmdir` method, this will be called, otherwise will fall back to implementation via the @@ -2874,509 +2854,3 @@ def getsize(self, path): def listdir(self, path): return listdir(self.meta_store, path) - - -""" versions of stores following the v3 protocol """ - - -def _get_files_and_dirs_from_path(store, path): - path = normalize_storage_path(path) - - files = [] - # add array metadata file if present - array_key = _prefix_to_array_key(store, path) - if array_key in store: - files.append(os.path.join(store.path, array_key)) - - # add group metadata file if present - group_key = _prefix_to_group_key(store, path) - if group_key in store: - files.append(os.path.join(store.path, group_key)) - - dirs = [] - # add array and group folders if present - for d in [data_root + path, meta_root + path]: - dir_path = os.path.join(store.path, d) - if os.path.exists(dir_path): - dirs.append(dir_path) - return files, dirs - - -class RmdirV3(): - """Mixin class that can be used to ensure override of any existing v2 rmdir class.""" - - def rmdir(self, path: str = "") -> None: - path = normalize_storage_path(path) - _rmdir_from_keys_v3(self, path) # type: ignore - - -class KVStoreV3(RmdirV3, KVStore, StoreV3): - - def list(self): - return list(self._mutable_mapping.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def __eq__(self, other): - return ( - isinstance(other, KVStoreV3) and - self._mutable_mapping == other._mutable_mapping - ) - - -KVStoreV3.__doc__ = KVStore.__doc__ - - -class FSStoreV3(FSStore, StoreV3): - - # FSStoreV3 doesn't use this (FSStore uses it within _normalize_key) - _META_KEYS = () - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def _default_key_separator(self): - if self.key_separator is None: - self.key_separator = "/" - - def list(self): - return list(self.keys()) - - def _normalize_key(self, key): - key = normalize_storage_path(key).lstrip('/') - return key.lower() if self.normalize_keys else key - - def getsize(self, path=None): - size = 0 - if path is None or path == '': - # size of both the data and meta subdirs - dirs = [] - for d in ['data/root', 'meta/root']: - dir_path = os.path.join(self.path, d) - if os.path.exists(dir_path): - dirs.append(dir_path) - elif path in self: - # access individual element by full path - return buffer_size(self[path]) - else: - files, dirs = _get_files_and_dirs_from_path(self, path) - for file in files: - size += os.path.getsize(file) - for d in dirs: - size += self.fs.du(d, total=True, maxdepth=None) - return size - - def setitems(self, values): - if self.mode == 'r': - raise ReadOnlyError() - values = {self._normalize_key(key): val for key, val in values.items()} - - # initialize the /data/root/... folder corresponding to the array! 
- # Note: zarr.tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails - # without this explicit creation of directories - subdirectories = set([os.path.dirname(v) for v in values.keys()]) - for subdirectory in subdirectories: - data_dir = os.path.join(self.path, subdirectory) - if not self.fs.exists(data_dir): - self.fs.mkdir(data_dir) - - self.map.setitems(values) - - def rmdir(self, path=None): - if self.mode == 'r': - raise ReadOnlyError() - if path: - for base in [meta_root, data_root]: - store_path = self.dir_path(base + path) - if self.fs.isdir(store_path): - self.fs.rm(store_path, recursive=True) - - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx - self.pop(group_meta_file, None) - else: - store_path = self.dir_path(path) - if self.fs.isdir(store_path): - self.fs.rm(store_path, recursive=True) - - -class MemoryStoreV3(MemoryStore, StoreV3): - - def __init__(self, root=None, cls=dict, dimension_separator=None): - if root is None: - self.root = cls() - else: - self.root = root - self.cls = cls - self.write_mutex = Lock() - self._dimension_separator = dimension_separator # TODO: modify for v3? - - def __eq__(self, other): - return ( - isinstance(other, MemoryStoreV3) and - self.root == other.root and - self.cls == other.cls - ) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def list(self): - return list(self.keys()) - - def getsize(self, path: Path = None): - return _getsize(self, path) - - def rename(self, src_path: Path, dst_path: Path): - src_path = normalize_storage_path(src_path) - dst_path = normalize_storage_path(dst_path) - - any_renamed = False - for base in [meta_root, data_root]: - if self.list_prefix(base + src_path): - src_parent, src_key = self._get_parent(base + src_path) - dst_parent, dst_key = self._require_parent(base + dst_path) - - if src_key in src_parent: - dst_parent[dst_key] = src_parent.pop(src_key) - - if base == meta_root: - # check for and move corresponding metadata - sfx = _get_metadata_suffix(self) - src_meta = src_key + '.array' + sfx - if src_meta in src_parent: - dst_meta = dst_key + '.array' + sfx - dst_parent[dst_meta] = src_parent.pop(src_meta) - src_meta = src_key + '.group' + sfx - if src_meta in src_parent: - dst_meta = dst_key + '.group' + sfx - dst_parent[dst_meta] = src_parent.pop(src_meta) - any_renamed = True - any_renamed = _rename_metadata_v3(self, src_path, dst_path) or any_renamed - if not any_renamed: - raise ValueError(f"no item {src_path} found to rename") - - def rmdir(self, path: Path = None): - path = normalize_storage_path(path) - if path: - for base in [meta_root, data_root]: - try: - parent, key = self._get_parent(base + path) - value = parent[key] - except KeyError: - continue - else: - if isinstance(value, self.cls): - del parent[key] - - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx - self.pop(group_meta_file, None) - else: - # clear out root - self.root = self.cls() - - -MemoryStoreV3.__doc__ = MemoryStore.__doc__ - - -class DirectoryStoreV3(DirectoryStore, StoreV3): - - def list(self): - return list(self.keys()) - - def __eq__(self, other): - return ( - isinstance(other, 
DirectoryStoreV3) and - self.path == other.path - ) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def getsize(self, path: Path = None): - return _getsize(self, path) - - def rename(self, src_path, dst_path, metadata_key_suffix='.json'): - store_src_path = normalize_storage_path(src_path) - store_dst_path = normalize_storage_path(dst_path) - - dir_path = self.path - any_existed = False - for root_prefix in ['meta', 'data']: - src_path = os.path.join(dir_path, root_prefix, 'root', store_src_path) - if os.path.exists(src_path): - any_existed = True - dst_path = os.path.join(dir_path, root_prefix, 'root', store_dst_path) - os.renames(src_path, dst_path) - - for suffix in ['.array' + metadata_key_suffix, - '.group' + metadata_key_suffix]: - src_meta = os.path.join(dir_path, 'meta', 'root', store_src_path + suffix) - if os.path.exists(src_meta): - any_existed = True - dst_meta = os.path.join(dir_path, 'meta', 'root', store_dst_path + suffix) - dst_dir = os.path.dirname(dst_meta) - if not os.path.exists(dst_dir): - os.makedirs(dst_dir) - os.rename(src_meta, dst_meta) - if not any_existed: - raise FileNotFoundError("nothing found at src_path") - - def rmdir(self, path=None): - store_path = normalize_storage_path(path) - dir_path = self.path - if store_path: - for base in [meta_root, data_root]: - dir_path = os.path.join(dir_path, base + store_path) - if os.path.isdir(dir_path): - shutil.rmtree(dir_path) - - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx - self.pop(group_meta_file, None) - - elif os.path.isdir(dir_path): - shutil.rmtree(dir_path) - - -DirectoryStoreV3.__doc__ = DirectoryStore.__doc__ - - -class ZipStoreV3(ZipStore, StoreV3): - - def list(self): - return list(self.keys()) - - def __eq__(self, other): - return ( - isinstance(other, ZipStore) and - self.path == other.path and - self.compression == other.compression and - self.allowZip64 == other.allowZip64 - ) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def getsize(self, path=None): - path = normalize_storage_path(path) - with self.mutex: - children = self.list_prefix(data_root + path) - children += self.list_prefix(meta_root + path) - print(f"path={path}, children={children}") - if children: - size = 0 - for name in children: - info = self.zf.getinfo(name) - size += info.compress_size - return size - elif path in self: - info = self.zf.getinfo(path) - return info.compress_size - else: - return 0 - - -ZipStoreV3.__doc__ = ZipStore.__doc__ - - -class RedisStoreV3(RmdirV3, RedisStore, StoreV3): - - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -RedisStoreV3.__doc__ = RedisStore.__doc__ - - -class MongoDBStoreV3(RmdirV3, MongoDBStore, StoreV3): - - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -MongoDBStoreV3.__doc__ = MongoDBStore.__doc__ - - -class DBMStoreV3(RmdirV3, DBMStore, StoreV3): - - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -DBMStoreV3.__doc__ = DBMStore.__doc__ - - -class LMDBStoreV3(RmdirV3, LMDBStore, StoreV3): - - def 
list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -LMDBStoreV3.__doc__ = LMDBStore.__doc__ - - -class SQLiteStoreV3(SQLiteStore, StoreV3): - - def list(self): - return list(self.keys()) - - def getsize(self, path=None): - # TODO: why does the query below not work in this case? - # For now fall back to the default _getsize implementation - # size = 0 - # for _path in [data_root + path, meta_root + path]: - # c = self.cursor.execute( - # ''' - # SELECT COALESCE(SUM(LENGTH(v)), 0) FROM zarr - # WHERE k LIKE (? || "%") AND - # 0 == INSTR(LTRIM(SUBSTR(k, LENGTH(?) + 1), "/"), "/") - # ''', - # (_path, _path) - # ) - # for item_size, in c: - # size += item_size - # return size - - # fallback to default implementation for now - return _getsize(self, path) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def rmdir(self, path=None): - path = normalize_storage_path(path) - if path: - for base in [meta_root, data_root]: - with self.lock: - self.cursor.execute( - 'DELETE FROM zarr WHERE k LIKE (? || "/%")', (base + path,) - ) - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx - self.pop(group_meta_file, None) - else: - self.clear() - - -SQLiteStoreV3.__doc__ = SQLiteStore.__doc__ - - -class LRUStoreCacheV3(RmdirV3, LRUStoreCache, StoreV3): - - def __init__(self, store, max_size: int): - self._store = StoreV3._ensure_store(store) - self._max_size = max_size - self._current_size = 0 - self._keys_cache = None - self._contains_cache = None - self._listdir_cache: Dict[Path, Any] = dict() - self._values_cache: Dict[Path, Any] = OrderedDict() - self._mutex = Lock() - self.hits = self.misses = 0 - - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -LRUStoreCacheV3.__doc__ = LRUStoreCache.__doc__ - - -class ConsolidatedMetadataStoreV3(ConsolidatedMetadataStore, StoreV3): - """A layer over other storage, where the metadata has been consolidated into - a single key. - - The purpose of this class, is to be able to get all of the metadata for - a given array in a single read operation from the underlying storage. - See :func:`zarr.convenience.consolidate_metadata` for how to create this - single metadata key. - - This class loads from the one key, and stores the data in a dict, so that - accessing the keys no longer requires operations on the backend store. - - This class is read-only, and attempts to change the array metadata will - fail, but changing the data is possible. If the backend storage is changed - directly, then the metadata stored here could become obsolete, and - :func:`zarr.convenience.consolidate_metadata` should be called again and the class - re-invoked. The use case is for write once, read many times. - - .. note:: This is an experimental feature. - - Parameters - ---------- - store: Store - Containing the zarr array. - metadata_key: str - The target in the store where all of the metadata are stored. We - assume JSON encoding. 
- - See Also - -------- - zarr.convenience.consolidate_metadata, zarr.convenience.open_consolidated - - """ - - def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zmetadata"): - self.store = StoreV3._ensure_store(store) - - # retrieve consolidated metadata - meta = json_loads(self.store[metadata_key]) - - # check format of consolidated metadata - consolidated_format = meta.get('zarr_consolidated_format', None) - if consolidated_format != 1: - raise MetadataError('unsupported zarr consolidated metadata format: %s' % - consolidated_format) - - # decode metadata - self.meta_store: Store = KVStoreV3(meta["metadata"]) - - def rmdir(self, key): - raise ReadOnlyError() - - # def __setitem__(self, key, value): - # raise ReadOnlyError() diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index dbbc19328a..b8058d9d63 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -4,7 +4,8 @@ from zarr._storage.store import meta_root from zarr.attrs import Attributes -from zarr.storage import KVStore, KVStoreV3 +from zarr.storage import KVStore +from zarr._storage.v3 import KVStoreV3 from zarr.tests.util import CountingDict, CountingDictV3 diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 53fa447b48..097512a240 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -26,19 +26,21 @@ from zarr.hierarchy import Group, group from zarr.storage import ( ConsolidatedMetadataStore, - ConsolidatedMetadataStoreV3, - DirectoryStoreV3, - FSStoreV3, KVStore, - KVStoreV3, MemoryStore, - MemoryStoreV3, - SQLiteStoreV3, atexit_rmtree, data_root, meta_root, getsize, ) +from zarr._storage.v3 import ( + ConsolidatedMetadataStoreV3, + DirectoryStoreV3, + FSStoreV3, + KVStoreV3, + MemoryStoreV3, + SQLiteStoreV3, +) from zarr.tests.util import have_fsspec diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 09523dcd22..635b5769c9 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -36,6 +36,14 @@ LRUStoreCache, NestedDirectoryStore, SQLiteStore, + atexit_rmglob, + atexit_rmtree, + data_root, + init_array, + init_group, + meta_root, +) +from zarr._storage.v3 import ( ABSStoreV3, DBMStoreV3, DirectoryStoreV3, @@ -45,12 +53,6 @@ LRUStoreCacheV3, SQLiteStoreV3, StoreV3, - atexit_rmglob, - atexit_rmtree, - data_root, - init_array, - init_group, - meta_root, ) from zarr.util import buffer_size from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index ee99bc7c9f..863b987d9b 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -16,7 +16,8 @@ zeros_like) from zarr.hierarchy import open_group from zarr.n5 import N5Store -from zarr.storage import DirectoryStore, DirectoryStoreV3, KVStore, KVStoreV3 +from zarr.storage import DirectoryStore, KVStore +from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index 29fff7536a..3bcd826882 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -28,9 +28,9 @@ NestedDirectoryStore, SQLiteStore, ZipStore, array_meta_key, atexit_rmglob, atexit_rmtree, data_root, group_meta_key, init_array, init_group, meta_root) -from zarr.storage import (ABSStoreV3, KVStoreV3, DirectoryStoreV3, MemoryStoreV3, - FSStoreV3, ZipStoreV3, DBMStoreV3, LMDBStoreV3, SQLiteStoreV3, - LRUStoreCacheV3) +from zarr._storage.v3 
import (ABSStoreV3, KVStoreV3, DirectoryStoreV3, MemoryStoreV3, + FSStoreV3, ZipStoreV3, DBMStoreV3, LMDBStoreV3, SQLiteStoreV3, + LRUStoreCacheV3) from zarr.util import InfoReporter, buffer_size from zarr.tests.util import skip_test_env_var, have_fsspec, abs_container diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 6f5aac8011..9fb869d5e3 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -29,12 +29,13 @@ DictStore, DirectoryStore, KVStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, NestedDirectoryStore, RedisStore, SQLiteStore, - Store, TempStore, ZipStore, KVStoreV3, + Store, TempStore, ZipStore, array_meta_key, atexit_rmglob, atexit_rmtree, attrs_key, data_root, default_compressor, getsize, group_meta_key, init_array, init_group, migrate_1to2, meta_root, normalize_store_arg) from zarr.storage import FSStore, rename, listdir +from zarr._storage.v3 import KVStoreV3 from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index 73fda1b758..a5011cf696 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -6,15 +6,18 @@ import numpy as np import pytest -from zarr._storage.store import _get_hierarchy_metadata + +import zarr +from zarr._storage.store import _get_hierarchy_metadata, v3_api_available from zarr.meta import _default_entry_point_metadata_v3 -from zarr.storage import (ABSStoreV3, ConsolidatedMetadataStoreV3, DBMStoreV3, - DirectoryStoreV3, FSStoreV3, KVStore, KVStoreV3, - LMDBStoreV3, LRUStoreCacheV3, MemoryStoreV3, - MongoDBStoreV3, RedisStoreV3, SQLiteStoreV3, StoreV3, - ZipStoreV3, atexit_rmglob, atexit_rmtree, data_root, +from zarr.storage import (atexit_rmglob, atexit_rmtree, data_root, default_compressor, getsize, init_array, meta_root, normalize_store_arg) +from zarr._storage.v3 import (ABSStoreV3, ConsolidatedMetadataStoreV3, DBMStoreV3, + DirectoryStoreV3, FSStoreV3, KVStore, KVStoreV3, + LMDBStoreV3, LRUStoreCacheV3, MemoryStoreV3, + MongoDBStoreV3, RedisStoreV3, SQLiteStoreV3, StoreV3, + ZipStoreV3) from zarr.tests.util import CountingDictV3, have_fsspec, skip_test_env_var # pytest will fail to run if the following fixtures aren't imported here @@ -511,3 +514,13 @@ def test_get_hierarchy_metadata(): store['zarr.json'] = extra_metadata with pytest.raises(ValueError): _get_hierarchy_metadata(store) + + +def test_top_level_imports(): + for store_name in ['ABSStoreV3', 'DBMStoreV3', 'KVStoreV3', 'DirectoryStoreV3', + 'LMDBStoreV3', 'LRUStoreCacheV3', 'MemoryStoreV3', 'MongoDBStoreV3', + 'RedisStoreV3', 'SQLiteStoreV3', 'ZipStoreV3']: + if v3_api_available: + assert hasattr(zarr, store_name) # pragma: no cover + else: + assert not hasattr(zarr, store_name) # pragma: no cover diff --git a/zarr/tests/util.py b/zarr/tests/util.py index bb4df90d1b..912f2f5361 100644 --- a/zarr/tests/util.py +++ b/zarr/tests/util.py @@ -1,7 +1,8 @@ import collections import os -from zarr.storage import Store, StoreV3 +from zarr.storage import Store +from zarr._storage.v3 import StoreV3 import pytest From d1f590dffd37684aad7c9fbc19ed9974441ed325 Mon Sep 17 00:00:00 2001 From: hailiangzhang Date: Wed, 4 May 2022 06:05:34 -0400 Subject: [PATCH 0116/1078] Appending performance improvement (#1014) * feat: replace the sub-optimal itertools loop * feat: remove old scripts which was commented out * docs: explanation added in appending script * docs: comment polished * fix: remove trailing whitespace found by lint 
* test: add an edge case test for resize method Co-authored-by: Hailiang Zhang --- zarr/core.py | 22 ++++++++++++++++++---- zarr/tests/test_core.py | 9 +++++++++ 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/zarr/core.py b/zarr/core.py index e1e04bb8fa..c4d3a850fc 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2428,17 +2428,31 @@ def _resize_nosync(self, *args): for s, c in zip(new_shape, chunks)) # remove any chunks not within range + # The idea is that, along each dimension, + # only find and remove the chunk slices that exist in 'old' but not 'new' data. + # Note that a mutable list ('old_cdata_shape_working_list') is introduced here + # to dynamically adjust the number of chunks along the already-processed dimensions + # in order to avoid duplicate chunk removal. chunk_store = self.chunk_store - for cidx in itertools.product(*[range(n) for n in old_cdata_shape]): - if all(i < c for i, c in zip(cidx, new_cdata_shape)): - pass # keep the chunk - else: + old_cdata_shape_working_list = list(old_cdata_shape) + for idx_cdata, (val_old_cdata, val_new_cdata) in enumerate( + zip(old_cdata_shape_working_list, new_cdata_shape) + ): + for cidx in itertools.product( + *[ + range(n_new, n_old) if (idx == idx_cdata) else range(n_old) + for idx, (n_old, n_new) in enumerate( + zip(old_cdata_shape_working_list, new_cdata_shape) + ) + ] + ): key = self._chunk_key(cidx) try: del chunk_store[key] except KeyError: # chunk not initialized pass + old_cdata_shape_working_list[idx_cdata] = min(val_old_cdata, val_new_cdata) def append(self, data, axis=0): """Append `data` to `axis`. diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 635b5769c9..ebb24a07ed 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -667,6 +667,15 @@ def test_resize_2d(self): assert (10, 10) == z.chunks assert_array_equal(a[:55, :1], z[:]) + z.resize((1, 55)) + assert (1, 55) == z.shape + assert (1, 55) == z[:].shape + assert np.dtype('i4') == z.dtype + assert np.dtype('i4') == z[:].dtype + assert (10, 10) == z.chunks + assert_array_equal(a[:1, :10], z[:, :10]) + assert_array_equal(np.zeros((1, 55-10), dtype='i4'), z[:, 10:55]) + # via shape setter z.shape = (105, 105) assert (105, 105) == z.shape From ab6b355a7547865a0549a5902243c0b09556c7d8 Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Wed, 4 May 2022 07:38:57 -0400 Subject: [PATCH 0117/1078] Create fsstore from filesystem (#911) * refactor FSStore class to allow fs argument * add tests * fixes #993 * add fsspec mapper kwargs to FSMap constructor * avoid passing missing_exceptions if possible * fix line length * add tests for array creation with existing fs * add test for consolidated reading of unlistable store * flake8 * rename functions and skip coverage for workaround we expect to remove * update release notes and tutorial * fix sphinx ref typo * Fix use of store.update() * Flake8 corrections Co-authored-by: Josh Moore Co-authored-by: jmoore --- docs/release.rst | 27 +++++++++++-- docs/tutorial.rst | 36 +++++++++++------ zarr/storage.py | 56 ++++++++++++++++++++------ zarr/tests/test_convenience.py | 44 +++++++++++++++++++-- zarr/tests/test_core.py | 72 +++++++++++++++++++++++++++++++++- zarr/tests/test_storage.py | 33 ++++++++++++++-- 6 files changed, 230 insertions(+), 38 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 7a5cf51db7..cd9ebbd784 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -9,6 +9,27 @@ Unreleased Bug fixes ~~~~~~~~~ +* Fix bug that made it impossible to create an ``FSStore`` 
on unlistable filesystems + (e.g. some HTTP servers). + By :user:`Ryan Abernathey `; :issue:`993`. + +Enhancements +~~~~~~~~~~~~ + +* **Create FSStore from an existing fsspec filesystem**. If you have created + an fsspec filesystem outside of Zarr, you can now pass it as a keyword + argument to ``FSStore``. + By :user:`Ryan Abernathey `. + + +.. _release_2.11.3: + +2.11.3 +------ + +Bug fixes +~~~~~~~~~ + * Changes the default value of ``write_empty_chunks`` to ``True`` to prevent unanticipated data losses when the data types do not have a proper default value when empty chunks are read back in. @@ -322,7 +343,7 @@ Bug fixes * FSStore: default to normalize_keys=False By :user:`Josh Moore `; :issue:`755`. -* ABSStore: compatibility with ``azure.storage.python>=12`` +* ABSStore: compatibility with ``azure.storage.python>=12`` By :user:`Tom Augspurger `; :issue:`618` @@ -487,7 +508,7 @@ This release will be the last to support Python 3.5, next version of Zarr will b * `DirectoryStore` now uses `os.scandir`, which should make listing large store faster, :issue:`563` - + * Remove a few remaining Python 2-isms. By :user:`Poruri Sai Rahul `; :issue:`393`. @@ -507,7 +528,7 @@ This release will be the last to support Python 3.5, next version of Zarr will b ``zarr.errors`` have been replaced by ``ValueError`` subclasses. The corresponding ``err_*`` function have been removed. :issue:`590`, :issue:`614`) -* Improve consistency of terminology regarding arrays and datasets in the +* Improve consistency of terminology regarding arrays and datasets in the documentation. By :user:`Josh Moore `; :issue:`571`. diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 53ddddb0b9..b40896c78c 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -758,7 +758,7 @@ databases. The :class:`zarr.storage.RedisStore` class interfaces `Redis `_ (an object oriented NoSQL database). These stores respectively require the `redis-py `_ and -`pymongo `_ packages to be installed. +`pymongo `_ packages to be installed. For compatibility with the `N5 `_ data format, Zarr also provides an N5 backend (this is currently an experimental feature). Similar to the zip storage class, an @@ -897,6 +897,18 @@ The second invocation here will be much faster. Note that the ``storage_options` have become more complex here, to account for the two parts of the supplied URL. +It is also possible to initialize the filesystem outside of Zarr and then pass +it through. This requires creating an :class:`zarr.storage.FSStore` object +explicitly. For example:: + + >>> import s3fs # doctest: +SKIP + >>> fs = s3fs.S3FileSystem(anon=True) # doctest: +SKIP + >>> store = zarr.storage.FSStore('/zarr-demo/store', fs=fs) # doctest: +SKIP + >>> g = zarr.open_group(store) # doctest: +SKIP + +This is useful in cases where you want to also use the same fsspec filesystem object +separately from Zarr. + .. _fsspec: https://filesystem-spec.readthedocs.io/en/latest/ .. _supported by fsspec: https://filesystem-spec.readthedocs.io/en/latest/api.html#built-in-implementations @@ -1306,18 +1318,18 @@ filters (e.g., byte-shuffle) have been applied. Empty chunks ~~~~~~~~~~~~ - + As of version 2.11, it is possible to configure how Zarr handles the storage of chunks that are "empty" (i.e., every element in the chunk is equal to the array's fill value). -When creating an array with ``write_empty_chunks=False``, +When creating an array with ``write_empty_chunks=False``, Zarr will check whether a chunk is empty before compression and storage. If a chunk is empty,
If a chunk is empty, -then Zarr does not store it, and instead deletes the chunk from storage -if the chunk had been previously stored. +then Zarr does not store it, and instead deletes the chunk from storage +if the chunk had been previously stored. -This optimization prevents storing redundant objects and can speed up reads, but the cost is -added computation during array writes, since the contents of -each chunk must be compared to the fill value, and these advantages are contingent on the content of the array. -If you know that your data will form chunks that are almost always non-empty, then there is no advantage to the optimization described above. +This optimization prevents storing redundant objects and can speed up reads, but the cost is +added computation during array writes, since the contents of +each chunk must be compared to the fill value, and these advantages are contingent on the content of the array. +If you know that your data will form chunks that are almost always non-empty, then there is no advantage to the optimization described above. In this case, creating an array with ``write_empty_chunks=True`` (the default) will instruct Zarr to write every chunk without checking for emptiness. The following example illustrates the effect of the ``write_empty_chunks`` flag on @@ -1329,7 +1341,7 @@ the time required to write an array with different values.:: >>> from tempfile import TemporaryDirectory >>> def timed_write(write_empty_chunks): ... """ - ... Measure the time required and number of objects created when writing + ... Measure the time required and number of objects created when writing ... to a Zarr array with random ints or fill value. ... """ ... chunks = (8192,) @@ -1368,8 +1380,8 @@ the time required to write an array with different values.:: Random Data: 0.1359s, 1024 objects stored Empty Data: 0.0301s, 0 objects stored -In this example, writing random data is slightly slower with ``write_empty_chunks=True``, -but writing empty data is substantially faster and generates far fewer objects in storage. +In this example, writing random data is slightly slower with ``write_empty_chunks=True``, +but writing empty data is substantially faster and generates far fewer objects in storage. .. _tutorial_rechunking: diff --git a/zarr/storage.py b/zarr/storage.py index e8a65147aa..48b6f049dd 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1262,8 +1262,9 @@ class FSStore(Store): Parameters ---------- url : str - The destination to map. Should include protocol and path, - like "s3://bucket/root" + The destination to map. If no fs is provided, should include protocol + and path, like "s3://bucket/root". If an fs is provided, can be a path + within that filesystem, like "bucket/root" normalize_keys : bool key_separator : str public API for accessing dimension_separator. Never `None` @@ -1275,7 +1276,19 @@ class FSStore(Store): as a missing key dimension_separator : {'.', '/'}, optional Separator placed between the dimensions of a chunk. - storage_options : passed to the fsspec implementation + fs : fsspec.spec.AbstractFileSystem, optional + An existing filesystem to use for the store. + check : bool, optional + If True, performs a touch at the root location, to check for write access. + Passed to `fsspec.mapping.FSMap` constructor. + create : bool, optional + If True, performs a mkdir at the rool location. + Passed to `fsspec.mapping.FSMap` constructor. + missing_exceptions : sequence of Exceptions, optional + Exceptions classes to associate with missing files. 
+ Passed to `fsspec.mapping.FSMap` constructor. + storage_options : passed to the fsspec implementation. Cannot be used + together with fs. """ _array_meta_key = array_meta_key _group_meta_key = group_meta_key @@ -1285,18 +1298,37 @@ def __init__(self, url, normalize_keys=False, key_separator=None, mode='w', exceptions=(KeyError, PermissionError, IOError), dimension_separator=None, + fs=None, + check=False, + create=False, + missing_exceptions=None, **storage_options): import fsspec - self.normalize_keys = normalize_keys - protocol, _ = fsspec.core.split_protocol(url) - # set auto_mkdir to True for local file system - if protocol in (None, "file") and not storage_options.get("auto_mkdir"): - storage_options["auto_mkdir"] = True + mapper_options = {"check": check, "create": create} + # https://github.com/zarr-developers/zarr-python/pull/911#discussion_r841926292 + # Some fsspec implementations don't accept missing_exceptions. + # This is a workaround to avoid passing it in the most common scenarios. + # Remove this and add missing_exceptions to mapper_options when fsspec is released. + if missing_exceptions is not None: + mapper_options["missing_exceptions"] = missing_exceptions # pragma: no cover + + if fs is None: + protocol, _ = fsspec.core.split_protocol(url) + # set auto_mkdir to True for local file system + if protocol in (None, "file") and not storage_options.get("auto_mkdir"): + storage_options["auto_mkdir"] = True + self.map = fsspec.get_mapper(url, **{**mapper_options, **storage_options}) + self.fs = self.map.fs # for direct operations + self.path = self.fs._strip_protocol(url) + else: + if storage_options: + raise ValueError("Cannot specify both fs and storage_options") + self.fs = fs + self.path = self.fs._strip_protocol(url) + self.map = self.fs.get_mapper(self.path, **mapper_options) - self.map = fsspec.get_mapper(url, **storage_options) - self.fs = self.map.fs # for direct operations - self.path = self.fs._strip_protocol(url) + self.normalize_keys = normalize_keys self.mode = mode self.exceptions = exceptions # For backwards compatibility. 
Guaranteed to be non-None @@ -1308,8 +1340,6 @@ def __init__(self, url, normalize_keys=False, key_separator=None, # Pass attributes to array creation self._dimension_separator = dimension_separator - if self.fs.exists(self.path) and not self.fs.isdir(self.path): - raise FSPathExistNotDir(url) def _default_key_separator(self): if self.key_separator is None: diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 097512a240..ce8f03d0da 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -26,6 +26,7 @@ from zarr.hierarchy import Group, group from zarr.storage import ( ConsolidatedMetadataStore, + FSStore, KVStore, MemoryStore, atexit_rmtree, @@ -205,9 +206,18 @@ def test_tree(zarr_version): @pytest.mark.parametrize('zarr_version', [2, 3]) -@pytest.mark.parametrize('with_chunk_store', [False, True], ids=['default', 'with_chunk_store']) @pytest.mark.parametrize('stores_from_path', [False, True]) -def test_consolidate_metadata(with_chunk_store, zarr_version, stores_from_path): +@pytest.mark.parametrize( + 'with_chunk_store,listable', + [(False, True), (True, True), (False, False)], + ids=['default-listable', 'with_chunk_store-listable', 'default-unlistable'] +) +def test_consolidate_metadata(with_chunk_store, + zarr_version, + listable, + monkeypatch, + stores_from_path): + # setup initial data if stores_from_path: store = tempfile.mkdtemp() @@ -228,6 +238,10 @@ def test_consolidate_metadata(with_chunk_store, zarr_version, stores_from_path): version_kwarg = {} path = 'dataset' if zarr_version == 3 else None z = group(store, chunk_store=chunk_store, path=path, **version_kwarg) + + # Reload the actual store implementation in case str + store_to_copy = z.store + z.create_group('g1') g2 = z.create_group('g2') g2.attrs['hello'] = 'world' @@ -278,14 +292,36 @@ def test_consolidate_metadata(with_chunk_store, zarr_version, stores_from_path): for key in meta_keys: del store[key] + # https://github.com/zarr-developers/zarr-python/issues/993 + # Make sure we can still open consolidated on an unlistable store: + if not listable: + fs_memory = pytest.importorskip("fsspec.implementations.memory") + monkeypatch.setattr(fs_memory.MemoryFileSystem, "isdir", lambda x, y: False) + monkeypatch.delattr(fs_memory.MemoryFileSystem, "ls") + fs = fs_memory.MemoryFileSystem() + if zarr_version == 2: + store_to_open = FSStore("", fs=fs) + else: + store_to_open = FSStoreV3("", fs=fs) + + # copy original store to new unlistable store + store_to_open.update(store_to_copy) + + else: + store_to_open = store + # open consolidated - z2 = open_consolidated(store, chunk_store=chunk_store, path=path, **version_kwarg) + z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path, **version_kwarg) assert ['g1', 'g2'] == list(z2) assert 'world' == z2.g2.attrs['hello'] assert 1 == z2.g2.arr.attrs['data'] assert (z2.g2.arr[:] == 1.0).all() assert 16 == z2.g2.arr.nchunks - assert 16 == z2.g2.arr.nchunks_initialized + if listable: + assert 16 == z2.g2.arr.nchunks_initialized + else: + with pytest.raises(NotImplementedError): + _ = z2.g2.arr.nchunks_initialized if stores_from_path: # path string is note a BaseStore subclass so cannot be used to diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index ebb24a07ed..2212b035c2 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -2495,6 +2495,13 @@ def test_store_has_bytes_values(self): pass +fsspec_mapper_kwargs = { + "check": True, + "create": True, + "missing_exceptions": None +} + + 
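The tests below exercise the new ``fs`` keyword end to end; in user code the pattern looks roughly like the following sketch (it assumes only fsspec's ``LocalFileSystem`` and the ``fs`` argument added in this patch; the path and array shape are illustrative):

    import zarr
    from fsspec.implementations.local import LocalFileSystem

    fs = LocalFileSystem(auto_mkdir=True)  # filesystem created outside Zarr
    # fs and storage_options are mutually exclusive, per the ValueError above
    store = zarr.storage.FSStore("/tmp/demo.zarr", fs=fs)
    z = zarr.zeros((100, 100), chunks=(10, 10), store=store, overwrite=True)
    z[:] = 42  # chunk files are written through the supplied filesystem

The same ``fs`` object stays usable for direct filesystem operations alongside the store.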
@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestArrayWithFSStore(TestArray): @staticmethod @@ -2502,7 +2509,35 @@ def create_array(read_only=False, **kwargs): path = mkdtemp() atexit.register(shutil.rmtree, path) key_separator = kwargs.pop('key_separator', ".") - store = FSStore(path, key_separator=key_separator, auto_mkdir=True) + store = FSStore(path, key_separator=key_separator, auto_mkdir=True, **fsspec_mapper_kwargs) + cache_metadata = kwargs.pop('cache_metadata', True) + cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) + kwargs.setdefault('compressor', Blosc()) + init_array(store, **kwargs) + return Array(store, read_only=read_only, cache_metadata=cache_metadata, + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + + def expected(self): + return [ + "ab753fc81df0878589535ca9bad2816ba88d91bc", + "c16261446f9436b1e9f962e57ce3e8f6074abe8a", + "c2ef3b2fb2bc9dcace99cd6dad1a7b66cc1ea058", + "6e52f95ac15b164a8e96843a230fcee0e610729b", + "091fa99bc60706095c9ce30b56ce2503e0223f56", + ] + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestArrayWithFSStoreFromFilesystem(TestArray): + @staticmethod + def create_array(read_only=False, **kwargs): + from fsspec.implementations.local import LocalFileSystem + fs = LocalFileSystem(auto_mkdir=True) + path = mkdtemp() + atexit.register(shutil.rmtree, path) + key_separator = kwargs.pop('key_separator', ".") + store = FSStore(path, fs=fs, key_separator=key_separator, **fsspec_mapper_kwargs) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) write_empty_chunks = kwargs.pop('write_empty_chunks', True) @@ -3148,7 +3183,40 @@ def create_array(array_path='arr1', read_only=False, **kwargs): path = mkdtemp() atexit.register(shutil.rmtree, path) key_separator = kwargs.pop('key_separator', ".") - store = FSStoreV3(path, key_separator=key_separator, auto_mkdir=True) + store = FSStoreV3( + path, + key_separator=key_separator, + auto_mkdir=True, + **fsspec_mapper_kwargs + ) + cache_metadata = kwargs.pop('cache_metadata', True) + cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) + kwargs.setdefault('compressor', Blosc()) + init_array(store, path=array_path, **kwargs) + return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + + def expected(self): + return [ + "1509abec4285494b61cd3e8d21f44adc3cf8ddf6", + "7cfb82ec88f7ecb7ab20ae3cb169736bc76332b8", + "b663857bb89a8ab648390454954a9cdd453aa24b", + "21e90fa927d09cbaf0e3b773130e2dc05d18ff9b", + "e8c1fdd18b5c2ee050b59d0c8c95d07db642459c", + ] + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestArrayWithFSStoreV3FromFilesystem(TestArrayWithPathV3, TestArrayWithFSStore): + @staticmethod + def create_array(array_path='arr1', read_only=False, **kwargs): + from fsspec.implementations.local import LocalFileSystem + fs = LocalFileSystem(auto_mkdir=True) + path = mkdtemp() + atexit.register(shutil.rmtree, path) + key_separator = kwargs.pop('key_separator', ".") + store = FSStoreV3(path, fs=fs, key_separator=key_separator, **fsspec_mapper_kwargs) cache_metadata = kwargs.pop('cache_metadata', True) cache_attrs = kwargs.pop('cache_attrs', True) write_empty_chunks = kwargs.pop('write_empty_chunks', True) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 
9fb869d5e3..abddb6965c 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1249,10 +1249,6 @@ def test_read_only(self): assert store[self.root + 'foo'] == b"bar" - filepath = os.path.join(path, self.root + "foo") - with pytest.raises(ValueError): - self.create_store(path=filepath, mode='r') - def test_eq(self): store1 = self.create_store(path="anypath") store2 = self.create_store(path="anypath") @@ -1339,6 +1335,35 @@ def create_store(self, normalize_keys=False, key_separator=".", **kwargs): key_separator=key_separator) +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestFSStoreFromFilesystem(StoreTests): + + def create_store(self, normalize_keys=False, + dimension_separator=".", + path=None, + **kwargs): + import fsspec + fs = fsspec.filesystem("file") + + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + + with pytest.raises(ValueError): + # can't specify storage_options when passing an + # existing fs object + _ = FSStore(path, fs=fs, auto_mkdir=True) + + store = FSStore( + path, + normalize_keys=normalize_keys, + dimension_separator=dimension_separator, + fs=fs, + **kwargs) + + return store + + @pytest.fixture() def s3(request): # writable local S3 system From 83111b427d4d31e79b776941f85ff1b81202fe1f Mon Sep 17 00:00:00 2001 From: hailiangzhang Date: Thu, 5 May 2022 06:44:40 -0400 Subject: [PATCH 0118/1078] Update resize doc (#1022) * docs: more comments added for resize method * docs: minor grammar fix Co-authored-by: Hailiang Zhang --- zarr/core.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/zarr/core.py b/zarr/core.py index c4d3a850fc..a60269d2e9 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2406,6 +2406,10 @@ def resize(self, *args): If one or more dimensions are shrunk, any chunks falling outside the new array shape will be deleted from the underlying store. + Note, however, that chunks partially falling inside the new array + (i.e. boundary chunks) are left intact. As a consequence, + data that falls outside the new array but inside a boundary chunk + will reappear if a subsequent resize operation grows the array again. From 015036e2493bc291a102951c804a0a2d3f08c304 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 May 2022 11:47:46 +0200 Subject: [PATCH 0119/1078] Bump redis from 4.2.2 to 4.3.0 (#1024) Bumps [redis](https://github.com/redis/redis-py) from 4.2.2 to 4.3.0. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.2.2...v4.3.0) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-minor ...
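The resize behaviour documented in the ``zarr/core.py`` hunk above can be made concrete with a small sketch (values are illustrative; it assumes the default fill value of 0):

    import zarr

    z = zarr.zeros((10,), chunks=(3,), dtype='i4')
    z[:] = 42     # materialise all four chunks
    z.resize(5)   # chunks 2 and 3 fall wholly outside the new shape and are deleted;
                  # chunk 1 straddles the boundary and is kept intact
    z.resize(10)  # grow back: z[:6] still reads 42, including the element
                  # restored from the boundary chunk; z[6:] reads the fill value 0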
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 3e496d90e1..c2306ac643 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.11.0 # pyup: ignore -redis==4.2.2 +redis==4.3.0 types-redis types-setuptools pymongo==4.1.1 From 4aaa35a291c38d8c655894ff1a6a809893214f4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 May 2022 09:24:05 +0200 Subject: [PATCH 0120/1078] Bump redis from 4.3.0 to 4.3.1 (#1025) Bumps [redis](https://github.com/redis/redis-py) from 4.3.0 to 4.3.1. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.3.0...v4.3.1) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index c2306ac643..c93d83d918 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.11.0 # pyup: ignore -redis==4.3.0 +redis==4.3.1 types-redis types-setuptools pymongo==4.1.1 From 972c912158328195f4d5bbf602d1bd75e50985b5 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 10 May 2022 09:45:16 +0200 Subject: [PATCH 0121/1078] Release notes for 2.12.0a1 (#1023) * Release notes for 2.12.0a1 * Minor fixes to release notes * Extend explanation of #1014 --- docs/release.rst | 73 +++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 60 insertions(+), 13 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index cd9ebbd784..0dbaacb8f7 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -1,27 +1,65 @@ Release notes ============= -.. _unreleased: +.. -Unreleased ----------- + # Unindent the section between releases in order + # to document your changes. On releases it will be + # re-indented so that it does not show up in the notes. -Bug fixes -~~~~~~~~~ + .. _unreleased: -* Fix bug that made it impossible to create an ``FSStore`` on unlistable filesystems - (e.g. some HTTP servers). - By :user:`Ryan Abernathey `; :issue:`993`. + Unreleased + ---------- +.. _release_2.12.0a1: + +2.12.0a1 +-------- Enhancements ~~~~~~~~~~~~ +* **Add support for reading and writing Zarr V3.** The new `zarr._storage.v3` + package has the necessary classes and functions for evaluating Zarr V3. + Since the format is not yet finalized, the classes and functions are not + automatically imported into the regular `zarr` name space. Setting the + `ZARR_V3_API_AVAILABLE` environment variable will activate them. + By :user:`Greggory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007`.
+ * **Create FSStore from an existing fsspec filesystem**. If you have created an fsspec filesystem outside of Zarr, you can now pass it as a keyword argument to ``FSStore``. - By :user:`Ryan Abernathey `. + By :user:`Ryan Abernathey `; :issue:`911`. + +* Add numpy encoder class for json.dumps + By :user:`Eric Prestat `; :issue:`933`. + +* Appending performance improvement to Zarr arrays, e.g., when writing to S3. + By :user:`hailiangzhang `; :issue:`1014`. + +* Add number encoder for ``json.dumps`` to support numpy integers in + ``chunks`` arguments. By :user:`Eric Prestat ` :issue:`697`. + Bug fixes ~~~~~~~~~ +* Fix bug that made it impossible to create an ``FSStore`` on unlistable filesystems + (e.g. some HTTP servers). + By :user:`Ryan Abernathey `; :issue:`993`. +Documentation +~~~~~~~~~~~~~ + +* Update resize doc to clarify surprising behavior. + By :user:`hailiangzhang `; :issue:`1022`. + +Maintenance +~~~~~~~~~~~ + +* Added Pre-commit configuration, incl. Yaml Check. + By :user:`Shivank Chaudhary `; :issue:`1015`, :issue:`1016`. + .. _release_2.11.3: 2.11.3 @@ -30,13 +68,22 @@ Enhancements Bug fixes ~~~~~~~~~ +* Fix missing case to fully revert change to default write_empty_chunks. + By :user:`Tom White `; :issue:`1005`. + + +.. _release_2.11.2: + +2.11.2 +------ + +Bug fixes +~~~~~~~~~ + * Changes the default value of ``write_empty_chunks`` to ``True`` to prevent unanticipated data losses when the data types do not have a proper default value when empty chunks are read back in. - By :user:`Vyas Ramasubramani `; :issue:`965`. - -* Add number encoder for ``json.dumps`` to support numpy intergers in - ``chunks`` arguments. By :user:`Eric Prestat ` :issue:`697`. + By :user:`Vyas Ramasubramani `; :issue:`965`, :issue:`1001`. .. _release_2.11.1: From 86ec138d045afb4dd40dc212249f3286f82088da Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Wed, 11 May 2022 09:36:28 +0200 Subject: [PATCH 0122/1078] Make all unignored zarr warnings errors (#1021) Following on from #654, I wanted to make sure that no warnings are being missed by the tests. --- pytest.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytest.ini b/pytest.ini index 8e3c0adb22..966b5ad931 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,7 +2,7 @@ doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS IGNORE_EXCEPTION_DETAIL addopts = --durations=10 filterwarnings = - error::DeprecationWarning:zarr.* - error::UserWarning:zarr.* + error:::zarr.* + ignore:Not all N5 implementations support blosc compression.*:RuntimeWarning ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning From 30781732e59b6f1fd91c4966dd6c53f8d38b3b70 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 May 2022 09:50:21 +0200 Subject: [PATCH 0123/1078] Bump azure-storage-blob from 12.11.0 to 12.12.0 (#1026) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.11.0 to 12.12.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.11.0...azure-storage-blob_12.12.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... 
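A rough illustration of the FSStore-from-filesystem enhancement described in the release entry above; a sketch assuming a local "file" filesystem and a scratch path of your choosing:

    import fsspec
    import zarr
    from zarr.storage import FSStore

    fs = fsspec.filesystem("file")
    fs.makedirs("/tmp/example.zarr", exist_ok=True)
    store = FSStore("/tmp/example.zarr", fs=fs)  # reuse the externally created filesystem
    z = zarr.open_array(store, mode="w", shape=(100,), chunks=(10,), dtype="i4")
    z[:] = 42

    # Per the TestFSStoreFromFilesystem test earlier in this series, combining an
    # existing fs with storage_options is rejected:
    # FSStore("/tmp/example.zarr", fs=fs, auto_mkdir=True)  # raises ValueError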
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index c93d83d918..c80f456288 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -6,7 +6,7 @@ ipytree==0.2.1 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.11.0 # pyup: ignore +azure-storage-blob==12.12.0 # pyup: ignore redis==4.3.1 types-redis types-setuptools From b9b9bf9e0577380222f2d7871e5272d8dfff9723 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Wed, 11 May 2022 21:48:19 +0200 Subject: [PATCH 0124/1078] Activate Py 3.10 builds (#1027) * Activate Py 3.10 and 3.11 beta * Fix quotes of versions * Also quote exclusion Co-authored-by: jakirkham * Try explicit beta1 * Use "beta.1" see versions file: https://github.com/actions/python-versions/blob/main/versions-manifest.json * Migrate to setup-python * Remove 3.11 Co-authored-by: jakirkham --- .github/workflows/python-package.yml | 4 +++- .github/workflows/windows-testing.yml | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 91500317b3..2cc1de1769 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,11 +15,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] numpy_version: ['!=1.21.0', '==1.17.*'] exclude: - python-version: 3.9 numpy_version: '==1.17.*' + - python-version: '3.10' + numpy_version: '==1.17.*' services: redis: image: redis diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index af656aa88d..0bb5c1da70 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -16,7 +16,7 @@ jobs: strategy: fail-fast: True matrix: - python-version: ["3.8", "3.9"] + python-version: ['3.8', '3.9', '3.10'] steps: - uses: actions/checkout@v2 with: From b1b47e178ba074a591e7f4d9d8d5367c5ff7cfd5 Mon Sep 17 00:00:00 2001 From: Andrew Thomas Date: Mon, 16 May 2022 21:35:52 -0400 Subject: [PATCH 0125/1078] Fix URL to renamed file in Blosc repo (#1028) --- docs/spec/v2.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index ec73163c0d..4746f8f61e 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -206,7 +206,7 @@ through the primary compression library to obtain a new sequence of bytes comprising the compressed chunk data. No header is added to the compressed bytes or any other modification made. The internal structure of the compressed bytes will depend on which primary compressor was used. For example, the `Blosc -compressor `_ +compressor `_ produces a sequence of bytes that begins with a 16-byte header followed by compressed data. From bd7ab1607d8c6d5f9a0b732ba155e7104bae9e89 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 20 May 2022 08:03:17 +0200 Subject: [PATCH 0126/1078] Bump fsspec from 2022.3.0 to 2022.5.0 (#1030) * Bump fsspec from 2022.3.0 to 2022.5.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.3.0 to 2022.5.0. 
- [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.3.0...2022.5.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump s3fs as well Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index c80f456288..8fd08612f1 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,6 +19,6 @@ pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 h5py==3.6.0 -fsspec==2022.3.0 -s3fs==2022.3.0 +fsspec==2022.5.0 +s3fs==2022.5.0 moto[server]>=1.3.14 From d5a081c6cc4645e2206b763cc1116ff2e6178603 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 23 May 2022 12:37:57 +0200 Subject: [PATCH 0127/1078] Mark V3 API experimental (#1032) * Mark V3 API experimental Following discussion at this week's community call, a few additional precautions are being implemented here: - [x] rename the var ZARR_V3_API_AVAILABLE to ZARR_V3_EXPERIMENTAL_API - [x] add an assertion to prevent use of zarr_version=3 w/o the var * Activate ZVEA for minimal.yml * First round of skipping v3 tests w/o env * Activate ZVEA for windows.yml * Skip all v3 tests if api is not enabled * skip codecov for the assertion --- .github/workflows/minimal.yml | 4 +++ .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 1 + docs/release.rst | 2 +- zarr/_storage/store.py | 10 ++++++- zarr/convenience.py | 5 +++- zarr/core.py | 5 +++- zarr/hierarchy.py | 19 ++++++++++++- zarr/tests/test_convenience.py | 22 ++++++++++----- zarr/tests/test_core.py | 22 ++++++++++++++- zarr/tests/test_creation.py | 40 +++++++++++++++------------ zarr/tests/test_hierarchy.py | 32 ++++++++++++++++----- zarr/tests/test_storage_v3.py | 6 ++++ 13 files changed, 131 insertions(+), 39 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index eb6ebd5d25..a87e374de7 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -22,12 +22,16 @@ jobs: activate-environment: minimal - name: Tests shell: "bash -l {0}" + env: + ZARR_V3_EXPERIMENTAL_API: 1 run: | conda activate minimal python -m pip install . 
pytest -svx --timeout=300 - name: Fixture generation shell: "bash -l {0}" + env: + ZARR_V3_EXPERIMENTAL_API: 1 run: | conda activate minimal rm -rf fixture/ diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 2cc1de1769..37000350cb 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -71,7 +71,7 @@ jobs: ZARR_TEST_ABS: 1 ZARR_TEST_MONGO: 1 ZARR_TEST_REDIS: 1 - ZARR_V3_API_AVAILABLE: 1 + ZARR_V3_EXPERIMENTAL_API: 1 run: | conda activate zarr-env mkdir ~/blob_emulator diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 0bb5c1da70..8c249a4db4 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -51,6 +51,7 @@ jobs: pytest -sv --timeout=300 env: ZARR_TEST_ABS: 1 + ZARR_V3_EXPERIMENTAL_API: 1 - name: Conda info shell: bash -l {0} run: conda info diff --git a/docs/release.rst b/docs/release.rst index 0dbaacb8f7..09a0a4499c 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -23,7 +23,7 @@ Enhancements package has the necessary classes and functions for evaluating Zarr V3. Since the format is not yet finalized, the classes and functions are not automatically imported into the regular `zarr` name space. Setting the - `ZARR_V3_API_AVAILABLE` environment variable will activate them. + `ZARR_V3_EXPERIMENTAL_API` environment variable will activate them. By :user:`Greggory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007`. * **Create FSStore from an existing fsspec filesystem**. If you have created diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 152c9abd6b..36a5c0bff5 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -18,7 +18,15 @@ DEFAULT_ZARR_VERSION = 2 -v3_api_available = os.environ.get('ZARR_V3_API_AVAILABLE', '0').lower() not in ['0', 'false'] +v3_api_available = os.environ.get('ZARR_V3_EXPERIMENTAL_API', '0').lower() not in ['0', 'false'] + + +def assert_zarr_v3_api_available(): + if not v3_api_available: + raise NotImplementedError( + "# V3 reading and writing is experimental! 
To enable support, set:\n" + "ZARR_V3_EXPERIMENTAL_API=1" + ) # pragma: no cover class BaseStore(MutableMapping): diff --git a/zarr/convenience.py b/zarr/convenience.py index 60e47dc339..93dc860477 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -5,7 +5,7 @@ import re from collections.abc import Mapping, MutableMapping -from zarr._storage.store import data_root, meta_root +from zarr._storage.store import data_root, meta_root, assert_zarr_v3_api_available from zarr.core import Array from zarr.creation import array as _create_array from zarr.creation import open_array @@ -1209,6 +1209,8 @@ def is_zarr_key(key): else: + assert_zarr_v3_api_available() + sfx = _get_metadata_suffix(store) # type: ignore def is_zarr_key(key): @@ -1288,6 +1290,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** if store._store_version == 2: ConsolidatedStoreClass = ConsolidatedMetadataStore else: + assert_zarr_v3_api_available() ConsolidatedStoreClass = ConsolidatedMetadataStoreV3 # default is to store within 'consolidated' group on v3 if not metadata_key.startswith('meta/root/'): diff --git a/zarr/core.py b/zarr/core.py index a60269d2e9..6ce2fa8800 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -11,7 +11,7 @@ import numpy as np from numcodecs.compat import ensure_bytes, ensure_ndarray -from zarr._storage.store import _prefix_to_attrs_key +from zarr._storage.store import _prefix_to_attrs_key, assert_zarr_v3_api_available from zarr.attrs import Attributes from zarr.codecs import AsType, get_codec from zarr.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError @@ -171,6 +171,9 @@ def __init__( if zarr_version is None: zarr_version = store._store_version + if zarr_version != 2: + assert_zarr_v3_api_available() + if chunk_store is not None: chunk_store = normalize_store_arg(chunk_store, zarr_version=zarr_version) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index d92af08ffb..b9052408b4 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -3,7 +3,8 @@ import numpy as np -from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, DEFAULT_ZARR_VERSION +from zarr._storage.store import (_get_metadata_suffix, data_root, meta_root, + DEFAULT_ZARR_VERSION, assert_zarr_v3_api_available) from zarr.attrs import Attributes from zarr.core import Array from zarr.creation import (array, create, empty, empty_like, full, full_like, @@ -117,6 +118,10 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) if zarr_version is None: zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + + if zarr_version != 2: + assert_zarr_v3_api_available() + if chunk_store is not None: chunk_store: BaseStore = _normalize_store_arg(chunk_store, zarr_version=zarr_version) self._store = store @@ -1178,6 +1183,10 @@ def _normalize_store_arg(store, *, storage_options=None, mode="r", zarr_version=None): if zarr_version is None: zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + + if zarr_version != 2: + assert_zarr_v3_api_available() + if store is None: return MemoryStore() if zarr_version == 2 else MemoryStoreV3() return normalize_store_arg(store, @@ -1234,6 +1243,10 @@ def group(store=None, overwrite=False, chunk_store=None, store = _normalize_store_arg(store, zarr_version=zarr_version) if zarr_version is None: zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + + if zarr_version != 2: + assert_zarr_v3_api_available() + 
if zarr_version == 3 and path is None: raise ValueError(f"path must be provided for a v{zarr_version} group") path = normalize_storage_path(path) @@ -1305,6 +1318,10 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N zarr_version=zarr_version) if zarr_version is None: zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + + if zarr_version != 2: + assert_zarr_v3_api_available() + if chunk_store is not None: chunk_store = _normalize_store_arg(chunk_store, storage_options=storage_options, diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index ce8f03d0da..d0d293a694 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -34,6 +34,7 @@ meta_root, getsize, ) +from zarr._storage.store import v3_api_available from zarr._storage.v3 import ( ConsolidatedMetadataStoreV3, DirectoryStoreV3, @@ -44,6 +45,8 @@ ) from zarr.tests.util import have_fsspec +_VERSIONS = v3_api_available and (2, 3) or (2,) + def _init_creation_kwargs(zarr_version): kwargs = {'zarr_version': zarr_version} @@ -52,7 +55,7 @@ def _init_creation_kwargs(zarr_version): return kwargs -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_open_array(path_type, zarr_version): store = tempfile.mkdtemp() @@ -86,7 +89,7 @@ def test_open_array(path_type, zarr_version): open('doesnotexist', mode='r') -@pytest.mark.parametrize("zarr_version", [2, 3]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_group(path_type, zarr_version): store = tempfile.mkdtemp() @@ -116,7 +119,7 @@ def test_open_group(path_type, zarr_version): assert g.read_only -@pytest.mark.parametrize("zarr_version", [2, 3]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_save_errors(zarr_version): with pytest.raises(ValueError): # no arrays provided @@ -129,6 +132,7 @@ def test_save_errors(zarr_version): save('data/group.zarr', zarr_version=zarr_version) +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") def test_zarr_v3_save_multiple_unnamed(): x = np.ones(8) y = np.zeros(8) @@ -142,6 +146,7 @@ def test_zarr_v3_save_multiple_unnamed(): assert meta_root + 'dataset/arr_1.array.json' in store +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") def test_zarr_v3_save_errors(): x = np.ones(8) with pytest.raises(ValueError): @@ -155,7 +160,7 @@ def test_zarr_v3_save_errors(): save('data/group.zr3', x, zarr_version=3) -@pytest.mark.parametrize("zarr_version", [2, 3]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_lazy_loader(zarr_version): foo = np.arange(100) bar = np.arange(100, 0, -1) @@ -173,7 +178,7 @@ def test_lazy_loader(zarr_version): assert 'LazyLoader: ' in repr(loader) -@pytest.mark.parametrize("zarr_version", [2, 3]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_load_array(zarr_version): foo = np.arange(100) bar = np.arange(100, 0, -1) @@ -192,7 +197,7 @@ def test_load_array(zarr_version): assert_array_equal(bar, array) -@pytest.mark.parametrize("zarr_version", [2, 3]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_tree(zarr_version): kwargs = _init_creation_kwargs(zarr_version) g1 = zarr.group(**kwargs) @@ -205,7 +210,7 @@ def test_tree(zarr_version): assert str(zarr.tree(g1)) == str(g1.tree()) -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) @pytest.mark.parametrize('stores_from_path', [False, True]) @pytest.mark.parametrize( 
'with_chunk_store,listable', @@ -531,6 +536,7 @@ def test_if_exists(self): copy_store(source, dest, if_exists='foobar') +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestCopyStoreV3(TestCopyStore): _version = 3 @@ -666,6 +672,7 @@ def test_copy_all(): assert destination_group.subgroup.attrs["info"] == "sub attrs" +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") def test_copy_all_v3(): """ https://github.com/zarr-developers/zarr-python/issues/269 @@ -931,6 +938,7 @@ def test_logging(self, source, dest, tmpdir): copy(source['foo'], dest, dry_run=True, log=True) +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestCopyV3(TestCopy): @pytest.fixture(params=['zarr', 'hdf5']) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 2212b035c2..f5f043e6e3 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -18,6 +18,7 @@ from pkg_resources import parse_version from zarr._storage.store import ( + v3_api_available, _prefix_to_array_key, _prefix_to_attrs_key, _prefix_to_group_key @@ -2704,7 +2705,7 @@ def test_read_from_all_blocks(self): # Start with TestArrayWithPathV3 not TestArrayV3 since path must be supplied - +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayV3(unittest.TestCase): version = 3 @@ -2734,6 +2735,7 @@ def test_prefix_exceptions(self): _prefix_to_attrs_key(store, '') +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithPathV3(TestArrayWithPath): version = 3 @@ -2868,6 +2870,7 @@ def test_nchunks_initialized(self): z.store.close() +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithChunkStoreV3(TestArrayWithChunkStore, TestArrayWithPathV3): @staticmethod @@ -2910,6 +2913,7 @@ def test_nbytes_stored(self): assert -1 == z.nbytes_stored +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithDirectoryStoreV3(TestArrayWithDirectoryStore, TestArrayWithPathV3): @staticmethod @@ -2937,6 +2941,7 @@ def test_nbytes_stored(self): @skip_test_env_var("ZARR_TEST_ABS") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithABSStoreV3(TestArrayWithABSStore, TestArrayWithPathV3): @staticmethod @@ -2961,6 +2966,7 @@ def create_array(self, array_path='arr1', read_only=False, **kwargs): # class TestArrayWithN5StoreV3(TestArrayWithDirectoryStoreV3): +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithDBMStoreV3(TestArrayWithDBMStore, TestArrayWithPathV3): @staticmethod @@ -2980,6 +2986,7 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithDBMStoreV3BerkeleyDB(TestArrayWithDBMStoreBerkeleyDB, TestArrayWithPathV3): @staticmethod @@ -3000,6 +3007,7 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithLMDBStoreV3(TestArrayWithLMDBStore, TestArrayWithPathV3): @staticmethod @@ -3023,6 +3031,7 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithLMDBStoreV3NoBuffers(TestArrayWithLMDBStoreNoBuffers, TestArrayWithPathV3): @staticmethod @@ -3043,6 +3052,7 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithSQLiteStoreV3(TestArrayWithPathV3, 
TestArrayWithSQLiteStore): @staticmethod @@ -3076,6 +3086,7 @@ def test_nbytes_stored(self): # custom store, does not support getsize() # Note: this custom mapping doesn't actually have all methods in the # v3 spec (e.g. erase), but they aren't needed here. + class CustomMappingV3(StoreV3): def __init__(self): @@ -3112,6 +3123,7 @@ def __contains__(self, item): return item in self.inner +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithCustomMappingV3(TestArrayWithPathV3, TestArrayWithCustomMapping): @staticmethod @@ -3140,6 +3152,7 @@ def test_len(self): assert len(z._store) == 2 +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayNoCacheV3(TestArrayWithPathV3, TestArrayNoCache): @staticmethod @@ -3158,6 +3171,7 @@ def test_object_arrays_danger(self): pass +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithStoreCacheV3(TestArrayWithPathV3, TestArrayWithStoreCache): @staticmethod @@ -3177,6 +3191,7 @@ def test_store_has_bytes_values(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithFSStoreV3(TestArrayWithPathV3, TestArrayWithFSStore): @staticmethod def create_array(array_path='arr1', read_only=False, **kwargs): @@ -3208,6 +3223,7 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithFSStoreV3FromFilesystem(TestArrayWithPathV3, TestArrayWithFSStore): @staticmethod def create_array(array_path='arr1', read_only=False, **kwargs): @@ -3236,6 +3252,7 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithFSStoreV3PartialRead(TestArrayWithPathV3, TestArrayWithFSStorePartialRead): @staticmethod @@ -3269,6 +3286,7 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithFSStoreV3Nested(TestArrayWithPathV3, TestArrayWithFSStoreNested): @staticmethod @@ -3296,6 +3314,7 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithFSStoreV3NestedPartialRead(TestArrayWithPathV3, TestArrayWithFSStoreNestedPartialRead): @staticmethod @@ -3329,6 +3348,7 @@ def expected(self): ] +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") def test_array_mismatched_store_versions(): store_v3 = KVStoreV3(dict()) store_v2 = KVStore(dict()) diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index 863b987d9b..48d6aee4f5 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -17,9 +17,13 @@ from zarr.hierarchy import open_group from zarr.n5 import N5Store from zarr.storage import DirectoryStore, KVStore +from zarr._storage.store import v3_api_available from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer +_VERSIONS = v3_api_available and (None, 2, 3) or (None, 2) +_VERSIONS2 = v3_api_available and (2, 3) or (2,) + # something bcolz-like class MockBcolzArray: @@ -56,7 +60,7 @@ def _init_creation_kwargs(zarr_version): return kwargs -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def 
test_array(zarr_version): expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -116,7 +120,7 @@ def test_array(zarr_version): assert np.dtype('i8') == z.dtype -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_empty(zarr_version): kwargs = _init_creation_kwargs(zarr_version) z = empty(100, chunks=10, **kwargs) @@ -124,7 +128,7 @@ def test_empty(zarr_version): assert (10,) == z.chunks -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_zeros(zarr_version): kwargs = _init_creation_kwargs(zarr_version) z = zeros(100, chunks=10, **kwargs) @@ -133,7 +137,7 @@ def test_zeros(zarr_version): assert_array_equal(np.zeros(100), z[:]) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_ones(zarr_version): kwargs = _init_creation_kwargs(zarr_version) z = ones(100, chunks=10, **kwargs) @@ -142,7 +146,7 @@ def test_ones(zarr_version): assert_array_equal(np.ones(100), z[:]) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_full(zarr_version): kwargs = _init_creation_kwargs(zarr_version) z = full(100, chunks=10, fill_value=42, dtype='i4', **kwargs) @@ -155,7 +159,7 @@ def test_full(zarr_version): assert np.all(np.isnan(z[:])) -@pytest.mark.parametrize('zarr_version', [None, 2]) +@pytest.mark.parametrize('zarr_version', [None, 2]) # TODO def test_full_additional_dtypes(zarr_version): """Test additional types that aren't part of the base v3 spec.""" kwargs = _init_creation_kwargs(zarr_version) @@ -190,7 +194,7 @@ def test_full_additional_dtypes(zarr_version): @pytest.mark.parametrize('dimension_separator', ['.', '/', None]) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_open_array(zarr_version, dimension_separator): store = 'data/array.zarr' @@ -317,7 +321,7 @@ def test_open_array_none(): @pytest.mark.parametrize('dimension_separator', ['.', '/', None]) -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS2) def test_open_array_infer_separator_from_store(zarr_version, dimension_separator): if zarr_version == 3: @@ -386,7 +390,7 @@ def test_open_array_n5(zarr_version): assert_array_equal(np.full(100, fill_value=42), a[:]) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_open_array_dict_store(zarr_version): # dict will become a KVStore @@ -404,7 +408,7 @@ def test_open_array_dict_store(zarr_version): assert_array_equal(np.full(100, fill_value=42), z[:]) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_create_in_dict(zarr_version): kwargs = _init_creation_kwargs(zarr_version) expected_store_type = KVStoreV3 if zarr_version == 3 else KVStore @@ -417,7 +421,7 @@ def test_create_in_dict(zarr_version): assert isinstance(a.store, expected_store_type) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_empty_like(zarr_version): kwargs = _init_creation_kwargs(zarr_version) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -466,7 +470,7 @@ def test_empty_like(zarr_version): assert isinstance(z.chunks, tuple) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) 
+@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_zeros_like(zarr_version): kwargs = _init_creation_kwargs(zarr_version) @@ -493,7 +497,7 @@ def test_zeros_like(zarr_version): assert 0 == z3.fill_value -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_ones_like(zarr_version): kwargs = _init_creation_kwargs(zarr_version) @@ -521,7 +525,7 @@ def test_ones_like(zarr_version): assert z3._store._store_version == expected_zarr_version -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_full_like(zarr_version): kwargs = _init_creation_kwargs(zarr_version) @@ -551,7 +555,7 @@ def test_full_like(zarr_version): full_like(a, chunks=10, **kwargs) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_open_like(zarr_version): kwargs = _init_creation_kwargs(zarr_version) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -582,7 +586,7 @@ def test_open_like(zarr_version): assert z3._store._store_version == expected_zarr_version -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_create(zarr_version): kwargs = _init_creation_kwargs(zarr_version) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -654,7 +658,7 @@ def test_create(zarr_version): assert z.chunks == z.shape -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_compression_args(zarr_version): kwargs = _init_creation_kwargs(zarr_version) @@ -689,7 +693,7 @@ def test_compression_args(zarr_version): create(100, compressor=Zlib(9), compression_opts=1, **kwargs) -@pytest.mark.parametrize('zarr_version', [None, 2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_create_read_only(zarr_version): # https://github.com/alimanfoo/zarr/issues/151 diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index 3bcd826882..7c2eaa3f75 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -18,7 +18,7 @@ from numcodecs import Zlib from numpy.testing import assert_array_equal -from zarr._storage.store import _get_metadata_suffix +from zarr._storage.store import _get_metadata_suffix, v3_api_available from zarr.attrs import Attributes from zarr.core import Array from zarr.creation import open_array @@ -35,7 +35,11 @@ from zarr.tests.util import skip_test_env_var, have_fsspec, abs_container +_VERSIONS = v3_api_available and (2, 3) or (2,) + # noinspection PyStatementEffect + + class TestGroup(unittest.TestCase): @staticmethod @@ -1095,6 +1099,7 @@ def test_group_init_from_dict(chunk_dict): # noinspection PyStatementEffect +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3(TestGroup, unittest.TestCase): @staticmethod @@ -1153,6 +1158,7 @@ def create_store(): # noinspection PyStatementEffect +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithMemoryStore(TestGroupWithMemoryStore, TestGroupV3): @staticmethod @@ -1170,6 +1176,7 @@ def create_store(): return store, None +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithDirectoryStore(TestGroupWithDirectoryStore, TestGroupV3): @staticmethod @@ -1197,6 +1204,7 @@ def test_pickle(self): @skip_test_env_var("ZARR_TEST_ABS") +@pytest.mark.skipif(not 
v3_api_available, reason="V3 is disabled") class TestGroupV3WithABSStore(TestGroupV3): @staticmethod @@ -1246,6 +1254,7 @@ def test_round_trip_nd(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithFSStore(TestGroupWithFSStore, TestGroupV3): @staticmethod @@ -1303,6 +1312,7 @@ def test_inconsistent_dimension_separator(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithNestedFSStore(TestGroupV3WithFSStore): @staticmethod @@ -1352,6 +1362,7 @@ def test_move(self): pass +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithZipStore(TestGroupWithZipStore, TestGroupV3): @staticmethod @@ -1372,6 +1383,7 @@ def create_store(): return store, None +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithDBMStore(TestGroupWithDBMStore, TestGroupV3): @staticmethod @@ -1393,6 +1405,7 @@ def create_store(): return store, None +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithDBMStoreBerkeleyDB(TestGroupWithDBMStoreBerkeleyDB, TestGroupV3): @staticmethod @@ -1415,6 +1428,7 @@ def create_store(): return store, None +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithLMDBStore(TestGroupWithLMDBStore, TestGroupV3): @staticmethod @@ -1436,6 +1450,7 @@ def create_store(self): return store, None +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithSQLiteStore(TestGroupWithSQLiteStore, TestGroupV3): def create_store(self): @@ -1477,6 +1492,7 @@ def test_chunk_store(self): assert expect == actual +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithChunkStore(TestGroupWithChunkStore, TestGroupV3): @staticmethod @@ -1520,6 +1536,7 @@ def create_store(): return store, None +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithStoreCache(TestGroupWithStoreCache, TestGroupV3): @staticmethod @@ -1528,7 +1545,7 @@ def create_store(): return store, None -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_group(zarr_version): # test the group() convenience function @@ -1572,7 +1589,7 @@ def test_group(zarr_version): assert store is g.store -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_open_group(zarr_version): # test the open_group() convenience function @@ -1639,7 +1656,7 @@ def test_open_group(zarr_version): assert 'foo/bar' == g.path -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_group_completions(zarr_version): path = None if zarr_version == 2 else 'group1' g = group(path=path, zarr_version=zarr_version) @@ -1670,7 +1687,7 @@ def test_group_completions(zarr_version): assert '456' not in d # not valid identifier -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_group_key_completions(zarr_version): path = None if zarr_version == 2 else 'group1' g = group(path=path, zarr_version=zarr_version) @@ -1754,7 +1771,7 @@ def _check_tree(g, expect_bytes, expect_text): isinstance(widget, ipytree.Tree) -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def 
test_tree(zarr_version): # setup path = None if zarr_version == 2 else 'group1' @@ -1821,6 +1838,7 @@ def test_tree(zarr_version): _check_tree(g3, expect_bytes, expect_text) +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") def test_group_mismatched_store_versions(): store_v3 = KVStoreV3(dict()) store_v2 = KVStore(dict()) @@ -1854,7 +1872,7 @@ def test_group_mismatched_store_versions(): Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) -@pytest.mark.parametrize('zarr_version', [2, 3]) +@pytest.mark.parametrize('zarr_version', _VERSIONS) def test_open_group_from_paths(zarr_version): """Verify zarr_version is applied to both the store and chunk_store.""" store = tempfile.mkdtemp() diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index a5011cf696..a33f274621 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -41,6 +41,12 @@ skip_if_nested_chunks) +pytestmark = pytest.mark.skipif( + not v3_api_available, + reason="v3 api is not available" +) + + @pytest.fixture(params=[ (None, "/"), (".", "."), From 2032a20440b9026942f8f0b1765dcb1ae31630d6 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 23 May 2022 12:40:44 +0200 Subject: [PATCH 0128/1078] 2.12.0a2 release notes --- docs/release.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 09a0a4499c..9fc5110f84 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -13,6 +13,26 @@ Release notes .. _release_2.12.0a1: +2.12.0a2 +-------- + +* Rename ZARR_V3_API_AVAILABLE to ZARR_V3_EXPERIMENTAL_API. + By :user:`Josh Moore ` :issue:`1032`. + +Maintenance +~~~~~~~~~~~ + +* Fix URL to renamed file in Blosc repo. + By :user:`Andrew Thomas ` :issue:`1027`. + +* Make all unignored zarr warnings errors. + By :user:`Josh Moore ` :issue:`1021`. + +.. _release_2.12.0a1: + 2.12.0a1 -------- From b529b0c1858a017e494d20c015086309e0a31e45 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 23 May 2022 13:09:18 +0200 Subject: [PATCH 0129/1078] Fix a2 release notes --- docs/release.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 9fc5110f84..7e68491aa8 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -11,7 +11,7 @@ Release notes Unreleased ---------- -.. _release_2.12.0a1: +.. _release_2.12.0a2: 2.12.0a2 -------- @@ -23,7 +23,7 @@ Maintenance ~~~~~~~~~~~ * Fix URL to renamed file in Blosc repo. - By :user:`Andrew Thomas ` :issue:`1028`. * Activate Py 3.10 builds. By :user:`Josh Moore ` :issue:`1027`. From fd979fbda3cf52dc79c3b1e49a8da0a75b03078d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 May 2022 16:39:26 +0200 Subject: [PATCH 0130/1078] Bump numpy from 1.22.3 to 1.22.4 (#1034) Bumps [numpy](https://github.com/numpy/numpy) from 1.22.3 to 1.22.4. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github.com/numpy/numpy/compare/v1.22.3...v1.22.4) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... 
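Taken together, the ZARR_V3_EXPERIMENTAL_API gate introduced in the patches above works roughly as follows; a sketch in which the store type, path, and array parameters are arbitrary, and the variable must be set before zarr is imported because the flag is read at import time:

    import os
    os.environ["ZARR_V3_EXPERIMENTAL_API"] = "1"  # set before importing zarr

    import zarr
    from zarr._storage.v3 import KVStoreV3

    store = KVStoreV3(dict())
    # v3 stores require an explicit path, as in the tests above
    z = zarr.create(shape=(100,), chunks=(10,), store=store,
                    path="arr1", zarr_version=3)
    z[:] = 1
    # Without the environment variable, the same call raises NotImplementedError.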
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 75acc051e7..766a0bf964 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.22.3 +numpy==1.22.4 From 5cdb20229b2783c8b47f3b4a738dcd9d16a7c5b5 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Mon, 23 May 2022 14:12:52 -0700 Subject: [PATCH 0131/1078] Move `master` to `main` (#1033) --- .github/workflows/Pre-commit-hooks.yml | 4 ++-- .github/workflows/minimal.yml | 4 ++-- .github/workflows/python-package.yml | 4 ++-- .github/workflows/windows-testing.yml | 4 ++-- README.md | 8 ++++---- codecov.yml | 2 +- docs/conf.py | 10 +++++----- docs/contributing.rst | 22 +++++++++++----------- docs/index.rst | 4 ++-- docs/release.rst | 2 +- docs/tutorial.rst | 2 +- 11 files changed, 33 insertions(+), 33 deletions(-) diff --git a/.github/workflows/Pre-commit-hooks.yml b/.github/workflows/Pre-commit-hooks.yml index 05ed509c53..7955cb239a 100644 --- a/.github/workflows/Pre-commit-hooks.yml +++ b/.github/workflows/Pre-commit-hooks.yml @@ -4,10 +4,10 @@ name: pre-commit # Controls when the workflow will run on: - # Triggers the workflow pull request events but only for the master branch + # Triggers the workflow pull request events but only for the main branch pull_request: push: - branches: [master] + branches: [main] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index a87e374de7..fc9c048af7 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -4,9 +4,9 @@ name: Minimal installation on: push: - branches: [ master ] + branches: [ main ] pull_request: - branches: [ master ] + branches: [ main ] jobs: minimum_build: diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 37000350cb..c62d40721e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -5,9 +5,9 @@ name: Linux Testing on: push: - branches: [ master ] + branches: [ main ] pull_request: - branches: [ master ] + branches: [ main ] jobs: build: diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 8c249a4db4..37eea5df7b 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -5,9 +5,9 @@ name: Python package on: push: - branches: [ master ] + branches: [ main ] pull_request: - branches: [ master ] + branches: [ main ] jobs: windows: diff --git a/README.md b/README.md index de20ebbc3b..99b2393add 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@
[The README.md hunks here lost their HTML during extraction: the deleted and added logo <img> lines survive only as bare "-" and "+" markers before the "# Zarr" heading, and the License ("license"), Build Status ("travis build status"), and Coverage ("coverage") badge hunks kept only their alt text. In keeping with the rest of this commit, each hunk rewrites the embedded link from the master branch to main.] diff --git a/codecov.yml b/codecov.yml index e90b9e9fe1..e9b99c8214 100644 --- a/codecov.yml +++ b/codecov.yml @@ -12,4 +12,4 @@ comment: behavior: default require_changes: true # if true: only post the comment if coverage changes branches: # branch names that can post comment - - "master" + - "main" diff --git a/docs/conf.py b/docs/conf.py index f3e2e1327b..2bbd3ffb6e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,8 +61,8 @@ # The encoding of source files. #source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = 'index' +# The main toctree document. +main_doc = 'index' # General information about the project. project = 'zarr' @@ -245,7 +245,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'zarr.tex', 'zarr Documentation', + (main_doc, 'zarr.tex', 'zarr Documentation', 'Zarr Developers', 'manual'), ] @@ -275,7 +275,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'zarr', 'zarr Documentation', + (main_doc, 'zarr', 'zarr Documentation', [author], 1) ] @@ -289,7 +289,7 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'zarr', 'zarr Documentation', + (main_doc, 'zarr', 'zarr Documentation', author, 'zarr', 'One line description of project.', 'Miscellaneous'), ] diff --git a/docs/contributing.rst b/docs/contributing.rst index adadb323dc..f433f1bc03 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -108,9 +108,9 @@ report the bug or propose the feature you'd like to add. It's best to synchronize your fork with the upstream repository, then create a new, separate branch for each piece of work you want to do. E.g.:: - git checkout master + git checkout main git fetch upstream - git rebase upstream/master + git rebase upstream/main git push git checkout -b shiny-new-feature git push -u origin shiny-new-feature @@ -120,18 +120,18 @@ this branch specific to one bug or feature so it is clear what the branch brings Zarr. To update this branch with latest code from Zarr, you can retrieve the changes from -the master branch and perform a rebase:: +the main branch and perform a rebase:: git fetch upstream - git rebase upstream/master + git rebase upstream/main -This will replay your commits on top of the latest Zarr git master. If this leads to +This will replay your commits on top of the latest Zarr git main. If this leads to merge conflicts, these need to be resolved before submitting a pull request. -Alternatively, you can merge the changes in from upstream/master instead of rebasing, +Alternatively, you can merge the changes in from upstream/main instead of rebasing, which can be simpler:: git fetch upstream - git merge upstream/master + git merge upstream/main Again, any conflicts need to be resolved before submitting a pull request. @@ -206,7 +206,7 @@ Documentation Docstrings for user-facing classes and functions should follow the `numpydoc -`_ +`_
To run doctests, activate your development environment, install optional requirements, @@ -242,7 +242,7 @@ one core developers before being merged. Ideally, pull requests submitted by a c should be reviewed and approved by at least one other core developers before being merged. Pull requests should not be merged until all CI checks have passed (GitHub Actions -Codecov) against code that has had the latest master merged in. +Codecov) against code that has had the latest main merged in. Compatibility and versioning policies ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -337,9 +337,9 @@ Release procedure Most of the release process is now handled by github workflow which should automatically push a release to PyPI if a tag is pushed. -Checkout and update the master branch:: +Checkout and update the main branch:: - $ git checkout master + $ git checkout main $ git pull Verify all tests pass on all supported Python versions, and docs build:: diff --git a/docs/index.rst b/docs/index.rst index d75c159fd1..5343dea0ed 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,4 +1,4 @@ -.. zarr documentation master file, created by +.. zarr documentation main file, created by sphinx-quickstart on Mon May 2 21:40:09 2016. Zarr @@ -42,7 +42,7 @@ Alternatively, install Zarr via conda:: $ conda install -c conda-forge zarr To install the latest development version of Zarr, you can use pip with the -latest GitHub master:: +latest GitHub main:: $ pip install git+https://github.com/zarr-developers/zarr-python.git diff --git a/docs/release.rst b/docs/release.rst index 7e68491aa8..50ff7f98ac 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -893,7 +893,7 @@ Enhancements properties that enable a selection of items in an array to be retrieved or updated. See the :ref:`tutorial_indexing` tutorial section for more information. There is also a `notebook - `_ + `_ with extended examples and performance benchmarks. :issue:`78`, :issue:`89`, :issue:`112`, :issue:`172`. diff --git a/docs/tutorial.rst b/docs/tutorial.rst index b40896c78c..1cd0387a75 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -427,7 +427,7 @@ Groups also have the :func:`zarr.hierarchy.Group.tree` method, e.g.:: If you're using Zarr within a Jupyter notebook (requires `ipytree `_), calling ``tree()`` will generate an interactive tree representation, see the `repr_tree.ipynb notebook -`_ +`_ for more examples. .. _tutorial_attrs: From 761bbedb8b500bb7668d8fde3dbcec5e60c72f53 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 May 2022 07:45:58 +0200 Subject: [PATCH 0132/1078] Bump h5py from 3.6.0 to 3.7.0 (#1036) Bumps [h5py](https://github.com/h5py/h5py) from 3.6.0 to 3.7.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.6.0...3.7.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 8fd08612f1..7b560fe91d 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,7 +18,7 @@ flake8==4.0.1 pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 -h5py==3.6.0 +h5py==3.7.0 fsspec==2022.5.0 s3fs==2022.5.0 moto[server]>=1.3.14 From 9bc4f652e09df99f8a6586a2ed21315fdabee10c Mon Sep 17 00:00:00 2001 From: Shivank Chaudhary <81817735+Alt-Shivam@users.noreply.github.com> Date: Wed, 22 Jun 2022 00:40:53 +0530 Subject: [PATCH 0133/1078] Replace Build Status badge with Linux testing. (#1047) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 99b2393add..207d7dedd5 100644 --- a/README.md +++ b/README.md @@ -39,8 +39,8 @@ Build Status [badge markup stripped during extraction: per the commit subject, this hunk replaces the Travis build-status badge ("travis build status") with the GitHub Actions Linux-testing workflow badge] From 28fddfa0e25ce09acda1717b85b0c0a24031bc07 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Jun 2022 10:51:34 +0200 Subject: [PATCH 0134/1078] Bump redis from 4.3.1 to 4.3.3 (#1043) Bumps [redis](https://github.com/redis/redis-py) from 4.3.1 to 4.3.3. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.3.1...v4.3.3) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 7b560fe91d..7deaf818b7 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.12.0 # pyup: ignore -redis==4.3.1 +redis==4.3.3 types-redis types-setuptools pymongo==4.1.1 From a78129cccd9ffbad3a0ab4ea6be2bd26e0401760 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Jun 2022 12:57:38 +0200 Subject: [PATCH 0135/1078] Bump setuptools-scm from 6.4.2 to 7.0.1 (#1049) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 6.4.2 to 7.0.1. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v6.4.2...v7.0.1) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 66cc7c92c3..a56ee01c04 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.17.3 numcodecs==0.9.1 msgpack-python==0.5.6 -setuptools-scm==6.4.2 +setuptools-scm==7.0.1 # test requirements pytest==7.1.2 From 480136c8a580123d3f7b44cf52692221401b8fea Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 23 Jun 2022 10:44:32 +0200 Subject: [PATCH 0136/1078] 2.12.0 changelog (#1038) * Merge pre-release sections An odd bug (or user error?!) with readthedocs leads to all pre-release anchors in the changelog to be reduced to just the final component (e.g. "a2"). Rather than struggle with this, this proposes to use the official anchor for the upcoming release, but to clearly mark it as a pre-release. * Fix minor warning from the sphinx parsing * Update to the new logo * Drop purple from the sidebar * Fix typo in release.rst comment * Drop pre-release warning --- docs/_static/custom.css | 2 +- docs/_static/logo1.png | Bin 10139 -> 50060 bytes docs/release.rst | 43 ++++++++++++++++------------------------ 3 files changed, 18 insertions(+), 27 deletions(-) diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 9ad0555b95..0c391c1c7e 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,7 +1,7 @@ /* Sidebar background color */ .wy-nav-side, div.wy-side-nav-search { - background-color: rgb(38, 34, 98); + background-color: rgb(38, 34, 98, 0); /* full alpha */ } /* Sidebar link click color */ diff --git a/docs/_static/logo1.png b/docs/_static/logo1.png index 4d18ca6739260970cebfd4190dc6a6b901001f0a..e6623c3ec9619da9fe8b671e9408cf4026d52549 100644 GIT binary patch literal 50060 zcmd>lg;!MH7w<4L#E>&|x0Ix$bhmU0(xG$-L(kA4DIgt!goJ=dBMdMC0+NyvBOxN) z4Ug~dz5n8^yUx1zo_p6i=X};aJ3f1#cs(68B79nW002Owp{{HI0AR)b*YI#2dMp?V z!yay6XN6}906;?u!Hq5U!#lIRy1_F5AczA1KturmR}Wo?zW~5%K>*;N4FDjS4FFJi z<+tlgKYYNk(^69g-2ZnKf2~M==z(~toBIL)#OVJTR_0@3x`$3&KaFQ9xPP$8i73e_ z2KOIEMF2FE6^!5uI|Ts<n~ zA*Af;smuxy;QyZZxtb5fCL7bLvxk+!q~H9Rm-XM)S7JNTdNo$mV z_14yjTjLk70xK5i3W~)`64`OFp{%DI$D)oFqSPJ>_QJ)OQjVXhb>3|=m6G(DxN@{T?qIrDp$29gn+jwdlNy%1 zC)ik2>o<-&M~Wk+Exd=leB?_LuhJs}CvN}sb0TOWarD=ySo&B#sYpkA|vV>Z@UZpaJ|7b|E+!d`Hvr8k&k#sIUhban`>G zax}O+>AFcUmPvw;eu2bWaQBQ+(u%|-EMUKu%EoRIooOpxq8yzq1kz)Z^Es2r0S)cC zCv*>GsIups-*%Qs8)s#%hBB&{itlqgF=M}=USmLeBJBmk1qFdSN(b|P&AA#`#150X z!t;BdI!sQ#aHx;-uye+O<+O~f!~i*7%av9}?z;;uzkm{H(3gAjy#vpKeYpDI!EZ0Ad z1`Nm^dG29n>M8^sBDCidRx_zI_9DRv@lK5!*p1|bEnm_D*aaNb8v7dpv1`;2HNNnt z6rGjdcm4E!dqW9TA;c<32^7c{0$dmz3?yHEY(gX8grr8l#u@yI*IdWIEazZl5{Is@Ml&1HIN4jQzaS5UU% zmIA#c^njh%gne_g65SRG=RTBMH19CUyRZQc4kB8Fi$bzWTnoZl%a4=#V+xY<+1<+9 zV`HjJ0|>cQ-p_|!b;%t6l-=x6WjkDcX@#LT&s{N&n;!QBt1#2ADkljL_G{?ZA3I`Q zKpj>9U=6tf3TJN(To7&Tn#X_T%e`Jo1j(SI1Ka~;!OhtIATiJtNR4I@Hzcz7TJ+h| ztcNuL#@~3cwlPWI!k_L9yJ2XGPK#PIj@!mgS~-4uXx9Hr%cm!;F#?xD#kPI8N7VA* zjbuoB8p5X z5sXdR?O8ku9bfxLNfiL1#M}tOgBjg)F@9P8Q!w2~SN}8pJ~#XWk%LTyjC^*|8vOs1I{v`=8-Ac7kEKbE zlmiZ{ub?1x!-S8R5H6f?O$W0Qw~OdNq{Vr@j3j{I5!X=aaLIh4(Vi{?HMm9Xx8sC; zlJza91->N7fAg168i$ajwGM~o3J>l-T9Tapj(mDM?DRED?KGWJu9hHP9h$R{RJagL z?Z~Y`W4NL62=QCnubX3{Rp+2c<-wHJ2x@clt9~Shr{F%(SLP{L0L&R$jR7ord}v|< zsqGQJ2xZ?oKUN&S1lYVz6sMhRY{qXh>?4((UaQjkVj}7V_(aBS)Uqysh7-&&HV7O5 z`}0uyYueQ*(6Rc~%U3pnK8NvDZ?qtPM8rs0nf^v!eC^!DmL`3j*rh!-oIX%VD+usS 
zm*FV!?N(0SMTalj!QHIGtjnL{AP||t)wcjAw;W>D^E4Yvkb!tRZ!S5Yz~azu5Tl^# zOI^ZV&0RVr{wZCZTj=VALn z5RX#)J$Gp2C8d;|rhok6j*RyFfmFlIcsC}2X{CT{yCTDWEe*}~F`wI2dBdO4>&r*W z9b&k_&Ku2W&G;3d?h_btCe_HiHnlmh~M}%wWcRAVd8Pk0Ao9)$rrRO-F2A+M= zza)L{#`Mu3HW@SHFh=3P&@CPb4Y$4J`Si2@PmIqrdBuXQ$H)zB_b*M2Urwv(HqlZP zc;92He1!N$4qMSiWg6k!3q!N;s~>i>HUi#`U&i6HRF4cXd-yql_Jq4D3$_E|Ik%|& zPIT?mEd5P+58rlW7$Hey1M>@OnL|ac>&t2daBtc z6Aq9_0^KO|t{J~%;5lL-6WRjgmh)lqP0B`rxwx^S$N8b6)|mjC+^#v*jAzDTo?%bJ zy-A=}_QXW#ZJAI~ulfWO(QJ`Jw7@_HtYg~=#oiknzvFWe&_+i`OT6l18du)w=(aVo zVFt-+!o1SS{Cjwmd~QM2n;Js_$S_o2CWu_RybhT|C1s4%k|MvzsO%@kGAxQ%TV?Fr zH(9&m=Vxl{g`@N2%Qu{z%K6ZAjgxgzM29khAsbx_QJF8iCrcs3C$KOou|9oDZT*fs zifFh+p~S~@ds^KFDVTNUH$Ws}@Z6S-X*wi`V?Jb@I#ZTVgNoZz%%9k}hm-jygzhhx z`gzQHChDgnv{{x?Do(S4Pg!7{pk{1E>>!wY7b0K+~CjqG3EK-e!+>-S-|mEW81_e!DNm}ONEm@!Q`#cL84uvMXcuy;vyfT zz(D5KN8AO2v~%OKX?E>>X6-$S;d%dA46Dg z(gHmmi5jlVYu;p4F#|0pz$Y*dCvQ^A%Ulgk8Hrtx$zz~v{o3@(-qwNSO=Dj^g!%zj z$&Ml?s1%rdyB|flE}i6nP)RZq7G(Hbn+!8*K~nJG0oeL%`jt-_bTA zx#H%&oXKzd)`wIOmKv}nH7@$!6TUEDRcEAfaA3Dh{lE=&L?K4DIZ&_Z=F7G9Hxi`0`_STDb2h8g-ZEV{`sL-D4UKo^v0d7b0$ew~8DgC&2=hgPzR zYlJj$QFqYh|9mU;b)M-O$+yL^(EvZG+Eq;nB+=ILQNi{@w=OXDhwF@#wazG?j=+!P zaIpT`h10K0nR0n7?J+76_ytfhojv<~|(X6Y3R diff --git a/docs/release.rst b/docs/release.rst index 50ff7f98ac..696eef5b93 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -3,7 +3,7 @@ Release notes .. # Unindent the section between releases in order - # to coument your changes. On releases it will be + # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. .. _unreleased: @@ -11,30 +11,10 @@ Release notes Unreleased ---------- -.. _release_2.12.0a2: +.. _release_2.12.0: -2.12.0a2 --------- - -* Rename ZARR_V3_API_AVAILABLE to ZARR_V3_EXPERIMENTAL_API. - By :user:`Josh Moore ` :issue:`1032`. - -Maintenance -~~~~~~~~~~~ - -* Fix URL to renamed file in Blosc repo. - By :user:`Andrew Thomas ` :issue:`1028`. - -* Activate Py 3.10 builds. - By :user:`Josh Moore ` :issue:`1027`. - -* Make all unignored zarr warnings errors. - By :user:`Josh Moore ` :issue:`1021`. - -.. _release_2.12.0a1: - -2.12.0a1 --------- +2.12.0 +------ Enhancements ~~~~~~~~~~~~ @@ -44,7 +24,8 @@ Enhancements Since the format is not yet finalized, the classes and functions are not automatically imported into the regular `zarr` name space. Setting the `ZARR_V3_EXPERIMENTAL_API` environment variable will activate them. - By :user:`Greggory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007`. + By :user:`Greggory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007` + as well as by :user:`Josh Moore ` :issue:`1032`. * **Create FSStore from an existing fsspec filesystem**. If you have created an fsspec filesystem outside of Zarr, you can now pass it as a keyword @@ -80,6 +61,16 @@ Maintenance * Added Pre-commit configuration, incl. Yaml Check. By :user:`Shivank Chaudhary `; :issue:`1015`, :issue:`1016`. +* Fix URL to renamed file in Blosc repo. + By :user:`Andrew Thomas ` :issue:`1028`. + +* Activate Py 3.10 builds. + By :user:`Josh Moore ` :issue:`1027`. + +* Make all unignored zarr warnings errors. + By :user:`Josh Moore ` :issue:`1021`. + + .. 
From 26ff3ca554cad3c38bd831692ab4be579c77a4cb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 23 Jun 2022 21:53:07 -0700 Subject: [PATCH 0137/1078] Bump numcodecs from 0.9.1 to 0.10.0 (#1054) Bumps [numcodecs](https://github.com/zarr-developers/numcodecs) from 0.9.1 to 0.10.0. - [Release notes](https://github.com/zarr-developers/numcodecs/releases) - [Changelog](https://github.com/zarr-developers/numcodecs/blob/main/docs/release.rst) - [Commits](https://github.com/zarr-developers/numcodecs/compare/v0.9.1...v0.10.0) --- updated-dependencies: - dependency-name: numcodecs dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index a56ee01c04..4092c784d2 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,7 +1,7 @@ # library requirements asciitree==0.3.3 fasteners==0.17.3 -numcodecs==0.9.1 +numcodecs==0.10.0 msgpack-python==0.5.6 setuptools-scm==7.0.1 # test requirements

From b892f9f8830b2afa9c62150488c26c33825f5dee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Jun 2022 13:52:58 +0200 Subject: [PATCH 0138/1078] Bump redis from 4.3.3 to 4.3.4 (#1058) Bumps [redis](https://github.com/redis/redis-py) from 4.3.3 to 4.3.4. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.3.3...v4.3.4) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 7deaf818b7..85d8c3d979 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipytree==0.2.1 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.12.0 # pyup: ignore -redis==4.3.3 +redis==4.3.4 types-redis types-setuptools pymongo==4.1.1

From 20b42a504c5266f1cc94c2327431e37cad75d848 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Jun 2022 09:41:15 +0200 Subject: [PATCH 0139/1078] Bump tox from 3.25.0 to 3.25.1 (#1064) Bumps [tox](https://github.com/tox-dev/tox) from 3.25.0 to 3.25.1. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/master/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/3.25.0...3.25.1) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 85d8c3d979..ead410e9dd 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -12,7 +12,7 @@ types-redis types-setuptools pymongo==4.1.1 # optional test requirements -tox==3.25.0 +tox==3.25.1 coverage flake8==4.0.1 pytest-cov==3.0.0

From 1afb9bfa65a3d4931649b5e9b9506d5b51696097 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Jul 2022 16:02:06 +0200 Subject: [PATCH 0140/1078] Bump numpy from 1.22.4 to 1.23.0 (#1053) Bumps [numpy](https://github.com/numpy/numpy) from 1.22.4 to 1.23.0. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.22.4...v1.23.0) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 766a0bf964..a3e57b13d4 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.22.4 +numpy==1.23.0

From a557d4654c6f4bd50124cf0c60a510cf90e6541a Mon Sep 17 00:00:00 2001 From: RichardScottOZ <72196131+RichardScottOZ@users.noreply.github.com> Date: Mon, 4 Jul 2022 23:39:30 +0930 Subject: [PATCH 0141/1078] integers (#1056) --- docs/release.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index 696eef5b93..d16f14fb4d 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -38,7 +38,7 @@ Enhancements * Appending performance improvement to Zarr arrays, e.g., when writing to S3. By :user:`hailiangzhang `; :issue:`1014`. -* Add number encoder for ``json.dumps`` to support numpy intergers in +* Add number encoder for ``json.dumps`` to support numpy integers in ``chunks`` arguments. By :user:`Eric Prestat ` :issue:`697`. Bug fixes
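The one-word fix above lands in a release note worth unpacking: chunk sizes computed with NumPy arrive as numpy integer scalars rather than plain ints, and the number encoder from :issue:`697` lets json.dumps write them into the array metadata instead of raising a TypeError. A minimal sketch:

    import numpy as np
    import zarr

    side = np.int64(100)  # NumPy arithmetic yields np.int64, not int
    z = zarr.zeros((side, side), chunks=(side // 10, side // 10), dtype="f8")
    print(z.chunks)  # (10, 10); the chunk sizes survived JSON metadata encoding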
From 6d9b4095f9c74161fef21666452af06017cc922a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Jul 2022 21:26:57 +0200 Subject: [PATCH 0142/1078] Bump setuptools-scm from 7.0.1 to 7.0.4 (#1065) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 7.0.1 to 7.0.4. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v7.0.1...v7.0.4) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 4092c784d2..5dcdab16be 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.17.3 numcodecs==0.10.0 msgpack-python==0.5.6 -setuptools-scm==7.0.1 +setuptools-scm==7.0.4 # test requirements pytest==7.1.2

From e1b71a38ca2643e21dcfab4c277d755cf01f4078 Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Wed, 6 Jul 2022 11:51:55 +0200 Subject: [PATCH 0143/1078] Bump minimum NumPy version to 1.20 (#988) * Bump minimum NumPy version to 1.20 * NumPy 1.20 supports Python 3.9 * mypy: added a "type: ignore" Co-authored-by: jakirkham --- .github/workflows/python-package.yml | 12 +++++------- tox.ini | 6 +++--- zarr/meta.py | 4 ++-- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c62d40721e..37fe44e212 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -16,12 +16,10 @@ jobs: strategy: matrix: python-version: ['3.7', '3.8', '3.9', '3.10'] - numpy_version: ['!=1.21.0', '==1.17.*'] + numpy_version: ['>=1.22.0', '==1.20.*'] exclude: - - python-version: 3.9 - numpy_version: '==1.17.*' - python-version: '3.10' - numpy_version: '==1.17.*' + numpy_version: '==1.20.*' services: redis: image: redis - 27017:27017 steps: - uses: actions/checkout@v2 - with: + with: fetch-depth: 0 - name: Setup Miniconda uses: conda-incubator/setup-miniconda@master - name: Tests shell: "bash -l {0}" - env: + env: COVERAGE_FILE: .coverage.${{matrix.python-version}}.${{matrix.numpy_version}} ZARR_TEST_ABS: 1 ZARR_TEST_MONGO: 1 flake8 zarr mypy zarr - + diff --git a/tox.ini b/tox.ini index 3adc147dac..3ce8611fe1 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py37-npy{117,latest}, py38, py39, docs +envlist = py37-npy{120,latest}, py38, py39, docs [testenv] install_command = pip install --no-binary=numcodecs {opts} {packages} @@ -21,7 +21,7 @@ commands = # main unit test runner py{38,39}: pytest -v --cov=zarr --cov-config=.coveragerc zarr # don't collect coverage when running older numpy versions - py37-npy117: pytest -v zarr + py37-npy120: pytest -v zarr # collect coverage and run doctests under py37 py37-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data # generate a coverage report @@ -33,7 +33,7 @@ commands = # print environment for debugging pip freeze deps = - py37-npy117: numpy==1.17.* + py37-npy120: numpy==1.20.* py37-npylatest,py38: -rrequirements_dev_numpy.txt -rrequirements_dev_minimal.txt -rrequirements_dev_optional.txt diff --git a/zarr/meta.py b/zarr/meta.py index bb4bae4199..a0f2e80f89 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -237,8 +237,8 @@ def decode_fill_value( return np.array(v, dtype=dtype)[()] elif dtype.kind in "c": v = ( - cls.decode_fill_value(v[0], dtype.type().real.dtype), - cls.decode_fill_value(v[1], dtype.type().imag.dtype), + cls.decode_fill_value(v[0], dtype.type().real.dtype), # type: ignore + cls.decode_fill_value(v[1], dtype.type().imag.dtype), # type: ignore ) v = v[0] + 1j * v[1] return np.array(v, dtype=dtype)[()]
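The zarr/meta.py hunk above is the only behavioural code in this bump: for complex dtypes, decode_fill_value reads the stored fill value back as a [real, imag] pair, decodes each half against the component dtype, and reassembles it as v[0] + 1j * v[1]; the added "type: ignore" comments merely quiet mypy under the new NumPy floor. A small round-trip sketch of the code path being patched:

    import zarr

    # The fill value is persisted in the metadata as a real/imag pair and
    # rebuilt on read as real + 1j * imag, per the hunk above.
    z = zarr.full((4,), fill_value=1 + 2j, dtype="c16")
    print(z.fill_value)  # (1+2j)
    print(z[:])          # four elements, all (1+2j)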
From 8f0dedf82cd406437510d3f024f773ae58a6910c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Jul 2022 10:36:27 +0200 Subject: [PATCH 0144/1078] Bump azure-storage-blob from 12.12.0 to 12.13.0 (#1070) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.12.0 to 12.13.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.12.0...azure-storage-blob_12.13.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index ead410e9dd..9f3a8fddfb 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -6,7 +6,7 @@ ipytree==0.2.1 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.12.0 # pyup: ignore +azure-storage-blob==12.13.0 # pyup: ignore redis==4.3.4 types-redis types-setuptools pymongo==4.1.1 # optional test requirements

From a82b5fbd3b1d9e195f818366e8f74cfbe8f0f9a8 Mon Sep 17 00:00:00 2001 From: Gregory Lee Date: Mon, 11 Jul 2022 06:22:12 -0400 Subject: [PATCH 0145/1078] remove option to return None when the input is None from _ensure_store (#1068) * remove option to return None when the input is None from _ensure_store This capability was contrary to the docstring and does not seem useful * Add None test Co-authored-by: Josh Moore --- zarr/_storage/store.py | 4 +--- zarr/tests/test_storage.py | 4 +++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 36a5c0bff5..6faf4a1250 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -100,9 +100,7 @@ def _ensure_store(store: Any): """ from zarr.storage import KVStore # avoid circular import - if store is None: - return None - elif isinstance(store, BaseStore): + if isinstance(store, BaseStore): if not store._store_version == 2: raise ValueError( f"cannot initialize a v2 store with a v{store._store_version} store" diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index abddb6965c..a3e227ec86 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -73,7 +73,9 @@ class InvalidStore: with pytest.raises(ValueError): Store._ensure_store(KVStoreV3(dict())) - assert Store._ensure_store(None) is None + # cannot initialize without a store + with pytest.raises(ValueError): + Store._ensure_store(None)
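After the _ensure_store change above, passing None raises instead of silently returning None, exactly as the new test asserts. A sketch of the resulting behaviour; the detail that plain mappings are still wrapped in a KVStore is inferred from the import visible in the hunk rather than shown in full by the patch:

    from zarr.storage import KVStore, Store

    wrapped = Store._ensure_store(dict())  # a plain mapping is wrapped
    print(isinstance(wrapped, KVStore))    # True

    try:
        Store._ensure_store(None)          # no longer a silent pass-through
    except ValueError as exc:
        print(exc)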
From 611afade5e94784f5cb3d8882dd17c893e09587a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Jul 2022 12:30:25 +0200 Subject: [PATCH 0146/1078] Bump numpy from 1.23.0 to 1.23.1 (#1071) Bumps [numpy](https://github.com/numpy/numpy) from 1.23.0 to 1.23.1. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.23.0...v1.23.1) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index a3e57b13d4..c89e4f6896 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.23.0 +numpy==1.23.1
From 9ce2f328e9ff687c151cd73634f4d624590df817 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Mon, 11 Jul 2022 08:55:52 -0400 Subject: [PATCH 0147/1078] drop py37 (#1067) Co-authored-by: Josh Moore --- .github/workflows/python-package.yml | 2 +- setup.py | 2 +- tox.ini | 20 ++++++++++---------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 37fe44e212..b42e036cfa 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + python-version: ['3.8', '3.9', '3.10'] numpy_version: ['>=1.22.0', '==1.20.*'] exclude: - python-version: '3.10' diff --git a/setup.py b/setup.py index a68c77a63f..96c51c08ff 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ 'ipytree', ], }, - python_requires='>=3.7, <4', + python_requires='>=3.8, <4', install_requires=dependencies, package_dir={'': '.'}, packages=['zarr', 'zarr._storage', 'zarr.tests'], diff --git a/tox.ini b/tox.ini index 3adc147dac..4bd58796c9 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py37-npy{120,latest}, py38, py39, docs +envlist = py38-npy{120,latest}, py39, py310, docs [testenv] install_command = pip install --no-binary=numcodecs {opts} {packages} @@ -19,27 +19,27 @@ commands = # clear out any data files generated during tests python -c 'import glob; import shutil; import os; [(shutil.rmtree(d) if os.path.isdir(d) else os.remove(d) if os.path.isfile(d) else None) for d in glob.glob("./example*")]' # main unit test runner - py{38,39}: pytest -v --cov=zarr --cov-config=.coveragerc zarr + py{39,310}: pytest -v --cov=zarr --cov-config=.coveragerc zarr # don't collect coverage when running older numpy versions - py37-npy120: pytest -v zarr + py38-npy120: pytest -v zarr # collect coverage and run doctests under py37 - py37-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data + py38-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data # generate a coverage report - py37-npylatest,py38,py39: coverage report -m + py38-npylatest,py38,py39,p310: coverage report -m # run doctests in the tutorial and spec - py{38,39}: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst + py{38,39,310}: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst # pep8 checks - py{38,39}: flake8 zarr + py{38,39, 310}: flake8 zarr # print environment for debugging pip freeze deps = - py37-npy120: numpy==1.20.* - py37-npylatest,py38: -rrequirements_dev_numpy.txt + py38-npy120: numpy==1.20.* + py38-npylatest,py38: -rrequirements_dev_numpy.txt -rrequirements_dev_minimal.txt -rrequirements_dev_optional.txt [testenv:docs] -basepython = python3.7 +basepython = python3.8 changedir = docs deps = -rrequirements_rtfd.txt
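The drop-py37 patch above raises the floor in three places at once: the CI matrix, python_requires in setup.py, and the tox envlist. Once a build carrying this change is installed, the new floor is also visible from the package metadata; a tiny sketch, assuming zarr is installed at a version containing this patch:

    from importlib.metadata import metadata

    # Requires-Python mirrors the python_requires line changed above.
    print(metadata("zarr")["Requires-Python"])  # >=3.8, <4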
-numpy==1.23.0 +numpy==1.23.1 From 9ce2f328e9ff687c151cd73634f4d624590df817 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Mon, 11 Jul 2022 08:55:52 -0400 Subject: [PATCH 0147/1078] drop py37 (#1067) Co-authored-by: Josh Moore --- .github/workflows/python-package.yml | 2 +- setup.py | 2 +- tox.ini | 20 ++++++++++---------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 37fe44e212..b42e036cfa 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + python-version: ['3.8', '3.9', '3.10'] numpy_version: ['>=1.22.0', '==1.20.*'] exclude: - python-version: '3.10' diff --git a/setup.py b/setup.py index a68c77a63f..96c51c08ff 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ 'ipytree', ], }, - python_requires='>=3.7, <4', + python_requires='>=3.8, <4', install_requires=dependencies, package_dir={'': '.'}, packages=['zarr', 'zarr._storage', 'zarr.tests'], diff --git a/tox.ini b/tox.ini index 3ce8611fe1..4bd58796c9 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py37-npy{120,latest}, py38, py39, docs +envlist = py38-npy{120,latest}, py39, py310, docs [testenv] install_command = pip install --no-binary=numcodecs {opts} {packages} @@ -19,27 +19,27 @@ commands = # clear out any data files generated during tests python -c 'import glob; import shutil; import os; [(shutil.rmtree(d) if os.path.isdir(d) else os.remove(d) if os.path.isfile(d) else None) for d in glob.glob("./example*")]' # main unit test runner - py{38,39}: pytest -v --cov=zarr --cov-config=.coveragerc zarr + py{39,310}: pytest -v --cov=zarr --cov-config=.coveragerc zarr # don't collect coverage when running older numpy versions - py37-npy120: pytest -v zarr + py38-npy120: pytest -v zarr # collect coverage and run doctests under py38 - py37-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data + py38-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data # generate a coverage report - py37-npylatest,py38,py39: coverage report -m + py38-npylatest,py38,py39,py310: coverage report -m # run doctests in the tutorial and spec - py{38,39}: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst + py{38,39,310}: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst # pep8 checks - py{38,39}: flake8 zarr + py{38,39,310}: flake8 zarr # print environment for debugging pip freeze deps = - py37-npy120: numpy==1.20.* + py38-npy120: numpy==1.20.* - py37-npylatest,py38: -rrequirements_dev_numpy.txt + py38-npylatest,py38: -rrequirements_dev_numpy.txt -rrequirements_dev_minimal.txt -rrequirements_dev_optional.txt [testenv:docs] -basepython = python3.7 +basepython = python3.8 changedir = docs deps = -rrequirements_rtfd.txt From 202aeb6451979323353147a0b91b42d202110a3e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 18 Jul 2022 22:45:05 +0200 Subject: [PATCH 0148/1078] Update DeepSource configuration (#1078) The default Python version has become 3.x.x: https://deepsource.io/docs/analyzer/python/ --- .deepsource.toml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.deepsource.toml b/.deepsource.toml index 5143b2739e..e68653328f
100644 --- a/.deepsource.toml +++ b/.deepsource.toml @@ -5,6 +5,3 @@ test_patterns = ["zarr/tests/test_*.py"] [[analyzers]] name = "python" enabled = true - - [analyzers.meta] - runtime_version = "3.x.x" From 7212d263f51196f21f2c9cb7bb4fc080d4399e06 Mon Sep 17 00:00:00 2001 From: Pavithra Eswaramoorthy Date: Tue, 19 Jul 2022 04:21:20 +0530 Subject: [PATCH 0149/1078] Close quote typo fix (#1083) * close quote typo fixes * add to release notes --- docs/release.rst | 12 +++++++++--- docs/tutorial.rst | 4 ++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index d16f14fb4d..706859a551 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,10 +6,16 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. - .. _unreleased: +.. _unreleased: + +Unreleased +---------- + +Documentation +~~~~~~~~~~~~~ + +* Typo fixes to close quotes. By :user:`Pavithra Eswaramoorthy ` - Unreleased - ---------- .. _release_2.12.0: diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 1cd0387a75..fcee87c888 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1404,7 +1404,7 @@ access patterns and incur a substantial performance hit when using file based stores. One of the most pathological examples is switching from column-based chunking to row-based chunking e.g. :: - >>> a = zarr.zeros((10000,10000), chunks=(10000, 1), dtype='uint16, store='a.zarr') + >>> a = zarr.zeros((10000,10000), chunks=(10000, 1), dtype='uint16', store='a.zarr') >>> b = zarr.array(a, chunks=(1,10000), store='b.zarr') which will require every chunk in the input data set to be repeatedly read when creating @@ -1412,7 +1412,7 @@ each output chunk. If the entire array will fit within memory, this is simply re by forcing the entire input array into memory as a numpy array before converting back to zarr with the desired chunking. :: - >>> a = zarr.zeros((10000,10000), chunks=(10000, 1), dtype='uint16, store='a.zarr') + >>> a = zarr.zeros((10000,10000), chunks=(10000, 1), dtype='uint16', store='a.zarr') >>> b = a[...] >>> c = zarr.array(b, chunks=(1,10000), store='c.zarr') From 5c602cb47b305468b31f6126460362ae7af2303b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Jul 2022 08:21:52 -0500 Subject: [PATCH 0150/1078] Bump setuptools-scm from 7.0.4 to 7.0.5 (#1072) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 7.0.4 to 7.0.5. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v7.0.4...v7.0.5) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-patch ... 
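The tutorial hunk above, besides fixing the unclosed quotes, documents a pattern worth restating: when a rechunk would otherwise re-read every input chunk once per output chunk, and the array fits in memory, round-trip through NumPy instead. A small sketch of that pattern (shape shrunk, and the store paths 'a.zarr'/'c.zarr' chosen purely for illustration)::

    import zarr

    # column-based chunks: each chunk holds one whole column
    a = zarr.zeros((1000, 1000), chunks=(1000, 1), dtype='uint16', store='a.zarr')

    # pull the whole array into memory once...
    b = a[...]

    # ...then write it back with row-based chunks in a single pass
    c = zarr.array(b, chunks=(1, 1000), store='c.zarr')
    assert c.chunks == (1, 1000)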
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 5dcdab16be..40098a3159 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.17.3 numcodecs==0.10.0 msgpack-python==0.5.6 -setuptools-scm==7.0.4 +setuptools-scm==7.0.5 # test requirements pytest==7.1.2 From 4b833028ed8821ce80126e5156b6b0b3b2b0d0bc Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 19 Jul 2022 20:07:09 +0200 Subject: [PATCH 0151/1078] More typos found by codespell (#1073) --- docs/release.rst | 6 ++++++ docs/tutorial.rst | 2 +- zarr/core.py | 2 +- zarr/meta.py | 4 ++-- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 706859a551..0b9a208b2f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -16,6 +16,12 @@ Documentation * Typo fixes to close quotes. By :user:`Pavithra Eswaramoorthy ` +Maintenance +~~~~~~~~~~~ + +* Fix spelling. + By :user:`Dimitri Papadopoulos Orfanos `, :issue:`1073`. + .. _release_2.12.0: diff --git a/docs/tutorial.rst b/docs/tutorial.rst index fcee87c888..37073fcb31 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -897,7 +897,7 @@ The second invocation here will be much faster. Note that the ``storage_options` have become more complex here, to account for the two parts of the supplied URL. -It is also possible to initialize the filesytem outside of Zarr and then pass +It is also possible to initialize the filesystem outside of Zarr and then pass it through. This requires creating an :class:`zarr.storage.FSStore` object explicitly. For example:: diff --git a/zarr/core.py b/zarr/core.py index 6ce2fa8800..bd61639ef6 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -153,7 +153,7 @@ class Array: def __init__( self, - store: Any, # BaseStore not stricly required due to normalize_store_arg + store: Any, # BaseStore not strictly required due to normalize_store_arg path=None, read_only=False, chunk_store=None, diff --git a/zarr/meta.py b/zarr/meta.py index a0f2e80f89..c290e90163 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -81,7 +81,7 @@ def get_extended_dtype_info(dtype) -> dict: fallback=None, ) else: - raise ValueError(f"Unsupport dtype: {dtype}") + raise ValueError(f"Unsupported dtype: {dtype}") class Metadata2: @@ -399,7 +399,7 @@ def decode_hierarchy_metadata( "metadata_key_suffix", "extensions", }: - raise ValueError(f"Unexpected keys in metdata. meta={meta}") + raise ValueError(f"Unexpected keys in metadata. meta={meta}") return meta @classmethod From ece1810e88cb5e921da669a554ad272d6061c996 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 19 Jul 2022 20:38:26 +0200 Subject: [PATCH 0152/1078] Fix deepsource.io issues (#1080) * Use a ternary if/else instead of and/or An instance of the pre-Python 2.5 ternary syntax is being used. Using [condition] and [on_true] or [on_false] may give wrong results when on_true has a false boolean value. * Remove unneeded `not` * Use `is` when comparing `type` of two objects * Unnecessary use of a comprehension * Unnecessary `None` provided as default Unlike pop() which raises a KeyError by default, get() returns None by default. 
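The first bullet is the classic pre-2.5 ternary trap: `cond and A or B` silently yields `B` whenever `A` is falsy. The `_VERSIONS` expressions in this patch happen to work because their tuples are non-empty, but the rewrite removes the latent trap. A standalone illustration (the variable values here are made up, not taken from the diff)::

    v3_api_available = True

    # old idiom: the empty tuple is falsy, so the 'or' branch wins (wrong)
    versions = v3_api_available and () or (2,)
    print(versions)  # (2,)

    # conditional expression: returns the empty tuple as intended
    versions = () if v3_api_available else (2,)
    print(versions)  # ()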
Co-authored-by: Josh Moore --- docs/release.rst | 4 +++- zarr/convenience.py | 2 +- zarr/hierarchy.py | 2 +- zarr/storage.py | 2 +- zarr/tests/test_convenience.py | 2 +- zarr/tests/test_creation.py | 12 ++++++------ zarr/tests/test_hierarchy.py | 2 +- zarr/tests/test_storage.py | 2 +- 8 files changed, 15 insertions(+), 13 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 0b9a208b2f..b729f20ee0 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -19,10 +19,12 @@ Documentation Maintenance ~~~~~~~~~~~ +* Fix a few DeepSource.io alerts + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1080`. + * Fix spelling. By :user:`Dimitri Papadopoulos Orfanos `, :issue:`1073`. - .. _release_2.12.0: 2.12.0 diff --git a/zarr/convenience.py b/zarr/convenience.py index 93dc860477..be8b609a46 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -1279,7 +1279,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** """ # normalize parameters - zarr_version = kwargs.get('zarr_version', None) + zarr_version = kwargs.get('zarr_version') store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode, zarr_version=zarr_version) if mode not in {'r', 'r+'}: diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index b9052408b4..80da3ddbc6 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -1327,7 +1327,7 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N storage_options=storage_options, mode=mode, zarr_version=zarr_version) - if not getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) == zarr_version: + if getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) != zarr_version: raise ValueError( "zarr_version of store and chunk_store must match" ) diff --git a/zarr/storage.py b/zarr/storage.py index 48b6f049dd..440b41ea07 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1413,7 +1413,7 @@ def __contains__(self, key): return key in self.map def __eq__(self, other): - return (type(self) == type(other) and self.map == other.map + return (type(self) is type(other) and self.map == other.map and self.mode == other.mode) def keys(self): diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index d0d293a694..59bb3aa7da 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -45,7 +45,7 @@ ) from zarr.tests.util import have_fsspec -_VERSIONS = v3_api_available and (2, 3) or (2,) +_VERSIONS = ((2, 3) if v3_api_available else (2, )) def _init_creation_kwargs(zarr_version): diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index 48d6aee4f5..b8ab118329 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -21,8 +21,8 @@ from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer -_VERSIONS = v3_api_available and (None, 2, 3) or (None, 2) -_VERSIONS2 = v3_api_available and (2, 3) or (2,) +_VERSIONS = ((None, 2, 3) if v3_api_available else (None, 2)) +_VERSIONS2 = ((2, 3) if v3_api_available else (2, )) # something bcolz-like @@ -430,7 +430,7 @@ def test_empty_like(zarr_version): z = empty(100, chunks=10, dtype='f4', compressor=Zlib(5), order='F', **kwargs) # zarr_version will be inferred from z, but have to specify a path in v3 - z2 = empty_like(z, path=kwargs.get('path', None)) + z2 = empty_like(z, path=kwargs.get('path')) assert z.shape == z2.shape assert z.chunks == z2.chunks assert z.dtype == z2.dtype @@ -479,7 +479,7 @@ def 
test_zeros_like(zarr_version): # zarr array z = zeros(100, chunks=10, dtype='f4', compressor=Zlib(5), order='F', **kwargs) - z2 = zeros_like(z, path=kwargs.get('path', None)) + z2 = zeros_like(z, path=kwargs.get('path')) assert z.shape == z2.shape assert z.chunks == z2.chunks assert z.dtype == z2.dtype @@ -506,7 +506,7 @@ def test_ones_like(zarr_version): # zarr array z = ones(100, chunks=10, dtype='f4', compressor=Zlib(5), order='F', **kwargs) - z2 = ones_like(z, path=kwargs.get('path', None)) + z2 = ones_like(z, path=kwargs.get('path')) assert z.shape == z2.shape assert z.chunks == z2.chunks assert z.dtype == z2.dtype @@ -533,7 +533,7 @@ def test_full_like(zarr_version): z = full(100, chunks=10, dtype='f4', compressor=Zlib(5), fill_value=42, order='F', **kwargs) - z2 = full_like(z, path=kwargs.get('path', None)) + z2 = full_like(z, path=kwargs.get('path')) assert z.shape == z2.shape assert z.chunks == z2.chunks assert z.dtype == z2.dtype diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index 7c2eaa3f75..61c40e9f75 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -35,7 +35,7 @@ from zarr.tests.util import skip_test_env_var, have_fsspec, abs_container -_VERSIONS = v3_api_available and (2, 3) or (2,) +_VERSIONS = ((2, 3) if v3_api_available else (2, )) # noinspection PyStatementEffect diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index a3e227ec86..b938115e39 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -2400,7 +2400,7 @@ def test_iterators_with_prefix(self): assert 4 == len(store) keys = [prefix + 'a', prefix + 'b', prefix + 'c/d', prefix + 'c/e/f'] values = [b'aaa', b'bbb', b'ddd', b'fff'] - items = [(k, v) for k, v in zip(keys, values)] + items = list(zip(keys, values)) assert set(keys) == set(store) assert set(keys) == set(store.keys()) assert set(values) == set(store.values()) From dcc6ded4cb9b9fcc24e69b5fd5d898e97e0630dd Mon Sep 17 00:00:00 2001 From: Saransh Date: Thu, 21 Jul 2022 18:15:24 +0530 Subject: [PATCH 0153/1078] Add `YAML` files for issue tracker (#1079) * Add a `YAML` file for the bug report template * Add gitter and discussion tab reference * Minor bug fix, ironical * Update `release.rst` * Update .github/ISSUE_TEMPLATE/config.yml Co-authored-by: jakirkham * Update `config.yml` with more links * Make texts uniform and add unicode characters Co-authored-by: jakirkham --- .github/ISSUE_TEMPLATE.md | 28 ----------- .github/ISSUE_TEMPLATE/bug_report.yml | 67 +++++++++++++++++++++++++++ .github/ISSUE_TEMPLATE/config.yml | 11 +++++ docs/release.rst | 4 ++ 4 files changed, 82 insertions(+), 28 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE.md create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 .github/ISSUE_TEMPLATE/config.yml diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 9f8b2348cd..0000000000 --- a/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,28 +0,0 @@ -For bug reports, please follow the template below. For enhancement proposals, feel free -to use whatever template makes sense (major new features should be discussed in the -Zarr specifications repository https://github.com/zarr-developers/zarr-specs). 
- -#### Minimal, reproducible code sample, a copy-pastable example if possible - -```python -# Your code here - -``` - -#### Problem description - -Explain why the current behavior is a problem, what the expected output/behaviour -is, and why the expected output/behaviour is a better solution. - -#### Version and installation information - -Please provide the following: - -* Value of ``zarr.__version__`` -* Value of ``numcodecs.__version__`` -* Version of Python interpreter -* Operating system (Linux/Windows/Mac) -* How Zarr was installed (e.g., "using pip into virtual environment", or "using conda") - -Also, if you think it might be relevant, please provide the output from ``pip freeze`` or -``conda env export`` depending on which was used to install Zarr. diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000000..ba05f23fcc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,67 @@ +name: 🐛 File a bug report +description: X's behavior is deviating from its documented behavior. +labels: ["bug"] +body: + - type: markdown + attributes: + value: | + Please provide the following information. + - type: input + id: Zarr-version + attributes: + label: Zarr version + description: Value of ``zarr.__version__`` + placeholder: v2.10.2, v2.11.3, v2.12.0, etc. + validations: + required: true + - type: input + id: Numcodecs-version + attributes: + label: Numcodecs version + description: Value of ``numcodecs.__version__`` + placeholder: v0.8.1, v0.9.0, v0.10.0, etc. + validations: + required: true + - type: input + id: Python-version + attributes: + label: Python Version + description: Version of Python interpreter + placeholder: 3.8.5, 3.9, 3.10, etc. + validations: + required: true + - type: input + id: OS + attributes: + label: Operating System + description: Operating System + placeholder: (Linux/Windows/Mac) + validations: + required: true + - type: input + id: installation + attributes: + label: Installation + description: How was Zarr installed? + placeholder: e.g., "using pip into virtual environment", or "using conda" + validations: + required: true + - type: textarea + id: description + attributes: + label: Description + description: Explain why the current behavior is a problem, what the expected output/behaviour is, and why the expected output/behaviour is a better solution. + validations: + required: true + - type: textarea + id: reproduce + attributes: + label: Steps to reproduce + description: Minimal, reproducible code sample, a copy-pastable example if possible. + validations: + required: true + - type: textarea + id: additional-output + attributes: + label: Additional output + description: If you think it might be relevant, please provide the output from ``pip freeze`` or ``conda env export`` depending on which was used to install Zarr. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000..3ce1587389 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: true +contact_links: + - name: ✨ Propose a new major feature + url: https://github.com/zarr-developers/zarr-specs + about: A new major feature should be discussed in the Zarr specifications repository. + - name: ❓ Discuss something on gitter + url: https://gitter.im/zarr-developers/community + about: For questions like "How do I do X with Zarr?", you can move to our Gitter channel. 
+ - name: ❓ Discuss something on GitHub Discussions + url: https://github.com/zarr-developers/zarr-python/discussions + about: For questions like "How do I do X with Zarr?", you can move to GitHub Discussions. diff --git a/docs/release.rst b/docs/release.rst index b729f20ee0..77243ff394 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -25,6 +25,10 @@ Maintenance * Fix spelling. By :user:`Dimitri Papadopoulos Orfanos `, :issue:`1073`. +* Update GitHub issue templates with `YAML` format. + By :user:`Saransh Chopra ` :issue:`1079`. + + .. _release_2.12.0: 2.12.0 From 0306326a6c41f5e81167880ad9b2077323ab97f1 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Sat, 30 Jul 2022 15:10:54 -0400 Subject: [PATCH 0154/1078] pin werkzeug to <2.2.0 (#1098) --- requirements_dev_optional.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 9f3a8fddfb..eb8a04f860 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -22,3 +22,4 @@ h5py==3.7.0 fsspec==2022.5.0 s3fs==2022.5.0 moto[server]>=1.3.14 +werkzeug<2.2.0 \ No newline at end of file From 7548e756e8e96894016d66c2a329268991929004 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 31 Jul 2022 03:14:11 +0200 Subject: [PATCH 0155/1078] Bump numcodecs from 0.10.0 to 0.10.1 (#1093) Bumps [numcodecs](https://github.com/zarr-developers/numcodecs) from 0.10.0 to 0.10.1. - [Release notes](https://github.com/zarr-developers/numcodecs/releases) - [Changelog](https://github.com/zarr-developers/numcodecs/blob/main/docs/release.rst) - [Commits](https://github.com/zarr-developers/numcodecs/compare/v0.10.0...v0.10.1) --- updated-dependencies: - dependency-name: numcodecs dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 40098a3159..9168ed4cd9 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,7 +1,7 @@ # library requirements asciitree==0.3.3 fasteners==0.17.3 -numcodecs==0.10.0 +numcodecs==0.10.1 msgpack-python==0.5.6 setuptools-scm==7.0.5 # test requirements From 517739dbe1d238bad27f9759dc8f8e0f7b831df8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 31 Jul 2022 03:14:26 +0200 Subject: [PATCH 0156/1078] Bump pymongo from 4.1.1 to 4.2.0 (#1091) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.1.1 to 4.2.0. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.1.1...4.2.0) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-minor ... 
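Since the pymongo bump here touches the optional MongoDB store backend, a quick smoke test of that backend can look like the following. This is only a sketch: it assumes a MongoDB server reachable at localhost:27017 and that extra keyword arguments to `MongoDBStore` are forwarded to `pymongo.MongoClient`, with the database and collection names falling back to the store's defaults::

    import zarr
    from zarr.storage import MongoDBStore

    # assumed: kwargs are passed through to pymongo.MongoClient
    store = MongoDBStore(host='localhost', port=27017)
    try:
        z = zarr.zeros((100,), chunks=10, store=store, overwrite=True)
        z[:] = 42
        assert int(z[0]) == 42
    finally:
        store.close()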
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index eb8a04f860..09d0a68151 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -10,7 +10,7 @@ azure-storage-blob==12.13.0 # pyup: ignore redis==4.3.4 types-redis types-setuptools -pymongo==4.1.1 +pymongo==4.2.0 # optional test requirements tox==3.25.1 coverage From fa68802a37a580b380288fd5840760e27ca33c3d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 31 Jul 2022 05:52:59 +0200 Subject: [PATCH 0157/1078] Bump fsspec from 2022.5.0 to 2022.7.0 (#1095) * Bump fsspec from 2022.5.0 to 2022.7.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.5.0 to 2022.7.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.5.0...2022.7.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump s3fs as well Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 09d0a68151..9025cf985f 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,7 +19,7 @@ pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 h5py==3.7.0 -fsspec==2022.5.0 -s3fs==2022.5.0 +fsspec==2022.7.0 +s3fs==2022.7.0 moto[server]>=1.3.14 werkzeug<2.2.0 \ No newline at end of file From 2803a59d71ec4789a895ef74a57649ad66d7f0ab Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Thu, 4 Aug 2022 15:07:43 -0400 Subject: [PATCH 0158/1078] N5 root array fix (#1094) * put n5 attribute in top-level metadata for arrays * flake8 * normalize temp var name * remove n5 keyword from results of __getitem__ * partial fix for hexdigest tests * use a different strategy for n5 attribute management, and general cleanups * remove test exclusion and cast of partial chunk handling to bytes * add n5-specific tests file * add an additional codec to test_compressors * expand n5 test coverage * pin werkzeug to get CI working * appease the typechecker by introducing a new var in compressor_config_to_n5 * release notes * remove tobytes and improve n5 chunk wrapper test * Update docs/release.rst Co-authored-by: jakirkham Co-authored-by: Josh Moore --- docs/release.rst | 8 + zarr/n5.py | 338 ++++++++++++++++++++----------------- zarr/tests/test_core.py | 2 +- zarr/tests/test_n5.py | 37 ++++ zarr/tests/test_storage.py | 54 +++++- 5 files changed, 282 insertions(+), 157 deletions(-) create mode 100644 zarr/tests/test_n5.py diff --git a/docs/release.rst b/docs/release.rst index 77243ff394..20c24fcea1 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -29,6 +29,14 @@ Maintenance By :user:`Saransh Chopra ` :issue:`1079`. +Bug fixes +~~~~~~~~~ + +* Fix bug in N5 storage that prevented arrays located in the root of the hierarchy from + bearing the `n5` keyword. Along with fixing this bug, new tests were added for N5 routines + that had previously been excluded from testing, and type annotations were added to the N5 codebase. 
+ By :user:`Davis Bennett `; :issue:`1092`. + .. _release_2.12.0: 2.12.0 diff --git a/zarr/n5.py b/zarr/n5.py index 2f98c2f963..978cade1b8 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -3,6 +3,7 @@ import os import struct import sys +from typing import Any, Dict, Optional, cast import warnings import numpy as np @@ -17,6 +18,8 @@ from .storage import attrs_key as zarr_attrs_key from .storage import group_meta_key as zarr_group_meta_key +N5_FORMAT = '2.0.0' + zarr_to_n5_keys = [ ('chunks', 'blockSize'), ('dtype', 'dataType'), @@ -67,70 +70,70 @@ class N5Store(NestedDirectoryStore): """ - def __getitem__(self, key): - + def __getitem__(self, key: str) -> bytes: if key.endswith(zarr_group_meta_key): - key = key.replace(zarr_group_meta_key, n5_attrs_key) - value = group_metadata_to_zarr(self._load_n5_attrs(key)) + key_new = key.replace(zarr_group_meta_key, n5_attrs_key) + value = group_metadata_to_zarr(self._load_n5_attrs(key_new)) return json_dumps(value) elif key.endswith(zarr_array_meta_key): - key = key.replace(zarr_array_meta_key, n5_attrs_key) - value = array_metadata_to_zarr(self._load_n5_attrs(key)) - + key_new = key.replace(zarr_array_meta_key, n5_attrs_key) + top_level = key == zarr_array_meta_key + value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level) return json_dumps(value) elif key.endswith(zarr_attrs_key): - key = key.replace(zarr_attrs_key, n5_attrs_key) - value = attrs_to_zarr(self._load_n5_attrs(key)) + key_new = key.replace(zarr_attrs_key, n5_attrs_key) + value = attrs_to_zarr(self._load_n5_attrs(key_new)) if len(value) == 0: - raise KeyError(key) + raise KeyError(key_new) else: return json_dumps(value) elif is_chunk_key(key): + key_new = invert_chunk_coords(key) - key = invert_chunk_coords(key) + else: + key_new = key - return super().__getitem__(key) + return super().__getitem__(key_new) - def __setitem__(self, key, value): + def __setitem__(self, key: str, value: Any): if key.endswith(zarr_group_meta_key): - key = key.replace(zarr_group_meta_key, n5_attrs_key) + key_new = key.replace(zarr_group_meta_key, n5_attrs_key) - n5_attrs = self._load_n5_attrs(key) + n5_attrs = self._load_n5_attrs(key_new) n5_attrs.update(**group_metadata_to_n5(json_loads(value))) value = json_dumps(n5_attrs) elif key.endswith(zarr_array_meta_key): - key = key.replace(zarr_array_meta_key, n5_attrs_key) - - n5_attrs = self._load_n5_attrs(key) - n5_attrs.update(**array_metadata_to_n5(json_loads(value))) - + key_new = key.replace(zarr_array_meta_key, n5_attrs_key) + top_level = key == zarr_array_meta_key + n5_attrs = self._load_n5_attrs(key_new) + n5_attrs.update(**array_metadata_to_n5(json_loads(value), top_level=top_level)) value = json_dumps(n5_attrs) elif key.endswith(zarr_attrs_key): - key = key.replace(zarr_attrs_key, n5_attrs_key) + key_new = key.replace(zarr_attrs_key, n5_attrs_key) - n5_attrs = self._load_n5_attrs(key) + n5_attrs = self._load_n5_attrs(key_new) zarr_attrs = json_loads(value) for k in n5_keywords: - if k in zarr_attrs.keys(): - warnings.warn("attribute %s is a reserved N5 keyword" % k, UserWarning) + if k in zarr_attrs: + warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning) - # replace previous user attributes + # remove previous user attributes for k in list(n5_attrs.keys()): if k not in n5_keywords: del n5_attrs[k] @@ -141,50 +144,55 @@ def __setitem__(self, key, value): value = json_dumps(n5_attrs) elif is_chunk_key(key): + key_new = invert_chunk_coords(key) - key = invert_chunk_coords(key) - - super().__setitem__(key, value) + 
else: + key_new = key - def __delitem__(self, key): + super().__setitem__(key_new, value) - if key.endswith(zarr_group_meta_key): # pragma: no cover - key = key.replace(zarr_group_meta_key, n5_attrs_key) - elif key.endswith(zarr_array_meta_key): # pragma: no cover - key = key.replace(zarr_array_meta_key, n5_attrs_key) - elif key.endswith(zarr_attrs_key): # pragma: no cover - key = key.replace(zarr_attrs_key, n5_attrs_key) + def __delitem__(self, key: str): + if key.endswith(zarr_group_meta_key): + key_new = key.replace(zarr_group_meta_key, n5_attrs_key) + elif key.endswith(zarr_array_meta_key): + key_new = key.replace(zarr_array_meta_key, n5_attrs_key) + elif key.endswith(zarr_attrs_key): + key_new = key.replace(zarr_attrs_key, n5_attrs_key) elif is_chunk_key(key): - key = invert_chunk_coords(key) + key_new = invert_chunk_coords(key) + else: + key_new = key - super().__delitem__(key) + super().__delitem__(key_new) def __contains__(self, key): if key.endswith(zarr_group_meta_key): - key = key.replace(zarr_group_meta_key, n5_attrs_key) - if key not in self: + key_new = key.replace(zarr_group_meta_key, n5_attrs_key) + if key_new not in self: return False # group if not a dataset (attributes do not contain 'dimensions') - return 'dimensions' not in self._load_n5_attrs(key) + return 'dimensions' not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): - key = key.replace(zarr_array_meta_key, n5_attrs_key) + key_new = key.replace(zarr_array_meta_key, n5_attrs_key) # array if attributes contain 'dimensions' - return 'dimensions' in self._load_n5_attrs(key) + return 'dimensions' in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): - key = key.replace(zarr_attrs_key, n5_attrs_key) - return self._contains_attrs(key) + key_new = key.replace(zarr_attrs_key, n5_attrs_key) + return self._contains_attrs(key_new) elif is_chunk_key(key): - key = invert_chunk_coords(key) + key_new = invert_chunk_coords(key) + else: + key_new = key - return super().__contains__(key) + return super().__contains__(key_new) def __eq__(self, other): return ( @@ -192,11 +200,11 @@ def __eq__(self, other): self.path == other.path ) - def listdir(self, path=None): + def listdir(self, path: Optional[str] = None): if path is not None: path = invert_chunk_coords(path) - + path = cast(str, path) # We can't use NestedDirectoryStore's listdir, as it requires # array_meta_key to be present in array directories, which this store # doesn't provide. 
@@ -233,7 +241,7 @@ def listdir(self, path=None): # replace n5 attribute file with respective zarr attribute files children.remove(n5_attrs_key) children.append(zarr_group_meta_key) - if self._contains_attrs(path): # pragma: no cover + if self._contains_attrs(path): children.append(zarr_attrs_key) return sorted(children) @@ -242,14 +250,14 @@ def listdir(self, path=None): return children - def _load_n5_attrs(self, path): + def _load_n5_attrs(self, path: str) -> Dict[str, Any]: try: s = super().__getitem__(path) return json_loads(s) except KeyError: return {} - def _is_group(self, path): + def _is_group(self, path: str): if path is None: attrs_key = n5_attrs_key @@ -259,7 +267,7 @@ def _is_group(self, path): n5_attrs = self._load_n5_attrs(attrs_key) return len(n5_attrs) > 0 and 'dimensions' not in n5_attrs - def _is_array(self, path): + def _is_array(self, path: str): if path is None: attrs_key = n5_attrs_key @@ -268,14 +276,14 @@ def _is_array(self, path): return 'dimensions' in self._load_n5_attrs(attrs_key) - def _contains_attrs(self, path): + def _contains_attrs(self, path: str): if path is None: attrs_key = n5_attrs_key else: if not path.endswith(n5_attrs_key): attrs_key = os.path.join(path, n5_attrs_key) - else: # pragma: no cover + else: attrs_key = path attrs = attrs_to_zarr(self._load_n5_attrs(attrs_key)) @@ -344,7 +352,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, dimension_separator=dimension_separator, **kwargs) @staticmethod - def _swap_separator(key): + def _swap_separator(key: str): segments = list(key.split('/')) if segments: last_segment = segments[-1] @@ -355,7 +363,7 @@ def _swap_separator(key): key = '/'.join(segments) return key - def _normalize_key(self, key): + def _normalize_key(self, key: str): if is_chunk_key(key): key = invert_chunk_coords(key) @@ -368,65 +376,68 @@ def _normalize_key(self, key): key = "/".join(bits + [end]) return key.lower() if self.normalize_keys else key - def __getitem__(self, key): + def __getitem__(self, key: str) -> bytes: if key.endswith(zarr_group_meta_key): - key = key.replace(zarr_group_meta_key, self._group_meta_key) - value = group_metadata_to_zarr(self._load_n5_attrs(key)) + key_new = key.replace(zarr_group_meta_key, self._group_meta_key) + value = group_metadata_to_zarr(self._load_n5_attrs(key_new)) return json_dumps(value) elif key.endswith(zarr_array_meta_key): - key = key.replace(zarr_array_meta_key, self._array_meta_key) - value = array_metadata_to_zarr(self._load_n5_attrs(key)) - + key_new = key.replace(zarr_array_meta_key, self._array_meta_key) + top_level = key == zarr_array_meta_key + value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level) return json_dumps(value) elif key.endswith(zarr_attrs_key): - key = key.replace(zarr_attrs_key, self._attrs_key) - value = attrs_to_zarr(self._load_n5_attrs(key)) + key_new = key.replace(zarr_attrs_key, self._attrs_key) + value = attrs_to_zarr(self._load_n5_attrs(key_new)) if len(value) == 0: - raise KeyError(key) + raise KeyError(key_new) else: return json_dumps(value) elif is_chunk_key(key): - key = self._swap_separator(key) + key_new = self._swap_separator(key) - return super().__getitem__(key) + else: + key_new = key - def __setitem__(self, key, value): + return super().__getitem__(key_new) + + def __setitem__(self, key: str, value: Any): if key.endswith(zarr_group_meta_key): - key = key.replace(zarr_group_meta_key, self._group_meta_key) + key_new = key.replace(zarr_group_meta_key, self._group_meta_key) - n5_attrs = 
self._load_n5_attrs(key) + n5_attrs = self._load_n5_attrs(key_new) n5_attrs.update(**group_metadata_to_n5(json_loads(value))) value = json_dumps(n5_attrs) elif key.endswith(zarr_array_meta_key): - key = key.replace(zarr_array_meta_key, self._array_meta_key) - - n5_attrs = self._load_n5_attrs(key) - n5_attrs.update(**array_metadata_to_n5(json_loads(value))) + key_new = key.replace(zarr_array_meta_key, self._array_meta_key) + top_level = key == zarr_array_meta_key + n5_attrs = self._load_n5_attrs(key_new) + n5_attrs.update(**array_metadata_to_n5(json_loads(value), top_level=top_level)) value = json_dumps(n5_attrs) elif key.endswith(zarr_attrs_key): - key = key.replace(zarr_attrs_key, self._attrs_key) + key_new = key.replace(zarr_attrs_key, self._attrs_key) - n5_attrs = self._load_n5_attrs(key) + n5_attrs = self._load_n5_attrs(key_new) zarr_attrs = json_loads(value) for k in n5_keywords: if k in zarr_attrs.keys(): - warnings.warn("attribute %s is a reserved N5 keyword" % k, UserWarning) + warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning) # replace previous user attributes for k in list(n5_attrs.keys()): @@ -439,52 +450,58 @@ def __setitem__(self, key, value): value = json_dumps(n5_attrs) elif is_chunk_key(key): - key = self._swap_separator(key) + key_new = self._swap_separator(key) - super().__setitem__(key, value) + else: + key_new = key - def __delitem__(self, key): + super().__setitem__(key_new, value) - if key.endswith(zarr_group_meta_key): # pragma: no cover - key = key.replace(zarr_group_meta_key, self._group_meta_key) - elif key.endswith(zarr_array_meta_key): # pragma: no cover - key = key.replace(zarr_array_meta_key, self._array_meta_key) - elif key.endswith(zarr_attrs_key): # pragma: no cover - key = key.replace(zarr_attrs_key, self._attrs_key) - elif is_chunk_key(key): - key = self._swap_separator(key) + def __delitem__(self, key: str): - super().__delitem__(key) + if key.endswith(zarr_group_meta_key): + key_new = key.replace(zarr_group_meta_key, self._group_meta_key) + elif key.endswith(zarr_array_meta_key): + key_new = key.replace(zarr_array_meta_key, self._array_meta_key) + elif key.endswith(zarr_attrs_key): + key_new = key.replace(zarr_attrs_key, self._attrs_key) + elif is_chunk_key(key): + key_new = self._swap_separator(key) + else: + key_new = key + super().__delitem__(key_new) - def __contains__(self, key): + def __contains__(self, key: Any): if key.endswith(zarr_group_meta_key): - key = key.replace(zarr_group_meta_key, self._group_meta_key) - if key not in self: + key_new = key.replace(zarr_group_meta_key, self._group_meta_key) + if key_new not in self: return False # group if not a dataset (attributes do not contain 'dimensions') - return "dimensions" not in self._load_n5_attrs(key) + return "dimensions" not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): - key = key.replace(zarr_array_meta_key, self._array_meta_key) + key_new = key.replace(zarr_array_meta_key, self._array_meta_key) # array if attributes contain 'dimensions' - return "dimensions" in self._load_n5_attrs(key) + return "dimensions" in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): - key = key.replace(zarr_attrs_key, self._attrs_key) - return self._contains_attrs(key) + key_new = key.replace(zarr_attrs_key, self._attrs_key) + return self._contains_attrs(key_new) elif is_chunk_key(key): - key = self._swap_separator(key) + key_new = self._swap_separator(key) - return super().__contains__(key) + else: + key_new = key + return 
super().__contains__(key_new) - def __eq__(self, other): + def __eq__(self, other: Any): return isinstance(other, N5FSStore) and self.path == other.path - def listdir(self, path=None): + def listdir(self, path: Optional[str] = None): if path is not None: path = invert_chunk_coords(path) @@ -521,20 +538,20 @@ def listdir(self, path=None): # replace n5 attribute file with respective zarr attribute files children.remove(self._group_meta_key) children.append(zarr_group_meta_key) - if self._contains_attrs(path): # pragma: no cover + if self._contains_attrs(path): children.append(zarr_attrs_key) return sorted(children) else: return children - def _load_n5_attrs(self, path): + def _load_n5_attrs(self, path: str): try: s = super().__getitem__(path) return json_loads(s) except KeyError: return {} - def _is_group(self, path): + def _is_group(self, path: Optional[str]): if path is None: attrs_key = self._attrs_key @@ -544,7 +561,7 @@ def _is_group(self, path): n5_attrs = self._load_n5_attrs(attrs_key) return len(n5_attrs) > 0 and "dimensions" not in n5_attrs - def _is_array(self, path): + def _is_array(self, path: Optional[str]): if path is None: attrs_key = self._attrs_key @@ -553,30 +570,30 @@ def _is_array(self, path): return "dimensions" in self._load_n5_attrs(attrs_key) - def _contains_attrs(self, path): + def _contains_attrs(self, path: Optional[str]): if path is None: attrs_key = self._attrs_key else: if not path.endswith(self._attrs_key): attrs_key = os.path.join(path, self._attrs_key) - else: # pragma: no cover + else: attrs_key = path attrs = attrs_to_zarr(self._load_n5_attrs(attrs_key)) return len(attrs) > 0 -def is_chunk_key(key): +def is_chunk_key(key: str): rv = False segments = list(key.split('/')) if segments: last_segment = segments[-1] - rv = _prog_ckey.match(last_segment) + rv = bool(_prog_ckey.match(last_segment)) return rv -def invert_chunk_coords(key): +def invert_chunk_coords(key: str): segments = list(key.split('/')) if segments: last_segment = segments[-1] @@ -588,15 +605,15 @@ def invert_chunk_coords(key): return key -def group_metadata_to_n5(group_metadata): +def group_metadata_to_n5(group_metadata: Dict[str, Any]) -> Dict[str, Any]: '''Convert group metadata from zarr to N5 format.''' del group_metadata['zarr_format'] # TODO: This should only exist at the top-level - group_metadata['n5'] = '2.0.0' + group_metadata['n5'] = N5_FORMAT return group_metadata -def group_metadata_to_zarr(group_metadata): +def group_metadata_to_zarr(group_metadata: Dict[str, Any]) -> Dict[str, Any]: '''Convert group metadata from N5 to zarr format.''' # This only exists at the top level group_metadata.pop('n5', None) @@ -604,19 +621,20 @@ def group_metadata_to_zarr(group_metadata): return group_metadata -def array_metadata_to_n5(array_metadata): - '''Convert array metadata from zarr to N5 format.''' +def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dict[str, Any]: + '''Convert array metadata from zarr to N5 format. 
If the `top_level` keyword argument is True, + then the `N5` : N5_FORMAT key : value pair will be inserted into the metadata.''' for f, t in zarr_to_n5_keys: - array_metadata[t] = array_metadata[f] - del array_metadata[f] + array_metadata[t] = array_metadata.pop(f) del array_metadata['zarr_format'] - + if top_level: + array_metadata['n5'] = N5_FORMAT try: dtype = np.dtype(array_metadata['dataType']) - except TypeError: # pragma: no cover + except TypeError: raise TypeError( - "data type %s not supported by N5" % array_metadata['dataType']) + f"Data type {array_metadata['dataType']} is not supported by N5") array_metadata['dataType'] = dtype.name array_metadata['dimensions'] = array_metadata['dimensions'][::-1] @@ -624,17 +642,24 @@ def array_metadata_to_n5(array_metadata): if 'fill_value' in array_metadata: if array_metadata['fill_value'] != 0 and array_metadata['fill_value'] is not None: - raise ValueError("N5 only supports fill_value == 0 (for now)") + raise ValueError( + f'''Received fill_value = {array_metadata['fill_value']}, + but N5 only supports fill_value = 0''' + ) del array_metadata['fill_value'] if 'order' in array_metadata: if array_metadata['order'] != 'C': - raise ValueError("zarr N5 storage only stores arrays in C order (for now)") + raise ValueError( + f"Received order = {array_metadata['order']}, but N5 only supports order = C" + ) del array_metadata['order'] if 'filters' in array_metadata: if array_metadata['filters'] != [] and array_metadata['filters'] is not None: - raise ValueError("N5 storage does not support zarr filters") + raise ValueError( + "Received filters, but N5 storage does not support zarr filters" + ) del array_metadata['filters'] assert 'compression' in array_metadata @@ -648,11 +673,14 @@ def array_metadata_to_n5(array_metadata): return array_metadata -def array_metadata_to_zarr(array_metadata): - '''Convert array metadata from N5 to zarr format.''' +def array_metadata_to_zarr(array_metadata: Dict[str, Any], + top_level: bool = False) -> Dict[str, Any]: + '''Convert array metadata from N5 to zarr format. 
+ If the `top_level` keyword argument is True, then the `N5` key will be removed from metadata''' for t, f in zarr_to_n5_keys: - array_metadata[t] = array_metadata[f] - del array_metadata[f] + array_metadata[t] = array_metadata.pop(f) + if top_level: + array_metadata.pop('n5') array_metadata['zarr_format'] = ZARR_FORMAT array_metadata['shape'] = array_metadata['shape'][::-1] @@ -674,7 +702,7 @@ def array_metadata_to_zarr(array_metadata): return array_metadata -def attrs_to_zarr(attrs): +def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: '''Get all zarr attributes from an N5 attributes dictionary (i.e., all non-keyword attributes).''' @@ -686,22 +714,24 @@ def attrs_to_zarr(attrs): return attrs -def compressor_config_to_n5(compressor_config): +def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]: if compressor_config is None: return {'type': 'raw'} + else: + _compressor_config = compressor_config # peel wrapper, if present - if compressor_config['id'] == N5ChunkWrapper.codec_id: - compressor_config = compressor_config['compressor_config'] + if _compressor_config['id'] == N5ChunkWrapper.codec_id: + _compressor_config = _compressor_config['compressor_config'] - codec_id = compressor_config['id'] + codec_id = _compressor_config['id'] n5_config = {'type': codec_id} if codec_id == 'bz2': n5_config['type'] = 'bzip2' - n5_config['blockSize'] = compressor_config['level'] + n5_config['blockSize'] = _compressor_config['level'] elif codec_id == 'blosc': @@ -711,16 +741,16 @@ def compressor_config_to_n5(compressor_config): RuntimeWarning ) - n5_config['cname'] = compressor_config['cname'] - n5_config['clevel'] = compressor_config['clevel'] - n5_config['shuffle'] = compressor_config['shuffle'] - n5_config['blocksize'] = compressor_config['blocksize'] + n5_config['cname'] = _compressor_config['cname'] + n5_config['clevel'] = _compressor_config['clevel'] + n5_config['shuffle'] = _compressor_config['shuffle'] + n5_config['blocksize'] = _compressor_config['blocksize'] elif codec_id == 'lzma': # Switch to XZ for N5 if we are using the default XZ format. # Note: 4 is the default, which is lzma.CHECK_CRC64. - if compressor_config['format'] == 1 and compressor_config['check'] in [-1, 4]: + if _compressor_config['format'] == 1 and _compressor_config['check'] in [-1, 4]: n5_config['type'] = 'xz' else: warnings.warn( @@ -728,36 +758,36 @@ def compressor_config_to_n5(compressor_config): "might not be able to open the dataset with another N5 library.", RuntimeWarning ) - n5_config['format'] = compressor_config['format'] - n5_config['check'] = compressor_config['check'] - n5_config['filters'] = compressor_config['filters'] + n5_config['format'] = _compressor_config['format'] + n5_config['check'] = _compressor_config['check'] + n5_config['filters'] = _compressor_config['filters'] # The default is lzma.PRESET_DEFAULT, which is 6. 
- if compressor_config['preset']: - n5_config['preset'] = compressor_config['preset'] + if _compressor_config['preset']: + n5_config['preset'] = _compressor_config['preset'] else: n5_config['preset'] = 6 elif codec_id == 'zlib': n5_config['type'] = 'gzip' - n5_config['level'] = compressor_config['level'] + n5_config['level'] = _compressor_config['level'] n5_config['useZlib'] = True - elif codec_id == 'gzip': # pragma: no cover + elif codec_id == 'gzip': n5_config['type'] = 'gzip' - n5_config['level'] = compressor_config['level'] + n5_config['level'] = _compressor_config['level'] n5_config['useZlib'] = False - else: # pragma: no cover + else: - n5_config.update({k: v for k, v in compressor_config.items() if k != 'type'}) + n5_config.update({k: v for k, v in _compressor_config.items() if k != 'type'}) return n5_config -def compressor_config_to_zarr(compressor_config): +def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]: codec_id = compressor_config['type'] zarr_config = {'id': codec_id} @@ -794,7 +824,7 @@ def compressor_config_to_zarr(compressor_config): if 'useZlib' in compressor_config and compressor_config['useZlib']: zarr_config['id'] = 'zlib' zarr_config['level'] = compressor_config['level'] - else: # pragma: no cover + else: zarr_config['id'] = 'gzip' zarr_config['level'] = compressor_config['level'] @@ -802,7 +832,7 @@ def compressor_config_to_zarr(compressor_config): return None - else: # pragma: no cover + else: zarr_config.update({k: v for k, v in compressor_config.items() if k != 'type'}) @@ -823,7 +853,7 @@ def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): (self.dtype.byteorder == '=' and sys.byteorder == 'little') ) - if compressor: # pragma: no cover + if compressor: if compressor_config is not None: raise ValueError("Only one of compressor_config or compressor should be given.") compressor_config = compressor.get_config() @@ -856,7 +886,7 @@ def encode(self, chunk): else: return header + chunk.tobytes(order='A') - def decode(self, chunk, out=None): + def decode(self, chunk, out=None) -> bytes: len_header, chunk_shape = self._read_header(chunk) chunk = chunk[len_header:] @@ -889,7 +919,7 @@ def decode(self, chunk, out=None): chunk = self._from_big_endian(chunk) # read partial chunk - if chunk_shape != self.chunk_shape: # pragma: no cover + if chunk_shape != self.chunk_shape: chunk = np.frombuffer(chunk, dtype=self.dtype) chunk = chunk.reshape(chunk_shape) complete_chunk = np.zeros(self.chunk_shape, dtype=self.dtype) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index f5f043e6e3..7429cba8c7 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -1988,7 +1988,7 @@ def test_attrs_n5_keywords(self): def test_compressors(self): compressors = [ - None, BZ2(), Zlib(), GZip() + None, BZ2(), Zlib(), GZip(), MsgPack() ] if LZMA: compressors.append(LZMA()) diff --git a/zarr/tests/test_n5.py b/zarr/tests/test_n5.py new file mode 100644 index 0000000000..a1a0a83e36 --- /dev/null +++ b/zarr/tests/test_n5.py @@ -0,0 +1,37 @@ + +import pytest + +from zarr.n5 import N5ChunkWrapper +from numcodecs import GZip +import numpy as np +from typing import Tuple + + +def test_make_n5_chunk_wrapper(): + dtype = 'uint8' + chunk_shape = (10,) + codec = GZip() + # ValueError when specifying both compressor and compressor_config + with pytest.raises(ValueError): + N5ChunkWrapper(dtype, + chunk_shape=chunk_shape, + compressor_config=codec.get_config(), + compressor=codec) + + wrapper_a = 
N5ChunkWrapper(dtype, chunk_shape=chunk_shape, compressor_config=codec.get_config()) + wrapper_b = N5ChunkWrapper(dtype, chunk_shape=chunk_shape, compressor=codec) + assert wrapper_a == wrapper_b + + +@pytest.mark.parametrize('chunk_shape', ((2,), (4, 4), (8, 8, 8))) +def test_partial_chunk_decode(chunk_shape: Tuple[int, ...]): + # Test that the N5Chunk wrapper can handle fractional chunks that + # may be generated by other N5 implementations + dtype = 'uint8' + codec = GZip() + codec_wrapped = N5ChunkWrapper(dtype, chunk_shape=chunk_shape, compressor=codec) + subslices = tuple(slice(0, cs // 2) for cs in chunk_shape) + chunk = np.zeros(chunk_shape, dtype=dtype) + chunk[subslices] = 1 + subchunk = np.ascontiguousarray(chunk[subslices]) + assert np.array_equal(codec_wrapped.decode(codec_wrapped.encode(subchunk)), chunk) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index b938115e39..2203b95fe8 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -24,7 +24,7 @@ from zarr.errors import ContainsArrayError, ContainsGroupError, MetadataError from zarr.hierarchy import group from zarr.meta import ZARR_FORMAT, decode_array_metadata -from zarr.n5 import N5Store, N5FSStore +from zarr.n5 import N5Store, N5FSStore, N5_FORMAT, n5_attrs_key from zarr.storage import (ABSStore, ConsolidatedMetadataStore, DBMStore, DictStore, DirectoryStore, KVStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, @@ -37,6 +37,7 @@ from zarr.storage import FSStore, rename, listdir from zarr._storage.v3 import KVStoreV3 from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container +from zarr.util import json_dumps @contextmanager @@ -1477,6 +1478,13 @@ def test_equal(self): store_b = N5Store(store_a.path) assert store_a == store_b + @pytest.mark.parametrize('zarr_meta_key', ['.zarray', '.zattrs', '.zgroup']) + def test_del_zarr_meta_key(self, zarr_meta_key): + store = self.create_store() + store[n5_attrs_key] = json_dumps({'foo': 'bar'}) + del store[zarr_meta_key] + assert n5_attrs_key not in store + def test_chunk_nesting(self): store = self.create_store() store['0.0'] = b'xxx' @@ -1488,6 +1496,8 @@ def test_chunk_nesting(self): assert b'yyy' == store['foo/10.20.30'] # N5 reverses axis order assert b'yyy' == store['foo/30/20/10'] + del store['foo/10.20.30'] + assert 'foo/30/20/10' not in store store['42'] = b'zzz' assert '42' in store assert b'zzz' == store['42'] @@ -1509,6 +1519,10 @@ def test_init_array(self): # N5Store always has a fill value of 0 assert meta['fill_value'] == 0 assert meta['dimension_separator'] == '.' + # Top-level groups AND arrays should have + # the n5 keyword in metadata + raw_n5_meta = json.loads(store[n5_attrs_key]) + assert raw_n5_meta.get('n5', None) == N5_FORMAT def test_init_array_path(self): path = 'foo/bar' @@ -1558,7 +1572,7 @@ def test_init_group_overwrite_chunk_store(self): def test_init_group(self): store = self.create_store() init_group(store) - + store['.zattrs'] = json_dumps({'foo': 'bar'}) # check metadata assert group_meta_key in store assert group_meta_key in store.listdir() @@ -1597,6 +1611,14 @@ def test_equal(self): # This is copied wholesale from the N5Store tests. The same test could # be run by making TestN5FSStore inherit from both TestFSStore and # TestN5Store, but a direct copy is arguably more explicit. 
+ + @pytest.mark.parametrize('zarr_meta_key', ['.zarray', '.zattrs', '.zgroup']) + def test_del_zarr_meta_key(self, zarr_meta_key): + store = self.create_store() + store[n5_attrs_key] = json_dumps({'foo': 'bar'}) + del store[zarr_meta_key] + assert n5_attrs_key not in store + def test_chunk_nesting(self): store = self.create_store() store['0.0'] = b'xxx' @@ -1608,6 +1630,8 @@ def test_chunk_nesting(self): assert b'yyy' == store['foo/10.20.30'] # N5 reverses axis order assert b'yyy' == store['foo/30/20/10'] + del store['foo/10.20.30'] + assert 'foo/30/20/10' not in store store['42'] = b'zzz' assert '42' in store assert b'zzz' == store['42'] @@ -1629,6 +1653,10 @@ def test_init_array(self): # N5Store always has a fill value of 0 assert meta['fill_value'] == 0 assert meta['dimension_separator'] == '.' + # Top-level groups AND arrays should have + # the n5 keyword in metadata + raw_n5_meta = json.loads(store[n5_attrs_key]) + assert raw_n5_meta.get('n5', None) == N5_FORMAT def test_init_array_path(self): path = 'foo/bar' @@ -1680,6 +1708,28 @@ def test_dimension_separator(self): with pytest.warns(UserWarning, match='dimension_separator'): self.create_store(dimension_separator='/') + def test_init_group(self): + store = self.create_store() + init_group(store) + store['.zattrs'] = json_dumps({'foo': 'bar'}) + # check metadata + assert group_meta_key in store + assert group_meta_key in store.listdir() + assert group_meta_key in store.listdir('') + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) + assert ZARR_FORMAT == meta['zarr_format'] + + def test_filters(self): + all_filters, all_errors = zip(*[ + (None, does_not_raise()), + ([], does_not_raise()), + ([AsType('f4', 'f8')], pytest.raises(ValueError)), + ]) + for filters, error in zip(all_filters, all_errors): + store = self.create_store() + with error: + init_array(store, shape=1000, chunks=100, filters=filters) + @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestNestedFSStore(TestNestedDirectoryStore): From 86e64642a033ee328a7c53dc6e206c920b146467 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Aug 2022 21:09:12 +0200 Subject: [PATCH 0159/1078] Bump flake8 from 4.0.1 to 5.0.4 (#1107) Bumps [flake8](https://github.com/pycqa/flake8) from 4.0.1 to 5.0.4. - [Release notes](https://github.com/pycqa/flake8/releases) - [Commits](https://github.com/pycqa/flake8/compare/4.0.1...5.0.4) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 9025cf985f..786510a12b 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -14,7 +14,7 @@ pymongo==4.2.0 # optional test requirements tox==3.25.1 coverage -flake8==4.0.1 +flake8==5.0.4 pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 From a56b02d6ebfab27985acd0d61a63d65e77095401 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Aug 2022 21:09:25 +0200 Subject: [PATCH 0160/1078] Bump numcodecs from 0.10.1 to 0.10.2 (#1103) Bumps [numcodecs](https://github.com/zarr-developers/numcodecs) from 0.10.1 to 0.10.2. 
- [Release notes](https://github.com/zarr-developers/numcodecs/releases) - [Changelog](https://github.com/zarr-developers/numcodecs/blob/main/docs/release.rst) - [Commits](https://github.com/zarr-developers/numcodecs/compare/v0.10.1...v0.10.2) --- updated-dependencies: - dependency-name: numcodecs dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 9168ed4cd9..6baa59cc2b 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,7 +1,7 @@ # library requirements asciitree==0.3.3 fasteners==0.17.3 -numcodecs==0.10.1 +numcodecs==0.10.2 msgpack-python==0.5.6 setuptools-scm==7.0.5 # test requirements From 8bd84fbe4ef2d948c4110b2d29482f0a3f478945 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Aug 2022 21:45:17 +0200 Subject: [PATCH 0161/1078] Bump fsspec from 2022.7.0 to 2022.7.1 (#1100) * Bump fsspec from 2022.7.0 to 2022.7.1 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.7.0 to 2022.7.1. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.7.0...2022.7.1) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump s3fs as well Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 786510a12b..542a988b28 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,7 +19,7 @@ pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 h5py==3.7.0 -fsspec==2022.7.0 -s3fs==2022.7.0 +fsspec==2022.7.1 +s3fs==2022.7.1 moto[server]>=1.3.14 -werkzeug<2.2.0 \ No newline at end of file +werkzeug<2.2.0 From 7bf7998056f99dd261291c1b01cec8d7fa000fbd Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 6 Aug 2022 02:58:48 +0200 Subject: [PATCH 0162/1078] Fix URLs (#1074) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix invalid extension documentation URL * Fix invalid numpydoc URL First I tried removing the spurious .txt file extension in HOWTO_DOCUMENT.rst.txt. Then the current contents of HOWTO_DOCUMENT.rst are: This document has been replaced, see https://numpydoc.readthedocs.io/en/latest/format.html#docstring-standard * The Python issue tracker has moved to GitHub * Fix PyMongo URL Prefer the link to the PyMongo documentation. * Fix URL to Blosc file Change the default Git branch: master ⤑ main * Fix tox URL It is not hosted by codespeak.net any more. 
* Fix URL: HTTP ⤑ HTTPS --- .github/CONTRIBUTING.md | 2 +- CODE_OF_CONDUCT.md | 6 +++--- build.cmd | 4 ++-- docs/Makefile | 2 +- docs/api/codecs.rst | 6 +++--- docs/contributing.rst | 2 +- docs/index.rst | 6 +++--- docs/release.rst | 2 +- docs/spec/v1.rst | 2 +- docs/spec/v2.rst | 2 +- docs/tutorial.rst | 12 ++++++------ tox.ini | 2 +- zarr/meta.py | 2 +- zarr/storage.py | 6 +++--- zarr/sync.py | 2 +- 15 files changed, 29 insertions(+), 29 deletions(-) diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 5f574f4f59..d130e038e5 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -1,5 +1,5 @@ Contributing ============ -Please see the [project documentation](http://zarr.readthedocs.io/en/stable/contributing.html) for information about contributing to Zarr. +Please see the [project documentation](https://zarr.readthedocs.io/en/stable/contributing.html) for information about contributing to Zarr. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 93175dd661..f07035c69f 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -40,7 +40,7 @@ Project maintainers who do not follow or enforce the Code of Conduct in good fai ## Attribution -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://www.contributor-covenant.org/version/1/4][version] -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ +[homepage]: https://www.contributor-covenant.org +[version]: https://www.contributor-covenant.org/version/1/4 diff --git a/build.cmd b/build.cmd index 053894d249..4e402d5e21 100644 --- a/build.cmd +++ b/build.cmd @@ -13,10 +13,10 @@ :: :: More details at: :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows -:: http://stackoverflow.com/a/13751649/163740 +:: https://stackoverflow.com/a/13751649/163740 :: :: Author: Olivier Grisel -:: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ +:: License: CC0 1.0 Universal: https://creativecommons.org/publicdomain/zero/1.0/ @ECHO OFF SET COMMAND_TO_RUN=%* diff --git a/docs/Makefile b/docs/Makefile index fe6a0bc4e9..f279d820c6 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -9,7 +9,7 @@ BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) - $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/) + $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from https://www.sphinx-doc.org/) endif # Internal variables. diff --git a/docs/api/codecs.rst b/docs/api/codecs.rst index 2e176bb7ea..b50f747d74 100644 --- a/docs/api/codecs.rst +++ b/docs/api/codecs.rst @@ -16,8 +16,8 @@ Codec classes can also be used as filters. See the tutorial section on :ref:`tut for more information. 
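The codecs page in this hunk goes on to note that custom codec classes can be defined and registered. As a minimal sketch of that pattern against the public Numcodecs API (the codec id and behaviour here are hypothetical, chosen only to illustrate the interface):

```python
from numcodecs.abc import Codec
from numcodecs.compat import ensure_bytes, ndarray_copy
from numcodecs.registry import register_codec


class ReverseBytes(Codec):
    """Toy codec that reverses the bytes of a buffer."""

    codec_id = "reverse-bytes"  # hypothetical id; must be unique in the registry

    def encode(self, buf):
        # coerce any buffer-like input to bytes, then reverse it
        return ensure_bytes(buf)[::-1]

    def decode(self, buf, out=None):
        decoded = ensure_bytes(buf)[::-1]
        # copy into `out` if provided, following the Numcodecs convention
        return ndarray_copy(decoded, out)


# make the codec discoverable, e.g. via numcodecs.get_codec({'id': 'reverse-bytes'})
register_codec(ReverseBytes)
```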
Please note that it is also relatively straightforward to define and register custom codec -classes. See the Numcodecs `codec API `_ and -`codec registry `_ documentation for more +classes. See the Numcodecs `codec API `_ and +`codec registry `_ documentation for more information. -.. _Numcodecs: http://numcodecs.readthedocs.io/ +.. _Numcodecs: https://numcodecs.readthedocs.io/ diff --git a/docs/contributing.rst b/docs/contributing.rst index f433f1bc03..2fbfd92a7a 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -206,7 +206,7 @@ Documentation Docstrings for user-facing classes and functions should follow the `numpydoc -`_ +`_ standard, including sections for Parameters and Examples. All examples should run and pass as doctests under Python 3.8. To run doctests, activate your development environment, install optional requirements, diff --git a/docs/index.rst b/docs/index.rst index 5343dea0ed..f4afc9fd85 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -93,10 +93,10 @@ documentation, code reviews, comments and/or ideas: * :user:`Prakhar Goel ` Zarr is inspired by `HDF5 `_, `h5py -`_ and `bcolz `_. +`_ and `bcolz `_. Development of Zarr is supported by the -`MRC Centre for Genomics and Global Health `_. +`MRC Centre for Genomics and Global Health `_. Indices and tables ------------------ @@ -105,4 +105,4 @@ Indices and tables * :ref:`modindex` * :ref:`search` -.. _NumCodecs: http://numcodecs.readthedocs.io/ +.. _NumCodecs: https://numcodecs.readthedocs.io/ diff --git a/docs/release.rst b/docs/release.rst index 20c24fcea1..3f1f3a8d30 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -1333,4 +1333,4 @@ See `v0.4.0 release notes on GitHub See `v0.3.0 release notes on GitHub `_. -.. _Numcodecs: http://numcodecs.readthedocs.io/ +.. _Numcodecs: https://numcodecs.readthedocs.io/ diff --git a/docs/spec/v1.rst b/docs/spec/v1.rst index 39fc6f30ce..d8598c88c4 100644 --- a/docs/spec/v1.rst +++ b/docs/spec/v1.rst @@ -133,7 +133,7 @@ of bytes comprising the compressed chunk data. No header is added to the compressed bytes or any other modification made. The internal structure of the compressed bytes will depend on which primary compressor was used. For example, the `Blosc compressor -`_ +`_ produces a sequence of bytes that begins with a 16-byte header followed by compressed data. diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index 4746f8f61e..f2c3d92b3e 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -206,7 +206,7 @@ through the primary compression library to obtain a new sequence of bytes comprising the compressed chunk data. No header is added to the compressed bytes or any other modification made. The internal structure of the compressed bytes will depend on which primary compressor was used. For example, the `Blosc -compressor `_ +compressor `_ produces a sequence of bytes that begins with a 16-byte header followed by compressed data. diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 37073fcb31..411ce0a163 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -275,7 +275,7 @@ Here is an example using a delta filter with the Blosc compressor:: Chunks initialized : 100/100 For more information about available filter codecs, see the `Numcodecs -`_ documentation. +`_ documentation. .. _tutorial_groups: @@ -293,7 +293,7 @@ To create a group, use the :func:`zarr.group` function:: Groups have a similar API to the Group class from `h5py -`_. For example, groups can contain other groups:: +`_. 
For example, groups can contain other groups:: >>> foo = root.create_group('foo') >>> bar = foo.create_group('bar') @@ -427,7 +427,7 @@ Groups also have the :func:`zarr.hierarchy.Group.tree` method, e.g.:: If you're using Zarr within a Jupyter notebook (requires `ipytree `_), calling ``tree()`` will generate an interactive tree representation, see the `repr_tree.ipynb notebook -`_ +`_ for more examples. .. _tutorial_attrs: @@ -735,7 +735,7 @@ database for storage (requires `bsddb3 Also added in Zarr version 2.2 is the :class:`zarr.storage.LMDBStore` class which enables the lightning memory-mapped database (LMDB) to be used for storing an array or -group (requires `lmdb `_ to be installed):: +group (requires `lmdb `_ to be installed):: >>> store = zarr.LMDBStore('data/example.lmdb') >>> root = zarr.group(store=store, overwrite=True) @@ -779,9 +779,9 @@ Distributed/cloud storage It is also possible to use distributed storage systems. The Dask project has implementations of the ``MutableMapping`` interface for Amazon S3 (`S3Map -`_), Hadoop +`_), Hadoop Distributed File System (`HDFSMap -`_) and +`_) and Google Cloud Storage (`GCSMap `_), which can be used with Zarr. diff --git a/tox.ini b/tox.ini index 4bd58796c9..12ad6bc8ad 100644 --- a/tox.ini +++ b/tox.ini @@ -1,4 +1,4 @@ -# Tox (http://tox.testrun.org/) is a tool for running tests +# Tox (https://tox.wiki/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. diff --git a/zarr/meta.py b/zarr/meta.py index c290e90163..77c55b9871 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -76,7 +76,7 @@ def get_extended_dtype_info(dtype) -> dict: ) elif dtype.str in _v3_datetime_types: return dict( - extension="https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/extensions/datetime-dtypes/v1.0.html", # noqa + extension="https://zarr-specs.readthedocs.io/en/latest/extensions/data-types/datetime/v1.0.html", # noqa type=dtype.str, fallback=None, ) diff --git a/zarr/storage.py b/zarr/storage.py index 440b41ea07..8c1536d786 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -2097,7 +2097,7 @@ def rmdir(self, path: str = "") -> None: class LMDBStore(Store): - """Storage class using LMDB. Requires the `lmdb `_ + """Storage class using LMDB. Requires the `lmdb `_ package to be installed. @@ -2503,7 +2503,7 @@ def __init__(self, path, dimension_separator=None, **kwargs): # allow threading if SQLite connections are thread-safe # # ref: https://www.sqlite.org/releaselog/3_3_1.html - # ref: https://bugs.python.org/issue27190 + # ref: https://github.com/python/cpython/issues/71377 check_same_thread = True if sqlite3.sqlite_version_info >= (3, 3, 1): check_same_thread = False @@ -2663,7 +2663,7 @@ class MongoDBStore(Store): .. note:: This is an experimental feature. - Requires the `pymongo `_ + Requires the `pymongo `_ package to be installed. Parameters diff --git a/zarr/sync.py b/zarr/sync.py index a0938b30d0..49684a51ee 100644 --- a/zarr/sync.py +++ b/zarr/sync.py @@ -26,7 +26,7 @@ def __setstate__(self, *args): class ProcessSynchronizer: """Provides synchronization using file locks via the - `fasteners `_ + `fasteners `_ package. 
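The ProcessSynchronizer documented in this final hunk is typically used like so (a usage sketch in the spirit of the zarr tutorial; the paths are illustrative):

```python
import zarr

# File-based locks let multiple processes write safely to the same array.
synchronizer = zarr.ProcessSynchronizer('data/example.sync')
z = zarr.open_array('data/example.zarr', mode='w', shape=(10000, 10000),
                    chunks=(1000, 1000), dtype='i4',
                    synchronizer=synchronizer)
z[:1000, :1000] = 1  # chunk-level locks are held around each chunk write
```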
Parameters

From e3b6c7f6989930d62d4e4cffe180d3e05bff783a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 6 Aug 2022 03:17:46 +0200
Subject: [PATCH 0163/1078] Bump azure-storage-blob from 12.13.0 to 12.13.1 (#1108)

Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.13.0 to 12.13.1.
- [Release notes](https://github.com/Azure/azure-sdk-for-python/releases)
- [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.13.0...azure-storage-blob_12.13.1)

---
updated-dependencies:
- dependency-name: azure-storage-blob
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements_dev_optional.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt
index 542a988b28..59ddad396b 100644
--- a/requirements_dev_optional.txt
+++ b/requirements_dev_optional.txt
@@ -6,7 +6,7 @@ ipytree==0.2.1
 # optional library requirements for services
 # don't let pyup change pinning for azure-storage-blob, need to pin to older
 # version to get compatibility with azure storage emulator on appveyor (FIXME)
-azure-storage-blob==12.13.0 # pyup: ignore
+azure-storage-blob==12.13.1 # pyup: ignore
 redis==4.3.4
 types-redis
 types-setuptools

From 8e7f4cc01764f50b54b4d90a70495bba16766c7d Mon Sep 17 00:00:00 2001
From: Josh Moore
Date: Fri, 5 Aug 2022 21:31:17 -0500
Subject: [PATCH 0164/1078] LRUStoreCache: reset current size on invalidation (#1081)

* LRUStoreCache: reset current size on invalidation

fixes #1076

* Add release log

* Update docs/release.rst
---
 docs/release.rst | 4 +++-
 zarr/storage.py  | 1 +
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/docs/release.rst b/docs/release.rst
index 3f1f3a8d30..3d06c6c37a 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -28,7 +28,6 @@ Maintenance
 * Update GitHub issue templates with `YAML` format.
   By :user:`Saransh Chopra ` :issue:`1079`.
-
 Bug fixes
 ~~~~~~~~~
@@ -37,6 +36,9 @@ Bug fixes
   that had previously been excluded from testing, and type annotations were added to the N5 codebase.
   By :user:`Davis Bennett `; :issue:`1092`.
+* Fix bug in LRUStoreCache in which the current size wasn't reset on invalidation.
+  By :user:`BGCMHou ` and :user:`Josh Moore ` :issue:`1076`, :issue:`1077`.
+
 .. _release_2.12.0:

 2.12.0
diff --git a/zarr/storage.py b/zarr/storage.py
index 8c1536d786..eb5106078b 100644
--- a/zarr/storage.py
+++ b/zarr/storage.py
@@ -2398,6 +2398,7 @@ def invalidate(self):
         with self._mutex:
             self._values_cache.clear()
             self._invalidate_keys()
+            self._current_size = 0

     def invalidate_values(self):
         """Clear the values cache."""

From f9fd8c799e07dd7d8bc5cf39ebf944d0f7c47d23 Mon Sep 17 00:00:00 2001
From: Josh Moore
Date: Fri, 5 Aug 2022 21:15:30 -0500
Subject: [PATCH 0165/1078] Prepare 2.13.0a1 changelog

---
 docs/release.rst | 49 ++++++++++++++++++++++++++++++++++++------------
 1 file changed, 37 insertions(+), 12 deletions(-)

diff --git a/docs/release.rst b/docs/release.rst
index 3d06c6c37a..2582c34e04 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -6,10 +6,34 @@ Release notes
    # to document your changes. On releases it will be
    # re-indented so that it does not show up in the notes.
-.. _unreleased:
+ .. _unreleased:
-Unreleased
-----------
+ Unreleased
+ ----------
+
+.. _release_2.13.0:
+
+2.13.0
+------
+.. warning::
+    Pre-release! Use `pip install --pre zarr` to evaluate this release.
+
+Major changes
+~~~~~~~~~~~~~
+
+* Remove support for Python 3.7 in concert with NumPy dependency.
+  By :user:`Davis Bennett `; :issue:`1067`.
+
+Bug fixes
+~~~~~~~~~
+
+* Fix bug in N5 storage that prevented arrays located in the root of the hierarchy from
+  bearing the `n5` keyword. Along with fixing this bug, new tests were added for N5 routines
+  that had previously been excluded from testing, and type annotations were added to the N5 codebase.
+  By :user:`Davis Bennett `; :issue:`1092`.
+
+* Fix bug in LRUStoreCache in which the current size wasn't reset on invalidation.
+  By :user:`BGCMHou ` and :user:`Josh Moore ` :issue:`1076`, :issue:`1077`.

 Documentation
 ~~~~~~~~~~~~~
@@ -19,25 +43,26 @@ Documentation
 Maintenance
 ~~~~~~~~~~~
+* Pin werkzeug to prevent test hangs.
+  By :user:`Davis Bennett `; :issue:`1098`.
+
 * Fix a few DeepSource.io alerts
   By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1080`.
+* Fix URLs.
+  By :user:`Dimitri Papadopoulos Orfanos `, :issue:`1074`.
+
 * Fix spelling.
   By :user:`Dimitri Papadopoulos Orfanos `, :issue:`1073`.

 * Update GitHub issue templates with `YAML` format.
   By :user:`Saransh Chopra ` :issue:`1079`.
-Bug fixes
-~~~~~~~~~
-
-* Fix bug in N5 storage that prevented arrays located in the root of the hierarchy from
-  bearing the `n5` keyword. Along with fixing this bug, new tests were added for N5 routines
-  that had previously been excluded from testing, and type annotations were added to the N5 codebase.
-  By :user:`Davis Bennett `; :issue:`1092`.
+* Remove option to return None from _ensure_store.
+  By :user:`Greggory Lee `; :issue:`1068`.
-* Fix bug in LRUStoreCache in which the current size wasn't reset on invalidation.
-  By :user:`BGCMHou ` and :user:`Josh Moore ` :issue:`1076`, :issue:`1077`.
+* Fix a typo of "integers".
+  By :user:`Richard Scott `; :issue:`1056`.

 .. _release_2.12.0:

From 44de0e4a017b8919bb5caba41eadcd67e18abdb9 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 15 Aug 2022 15:47:23 -0700
Subject: [PATCH 0166/1078] Bump numpy from 1.23.1 to 1.23.2 (#1109)

Bumps [numpy](https://github.com/numpy/numpy) from 1.23.1 to 1.23.2.
- [Release notes](https://github.com/numpy/numpy/releases)
- [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst)
- [Commits](https://github.com/numpy/numpy/compare/v1.23.1...v1.23.2)

---
updated-dependencies:
- dependency-name: numpy
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements_dev_numpy.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt
index c89e4f6896..12310a0eda 100644
--- a/requirements_dev_numpy.txt
+++ b/requirements_dev_numpy.txt
@@ -1,4 +1,4 @@
 # Break this out into a separate file to allow testing against
 # different versions of numpy. This file should pin to the latest
 # numpy version.
-numpy==1.23.1 +numpy==1.23.2 From f6698f676c5f599de6c996754a6e4a58e1be3503 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 11:38:25 -0700 Subject: [PATCH 0167/1078] Bump fsspec from 2022.7.1 to 2022.8.2 (#1117) * Bump fsspec from 2022.7.1 to 2022.8.2 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.7.1 to 2022.8.2. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.7.1...2022.8.2) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump s3fs as well Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 59ddad396b..cc370e018c 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,7 +19,7 @@ pytest-cov==3.0.0 pytest-doctestplus==0.12.0 pytest-timeout==2.1.0 h5py==3.7.0 -fsspec==2022.7.1 -s3fs==2022.7.1 +fsspec==2022.8.2 +s3fs==2022.8.2 moto[server]>=1.3.14 werkzeug<2.2.0 From 50edad853235099e0a1011d13281049396442eda Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 08:25:14 +0200 Subject: [PATCH 0168/1078] Bump pytest from 7.1.2 to 7.1.3 (#1122) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.2 to 7.1.3. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.1.2...7.1.3) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 6baa59cc2b..31febeee24 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.10.2 msgpack-python==0.5.6 setuptools-scm==7.0.5 # test requirements -pytest==7.1.2 +pytest==7.1.3 From 1c464cb6a59cfa5452e5b8b3b0af304626c0100a Mon Sep 17 00:00:00 2001 From: Altay Sansal Date: Tue, 6 Sep 2022 15:10:57 -0500 Subject: [PATCH 0169/1078] Add copy button to documentation (#1124) * add sphinx_copybutton and config * update release notes * add `sphinx-copybutton` to rtfd reqs --- docs/conf.py | 7 +++++++ docs/release.rst | 1 + requirements_rtfd.txt | 1 + 3 files changed, 9 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 2bbd3ffb6e..a8bfc467d9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -44,6 +44,7 @@ 'sphinx.ext.intersphinx', 'numpydoc', 'sphinx_issues', + "sphinx_copybutton", ] numpydoc_show_class_members = False @@ -312,3 +313,9 @@ def setup(app): # :ref:`comparison manual ` intersphinx_mapping = { 'python':('https://docs.python.org/', None), 'numpy': ('https://numpy.org/doc/stable/', None)} + + +# sphinx-copybutton configuration +copybutton_prompt_text = r">>> |\.\.\. 
|\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " +copybutton_line_continuation_character = "\\" +copybutton_prompt_is_regexp = True diff --git a/docs/release.rst b/docs/release.rst index 2582c34e04..dabf266caf 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -39,6 +39,7 @@ Documentation ~~~~~~~~~~~~~ * Typo fixes to close quotes. By :user:`Pavithra Eswaramoorthy ` +* Added copy button to documentation :user:`Altay Sansal ` Maintenance ~~~~~~~~~~~ diff --git a/requirements_rtfd.txt b/requirements_rtfd.txt index 2cdb12377d..8452f7af77 100644 --- a/requirements_rtfd.txt +++ b/requirements_rtfd.txt @@ -3,6 +3,7 @@ setuptools setuptools_scm sphinx sphinx-issues +sphinx-copybutton sphinx-rtd-theme numpydoc numpy!=1.21.0 From ea7bb113fe96afebccb8e8e8b8cbdde7a6b8cef9 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 8 Sep 2022 08:48:29 +0200 Subject: [PATCH 0170/1078] Simplify release docs (#1119) As discussed with @rabernat and @jakirkham, in order to encourage more folks with push rights to release zarr-python, this change makes it clear that a release is possible completely from within GitHub. The previous instructions will continue to work but have been removed to avoid confusion. --- docs/contributing.rst | 29 +++++++++++++++-------------- docs/release.rst | 2 +- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 2fbfd92a7a..64e017d17f 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -335,22 +335,23 @@ Release procedure .. note:: Most of the release process is now handled by github workflow which should - automatically push a release to PyPI if a tag is pushed. + automatically push a release to PyPI if a tag is pushed. -Checkout and update the main branch:: +Before releasing, make sure that all pull requests which will be +included in the release have been properly documented in +`docs/release.rst`. - $ git checkout main - $ git pull +To make a new release, go to +https://github.com/zarr-developers/zarr-python/releases and +click "Draft a new release". Choose a version number prefixed +with a `v` (e.g. `v0.0.0`) and set the description to: -Verify all tests pass on all supported Python versions, and docs build:: +``` +See release notes https://zarr.readthedocs.io/en/stable/release.html#release-0-0-0 +``` - $ tox +replacing the correct version numbers. For pre-release versions, +the URL should omit the pre-release suffix, e.g. "a1" or "rc1". -Tag the version (where "X.X.X" stands for the version number, e.g., "2.2.0"):: - - $ version=X.X.X - $ git tag -a v$version -m v$version - $ git push origin v$version - -Create a GitHub release in order to generate the Zenodo DOI and -review the automatically generated zarr-feedstock PR. +Be sure to review and merge the https://github.com/conda-forge/zarr-feedstock +pull request that will be automatically generated. diff --git a/docs/release.rst b/docs/release.rst index dabf266caf..5dbbfcdb03 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -16,7 +16,7 @@ Release notes 2.13.0 ------ .. warning:: - Pre-release! Use `pip install --pre zarr` to evaluate this release. + Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. 
Major changes ~~~~~~~~~~~~~ From f6e3d6108c36b65b3ac4637cab7ce5ac1650c977 Mon Sep 17 00:00:00 2001 From: Mattia Almansi Date: Thu, 8 Sep 2022 08:55:45 +0200 Subject: [PATCH 0171/1078] check type of attribute keys (#1066) * check type of attribute keys * introduce deprecation cycle * fix typo * stringify keys * cleanup * do not cover except --- zarr/attrs.py | 22 ++++++++++++++++++++++ zarr/tests/test_attrs.py | 15 +++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/zarr/attrs.py b/zarr/attrs.py index 39683d45d9..60dd7f1d79 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -1,3 +1,4 @@ +import warnings from collections.abc import MutableMapping from zarr._storage.store import Store, StoreV3 @@ -128,6 +129,27 @@ def put(self, d): self._write_op(self._put_nosync, dict(attributes=d)) def _put_nosync(self, d): + + d_to_check = d if self._version == 2 else d["attributes"] + if not all(isinstance(item, str) for item in d_to_check): + # TODO: Raise an error for non-string keys + # raise TypeError("attribute keys must be strings") + warnings.warn( + "only attribute keys of type 'string' will be allowed in the future", + DeprecationWarning, + stacklevel=2 + ) + + try: + d_to_check = {str(k): v for k, v in d_to_check.items()} + except TypeError as ex: # pragma: no cover + raise TypeError("attribute keys can not be stringified") from ex + + if self._version == 2: + d = d_to_check + else: + d["attributes"] = d_to_check + if self._version == 2: self.store[self.key] = json_dumps(d) if self.cache: diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index b8058d9d63..e4baf182b2 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -268,3 +268,18 @@ def test_caching_off(self, zarr_version): get_cnt = 10 if zarr_version == 2 else 12 assert get_cnt == store.counter['__getitem__', attrs_key] assert 3 == store.counter['__setitem__', attrs_key] + + def test_wrong_keys(self, zarr_version): + store = _init_store(zarr_version) + a = self.init_attributes(store, zarr_version=zarr_version) + + warning_msg = "only attribute keys of type 'string' will be allowed in the future" + + with pytest.warns(DeprecationWarning, match=warning_msg): + a[1] = "foo" + + with pytest.warns(DeprecationWarning, match=warning_msg): + a.put({1: "foo"}) + + with pytest.warns(DeprecationWarning, match=warning_msg): + a.update({1: "foo"}) From 43266eec01561186b1b32e2fe3b12247130a0f0d Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Thu, 8 Sep 2022 09:03:34 +0200 Subject: [PATCH 0172/1078] [REVIEW] Support of alternative array classes (#934) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Implement CuPyCPUCompressor and the meta_array argument This is base of Co-authored-by: John Kirkham * Adding meta_array to open_group() * CuPyCPUCompressor: clean up and doc * clean up * flake8 * mypy * Use KVStore when checking for in-memory data Checking against MutableMapping categories all BaseStores as in-memory stores. * group: the meta_array argument is now used for new arrays * flake8 * Use empty_like instead of empty Co-authored-by: jakirkham * More use of NumPy's *_like API Co-authored-by: jakirkham * Assume that array-like objects that doesn't have a `writeable` flag is writable. 
* _meta_array: use shape=() Co-authored-by: jakirkham * use ensure_ndarray_like() and ensure_contiguous_ndarray_like() * CI: use https://github.com/zarr-developers/numcodecs/pull/305 * Removed unused code Co-authored-by: Tobias Kölling * CI: changed minimal NumPy version to v1.20 * CI: use numpy>=1.21.* for `mypy` check * Revert "CI: use https://github.com/zarr-developers/numcodecs/pull/305" This reverts commit 9976f067a42c99b37e2c1a57ef7377b0b34f9318. * fix merge mistake * CI: remove manual numpy install * pickle: use kwargs * moved CuPyCPUCompressor to the test suite * doc-meta_array: changed to versionadded:: 2.13 * test_cupy: assert meta_array * test_cupy: test when CuPy isn't available * renamed: test_cupy.py -> test_meta_array.py * removed ensure_cls() * Added "# pragma: no cover" to the CuPyCPUCompressor test class Co-authored-by: John Kirkham Co-authored-by: Josh Moore Co-authored-by: Tobias Kölling Co-authored-by: Gregory Lee --- .github/workflows/python-package.yml | 2 +- zarr/core.py | 95 ++++++++--- zarr/creation.py | 12 +- zarr/hierarchy.py | 58 +++++-- zarr/storage.py | 11 +- zarr/tests/test_meta_array.py | 233 +++++++++++++++++++++++++++ zarr/util.py | 4 +- 7 files changed, 367 insertions(+), 48 deletions(-) create mode 100644 zarr/tests/test_meta_array.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index b42e036cfa..72f65a073b 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -59,7 +59,7 @@ jobs: conda activate zarr-env python -m pip install --upgrade pip python -m pip install -U pip setuptools wheel codecov line_profiler - python -m pip install -rrequirements_dev_minimal.txt numpy${{ matrix.numpy_version}} -rrequirements_dev_optional.txt pymongo redis + python -m pip install -rrequirements_dev_minimal.txt numpy${{matrix.numpy_version}} -rrequirements_dev_optional.txt pymongo redis python -m pip install . python -m pip freeze - name: Tests diff --git a/zarr/core.py b/zarr/core.py index bd61639ef6..e5b2045160 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -4,12 +4,11 @@ import math import operator import re -from collections.abc import MutableMapping from functools import reduce from typing import Any import numpy as np -from numcodecs.compat import ensure_bytes, ensure_ndarray +from numcodecs.compat import ensure_bytes from zarr._storage.store import _prefix_to_attrs_key, assert_zarr_v3_api_available from zarr.attrs import Attributes @@ -35,6 +34,7 @@ from zarr.storage import ( _get_hierarchy_metadata, _prefix_to_array_key, + KVStore, getsize, listdir, normalize_store_arg, @@ -51,6 +51,7 @@ normalize_shape, normalize_storage_path, PartialReadBuffer, + ensure_ndarray_like ) @@ -98,6 +99,12 @@ class Array: .. versionadded:: 2.11 + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. 
versionadded:: 2.13

 Attributes
 ----------
@@ -129,6 +136,7 @@ class Array:
     vindex
     oindex
     write_empty_chunks
+    meta_array

     Methods
     -------
@@ -163,6 +171,7 @@ def __init__(
         partial_decompress=False,
         write_empty_chunks=True,
         zarr_version=None,
+        meta_array=None,
     ):
         # N.B., expect at this point store is fully initialized with all
         # configuration metadata fully specified and normalized
@@ -191,8 +200,11 @@ def __init__(
         self._is_view = False
         self._partial_decompress = partial_decompress
         self._write_empty_chunks = write_empty_chunks
+        if meta_array is not None:
+            self._meta_array = np.empty_like(meta_array, shape=())
+        else:
+            self._meta_array = np.empty(())
         self._version = zarr_version
-
         if self._version == 3:
             self._data_key_prefix = 'data/root/' + self._key_prefix
             self._data_path = 'data/root/' + self._path
@@ -555,6 +567,13 @@ def write_empty_chunks(self) -> bool:
         """
         return self._write_empty_chunks

+    @property
+    def meta_array(self):
+        """An array-like instance to use for determining arrays to create and return
+        to users.
+        """
+        return self._meta_array
+
     def __eq__(self, other):
         return (
             isinstance(other, Array) and
@@ -929,7 +948,7 @@ def _get_basic_selection_zd(self, selection, out=None, fields=None):
         except KeyError:
             # chunk not initialized
-            chunk = np.zeros((), dtype=self._dtype)
+            chunk = np.zeros_like(self._meta_array, shape=(), dtype=self._dtype)
             if self._fill_value is not None:
                 chunk.fill(self._fill_value)
@@ -1233,7 +1252,8 @@ def _get_selection(self, indexer, out=None, fields=None):
         # setup output array
         if out is None:
-            out = np.empty(out_shape, dtype=out_dtype, order=self._order)
+            out = np.empty_like(self._meta_array, shape=out_shape,
+                                dtype=out_dtype, order=self._order)
         else:
             check_array_shape('out', out, out_shape)
@@ -1607,9 +1627,13 @@ def set_coordinate_selection(self, selection, value, fields=None):
         # setup indexer
         indexer = CoordinateIndexer(selection, self)

-        # handle value - need to flatten
+        # handle value - need an ndarray-like, flattened value
         if not is_scalar(value, self._dtype):
-            value = np.asanyarray(value)
+            try:
+                value = ensure_ndarray_like(value)
+            except TypeError:
+                # Handle types like `list` or `tuple`
+                value = np.array(value, like=self._meta_array)
         if hasattr(value, 'shape') and len(value.shape) > 1:
             value = value.reshape(-1)
@@ -1712,7 +1736,7 @@ def _set_basic_selection_zd(self, selection, value, fields=None):
         except KeyError:
             # chunk not initialized
-            chunk = np.zeros((), dtype=self._dtype)
+            chunk = np.zeros_like(self._meta_array, shape=(), dtype=self._dtype)
             if self._fill_value is not None:
                 chunk.fill(self._fill_value)
@@ -1772,7 +1796,7 @@ def _set_selection(self, indexer, value, fields=None):
             pass
         else:
             if not hasattr(value, 'shape'):
-                value = np.asanyarray(value)
+                value = np.asanyarray(value, like=self._meta_array)
             check_array_shape('value', value, sel_shape)

         # iterate over chunks in range
@@ -1840,8 +1864,11 @@ def _process_chunk(
                     self._dtype != object):
                 dest = out[out_selection]
+                # Assume that array-like objects that don't have a
+                # `writeable` flag are writable.
+ dest_is_writable = getattr(dest, "writeable", True) write_direct = ( - dest.flags.writeable and + dest_is_writable and ( (self._order == 'C' and dest.flags.c_contiguous) or (self._order == 'F' and dest.flags.f_contiguous) @@ -1858,7 +1885,7 @@ def _process_chunk( cdata = cdata.read_full() self._compressor.decode(cdata, dest) else: - chunk = ensure_ndarray(cdata).view(self._dtype) + chunk = ensure_ndarray_like(cdata).view(self._dtype) chunk = chunk.reshape(self._chunks, order=self._order) np.copyto(dest, chunk) return @@ -1868,7 +1895,7 @@ def _process_chunk( if partial_read_decode: cdata.prepare_chunk() # size of chunk - tmp = np.empty(self._chunks, dtype=self.dtype) + tmp = np.empty_like(self._meta_array, shape=self._chunks, dtype=self.dtype) index_selection = PartialChunkIterator(chunk_selection, self.chunks) for start, nitems, partial_out_selection in index_selection: expected_shape = [ @@ -1925,7 +1952,7 @@ def _chunk_getitem(self, chunk_coords, chunk_selection, out, out_selection, """ out_is_ndarray = True try: - out = ensure_ndarray(out) + out = ensure_ndarray_like(out) except TypeError: out_is_ndarray = False @@ -1960,7 +1987,7 @@ def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, """ out_is_ndarray = True try: - out = ensure_ndarray(out) + out = ensure_ndarray_like(out) except TypeError: # pragma: no cover out_is_ndarray = False @@ -2082,7 +2109,9 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): if is_scalar(value, self._dtype): # setup array filled with value - chunk = np.empty(self._chunks, dtype=self._dtype, order=self._order) + chunk = np.empty_like( + self._meta_array, shape=self._chunks, dtype=self._dtype, order=self._order + ) chunk.fill(value) else: @@ -2102,14 +2131,18 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): # chunk not initialized if self._fill_value is not None: - chunk = np.empty(self._chunks, dtype=self._dtype, order=self._order) + chunk = np.empty_like( + self._meta_array, shape=self._chunks, dtype=self._dtype, order=self._order + ) chunk.fill(self._fill_value) elif self._dtype == object: chunk = np.empty(self._chunks, dtype=self._dtype, order=self._order) else: # N.B., use zeros here so any region beyond the array has consistent # and compressible data - chunk = np.zeros(self._chunks, dtype=self._dtype, order=self._order) + chunk = np.zeros_like( + self._meta_array, shape=self._chunks, dtype=self._dtype, order=self._order + ) else: @@ -2159,7 +2192,7 @@ def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): chunk = f.decode(chunk) # view as numpy array with correct dtype - chunk = ensure_ndarray(chunk) + chunk = ensure_ndarray_like(chunk) # special case object dtype, because incorrect handling can lead to # segfaults and other bad things happening if self._dtype != object: @@ -2186,7 +2219,7 @@ def _encode_chunk(self, chunk): chunk = f.encode(chunk) # check object encoding - if ensure_ndarray(chunk).dtype == object: + if ensure_ndarray_like(chunk).dtype == object: raise RuntimeError('cannot write object array without object codec') # compress @@ -2196,7 +2229,7 @@ def _encode_chunk(self, chunk): cdata = chunk # ensure in-memory data is immutable and easy to compare - if isinstance(self.chunk_store, MutableMapping): + if isinstance(self.chunk_store, KVStore): cdata = ensure_bytes(cdata) return cdata @@ -2354,12 +2387,22 @@ def hexdigest(self, hashname="sha1"): return checksum def __getstate__(self): - return (self._store, self._path, 
self._read_only, self._chunk_store, - self._synchronizer, self._cache_metadata, self._attrs.cache, - self._partial_decompress, self._write_empty_chunks, self._version) + return { + "store": self._store, + "path": self._path, + "read_only": self._read_only, + "chunk_store": self._chunk_store, + "synchronizer": self._synchronizer, + "cache_metadata": self._cache_metadata, + "cache_attrs": self._attrs.cache, + "partial_decompress": self._partial_decompress, + "write_empty_chunks": self._write_empty_chunks, + "zarr_version": self._version, + "meta_array": self._meta_array, + } def __setstate__(self, state): - self.__init__(*state) + self.__init__(**state) def _synchronized_op(self, f, *args, **kwargs): @@ -2466,7 +2509,7 @@ def append(self, data, axis=0): Parameters ---------- - data : array_like + data : array-like Data to be appended. axis : int Axis along which to append. @@ -2502,7 +2545,7 @@ def _append_nosync(self, data, axis=0): # ensure data is array-like if not hasattr(data, 'shape'): - data = np.asanyarray(data) + data = np.asanyarray(data, like=self._meta_array) # ensure shapes are compatible for non-append dimensions self_shape_preserved = tuple(s for i, s in enumerate(self._shape) diff --git a/zarr/creation.py b/zarr/creation.py index e77f26b3e2..e1c815ed21 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -21,7 +21,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', overwrite=False, path=None, chunk_store=None, filters=None, cache_metadata=True, cache_attrs=True, read_only=False, object_codec=None, dimension_separator=None, write_empty_chunks=True, - *, zarr_version=None, **kwargs): + *, zarr_version=None, meta_array=None, **kwargs): """Create an array. Parameters @@ -89,6 +89,14 @@ def create(shape, chunks=True, dtype=None, compressor='default', inferred from ``store`` or ``chunk_store`` if they are provided, otherwise defaulting to 2. + .. versionadded:: 2.12 + + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. versionadded:: 2.13 + Returns ------- z : zarr.core.Array @@ -166,7 +174,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', # instantiate array z = Array(store, path=path, chunk_store=chunk_store, synchronizer=synchronizer, cache_metadata=cache_metadata, cache_attrs=cache_attrs, read_only=read_only, - write_empty_chunks=write_empty_chunks) + write_empty_chunks=write_empty_chunks, meta_array=meta_array) return z diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 80da3ddbc6..177d1eec71 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -64,6 +64,12 @@ class Group(MutableMapping): synchronizer : object, optional Array synchronizer. + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. 
versionadded:: 2.13 + Attributes ---------- store @@ -74,6 +80,7 @@ class Group(MutableMapping): synchronizer attrs info + meta_array Methods ------- @@ -114,7 +121,8 @@ class Group(MutableMapping): """ def __init__(self, store, path=None, read_only=False, chunk_store=None, - cache_attrs=True, synchronizer=None, zarr_version=None): + cache_attrs=True, synchronizer=None, zarr_version=None, *, + meta_array=None): store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) if zarr_version is None: zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) @@ -133,8 +141,11 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, self._key_prefix = '' self._read_only = read_only self._synchronizer = synchronizer + if meta_array is not None: + self._meta_array = np.empty_like(meta_array, shape=()) + else: + self._meta_array = np.empty(()) self._version = zarr_version - if self._version == 3: self._data_key_prefix = data_root + self._key_prefix self._data_path = data_root + self._path @@ -231,6 +242,13 @@ def info(self): """Return diagnostic information about the group.""" return self._info + @property + def meta_array(self): + """An array-like instance to use for determining arrays to create and return + to users. + """ + return self._meta_array + def __eq__(self, other): return ( isinstance(other, Group) and @@ -351,11 +369,19 @@ def typestr(o): return items def __getstate__(self): - return (self._store, self._path, self._read_only, self._chunk_store, - self.attrs.cache, self._synchronizer) + return { + "store": self._store, + "path": self._path, + "read_only": self._read_only, + "chunk_store": self._chunk_store, + "cache_attrs": self._attrs.cache, + "synchronizer": self._synchronizer, + "zarr_version": self._version, + "meta_array": self._meta_array, + } def __setstate__(self, state): - self.__init__(*state) + self.__init__(**state) def _item_path(self, item): absolute = isinstance(item, str) and item and item[0] == '/' @@ -411,18 +437,20 @@ def __getitem__(self, item): return Array(self._store, read_only=self._read_only, path=path, chunk_store=self._chunk_store, synchronizer=self._synchronizer, cache_attrs=self.attrs.cache, - zarr_version=self._version) + zarr_version=self._version, meta_array=self._meta_array) elif contains_group(self._store, path, explicit_only=True): return Group(self._store, read_only=self._read_only, path=path, chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, zarr_version=self._version) + synchronizer=self._synchronizer, zarr_version=self._version, + meta_array=self._meta_array) elif self._version == 3: implicit_group = meta_root + path + '/' # non-empty folder in the metadata path implies an implicit group if self._store.list_prefix(implicit_group): return Group(self._store, read_only=self._read_only, path=path, chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, zarr_version=self._version) + synchronizer=self._synchronizer, zarr_version=self._version, + meta_array=self._meta_array) else: raise KeyError(item) else: @@ -895,7 +923,7 @@ def create_dataset(self, name, **kwargs): ---------- name : string Array name. - data : array_like, optional + data : array-like, optional Initial data. shape : int or tuple of ints Array shape. 
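The hierarchy changes above thread ``meta_array`` through ``Group`` and its accessors; a short usage sketch (modelled on the tests added later in this patch; the CuPy variant assumes CuPy and a device-aware compressor such as the test suite's CuPyCPUCompressor):

```python
import numpy as np
import zarr

# meta_array tells zarr which array type to allocate for reads; the default
# (a zero-dimensional NumPy array) leaves behaviour unchanged.
g = zarr.open_group('data/example.zarr', mode='a', meta_array=np.empty(()))
a = g.ones('data', shape=(10, 11), dtype='i8')
assert isinstance(a[:], np.ndarray)

# With CuPy installed, reads come back as device arrays instead:
#   g = zarr.open_group(store, meta_array=cupy.empty(()))
```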
@@ -1006,7 +1034,8 @@ def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, cache_attrs = kwargs.get('cache_attrs', self.attrs.cache) a = Array(self._store, path=path, read_only=self._read_only, chunk_store=self._chunk_store, synchronizer=synchronizer, - cache_metadata=cache_metadata, cache_attrs=cache_attrs) + cache_metadata=cache_metadata, cache_attrs=cache_attrs, + meta_array=self._meta_array) shape = normalize_shape(shape) if shape != a.shape: raise TypeError('shape do not match existing array; expected {}, got {}' @@ -1266,7 +1295,7 @@ def group(store=None, overwrite=False, chunk_store=None, def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=None, - chunk_store=None, storage_options=None, *, zarr_version=None): + chunk_store=None, storage_options=None, *, zarr_version=None, meta_array=None): """Open a group using file-mode-like semantics. Parameters @@ -1291,6 +1320,11 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N storage_options : dict If using an fsspec URL to create the store, these will be passed to the backend implementation. Ignored otherwise. + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. versionadded:: 2.13 Returns ------- @@ -1368,4 +1402,4 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N return Group(store, read_only=read_only, cache_attrs=cache_attrs, synchronizer=synchronizer, path=path, chunk_store=chunk_store, - zarr_version=zarr_version) + zarr_version=zarr_version, meta_array=meta_array) diff --git a/zarr/storage.py b/zarr/storage.py index eb5106078b..4a1408ec01 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -39,7 +39,7 @@ from numcodecs.compat import ( ensure_bytes, ensure_text, - ensure_contiguous_ndarray + ensure_contiguous_ndarray_like ) from numcodecs.registry import codec_registry @@ -55,7 +55,8 @@ from zarr.util import (buffer_size, json_loads, nolock, normalize_chunks, normalize_dimension_separator, normalize_dtype, normalize_fill_value, normalize_order, - normalize_shape, normalize_storage_path, retry_call) + normalize_shape, normalize_storage_path, retry_call + ) from zarr._storage.absstore import ABSStore # noqa: F401 from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 @@ -1070,7 +1071,7 @@ def __setitem__(self, key, value): key = self._normalize_key(key) # coerce to flat, contiguous array (ideally without copying) - value = ensure_contiguous_ndarray(value) + value = ensure_contiguous_ndarray_like(value) # destination path for key file_path = os.path.join(self.path, key) @@ -1755,7 +1756,7 @@ def __getitem__(self, key): def __setitem__(self, key, value): if self.mode == 'r': raise ReadOnlyError() - value = ensure_contiguous_ndarray(value).view("u1") + value = ensure_contiguous_ndarray_like(value).view("u1") with self.mutex: # writestr(key, value) writes with default permissions from # zipfile (600) that are too restrictive, build ZipInfo for @@ -2601,7 +2602,7 @@ def update(self, *args, **kwargs): kv_list = [] for dct in args: for k, v in dct.items(): - v = ensure_contiguous_ndarray(v) + v = ensure_contiguous_ndarray_like(v) # Accumulate key-value pairs for storage kv_list.append((k, v)) diff --git a/zarr/tests/test_meta_array.py b/zarr/tests/test_meta_array.py new file mode 100644 index 0000000000..6172af3be9 --- /dev/null +++ b/zarr/tests/test_meta_array.py @@ -0,0 +1,233 @@ +from typing import Optional 
+import numpy as np
+import pytest
+
+from numcodecs.abc import Codec
+from numcodecs.compat import ensure_contiguous_ndarray_like
+from numcodecs.registry import get_codec, register_codec
+
+import zarr.codecs
+from zarr.core import Array
+from zarr.creation import array, empty, full, ones, zeros
+from zarr.hierarchy import open_group
+from zarr.storage import DirectoryStore, MemoryStore, Store, ZipStore
+
+
+class CuPyCPUCompressor(Codec):  # pragma: no cover
+    """CPU compressor for CuPy arrays
+
+    This compressor converts CuPy arrays to host memory before compressing
+    the arrays using `compressor`.
+
+    Parameters
+    ----------
+    compressor : numcodecs.abc.Codec
+        The codec to use for compression and decompression.
+    """
+
+    codec_id = "cupy_cpu_compressor"
+
+    def __init__(self, compressor: Codec = None):
+        self.compressor = compressor
+
+    def encode(self, buf):
+        import cupy
+
+        buf = cupy.asnumpy(ensure_contiguous_ndarray_like(buf))
+        if self.compressor:
+            buf = self.compressor.encode(buf)
+        return buf
+
+    def decode(self, chunk, out=None):
+        import cupy
+
+        if self.compressor:
+            cpu_out = None if out is None else cupy.asnumpy(out)
+            chunk = self.compressor.decode(chunk, cpu_out)
+
+        chunk = cupy.asarray(ensure_contiguous_ndarray_like(chunk))
+        if out is not None:
+            cupy.copyto(out, chunk.view(dtype=out.dtype), casting="no")
+            chunk = out
+        return chunk
+
+    def get_config(self):
+        cc_config = self.compressor.get_config() if self.compressor else None
+        return {
+            "id": self.codec_id,
+            "compressor_config": cc_config,
+        }
+
+    @classmethod
+    def from_config(cls, config):
+        cc_config = config.get("compressor_config", None)
+        compressor = get_codec(cc_config) if cc_config else None
+        return cls(compressor=compressor)
+
+
+register_codec(CuPyCPUCompressor)
+
+
+class MyArray(np.ndarray):
+    """Dummy array class to test the `meta_array` argument
+
+    Useful when CuPy isn't available.
+
+    This class also makes some of the functions from the numpy
+    module available.
+ """ + + testing = np.testing + + @classmethod + def arange(cls, size): + ret = cls(shape=(size,), dtype="int64") + ret[:] = range(size) + return ret + + @classmethod + def empty(cls, shape): + return cls(shape=shape) + + +def init_compressor(compressor) -> CuPyCPUCompressor: + if compressor: + compressor = getattr(zarr.codecs, compressor)() + return CuPyCPUCompressor(compressor) + + +def init_store(tmp_path, store_type) -> Optional[Store]: + if store_type is DirectoryStore: + return store_type(str(tmp_path / "store")) + if store_type is MemoryStore: + return MemoryStore() + return None + + +def ensure_module(module): + if isinstance(module, str): + return pytest.importorskip(module) + return module + + +param_module_and_compressor = [ + (MyArray, None), + ("cupy", init_compressor(None)), + ("cupy", init_compressor("Zlib")), + ("cupy", init_compressor("Blosc")), +] + + +@pytest.mark.parametrize("module, compressor", param_module_and_compressor) +@pytest.mark.parametrize("store_type", [None, DirectoryStore, MemoryStore, ZipStore]) +def test_array(tmp_path, module, compressor, store_type): + xp = ensure_module(module) + + store = init_store(tmp_path / "from_cupy_array", store_type) + a = xp.arange(100) + z = array(a, chunks=10, compressor=compressor, store=store, meta_array=xp.empty(())) + assert a.shape == z.shape + assert a.dtype == z.dtype + assert isinstance(a, type(z[:])) + assert isinstance(z.meta_array, type(xp.empty(()))) + xp.testing.assert_array_equal(a, z[:]) + + # with array-like + store = init_store(tmp_path / "from_list", store_type) + a = list(range(100)) + z = array(a, chunks=10, compressor=compressor, store=store, meta_array=xp.empty(())) + assert (100,) == z.shape + assert np.asarray(a).dtype == z.dtype + xp.testing.assert_array_equal(a, z[:]) + + # with another zarr array + store = init_store(tmp_path / "from_another_store", store_type) + z2 = array(z, compressor=compressor, store=store, meta_array=xp.empty(())) + assert z.shape == z2.shape + assert z.chunks == z2.chunks + assert z.dtype == z2.dtype + xp.testing.assert_array_equal(z[:], z2[:]) + + +@pytest.mark.parametrize("module, compressor", param_module_and_compressor) +def test_empty(module, compressor): + xp = ensure_module(module) + z = empty( + 100, + chunks=10, + compressor=compressor, + meta_array=xp.empty(()), + ) + assert (100,) == z.shape + assert (10,) == z.chunks + + +@pytest.mark.parametrize("module, compressor", param_module_and_compressor) +def test_zeros(module, compressor): + xp = ensure_module(module) + z = zeros( + 100, + chunks=10, + compressor=compressor, + meta_array=xp.empty(()), + ) + assert (100,) == z.shape + assert (10,) == z.chunks + xp.testing.assert_array_equal(np.zeros(100), z[:]) + + +@pytest.mark.parametrize("module, compressor", param_module_and_compressor) +def test_ones(module, compressor): + xp = ensure_module(module) + z = ones( + 100, + chunks=10, + compressor=compressor, + meta_array=xp.empty(()), + ) + assert (100,) == z.shape + assert (10,) == z.chunks + xp.testing.assert_array_equal(np.ones(100), z[:]) + + +@pytest.mark.parametrize("module, compressor", param_module_and_compressor) +def test_full(module, compressor): + xp = ensure_module(module) + z = full( + 100, + chunks=10, + fill_value=42, + dtype="i4", + compressor=compressor, + meta_array=xp.empty(()), + ) + assert (100,) == z.shape + assert (10,) == z.chunks + xp.testing.assert_array_equal(np.full(100, fill_value=42, dtype="i4"), z[:]) + + # nan + z = full( + 100, + chunks=10, + fill_value=np.nan, + dtype="f8", + 
+        compressor=compressor,
+        meta_array=xp.empty(()),
+    )
+    assert np.all(np.isnan(z[:]))
+
+
+@pytest.mark.parametrize("module, compressor", param_module_and_compressor)
+@pytest.mark.parametrize("store_type", [None, DirectoryStore, MemoryStore, ZipStore])
+def test_group(tmp_path, module, compressor, store_type):
+    xp = ensure_module(module)
+    store = init_store(tmp_path, store_type)
+    g = open_group(store, meta_array=xp.empty(()))
+    g.ones("data", shape=(10, 11), dtype=int, compressor=compressor)
+    a = g["data"]
+    assert a.shape == (10, 11)
+    assert a.dtype == int
+    assert isinstance(a, Array)
+    assert isinstance(a[:], type(xp.empty(())))
+    assert (a[:] == 1).all()
+    assert isinstance(g.meta_array, type(xp.empty(())))
diff --git a/zarr/util.py b/zarr/util.py
index cc3bd50356..c9136a63eb 100644
--- a/zarr/util.py
+++ b/zarr/util.py
@@ -10,7 +10,7 @@
 from asciitree import BoxStyle, LeftAligned
 from asciitree.traversal import Traversal
 from collections.abc import Iterable
-from numcodecs.compat import ensure_ndarray, ensure_text
+from numcodecs.compat import ensure_text, ensure_ndarray_like
 from numcodecs.registry import codec_registry
 from numcodecs.blosc import cbuffer_sizes, cbuffer_metainfo
@@ -352,7 +352,7 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str:
 def buffer_size(v) -> int:
-    return ensure_ndarray(v).nbytes
+    return ensure_ndarray_like(v).nbytes

 def info_text_report(items: Dict[Any, Any]) -> str:

From 2cfee9c0f0b2e0782f942ff0dd8681ecfb756ca0 Mon Sep 17 00:00:00 2001
From: Gregory Lee
Date: Thu, 8 Sep 2022 03:09:22 -0400
Subject: [PATCH 0173/1078] Zarr v3: support root path (#1085)

* support path='/' for zarr v3 to create a root array or group

v3 spec states
    path = '/' for arrays gives /meta/root.array.json
    path = '/' for groups gives /meta/root.group.json

In this implementation path = None or path = '' will also result in a
root array. Creation routines default to path=None, so the path argument
does not have to be specified manually.
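The key mapping this commit settles on can be summarised as follows (a sketch mirroring the `_prefix_to_array_key` change in the diff further below; `.json` is assumed as the default v3 metadata suffix):

```python
meta_root = 'meta/root/'
sfx = '.json'  # assumed default metadata suffix

def prefix_to_array_key(prefix: str) -> str:
    # mirrors the v3 branch of _prefix_to_array_key after this commit
    if prefix:
        return meta_root + prefix.rstrip('/') + '.array' + sfx
    return meta_root[:-1] + '.array' + sfx

assert prefix_to_array_key('') == 'meta/root.array.json'          # root array
assert prefix_to_array_key('foo/') == 'meta/root/foo.array.json'  # nested array
```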
* revert change to normalize_storage_path update additional tests * fix * update TestArrayV3 to inherit all tests from TestArray * remove test bypass * fix nbytes_stored for v3 array without path update test_nbytes_stored to handle both v3 and v2 cases properly * pep8 * fix incorrect default value for at_root in _init_creation_kwargs previous behavior corresponded to at_root=True by default * flake8 --- zarr/_storage/store.py | 10 +-- zarr/creation.py | 2 +- zarr/hierarchy.py | 6 -- zarr/storage.py | 10 ++- zarr/tests/test_convenience.py | 10 --- zarr/tests/test_core.py | 117 ++++++++++++++++++--------------- zarr/tests/test_creation.py | 83 +++++++++++++---------- zarr/tests/test_hierarchy.py | 41 +++++------- 8 files changed, 143 insertions(+), 136 deletions(-) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 6faf4a1250..9e265cf383 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -420,11 +420,11 @@ def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str def _prefix_to_array_key(store: StoreLike, prefix: str) -> str: if getattr(store, "_store_version", 2) == 3: + sfx = _get_metadata_suffix(store) # type: ignore if prefix: - sfx = _get_metadata_suffix(store) # type: ignore key = meta_root + prefix.rstrip("/") + ".array" + sfx else: - raise ValueError("prefix must be supplied to get a v3 array key") + key = meta_root[:-1] + '.array' + sfx else: key = prefix + array_meta_key return key @@ -432,11 +432,11 @@ def _prefix_to_array_key(store: StoreLike, prefix: str) -> str: def _prefix_to_group_key(store: StoreLike, prefix: str) -> str: if getattr(store, "_store_version", 2) == 3: + sfx = _get_metadata_suffix(store) # type: ignore if prefix: - sfx = _get_metadata_suffix(store) # type: ignore key = meta_root + prefix.rstrip('/') + ".group" + sfx else: - raise ValueError("prefix must be supplied to get a v3 group key") + key = meta_root[:-1] + '.group' + sfx else: key = prefix + group_meta_key return key @@ -449,7 +449,7 @@ def _prefix_to_attrs_key(store: StoreLike, prefix: str) -> str: if prefix: key = meta_root + prefix.rstrip('/') + ".array" + sfx else: - raise ValueError("prefix must be supplied to get a v3 array key") + key = meta_root[:-1] + '.array' + sfx else: key = prefix + attrs_key return key diff --git a/zarr/creation.py b/zarr/creation.py index e1c815ed21..3414a0158a 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -163,7 +163,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', dimension_separator = normalize_dimension_separator(dimension_separator) if zarr_version > 2 and path is None: - raise ValueError("path must be supplied to initialize a zarr v3 array") + path = '/' # initialize array metadata init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 177d1eec71..e1d390f497 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -1276,8 +1276,6 @@ def group(store=None, overwrite=False, chunk_store=None, if zarr_version != 2: assert_zarr_v3_api_available() - if zarr_version == 3 and path is None: - raise ValueError(f"path must be provided for a v{zarr_version} group") path = normalize_storage_path(path) if zarr_version == 2: @@ -1366,10 +1364,6 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N "zarr_version of store and chunk_store must match" ) - store_version = getattr(store, '_store_version', 2) - if store_version == 3 and path is None: - raise ValueError("path must be supplied 
to initialize a zarr v3 group") - path = normalize_storage_path(path) # ensure store is initialized diff --git a/zarr/storage.py b/zarr/storage.py index 4a1408ec01..f5459990ba 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -230,8 +230,14 @@ def _getsize(store: BaseStore, path: Path = None) -> int: size = 0 store_version = getattr(store, '_store_version', 2) if store_version == 3: - members = store.list_prefix(data_root + path) # type: ignore - members += store.list_prefix(meta_root + path) # type: ignore + if path == '': + # have to list the root folders without trailing / in this case + members = store.list_prefix(data_root.rstrip('/')) # type: ignore + members += store.list_prefix(meta_root.rstrip('/')) # type: ignore + else: + members = store.list_prefix(data_root + path) # type: ignore + members += store.list_prefix(meta_root + path) # type: ignore + # also include zarr.json? # members += ['zarr.json'] else: members = listdir(store, path) diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 59bb3aa7da..45ed9c3e11 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -73,11 +73,6 @@ def test_open_array(path_type, zarr_version): assert isinstance(z, Array) assert z.shape == (200,) - if zarr_version == 3: - # cannot open a v3 array without path - with pytest.raises(ValueError): - open(store, mode='w', shape=200, zarr_version=3) - # open array, read-only z = open(store, mode='r', **kwargs) assert isinstance(z, Array) @@ -108,11 +103,6 @@ def test_open_group(path_type, zarr_version): assert isinstance(g, Group) assert 'foo' not in g - if zarr_version == 3: - # cannot open a v3 group without path - with pytest.raises(ValueError): - open(store, mode='w', zarr_version=3) - # open group, read-only g = open(store, mode='r', **kwargs) assert isinstance(g, Group) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 7429cba8c7..ecfeb7a817 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -19,9 +19,6 @@ from zarr._storage.store import ( v3_api_available, - _prefix_to_array_key, - _prefix_to_attrs_key, - _prefix_to_group_key ) from zarr.core import Array from zarr.errors import ArrayNotFoundError, ContainsGroupError @@ -64,13 +61,15 @@ class TestArray(unittest.TestCase): version = 2 + root = '' + KVStoreClass = KVStore def test_array_init(self): # normal initialization - store = KVStore(dict()) + store = self.KVStoreClass(dict()) init_array(store, shape=100, chunks=10, dtype=" Date: Thu, 8 Sep 2022 03:10:30 -0400 Subject: [PATCH 0174/1078] Remove/relax erroneous "meta" path check (#1123) * assertion on path not containing meta should be meta/ this avoids disallowing key names such as 'metabolite', for instance * remove meta check altogether from the v2 branch as it is v3-specific * update docs/release.rst * Add test for #6853 Co-authored-by: Josh Moore --- docs/release.rst | 3 +++ fixture/meta/.zarray | 23 +++++++++++++++++++++++ fixture/meta/0.0 | Bin 0 -> 48 bytes zarr/hierarchy.py | 3 +-- zarr/tests/test_storage.py | 6 ++++++ 5 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 fixture/meta/.zarray create mode 100644 fixture/meta/0.0 diff --git a/docs/release.rst b/docs/release.rst index 5dbbfcdb03..15a03e5895 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -35,6 +35,9 @@ Bug fixes * Fix bug in LRUEStoreCache in which the current size wasn't reset on invalidation. By :user:`BGCMHou ` and :user:`Josh Moore ` :issue:`1076`, :issue:`1077`. 
+* Remove erroneous check that disallowed array keys starting with "meta". + By :user:`Gregory R. Lee `; :issue:`1105`. + Documentation ~~~~~~~~~~~~~ diff --git a/fixture/meta/.zarray b/fixture/meta/.zarray new file mode 100644 index 0000000000..f265bb0674 --- /dev/null +++ b/fixture/meta/.zarray @@ -0,0 +1,23 @@ +{ + "chunks": [ + 2, + 2 + ], + "compressor": { + "blocksize": 0, + "clevel": 5, + "cname": "lz4", + "id": "blosc", + "shuffle": 1 + }, + "dimension_separator": ".", + "dtype": " Date: Thu, 8 Sep 2022 10:11:37 +0200 Subject: [PATCH 0175/1078] Remove trailing spaces and empty lines (#1075) --- .github/CONTRIBUTING.md | 1 - .github/ISSUE_TEMPLATE/config.yml | 2 +- .github/workflows/python-package.yml | 2 -- .github/workflows/windows-testing.yml | 1 - LICENSE | 1 - bench/compress_normal.txt | 1 - docs/conf.py | 2 +- docs/contributing.rst | 4 ++-- docs/spec/v2.rst | 4 ++-- docs/talks/scipy2019/submission.rst | 2 +- 10 files changed, 7 insertions(+), 13 deletions(-) diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index d130e038e5..29281f5be9 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -2,4 +2,3 @@ Contributing ============ Please see the [project documentation](https://zarr.readthedocs.io/en/stable/contributing.html) for information about contributing to Zarr. - diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3ce1587389..9cb5ec9a78 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -6,6 +6,6 @@ contact_links: - name: ❓ Discuss something on gitter url: https://gitter.im/zarr-developers/community about: For questions like "How do I do X with Zarr?", you can move to our Gitter channel. - - name: ❓ Discuss something on GitHub Discussions + - name: ❓ Discuss something on GitHub Discussions url: https://github.com/zarr-developers/zarr-python/discussions about: For questions like "How do I do X with Zarr?", you can move to GitHub Discussions. diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 72f65a073b..c62127b280 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -89,5 +89,3 @@ jobs: conda activate zarr-env flake8 zarr mypy zarr - - diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 37eea5df7b..791506c7bd 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -58,4 +58,3 @@ jobs: - name: Conda list shell: pwsh run: conda list - diff --git a/LICENSE b/LICENSE index 5bb4df8cf8..22c4904c4a 100644 --- a/LICENSE +++ b/LICENSE @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/bench/compress_normal.txt b/bench/compress_normal.txt index 100d24bfdf..d527cf03d4 100644 --- a/bench/compress_normal.txt +++ b/bench/compress_normal.txt @@ -158,4 +158,3 @@ Line # Hits Time Per Hit % Time Line Contents 132 # handle errors 133 200 128 0.6 0.1 if ret <= 0: 134 raise RuntimeError('error during blosc decompression: %d' % ret) - diff --git a/docs/conf.py b/docs/conf.py index a8bfc467d9..733ac60801 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -311,7 +311,7 @@ def setup(app): # Example configuration for intersphinx: refer to the Python standard library. 
# use in refs e.g: # :ref:`comparison manual ` -intersphinx_mapping = { 'python':('https://docs.python.org/', None), +intersphinx_mapping = { 'python':('https://docs.python.org/', None), 'numpy': ('https://numpy.org/doc/stable/', None)} diff --git a/docs/contributing.rst b/docs/contributing.rst index 64e017d17f..f7b4831089 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -166,7 +166,7 @@ locally. To run the Azure Blob Service storage tests, run an Azure storage emulator (e.g., azurite) and set the environment variable ``ZARR_TEST_ABS=1``. If you're using Docker to run azurite, start the service with:: - docker run --rm -p 10000:10000 mcr.microsoft.com/azure-storage/azurite azurite-blob --loose --blobHost 0.0.0.0 + docker run --rm -p 10000:10000 mcr.microsoft.com/azure-storage/azurite azurite-blob --loose --blobHost 0.0.0.0 To run the Mongo DB storage tests, run a Mongo server locally and set the environment variable ``ZARR_TEST_MONGO=1``. @@ -332,7 +332,7 @@ compatibility in some way. Release procedure ~~~~~~~~~~~~~~~~~ -.. note:: +.. note:: Most of the release process is now handled by github workflow which should automatically push a release to PyPI if a tag is pushed. diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index f2c3d92b3e..6d11fd1acc 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -174,8 +174,8 @@ Structured data types may also be nested, e.g., the following JSON list defines a data type with two fields "foo" and "bar", where "bar" has two sub-fields "baz" and "qux":: - [["foo", "`_, Columbia University +* `Ryan Abernathey `_, Columbia University * `Stephan Balmer `_, Meteotest * `Ambrose Carr `_, Chan Zuckerberg Initiative * `Tim Crone `_, Columbia University From 7ae1de709cbc66916d8dd630f64f6f1c4fda608c Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 8 Sep 2022 11:17:52 +0200 Subject: [PATCH 0176/1078] 2.13.0a2 release notes (#1126) * 2.13.0a2 release notes * release note cleanup * Make left-panel links darker * Try rgb color * Darken toc text and lighten background * More light purple * Even MORE purple! --- docs/_static/custom.css | 14 +++++++++----- docs/release.rst | 41 +++++++++++++++++++++++++++++++++-------- 2 files changed, 42 insertions(+), 13 deletions(-) diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0c391c1c7e..a0e3929e87 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,19 +1,23 @@ +/* override text color */ +.wy-menu-vertical a { + color: #000000; +} /* Sidebar background color */ .wy-nav-side, div.wy-side-nav-search { - background-color: rgb(38, 34, 98, 0); /* full alpha */ + background-color: rgb(198, 197, 213, 0); /* full alpha */ } /* Sidebar link click color */ .wy-menu-vertical .toctree-l1 > a:active { - background-color: rgb(38, 34, 98); - color: rgb(252, 252, 252); + background-color: rgb(198, 197, 213); + color: rgb(0, 0, 0); } /* Link color is darker to make hovering more clear */ .wy-menu-vertical .toctree-l1 > a:hover { - background-color: rgb(25, 22, 65); - color: rgb(252, 252, 252); + background-color: rgb(198, 197, 213); + color: rgb(0, 0, 0); } .wy-menu-vertical li.current > a:hover, .wy-menu-vertical li.current > a:active { diff --git a/docs/release.rst b/docs/release.rst index 15a03e5895..b79341df52 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,34 +21,59 @@ Release notes Major changes ~~~~~~~~~~~~~ -* Remove support for Python 3.7 in concert with NumPy dependency. - By :user:`Davis Bennett `; :issue:`1067`. 
+* **Support of alternative array classes** by introducing a new argument,
+  meta_array, that specifies the type/class of the underlying array. The
+  meta_array argument can be any class instance that can be used as the like
+  argument in NumPy (see `NEP 35
+  `_),
+  enabling support for CuPy through, for example, the creation of a CuPy CPU
+  compressor.
+  By :user:`Mads R. B. Kristensen ` :issue:`934`.
+
+* **Remove support for Python 3.7** in concert with NumPy dependency.
+  By :user:`Davis Bennett ` :issue:`1067`.
+
+* **Zarr v3: add support for the default root path** rather than requiring
+  that all API users pass an explicit path.
+  By :user:`Gregory R. Lee ` :issue:`1085`, :issue:`1142`.
+

 Bug fixes
 ~~~~~~~~~

+* Remove/relax erroneous "meta" path check (**regression**).
+  By :user:`Gregory R. Lee ` :issue:`1123`.
+
+* Cast all attribute keys to strings (and issue deprecation warning).
+  By :user:`Mattia Almansi ` :issue:`1066`.
+
 * Fix bug in N5 storage that prevented arrays located in the root of the hierarchy from
   bearing the `n5` keyword. Along with fixing this bug, new tests were added for N5 routines
   that had previously been excluded from testing, and type annotations were added to the N5
   codebase.
-  By :user:`Davis Bennett `; :issue:`1092`.
+  By :user:`Davis Bennett ` :issue:`1092`.

 * Fix bug in LRUEStoreCache in which the current size wasn't reset on invalidation.
   By :user:`BGCMHou ` and :user:`Josh Moore ` :issue:`1076`, :issue:`1077`.

 * Remove erroneous check that disallowed array keys starting with "meta".
-  By :user:`Gregory R. Lee `; :issue:`1105`.
+  By :user:`Gregory R. Lee ` :issue:`1105`.

 Documentation
 ~~~~~~~~~~~~~

 * Typo fixes to close quotes. By :user:`Pavithra Eswaramoorthy `
-
-* Added copy button to documentation :user:`Altay Sansal `
+
+* Added copy button to documentation.
+  By :user:`Altay Sansal ` :issue:`1124`.

 Maintenance
 ~~~~~~~~~~~

+* Simplify release docs.
+  By :user:`Josh Moore ` :issue:`1119`.
+
 * Pin werkzeug to prevent test hangs.
-  By :user:`Davis Bennett `; :issue:`1098`.
+  By :user:`Davis Bennett ` :issue:`1098`.

 * Fix a few DeepSource.io alerts
   By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1080`.
@@ -63,10 +88,10 @@ Maintenance
   By :user:`Saransh Chopra ` :issue:`1079`.

 * Remove option to return None from _ensure_store.
-  By :user:`Greggory Lee `; :issue:`1068`.
+  By :user:`Gregory Lee ` :issue:`1068`.

 * Fix a typo of "integers".
-  By :user:`Richard Scott `; :issue:`1056`.
+  By :user:`Richard Scott ` :issue:`1056`.

 .. _release_2.12.0:

 2.12.0
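To make the headline ``meta_array`` change above concrete, a minimal
sketch (not taken from the codebase itself): it uses NumPy so it runs
without a GPU, whereas the intended use would pass e.g. ``cupy.empty(())``
to get CuPy arrays back::

    import numpy as np
    import zarr

    # meta_array tells zarr which array class to allocate for output buffers
    z = zarr.ones((10, 11), chunks=(5, 11), meta_array=np.empty(()))
    assert isinstance(z[:], np.ndarray)  # with cupy.empty(()) this would be cupy.ndarray
    assert (z[:] == 1).all()

From b677db3409d5d8f7b4a5f2e28df5d33e66c5098a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 9 Sep 2022 17:03:43 +0200
Subject: [PATCH 0177/1078] Bump tox from 3.25.1 to 3.26.0 (#1128)

Bumps [tox](https://github.com/tox-dev/tox) from 3.25.1 to 3.26.0.
- [Release notes](https://github.com/tox-dev/tox/releases)
- [Changelog](https://github.com/tox-dev/tox/blob/master/docs/changelog.rst)
- [Commits](https://github.com/tox-dev/tox/compare/3.25.1...3.26.0)

---
updated-dependencies:
- dependency-name: tox
  dependency-type: direct:development
  update-type: version-update:semver-minor
...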
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index cc370e018c..9105c14e19 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -12,7 +12,7 @@ types-redis types-setuptools pymongo==4.2.0 # optional test requirements -tox==3.25.1 +tox==3.26.0 coverage flake8==5.0.4 pytest-cov==3.0.0 From fe540892563667c1fa6f07b932ff9956a6344bca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 08:47:56 +0200 Subject: [PATCH 0178/1078] Bump numpy from 1.23.2 to 1.23.3 (#1130) Bumps [numpy](https://github.com/numpy/numpy) from 1.23.2 to 1.23.3. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.23.2...v1.23.3) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 12310a0eda..11fb3ff63c 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.23.2 +numpy==1.23.3 From 4d68015e57e6cdbd9550a6447a142f77abacc85f Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 14 Sep 2022 08:24:12 +0200 Subject: [PATCH 0179/1078] Consistent Python versions (#1136) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 96c51c08ff..c848145138 100644 --- a/setup.py +++ b/setup.py @@ -47,9 +47,9 @@ 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: Unix', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', ], maintainer='Alistair Miles', maintainer_email='alimanfoo@googlemail.com', From 77c1f723199cfa0f2871f6fd30bbe045a948456f Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Wed, 14 Sep 2022 08:38:08 +0200 Subject: [PATCH 0180/1078] Fix numcodecs dependency (#1135) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c848145138..79ff649dac 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ 'asciitree', 'numpy>=1.7', 'fasteners', - 'numcodecs>=0.6.4', + 'numcodecs>=0.10.0', ] setup( From 9f4e3b18efa2f6fe9496ccf54cad3993cc22fd4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Sep 2022 10:18:38 +0200 Subject: [PATCH 0181/1078] Bump fasteners from 0.17.3 to 0.18 (#1137) Bumps [fasteners](https://github.com/harlowja/fasteners) from 0.17.3 to 0.18. 
- [Release notes](https://github.com/harlowja/fasteners/releases)
- [Changelog](https://github.com/harlowja/fasteners/blob/main/CHANGELOG.md)
- [Commits](https://github.com/harlowja/fasteners/compare/0.17.3...0.18)

---
updated-dependencies:
- dependency-name: fasteners
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements_dev_minimal.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt
index 31febeee24..e3809cebe9 100644
--- a/requirements_dev_minimal.txt
+++ b/requirements_dev_minimal.txt
@@ -1,6 +1,6 @@
 # library requirements
 asciitree==0.3.3
-fasteners==0.17.3
+fasteners==0.18
 numcodecs==0.10.2
 msgpack-python==0.5.6
 setuptools-scm==7.0.5
From 505810c44108328ec5732ad8460057f016994fd3 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 16 Sep 2022 21:41:52 +0200
Subject: [PATCH 0182/1078] Use generator instead of list comprehension (#1139)

Using a container in place of a generator for calls that can accept both
slows down performance. Consider using generators for all function
calls which accept both containers and generators.
---
 zarr/_storage/v3.py | 2 +-
 zarr/indexing.py    | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py
index 540b62ef7e..515e6f5aaa 100644
--- a/zarr/_storage/v3.py
+++ b/zarr/_storage/v3.py
@@ -153,7 +153,7 @@ def setitems(self, values):
         # initialize the /data/root/... folder corresponding to the array!
         # Note: zarr.tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails
         # without this explicit creation of directories
-        subdirectories = set([os.path.dirname(v) for v in values.keys()])
+        subdirectories = set(os.path.dirname(v) for v in values.keys())
         for subdirectory in subdirectories:
             data_dir = os.path.join(self.path, subdirectory)
             if not self.fs.exists(data_dir):
diff --git a/zarr/indexing.py b/zarr/indexing.py
index 1941766d85..74b53af049 100644
--- a/zarr/indexing.py
+++ b/zarr/indexing.py
@@ -559,7 +559,7 @@ def ix_(selection, shape):
 def oindex(a, selection):
     """Implementation of orthogonal indexing with slices and ints."""
     selection = replace_ellipsis(selection, a.shape)
-    drop_axes = tuple([i for i, s in enumerate(selection) if is_integer(s)])
+    drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s))
     selection = ix_(selection, a.shape)
     result = a[selection]
     if drop_axes:
@@ -569,7 +569,7 @@ def oindex_set(a, selection, value):
     selection = replace_ellipsis(selection, a.shape)
-    drop_axes = tuple([i for i, s in enumerate(selection) if is_integer(s)])
+    drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s))
     selection = ix_(selection, a.shape)
     if not np.isscalar(value) and drop_axes:
         value = np.asanyarray(value)
@@ -623,8 +623,8 @@ def __init__(self, selection, array):
                                 if not isinstance(s, IntDimIndexer))
         self.is_advanced = not is_basic_selection(selection)
         if self.is_advanced:
-            self.drop_axes = tuple([i for i, dim_indexer in enumerate(self.dim_indexers)
-                                    if isinstance(dim_indexer, IntDimIndexer)])
+            self.drop_axes = tuple(i for i, dim_indexer in enumerate(self.dim_indexers)
+                                   if isinstance(dim_indexer, IntDimIndexer))
         else:
             self.drop_axes = None
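To make the rationale concrete, a small self-contained comparison
(illustrative only, not part of the patch): passing a generator expression
straight to ``set()`` avoids materialising a throwaway list first::

    import os

    # hypothetical store keys, shaped like the v3 data paths above
    values = {"data/root/a/c0/0": b"x", "data/root/a/c1/0": b"y"}

    # before: a temporary list is built, then consumed by set()
    subdirectories = set([os.path.dirname(v) for v in values.keys()])

    # after: the generator feeds set() directly, with no intermediate list
    subdirectories = set(os.path.dirname(v) for v in values.keys())

    assert subdirectories == {"data/root/a/c0", "data/root/a/c1"}

From 718ee1338319a32056a2395e54517644d727ad16 Mon Sep 17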
00:00:00 2001 From: Gregory Lee Date: Thu, 22 Sep 2022 04:07:41 -0400 Subject: [PATCH 0183/1078] empty path is fine after update to allow path=None for zarr v3 (#1142) path=None support was previously added in gh-1085. This change should have been made at that time. --- zarr/convenience.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/zarr/convenience.py b/zarr/convenience.py index be8b609a46..745e1369f5 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -1295,10 +1295,6 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** # default is to store within 'consolidated' group on v3 if not metadata_key.startswith('meta/root/'): metadata_key = 'meta/root/consolidated/' + metadata_key - if not path: - raise ValueError( - "path must be provided to open a Zarr 3.x consolidated store" - ) # setup metadata store meta_store = ConsolidatedStoreClass(store, metadata_key=metadata_key) From 363282097b146ff6575b0e8353a407b7b7a187cb Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 22 Sep 2022 11:37:18 +0200 Subject: [PATCH 0184/1078] Create codeql-analysis.yml (#1127) * Create codeql-analysis.yml see: - https://github.blog/2022-08-15-the-next-step-for-lgtm-com-github-code-scanning/ - https://github.com/zarr-developers/zarr-python/pull/909#issuecomment-1240363698 * Re-implement tempfile.mktemp using NamedTemporaryFile Adds zarr.tests.util.mktemp which can be used from all tests. The NamedTemporaryFile is immediately closed and only the path returned. --- .github/workflows/codeql-analysis.yml | 72 +++++++++++++++++++++++++++ zarr/tests/test_core.py | 4 +- zarr/tests/test_creation.py | 6 +-- zarr/tests/test_hierarchy.py | 22 ++++---- zarr/tests/test_storage.py | 20 ++++---- zarr/tests/test_storage_v3.py | 18 +++---- zarr/tests/util.py | 7 +++ 7 files changed, 114 insertions(+), 35 deletions(-) create mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000000..bebe1ee205 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,72 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "main" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "main" ] + schedule: + - cron: '29 0 * * 1' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. 
+ # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index ecfeb7a817..e32026e662 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -5,7 +5,7 @@ import shutil import unittest from itertools import zip_longest -from tempfile import mkdtemp, mktemp +from tempfile import mkdtemp import numpy as np import pytest @@ -53,7 +53,7 @@ StoreV3, ) from zarr.util import buffer_size -from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec +from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec, mktemp # noinspection PyMethodMayBeStatic diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index f5eede8b15..0f12fc5613 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -1,7 +1,6 @@ import atexit import os.path import shutil -import tempfile import warnings import numpy as np @@ -20,6 +19,7 @@ from zarr._storage.store import v3_api_available from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer +from zarr.tests.util import mktemp _VERSIONS = ((None, 2, 3) if v3_api_available else (None, 2)) _VERSIONS2 = ((2, 3) if v3_api_available else (2, )) @@ -574,7 +574,7 @@ def test_open_like(zarr_version, at_root): expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version # zarr array - path = tempfile.mktemp() + path = mktemp() atexit.register(shutil.rmtree, path) z = full(100, chunks=10, dtype='f4', compressor=Zlib(5), fill_value=42, order='F', **kwargs) @@ -588,7 +588,7 @@ def test_open_like(zarr_version, at_root): assert (z._store._store_version == z2._store._store_version == expected_zarr_version) # numpy array - path = tempfile.mktemp() + path = mktemp() atexit.register(shutil.rmtree, path) a = np.empty(100, dtype='f4') z3 = open_like(a, path, chunks=10, zarr_version=zarr_version) diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index ddcbe6c823..a2917acb44 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -32,7 +32,7 @@ FSStoreV3, ZipStoreV3, DBMStoreV3, LMDBStoreV3, SQLiteStoreV3, LRUStoreCacheV3) from zarr.util import InfoReporter, buffer_size -from zarr.tests.util import skip_test_env_var, have_fsspec, abs_container +from zarr.tests.util import 
skip_test_env_var, have_fsspec, abs_container, mktemp _VERSIONS = ((2, 3) if v3_api_available else (2, )) @@ -1331,7 +1331,7 @@ class TestGroupWithZipStore(TestGroup): @staticmethod def create_store(): - path = tempfile.mktemp(suffix='.zip') + path = mktemp(suffix='.zip') atexit.register(os.remove, path) store = ZipStore(path) return store, None @@ -1359,7 +1359,7 @@ class TestGroupV3WithZipStore(TestGroupWithZipStore, TestGroupV3): @staticmethod def create_store(): - path = tempfile.mktemp(suffix='.zip') + path = mktemp(suffix='.zip') atexit.register(os.remove, path) store = ZipStoreV3(path) return store, None @@ -1369,7 +1369,7 @@ class TestGroupWithDBMStore(TestGroup): @staticmethod def create_store(): - path = tempfile.mktemp(suffix='.anydbm') + path = mktemp(suffix='.anydbm') atexit.register(atexit_rmglob, path + '*') store = DBMStore(path, flag='n') return store, None @@ -1380,7 +1380,7 @@ class TestGroupV3WithDBMStore(TestGroupWithDBMStore, TestGroupV3): @staticmethod def create_store(): - path = tempfile.mktemp(suffix='.anydbm') + path = mktemp(suffix='.anydbm') atexit.register(atexit_rmglob, path + '*') store = DBMStoreV3(path, flag='n') return store, None @@ -1391,7 +1391,7 @@ class TestGroupWithDBMStoreBerkeleyDB(TestGroup): @staticmethod def create_store(): bsddb3 = pytest.importorskip("bsddb3") - path = tempfile.mktemp(suffix='.dbm') + path = mktemp(suffix='.dbm') atexit.register(os.remove, path) store = DBMStore(path, flag='n', open=bsddb3.btopen) return store, None @@ -1403,7 +1403,7 @@ class TestGroupV3WithDBMStoreBerkeleyDB(TestGroupWithDBMStoreBerkeleyDB, TestGro @staticmethod def create_store(): bsddb3 = pytest.importorskip("bsddb3") - path = tempfile.mktemp(suffix='.dbm') + path = mktemp(suffix='.dbm') atexit.register(os.remove, path) store = DBMStoreV3(path, flag='n', open=bsddb3.btopen) return store, None @@ -1414,7 +1414,7 @@ class TestGroupWithLMDBStore(TestGroup): @staticmethod def create_store(): pytest.importorskip("lmdb") - path = tempfile.mktemp(suffix='.lmdb') + path = mktemp(suffix='.lmdb') atexit.register(atexit_rmtree, path) store = LMDBStore(path) return store, None @@ -1426,7 +1426,7 @@ class TestGroupV3WithLMDBStore(TestGroupWithLMDBStore, TestGroupV3): @staticmethod def create_store(): pytest.importorskip("lmdb") - path = tempfile.mktemp(suffix='.lmdb') + path = mktemp(suffix='.lmdb') atexit.register(atexit_rmtree, path) store = LMDBStoreV3(path) return store, None @@ -1436,7 +1436,7 @@ class TestGroupWithSQLiteStore(TestGroup): def create_store(self): pytest.importorskip("sqlite3") - path = tempfile.mktemp(suffix='.db') + path = mktemp(suffix='.db') atexit.register(atexit_rmtree, path) store = SQLiteStore(path) return store, None @@ -1447,7 +1447,7 @@ class TestGroupV3WithSQLiteStore(TestGroupWithSQLiteStore, TestGroupV3): def create_store(self): pytest.importorskip("sqlite3") - path = tempfile.mktemp(suffix='.db') + path = mktemp(suffix='.db') atexit.register(atexit_rmtree, path) store = SQLiteStoreV3(path) return store, None diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 056dd8b637..d0f518dd05 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -36,7 +36,7 @@ meta_root, normalize_store_arg) from zarr.storage import FSStore, rename, listdir from zarr._storage.v3 import KVStoreV3 -from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container +from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp from zarr.util import json_dumps @@ 
-1772,7 +1772,7 @@ class TestZipStore(StoreTests): ZipStoreClass = ZipStore def create_store(self, **kwargs): - path = tempfile.mktemp(suffix='.zip') + path = mktemp(suffix='.zip') atexit.register(os.remove, path) store = ZipStore(path, mode='w', **kwargs) return store @@ -1853,7 +1853,7 @@ def test_store_and_retrieve_ndarray(self): class TestDBMStore(StoreTests): def create_store(self, dimension_separator=None): - path = tempfile.mktemp(suffix='.anydbm') + path = mktemp(suffix='.anydbm') atexit.register(atexit_rmglob, path + '*') # create store using default dbm implementation store = DBMStore(path, flag='n', dimension_separator=dimension_separator) @@ -1869,7 +1869,7 @@ def test_context_manager(self): class TestDBMStoreDumb(TestDBMStore): def create_store(self, **kwargs): - path = tempfile.mktemp(suffix='.dumbdbm') + path = mktemp(suffix='.dumbdbm') atexit.register(atexit_rmglob, path + '*') import dbm.dumb as dumbdbm @@ -1881,7 +1881,7 @@ class TestDBMStoreGnu(TestDBMStore): def create_store(self, **kwargs): gdbm = pytest.importorskip("dbm.gnu") - path = tempfile.mktemp(suffix=".gdbm") # pragma: no cover + path = mktemp(suffix=".gdbm") # pragma: no cover atexit.register(os.remove, path) # pragma: no cover store = DBMStore( path, flag="n", open=gdbm.open, write_lock=False, **kwargs @@ -1893,7 +1893,7 @@ class TestDBMStoreNDBM(TestDBMStore): def create_store(self, **kwargs): ndbm = pytest.importorskip("dbm.ndbm") - path = tempfile.mktemp(suffix=".ndbm") # pragma: no cover + path = mktemp(suffix=".ndbm") # pragma: no cover atexit.register(atexit_rmglob, path + "*") # pragma: no cover store = DBMStore(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover return store # pragma: no cover @@ -1903,7 +1903,7 @@ class TestDBMStoreBerkeleyDB(TestDBMStore): def create_store(self, **kwargs): bsddb3 = pytest.importorskip("bsddb3") - path = tempfile.mktemp(suffix='.dbm') + path = mktemp(suffix='.dbm') atexit.register(os.remove, path) store = DBMStore(path, flag='n', open=bsddb3.btopen, write_lock=False, **kwargs) return store @@ -1913,7 +1913,7 @@ class TestLMDBStore(StoreTests): def create_store(self, **kwargs): pytest.importorskip("lmdb") - path = tempfile.mktemp(suffix='.lmdb') + path = mktemp(suffix='.lmdb') atexit.register(atexit_rmtree, path) buffers = True store = LMDBStore(path, buffers=buffers, **kwargs) @@ -1930,13 +1930,13 @@ class TestSQLiteStore(StoreTests): def create_store(self, **kwargs): pytest.importorskip("sqlite3") - path = tempfile.mktemp(suffix='.db') + path = mktemp(suffix='.db') atexit.register(atexit_rmtree, path) store = SQLiteStore(path, **kwargs) return store def test_underscore_in_name(self): - path = tempfile.mktemp(suffix='.db') + path = mktemp(suffix='.db') atexit.register(atexit_rmtree, path) store = SQLiteStore(path) store['a'] = b'aaa' diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index a33f274621..13b5011676 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -18,7 +18,7 @@ LMDBStoreV3, LRUStoreCacheV3, MemoryStoreV3, MongoDBStoreV3, RedisStoreV3, SQLiteStoreV3, StoreV3, ZipStoreV3) -from zarr.tests.util import CountingDictV3, have_fsspec, skip_test_env_var +from zarr.tests.util import CountingDictV3, have_fsspec, skip_test_env_var, mktemp # pytest will fail to run if the following fixtures aren't imported here from .test_storage import StoreTests as _StoreTests @@ -330,7 +330,7 @@ class TestZipStoreV3(_TestZipStore, StoreV3Tests): ZipStoreClass = ZipStoreV3 def create_store(self, **kwargs): - path = 
tempfile.mktemp(suffix='.zip') + path = mktemp(suffix='.zip') atexit.register(os.remove, path) store = ZipStoreV3(path, mode='w', **kwargs) return store @@ -339,7 +339,7 @@ def create_store(self, **kwargs): class TestDBMStoreV3(_TestDBMStore, StoreV3Tests): def create_store(self, dimension_separator=None): - path = tempfile.mktemp(suffix='.anydbm') + path = mktemp(suffix='.anydbm') atexit.register(atexit_rmglob, path + '*') # create store using default dbm implementation store = DBMStoreV3(path, flag='n', dimension_separator=dimension_separator) @@ -349,7 +349,7 @@ def create_store(self, dimension_separator=None): class TestDBMStoreV3Dumb(_TestDBMStoreDumb, StoreV3Tests): def create_store(self, **kwargs): - path = tempfile.mktemp(suffix='.dumbdbm') + path = mktemp(suffix='.dumbdbm') atexit.register(atexit_rmglob, path + '*') import dbm.dumb as dumbdbm @@ -361,7 +361,7 @@ class TestDBMStoreV3Gnu(_TestDBMStoreGnu, StoreV3Tests): def create_store(self, **kwargs): gdbm = pytest.importorskip("dbm.gnu") - path = tempfile.mktemp(suffix=".gdbm") # pragma: no cover + path = mktemp(suffix=".gdbm") # pragma: no cover atexit.register(os.remove, path) # pragma: no cover store = DBMStoreV3( path, flag="n", open=gdbm.open, write_lock=False, **kwargs @@ -373,7 +373,7 @@ class TestDBMStoreV3NDBM(_TestDBMStoreNDBM, StoreV3Tests): def create_store(self, **kwargs): ndbm = pytest.importorskip("dbm.ndbm") - path = tempfile.mktemp(suffix=".ndbm") # pragma: no cover + path = mktemp(suffix=".ndbm") # pragma: no cover atexit.register(atexit_rmglob, path + "*") # pragma: no cover store = DBMStoreV3(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover return store # pragma: no cover @@ -383,7 +383,7 @@ class TestDBMStoreV3BerkeleyDB(_TestDBMStoreBerkeleyDB, StoreV3Tests): def create_store(self, **kwargs): bsddb3 = pytest.importorskip("bsddb3") - path = tempfile.mktemp(suffix='.dbm') + path = mktemp(suffix='.dbm') atexit.register(os.remove, path) store = DBMStoreV3(path, flag='n', open=bsddb3.btopen, write_lock=False, **kwargs) return store @@ -393,7 +393,7 @@ class TestLMDBStoreV3(_TestLMDBStore, StoreV3Tests): def create_store(self, **kwargs): pytest.importorskip("lmdb") - path = tempfile.mktemp(suffix='.lmdb') + path = mktemp(suffix='.lmdb') atexit.register(atexit_rmtree, path) buffers = True store = LMDBStoreV3(path, buffers=buffers, **kwargs) @@ -404,7 +404,7 @@ class TestSQLiteStoreV3(_TestSQLiteStore, StoreV3Tests): def create_store(self, **kwargs): pytest.importorskip("sqlite3") - path = tempfile.mktemp(suffix='.db') + path = mktemp(suffix='.db') atexit.register(atexit_rmtree, path) store = SQLiteStoreV3(path, **kwargs) return store diff --git a/zarr/tests/util.py b/zarr/tests/util.py index 912f2f5361..faa2f35d25 100644 --- a/zarr/tests/util.py +++ b/zarr/tests/util.py @@ -1,5 +1,6 @@ import collections import os +import tempfile from zarr.storage import Store from zarr._storage.v3 import StoreV3 @@ -80,3 +81,9 @@ def abs_container(): container_client = blob_service_client.get_container_client("test") return container_client + + +def mktemp(**kwargs): + f = tempfile.NamedTemporaryFile(**kwargs) + f.close() + return f.name From bc13b7f93a353d936c196053757ec05ab282cf0b Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 22 Sep 2022 11:38:12 +0200 Subject: [PATCH 0185/1078] Prepare 2.13.0 release (#1133) * Prepare 2.13.0 release * Update release instructions for pre-releases * Add #1142 for Greg's work * Make python-version match for pre-commits --- .github/workflows/Pre-commit-hooks.yml | 2 ++ 
.pre-commit-config.yaml | 4 ++-- docs/contributing.rst | 18 +++++++++++++----- docs/release.rst | 8 +++++--- 4 files changed, 22 insertions(+), 10 deletions(-) diff --git a/.github/workflows/Pre-commit-hooks.yml b/.github/workflows/Pre-commit-hooks.yml index 7955cb239a..0e51c184fb 100644 --- a/.github/workflows/Pre-commit-hooks.yml +++ b/.github/workflows/Pre-commit-hooks.yml @@ -23,6 +23,8 @@ jobs: - uses: actions/checkout@v3 #setting up Python v3.0.0 - uses: actions/setup-python@v3.0.0 + with: + python-version: '3.9' #using pre-commit latest i.e v2.0.3 - uses: pre-commit/action@v2.0.3 #Running pre-commit for all files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 69828ad50d..70ead97d0b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ default_stages: [commit, push] default_language_version: - python: python3.8 + python: python3.9 repos: - repo: https://github.com/PyCQA/flake8 rev: 3.8.2 @@ -14,4 +14,4 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.3.0 hooks: - - id: check-yaml \ No newline at end of file + - id: check-yaml diff --git a/docs/contributing.rst b/docs/contributing.rst index f7b4831089..0b3c56bc01 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -344,14 +344,22 @@ included in the release have been properly documented in To make a new release, go to https://github.com/zarr-developers/zarr-python/releases and click "Draft a new release". Choose a version number prefixed -with a `v` (e.g. `v0.0.0`) and set the description to: +with a `v` (e.g. `v0.0.0`). For pre-releases, include the +appropriate suffix (e.g. `v0.0.0a1` or `v0.0.0rc2`). -``` -See release notes https://zarr.readthedocs.io/en/stable/release.html#release-0-0-0 -``` + +Set the description of the release to:: + + See release notes https://zarr.readthedocs.io/en/stable/release.html#release-0-0-0 replacing the correct version numbers. For pre-release versions, the URL should omit the pre-release suffix, e.g. "a1" or "rc1". -Be sure to review and merge the https://github.com/conda-forge/zarr-feedstock +After creating the release, the documentation will be built on +https://readthedocs.io. Full releases will be available under +`/stable `_ while +pre-releases will be available under +`/latest `_. + +Also review and merge the https://github.com/conda-forge/zarr-feedstock pull request that will be automatically generated. diff --git a/docs/release.rst b/docs/release.rst index b79341df52..e963b5b509 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -15,8 +15,10 @@ Release notes 2.13.0 ------ -.. warning:: - Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. + +.. + # .. warning:: + # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. Major changes ~~~~~~~~~~~~~ @@ -35,7 +37,7 @@ Major changes * **Zarr v3: add support for the default root path** rather than requiring that all API users pass an explicit path. - By :user:`Gregory R. Lee ` :issue:`1085`. + By :user:`Gregory R. Lee ` :issue:`1085`, :issue:`1142`. Bug fixes From 814017ddc7aa8255d9343246288104a9ade05f07 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 26 Sep 2022 14:31:23 +0200 Subject: [PATCH 0186/1078] 2.13.1: generate fixture/meta in conda tests (#1143) * 2.13.1: generate fixture/meta in conda tests Following the pattern from #824, this generates the fixture test if it does not exist so that tests will pass on conda-forge. 
see: https://github.com/conda-forge/zarr-feedstock/pull/65 * Fix rst link * Fix rst link again * Add pragma: no cover --- docs/release.rst | 16 ++++++++++++---- fixture/meta/.zarray | 2 +- zarr/tests/test_storage.py | 6 ++++++ 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index e963b5b509..8488aa01ed 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -10,16 +10,24 @@ Release notes Unreleased ---------- +.. + # .. warning:: + # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. + +.. _release_2.13.1: + +2.13.1 +------ + +* Fix test failure on conda-forge builds. + By :user:`Josh Moore `; see + `zarr-feedstock#65 `_. .. _release_2.13.0: 2.13.0 ------ -.. - # .. warning:: - # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. - Major changes ~~~~~~~~~~~~~ diff --git a/fixture/meta/.zarray b/fixture/meta/.zarray index f265bb0674..d1acce7665 100644 --- a/fixture/meta/.zarray +++ b/fixture/meta/.zarray @@ -20,4 +20,4 @@ 2 ], "zarr_format": 2 -} +} \ No newline at end of file diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index d0f518dd05..d61216927c 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -2563,5 +2563,11 @@ def test_normalize_store_arg(tmpdir): def test_meta_prefix_6853(): + meta = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" / "meta" + if not meta.exists(): # pragma: no cover + s = DirectoryStore(str(meta), dimension_separator=".") + a = zarr.open(store=s, mode="w", shape=(2, 2), dtype=" Date: Tue, 27 Sep 2022 11:30:25 +0200 Subject: [PATCH 0187/1078] Bump pytest-doctestplus from 0.12.0 to 0.12.1 (#1145) Bumps [pytest-doctestplus](https://github.com/astropy/pytest-doctestplus) from 0.12.0 to 0.12.1. - [Release notes](https://github.com/astropy/pytest-doctestplus/releases) - [Changelog](https://github.com/astropy/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/astropy/pytest-doctestplus/compare/v0.12.0...v0.12.1) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 9105c14e19..38cbf23503 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ tox==3.26.0 coverage flake8==5.0.4 pytest-cov==3.0.0 -pytest-doctestplus==0.12.0 +pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.7.0 fsspec==2022.8.2 From 61af678bd86ce62f9d1b3c4f922e28bc60e47f7e Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 27 Sep 2022 12:34:23 +0200 Subject: [PATCH 0188/1078] 2.13.2: another fix for the conda-forge tests (#1146) See description in https://github.com/zarr-developers/zarr-python/pull/1143 Testing locally, however, does not suffice. In this case, the relative path in the test worked for me, but doesn't work on conda-forge. The only reliable way to be sure is to run `./build-locally.py` in the zarr-feedstock **against pre-tagged code**. 
--- docs/release.rst | 9 +++++++++ zarr/tests/test_storage.py | 7 ++++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 8488aa01ed..2d5c3305cc 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -14,6 +14,15 @@ Release notes # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +.. _release_2.13.2: + +2.13.2 +------ + +* Fix test failure on conda-forge builds (again). + By :user:`Josh Moore `; see + `zarr-feedstock#65 `_. + .. _release_2.13.1: 2.13.1 diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index d61216927c..39d4b5988d 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -2563,11 +2563,12 @@ def test_normalize_store_arg(tmpdir): def test_meta_prefix_6853(): - meta = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" / "meta" + fixture = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" + meta = fixture / "meta" if not meta.exists(): # pragma: no cover s = DirectoryStore(str(meta), dimension_separator=".") a = zarr.open(store=s, mode="w", shape=(2, 2), dtype=" Date: Fri, 30 Sep 2022 10:04:49 +0200 Subject: [PATCH 0189/1078] Bump pytest-cov from 3.0.0 to 4.0.0 (#1151) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 3.0.0 to 4.0.0. - [Release notes](https://github.com/pytest-dev/pytest-cov/releases) - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v3.0.0...v4.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 38cbf23503..1022e56ec5 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==4.2.0 tox==3.26.0 coverage flake8==5.0.4 -pytest-cov==3.0.0 +pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.7.0 From d6e35a5641c66bcc668ee1a666086ea04837b9db Mon Sep 17 00:00:00 2001 From: Richard Shaw Date: Fri, 7 Oct 2022 08:32:52 -0700 Subject: [PATCH 0190/1078] Omit chunks with no elements in slice selection with step (#1154) * Omit chunks with no elements in slice selection with step This stops chunks being read unnecessarily when a slice selection with a step was used. Previously all chunks spanning the start-end range would be used regardless of whether they contained any elements. Fixes #843. * Test that only the required chunks are accessed during basic selections This tests that only the expected set of chunks are accessed during basic slice selection operations for both reads and writes to an array. * Update release notes. * Fix typos in release notes. --- docs/release.rst | 9 +++++ zarr/indexing.py | 5 +++ zarr/tests/test_indexing.py | 74 +++++++++++++++++++++++++++++++++++++ 3 files changed, 88 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 2d5c3305cc..7a172a9839 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -14,6 +14,15 @@ Release notes # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +.. 
_release_2.13.3:
+
+2.13.3
+------
+
+* Improve performance of slice selections with steps by omitting chunks with no relevant
+  data.
+  By :user:`Richard Shaw ` :issue:`843`.
+
 .. _release_2.13.2:

 2.13.2
diff --git a/zarr/indexing.py b/zarr/indexing.py
index 74b53af049..268b487105 100644
--- a/zarr/indexing.py
+++ b/zarr/indexing.py
@@ -216,6 +216,11 @@ def __iter__(self):
             dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step)
             dim_chunk_nitems = ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start),
                                        self.step)
+
+            # If there are no elements in the selection within this chunk, then skip
+            if dim_chunk_nitems == 0:
+                continue
+
             dim_out_sel = slice(dim_out_offset, dim_out_offset + dim_chunk_nitems)

             yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel)
diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py
index 524d335c9f..5c4c580636 100644
--- a/zarr/tests/test_indexing.py
+++ b/zarr/tests/test_indexing.py
@@ -13,6 +13,8 @@
     PartialChunkIterator,
 )

+from zarr.tests.util import CountingDict
+

 def test_normalize_integer_selection():

@@ -1451,3 +1453,75 @@ def test_numpy_int_indexing():
     z[:] = a
     assert a[42] == z[42]
     assert a[numpy.int64(42)] == z[numpy.int64(42)]
+
+
+@pytest.mark.parametrize(
+    "shape, chunks, ops",
+    [
+        # 1D test cases
+        ((1070,), (50,), [("__getitem__", (slice(200, 400),))]),
+        ((1070,), (50,), [("__getitem__", (slice(200, 400, 100),))]),
+        ((1070,), (50,), [
+            ("__getitem__", (slice(200, 400),)),
+            ("__setitem__", (slice(200, 400, 100),)),
+        ]),
+
+        # 2D test cases
+        ((40, 50), (5, 8), [
+            ("__getitem__", (slice(6, 37, 13), (slice(4, 10)))),
+            ("__setitem__", (slice(None), (slice(None)))),
+        ]),
+    ]
+)
+def test_accessed_chunks(shape, chunks, ops):
+    # Test that only the required chunks are accessed during basic selection operations
+    # shape: array shape
+    # chunks: chunk size
+    # ops: list of tuples with (optype, tuple of slices)
+    #   optype = "__getitem__" or "__setitem__", tuple length must match number of dims
+    import itertools
+
+    # Use a counting dict as the backing store so we can track the items accessed
+    store = CountingDict()
+    z = zarr.create(shape=shape, chunks=chunks, store=store)
+
+    for ii, (optype, slices) in enumerate(ops):
+
+        # Resolve the slices into the accessed chunks for each dimension
+        chunks_per_dim = []
+        for N, C, sl in zip(shape, chunks, slices):
+            chunk_ind = np.arange(N, dtype=int)[sl] // C
+            chunks_per_dim.append(np.unique(chunk_ind))
+
+        # Combine and generate the cartesian product to determine the chunks keys that
+        # will be accessed
+        chunks_accessed = []
+        for comb in itertools.product(*chunks_per_dim):
+            chunks_accessed.append(".".join([str(ci) for ci in comb]))
+
+        counts_before = store.counter.copy()
+
+        # Perform the operation
+        if optype == "__getitem__":
+            z[slices]
+        else:
+            z[slices] = ii
+
+        # Get the change in counts
+        delta_counts = store.counter - counts_before
+
+        # Check that the access counts for the operation have increased by one for all
+        # the chunks we expect to be included
+        for ci in chunks_accessed:
+            assert delta_counts.pop((optype, ci)) == 1
+
+        # If the chunk was partially written to it will also have been read once.
We
+        # don't determine if the chunk was actually partial here, just that the
+        # counts are consistent with that having happened
+        if optype == "__setitem__":
+            assert (
+                ("__getitem__", ci) not in delta_counts or
+                delta_counts.pop(("__getitem__", ci)) == 1
+            )
+        # Check that no other chunks were accessed
+        assert len(delta_counts) == 0
From d3a817c018e443ef1f5db07f8772b9742cf3b356 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 9 Oct 2022 12:02:44 +0200
Subject: [PATCH 0191/1078] Update GitHub Actions (#1134)

Co-authored-by: Saransh Chopra
Co-authored-by: Saransh Chopra
Co-authored-by: jakirkham
---
 .github/dependabot.yml                |  4 ++++
 .github/workflows/minimal.yml         |  2 +-
 .github/workflows/python-package.yml  |  2 +-
 .github/workflows/releases.yml        | 11 ++++++-----
 .github/workflows/windows-testing.yml |  2 +-
 5 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 2db1489656..d8e8d4d57a 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,3 +5,7 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml
index fc9c048af7..a256a53b60 100644
--- a/.github/workflows/minimal.yml
+++ b/.github/workflows/minimal.yml
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Setup Miniconda
         uses: conda-incubator/setup-miniconda@master
         with:
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index c62127b280..5ea198915c 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -36,7 +36,7 @@ jobs:
         ports:
           - 27017:27017
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Setup Miniconda
diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml
index d97320ff8a..f3f024c71f 100644
--- a/.github/workflows/releases.yml
+++ b/.github/workflows/releases.yml
@@ -11,11 +11,12 @@ jobs:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v3
         with:
           submodules: true
+          fetch-depth: 0

-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         name: Install Python
         with:
           python-version: '3.8'
@@ -35,7 +36,7 @@ jobs:
           else
             echo "All seem good"
           fi
-      - uses: actions/upload-artifact@v1
+      - uses: actions/upload-artifact@v3
         with:
           name: releases
           path: dist
@@ -44,7 +45,7 @@ jobs:
     needs: [build_artifacts]
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v1
+      - uses: actions/download-artifact@v3
        with:
          name: releases
          path: dist
@@ -59,7 +60,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v')
     steps:
-      - uses: actions/download-artifact@v1
+      - uses: actions/download-artifact@v3
        with:
          name: releases
          path: dist
diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml
index 791506c7bd..af8bae8cf1 100644
--- a/.github/workflows/windows-testing.yml
+++ b/.github/workflows/windows-testing.yml
@@ -18,7 +18,7 @@ jobs:
       matrix:
         python-version: ['3.8', '3.9', '3.10']
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - uses: conda-incubator/setup-miniconda@v2
From c24750434a42353e1bb052d49c7900712dd83dc2 Mon Sep 17 00:00:00 2001
From: AWA BRANDON
AWA <51425873+DON-BRAN@users.noreply.github.com> Date: Tue, 11 Oct 2022 14:56:37 +0100 Subject: [PATCH 0192/1078] updated _Forking_the_repository_ section in contributing page (#1171) --- docs/contributing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 0b3c56bc01..cfb24b50af 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -75,8 +75,8 @@ The Zarr source code is hosted on GitHub at the following location: You will need your own fork to work on the code. Go to the link above and hit the "Fork" button. Then clone your fork to your local machine:: - $ git clone git@github.com:your-user-name/zarr.git - $ cd zarr + $ git clone git@github.com:your-user-name/zarr-python.git + $ cd zarr-python $ git remote add upstream git@github.com:zarr-developers/zarr-python.git Creating a development environment From 6d4e0a6fdb6bdd2892a6dd680c3b15caee0aaa69 Mon Sep 17 00:00:00 2001 From: Emmanuel Bolarinwa Date: Tue, 11 Oct 2022 14:59:10 +0100 Subject: [PATCH 0193/1078] adjustment of year in Docs (#1165) --- LICENSE | 2 +- docs/conf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE b/LICENSE index 22c4904c4a..d672a4f670 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2015-2018 Zarr Developers +Copyright (c) 2015-2022 Zarr Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/docs/conf.py b/docs/conf.py index 733ac60801..be9e4bb574 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -67,7 +67,7 @@ # General information about the project. project = 'zarr' -copyright = '2018, Zarr Developers' +copyright = '2022, Zarr Developers' author = 'Zarr Developers' version = zarr.__version__ From da8af2b042065d5a09a7a82ecfcd707090015022 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Oct 2022 16:02:04 +0200 Subject: [PATCH 0194/1078] Bump actions/setup-python from 3.0.0 to 4.2.0 (#1162) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 3.0.0 to 4.2.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v3...v4.2.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/Pre-commit-hooks.yml | 2 +- .github/workflows/releases.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Pre-commit-hooks.yml b/.github/workflows/Pre-commit-hooks.yml index 0e51c184fb..6873f85bf5 100644 --- a/.github/workflows/Pre-commit-hooks.yml +++ b/.github/workflows/Pre-commit-hooks.yml @@ -22,7 +22,7 @@ jobs: steps: - uses: actions/checkout@v3 #setting up Python v3.0.0 - - uses: actions/setup-python@v3.0.0 + - uses: actions/setup-python@v4.2.0 with: python-version: '3.9' #using pre-commit latest i.e v2.0.3 diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index f3f024c71f..ff004b7229 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v4.2.0 name: Install Python with: python-version: '3.8' From 53357f71b1da72b9941c502baf317f992e90d3af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Oct 2022 16:02:27 +0200 Subject: [PATCH 0195/1078] Bump codecov/codecov-action from 1 to 3 (#1160) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 1 to 3. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v1...v3) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 5ea198915c..2eeede91fe 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -75,7 +75,7 @@ jobs: mkdir ~/blob_emulator azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log & pytest --cov=zarr --cov-config=.coveragerc --doctest-plus --cov-report xml --cov=./ --timeout=300 - - uses: codecov/codecov-action@v1 + - uses: codecov/codecov-action@v3 with: #token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos #files: ./coverage1.xml,./coverage2.xml # optional From 748d1762c7a79b2ab7f12d6dd4709eaf78a83d0c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Oct 2022 16:48:25 +0200 Subject: [PATCH 0196/1078] Bump pre-commit/action from 2.0.3 to 3.0.0 (#1159) Bumps [pre-commit/action](https://github.com/pre-commit/action) from 2.0.3 to 3.0.0. - [Release notes](https://github.com/pre-commit/action/releases) - [Commits](https://github.com/pre-commit/action/compare/v2.0.3...v3.0.0) --- updated-dependencies: - dependency-name: pre-commit/action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/Pre-commit-hooks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/Pre-commit-hooks.yml b/.github/workflows/Pre-commit-hooks.yml index 6873f85bf5..e219b75e69 100644 --- a/.github/workflows/Pre-commit-hooks.yml +++ b/.github/workflows/Pre-commit-hooks.yml @@ -26,7 +26,7 @@ jobs: with: python-version: '3.9' #using pre-commit latest i.e v2.0.3 - - uses: pre-commit/action@v2.0.3 + - uses: pre-commit/action@v3.0.0 #Running pre-commit for all files - name: Pre-Commit-Run run: | From 6ef11d3ff37a394ceedc10ad63ddfe71d5c0d3bb Mon Sep 17 00:00:00 2001 From: AWA BRANDON AWA <51425873+DON-BRAN@users.noreply.github.com> Date: Tue, 11 Oct 2022 15:49:20 +0100 Subject: [PATCH 0197/1078] fix #1167: Added installation.rst to docs (#1170) * Added installation.rst to docs * Removed _installation_ section from index.rst --- docs/index.rst | 33 +-------------------------------- docs/installation.rst | 31 +++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 32 deletions(-) create mode 100644 docs/installation.rst diff --git a/docs/index.rst b/docs/index.rst index f4afc9fd85..007c8d010f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,44 +25,13 @@ Zarr is still a young project. Feedback and bug reports are very welcome, please the `GitHub issue tracker `_. See :doc:`contributing` for further information about contributing to Zarr. -Installation ------------- - -Zarr depends on NumPy. It is generally best to `install NumPy -`_ first using whatever method is most -appropriate for you operating system and Python distribution. Other dependencies should be -installed automatically if using one of the installation methods below. - -Install Zarr from PyPI:: - - $ pip install zarr - -Alternatively, install Zarr via conda:: - - $ conda install -c conda-forge zarr - -To install the latest development version of Zarr, you can use pip with the -latest GitHub main:: - - $ pip install git+https://github.com/zarr-developers/zarr-python.git - -To work with Zarr source code in development, install from GitHub:: - - $ git clone --recursive https://github.com/zarr-developers/zarr-python.git - $ cd zarr-python - $ python setup.py install - -To verify that Zarr has been fully installed, run the test suite:: - - $ pip install pytest - $ python -m pytest -v --pyargs zarr - Contents -------- .. toctree:: :maxdepth: 2 + installation tutorial api spec diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 0000000000..47c1dee269 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,31 @@ +Installation +============ + +Zarr depends on NumPy. It is generally best to `install NumPy +`_ first using whatever method is most +appropriate for you operating system and Python distribution. Other dependencies should be +installed automatically if using one of the installation methods below. 
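
After installing via any of the routes below, a quick smoke test (a suggested snippet, not part of the committed file) is to create a small chunked array and read it back::

    import numpy as np
    import zarr

    print(zarr.__version__)
    z = zarr.zeros((100, 100), chunks=(10, 10), dtype='i4')
    z[0, :] = np.arange(100)
    print(z[0, :5])  # [0 1 2 3 4]
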
+
+Install Zarr from PyPI::
+
+    $ pip install zarr
+
+Alternatively, install Zarr via conda::
+
+    $ conda install -c conda-forge zarr
+
+To install the latest development version of Zarr, you can use pip with the
+latest GitHub main::
+
+    $ pip install git+https://github.com/zarr-developers/zarr-python.git
+
+To work with Zarr source code in development, install from GitHub::
+
+    $ git clone --recursive https://github.com/zarr-developers/zarr-python.git
+    $ cd zarr-python
+    $ python setup.py install
+
+To verify that Zarr has been fully installed, run the test suite::
+
+    $ pip install pytest
+    $ python -m pytest -v --pyargs zarr
\ No newline at end of file

From c0e10af608c7d66f0229cb102d454b601261fa22 Mon Sep 17 00:00:00 2001
From: AWA BRANDON AWA <51425873+DON-BRAN@users.noreply.github.com>
Date: Tue, 11 Oct 2022 18:25:19 +0100
Subject: [PATCH 0198/1078] set docs conf.py language = 'en' (#1174)

---
 docs/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/conf.py b/docs/conf.py
index be9e4bb574..21e2e89880 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -79,7 +79,7 @@
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = 'en'
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:

From 0935f9582e1226c283861ef7252ee6ad07abd843 Mon Sep 17 00:00:00 2001
From: Josh Moore
Date: Tue, 11 Oct 2022 22:30:30 +0200
Subject: [PATCH 0199/1078] Refresh of the main landing page (#1173)

* Refresh of the main landing page
* Update contributors
* Update links
* Drop "Status" section
* cleanup contributors list
* Move acknowledgements to the bottom
* limit toctree for releases
* limit toctree for all
* re-expand toctree at the bottom
* Add v3 spec page
* Use existing spec toctree
* list numfocus
* due to length move release to the bottom
---
 docs/index.rst   | 145 +++++++++++++++++++++++++++++++++--------------
 docs/spec.rst    |   3 +-
 docs/spec/v3.rst |   7 +++
 3 files changed, 113 insertions(+), 42 deletions(-)
 create mode 100644 docs/spec/v3.rst

diff --git a/docs/index.rst b/docs/index.rst
index 007c8d010f..6b41ac651d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -4,45 +4,41 @@
 Zarr
 ====
 
-Zarr is a format for the storage of chunked, compressed, N-dimensional arrays.
-These documents describe the Zarr format and its Python implementation.
+Zarr is a format for the storage of chunked, compressed, N-dimensional arrays
+inspired by `HDF5 `_, `h5py
+`_ and `bcolz `_.
+
+The project is fiscally sponsored by `NumFOCUS `_, a US
+501(c)(3) public charity, and development is supported by the
+`MRC Centre for Genomics and Global Health `_
+and the `Chan Zuckerberg Initiative `_.
+
+These documents describe the Zarr Python implementation. More information
+about the Zarr format can be found on the `main website `_.
 
 Highlights
 ----------
 
-* Create N-dimensional arrays with any NumPy dtype.
-* Chunk arrays along any dimension.
-* Compress and/or filter chunks using any NumCodecs_ codec.
-* Store arrays in memory, on disk, inside a Zip file, on S3, ...
-* Read an array concurrently from multiple threads or processes.
-* Write to an array concurrently from multiple threads or processes.
-* Organize arrays into hierarchies via groups.
+ * Create N-dimensional arrays with any NumPy dtype.
+ * Chunk arrays along any dimension.
+ * Compress and/or filter chunks using any NumCodecs_ codec.
+ * Store arrays in memory, on disk, inside a Zip file, on S3, ... + * Read an array concurrently from multiple threads or processes. + * Write to an array concurrently from multiple threads or processes. + * Organize arrays into hierarchies via groups. -Status ------- +Contributing +------------ -Zarr is still a young project. Feedback and bug reports are very welcome, please get in touch via +Feedback and bug reports are very welcome, please get in touch via the `GitHub issue tracker `_. See :doc:`contributing` for further information about contributing to Zarr. -Contents --------- - -.. toctree:: - :maxdepth: 2 - - installation - tutorial - api - spec - release - contributing - Projects using Zarr ------------------- If you are using Zarr, we would `love to hear about it -`_. +`_. Acknowledgments --------------- @@ -50,22 +46,89 @@ Acknowledgments The following people have contributed to the development of Zarr by contributing code, documentation, code reviews, comments and/or ideas: -* :user:`Francesc Alted ` -* :user:`Martin Durant ` -* :user:`Stephan Hoyer ` -* :user:`John Kirkham ` -* :user:`Alistair Miles ` -* :user:`Mamy Ratsimbazafy ` -* :user:`Matthew Rocklin ` -* :user:`Vincent Schut ` -* :user:`Anthony Scopatz ` -* :user:`Prakhar Goel ` - -Zarr is inspired by `HDF5 `_, `h5py -`_ and `bcolz `_. +:user:`Alistair Miles ` +:user:`Altay Sansal ` +:user:`Anderson Banihirwe ` +:user:`Andrew Fulton ` +:user:`Andrew Thomas ` +:user:`Anthony Scopatz ` +:user:`Attila Bergou ` +:user:`BGCMHou ` +:user:`Ben Jeffery ` +:user:`Ben Williams ` +:user:`Boaz Mohar ` +:user:`Charles Noyes ` +:user:`Chris Barnes ` +:user:`David Baddeley ` +:user:`Davis Bennett ` +:user:`Dimitri Papadopoulos Orfanos ` +:user:`Eduardo Gonzalez ` +:user:`Elliott Sales de Andrade ` +:user:`Eric Prestat ` +:user:`Eric Younkin ` +:user:`Francesc Alted ` +:user:`Greggory Lee ` +:user:`Gregory R. Lee ` +:user:`Ian Hunt-Isaak ` +:user:`James Bourbeau ` +:user:`Jan Funke ` +:user:`Jerome Kelleher ` +:user:`Joe Hamman ` +:user:`Joe Jevnik ` +:user:`John Kirkham ` +:user:`Josh Moore ` +:user:`Juan Nunez-Iglesias ` +:user:`Justin Swaney ` +:user:`Mads R. B. Kristensen ` +:user:`Mamy Ratsimbazafy ` +:user:`Martin Durant ` +:user:`Matthew Rocklin ` +:user:`Matthias Bussonnier ` +:user:`Mattia Almansi ` +:user:`Noah D Brenowitz ` +:user:`Oren Watson ` +:user:`Pavithra Eswaramoorthy ` +:user:`Poruri Sai Rahul ` +:user:`Prakhar Goel ` +:user:`Raphael Dussin ` +:user:`Ray Bell ` +:user:`Richard Scott ` +:user:`Richard Shaw ` +:user:`Ryan Abernathey ` +:user:`Ryan Williams ` +:user:`Saransh Chopra ` +:user:`Sebastian Grill ` +:user:`Shikhar Goenka ` +:user:`Shivank Chaudhary ` +:user:`Stephan Hoyer ` +:user:`Stephan Saalfeld ` +:user:`Tarik Onalan ` +:user:`Tim Crone ` +:user:`Tobias Kölling ` +:user:`Tom Augspurger ` +:user:`Tom White ` +:user:`Tommy Tran ` +:user:`Trevor Manz ` +:user:`Vincent Schut ` +:user:`Vyas Ramasubramani ` +:user:`Zain Patel ` +:user:`gsakkis` +:user:`hailiangzhang ` +:user:`pmav99 ` +:user:`sbalmer ` + +Contents +-------- + +.. toctree:: + :maxdepth: 2 -Development of Zarr is supported by the -`MRC Centre for Genomics and Global Health `_. + installation + tutorial + api + spec + contributing + release Indices and tables ------------------ diff --git a/docs/spec.rst b/docs/spec.rst index 765dcd782a..3c06f3228d 100644 --- a/docs/spec.rst +++ b/docs/spec.rst @@ -6,5 +6,6 @@ Specifications .. 
toctree:: :maxdepth: 3 - spec/v1 + spec/v3 spec/v2 + spec/v1 diff --git a/docs/spec/v3.rst b/docs/spec/v3.rst new file mode 100644 index 0000000000..2ab47fbe96 --- /dev/null +++ b/docs/spec/v3.rst @@ -0,0 +1,7 @@ +.. _spec_v2: + +Zarr storage specification version 3 (under development) +======================================================== + +The v3 specification has been migrated to its own website, +https://zarr-specs.readthedocs.io/. From be0bf3e5259ce4a1a522eafdbe8b78082060b12e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Oct 2022 22:44:43 -0700 Subject: [PATCH 0200/1078] Bump azure-storage-blob from 12.13.1 to 12.14.0 (#1175) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.13.1 to 12.14.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.13.1...azure-storage-blob_12.14.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 1022e56ec5..eb6790cd52 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -6,7 +6,7 @@ ipytree==0.2.1 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.13.1 # pyup: ignore +azure-storage-blob==12.14.0 # pyup: ignore redis==4.3.4 types-redis types-setuptools From f9581cb6db4c2950e200add3785bac2685aac391 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Oct 2022 08:19:41 +0000 Subject: [PATCH 0201/1078] Bump ipytree from 0.2.1 to 0.2.2 (#1112) Bumps [ipytree](https://github.com/martinRenou/ipytree) from 0.2.1 to 0.2.2. - [Release notes](https://github.com/martinRenou/ipytree/releases) - [Changelog](https://github.com/martinRenou/ipytree/blob/master/RELEASE.md) - [Commits](https://github.com/martinRenou/ipytree/commits) --- updated-dependencies: - dependency-name: ipytree dependency-type: direct:development update-type: version-update:semver-patch ... 
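
The ipytree/ipywidgets bump below also switches zarr's tree viewer from ``_ipython_display_`` to ``_repr_mimebundle_``. The newer Jupyter hook is pull-based: instead of printing output eagerly, the object returns a dict keyed by MIME type and the frontend picks the richest representation it supports. A toy illustration of the protocol (not zarr code; the class name is made up)::

    class TreeViewerToy:
        """Toy stand-in for a rich-display object like zarr's tree viewer."""

        def __init__(self, text):
            self.text = text

        def _repr_mimebundle_(self, **kwargs):
            # Jupyter renders the richest MIME type it supports; other
            # frontends can fall back to the plain-text entry.
            return {
                "text/plain": self.text,
                "text/html": "<pre>{}</pre>".format(self.text),
            }
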
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 3 ++- setup.py | 3 ++- zarr/tests/test_hierarchy.py | 2 +- zarr/util.py | 5 ++--- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index eb6790cd52..58c088c2a1 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -2,7 +2,8 @@ # bsddb3==6.2.6; sys_platform != 'win32' lmdb==1.3.0; sys_platform != 'win32' # optional library requirements for Jupyter -ipytree==0.2.1 +ipytree==0.2.2 +ipywidgets==8.0.2 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) diff --git a/setup.py b/setup.py index 79ff649dac..9f005ecd39 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,8 @@ extras_require={ 'jupyter': [ 'notebook', - 'ipytree', + 'ipytree>=0.2.2', + 'ipywidgets>=8.0.0', ], }, python_requires='>=3.8, <4', diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index a2917acb44..8d1fabbed3 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -1760,7 +1760,7 @@ def _check_tree(g, expect_bytes, expect_text): assert expect_repr == repr(g.tree()) if ipytree: # noinspection PyProtectedMember - widget = g.tree()._ipython_display_() + widget = g.tree()._repr_mimebundle_() isinstance(widget, ipytree.Tree) diff --git a/zarr/util.py b/zarr/util.py index c9136a63eb..9fcdac9df7 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -533,10 +533,9 @@ def __unicode__(self): def __repr__(self): return self.__unicode__() - def _ipython_display_(self): + def _repr_mimebundle_(self, **kwargs): tree = tree_widget(self.group, expand=self.expand, level=self.level) - tree._ipython_display_() - return tree + return tree._repr_mimebundle_(**kwargs) def check_array_shape(param, array, shape): From 5199eabd7b467a7657f4a68f4e4395fe8fda27f8 Mon Sep 17 00:00:00 2001 From: zobbs-git <114318257+zobbs-git@users.noreply.github.com> Date: Wed, 12 Oct 2022 20:15:32 -0100 Subject: [PATCH 0202/1078] Fixed typo error (#1178) Co-authored-by: jakirkham --- docs/installation.rst | 2 +- docs/release.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index 47c1dee269..047dbe2bd2 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -3,7 +3,7 @@ Installation Zarr depends on NumPy. It is generally best to `install NumPy `_ first using whatever method is most -appropriate for you operating system and Python distribution. Other dependencies should be +appropriate for your operating system and Python distribution. Other dependencies should be installed automatically if using one of the installation methods below. Install Zarr from PyPI:: diff --git a/docs/release.rst b/docs/release.rst index 7a172a9839..b511c6a1b4 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -218,7 +218,7 @@ Bug fixes By :user:`Ben Jeffery ` :issue:`967`. * Removed `clobber` argument from `normalize_store_arg`. This enables to change - data within a opened consolidated group using mode `"r+"` (i.e region write). + data within an opened consolidated group using mode `"r+"` (i.e region write). By :user:`Tobias Kölling ` :issue:`975`. .. 
_release_2.11.0: From f65c151fec64f9b754dfa8dd05551f6d0f0a95e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Oct 2022 00:24:04 -0700 Subject: [PATCH 0203/1078] Bump numpy from 1.23.3 to 1.23.4 (#1179) Bumps [numpy](https://github.com/numpy/numpy) from 1.23.3 to 1.23.4. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.23.3...v1.23.4) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 11fb3ff63c..b1c56d3f7d 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.23.3 +numpy==1.23.4 From eb6d14364280fa64b778913bc032aee783e1f65f Mon Sep 17 00:00:00 2001 From: AWA BRANDON AWA <51425873+DON-BRAN@users.noreply.github.com> Date: Thu, 13 Oct 2022 22:02:04 +1100 Subject: [PATCH 0204/1078] updated installation documentation link in readme (#1177) * updated installation documentation link in readme * updated installation documentation link * updated installation documentation link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 207d7dedd5..ba393e6141 100644 --- a/README.md +++ b/README.md @@ -107,4 +107,4 @@ or via `conda`: conda install -c conda-forge zarr ``` -For more details, including how to install from source, see the [installation documentation](https://zarr.readthedocs.io/en/stable/#installation). +For more details, including how to install from source, see the [installation documentation](https://zarr.readthedocs.io/en/stable/index.html#installation). From 2c52b172ead27a1d5918563096703d9f3ed0f5f5 Mon Sep 17 00:00:00 2001 From: AWA BRANDON AWA <51425873+DON-BRAN@users.noreply.github.com> Date: Fri, 14 Oct 2022 21:42:54 +1100 Subject: [PATCH 0205/1078] added license to docs and updated spec_v3 (#1182) * added license to docs and updated spec_v3 * updated license * updated license.rst * updated license.rst --- docs/index.rst | 1 + docs/license.rst | 4 ++++ docs/spec/v3.rst | 2 +- 3 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 docs/license.rst diff --git a/docs/index.rst b/docs/index.rst index 6b41ac651d..5e68dcab31 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -129,6 +129,7 @@ Contents spec contributing release + license Indices and tables ------------------ diff --git a/docs/license.rst b/docs/license.rst new file mode 100644 index 0000000000..d47e1b2b34 --- /dev/null +++ b/docs/license.rst @@ -0,0 +1,4 @@ +License +======= + +.. include:: ../LICENSE \ No newline at end of file diff --git a/docs/spec/v3.rst b/docs/spec/v3.rst index 2ab47fbe96..a448fbc3a1 100644 --- a/docs/spec/v3.rst +++ b/docs/spec/v3.rst @@ -1,4 +1,4 @@ -.. _spec_v2: +.. 
_spec_v3: Zarr storage specification version 3 (under development) ======================================================== From 4362bb438f537b37ebd271506b774dfa31f5e68e Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Sat, 15 Oct 2022 23:17:19 +0530 Subject: [PATCH 0206/1078] Migrate to pyproject.toml + cleanup (#1158) * Migrate to pyproject.toml + cleanup * Remove more instances of python setup.py ... * release.rst entry * Move changes to unreleased section * Remove pre-commit checks from workflows * Update `jupyter` dependencies Co-authored-by: jakirkham * Lower pin setuptools * Document installing `build` Co-authored-by: jakirkham * Add docs for twine and pre-commit * Add pre-commit badge to readme. * add .readthedocs.yaml Co-authored-by: jakirkham Co-authored-by: Josh Moore --- codecov.yml => .github/codecov.yml | 0 .github/workflows/Pre-commit-hooks.yml | 34 ----------- .github/workflows/python-package.yml | 6 -- .gitignore | 3 + .pre-commit-config.yaml | 17 ++++++ .readthedocs.yaml | 15 +++++ MANIFEST.in | 0 README.md | 11 +++- docs/contributing.rst | 27 +++++++-- docs/installation.rst | 2 +- docs/release.rst | 12 +++- mypy.ini | 4 -- pyproject.toml | 79 +++++++++++++++++++++++++- pytest.ini | 8 --- release.txt | 6 +- setup.cfg | 3 - setup.py | 59 ------------------- 17 files changed, 161 insertions(+), 125 deletions(-) rename codecov.yml => .github/codecov.yml (100%) delete mode 100644 .github/workflows/Pre-commit-hooks.yml create mode 100644 .readthedocs.yaml delete mode 100644 MANIFEST.in delete mode 100644 mypy.ini delete mode 100644 pytest.ini delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/codecov.yml b/.github/codecov.yml similarity index 100% rename from codecov.yml rename to .github/codecov.yml diff --git a/.github/workflows/Pre-commit-hooks.yml b/.github/workflows/Pre-commit-hooks.yml deleted file mode 100644 index e219b75e69..0000000000 --- a/.github/workflows/Pre-commit-hooks.yml +++ /dev/null @@ -1,34 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: pre-commit - -# Controls when the workflow will run -on: - # Triggers the workflow pull request events but only for the main branch - pull_request: - push: - branches: [main] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # Using Ubuntu image with latest tag - pre-commit: - runs-on: ubuntu-latest - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v3 - #setting up Python v3.0.0 - - uses: actions/setup-python@v4.2.0 - with: - python-version: '3.9' - #using pre-commit latest i.e v2.0.3 - - uses: pre-commit/action@v3.0.0 - #Running pre-commit for all files - - name: Pre-Commit-Run - run: | - pip install pre-commit - pre-commit run --all-files diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 2eeede91fe..9abb7c7866 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -83,9 +83,3 @@ jobs: #name: codecov-umbrella # optional #fail_ci_if_error: true # optional (default = false) verbose: true # optional (default = false) - - name: Linting - shell: "bash -l {0}" - run: | - conda activate zarr-env - flake8 zarr - mypy zarr diff --git a/.gitignore b/.gitignore index f2f7edc348..535cf2b169 100644 --- a/.gitignore +++ b/.gitignore @@ -67,6 +67,9 @@ zarr/version.py # emacs *~ +# VSCode 
+.vscode/ + # test data #*.zarr #*.zip diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 70ead97d0b..d62b10aa40 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,6 @@ +ci: + autoupdate_commit_msg: "chore: update pre-commit hooks" + autofix_commit_msg: "style: pre-commit fixes" default_stages: [commit, push] default_language_version: python: python3.9 @@ -11,7 +14,21 @@ repos: ] exclude: ^(venv/|docs/) types: ['python'] + - repo: https://github.com/codespell-project/codespell + rev: v2.1.0 + hooks: + - id: codespell + args: ["-L", "ba,ihs,kake,nd,noe,nwo,te"] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.3.0 hooks: - id: check-yaml + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.981 + hooks: + - id: mypy + files: zarr + args: [] + additional_dependencies: + - types-redis + - types-setuptools diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..42fc08bca2 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,15 @@ +version: 2 + +build: + os: ubuntu-20.04 + tools: + python: "3.9" + +sphinx: + configuration: docs/conf.py + +python: + install: + - requirements: requirements_rtfd.txt + - method: pip + path: . diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/README.md b/README.md index ba393e6141..3576d0c30e 100644 --- a/README.md +++ b/README.md @@ -40,10 +40,19 @@ Build Status - license + build status + + Pre-commit Status + + + pre-commit status + + + + Coverage diff --git a/docs/contributing.rst b/docs/contributing.rst index cfb24b50af..64e61abacb 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -179,14 +179,33 @@ also collected automatically via the Codecov service, and total coverage over all builds must be 100% (although individual builds may be lower due to Python 2/3 or other differences). -Code standards -~~~~~~~~~~~~~~ +Code standards - using pre-commit +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ All code must conform to the PEP8 standard. Regarding line length, lines up to 100 characters are allowed, although please try to keep under 90 wherever possible. -Conformance can be checked by running:: - $ python -m flake8 --max-line-length=100 zarr +``Zarr`` uses a set of ``pre-commit`` hooks and the ``pre-commit`` bot to format, +type-check, and prettify the codebase. ``pre-commit`` can be installed locally by +running:: + + $ python -m pip install pre-commit + +The hooks can be installed locally by running:: + + $ pre-commit install + +This would run the checks every time a commit is created locally. These checks will also run +on every commit pushed to an open PR, resulting in some automatic styling fixes by the +``pre-commit`` bot. The checks will by default only run on the files modified by a commit, +but the checks can be triggered for all the files by running:: + + $ pre-commit run --all-files + +If you would like to skip the failing checks and push the code for further discussion, use +the ``--no-verify`` option with ``git commit``. + + Test coverage ~~~~~~~~~~~~~ diff --git a/docs/installation.rst b/docs/installation.rst index 047dbe2bd2..a07c1c42e1 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -23,7 +23,7 @@ To work with Zarr source code in development, install from GitHub:: $ git clone --recursive https://github.com/zarr-developers/zarr-python.git $ cd zarr-python - $ python setup.py install + $ python -m pip install -e . 
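
The mypy hook added to .pre-commit-config.yaml above only checks the ``zarr`` package, and the kinds of issues it reports are the possibly-unbound names and implicit-``Optional`` defaults fixed later in this series (see the zarr/storage.py and zarr/creation.py hunks further down). An illustrative pair, not zarr code, with made-up helper names::

    from typing import Callable, Optional

    def _lower(key: str) -> str:
        return key.lower()

    def _upper(key: str) -> str:
        return key.upper()

    def pick_normalizer(zarr_version: int) -> Callable[[str], str]:
        if zarr_version == 2:
            normalize = _lower
        elif zarr_version == 3:
            normalize = _upper
        else:
            # Without this final else, mypy flags `normalize` as possibly
            # unbound at the return statement below.
            raise ValueError("zarr_version must be either 2 or 3")
        return normalize

    def create(fill_value: Optional[int] = 0) -> None:
        # A parameter defaulting to 0 that may also be passed None needs
        # an explicit Optional[int] annotation.
        ...
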
To verify that Zarr has been fully installed, run the test suite:: diff --git a/docs/release.rst b/docs/release.rst index b511c6a1b4..ea746999e1 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,14 +6,20 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. - .. _unreleased: +.. _unreleased: - Unreleased - ---------- +Unreleased +---------- .. # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +Maintenance +~~~~~~~~~~~ + +* Migrate to ``pyproject.toml`` and remove redundant infrastructure. + By :user:`Saransh Chopra ` :issue:`1158`. + .. _release_2.13.3: 2.13.3 diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 7c1be49cd6..0000000000 --- a/mypy.ini +++ /dev/null @@ -1,4 +0,0 @@ -[mypy] -python_version = 3.8 -ignore_missing_imports = True -follow_imports = silent diff --git a/pyproject.toml b/pyproject.toml index 2f21011953..8da5e6b994 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,80 @@ [build-system] -requires = ["setuptools>=40.8.0", "wheel"] +requires = ["setuptools>=40.8.0", "setuptools-scm", "wheel"] build-backend = "setuptools.build_meta" + + +[project] +name = "zarr" +description = "An implementation of chunked, compressed, N-dimensional arrays for Python" +readme = { file = "README.md", content-type = "text/markdown" } +maintainers = [ + { name = "Alistair Miles", email = "alimanfoo@googlemail.com" } +] +requires-python = ">=3.8" +dependencies = [ + 'asciitree', + 'numpy>=1.7', + 'fasteners', + 'numcodecs>=0.10.0', +] +dynamic = [ + "version", +] +classifiers = [ + 'Development Status :: 6 - Mature', + 'Intended Audience :: Developers', + 'Intended Audience :: Information Technology', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Operating System :: Unix', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', +] + +[project.optional-dependencies] +jupyter = [ + 'notebook', + 'ipytree>=0.2.2', + 'ipywidgets>=8.0.0', +] + +[project.urls] +"Bug Tracker" = "https://github.com/zarr-developers/zarr-python/issues" +Changelog = "https://zarr.readthedocs.io/en/stable/release.html" +Discussions = "https://github.com/zarr-developers/zarr-python/discussions" +Documentation = "https://zarr.readthedocs.io/" +Homepage = "https://github.com/zarr-developers/zarr-python" + + +[tool.setuptools] +packages = ["zarr", "zarr._storage", "zarr.tests"] + +[tool.setuptools_scm] +version_scheme = "guess-next-dev" +local_scheme = "dirty-tag" +write_to = "zarr/version.py" + +[tool.mypy] +python_version = "3.8" +ignore_missing_imports = true +follow_imports = "silent" + +[tool.pytest.ini_options] +doctest_optionflags = [ + "NORMALIZE_WHITESPACE", + "ELLIPSIS", + "IGNORE_EXCEPTION_DETAIL", +] +addopts = [ + "--durations=10", +] +filterwarnings = [ + "error:::zarr.*", + "ignore:Not all N5 implementations support blosc compression.*:RuntimeWarning", + "ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning", + "ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning", +] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 966b5ad931..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,8 +0,0 @@ -[pytest] -doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS 
IGNORE_EXCEPTION_DETAIL -addopts = --durations=10 -filterwarnings = - error:::zarr.* - ignore:Not all N5 implementations support blosc compression.*:RuntimeWarning - ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning - ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning diff --git a/release.txt b/release.txt index 4d7f099019..ff114f6dba 100644 --- a/release.txt +++ b/release.txt @@ -3,5 +3,9 @@ tox echo $version git tag -a v$version -m v$version git push --tags -python setup.py register sdist +# Install `build` if not present with `python -m pip install build` or similar +# for building Zarr +python -m build +# Install `twine` if not present with `python -m pip install twine` or similar +# for publishing Zarr to PyPI twine upload dist/zarr-${version}.tar.gz diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 28e25ed827..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[codespell] -skip = ./.git -ignore-words-list = ba, ihs, kake, nd, noe, nwo, te diff --git a/setup.py b/setup.py deleted file mode 100644 index 9f005ecd39..0000000000 --- a/setup.py +++ /dev/null @@ -1,59 +0,0 @@ -from setuptools import setup - -DESCRIPTION = 'An implementation of chunked, compressed, ' \ - 'N-dimensional arrays for Python.' - -with open('README.md') as f: - LONG_DESCRIPTION = f.read() - -dependencies = [ - 'asciitree', - 'numpy>=1.7', - 'fasteners', - 'numcodecs>=0.10.0', -] - -setup( - name='zarr', - description=DESCRIPTION, - long_description=LONG_DESCRIPTION, - long_description_content_type='text/markdown', - use_scm_version={ - 'version_scheme': 'guess-next-dev', - 'local_scheme': 'dirty-tag', - 'write_to': 'zarr/version.py', - }, - setup_requires=[ - 'setuptools>=38.6.0', - 'setuptools-scm>1.5.4', - ], - extras_require={ - 'jupyter': [ - 'notebook', - 'ipytree>=0.2.2', - 'ipywidgets>=8.0.0', - ], - }, - python_requires='>=3.8, <4', - install_requires=dependencies, - package_dir={'': '.'}, - packages=['zarr', 'zarr._storage', 'zarr.tests'], - classifiers=[ - 'Development Status :: 6 - Mature', - 'Intended Audience :: Developers', - 'Intended Audience :: Information Technology', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Operating System :: Unix', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - ], - maintainer='Alistair Miles', - maintainer_email='alimanfoo@googlemail.com', - url='https://github.com/zarr-developers/zarr-python', - license='MIT', -) From e8f6fef0c737c167b16cd10accdcc3bbc6dea566 Mon Sep 17 00:00:00 2001 From: Emmanuel Bolarinwa Date: Mon, 17 Oct 2022 22:08:25 +0100 Subject: [PATCH 0207/1078] Msgpack docs (#1188) * bumped version to 1.1 * bumped version to 2.13.4.0 * versioning with rever * msgpack installation included in docs * changes restored * add msgpack dependency * msgpack removed * msgpack added * Update .gitignore Co-authored-by: jakirkham --- requirements_rtfd.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements_rtfd.txt b/requirements_rtfd.txt index 8452f7af77..0a7d90358e 100644 --- a/requirements_rtfd.txt +++ b/requirements_rtfd.txt @@ -7,3 +7,4 @@ sphinx-copybutton sphinx-rtd-theme numpydoc numpy!=1.21.0 +msgpack-python==0.5.6 \ No newline at end of file From 361ca7733bfdd3e9dfd0545ae19f109494808734 Mon Sep 17 00:00:00 2001 From: Dimitri 
Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 18 Oct 2022 21:42:12 +0200 Subject: [PATCH 0208/1078] Tell codespell to skip `fixture` / fix pre-commit (#1197) * Tell codespell to skip `fixture` This will avoid this kind of warnings: WARNING: Decoding file using encoding=utf-8 failed: fixture/8/0/11 WARNING: Trying next encoding iso-8859-1 Seen in: https://results.pre-commit.ci/run/github/48049137/1666036569.rj425j-vQKyvNbJ0CUHdcQ * pre-commit: Remove the hard requirement for Python 3.9 --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d62b10aa40..67f61ea8be 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ ci: autofix_commit_msg: "style: pre-commit fixes" default_stages: [commit, push] default_language_version: - python: python3.9 + python: python3 repos: - repo: https://github.com/PyCQA/flake8 rev: 3.8.2 @@ -18,7 +18,7 @@ repos: rev: v2.1.0 hooks: - id: codespell - args: ["-L", "ba,ihs,kake,nd,noe,nwo,te"] + args: ["-L", "ba,ihs,kake,nd,noe,nwo,te", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.3.0 hooks: From d6ff3bb6e05bbd6cfd4882d022c74cd775086bc1 Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Wed, 19 Oct 2022 02:24:48 +0530 Subject: [PATCH 0209/1078] Require `setuptools` 64.0.0+ Co-authored-by: jakirkham --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8da5e6b994..cc8469f7da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=40.8.0", "setuptools-scm", "wheel"] +requires = ["setuptools>=64.0.0", "setuptools-scm", "wheel"] build-backend = "setuptools.build_meta" From 8c7089246bc8b9202436a8b89ee982d485f4255e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Oct 2022 14:42:34 -0700 Subject: [PATCH 0210/1078] Bump pymongo from 4.2.0 to 4.3.2 (#1200) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.2.0 to 4.3.2. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.2.0...4.3.2) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 58c088c2a1..cad157ada0 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.14.0 # pyup: ignore redis==4.3.4 types-redis types-setuptools -pymongo==4.2.0 +pymongo==4.3.2 # optional test requirements tox==3.26.0 coverage From 2d1a7dd385a001a39e61ce4577b838dadb5e5a96 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Oct 2022 22:03:10 +0000 Subject: [PATCH 0211/1078] Bump actions/setup-python from 4.2.0 to 4.3.0 (#1194) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.2.0 to 4.3.0. 
- [Release notes](https://github.com/actions/setup-python/releases)
- [Commits](https://github.com/actions/setup-python/compare/v4.2.0...v4.3.0)

---
updated-dependencies:
- dependency-name: actions/setup-python
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/releases.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml
index ff004b7229..f008df029c 100644
--- a/.github/workflows/releases.yml
+++ b/.github/workflows/releases.yml
@@ -16,7 +16,7 @@ jobs:
           submodules: true
           fetch-depth: 0
 
-      - uses: actions/setup-python@v4.2.0
+      - uses: actions/setup-python@v4.3.0
         name: Install Python
         with:
           python-version: '3.8'

From da7ba0c70be90142dd8adb8fab3c781bea00e3f4 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 19 Oct 2022 01:42:35 +0200
Subject: [PATCH 0212/1078] Remove LGTM.com configuration file (#1191)

LGTM.com is being deprecated and replaced by GitHub code analysis:
https://github.blog/2022-08-15-the-next-step-for-lgtm-com-github-code-scanning/

Co-authored-by: jakirkham
---
 .lgtm.yml | 7 -------
 1 file changed, 7 deletions(-)
 delete mode 100644 .lgtm.yml

diff --git a/.lgtm.yml b/.lgtm.yml
deleted file mode 100644
index 35a0c32ef2..0000000000
--- a/.lgtm.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-# Config for LGTM.com static code analysis
-# https://lgtm.com/projects/g/zarr-developers/zarr-python
-
-extraction:
-  python:
-    python_setup:
-      version: 3

From c8cd89dbc3730c6108fe4d15f3206dea7dc86d21 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 19 Oct 2022 02:02:04 +0200
Subject: [PATCH 0213/1078] Bump NumPy required version to 1.20 (#1192)

We use the named parameter `shape` of `numpy.empty_like`:
https://github.com/zarr-developers/zarr-python/blob/4362bb4/zarr/hierarchy.py#L145
https://github.com/zarr-developers/zarr-python/blob/4362bb4/zarr/core.py#L2134-L2135

It had been introduced in NumPy 1.17.0:
https://numpy.org/doc/stable/reference/generated/numpy.empty_like.html

According to NEP 29, NumPy 1.20+ is currently supported:
https://numpy.org/neps/nep-0029-deprecation_policy.html

Co-authored-by: jakirkham
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index cc8469f7da..119abd782d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,7 @@ maintainers = [
 requires-python = ">=3.8"
 dependencies = [
     'asciitree',
-    'numpy>=1.7',
+    'numpy>=1.20',
     'fasteners',
     'numcodecs>=0.10.0',
 ]

From 9082fe2885725c8deb9c5a9c9abf49afd8e38fe5 Mon Sep 17 00:00:00 2001
From: jakirkham
Date: Tue, 18 Oct 2022 17:55:04 -0700
Subject: [PATCH 0214/1078] Use NumPy 1.20+ in `environment.yml` (#1201)

---
 environment.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/environment.yml b/environment.yml
index f601026d47..dc99507427 100644
--- a/environment.yml
+++ b/environment.yml
@@ -4,7 +4,7 @@ channels:
 dependencies:
   - wheel
   - numcodecs >= 0.6.4
-  - numpy >= 1.7
+  - numpy >= 1.20
   - pip
   - pip:
     - asciitree

From 5e5acc0ce047363406aa90bed11e0171f739787c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 19 Oct 2022 00:07:29 -0700
Subject: [PATCH 0215/1078]
Bump azure-storage-blob from 12.14.0 to 12.14.1 (#1202) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.14.0 to 12.14.1. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.14.0...azure-storage-blob_12.14.1) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index cad157ada0..a11db10368 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipywidgets==8.0.2 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.14.0 # pyup: ignore +azure-storage-blob==12.14.1 # pyup: ignore redis==4.3.4 types-redis types-setuptools From daad2fa86a32c3ed10871f6ba191ed3356b29e91 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 19 Oct 2022 08:20:01 +0000 Subject: [PATCH 0216/1078] chore: update pre-commit hooks (#1198) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 3.8.2 → 5.0.4](https://github.com/PyCQA/flake8/compare/3.8.2...5.0.4) - [github.com/codespell-project/codespell: v2.1.0 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.1.0...v2.2.2) - [github.com/pre-commit/pre-commit-hooks: v2.3.0 → v4.3.0](https://github.com/pre-commit/pre-commit-hooks/compare/v2.3.0...v4.3.0) - [github.com/pre-commit/mirrors-mypy: v0.981 → v0.982](https://github.com/pre-commit/mirrors-mypy/compare/v0.981...v0.982) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: jakirkham --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 67f61ea8be..fd1619eefa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/PyCQA/flake8 - rev: 3.8.2 + rev: 5.0.4 hooks: - id: flake8 args: [ @@ -15,16 +15,16 @@ repos: exclude: ^(venv/|docs/) types: ['python'] - repo: https://github.com/codespell-project/codespell - rev: v2.1.0 + rev: v2.2.2 hooks: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.3.0 + rev: v4.3.0 hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.981 + rev: v0.982 hooks: - id: mypy files: zarr From aec6973f731626fea31fa69192290c5d08b1ec16 Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Thu, 20 Oct 2022 00:22:53 +0530 Subject: [PATCH 0217/1078] Pin action versions (pypi-publish, setup-miniconda) for dependabot (#1205) --- .github/workflows/minimal.yml | 2 +- .github/workflows/releases.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 
a256a53b60..3eb414059c 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@master + uses: conda-incubator/setup-miniconda@v2 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index f008df029c..1bcf79ef5f 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@master + - uses: pypa/gh-action-pypi-publish@v1 with: user: __token__ password: ${{ secrets.pypi_password }} From 6a91ab14e15709e66363093d071c22b0eb10c62a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 19 Oct 2022 21:16:27 +0200 Subject: [PATCH 0218/1078] Flake8 (#1203) * Fix Flake8 alert * Use `author` variable instead of duplicating Co-authored-by: jakirkham --- docs/conf.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 21e2e89880..2639f765ee 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -247,7 +247,7 @@ def setup(app): # author, documentclass [howto, manual, or own class]). latex_documents = [ (main_doc, 'zarr.tex', 'zarr Documentation', - 'Zarr Developers', 'manual'), + author, 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -311,8 +311,10 @@ def setup(app): # Example configuration for intersphinx: refer to the Python standard library. # use in refs e.g: # :ref:`comparison manual ` -intersphinx_mapping = { 'python':('https://docs.python.org/', None), - 'numpy': ('https://numpy.org/doc/stable/', None)} +intersphinx_mapping = { + 'python': ('https://docs.python.org/', None), + 'numpy': ('https://numpy.org/doc/stable/', None), +} # sphinx-copybutton configuration From fcd130628ba41e43dfb73d52aaa2318339dfd887 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Oct 2022 22:10:55 -0700 Subject: [PATCH 0219/1078] Update moto to 4.0.8+ & drop werkzeug pin (#1099) --- requirements_dev_optional.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a11db10368..5baf06c782 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -22,5 +22,4 @@ pytest-timeout==2.1.0 h5py==3.7.0 fsspec==2022.8.2 s3fs==2022.8.2 -moto[server]>=1.3.14 -werkzeug<2.2.0 +moto[server]>=4.0.8 From f361631384fc8e4ce7a76528eac7e89bd8f58cb6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Oct 2022 19:22:55 +0000 Subject: [PATCH 0220/1078] Bump fsspec from 2022.8.2 to 2022.10.0 (#1207) * Bump s3fs from 2022.8.2 to 2022.10.0 Bumps [s3fs](https://github.com/fsspec/s3fs) from 2022.8.2 to 2022.10.0. - [Release notes](https://github.com/fsspec/s3fs/releases) - [Changelog](https://github.com/fsspec/s3fs/blob/main/release-procedure.md) - [Commits](https://github.com/fsspec/s3fs/compare/2022.8.2...2022.10.0) --- updated-dependencies: - dependency-name: s3fs dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump fsspec from 2022.8.2 to 2022.10.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.8.2 to 2022.10.0. 
- [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.8.2...2022.10.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jakirkham --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 5baf06c782..0efd67b97f 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -20,6 +20,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.7.0 -fsspec==2022.8.2 -s3fs==2022.8.2 +fsspec==2022.10.0 +s3fs==2022.10.0 moto[server]>=4.0.8 From e4749478674fe800d70acb77d2ce4154b139cbe0 Mon Sep 17 00:00:00 2001 From: AWA BRANDON AWA <51425873+DON-BRAN@users.noreply.github.com> Date: Sun, 23 Oct 2022 23:10:26 -0700 Subject: [PATCH 0221/1078] Turned on isolated_build in tox.ini file (#1210) --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 12ad6bc8ad..8d2a15276f 100644 --- a/tox.ini +++ b/tox.ini @@ -5,6 +5,7 @@ [tox] envlist = py38-npy{120,latest}, py39, py310, docs +isolated_build = True [testenv] install_command = pip install --no-binary=numcodecs {opts} {packages} From 13b77379944676cf9fafa4ed527928c3431dea4a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 30 Oct 2022 21:06:55 +0100 Subject: [PATCH 0222/1078] Redundant wheel in pyproject.toml (#1233) The backend adds the wheel dependency automatically: The setuptools package implements the build_sdist command and the wheel package implements the build_wheel command; the latter is a dependency of the former exposed via PEP 517 hooks. Listing it explicitly in the documentation was a historical mistake and has been fixed since in pypa/setuptools@f7d30a9. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 119abd782d..7cd7b5a025 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64.0.0", "setuptools-scm", "wheel"] +requires = ["setuptools>=64.0.0", "setuptools-scm"] build-backend = "setuptools.build_meta" From 25a003e4e3c3568c6ffcc0f32519d455a3663116 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 31 Oct 2022 14:25:53 +0100 Subject: [PATCH 0223/1078] Simplify if/else statement (#1227) --- docs/release.rst | 3 +++ zarr/convenience.py | 9 ++++----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index ea746999e1..5ff8f74f29 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -17,6 +17,9 @@ Unreleased Maintenance ~~~~~~~~~~~ +* Simplify if/else statement. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1227`. + * Migrate to ``pyproject.toml`` and remove redundant infrastructure. By :user:`Saransh Chopra ` :issue:`1158`. 
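
The zarr/convenience.py hunk that follows flattens a nested type check into a single if/elif/else chain, so each accepted log type gets exactly one branch and the error case comes last. The shape of the result, as a sketch under an assumed function name rather than the real log-writer class::

    import io

    def open_log(log):
        # Sketch: returns (sink, needs_closing) following the simplified
        # branch order -- one branch per accepted type, error case last.
        if callable(log):
            return log, False
        elif isinstance(log, str):
            return io.open(log, mode='w'), True
        elif hasattr(log, 'write'):
            return log, False
        else:
            raise TypeError('log must be a callable function, file path or '
                            'file-like object, found %r' % log)
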
diff --git a/zarr/convenience.py b/zarr/convenience.py index 745e1369f5..9a0eae20a3 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -487,12 +487,11 @@ def __init__(self, log): elif isinstance(log, str): self.log_file = io.open(log, mode='w') self.needs_closing = True - else: - if not hasattr(log, 'write'): - raise TypeError('log must be a callable function, file path or ' - 'file-like object, found %r' % log) + elif hasattr(log, 'write'): self.log_file = log - self.needs_closing = False + else: + raise TypeError('log must be a callable function, file path or ' + 'file-like object, found %r' % log) def __enter__(self): return self From 1c6a36c4661c7ddf90f996a5525b8d2d5e975ea8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Oct 2022 20:16:57 +0100 Subject: [PATCH 0224/1078] Bump pytest from 7.1.3 to 7.2.0 (#1221) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.3 to 7.2.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.1.3...7.2.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index e3809cebe9..b76b398a16 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.10.2 msgpack-python==0.5.6 setuptools-scm==7.0.5 # test requirements -pytest==7.1.3 +pytest==7.2.0 From f2b558534f4029c45eb60cdc924c6c57da1d5557 Mon Sep 17 00:00:00 2001 From: Stephanie_nkwatoh <43364084+steph237@users.noreply.github.com> Date: Mon, 31 Oct 2022 21:47:13 +0100 Subject: [PATCH 0225/1078] update: added the link to main website on the sidebar (#1216) * update: added the link to main website on the sidebar * update: changed from official website to View homepage Co-authored-by: jakirkham --- docs/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/index.rst b/docs/index.rst index 5e68dcab31..dd6abc1862 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -130,6 +130,7 @@ Contents contributing release license + View homepage Indices and tables ------------------ From 519f7efb28f57cfed51fbe76f132059d7b30211f Mon Sep 17 00:00:00 2001 From: Weddy Gikunda <110189834+caviere@users.noreply.github.com> Date: Tue, 1 Nov 2022 15:32:31 +0300 Subject: [PATCH 0226/1078] Fix linting errors (#1226) * Fix variable possible unbound error * Avoid extra version check This avoids having to check the version twice: once in the "in [2,3]" and also in the if/elif statement. It also fixes linting error of normalize_store being possibly unbound. 
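
A generic before/after of that rewrite (a sketch, not a hunk from this patch)::

    def keys_before(store):
        for key in store.keylist():
            yield key

    def keys_after(store):
        # Same iteration, and `yield from` also delegates send()/throw()/close()
        # correctly when the inner iterable is itself a generator.
        yield from store.keylist()

This is the pattern applied uniformly across zarr/errors.py, zarr/hierarchy.py and zarr/storage.py below.
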
* Add Optional where necessary to indicate that value could be None --- zarr/creation.py | 3 ++- zarr/hierarchy.py | 2 ++ zarr/storage.py | 8 ++++---- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/zarr/creation.py b/zarr/creation.py index 3414a0158a..00d2c40030 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -1,3 +1,4 @@ +from typing import Optional from warnings import warn import numpy as np @@ -17,7 +18,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', - fill_value=0, order='C', store=None, synchronizer=None, + fill_value: Optional[int] = 0, order='C', store=None, synchronizer=None, overwrite=False, path=None, chunk_store=None, filters=None, cache_metadata=True, cache_attrs=True, read_only=False, object_codec=None, dimension_separator=None, write_empty_chunks=True, diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 8131cb71aa..8faeaf259a 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -157,6 +157,7 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, raise ContainsArrayError(path) # initialize metadata + mkey = None try: mkey = _prefix_to_group_key(self._store, self._key_prefix) assert not mkey.endswith("root/.group") @@ -1277,6 +1278,7 @@ def group(store=None, overwrite=False, chunk_store=None, path = normalize_storage_path(path) + requires_init = None if zarr_version == 2: requires_init = overwrite or not contains_group(store) elif zarr_version == 3: diff --git a/zarr/storage.py b/zarr/storage.py index f5459990ba..6aa8cf7f92 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -161,13 +161,13 @@ def normalize_store_arg(store: Any, storage_options=None, mode="r", *, if zarr_version is None: # default to v2 store for backward compatibility zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) - elif zarr_version not in [2, 3]: - raise ValueError("zarr_version must be either 2 or 3") if zarr_version == 2: normalize_store = _normalize_store_arg_v2 elif zarr_version == 3: from zarr._storage.v3 import _normalize_store_arg_v3 normalize_store = _normalize_store_arg_v3 + else: + raise ValueError("zarr_version must be either 2 or 3") return normalize_store(store, storage_options, mode) @@ -597,7 +597,7 @@ def init_group( store: StoreLike, overwrite: bool = False, path: Path = None, - chunk_store: StoreLike = None, + chunk_store: Optional[StoreLike] = None, ): """Initialize a group store. Note that this is a low-level function and there should be no need to call this directly from user code. @@ -644,7 +644,7 @@ def _init_group_metadata( store: StoreLike, overwrite: Optional[bool] = False, path: Optional[str] = None, - chunk_store: StoreLike = None, + chunk_store: Optional[StoreLike] = None, ): store_version = getattr(store, '_store_version', 2) From 8167f3ffca56695c6bded5acae1f3c656a8e506f Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 1 Nov 2022 16:45:27 +0100 Subject: [PATCH 0227/1078] Apply some pyupgrade suggestions (#1225) Yield from an iterable instead of iterating to yield items. 
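As a minimal standalone sketch of that change (the tree-walking function here is illustrative, not code from zarr):

    def walk(tree):
        # tree is a mapping of name -> sub-mapping
        for name, child in tree.items():
            yield name
            # before: for item in walk(child): yield item
            yield from walk(child)

    assert list(walk({"a": {"b": {}}, "c": {}})) == ["a", "b", "c"]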
IOError is kept for compatibility with previous versions; starting from Python 3.3, it is an alias of OSError: https://docs.python.org/3/library/exceptions.html#IOError --- zarr/errors.py | 2 -- zarr/hierarchy.py | 9 +++------ zarr/storage.py | 23 ++++++++--------------- 3 files changed, 11 insertions(+), 23 deletions(-) diff --git a/zarr/errors.py b/zarr/errors.py index 85c28ea8b6..808cbe99a4 100644 --- a/zarr/errors.py +++ b/zarr/errors.py @@ -1,5 +1,3 @@ - - class MetadataError(Exception): pass diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 8faeaf259a..fd5c446cb2 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -634,8 +634,7 @@ def _array_iter(self, keys_only, method, recurse): yield _key if keys_only else (_key, self[key]) elif recurse and contains_group(self._store, path): group = self[key] - for i in getattr(group, method)(recurse=recurse): - yield i + yield from getattr(group, method)(recurse=recurse) else: dir_name = meta_root + self._path array_sfx = '.array' + self._metadata_key_suffix @@ -652,8 +651,7 @@ def _array_iter(self, keys_only, method, recurse): yield _key if keys_only else (_key, self[key]) elif recurse and contains_group(self._store, path): group = self[key] - for i in getattr(group, method)(recurse=recurse): - yield i + yield from getattr(group, method)(recurse=recurse) def visitvalues(self, func): """Run ``func`` on each object. @@ -687,8 +685,7 @@ def _visit(obj): yield obj keys = sorted(getattr(obj, "keys", lambda: [])()) for k in keys: - for v in _visit(obj[k]): - yield v + yield from _visit(obj[k]) for each_obj in islice(_visit(self), 1, None): value = func(each_obj) diff --git a/zarr/storage.py b/zarr/storage.py index 6aa8cf7f92..4acf637330 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -702,8 +702,7 @@ def _dict_store_keys(d: Dict, prefix="", cls=dict): for k in d.keys(): v = d[k] if isinstance(v, cls): - for sk in _dict_store_keys(v, prefix + k + '/', cls): - yield sk + yield from _dict_store_keys(v, prefix + k + '/', cls) else: yield prefix + k @@ -863,8 +862,7 @@ def __eq__(self, other): ) def keys(self): - for k in _dict_store_keys(self.root, cls=self.cls): - yield k + yield from _dict_store_keys(self.root, cls=self.cls) def __iter__(self): return self.keys() @@ -1462,7 +1460,7 @@ def listdir(self, path=None): return sorted(new_children) else: return children - except IOError: + except OSError: return [] def rmdir(self, path=None): @@ -1794,8 +1792,7 @@ def keylist(self): return sorted(self.zf.namelist()) def keys(self): - for key in self.keylist(): - yield key + yield from self.keylist() def __iter__(self): return self.keys() @@ -2270,8 +2267,7 @@ def keys(self): def values(self): with self.db.begin(buffers=self.buffers) as txn: with txn.cursor() as cursor: - for v in cursor.iternext(keys=False, values=True): - yield v + yield from cursor.iternext(keys=False, values=True) def __iter__(self): return self.keys() @@ -2581,8 +2577,7 @@ def __contains__(self, key): def items(self): kvs = self.cursor.execute('SELECT k, v FROM zarr') - for k, v in kvs: - yield k, v + yield from kvs def keys(self): ks = self.cursor.execute('SELECT k FROM zarr') @@ -2796,12 +2791,10 @@ def keylist(self): for key in self.client.keys(self._key('*'))] def keys(self): - for key in self.keylist(): - yield key + yield from self.keylist() def __iter__(self): - for key in self.keys(): - yield key + yield from self.keys() def __len__(self): return len(self.keylist()) From 5b9f2ef2ef72f261907f922970885db15ba9372a Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: 
Tue, 1 Nov 2022 21:57:03 +0530 Subject: [PATCH 0228/1078] Remove `tox` support (#1219) * Remove tox support * Remove instances of tox --- .gitignore | 1 - docs/contributing.rst | 14 ++++++---- release.txt | 1 - requirements_dev_optional.txt | 1 - tox.ini | 51 ----------------------------------- 5 files changed, 9 insertions(+), 59 deletions(-) delete mode 100644 tox.ini diff --git a/.gitignore b/.gitignore index 535cf2b169..28e5544286 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ -.tox/ .coverage .coverage.* .cache diff --git a/docs/contributing.rst b/docs/contributing.rst index 64e61abacb..d9e222b643 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -212,9 +212,12 @@ Test coverage Zarr maintains 100% test coverage under the latest Python stable release (currently Python 3.8). Both unit tests and docstring doctests are included when computing -coverage. Running ``tox -e py38`` will automatically run the test suite with coverage -and produce a coverage report. This should be 100% before code can be accepted into the -main code base. +coverage. Running:: + + $ python -m pytest -v --cov=zarr --cov-config=.coveragerc zarr + +will automatically run the test suite with coverage and produce a coverage report. +This should be 100% before code can be accepted into the main code base. When submitting a pull request, coverage will also be collected across all supported Python versions via the Codecov service, and will be reported back within the pull @@ -243,9 +246,10 @@ notes (``docs/release.rst``). The documentation can be built locally by running:: - $ tox -e docs + $ cd docs + $ make clean; make html -The resulting built documentation will be available in the ``.tox/docs/tmp/html`` folder. +The resulting built documentation will be available in the ``docs/_build/html`` folder. Development best practices, policies and procedures --------------------------------------------------- diff --git a/release.txt b/release.txt index ff114f6dba..d1cefef47c 100644 --- a/release.txt +++ b/release.txt @@ -1,4 +1,3 @@ -tox # version=x.x.x echo $version git tag -a v$version -m v$version diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0efd67b97f..fa57d8270c 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -13,7 +13,6 @@ types-redis types-setuptools pymongo==4.3.2 # optional test requirements -tox==3.26.0 coverage flake8==5.0.4 pytest-cov==4.0.0 diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 8d2a15276f..0000000000 --- a/tox.ini +++ /dev/null @@ -1,51 +0,0 @@ -# Tox (https://tox.wiki/) is a tool for running tests -# in multiple virtualenvs. This configuration file will run the -# test suite on all supported python versions. To use it, "pip install tox" -# and then run "tox" from this directory. 
- -[tox] -envlist = py38-npy{120,latest}, py39, py310, docs -isolated_build = True - -[testenv] -install_command = pip install --no-binary=numcodecs {opts} {packages} -setenv = - PYTHONHASHSEED = 42 - PYTEST_TIMEOUT = {env:PYTEST_TIMEOUT:300} -passenv = - ZARR_TEST_ABS - ZARR_TEST_MONGO - ZARR_TEST_REDIS -commands = - # clear out any data files generated during tests - python -c 'import glob; import shutil; import os; [(shutil.rmtree(d) if os.path.isdir(d) else os.remove(d) if os.path.isfile(d) else None) for d in glob.glob("./example*")]' - # main unit test runner - py{39,310}: pytest -v --cov=zarr --cov-config=.coveragerc zarr - # don't collect coverage when running older numpy versions - py38-npy120: pytest -v zarr - # collect coverage and run doctests under py37 - py38-npylatest: pytest -v --cov=zarr --cov-config=.coveragerc --doctest-plus zarr --remote-data - # generate a coverage report - py38-npylatest,py38,py39,p310: coverage report -m - # run doctests in the tutorial and spec - py{38,39,310}: python -m doctest -o NORMALIZE_WHITESPACE -o ELLIPSIS docs/tutorial.rst docs/spec/v2.rst - # pep8 checks - py{38,39, 310}: flake8 zarr - # print environment for debugging - pip freeze -deps = - py38-npy120: numpy==1.20.* - py38-npylatest,py38: -rrequirements_dev_numpy.txt - -rrequirements_dev_minimal.txt - -rrequirements_dev_optional.txt - -[testenv:docs] -basepython = python3.8 -changedir = docs -deps = - -rrequirements_rtfd.txt -commands = - sphinx-build -W -b html -d {envtmpdir}/doctrees . {envtmpdir}/html - -[flake8] -max-line-length = 100 From 8e6fe1153b626e431db11990199c4c74519cc071 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 2 Nov 2022 04:35:29 -0700 Subject: [PATCH 0229/1078] fastpath _array_iter for array_keys (#1149) Co-authored-by: Josh Moore --- zarr/hierarchy.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index fd5c446cb2..12ca34e7e9 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -638,17 +638,19 @@ def _array_iter(self, keys_only, method, recurse): else: dir_name = meta_root + self._path array_sfx = '.array' + self._metadata_key_suffix + group_sfx = '.group' + self._metadata_key_suffix + for key in sorted(listdir(self._store, dir_name)): if key.endswith(array_sfx): key = key[:-len(array_sfx)] + _key = key.rstrip("/") + yield _key if keys_only else (_key, self[key]) + path = self._key_prefix + key assert not path.startswith("meta/") - if key.endswith('.group' + self._metadata_key_suffix): + if key.endswith(group_sfx): # skip group metadata keys continue - if contains_array(self._store, path): - _key = key.rstrip("/") - yield _key if keys_only else (_key, self[key]) elif recurse and contains_group(self._store, path): group = self[key] yield from getattr(group, method)(recurse=recurse) From 88eb8ca9056f17ae50a1aa451398b82f1f060aa1 Mon Sep 17 00:00:00 2001 From: Weddy Gikunda <110189834+caviere@users.noreply.github.com> Date: Thu, 3 Nov 2022 19:48:54 +0300 Subject: [PATCH 0230/1078] Add documentation for find/findall using visit (#1241) * Add documentation for find/findall using visit * Remove whitespace * Fix print result * Fix indentation issue in the docstring * Indent literal block --- zarr/hierarchy.py | 70 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 12ca34e7e9..f2188217a8 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -720,6 +720,76 @@ def visit(self, func): baz quux + Searching for
members matching some name query can be implemented using + ``visit``, that is, ``find`` and ``findall``. Consider the following + tree:: + + / + ├── aaa + │ └── bbb + │ └── ccc + │ └── aaa + ├── bar + └── foo + + It is created as follows: + + >>> root = zarr.group() + >>> foo = root.create_group("foo") + >>> bar = root.create_group("bar") + >>> root.create_group("aaa").create_group("bbb").create_group("ccc").create_group("aaa") + <zarr.hierarchy.Group '/aaa/bbb/ccc/aaa'> + + For ``find``, the first path that matches a given pattern (for example + "aaa") is returned. Note that a non-None value is returned in the visit + function to stop further iteration. + + >>> import re + >>> pattern = re.compile("aaa") + >>> found = None + >>> def find(path): + ... global found + ... if pattern.search(path) is not None: + ... found = path + ... return True + ... + >>> root.visit(find) + True + >>> print(found) + aaa + + For ``findall``, all the results are gathered into a list + + >>> pattern = re.compile("aaa") + >>> found = [] + >>> def findall(path): + ... if pattern.search(path) is not None: + ... found.append(path) + ... + >>> root.visit(findall) + >>> print(found) + ['aaa', 'aaa/bbb', 'aaa/bbb/ccc', 'aaa/bbb/ccc/aaa'] + + To match only on the last part of the path, use a greedy regex to filter + out the prefix: + + >>> prefix_pattern = re.compile(r".*/") + >>> pattern = re.compile("aaa") + >>> found = [] + >>> def findall(path): + ... match = prefix_pattern.match(path) + ... if match is None: + ... name = path + ... else: + ... _, end = match.span() + ... name = path[end:] + ... if pattern.search(name) is not None: + ... found.append(path) + ... return None + ... + >>> root.visit(findall) + >>> print(found) + ['aaa', 'aaa/bbb/ccc/aaa'] """ base_len = len(self.name) From 3d2cd61a6329b05d872694e44447619f4707283b Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 3 Nov 2022 14:13:11 -0700 Subject: [PATCH 0231/1078] Add `license_files` to `pyproject.toml` (#1247) --- LICENSE => LICENSE.txt | 0 README.md | 2 +- docs/license.rst | 2 +- pyproject.toml | 2 ++ 4 files changed, 4 insertions(+), 2 deletions(-) rename LICENSE => LICENSE.txt (100%) diff --git a/LICENSE b/LICENSE.txt similarity index 100% rename from LICENSE rename to LICENSE.txt diff --git a/README.md b/README.md index 3576d0c30e..b035ffa597 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ License - + license diff --git a/docs/license.rst b/docs/license.rst index d47e1b2b34..8f93aa7d66 100644 --- a/docs/license.rst +++ b/docs/license.rst @@ -1,4 +1,4 @@ License ======= -.. include:: ../LICENSE \ No newline at end of file +..
include:: ../LICENSE.txt diff --git a/pyproject.toml b/pyproject.toml index 7cd7b5a025..7ef173879e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ classifiers = [ 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', ] +license = { text = "MIT" } [project.optional-dependencies] jupyter = [ @@ -52,6 +53,7 @@ Homepage = "https://github.com/zarr-developers/zarr-python" [tool.setuptools] packages = ["zarr", "zarr._storage", "zarr.tests"] +license-files = ["LICENSE.txt"] [tool.setuptools_scm] version_scheme = "guess-next-dev" From aa84fcbb896e8c269cbc9e2c1a6f3fa419450953 Mon Sep 17 00:00:00 2001 From: AWA BRANDON AWA <51425873+DON-BRAN@users.noreply.github.com> Date: Thu, 3 Nov 2022 22:58:50 +0100 Subject: [PATCH 0232/1078] updated docs/contributing.rst (#1243) Co-authored-by: jakirkham --- docs/contributing.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index d9e222b643..5bfd5878b2 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -92,7 +92,7 @@ the repository, you can do something like the following:: $ mkdir -p ~/pyenv/zarr-dev $ python -m venv ~/pyenv/zarr-dev $ source ~/pyenv/zarr-dev/bin/activate - $ pip install -r requirements_dev_minimal.txt -r requirements_dev_numpy.txt + $ pip install -r requirements_dev_minimal.txt -r requirements_dev_numpy.txt -r requirements_rtfd.txt $ pip install -e . To verify that your development environment is working, you can run the unit tests:: @@ -248,6 +248,7 @@ The documentation can be built locally by running:: $ cd docs $ make clean; make html + $ open _build/html/index.html The resulting built documentation will be available in the ``docs/_build/html`` folder. From bbc66df11ef0c731ebece330c1e60c38f1f36aed Mon Sep 17 00:00:00 2001 From: jakirkham Date: Fri, 4 Nov 2022 08:55:21 -0800 Subject: [PATCH 0233/1078] Delete unused files (#1251) * Delete build.cmd This file was leftover from before numcodecs was split from zarr-python. It was used to aid in compiling the Cython extensions on Windows. However, zarr-python is pure Python. So there is no need to keep this file. It also doesn't appear to be used here. So drop it. * Update release.rst * Delete unused `.gitmodules` as well This was for pointing to Blosc. Again not needed after the zarr-python/numcodecs split. * Drop `release.txt` This is unused. Releases are handled through GitHub Actions. One should look there if attempting to reproduce outside of that infrastructure.
* Drop unused `windows_conda_dev.txt` * Generalize release entry --- .gitmodules | 0 build.cmd | 45 ------------------------------------------- docs/release.rst | 3 +++ release.txt | 10 ---------- windows_conda_dev.txt | 9 --------- 5 files changed, 3 insertions(+), 64 deletions(-) delete mode 100644 .gitmodules delete mode 100644 build.cmd delete mode 100644 release.txt delete mode 100644 windows_conda_dev.txt diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/build.cmd b/build.cmd deleted file mode 100644 index 4e402d5e21..0000000000 --- a/build.cmd +++ /dev/null @@ -1,45 +0,0 @@ -:: To build extensions for 64 bit Python 3, we need to configure environment -:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: -:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) -:: -:: To build extensions for 64 bit Python 2, we need to configure environment -:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: -:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) -:: -:: 32 bit builds do not require specific environment configurations. -:: -:: Note: this script needs to be run with the /E:ON and /V:ON flags for the -:: cmd interpreter, at least for (SDK v7.0) -:: -:: More details at: -:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows -:: https://stackoverflow.com/a/13751649/163740 -:: -:: Author: Olivier Grisel -:: License: CC0 1.0 Universal: https://creativecommons.org/publicdomain/zero/1.0/ -@ECHO OFF - -SET COMMAND_TO_RUN=%* -SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows - -SET MAJOR_PYTHON_VERSION="%PYTHON_VERSION:~0,1%" -IF %MAJOR_PYTHON_VERSION% == "3" ( - SET WINDOWS_SDK_VERSION="v7.1" -) ELSE ( - ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" - EXIT 1 -) - -IF "%DISTUTILS_USE_SDK%"=="1" ( - ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture - SET DISTUTILS_USE_SDK=1 - SET MSSdk=1 - "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% - "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release - ECHO Executing: %COMMAND_TO_RUN% - call %COMMAND_TO_RUN% || EXIT 1 -) ELSE ( - ECHO Using default MSVC build environment - ECHO Executing: %COMMAND_TO_RUN% - call %COMMAND_TO_RUN% || EXIT 1 -) diff --git a/docs/release.rst b/docs/release.rst index 5ff8f74f29..647f722f69 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -20,6 +20,9 @@ Maintenance * Simplify if/else statement. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1227`. +* Delete unused files. + By :user:`John Kirkham ` :issue:`1251`. + * Migrate to ``pyproject.toml`` and remove redundant infrastructure. By :user:`Saransh Chopra ` :issue:`1158`. 
diff --git a/release.txt b/release.txt deleted file mode 100644 index d1cefef47c..0000000000 --- a/release.txt +++ /dev/null @@ -1,10 +0,0 @@ -# version=x.x.x -echo $version -git tag -a v$version -m v$version -git push --tags -# Install `build` if not present with `python -m pip install build` or similar -# for building Zarr -python -m build -# Install `twine` if not present with `python -m pip install twine` or similar -# for publishing Zarr to PyPI -twine upload dist/zarr-${version}.tar.gz diff --git a/windows_conda_dev.txt b/windows_conda_dev.txt deleted file mode 100644 index 576674827d..0000000000 --- a/windows_conda_dev.txt +++ /dev/null @@ -1,9 +0,0 @@ -coverage -fasteners -flake8 -monotonic -msgpack-python -numcodecs -numpy -setuptools_scm -twine From 9434acfa04295d35ae2a328654f297a700a67169 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Sat, 5 Nov 2022 19:03:13 -0700 Subject: [PATCH 0234/1078] Consolidate `.coveragerc` into `pyproject.toml` (#1250) --- .coveragerc | 9 --------- .github/workflows/python-package.yml | 2 +- docs/contributing.rst | 2 +- docs/release.rst | 3 +++ pyproject.toml | 11 +++++++++++ 5 files changed, 16 insertions(+), 11 deletions(-) delete mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 728a27d322..0000000000 --- a/.coveragerc +++ /dev/null @@ -1,9 +0,0 @@ -[run] -omit = - zarr/meta_v1.py - bench/compress_normal.py - -[report] -exclude_lines = - pragma: no cover - pragma: ${PY_MAJOR_VERSION} no cover diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 9abb7c7866..bb04269f07 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -74,7 +74,7 @@ jobs: conda activate zarr-env mkdir ~/blob_emulator azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log & - pytest --cov=zarr --cov-config=.coveragerc --doctest-plus --cov-report xml --cov=./ --timeout=300 + pytest --cov=zarr --cov-config=pyproject.toml --doctest-plus --cov-report xml --cov=./ --timeout=300 - uses: codecov/codecov-action@v3 with: #token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos diff --git a/docs/contributing.rst b/docs/contributing.rst index 5bfd5878b2..dc6beb0094 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -214,7 +214,7 @@ Zarr maintains 100% test coverage under the latest Python stable release (curren Python 3.8). Both unit tests and docstring doctests are included when computing coverage. Running:: - $ python -m pytest -v --cov=zarr --cov-config=.coveragerc zarr + $ python -m pytest -v --cov=zarr --cov-config=pyproject.toml zarr will automatically run the test suite with coverage and produce a coverage report. This should be 100% before code can be accepted into the main code base. diff --git a/docs/release.rst b/docs/release.rst index 647f722f69..371dbf01a8 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -26,6 +26,9 @@ Maintenance * Migrate to ``pyproject.toml`` and remove redundant infrastructure. By :user:`Saransh Chopra ` :issue:`1158`. +* Migrate coverage to ``pyproject.toml``. + By :user:`John Kirkham ` :issue:`1250`. + .. 
_release_2.13.3: 2.13.3 diff --git a/pyproject.toml b/pyproject.toml index 7ef173879e..1592b9887a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,17 @@ Discussions = "https://github.com/zarr-developers/zarr-python/discussions" Documentation = "https://zarr.readthedocs.io/" Homepage = "https://github.com/zarr-developers/zarr-python" +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "pragma: ${PY_MAJOR_VERSION} no cover", +] + +[tool.coverage.run] +omit = [ + "zarr/meta_v1.py", + "bench/compress_normal.py", +] [tool.setuptools] packages = ["zarr", "zarr._storage", "zarr.tests"] From 6d0eeaa7e0bdf0c8049467571483d1eccb6191b5 Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Wed, 9 Nov 2022 23:03:25 +0530 Subject: [PATCH 0235/1078] Update release.rst with maintenance PRs (#1252) --- docs/release.rst | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 371dbf01a8..2616b184bc 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -17,17 +17,27 @@ Unreleased Maintenance ~~~~~~~~~~~ +* Migrate to ``pyproject.toml`` and remove redundant infrastructure. + By :user:`Saransh Chopra ` :issue:`1158`. + +* Require ``setuptools`` 64.0.0+ + By :user:`Saransh Chopra ` :issue:`1193`. + +* Pin action versions (pypi-publish, setup-miniconda) for dependabot + By :user:`Saransh Chopra ` :issue:`1205`. + +* Remove ``tox`` support + By :user:`Saransh Chopra ` :issue:`1219`. + * Simplify if/else statement. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1227`. +* Migrate coverage to ``pyproject.toml``. + By :user:`John Kirkham ` :issue:`1250`. + * Delete unused files. By :user:`John Kirkham ` :issue:`1251`. -* Migrate to ``pyproject.toml`` and remove redundant infrastructure. - By :user:`Saransh Chopra ` :issue:`1158`. - -* Migrate coverage to ``pyproject.toml``. - By :user:`John Kirkham ` :issue:`1250`. .. _release_2.13.3: From bae14230f277621fb03b479a441711d42f682e40 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 9 Nov 2022 18:33:51 +0100 Subject: [PATCH 0236/1078] Add missing newline at EOF (#1253) --- data/donotdelete | 2 +- docs/api/hierarchy.rst | 2 +- docs/installation.rst | 2 +- requirements_rtfd.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/data/donotdelete b/data/donotdelete index 1e9ef93e26..b0c96f7ee5 100644 --- a/data/donotdelete +++ b/data/donotdelete @@ -1 +1 @@ -This directory is used for data files created during testing. \ No newline at end of file +This directory is used for data files created during testing. diff --git a/docs/api/hierarchy.rst b/docs/api/hierarchy.rst index 88b9c0fd88..11a5575144 100644 --- a/docs/api/hierarchy.rst +++ b/docs/api/hierarchy.rst @@ -38,4 +38,4 @@ Groups (``zarr.hierarchy``) .. automethod:: zeros_like .. automethod:: ones_like .. automethod:: full_like - .. automethod:: move \ No newline at end of file + .. 
automethod:: move diff --git a/docs/installation.rst b/docs/installation.rst index a07c1c42e1..8553d451cb 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -28,4 +28,4 @@ To work with Zarr source code in development, install from GitHub:: To verify that Zarr has been fully installed, run the test suite:: $ pip install pytest - $ python -m pytest -v --pyargs zarr \ No newline at end of file + $ python -m pytest -v --pyargs zarr diff --git a/requirements_rtfd.txt b/requirements_rtfd.txt index 0a7d90358e..553384e0bd 100644 --- a/requirements_rtfd.txt +++ b/requirements_rtfd.txt @@ -7,4 +7,4 @@ sphinx-copybutton sphinx-rtd-theme numpydoc numpy!=1.21.0 -msgpack-python==0.5.6 \ No newline at end of file +msgpack-python==0.5.6 From 810ec5a8a902d1b0acf856883172faf1b6165624 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 10 Nov 2022 06:19:00 -0800 Subject: [PATCH 0237/1078] Add `.flake8` to configure Flake8 (#1249) --- .flake8 | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .flake8 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000..7da1f9608e --- /dev/null +++ b/.flake8 @@ -0,0 +1,2 @@ +[flake8] +max-line-length = 100 From b02040471f9b7e3e1aefd4f6b90b52c76efe32b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Nov 2022 15:47:10 +0100 Subject: [PATCH 0238/1078] Bump fsspec from 2022.10.0 to 2022.11.0 (#1255) * Bump fsspec from 2022.10.0 to 2022.11.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.10.0 to 2022.11.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.10.0...2022.11.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Bump s3fs as well close: https://github.com/zarr-developers/zarr-python/pull/1254 Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index fa57d8270c..a30344061e 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -19,6 +19,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.7.0 -fsspec==2022.10.0 -s3fs==2022.10.0 +fsspec==2022.11.0 +s3fs==2022.11.0 moto[server]>=4.0.8 From 0a25ab2979949bb300204a8b6ed016536b8f346c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Nov 2022 01:30:47 -0800 Subject: [PATCH 0239/1078] chore: update pre-commit hooks (#1262) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v0.982 → v0.990](https://github.com/pre-commit/mirrors-mypy/compare/v0.982...v0.990) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fd1619eefa..6eec599124 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.982 + rev: v0.990 hooks: - id: mypy files: zarr From 090de2c3ff3f117ef9cca84a7c952412dc0553cc Mon Sep 17 00:00:00 2001 From: jakirkham Date: Tue, 15 Nov 2022 03:14:04 -0800 Subject: [PATCH 0240/1078] Use `conda-incubator/setup-miniconda@v2.2.0` (#1263) --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- docs/release.rst | 3 +++ 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 3eb414059c..2cde38e081 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v2.2.0 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index bb04269f07..872ce52343 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -40,7 +40,7 @@ jobs: with: fetch-depth: 0 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@master + uses: conda-incubator/setup-miniconda@v2.2.0 with: channels: conda-forge python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index af8bae8cf1..ea1d0f64c9 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v2 + - uses: conda-incubator/setup-miniconda@v2.2.0 with: auto-update-conda: true python-version: ${{ matrix.python-version }} diff --git a/docs/release.rst b/docs/release.rst index 2616b184bc..d68cafd29c 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ 
-35,6 +35,9 @@ Maintenance * Migrate coverage to ``pyproject.toml``. By :user:`John Kirkham ` :issue:`1250`. +* Use ``conda-incubator/setup-miniconda@v2.2.0``. + By :user:`John Kirkham ` :issue:`1263`. + * Delete unused files. By :user:`John Kirkham ` :issue:`1251`. From b2f088fb099a72004a0f6590f1e1d09d35cf3921 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Tue, 15 Nov 2022 10:20:53 -0800 Subject: [PATCH 0241/1078] Fix coverage (#1264) --- docs/release.rst | 3 +++ zarr/hierarchy.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index d68cafd29c..e859279ef2 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -32,6 +32,9 @@ Maintenance * Simplify if/else statement. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1227`. +* Get coverage up to 100%. + By :user:`John Kirkham ` :issue:`1264`. + * Migrate coverage to ``pyproject.toml``. By :user:`John Kirkham ` :issue:`1250`. diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index f2188217a8..82323c7208 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -1430,7 +1430,7 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N mode=mode, zarr_version=zarr_version) if getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) != zarr_version: - raise ValueError( + raise ValueError( # pragma: no cover "zarr_version of store and chunk_store must match" ) From d5891b2b3cbacc598f8ef9ab0739bd6a2d0182a8 Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Wed, 16 Nov 2022 01:10:01 +0530 Subject: [PATCH 0242/1078] Workflow to label PRs with "needs release notes" (#1239) --- .github/labeler.yml | 2 ++ .github/workflows/needs_release_notes.yml | 13 +++++++++++++ docs/release.rst | 3 +++ 3 files changed, 18 insertions(+) create mode 100644 .github/labeler.yml create mode 100644 .github/workflows/needs_release_notes.yml diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000000..dbc3b95333 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,2 @@ +needs release notes: +- all: ['!docs/release.rst'] diff --git a/.github/workflows/needs_release_notes.yml b/.github/workflows/needs_release_notes.yml new file mode 100644 index 0000000000..b0b8b7c97d --- /dev/null +++ b/.github/workflows/needs_release_notes.yml @@ -0,0 +1,13 @@ +name: "Pull Request Labeler" + +on: + - pull_request_target + +jobs: + triage: + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@main + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + sync-labels: true diff --git a/docs/release.rst b/docs/release.rst index e859279ef2..8642ed8b6f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -29,6 +29,9 @@ Maintenance * Remove ``tox`` support By :user:`Saransh Chopra ` :issue:`1219`. +* Add workflow to label PRs with "needs release notes". + By :user:`Saransh Chopra ` :issue:`1239`. + * Simplify if/else statement. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1227`. From 19d159f5532a2432f8417eaa96864eb5296e447d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Nov 2022 07:54:01 +0100 Subject: [PATCH 0243/1078] Bump pymongo from 4.3.2 to 4.3.3 (#1270) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.3.2 to 4.3.3. 
- [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.3.2...4.3.3) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a30344061e..f4853e7a0e 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.14.1 # pyup: ignore redis==4.3.4 types-redis types-setuptools -pymongo==4.3.2 +pymongo==4.3.3 # optional test requirements coverage flake8==5.0.4 From 42da4aa2b2d6b6e79a6f3d6629e3d1837af8e9b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 09:22:48 +0100 Subject: [PATCH 0244/1078] Bump numpy from 1.23.4 to 1.23.5 (#1272) Bumps [numpy](https://github.com/numpy/numpy) from 1.23.4 to 1.23.5. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.23.4...v1.23.5) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index b1c56d3f7d..f3f810368b 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.23.4 +numpy==1.23.5 From 5f5c868e656a021825c85e042965640b87685f26 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Nov 2022 09:25:19 +0100 Subject: [PATCH 0245/1078] chore: update pre-commit hooks (#1273) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v0.990 → v0.991](https://github.com/pre-commit/mirrors-mypy/compare/v0.990...v0.991) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6eec599124..7541e60500 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.990 + rev: v0.991 hooks: - id: mypy files: zarr From e4668e04b757c86c04e8e3f0659e14bf2a85acb7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Nov 2022 10:15:53 +0100 Subject: [PATCH 0246/1078] Bump redis from 4.3.4 to 4.3.5 (#1275) Bumps [redis](https://github.com/redis/redis-py) from 4.3.4 to 4.3.5. 
- [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.3.4...v4.3.5) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f4853e7a0e..365787bd26 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.2 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.14.1 # pyup: ignore -redis==4.3.4 +redis==4.3.5 types-redis types-setuptools pymongo==4.3.3 From 596d9c0e2a05e95b106831d0331aae37c68c83d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Dec 2022 08:47:52 +0100 Subject: [PATCH 0247/1078] Bump redis from 4.3.5 to 4.4.0 (#1282) Bumps [redis](https://github.com/redis/redis-py) from 4.3.5 to 4.4.0. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.3.5...v4.4.0) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 365787bd26..1f1df63287 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.2 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.14.1 # pyup: ignore -redis==4.3.5 +redis==4.4.0 types-redis types-setuptools pymongo==4.3.3 From b3cda2b3318c93a96266f62e383b3b45f105f389 Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Tue, 6 Dec 2022 04:11:08 -0500 Subject: [PATCH 0248/1078] Fix double counting V3 groups bug (#1268) * fix double counting groups bug * add release notes Co-authored-by: Josh Moore --- docs/release.rst | 6 ++++++ zarr/hierarchy.py | 34 ++++++++++++++++------------------ zarr/tests/test_hierarchy.py | 13 +++++++++++++ 3 files changed, 35 insertions(+), 18 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 8642ed8b6f..e15132b60b 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -14,6 +14,12 @@ Unreleased # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +* Fix bug that caused double counting of groups in ``groups()`` and ``group_keys()`` + methods with V3 stores. + By :user:`Ryan Abernathey ` :issue:`1228`. + +.. 
_release_2.13.2: + Maintenance ~~~~~~~~~~~ diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 82323c7208..0dae921500 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -511,9 +511,15 @@ def group_keys(self): else: dir_name = meta_root + self._path group_sfx = '.group' + self._metadata_key_suffix - for key in sorted(listdir(self._store, dir_name)): + # The fact that we call sorted means this can't be a streaming generator. + # The keys are already in memory. + all_keys = sorted(listdir(self._store, dir_name)) + for key in all_keys: if key.endswith(group_sfx): key = key[:-len(group_sfx)] + if key in all_keys: + # otherwise we will double count this group + continue path = self._key_prefix + key if path.endswith(".array" + self._metadata_key_suffix): # skip array keys @@ -552,24 +558,16 @@ def groups(self): zarr_version=self._version) else: - dir_name = meta_root + self._path - group_sfx = '.group' + self._metadata_key_suffix - for key in sorted(listdir(self._store, dir_name)): - if key.endswith(group_sfx): - key = key[:-len(group_sfx)] + for key in self.group_keys(): path = self._key_prefix + key - if path.endswith(".array" + self._metadata_key_suffix): - # skip array keys - continue - if contains_group(self._store, path, explicit_only=False): - yield key, Group( - self._store, - path=path, - read_only=self._read_only, - chunk_store=self._chunk_store, - cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, - zarr_version=self._version) + yield key, Group( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version) def array_keys(self, recurse=False): """Return an iterator over member names for arrays only. 
diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index 8d1fabbed3..7d87b6d404 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -770,6 +770,19 @@ def visitor1(val, *args): g1.store.close() + # regression test for https://github.com/zarr-developers/zarr-python/issues/1228 + def test_double_counting_group_v3(self): + root_group = self.create_group() + group_names = ["foo", "foo-", "foo_"] + for name in group_names: + sub_group = root_group.create_group(name) + sub_group.create("bar", shape=10, dtype="i4") + assert list(root_group.group_keys()) == sorted(group_names) + assert list(root_group.groups()) == [ + (name, root_group[name]) + for name in sorted(group_names) + ] + def test_empty_getitem_contains_iterators(self): # setup g = self.create_group() From e7c0eb45ad1f571a8da3bfa9e23586848296e66c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 7 Dec 2022 10:44:47 +0100 Subject: [PATCH 0249/1078] chore: update pre-commit hooks (#1278) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 5.0.4 → 6.0.0](https://github.com/PyCQA/flake8/compare/5.0.4...6.0.0) - [github.com/pre-commit/pre-commit-hooks: v4.3.0 → v4.4.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.3.0...v4.4.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7541e60500..1f629ccf76 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/PyCQA/flake8 - rev: 5.0.4 + rev: 6.0.0 hooks: - id: flake8 args: [ @@ -20,7 +20,7 @@ repos: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.4.0 hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy From 1af77b63ad8d51a5a8dc2cd923bf73cb8abe5a64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Dec 2022 00:39:30 -0800 Subject: [PATCH 0250/1078] Bump numcodecs from 0.10.2 to 0.11.0 (#1300) Bumps [numcodecs](https://github.com/zarr-developers/numcodecs) from 0.10.2 to 0.11.0. - [Release notes](https://github.com/zarr-developers/numcodecs/releases) - [Changelog](https://github.com/zarr-developers/numcodecs/blob/main/docs/release.rst) - [Commits](https://github.com/zarr-developers/numcodecs/compare/v0.10.2...v0.11.0) --- updated-dependencies: - dependency-name: numcodecs dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index b76b398a16..3f96d79850 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,7 +1,7 @@ # library requirements asciitree==0.3.3 fasteners==0.18 -numcodecs==0.10.2 +numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==7.0.5 # test requirements From 13f7d0d77530a7d59b7f3d4bc80150d436c65061 Mon Sep 17 00:00:00 2001 From: Kola Babalola Date: Wed, 21 Dec 2022 07:38:37 +0000 Subject: [PATCH 0251/1078] Fix minor indexing errors in tutorial and specification examples of documentation (#1277) --- docs/spec/v1.rst | 6 +++--- docs/spec/v2.rst | 6 +++--- docs/tutorial.rst | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/spec/v1.rst b/docs/spec/v1.rst index d8598c88c4..8584b24e6d 100644 --- a/docs/spec/v1.rst +++ b/docs/spec/v1.rst @@ -144,9 +144,9 @@ are converted to strings and concatenated with the period character ('.') separating each index. For example, given an array with shape (10000, 10000) and chunk shape (1000, 1000) there will be 100 chunks laid out in a 10 by 10 grid. The chunk with indices (0, 0) provides -data for rows 0-1000 and columns 0-1000 and is stored under the key -'0.0'; the chunk with indices (2, 4) provides data for rows 2000-3000 -and columns 4000-5000 and is stored under the key '2.4'; etc. +data for rows 0-999 and columns 0-999 and is stored under the key +'0.0'; the chunk with indices (2, 4) provides data for rows 2000-2999 +and columns 4000-4999 and is stored under the key '2.4'; etc. There is no need for all chunks to be present within an array store. If a chunk is not present then it is considered to be in an diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index 6d11fd1acc..45e6afb320 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -216,9 +216,9 @@ To form a string key for a chunk, the indices are converted to strings and concatenated with the period character (".") separating each index. For example, given an array with shape (10000, 10000) and chunk shape (1000, 1000) there will be 100 chunks laid out in a 10 by 10 grid. The chunk with indices -(0, 0) provides data for rows 0-1000 and columns 0-1000 and is stored under the -key "0.0"; the chunk with indices (2, 4) provides data for rows 2000-3000 and -columns 4000-5000 and is stored under the key "2.4"; etc. +(0, 0) provides data for rows 0-999 and columns 0-999 and is stored under the +key "0.0"; the chunk with indices (2, 4) provides data for rows 2000-2999 and +columns 4000-4999 and is stored under the key "2.4"; etc. There is no need for all chunks to be present within an array store. If a chunk is not present then it is considered to be in an uninitialized state. An diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 411ce0a163..43e42faf6b 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -525,9 +525,9 @@ When the indexing arrays have different shapes, they are broadcast together. 
That is, the following two calls are equivalent:: >>> z[1, [1, 3]] - array([5, 7]) + array([6, 8]) >>> z[[1, 1], [1, 3]] - array([5, 7]) + array([6, 8]) Indexing with a mask array ~~~~~~~~~~~~~~~~~~~~~~~~~~ From 5aff4cb2a3114e4fbdeb2692856789827237eb48 Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Wed, 21 Dec 2022 14:26:37 +0530 Subject: [PATCH 0252/1078] Skip labeler for bot PRs (#1271) --- .github/workflows/needs_release_notes.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/needs_release_notes.yml b/.github/workflows/needs_release_notes.yml index b0b8b7c97d..d81ee0bdc4 100644 --- a/.github/workflows/needs_release_notes.yml +++ b/.github/workflows/needs_release_notes.yml @@ -5,6 +5,7 @@ on: jobs: triage: + if: ${{ github.event.pull_request.user.login != 'dependabot[bot]' }} && ${{ github.event.pull_request.user.login != 'pre-commit-ci[bot]' }} runs-on: ubuntu-latest steps: - uses: actions/labeler@main From ab101e1c9ab094f6817ea78ff08320a513ab35cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Dec 2022 09:56:52 +0100 Subject: [PATCH 0253/1078] Bump numpy from 1.23.5 to 1.24.0 (#1301) Bumps [numpy](https://github.com/numpy/numpy) from 1.23.5 to 1.24.0. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.23.5...v1.24.0) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index f3f810368b..6b5b91b6c8 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.23.5 +numpy==1.24.0 From 4b91976e89b3d907c530ee2223e3277ed12bb931 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Dec 2022 09:57:07 +0100 Subject: [PATCH 0254/1078] Bump setuptools-scm from 7.0.5 to 7.1.0 (#1302) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 7.0.5 to 7.1.0. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/setuptools_scm/compare/v7.0.5...v7.1.0) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 3f96d79850..d5e0798e86 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.18 numcodecs==0.11.0 msgpack-python==0.5.6 -setuptools-scm==7.0.5 +setuptools-scm==7.1.0 # test requirements pytest==7.2.0 From 03853c97b1f21c0cb584ea4d554e4c0f7aad912f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Dec 2022 11:28:28 +0100 Subject: [PATCH 0255/1078] Bump ipywidgets from 8.0.2 to 8.0.3 (#1287) Bumps [ipywidgets](https://github.com/jupyter-widgets/ipywidgets) from 8.0.2 to 8.0.3. - [Release notes](https://github.com/jupyter-widgets/ipywidgets/releases) - [Commits](https://github.com/jupyter-widgets/ipywidgets/compare/8.0.2...8.0.3) --- updated-dependencies: - dependency-name: ipywidgets dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 1f1df63287..77a2b5abdf 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.3.0; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.0.2 +ipywidgets==8.0.3 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From d7e568ddd25151970489661ccfce67cadd919ab7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Dec 2022 11:29:18 +0100 Subject: [PATCH 0256/1078] Remove flake8 dependency (#1276) * Bump flake8 from 5.0.4 to 6.0.0 Bumps [flake8](https://github.com/pycqa/flake8) from 5.0.4 to 6.0.0. - [Release notes](https://github.com/pycqa/flake8/releases) - [Commits](https://github.com/pycqa/flake8/compare/5.0.4...6.0.0) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Attempt removing flake8 Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 77a2b5abdf..25df1e19cc 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -14,7 +14,6 @@ types-setuptools pymongo==4.3.3 # optional test requirements coverage -flake8==5.0.4 pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 From 482dfe531b6b180b60a734cec22a0dd7615732d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Dec 2022 11:29:43 +0100 Subject: [PATCH 0257/1078] Bump lmdb from 1.3.0 to 1.4.0 (#1288) Bumps [lmdb](https://github.com/jnwatson/py-lmdb) from 1.3.0 to 1.4.0. 
- [Release notes](https://github.com/jnwatson/py-lmdb/releases) - [Changelog](https://github.com/jnwatson/py-lmdb/blob/py-lmdb_1.4.0/ChangeLog) - [Commits](https://github.com/jnwatson/py-lmdb/compare/py-lmdb_1.3.0...py-lmdb_1.4.0) --- updated-dependencies: - dependency-name: lmdb dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 25df1e19cc..05cc2b8730 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -1,6 +1,6 @@ # optional library requirements # bsddb3==6.2.6; sys_platform != 'win32' -lmdb==1.3.0; sys_platform != 'win32' +lmdb==1.4.0; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 ipywidgets==8.0.3 From 4e633ad9aa434304296900790c4c65e0fa0dfa12 Mon Sep 17 00:00:00 2001 From: Rafal Wojdyla Date: Thu, 22 Dec 2022 09:33:45 +0900 Subject: [PATCH 0258/1078] Handle fsspec.FSMap using FSStore store (#1304) --- docs/release.rst | 2 ++ zarr/_storage/v3.py | 10 ++++++++++ zarr/storage.py | 19 +++++++++++++++++++ zarr/tests/test_storage.py | 5 +++++ zarr/tests/test_storage_v3.py | 5 +++++ 5 files changed, 41 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index e15132b60b..50eb8316bf 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -17,6 +17,8 @@ Unreleased * Fix bug that caused double counting of groups in ``groups()`` and ``group_keys()`` methods with V3 stores. By :user:`Ryan Abernathey ` :issue:`1228`. +* Handle fsspec.FSMap using FSStore store + By :user:`Rafal Wojdyla ` :issue:`1304`. .. 
_release_2.13.2: diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 515e6f5aaa..a0a1870ffc 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -567,6 +567,16 @@ def _normalize_store_arg_v3(store: Any, storage_options=None, mode="r") -> BaseS return store if isinstance(store, os.PathLike): store = os.fspath(store) + if FSStore._fsspec_installed(): + import fsspec + if isinstance(store, fsspec.FSMap): + return FSStoreV3(store.root, + fs=store.fs, + mode=mode, + check=store.check, + create=store.create, + missing_exceptions=store.missing_exceptions, + **(storage_options or {})) if isinstance(store, str): if "://" in store or "::" in store: store = FSStoreV3(store, mode=mode, **(storage_options or {})) diff --git a/zarr/storage.py b/zarr/storage.py index 4acf637330..a2a8919d0b 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -139,6 +139,16 @@ def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseS return store if isinstance(store, os.PathLike): store = os.fspath(store) + if FSStore._fsspec_installed(): + import fsspec + if isinstance(store, fsspec.FSMap): + return FSStore(store.root, + fs=store.fs, + mode=mode, + check=store.check, + create=store.create, + missing_exceptions=store.missing_exceptions, + **(storage_options or {})) if isinstance(store, str): if "://" in store or "::" in store: return FSStore(store, mode=mode, **(storage_options or {})) @@ -1308,6 +1318,8 @@ def __init__(self, url, normalize_keys=False, key_separator=None, create=False, missing_exceptions=None, **storage_options): + if not self._fsspec_installed(): # pragma: no cover + raise ImportError("`fsspec` is required to use zarr's FSStore") import fsspec mapper_options = {"check": check, "create": create} @@ -1479,6 +1491,13 @@ def clear(self): raise ReadOnlyError() self.map.clear() + @classmethod + def _fsspec_installed(cls): + """Returns true if fsspec is installed""" + import importlib.util + + return importlib.util.find_spec("fsspec") is not None + class TempStore(DirectoryStore): """Directory store using a temporary directory for storage. 
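For context, the effect of the normalize_store_arg change above can be sketched as follows (a minimal sketch, assuming fsspec is installed; the local path is hypothetical):

import fsspec
from zarr.storage import FSStore, normalize_store_arg

# A plain fsspec mapping over some filesystem (hypothetical local path).
mapper = fsspec.get_mapper("file:///tmp/example.zarr")

# With this patch an fsspec.FSMap is wrapped into an FSStore that reuses the
# mapping's filesystem, instead of falling through to the str/Path handling.
store = normalize_store_arg(mapper)
assert isinstance(store, FSStore)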
diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 39d4b5988d..7c23735f36 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -2556,10 +2556,15 @@ def test_normalize_store_arg(tmpdir): assert isinstance(store, Class) if have_fsspec: + import fsspec + path = tempfile.mkdtemp() store = normalize_store_arg("file://" + path, zarr_version=2, mode='w') assert isinstance(store, FSStore) + store = normalize_store_arg(fsspec.get_mapper("file://" + path)) + assert isinstance(store, FSStore) + def test_meta_prefix_6853(): diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index 13b5011676..4f6215135c 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -467,11 +467,16 @@ def test_normalize_store_arg_v3(tmpdir): normalize_store_arg(str(fn), zarr_version=3, mode='w', storage_options={"some": "kwargs"}) if have_fsspec: + import fsspec + path = tempfile.mkdtemp() store = normalize_store_arg("file://" + path, zarr_version=3, mode='w') assert isinstance(store, FSStoreV3) assert 'zarr.json' in store + store = normalize_store_arg(fsspec.get_mapper("file://" + path), zarr_version=3) + assert isinstance(store, FSStoreV3) + fn = tmpdir.join('store.n5') with pytest.raises(NotImplementedError): normalize_store_arg(str(fn), zarr_version=3, mode='w') From dcce26e6d4cdecdf7192ae1943127f381b16557c Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 12 Jan 2023 14:00:51 +0100 Subject: [PATCH 0259/1078] =?UTF-8?q?http://=20=E2=86=92=20https://=20(#13?= =?UTF-8?q?13)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/release.rst | 6 ++++-- notebooks/dask_copy.ipynb | 2 +- notebooks/dask_count_alleles.ipynb | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 50eb8316bf..e63e7f8c22 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -17,11 +17,10 @@ Unreleased * Fix bug that caused double counting of groups in ``groups()`` and ``group_keys()`` methods with V3 stores. By :user:`Ryan Abernathey ` :issue:`1228`. + * Handle fsspec.FSMap using FSStore store By :user:`Rafal Wojdyla ` :issue:`1304`. -.. _release_2.13.2: - Maintenance ~~~~~~~~~~~ @@ -55,6 +54,9 @@ Maintenance * Delete unused files. By :user:`John Kirkham ` :issue:`1251`. +* Uopdate web links: http:// → https:// + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1313`. + .. _release_2.13.3: diff --git a/notebooks/dask_copy.ipynb b/notebooks/dask_copy.ipynb index 5cb712508c..ba4391737a 100644 --- a/notebooks/dask_copy.ipynb +++ b/notebooks/dask_copy.ipynb @@ -33,7 +33,7 @@ "text/html": [ "\n", "
\n", - " \n", + " \n", " Loading BokehJS ...\n", "
" ] diff --git a/notebooks/dask_count_alleles.ipynb b/notebooks/dask_count_alleles.ipynb index 8ca462b232..8b9b7cec6e 100644 --- a/notebooks/dask_count_alleles.ipynb +++ b/notebooks/dask_count_alleles.ipynb @@ -26,7 +26,7 @@ "text/html": [ "\n", "
\n", - " \n", + " \n", " Loading BokehJS ...\n", "
" ] From af2600297d506778c7480b52df6d8efb9ac0bc4b Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 16 Jan 2023 14:37:22 +0100 Subject: [PATCH 0260/1078] 2.13.4/2.14.0 draft release notes (#1316) * 2.13.4/2.14.0 draft release notes * Correct typo * Fix headings * Fix issue markup * Fix bullet points * Re-arrange some PRs * Update docs/release.rst Co-authored-by: Sanket Verma * Update docs/release.rst Co-authored-by: Sanket Verma * Apply suggestions from code review Co-authored-by: Sanket Verma * Update docs/release.rst Co-authored-by: Sanket Verma Co-authored-by: Sanket Verma --- docs/release.rst | 120 ++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 113 insertions(+), 7 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index e63e7f8c22..7ffd751696 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,20 +6,58 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. -.. _unreleased: + .. _unreleased: -Unreleased ----------- + Unreleased + ---------- .. # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +.. _release_2.13.4: + +2.13.4 +------ + +Appreciation +~~~~~~~~~~~~~ + +Special thanks to Outreachy participants for contributing to most of the maintenance PRs. Please read the blog post summarising the contribution phase and welcoming new Outreachy interns: https://zarr.dev/blog/welcoming-outreachy-2022-interns/ + + +Enhancements +~~~~~~~~~~~~ + +* Handle fsspec.FSMap using FSStore store. + By :user:`Rafal Wojdyla ` :issue:`1304`. + +Bug fixes +~~~~~~~~~ + +* Fix bug that caused double counting of groups in ``groups()`` and ``group_keys()`` methods with V3 stores. + By :user:`Ryan Abernathey ` :issue:`1228`. + +* Remove unnecessary calling of `contains_array` for key that ended in `.array.json`. + By :user:`Joe Hamman ` :issue:`1149`. + * Fix bug that caused double counting of groups in ``groups()`` and ``group_keys()`` methods with V3 stores. By :user:`Ryan Abernathey ` :issue:`1228`. -* Handle fsspec.FSMap using FSStore store - By :user:`Rafal Wojdyla ` :issue:`1304`. +Documentation +~~~~~~~~~~~~~ + +* Fix minor indexing errors in tutorial and specification examples of documentation. + By :user:`Kola Babalola ` :issue:`1277`. + +* Add `requirements_rtfd.txt` in `contributing.rst`. + By :user:`AWA BRANDON AWA ` :issue:`1243`. + +* Add documentation for find/findall using visit. + By :user:`Weddy Gikunda ` :issue:`1241`. + +* Refresh of the main landing page. + By :user:`Josh Moore ` :issue:`1173`. Maintenance ~~~~~~~~~~~ @@ -54,9 +92,77 @@ Maintenance * Delete unused files. By :user:`John Kirkham ` :issue:`1251`. -* Uopdate web links: http:// → https:// - By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1313`. +* Skip labeller for bot PRs. + By :user:`Saransh Chopra ` :issue:`1271`. + +* Restore Flake8 configuration. + By :user:`John Kirkham ` :issue:`1249`. + +* Add missing newline at EOF. + By :user:`Dimitri Papadopoulos` :issue:`1253`. + +* Add `license_files` to `pyproject.toml`. + By :user:`John Kirkham ` :issue:`1247`. + +* Adding `pyupgrade` suggestions. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1225`. + +* Fixed some linting errors. + By :user:`Weddy Gikunda ` :issue:`1226`. + +* Added the link to main website in readthedocs sidebar. + By :user:`Stephanie_nkwatoh ` :issue:`1216`. + +* Remove redundant wheel dependency in `pyproject.toml`. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1233`. 
+ +* Turned on `isolated_build` in `tox.ini` file. + By :user:`AWA BRANDON AWA ` :issue:`1210`. + +* Fixed `flake8` alert and avoid duplication of `Zarr Developers`. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1203`. + +* Bump to NumPy 1.20+ in `environment.yml`. + By :user:`John Kirkham ` :issue:`1201`. + +* Bump to NumPy 1.20 in `pyproject.toml`. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1192`. +* Remove LGTM (`.lgtm.yml`) configuration file. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1191`. + +* Codespell will skip `fixture` in pre-commit. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1197`. + +* Add msgpack in `requirements_rtfd.txt`. + By :user:`Emmanuel Bolarinwa ` :issue:`1188`. + +* Added license to docs and fixed a typo from `_spec_v2` to `_spec_v3`. + By :user:`AWA BRANDON AWA ` :issue:`1182`. + +* Fixed installation link in `README.md`. + By :user:`AWA BRANDON AWA ` :issue:`1177`. + +* Fixed typos in `installation.rst` and `release.rst`. + By :user:`Chizoba Nweke ` :issue:`1178`. + +* Set `docs/conf.py` language to `en`. + By :user:`AWA BRANDON AWA ` :issue:`1174`. + +* Added `installation.rst` to the docs. + By :user:`AWA BRANDON AWA ` :issue:`1170`. + +* Adjustment of year from `2015-2018` to `2015-2022` in the docs. + By :user:`Emmanuel Bolarinwa ` :issue:`1165`. + +* Updated `Forking the repository` section in `contributing.rst`. + By :user:`AWA BRANDON AWA ` :issue:`1171`. + +* Updated GitHub actions. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1134`. + +* Update web links: `http:// → https://`. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1313`. .. _release_2.13.3: From f0beb454f58331e50e7c34e7320e77ae89dd674e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Jan 2023 14:42:50 +0100 Subject: [PATCH 0261/1078] Bump pytest from 7.2.0 to 7.2.1 (#1317) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.2.0 to 7.2.1. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.2.0...7.2.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 05cc2b8730..e44c60e9c9 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.3 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.14.1 # pyup: ignore -redis==4.4.0 +redis==4.4.2 types-redis types-setuptools pymongo==4.3.3 From c9fe26273511395e9b295f17d17224f2cde52f3b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Jan 2023 14:43:25 +0100 Subject: [PATCH 0263/1078] Bump numpy from 1.24.0 to 1.24.1 (#1311) Bumps [numpy](https://github.com/numpy/numpy) from 1.24.0 to 1.24.1. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.24.0...v1.24.1) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 6b5b91b6c8..7d373a254d 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.24.0 +numpy==1.24.1 From 876ccf2cd66135d53b3962dc6120c98624ba9d3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Jan 2023 14:43:38 +0100 Subject: [PATCH 0264/1078] Bump ipywidgets from 8.0.3 to 8.0.4 (#1307) Bumps [ipywidgets](https://github.com/jupyter-widgets/ipywidgets) from 8.0.3 to 8.0.4. - [Release notes](https://github.com/jupyter-widgets/ipywidgets/releases) - [Commits](https://github.com/jupyter-widgets/ipywidgets/compare/8.0.3...8.0.4) --- updated-dependencies: - dependency-name: ipywidgets dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index e44c60e9c9..5d7dc3398c 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.4.0; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.0.3 +ipywidgets==8.0.4 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From 1fd607a9fc439545d5ed43305c49a1b42b1c3d37 Mon Sep 17 00:00:00 2001 From: James Bourbeau Date: Mon, 16 Jan 2023 09:53:12 -0600 Subject: [PATCH 0265/1078] Ensure `zarr.create` uses writeable mode (#1309) * Ensure zarr.create uses writeable mode * Update release.rst Added release notes for [#1309](https://github.com/zarr-developers/zarr-python/pull/1309) * Switch to bug fix Co-authored-by: Josh Moore Co-authored-by: Sanket Verma --- docs/release.rst | 9 +++++++++ zarr/creation.py | 2 +- zarr/tests/test_creation.py | 14 +++++++++++++- 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 7ffd751696..d6692a01d9 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -16,6 +16,15 @@ Release notes .. _release_2.13.4: +2.13.5 +------ + +Bug fixes +~~~~~~~~~ + +* Ensure ``zarr.create`` uses writeable mode to fix issue with :issue:`1304`. + By :user:`James Bourbeau ` :issue:`1309`. + 2.13.4 ------ diff --git a/zarr/creation.py b/zarr/creation.py index 00d2c40030..cc191e3734 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -145,7 +145,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', zarr_version = getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) # handle polymorphic store arg - store = normalize_store_arg(store, zarr_version=zarr_version) + store = normalize_store_arg(store, zarr_version=zarr_version, mode="w") zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) # API compatibility with h5py diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index 0f12fc5613..4c9c292734 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -19,7 +19,7 @@ from zarr._storage.store import v3_api_available from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer -from zarr.tests.util import mktemp +from zarr.tests.util import mktemp, have_fsspec _VERSIONS = ((None, 2, 3) if v3_api_available else (None, 2)) _VERSIONS2 = ((2, 3) if v3_api_available else (2, )) @@ -429,6 +429,18 @@ def test_create_in_dict(zarr_version, at_root): assert isinstance(a.store, expected_store_type) +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.parametrize('at_root', [False, True]) +def test_create_writeable_mode(zarr_version, at_root, tmp_path): + # Regression test for https://github.com/zarr-developers/zarr-python/issues/1306 + import fsspec + kwargs = _init_creation_kwargs(zarr_version, at_root) + store = fsspec.get_mapper(str(tmp_path)) + z = create(100, store=store, **kwargs) + assert z.store.map == store + + @pytest.mark.parametrize('zarr_version', _VERSIONS) @pytest.mark.parametrize('at_root', [False, True]) 
def test_empty_like(zarr_version, at_root): From df6e07193f6794b25c0f71d5dc631e7caf449321 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Mon, 16 Jan 2023 17:13:05 +0100 Subject: [PATCH 0266/1078] Bump pypi action (#1320) 2.13.4 and 2.13.5 were not released due to the following error: ``` Error: Unable to resolve action `pypa/gh-action-pypi-publish@v1`, unable to find version `v1` ``` --- .github/workflows/releases.yml | 2 +- docs/release.rst | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 1bcf79ef5f..d1479d43e1 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1 + - uses: pypa/gh-action-pypi-publish@v1.6.4 with: user: __token__ password: ${{ secrets.pypi_password }} diff --git a/docs/release.rst b/docs/release.rst index d6692a01d9..817bdc4f37 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -14,7 +14,18 @@ Release notes # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. -.. _release_2.13.4: +.. _release_2.13.6: + +2.13.6 +------ + +Maintenance +~~~~~~~~~~~ + +* Bump gh-action-pypi-publish to 1.6.4. + By :user:`Josh Moore ` :issue:`1320`. + +.. _release_2.13.5: 2.13.5 ------ @@ -25,6 +36,8 @@ Bug fixes * Ensure ``zarr.create`` uses writeable mode to fix issue with :issue:`1304`. By :user:`James Bourbeau ` :issue:`1309`. +.. _release_2.13.4: + 2.13.4 ------ From 385b5d3635618e086eb4752f81c652379751a5ad Mon Sep 17 00:00:00 2001 From: Jonathan Striebel Date: Mon, 16 Jan 2023 17:37:33 +0100 Subject: [PATCH 0267/1078] add storage_transformers and get/set_partial_values (#1096) * add storage_transformers and get/set_partial_values * formatting * add docs and release notes * add test_core testcase * Update zarr/creation.py Co-authored-by: Gregory Lee * apply PR feedback * add comment that storage_transformers=None is the same as storage_transformers=[] * use empty tuple as default for storage_transformers * make mypy happy * better coverage, minor fix, adding rmdir * add missing rmdir to test * increase coverage * improve test coverage * fix TestArrayWithStorageTransformersV3 * Update zarr/creation.py Co-authored-by: Gregory Lee * pick generic storage transformer changes from #1111 * increase coverage * fix order of storage transformers * retrigger CI * minor fixes * make flake8 happy * apply PR feedback Co-authored-by: Gregory Lee Co-authored-by: Josh Moore --- docs/release.rst | 17 ++- zarr/_storage/store.py | 225 +++++++++++++++++++++++++++++++++- zarr/core.py | 26 +++- zarr/creation.py | 12 +- zarr/meta.py | 48 +++++++- zarr/storage.py | 9 +- zarr/tests/test_core.py | 34 ++++- zarr/tests/test_creation.py | 15 +++ zarr/tests/test_storage_v3.py | 125 ++++++++++++++++++- 9 files changed, 493 insertions(+), 18 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 817bdc4f37..f633aea7cc 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,14 +6,20 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. - .. _unreleased: +.. _unreleased: - Unreleased - ---------- +Unreleased +---------- .. # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +* Improve Zarr V3 support, adding partial store read/write and storage transformers. 
+ Add two features of the [v3 spec](https://zarr-specs.readthedocs.io/en/latest/core/v3.0.html): + * storage transformers + * `get_partial_values` and `set_partial_values` + By :user:`Jonathan Striebel `; :issue:`1096`. + .. _release_2.13.6: 2.13.6 @@ -44,7 +50,10 @@ Bug fixes Appreciation ~~~~~~~~~~~~~ -Special thanks to Outreachy participants for contributing to most of the maintenance PRs. Please read the blog post summarising the contribution phase and welcoming new Outreachy interns: https://zarr.dev/blog/welcoming-outreachy-2022-interns/ +Special thanks to Outreachy participants for contributing to most of the +maintenance PRs. Please read the blog post summarising the contribution phase +and welcoming new Outreachy interns: +https://zarr.dev/blog/welcoming-outreachy-2022-interns/ Enhancements diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 9e265cf383..4d813b8e05 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -1,8 +1,10 @@ import abc import os +from collections import defaultdict from collections.abc import MutableMapping +from copy import copy from string import ascii_letters, digits -from typing import Any, List, Mapping, Optional, Union +from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union from zarr.meta import Metadata2, Metadata3 from zarr.util import normalize_storage_path @@ -254,6 +256,82 @@ def __setitem__(self, key, value): def __getitem__(self, key): """Get a value.""" + @abc.abstractmethod + def rmdir(self, path=None): + """Remove a data path and all its subkeys and related metadata. + Expects a path without the data or meta root prefix.""" + + @property + def supports_efficient_get_partial_values(self): + return False + + def get_partial_values( + self, + key_ranges: Sequence[Tuple[str, Tuple[int, Optional[int]]]] + ) -> List[Union[bytes, memoryview, bytearray]]: + """Get multiple partial values. + key_ranges can be an iterable of key, range pairs, + where a range specifies two integers range_start and range_length + as a tuple, (range_start, range_length). + range_length may be None to indicate to read until the end. + range_start may be negative to start reading range_start bytes + from the end of the file. + A key may occur multiple times with different ranges. + Inserts None for missing keys into the returned list.""" + results: List[Union[bytes, memoryview, bytearray]] = ( + [None] * len(key_ranges) # type: ignore[list-item] + ) + indexed_ranges_by_key: Dict[str, List[Tuple[int, Tuple[int, Optional[int]]]]] = ( + defaultdict(list) + ) + for i, (key, range_) in enumerate(key_ranges): + indexed_ranges_by_key[key].append((i, range_)) + for key, indexed_ranges in indexed_ranges_by_key.items(): + try: + value = self[key] + except KeyError: # pragma: no cover + continue + for i, (range_from, range_length) in indexed_ranges: + if range_length is None: + results[i] = value[range_from:] + else: + results[i] = value[range_from:range_from + range_length] + return results + + def supports_efficient_set_partial_values(self): + return False + + def set_partial_values(self, key_start_values): + """Set multiple partial values. + key_start_values can be an iterable of key, start and value triplets + as tuples, (key, start, value), where start defines the offset in bytes. + A key may occur multiple times with different starts and non-overlapping values. + Also, start may only be beyond the current value if other values fill the gap. 
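        For example (an illustrative sketch): writing (key, 7, b'xy') to a
        value that is currently 5 bytes long is only valid if another triplet
        in the same call fills bytes 5 and 6 first.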
+ start may be negative to start writing start bytes from the current + end of the file, ending the file with the new value.""" + unique_keys = set(next(zip(*key_start_values))) + values = {} + for key in unique_keys: + old_value = self.get(key) + values[key] = None if old_value is None else bytearray(old_value) + for key, start, value in key_start_values: + if values[key] is None: + assert start == 0 + values[key] = value + else: + if start > len(values[key]): # pragma: no cover + raise ValueError( + f"Cannot set value at start {start}, " + + f"since it is beyond the data at key {key}, " + + f"having length {len(values[key])}." + ) + if start < 0: + values[key][start:] = value + else: + values[key][start:start + len(value)] = value + for key, value in values.items(): + self[key] = value + def clear(self): """Remove all items from store.""" self.erase_prefix("/") @@ -303,6 +381,151 @@ def _ensure_store(store): ) +class StorageTransformer(MutableMapping, abc.ABC): + """Base class for storage transformers. The methods simply pass on the data as-is + and should be overwritten by sub-classes.""" + + _store_version = 3 + _metadata_class = Metadata3 + + def __init__(self, _type) -> None: + if _type not in self.valid_types: # pragma: no cover + raise ValueError( + f"Storage transformer cannot be initialized with type {_type}, " + + f"must be one of {list(self.valid_types)}." + ) + self.type = _type + self._inner_store = None + + def _copy_for_array(self, array, inner_store): + transformer_copy = copy(self) + transformer_copy._inner_store = inner_store + return transformer_copy + + @abc.abstractproperty + def extension_uri(self): + pass # pragma: no cover + + @abc.abstractproperty + def valid_types(self): + pass # pragma: no cover + + def get_config(self): + """Return a dictionary holding configuration parameters for this + storage transformer. All values must be compatible with JSON encoding.""" + # Override in sub-class if need special encoding of config values. + # By default, assume all non-private members are configuration + # parameters except for type . + return { + k: v for k, v in self.__dict__.items() + if not k.startswith('_') and k != "type" + } + + @classmethod + def from_config(cls, _type, config): + """Instantiate storage transformer from a configuration object.""" + # override in sub-class if need special decoding of config values + + # by default, assume constructor accepts configuration parameters as + # keyword arguments without any special decoding + return cls(_type, **config) + + @property + def inner_store(self) -> Union["StorageTransformer", StoreV3]: + assert self._inner_store is not None, ( + "inner_store is not initialized, first get a copy via _copy_for_array." 
+ ) + return self._inner_store + + # The following implementations are usually fine to keep as-is: + + def __eq__(self, other): + return ( + type(self) == type(other) and + self._inner_store == other._inner_store and + self.get_config() == other.get_config() + ) + + def erase(self, key): + self.__delitem__(key) + + def list(self): + return list(self.keys()) + + def list_dir(self, prefix): + return StoreV3.list_dir(self, prefix) + + def is_readable(self): + return self.inner_store.is_readable() + + def is_writeable(self): + return self.inner_store.is_writeable() + + def is_listable(self): + return self.inner_store.is_listable() + + def is_erasable(self): + return self.inner_store.is_erasable() + + def clear(self): + return self.inner_store.clear() + + def __enter__(self): + return self.inner_store.__enter__() + + def __exit__(self, exc_type, exc_value, traceback): + return self.inner_store.__exit__(exc_type, exc_value, traceback) + + def close(self) -> None: + return self.inner_store.close() + + # The following implementations might need to be re-implemented + # by subclasses implementing storage transformers: + + def rename(self, src_path: str, dst_path: str) -> None: + return self.inner_store.rename(src_path, dst_path) + + def list_prefix(self, prefix): + return self.inner_store.list_prefix(prefix) + + def erase_prefix(self, prefix): + return self.inner_store.erase_prefix(prefix) + + def rmdir(self, path=None): + return self.inner_store.rmdir(path) + + def __contains__(self, key): + return self.inner_store.__contains__(key) + + def __setitem__(self, key, value): + return self.inner_store.__setitem__(key, value) + + def __getitem__(self, key): + return self.inner_store.__getitem__(key) + + def __delitem__(self, key): + return self.inner_store.__delitem__(key) + + def __iter__(self): + return self.inner_store.__iter__() + + def __len__(self): + return self.inner_store.__len__() + + @property + def supports_efficient_get_partial_values(self): + return self.inner_store.supports_efficient_get_partial_values + + def get_partial_values(self, key_ranges): + return self.inner_store.get_partial_values(key_ranges) + + def supports_efficient_set_partial_values(self): + return self.inner_store.supports_efficient_set_partial_values() + + def set_partial_values(self, key_start_values): + return self.inner_store.set_partial_values(key_start_values) + + # allow MutableMapping for backwards compatibility StoreLike = Union[BaseStore, MutableMapping] diff --git a/zarr/core.py b/zarr/core.py index e5b2045160..5d37570831 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -189,6 +189,7 @@ def __init__( self._store = store self._chunk_store = chunk_store + self._transformed_chunk_store = None self._path = normalize_storage_path(path) if self._path: self._key_prefix = self._path + '/' @@ -292,6 +293,16 @@ def _load_metadata_nosync(self): filters = [get_codec(config) for config in filters] self._filters = filters + if self._version == 3: + storage_transformers = meta.get('storage_transformers', []) + if storage_transformers: + transformed_store = self._chunk_store or self._store + for storage_transformer in storage_transformers[::-1]: + transformed_store = storage_transformer._copy_for_array( + self, transformed_store + ) + self._transformed_chunk_store = transformed_store + def _refresh_metadata(self): if not self._cache_metadata: self._load_metadata() @@ -371,10 +382,12 @@ def read_only(self, value): @property def chunk_store(self): """A MutableMapping providing the underlying storage for array chunks.""" - if 
self._chunk_store is None: - return self._store - else: + if self._transformed_chunk_store is not None: + return self._transformed_chunk_store + elif self._chunk_store is not None: return self._chunk_store + else: + return self._store @property def shape(self): @@ -1800,7 +1813,7 @@ def _set_selection(self, indexer, value, fields=None): check_array_shape('value', value, sel_shape) # iterate over chunks in range - if not hasattr(self.store, "setitems") or self._synchronizer is not None \ + if not hasattr(self.chunk_store, "setitems") or self._synchronizer is not None \ or any(map(lambda x: x == 0, self.shape)): # iterative approach for chunk_coords, chunk_selection, out_selection in indexer: @@ -2229,7 +2242,10 @@ def _encode_chunk(self, chunk): cdata = chunk # ensure in-memory data is immutable and easy to compare - if isinstance(self.chunk_store, KVStore): + if ( + isinstance(self.chunk_store, KVStore) + or isinstance(self._chunk_store, KVStore) + ): cdata = ensure_bytes(cdata) return cdata diff --git a/zarr/creation.py b/zarr/creation.py index cc191e3734..a6fa8e44cc 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -22,7 +22,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', overwrite=False, path=None, chunk_store=None, filters=None, cache_metadata=True, cache_attrs=True, read_only=False, object_codec=None, dimension_separator=None, write_empty_chunks=True, - *, zarr_version=None, meta_array=None, **kwargs): + *, zarr_version=None, meta_array=None, storage_transformers=(), **kwargs): """Create an array. Parameters @@ -85,6 +85,14 @@ def create(shape, chunks=True, dtype=None, compressor='default', .. versionadded:: 2.11 + storage_transformers : sequence of StorageTransformers, optional + Setting storage transformers, changes the storage structure and behaviour + of data coming from the underlying store. The transformers are applied in the + order of the given sequence. Supplying an empty sequence is the same as omitting + the argument or setting it to None. May only be set when using zarr_version 3. + + .. versionadded:: 2.13 + zarr_version : {None, 2, 3}, optional The zarr protocol version of the created array. 
If None, it will be inferred from ``store`` or ``chunk_store`` if they are provided, @@ -170,7 +178,7 @@ def create(shape, chunks=True, dtype=None, compressor='default', init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, fill_value=fill_value, order=order, overwrite=overwrite, path=path, chunk_store=chunk_store, filters=filters, object_codec=object_codec, - dimension_separator=dimension_separator) + dimension_separator=dimension_separator, storage_transformers=storage_transformers) # instantiate array z = Array(store, path=path, chunk_store=chunk_store, synchronizer=synchronizer, diff --git a/zarr/meta.py b/zarr/meta.py index 77c55b9871..41a90101b5 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -9,7 +9,11 @@ from zarr.errors import MetadataError from zarr.util import json_dumps, json_loads -from typing import cast, Union, Any, List, Mapping as MappingType, Optional +from typing import cast, Union, Any, List, Mapping as MappingType, Optional, TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from zarr._storage.store import StorageTransformer + ZARR_FORMAT = 2 ZARR_FORMAT_v3 = 3 @@ -459,6 +463,36 @@ def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]: return codec + @classmethod + def _encode_storage_transformer_metadata( + cls, + storage_transformer: "StorageTransformer" + ) -> Optional[Mapping]: + return { + "extension": storage_transformer.extension_uri, + "type": storage_transformer.type, + "configuration": storage_transformer.get_config(), + } + + @classmethod + def _decode_storage_transformer_metadata(cls, meta: Mapping) -> "StorageTransformer": + from zarr.tests.test_storage_v3 import DummyStorageTransfomer + + # This might be changed to a proper registry in the future + KNOWN_STORAGE_TRANSFORMERS = [DummyStorageTransfomer] + + conf = meta.get('configuration', {}) + extension_uri = meta['extension'] + transformer_type = meta['type'] + + for StorageTransformerCls in KNOWN_STORAGE_TRANSFORMERS: + if StorageTransformerCls.extension_uri == extension_uri: + break + else: # pragma: no cover + raise NotImplementedError + + return StorageTransformerCls.from_config(transformer_type, conf) + @classmethod def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: meta = cls.parse_metadata(s) @@ -476,6 +510,10 @@ def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, A # TODO: remove dimension_separator? 
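        # Decode the compressor and any storage transformers back into
        # objects from their JSON metadata forms (the transformers via
        # _decode_storage_transformer_metadata above).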
compressor = cls._decode_codec_metadata(meta.get("compressor", None)) + storage_transformers = meta.get("storage_transformers", ()) + storage_transformers = [ + cls._decode_storage_transformer_metadata(i) for i in storage_transformers + ] extensions = meta.get("extensions", []) meta = dict( shape=tuple(meta["shape"]), @@ -493,6 +531,8 @@ def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, A # compressor field should be absent when there is no compression if compressor: meta['compressor'] = compressor + if storage_transformers: + meta['storage_transformers'] = storage_transformers except Exception as e: raise MetadataError("error decoding metadata: %s" % e) @@ -514,6 +554,10 @@ def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes: object_codec = None compressor = cls._encode_codec_metadata(meta.get("compressor", None)) + storage_transformers = meta.get("storage_transformers", ()) + storage_transformers = [ + cls._encode_storage_transformer_metadata(i) for i in storage_transformers + ] extensions = meta.get("extensions", []) meta = dict( shape=meta["shape"] + sdshape, @@ -532,6 +576,8 @@ def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes: meta["compressor"] = compressor if dimension_separator: meta["dimension_separator"] = dimension_separator + if storage_transformers: + meta["storage_transformers"] = storage_transformers return json_dumps(meta) diff --git a/zarr/storage.py b/zarr/storage.py index a2a8919d0b..db51cca947 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -311,6 +311,7 @@ def init_array( filters=None, object_codec=None, dimension_separator=None, + storage_transformers=(), ): """Initialize an array store with the given configuration. Note that this is a low-level function and there should be no need to call this directly from user code. @@ -438,7 +439,8 @@ def init_array( order=order, overwrite=overwrite, path=path, chunk_store=chunk_store, filters=filters, object_codec=object_codec, - dimension_separator=dimension_separator) + dimension_separator=dimension_separator, + storage_transformers=storage_transformers) def _init_array_metadata( @@ -455,6 +457,7 @@ def _init_array_metadata( filters=None, object_codec=None, dimension_separator=None, + storage_transformers=(), ): store_version = getattr(store, '_store_version', 2) @@ -576,6 +579,7 @@ def _init_array_metadata( if store_version < 3: meta.update(dict(chunks=chunks, dtype=dtype, order=order, filters=filters_config)) + assert not storage_transformers else: if dimension_separator is None: dimension_separator = "/" @@ -589,7 +593,8 @@ def _init_array_metadata( separator=dimension_separator), chunk_memory_layout=order, data_type=dtype, - attributes=attributes) + attributes=attributes, + storage_transformers=storage_transformers) ) key = _prefix_to_array_key(store, _path_to_prefix(path)) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index e32026e662..ffacefb937 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -49,9 +49,11 @@ KVStoreV3, LMDBStoreV3, LRUStoreCacheV3, + RmdirV3, SQLiteStoreV3, StoreV3, ) +from zarr.tests.test_storage_v3 import DummyStorageTransfomer from zarr.util import buffer_size from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec, mktemp @@ -3098,7 +3100,7 @@ def test_nbytes_stored(self): # Note: this custom mapping doesn't actually have all methods in the # v3 spec (e.g. erase), but they aren't needed here. 
-class CustomMappingV3(StoreV3): +class CustomMappingV3(RmdirV3, StoreV3): def __init__(self): self.inner = KVStoreV3(dict()) @@ -3359,6 +3361,36 @@ def expected(self): ] +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +class TestArrayWithStorageTransformersV3(TestArrayWithChunkStoreV3): + + @staticmethod + def create_array(array_path='arr1', read_only=False, **kwargs): + store = KVStoreV3(dict()) + # separate chunk store + chunk_store = KVStoreV3(dict()) + cache_metadata = kwargs.pop('cache_metadata', True) + cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) + dummy_storage_transformer = DummyStorageTransfomer( + "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT + ) + init_array(store, path=array_path, chunk_store=chunk_store, + storage_transformers=[dummy_storage_transformer], **kwargs) + return Array(store, path=array_path, read_only=read_only, + chunk_store=chunk_store, cache_metadata=cache_metadata, + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + + def expected(self): + return [ + "3fb9a4f8233b09ad02067b6b7fc9fd5caa405c7d", + "89c8eb364beb84919fc9153d2c1ed2696274ec18", + "73307055c3aec095dd1232c38d793ef82a06bd97", + "6152c09255a5efa43b1a115546e35affa00c138c", + "2f8802fc391f67f713302e84fad4fd8f1366d6c2", + ] + + @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") def test_array_mismatched_store_versions(): store_v3 = KVStoreV3(dict()) diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index 4c9c292734..b791bc3952 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -19,8 +19,10 @@ from zarr._storage.store import v3_api_available from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer +from zarr.tests.test_storage_v3 import DummyStorageTransfomer from zarr.tests.util import mktemp, have_fsspec + _VERSIONS = ((None, 2, 3) if v3_api_available else (None, 2)) _VERSIONS2 = ((2, 3) if v3_api_available else (2, )) @@ -747,3 +749,16 @@ def test_create_read_only(zarr_version, at_root): def test_json_dumps_chunks_numpy_dtype(): z = zeros((10,), chunks=(np.int64(2),)) assert np.all(z[...] 
== 0) + + +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +@pytest.mark.parametrize('at_root', [False, True]) +def test_create_with_storage_transformers(at_root): + kwargs = _init_creation_kwargs(zarr_version=3, at_root=at_root) + transformer = DummyStorageTransfomer( + "dummy_type", + test_value=DummyStorageTransfomer.TEST_CONSTANT + ) + z = create(1000000000, chunks=True, storage_transformers=[transformer], **kwargs) + assert isinstance(z.chunk_store, DummyStorageTransfomer) + assert z.chunk_store.test_value == DummyStorageTransfomer.TEST_CONSTANT diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index 4f6215135c..9f18c89361 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -1,6 +1,7 @@ import array import atexit import copy +import inspect import os import tempfile @@ -8,7 +9,7 @@ import pytest import zarr -from zarr._storage.store import _get_hierarchy_metadata, v3_api_available +from zarr._storage.store import _get_hierarchy_metadata, v3_api_available, StorageTransformer from zarr.meta import _default_entry_point_metadata_v3 from zarr.storage import (atexit_rmglob, atexit_rmtree, data_root, default_compressor, getsize, init_array, meta_root, @@ -88,6 +89,18 @@ def keys(self): """keys""" +class DummyStorageTransfomer(StorageTransformer): + TEST_CONSTANT = "test1234" + + extension_uri = "https://purl.org/zarr/spec/storage_transformers/dummy/1.0" + valid_types = ["dummy_type"] + + def __init__(self, _type, test_value) -> None: + super().__init__(_type) + assert test_value == self.TEST_CONSTANT + self.test_value = test_value + + def test_ensure_store_v3(): class InvalidStore: pass @@ -190,8 +203,11 @@ def test_init_array(self, dimension_separator_fixture_v3): store = self.create_store() path = 'arr1' + transformer = DummyStorageTransfomer( + "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT + ) init_array(store, path=path, shape=1000, chunks=100, - dimension_separator=pass_dim_sep) + dimension_separator=pass_dim_sep, storage_transformers=[transformer]) # check metadata mkey = meta_root + path + '.array.json' @@ -204,6 +220,9 @@ def test_init_array(self, dimension_separator_fixture_v3): assert meta['fill_value'] is None # Missing MUST be assumed to be "/" assert meta['chunk_grid']['separator'] is want_dim_sep + assert len(meta["storage_transformers"]) == 1 + assert isinstance(meta["storage_transformers"][0], DummyStorageTransfomer) + assert meta["storage_transformers"][0].test_value == DummyStorageTransfomer.TEST_CONSTANT store.close() def test_list_prefix(self): @@ -235,6 +254,67 @@ def test_rename_nonexisting(self): with pytest.raises(NotImplementedError): store.rename('a', 'b') + def test_get_partial_values(self): + store = self.create_store() + store.supports_efficient_get_partial_values in [True, False] + store[data_root + 'foo'] = b'abcdefg' + store[data_root + 'baz'] = b'z' + assert [b'a'] == store.get_partial_values( + [ + (data_root + 'foo', (0, 1)) + ] + ) + assert [ + b'd', b'b', b'z', b'abc', b'defg', b'defg', b'g', b'ef' + ] == store.get_partial_values( + [ + (data_root + 'foo', (3, 1)), + (data_root + 'foo', (1, 1)), + (data_root + 'baz', (0, 1)), + (data_root + 'foo', (0, 3)), + (data_root + 'foo', (3, 4)), + (data_root + 'foo', (3, None)), + (data_root + 'foo', (-1, None)), + (data_root + 'foo', (-3, 2)), + ] + ) + + def test_set_partial_values(self): + store = self.create_store() + store.supports_efficient_set_partial_values() + store[data_root + 'foo'] = b'abcdefg' + 
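        # Each triplet below is (key, start, value): this first call
        # overwrites bytes 0-2 of b'abcdefg' in place, so the expected
        # result asserted below is b'heydefg'.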
store.set_partial_values( + [ + (data_root + 'foo', 0, b'hey') + ] + ) + assert store[data_root + 'foo'] == b'heydefg' + + store.set_partial_values( + [ + (data_root + 'baz', 0, b'z') + ] + ) + assert store[data_root + 'baz'] == b'z' + store.set_partial_values( + [ + (data_root + 'foo', 1, b'oo'), + (data_root + 'baz', 1, b'zzz'), + (data_root + 'baz', 4, b'aaaa'), + (data_root + 'foo', 6, b'done'), + ] + ) + assert store[data_root + 'foo'] == b'hoodefdone' + assert store[data_root + 'baz'] == b'zzzzaaaa' + store.set_partial_values( + [ + (data_root + 'foo', -2, b'NE'), + (data_root + 'baz', -5, b'q'), + ] + ) + assert store[data_root + 'foo'] == b'hoodefdoNE' + assert store[data_root + 'baz'] == b'zzzq' + class TestMappingStoreV3(StoreV3Tests): @@ -443,6 +523,31 @@ def create_store(self, **kwargs): return store +class TestStorageTransformerV3(TestMappingStoreV3): + + def create_store(self, **kwargs): + inner_store = super().create_store(**kwargs) + storage_transformer = DummyStorageTransfomer( + "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT + ) + return storage_transformer._copy_for_array(None, inner_store) + + def test_method_forwarding(self): + store = self.create_store() + assert store.list() == store.inner_store.list() + assert store.list_dir(data_root) == store.inner_store.list_dir(data_root) + + assert store.is_readable() + assert store.is_writeable() + assert store.is_listable() + store.inner_store._readable = False + store.inner_store._writeable = False + store.inner_store._listable = False + assert not store.is_readable() + assert not store.is_writeable() + assert not store.is_listable() + + class TestLRUStoreCacheV3(_TestLRUStoreCache, StoreV3Tests): CountingClass = CountingDictV3 @@ -535,3 +640,19 @@ def test_top_level_imports(): assert hasattr(zarr, store_name) # pragma: no cover else: assert not hasattr(zarr, store_name) # pragma: no cover + + +def _get_public_and_dunder_methods(some_class): + return set( + name for name, _ in inspect.getmembers(some_class, predicate=inspect.isfunction) + if not name.startswith("_") or name.startswith("__") + ) + + +def test_storage_transformer_interface(): + store_v3_methods = _get_public_and_dunder_methods(StoreV3) + store_v3_methods.discard("__init__") + storage_transformer_methods = _get_public_and_dunder_methods(StorageTransformer) + storage_transformer_methods.discard("__init__") + storage_transformer_methods.discard("get_config") + assert storage_transformer_methods == store_v3_methods From b9e9f5aaa44ca564835c3c242937236ea04aecb2 Mon Sep 17 00:00:00 2001 From: "Mads R. B. 
Kristensen" Date: Thu, 19 Jan 2023 19:55:53 +0100 Subject: [PATCH 0268/1078] FSStore: use `ensure_bytes()` (#1285) --- zarr/storage.py | 12 +++++++++--- zarr/tests/test_core.py | 25 +++++++++++++++++++++++++ zarr/util.py | 36 +++++++++++++++++++++++++++++++++--- 3 files changed, 67 insertions(+), 6 deletions(-) diff --git a/zarr/storage.py b/zarr/storage.py index db51cca947..5f7b991aef 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -55,8 +55,8 @@ from zarr.util import (buffer_size, json_loads, nolock, normalize_chunks, normalize_dimension_separator, normalize_dtype, normalize_fill_value, normalize_order, - normalize_shape, normalize_storage_path, retry_call - ) + normalize_shape, normalize_storage_path, retry_call, + ensure_contiguous_ndarray_or_bytes) from zarr._storage.absstore import ABSStore # noqa: F401 from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 @@ -1395,13 +1395,19 @@ def __getitem__(self, key): def setitems(self, values): if self.mode == 'r': raise ReadOnlyError() - values = {self._normalize_key(key): val for key, val in values.items()} + + # Normalize keys and make sure the values are bytes + values = { + self._normalize_key(key): ensure_contiguous_ndarray_or_bytes(val) + for key, val in values.items() + } self.map.setitems(values) def __setitem__(self, key, value): if self.mode == 'r': raise ReadOnlyError() key = self._normalize_key(key) + value = ensure_contiguous_ndarray_or_bytes(value) path = self.dir_path(key) try: if self.fs.isdir(path): diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index ffacefb937..a9d674e2d9 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -17,6 +17,7 @@ from numpy.testing import assert_array_almost_equal, assert_array_equal from pkg_resources import parse_version +import zarr from zarr._storage.store import ( v3_api_available, ) @@ -3409,3 +3410,27 @@ def test_array_mismatched_store_versions(): Array(store_v3, path='dataset', read_only=False, chunk_store=chunk_store_v2) with pytest.raises(ValueError): Array(store_v2, path='dataset', read_only=False, chunk_store=chunk_store_v3) + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +def test_issue_1279(tmpdir): + """See """ + + data = np.arange(25).reshape((5, 5)) + ds = zarr.create( + shape=data.shape, + chunks=(5, 5), + dtype=data.dtype, + compressor=(None), + store=FSStore(url=str(tmpdir), mode="a"), + order="F", + ) + + ds[:] = data + + ds_reopened = zarr.open_array( + store=FSStore(url=str(tmpdir), mode="r") + ) + + written_data = ds_reopened[:] + assert_array_equal(data, written_data) diff --git a/zarr/util.py b/zarr/util.py index 9fcdac9df7..dfbb551651 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -5,17 +5,22 @@ from textwrap import TextWrapper import mmap import time +from typing import Any, Callable, Dict, Optional, Tuple, Union import numpy as np from asciitree import BoxStyle, LeftAligned from asciitree.traversal import Traversal from collections.abc import Iterable -from numcodecs.compat import ensure_text, ensure_ndarray_like +from numcodecs.compat import ( + ensure_text, + ensure_ndarray_like, + ensure_bytes, + ensure_contiguous_ndarray_like +) +from numcodecs.ndarray_like import NDArrayLike from numcodecs.registry import codec_registry from numcodecs.blosc import cbuffer_sizes, cbuffer_metainfo -from typing import Any, Callable, Dict, Optional, Tuple, Union - def flatten(arg: Iterable) -> Iterable: for element in arg: @@ -696,3 +701,28 @@ def all_equal(value: Any, array: Any): # using == raises warnings 
from numpy deprecated pattern, but # using np.equal() raises type errors for structured dtypes... return np.all(value == array) + + +def ensure_contiguous_ndarray_or_bytes(buf) -> Union[NDArrayLike, bytes]: + """Convenience function to coerce `buf` to ndarray-like array or bytes. + + First check if `buf` can be zero-copy converted to a contiguous array. + If not, `buf` will be copied to a newly allocated `bytes` object. + + Parameters + ---------- + buf : ndarray-like, array-like, or bytes-like + A numpy array like object such as numpy.ndarray, cupy.ndarray, or + any object exporting a buffer interface. + + Returns + ------- + arr : NDArrayLike or bytes + A ndarray-like or bytes object + """ + + try: + return ensure_contiguous_ndarray_like(buf) + except TypeError: + # An error is raised if `buf` couldn't be zero-copy converted + return ensure_bytes(buf) From c45e8709f2f55d5635ff8587f0295e334d8872ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 20 Jan 2023 16:42:32 +0100 Subject: [PATCH 0269/1078] Bump fsspec from 2022.11.0 to 2023.1.0 (#1327) * Bump fsspec from 2022.11.0 to 2023.1.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2022.11.0 to 2023.1.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2022.11.0...2023.1.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Bump s3fs as well Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 5d7dc3398c..0c7cbf44ca 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.7.0 -fsspec==2022.11.0 -s3fs==2022.11.0 +fsspec==2023.1.0 +s3fs==2023.1.0 moto[server]>=4.0.8 From 1793da01edb9890955a383fc84121334e3aa4cc0 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Fri, 20 Jan 2023 10:48:28 -0800 Subject: [PATCH 0270/1078] Add FSStore contiguous bug fix note (#1325) --- docs/release.rst | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index f633aea7cc..f82c0730dd 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -14,12 +14,21 @@ Unreleased # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. -* Improve Zarr V3 support, adding partial store read/write and storage transformers. - Add two features of the [v3 spec](https://zarr-specs.readthedocs.io/en/latest/core/v3.0.html): - * storage transformers - * `get_partial_values` and `set_partial_values` + +Major changes +~~~~~~~~~~~~~ + +* Improve `Zarr V3 support `_ + adding partial store read/write and storage transformers. By :user:`Jonathan Striebel `; :issue:`1096`. + +Bug fixes +~~~~~~~~~ + +* Ensure contiguous data is give to ``FSStore``. Only copying if needed. + By :user:`Mads R. B. Kristensen ` :issue:`1285`. + .. 
_release_2.13.6: 2.13.6 From 0bf0b3b3444dd57debc8a6b5eacb6eb5c082c668 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Jan 2023 10:43:17 +0100 Subject: [PATCH 0271/1078] Bump h5py from 3.7.0 to 3.8.0 (#1330) Bumps [h5py](https://github.com/h5py/h5py) from 3.7.0 to 3.8.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.7.0...3.8.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0c7cbf44ca..0cf6661d1e 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 -h5py==3.7.0 +h5py==3.8.0 fsspec==2023.1.0 s3fs==2023.1.0 moto[server]>=4.0.8 From 6f11ae78b142e242d2ac8bc67019e7528539fe73 Mon Sep 17 00:00:00 2001 From: Brett Graham Date: Thu, 26 Jan 2023 18:29:08 -0500 Subject: [PATCH 0272/1078] use store dimension separtor in DirectoryStore.listdir (#1335) * use store dimension separtor in DirectoryStore.listdir NestedDirectoryStore which inherits from DirectoryStore only supports '/' as a dimension separator. However listdir uses the parent DirectoryStore.listdir which produces keys with an incorrect separator '.' Fixes #1334 * update release note --- docs/release.rst | 2 ++ zarr/storage.py | 4 +++- zarr/tests/test_storage.py | 7 +++++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index f82c0730dd..5ebd77c94f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -28,6 +28,8 @@ Bug fixes * Ensure contiguous data is give to ``FSStore``. Only copying if needed. By :user:`Mads R. B. Kristensen ` :issue:`1285`. +* NestedDirectoryStore.listdir now returns chunk keys with the correct '/' dimension_separator. + By :user:`Brett Graham ` :issue:`1334`. .. 
diff --git a/zarr/storage.py b/zarr/storage.py
index 5f7b991aef..fae9530716 100644
--- a/zarr/storage.py
+++ b/zarr/storage.py
@@ -1204,7 +1204,9 @@ def _nested_listdir(self, path=None):
                 for file_name in file_names:
                     file_path = os.path.join(dir_path, file_name)
                     rel_path = file_path.split(root_path + os.path.sep)[1]
-                    new_children.append(rel_path.replace(os.path.sep, '.'))
+                    new_children.append(rel_path.replace(
+                        os.path.sep,
+                        self._dimension_separator or '.'))
             else:
                 new_children.append(entry)
         return sorted(new_children)

diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py
index 7c23735f36..0b21dfbd88 100644
--- a/zarr/tests/test_storage.py
+++ b/zarr/tests/test_storage.py
@@ -1442,6 +1442,13 @@ def test_chunk_nesting(self):
         store[self.root + '42'] = b'zzz'
         assert b'zzz' == store[self.root + '42']

+    def test_listdir(self):
+        store = self.create_store()
+        z = zarr.zeros((10, 10), chunks=(5, 5), store=store)
+        z[:] = 1  # write to all chunks
+        for k in store.listdir():
+            assert store.get(k) is not None
+

 class TestNestedDirectoryStoreNone:

From c714d2b9d3d27086572090690e5119d76ac5e7fc Mon Sep 17 00:00:00 2001
From: Jonathan Striebel
Date: Thu, 2 Feb 2023 15:12:45 +0100
Subject: [PATCH 0273/1078] Sharding storage transformer for v3 (#1111)

* add storage_transformers and get/set_partial_values
* formatting
* add docs and release notes
* add test_core testcase
* Update zarr/creation.py

Co-authored-by: Gregory Lee

* apply PR feedback
* add comment that storage_transformers=None is the same as storage_transformers=[]
* use empty tuple as default for storage_transformers
* make mypy happy
* better coverage, minor fix, adding rmdir
* add missing rmdir to test
* increase coverage
* improve test coverage
* fix TestArrayWithStorageTransformersV3
* Update zarr/creation.py

Co-authored-by: Gregory Lee

* add sharding storage transformer
* add actual transformer
* fixes, and allow partial reads for uncompressed v3 arrays
* pick generic storage transformer changes from #1111
* increase coverage
* make lgtm happy
* add release note
* better coverage
* fix hexdigest
* improve tests
* fix order of storage transformers
* fix order of storage transformers
* retrigger CI
* minor test improvement
* minor test update
* apply PR feedback
* minor fixes
* make flake8 happy
* call ensure_bytes in sharding transformer
* minor fixes
* apply PR feedback
* adapt to supports_efficient_get_partial_values property
* add ZARR_V3_SHARDING flag for sharding usage
* fix release notes
* fix release notes

---------

Co-authored-by: Gregory Lee
Co-authored-by: Josh Moore
---
 .github/workflows/minimal.yml            |   2 +
 .github/workflows/python-package.yml     |   1 +
 .github/workflows/windows-testing.yml    |   1 +
 docs/release.rst                         |  11 +-
 zarr/_storage/v3.py                      |  29 ++
 zarr/_storage/v3_storage_transformers.py | 383 +++++++++++++++++++++++
 zarr/core.py                             |  57 +++-
 zarr/meta.py                             |   3 +-
 zarr/tests/test_core.py                  | 113 ++++++-
 zarr/tests/test_storage_v3.py            |  28 +-
 zarr/util.py                             |  19 ++
 11 files changed, 623 insertions(+), 24 deletions(-)
 create mode 100644 zarr/_storage/v3_storage_transformers.py

diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml
index 2cde38e081..4de5aca273 100644
--- a/.github/workflows/minimal.yml
+++ b/.github/workflows/minimal.yml
@@ -24,6 +24,7 @@ jobs:
       shell: "bash -l {0}"
       env:
         ZARR_V3_EXPERIMENTAL_API: 1
+        ZARR_V3_SHARDING: 1
       run: |
         conda activate minimal
         python -m pip install .
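The two environment flags set throughout these workflow files are deliberate opt-ins. A minimal sketch of enabling them from Python instead of CI configuration, assuming (as the module added below does) that the flags are read once at import time:

    import os

    # Both flags must be set before zarr is imported, because the
    # modules read them once at import time.
    os.environ["ZARR_V3_EXPERIMENTAL_API"] = "1"  # enable the experimental v3 API
    os.environ["ZARR_V3_SHARDING"] = "1"          # enable the sharding transformer

    import zarr  # noqa: E402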
@@ -32,6 +33,7 @@ jobs: shell: "bash -l {0}" env: ZARR_V3_EXPERIMENTAL_API: 1 + ZARR_V3_SHARDING: 1 run: | conda activate minimal rm -rf fixture/ diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 872ce52343..cee2ca7aef 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -70,6 +70,7 @@ jobs: ZARR_TEST_MONGO: 1 ZARR_TEST_REDIS: 1 ZARR_V3_EXPERIMENTAL_API: 1 + ZARR_V3_SHARDING: 1 run: | conda activate zarr-env mkdir ~/blob_emulator diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index ea1d0f64c9..2f8922b447 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -52,6 +52,7 @@ jobs: env: ZARR_TEST_ABS: 1 ZARR_V3_EXPERIMENTAL_API: 1 + ZARR_V3_SHARDING: 1 - name: Conda info shell: bash -l {0} run: conda info diff --git a/docs/release.rst b/docs/release.rst index 5ebd77c94f..dcec2872fb 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -14,13 +14,16 @@ Unreleased # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. - Major changes ~~~~~~~~~~~~~ -* Improve `Zarr V3 support `_ - adding partial store read/write and storage transformers. - By :user:`Jonathan Striebel `; :issue:`1096`. +* Improve Zarr V3 support, adding partial store read/write and storage transformers. + Add two features of the [v3 spec](https://zarr-specs.readthedocs.io/en/latest/core/v3.0.html): + * storage transformers + * `get_partial_values` and `set_partial_values` + * efficient `get_partial_values` implementation for `FSStoreV3` + * sharding storage transformer + By :user:`Jonathan Striebel `; :issue:`1096`, :issue:`1111`. Bug fixes diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index a0a1870ffc..5f8964fb5d 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -182,6 +182,35 @@ def rmdir(self, path=None): if self.fs.isdir(store_path): self.fs.rm(store_path, recursive=True) + @property + def supports_efficient_get_partial_values(self): + return True + + def get_partial_values(self, key_ranges): + """Get multiple partial values. + key_ranges can be an iterable of key, range pairs, + where a range specifies two integers range_start and range_length + as a tuple, (range_start, range_length). + range_length may be None to indicate to read until the end. + range_start may be negative to start reading range_start bytes + from the end of the file. + A key may occur multiple times with different ranges. 
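+        For example (an illustrative key), ``[("data/root/arr/c0/0", (0, 100)),
+        ("data/root/arr/c0/0", (-8, None))]`` would read the first 100 bytes
+        and then the last 8 bytes of the same object.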
+        Inserts None for missing keys into the returned list."""
+        results = []
+        for key, (range_start, range_length) in key_ranges:
+            key = self._normalize_key(key)
+            path = self.dir_path(key)
+            try:
+                if range_start is None or range_length is None:
+                    end = None
+                else:
+                    end = range_start + range_length
+                result = self.fs.cat_file(path, start=range_start, end=end)
+            except self.map.missing_exceptions:
+                result = None
+            results.append(result)
+        return results
+

 class MemoryStoreV3(MemoryStore, StoreV3):

diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py
new file mode 100644
index 0000000000..3675d42c38
--- /dev/null
+++ b/zarr/_storage/v3_storage_transformers.py
@@ -0,0 +1,383 @@
+import functools
+import itertools
+import os
+from typing import NamedTuple, Tuple, Optional, Union, Iterator
+
+from numcodecs.compat import ensure_bytes
+import numpy as np
+
+from zarr._storage.store import StorageTransformer, StoreV3, _rmdir_from_keys_v3
+from zarr.util import normalize_storage_path
+
+
+MAX_UINT_64 = 2 ** 64 - 1
+
+
+v3_sharding_available = os.environ.get('ZARR_V3_SHARDING', '0').lower() not in ['0', 'false']
+
+
+def assert_zarr_v3_sharding_available():
+    if not v3_sharding_available:
+        raise NotImplementedError(
+            "Using V3 sharding is experimental and not yet finalized! To enable support, set:\n"
+            "ZARR_V3_SHARDING=1"
+        )  # pragma: no cover
+
+
+class _ShardIndex(NamedTuple):
+    store: "ShardingStorageTransformer"
+    # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2)
+    offsets_and_lengths: np.ndarray
+
+    def __localize_chunk__(self, chunk: Tuple[int, ...]) -> Tuple[int, ...]:
+        return tuple(
+            chunk_i % shard_i
+            for chunk_i, shard_i in zip(chunk, self.store.chunks_per_shard)
+        )
+
+    def is_all_empty(self) -> bool:
+        return np.array_equiv(self.offsets_and_lengths, MAX_UINT_64)
+
+    def get_chunk_slice(self, chunk: Tuple[int, ...]) -> Optional[slice]:
+        localized_chunk = self.__localize_chunk__(chunk)
+        chunk_start, chunk_len = self.offsets_and_lengths[localized_chunk]
+        if (chunk_start, chunk_len) == (MAX_UINT_64, MAX_UINT_64):
+            return None
+        else:
+            return slice(int(chunk_start), int(chunk_start + chunk_len))
+
+    def set_chunk_slice(
+        self, chunk: Tuple[int, ...], chunk_slice: Optional[slice]
+    ) -> None:
+        localized_chunk = self.__localize_chunk__(chunk)
+        if chunk_slice is None:
+            self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64)
+        else:
+            self.offsets_and_lengths[localized_chunk] = (
+                chunk_slice.start,
+                chunk_slice.stop - chunk_slice.start,
+            )
+
+    def to_bytes(self) -> bytes:
+        return self.offsets_and_lengths.tobytes(order="C")
+
+    @classmethod
+    def from_bytes(
+        cls, buffer: Union[bytes, bytearray], store: "ShardingStorageTransformer"
+    ) -> "_ShardIndex":
+        try:
+            return cls(
+                store=store,
+                offsets_and_lengths=np.frombuffer(bytearray(buffer), dtype="<u8").reshape(
+                    *store.chunks_per_shard, 2
+                ),
+            )
+        except ValueError as e:  # pragma: no cover
+            raise RuntimeError from e
+
+    @classmethod
+    def create_empty(cls, store: "ShardingStorageTransformer"):
+        # reserving 2*64bit per chunk for offset and length:
+        return cls.from_bytes(
+            MAX_UINT_64.to_bytes(8, byteorder="little")
+            * (2 * store._num_chunks_per_shard),
+            store=store,
+        )
+
+
+class ShardingStorageTransformer(StorageTransformer):  # lgtm[py/missing-equals]
+    """Implements sharding as a storage transformer, as described in the spec:
+    https://purl.org/zarr/spec/storage_transformers/sharding/1.0
+    """
+
+    extension_uri = "https://purl.org/zarr/spec/storage_transformers/sharding/1.0"
+    valid_types = ["indexed"]
+
+    def __init__(self, _type, chunks_per_shard) -> None:
+        assert_zarr_v3_sharding_available()
+        super().__init__(_type)
+        if isinstance(chunks_per_shard, int):
+            chunks_per_shard = (chunks_per_shard, )
+        else:
+            chunks_per_shard = tuple(int(i) for i in chunks_per_shard)
+        if chunks_per_shard == ():
+            chunks_per_shard = (1, )
+        self.chunks_per_shard = chunks_per_shard
+        self._num_chunks_per_shard = functools.reduce(
+            lambda x, y: x * y, chunks_per_shard, 1
+        )
+        self._dimension_separator = None
+        self._data_key_prefix = None
+
+    def _copy_for_array(self, array, inner_store):
+        transformer_copy = super()._copy_for_array(array, inner_store)
+        transformer_copy._dimension_separator =
array._dimension_separator + transformer_copy._data_key_prefix = array._data_key_prefix + if len(array._shape) > len(self.chunks_per_shard): + # The array shape might be longer when initialized with subdtypes. + # subdtypes dimensions come last, therefore padding chunks_per_shard + # with ones, effectively disabling sharding on the unlisted dimensions. + transformer_copy.chunks_per_shard += ( + (1, ) * (len(array._shape) - len(self.chunks_per_shard)) + ) + return transformer_copy + + @property + def dimension_separator(self) -> str: + assert self._dimension_separator is not None, ( + "dimension_separator is not initialized, first get a copy via _copy_for_array." + ) + return self._dimension_separator + + def _is_data_key(self, key: str) -> bool: + assert self._data_key_prefix is not None, ( + "data_key_prefix is not initialized, first get a copy via _copy_for_array." + ) + return key.startswith(self._data_key_prefix) + + def _key_to_shard(self, chunk_key: str) -> Tuple[str, Tuple[int, ...]]: + prefix, _, chunk_string = chunk_key.rpartition("c") + chunk_subkeys = tuple( + map(int, chunk_string.split(self.dimension_separator)) + ) if chunk_string else (0, ) + shard_key_tuple = ( + subkey // shard_i + for subkey, shard_i in zip(chunk_subkeys, self.chunks_per_shard) + ) + shard_key = ( + prefix + "c" + self.dimension_separator.join(map(str, shard_key_tuple)) + ) + return shard_key, chunk_subkeys + + def _get_index_from_store(self, shard_key: str) -> _ShardIndex: + # At the end of each shard 2*64bit per chunk for offset and length define the index: + index_bytes = self.inner_store.get_partial_values( + [(shard_key, (-16 * self._num_chunks_per_shard, None))] + )[0] + if index_bytes is None: + raise KeyError(shard_key) + return _ShardIndex.from_bytes( + index_bytes, + self, + ) + + def _get_index_from_buffer(self, buffer: Union[bytes, bytearray]) -> _ShardIndex: + # At the end of each shard 2*64bit per chunk for offset and length define the index: + return _ShardIndex.from_bytes(buffer[-16 * self._num_chunks_per_shard:], self) + + def _get_chunks_in_shard(self, shard_key: str) -> Iterator[Tuple[int, ...]]: + _, _, chunk_string = shard_key.rpartition("c") + shard_key_tuple = tuple( + map(int, chunk_string.split(self.dimension_separator)) + ) if chunk_string else (0, ) + for chunk_offset in itertools.product( + *(range(i) for i in self.chunks_per_shard) + ): + yield tuple( + shard_key_i * shards_i + offset_i + for shard_key_i, offset_i, shards_i in zip( + shard_key_tuple, chunk_offset, self.chunks_per_shard + ) + ) + + def __getitem__(self, key): + if self._is_data_key(key): + if self.supports_efficient_get_partial_values: + # Use the partial implementation, which fetches the index separately + value = self.get_partial_values([(key, (0, None))])[0] + if value is None: + raise KeyError(key) + else: + return value + shard_key, chunk_subkey = self._key_to_shard(key) + try: + full_shard_value = self.inner_store[shard_key] + except KeyError: + raise KeyError(key) + index = self._get_index_from_buffer(full_shard_value) + chunk_slice = index.get_chunk_slice(chunk_subkey) + if chunk_slice is not None: + return full_shard_value[chunk_slice] + else: + raise KeyError(key) + else: + return self.inner_store.__getitem__(key) + + def __setitem__(self, key, value): + value = ensure_bytes(value) + if self._is_data_key(key): + shard_key, chunk_subkey = self._key_to_shard(key) + chunks_to_read = set(self._get_chunks_in_shard(shard_key)) + chunks_to_read.remove(chunk_subkey) + new_content = {chunk_subkey: value} + 
try: + if self.supports_efficient_get_partial_values: + index = self._get_index_from_store(shard_key) + full_shard_value = None + else: + full_shard_value = self.inner_store[shard_key] + index = self._get_index_from_buffer(full_shard_value) + except KeyError: + index = _ShardIndex.create_empty(self) + else: + chunk_slices = [ + (chunk_to_read, index.get_chunk_slice(chunk_to_read)) + for chunk_to_read in chunks_to_read + ] + valid_chunk_slices = [ + (chunk_to_read, chunk_slice) + for chunk_to_read, chunk_slice in chunk_slices + if chunk_slice is not None + ] + # use get_partial_values if less than half of the available chunks must be read: + # (This can be changed when set_partial_values can be used efficiently.) + use_partial_get = ( + self.supports_efficient_get_partial_values + and len(valid_chunk_slices) < len(chunk_slices) / 2 + ) + + if use_partial_get: + chunk_values = self.inner_store.get_partial_values( + [ + ( + shard_key, + ( + chunk_slice.start, + chunk_slice.stop - chunk_slice.start, + ), + ) + for _, chunk_slice in valid_chunk_slices + ] + ) + for chunk_value, (chunk_to_read, _) in zip( + chunk_values, valid_chunk_slices + ): + new_content[chunk_to_read] = chunk_value + else: + if full_shard_value is None: + full_shard_value = self.inner_store[shard_key] + for chunk_to_read, chunk_slice in valid_chunk_slices: + if chunk_slice is not None: + new_content[chunk_to_read] = full_shard_value[chunk_slice] + + shard_content = b"" + for chunk_subkey, chunk_content in new_content.items(): + chunk_slice = slice( + len(shard_content), len(shard_content) + len(chunk_content) + ) + index.set_chunk_slice(chunk_subkey, chunk_slice) + shard_content += chunk_content + # Appending the index at the end of the shard: + shard_content += index.to_bytes() + self.inner_store[shard_key] = shard_content + else: # pragma: no cover + self.inner_store[key] = value + + def __delitem__(self, key): + if self._is_data_key(key): + shard_key, chunk_subkey = self._key_to_shard(key) + try: + index = self._get_index_from_store(shard_key) + except KeyError: + raise KeyError(key) + + index.set_chunk_slice(chunk_subkey, None) + + if index.is_all_empty(): + del self.inner_store[shard_key] + else: + index_bytes = index.to_bytes() + self.inner_store.set_partial_values([(shard_key, -len(index_bytes), index_bytes)]) + else: # pragma: no cover + del self.inner_store[key] + + def _shard_key_to_original_keys(self, key: str) -> Iterator[str]: + if self._is_data_key(key): + index = self._get_index_from_store(key) + prefix, _, _ = key.rpartition("c") + for chunk_tuple in self._get_chunks_in_shard(key): + if index.get_chunk_slice(chunk_tuple) is not None: + yield prefix + "c" + self.dimension_separator.join( + map(str, chunk_tuple) + ) + else: + yield key + + def __iter__(self) -> Iterator[str]: + for key in self.inner_store: + yield from self._shard_key_to_original_keys(key) + + def __len__(self): + return sum(1 for _ in self.keys()) + + def get_partial_values(self, key_ranges): + if self.supports_efficient_get_partial_values: + transformed_key_ranges = [] + cached_indices = {} + none_indices = [] + for i, (key, range_) in enumerate(key_ranges): + if self._is_data_key(key): + shard_key, chunk_subkey = self._key_to_shard(key) + try: + index = cached_indices[shard_key] + except KeyError: + try: + index = self._get_index_from_store(shard_key) + except KeyError: + none_indices.append(i) + continue + cached_indices[shard_key] = index + chunk_slice = index.get_chunk_slice(chunk_subkey) + if chunk_slice is None: + 
none_indices.append(i) + continue + range_start, range_length = range_ + if range_length is None: + range_length = chunk_slice.stop - chunk_slice.start + transformed_key_ranges.append( + (shard_key, (range_start + chunk_slice.start, range_length)) + ) + else: # pragma: no cover + transformed_key_ranges.append((key, range_)) + values = self.inner_store.get_partial_values(transformed_key_ranges) + for i in none_indices: + values.insert(i, None) + return values + else: + return StoreV3.get_partial_values(self, key_ranges) + + def supports_efficient_set_partial_values(self): + return False + + def set_partial_values(self, key_start_values): + # This does not yet implement efficient set_partial_values + StoreV3.set_partial_values(self, key_start_values) + + def rename(self, src_path: str, dst_path: str) -> None: + StoreV3.rename(self, src_path, dst_path) # type: ignore[arg-type] + + def list_prefix(self, prefix): + return StoreV3.list_prefix(self, prefix) + + def erase_prefix(self, prefix): + if self._is_data_key(prefix): + StoreV3.erase_prefix(self, prefix) + else: + self.inner_store.erase_prefix(prefix) + + def rmdir(self, path=None): + path = normalize_storage_path(path) + _rmdir_from_keys_v3(self, path) # type: ignore + + def __contains__(self, key): + if self._is_data_key(key): + shard_key, chunk_subkeys = self._key_to_shard(key) + try: + index = self._get_index_from_store(shard_key) + except KeyError: + return False + chunk_slice = index.get_chunk_slice(chunk_subkeys) + return chunk_slice is not None + else: + return self._inner_store.__contains__(key) diff --git a/zarr/core.py b/zarr/core.py index 5d37570831..b9db6cb2c8 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -51,7 +51,8 @@ normalize_shape, normalize_storage_path, PartialReadBuffer, - ensure_ndarray_like + UncompressedPartialReadBufferV3, + ensure_ndarray_like, ) @@ -1271,8 +1272,12 @@ def _get_selection(self, indexer, out=None, fields=None): check_array_shape('out', out, out_shape) # iterate over chunks - if not hasattr(self.chunk_store, "getitems") or \ - any(map(lambda x: x == 0, self.shape)): + if ( + not hasattr(self.chunk_store, "getitems") and not ( + hasattr(self.chunk_store, "get_partial_values") and + self.chunk_store.supports_efficient_get_partial_values + ) + ) or any(map(lambda x: x == 0, self.shape)): # sequentially get one key at a time from storage for chunk_coords, chunk_selection, out_selection in indexer: @@ -1898,6 +1903,8 @@ def _process_chunk( cdata = cdata.read_full() self._compressor.decode(cdata, dest) else: + if isinstance(cdata, UncompressedPartialReadBufferV3): + cdata = cdata.read_full() chunk = ensure_ndarray_like(cdata).view(self._dtype) chunk = chunk.reshape(self._chunks, order=self._order) np.copyto(dest, chunk) @@ -1919,13 +1926,21 @@ def _process_chunk( else dim for i, dim in enumerate(self.chunks) ] - cdata.read_part(start, nitems) - chunk_partial = self._decode_chunk( - cdata.buff, - start=start, - nitems=nitems, - expected_shape=expected_shape, - ) + if isinstance(cdata, UncompressedPartialReadBufferV3): + chunk_partial = self._decode_chunk( + cdata.read_part(start, nitems), + start=start, + nitems=nitems, + expected_shape=expected_shape, + ) + else: + cdata.read_part(start, nitems) + chunk_partial = self._decode_chunk( + cdata.buff, + start=start, + nitems=nitems, + expected_shape=expected_shape, + ) tmp[partial_out_selection] = chunk_partial out[out_selection] = tmp[chunk_selection] return @@ -2020,9 +2035,29 @@ def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, 
for ckey in ckeys if ckey in self.chunk_store } + elif ( + self._partial_decompress + and not self._compressor + and not fields + and self.dtype != object + and hasattr(self.chunk_store, "get_partial_values") + and self.chunk_store.supports_efficient_get_partial_values + ): + partial_read_decode = True + cdatas = { + ckey: UncompressedPartialReadBufferV3( + ckey, self.chunk_store, itemsize=self.itemsize + ) + for ckey in ckeys + if ckey in self.chunk_store + } else: partial_read_decode = False - cdatas = self.chunk_store.getitems(ckeys, on_error="omit") + if not hasattr(self.chunk_store, "getitems"): + values = self.chunk_store.get_partial_values([(ckey, (0, None)) for ckey in ckeys]) + cdatas = {key: value for key, value in zip(ckeys, values) if value is not None} + else: + cdatas = self.chunk_store.getitems(ckeys, on_error="omit") for ckey, chunk_select, out_select in zip(ckeys, lchunk_selection, lout_selection): if ckey in cdatas: self._process_chunk( diff --git a/zarr/meta.py b/zarr/meta.py index 41a90101b5..b493e833f0 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -477,9 +477,10 @@ def _encode_storage_transformer_metadata( @classmethod def _decode_storage_transformer_metadata(cls, meta: Mapping) -> "StorageTransformer": from zarr.tests.test_storage_v3 import DummyStorageTransfomer + from zarr._storage.v3_storage_transformers import ShardingStorageTransformer # This might be changed to a proper registry in the future - KNOWN_STORAGE_TRANSFORMERS = [DummyStorageTransfomer] + KNOWN_STORAGE_TRANSFORMERS = [DummyStorageTransfomer, ShardingStorageTransformer] conf = meta.get('configuration', {}) extension_uri = meta['extension'] diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index a9d674e2d9..24d6ebbc49 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -21,6 +21,7 @@ from zarr._storage.store import ( v3_api_available, ) +from .._storage.v3_storage_transformers import ShardingStorageTransformer, v3_sharding_available from zarr.core import Array from zarr.errors import ArrayNotFoundError, ContainsGroupError from zarr.meta import json_loads @@ -830,7 +831,6 @@ def test_pickle(self): attrs_cache = z.attrs.cache a = np.random.randint(0, 1000, 1000) z[:] = a - # round trip through pickle dump = pickle.dumps(z) # some stores cannot be opened twice at the same time, need to close @@ -3299,6 +3299,60 @@ def expected(self): ] +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +@pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") +class TestArrayWithFSStoreV3PartialReadUncompressedSharded( + TestArrayWithPathV3, TestArrayWithFSStorePartialRead +): + + @staticmethod + def create_array(array_path='arr1', read_only=False, **kwargs): + path = mkdtemp() + atexit.register(shutil.rmtree, path) + store = FSStoreV3(path) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) + kwargs.setdefault('compressor', None) + num_dims = 1 if isinstance(kwargs["shape"], int) else len(kwargs["shape"]) + sharding_transformer = ShardingStorageTransformer( + "indexed", chunks_per_shard=(2, ) * num_dims + ) + init_array(store, path=array_path, storage_transformers=[sharding_transformer], **kwargs) + return Array( + store, + path=array_path, + read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + partial_decompress=True, + 
write_empty_chunks=write_empty_chunks, + ) + + def test_nbytes_stored(self): + z = self.create_array(shape=1000, chunks=100) + expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != 'zarr.json') + assert expect_nbytes_stored == z.nbytes_stored + z[:] = 42 + expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != 'zarr.json') + assert expect_nbytes_stored == z.nbytes_stored + + def test_supports_efficient_get_set_partial_values(self): + z = self.create_array(shape=100, chunks=10) + assert z.chunk_store.supports_efficient_get_partial_values + assert not z.chunk_store.supports_efficient_set_partial_values() + + def expected(self): + return [ + "90109fc2a4e17efbcb447003ea1c08828b91f71e", + "2b73519f7260dba3ddce0d2b70041888856fec6b", + "bca5798be2ed71d444f3045b05432d937682b7dd", + "9ff1084501e28520e577662a6e3073f1116c76a2", + "882a97cad42417f90f111d0cb916a21579650467", + ] + + @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithFSStoreV3Nested(TestArrayWithPathV3, TestArrayWithFSStoreNested): @@ -3392,6 +3446,63 @@ def expected(self): ] +@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +@pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") +class TestArrayWithShardingStorageTransformerV3(TestArrayWithPathV3): + + @staticmethod + def create_array(array_path='arr1', read_only=False, **kwargs): + store = KVStoreV3(dict()) + cache_metadata = kwargs.pop('cache_metadata', True) + cache_attrs = kwargs.pop('cache_attrs', True) + write_empty_chunks = kwargs.pop('write_empty_chunks', True) + kwargs.setdefault('compressor', None) + num_dims = 1 if isinstance(kwargs["shape"], int) else len(kwargs["shape"]) + sharding_transformer = ShardingStorageTransformer( + "indexed", chunks_per_shard=(2, ) * num_dims + ) + init_array(store, path=array_path, storage_transformers=[sharding_transformer], **kwargs) + return Array(store, path=array_path, read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + + def test_nbytes_stored(self): + z = self.create_array(shape=1000, chunks=100) + expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != 'zarr.json') + assert expect_nbytes_stored == z.nbytes_stored + z[:] = 42 + expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != 'zarr.json') + assert expect_nbytes_stored == z.nbytes_stored + + # mess with store + z.store[data_root + z._key_prefix + 'foo'] = list(range(10)) + assert -1 == z.nbytes_stored + + def test_keys_inner_store(self): + z = self.create_array(shape=1000, chunks=100) + assert z.chunk_store.keys() == z._store.keys() + meta_keys = set(z.store.keys()) + z[:] = 42 + assert len(z.chunk_store.keys() - meta_keys) == 10 + # inner store should have half the data keys, + # since chunks_per_shard is 2: + assert len(z._store.keys() - meta_keys) == 5 + + def test_supports_efficient_get_set_partial_values(self): + z = self.create_array(shape=100, chunks=10) + assert not z.chunk_store.supports_efficient_get_partial_values + assert not z.chunk_store.supports_efficient_set_partial_values() + + def expected(self): + return [ + '90109fc2a4e17efbcb447003ea1c08828b91f71e', + '2b73519f7260dba3ddce0d2b70041888856fec6b', + 'bca5798be2ed71d444f3045b05432d937682b7dd', + '9ff1084501e28520e577662a6e3073f1116c76a2', + '882a97cad42417f90f111d0cb916a21579650467', + ] + + @pytest.mark.skipif(not 
v3_api_available, reason="V3 is disabled") def test_array_mismatched_store_versions(): store_v3 = KVStoreV3(dict()) diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index 9f18c89361..cc031f0db4 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -10,6 +10,8 @@ import zarr from zarr._storage.store import _get_hierarchy_metadata, v3_api_available, StorageTransformer +from zarr._storage.v3_storage_transformers import ShardingStorageTransformer, v3_sharding_available +from zarr.core import Array from zarr.meta import _default_entry_point_metadata_v3 from zarr.storage import (atexit_rmglob, atexit_rmtree, data_root, default_compressor, getsize, init_array, meta_root, @@ -523,26 +525,38 @@ def create_store(self, **kwargs): return store +@pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") class TestStorageTransformerV3(TestMappingStoreV3): def create_store(self, **kwargs): inner_store = super().create_store(**kwargs) - storage_transformer = DummyStorageTransfomer( + dummy_transformer = DummyStorageTransfomer( "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT ) - return storage_transformer._copy_for_array(None, inner_store) + sharding_transformer = ShardingStorageTransformer( + "indexed", chunks_per_shard=2, + ) + path = 'bla' + init_array(inner_store, path=path, shape=1000, chunks=100, + dimension_separator=".", + storage_transformers=[dummy_transformer, sharding_transformer]) + store = Array(store=inner_store, path=path).chunk_store + store.erase_prefix("data/root/bla/") + store.clear() + return store def test_method_forwarding(self): store = self.create_store() - assert store.list() == store.inner_store.list() - assert store.list_dir(data_root) == store.inner_store.list_dir(data_root) + inner_store = store.inner_store.inner_store + assert store.list() == inner_store.list() + assert store.list_dir(data_root) == inner_store.list_dir(data_root) assert store.is_readable() assert store.is_writeable() assert store.is_listable() - store.inner_store._readable = False - store.inner_store._writeable = False - store.inner_store._listable = False + inner_store._readable = False + inner_store._writeable = False + inner_store._listable = False assert not store.is_readable() assert not store.is_writeable() assert not store.is_listable() diff --git a/zarr/util.py b/zarr/util.py index dfbb551651..5b307b7c5c 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -644,6 +644,25 @@ def read_full(self): return self.chunk_store[self.store_key] +class UncompressedPartialReadBufferV3: + def __init__(self, store_key, chunk_store, itemsize): + assert chunk_store.supports_efficient_get_partial_values + self.chunk_store = chunk_store + self.store_key = store_key + self.itemsize = itemsize + + def prepare_chunk(self): + pass + + def read_part(self, start, nitems): + return self.chunk_store.get_partial_values( + [(self.store_key, (start * self.itemsize, nitems * self.itemsize))] + )[0] + + def read_full(self): + return self.chunk_store[self.store_key] + + def retry_call(callabl: Callable, args=None, kwargs=None, From e9fb1f33f339d046e41a76fee46be07d8e4f39a4 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Thu, 2 Feb 2023 09:56:33 -0500 Subject: [PATCH 0274/1078] remove blosc warnings from n5 compressor handling (#1331) * remove blosc warnings from n5 compressor handling * release notes * remove reference to n5 warnings in pytest ini options * remove blosc from list of warned compressors in tests, and restore lzma warning --- 
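For context, the pattern this change affects is simply a Blosc-compressed array written to an N5 store; a minimal sketch (the path and compressor settings are illustrative, not taken from the patch):

    import numpy as np
    import zarr
    from numcodecs import Blosc
    from zarr.n5 import N5Store

    store = N5Store("example.n5")  # illustrative local path
    # Before this patch, creating the array emitted a RuntimeWarning about
    # blosc support in other N5 implementations; now it is silent.
    z = zarr.zeros((100, 100), chunks=(10, 10), store=store,
                   compressor=Blosc(cname="zstd", clevel=5, shuffle=Blosc.SHUFFLE))
    z[:] = np.arange(10000).reshape(100, 100)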
docs/release.rst | 4 +++- pyproject.toml | 1 - zarr/n5.py | 6 ------ zarr/tests/test_core.py | 4 +--- 4 files changed, 4 insertions(+), 11 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index dcec2872fb..fdff400266 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -25,7 +25,9 @@ Major changes * sharding storage transformer By :user:`Jonathan Striebel `; :issue:`1096`, :issue:`1111`. - +* Remove warnings emitted when using N5Store or N5FSStore with a blosc-compressed array. + By :user:`Davis Bennett `; :issue:`1331`. + Bug fixes ~~~~~~~~~ diff --git a/pyproject.toml b/pyproject.toml index 1592b9887a..3277e9da7c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,6 @@ addopts = [ ] filterwarnings = [ "error:::zarr.*", - "ignore:Not all N5 implementations support blosc compression.*:RuntimeWarning", "ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning", "ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning", ] diff --git a/zarr/n5.py b/zarr/n5.py index 978cade1b8..4c93ce4acb 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -735,12 +735,6 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict elif codec_id == 'blosc': - warnings.warn( - "Not all N5 implementations support blosc compression (yet). You " - "might not be able to open the dataset with another N5 library.", - RuntimeWarning - ) - n5_config['cname'] = _compressor_config['cname'] n5_config['clevel'] = _compressor_config['clevel'] n5_config['shuffle'] = _compressor_config['shuffle'] diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 24d6ebbc49..b54fe3ddf0 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -2022,9 +2022,7 @@ def test_compressors(self): a1[:] = 1 assert np.all(a1[:] == 1) - compressors_warn = [ - Blosc() - ] + compressors_warn = [] if LZMA: compressors_warn.append(LZMA(2)) # Try lzma.FORMAT_ALONE, which N5 doesn't support. 
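         # (format=2 selects lzma.FORMAT_ALONE, the legacy .lzma container;
         # N5's xz codec expects lzma.FORMAT_XZ, so this compressor still warns.)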
for compressor in compressors_warn: From 4dc6f1f5046708648b5e6b82d0a64a24d1a40566 Mon Sep 17 00:00:00 2001 From: AWA BRANDON AWA Date: Thu, 2 Feb 2023 16:01:58 +0100 Subject: [PATCH 0275/1078] changed documentation theme to pydata_sphinx_theme (#1242) * changed documentation theme to pydata_sphinx_theme * updated documentation layout * Update docs/index.rst Co-authored-by: Sanket Verma * Updated Acknowledgements section and added twitter icon * Added client-side javascript redirects * Add more redirects * Minor tweaks and added acknowledgments.rst * Added acknowledgments.html to redirect * Add indices from the old main page --------- Co-authored-by: Sanket Verma Co-authored-by: Josh Moore --- docs/_static/custom.css | 133 ++++++++++++--- docs/_static/custom.js | 18 ++ docs/_static/index_api.svg | 97 +++++++++++ docs/_static/index_contribute.svg | 76 +++++++++ docs/_static/index_getting_started.svg | 66 ++++++++ docs/_static/index_user_guide.svg | 67 ++++++++ docs/acknowledgments.rst | 76 +++++++++ docs/api.rst | 7 + docs/conf.py | 28 +++- docs/getting_started.rst | 46 +++++ docs/index.rst | 224 ++++++++++--------------- requirements_rtfd.txt | 2 + 12 files changed, 681 insertions(+), 159 deletions(-) create mode 100644 docs/_static/custom.js create mode 100644 docs/_static/index_api.svg create mode 100644 docs/_static/index_contribute.svg create mode 100644 docs/_static/index_getting_started.svg create mode 100644 docs/_static/index_user_guide.svg create mode 100644 docs/acknowledgments.rst create mode 100644 docs/getting_started.rst diff --git a/docs/_static/custom.css b/docs/_static/custom.css index a0e3929e87..487addfbbd 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,36 +1,123 @@ -/* override text color */ -.wy-menu-vertical a { - color: #000000; +@import url('https://fonts.googleapis.com/css2?family=Lato:ital,wght@0,400;0,700;0,900;1,400;1,700;1,900&family=Open+Sans:ital,wght@0,400;0,600;1,400;1,600&display=swap'); + +.navbar-brand img { + height: 75px; +} +.navbar-brand { + height: 75px; +} + +body { + font-family: 'Open Sans', sans-serif; +} + +pre, code { + font-size: 100%; + line-height: 155%; +} + +/* Style the active version button. + +- dev: orange +- stable: green +- old, PR: red + +Colors from: + +Wong, B. Points of view: Color blindness. +Nat Methods 8, 441 (2011). 
https://doi.org/10.1038/nmeth.1618 +*/ + +/* If the active version has the name "dev", style it orange */ +#version_switcher_button[data-active-version-name*="dev"] { + background-color: #E69F00; + border-color: #E69F00; + color:#000000; +} + +/* green for `stable` */ +#version_switcher_button[data-active-version-name*="stable"] { + background-color: #009E73; + border-color: #009E73; +} + +/* red for `old` */ +#version_switcher_button:not([data-active-version-name*="stable"], [data-active-version-name*="dev"], [data-active-version-name=""]) { + background-color: #980F0F; + border-color: #980F0F; } -/* Sidebar background color */ -.wy-nav-side, div.wy-side-nav-search { - background-color: rgb(198, 197, 213, 0); /* full alpha */ +/* Main page overview cards */ + +.sd-card { + background: #fff; + border-radius: 0; + padding: 30px 10px 20px 10px; + margin: 10px 0px; +} + +.sd-card .sd-card-header { + text-align: center; +} + +.sd-card .sd-card-header .sd-card-text { + margin: 0px; +} + +.sd-card .sd-card-img-top { + height: 52px; + width: 52px; + margin-left: auto; + margin-right: auto; +} + +.sd-card .sd-card-header { + border: none; + background-color: white; + color: #150458 !important; + font-size: var(--pst-font-size-h5); + font-weight: bold; + padding: 2.5rem 0rem 0.5rem 0rem; +} + +.sd-card .sd-card-footer { + border: none; + background-color: white; +} + +.sd-card .sd-card-footer .sd-card-text { + max-width: 220px; + margin-left: auto; + margin-right: auto; +} + +/* Dark theme tweaking */ +html[data-theme=dark] .sd-card img[src*='.svg'] { + filter: invert(0.82) brightness(0.8) contrast(1.2); } -/* Sidebar link click color */ -.wy-menu-vertical .toctree-l1 > a:active { - background-color: rgb(198, 197, 213); - color: rgb(0, 0, 0); +/* Main index page overview cards */ +html[data-theme=dark] .sd-card { + background-color:var(--pst-color-background); } -/* Link color is darker to make hovering more clear */ -.wy-menu-vertical .toctree-l1 > a:hover { - background-color: rgb(198, 197, 213); - color: rgb(0, 0, 0); +html[data-theme=dark] .sd-shadow-sm { + box-shadow: 0 .1rem 1rem rgba(250, 250, 250, .6) !important } -.wy-menu-vertical li.current > a:hover, .wy-menu-vertical li.current > a:active { - color: #404040; - background-color: #F5F5F5; +html[data-theme=dark] .sd-card .sd-card-header { + background-color:var(--pst-color-background); + color: #150458 !important; } -/* On hover over logo */ -.wy-side-nav-search > a:hover, .wy-side-nav-search .wy-dropdown > a:hover { - background: inherit; +html[data-theme=dark] .sd-card .sd-card-footer { + background-color:var(--pst-color-background); } -/* Border around search box */ -.wy-side-nav-search input[type="text"] { - border: 0px; +html[data-theme=dark] h1 { + color: var(--pst-color-primary); } + +html[data-theme=dark] h3 { + color: #0a6774; +} \ No newline at end of file diff --git a/docs/_static/custom.js b/docs/_static/custom.js new file mode 100644 index 0000000000..06b2d019b1 --- /dev/null +++ b/docs/_static/custom.js @@ -0,0 +1,18 @@ +// handle redirects +(() => { + let anchorMap = { + "installation": "installation.html", + "getting-started": "getting_started.html#getting-started", + "highlights": "getting_started.html#highlights", + "contributing": "contributing.html", + "projects-using-zarr": "getting_started.html#projects-using-zarr", + "acknowledgments": "acknowledgments.html", + "contents": "getting_started.html#contents", + "indices-and-tables": "api.html#indices-and-tables" + } + + let hash = window.location.hash.substring(1); + if 
(hash) {
+    window.location.replace(anchorMap[hash]);
+  }
+})();

diff --git a/docs/_static/index_api.svg b/docs/_static/index_api.svg
new file mode 100644
index 0000000000..69f7ba1d2d
--- /dev/null
+++ b/docs/_static/index_api.svg
@@ -0,0 +1,97 @@
+[SVG icon markup omitted]

diff --git a/docs/_static/index_contribute.svg b/docs/_static/index_contribute.svg
new file mode 100644
index 0000000000..de3d902379
--- /dev/null
+++ b/docs/_static/index_contribute.svg
@@ -0,0 +1,76 @@
+[SVG icon markup omitted]

diff --git a/docs/_static/index_getting_started.svg b/docs/_static/index_getting_started.svg
new file mode 100644
index 0000000000..2d36622cb7
--- /dev/null
+++ b/docs/_static/index_getting_started.svg
@@ -0,0 +1,66 @@
+[SVG icon markup omitted]

diff --git a/docs/_static/index_user_guide.svg b/docs/_static/index_user_guide.svg
new file mode 100644
index 0000000000..bd17053517
--- /dev/null
+++ b/docs/_static/index_user_guide.svg
@@ -0,0 +1,67 @@
+[SVG icon markup omitted]

diff --git a/docs/acknowledgments.rst b/docs/acknowledgments.rst
new file mode 100644
index 0000000000..36cd1f5646
--- /dev/null
+++ b/docs/acknowledgments.rst
@@ -0,0 +1,76 @@
+Acknowledgments
+===============
+
+The following people have contributed to the development of Zarr by contributing code,
+documentation, code reviews, comments and/or ideas:
+
+* :user:`Alistair Miles `
+* :user:`Altay Sansal `
+* :user:`Anderson Banihirwe `
+* :user:`Andrew Fulton `
+* :user:`Andrew Thomas `
+* :user:`Anthony Scopatz `
+* :user:`Attila Bergou `
+* :user:`BGCMHou `
+* :user:`Ben Jeffery `
+* :user:`Ben Williams `
+* :user:`Boaz Mohar `
+* :user:`Charles Noyes `
+* :user:`Chris Barnes `
+* :user:`David Baddeley `
+* :user:`Davis Bennett `
+* :user:`Dimitri Papadopoulos Orfanos `
+* :user:`Eduardo Gonzalez `
+* :user:`Elliott Sales de Andrade `
+* :user:`Eric Prestat `
+* :user:`Eric Younkin `
+* :user:`Francesc Alted `
+* :user:`Greggory Lee `
+* :user:`Gregory R. Lee `
+* :user:`Ian Hunt-Isaak `
+* :user:`James Bourbeau `
+* :user:`Jan Funke `
+* :user:`Jerome Kelleher `
+* :user:`Joe Hamman `
+* :user:`Joe Jevnik `
+* :user:`John Kirkham `
+* :user:`Josh Moore `
+* :user:`Juan Nunez-Iglesias `
+* :user:`Justin Swaney `
+* :user:`Mads R. B.
Kristensen ` +* :user:`Mamy Ratsimbazafy ` +* :user:`Martin Durant ` +* :user:`Matthew Rocklin ` +* :user:`Matthias Bussonnier ` +* :user:`Mattia Almansi ` +* :user:`Noah D Brenowitz ` +* :user:`Oren Watson ` +* :user:`Pavithra Eswaramoorthy ` +* :user:`Poruri Sai Rahul ` +* :user:`Prakhar Goel ` +* :user:`Raphael Dussin ` +* :user:`Ray Bell ` +* :user:`Richard Scott ` +* :user:`Richard Shaw ` +* :user:`Ryan Abernathey ` +* :user:`Ryan Williams ` +* :user:`Saransh Chopra ` +* :user:`Sebastian Grill ` +* :user:`Shikhar Goenka ` +* :user:`Shivank Chaudhary ` +* :user:`Stephan Hoyer ` +* :user:`Stephan Saalfeld ` +* :user:`Tarik Onalan ` +* :user:`Tim Crone ` +* :user:`Tobias Kölling ` +* :user:`Tom Augspurger ` +* :user:`Tom White ` +* :user:`Tommy Tran ` +* :user:`Trevor Manz ` +* :user:`Vincent Schut ` +* :user:`Vyas Ramasubramani ` +* :user:`Zain Patel ` +* :user:`gsakkis` +* :user:`hailiangzhang ` +* :user:`pmav99 ` +* :user:`sbalmer ` \ No newline at end of file diff --git a/docs/api.rst b/docs/api.rst index 8162ada965..2b6e7ea516 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -13,3 +13,10 @@ API reference api/codecs api/attrs api/sync + +Indices and tables +------------------ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/conf.py b/docs/conf.py index 2639f765ee..413d648732 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -45,6 +45,7 @@ 'numpydoc', 'sphinx_issues', "sphinx_copybutton", + "sphinx_design" ] numpydoc_show_class_members = False @@ -124,12 +125,26 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'sphinx_rtd_theme' +html_theme = 'pydata_sphinx_theme' + +html_favicon = '_static/logo1.png' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -html_theme_options = {'logo_only': True} +html_theme_options = { + "github_url": "https://github.com/zarr-developers/zarr-python", + "twitter_url": "https://twitter.com/zarr_dev", + "icon_links": [ + { + "name": "Zarr Dev", + "url": "https://zarr.dev/", + "icon": "_static/logo1.png", + "type": "local" + }, + ], + "collapse_navigation": True +} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] @@ -160,6 +175,9 @@ def setup(app): # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] +html_js_files = [ + 'custom.js', +] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -246,7 +264,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (main_doc, 'zarr.tex', 'zarr Documentation', + (main_doc, 'zarr.tex', 'Zarr-Python', author, 'manual'), ] @@ -276,7 +294,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (main_doc, 'zarr', 'zarr Documentation', + (main_doc, 'zarr', 'Zarr-Python', [author], 1) ] @@ -290,7 +308,7 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (main_doc, 'zarr', 'zarr Documentation', + (main_doc, 'zarr', 'Zarr-Python', author, 'zarr', 'One line description of project.', 'Miscellaneous'), ] diff --git a/docs/getting_started.rst b/docs/getting_started.rst new file mode 100644 index 0000000000..77d45325e4 --- /dev/null +++ b/docs/getting_started.rst @@ -0,0 +1,46 @@ +Getting Started +=============== + +Zarr is a format for the storage of chunked, compressed, N-dimensional arrays +inspired by `HDF5 `_, `h5py +`_ and `bcolz `_. + +The project is fiscally sponsored by `NumFOCUS `_, a US +501(c)(3) public charity, and development is supported by the +`MRC Centre for Genomics and Global Health `_ +and the `Chan Zuckerberg Initiative `_. + +These documents describe the Zarr Python implementation. More information +about the Zarr format can be found on the `main website `_. + +Highlights +---------- + +* Create N-dimensional arrays with any NumPy dtype. +* Chunk arrays along any dimension. +* Compress and/or filter chunks using any NumCodecs_ codec. +* Store arrays in memory, on disk, inside a Zip file, on S3, ... +* Read an array concurrently from multiple threads or processes. +* Write to an array concurrently from multiple threads or processes. +* Organize arrays into hierarchies via groups. + +Contributing +------------ + +Feedback and bug reports are very welcome, please get in touch via +the `GitHub issue tracker `_. See +:doc:`contributing` for further information about contributing to Zarr. + +Projects using Zarr +------------------- + +If you are using Zarr, we would `love to hear about it +`_. + +.. toctree:: + :caption: Getting Started + :hidden: + + installation + +.. _NumCodecs: https://numcodecs.readthedocs.io/ diff --git a/docs/index.rst b/docs/index.rst index dd6abc1862..50060d10cc 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,142 +1,104 @@ -.. zarr documentation main file, created by - sphinx-quickstart on Mon May 2 21:40:09 2016. - -Zarr -==== - -Zarr is a format for the storage of chunked, compressed, N-dimensional arrays -inspired by `HDF5 `_, `h5py -`_ and `bcolz `_. - -The project is fiscally sponsored by `NumFOCUS `_, a US -501(c)(3) public charity, and development is supported by the -`MRC Centre for Genomics and Global Health `_ -and the `Chan Zuckerberg Initiative `_. - -These documents describe the Zarr Python implementation. More information -about the Zarr format can be found on the `main website `_. - -Highlights ----------- - - * Create N-dimensional arrays with any NumPy dtype. - * Chunk arrays along any dimension. - * Compress and/or filter chunks using any NumCodecs_ codec. - * Store arrays in memory, on disk, inside a Zip file, on S3, ... - * Read an array concurrently from multiple threads or processes. - * Write to an array concurrently from multiple threads or processes. - * Organize arrays into hierarchies via groups. - -Contributing ------------- - -Feedback and bug reports are very welcome, please get in touch via -the `GitHub issue tracker `_. See -:doc:`contributing` for further information about contributing to Zarr. - -Projects using Zarr -------------------- - -If you are using Zarr, we would `love to hear about it -`_. 
- -Acknowledgments ---------------- - -The following people have contributed to the development of Zarr by contributing code, -documentation, code reviews, comments and/or ideas: - -:user:`Alistair Miles ` -:user:`Altay Sansal ` -:user:`Anderson Banihirwe ` -:user:`Andrew Fulton ` -:user:`Andrew Thomas ` -:user:`Anthony Scopatz ` -:user:`Attila Bergou ` -:user:`BGCMHou ` -:user:`Ben Jeffery ` -:user:`Ben Williams ` -:user:`Boaz Mohar ` -:user:`Charles Noyes ` -:user:`Chris Barnes ` -:user:`David Baddeley ` -:user:`Davis Bennett ` -:user:`Dimitri Papadopoulos Orfanos ` -:user:`Eduardo Gonzalez ` -:user:`Elliott Sales de Andrade ` -:user:`Eric Prestat ` -:user:`Eric Younkin ` -:user:`Francesc Alted ` -:user:`Greggory Lee ` -:user:`Gregory R. Lee ` -:user:`Ian Hunt-Isaak ` -:user:`James Bourbeau ` -:user:`Jan Funke ` -:user:`Jerome Kelleher ` -:user:`Joe Hamman ` -:user:`Joe Jevnik ` -:user:`John Kirkham ` -:user:`Josh Moore ` -:user:`Juan Nunez-Iglesias ` -:user:`Justin Swaney ` -:user:`Mads R. B. Kristensen ` -:user:`Mamy Ratsimbazafy ` -:user:`Martin Durant ` -:user:`Matthew Rocklin ` -:user:`Matthias Bussonnier ` -:user:`Mattia Almansi ` -:user:`Noah D Brenowitz ` -:user:`Oren Watson ` -:user:`Pavithra Eswaramoorthy ` -:user:`Poruri Sai Rahul ` -:user:`Prakhar Goel ` -:user:`Raphael Dussin ` -:user:`Ray Bell ` -:user:`Richard Scott ` -:user:`Richard Shaw ` -:user:`Ryan Abernathey ` -:user:`Ryan Williams ` -:user:`Saransh Chopra ` -:user:`Sebastian Grill ` -:user:`Shikhar Goenka ` -:user:`Shivank Chaudhary ` -:user:`Stephan Hoyer ` -:user:`Stephan Saalfeld ` -:user:`Tarik Onalan ` -:user:`Tim Crone ` -:user:`Tobias Kölling ` -:user:`Tom Augspurger ` -:user:`Tom White ` -:user:`Tommy Tran ` -:user:`Trevor Manz ` -:user:`Vincent Schut ` -:user:`Vyas Ramasubramani ` -:user:`Zain Patel ` -:user:`gsakkis` -:user:`hailiangzhang ` -:user:`pmav99 ` -:user:`sbalmer ` - -Contents --------- +.. _zarr_docs_mainpage: + +*********** +Zarr-Python +*********** .. toctree:: - :maxdepth: 2 + :maxdepth: 1 + :hidden: - installation + getting_started tutorial api spec - contributing release license - View homepage + acknowledgments + contributing + +**Version**: |version| + +**Download documentation**: `Zipped HTML `_ + +**Useful links**: +`Installation `_ | +`Source Repository `_ | +`Issue Tracker `_ | +`Gitter `_ + +Zarr is a file storage format for chunked, compressed, N-dimensional arrays based on an open-source specification. + +.. grid:: 2 + + .. grid-item-card:: + :img-top: _static/index_getting_started.svg + + Getting Started + ^^^^^^^^^^^^^^^ + + New to Zarr? Check out the getting started guide. It contains an + introduction to Zarr's main concepts and links to additional tutorials. + + +++ + + .. button-ref:: getting_started + :expand: + :color: dark + :click-parent: + + To the getting started guide + + .. grid-item-card:: + :img-top: _static/index_user_guide.svg + + Tutorial + ^^^^^^^^ + + The tutorial provides working examples of Zarr classes and functions. + + +++ + + .. button-ref:: tutorial + :expand: + :color: dark + :click-parent: + + To the Tutorial + + .. grid-item-card:: + :img-top: _static/index_api.svg + + API Reference + ^^^^^^^^^^^^^ + + The reference guide contains a detailed description of the functions, + modules, and objects included in Zarr. The reference describes how the + methods work and which parameters can be used. It assumes that you have an + understanding of the key concepts. + + +++ + + .. 
button-ref:: api + :expand: + :color: dark + :click-parent: + + To the api reference guide + + .. grid-item-card:: + :img-top: _static/index_contribute.svg + + Contributor's Guide + ^^^^^^^^^^^^^^^^^^^ + + Want to contribute to Zarr? We welcome contributions in the form of bug reports, bug fixes, documentation, enhancement proposals and more. The contributing guidelines will guide you through the process of improving Zarr. -Indices and tables ------------------- + +++ -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` + .. button-ref:: contributing + :expand: + :color: dark + :click-parent: -.. _NumCodecs: https://numcodecs.readthedocs.io/ + To the contributor's guide \ No newline at end of file diff --git a/requirements_rtfd.txt b/requirements_rtfd.txt index 553384e0bd..5d7fec369a 100644 --- a/requirements_rtfd.txt +++ b/requirements_rtfd.txt @@ -2,9 +2,11 @@ asciitree setuptools setuptools_scm sphinx +sphinx_design sphinx-issues sphinx-copybutton sphinx-rtd-theme +pydata-sphinx-theme numpydoc numpy!=1.21.0 msgpack-python==0.5.6 From 280d9695990b73153127083dd640bcb5a69e8f8f Mon Sep 17 00:00:00 2001 From: Nathan Zimmerberg <39104088+nhz2@users.noreply.github.com> Date: Thu, 9 Feb 2023 05:43:03 -0500 Subject: [PATCH 0276/1078] Allow reading utf-8 encoded json files (#1312) * read utf-8 in json * update release * Update zarr/util.py Co-authored-by: jakirkham * allow str --------- Co-authored-by: jakirkham --- docs/release.rst | 3 +++ fixture/utf8attrs/.zattrs | 1 + fixture/utf8attrs/.zgroup | 3 +++ zarr/meta.py | 12 ++++++------ zarr/tests/test_attrs.py | 11 +++++++++-- zarr/util.py | 4 ++-- 6 files changed, 24 insertions(+), 10 deletions(-) create mode 100644 fixture/utf8attrs/.zattrs create mode 100644 fixture/utf8attrs/.zgroup diff --git a/docs/release.rst b/docs/release.rst index fdff400266..905ccd2ebb 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -31,6 +31,9 @@ Major changes Bug fixes ~~~~~~~~~ +* Allow reading utf-8 encoded json files + By :user:`Nathan Zimmerberg ` :issue:`1308`. + * Ensure contiguous data is give to ``FSStore``. Only copying if needed. By :user:`Mads R. B. Kristensen ` :issue:`1285`. * NestedDirectoryStore.listdir now returns chunk keys with the correct '/' dimension_separator. diff --git a/fixture/utf8attrs/.zattrs b/fixture/utf8attrs/.zattrs new file mode 100644 index 0000000000..7f85af5d3a --- /dev/null +++ b/fixture/utf8attrs/.zattrs @@ -0,0 +1 @@ +{"foo": "た"} \ No newline at end of file diff --git a/fixture/utf8attrs/.zgroup b/fixture/utf8attrs/.zgroup new file mode 100644 index 0000000000..3b7daf227c --- /dev/null +++ b/fixture/utf8attrs/.zgroup @@ -0,0 +1,3 @@ +{ + "zarr_format": 2 +} \ No newline at end of file diff --git a/zarr/meta.py b/zarr/meta.py index b493e833f0..59c56abf3d 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -92,7 +92,7 @@ class Metadata2: ZARR_FORMAT = ZARR_FORMAT @classmethod - def parse_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + def parse_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: # Here we allow that a store may return an already-parsed metadata object, # or a string of JSON that we will parse here. 
We allow for an already-parsed @@ -110,7 +110,7 @@ def parse_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: return meta @classmethod - def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: meta = cls.parse_metadata(s) # check metadata format @@ -198,7 +198,7 @@ def decode_dtype(cls, d) -> np.dtype: return np.dtype(d) @classmethod - def decode_group_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + def decode_group_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: meta = cls.parse_metadata(s) # check metadata format version @@ -351,7 +351,7 @@ def encode_dtype(cls, d): return get_extended_dtype_info(np.dtype(d)) @classmethod - def decode_group_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + def decode_group_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: meta = cls.parse_metadata(s) # 1 / 0 # # check metadata format version @@ -390,7 +390,7 @@ def encode_hierarchy_metadata(cls, meta=None) -> bytes: @classmethod def decode_hierarchy_metadata( - cls, s: Union[MappingType, str] + cls, s: Union[MappingType, bytes, str] ) -> MappingType[str, Any]: meta = cls.parse_metadata(s) # check metadata format @@ -495,7 +495,7 @@ def _decode_storage_transformer_metadata(cls, meta: Mapping) -> "StorageTransfor return StorageTransformerCls.from_config(transformer_type, conf) @classmethod - def decode_array_metadata(cls, s: Union[MappingType, str]) -> MappingType[str, Any]: + def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: meta = cls.parse_metadata(s) # extract array metadata fields diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index e4baf182b2..27ec8fea8d 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -4,9 +4,10 @@ from zarr._storage.store import meta_root from zarr.attrs import Attributes -from zarr.storage import KVStore +from zarr.storage import KVStore, DirectoryStore from zarr._storage.v3 import KVStoreV3 from zarr.tests.util import CountingDict, CountingDictV3 +from zarr.hierarchy import group @pytest.fixture(params=[2, 3]) @@ -42,11 +43,17 @@ def test_storage(self, zarr_version): a['baz'] = 42 assert attrs_key in store assert isinstance(store[attrs_key], bytes) - d = json.loads(str(store[attrs_key], 'ascii')) + d = json.loads(str(store[attrs_key], 'utf-8')) if zarr_version == 3: d = d['attributes'] assert dict(foo='bar', baz=42) == d + def test_utf8_encoding(self, zarr_version): + + # fixture data + fixture = group(store=DirectoryStore('fixture')) + assert fixture['utf8attrs'].attrs.asdict() == dict(foo='た') + def test_get_set_del_contains(self, zarr_version): store = _init_store(zarr_version) diff --git a/zarr/util.py b/zarr/util.py index 5b307b7c5c..be5f174aab 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -56,9 +56,9 @@ def json_dumps(o: Any) -> bytes: separators=(',', ': '), cls=NumberEncoder).encode('ascii') -def json_loads(s: str) -> Dict[str, Any]: +def json_loads(s: Union[bytes, str]) -> Dict[str, Any]: """Read JSON in a consistent way.""" - return json.loads(ensure_text(s, 'ascii')) + return json.loads(ensure_text(s, 'utf-8')) def normalize_shape(shape) -> Tuple[int]: From 277e4b200edd275e991b321a4d735859ffd555a9 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Fri, 10 Feb 2023 09:28:01 +0100 Subject: [PATCH 0277/1078] Prepare 2.14 changelog (#1337) * Prepare 2.14 
changelog

* .rst fixes
---
 docs/release.rst | 28 ++++++++++++++++++----------
 1 file changed, 18 insertions(+), 10 deletions(-)

diff --git a/docs/release.rst b/docs/release.rst
index 905ccd2ebb..0965109935 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -6,28 +6,35 @@ Release notes
    # to document your changes. On releases it will be
    # re-indented so that it does not show up in the notes.

-.. _unreleased:
+    .. _unreleased:
+
+    Unreleased
+    ----------

-Unreleased
-----------
 ..
   # .. warning::
   #    Pre-release! Use :command:`pip install --pre zarr` to evaluate this release.

+.. _release_2.14.0:
+
+2.14.0
+------
+
 Major changes
 ~~~~~~~~~~~~~

 * Improve Zarr V3 support, adding partial store read/write and storage transformers.
-  Add two features of the [v3 spec](https://zarr-specs.readthedocs.io/en/latest/core/v3.0.html):
-  * storage transformers
-  * `get_partial_values` and `set_partial_values`
-  * efficient `get_partial_values` implementation for `FSStoreV3`
-  * sharding storage transformer
+  Add new features from the `v3 spec <https://zarr-specs.readthedocs.io/en/latest/core/v3.0.html>`_:
+
+  * storage transformers
+  * `get_partial_values` and `set_partial_values`
+  * efficient `get_partial_values` implementation for `FSStoreV3`
+  * sharding storage transformer
   By :user:`Jonathan Striebel `; :issue:`1096`, :issue:`1111`.

-* Remove warnings emitted when using N5Store or N5FSStore with a blosc-compressed array.
+* N5 now supports Blosc.
+  Remove warnings emitted when using N5Store or N5FSStore with a blosc-compressed array.
   By :user:`Davis Bennett `; :issue:`1331`.
-
+
 Bug fixes
 ~~~~~~~~~

@@ -36,6 +43,7 @@ Bug fixes

 * Ensure contiguous data is given to ``FSStore``. Only copying if needed.
   By :user:`Mads R. B. Kristensen ` :issue:`1285`.
+
 * NestedDirectoryStore.listdir now returns chunk keys with the correct '/' dimension_separator.
   By :user:`Brett Graham ` :issue:`1334`.

From 4e8b84b46937ba14f9cf818065740e95a2a2b554 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 10 Feb 2023 09:31:02 +0100
Subject: [PATCH 0278/1078] Bump redis from 4.4.2 to 4.5.1 (#1344)

Bumps [redis](https://github.com/redis/redis-py) from 4.4.2 to 4.5.1.
- [Release notes](https://github.com/redis/redis-py/releases)
- [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES)
- [Commits](https://github.com/redis/redis-py/compare/v4.4.2...v4.5.1)

---
updated-dependencies:
- dependency-name: redis
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements_dev_optional.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt
index 0cf6661d1e..07ca6d743d 100644
--- a/requirements_dev_optional.txt
+++ b/requirements_dev_optional.txt
@@ -8,7 +8,7 @@ ipywidgets==8.0.4
 # don't let pyup change pinning for azure-storage-blob, need to pin to older
 # version to get compatibility with azure storage emulator on appveyor (FIXME)
 azure-storage-blob==12.14.1 # pyup: ignore
-redis==4.4.2
+redis==4.5.1
 types-redis
 types-setuptools
 pymongo==4.3.3

From 76fce142174b4b57e4fb0fd8141d7515aae81dcf Mon Sep 17 00:00:00 2001
From: Josh Moore
Date: Sun, 12 Feb 2023 09:26:05 +0100
Subject: [PATCH 0279/1078] Generate fixture for #1312 if it is missing (#1348)

This is a temporary fix for the larger issue of out-of-tree testing
described in #1347, but this should allow a release of 2.14.1 which
passes on conda.
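For reference, the one-time fixture generation added here is roughly
equivalent to the following standalone sketch (illustrative only; the
test itself derives the path from the installed package location, while
the relative "fixture" path below is an assumption):

    # Sketch, not part of the patch: recreate the UTF-8 attrs fixture
    # that test_utf8_encoding reads, matching the generation in the diff.
    import pathlib

    fixdir = pathlib.Path("fixture") / "utf8attrs"
    fixdir.mkdir(parents=True, exist_ok=True)
    (fixdir / ".zattrs").write_text('{"foo": "た"}', encoding="utf-8")
    (fixdir / ".zgroup").write_text('{\n    "zarr_format": 2\n}', encoding="utf-8")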
--- zarr/tests/test_attrs.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index 27ec8fea8d..a329f463f0 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -1,7 +1,9 @@ import json +import pathlib import pytest +import zarr from zarr._storage.store import meta_root from zarr.attrs import Attributes from zarr.storage import KVStore, DirectoryStore @@ -50,6 +52,16 @@ def test_storage(self, zarr_version): def test_utf8_encoding(self, zarr_version): + project_root = pathlib.Path(zarr.__file__).resolve().parent.parent + fixdir = project_root / "fixture" / "utf8attrs" + if not fixdir.exists(): # pragma: no cover + # store the data - should be one-time operation + fixdir.mkdir() + with (fixdir / ".zattrs").open("w", encoding="utf-8") as f: + f.write('{"foo": "た"}') + with (fixdir / ".zgroup").open("w", encoding="utf-8") as f: + f.write("""{\n "zarr_format": 2\n}""") + # fixture data fixture = group(store=DirectoryStore('fixture')) assert fixture['utf8attrs'].attrs.asdict() == dict(foo='た') From 87c48d80c33ebe92661a05e981cce48b11b5c66f Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Sun, 12 Feb 2023 09:43:35 +0100 Subject: [PATCH 0280/1078] Fix 2.14.0 redirects (#1346) * Fix 2.14.0 redirects Check for hash in anchormap * Add release notes * Add note about conda-forge release --- docs/_static/custom.js | 4 ++-- docs/release.rst | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/docs/_static/custom.js b/docs/_static/custom.js index 06b2d019b1..dcb584ecd5 100644 --- a/docs/_static/custom.js +++ b/docs/_static/custom.js @@ -12,7 +12,7 @@ } let hash = window.location.hash.substring(1); - if (hash) { - window.location.replace(anchorMap[hash]); + if (hash && hash in anchorMap) { + window.location.replace(anchorMap[hash]); } })(); diff --git a/docs/release.rst b/docs/release.rst index 0965109935..e7802fecbb 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -15,6 +15,20 @@ Release notes # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +.. _release_2.14.1: + +2.14.1 +------ + +Documentation +~~~~~~~~~~~~~ + +* Fix API links. + By :user:`Josh Moore ` :issue:`1346`. + +* Fix unit tests which prevented the conda-forge release. + By :user:`Josh Moore ` :issue:`1348`. + .. _release_2.14.0: 2.14.0 From c3750302f71ebb0b1506db05815b38c2c097c3de Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Feb 2023 08:40:10 +0100 Subject: [PATCH 0281/1078] Bump numpy from 1.24.1 to 1.24.2 (#1350) Bumps [numpy](https://github.com/numpy/numpy) from 1.24.1 to 1.24.2. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.24.1...v1.24.2) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index 7d373a254d..e094d4fcd4 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. 
This file should pin to the latest
# numpy version.
-numpy==1.24.1
+numpy==1.24.2

From d7d88158c5f4e61d926675f2af9df66e7848bb68 Mon Sep 17 00:00:00 2001
From: Marwan Zouinkhi
Date: Mon, 20 Feb 2023 03:00:12 -0500
Subject: [PATCH 0282/1078] Fix N5Store dtype wrong behavior (#1340)

* create a test case for the bug
* fix dtype bug
* fix formatting
* optimize code by using create function
* use atexit delete
* optimize import
* update n5 test hexdigest
* skip dtype decode if no fsspec
* add contribution
---
 docs/release.rst        |  3 +++
 zarr/n5.py              |  1 +
 zarr/tests/test_core.py | 13 ++++++-------
 zarr/tests/test_n5.py   | 21 +++++++++++++++++++--
 4 files changed, 29 insertions(+), 9 deletions(-)

diff --git a/docs/release.rst b/docs/release.rst
index e7802fecbb..0098d2e50b 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -61,6 +61,9 @@ Bug fixes
 * NestedDirectoryStore.listdir now returns chunk keys with the correct '/' dimension_separator.
   By :user:`Brett Graham ` :issue:`1334`.

+* N5Store/N5FSStore dtype now returns a Zarr-readable dtype.
+  By :user:`Marwan Zouinkhi ` :issue:`1339`.
+
 .. _release_2.13.6:

 2.13.6
diff --git a/zarr/n5.py b/zarr/n5.py
index 4c93ce4acb..1eb6ef2b33 100644
--- a/zarr/n5.py
+++ b/zarr/n5.py
@@ -689,6 +689,7 @@ def array_metadata_to_zarr(array_metadata: Dict[str, Any],
     array_metadata['order'] = 'C'
     array_metadata['filters'] = []
     array_metadata['dimension_separator'] = '.'
+    array_metadata['dtype'] = np.dtype(array_metadata['dtype']).str

     compressor_config = array_metadata['compressor']
     compressor_config = compressor_config_to_zarr(compressor_config)
diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py
index b54fe3ddf0..ba89db3b06 100644
--- a/zarr/tests/test_core.py
+++ b/zarr/tests/test_core.py
@@ -2034,13 +2034,12 @@ def test_compressors(self):
             assert np.all(a2[:] == 1)

     def expected(self):
-        return [
-            '4e9cf910000506455f82a70938a272a3fce932e5',
-            'f9d4cbf1402901f63dea7acf764d2546e4b6aa38',
-            '1d8199f5f7b70d61aa0d29cc375212c3df07d50a',
-            '874880f91aa6736825584509144afe6b06b0c05c',
-            'e2258fedc74752196a8c8383db49e27193c995e2',
-        ]
+        return ['8811a77d54caaa1901d5cc4452d946ae433c8d90',
+                'd880b007d9779db5f2cdbe13274eb1cbac4a425a',
+                'd80eb66d5521744f051e816ab368d8ccfc2e3edf',
+                '568f9f837e4b682a3819cb122988e2eebeb6572b',
+                '4fdf4475d786d6694110db5619acd30c80dfc372'
+                ]


 @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
diff --git a/zarr/tests/test_n5.py b/zarr/tests/test_n5.py
index a1a0a83e36..8f6d97dd51 100644
--- a/zarr/tests/test_n5.py
+++ b/zarr/tests/test_n5.py
@@ -1,10 +1,15 @@
-
 import pytest

-from zarr.n5 import N5ChunkWrapper
+from zarr.n5 import N5ChunkWrapper, N5FSStore
+from zarr.creation import create
+from zarr.storage import atexit_rmtree
 from numcodecs import GZip
 import numpy as np
 from typing import Tuple
+import json
+import atexit
+
+from zarr.tests.util import have_fsspec


 def test_make_n5_chunk_wrapper():
@@ -35,3 +40,15 @@ def test_partial_chunk_decode(chunk_shape: Tuple[int, ...]):
     chunk[subslices] = 1
     subchunk = np.ascontiguousarray(chunk[subslices])
     assert np.array_equal(codec_wrapped.decode(codec_wrapped.encode(subchunk)), chunk)
+
+
+@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
+def test_dtype_decode():
+    path = 'data/array.n5'
+    atexit_rmtree(path)
+    atexit.register(atexit_rmtree, path)
+    n5_store = N5FSStore(path)
+    create(100, store=n5_store)
+    dtype_n5 = json.loads(n5_store[".zarray"])["dtype"]
+    dtype_zarr = json.loads(create(100).store[".zarray"])["dtype"]
+    assert dtype_n5 == dtype_zarr

From 
5ece3e6971595feec5fce37ce801dd54e961c728 Mon Sep 17 00:00:00 2001
From: Brandur Thorgrimsson <11929039+Swordcat@users.noreply.github.com>
Date: Thu, 23 Feb 2023 15:23:23 +0000
Subject: [PATCH 0283/1078] Ensure `zarr.group` uses writable mode (#1354)

* Fix creating a group with fsmap per issue #1353, regression test added
* Update release notes
---
 docs/release.rst             | 11 +++++++++++
 zarr/hierarchy.py            |  2 +-
 zarr/tests/test_hierarchy.py | 11 +++++++++++
 3 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/docs/release.rst b/docs/release.rst
index 0098d2e50b..a6c32100ba 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -15,6 +15,17 @@ Release notes
   # .. warning::
   #    Pre-release! Use :command:`pip install --pre zarr` to evaluate this release.

+.. _release_2.14.2:
+
+2.14.2
+------
+
+Bug fixes
+~~~~~~~~~
+
+* Ensure ``zarr.group`` uses writeable mode to fix an issue with :issue:`1304`.
+  By :user:`Brandur Thorgrimsson ` :issue:`1354`.
+
 .. _release_2.14.1:

 2.14.1
diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py
index 0dae921500..18e7ac7863 100644
--- a/zarr/hierarchy.py
+++ b/zarr/hierarchy.py
@@ -1336,7 +1336,7 @@ def group(store=None, overwrite=False, chunk_store=None,
     """
     # handle polymorphic store arg
-    store = _normalize_store_arg(store, zarr_version=zarr_version)
+    store = _normalize_store_arg(store, zarr_version=zarr_version, mode='w')

     if zarr_version is None:
         zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)
diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py
index 7d87b6d404..d0833457fb 100644
--- a/zarr/tests/test_hierarchy.py
+++ b/zarr/tests/test_hierarchy.py
@@ -1591,6 +1591,17 @@ def test_group(zarr_version):
     assert store is g.store


+@pytest.mark.skipif(have_fsspec is False, reason='needs fsspec')
+@pytest.mark.parametrize('zarr_version', _VERSIONS)
+def test_group_writeable_mode(zarr_version, tmp_path):
+    # Regression test for https://github.com/zarr-developers/zarr-python/issues/1353
+    import fsspec
+
+    store = fsspec.get_mapper(str(tmp_path))
+    zg = group(store=store)
+    assert zg.store.map == store
+
+
 @pytest.mark.parametrize('zarr_version', _VERSIONS)
 def test_open_group(zarr_version):
     # test the open_group() convenience function

From 7018cf1127b14e9b21a8a0169bd7da95405c4577 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 7 Mar 2023 14:24:00 +0100
Subject: [PATCH 0284/1078] Bump pytest from 7.2.1 to 7.2.2 (#1361)

Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.2.1 to 7.2.2.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/7.2.1...7.2.2)

---
updated-dependencies:
- dependency-name: pytest
  dependency-type: direct:development
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 34d7d98e7e..1217ee620e 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==7.1.0 # test requirements -pytest==7.2.1 +pytest==7.2.2 From 55a875d16a7cb9f67dd652a5e921d5fe4bc37ac0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Mar 2023 15:46:49 +0100 Subject: [PATCH 0285/1078] Bump fsspec from 2023.1.0 to 2023.3.0 (#1360) * Bump fsspec from 2023.1.0 to 2023.3.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.1.0 to 2023.3.0. - [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.1.0...2023.3.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update s3fs as well --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 07ca6d743d..0599ef05ff 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.8.0 -fsspec==2023.1.0 -s3fs==2023.1.0 +fsspec==2023.3.0 +s3fs==2023.3.0 moto[server]>=4.0.8 From c66b35b8c1f6839ec596f4b7c87bf7b76b0c0818 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 9 Mar 2023 08:52:46 +0100 Subject: [PATCH 0286/1078] chore: update pre-commit hooks (#1351) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v0.991 → v1.0.1](https://github.com/pre-commit/mirrors-mypy/compare/v0.991...v1.0.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1f629ccf76..cd1bc44361 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.991 + rev: v1.0.1 hooks: - id: mypy files: zarr From 0195567038308a3674ae94ae1dceadf136ab34c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Mar 2023 08:53:00 +0100 Subject: [PATCH 0287/1078] Bump azure-storage-blob from 12.14.1 to 12.15.0 (#1357) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.14.1 to 12.15.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.14.1...azure-storage-blob_12.15.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0599ef05ff..8a59af3e17 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipywidgets==8.0.4 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.14.1 # pyup: ignore +azure-storage-blob==12.15.0 # pyup: ignore redis==4.5.1 types-redis types-setuptools From c77f9cd6fb29bdbc1c03b7b645dac946eed6b577 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Mar 2023 10:10:39 +0100 Subject: [PATCH 0288/1078] Bump actions/setup-python from 4.3.0 to 4.5.0 (#1318) * Bump actions/setup-python from 4.3.0 to 4.5.0 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.3.0 to 4.5.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4.3.0...v4.5.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Get the username * Test `push` too * Revert changes to GHA --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jakirkham Co-authored-by: Josh Moore --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index d1479d43e1..ea388b24b6 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v4.3.0 + - uses: actions/setup-python@v4.5.0 name: Install Python with: python-version: '3.8' From eacda8dc205c4b1e1785f5cebef0862ef46e69f7 Mon Sep 17 00:00:00 2001 From: Andreas Albert <103571926+AndreasAlbertQC@users.noreply.github.com> Date: Fri, 10 Mar 2023 14:10:44 +0100 Subject: [PATCH 0289/1078] More extensive orthogonal indexing in get/setitem (#1333) * More extensive orthogonal indexing in get/setitem Added pass-through to orthogonal indexing for the following cases: * index is iterable of integers * index is iterable of length ndim, with each element being a slice, integer, or list. Maximum one list. * Add test cases for indexing with single integer iterable --------- Co-authored-by: Josh Moore --- docs/release.rst | 9 +- docs/tutorial.rst | 7 ++ zarr/core.py | 5 + zarr/indexing.py | 20 ++++ zarr/tests/test_indexing.py | 214 +++++++++++++++++++++++++++++++++--- 5 files changed, 237 insertions(+), 18 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index a6c32100ba..f056f621bf 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,15 +6,18 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. - .. _unreleased: +.. _unreleased: - Unreleased - ---------- +Unreleased +---------- .. # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +* Implement more extensive fallback of getitem/setitem for orthogonal indexing. 
+ By :user:`Andreas Albert ` :issue:`1029`. + .. _release_2.14.2: 2.14.2 diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 43e42faf6b..0f2e1c7345 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -634,6 +634,13 @@ For convenience, the orthogonal indexing functionality is also available via the Any combination of integer, slice, 1D integer array and/or 1D Boolean array can be used for orthogonal indexing. +If the index contains at most one iterable, and otherwise contains only slices and integers, +orthogonal indexing is also available directly on the array: + + >>> z = zarr.array(np.arange(15).reshape(3, 5)) + >>> all(z.oindex[[0, 2], :] == z[[0, 2], :]) + True + Indexing fields in structured arrays ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/zarr/core.py b/zarr/core.py index b9db6cb2c8..521de80e17 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -28,6 +28,7 @@ err_too_many_indices, is_contiguous_selection, is_pure_fancy_indexing, + is_pure_orthogonal_indexing, is_scalar, pop_fields, ) @@ -817,6 +818,8 @@ def __getitem__(self, selection): fields, pure_selection = pop_fields(selection) if is_pure_fancy_indexing(pure_selection, self.ndim): result = self.vindex[selection] + elif is_pure_orthogonal_indexing(pure_selection, self.ndim): + result = self.get_orthogonal_selection(pure_selection, fields=fields) else: result = self.get_basic_selection(pure_selection, fields=fields) return result @@ -1387,6 +1390,8 @@ def __setitem__(self, selection, value): fields, pure_selection = pop_fields(selection) if is_pure_fancy_indexing(pure_selection, self.ndim): self.vindex[selection] = value + elif is_pure_orthogonal_indexing(pure_selection, self.ndim): + self.set_orthogonal_selection(pure_selection, value, fields=fields) else: self.set_basic_selection(pure_selection, value, fields=fields) diff --git a/zarr/indexing.py b/zarr/indexing.py index 268b487105..2f8144fd08 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -101,6 +101,26 @@ def is_pure_fancy_indexing(selection, ndim): ) +def is_pure_orthogonal_indexing(selection, ndim): + if not ndim: + return False + + # Case 1: Selection is a single iterable of integers + if is_integer_list(selection) or is_integer_array(selection, ndim=1): + return True + + # Case two: selection contains either zero or one integer iterables. 
+ # All other selection elements are slices or integers + return ( + isinstance(selection, tuple) and len(selection) == ndim and + sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 and + all( + is_integer_list(elem) or is_integer_array(elem) + or isinstance(elem, slice) or isinstance(elem, int) for + elem in selection) + ) + + def normalize_integer_selection(dim_sel, dim_len): # normalize type to int diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index 5c4c580636..f5f57be010 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -283,8 +283,6 @@ def test_get_basic_selection_2d(): for selection in bad_selections: with pytest.raises(IndexError): z.get_basic_selection(selection) - with pytest.raises(IndexError): - z[selection] # check fallback on fancy indexing fancy_selection = ([0, 1], [0, 1]) np.testing.assert_array_equal(z[fancy_selection], [0, 11]) @@ -317,14 +315,179 @@ def test_fancy_indexing_fallback_on_get_setitem(): ) -def test_fancy_indexing_doesnt_mix_with_slicing(): - z = zarr.zeros((20, 20)) - with pytest.raises(IndexError): - z[[1, 2, 3], :] = 2 - with pytest.raises(IndexError): - np.testing.assert_array_equal( - z[[1, 2, 3], :], 0 +@pytest.mark.parametrize("index,expected_result", + [ + # Single iterable of integers + ( + [0, 1], + [[0, 1, 2], + [3, 4, 5]] + ), + # List first, then slice + ( + ([0, 1], slice(None)), + [[0, 1, 2], + [3, 4, 5]] + ), + # List first, then slice + ( + ([0, 1], slice(1, None)), + [[1, 2], + [4, 5]] + ), + # Slice first, then list + ( + (slice(0, 2), [0, 2]), + [[0, 2], + [3, 5]] + ), + # Slices only + ( + (slice(0, 2), slice(0, 2)), + [[0, 1], + [3, 4]] + ), + # List with repeated index + ( + ([1, 0, 1], slice(1, None)), + [[4, 5], + [1, 2], + [4, 5]] + ), + # 1D indexing + ( + ([1, 0, 1]), + [ + [3, 4, 5], + [0, 1, 2], + [3, 4, 5] + ] + ) + + ]) +def test_orthogonal_indexing_fallback_on_getitem_2d(index, expected_result): + """ + Tests the orthogonal indexing fallback on __getitem__ for a 2D matrix. + + In addition to checking expected behavior, all indexing + is also checked against numpy. + """ + # [0, 1, 2], + # [3, 4, 5], + # [6, 7, 8] + a = np.arange(9).reshape(3, 3) + z = zarr.array(a) + + np.testing.assert_array_equal(z[index], a[index], err_msg="Indexing disagrees with numpy") + np.testing.assert_array_equal(z[index], expected_result) + + +@pytest.mark.parametrize("index,expected_result", + [ + # Single iterable of integers + ( + [0, 1], + [[[0, 1, 2], + [3, 4, 5], + [6, 7, 8]], + [[9, 10, 11], + [12, 13, 14], + [15, 16, 17]]] + ), + # One slice, two integers + ( + (slice(0, 2), 1, 1), + [4, 13] + ), + # One integer, two slices + ( + (slice(0, 2), 1, slice(0, 2)), + [[3, 4], [12, 13]] + ), + # Two slices and a list + ( + (slice(0, 2), [1, 2], slice(0, 2)), + [[[3, 4], [6, 7]], [[12, 13], [15, 16]]] + ), + ]) +def test_orthogonal_indexing_fallback_on_getitem_3d(index, expected_result): + """ + Tests the orthogonal indexing fallback on __getitem__ for a 3D matrix. + + In addition to checking expected behavior, all indexing + is also checked against numpy. 
+ """ + # [[[ 0, 1, 2], + # [ 3, 4, 5], + # [ 6, 7, 8]], + + # [[ 9, 10, 11], + # [12, 13, 14], + # [15, 16, 17]], + + # [[18, 19, 20], + # [21, 22, 23], + # [24, 25, 26]]] + a = np.arange(27).reshape(3, 3, 3) + z = zarr.array(a) + + np.testing.assert_array_equal(z[index], a[index], err_msg="Indexing disagrees with numpy") + np.testing.assert_array_equal(z[index], expected_result) + + +@pytest.mark.parametrize( + "index,expected_result", + [ + # Single iterable of integers + ( + [0, 1], + [ + [1, 1, 1], + [1, 1, 1], + [0, 0, 0] + ] + ), + # List and slice combined + ( + ([0, 1], slice(1, 3)), + [[0, 1, 1], + [0, 1, 1], + [0, 0, 0]] + ), + # Index repetition is ignored on setitem + ( + ([0, 1, 1, 1, 1, 1, 1], slice(1, 3)), + [[0, 1, 1], + [0, 1, 1], + [0, 0, 0]] + ), + # Slice with step + ( + ([0, 2], slice(None, None, 2)), + [[1, 0, 1], + [0, 0, 0], + [1, 0, 1]] ) + ] +) +def test_orthogonal_indexing_fallback_on_setitem_2d(index, expected_result): + """ + Tests the orthogonal indexing fallback on __setitem__ for a 3D matrix. + + In addition to checking expected behavior, all indexing + is also checked against numpy. + """ + # Slice + fancy index + a = np.zeros((3, 3)) + z = zarr.array(a) + z[index] = 1 + a[index] = 1 + np.testing.assert_array_equal( + z, expected_result + ) + np.testing.assert_array_equal( + z, a, err_msg="Indexing disagrees with numpy" + ) def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): @@ -335,12 +498,6 @@ def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): np.testing.assert_array_equal( z2[[1, 2, 3], [1, 2, 3]], 0 ) - with pytest.raises(IndexError): - z2[[1, 2, 3]] = 2 - with pytest.raises(IndexError): - np.testing.assert_array_equal( - z2[[1, 2, 3]], 0 - ) with pytest.raises(IndexError): z2[..., [1, 2, 3]] = 2 with pytest.raises(IndexError): @@ -770,6 +927,33 @@ def test_set_orthogonal_selection_3d(): _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) +def test_orthogonal_indexing_fallback_on_get_setitem(): + z = zarr.zeros((20, 20)) + z[[1, 2, 3], [1, 2, 3]] = 1 + np.testing.assert_array_equal( + z[:4, :4], + [ + [0, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + ], + ) + np.testing.assert_array_equal( + z[[1, 2, 3], [1, 2, 3]], 1 + ) + # test broadcasting + np.testing.assert_array_equal( + z[1, [1, 2, 3]], [1, 0, 0] + ) + # test 1D fancy indexing + z2 = zarr.zeros(5) + z2[[1, 2, 3]] = 1 + np.testing.assert_array_equal( + z2, [0, 1, 1, 1, 0] + ) + + def _test_get_coordinate_selection(a, z, selection): expect = a[selection] actual = z.get_coordinate_selection(selection) From 6e63fe93abdbf984093a83a00a673405e259e023 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Fri, 10 Mar 2023 16:46:10 +0100 Subject: [PATCH 0290/1078] Don't use relative fixture path (#1364) see: #1312 see: #1347 see: #1348 --- zarr/tests/test_attrs.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index a329f463f0..d741c17837 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -53,17 +53,18 @@ def test_storage(self, zarr_version): def test_utf8_encoding(self, zarr_version): project_root = pathlib.Path(zarr.__file__).resolve().parent.parent - fixdir = project_root / "fixture" / "utf8attrs" - if not fixdir.exists(): # pragma: no cover + fixdir = project_root / "fixture" + testdir = fixdir / "utf8attrs" + if not testdir.exists(): # pragma: no cover # store the data - should be one-time operation - fixdir.mkdir() - with (fixdir / ".zattrs").open("w", 
encoding="utf-8") as f: + testdir.mkdir() + with (testdir / ".zattrs").open("w", encoding="utf-8") as f: f.write('{"foo": "た"}') - with (fixdir / ".zgroup").open("w", encoding="utf-8") as f: + with (testdir / ".zgroup").open("w", encoding="utf-8") as f: f.write("""{\n "zarr_format": 2\n}""") # fixture data - fixture = group(store=DirectoryStore('fixture')) + fixture = group(store=DirectoryStore(str(fixdir))) assert fixture['utf8attrs'].attrs.asdict() == dict(foo='た') def test_get_set_del_contains(self, zarr_version): From d17d8d9b03924d9ec0baa296b6d9f8aa5f935341 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 12:06:47 +0100 Subject: [PATCH 0291/1078] Bump pypa/gh-action-pypi-publish from 1.6.4 to 1.7.1 (#1365) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.6.4 to 1.7.1. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.6.4...v1.7.1) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index ea388b24b6..4d3b03ed25 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.6.4 + - uses: pypa/gh-action-pypi-publish@v1.7.1 with: user: __token__ password: ${{ secrets.pypi_password }} From f7ef424a65a7274e4aa0dfbb75795143db609857 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 16 Mar 2023 17:18:58 +0100 Subject: [PATCH 0292/1078] One more fix for missing directories (#1367) --- zarr/tests/test_attrs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index d741c17837..d6151b4f29 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -57,7 +57,7 @@ def test_utf8_encoding(self, zarr_version): testdir = fixdir / "utf8attrs" if not testdir.exists(): # pragma: no cover # store the data - should be one-time operation - testdir.mkdir() + testdir.mkdir(parents=True, exist_ok=True) with (testdir / ".zattrs").open("w", encoding="utf-8") as f: f.write('{"foo": "た"}') with (testdir / ".zgroup").open("w", encoding="utf-8") as f: From 2ff887548496855706c69a3c5983b00f17025af6 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Fri, 17 Mar 2023 17:25:06 +0530 Subject: [PATCH 0293/1078] Add API reference for V3 Implementation in the docs (#1345) * Add API reference for V3 Implementation in the docs * Minor fix * Minor fix * Minor indentation fix * Update docs/api/v3.rst Co-authored-by: Jonathan Striebel * Update docs/api/v3.rst Co-authored-by: Jonathan Striebel * Update docs/api/v3.rst Co-authored-by: Jonathan Striebel * Fix broken links --------- Co-authored-by: Jonathan Striebel --- docs/api.rst | 1 + docs/api/v3.rst | 77 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 docs/api/v3.rst diff --git a/docs/api.rst b/docs/api.rst index 2b6e7ea516..e200dd908d 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -13,6 +13,7 @@ API reference api/codecs api/attrs api/sync + api/v3 
Indices and tables
------------------
diff --git a/docs/api/v3.rst b/docs/api/v3.rst
new file mode 100644
index 0000000000..7665b2ddd1
--- /dev/null
+++ b/docs/api/v3.rst
@@ -0,0 +1,77 @@
+V3 Specification Implementation (``zarr._storage.v3``)
+======================================================
+
+This module contains the implementation of the `Zarr V3 Specification <https://zarr-specs.readthedocs.io/en/latest/core/v3.0.html>`_.
+
+.. warning::
+    Since Zarr Python 2.12 release, this module provides experimental infrastructure for reading and
+    writing the upcoming V3 spec of the Zarr format. Users wishing to prepare for the migration can set
+    the environment variable ``ZARR_V3_EXPERIMENTAL_API=1`` to begin experimenting, however data
+    written with this API should be expected to become stale, as the implementation will still change.
+
+The new ``zarr._storage.v3`` package has the necessary classes and functions for evaluating Zarr V3.
+Since the design is not finalised, the classes and functions are not automatically imported into
+the regular Zarr namespace.
+
+Code snippet for creating Zarr V3 arrays::
+
+    >>> import zarr
+    >>> z = zarr.create((10000, 10000),
+    ...                 chunks=(100, 100),
+    ...                 dtype='f8',
+    ...                 compressor='default',
+    ...                 path='path-where-you-want-zarr-v3-array',
+    ...                 zarr_version=3)
+
+Further, you can use ``z.info`` to see details about the array you just created::
+
+    >>> z.info
+    Name               : path-where-you-want-zarr-v3-array
+    Type               : zarr.core.Array
+    Data type          : float64
+    Shape              : (10000, 10000)
+    Chunk shape        : (100, 100)
+    Order              : C
+    Read-only          : False
+    Compressor         : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0)
+    Store type         : zarr._storage.v3.KVStoreV3
+    No. bytes          : 800000000 (762.9M)
+    No. bytes stored   : 557
+    Storage ratio      : 1436265.7
+    Chunks initialized : 0/10000
+
+You can also check ``Store type`` here (which indicates Zarr V3).
+
+.. module:: zarr._storage.v3
+
+.. autoclass:: RmdirV3
+.. autoclass:: KVStoreV3
+.. autoclass:: FSStoreV3
+.. autoclass:: MemoryStoreV3
+.. autoclass:: DirectoryStoreV3
+.. autoclass:: ZipStoreV3
+.. autoclass:: RedisStoreV3
+.. autoclass:: MongoDBStoreV3
+.. autoclass:: DBMStoreV3
+.. autoclass:: LMDBStoreV3
+.. autoclass:: SQLiteStoreV3
+.. autoclass:: LRUStoreCacheV3
+.. autoclass:: ConsolidatedMetadataStoreV3
+
+In v3 `storage transformers `_
+can be set via ``zarr.create(…, storage_transformers=[…])``.
+The experimental sharding storage transformer can be tested by setting
+the environment variable ``ZARR_V3_SHARDING=1``. Data written with this flag
+enabled should be expected to become stale until
+`ZEP 2 `_ is approved
+and fully implemented.
+
+.. module:: zarr._storage.v3_storage_transformers
+
+.. autoclass:: ShardingStorageTransformer
+
+The abstract base class for storage transformers is
+
+.. module:: zarr._storage.store
+
+.. autoclass:: StorageTransformer

From fe8ef26ab0bf644c52c116403916bc14aa0c17fc Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 20 Mar 2023 15:05:49 +0100
Subject: [PATCH 0294/1078] Bump pypa/gh-action-pypi-publish from 1.7.1 to
 1.8.1 (#1369)

Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.7.1 to 1.8.1.
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases)
- [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.7.1...v1.8.1)

---
updated-dependencies:
- dependency-name: pypa/gh-action-pypi-publish
  dependency-type: direct:production
  update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 4d3b03ed25..2f561dc512 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.7.1 + - uses: pypa/gh-action-pypi-publish@v1.8.1 with: user: __token__ password: ${{ secrets.pypi_password }} From 7c0113a489a75278724a7581fa91136d1b1f0251 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Mar 2023 22:55:52 -0700 Subject: [PATCH 0295/1078] Bump redis from 4.5.1 to 4.5.3 (#1373) --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 8a59af3e17..a5081b3c57 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.4 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.15.0 # pyup: ignore -redis==4.5.1 +redis==4.5.3 types-redis types-setuptools pymongo==4.3.3 From 0a6dcee998106f9468846480d8560a3bbf31a210 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Mar 2023 02:05:08 -0700 Subject: [PATCH 0296/1078] Bump ipywidgets from 8.0.4 to 8.0.6 (#1379) Bumps [ipywidgets](https://github.com/jupyter-widgets/ipywidgets) from 8.0.4 to 8.0.6. - [Release notes](https://github.com/jupyter-widgets/ipywidgets/releases) - [Commits](https://github.com/jupyter-widgets/ipywidgets/compare/8.0.4...8.0.6) --- updated-dependencies: - dependency-name: ipywidgets dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a5081b3c57..3ee7cbe5d3 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.4.0; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.0.4 +ipywidgets==8.0.6 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From 4b0705c6d85cdfc20a33cbb39a90c8c9d11006c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Mar 2023 02:05:37 -0700 Subject: [PATCH 0297/1078] Bump pypa/gh-action-pypi-publish from 1.8.1 to 1.8.3 (#1376) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.1 to 1.8.3. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.1...v1.8.3) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/releases.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml
index 2f561dc512..be7a3b19cc 100644
--- a/.github/workflows/releases.yml
+++ b/.github/workflows/releases.yml
@@ -64,7 +64,7 @@ jobs:
         with:
           name: releases
           path: dist
-      - uses: pypa/gh-action-pypi-publish@v1.8.1
+      - uses: pypa/gh-action-pypi-publish@v1.8.3
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}

From b14f15ff300aeb78973dd9468558527c14d8db69 Mon Sep 17 00:00:00 2001
From: "Mads R. B. Kristensen"
Date: Thu, 13 Apr 2023 18:26:45 +0200
Subject: [PATCH 0298/1078] Getitems: support `meta_array` (#1131)

* Use _chunk_getitems() always
* Implement getitems() always
* FSStore.getitems(): accept meta_array and on_error
* getitems(): handle on_error="omit"
* Removed the `on_error` argument
* remove redundant check
* getitems(): use Sequence instead of Iterable
* Typo
Co-authored-by: Josh Moore
* Introduce a contexts argument
* CountingDict: impl. getitems()
* added test_getitems()
* Introduce Context
* doc
* support the new get_partial_values() method
* Resolve conflict with get_partial_values()
* make contexts keyword-only
* Introduce ConstantMap
* use typing.Mapping
* test_constant_map
---------
Co-authored-by: jakirkham
Co-authored-by: Josh Moore
---
 zarr/_storage/store.py        | 28 +++++++++++
 zarr/context.py               | 19 ++++++++
 zarr/core.py                  | 92 +++++++++++------------------------
 zarr/storage.py               |  8 ++-
 zarr/tests/test_storage.py    | 35 ++++++++++++-
 zarr/tests/test_storage_v3.py |  2 +
 zarr/tests/test_util.py       | 15 +++++-
 zarr/tests/util.py            |  9 ++++
 zarr/util.py                  | 52 +++++++++++++++++++-
 9 files changed, 190 insertions(+), 70 deletions(-)
 create mode 100644 zarr/context.py

diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py
index 4d813b8e05..0594dc22de 100644
--- a/zarr/_storage/store.py
+++ b/zarr/_storage/store.py
@@ -8,6 +8,7 @@
 from zarr.meta import Metadata2, Metadata3
 from zarr.util import normalize_storage_path
+from zarr.context import Context

 # v2 store keys
 array_meta_key = '.zarray'
@@ -131,6 +132,33 @@ def _ensure_store(store: Any):
             f"wrap it in Zarr.storage.KVStore. Got {store}"
         )

+    def getitems(
+        self, keys: Sequence[str], *, contexts: Mapping[str, Context]
+    ) -> Mapping[str, Any]:
+        """Retrieve data from multiple keys.
+
+        Parameters
+        ----------
+        keys : Sequence[str]
+            The keys to retrieve
+        contexts: Mapping[str, Context]
+            A mapping of keys to their context. Each context is a mapping of store
+            specific information. E.g. a context could be a dict telling the store
+            the preferred output array type: `{"meta_array": cupy.empty(())}`
+
+        Returns
+        -------
+        Mapping
+            A collection mapping the input keys to their results.
+
+        Notes
+        -----
+        This default implementation uses __getitem__() to read each key sequentially and
+        ignores contexts. Override this method to implement concurrent reads of multiple
+        keys and/or to utilize the contexts.
+        """
+        return {k: self[k] for k in keys if k in self}
+

 class Store(BaseStore):
     """Abstract store class used by implementations following the Zarr v2 spec. 
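A minimal sketch (not part of this patch) of how a custom store might consume
the new ``contexts`` mapping, mirroring the counting store used in the tests
further down; ``ContextAwareStore`` is a hypothetical name chosen here for
illustration:

    from zarr.storage import KVStore

    class ContextAwareStore(KVStore):
        """Illustrative subclass only: inspect the per-key context on reads."""
        def getitems(self, keys, *, contexts):
            for key in keys:
                # Context entries are optional; fall back to a plain read.
                meta_array = contexts.get(key, {}).get("meta_array")
                # ...a real store could allocate output buffers of that type...
            return {key: self[key] for key in keys if key in self}

Note that ``zarr.core`` passes a ``ConstantMap`` here, so every requested key
sees the same context object.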
diff --git a/zarr/context.py b/zarr/context.py new file mode 100644 index 0000000000..83fbaafa9b --- /dev/null +++ b/zarr/context.py @@ -0,0 +1,19 @@ + +from typing import TypedDict + +from numcodecs.compat import NDArrayLike + + +class Context(TypedDict, total=False): + """ A context for component specific information + + All keys are optional. Any component reading the context must provide + a default implementation in the case a key cannot be found. + + Items + ----- + meta_array : array-like, optional + An array-like instance to use for determining the preferred output + array type. + """ + meta_array: NDArrayLike diff --git a/zarr/core.py b/zarr/core.py index 521de80e17..5537733b4b 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -13,6 +13,7 @@ from zarr._storage.store import _prefix_to_attrs_key, assert_zarr_v3_api_available from zarr.attrs import Attributes from zarr.codecs import AsType, get_codec +from zarr.context import Context from zarr.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError from zarr.indexing import ( BasicIndexer, @@ -41,6 +42,7 @@ normalize_store_arg, ) from zarr.util import ( + ConstantMap, all_equal, InfoReporter, check_array_shape, @@ -1275,24 +1277,14 @@ def _get_selection(self, indexer, out=None, fields=None): check_array_shape('out', out, out_shape) # iterate over chunks - if ( - not hasattr(self.chunk_store, "getitems") and not ( - hasattr(self.chunk_store, "get_partial_values") and - self.chunk_store.supports_efficient_get_partial_values - ) - ) or any(map(lambda x: x == 0, self.shape)): - # sequentially get one key at a time from storage - for chunk_coords, chunk_selection, out_selection in indexer: - # load chunk selection into output array - self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection, - drop_axes=indexer.drop_axes, fields=fields) - else: + if math.prod(out_shape) > 0: # allow storage to get multiple items at once lchunk_coords, lchunk_selection, lout_selection = zip(*indexer) - self._chunk_getitems(lchunk_coords, lchunk_selection, out, lout_selection, - drop_axes=indexer.drop_axes, fields=fields) - + self._chunk_getitems( + lchunk_coords, lchunk_selection, out, lout_selection, + drop_axes=indexer.drop_axes, fields=fields + ) if out.shape: return out else: @@ -1963,68 +1955,36 @@ def _process_chunk( # store selected data in output out[out_selection] = tmp - def _chunk_getitem(self, chunk_coords, chunk_selection, out, out_selection, - drop_axes=None, fields=None): - """Obtain part or whole of a chunk. + def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, + drop_axes=None, fields=None): + """Obtain part or whole of chunks. Parameters ---------- - chunk_coords : tuple of ints - Indices of the chunk. - chunk_selection : selection - Location of region within the chunk to extract. + chunk_coords : list of tuple of ints + Indices of the chunks. + chunk_selection : list of selections + Location of region within the chunks to extract. out : ndarray Array to store result in. - out_selection : selection - Location of region within output array to store results in. + out_selection : list of selections + Location of regions within output array to store results in. drop_axes : tuple of ints Axes to squeeze out of the chunk. 
fields TODO - """ - out_is_ndarray = True - try: - out = ensure_ndarray_like(out) - except TypeError: - out_is_ndarray = False - - assert len(chunk_coords) == len(self._cdata_shape) - - # obtain key for chunk - ckey = self._chunk_key(chunk_coords) - try: - # obtain compressed data for chunk - cdata = self.chunk_store[ckey] - - except KeyError: - # chunk not initialized - if self._fill_value is not None: - if fields: - fill_value = self._fill_value[fields] - else: - fill_value = self._fill_value - out[out_selection] = fill_value - - else: - self._process_chunk(out, cdata, chunk_selection, drop_axes, - out_is_ndarray, fields, out_selection) - - def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, - drop_axes=None, fields=None): - """As _chunk_getitem, but for lists of chunks - - This gets called where the storage supports ``getitems``, so that - it can decide how to fetch the keys, allowing concurrency. - """ out_is_ndarray = True try: out = ensure_ndarray_like(out) except TypeError: # pragma: no cover out_is_ndarray = False + # Keys to retrieve ckeys = [self._chunk_key(ch) for ch in lchunk_coords] + + # Check if we can do a partial read if ( self._partial_decompress and self._compressor @@ -2056,13 +2016,17 @@ def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, for ckey in ckeys if ckey in self.chunk_store } + elif hasattr(self.chunk_store, "get_partial_values"): + partial_read_decode = False + values = self.chunk_store.get_partial_values([(ckey, (0, None)) for ckey in ckeys]) + cdatas = {key: value for key, value in zip(ckeys, values) if value is not None} else: partial_read_decode = False - if not hasattr(self.chunk_store, "getitems"): - values = self.chunk_store.get_partial_values([(ckey, (0, None)) for ckey in ckeys]) - cdatas = {key: value for key, value in zip(ckeys, values) if value is not None} - else: - cdatas = self.chunk_store.getitems(ckeys, on_error="omit") + contexts = {} + if not isinstance(self._meta_array, np.ndarray): + contexts = ConstantMap(ckeys, constant=Context(meta_array=self._meta_array)) + cdatas = self.chunk_store.getitems(ckeys, contexts=contexts) + for ckey, chunk_select, out_select in zip(ckeys, lchunk_selection, lout_selection): if ckey in cdatas: self._process_chunk( diff --git a/zarr/storage.py b/zarr/storage.py index fae9530716..e6c3f62faf 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -31,7 +31,7 @@ from os import scandir from pickle import PicklingError from threading import Lock, RLock -from typing import Optional, Union, List, Tuple, Dict, Any +from typing import Sequence, Mapping, Optional, Union, List, Tuple, Dict, Any import uuid import time @@ -42,6 +42,7 @@ ensure_contiguous_ndarray_like ) from numcodecs.registry import codec_registry +from zarr.context import Context from zarr.errors import ( MetadataError, @@ -1380,7 +1381,10 @@ def _normalize_key(self, key): return key.lower() if self.normalize_keys else key - def getitems(self, keys, **kwargs): + def getitems( + self, keys: Sequence[str], *, contexts: Mapping[str, Context] + ) -> Mapping[str, Any]: + keys_transformed = [self._normalize_key(key) for key in keys] results = self.map.getitems(keys_transformed, on_error="omit") # The function calling this method may not recognize the transformed keys diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 0b21dfbd88..f157e2a3d2 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -20,6 +20,7 @@ import zarr from zarr._storage.store import 
_get_hierarchy_metadata
 from zarr.codecs import BZ2, AsType, Blosc, Zlib
+from zarr.context import Context
 from zarr.convenience import consolidate_metadata
 from zarr.errors import ContainsArrayError, ContainsGroupError, MetadataError
 from zarr.hierarchy import group
@@ -37,7 +38,7 @@
 from zarr.storage import FSStore, rename, listdir
 from zarr._storage.v3 import KVStoreV3
 from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp
-from zarr.util import json_dumps
+from zarr.util import ConstantMap, json_dumps


 @contextmanager
@@ -2584,3 +2585,35 @@ def test_meta_prefix_6853():
     fixtures = group(store=DirectoryStore(str(fixture)))
     assert list(fixtures.arrays())
+
+
+def test_getitems_contexts():
+
+    class MyStore(CountingDict):
+        def __init__(self):
+            super().__init__()
+            self.last_contexts = None
+
+        def getitems(self, keys, *, contexts):
+            self.last_contexts = contexts
+            return super().getitems(keys, contexts=contexts)
+
+    store = MyStore()
+    z = zarr.create(shape=(10,), chunks=1, store=store)
+
+    # By default, no contexts are given to the store's getitems()
+    z[0]
+    assert len(store.last_contexts) == 0
+
+    # Setting a non-default meta_array will create contexts for the store's getitems()
+    z._meta_array = "my_meta_array"
+    z[0]
+    assert store.last_contexts == {'0': {'meta_array': 'my_meta_array'}}
+    assert isinstance(store.last_contexts, ConstantMap)
+    # Accessing different chunks should trigger different key requests
+    z[1]
+    assert store.last_contexts == {'1': {'meta_array': 'my_meta_array'}}
+    assert isinstance(store.last_contexts, ConstantMap)
+    z[2:4]
+    assert store.last_contexts == ConstantMap(['2', '3'], Context({'meta_array': 'my_meta_array'}))
+    assert isinstance(store.last_contexts, ConstantMap)
diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py
index cc031f0db4..418f7d506b 100644
--- a/zarr/tests/test_storage_v3.py
+++ b/zarr/tests/test_storage_v3.py
@@ -666,6 +666,8 @@ def _get_public_and_dunder_methods(some_class):
 def test_storage_transformer_interface():
     store_v3_methods = _get_public_and_dunder_methods(StoreV3)
     store_v3_methods.discard("__init__")
+    # Note, getitems() isn't mandatory when get_partial_values() is available
+    store_v3_methods.discard("getitems")
     storage_transformer_methods = _get_public_and_dunder_methods(StorageTransformer)
     storage_transformer_methods.discard("__init__")
     storage_transformer_methods.discard("get_config")
diff --git a/zarr/tests/test_util.py b/zarr/tests/test_util.py
index e9e1786abe..0a717b8f28 100644
--- a/zarr/tests/test_util.py
+++ b/zarr/tests/test_util.py
@@ -5,7 +5,7 @@
 import pytest

 from zarr.core import Array
-from zarr.util import (all_equal, flatten, guess_chunks, human_readable_size,
+from zarr.util import (ConstantMap, all_equal, flatten, guess_chunks, human_readable_size,
                        info_html_report, info_text_report, is_total_slice,
                        json_dumps, normalize_chunks,
                        normalize_dimension_separator,
@@ -248,3 +248,16 @@ def test_json_dumps_numpy_dtype():
     # Check that we raise the error of the superclass for unsupported object
     with pytest.raises(TypeError):
         json_dumps(Array)
+
+
+def test_constant_map():
+    val = object()
+    m = ConstantMap(keys=[1, 2], constant=val)
+    assert len(m) == 2
+    assert m[1] is val
+    assert m[2] is val
+    assert 1 in m
+    assert 0 not in m
+    with pytest.raises(KeyError):
+        m[0]
+    assert repr(m) == repr({1: val, 2: val})
diff --git a/zarr/tests/util.py b/zarr/tests/util.py
index faa2f35d25..19ac8c0bfa 100644
--- a/zarr/tests/util.py
+++ b/zarr/tests/util.py
@@ -1,6 
+1,8 @@ import collections import os import tempfile +from typing import Any, Mapping, Sequence +from zarr.context import Context from zarr.storage import Store from zarr._storage.v3 import StoreV3 @@ -42,6 +44,13 @@ def __delitem__(self, key): self.counter['__delitem__', key] += 1 del self.wrapped[key] + def getitems( + self, keys: Sequence[str], *, contexts: Mapping[str, Context] + ) -> Mapping[str, Any]: + for key in keys: + self.counter['__getitem__', key] += 1 + return {k: self.wrapped[k] for k in keys if k in self.wrapped} + class CountingDictV3(CountingDict, StoreV3): pass diff --git a/zarr/util.py b/zarr/util.py index be5f174aab..68a238fbe4 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -5,12 +5,22 @@ from textwrap import TextWrapper import mmap import time -from typing import Any, Callable, Dict, Optional, Tuple, Union +from typing import ( + Any, + Callable, + Dict, + Iterator, + Mapping, + Optional, + Tuple, + TypeVar, + Union, + Iterable +) import numpy as np from asciitree import BoxStyle, LeftAligned from asciitree.traversal import Traversal -from collections.abc import Iterable from numcodecs.compat import ( ensure_text, ensure_ndarray_like, @@ -21,6 +31,9 @@ from numcodecs.registry import codec_registry from numcodecs.blosc import cbuffer_sizes, cbuffer_metainfo +KeyType = TypeVar('KeyType') +ValueType = TypeVar('ValueType') + def flatten(arg: Iterable) -> Iterable: for element in arg: @@ -745,3 +758,38 @@ def ensure_contiguous_ndarray_or_bytes(buf) -> Union[NDArrayLike, bytes]: except TypeError: # An error is raised if `buf` couldn't be zero-copy converted return ensure_bytes(buf) + + +class ConstantMap(Mapping[KeyType, ValueType]): + """A read-only map that maps all keys to the same constant value + + Useful if you want to call `getitems()` with the same context for all keys. + + Parameters + ---------- + keys + The keys of the map. Will be copied to a frozenset if it isn't already. + constant + The constant that all keys are mapping to. + """ + + def __init__(self, keys: Iterable[KeyType], constant: ValueType) -> None: + self._keys = keys if isinstance(keys, frozenset) else frozenset(keys) + self._constant = constant + + def __getitem__(self, key: KeyType) -> ValueType: + if key not in self._keys: + raise KeyError(repr(key)) + return self._constant + + def __iter__(self) -> Iterator[KeyType]: + return iter(self._keys) + + def __len__(self) -> int: + return len(self._keys) + + def __contains__(self, key: object) -> bool: + return key in self._keys + + def __repr__(self) -> str: + return repr({k: v for k, v in self.items()}) From 80fc1fe57062e0b6be9f2712d88373aaae1c6b18 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Thu, 13 Apr 2023 22:02:58 +0530 Subject: [PATCH 0299/1078] Update release.rst for 2.15.0 (#1378) --- docs/release.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index f056f621bf..b79d0a5456 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -15,9 +15,29 @@ Unreleased # .. warning:: # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. +.. _release_2.15.0: + +2.15.0 +------ + +Enhancements +~~~~~~~~~~~~ + * Implement more extensive fallback of getitem/setitem for orthogonal indexing. By :user:`Andreas Albert ` :issue:`1029`. +Documentation +~~~~~~~~~~~~~ + +* Add API reference for V3 Implementation in the docs. + By :user:`Sanket Verma ` :issue:`1345`. + +Bug fixes +~~~~~~~~~ + +* Fix the conda-forge error. Read :issue:`1347` for detailed info. 
+ By :user:`Josh Moore ` :issue:`1364` and :issue:`1367`. + .. _release_2.14.2: 2.14.2 From a66f40bb7f3013bec4c4a768df82ae2e2652c720 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 13 Apr 2023 09:49:33 -0700 Subject: [PATCH 0300/1078] Add release note for #1131 --- docs/release.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index b79d0a5456..01c1a2f895 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -26,6 +26,9 @@ Enhancements * Implement more extensive fallback of getitem/setitem for orthogonal indexing. By :user:`Andreas Albert ` :issue:`1029`. +* Getitems supports ``meta_array``. + By :user: 'Mads R. B. Kristensen ' :issue:`1131`. + Documentation ~~~~~~~~~~~~~ From e1e556108e5cac6edd8a5ea511f5c15b3ac12363 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 13 Apr 2023 13:52:08 -0700 Subject: [PATCH 0301/1078] Remove `codecov (#1391) --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index cee2ca7aef..fb410762be 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -58,7 +58,7 @@ jobs: run: | conda activate zarr-env python -m pip install --upgrade pip - python -m pip install -U pip setuptools wheel codecov line_profiler + python -m pip install -U pip setuptools wheel line_profiler python -m pip install -rrequirements_dev_minimal.txt numpy${{matrix.numpy_version}} -rrequirements_dev_optional.txt pymongo redis python -m pip install . python -m pip freeze From edd8a680b51f41c831bc223ce3178bc295817fcb Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 13 Apr 2023 14:23:49 -0700 Subject: [PATCH 0302/1078] fix for readonly error in _normalize_store_arg_v3 (#1383) * fix for readonly error in _normalize_store_arg_v3 * add test * add release note --------- Co-authored-by: Ryan Abernathey --- .pre-commit-config.yaml | 2 +- docs/release.rst | 3 +++ zarr/_storage/v3.py | 10 ++++------ zarr/tests/test_storage_v3.py | 10 ++++++++++ 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cd1bc44361..a420662b5b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ repos: rev: v2.2.2 hooks: - id: codespell - args: ["-L", "ba,ihs,kake,nd,noe,nwo,te", "-S", "fixture"] + args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/docs/release.rst b/docs/release.rst index 01c1a2f895..3105e2c67f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -41,6 +41,9 @@ Bug fixes * Fix the conda-forge error. Read :issue:`1347` for detailed info. By :user:`Josh Moore ` :issue:`1364` and :issue:`1367`. +* Fix ``ReadOnlyError`` when opening V3 store via fsspec reference file system. + By :user:`Joe Hamman ` :issue:`1383`. + .. 
_release_2.14.2: 2.14.2 diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 5f8964fb5d..094deed02e 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -618,12 +618,10 @@ def _normalize_store_arg_v3(store: Any, storage_options=None, mode="r") -> BaseS # return N5StoreV3(store) else: store = DirectoryStoreV3(store) - # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) - return store else: store = StoreV3._ensure_store(store) - if 'zarr.json' not in store: - # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + + if 'zarr.json' not in store: + # add default zarr.json metadata + store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) return store diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index 418f7d506b..3e9c0a05f7 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -596,6 +596,16 @@ def test_normalize_store_arg_v3(tmpdir): store = normalize_store_arg(fsspec.get_mapper("file://" + path), zarr_version=3) assert isinstance(store, FSStoreV3) + # regression for https://github.com/zarr-developers/zarr-python/issues/1382 + # contents of zarr.json are not important for this test + out = {"version": 1, "refs": {"zarr.json": "{...}"}} + store = normalize_store_arg( + "reference://", + storage_options={"fo": out, "remote_protocol": "memory"}, + zarr_version=3 + ) + assert isinstance(store, FSStoreV3) + fn = tmpdir.join('store.n5') with pytest.raises(NotImplementedError): normalize_store_arg(str(fn), zarr_version=3, mode='w') From 8f11656959c920099d8a6dec5c0abf4663a862b5 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Fri, 14 Apr 2023 05:27:49 +0530 Subject: [PATCH 0303/1078] Update release.rst for 2.15.0 (#1392) --- docs/release.rst | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index 3105e2c67f..06d656fa46 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -27,7 +27,13 @@ Enhancements By :user:`Andreas Albert ` :issue:`1029`. * Getitems supports ``meta_array``. - By :user: 'Mads R. B. Kristensen ' :issue:`1131`. + By :user:`Mads R. B. Kristensen ` :issue:`1131`. + +Maintenance +~~~~~~~~~~~ + +* Remove ``codecov`` from GitHub actions. + By :user:`John A. Kirkham ` :issue:`1391`. Documentation ~~~~~~~~~~~~~ From bb35962953fdacfbcf0a0dfa43ec6fddba0433d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Apr 2023 16:20:12 +0200 Subject: [PATCH 0304/1078] Bump actions/setup-python from 4.5.0 to 4.6.0 (#1399) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.5.0 to 4.6.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4.5.0...v4.6.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... 
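A note on the store-normalization change from PR #1383, shown just above: the default ``zarr.json`` hierarchy metadata is now written only when it is actually missing, so a pre-populated read-only store opens without error. A minimal sketch of the scenario covered by the regression test (it assumes ``fsspec`` with its reference filesystem implementation is installed, and that ``normalize_store_arg`` is imported from ``zarr.storage``; the ``refs`` mapping is a placeholder, not real hierarchy metadata)::

    from zarr.storage import normalize_store_arg

    # Kerchunk-style reference set; as in the regression test, the actual
    # contents of zarr.json are unimportant for the normalization step.
    refs = {"version": 1, "refs": {"zarr.json": "{...}"}}

    store = normalize_store_arg(
        "reference://",
        storage_options={"fo": refs, "remote_protocol": "memory"},
        zarr_version=3,
    )
    # zarr.json is already present, so normalization attempts no write and
    # the read-only reference store no longer raises ReadOnlyError.
    assert "zarr.json" in store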
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index be7a3b19cc..97547491f8 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v4.5.0 + - uses: actions/setup-python@v4.6.0 name: Install Python with: python-version: '3.8' From 86a54d17e114542b34dae10e5eda1b594d40bf99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Apr 2023 16:20:26 +0200 Subject: [PATCH 0305/1078] Bump pytest from 7.2.2 to 7.3.1 (#1393) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.2.2 to 7.3.1. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.2.2...7.3.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 1217ee620e..e4ada6385b 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==7.1.0 # test requirements -pytest==7.2.2 +pytest==7.3.1 From a392e30e1d3ba525c28ad6d1131bae761de6f4aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 09:24:20 +0200 Subject: [PATCH 0306/1078] Bump azure-storage-blob from 12.15.0 to 12.16.0 (#1403) Bumps [azure-storage-blob](https://github.com/Azure/azure-sdk-for-python) from 12.15.0 to 12.16.0. - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases) - [Changelog](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/esrp_release.md) - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-storage-blob_12.15.0...azure-storage-blob_12.16.0) --- updated-dependencies: - dependency-name: azure-storage-blob dependency-type: direct:development update-type: version-update:semver-minor ... 
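Returning to the ``getitems``/``contexts`` protocol from PR #1131 earlier in this series: ``ConstantMap`` exists so the core can hand every requested chunk key the same ``Context`` without allocating one mapping per key. A small usage sketch, built only from the classes shown in the diffs above::

    from zarr.context import Context
    from zarr.util import ConstantMap

    # One shared Context for all chunk keys; ConstantMap keeps the keys in a
    # frozenset and returns the identical constant object for each of them.
    ctx = Context({"meta_array": None})
    contexts = ConstantMap(keys=["0", "1", "2"], constant=ctx)

    assert contexts["0"] is contexts["2"]  # same object, no per-key copies
    assert len(contexts) == 3
    assert "3" not in contexts             # membership is the frozen key set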
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 3ee7cbe5d3..f24fb8dc40 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -7,7 +7,7 @@ ipywidgets==8.0.6 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.15.0 # pyup: ignore +azure-storage-blob==12.16.0 # pyup: ignore redis==4.5.3 types-redis types-setuptools From c12ee031767827a4d8f3c92d98476257fb2b3707 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 09:24:33 +0200 Subject: [PATCH 0307/1078] Bump numpy from 1.24.2 to 1.24.3 (#1402) Bumps [numpy](https://github.com/numpy/numpy) from 1.24.2 to 1.24.3. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.24.2...v1.24.3) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index e094d4fcd4..a6135bd831 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.24.2 +numpy==1.24.3 From 1deee736f8be537027bbc0c867dd50952ff40979 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 2 May 2023 09:47:53 +0200 Subject: [PATCH 0308/1078] Mark for 2.15.0a1 pre-release (#1404) * Mark for 2.15.0a1 pre-release * Move warning down --- docs/release.rst | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 06d656fa46..cdf4622fe5 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -6,20 +6,19 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. -.. _unreleased: + .. _unreleased: -Unreleased ----------- - -.. - # .. warning:: - # Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. + Unreleased + ---------- .. _release_2.15.0: 2.15.0 ------ +.. warning:: + Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. + Enhancements ~~~~~~~~~~~~ From a2e864c14dbf2f7ab2ddba61f90fed8e38406b72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 11:01:15 +0200 Subject: [PATCH 0309/1078] Bump fsspec from 2023.3.0 to 2023.4.0 (#1387) * Bump fsspec from 2023.3.0 to 2023.4.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.3.0 to 2023.4.0. 
- [Release notes](https://github.com/fsspec/filesystem_spec/releases) - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.3.0...2023.4.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update s3fs as well --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f24fb8dc40..306225ea99 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.8.0 -fsspec==2023.3.0 -s3fs==2023.3.0 +fsspec==2023.4.0 +s3fs==2023.4.0 moto[server]>=4.0.8 From d54f25c460f8835a0ec9a7b4bc3482159a5608f9 Mon Sep 17 00:00:00 2001 From: James Bourbeau Date: Wed, 3 May 2023 12:24:54 -0500 Subject: [PATCH 0310/1078] Avoid deprecated `product` in ``numpy=1.25`` (#1405) * Avoid deprecated product in numpy=1.25 * Add changelog for np.prod --------- Co-authored-by: Josh Moore --- docs/release.rst | 3 +++ zarr/tests/test_core.py | 4 ++-- zarr/util.py | 6 +++--- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index cdf4622fe5..7442e519e8 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -34,6 +34,9 @@ Maintenance * Remove ``codecov`` from GitHub actions. By :user:`John A. Kirkham ` :issue:`1391`. +* Replace ``np.product`` with ``np.prod`` due to deprecation. + By :user:`James Bourbeau ` :issue:`1405`. + Documentation ~~~~~~~~~~~~~ diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index ba89db3b06..1cac51ba0d 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -1479,7 +1479,7 @@ def test_iter(self): ) for shape, chunks in params: z = self.create_array(shape=shape, chunks=chunks, dtype=int) - a = np.arange(np.product(shape)).reshape(shape) + a = np.arange(np.prod(shape)).reshape(shape) z[:] = a for expect, actual in zip_longest(a, z): assert_array_equal(expect, actual) @@ -1500,7 +1500,7 @@ def test_islice(self): ) for shape, chunks, start, end in params: z = self.create_array(shape=shape, chunks=chunks, dtype=int) - a = np.arange(np.product(shape)).reshape(shape) + a = np.arange(np.prod(shape)).reshape(shape) z[:] = a end_array = min(end, a.shape[0]) for expect, actual in zip_longest(a[start:end_array], diff --git a/zarr/util.py b/zarr/util.py index 68a238fbe4..b661f5f6b4 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -111,7 +111,7 @@ def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: # Determine the optimal chunk size in bytes using a PyTables expression. # This is kept as a float. - dset_size = np.product(chunks)*typesize + dset_size = np.prod(chunks)*typesize target_size = CHUNK_BASE * (2**np.log10(dset_size/(1024.*1024))) if target_size > CHUNK_MAX: @@ -126,14 +126,14 @@ def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: # 1b. We're within 50% of the target chunk size, AND # 2. 
The chunk is smaller than the maximum chunk size - chunk_bytes = np.product(chunks)*typesize + chunk_bytes = np.prod(chunks)*typesize if (chunk_bytes < target_size or abs(chunk_bytes-target_size)/target_size < 0.5) and \ chunk_bytes < CHUNK_MAX: break - if np.product(chunks) == 1: + if np.prod(chunks) == 1: break # Element size larger than CHUNK_MAX chunks[idx % ndims] = math.ceil(chunks[idx % ndims] / 2.0) From 25e6036414070d78ea6b7186427f6336b1d89c5e Mon Sep 17 00:00:00 2001 From: Alan Du Date: Thu, 4 May 2023 12:27:24 -0400 Subject: [PATCH 0311/1078] Fix `normalize_fill_value` for structured arrays (#1397) * Add failing test case for normalize_fill_value * Fix normalize_fill_value for structured arrays * Add changelog --------- Co-authored-by: Josh Moore --- docs/release.rst | 3 +++ zarr/tests/test_util.py | 1 + zarr/util.py | 2 +- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index 7442e519e8..83588bb3d7 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -52,6 +52,9 @@ Bug fixes * Fix ``ReadOnlyError`` when opening V3 store via fsspec reference file system. By :user:`Joe Hamman ` :issue:`1383`. +* Fix ``normalize_fill_value`` for structured arrays. + By :user:`Alan Du ` :issue:`1397`. + .. _release_2.14.2: 2.14.2 diff --git a/zarr/tests/test_util.py b/zarr/tests/test_util.py index 0a717b8f28..e01aa6711a 100644 --- a/zarr/tests/test_util.py +++ b/zarr/tests/test_util.py @@ -119,6 +119,7 @@ def test_normalize_fill_value(): structured_dtype = np.dtype([('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) expect = np.array((b'', 0, 0.), dtype=structured_dtype)[()] assert expect == normalize_fill_value(0, dtype=structured_dtype) + assert expect == normalize_fill_value(expect, dtype=structured_dtype) assert '' == normalize_fill_value(0, dtype=np.dtype('U1')) diff --git a/zarr/util.py b/zarr/util.py index b661f5f6b4..6ba20b96c2 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -295,7 +295,7 @@ def normalize_fill_value(fill_value, dtype: np.dtype): if fill_value is None or dtype.hasobject: # no fill value pass - elif fill_value == 0: + elif not isinstance(fill_value, np.void) and fill_value == 0: # this should be compatible across numpy versions for any array type, including # structured arrays fill_value = np.zeros((), dtype=dtype)[()] From 88f68a5320a13e6eb744fd30478c93fc41610409 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 7 May 2023 12:42:43 +0200 Subject: [PATCH 0312/1078] Merge isinstance calls (#1409) --- zarr/indexing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zarr/indexing.py b/zarr/indexing.py index 2f8144fd08..3fb3e2f204 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -116,7 +116,7 @@ def is_pure_orthogonal_indexing(selection, ndim): sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 and all( is_integer_list(elem) or is_integer_array(elem) - or isinstance(elem, slice) or isinstance(elem, int) for + or isinstance(elem, (int, slice)) for elem in selection) ) From b8db120c2743d2c188e0531a0f9be93de77396e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 09:28:07 +0200 Subject: [PATCH 0313/1078] Bump pypa/gh-action-pypi-publish from 1.8.3 to 1.8.6 (#1412) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.3 to 1.8.6. 
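On the ``normalize_fill_value`` fix from PR #1397 above: the ``fill_value == 0`` fast path is now guarded against ``np.void`` scalars, so a fill value that is already a normalized structured record passes through instead of tripping the comparison. A sketch of the behaviour the new test asserts, using the same structured dtype (plain NumPy plus ``zarr.util``)::

    import numpy as np
    from zarr.util import normalize_fill_value

    structured = np.dtype([('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')])
    expect = np.zeros((), dtype=structured)[()]  # an np.void scalar

    # Both the integer 0 and an existing structured scalar now normalize to
    # the same zeroed record.
    assert normalize_fill_value(0, dtype=structured) == expect
    assert normalize_fill_value(expect, dtype=structured) == expect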
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.3...v1.8.6) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 97547491f8..8b1d5ccb83 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.3 + - uses: pypa/gh-action-pypi-publish@v1.8.6 with: user: __token__ password: ${{ secrets.pypi_password }} From e1e290f0efe42444845c78c47d4b4db8f6907e96 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 09:38:58 +0200 Subject: [PATCH 0314/1078] Bump lmdb from 1.4.0 to 1.4.1 (#1384) Bumps [lmdb](https://github.com/jnwatson/py-lmdb) from 1.4.0 to 1.4.1. - [Release notes](https://github.com/jnwatson/py-lmdb/releases) - [Changelog](https://github.com/jnwatson/py-lmdb/blob/master/ChangeLog) - [Commits](https://github.com/jnwatson/py-lmdb/compare/py-lmdb_1.4.0...py-lmdb_1.4.1) --- updated-dependencies: - dependency-name: lmdb dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 306225ea99..a6a79a499a 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -1,6 +1,6 @@ # optional library requirements # bsddb3==6.2.6; sys_platform != 'win32' -lmdb==1.4.0; sys_platform != 'win32' +lmdb==1.4.1; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 ipywidgets==8.0.6 From 43fdc3dde64972c1a08b3d6d5b71433a05a7a5c2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 09:39:39 +0200 Subject: [PATCH 0315/1078] chore: update pre-commit hooks (#1366) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/codespell-project/codespell: v2.2.2 → v2.2.4](https://github.com/codespell-project/codespell/compare/v2.2.2...v2.2.4) - [github.com/pre-commit/mirrors-mypy: v1.0.1 → v1.2.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.0.1...v1.2.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a420662b5b..e090ddd6d8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ repos: exclude: ^(venv/|docs/) types: ['python'] - repo: https://github.com/codespell-project/codespell - rev: v2.2.2 + rev: v2.2.4 hooks: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo", "-S", "fixture"] @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.1 + rev: v1.2.0 hooks: - id: mypy files: zarr From 
90da1f35526083d6d605511a75a85cd150bd8afd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 11:30:37 +0200 Subject: [PATCH 0316/1078] Bump redis from 4.5.3 to 4.5.4 (#1380) Bumps [redis](https://github.com/redis/redis-py) from 4.5.3 to 4.5.4. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.5.3...v4.5.4) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a6a79a499a..91db666cae 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.6 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==4.5.3 +redis==4.5.4 types-redis types-setuptools pymongo==4.3.3 From e2ab17f03e2d4e5ceb1a0ba32a62019a4fb55721 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 22:00:41 +0200 Subject: [PATCH 0317/1078] Bump fsspec from 2023.4.0 to 2023.5.0 (#1411) * Bump fsspec from 2023.4.0 to 2023.5.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.4.0 to 2023.5.0. - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.4.0...2023.5.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Bumping s3fs as well --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 91db666cae..f5125e0c3f 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.12.1 pytest-timeout==2.1.0 h5py==3.8.0 -fsspec==2023.4.0 -s3fs==2023.4.0 +fsspec==2023.5.0 +s3fs==2023.5.0 moto[server]>=4.0.8 From 0891bf9c8049631000dab5934be77598c41c31f1 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 11 May 2023 03:21:09 -0400 Subject: [PATCH 0318/1078] test(ci): Run tests against python 3.11 (#1415) * test(ci): Run tests against python 3.11 * exclude numpy 1.20 for 3.11 * update release notes --- .github/workflows/python-package.yml | 4 +++- .github/workflows/windows-testing.yml | 2 +- docs/release.rst | 3 +++ pyproject.toml | 1 + 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index fb410762be..f8fe9ab379 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,11 +15,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11'] numpy_version: ['>=1.22.0', '==1.20.*'] exclude: - python-version: '3.10' numpy_version: '==1.20.*' + - python-version: '3.11' + numpy_version: '==1.20.*' services: redis: image: redis diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 2f8922b447..b17eece058 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -16,7 +16,7 @@ jobs: strategy: fail-fast: True matrix: - python-version: ['3.8', '3.9', '3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - uses: actions/checkout@v3 with: diff --git a/docs/release.rst b/docs/release.rst index 83588bb3d7..f6e6e614ae 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -37,6 +37,9 @@ Maintenance * Replace ``np.product`` with ``np.prod`` due to deprecation. By :user:`James Bourbeau ` :issue:`1405`. +* Activate Py 3.11 builds. + By :user:`Joe Hamman ` :issue:`1415`. + Documentation ~~~~~~~~~~~~~ diff --git a/pyproject.toml b/pyproject.toml index 3277e9da7c..4beb357bb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ classifiers = [ 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', ] license = { text = "MIT" } From 4fe109175845b4bbcb243583fc63e2548a53033e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 17:03:36 +0200 Subject: [PATCH 0319/1078] Bump redis from 4.5.4 to 4.5.5 (#1413) Bumps [redis](https://github.com/redis/redis-py) from 4.5.4 to 4.5.5. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.5.4...v4.5.5) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f5125e0c3f..5c4b6ac266 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.6 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==4.5.4 +redis==4.5.5 types-redis types-setuptools pymongo==4.3.3 From cc16d8c8cbafc2dd736f11b9c1ad28a58529e40d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 16 May 2023 13:10:10 +0200 Subject: [PATCH 0320/1078] chore: update pre-commit hooks (#1416) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.2.0 → v1.3.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.2.0...v1.3.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e090ddd6d8..194e8b1d5f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.2.0 + rev: v1.3.0 hooks: - id: mypy files: zarr From 3649b9b23d87cfd7141045bdf8684ecc1cdfbd90 Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Thu, 25 May 2023 21:23:49 +0200 Subject: [PATCH 0321/1078] `open_array()`: adding the `meta_array` argument (#1396) * open_array(): adding the meta_array argument * updated release.txt * doc: fixed versionadded --- docs/release.rst | 5 ++++- zarr/creation.py | 8 +++++++- zarr/tests/test_meta_array.py | 19 ++++++++++++++++++- 3 files changed, 29 insertions(+), 3 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index f6e6e614ae..833bfbf7ba 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -28,6 +28,9 @@ Enhancements * Getitems supports ``meta_array``. By :user:`Mads R. B. Kristensen ` :issue:`1131`. +* ``open_array()`` now takes the ``meta_array`` argument. + By :user:`Mads R. B. Kristensen ` :issue:`1396`. + Maintenance ~~~~~~~~~~~ @@ -176,7 +179,7 @@ Bug fixes Documentation ~~~~~~~~~~~~~ -* Fix minor indexing errors in tutorial and specification examples of documentation. +* Fix minor indexing errors in tutorial and specification examples of documentation. By :user:`Kola Babalola ` :issue:`1277`. * Add `requirements_rtfd.txt` in `contributing.rst`. diff --git a/zarr/creation.py b/zarr/creation.py index a6fa8e44cc..dc8b8a157d 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -424,6 +424,7 @@ def open_array( *, zarr_version=None, dimension_separator=None, + meta_array=None, **kwargs ): """Open an array using file-mode-like semantics. @@ -498,6 +499,11 @@ def open_array( ('/') format. If None, the appropriate value will be read from `store` when present. Otherwise, defaults to '.' when ``zarr_version == 2`` and `/` otherwise. + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. 
versionadded:: 2.15 Returns ------- @@ -607,7 +613,7 @@ def open_array( # instantiate array z = Array(store, read_only=read_only, synchronizer=synchronizer, cache_metadata=cache_metadata, cache_attrs=cache_attrs, path=path, - chunk_store=chunk_store, write_empty_chunks=write_empty_chunks) + chunk_store=chunk_store, write_empty_chunks=write_empty_chunks, meta_array=meta_array) return z diff --git a/zarr/tests/test_meta_array.py b/zarr/tests/test_meta_array.py index 6172af3be9..5ff6fae3f3 100644 --- a/zarr/tests/test_meta_array.py +++ b/zarr/tests/test_meta_array.py @@ -8,7 +8,7 @@ import zarr.codecs from zarr.core import Array -from zarr.creation import array, empty, full, ones, zeros +from zarr.creation import array, empty, full, ones, open_array, zeros from zarr.hierarchy import open_group from zarr.storage import DirectoryStore, MemoryStore, Store, ZipStore @@ -148,6 +148,23 @@ def test_array(tmp_path, module, compressor, store_type): assert z.dtype == z2.dtype xp.testing.assert_array_equal(z[:], z2[:]) + store = init_store(tmp_path / "open_array", store_type) + a = xp.arange(100) + z = open_array( + store, + shape=a.shape, + dtype=a.dtype, + chunks=10, + compressor=compressor, + meta_array=xp.empty(()) + ) + z[:] = a + assert a.shape == z.shape + assert a.dtype == z.dtype + assert isinstance(a, type(z[:])) + assert isinstance(z.meta_array, type(xp.empty(()))) + xp.testing.assert_array_equal(a, z[:]) + @pytest.mark.parametrize("module, compressor", param_module_and_compressor) def test_empty(module, compressor): From 4132f360616a4c8bfa3dd4e979a4793c5d84cdfc Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 29 May 2023 09:18:08 +0200 Subject: [PATCH 0322/1078] Fix typo introduced by dcce26e (#1420) --- docs/release.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index 833bfbf7ba..2f64454c97 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -293,7 +293,7 @@ Maintenance * Updated GitHub actions. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1134`. -* Uopdate web links: `http:// → https://`. +* Update web links: `http:// → https://`. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1313`. .. _release_2.13.3: From 5d7e287d23145b1f0f355b14e9111aa81c79cd22 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Wed, 14 Jun 2023 16:40:02 +0200 Subject: [PATCH 0323/1078] 2.15.0: Remove pre-release warning (#1419) --- docs/release.rst | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 2f64454c97..67f33a8770 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -1,6 +1,13 @@ Release notes ============= +.. + # Copy the warning statement _under_ the latest release version + # and unindent for pre-releases. + + .. warning:: + Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. + .. # Unindent the section between releases in order # to document your changes. On releases it will be @@ -16,9 +23,6 @@ Release notes 2.15.0 ------ -.. warning:: - Pre-release! Use :command:`pip install --pre zarr` to evaluate this release. 
- Enhancements ~~~~~~~~~~~~ From 77e985e888c17c2b52d11fba330a735b48e4733e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Jun 2023 08:45:19 +0200 Subject: [PATCH 0324/1078] Bump pytest-doctestplus from 0.12.1 to 0.13.0 (#1429) Bumps [pytest-doctestplus](https://github.com/astropy/pytest-doctestplus) from 0.12.1 to 0.13.0. - [Release notes](https://github.com/astropy/pytest-doctestplus/releases) - [Changelog](https://github.com/scientific-python/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/astropy/pytest-doctestplus/compare/v0.12.1...v0.13.0) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 5c4b6ac266..bb2fe63a86 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==4.3.3 # optional test requirements coverage pytest-cov==4.0.0 -pytest-doctestplus==0.12.1 +pytest-doctestplus==0.13.0 pytest-timeout==2.1.0 h5py==3.8.0 fsspec==2023.5.0 From 9081cc7f8e0b4f051a32b80999c7d011816790bf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Jun 2023 08:45:31 +0200 Subject: [PATCH 0325/1078] Bump pytest from 7.3.1 to 7.3.2 (#1432) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.3.1 to 7.3.2. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.1...7.3.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index e4ada6385b..df1ca11677 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==7.1.0 # test requirements -pytest==7.3.1 +pytest==7.3.2 From 139e1c465ba159b4fb41f793c49b6d5b02bb241d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Jun 2023 10:54:15 +0200 Subject: [PATCH 0326/1078] Bump fsspec from 2023.5.0 to 2023.6.0 (#1433) * Bump fsspec from 2023.5.0 to 2023.6.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.5.0 to 2023.6.0. - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.5.0...2023.6.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Also change s3fs --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index bb2fe63a86..2c909177dc 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.0.0 pytest-doctestplus==0.13.0 pytest-timeout==2.1.0 h5py==3.8.0 -fsspec==2023.5.0 -s3fs==2023.5.0 +fsspec==2023.6.0 +s3fs==2023.6.0 moto[server]>=4.0.8 From 67f25efc93306bf760807abde9f5e185c59b5219 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jun 2023 17:53:52 +0200 Subject: [PATCH 0327/1078] Bump h5py from 3.8.0 to 3.9.0 (#1440) Bumps [h5py](https://github.com/h5py/h5py) from 3.8.0 to 3.9.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.8.0...3.9.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 2c909177dc..60c2fffaac 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage pytest-cov==4.0.0 pytest-doctestplus==0.13.0 pytest-timeout==2.1.0 -h5py==3.8.0 +h5py==3.9.0 fsspec==2023.6.0 s3fs==2023.6.0 moto[server]>=4.0.8 From b3cafa96b88c362e7843df8e2a07499588b7fa49 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 21 Jun 2023 17:54:04 +0200 Subject: [PATCH 0328/1078] chore: update pre-commit hooks (#1438) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/codespell-project/codespell: v2.2.4 → v2.2.5](https://github.com/codespell-project/codespell/compare/v2.2.4...v2.2.5) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 194e8b1d5f..583a2b0184 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ repos: exclude: ^(venv/|docs/) types: ['python'] - repo: https://github.com/codespell-project/codespell - rev: v2.2.4 + rev: v2.2.5 hooks: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo", "-S", "fixture"] From 2713a89c13107a83843a76d5ead7f53351f8f973 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 25 Jun 2023 16:37:48 +0200 Subject: [PATCH 0329/1078] Bump pymongo from 4.3.3 to 4.4.0 (#1441) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.3.3 to 4.4.0. 
- [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.3.3...4.4.0) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 60c2fffaac..29bb9b9905 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.16.0 # pyup: ignore redis==4.5.5 types-redis types-setuptools -pymongo==4.3.3 +pymongo==4.4.0 # optional test requirements coverage pytest-cov==4.0.0 From 327b6942284283d0435d2513514f4712c71376b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 25 Jun 2023 16:38:01 +0200 Subject: [PATCH 0330/1078] Bump pytest-cov from 4.0.0 to 4.1.0 (#1418) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.0.0 to 4.1.0. - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 29bb9b9905..8e7b5c94ab 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -14,7 +14,7 @@ types-setuptools pymongo==4.4.0 # optional test requirements coverage -pytest-cov==4.0.0 +pytest-cov==4.1.0 pytest-doctestplus==0.13.0 pytest-timeout==2.1.0 h5py==3.9.0 From 2169c4345d65f0fcbe1ae190212110d1d67ec8ab Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 28 Jun 2023 06:01:59 -0700 Subject: [PATCH 0331/1078] test: replace pkg_resources with packaging.version for version parsing/comparison (#1450) --- zarr/tests/test_core.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 1cac51ba0d..1541943d22 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -8,6 +8,7 @@ from tempfile import mkdtemp import numpy as np +import packaging.version import pytest from numcodecs import (BZ2, JSON, LZ4, Blosc, Categorize, Delta, FixedScaleOffset, GZip, MsgPack, Pickle, VLenArray, @@ -15,7 +16,6 @@ from numcodecs.compat import ensure_bytes, ensure_ndarray from numcodecs.tests.common import greetings from numpy.testing import assert_array_almost_equal, assert_array_equal -from pkg_resources import parse_version import zarr from zarr._storage.store import ( @@ -1389,7 +1389,7 @@ def test_object_codec_warnings(self): z = self.create_array(shape=10, chunks=5, dtype="i4", object_codec=JSON()) z.store.close() - @unittest.skipIf(parse_version(np.__version__) < parse_version('1.14.0'), + @unittest.skipIf(packaging.version.parse(np.__version__) < packaging.version.parse('1.14.0'), "unsupported numpy version") def test_structured_array_contain_object(self): 
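PR #1450 above swaps ``pkg_resources.parse_version`` for ``packaging.version.parse``, which gives the same PEP 440 ordering without pulling in setuptools machinery at test time. A quick, zarr-independent illustration of the comparison semantics the ``skipIf`` marker relies on::

    import packaging.version

    # PEP 440 ordering: release components compare numerically ("1.9" sorts
    # before "1.14"), and pre-releases sort before the final release.
    assert packaging.version.parse("1.9.3") < packaging.version.parse("1.14.0")
    assert packaging.version.parse("1.14.0rc1") < packaging.version.parse("1.14.0")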
From cc2bd4122988b6bc58676df9a717834c09926ffc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 09:21:49 +0200 Subject: [PATCH 0332/1078] Bump pypa/gh-action-pypi-publish from 1.8.6 to 1.8.7 (#1451) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.6 to 1.8.7. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.6...v1.8.7) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 8b1d5ccb83..a00096bb18 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.6 + - uses: pypa/gh-action-pypi-publish@v1.8.7 with: user: __token__ password: ${{ secrets.pypi_password }} From 8c98f4518ea20251c8ef3258276860a3cbef9eeb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 17:04:57 +0200 Subject: [PATCH 0333/1078] Bump ipywidgets from 8.0.6 to 8.0.7 (#1452) --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 8e7b5c94ab..0398d8f494 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.4.1; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.0.6 +ipywidgets==8.0.7 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From ac897822bdf1ec95fe0c34f621ece67f02e07e1e Mon Sep 17 00:00:00 2001 From: Christoph Gohlke Date: Mon, 10 Jul 2023 15:03:47 -0700 Subject: [PATCH 0334/1078] Add __contains__ method to KVStore (#1454) * Add __contains__ method to KVStore * Update release notes --- docs/release.rst | 11 ++++++++--- zarr/storage.py | 3 +++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 67f33a8770..156ba7229c 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -13,10 +13,15 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. - .. _unreleased: +.. _unreleased: - Unreleased - ---------- +Unreleased +---------- + +Bug fixes +~~~~~~~~~ + +* Add ``__contains__`` method to ``KVStore``. By :user:`Christoph Gohlke ` :issue:`1454`. .. 
_release_2.15.0: diff --git a/zarr/storage.py b/zarr/storage.py index e6c3f62faf..ef1bd64955 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -744,6 +744,9 @@ def __setitem__(self, key, value): def __delitem__(self, key): del self._mutable_mapping[key] + def __contains__(self, key): + return key in self._mutable_mapping + def get(self, key, default=None): return self._mutable_mapping.get(key, default) From 98f74d5e362eff88d1022749e82d7bed9d62b7a2 Mon Sep 17 00:00:00 2001 From: Altay Sansal Date: Mon, 10 Jul 2023 17:05:55 -0500 Subject: [PATCH 0335/1078] Support Block (Chunk) Indexing (#1428) * add .venv to `.gitignore` * add block indexing capabilities * add release notes * fix docstrings * update tutorial * fix missing codecov hit for read-only arrays * add block selection to array tests * lint * move release notes to unreleased section * update block indexing "as of" to 2.16 --------- Co-authored-by: Altay Sansal --- .gitignore | 1 + docs/api/core.rst | 2 + docs/release.rst | 3 + docs/tutorial.rst | 78 ++++++++++++++ zarr/core.py | 210 ++++++++++++++++++++++++++++++++++-- zarr/indexing.py | 88 +++++++++++++++ zarr/tests/test_core.py | 9 +- zarr/tests/test_indexing.py | 180 ++++++++++++++++++++++++++++++- 8 files changed, 558 insertions(+), 13 deletions(-) diff --git a/.gitignore b/.gitignore index 28e5544286..4f0d523785 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ __pycache__/ # Distribution / packaging .Python env/ +.venv/ build/ develop-eggs/ dist/ diff --git a/docs/api/core.rst b/docs/api/core.rst index 5789fb996b..c4075fdb30 100644 --- a/docs/api/core.rst +++ b/docs/api/core.rst @@ -10,6 +10,8 @@ The Array class (``zarr.core``) .. automethod:: set_basic_selection .. automethod:: get_mask_selection .. automethod:: set_mask_selection + .. automethod:: get_block_selection + .. automethod:: set_block_selection .. automethod:: get_coordinate_selection .. automethod:: set_coordinate_selection .. automethod:: get_orthogonal_selection diff --git a/docs/release.rst b/docs/release.rst index 156ba7229c..e8d1c440d1 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -23,6 +23,9 @@ Bug fixes * Add ``__contains__`` method to ``KVStore``. By :user:`Christoph Gohlke ` :issue:`1454`. + * **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. + By :user:`Altay Sansal ` :issue:`1428` + .. _release_2.15.0: 2.15.0 diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 0f2e1c7345..e3155acfae 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -641,6 +641,84 @@ orthogonal indexing is also available directly on the array: >>> all(z.oindex[[0, 2], :] == z[[0, 2], :]) True +Block Indexing +~~~~~~~~~~~~~~ + +As of version 2.16.0, Zarr also support block indexing, which allows +selections of whole chunks based on their logical indices along each dimension +of an array. For example, this allows selecting a subset of chunk aligned rows and/or +columns from a 2-dimensional array. 
E.g.:: + + >>> import zarr + >>> import numpy as np + >>> z = zarr.array(np.arange(100).reshape(10, 10), chunks=(3, 3)) + +Retrieve items by specifying their block coordinates:: + + >>> z.get_block_selection(1) + array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], + [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]]) + +Equivalent slicing:: + + >>> z[3:6] + array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], + [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]]) + + +For convenience, the block selection functionality is also available via the +`blocks` property, e.g.:: + + >>> z.blocks[1] + array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], + [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]]) + +Block index arrays may be multidimensional to index multidimensional arrays. +For example:: + + >>> z.blocks[0, 1:3] + array([[ 3, 4, 5, 6, 7, 8], + [13, 14, 15, 16, 17, 18], + [23, 24, 25, 26, 27, 28]]) + +Data can also be modified. Let's start by a simple 2D array:: + + >>> import zarr + >>> import numpy as np + >>> z = zarr.zeros((6, 6), dtype=int, chunks=2) + +Set data for a selection of items:: + + >>> z.set_block_selection((1, 0), 1) + >>> z[...] + array([[0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0]]) + +For convenience, this functionality is also available via the ``blocks`` property. +E.g.:: + + >>> z.blocks[:, 2] = 7 + >>> z[...] + array([[0, 0, 0, 0, 7, 7], + [0, 0, 0, 0, 7, 7], + [1, 1, 0, 0, 7, 7], + [1, 1, 0, 0, 7, 7], + [0, 0, 0, 0, 7, 7], + [0, 0, 0, 0, 7, 7]]) + +Any combination of integer and slice can be used for block indexing:: + + >>> z.blocks[2, 1:3] + array([[0, 0, 7, 7], + [0, 0, 7, 7]]) + Indexing fields in structured arrays ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/zarr/core.py b/zarr/core.py index 5537733b4b..80f424bafc 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -22,6 +22,8 @@ OIndex, OrthogonalIndexer, VIndex, + BlockIndex, + BlockIndexer, PartialChunkIterator, check_fields, check_no_multi_fields, @@ -139,6 +141,7 @@ class Array: info vindex oindex + blocks write_empty_chunks meta_array @@ -154,6 +157,8 @@ class Array: set_mask_selection get_coordinate_selection set_coordinate_selection + get_block_selection + set_block_selection digest hexdigest resize @@ -230,6 +235,7 @@ def __init__( # initialize indexing helpers self._oindex = OIndex(self) self._vindex = VIndex(self) + self._blocks = BlockIndex(self) def _load_metadata(self): """(Re)load metadata from store.""" @@ -577,6 +583,12 @@ def vindex(self): :func:`set_mask_selection` for documentation and examples.""" return self._vindex + @property + def blocks(self): + """Shortcut for blocked chunked indexing, see :func:`get_block_selection` and + :func:`set_block_selection` for documentation and examples.""" + return self._blocks + @property def write_empty_chunks(self) -> bool: """A Boolean, True if chunks composed of the array's fill value @@ -814,7 +826,8 @@ def __getitem__(self, selection): -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, - set_orthogonal_selection, vindex, oindex, __setitem__ + set_orthogonal_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __setitem__ """ fields, pure_selection = pop_fields(selection) @@ -933,7 +946,8 @@ def get_basic_selection(self, 
selection=Ellipsis, out=None, fields=None): -------- set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, - set_orthogonal_selection, vindex, oindex, __getitem__, __setitem__ + set_orthogonal_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ @@ -1089,7 +1103,8 @@ def get_orthogonal_selection(self, selection, out=None, fields=None): -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, set_orthogonal_selection, - vindex, oindex, __getitem__, __setitem__ + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ @@ -1160,7 +1175,8 @@ def get_coordinate_selection(self, selection, out=None, fields=None): -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_orthogonal_selection, set_orthogonal_selection, set_coordinate_selection, - vindex, oindex, __getitem__, __setitem__ + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ @@ -1185,6 +1201,90 @@ def get_coordinate_selection(self, selection, out=None, fields=None): return out + def get_block_selection(self, selection, out=None, fields=None): + """Retrieve a selection of individual chunk blocks, by providing the indices + (coordinates) for each chunk block. + + Parameters + ---------- + selection : tuple + An integer (coordinate) or slice for each dimension of the array. + out : ndarray, optional + If given, load the selected data directly into this array. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to + extract data for. + + Returns + ------- + out : ndarray + A NumPy array containing the data for the requested selection. + + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> import numpy as np + >>> z = zarr.array(np.arange(100).reshape(10, 10), chunks=(3, 3)) + + Retrieve items by specifying their block coordinates:: + + >>> z.get_block_selection((1, slice(None))) + array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], + [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]]) + + Which is equivalent to:: + + >>> z[3:6, :] + array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], + [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]]) + + For convenience, the block selection functionality is also available via the + `blocks` property, e.g.:: + + >>> z.blocks[1] + array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], + [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]]) + + Notes + ----- + Block indexing is a convenience indexing method to work on individual chunks + with chunk index slicing. It has the same concept as Dask's `Array.blocks` + indexing. + + Slices are supported. However, only with a step size of one. + + Block index arrays may be multidimensional to index multidimensional arrays. 
+ For example:: + + >>> z.blocks[0, 1:3] + array([[ 3, 4, 5, 6, 7, 8], + [13, 14, 15, 16, 17, 18], + [23, 24, 25, 26, 27, 28]]) + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, + set_coordinate_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ + if not self._cache_metadata: + self._load_metadata() + + # check args + check_fields(fields, self._dtype) + + # setup indexer + indexer = BlockIndexer(selection, self) + + return self._get_selection(indexer=indexer, out=out, fields=fields) + def get_mask_selection(self, selection, out=None, fields=None): """Retrieve a selection of individual items, by providing a Boolean array of the same shape as the array against which the selection is being made, where True @@ -1238,8 +1338,8 @@ def get_mask_selection(self, selection, out=None, fields=None): -------- get_basic_selection, set_basic_selection, set_mask_selection, get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, - set_coordinate_selection, vindex, oindex, __getitem__, __setitem__ - + set_coordinate_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ # refresh metadata @@ -1376,7 +1476,8 @@ def __setitem__(self, selection, value): -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, - set_orthogonal_selection, vindex, oindex, __getitem__ + set_orthogonal_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__ """ fields, pure_selection = pop_fields(selection) @@ -1464,7 +1565,8 @@ def set_basic_selection(self, selection, value, fields=None): -------- get_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, - set_orthogonal_selection, vindex, oindex, __getitem__, __setitem__ + set_orthogonal_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ @@ -1555,7 +1657,8 @@ def set_orthogonal_selection(self, selection, value, fields=None): -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, - vindex, oindex, __getitem__, __setitem__ + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ @@ -1627,7 +1730,8 @@ def set_coordinate_selection(self, selection, value, fields=None): -------- get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, - vindex, oindex, __getitem__, __setitem__ + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ """ @@ -1654,6 +1758,89 @@ def set_coordinate_selection(self, selection, value, fields=None): self._set_selection(indexer, value, fields=fields) + def set_block_selection(self, selection, value, fields=None): + """Modify a selection of individual blocks, by providing the chunk indices + (coordinates) for each block to be modified. + + Parameters + ---------- + selection : tuple + An integer (coordinate) or slice for each dimension of the array. + value : scalar or array-like + Value to be stored into the array. 
+        fields : str or sequence of str, optional
+            For arrays with a structured dtype, one or more fields can be specified to set
+            data for.
+
+        Examples
+        --------
+        Set up a 2-dimensional array::
+
+            >>> import zarr
+            >>> import numpy as np
+            >>> z = zarr.zeros((6, 6), dtype=int, chunks=2)
+
+        Set data for a selection of items::
+
+            >>> z.set_block_selection((1, 0), 1)
+            >>> z[...]
+            array([[0, 0, 0, 0, 0, 0],
+                   [0, 0, 0, 0, 0, 0],
+                   [1, 1, 0, 0, 0, 0],
+                   [1, 1, 0, 0, 0, 0],
+                   [0, 0, 0, 0, 0, 0],
+                   [0, 0, 0, 0, 0, 0]])
+
+        For convenience, this functionality is also available via the ``blocks`` property.
+        E.g.::
+
+            >>> z.blocks[2, 1] = 4
+            >>> z[...]
+            array([[0, 0, 0, 0, 0, 0],
+                   [0, 0, 0, 0, 0, 0],
+                   [1, 1, 0, 0, 0, 0],
+                   [1, 1, 0, 0, 0, 0],
+                   [0, 0, 4, 4, 0, 0],
+                   [0, 0, 4, 4, 0, 0]])
+
+            >>> z.blocks[:, 2] = 7
+            >>> z[...]
+            array([[0, 0, 0, 0, 7, 7],
+                   [0, 0, 0, 0, 7, 7],
+                   [1, 1, 0, 0, 7, 7],
+                   [1, 1, 0, 0, 7, 7],
+                   [0, 0, 4, 4, 7, 7],
+                   [0, 0, 4, 4, 7, 7]])
+
+        Notes
+        -----
+        Block indexing is a convenience indexing method to work on individual chunks
+        with chunk index slicing. It follows the same concept as Dask's `Array.blocks`
+        indexing.
+
+        Slices are supported, but only with a step size of one.
+
+        See Also
+        --------
+        get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection,
+        get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection,
+        set_coordinate_selection, get_block_selection,
+        vindex, oindex, blocks, __getitem__, __setitem__
+
+        """
+        # guard conditions
+        if self._read_only:
+            raise ReadOnlyError()
+
+        # refresh metadata
+        if not self._cache_metadata:
+            self._load_metadata_nosync()
+
+        # setup indexer
+        indexer = BlockIndexer(selection, self)
+
+        self._set_selection(indexer, value, fields=fields)
+
     def set_mask_selection(self, selection, value, fields=None):
         """Modify a selection of individual items, by providing a Boolean array of the
         same shape as the array against which the selection is being made, where True
@@ -1712,7 +1899,8 @@ def set_mask_selection(self, selection, value, fields=None):
         --------
         get_basic_selection, set_basic_selection, get_mask_selection,
         get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection,
-        set_coordinate_selection, vindex, oindex, __getitem__, __setitem__
+        set_coordinate_selection, get_block_selection, set_block_selection,
+        vindex, oindex, blocks, __getitem__, __setitem__
 
         """
 
diff --git a/zarr/indexing.py b/zarr/indexing.py
index 3fb3e2f204..bc2afba992 100644
--- a/zarr/indexing.py
+++ b/zarr/indexing.py
@@ -695,6 +695,94 @@ def __setitem__(self, selection, value):
         return self.array.set_orthogonal_selection(selection, value, fields=fields)
 
 
+# noinspection PyProtectedMember
+class BlockIndexer:
+
+    def __init__(self, selection, array):
+
+        # handle ellipsis
+        selection = replace_ellipsis(selection, array._shape)
+
+        # normalize list to array
+        selection = replace_lists(selection)
+
+        # setup per-dimension indexers
+        dim_indexers = []
+        for dim_sel, dim_len, dim_chunk_size in \
+                zip(selection, array._shape, array._chunks):
+            dim_numchunks = int(np.ceil(dim_len / dim_chunk_size))
+
+            if is_integer(dim_sel):
+                if dim_sel < 0:
+                    dim_sel = dim_numchunks + dim_sel
+
+                start = dim_sel * dim_chunk_size
+                stop = start + dim_chunk_size
+                slice_ = slice(start, stop)
+
+            elif is_slice(dim_sel):
+                start = dim_sel.start if dim_sel.start is not None else 0
+                stop = dim_sel.stop if dim_sel.stop is not None else dim_numchunks
+
+                if dim_sel.step not in {1, None}:
+                    raise IndexError('unsupported selection item for block indexing; '
+                                     'expected integer or slice with step=1, got {!r}'
+                                     .format(type(dim_sel)))
+
+                # Can't reuse wraparound_indices because it expects a numpy array
+                # We have integers here.
+                if start < 0:
+                    start = dim_numchunks + start
+                if stop < 0:
+                    stop = dim_numchunks + stop
+
+                start = start * dim_chunk_size
+                stop = stop * dim_chunk_size
+                slice_ = slice(start, stop)
+
+            else:
+                raise IndexError('unsupported selection item for block indexing; '
+                                 'expected integer or slice, got {!r}'
+                                 .format(type(dim_sel)))
+
+            dim_indexer = SliceDimIndexer(slice_, dim_len, dim_chunk_size)
+            dim_indexers.append(dim_indexer)
+
+            if start >= dim_len or start < 0:
+                raise BoundsCheckError(dim_len)
+
+        self.dim_indexers = dim_indexers
+        self.shape = tuple(s.nitems for s in self.dim_indexers)
+        self.drop_axes = None
+
+    def __iter__(self):
+        for dim_projections in itertools.product(*self.dim_indexers):
+            chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections)
+            chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections)
+            out_selection = tuple(p.dim_out_sel for p in dim_projections
+                                  if p.dim_out_sel is not None)
+
+            yield ChunkProjection(chunk_coords, chunk_selection, out_selection)
+
+
+class BlockIndex:
+
+    def __init__(self, array):
+        self.array = array
+
+    def __getitem__(self, selection):
+        fields, selection = pop_fields(selection)
+        selection = ensure_tuple(selection)
+        selection = replace_lists(selection)
+        return self.array.get_block_selection(selection, fields=fields)
+
+    def __setitem__(self, selection, value):
+        fields, selection = pop_fields(selection)
+        selection = ensure_tuple(selection)
+        selection = replace_lists(selection)
+        return self.array.set_block_selection(selection, value, fields=fields)
+
+
 # noinspection PyProtectedMember
 def is_coordinate_selection(selection, array):
     return (
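To make the block-to-slice arithmetic in ``BlockIndexer`` above concrete: an
integer block index selects exactly one chunk, and a slice of block indices
(step one only) selects a contiguous run of chunks; both are translated into
ordinary array slices and handed to ``SliceDimIndexer``. The following is a
minimal, self-contained sketch of that arithmetic for a single dimension (it
is not part of the patch, and the helper name ``block_to_slice`` is invented
for illustration)::

    import math

    dim_len, dim_chunk_size = 1050, 100
    dim_numchunks = math.ceil(dim_len / dim_chunk_size)  # 11 chunks in total

    def block_to_slice(dim_sel):
        # an integer block index selects a single chunk, with negative wraparound
        if isinstance(dim_sel, int):
            if dim_sel < 0:
                dim_sel += dim_numchunks
            return slice(dim_sel * dim_chunk_size, (dim_sel + 1) * dim_chunk_size)
        # a slice of block indices selects a contiguous run of chunks (step 1 only)
        start = dim_sel.start if dim_sel.start is not None else 0
        stop = dim_sel.stop if dim_sel.stop is not None else dim_numchunks
        if start < 0:
            start += dim_numchunks
        if stop < 0:
            stop += dim_numchunks
        return slice(start * dim_chunk_size, stop * dim_chunk_size)

    assert block_to_slice(2) == slice(200, 300)             # third chunk
    assert block_to_slice(-1) == slice(1000, 1100)          # last chunk; clipped to 1050 later
    assert block_to_slice(slice(2, 4)) == slice(200, 400)   # chunks 2 and 3

The tests that follow exercise exactly these projections, pairing each block
selection with the equivalent array slice.
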
diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py
index 1541943d22..ab1a6e8aa7 100644
--- a/zarr/tests/test_core.py
+++ b/zarr/tests/test_core.py
@@ -338,6 +338,8 @@ def test_array_1d_selections(self):
         assert_array_equal(a[bix], z.get_mask_selection(bix))
         assert_array_equal(a[bix], z.oindex[bix])
         assert_array_equal(a[bix], z.vindex[bix])
+        assert_array_equal(a[200:400], z.get_block_selection(slice(2, 4)))
+        assert_array_equal(a[200:400], z.blocks[2:4])
 
         # set
         z.set_orthogonal_selection(slice(50, 150), 1)
@@ -358,7 +360,10 @@ def test_array_1d_selections(self):
         assert_array_equal(8, z.vindex[bix])
         z.oindex[bix] = 9
         assert_array_equal(9, z.oindex[bix])
-
+        z.set_block_selection(slice(2, 4), 10)
+        assert_array_equal(10, z[200:400])
+        z.blocks[2:4] = 11
+        assert_array_equal(11, z[200:400])
         z.store.close()
 
     # noinspection PyStatementEffect
@@ -810,6 +815,8 @@ def test_read_only(self):
             z.set_coordinate_selection([0, 1, 2], 42)
         with pytest.raises(PermissionError):
            z.vindex[[0, 1, 2]] = 42
+        with pytest.raises(PermissionError):
+            z.blocks[...]
= 42 with pytest.raises(PermissionError): z.set_mask_selection(np.ones(z.shape, dtype=bool), 42) diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index f5f57be010..61e76c63da 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -1096,7 +1096,6 @@ def _test_set_coordinate_selection(v, a, z, selection): def test_set_coordinate_selection_1d(): - # setup v = np.arange(1050, dtype=int) a = np.empty(v.shape, dtype=v.dtype) @@ -1154,6 +1153,185 @@ def test_set_coordinate_selection_2d(): _test_set_coordinate_selection(v, a, z, (ix0, ix1)) +def _test_get_block_selection(a, z, selection, expected_idx): + expect = a[expected_idx] + actual = z.get_block_selection(selection) + assert_array_equal(expect, actual) + actual = z.blocks[selection] + assert_array_equal(expect, actual) + + +block_selections_1d = [ + # test single item + 0, + 5, + # test wraparound + -1, + -4, + # test slice + slice(5), + slice(None, 3), + slice(5, 6), + slice(-3, -1), + slice(None), # Full slice +] + +block_selections_1d_array_projection = [ + # test single item + slice(100), + slice(500, 600), + # test wraparound + slice(1000, None), + slice(700, 800), + # test slice + slice(500), + slice(None, 300), + slice(500, 600), + slice(800, 1000), + slice(None), +] + +block_selections_1d_bad = [ + # slice not supported + slice(3, 8, 2), + # bad stuff + 2.3, + 'foo', + b'xxx', + None, + (0, 0), + (slice(None), slice(None)), + [0, 5, 3] +] + + +def test_get_block_selection_1d(): + # setup + a = np.arange(1050, dtype=int) + z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) + z[:] = a + + for selection, expected_idx in \ + zip(block_selections_1d, block_selections_1d_array_projection): + _test_get_block_selection(a, z, selection, expected_idx) + + bad_selections = block_selections_1d_bad + [ + z.nchunks + 1, # out of bounds + -(z.nchunks + 1), # out of bounds + ] + + for selection in bad_selections: + with pytest.raises(IndexError): + z.get_block_selection(selection) + with pytest.raises(IndexError): + z.blocks[selection] + + +block_selections_2d = [ + # test single item + (0, 0), + (1, 2), + # test wraparound + (-1, -1), + (-3, -2), + # test slice + (slice(1), slice(2)), + (slice(None, 2), slice(-2, -1)), + (slice(2, 3), slice(-2, None)), + (slice(-3, -1), slice(-3, -2)), + (slice(None), slice(None)), # Full slice +] + +block_selections_2d_array_projection = [ + # test single item + (slice(300), slice(3)), + (slice(300, 600), slice(6, 9)), + # test wraparound + (slice(900, None), slice(9, None)), + (slice(300, 600), slice(6, 9)), + # test slice + (slice(300), slice(6)), + (slice(None, 600), slice(6, 9)), + (slice(600, 900), slice(6, None)), + (slice(300, 900), slice(3, 6)), + (slice(None), slice(None)), # Full slice +] + + +def test_get_block_selection_2d(): + # setup + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) + z[:] = a + + for selection, expected_idx in \ + zip(block_selections_2d, block_selections_2d_array_projection): + _test_get_block_selection(a, z, selection, expected_idx) + + with pytest.raises(IndexError): + selection = slice(5, 15), [1, 2, 3] + z.get_block_selection(selection) + with pytest.raises(IndexError): + selection = Ellipsis, [1, 2, 3] + z.get_block_selection(selection) + with pytest.raises(IndexError): # out of bounds + selection = slice(15, 20), slice(None) + z.get_block_selection(selection) + + +def _test_set_block_selection(v: np.ndarray, a: np.ndarray, z: zarr.Array, selection, 
expected_idx): + for value in 42, v[expected_idx], v[expected_idx].tolist(): + # setup expectation + a[:] = 0 + a[expected_idx] = value + # test long-form API + z[:] = 0 + z.set_block_selection(selection, value) + assert_array_equal(a, z[:]) + # test short-form API + z[:] = 0 + z.blocks[selection] = value + assert_array_equal(a, z[:]) + + +def test_set_block_selection_1d(): + # setup + v = np.arange(1050, dtype=int) + a = np.empty(v.shape, dtype=v.dtype) + z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) + + for selection, expected_idx in \ + zip(block_selections_1d, block_selections_1d_array_projection): + _test_set_block_selection(v, a, z, selection, expected_idx) + + for selection in block_selections_1d_bad: + with pytest.raises(IndexError): + z.set_block_selection(selection, 42) + with pytest.raises(IndexError): + z.blocks[selection] = 42 + + +def test_set_block_selection_2d(): + # setup + v = np.arange(10000, dtype=int).reshape(1000, 10) + a = np.empty(v.shape, dtype=v.dtype) + z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) + + for selection, expected_idx in \ + zip(block_selections_2d, block_selections_2d_array_projection): + _test_set_block_selection(v, a, z, selection, expected_idx) + + with pytest.raises(IndexError): + selection = slice(5, 15), [1, 2, 3] + z.set_block_selection(selection, 42) + with pytest.raises(IndexError): + selection = Ellipsis, [1, 2, 3] + z.set_block_selection(selection, 42) + with pytest.raises(IndexError): # out of bounds + selection = slice(15, 20), slice(None) + z.set_block_selection(selection, 42) + + def _test_get_mask_selection(a, z, selection): expect = a[selection] actual = z.get_mask_selection(selection) From aa5db9f7ea688c392bd2036fd3f64516059a695b Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 10 Jul 2023 15:51:10 -0700 Subject: [PATCH 0336/1078] [V3] Allow for incomplete codec metadata using numcodecs.get_codec (#1447) * refactor(v3): Allow for incomplete codec metadata using numcodecs.get_codec * add test * lint * add release note --------- Co-authored-by: Ryan Abernathey --- docs/release.rst | 10 +++++++--- zarr/meta.py | 20 ++++++++------------ zarr/tests/test_meta.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 15 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index e8d1c440d1..46bd1f025d 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,10 +18,14 @@ Release notes Unreleased ---------- -Bug fixes -~~~~~~~~~ +Enhancements +~~~~~~~~~~~~ + +* Allow for partial codec specification in V3 array metadata. + By :user:`Joe Hamman ` :issue:`1443`. -* Add ``__contains__`` method to ``KVStore``. By :user:`Christoph Gohlke ` :issue:`1454`. +* Add ``__contains__`` method to ``KVStore``. + By :user:`Christoph Gohlke ` :issue:`1454`. * **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. 
  By :user:`Altay Sansal` :issue:`1428`
 
diff --git a/zarr/meta.py b/zarr/meta.py
index 59c56abf3d..aacffd7f77 100644
--- a/zarr/meta.py
+++ b/zarr/meta.py
@@ -441,26 +441,22 @@ def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]:
         uri = 'https://purl.org/zarr/spec/codec/'
         conf = meta['configuration']
         if meta['codec'].startswith(uri + 'gzip/'):
-            codec = numcodecs.GZip(level=conf['level'])
+            conf["id"] = "gzip"
         elif meta['codec'].startswith(uri + 'zlib/'):
-            codec = numcodecs.Zlib(level=conf['level'])
+            conf["id"] = "zlib"
         elif meta['codec'].startswith(uri + 'blosc/'):
-            codec = numcodecs.Blosc(clevel=conf['clevel'],
-                                    shuffle=conf['shuffle'],
-                                    blocksize=conf['blocksize'],
-                                    cname=conf['cname'])
+            conf["id"] = "blosc"
         elif meta['codec'].startswith(uri + 'bz2/'):
-            codec = numcodecs.BZ2(level=conf['level'])
+            conf["id"] = "bz2"
         elif meta['codec'].startswith(uri + 'lz4/'):
-            codec = numcodecs.LZ4(acceleration=conf['acceleration'])
+            conf["id"] = "lz4"
         elif meta['codec'].startswith(uri + 'lzma/'):
-            codec = numcodecs.LZMA(format=conf['format'],
-                                   check=conf['check'],
-                                   preset=conf['preset'],
-                                   filters=conf['filters'])
+            conf["id"] = "lzma"
         else:
             raise NotImplementedError
 
+        codec = numcodecs.get_codec(conf)
+
         return codec
 
     @classmethod
diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py
index 8acd634a13..a78375986e 100644
--- a/zarr/tests/test_meta.py
+++ b/zarr/tests/test_meta.py
@@ -314,6 +314,34 @@ def test_encode_decode_array_dtype_shape_v3():
     assert 'filters' not in meta_dec
 
 
+@pytest.mark.parametrize("comp_id", ["gzip", "zlib", "blosc", "bz2", "lz4", "lzma"])
+def test_decode_metadata_implicit_compressor_config_v3(comp_id):
+    meta = {
+        "attributes": {},
+        "chunk_grid": {
+            "chunk_shape": [10],
+            "separator": "/",
+            "type": "regular"
+        },
+        "chunk_memory_layout": "C",
+        "compressor": {
+            "codec": f"https://purl.org/zarr/spec/codec/{comp_id}/1.0",
+            "configuration": {
+                # intentionally left empty
+            }
+        },
+        "data_type": "<f8",
+        "extensions": [],
+        "fill_value": 0,
+        "shape": [100]
+    }
+
+    codec = Metadata3._decode_codec_metadata(meta["compressor"])
+    assert codec is not None
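With this change, a codec configuration only needs its ``id``:
``numcodecs.get_codec`` constructs the codec and falls back to the codec's own
defaults for any parameters omitted from the configuration dict. A minimal
sketch of that behaviour (independent of the patch above; the printed repr may
vary by numcodecs version)::

    import numcodecs

    # only the codec id is supplied; level, shuffle, etc. fall back to the
    # codec's own defaults
    codec = numcodecs.get_codec({"id": "blosc"})
    print(codec)  # e.g. Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0)
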
Date: Wed, 12 Jul 2023 09:54:21 -0400
Subject: [PATCH 0337/1078] style: add ruff and black to pre-commit

---
 .pre-commit-config.yaml | 19 +++++++++++--------
 pyproject.toml          | 36 ++++++++++++++++++++++++++++++++++++
 2 files changed, 47 insertions(+), 8 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 583a2b0184..55e0fc617a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,15 +5,18 @@ default_stages: [commit, push]
 default_language_version:
   python: python3
 repos:
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    # Ruff version.
+    rev: 'v0.0.224'
     hooks:
-      - id: flake8
-        args: [
-            --max-line-length=100
-        ]
-        exclude: ^(venv/|docs/)
-        types: ['python']
+      - id: ruff
+        # Respect `exclude` and `extend-exclude` settings.
+        args: ["--force-exclude"]
+  - repo: https://github.com/psf/black
+    rev: 22.12.0
+    hooks:
+      - id: black
+        language_version: python3.8
   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.5
     hooks:
diff --git a/pyproject.toml b/pyproject.toml
index 4beb357bb0..4b293b90e4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -72,6 +72,42 @@ version_scheme = "guess-next-dev"
 local_scheme = "dirty-tag"
 write_to = "zarr/version.py"
 
+[tool.ruff]
+line-length = 100
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".ruff_cache",
+    ".venv",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "venv",
+    "docs"
+]
+
+[tool.black]
+line-length = 100
+exclude = '''
+/(
+    \.git
+  | \.mypy_cache
+  | \.venv
+  | _build
+  | buck-out
+  | build
+  | dist
+  | docs
+)/
+'''
+
 [tool.mypy]
 python_version = "3.8"
 ignore_missing_imports = true

From 94cdd1ab492416783a525b5f2d119be59537ab4a Mon Sep 17 00:00:00 2001
From: Davis Vann Bennett
Date: Wed, 12 Jul 2023 22:15:34 -0400
Subject: [PATCH 0338/1078] style: tweak codespell config to avoid a false
 positive

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 55e0fc617a..c46115342d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -21,7 +21,7 @@ repos:
     rev: v2.2.5
     hooks:
       - id: codespell
-        args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo", "-S", "fixture"]
+        args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo,zar", "-S", "fixture"]
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
     hooks:

From 4e348d6b80c96da461fd866576c971b8a659ba15 Mon Sep 17 00:00:00 2001
From: Davis Vann Bennett
Date: Wed, 12 Jul 2023 22:16:05 -0400
Subject: [PATCH 0339/1078] style: lint the codebase

---
 bench/compress_normal.py                 |   25 +-
 docs/conf.py                             |  197 ++-
 zarr/__init__.py                         |   69 +-
 zarr/_storage/absstore.py                |   87 +-
 zarr/_storage/store.py                   |  110 +-
 zarr/_storage/v3.py                      |  197 ++-
 zarr/_storage/v3_storage_transformers.py |   75 +-
 zarr/attrs.py                            |   31 +-
 zarr/context.py                          |    4 +-
 zarr/convenience.py                      |  338 +++--
 zarr/core.py                             |  311 ++--
 zarr/creation.py                         |  266 ++--
 zarr/errors.py                           |    5 +-
 zarr/hierarchy.py                        |  487 ++++---
 zarr/indexing.py                         |  230 ++-
 zarr/meta.py                             |   59 +-
 zarr/meta_v1.py                          |   40 +-
 zarr/n5.py                               |  345 +++--
 zarr/storage.py                          |  661 +++++----
 zarr/tests/test_attrs.py                 |  277 ++--
 zarr/tests/test_convenience.py           |  664 +++++----
 zarr/tests/test_core.py                  | 1702 ++++++++++++----------
 zarr/tests/test_creation.py              |  287 ++--
 zarr/tests/test_dim_separator.py         |   41 +-
 zarr/tests/test_filters.py               |   57 +-
 zarr/tests/test_hierarchy.py             | 1142 ++++++++-------
 zarr/tests/test_indexing.py              |  425 +++---
 zarr/tests/test_info.py                  |   53 +-
 zarr/tests/test_meta.py                  |  333 +++--
 zarr/tests/test_meta_array.py            |    2 +-
 zarr/tests/test_n5.py                    |   15 +-
 zarr/tests/test_storage.py               | 1639 ++++++++++-----------
 zarr/tests/test_storage_v3.py            |  379 ++---
 zarr/tests/test_sync.py                  |  184 ++-
 zarr/tests/test_util.py                  |  116 +-
 zarr/tests/util.py                       |   24 +-
 zarr/util.py                             |  269 ++--
 37 files changed, 5856 insertions(+), 5290 deletions(-)

diff --git a/bench/compress_normal.py b/bench/compress_normal.py
index ce0a05b9ec..9f1655541c 100644
--- a/bench/compress_normal.py
+++ b/bench/compress_normal.py
@@ -9,36 +9,39 @@
 
 if __name__ == "__main__":
 
-    sys.path.insert(0, '..')
+    sys.path.insert(0, "..")
 
     # setup
-    a = np.random.normal(2000, 1000, size=200000000).astype('u2')
-    z = zarr.empty_like(a, chunks=1000000,
-                        compression='blosc',
-                        compression_opts=dict(cname='lz4', clevel=5,
shuffle=2)) + a = np.random.normal(2000, 1000, size=200000000).astype("u2") + z = zarr.empty_like( + a, + chunks=1000000, + compression="blosc", + compression_opts=dict(cname="lz4", clevel=5, shuffle=2), + ) print(z) - print('*' * 79) + print("*" * 79) # time - t = timeit.repeat('z[:] = a', repeat=10, number=1, globals=globals()) + t = timeit.repeat("z[:] = a", repeat=10, number=1, globals=globals()) print(t) print(min(t)) print(z) # profile profile = line_profiler.LineProfiler(blosc.compress) - profile.run('z[:] = a') + profile.run("z[:] = a") profile.print_stats() - print('*' * 79) + print("*" * 79) # time - t = timeit.repeat('z[:]', repeat=10, number=1, globals=globals()) + t = timeit.repeat("z[:]", repeat=10, number=1, globals=globals()) print(t) print(min(t)) # profile profile = line_profiler.LineProfiler(blosc.decompress) - profile.run('z[:]') + profile.run("z[:]") profile.print_stats() diff --git a/docs/conf.py b/docs/conf.py index 413d648732..f85ecb7454 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,50 +26,50 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('..')) +sys.path.append(os.path.abspath("..")) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.viewcode', - 'sphinx.ext.intersphinx', - 'numpydoc', - 'sphinx_issues', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.viewcode", + "sphinx.ext.intersphinx", + "numpydoc", + "sphinx_issues", "sphinx_copybutton", - "sphinx_design" + "sphinx_design", ] numpydoc_show_class_members = False numpydoc_class_members_toctree = False -issues_github_path = 'zarr-developers/zarr-python' +issues_github_path = "zarr-developers/zarr-python" # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The main toctree document. -main_doc = 'index' +main_doc = "index" # General information about the project. -project = 'zarr' -copyright = '2022, Zarr Developers' -author = 'Zarr Developers' +project = "zarr" +copyright = "2022, Zarr Developers" +author = "Zarr Developers" version = zarr.__version__ # The full version, including alpha/beta/rc tags. @@ -80,42 +80,42 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'talks'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "talks"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -125,181 +125,174 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'pydata_sphinx_theme' +html_theme = "pydata_sphinx_theme" -html_favicon = '_static/logo1.png' +html_favicon = "_static/logo1.png" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "github_url": "https://github.com/zarr-developers/zarr-python", - "twitter_url": "https://twitter.com/zarr_dev", - "icon_links": [ - { - "name": "Zarr Dev", - "url": "https://zarr.dev/", - "icon": "_static/logo1.png", - "type": "local" - }, - ], - "collapse_navigation": True + "github_url": "https://github.com/zarr-developers/zarr-python", + "twitter_url": "https://twitter.com/zarr_dev", + "icon_links": [ + { + "name": "Zarr Dev", + "url": "https://zarr.dev/", + "icon": "_static/logo1.png", + "type": "local", + }, + ], + "collapse_navigation": True, } # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. # " v documentation" by default. -#html_title = 'zarr v@@' +# html_title = 'zarr v@@' # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = '_static/logo1.png' +html_logo = "_static/logo1.png" # Add custom css def setup(app): - app.add_css_file('custom.css') + app.add_css_file("custom.css") # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['_static'] +html_static_path = ["_static"] html_js_files = [ - 'custom.js', + "custom.js", ] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. -#html_last_updated_fmt = None +# html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'zarrdoc' +htmlhelp_basename = "zarrdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. 
+ #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (main_doc, 'zarr.tex', 'Zarr-Python', - author, 'manual'), + (main_doc, "zarr.tex", "Zarr-Python", author, "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (main_doc, 'zarr', 'Zarr-Python', - [author], 1) -] +man_pages = [(main_doc, "zarr", "Zarr-Python", [author], 1)] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -308,30 +301,36 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (main_doc, 'zarr', 'Zarr-Python', - author, 'zarr', 'One line description of project.', - 'Miscellaneous'), + ( + main_doc, + "zarr", + "Zarr-Python", + author, + "zarr", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. 
# use in refs e.g: # :ref:`comparison manual ` intersphinx_mapping = { - 'python': ('https://docs.python.org/', None), - 'numpy': ('https://numpy.org/doc/stable/', None), + "python": ("https://docs.python.org/", None), + "numpy": ("https://numpy.org/doc/stable/", None), } diff --git a/zarr/__init__.py b/zarr/__init__.py index 4d2c992dbf..6cecb40af8 100644 --- a/zarr/__init__.py +++ b/zarr/__init__.py @@ -1,20 +1,53 @@ # flake8: noqa from zarr.codecs import * -from zarr.convenience import (consolidate_metadata, copy, copy_all, copy_store, - load, open, open_consolidated, save, save_array, - save_group, tree) +from zarr.convenience import ( + consolidate_metadata, + copy, + copy_all, + copy_store, + load, + open, + open_consolidated, + save, + save_array, + save_group, + tree, +) from zarr.core import Array -from zarr.creation import (array, create, empty, empty_like, full, full_like, - ones, ones_like, open_array, open_like, zeros, - zeros_like) +from zarr.creation import ( + array, + create, + empty, + empty_like, + full, + full_like, + ones, + ones_like, + open_array, + open_like, + zeros, + zeros_like, +) from zarr.errors import CopyError, MetadataError from zarr.hierarchy import Group, group, open_group from zarr.n5 import N5Store, N5FSStore from zarr._storage.store import v3_api_available -from zarr.storage import (ABSStore, DBMStore, DictStore, DirectoryStore, - KVStore, LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, - NestedDirectoryStore, RedisStore, SQLiteStore, - TempStore, ZipStore) +from zarr.storage import ( + ABSStore, + DBMStore, + DictStore, + DirectoryStore, + KVStore, + LMDBStore, + LRUStoreCache, + MemoryStore, + MongoDBStore, + NestedDirectoryStore, + RedisStore, + SQLiteStore, + TempStore, + ZipStore, +) from zarr.sync import ProcessSynchronizer, ThreadSynchronizer from zarr.version import version as __version__ @@ -22,6 +55,16 @@ assert not __version__.startswith("0.0.0") if v3_api_available: - from zarr._storage.v3 import (ABSStoreV3, DBMStoreV3, KVStoreV3, DirectoryStoreV3, - LMDBStoreV3, LRUStoreCacheV3, MemoryStoreV3, MongoDBStoreV3, - RedisStoreV3, SQLiteStoreV3, ZipStoreV3) + from zarr._storage.v3 import ( + ABSStoreV3, + DBMStoreV3, + KVStoreV3, + DirectoryStoreV3, + LMDBStoreV3, + LRUStoreCacheV3, + MemoryStoreV3, + MongoDBStoreV3, + RedisStoreV3, + SQLiteStoreV3, + ZipStoreV3, + ) diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index cc41018f9e..f62529f096 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -6,7 +6,7 @@ from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, Store, StoreV3 __doctest_requires__ = { - ('ABSStore', 'ABSStore.*'): ['azure.storage.blob'], + ("ABSStore", "ABSStore.*"): ["azure.storage.blob"], } @@ -58,12 +58,18 @@ class ABSStore(Store): ----- In order to use this store, you must install the Microsoft Azure Storage SDK for Python, ``azure-storage-blob>=12.5.0``. 
- """ - - def __init__(self, container=None, prefix='', account_name=None, account_key=None, - blob_service_kwargs=None, dimension_separator=None, - client=None, - ): + """ # noqa: E501 + + def __init__( + self, + container=None, + prefix="", + account_name=None, + account_key=None, + blob_service_kwargs=None, + dimension_separator=None, + client=None, + ): self._dimension_separator = dimension_separator self.prefix = normalize_storage_path(prefix) if client is None: @@ -75,11 +81,14 @@ def __init__(self, container=None, prefix='', account_name=None, account_key=Non ) warnings.warn(msg, FutureWarning, stacklevel=2) from azure.storage.blob import ContainerClient + blob_service_kwargs = blob_service_kwargs or {} client = ContainerClient( - "https://{}.blob.core.windows.net/".format(account_name), container, - credential=account_key, **blob_service_kwargs - ) + "https://{}.blob.core.windows.net/".format(account_name), + container, + credential=account_key, + **blob_service_kwargs + ) self.client = client self._container = container @@ -88,8 +97,10 @@ def __init__(self, container=None, prefix='', account_name=None, account_key=Non @staticmethod def _warn_deprecated(property_): - msg = ("The {} property is deprecated and will be removed in a future " - "version. Get the property from 'ABSStore.client' instead.") + msg = ( + "The {} property is deprecated and will be removed in a future " + "version. Get the property from 'ABSStore.client' instead." + ) warnings.warn(msg.format(property_), FutureWarning, stacklevel=3) @property @@ -108,10 +119,10 @@ def account_key(self): return self._account_key def _append_path_to_prefix(self, path): - if self.prefix == '': + if self.prefix == "": return normalize_storage_path(path) else: - return '/'.join([self.prefix, normalize_storage_path(path)]) + return "/".join([self.prefix, normalize_storage_path(path)]) @staticmethod def _strip_prefix_from_path(path, prefix): @@ -119,17 +130,18 @@ def _strip_prefix_from_path(path, prefix): path_norm = normalize_storage_path(path) prefix_norm = normalize_storage_path(prefix) if prefix: - return path_norm[(len(prefix_norm)+1):] + return path_norm[(len(prefix_norm) + 1) :] else: return path_norm def __getitem__(self, key): from azure.core.exceptions import ResourceNotFoundError + blob_name = self._append_path_to_prefix(key) try: return self.client.download_blob(blob_name).readall() except ResourceNotFoundError: - raise KeyError('Blob %s not found' % blob_name) + raise KeyError("Blob %s not found" % blob_name) def __setitem__(self, key, value): value = ensure_bytes(value) @@ -138,16 +150,17 @@ def __setitem__(self, key, value): def __delitem__(self, key): from azure.core.exceptions import ResourceNotFoundError + try: self.client.delete_blob(self._append_path_to_prefix(key)) except ResourceNotFoundError: - raise KeyError('Blob %s not found' % key) + raise KeyError("Blob %s not found" % key) def __eq__(self, other): return ( - isinstance(other, ABSStore) and - self.client == other.client and - self.prefix == other.prefix + isinstance(other, ABSStore) + and self.client == other.client + and self.prefix == other.prefix ) def keys(self): @@ -155,7 +168,7 @@ def keys(self): def __iter__(self): if self.prefix: - list_blobs_prefix = self.prefix + '/' + list_blobs_prefix = self.prefix + "/" else: list_blobs_prefix = None for blob in self.client.list_blobs(list_blobs_prefix): @@ -171,17 +184,17 @@ def __contains__(self, key): def listdir(self, path=None): dir_path = normalize_storage_path(self._append_path_to_prefix(path)) if 
dir_path: - dir_path += '/' + dir_path += "/" items = [ self._strip_prefix_from_path(blob.name, dir_path) - for blob in self.client.walk_blobs(name_starts_with=dir_path, delimiter='/') + for blob in self.client.walk_blobs(name_starts_with=dir_path, delimiter="/") ] return items def rmdir(self, path=None): dir_path = normalize_storage_path(self._append_path_to_prefix(path)) if dir_path: - dir_path += '/' + dir_path += "/" for blob in self.client.list_blobs(name_starts_with=dir_path): self.client.delete_blob(blob) @@ -197,11 +210,11 @@ def getsize(self, path=None): return blob_client.get_blob_properties().size else: size = 0 - if fs_path == '': + if fs_path == "": fs_path = None - elif not fs_path.endswith('/'): - fs_path += '/' - for blob in self.client.walk_blobs(name_starts_with=fs_path, delimiter='/'): + elif not fs_path.endswith("/"): + fs_path += "/" + for blob in self.client.walk_blobs(name_starts_with=fs_path, delimiter="/"): blob_client = self.client.get_blob_client(blob) if blob_client.exists(): size += blob_client.get_blob_properties().size @@ -212,15 +225,14 @@ def clear(self): class ABSStoreV3(ABSStore, StoreV3): - def list(self): return list(self.keys()) def __eq__(self, other): return ( - isinstance(other, ABSStoreV3) and - self.client == other.client and - self.prefix == other.prefix + isinstance(other, ABSStoreV3) + and self.client == other.client + and self.prefix == other.prefix ) def __setitem__(self, key, value): @@ -234,24 +246,24 @@ def rmdir(self, path=None): # If we disallow an empty path then we will need to modify # TestABSStoreV3 to have the create_store method use a prefix. - ABSStore.rmdir(self, '') + ABSStore.rmdir(self, "") return meta_dir = meta_root + path - meta_dir = meta_dir.rstrip('/') + meta_dir = meta_dir.rstrip("/") ABSStore.rmdir(self, meta_dir) # remove data folder data_dir = data_root + path - data_dir = data_dir.rstrip('/') + data_dir = data_dir.rstrip("/") ABSStore.rmdir(self, data_dir) # remove metadata files sfx = _get_metadata_suffix(self) - array_meta_file = meta_dir + '.array' + sfx + array_meta_file = meta_dir + ".array" + sfx if array_meta_file in self: del self[array_meta_file] - group_meta_file = meta_dir + '.group' + sfx + group_meta_file = meta_dir + ".group" + sfx if group_meta_file in self: del self[group_meta_file] @@ -259,6 +271,7 @@ def rmdir(self, path=None): # For now, calling the generic keys-based _getsize def getsize(self, path=None): from zarr.storage import _getsize # avoid circular import + return _getsize(self, path) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 0594dc22de..8daedae48f 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -11,17 +11,17 @@ from zarr.context import Context # v2 store keys -array_meta_key = '.zarray' -group_meta_key = '.zgroup' -attrs_key = '.zattrs' +array_meta_key = ".zarray" +group_meta_key = ".zgroup" +attrs_key = ".zattrs" # v3 paths -meta_root = 'meta/root/' -data_root = 'data/root/' +meta_root = "meta/root/" +data_root = "data/root/" DEFAULT_ZARR_VERSION = 2 -v3_api_available = os.environ.get('ZARR_V3_EXPERIMENTAL_API', '0').lower() not in ['0', 'false'] +v3_api_available = os.environ.get("ZARR_V3_EXPERIMENTAL_API", "0").lower() not in ["0", "false"] def assert_zarr_v3_api_available(): @@ -229,11 +229,11 @@ def _validate_key(self, key: str): ): raise ValueError("keys starts with unexpected value: `{}`".format(key)) - if key.endswith('/'): + if key.endswith("/"): raise ValueError("keys may not end in /") def list_prefix(self, prefix): - if 
prefix.startswith('/'): + if prefix.startswith("/"): raise ValueError("prefix must not begin with /") # TODO: force prefix to end with /? return [k for k in self.list() if k.startswith(prefix)] @@ -294,8 +294,7 @@ def supports_efficient_get_partial_values(self): return False def get_partial_values( - self, - key_ranges: Sequence[Tuple[str, Tuple[int, Optional[int]]]] + self, key_ranges: Sequence[Tuple[str, Tuple[int, Optional[int]]]] ) -> List[Union[bytes, memoryview, bytearray]]: """Get multiple partial values. key_ranges can be an iterable of key, range pairs, @@ -306,11 +305,9 @@ def get_partial_values( from the end of the file. A key may occur multiple times with different ranges. Inserts None for missing keys into the returned list.""" - results: List[Union[bytes, memoryview, bytearray]] = ( - [None] * len(key_ranges) # type: ignore[list-item] - ) - indexed_ranges_by_key: Dict[str, List[Tuple[int, Tuple[int, Optional[int]]]]] = ( - defaultdict(list) + results: List[Union[bytes, memoryview, bytearray]] = [None] * len(key_ranges) # type: ignore[list-item] # noqa: E501 + indexed_ranges_by_key: Dict[str, List[Tuple[int, Tuple[int, Optional[int]]]]] = defaultdict( + list ) for i, (key, range_) in enumerate(key_ranges): indexed_ranges_by_key[key].append((i, range_)) @@ -323,7 +320,7 @@ def get_partial_values( if range_length is None: results[i] = value[range_from:] else: - results[i] = value[range_from:range_from + range_length] + results[i] = value[range_from : range_from + range_length] return results def supports_efficient_set_partial_values(self): @@ -356,7 +353,7 @@ def set_partial_values(self, key_start_values): if start < 0: values[key][start:] = value else: - values[key][start:start + len(value)] = value + values[key][start : start + len(value)] = value for key, value in values.items(): self[key] = value @@ -377,14 +374,13 @@ def _ensure_store(store): We'll do this conversion in a few places automatically """ from zarr._storage.v3 import KVStoreV3 # avoid circular import + if store is None: return None elif isinstance(store, StoreV3): return store elif isinstance(store, Store): - raise ValueError( - f"cannot initialize a v3 store with a v{store._store_version} store" - ) + raise ValueError(f"cannot initialize a v3 store with a v{store._store_version} store") elif isinstance(store, MutableMapping): return KVStoreV3(store) else: @@ -444,10 +440,7 @@ def get_config(self): # Override in sub-class if need special encoding of config values. # By default, assume all non-private members are configuration # parameters except for type . - return { - k: v for k, v in self.__dict__.items() - if not k.startswith('_') and k != "type" - } + return {k: v for k, v in self.__dict__.items() if not k.startswith("_") and k != "type"} @classmethod def from_config(cls, _type, config): @@ -460,18 +453,18 @@ def from_config(cls, _type, config): @property def inner_store(self) -> Union["StorageTransformer", StoreV3]: - assert self._inner_store is not None, ( - "inner_store is not initialized, first get a copy via _copy_for_array." - ) + assert ( + self._inner_store is not None + ), "inner_store is not initialized, first get a copy via _copy_for_array." 
return self._inner_store # The following implementations are usually fine to keep as-is: def __eq__(self, other): return ( - type(self) == type(other) and - self._inner_store == other._inner_store and - self.get_config() == other.get_config() + type(self) == type(other) + and self._inner_store == other._inner_store + and self.get_config() == other.get_config() ) def erase(self, key): @@ -561,42 +554,41 @@ def set_partial_values(self, key_start_values): def _path_to_prefix(path: Optional[str]) -> str: # assume path already normalized if path: - prefix = path + '/' + prefix = path + "/" else: - prefix = '' + prefix = "" return prefix def _get_hierarchy_metadata(store: StoreV3) -> Mapping[str, Any]: - version = getattr(store, '_store_version', 2) + version = getattr(store, "_store_version", 2) if version < 3: - raise ValueError("zarr.json hierarchy metadata not stored for " - f"zarr v{version} stores") - if 'zarr.json' not in store: + raise ValueError("zarr.json hierarchy metadata not stored for " f"zarr v{version} stores") + if "zarr.json" not in store: raise ValueError("zarr.json metadata not found in store") - return store._metadata_class.decode_hierarchy_metadata(store['zarr.json']) + return store._metadata_class.decode_hierarchy_metadata(store["zarr.json"]) def _get_metadata_suffix(store: StoreV3) -> str: - if 'zarr.json' in store: - return _get_hierarchy_metadata(store)['metadata_key_suffix'] - return '.json' + if "zarr.json" in store: + return _get_hierarchy_metadata(store)["metadata_key_suffix"] + return ".json" def _rename_metadata_v3(store: StoreV3, src_path: str, dst_path: str) -> bool: """Rename source or group metadata file associated with src_path.""" any_renamed = False sfx = _get_metadata_suffix(store) - src_path = src_path.rstrip('/') - dst_path = dst_path.rstrip('/') - _src_array_json = meta_root + src_path + '.array' + sfx + src_path = src_path.rstrip("/") + dst_path = dst_path.rstrip("/") + _src_array_json = meta_root + src_path + ".array" + sfx if _src_array_json in store: - new_key = meta_root + dst_path + '.array' + sfx + new_key = meta_root + dst_path + ".array" + sfx store[new_key] = store.pop(_src_array_json) any_renamed = True - _src_group_json = meta_root + src_path + '.group' + sfx + _src_group_json = meta_root + src_path + ".group" + sfx if _src_group_json in store: - new_key = meta_root + dst_path + '.group' + sfx + new_key = meta_root + dst_path + ".group" + sfx store[new_key] = store.pop(_src_group_json) any_renamed = True return any_renamed @@ -606,7 +598,7 @@ def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None: # assume path already normalized src_prefix = _path_to_prefix(src_path) dst_prefix = _path_to_prefix(dst_path) - version = getattr(store, '_store_version', 2) + version = getattr(store, "_store_version", 2) if version == 2: for key in list(store.keys()): if key.startswith(src_prefix): @@ -618,7 +610,7 @@ def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None: _src_prefix = root_prefix + src_prefix _dst_prefix = root_prefix + dst_prefix for key in store.list_prefix(_src_prefix): # type: ignore - new_key = _dst_prefix + key[len(_src_prefix):] + new_key = _dst_prefix + key[len(_src_prefix) :] store[new_key] = store.pop(key) any_renamed = True any_meta_renamed = _rename_metadata_v3(store, src_path, dst_path) # type: ignore @@ -639,20 +631,20 @@ def _rmdir_from_keys(store: StoreLike, path: Optional[str] = None) -> None: def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: meta_dir = meta_root + 
path - meta_dir = meta_dir.rstrip('/') + meta_dir = meta_dir.rstrip("/") _rmdir_from_keys(store, meta_dir) # remove data folder data_dir = data_root + path - data_dir = data_dir.rstrip('/') + data_dir = data_dir.rstrip("/") _rmdir_from_keys(store, data_dir) # remove metadata files sfx = _get_metadata_suffix(store) - array_meta_file = meta_dir + '.array' + sfx + array_meta_file = meta_dir + ".array" + sfx if array_meta_file in store: store.erase(array_meta_file) # type: ignore - group_meta_file = meta_dir + '.group' + sfx + group_meta_file = meta_dir + ".group" + sfx if group_meta_file in store: store.erase(group_meta_file) # type: ignore @@ -663,8 +655,8 @@ def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str children = set() for key in list(store.keys()): if key.startswith(prefix) and len(key) > len(prefix): - suffix = key[len(prefix):] - child = suffix.split('/')[0] + suffix = key[len(prefix) :] + child = suffix.split("/")[0] children.add(child) return sorted(children) @@ -675,7 +667,7 @@ def _prefix_to_array_key(store: StoreLike, prefix: str) -> str: if prefix: key = meta_root + prefix.rstrip("/") + ".array" + sfx else: - key = meta_root[:-1] + '.array' + sfx + key = meta_root[:-1] + ".array" + sfx else: key = prefix + array_meta_key return key @@ -685,9 +677,9 @@ def _prefix_to_group_key(store: StoreLike, prefix: str) -> str: if getattr(store, "_store_version", 2) == 3: sfx = _get_metadata_suffix(store) # type: ignore if prefix: - key = meta_root + prefix.rstrip('/') + ".group" + sfx + key = meta_root + prefix.rstrip("/") + ".group" + sfx else: - key = meta_root[:-1] + '.group' + sfx + key = meta_root[:-1] + ".group" + sfx else: key = prefix + group_meta_key return key @@ -698,9 +690,9 @@ def _prefix_to_attrs_key(store: StoreLike, prefix: str) -> str: # for v3, attributes are stored in the array metadata sfx = _get_metadata_suffix(store) # type: ignore if prefix: - key = meta_root + prefix.rstrip('/') + ".array" + sfx + key = meta_root + prefix.rstrip("/") + ".array" + sfx else: - key = meta_root[:-1] + '.array' + sfx + key = meta_root[:-1] + ".array" + sfx else: key = prefix + attrs_key return key diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 094deed02e..1a50265c11 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -9,44 +9,60 @@ MetadataError, ReadOnlyError, ) -from zarr.util import (buffer_size, json_loads, normalize_storage_path) +from zarr.util import buffer_size, json_loads, normalize_storage_path from zarr._storage.absstore import ABSStoreV3 # noqa: F401 -from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 - _get_metadata_suffix, - _listdir_from_keys, - _rename_from_keys, - _rename_metadata_v3, - _rmdir_from_keys, - _rmdir_from_keys_v3, - _path_to_prefix, - _prefix_to_array_key, - _prefix_to_group_key, - array_meta_key, - attrs_key, - data_root, - group_meta_key, - meta_root, - BaseStore, - Store, - StoreV3) -from zarr.storage import (DBMStore, ConsolidatedMetadataStore, DirectoryStore, FSStore, KVStore, - LMDBStore, LRUStoreCache, MemoryStore, MongoDBStore, RedisStore, - SQLiteStore, ZipStore, _getsize) +from zarr._storage.store import ( # noqa: F401 + _get_hierarchy_metadata, + _get_metadata_suffix, + _listdir_from_keys, + _rename_from_keys, + _rename_metadata_v3, + _rmdir_from_keys, + _rmdir_from_keys_v3, + _path_to_prefix, + _prefix_to_array_key, + _prefix_to_group_key, + array_meta_key, + attrs_key, + data_root, + group_meta_key, + meta_root, + BaseStore, + Store, + StoreV3, +) +from zarr.storage 
import ( + DBMStore, + ConsolidatedMetadataStore, + DirectoryStore, + FSStore, + KVStore, + LMDBStore, + LRUStoreCache, + MemoryStore, + MongoDBStore, + RedisStore, + SQLiteStore, + ZipStore, + _getsize, +) __doctest_requires__ = { - ('RedisStore', 'RedisStore.*'): ['redis'], - ('MongoDBStore', 'MongoDBStore.*'): ['pymongo'], - ('LRUStoreCache', 'LRUStoreCache.*'): ['s3fs'], + ("RedisStore", "RedisStore.*"): ["redis"], + ("MongoDBStore", "MongoDBStore.*"): ["pymongo"], + ("LRUStoreCache", "LRUStoreCache.*"): ["s3fs"], } try: # noinspection PyUnresolvedReferences from zarr.codecs import Blosc + default_compressor = Blosc() except ImportError: # pragma: no cover from zarr.codecs import Zlib + default_compressor = Zlib() @@ -55,7 +71,7 @@ StoreLike = Union[BaseStore, MutableMapping] -class RmdirV3(): +class RmdirV3: """Mixin class that can be used to ensure override of any existing v2 rmdir class.""" def rmdir(self, path: str = "") -> None: @@ -64,7 +80,6 @@ def rmdir(self, path: str = "") -> None: class KVStoreV3(RmdirV3, KVStore, StoreV3): - def list(self): return list(self._mutable_mapping.keys()) @@ -73,10 +88,7 @@ def __setitem__(self, key, value): super().__setitem__(key, value) def __eq__(self, other): - return ( - isinstance(other, KVStoreV3) and - self._mutable_mapping == other._mutable_mapping - ) + return isinstance(other, KVStoreV3) and self._mutable_mapping == other._mutable_mapping KVStoreV3.__doc__ = KVStore.__doc__ @@ -122,15 +134,15 @@ def list(self): return list(self.keys()) def _normalize_key(self, key): - key = normalize_storage_path(key).lstrip('/') + key = normalize_storage_path(key).lstrip("/") return key.lower() if self.normalize_keys else key def getsize(self, path=None): size = 0 - if path is None or path == '': + if path is None or path == "": # size of both the data and meta subdirs dirs = [] - for d in ['data/root', 'meta/root']: + for d in ["data/root", "meta/root"]: dir_path = os.path.join(self.path, d) if os.path.exists(dir_path): dirs.append(dir_path) @@ -146,7 +158,7 @@ def getsize(self, path=None): return size def setitems(self, values): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() values = {self._normalize_key(key): val for key, val in values.items()} @@ -162,7 +174,7 @@ def setitems(self, values): self.map.setitems(values) def rmdir(self, path=None): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() if path: for base in [meta_root, data_root]: @@ -172,10 +184,10 @@ def rmdir(self, path=None): # remove any associated metadata files sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx + meta_dir = (meta_root + path).rstrip("/") + array_meta_file = meta_dir + ".array" + sfx self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx + group_meta_file = meta_dir + ".group" + sfx self.pop(group_meta_file, None) else: store_path = self.dir_path(path) @@ -213,7 +225,6 @@ def get_partial_values(self, key_ranges): class MemoryStoreV3(MemoryStore, StoreV3): - def __init__(self, root=None, cls=dict, dimension_separator=None): if root is None: self.root = cls() @@ -225,9 +236,7 @@ def __init__(self, root=None, cls=dict, dimension_separator=None): def __eq__(self, other): return ( - isinstance(other, MemoryStoreV3) and - self.root == other.root and - self.cls == other.cls + isinstance(other, MemoryStoreV3) and self.root == other.root and self.cls == other.cls ) def __setitem__(self, key, value): @@ -256,13 +265,13 @@ def rename(self, 
src_path: Path, dst_path: Path): if base == meta_root: # check for and move corresponding metadata sfx = _get_metadata_suffix(self) - src_meta = src_key + '.array' + sfx + src_meta = src_key + ".array" + sfx if src_meta in src_parent: - dst_meta = dst_key + '.array' + sfx + dst_meta = dst_key + ".array" + sfx dst_parent[dst_meta] = src_parent.pop(src_meta) - src_meta = src_key + '.group' + sfx + src_meta = src_key + ".group" + sfx if src_meta in src_parent: - dst_meta = dst_key + '.group' + sfx + dst_meta = dst_key + ".group" + sfx dst_parent[dst_meta] = src_parent.pop(src_meta) any_renamed = True any_renamed = _rename_metadata_v3(self, src_path, dst_path) or any_renamed @@ -284,10 +293,10 @@ def rmdir(self, path: Path = None): # remove any associated metadata files sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx + meta_dir = (meta_root + path).rstrip("/") + array_meta_file = meta_dir + ".array" + sfx self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx + group_meta_file = meta_dir + ".group" + sfx self.pop(group_meta_file, None) else: # clear out root @@ -298,15 +307,11 @@ def rmdir(self, path: Path = None): class DirectoryStoreV3(DirectoryStore, StoreV3): - def list(self): return list(self.keys()) def __eq__(self, other): - return ( - isinstance(other, DirectoryStoreV3) and - self.path == other.path - ) + return isinstance(other, DirectoryStoreV3) and self.path == other.path def __setitem__(self, key, value): self._validate_key(key) @@ -315,25 +320,24 @@ def __setitem__(self, key, value): def getsize(self, path: Path = None): return _getsize(self, path) - def rename(self, src_path, dst_path, metadata_key_suffix='.json'): + def rename(self, src_path, dst_path, metadata_key_suffix=".json"): store_src_path = normalize_storage_path(src_path) store_dst_path = normalize_storage_path(dst_path) dir_path = self.path any_existed = False - for root_prefix in ['meta', 'data']: - src_path = os.path.join(dir_path, root_prefix, 'root', store_src_path) + for root_prefix in ["meta", "data"]: + src_path = os.path.join(dir_path, root_prefix, "root", store_src_path) if os.path.exists(src_path): any_existed = True - dst_path = os.path.join(dir_path, root_prefix, 'root', store_dst_path) + dst_path = os.path.join(dir_path, root_prefix, "root", store_dst_path) os.renames(src_path, dst_path) - for suffix in ['.array' + metadata_key_suffix, - '.group' + metadata_key_suffix]: - src_meta = os.path.join(dir_path, 'meta', 'root', store_src_path + suffix) + for suffix in [".array" + metadata_key_suffix, ".group" + metadata_key_suffix]: + src_meta = os.path.join(dir_path, "meta", "root", store_src_path + suffix) if os.path.exists(src_meta): any_existed = True - dst_meta = os.path.join(dir_path, 'meta', 'root', store_dst_path + suffix) + dst_meta = os.path.join(dir_path, "meta", "root", store_dst_path + suffix) dst_dir = os.path.dirname(dst_meta) if not os.path.exists(dst_dir): os.makedirs(dst_dir) @@ -352,10 +356,10 @@ def rmdir(self, path=None): # remove any associated metadata files sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx + meta_dir = (meta_root + path).rstrip("/") + array_meta_file = meta_dir + ".array" + sfx self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx + group_meta_file = meta_dir + ".group" + sfx self.pop(group_meta_file, None) elif os.path.isdir(dir_path): @@ -366,16 +370,15 @@ def rmdir(self, 
path=None): class ZipStoreV3(ZipStore, StoreV3): - def list(self): return list(self.keys()) def __eq__(self, other): return ( - isinstance(other, ZipStore) and - self.path == other.path and - self.compression == other.compression and - self.allowZip64 == other.allowZip64 + isinstance(other, ZipStore) + and self.path == other.path + and self.compression == other.compression + and self.allowZip64 == other.allowZip64 ) def __setitem__(self, key, value): @@ -405,7 +408,6 @@ def getsize(self, path=None): class RedisStoreV3(RmdirV3, RedisStore, StoreV3): - def list(self): return list(self.keys()) @@ -418,7 +420,6 @@ def __setitem__(self, key, value): class MongoDBStoreV3(RmdirV3, MongoDBStore, StoreV3): - def list(self): return list(self.keys()) @@ -431,7 +432,6 @@ def __setitem__(self, key, value): class DBMStoreV3(RmdirV3, DBMStore, StoreV3): - def list(self): return list(self.keys()) @@ -444,7 +444,6 @@ def __setitem__(self, key, value): class LMDBStoreV3(RmdirV3, LMDBStore, StoreV3): - def list(self): return list(self.keys()) @@ -457,7 +456,6 @@ def __setitem__(self, key, value): class SQLiteStoreV3(SQLiteStore, StoreV3): - def list(self): return list(self.keys()) @@ -490,15 +488,13 @@ def rmdir(self, path=None): if path: for base in [meta_root, data_root]: with self.lock: - self.cursor.execute( - 'DELETE FROM zarr WHERE k LIKE (? || "/%")', (base + path,) - ) + self.cursor.execute('DELETE FROM zarr WHERE k LIKE (? || "/%")', (base + path,)) # remove any associated metadata files sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip('/') - array_meta_file = meta_dir + '.array' + sfx + meta_dir = (meta_root + path).rstrip("/") + array_meta_file = meta_dir + ".array" + sfx self.pop(array_meta_file, None) - group_meta_file = meta_dir + '.group' + sfx + group_meta_file = meta_dir + ".group" + sfx self.pop(group_meta_file, None) else: self.clear() @@ -508,7 +504,6 @@ def rmdir(self, path=None): class LRUStoreCacheV3(RmdirV3, LRUStoreCache, StoreV3): - def __init__(self, store, max_size: int): self._store = StoreV3._ensure_store(store) self._max_size = max_size @@ -572,10 +567,11 @@ def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zme meta = json_loads(self.store[metadata_key]) # check format of consolidated metadata - consolidated_format = meta.get('zarr_consolidated_format', None) + consolidated_format = meta.get("zarr_consolidated_format", None) if consolidated_format != 1: - raise MetadataError('unsupported zarr consolidated metadata format: %s' % - consolidated_format) + raise MetadataError( + "unsupported zarr consolidated metadata format: %s" % consolidated_format + ) # decode metadata self.meta_store: Store = KVStoreV3(meta["metadata"]) @@ -586,34 +582,37 @@ def rmdir(self, key): def _normalize_store_arg_v3(store: Any, storage_options=None, mode="r") -> BaseStore: # default to v2 store for backward compatibility - zarr_version = getattr(store, '_store_version', 3) + zarr_version = getattr(store, "_store_version", 3) if zarr_version != 3: raise ValueError("store must be a version 3 store") if store is None: store = KVStoreV3(dict()) # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) return store if isinstance(store, os.PathLike): store = os.fspath(store) if FSStore._fsspec_installed(): import fsspec + if isinstance(store, fsspec.FSMap): - return FSStoreV3(store.root, - fs=store.fs, - mode=mode, - 
check=store.check, - create=store.create, - missing_exceptions=store.missing_exceptions, - **(storage_options or {})) + return FSStoreV3( + store.root, + fs=store.fs, + mode=mode, + check=store.check, + create=store.create, + missing_exceptions=store.missing_exceptions, + **(storage_options or {}), + ) if isinstance(store, str): if "://" in store or "::" in store: store = FSStoreV3(store, mode=mode, **(storage_options or {})) elif storage_options: raise ValueError("storage_options passed with non-fsspec path") - elif store.endswith('.zip'): + elif store.endswith(".zip"): store = ZipStoreV3(store, mode=mode) - elif store.endswith('.n5'): + elif store.endswith(".n5"): raise NotImplementedError("N5Store not yet implemented for V3") # return N5StoreV3(store) else: @@ -621,7 +620,7 @@ def _normalize_store_arg_v3(store: Any, storage_options=None, mode="r") -> BaseS else: store = StoreV3._ensure_store(store) - if 'zarr.json' not in store: + if "zarr.json" not in store: # add default zarr.json metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) + store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) return store diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py index 3675d42c38..ff31a7281c 100644 --- a/zarr/_storage/v3_storage_transformers.py +++ b/zarr/_storage/v3_storage_transformers.py @@ -10,10 +10,10 @@ from zarr.util import normalize_storage_path -MAX_UINT_64 = 2 ** 64 - 1 +MAX_UINT_64 = 2**64 - 1 -v3_sharding_available = os.environ.get('ZARR_V3_SHARDING', '0').lower() not in ['0', 'false'] +v3_sharding_available = os.environ.get("ZARR_V3_SHARDING", "0").lower() not in ["0", "false"] def assert_zarr_v3_sharding_available(): @@ -31,8 +31,7 @@ class _ShardIndex(NamedTuple): def __localize_chunk__(self, chunk: Tuple[int, ...]) -> Tuple[int, ...]: return tuple( - chunk_i % shard_i - for chunk_i, shard_i in zip(chunk, self.store.chunks_per_shard) + chunk_i % shard_i for chunk_i, shard_i in zip(chunk, self.store.chunks_per_shard) ) def is_all_empty(self) -> bool: @@ -46,9 +45,7 @@ def get_chunk_slice(self, chunk: Tuple[int, ...]) -> Optional[slice]: else: return slice(int(chunk_start), int(chunk_start + chunk_len)) - def set_chunk_slice( - self, chunk: Tuple[int, ...], chunk_slice: Optional[slice] - ) -> None: + def set_chunk_slice(self, chunk: Tuple[int, ...], chunk_slice: Optional[slice]) -> None: localized_chunk = self.__localize_chunk__(chunk) if chunk_slice is None: self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64) @@ -79,8 +76,7 @@ def from_bytes( def create_empty(cls, store: "ShardingStorageTransformer"): # reserving 2*64bit per chunk for offset and length: return cls.from_bytes( - MAX_UINT_64.to_bytes(8, byteorder="little") - * (2 * store._num_chunks_per_shard), + MAX_UINT_64.to_bytes(8, byteorder="little") * (2 * store._num_chunks_per_shard), store=store, ) @@ -98,15 +94,13 @@ def __init__(self, _type, chunks_per_shard) -> None: assert_zarr_v3_sharding_available() super().__init__(_type) if isinstance(chunks_per_shard, int): - chunks_per_shard = (chunks_per_shard, ) + chunks_per_shard = (chunks_per_shard,) else: chunks_per_shard = tuple(int(i) for i in chunks_per_shard) if chunks_per_shard == (): - chunks_per_shard = (1, ) + chunks_per_shard = (1,) self.chunks_per_shard = chunks_per_shard - self._num_chunks_per_shard = functools.reduce( - lambda x, y: x * y, chunks_per_shard, 1 - ) + self._num_chunks_per_shard = functools.reduce(lambda x, y: x * y, chunks_per_shard, 
1) self._dimension_separator = None self._data_key_prefix = None @@ -118,36 +112,33 @@ def _copy_for_array(self, array, inner_store): # The array shape might be longer when initialized with subdtypes. # subdtypes dimensions come last, therefore padding chunks_per_shard # with ones, effectively disabling sharding on the unlisted dimensions. - transformer_copy.chunks_per_shard += ( - (1, ) * (len(array._shape) - len(self.chunks_per_shard)) + transformer_copy.chunks_per_shard += (1,) * ( + len(array._shape) - len(self.chunks_per_shard) ) return transformer_copy @property def dimension_separator(self) -> str: - assert self._dimension_separator is not None, ( - "dimension_separator is not initialized, first get a copy via _copy_for_array." - ) + assert ( + self._dimension_separator is not None + ), "dimension_separator is not initialized, first get a copy via _copy_for_array." return self._dimension_separator def _is_data_key(self, key: str) -> bool: - assert self._data_key_prefix is not None, ( - "data_key_prefix is not initialized, first get a copy via _copy_for_array." - ) + assert ( + self._data_key_prefix is not None + ), "data_key_prefix is not initialized, first get a copy via _copy_for_array." return key.startswith(self._data_key_prefix) def _key_to_shard(self, chunk_key: str) -> Tuple[str, Tuple[int, ...]]: prefix, _, chunk_string = chunk_key.rpartition("c") - chunk_subkeys = tuple( - map(int, chunk_string.split(self.dimension_separator)) - ) if chunk_string else (0, ) - shard_key_tuple = ( - subkey // shard_i - for subkey, shard_i in zip(chunk_subkeys, self.chunks_per_shard) + chunk_subkeys = ( + tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,) ) - shard_key = ( - prefix + "c" + self.dimension_separator.join(map(str, shard_key_tuple)) + shard_key_tuple = ( + subkey // shard_i for subkey, shard_i in zip(chunk_subkeys, self.chunks_per_shard) ) + shard_key = prefix + "c" + self.dimension_separator.join(map(str, shard_key_tuple)) return shard_key, chunk_subkeys def _get_index_from_store(self, shard_key: str) -> _ShardIndex: @@ -164,16 +155,14 @@ def _get_index_from_store(self, shard_key: str) -> _ShardIndex: def _get_index_from_buffer(self, buffer: Union[bytes, bytearray]) -> _ShardIndex: # At the end of each shard 2*64bit per chunk for offset and length define the index: - return _ShardIndex.from_bytes(buffer[-16 * self._num_chunks_per_shard:], self) + return _ShardIndex.from_bytes(buffer[-16 * self._num_chunks_per_shard :], self) def _get_chunks_in_shard(self, shard_key: str) -> Iterator[Tuple[int, ...]]: _, _, chunk_string = shard_key.rpartition("c") - shard_key_tuple = tuple( - map(int, chunk_string.split(self.dimension_separator)) - ) if chunk_string else (0, ) - for chunk_offset in itertools.product( - *(range(i) for i in self.chunks_per_shard) - ): + shard_key_tuple = ( + tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,) + ) + for chunk_offset in itertools.product(*(range(i) for i in self.chunks_per_shard)): yield tuple( shard_key_i * shards_i + offset_i for shard_key_i, offset_i, shards_i in zip( @@ -250,9 +239,7 @@ def __setitem__(self, key, value): for _, chunk_slice in valid_chunk_slices ] ) - for chunk_value, (chunk_to_read, _) in zip( - chunk_values, valid_chunk_slices - ): + for chunk_value, (chunk_to_read, _) in zip(chunk_values, valid_chunk_slices): new_content[chunk_to_read] = chunk_value else: if full_shard_value is None: @@ -263,9 +250,7 @@ def __setitem__(self, key, value): shard_content = b"" 
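# A minimal sketch of the shard byte layout handled above, under the stated
# assumptions: chunks are concatenated back to back, followed by an index of
# two little-endian uint64 values (offset, length) per chunk slot, with
# 2**64 - 1 marking an empty slot. The helper name `build_shard` is
# hypothetical, not part of the library API.
import numpy as np

EMPTY = 2**64 - 1

def build_shard(chunks):
    """chunks: list of bytes-or-None, one entry per chunk slot in the shard."""
    payload = b""
    index = np.full((len(chunks), 2), EMPTY, dtype="<u8")
    for i, chunk in enumerate(chunks):
        if chunk is not None:
            index[i] = (len(payload), len(chunk))
            payload += chunk
    # the trailing 16 bytes per chunk slot are what _get_index_from_buffer reads
    return payload + index.tobytes()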
for chunk_subkey, chunk_content in new_content.items(): - chunk_slice = slice( - len(shard_content), len(shard_content) + len(chunk_content) - ) + chunk_slice = slice(len(shard_content), len(shard_content) + len(chunk_content)) index.set_chunk_slice(chunk_subkey, chunk_slice) shard_content += chunk_content # Appending the index at the end of the shard: @@ -298,9 +283,7 @@ def _shard_key_to_original_keys(self, key: str) -> Iterator[str]: prefix, _, _ = key.rpartition("c") for chunk_tuple in self._get_chunks_in_shard(key): if index.get_chunk_slice(chunk_tuple) is not None: - yield prefix + "c" + self.dimension_separator.join( - map(str, chunk_tuple) - ) + yield prefix + "c" + self.dimension_separator.join(map(str, chunk_tuple)) else: yield key diff --git a/zarr/attrs.py b/zarr/attrs.py index 60dd7f1d79..01fc617b3c 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -25,10 +25,9 @@ class Attributes(MutableMapping): """ - def __init__(self, store, key='.zattrs', read_only=False, cache=True, - synchronizer=None): + def __init__(self, store, key=".zattrs", read_only=False, cache=True, synchronizer=None): - self._version = getattr(store, '_store_version', 2) + self._version = getattr(store, "_store_version", 2) _Store = Store if self._version == 2 else StoreV3 self.store = _Store._ensure_store(store) self.key = key @@ -43,7 +42,7 @@ def _get_nosync(self): except KeyError: d = dict() if self._version > 2: - d['attributes'] = {} + d["attributes"] = {} else: d = self.store._metadata_class.parse_metadata(data) return d @@ -54,7 +53,7 @@ def asdict(self): return self._cached_asdict d = self._get_nosync() if self._version == 3: - d = d['attributes'] + d = d["attributes"] if self.cache: self._cached_asdict = d return d @@ -65,7 +64,7 @@ def refresh(self): if self._version == 2: self._cached_asdict = self._get_nosync() else: - self._cached_asdict = self._get_nosync()['attributes'] + self._cached_asdict = self._get_nosync()["attributes"] def __contains__(self, x): return x in self.asdict() @@ -77,7 +76,7 @@ def _write_op(self, f, *args, **kwargs): # guard condition if self.read_only: - raise PermissionError('attributes are read-only') + raise PermissionError("attributes are read-only") # synchronization if self.synchronizer is None: @@ -98,7 +97,7 @@ def _setitem_nosync(self, item, value): if self._version == 2: d[item] = value else: - d['attributes'][item] = value + d["attributes"][item] = value # _put modified data self._put_nosync(d) @@ -115,7 +114,7 @@ def _delitem_nosync(self, key): if self._version == 2: del d[key] else: - del d['attributes'][key] + del d["attributes"][key] # _put modified data self._put_nosync(d) @@ -137,8 +136,8 @@ def _put_nosync(self, d): warnings.warn( "only attribute keys of type 'string' will be allowed in the future", DeprecationWarning, - stacklevel=2 - ) + stacklevel=2, + ) try: d_to_check = {str(k): v for k, v in d_to_check.items()} @@ -163,15 +162,15 @@ def _put_nosync(self, d): # Note: this changes the store.counter result in test_caching_on! 
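# For orientation, the two document shapes this class round-trips, with
# example values only: in v2 the stored .zattrs document is the attribute
# mapping itself, while in v3 user attributes sit under an "attributes" key
# of the metadata document.
v2_doc = {"units": "metres"}
v3_doc = {"attributes": {"units": "metres"}}
assert v3_doc["attributes"] == v2_doc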
meta = self.store._metadata_class.parse_metadata(self.store[self.key]) - if 'attributes' in meta and 'filters' in meta['attributes']: + if "attributes" in meta and "filters" in meta["attributes"]: # need to preserve any existing "filters" attribute - d['attributes']['filters'] = meta['attributes']['filters'] - meta['attributes'] = d['attributes'] + d["attributes"]["filters"] = meta["attributes"]["filters"] + meta["attributes"] = d["attributes"] else: meta = d self.store[self.key] = json_dumps(meta) if self.cache: - self._cached_asdict = d['attributes'] + self._cached_asdict = d["attributes"] # noinspection PyMethodOverriding def update(self, *args, **kwargs): @@ -187,7 +186,7 @@ def _update_nosync(self, *args, **kwargs): if self._version == 2: d.update(*args, **kwargs) else: - d['attributes'].update(*args, **kwargs) + d["attributes"].update(*args, **kwargs) # _put modified data self._put_nosync(d) diff --git a/zarr/context.py b/zarr/context.py index 83fbaafa9b..3dd7dda4ac 100644 --- a/zarr/context.py +++ b/zarr/context.py @@ -1,11 +1,10 @@ - from typing import TypedDict from numcodecs.compat import NDArrayLike class Context(TypedDict, total=False): - """ A context for component specific information + """A context for component specific information All keys are optional. Any component reading the context must provide a default implementation in the case a key cannot be found. @@ -16,4 +15,5 @@ class Context(TypedDict, total=False): An array-like instance to use for determining the preferred output array type. """ + meta_array: NDArrayLike diff --git a/zarr/convenience.py b/zarr/convenience.py index 9a0eae20a3..ff236d0df2 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -14,8 +14,14 @@ from zarr.hierarchy import group as _create_group from zarr.hierarchy import open_group from zarr.meta import json_dumps, json_loads -from zarr.storage import (_get_metadata_suffix, contains_array, contains_group, - normalize_store_arg, BaseStore, ConsolidatedMetadataStore) +from zarr.storage import ( + _get_metadata_suffix, + contains_array, + contains_group, + normalize_store_arg, + BaseStore, + ConsolidatedMetadataStore, +) from zarr._storage.v3 import ConsolidatedMetadataStoreV3 from zarr.util import TreeViewer, buffer_size, normalize_storage_path @@ -25,7 +31,7 @@ def _check_and_update_path(store: BaseStore, path): - if getattr(store, '_store_version', 2) > 2 and not path: + if getattr(store, "_store_version", 2) > 2 and not path: raise ValueError("path must be provided for v3 stores") return normalize_storage_path(path) @@ -94,15 +100,17 @@ def open(store: StoreLike = None, mode: str = "a", *, zarr_version=None, path=No # we pass storage options explicitly, since normalize_store_arg might construct # a store if the input is a fsspec-compatible URL _store: BaseStore = normalize_store_arg( - store, storage_options=kwargs.pop("storage_options", {}), mode=mode, + store, + storage_options=kwargs.pop("storage_options", {}), + mode=mode, zarr_version=zarr_version, ) # path = _check_and_update_path(_store, path) path = normalize_storage_path(path) - kwargs['path'] = path + kwargs["path"] = path - if mode in {'w', 'w-', 'x'}: - if 'shape' in kwargs: + if mode in {"w", "w-", "x"}: + if "shape" in kwargs: return open_array(_store, mode=mode, **kwargs) else: return open_group(_store, mode=mode, **kwargs) @@ -167,8 +175,9 @@ def save_array(store: StoreLike, arr, *, zarr_version=None, path=None, **kwargs) _store: BaseStore = normalize_store_arg(store, mode="w", zarr_version=zarr_version) path = 
_check_and_update_path(_store, path) try: - _create_array(arr, store=_store, overwrite=True, zarr_version=zarr_version, path=path, - **kwargs) + _create_array( + arr, store=_store, overwrite=True, zarr_version=zarr_version, path=path, **kwargs + ) finally: if may_need_closing: # needed to ensure zip file records are written @@ -240,7 +249,7 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): """ if len(args) == 0 and len(kwargs) == 0: - raise ValueError('at least one array must be provided') + raise ValueError("at least one array must be provided") # handle polymorphic store arg may_need_closing = _might_close(store) _store: BaseStore = normalize_store_arg(store, mode="w", zarr_version=zarr_version) @@ -248,7 +257,7 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): try: grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version) for i, arr in enumerate(args): - k = 'arr_{}'.format(i) + k = "arr_{}".format(i) grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) for k, arr in kwargs.items(): grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) @@ -337,16 +346,14 @@ def save(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): """ if len(args) == 0 and len(kwargs) == 0: - raise ValueError('at least one array must be provided') + raise ValueError("at least one array must be provided") if len(args) == 1 and len(kwargs) == 0: save_array(store, args[0], zarr_version=zarr_version, path=path) else: - save_group(store, *args, zarr_version=zarr_version, path=path, - **kwargs) + save_group(store, *args, zarr_version=zarr_version, path=path, **kwargs) class LazyLoader(Mapping): - def __init__(self, grp): self.grp = grp self.cache = dict() @@ -369,9 +376,9 @@ def __contains__(self, item): return item in self.grp def __repr__(self): - r = ' ' + dest_key + descr = descr + " -> " + dest_key # decide what to do do_copy = True - if if_exists != 'replace': + if if_exists != "replace": if dest_key in dest: - if if_exists == 'raise': - raise CopyError('key {!r} exists in destination' - .format(dest_key)) - elif if_exists == 'skip': + if if_exists == "raise": + raise CopyError("key {!r} exists in destination".format(dest_key)) + elif if_exists == "skip": do_copy = False # take action if do_copy: - log('copy {}'.format(descr)) + log("copy {}".format(descr)) if not dry_run: data = source[source_key] n_bytes_copied += buffer_size(data) dest[dest_key] = data n_copied += 1 else: - log('skip {}'.format(descr)) + log("skip {}".format(descr)) n_skipped += 1 # log a final message with a summary of what happened @@ -727,12 +743,21 @@ def copy_store(source, dest, source_path='', dest_path='', excludes=None, def _check_dest_is_group(dest): - if not hasattr(dest, 'create_dataset'): - raise ValueError('dest must be a group, got {!r}'.format(dest)) - - -def copy(source, dest, name=None, shallow=False, without_attrs=False, log=None, - if_exists='raise', dry_run=False, **create_kws): + if not hasattr(dest, "create_dataset"): + raise ValueError("dest must be a group, got {!r}".format(dest)) + + +def copy( + source, + dest, + name=None, + shallow=False, + without_attrs=False, + log=None, + if_exists="raise", + dry_run=False, + **create_kws +): """Copy the `source` array or group into the `dest` group. 
Parameters @@ -855,8 +880,15 @@ def copy(source, dest, name=None, shallow=False, without_attrs=False, log=None, # do the copying n_copied, n_skipped, n_bytes_copied = _copy( - log, source, dest, name=name, root=True, shallow=shallow, - without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, + log, + source, + dest, + name=name, + root=True, + shallow=shallow, + without_attrs=without_attrs, + if_exists=if_exists, + dry_run=dry_run, **create_kws ) @@ -866,47 +898,49 @@ def copy(source, dest, name=None, shallow=False, without_attrs=False, log=None, return n_copied, n_skipped, n_bytes_copied -def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, - dry_run, **create_kws): +def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_run, **create_kws): # N.B., if this is a dry run, dest may be None # setup counting variables n_copied = n_skipped = n_bytes_copied = 0 # are we copying to/from h5py? - source_h5py = source.__module__.startswith('h5py.') - dest_h5py = dest is not None and dest.__module__.startswith('h5py.') + source_h5py = source.__module__.startswith("h5py.") + dest_h5py = dest is not None and dest.__module__.startswith("h5py.") # check if_exists parameter - valid_if_exists = ['raise', 'replace', 'skip', 'skip_initialized'] + valid_if_exists = ["raise", "replace", "skip", "skip_initialized"] if if_exists not in valid_if_exists: - raise ValueError('if_exists must be one of {!r}; found {!r}' - .format(valid_if_exists, if_exists)) - if dest_h5py and if_exists == 'skip_initialized': - raise ValueError('{!r} can only be used when copying to zarr' - .format(if_exists)) + raise ValueError( + "if_exists must be one of {!r}; found {!r}".format(valid_if_exists, if_exists) + ) + if dest_h5py and if_exists == "skip_initialized": + raise ValueError("{!r} can only be used when copying to zarr".format(if_exists)) # determine name to copy to if name is None: - name = source.name.split('/')[-1] + name = source.name.split("/")[-1] if not name: # this can happen if source is the root group - raise TypeError('source has no name, please provide the `name` ' - 'parameter to indicate a name to copy to') + raise TypeError( + "source has no name, please provide the `name` " + "parameter to indicate a name to copy to" + ) - if hasattr(source, 'shape'): + if hasattr(source, "shape"): # copy a dataset/array # check if already exists, decide what to do do_copy = True exists = dest is not None and name in dest if exists: - if if_exists == 'raise': - raise CopyError('an object {!r} already exists in destination ' - '{!r}'.format(name, dest.name)) - elif if_exists == 'skip': + if if_exists == "raise": + raise CopyError( + "an object {!r} already exists in destination " "{!r}".format(name, dest.name) + ) + elif if_exists == "skip": do_copy = False - elif if_exists == 'skip_initialized': + elif if_exists == "skip_initialized": ds = dest[name] if ds.nchunks_initialized == ds.nchunks: do_copy = False @@ -915,7 +949,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, if do_copy: # log a message about what we're going to do - log('copy {} {} {}'.format(source.name, source.shape, source.dtype)) + log("copy {} {} {}".format(source.name, source.shape, source.dtype)) if not dry_run: @@ -927,38 +961,37 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, kws = create_kws.copy() # setup chunks option, preserve by default - kws.setdefault('chunks', source.chunks) + kws.setdefault("chunks", source.chunks) # setup 
compression options if source_h5py: if dest_h5py: # h5py -> h5py; preserve compression options by default - kws.setdefault('compression', source.compression) - kws.setdefault('compression_opts', source.compression_opts) - kws.setdefault('shuffle', source.shuffle) - kws.setdefault('fletcher32', source.fletcher32) - kws.setdefault('fillvalue', source.fillvalue) + kws.setdefault("compression", source.compression) + kws.setdefault("compression_opts", source.compression_opts) + kws.setdefault("shuffle", source.shuffle) + kws.setdefault("fletcher32", source.fletcher32) + kws.setdefault("fillvalue", source.fillvalue) else: # h5py -> zarr; use zarr default compression options - kws.setdefault('fill_value', source.fillvalue) + kws.setdefault("fill_value", source.fillvalue) else: if dest_h5py: # zarr -> h5py; use some vaguely sensible defaults - kws.setdefault('chunks', True) - kws.setdefault('compression', 'gzip') - kws.setdefault('compression_opts', 1) - kws.setdefault('shuffle', False) - kws.setdefault('fillvalue', source.fill_value) + kws.setdefault("chunks", True) + kws.setdefault("compression", "gzip") + kws.setdefault("compression_opts", 1) + kws.setdefault("shuffle", False) + kws.setdefault("fillvalue", source.fill_value) else: # zarr -> zarr; preserve compression options by default - kws.setdefault('compressor', source.compressor) - kws.setdefault('filters', source.filters) - kws.setdefault('order', source.order) - kws.setdefault('fill_value', source.fill_value) + kws.setdefault("compressor", source.compressor) + kws.setdefault("filters", source.filters) + kws.setdefault("order", source.order) + kws.setdefault("fill_value", source.fill_value) # create new dataset in destination - ds = dest.create_dataset(name, shape=source.shape, - dtype=source.dtype, **kws) + ds = dest.create_dataset(name, shape=source.shape, dtype=source.dtype, **kws) # copy data - N.B., go chunk by chunk to avoid loading # everything into memory @@ -966,19 +999,18 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, chunks = ds.chunks chunk_offsets = [range(0, s, c) for s, c in zip(shape, chunks)] for offset in itertools.product(*chunk_offsets): - sel = tuple(slice(o, min(s, o + c)) - for o, s, c in zip(offset, shape, chunks)) + sel = tuple(slice(o, min(s, o + c)) for o, s, c in zip(offset, shape, chunks)) ds[sel] = source[sel] n_bytes_copied += ds.size * ds.dtype.itemsize # copy attributes if not without_attrs: - if dest_h5py and 'filters' in source.attrs: + if dest_h5py and "filters" in source.attrs: # No filters key in v3 metadata so it was stored in the # attributes instead. We cannot copy this key to # HDF5 attrs, though! 
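# The v3 workaround described above in miniature, with hypothetical values:
# v3 keeps codec filter configs inside the attributes dict, so they must be
# stripped before mirroring attributes onto an HDF5 dataset.
attrs_from_v3 = {"filters": [{"id": "delta", "dtype": "<i4"}], "units": "m"}
hdf5_safe = {k: v for k, v in attrs_from_v3.items() if k != "filters"}
assert hdf5_safe == {"units": "m"}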
source_attrs = source.attrs.asdict().copy() - source_attrs.pop('filters', None) + source_attrs.pop("filters", None) else: source_attrs = source.attrs ds.attrs.update(source_attrs) @@ -986,7 +1018,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, n_copied += 1 else: - log('skip {} {} {}'.format(source.name, source.shape, source.dtype)) + log("skip {} {} {}".format(source.name, source.shape, source.dtype)) n_skipped += 1 elif root or not shallow: @@ -994,21 +1026,20 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, # check if an array is in the way do_copy = True - exists_array = (dest is not None and - name in dest and - hasattr(dest[name], 'shape')) + exists_array = dest is not None and name in dest and hasattr(dest[name], "shape") if exists_array: - if if_exists == 'raise': - raise CopyError('an array {!r} already exists in destination ' - '{!r}'.format(name, dest.name)) - elif if_exists == 'skip': + if if_exists == "raise": + raise CopyError( + "an array {!r} already exists in destination " "{!r}".format(name, dest.name) + ) + elif if_exists == "skip": do_copy = False # take action if do_copy: # log action - log('copy {}'.format(source.name)) + log("copy {}".format(source.name)) if not dry_run: @@ -1035,9 +1066,17 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, # recurse for k in source.keys(): c, s, b = _copy( - log, source[k], grp, name=k, root=False, shallow=shallow, - without_attrs=without_attrs, if_exists=if_exists, - dry_run=dry_run, **create_kws) + log, + source[k], + grp, + name=k, + root=False, + shallow=shallow, + without_attrs=without_attrs, + if_exists=if_exists, + dry_run=dry_run, + **create_kws + ) n_copied += c n_skipped += s n_bytes_copied += b @@ -1045,14 +1084,22 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, n_copied += 1 else: - log('skip {}'.format(source.name)) + log("skip {}".format(source.name)) n_skipped += 1 return n_copied, n_skipped, n_bytes_copied -def copy_all(source, dest, shallow=False, without_attrs=False, log=None, - if_exists='raise', dry_run=False, **create_kws): +def copy_all( + source, + dest, + shallow=False, + without_attrs=False, + log=None, + if_exists="raise", + dry_run=False, + **create_kws +): """Copy all children of the `source` group into the `dest` group. Parameters @@ -1137,16 +1184,24 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None, # setup counting variables n_copied = n_skipped = n_bytes_copied = 0 - zarr_version = getattr(source, '_version', 2) + zarr_version = getattr(source, "_version", 2) # setup logging with _LogWriter(log) as log: for k in source.keys(): c, s, b = _copy( - log, source[k], dest, name=k, root=False, shallow=shallow, - without_attrs=without_attrs, if_exists=if_exists, - dry_run=dry_run, **create_kws) + log, + source[k], + dest, + name=k, + root=False, + shallow=shallow, + without_attrs=without_attrs, + if_exists=if_exists, + dry_run=dry_run, + **create_kws + ) n_copied += c n_skipped += s n_bytes_copied += b @@ -1159,7 +1214,7 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None, return n_copied, n_skipped, n_bytes_copied -def consolidate_metadata(store: BaseStore, metadata_key=".zmetadata", *, path=''): +def consolidate_metadata(store: BaseStore, metadata_key=".zmetadata", *, path=""): """ Consolidate all metadata for groups and arrays within the given store into a single resource and put it under the given key. 
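# The consolidated document produced below has this overall shape (format
# version 1); the entries shown are examples only. For v2 stores the
# collected keys end in .zarray/.zgroup/.zattrs, for v3 they end in
# .array<sfx>/.group<sfx> or are "zarr.json".
consolidated = {
    "zarr_consolidated_format": 1,
    "metadata": {
        ".zgroup": {"zarr_format": 2},
        "a/.zarray": {"shape": [10], "chunks": [5]},  # abbreviated example
    },
}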
@@ -1203,8 +1258,7 @@ def consolidate_metadata(store: BaseStore, metadata_key=".zmetadata", *, path='' if version == 2: def is_zarr_key(key): - return (key.endswith('.zarray') or key.endswith('.zgroup') or - key.endswith('.zattrs')) + return key.endswith(".zarray") or key.endswith(".zgroup") or key.endswith(".zattrs") else: @@ -1213,23 +1267,21 @@ def is_zarr_key(key): sfx = _get_metadata_suffix(store) # type: ignore def is_zarr_key(key): - return (key.endswith('.array' + sfx) or key.endswith('.group' + sfx) or - key == 'zarr.json') + return ( + key.endswith(".array" + sfx) or key.endswith(".group" + sfx) or key == "zarr.json" + ) # cannot create a group without a path in v3 # so create /meta/root/consolidated group to store the metadata - if 'consolidated' not in store: - _create_group(store, path='consolidated') - if not metadata_key.startswith('meta/root/'): - metadata_key = 'meta/root/consolidated/' + metadata_key + if "consolidated" not in store: + _create_group(store, path="consolidated") + if not metadata_key.startswith("meta/root/"): + metadata_key = "meta/root/consolidated/" + metadata_key # path = 'consolidated' out = { - 'zarr_consolidated_format': 1, - 'metadata': { - key: json_loads(store[key]) - for key in store if is_zarr_key(key) - } + "zarr_consolidated_format": 1, + "metadata": {key: json_loads(store[key]) for key in store if is_zarr_key(key)}, } store[metadata_key] = json_dumps(out) return open_consolidated(store, metadata_key=metadata_key, path=path) @@ -1278,26 +1330,26 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** """ # normalize parameters - zarr_version = kwargs.get('zarr_version') - store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode, - zarr_version=zarr_version) - if mode not in {'r', 'r+'}: - raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}" - .format(mode)) - - path = kwargs.pop('path', None) + zarr_version = kwargs.get("zarr_version") + store = normalize_store_arg( + store, storage_options=kwargs.get("storage_options"), mode=mode, zarr_version=zarr_version + ) + if mode not in {"r", "r+"}: + raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}".format(mode)) + + path = kwargs.pop("path", None) if store._store_version == 2: ConsolidatedStoreClass = ConsolidatedMetadataStore else: assert_zarr_v3_api_available() ConsolidatedStoreClass = ConsolidatedMetadataStoreV3 # default is to store within 'consolidated' group on v3 - if not metadata_key.startswith('meta/root/'): - metadata_key = 'meta/root/consolidated/' + metadata_key + if not metadata_key.startswith("meta/root/"): + metadata_key = "meta/root/consolidated/" + metadata_key # setup metadata store meta_store = ConsolidatedStoreClass(store, metadata_key=metadata_key) # pass through - chunk_store = kwargs.pop('chunk_store', None) or store + chunk_store = kwargs.pop("chunk_store", None) or store return open(store=meta_store, chunk_store=chunk_store, mode=mode, path=path, **kwargs) diff --git a/zarr/core.py b/zarr/core.py index 80f424bafc..43ccdbaf7d 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -193,17 +193,16 @@ def __init__( assert_zarr_v3_api_available() if chunk_store is not None: - chunk_store = normalize_store_arg(chunk_store, - zarr_version=zarr_version) + chunk_store = normalize_store_arg(chunk_store, zarr_version=zarr_version) self._store = store self._chunk_store = chunk_store self._transformed_chunk_store = None self._path = normalize_storage_path(path) if self._path: - 
self._key_prefix = self._path + '/' + self._key_prefix = self._path + "/" else: - self._key_prefix = '' + self._key_prefix = "" self._read_only = bool(read_only) self._synchronizer = synchronizer self._cache_metadata = cache_metadata @@ -216,18 +215,19 @@ def __init__( self._meta_array = np.empty(()) self._version = zarr_version if self._version == 3: - self._data_key_prefix = 'data/root/' + self._key_prefix - self._data_path = 'data/root/' + self._path + self._data_key_prefix = "data/root/" + self._key_prefix + self._data_path = "data/root/" + self._path self._hierarchy_metadata = _get_hierarchy_metadata(store=self._store) - self._metadata_key_suffix = self._hierarchy_metadata['metadata_key_suffix'] + self._metadata_key_suffix = self._hierarchy_metadata["metadata_key_suffix"] # initialize metadata self._load_metadata() # initialize attributes akey = _prefix_to_attrs_key(self._store, self._key_prefix) - self._attrs = Attributes(store, key=akey, read_only=read_only, - synchronizer=synchronizer, cache=cache_attrs) + self._attrs = Attributes( + store, key=akey, read_only=read_only, synchronizer=synchronizer, cache=cache_attrs + ) # initialize info reporter self._info_reporter = InfoReporter(self) @@ -257,13 +257,13 @@ def _load_metadata_nosync(self): # decode and store metadata as instance members meta = self._store._metadata_class.decode_array_metadata(meta_bytes) self._meta = meta - self._shape = meta['shape'] - self._fill_value = meta['fill_value'] - dimension_separator = meta.get('dimension_separator', None) + self._shape = meta["shape"] + self._fill_value = meta["fill_value"] + dimension_separator = meta.get("dimension_separator", None) if self._version == 2: - self._chunks = meta['chunks'] - self._dtype = meta['dtype'] - self._order = meta['order'] + self._chunks = meta["chunks"] + self._dtype = meta["dtype"] + self._order = meta["order"] if dimension_separator is None: try: dimension_separator = self._store._dimension_separator @@ -274,17 +274,17 @@ def _load_metadata_nosync(self): if dimension_separator is None: dimension_separator = "." else: - self._chunks = meta['chunk_grid']['chunk_shape'] - self._dtype = meta['data_type'] - self._order = meta['chunk_memory_layout'] - chunk_separator = meta['chunk_grid']['separator'] + self._chunks = meta["chunk_grid"]["chunk_shape"] + self._dtype = meta["data_type"] + self._order = meta["chunk_memory_layout"] + chunk_separator = meta["chunk_grid"]["separator"] if dimension_separator is None: - dimension_separator = meta.get('dimension_separator', chunk_separator) + dimension_separator = meta.get("dimension_separator", chunk_separator) self._dimension_separator = dimension_separator # setup compressor - compressor = meta.get('compressor', None) + compressor = meta.get("compressor", None) if compressor is None: self._compressor = None elif self._version == 2: @@ -294,17 +294,17 @@ def _load_metadata_nosync(self): # setup filters if self._version == 2: - filters = meta.get('filters', []) + filters = meta.get("filters", []) else: # TODO: storing filters under attributes for now since the v3 # array metadata does not have a 'filters' attribute. 
- filters = meta['attributes'].get('filters', []) + filters = meta["attributes"].get("filters", []) if filters: filters = [get_codec(config) for config in filters] self._filters = filters if self._version == 3: - storage_transformers = meta.get('storage_transformers', []) + storage_transformers = meta.get("storage_transformers", []) if storage_transformers: transformed_store = self._chunk_store or self._store for storage_transformer in storage_transformers[::-1]: @@ -323,7 +323,7 @@ def _refresh_metadata_nosync(self): def _flush_metadata_nosync(self): if self._is_view: - raise PermissionError('operation not permitted for views') + raise PermissionError("operation not permitted for views") if self._compressor: compressor_config = self._compressor.get_config() @@ -334,20 +334,26 @@ def _flush_metadata_nosync(self): else: filters_config = None _compressor = compressor_config if self._version == 2 else self._compressor - meta = dict(shape=self._shape, compressor=_compressor, - fill_value=self._fill_value, filters=filters_config) - if getattr(self._store, '_store_version', 2) == 2: - meta.update( - dict(chunks=self._chunks, dtype=self._dtype, order=self._order) - ) + meta = dict( + shape=self._shape, + compressor=_compressor, + fill_value=self._fill_value, + filters=filters_config, + ) + if getattr(self._store, "_store_version", 2) == 2: + meta.update(dict(chunks=self._chunks, dtype=self._dtype, order=self._order)) else: meta.update( - dict(chunk_grid=dict(type='regular', - chunk_shape=self._chunks, - separator=self._dimension_separator), - data_type=self._dtype, - chunk_memory_layout=self._order, - attributes=self.attrs.asdict()) + dict( + chunk_grid=dict( + type="regular", + chunk_shape=self._chunks, + separator=self._dimension_separator, + ), + data_type=self._dtype, + chunk_memory_layout=self._order, + attributes=self.attrs.asdict(), + ) ) mkey = _prefix_to_array_key(self._store, self._key_prefix) self._store[mkey] = self._store._metadata_class.encode_array_metadata(meta) @@ -368,8 +374,8 @@ def name(self): if self.path: # follow h5py convention: add leading slash name = self.path - if name[0] != '/': - name = '/' + name + if name[0] != "/": + name = "/" + name return name return None @@ -377,7 +383,7 @@ def name(self): def basename(self): """Final component of name.""" if self.name is not None: - return self.name.split('/')[-1] + return self.name.split("/")[-1] return None @property @@ -513,10 +519,9 @@ def nbytes_stored(self): @property def _cdata_shape(self): if self._shape == (): - return 1, + return (1,) else: - return tuple(math.ceil(s / c) - for s, c in zip(self._shape, self._chunks)) + return tuple(math.ceil(s / c) for s, c in zip(self._shape, self._chunks)) @property def cdata_shape(self): @@ -550,14 +555,14 @@ def nchunks_initialized(self): # return sum(1 for k in members if prog.match(k)) # key pattern for chunk keys - prog = re.compile(self._data_key_prefix + r'c\d+') # TODO: ndim == 0 case? + prog = re.compile(self._data_key_prefix + r"c\d+") # TODO: ndim == 0 case? 
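# Example keys the two patterns above are meant to match, assuming a "/"
# separator for v3, a "." separator for v2, and a hypothetical array path
# "my/array":
import re

v3_key = "data/root/my/array/c0/2/3"  # chunk index (0, 2, 3)
v2_key = "0.2.3"  # the same chunk as listed under a v2 array path
assert re.match("data/root/my/array/" + r"c\d+", v3_key)
assert re.match(r"\.".join([r"\d+"] * 1), v2_key)  # min(1, ndim) digit groups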
# get chunk keys, excluding the prefix members = self.chunk_store.list_prefix(self._data_path) # count the chunk keys return sum(1 for k in members if prog.match(k)) else: # key pattern for chunk keys - prog = re.compile(r'\.'.join([r'\d+'] * min(1, self.ndim))) + prog = re.compile(r"\.".join([r"\d+"] * min(1, self.ndim))) # count chunk keys return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) @@ -605,11 +610,11 @@ def meta_array(self): def __eq__(self, other): return ( - isinstance(other, Array) and - self.store == other.store and - self.read_only == other.read_only and - self.path == other.path and - not self._is_view + isinstance(other, Array) + and self.store == other.store + and self.read_only == other.read_only + and self.path == other.path + and not self._is_view # N.B., no need to compare other properties, should be covered by # store comparison ) @@ -664,10 +669,10 @@ def islice(self, start=None, end=None): end = self.shape[0] if not isinstance(start, int) or start < 0: - raise ValueError('start must be a nonnegative integer') + raise ValueError("start must be a nonnegative integer") if not isinstance(end, int) or end < 0: - raise ValueError('end must be a nonnegative integer') + raise ValueError("end must be a nonnegative integer") # Avoid repeatedly decompressing chunks by iterating over the chunks # in the first dimension. @@ -675,7 +680,7 @@ def islice(self, start=None, end=None): chunk = None for j in range(start, end): if j % chunk_size == 0: - chunk = self[j: j + chunk_size] + chunk = self[j : j + chunk_size] # init chunk if we start offset of chunk borders elif chunk is None: chunk_start = j - j % chunk_size @@ -691,7 +696,7 @@ def __len__(self): return self.shape[0] else: # 0-dimensional array, same error message as numpy - raise TypeError('len() of unsized object') + raise TypeError("len() of unsized object") def __getitem__(self, selection): """Retrieve data for an item or region of the array. 
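# A standalone rendering of the islice strategy patched above, assuming
# `arr` is any zarr.Array: fetch one leading-dimension chunk of rows at a
# time so each stored chunk is decompressed only once, then yield rows from
# the in-memory buffer. The function name is illustrative only.
def iter_rows(arr, start, end):
    chunk_size = arr.chunks[0]
    chunk = None
    for j in range(start, end):
        if j % chunk_size == 0 or chunk is None:
            chunk_start = j - (j % chunk_size)
            chunk = arr[chunk_start : chunk_start + chunk_size]
        yield chunk[j % chunk_size]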
@@ -960,11 +965,9 @@ def get_basic_selection(self, selection=Ellipsis, out=None, fields=None): # handle zero-dimensional arrays if self._shape == (): - return self._get_basic_selection_zd(selection=selection, out=out, - fields=fields) + return self._get_basic_selection_zd(selection=selection, out=out, fields=fields) else: - return self._get_basic_selection_nd(selection=selection, out=out, - fields=fields) + return self._get_basic_selection_nd(selection=selection, out=out, fields=fields) def _get_basic_selection_zd(self, selection, out=None, fields=None): # special case basic selection for zero-dimensional array @@ -1371,10 +1374,11 @@ def _get_selection(self, indexer, out=None, fields=None): # setup output array if out is None: - out = np.empty_like(self._meta_array, shape=out_shape, - dtype=out_dtype, order=self._order) + out = np.empty_like( + self._meta_array, shape=out_shape, dtype=out_dtype, order=self._order + ) else: - check_array_shape('out', out, out_shape) + check_array_shape("out", out, out_shape) # iterate over chunks @@ -1382,8 +1386,12 @@ def _get_selection(self, indexer, out=None, fields=None): # allow storage to get multiple items at once lchunk_coords, lchunk_selection, lout_selection = zip(*indexer) self._chunk_getitems( - lchunk_coords, lchunk_selection, out, lout_selection, - drop_axes=indexer.drop_axes, fields=fields + lchunk_coords, + lchunk_selection, + out, + lout_selection, + drop_axes=indexer.drop_axes, + fields=fields, ) if out.shape: return out @@ -1753,7 +1761,7 @@ def set_coordinate_selection(self, selection, value, fields=None): except TypeError: # Handle types like `list` or `tuple` value = np.array(value, like=self._meta_array) - if hasattr(value, 'shape') and len(value.shape) > 1: + if hasattr(value, "shape") and len(value.shape) > 1: value = value.reshape(-1) self._set_selection(indexer, value, fields=fields) @@ -1998,13 +2006,16 @@ def _set_selection(self, indexer, value, fields=None): # setting a scalar value pass else: - if not hasattr(value, 'shape'): + if not hasattr(value, "shape"): value = np.asanyarray(value, like=self._meta_array) - check_array_shape('value', value, sel_shape) + check_array_shape("value", value, sel_shape) # iterate over chunks in range - if not hasattr(self.chunk_store, "setitems") or self._synchronizer is not None \ - or any(map(lambda x: x == 0, self.shape)): + if ( + not hasattr(self.chunk_store, "setitems") + or self._synchronizer is not None + or any(map(lambda x: x == 0, self.shape)) + ): # iterative approach for chunk_coords, chunk_selection, out_selection in indexer: @@ -2044,8 +2055,7 @@ def _set_selection(self, indexer, value, fields=None): cv = chunk_value[item] chunk_values.append(cv) - self._chunk_setitems(lchunk_coords, lchunk_selection, chunk_values, - fields=fields) + self._chunk_setitems(lchunk_coords, lchunk_selection, chunk_values, fields=fields) def _process_chunk( self, @@ -2059,23 +2069,22 @@ def _process_chunk( partial_read_decode=False, ): """Take binary data from storage and fill output array""" - if (out_is_ndarray and - not fields and - is_contiguous_selection(out_selection) and - is_total_slice(chunk_selection, self._chunks) and - not self._filters and - self._dtype != object): + if ( + out_is_ndarray + and not fields + and is_contiguous_selection(out_selection) + and is_total_slice(chunk_selection, self._chunks) + and not self._filters + and self._dtype != object + ): dest = out[out_selection] # Assume that array-like objects that doesn't have a # `writeable` flag is writable. 
dest_is_writable = getattr(dest, "writeable", True) - write_direct = ( - dest_is_writable and - ( - (self._order == 'C' and dest.flags.c_contiguous) or - (self._order == 'F' and dest.flags.f_contiguous) - ) + write_direct = dest_is_writable and ( + (self._order == "C" and dest.flags.c_contiguous) + or (self._order == "F" and dest.flags.f_contiguous) ) if write_direct: @@ -2104,9 +2113,7 @@ def _process_chunk( index_selection = PartialChunkIterator(chunk_selection, self.chunks) for start, nitems, partial_out_selection in index_selection: expected_shape = [ - len( - range(*partial_out_selection[i].indices(self.chunks[0] + 1)) - ) + len(range(*partial_out_selection[i].indices(self.chunks[0] + 1))) if i < len(partial_out_selection) else dim for i, dim in enumerate(self.chunks) @@ -2143,8 +2150,9 @@ def _process_chunk( # store selected data in output out[out_selection] = tmp - def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, - drop_axes=None, fields=None): + def _chunk_getitems( + self, lchunk_coords, lchunk_selection, out, lout_selection, drop_axes=None, fields=None + ): """Obtain part or whole of chunks. Parameters @@ -2238,8 +2246,10 @@ def _chunk_getitems(self, lchunk_coords, lchunk_selection, out, lout_selection, def _chunk_setitems(self, lchunk_coords, lchunk_selection, values, fields=None): ckeys = map(self._chunk_key, lchunk_coords) - cdatas = {key: self._process_for_setitem(key, sel, val, fields=fields) - for key, sel, val in zip(ckeys, lchunk_selection, values)} + cdatas = { + key: self._process_for_setitem(key, sel, val, fields=fields) + for key, sel, val in zip(ckeys, lchunk_selection, values) + } to_store = {} if not self.write_empty_chunks: empty_chunks = {k: v for k, v in cdatas.items() if all_equal(self.fill_value, v)} @@ -2291,8 +2301,7 @@ def _chunk_setitem(self, chunk_coords, chunk_selection, value, fields=None): lock = self._synchronizer[ckey] with lock: - self._chunk_setitem_nosync(chunk_coords, chunk_selection, value, - fields=fields) + self._chunk_setitem_nosync(chunk_coords, chunk_selection, value, fields=fields) def _chunk_setitem_nosync(self, chunk_coords, chunk_selection, value, fields=None): ckey = self._chunk_key(chunk_coords) @@ -2354,7 +2363,7 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): # decode chunk chunk = self._decode_chunk(cdata) if not chunk.flags.writeable: - chunk = chunk.copy(order='K') + chunk = chunk.copy(order="K") # modify if fields: @@ -2372,8 +2381,12 @@ def _chunk_key(self, chunk_coords): # where P = self._key_prefix, i, j, ... = chunk_coords # e.g. 
c0/2/3 for 3d array with chunk index (0, 2, 3) # https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/core/v3.0.html#regular-grids - return ("data/root/" + self._key_prefix + - "c" + self._dimension_separator.join(map(str, chunk_coords))) + return ( + "data/root/" + + self._key_prefix + + "c" + + self._dimension_separator.join(map(str, chunk_coords)) + ) else: return self._key_prefix + self._dimension_separator.join(map(str, chunk_coords)) @@ -2382,8 +2395,7 @@ def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): if self._compressor: # only decode requested items if ( - all(x is not None for x in [start, nitems]) - and self._compressor.codec_id == "blosc" + all(x is not None for x in [start, nitems]) and self._compressor.codec_id == "blosc" ) and hasattr(self._compressor, "decode_partial"): chunk = self._compressor.decode_partial(cdata, start, nitems) else: @@ -2408,10 +2420,10 @@ def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): # codec in the filter chain, i.e., a filter that converts from object # array to something else during encoding, and converts back to object # array during decoding. - raise RuntimeError('cannot read object array without object codec') + raise RuntimeError("cannot read object array without object codec") # ensure correct chunk shape - chunk = chunk.reshape(-1, order='A') + chunk = chunk.reshape(-1, order="A") chunk = chunk.reshape(expected_shape or self._chunks, order=self._order) return chunk @@ -2425,7 +2437,7 @@ def _encode_chunk(self, chunk): # check object encoding if ensure_ndarray_like(chunk).dtype == object: - raise RuntimeError('cannot write object array without object codec') + raise RuntimeError("cannot write object array without object codec") # compress if self._compressor: @@ -2434,24 +2446,21 @@ def _encode_chunk(self, chunk): cdata = chunk # ensure in-memory data is immutable and easy to compare - if ( - isinstance(self.chunk_store, KVStore) - or isinstance(self._chunk_store, KVStore) - ): + if isinstance(self.chunk_store, KVStore) or isinstance(self._chunk_store, KVStore): cdata = ensure_bytes(cdata) return cdata def __repr__(self): t = type(self) - r = '<{}.{}'.format(t.__module__, t.__name__) + r = "<{}.{}".format(t.__module__, t.__name__) if self.name: - r += ' %r' % self.name - r += ' %s' % str(self.shape) - r += ' %s' % self.dtype + r += " %r" % self.name + r += " %s" % str(self.shape) + r += " %s" % self.dtype if self._read_only: - r += ' read-only' - r += '>' + r += " read-only" + r += ">" return r @property @@ -2483,13 +2492,12 @@ def info_items(self): return self._synchronized_op(self._info_items_nosync) def _info_items_nosync(self): - def typestr(o): - return '{}.{}'.format(type(o).__module__, type(o).__name__) + return "{}.{}".format(type(o).__module__, type(o).__name__) def bytestr(n): if n > 2**10: - return '{} ({})'.format(n, human_readable_size(n)) + return "{} ({})".format(n, human_readable_size(n)) else: return str(n) @@ -2497,41 +2505,39 @@ def bytestr(n): # basic info if self.name is not None: - items += [('Name', self.name)] + items += [("Name", self.name)] items += [ - ('Type', typestr(self)), - ('Data type', '%s' % self.dtype), - ('Shape', str(self.shape)), - ('Chunk shape', str(self.chunks)), - ('Order', self.order), - ('Read-only', str(self.read_only)), + ("Type", typestr(self)), + ("Data type", "%s" % self.dtype), + ("Shape", str(self.shape)), + ("Chunk shape", str(self.chunks)), + ("Order", self.order), + ("Read-only", str(self.read_only)), ] # filters 
if self.filters: for i, f in enumerate(self.filters): - items += [('Filter [%s]' % i, repr(f))] + items += [("Filter [%s]" % i, repr(f))] # compressor - items += [('Compressor', repr(self.compressor))] + items += [("Compressor", repr(self.compressor))] # synchronizer if self._synchronizer is not None: - items += [('Synchronizer type', typestr(self._synchronizer))] + items += [("Synchronizer type", typestr(self._synchronizer))] # storage info - items += [('Store type', typestr(self._store))] + items += [("Store type", typestr(self._store))] if self._chunk_store is not None: - items += [('Chunk store type', typestr(self._chunk_store))] - items += [('No. bytes', bytestr(self.nbytes))] + items += [("Chunk store type", typestr(self._chunk_store))] + items += [("No. bytes", bytestr(self.nbytes))] if self.nbytes_stored > 0: items += [ - ('No. bytes stored', bytestr(self.nbytes_stored)), - ('Storage ratio', '%.1f' % (self.nbytes / self.nbytes_stored)), + ("No. bytes stored", bytestr(self.nbytes_stored)), + ("Storage ratio", "%.1f" % (self.nbytes / self.nbytes_stored)), ] - items += [ - ('Chunks initialized', '{}/{}'.format(self.nchunks_initialized, self.nchunks)) - ] + items += [("Chunks initialized", "{}/{}".format(self.nchunks_initialized, self.nchunks))] return items @@ -2590,7 +2596,7 @@ def hexdigest(self, hashname="sha1"): # This is a bytes object on Python 3 and we want a str. if type(checksum) is not str: - checksum = checksum.decode('utf8') + checksum = checksum.decode("utf8") return checksum @@ -2682,8 +2688,7 @@ def _resize_nosync(self, *args): # determine the new number and arrangement of chunks chunks = self._chunks - new_cdata_shape = tuple(math.ceil(s / c) - for s, c in zip(new_shape, chunks)) + new_cdata_shape = tuple(math.ceil(s / c) for s, c in zip(new_shape, chunks)) # remove any chunks not within range # The idea is that, along each dimension, @@ -2752,18 +2757,18 @@ def append(self, data, axis=0): def _append_nosync(self, data, axis=0): # ensure data is array-like - if not hasattr(data, 'shape'): + if not hasattr(data, "shape"): data = np.asanyarray(data, like=self._meta_array) # ensure shapes are compatible for non-append dimensions - self_shape_preserved = tuple(s for i, s in enumerate(self._shape) - if i != axis) - data_shape_preserved = tuple(s for i, s in enumerate(data.shape) - if i != axis) + self_shape_preserved = tuple(s for i, s in enumerate(self._shape) if i != axis) + data_shape_preserved = tuple(s for i, s in enumerate(data.shape) if i != axis) if self_shape_preserved != data_shape_preserved: - raise ValueError('shape of data to append is not compatible with the array; ' - 'all dimensions must match except for the dimension being ' - 'appended') + raise ValueError( + "shape of data to append is not compatible with the array; " + "all dimensions must match except for the dimension being " + "appended" + ) # remember old shape old_shape = self._shape @@ -2787,9 +2792,16 @@ def _append_nosync(self, data, axis=0): return new_shape - def view(self, shape=None, chunks=None, dtype=None, - fill_value=None, filters=None, read_only=None, - synchronizer=None): + def view( + self, + shape=None, + chunks=None, + dtype=None, + fill_value=None, + filters=None, + read_only=None, + synchronizer=None, + ): """Return an array sharing the same data. 
Parameters @@ -2904,8 +2916,15 @@ def view(self, shape=None, chunks=None, dtype=None, read_only = self._read_only if synchronizer is None: synchronizer = self._synchronizer - a = Array(store=store, path=path, chunk_store=chunk_store, read_only=read_only, - synchronizer=synchronizer, cache_metadata=True, zarr_version=self._version) + a = Array( + store=store, + path=path, + chunk_store=chunk_store, + read_only=read_only, + synchronizer=synchronizer, + cache_metadata=True, + zarr_version=self._version, + ) a._is_view = True # allow override of some properties diff --git a/zarr/creation.py b/zarr/creation.py index dc8b8a157d..726d0b5932 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -11,18 +11,42 @@ ContainsArrayError, ContainsGroupError, ) -from zarr.storage import (contains_array, contains_group, default_compressor, - init_array, normalize_storage_path, - normalize_store_arg) +from zarr.storage import ( + contains_array, + contains_group, + default_compressor, + init_array, + normalize_storage_path, + normalize_store_arg, +) from zarr.util import normalize_dimension_separator -def create(shape, chunks=True, dtype=None, compressor='default', - fill_value: Optional[int] = 0, order='C', store=None, synchronizer=None, - overwrite=False, path=None, chunk_store=None, filters=None, - cache_metadata=True, cache_attrs=True, read_only=False, - object_codec=None, dimension_separator=None, write_empty_chunks=True, - *, zarr_version=None, meta_array=None, storage_transformers=(), **kwargs): +def create( + shape, + chunks=True, + dtype=None, + compressor="default", + fill_value: Optional[int] = 0, + order="C", + store=None, + synchronizer=None, + overwrite=False, + path=None, + chunk_store=None, + filters=None, + cache_metadata=True, + cache_attrs=True, + read_only=False, + object_codec=None, + dimension_separator=None, + write_empty_chunks=True, + *, + zarr_version=None, + meta_array=None, + storage_transformers=(), + **kwargs, +): """Create an array. 
Parameters @@ -150,11 +174,11 @@ def create(shape, chunks=True, dtype=None, compressor='default', """ if zarr_version is None and store is None: - zarr_version = getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(chunk_store, "_store_version", DEFAULT_ZARR_VERSION) # handle polymorphic store arg store = normalize_store_arg(store, zarr_version=zarr_version, mode="w") - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) # API compatibility with h5py compressor, fill_value = _kwargs_compat(compressor, fill_value, kwargs) @@ -168,22 +192,43 @@ def create(shape, chunks=True, dtype=None, compressor='default', raise ValueError( f"Specified dimension_separator: {dimension_separator}" f"conflicts with store's separator: " - f"{store_separator}") + f"{store_separator}" + ) dimension_separator = normalize_dimension_separator(dimension_separator) if zarr_version > 2 and path is None: - path = '/' + path = "/" # initialize array metadata - init_array(store, shape=shape, chunks=chunks, dtype=dtype, compressor=compressor, - fill_value=fill_value, order=order, overwrite=overwrite, path=path, - chunk_store=chunk_store, filters=filters, object_codec=object_codec, - dimension_separator=dimension_separator, storage_transformers=storage_transformers) + init_array( + store, + shape=shape, + chunks=chunks, + dtype=dtype, + compressor=compressor, + fill_value=fill_value, + order=order, + overwrite=overwrite, + path=path, + chunk_store=chunk_store, + filters=filters, + object_codec=object_codec, + dimension_separator=dimension_separator, + storage_transformers=storage_transformers, + ) # instantiate array - z = Array(store, path=path, chunk_store=chunk_store, synchronizer=synchronizer, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, read_only=read_only, - write_empty_chunks=write_empty_chunks, meta_array=meta_array) + z = Array( + store, + path=path, + chunk_store=chunk_store, + synchronizer=synchronizer, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + read_only=read_only, + write_empty_chunks=write_empty_chunks, + meta_array=meta_array, + ) return z @@ -193,7 +238,7 @@ def _kwargs_compat(compressor, fill_value, kwargs): # to be compatible with h5py, as well as backwards-compatible with Zarr # 1.x, accept 'compression' and 'compression_opts' keyword arguments - if compressor != 'default': + if compressor != "default": # 'compressor' overrides 'compression' if "compression" in kwargs: warn( @@ -208,14 +253,14 @@ def _kwargs_compat(compressor, fill_value, kwargs): ) del kwargs["compression_opts"] - elif 'compression' in kwargs: - compression = kwargs.pop('compression') - compression_opts = kwargs.pop('compression_opts', None) + elif "compression" in kwargs: + compression = kwargs.pop("compression") + compression_opts = kwargs.pop("compression_opts", None) - if compression is None or compression == 'none': + if compression is None or compression == "none": compressor = None - elif compression == 'default': + elif compression == "default": compressor = default_compressor elif isinstance(compression, str): @@ -233,21 +278,21 @@ def _kwargs_compat(compressor, fill_value, kwargs): compressor = codec_cls(compression_opts) # be lenient here if user gives compressor as 'compression' - elif hasattr(compression, 'get_config'): + elif hasattr(compression, "get_config"): compressor = compression else: - raise ValueError('bad value for compression: %r' % compression) + raise 
ValueError("bad value for compression: %r" % compression) # handle 'fillvalue' - if 'fillvalue' in kwargs: + if "fillvalue" in kwargs: # to be compatible with h5py, accept 'fillvalue' instead of # 'fill_value' - fill_value = kwargs.pop('fillvalue') + fill_value = kwargs.pop("fillvalue") # ignore other keyword arguments for k in kwargs: - warn('ignoring keyword argument %r' % k) + warn("ignoring keyword argument %r" % k) return compressor, fill_value @@ -334,16 +379,13 @@ def _get_shape_chunks(a): shape = None chunks = None - if hasattr(a, 'shape') and \ - isinstance(a.shape, tuple): + if hasattr(a, "shape") and isinstance(a.shape, tuple): shape = a.shape - if hasattr(a, 'chunks') and \ - isinstance(a.chunks, tuple) and \ - (len(a.chunks) == len(a.shape)): + if hasattr(a, "chunks") and isinstance(a.chunks, tuple) and (len(a.chunks) == len(a.shape)): chunks = a.chunks - elif hasattr(a, 'chunklen'): + elif hasattr(a, "chunklen"): # bcolz carray chunks = (a.chunklen,) + a.shape[1:] @@ -368,27 +410,27 @@ def array(data, **kwargs): """ # ensure data is array-like - if not hasattr(data, 'shape') or not hasattr(data, 'dtype'): + if not hasattr(data, "shape") or not hasattr(data, "dtype"): data = np.asanyarray(data) # setup dtype - kw_dtype = kwargs.get('dtype') + kw_dtype = kwargs.get("dtype") if kw_dtype is None: - kwargs['dtype'] = data.dtype + kwargs["dtype"] = data.dtype else: - kwargs['dtype'] = kw_dtype + kwargs["dtype"] = kw_dtype # setup shape and chunks data_shape, data_chunks = _get_shape_chunks(data) - kwargs['shape'] = data_shape - kw_chunks = kwargs.get('chunks') + kwargs["shape"] = data_shape + kw_chunks = kwargs.get("chunks") if kw_chunks is None: - kwargs['chunks'] = data_chunks + kwargs["chunks"] = data_chunks else: - kwargs['chunks'] = kw_chunks + kwargs["chunks"] = kw_chunks # pop read-only to apply after storing the data - read_only = kwargs.pop('read_only', False) + read_only = kwargs.pop("read_only", False) # instantiate array z = create(**kwargs) @@ -425,7 +467,7 @@ def open_array( zarr_version=None, dimension_separator=None, meta_array=None, - **kwargs + **kwargs, ): """Open an array using file-mode-like semantics. @@ -539,27 +581,27 @@ def open_array( # a : read/write if exists, create otherwise (default) if zarr_version is None and store is None: - zarr_version = getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(chunk_store, "_store_version", DEFAULT_ZARR_VERSION) # handle polymorphic store arg - store = normalize_store_arg(store, storage_options=storage_options, - mode=mode, zarr_version=zarr_version) - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + store = normalize_store_arg( + store, storage_options=storage_options, mode=mode, zarr_version=zarr_version + ) + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) if chunk_store is not None: - chunk_store = normalize_store_arg(chunk_store, - storage_options=storage_options, - mode=mode, - zarr_version=zarr_version) + chunk_store = normalize_store_arg( + chunk_store, storage_options=storage_options, mode=mode, zarr_version=zarr_version + ) # respect the dimension separator specified in a store, if present if dimension_separator is None: - if hasattr(store, '_dimension_separator'): + if hasattr(store, "_dimension_separator"): dimension_separator = store._dimension_separator else: - dimension_separator = '.' if zarr_version == 2 else '/' + dimension_separator = "." 
if zarr_version == 2 else "/" if zarr_version == 3 and path is None: - path = 'array' # TODO: raise ValueError instead? + path = "array" # TODO: raise ValueError instead? path = normalize_storage_path(path) @@ -572,48 +614,84 @@ def open_array( # ensure store is initialized - if mode in ['r', 'r+']: + if mode in ["r", "r+"]: if not contains_array(store, path=path): if contains_group(store, path=path): raise ContainsGroupError(path) raise ArrayNotFoundError(path) - elif mode == 'w': - init_array(store, shape=shape, chunks=chunks, dtype=dtype, - compressor=compressor, fill_value=fill_value, - order=order, filters=filters, overwrite=True, path=path, - object_codec=object_codec, chunk_store=chunk_store, - dimension_separator=dimension_separator) - - elif mode == 'a': + elif mode == "w": + init_array( + store, + shape=shape, + chunks=chunks, + dtype=dtype, + compressor=compressor, + fill_value=fill_value, + order=order, + filters=filters, + overwrite=True, + path=path, + object_codec=object_codec, + chunk_store=chunk_store, + dimension_separator=dimension_separator, + ) + + elif mode == "a": if not contains_array(store, path=path): if contains_group(store, path=path): raise ContainsGroupError(path) - init_array(store, shape=shape, chunks=chunks, dtype=dtype, - compressor=compressor, fill_value=fill_value, - order=order, filters=filters, path=path, - object_codec=object_codec, chunk_store=chunk_store, - dimension_separator=dimension_separator) + init_array( + store, + shape=shape, + chunks=chunks, + dtype=dtype, + compressor=compressor, + fill_value=fill_value, + order=order, + filters=filters, + path=path, + object_codec=object_codec, + chunk_store=chunk_store, + dimension_separator=dimension_separator, + ) - elif mode in ['w-', 'x']: + elif mode in ["w-", "x"]: if contains_group(store, path=path): raise ContainsGroupError(path) elif contains_array(store, path=path): raise ContainsArrayError(path) else: - init_array(store, shape=shape, chunks=chunks, dtype=dtype, - compressor=compressor, fill_value=fill_value, - order=order, filters=filters, path=path, - object_codec=object_codec, chunk_store=chunk_store, - dimension_separator=dimension_separator) + init_array( + store, + shape=shape, + chunks=chunks, + dtype=dtype, + compressor=compressor, + fill_value=fill_value, + order=order, + filters=filters, + path=path, + object_codec=object_codec, + chunk_store=chunk_store, + dimension_separator=dimension_separator, + ) # determine read only status - read_only = mode == 'r' + read_only = mode == "r" # instantiate array - z = Array(store, read_only=read_only, synchronizer=synchronizer, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, path=path, - chunk_store=chunk_store, write_empty_chunks=write_empty_chunks, meta_array=meta_array) + z = Array( + store, + read_only=read_only, + synchronizer=synchronizer, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + path=path, + chunk_store=chunk_store, + write_empty_chunks=write_empty_chunks, + meta_array=meta_array, + ) return z @@ -622,21 +700,21 @@ def _like_args(a, kwargs): shape, chunks = _get_shape_chunks(a) if shape is not None: - kwargs.setdefault('shape', shape) + kwargs.setdefault("shape", shape) if chunks is not None: - kwargs.setdefault('chunks', chunks) + kwargs.setdefault("chunks", chunks) - if hasattr(a, 'dtype'): - kwargs.setdefault('dtype', a.dtype) + if hasattr(a, "dtype"): + kwargs.setdefault("dtype", a.dtype) if isinstance(a, Array): - kwargs.setdefault('compressor', a.compressor) - kwargs.setdefault('order', a.order) - 
kwargs.setdefault('filters', a.filters) - kwargs.setdefault('zarr_version', a._version) + kwargs.setdefault("compressor", a.compressor) + kwargs.setdefault("order", a.order) + kwargs.setdefault("filters", a.filters) + kwargs.setdefault("zarr_version", a._version) else: - kwargs.setdefault('compressor', 'default') - kwargs.setdefault('order', 'C') + kwargs.setdefault("compressor", "default") + kwargs.setdefault("order", "C") def empty_like(a, **kwargs): @@ -661,7 +739,7 @@ def full_like(a, **kwargs): """Create a filled array like `a`.""" _like_args(a, kwargs) if isinstance(a, Array): - kwargs.setdefault('fill_value', a.fill_value) + kwargs.setdefault("fill_value", a.fill_value) return full(**kwargs) @@ -669,5 +747,5 @@ def open_like(a, path, **kwargs): """Open a persistent array like `a`.""" _like_args(a, kwargs) if isinstance(a, Array): - kwargs.setdefault('fill_value', a.fill_value) + kwargs.setdefault("fill_value", a.fill_value) return open_array(path, **kwargs) diff --git a/zarr/errors.py b/zarr/errors.py index 808cbe99a4..30c9b13d39 100644 --- a/zarr/errors.py +++ b/zarr/errors.py @@ -67,8 +67,9 @@ def __init__(self): def err_too_many_indices(selection, shape): - raise IndexError('too many indices for array; expected {}, got {}' - .format(len(shape), len(selection))) + raise IndexError( + "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) + ) class VindexInvalidSelectionError(_BaseZarrIndexError): diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 18e7ac7863..c7cc5c6fe2 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -3,12 +3,27 @@ import numpy as np -from zarr._storage.store import (_get_metadata_suffix, data_root, meta_root, - DEFAULT_ZARR_VERSION, assert_zarr_v3_api_available) +from zarr._storage.store import ( + _get_metadata_suffix, + data_root, + meta_root, + DEFAULT_ZARR_VERSION, + assert_zarr_v3_api_available, +) from zarr.attrs import Attributes from zarr.core import Array -from zarr.creation import (array, create, empty, empty_like, full, full_like, - ones, ones_like, zeros, zeros_like) +from zarr.creation import ( + array, + create, + empty, + empty_like, + full, + full_like, + ones, + ones_like, + zeros, + zeros_like, +) from zarr.errors import ( ContainsArrayError, ContainsGroupError, @@ -120,12 +135,21 @@ class Group(MutableMapping): """ - def __init__(self, store, path=None, read_only=False, chunk_store=None, - cache_attrs=True, synchronizer=None, zarr_version=None, *, - meta_array=None): + def __init__( + self, + store, + path=None, + read_only=False, + chunk_store=None, + cache_attrs=True, + synchronizer=None, + zarr_version=None, + *, + meta_array=None + ): store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) if zarr_version is None: - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) if zarr_version != 2: assert_zarr_v3_api_available() @@ -136,9 +160,9 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, self._chunk_store = chunk_store self._path = normalize_storage_path(path) if self._path: - self._key_prefix = self._path + '/' + self._key_prefix = self._path + "/" else: - self._key_prefix = '' + self._key_prefix = "" self._read_only = read_only self._synchronizer = synchronizer if meta_array is not None: @@ -182,8 +206,9 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None, # Note: mkey doesn't actually exist for implicit groups, but the # object can still be 
created. akey = mkey - self._attrs = Attributes(store, key=akey, read_only=read_only, - cache=cache_attrs, synchronizer=synchronizer) + self._attrs = Attributes( + store, key=akey, read_only=read_only, cache=cache_attrs, synchronizer=synchronizer + ) # setup info self._info = InfoReporter(self) @@ -204,15 +229,15 @@ def name(self): if self._path: # follow h5py convention: add leading slash name = self._path - if name[0] != '/': - name = '/' + name + if name[0] != "/": + name = "/" + name return name - return '/' + return "/" @property def basename(self): """Final component of name.""" - return self.name.split('/')[-1] + return self.name.split("/")[-1] @property def read_only(self): @@ -252,10 +277,10 @@ def meta_array(self): def __eq__(self, other): return ( - isinstance(other, Group) and - self._store == other.store and - self._read_only == other.read_only and - self._path == other.path + isinstance(other, Group) + and self._store == other.store + and self._read_only == other.read_only + and self._path == other.path # N.B., no need to compare attributes, should be covered by # store comparison ) @@ -279,11 +304,10 @@ def __iter__(self): quux """ - if getattr(self._store, '_store_version', 2) == 2: + if getattr(self._store, "_store_version", 2) == 2: for key in sorted(listdir(self._store, self._path)): path = self._key_prefix + key - if (contains_array(self._store, path) or - contains_group(self._store, path)): + if contains_array(self._store, path) or contains_group(self._store, path): yield key else: # TODO: Should this iterate over data folders and/or metadata @@ -296,15 +320,15 @@ def __iter__(self): # yield any groups or arrays sfx = self._metadata_key_suffix for key in keys: - len_suffix = len('.group') + len(sfx) # same for .array - if key.endswith(('.group' + sfx, '.array' + sfx)): + len_suffix = len(".group") + len(sfx) # same for .array + if key.endswith((".group" + sfx, ".array" + sfx)): yield key[name_start:-len_suffix] # also yield any implicit groups for prefix in prefixes: - prefix = prefix.rstrip('/') + prefix = prefix.rstrip("/") # only implicit if there is no .group.sfx file - if not prefix + '.group' + sfx in self._store: + if prefix + ".group" + sfx not in self._store: yield prefix[name_start:] # Note: omit data/root/ to avoid duplicate listings @@ -316,12 +340,12 @@ def __len__(self): def __repr__(self): t = type(self) - r = '<{}.{}'.format(t.__module__, t.__name__) + r = "<{}.{}".format(t.__module__, t.__name__) if self.name: - r += ' %r' % self.name + r += " %r" % self.name if self._read_only: - r += ' read-only' - r += '>' + r += " read-only" + r += ">" return r def __enter__(self): @@ -333,39 +357,38 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.store.close() def info_items(self): - def typestr(o): - return '{}.{}'.format(type(o).__module__, type(o).__name__) + return "{}.{}".format(type(o).__module__, type(o).__name__) items = [] # basic info if self.name is not None: - items += [('Name', self.name)] + items += [("Name", self.name)] items += [ - ('Type', typestr(self)), - ('Read-only', str(self.read_only)), + ("Type", typestr(self)), + ("Read-only", str(self.read_only)), ] # synchronizer if self._synchronizer is not None: - items += [('Synchronizer type', typestr(self._synchronizer))] + items += [("Synchronizer type", typestr(self._synchronizer))] # storage info - items += [('Store type', typestr(self._store))] + items += [("Store type", typestr(self._store))] if self._chunk_store is not None: - items += [('Chunk store type', typestr(self._chunk_store))] 
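# ---------------------------------------------------------------------------
# Illustrative usage (not part of the patch): the v2 branch of Group.__iter__
# reformatted above yields only member names whose paths resolve to an array
# or a group, matching the docstring sample output.
import zarr

g = zarr.group()
g.create_group("foo")
g.zeros("baz", shape=(10,), chunks=(5,))
assert sorted(g) == ["baz", "foo"]
# ---------------------------------------------------------------------------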
+ items += [("Chunk store type", typestr(self._chunk_store))] # members - items += [('No. members', len(self))] + items += [("No. members", len(self))] array_keys = sorted(self.array_keys()) group_keys = sorted(self.group_keys()) - items += [('No. arrays', len(array_keys))] - items += [('No. groups', len(group_keys))] + items += [("No. arrays", len(array_keys))] + items += [("No. groups", len(group_keys))] if array_keys: - items += [('Arrays', ', '.join(array_keys))] + items += [("Arrays", ", ".join(array_keys))] if group_keys: - items += [('Groups', ', '.join(group_keys))] + items += [("Groups", ", ".join(group_keys))] return items @@ -385,7 +408,7 @@ def __setstate__(self, state): self.__init__(**state) def _item_path(self, item): - absolute = isinstance(item, str) and item and item[0] == '/' + absolute = isinstance(item, str) and item and item[0] == "/" path = normalize_storage_path(item) if not absolute and self._path: path = self._key_prefix + path @@ -409,8 +432,9 @@ def __contains__(self, item): """ path = self._item_path(item) - return contains_array(self._store, path) or \ - contains_group(self._store, path, explicit_only=False) + return contains_array(self._store, path) or contains_group( + self._store, path, explicit_only=False + ) def __getitem__(self, item): """Obtain a group member. @@ -435,23 +459,41 @@ def __getitem__(self, item): """ path = self._item_path(item) if contains_array(self._store, path): - return Array(self._store, read_only=self._read_only, path=path, - chunk_store=self._chunk_store, - synchronizer=self._synchronizer, cache_attrs=self.attrs.cache, - zarr_version=self._version, meta_array=self._meta_array) + return Array( + self._store, + read_only=self._read_only, + path=path, + chunk_store=self._chunk_store, + synchronizer=self._synchronizer, + cache_attrs=self.attrs.cache, + zarr_version=self._version, + meta_array=self._meta_array, + ) elif contains_group(self._store, path, explicit_only=True): - return Group(self._store, read_only=self._read_only, path=path, - chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, zarr_version=self._version, - meta_array=self._meta_array) + return Group( + self._store, + read_only=self._read_only, + path=path, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version, + meta_array=self._meta_array, + ) elif self._version == 3: - implicit_group = meta_root + path + '/' + implicit_group = meta_root + path + "/" # non-empty folder in the metadata path implies an implicit group if self._store.list_prefix(implicit_group): - return Group(self._store, read_only=self._read_only, path=path, - chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, zarr_version=self._version, - meta_array=self._meta_array) + return Group( + self._store, + read_only=self._read_only, + path=path, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version, + meta_array=self._meta_array, + ) else: raise KeyError(item) else: @@ -465,8 +507,9 @@ def __delitem__(self, item): def _delitem_nosync(self, item): path = self._item_path(item) - if contains_array(self._store, path) or \ - contains_group(self._store, path, explicit_only=False): + if contains_array(self._store, path) or contains_group( + self._store, path, explicit_only=False + ): rmdir(self._store, path) else: raise KeyError(item) @@ -510,13 +553,13 @@ def group_keys(self): yield 
key else: dir_name = meta_root + self._path - group_sfx = '.group' + self._metadata_key_suffix + group_sfx = ".group" + self._metadata_key_suffix # The fact that we call sorted means this can't be a streaming generator. # The keys are already in memory. all_keys = sorted(listdir(self._store, dir_name)) for key in all_keys: if key.endswith(group_sfx): - key = key[:-len(group_sfx)] + key = key[: -len(group_sfx)] if key in all_keys: # otherwise we will double count this group continue @@ -555,7 +598,8 @@ def groups(self): chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, - zarr_version=self._version) + zarr_version=self._version, + ) else: for key in self.group_keys(): @@ -567,7 +611,8 @@ def groups(self): chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, - zarr_version=self._version) + zarr_version=self._version, + ) def array_keys(self, recurse=False): """Return an iterator over member names for arrays only. @@ -591,9 +636,7 @@ def array_keys(self, recurse=False): ['baz', 'quux'] """ - return self._array_iter(keys_only=True, - method='array_keys', - recurse=recurse) + return self._array_iter(keys_only=True, method="array_keys", recurse=recurse) def arrays(self, recurse=False): """Return an iterator over (name, value) pairs for arrays only. @@ -619,9 +662,7 @@ def arrays(self, recurse=False): quux """ - return self._array_iter(keys_only=False, - method='arrays', - recurse=recurse) + return self._array_iter(keys_only=False, method="arrays", recurse=recurse) def _array_iter(self, keys_only, method, recurse): if self._version == 2: @@ -635,12 +676,12 @@ def _array_iter(self, keys_only, method, recurse): yield from getattr(group, method)(recurse=recurse) else: dir_name = meta_root + self._path - array_sfx = '.array' + self._metadata_key_suffix - group_sfx = '.group' + self._metadata_key_suffix + array_sfx = ".array" + self._metadata_key_suffix + group_sfx = ".group" + self._metadata_key_suffix for key in sorted(listdir(self._store, dir_name)): if key.endswith(array_sfx): - key = key[:-len(array_sfx)] + key = key[: -len(array_sfx)] _key = key.rstrip("/") yield _key if keys_only else (_key, self[key]) @@ -794,8 +835,7 @@ def visit(self, func): return self.visitvalues(lambda o: func(o.name[base_len:].lstrip("/"))) def visitkeys(self, func): - """An alias for :py:meth:`~Group.visit`. 
- """ + """An alias for :py:meth:`~Group.visit`.""" return self.visit(func) @@ -924,12 +964,17 @@ def _create_group_nosync(self, name, overwrite=False): path = self._item_path(name) # create terminal group - init_group(self._store, path=path, chunk_store=self._chunk_store, - overwrite=overwrite) - - return Group(self._store, path=path, read_only=self._read_only, - chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, zarr_version=self._version) + init_group(self._store, path=path, chunk_store=self._chunk_store, overwrite=overwrite) + + return Group( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version, + ) def create_groups(self, *names, **kwargs): """Convenience method to create multiple groups in a single call.""" @@ -960,20 +1005,26 @@ def require_group(self, name, overwrite=False): """ - return self._write_op(self._require_group_nosync, name, - overwrite=overwrite) + return self._write_op(self._require_group_nosync, name, overwrite=overwrite) def _require_group_nosync(self, name, overwrite=False): path = self._item_path(name) # create terminal group if necessary if not contains_group(self._store, path): - init_group(store=self._store, path=path, chunk_store=self._chunk_store, - overwrite=overwrite) + init_group( + store=self._store, path=path, chunk_store=self._chunk_store, overwrite=overwrite + ) - return Group(self._store, path=path, read_only=self._read_only, - chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, zarr_version=self._version) + return Group( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version, + ) def require_groups(self, *names): """Convenience method to require multiple groups in a single call.""" @@ -1048,17 +1099,15 @@ def _create_dataset_nosync(self, name, data=None, **kwargs): path = self._item_path(name) # determine synchronizer - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) # create array if data is None: - a = create(store=self._store, path=path, chunk_store=self._chunk_store, - **kwargs) + a = create(store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) else: - a = array(data, store=self._store, path=path, chunk_store=self._chunk_store, - **kwargs) + a = array(data, store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) return a @@ -1084,11 +1133,11 @@ def require_dataset(self, name, shape, dtype=None, exact=False, **kwargs): """ - return self._write_op(self._require_dataset_nosync, name, shape=shape, - dtype=dtype, exact=exact, **kwargs) + return self._write_op( + self._require_dataset_nosync, name, shape=shape, dtype=dtype, exact=exact, **kwargs + ) - def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, - **kwargs): + def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, **kwargs): path = self._item_path(name) @@ -1096,31 +1145,37 @@ def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, # array already exists at path, validate that it is the right shape and type - synchronizer = kwargs.get('synchronizer', self._synchronizer) - cache_metadata = 
kwargs.get('cache_metadata', True) - cache_attrs = kwargs.get('cache_attrs', self.attrs.cache) - a = Array(self._store, path=path, read_only=self._read_only, - chunk_store=self._chunk_store, synchronizer=synchronizer, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, - meta_array=self._meta_array) + synchronizer = kwargs.get("synchronizer", self._synchronizer) + cache_metadata = kwargs.get("cache_metadata", True) + cache_attrs = kwargs.get("cache_attrs", self.attrs.cache) + a = Array( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + synchronizer=synchronizer, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + meta_array=self._meta_array, + ) shape = normalize_shape(shape) if shape != a.shape: - raise TypeError('shape do not match existing array; expected {}, got {}' - .format(a.shape, shape)) + raise TypeError( + "shape do not match existing array; expected {}, got {}".format(a.shape, shape) + ) dtype = np.dtype(dtype) if exact: if dtype != a.dtype: - raise TypeError('dtypes do not match exactly; expected {}, got {}' - .format(a.dtype, dtype)) + raise TypeError( + "dtypes do not match exactly; expected {}, got {}".format(a.dtype, dtype) + ) else: if not np.can_cast(dtype, a.dtype): - raise TypeError('dtypes ({}, {}) cannot be safely cast' - .format(dtype, a.dtype)) + raise TypeError("dtypes ({}, {}) cannot be safely cast".format(dtype, a.dtype)) return a else: - return self._create_dataset_nosync(name, shape=shape, dtype=dtype, - **kwargs) + return self._create_dataset_nosync(name, shape=shape, dtype=dtype, **kwargs) def create(self, name, **kwargs): """Create an array. Keyword arguments as per @@ -1129,10 +1184,9 @@ def create(self, name, **kwargs): def _create_nosync(self, name, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return create(store=self._store, path=path, chunk_store=self._chunk_store, - **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return create(store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) def empty(self, name, **kwargs): """Create an array. Keyword arguments as per @@ -1141,10 +1195,9 @@ def empty(self, name, **kwargs): def _empty_nosync(self, name, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return empty(store=self._store, path=path, chunk_store=self._chunk_store, - **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return empty(store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) def zeros(self, name, **kwargs): """Create an array. Keyword arguments as per @@ -1153,10 +1206,9 @@ def zeros(self, name, **kwargs): def _zeros_nosync(self, name, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return zeros(store=self._store, path=path, chunk_store=self._chunk_store, - **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return zeros(store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) def ones(self, name, **kwargs): """Create an array. 
Keyword arguments as per @@ -1165,8 +1217,8 @@ def ones(self, name, **kwargs): def _ones_nosync(self, name, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) return ones(store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) def full(self, name, fill_value, **kwargs): @@ -1176,10 +1228,15 @@ def full(self, name, fill_value, **kwargs): def _full_nosync(self, name, fill_value, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return full(store=self._store, path=path, chunk_store=self._chunk_store, - fill_value=fill_value, **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return full( + store=self._store, + path=path, + chunk_store=self._chunk_store, + fill_value=fill_value, + **kwargs + ) def array(self, name, data, **kwargs): """Create an array. Keyword arguments as per @@ -1188,10 +1245,9 @@ def array(self, name, data, **kwargs): def _array_nosync(self, name, data, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return array(data, store=self._store, path=path, chunk_store=self._chunk_store, - **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return array(data, store=self._store, path=path, chunk_store=self._chunk_store, **kwargs) def empty_like(self, name, data, **kwargs): """Create an array. Keyword arguments as per @@ -1200,10 +1256,11 @@ def empty_like(self, name, data, **kwargs): def _empty_like_nosync(self, name, data, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return empty_like(data, store=self._store, path=path, - chunk_store=self._chunk_store, **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return empty_like( + data, store=self._store, path=path, chunk_store=self._chunk_store, **kwargs + ) def zeros_like(self, name, data, **kwargs): """Create an array. Keyword arguments as per @@ -1212,10 +1269,11 @@ def zeros_like(self, name, data, **kwargs): def _zeros_like_nosync(self, name, data, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return zeros_like(data, store=self._store, path=path, - chunk_store=self._chunk_store, **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return zeros_like( + data, store=self._store, path=path, chunk_store=self._chunk_store, **kwargs + ) def ones_like(self, name, data, **kwargs): """Create an array. 
Keyword arguments as per @@ -1224,10 +1282,11 @@ def ones_like(self, name, data, **kwargs): def _ones_like_nosync(self, name, data, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return ones_like(data, store=self._store, path=path, - chunk_store=self._chunk_store, **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return ones_like( + data, store=self._store, path=path, chunk_store=self._chunk_store, **kwargs + ) def full_like(self, name, data, **kwargs): """Create an array. Keyword arguments as per @@ -1236,10 +1295,11 @@ def full_like(self, name, data, **kwargs): def _full_like_nosync(self, name, data, **kwargs): path = self._item_path(name) - kwargs.setdefault('synchronizer', self._synchronizer) - kwargs.setdefault('cache_attrs', self.attrs.cache) - return full_like(data, store=self._store, path=path, - chunk_store=self._chunk_store, **kwargs) + kwargs.setdefault("synchronizer", self._synchronizer) + kwargs.setdefault("cache_attrs", self.attrs.cache) + return full_like( + data, store=self._store, path=path, chunk_store=self._chunk_store, **kwargs + ) def _move_nosync(self, path, new_path): rename(self._store, path, new_path) @@ -1261,11 +1321,14 @@ def move(self, source, dest): dest = self._item_path(dest) # Check that source exists. - if not (contains_array(self._store, source) or - contains_group(self._store, source, explicit_only=False)): + if not ( + contains_array(self._store, source) + or contains_group(self._store, source, explicit_only=False) + ): raise ValueError('The source, "%s", does not exist.' % source) - if (contains_array(self._store, dest) or - contains_group(self._store, dest, explicit_only=False)): + if contains_array(self._store, dest) or contains_group( + self._store, dest, explicit_only=False + ): raise ValueError('The dest, "%s", already exists.' % dest) # Ensure groups needed for `dest` exist. @@ -1275,23 +1338,30 @@ def move(self, source, dest): self._write_op(self._move_nosync, source, dest) -def _normalize_store_arg(store, *, storage_options=None, mode="r", - zarr_version=None): +def _normalize_store_arg(store, *, storage_options=None, mode="r", zarr_version=None): if zarr_version is None: - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) if zarr_version != 2: assert_zarr_v3_api_available() if store is None: return MemoryStore() if zarr_version == 2 else MemoryStoreV3() - return normalize_store_arg(store, - storage_options=storage_options, mode=mode, - zarr_version=zarr_version) - - -def group(store=None, overwrite=False, chunk_store=None, - cache_attrs=True, synchronizer=None, path=None, *, zarr_version=None): + return normalize_store_arg( + store, storage_options=storage_options, mode=mode, zarr_version=zarr_version + ) + + +def group( + store=None, + overwrite=False, + chunk_store=None, + cache_attrs=True, + synchronizer=None, + path=None, + *, + zarr_version=None +): """Create a group. 
Parameters @@ -1336,9 +1406,9 @@ def group(store=None, overwrite=False, chunk_store=None, """ # handle polymorphic store arg - store = _normalize_store_arg(store, zarr_version=zarr_version, mode='w') + store = _normalize_store_arg(store, zarr_version=zarr_version, mode="w") if zarr_version is None: - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) if zarr_version != 2: assert_zarr_v3_api_available() @@ -1352,16 +1422,31 @@ def group(store=None, overwrite=False, chunk_store=None, requires_init = overwrite or not contains_group(store, path) if requires_init: - init_group(store, overwrite=overwrite, chunk_store=chunk_store, - path=path) - - return Group(store, read_only=False, chunk_store=chunk_store, - cache_attrs=cache_attrs, synchronizer=synchronizer, path=path, - zarr_version=zarr_version) - - -def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=None, - chunk_store=None, storage_options=None, *, zarr_version=None, meta_array=None): + init_group(store, overwrite=overwrite, chunk_store=chunk_store, path=path) + + return Group( + store, + read_only=False, + chunk_store=chunk_store, + cache_attrs=cache_attrs, + synchronizer=synchronizer, + path=path, + zarr_version=zarr_version, + ) + + +def open_group( + store=None, + mode="a", + cache_attrs=True, + synchronizer=None, + path=None, + chunk_store=None, + storage_options=None, + *, + zarr_version=None, + meta_array=None +): """Open a group using file-mode-like semantics. Parameters @@ -1414,44 +1499,41 @@ def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=N # handle polymorphic store arg store = _normalize_store_arg( - store, storage_options=storage_options, mode=mode, - zarr_version=zarr_version) + store, storage_options=storage_options, mode=mode, zarr_version=zarr_version + ) if zarr_version is None: - zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION) + zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) if zarr_version != 2: assert_zarr_v3_api_available() if chunk_store is not None: - chunk_store = _normalize_store_arg(chunk_store, - storage_options=storage_options, - mode=mode, - zarr_version=zarr_version) - if getattr(chunk_store, '_store_version', DEFAULT_ZARR_VERSION) != zarr_version: - raise ValueError( # pragma: no cover - "zarr_version of store and chunk_store must match" - ) + chunk_store = _normalize_store_arg( + chunk_store, storage_options=storage_options, mode=mode, zarr_version=zarr_version + ) + if getattr(chunk_store, "_store_version", DEFAULT_ZARR_VERSION) != zarr_version: + raise ValueError("zarr_version of store and chunk_store must match") # pragma: no cover path = normalize_storage_path(path) # ensure store is initialized - if mode in ['r', 'r+']: + if mode in ["r", "r+"]: if not contains_group(store, path=path): if contains_array(store, path=path): raise ContainsArrayError(path) raise GroupNotFoundError(path) - elif mode == 'w': + elif mode == "w": init_group(store, overwrite=True, path=path, chunk_store=chunk_store) - elif mode == 'a': + elif mode == "a": if not contains_group(store, path=path): if contains_array(store, path=path): raise ContainsArrayError(path) init_group(store, path=path, chunk_store=chunk_store) - elif mode in ['w-', 'x']: + elif mode in ["w-", "x"]: if contains_array(store, path=path): raise ContainsArrayError(path) elif contains_group(store, path=path): @@ -1460,8 +1542,15 @@ def open_group(store=None, mode='a', 
cache_attrs=True, synchronizer=None, path=N init_group(store, path=path, chunk_store=chunk_store) # determine read only status - read_only = mode == 'r' - - return Group(store, read_only=read_only, cache_attrs=cache_attrs, - synchronizer=synchronizer, path=path, chunk_store=chunk_store, - zarr_version=zarr_version, meta_array=meta_array) + read_only = mode == "r" + + return Group( + store, + read_only=read_only, + cache_attrs=cache_attrs, + synchronizer=synchronizer, + path=path, + chunk_store=chunk_store, + zarr_version=zarr_version, + meta_array=meta_array, + ) diff --git a/zarr/indexing.py b/zarr/indexing.py index bc2afba992..487cc8b9d9 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -34,17 +34,14 @@ def is_integer_list(x): def is_integer_array(x, ndim=None): - t = not np.isscalar(x) and \ - hasattr(x, 'shape') and \ - hasattr(x, 'dtype') and \ - x.dtype.kind in 'ui' + t = not np.isscalar(x) and hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype.kind in "ui" if ndim is not None: t = t and len(x.shape) == ndim return t def is_bool_array(x, ndim=None): - t = hasattr(x, 'shape') and hasattr(x, 'dtype') and x.dtype == bool + t = hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype == bool if ndim is not None: t = t and len(x.shape) == ndim return t @@ -80,24 +77,15 @@ def is_pure_fancy_indexing(selection, ndim): no_slicing = ( isinstance(selection, tuple) and len(selection) == ndim - and not ( - any(isinstance(elem, slice) or elem is Ellipsis - for elem in selection) - ) + and not (any(isinstance(elem, slice) or elem is Ellipsis for elem in selection)) ) return ( - no_slicing and - all( - is_integer(elem) - or is_integer_list(elem) - or is_integer_array(elem) - for elem in selection - ) and - any( - is_integer_list(elem) - or is_integer_array(elem) + no_slicing + and all( + is_integer(elem) or is_integer_list(elem) or is_integer_array(elem) for elem in selection ) + and any(is_integer_list(elem) or is_integer_array(elem) for elem in selection) ) @@ -112,12 +100,13 @@ def is_pure_orthogonal_indexing(selection, ndim): # Case two: selection contains either zero or one integer iterables. # All other selection elements are slices or integers return ( - isinstance(selection, tuple) and len(selection) == ndim and - sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 and - all( - is_integer_list(elem) or is_integer_array(elem) - or isinstance(elem, (int, slice)) for - elem in selection) + isinstance(selection, tuple) + and len(selection) == ndim + and sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 + and all( + is_integer_list(elem) or is_integer_array(elem) or isinstance(elem, (int, slice)) + for elem in selection + ) ) @@ -138,8 +127,7 @@ def normalize_integer_selection(dim_sel, dim_len): ChunkDimProjection = collections.namedtuple( - 'ChunkDimProjection', - ('dim_chunk_ix', 'dim_chunk_sel', 'dim_out_sel') + "ChunkDimProjection", ("dim_chunk_ix", "dim_chunk_sel", "dim_out_sel") ) """A mapping from chunk to output array for a single dimension. 
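# ---------------------------------------------------------------------------
# Worked example (not part of the patch): how the SliceDimIndexer defined just
# below decomposes slice(2, 9) over a length-10 dimension with chunk length 4
# into the namedtuple fields described above (values worked out by hand):
#   dim_chunk_ix=0, dim_chunk_sel=slice(2, 4, 1), dim_out_sel=slice(0, 2)
#   dim_chunk_ix=1, dim_chunk_sel=slice(0, 4, 1), dim_out_sel=slice(2, 6)
#   dim_chunk_ix=2, dim_chunk_sel=slice(0, 1, 1), dim_out_sel=slice(6, 7)
from zarr.indexing import SliceDimIndexer

for proj in SliceDimIndexer(slice(2, 9), 10, 4):
    print(proj.dim_chunk_ix, proj.dim_chunk_sel, proj.dim_out_sel)
# ---------------------------------------------------------------------------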
@@ -156,7 +144,6 @@ def normalize_integer_selection(dim_sel, dim_len): class IntDimIndexer: - def __init__(self, dim_sel, dim_len, dim_chunk_len): # normalize @@ -181,7 +168,6 @@ def ceildiv(a, b): class SliceDimIndexer: - def __init__(self, dim_sel, dim_len, dim_chunk_len): # normalize @@ -234,8 +220,7 @@ def __iter__(self): dim_chunk_sel_stop = self.stop - dim_offset dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step) - dim_chunk_nitems = ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), - self.step) + dim_chunk_nitems = ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) # If there are no elements on the selection within this chunk, then skip if dim_chunk_nitems == 0: @@ -291,8 +276,7 @@ def replace_ellipsis(selection, shape): def replace_lists(selection): return tuple( - np.asarray(dim_sel) if isinstance(dim_sel, list) else dim_sel - for dim_sel in selection + np.asarray(dim_sel) if isinstance(dim_sel, list) else dim_sel for dim_sel in selection ) @@ -303,8 +287,7 @@ def ensure_tuple(v): ChunkProjection = collections.namedtuple( - 'ChunkProjection', - ('chunk_coords', 'chunk_selection', 'out_selection') + "ChunkProjection", ("chunk_coords", "chunk_selection", "out_selection") ) """A mapping of items from chunk to output array. Can be used to extract items from the chunk array for loading into an output array. Can also be used to extract items from a @@ -336,10 +319,7 @@ def is_positive_slice(s): def is_contiguous_selection(selection): selection = ensure_tuple(selection) - return all( - (is_integer_array(s) or is_contiguous_slice(s) or s == Ellipsis) - for s in selection - ) + return all((is_integer_array(s) or is_contiguous_slice(s) or s == Ellipsis) for s in selection) def is_basic_selection(selection): @@ -349,7 +329,6 @@ def is_basic_selection(selection): # noinspection PyProtectedMember class BasicIndexer: - def __init__(self, selection, array): # handle ellipsis @@ -357,8 +336,7 @@ def __init__(self, selection, array): # setup per-dimension indexers dim_indexers = [] - for dim_sel, dim_len, dim_chunk_len in \ - zip(selection, array._shape, array._chunks): + for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): if is_integer(dim_sel): dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) @@ -367,15 +345,15 @@ def __init__(self, selection, array): dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) else: - raise IndexError('unsupported selection item for basic indexing; ' - 'expected integer or slice, got {!r}' - .format(type(dim_sel))) + raise IndexError( + "unsupported selection item for basic indexing; " + "expected integer or slice, got {!r}".format(type(dim_sel)) + ) dim_indexers.append(dim_indexer) self.dim_indexers = dim_indexers - self.shape = tuple(s.nitems for s in self.dim_indexers - if not isinstance(s, IntDimIndexer)) + self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) self.drop_axes = None def __iter__(self): @@ -383,25 +361,28 @@ def __iter__(self): chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) - out_selection = tuple(p.dim_out_sel for p in dim_projections - if p.dim_out_sel is not None) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) yield ChunkProjection(chunk_coords, chunk_selection, out_selection) class BoolArrayDimIndexer: - def __init__(self, dim_sel, dim_len, dim_chunk_len): # check number of dimensions if 
not is_bool_array(dim_sel, 1): - raise IndexError('Boolean arrays in an orthogonal selection must ' - 'be 1-dimensional only') + raise IndexError( + "Boolean arrays in an orthogonal selection must " "be 1-dimensional only" + ) # check shape if dim_sel.shape[0] != dim_len: - raise IndexError('Boolean array has the wrong length for dimension; ' - 'expected {}, got {}'.format(dim_len, dim_sel.shape[0])) + raise IndexError( + "Boolean array has the wrong length for dimension; " + "expected {}, got {}".format(dim_len, dim_sel.shape[0]) + ) # store attributes self.dim_sel = dim_sel @@ -410,11 +391,11 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) # precompute number of selected items for each chunk - self.chunk_nitems = np.zeros(self.nchunks, dtype='i8') + self.chunk_nitems = np.zeros(self.nchunks, dtype="i8") for dim_chunk_ix in range(self.nchunks): dim_offset = dim_chunk_ix * self.dim_chunk_len self.chunk_nitems[dim_chunk_ix] = np.count_nonzero( - self.dim_sel[dim_offset:dim_offset + self.dim_chunk_len] + self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] ) self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) self.nitems = self.chunk_nitems_cumsum[-1] @@ -427,12 +408,12 @@ def __iter__(self): # find region in chunk dim_offset = dim_chunk_ix * self.dim_chunk_len - dim_chunk_sel = self.dim_sel[dim_offset:dim_offset + self.dim_chunk_len] + dim_chunk_sel = self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] # pad out if final chunk if dim_chunk_sel.shape[0] < self.dim_chunk_len: tmp = np.zeros(self.dim_chunk_len, dtype=bool) - tmp[:dim_chunk_sel.shape[0]] = dim_chunk_sel + tmp[: dim_chunk_sel.shape[0]] = dim_chunk_sel dim_chunk_sel = tmp # find region in output @@ -482,14 +463,22 @@ def boundscheck_indices(x, dim_len): class IntArrayDimIndexer: """Integer array selection against a single dimension.""" - def __init__(self, dim_sel, dim_len, dim_chunk_len, wraparound=True, boundscheck=True, - order=Order.UNKNOWN): + def __init__( + self, + dim_sel, + dim_len, + dim_chunk_len, + wraparound=True, + boundscheck=True, + order=Order.UNKNOWN, + ): # ensure 1d array dim_sel = np.asanyarray(dim_sel) if not is_integer_array(dim_sel, 1): - raise IndexError('integer arrays in an orthogonal selection must be ' - '1-dimensional only') + raise IndexError( + "integer arrays in an orthogonal selection must be " "1-dimensional only" + ) # handle wraparound if wraparound: @@ -570,10 +559,14 @@ def ix_(selection, shape): selection = replace_ellipsis(selection, shape) # replace slice and int as these are not supported by numpy.ix_ - selection = [slice_to_range(dim_sel, dim_len) if isinstance(dim_sel, slice) - else [dim_sel] if is_integer(dim_sel) - else dim_sel - for dim_sel, dim_len in zip(selection, shape)] + selection = [ + slice_to_range(dim_sel, dim_len) + if isinstance(dim_sel, slice) + else [dim_sel] + if is_integer(dim_sel) + else dim_sel + for dim_sel, dim_len in zip(selection, shape) + ] # now get numpy to convert to a coordinate selection selection = np.ix_(*selection) @@ -608,7 +601,6 @@ def oindex_set(a, selection, value): # noinspection PyProtectedMember class OrthogonalIndexer: - def __init__(self, selection, array): # handle ellipsis @@ -619,8 +611,7 @@ def __init__(self, selection, array): # setup per-dimension indexers dim_indexers = [] - for dim_sel, dim_len, dim_chunk_len in \ - zip(selection, array._shape, array._chunks): + for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): if 
is_integer(dim_sel): dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) @@ -635,21 +626,24 @@ def __init__(self, selection, array): dim_indexer = BoolArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) else: - raise IndexError('unsupported selection item for orthogonal indexing; ' - 'expected integer, slice, integer array or Boolean ' - 'array, got {!r}' - .format(type(dim_sel))) + raise IndexError( + "unsupported selection item for orthogonal indexing; " + "expected integer, slice, integer array or Boolean " + "array, got {!r}".format(type(dim_sel)) + ) dim_indexers.append(dim_indexer) self.array = array self.dim_indexers = dim_indexers - self.shape = tuple(s.nitems for s in self.dim_indexers - if not isinstance(s, IntDimIndexer)) + self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) self.is_advanced = not is_basic_selection(selection) if self.is_advanced: - self.drop_axes = tuple(i for i, dim_indexer in enumerate(self.dim_indexers) - if isinstance(dim_indexer, IntDimIndexer)) + self.drop_axes = tuple( + i + for i, dim_indexer in enumerate(self.dim_indexers) + if isinstance(dim_indexer, IntDimIndexer) + ) else: self.drop_axes = None @@ -658,8 +652,9 @@ def __iter__(self): chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) - out_selection = tuple(p.dim_out_sel for p in dim_projections - if p.dim_out_sel is not None) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) # handle advanced indexing arrays orthogonally if self.is_advanced: @@ -678,7 +673,6 @@ def __iter__(self): class OIndex: - def __init__(self, array): self.array = array @@ -697,7 +691,6 @@ def __setitem__(self, selection, value): # noinspection PyProtectedMember class BlockIndexer: - def __init__(self, selection, array): # handle ellipsis @@ -708,8 +701,7 @@ def __init__(self, selection, array): # setup per-dimension indexers dim_indexers = [] - for dim_sel, dim_len, dim_chunk_size in \ - zip(selection, array._shape, array._chunks): + for dim_sel, dim_len, dim_chunk_size in zip(selection, array._shape, array._chunks): dim_numchunks = int(np.ceil(dim_len / dim_chunk_size)) if is_integer(dim_sel): @@ -725,9 +717,10 @@ def __init__(self, selection, array): stop = dim_sel.stop if dim_sel.stop is not None else dim_numchunks if dim_sel.step not in {1, None}: - raise IndexError('unsupported selection item for block indexing; ' - 'expected integer or slice with step=1, got {!r}' - .format(type(dim_sel))) + raise IndexError( + "unsupported selection item for block indexing; " + "expected integer or slice with step=1, got {!r}".format(type(dim_sel)) + ) # Can't reuse wraparound_indices because it expects a numpy array # We have integers here. 
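# ---------------------------------------------------------------------------
# Illustrative usage (not part of the patch): OrthogonalIndexer and
# BlockIndexer above back the `oindex` and block accessors on Array; the
# `blocks` attribute name is assumed from the zarr 2.x API.
import numpy as np
import zarr

z = zarr.array(np.arange(100).reshape(10, 10), chunks=(5, 5))
# orthogonal selection: rows [1, 4] crossed with columns [2, 7]
assert z.oindex[[1, 4], [2, 7]].shape == (2, 2)
# block selection: block coordinate (0, 1) addresses rows 0:5, columns 5:10
assert z.blocks[0, 1].shape == (5, 5)
# ---------------------------------------------------------------------------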
@@ -741,9 +734,10 @@ def __init__(self, selection, array): slice_ = slice(start, stop) else: - raise IndexError('unsupported selection item for block indexing; ' - 'expected integer or slice, got {!r}' - .format(type(dim_sel))) + raise IndexError( + "unsupported selection item for block indexing; " + "expected integer or slice, got {!r}".format(type(dim_sel)) + ) dim_indexer = SliceDimIndexer(slice_, dim_len, dim_chunk_size) dim_indexers.append(dim_indexer) @@ -759,14 +753,14 @@ def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) - out_selection = tuple(p.dim_out_sel for p in dim_projections - if p.dim_out_sel is not None) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) yield ChunkProjection(chunk_coords, chunk_selection, out_selection) class BlockIndex: - def __init__(self, array): self.array = array @@ -785,25 +779,20 @@ def __setitem__(self, selection, value): # noinspection PyProtectedMember def is_coordinate_selection(selection, array): - return ( - (len(selection) == len(array._shape)) and - all(is_integer(dim_sel) or is_integer_array(dim_sel) - for dim_sel in selection) + return (len(selection) == len(array._shape)) and all( + is_integer(dim_sel) or is_integer_array(dim_sel) for dim_sel in selection ) # noinspection PyProtectedMember def is_mask_selection(selection, array): return ( - len(selection) == 1 and - is_bool_array(selection[0]) and - selection[0].shape == array._shape + len(selection) == 1 and is_bool_array(selection[0]) and selection[0].shape == array._shape ) # noinspection PyProtectedMember class CoordinateIndexer: - def __init__(self, selection, array): # some initial normalization @@ -813,9 +802,11 @@ def __init__(self, selection, array): # validation if not is_coordinate_selection(selection, array): - raise IndexError('invalid coordinate selection; expected one integer ' - '(coordinate) array per dimension of the target array, ' - 'got {!r}'.format(selection)) + raise IndexError( + "invalid coordinate selection; expected one integer " + "(coordinate) array per dimension of the target array, " + "got {!r}".format(selection) + ) # handle wraparound, boundscheck for dim_sel, dim_len in zip(selection, array.shape): @@ -828,8 +819,7 @@ def __init__(self, selection, array): # compute chunk index for each point in the selection chunks_multi_index = tuple( - dim_sel // dim_chunk_len - for (dim_sel, dim_chunk_len) in zip(selection, array._chunks) + dim_sel // dim_chunk_len for (dim_sel, dim_chunk_len) in zip(selection, array._chunks) ) # broadcast selection - this will raise error if array dimensions don't match @@ -844,8 +834,7 @@ def __init__(self, selection, array): chunks_multi_index = [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index] # ravel chunk indices - chunks_raveled_indices = np.ravel_multi_index(chunks_multi_index, - dims=array._cdata_shape) + chunks_raveled_indices = np.ravel_multi_index(chunks_multi_index, dims=array._cdata_shape) # group points by chunk if np.any(np.diff(chunks_raveled_indices) < 0): @@ -901,7 +890,6 @@ def __iter__(self): # noinspection PyProtectedMember class MaskIndexer(CoordinateIndexer): - def __init__(self, selection, array): # some initial normalization @@ -910,9 +898,10 @@ def __init__(self, selection, array): # validation if not is_mask_selection(selection, array): - raise IndexError('invalid mask selection; expected 
one Boolean (mask)' - 'array with the same shape as the target array, got {!r}' - .format(selection)) + raise IndexError( + "invalid mask selection; expected one Boolean (mask)" + "array with the same shape as the target array, got {!r}".format(selection) + ) # convert to indices selection = np.nonzero(selection[0]) @@ -922,7 +911,6 @@ def __init__(self, selection, array): class VIndex: - def __init__(self, array): self.array = array @@ -955,8 +943,10 @@ def check_fields(fields, dtype): return dtype # check type if not isinstance(fields, (str, list, tuple)): - raise IndexError("'fields' argument must be a string or list of strings; found " - "{!r}".format(type(fields))) + raise IndexError( + "'fields' argument must be a string or list of strings; found " + "{!r}".format(type(fields)) + ) if fields: if dtype.names is None: raise IndexError("invalid 'fields' argument, array does not have any fields") @@ -980,7 +970,7 @@ def check_no_multi_fields(fields): if len(fields) == 1: return fields[0] elif len(fields) > 1: - raise IndexError('multiple fields are not supported for this operation') + raise IndexError("multiple fields are not supported for this operation") return fields @@ -1009,11 +999,7 @@ def make_slice_selection(selection): ls.append(slice(int(dim_selection), int(dim_selection) + 1, 1)) elif isinstance(dim_selection, np.ndarray): if len(dim_selection) == 1: - ls.append( - slice( - int(dim_selection[0]), int(dim_selection[0]) + 1, 1 - ) - ) + ls.append(slice(int(dim_selection[0]), int(dim_selection[0]) + 1, 1)) else: raise ArrayIndexError() else: @@ -1108,10 +1094,10 @@ def __init__(self, selection, arr_shape): def __iter__(self): chunk1 = self.chunk_loc_slices[0] nitems = (chunk1[-1].stop - chunk1[-1].start) * np.prod( - self.arr_shape[len(chunk1):], dtype=int + self.arr_shape[len(chunk1) :], dtype=int ) for partial_out_selection in self.chunk_loc_slices: start = 0 for i, sl in enumerate(partial_out_selection): - start += sl.start * np.prod(self.arr_shape[i + 1:], dtype=int) + start += sl.start * np.prod(self.arr_shape[i + 1 :], dtype=int) yield start, nitems, partial_out_selection diff --git a/zarr/meta.py b/zarr/meta.py index aacffd7f77..48791ddf17 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -27,15 +27,11 @@ "extensions": [], } -_v3_core_types = set( - "".join(d) for d in itertools.product("<>", ("u", "i", "f"), ("2", "4", "8")) -) +_v3_core_types = set("".join(d) for d in itertools.product("<>", ("u", "i", "f"), ("2", "4", "8"))) _v3_core_types = {"bool", "i1", "u1"} | _v3_core_types # The set of complex types allowed ({"c8", ">c16"}) -_v3_complex_types = set( - f"{end}c{_bytes}" for end, _bytes in itertools.product("<>", ("8", "16")) -) +_v3_complex_types = set(f"{end}c{_bytes}" for end, _bytes in itertools.product("<>", ("8", "16"))) # All dtype.str values corresponding to datetime64 and timedelta64 # see: https://numpy.org/doc/stable/reference/arrays.datetime.html#datetime-units @@ -43,7 +39,7 @@ _time_units = ["h", "m", "s", "ms", "us", "μs", "ns", "ps", "fs", "as"] _v3_datetime_types = set( f"{end}{kind}8[{unit}]" - for end, unit, kind in itertools.product("<>", _date_units + _time_units, ('m', 'M')) + for end, unit, kind in itertools.product("<>", _date_units + _time_units, ("m", "M")) ) @@ -217,9 +213,7 @@ def encode_group_metadata(cls, meta=None) -> bytes: return json_dumps(meta) @classmethod - def decode_fill_value( - cls, v: Any, dtype: np.dtype, object_codec: Any = None - ) -> Any: + def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> 
Any: # early out if v is None: return v @@ -267,9 +261,7 @@ def decode_fill_value( return np.array(v, dtype=dtype)[()] @classmethod - def encode_fill_value( - cls, v: Any, dtype: np.dtype, object_codec: Any = None - ) -> Any: + def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> Any: # early out if v is None: return v @@ -318,11 +310,9 @@ def decode_dtype(cls, d, validate=True): if isinstance(d, dict): # extract the type from the extension info try: - d = d['type'] + d = d["type"] except KeyError: - raise KeyError( - "Extended dtype info must provide a key named 'type'." - ) + raise KeyError("Extended dtype info must provide a key named 'type'.") d = cls._decode_dtype_descr(d) dtype = np.dtype(d) if validate: @@ -389,9 +379,7 @@ def encode_hierarchy_metadata(cls, meta=None) -> bytes: return json_dumps(meta) @classmethod - def decode_hierarchy_metadata( - cls, s: Union[MappingType, bytes, str] - ) -> MappingType[str, Any]: + def decode_hierarchy_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: meta = cls.parse_metadata(s) # check metadata format # zarr_format = meta.get("zarr_format", None) @@ -414,7 +402,7 @@ def _encode_codec_metadata(cls, codec: Codec) -> Optional[Mapping]: # only support gzip for now config = codec.get_config() del config["id"] - uri = 'https://purl.org/zarr/spec/codec/' + uri = "https://purl.org/zarr/spec/codec/" if isinstance(codec, numcodecs.GZip): uri = uri + "gzip/1.0" elif isinstance(codec, numcodecs.Zlib): @@ -438,19 +426,19 @@ def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]: if meta is None: return None - uri = 'https://purl.org/zarr/spec/codec/' - conf = meta['configuration'] - if meta['codec'].startswith(uri + 'gzip/'): + uri = "https://purl.org/zarr/spec/codec/" + conf = meta["configuration"] + if meta["codec"].startswith(uri + "gzip/"): conf["id"] = "gzip" - elif meta['codec'].startswith(uri + 'zlib/'): + elif meta["codec"].startswith(uri + "zlib/"): conf["id"] = "zlib" - elif meta['codec'].startswith(uri + 'blosc/'): + elif meta["codec"].startswith(uri + "blosc/"): conf["id"] = "blosc" - elif meta['codec'].startswith(uri + 'bz2/'): + elif meta["codec"].startswith(uri + "bz2/"): conf["id"] = "bz2" - elif meta['codec'].startswith(uri + 'lz4/'): + elif meta["codec"].startswith(uri + "lz4/"): conf["id"] = "lz4" - elif meta['codec'].startswith(uri + 'lzma/'): + elif meta["codec"].startswith(uri + "lzma/"): conf["id"] = "lzma" else: raise NotImplementedError @@ -461,8 +449,7 @@ def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]: @classmethod def _encode_storage_transformer_metadata( - cls, - storage_transformer: "StorageTransformer" + cls, storage_transformer: "StorageTransformer" ) -> Optional[Mapping]: return { "extension": storage_transformer.extension_uri, @@ -478,9 +465,9 @@ def _decode_storage_transformer_metadata(cls, meta: Mapping) -> "StorageTransfor # This might be changed to a proper registry in the future KNOWN_STORAGE_TRANSFORMERS = [DummyStorageTransfomer, ShardingStorageTransformer] - conf = meta.get('configuration', {}) - extension_uri = meta['extension'] - transformer_type = meta['type'] + conf = meta.get("configuration", {}) + extension_uri = meta["extension"] + transformer_type = meta["type"] for StorageTransformerCls in KNOWN_STORAGE_TRANSFORMERS: if StorageTransformerCls.extension_uri == extension_uri: @@ -527,9 +514,9 @@ def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType ) # compressor field should be 
absent when there is no compression if compressor: - meta['compressor'] = compressor + meta["compressor"] = compressor if storage_transformers: - meta['storage_transformers'] = storage_transformers + meta["storage_transformers"] = storage_transformers except Exception as e: raise MetadataError("error decoding metadata: %s" % e) diff --git a/zarr/meta_v1.py b/zarr/meta_v1.py index bc4ae12228..4ac381f2ca 100644 --- a/zarr/meta_v1.py +++ b/zarr/meta_v1.py @@ -6,24 +6,24 @@ def decode_metadata(b): - s = str(b, 'ascii') + s = str(b, "ascii") meta = json.loads(s) - zarr_format = meta.get('zarr_format', None) + zarr_format = meta.get("zarr_format", None) if zarr_format != 1: - raise MetadataError('unsupported zarr format: %s' % zarr_format) + raise MetadataError("unsupported zarr format: %s" % zarr_format) try: meta = dict( - zarr_format=meta['zarr_format'], - shape=tuple(meta['shape']), - chunks=tuple(meta['chunks']), - dtype=decode_dtype(meta['dtype']), - compression=meta['compression'], - compression_opts=meta['compression_opts'], - fill_value=meta['fill_value'], - order=meta['order'], + zarr_format=meta["zarr_format"], + shape=tuple(meta["shape"]), + chunks=tuple(meta["chunks"]), + dtype=decode_dtype(meta["dtype"]), + compression=meta["compression"], + compression_opts=meta["compression_opts"], + fill_value=meta["fill_value"], + order=meta["order"], ) except Exception as e: - raise MetadataError('error decoding metadata: %s' % e) + raise MetadataError("error decoding metadata: %s" % e) else: return meta @@ -31,16 +31,16 @@ def decode_metadata(b): def encode_metadata(meta): meta = dict( zarr_format=1, - shape=meta['shape'], - chunks=meta['chunks'], - dtype=encode_dtype(meta['dtype']), - compression=meta['compression'], - compression_opts=meta['compression_opts'], - fill_value=meta['fill_value'], - order=meta['order'], + shape=meta["shape"], + chunks=meta["chunks"], + dtype=encode_dtype(meta["dtype"]), + compression=meta["compression"], + compression_opts=meta["compression_opts"], + fill_value=meta["fill_value"], + order=meta["order"], ) s = json.dumps(meta, indent=4, sort_keys=True, ensure_ascii=True) - b = s.encode('ascii') + b = s.encode("ascii") return b diff --git a/zarr/n5.py b/zarr/n5.py index 1eb6ef2b33..7e73905527 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -18,16 +18,16 @@ from .storage import attrs_key as zarr_attrs_key from .storage import group_meta_key as zarr_group_meta_key -N5_FORMAT = '2.0.0' +N5_FORMAT = "2.0.0" zarr_to_n5_keys = [ - ('chunks', 'blockSize'), - ('dtype', 'dataType'), - ('compressor', 'compression'), - ('shape', 'dimensions') + ("chunks", "blockSize"), + ("dtype", "dataType"), + ("compressor", "compression"), + ("shape", "dimensions"), ] -n5_attrs_key = 'attributes.json' -n5_keywords = ['n5', 'dataType', 'dimensions', 'blockSize', 'compression'] +n5_attrs_key = "attributes.json" +n5_keywords = ["n5", "dataType", "dimensions", "blockSize", "compression"] class N5Store(NestedDirectoryStore): @@ -173,13 +173,13 @@ def __contains__(self, key): if key_new not in self: return False # group if not a dataset (attributes do not contain 'dimensions') - return 'dimensions' not in self._load_n5_attrs(key_new) + return "dimensions" not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): key_new = key.replace(zarr_array_meta_key, n5_attrs_key) # array if attributes contain 'dimensions' - return 'dimensions' in self._load_n5_attrs(key_new) + return "dimensions" in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): @@ -195,10 +195,7 @@ def 
__contains__(self, key):
         return super().__contains__(key_new)
 
     def __eq__(self, other):
-        return (
-            isinstance(other, N5Store) and
-            self.path == other.path
-        )
+        return isinstance(other, N5Store) and self.path == other.path
 
     def listdir(self, path: Optional[str] = None):
 
@@ -229,7 +226,7 @@ def listdir(self, path: Optional[str] = None):
                         for file_name in file_names:
                             file_path = os.path.join(dir_path, file_name)
                             rel_path = file_path.split(root_path + os.path.sep)[1]
-                            new_child = rel_path.replace(os.path.sep, '.')
+                            new_child = rel_path.replace(os.path.sep, ".")
                             new_children.append(invert_chunk_coords(new_child))
                 else:
                     new_children.append(entry)
@@ -265,7 +262,7 @@ def _is_group(self, path: str):
             attrs_key = os.path.join(path, n5_attrs_key)
 
         n5_attrs = self._load_n5_attrs(attrs_key)
-        return len(n5_attrs) > 0 and 'dimensions' not in n5_attrs
+        return len(n5_attrs) > 0 and "dimensions" not in n5_attrs
 
     def _is_array(self, path: str):
 
@@ -274,7 +271,7 @@ def _is_array(self, path: str):
         else:
             attrs_key = os.path.join(path, n5_attrs_key)
 
-        return 'dimensions' in self._load_n5_attrs(attrs_key)
+        return "dimensions" in self._load_n5_attrs(attrs_key)
 
     def _contains_attrs(self, path: str):
 
@@ -340,27 +337,28 @@ class N5FSStore(FSStore):
     dimensions, hence the Zarr arrays targeting N5 have the deceptive
     "." dimension separator.
     """
-    _array_meta_key = 'attributes.json'
-    _group_meta_key = 'attributes.json'
-    _attrs_key = 'attributes.json'
+
+    _array_meta_key = "attributes.json"
+    _group_meta_key = "attributes.json"
+    _attrs_key = "attributes.json"
 
     def __init__(self, *args, **kwargs):
-        if 'dimension_separator' in kwargs:
-            kwargs.pop('dimension_separator')
-            warnings.warn('Keyword argument `dimension_separator` will be ignored')
+        if "dimension_separator" in kwargs:
+            kwargs.pop("dimension_separator")
+            warnings.warn("Keyword argument `dimension_separator` will be ignored")
         dimension_separator = "."
super().__init__(*args, dimension_separator=dimension_separator, **kwargs) @staticmethod def _swap_separator(key: str): - segments = list(key.split('/')) + segments = list(key.split("/")) if segments: last_segment = segments[-1] if _prog_ckey.match(last_segment): - coords = list(last_segment.split('.')) - last_segment = '/'.join(coords[::-1]) + coords = list(last_segment.split(".")) + last_segment = "/".join(coords[::-1]) segments = segments[:-1] + [last_segment] - key = '/'.join(segments) + key = "/".join(segments) return key def _normalize_key(self, key: str): @@ -527,7 +525,7 @@ def listdir(self, path: Optional[str] = None): for file_name in self.fs.find(entry_path): file_path = os.path.join(root_path, file_name) rel_path = file_path.split(root_path)[1] - new_child = rel_path.lstrip('/').replace('/', ".") + new_child = rel_path.lstrip("/").replace("/", ".") new_children.append(invert_chunk_coords(new_child)) else: new_children.append(entry) @@ -586,7 +584,7 @@ def _contains_attrs(self, path: Optional[str]): def is_chunk_key(key: str): rv = False - segments = list(key.split('/')) + segments = list(key.split("/")) if segments: last_segment = segments[-1] rv = bool(_prog_ckey.match(last_segment)) @@ -594,118 +592,116 @@ def is_chunk_key(key: str): def invert_chunk_coords(key: str): - segments = list(key.split('/')) + segments = list(key.split("/")) if segments: last_segment = segments[-1] if _prog_ckey.match(last_segment): - coords = list(last_segment.split('.')) - last_segment = '/'.join(coords[::-1]) + coords = list(last_segment.split(".")) + last_segment = "/".join(coords[::-1]) segments = segments[:-1] + [last_segment] - key = '/'.join(segments) + key = "/".join(segments) return key def group_metadata_to_n5(group_metadata: Dict[str, Any]) -> Dict[str, Any]: - '''Convert group metadata from zarr to N5 format.''' - del group_metadata['zarr_format'] + """Convert group metadata from zarr to N5 format.""" + del group_metadata["zarr_format"] # TODO: This should only exist at the top-level - group_metadata['n5'] = N5_FORMAT + group_metadata["n5"] = N5_FORMAT return group_metadata def group_metadata_to_zarr(group_metadata: Dict[str, Any]) -> Dict[str, Any]: - '''Convert group metadata from N5 to zarr format.''' + """Convert group metadata from N5 to zarr format.""" # This only exists at the top level - group_metadata.pop('n5', None) - group_metadata['zarr_format'] = ZARR_FORMAT + group_metadata.pop("n5", None) + group_metadata["zarr_format"] = ZARR_FORMAT return group_metadata def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dict[str, Any]: - '''Convert array metadata from zarr to N5 format. If the `top_level` keyword argument is True, - then the `N5` : N5_FORMAT key : value pair will be inserted into the metadata.''' + """Convert array metadata from zarr to N5 format. 
If the `top_level` keyword argument is True, + then the `N5` : N5_FORMAT key : value pair will be inserted into the metadata.""" for f, t in zarr_to_n5_keys: array_metadata[t] = array_metadata.pop(f) - del array_metadata['zarr_format'] + del array_metadata["zarr_format"] if top_level: - array_metadata['n5'] = N5_FORMAT + array_metadata["n5"] = N5_FORMAT try: - dtype = np.dtype(array_metadata['dataType']) + dtype = np.dtype(array_metadata["dataType"]) except TypeError: - raise TypeError( - f"Data type {array_metadata['dataType']} is not supported by N5") + raise TypeError(f"Data type {array_metadata['dataType']} is not supported by N5") - array_metadata['dataType'] = dtype.name - array_metadata['dimensions'] = array_metadata['dimensions'][::-1] - array_metadata['blockSize'] = array_metadata['blockSize'][::-1] + array_metadata["dataType"] = dtype.name + array_metadata["dimensions"] = array_metadata["dimensions"][::-1] + array_metadata["blockSize"] = array_metadata["blockSize"][::-1] - if 'fill_value' in array_metadata: - if array_metadata['fill_value'] != 0 and array_metadata['fill_value'] is not None: + if "fill_value" in array_metadata: + if array_metadata["fill_value"] != 0 and array_metadata["fill_value"] is not None: raise ValueError( - f'''Received fill_value = {array_metadata['fill_value']}, - but N5 only supports fill_value = 0''' - ) - del array_metadata['fill_value'] + f"""Received fill_value = {array_metadata['fill_value']}, + but N5 only supports fill_value = 0""" + ) + del array_metadata["fill_value"] - if 'order' in array_metadata: - if array_metadata['order'] != 'C': + if "order" in array_metadata: + if array_metadata["order"] != "C": raise ValueError( f"Received order = {array_metadata['order']}, but N5 only supports order = C" - ) - del array_metadata['order'] + ) + del array_metadata["order"] - if 'filters' in array_metadata: - if array_metadata['filters'] != [] and array_metadata['filters'] is not None: - raise ValueError( - "Received filters, but N5 storage does not support zarr filters" - ) - del array_metadata['filters'] + if "filters" in array_metadata: + if array_metadata["filters"] != [] and array_metadata["filters"] is not None: + raise ValueError("Received filters, but N5 storage does not support zarr filters") + del array_metadata["filters"] - assert 'compression' in array_metadata - compressor_config = array_metadata['compression'] + assert "compression" in array_metadata + compressor_config = array_metadata["compression"] compressor_config = compressor_config_to_n5(compressor_config) - array_metadata['compression'] = compressor_config + array_metadata["compression"] = compressor_config - if 'dimension_separator' in array_metadata: - del array_metadata['dimension_separator'] + if "dimension_separator" in array_metadata: + del array_metadata["dimension_separator"] return array_metadata -def array_metadata_to_zarr(array_metadata: Dict[str, Any], - top_level: bool = False) -> Dict[str, Any]: - '''Convert array metadata from N5 to zarr format. - If the `top_level` keyword argument is True, then the `N5` key will be removed from metadata''' +def array_metadata_to_zarr( + array_metadata: Dict[str, Any], top_level: bool = False +) -> Dict[str, Any]: + """Convert array metadata from N5 to zarr format. 
+ If the `top_level` keyword argument is True, then the `N5` key will be removed from metadata""" for t, f in zarr_to_n5_keys: array_metadata[t] = array_metadata.pop(f) if top_level: - array_metadata.pop('n5') - array_metadata['zarr_format'] = ZARR_FORMAT - - array_metadata['shape'] = array_metadata['shape'][::-1] - array_metadata['chunks'] = array_metadata['chunks'][::-1] - array_metadata['fill_value'] = 0 # also if None was requested - array_metadata['order'] = 'C' - array_metadata['filters'] = [] - array_metadata['dimension_separator'] = '.' - array_metadata['dtype'] = np.dtype(array_metadata['dtype']).str - - compressor_config = array_metadata['compressor'] + array_metadata.pop("n5") + array_metadata["zarr_format"] = ZARR_FORMAT + + array_metadata["shape"] = array_metadata["shape"][::-1] + array_metadata["chunks"] = array_metadata["chunks"][::-1] + array_metadata["fill_value"] = 0 # also if None was requested + array_metadata["order"] = "C" + array_metadata["filters"] = [] + array_metadata["dimension_separator"] = "." + array_metadata["dtype"] = np.dtype(array_metadata["dtype"]).str + + compressor_config = array_metadata["compressor"] compressor_config = compressor_config_to_zarr(compressor_config) - array_metadata['compressor'] = { - 'id': N5ChunkWrapper.codec_id, - 'compressor_config': compressor_config, - 'dtype': array_metadata['dtype'], - 'chunk_shape': array_metadata['chunks'] + array_metadata["compressor"] = { + "id": N5ChunkWrapper.codec_id, + "compressor_config": compressor_config, + "dtype": array_metadata["dtype"], + "chunk_shape": array_metadata["chunks"], } return array_metadata def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: - '''Get all zarr attributes from an N5 attributes dictionary (i.e., - all non-keyword attributes).''' + """Get all zarr attributes from an N5 attributes dictionary (i.e., + all non-keyword attributes).""" # remove all N5 keywords for n5_key in n5_keywords: @@ -718,134 +714,133 @@ def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]: if compressor_config is None: - return {'type': 'raw'} + return {"type": "raw"} else: _compressor_config = compressor_config # peel wrapper, if present - if _compressor_config['id'] == N5ChunkWrapper.codec_id: - _compressor_config = _compressor_config['compressor_config'] + if _compressor_config["id"] == N5ChunkWrapper.codec_id: + _compressor_config = _compressor_config["compressor_config"] - codec_id = _compressor_config['id'] - n5_config = {'type': codec_id} + codec_id = _compressor_config["id"] + n5_config = {"type": codec_id} - if codec_id == 'bz2': + if codec_id == "bz2": - n5_config['type'] = 'bzip2' - n5_config['blockSize'] = _compressor_config['level'] + n5_config["type"] = "bzip2" + n5_config["blockSize"] = _compressor_config["level"] - elif codec_id == 'blosc': + elif codec_id == "blosc": - n5_config['cname'] = _compressor_config['cname'] - n5_config['clevel'] = _compressor_config['clevel'] - n5_config['shuffle'] = _compressor_config['shuffle'] - n5_config['blocksize'] = _compressor_config['blocksize'] + n5_config["cname"] = _compressor_config["cname"] + n5_config["clevel"] = _compressor_config["clevel"] + n5_config["shuffle"] = _compressor_config["shuffle"] + n5_config["blocksize"] = _compressor_config["blocksize"] - elif codec_id == 'lzma': + elif codec_id == "lzma": # Switch to XZ for N5 if we are using the default XZ format. # Note: 4 is the default, which is lzma.CHECK_CRC64. 
- if _compressor_config['format'] == 1 and _compressor_config['check'] in [-1, 4]: - n5_config['type'] = 'xz' + if _compressor_config["format"] == 1 and _compressor_config["check"] in [-1, 4]: + n5_config["type"] = "xz" else: warnings.warn( "Not all N5 implementations support lzma compression (yet). You " "might not be able to open the dataset with another N5 library.", - RuntimeWarning + RuntimeWarning, ) - n5_config['format'] = _compressor_config['format'] - n5_config['check'] = _compressor_config['check'] - n5_config['filters'] = _compressor_config['filters'] + n5_config["format"] = _compressor_config["format"] + n5_config["check"] = _compressor_config["check"] + n5_config["filters"] = _compressor_config["filters"] # The default is lzma.PRESET_DEFAULT, which is 6. - if _compressor_config['preset']: - n5_config['preset'] = _compressor_config['preset'] + if _compressor_config["preset"]: + n5_config["preset"] = _compressor_config["preset"] else: - n5_config['preset'] = 6 + n5_config["preset"] = 6 - elif codec_id == 'zlib': + elif codec_id == "zlib": - n5_config['type'] = 'gzip' - n5_config['level'] = _compressor_config['level'] - n5_config['useZlib'] = True + n5_config["type"] = "gzip" + n5_config["level"] = _compressor_config["level"] + n5_config["useZlib"] = True - elif codec_id == 'gzip': + elif codec_id == "gzip": - n5_config['type'] = 'gzip' - n5_config['level'] = _compressor_config['level'] - n5_config['useZlib'] = False + n5_config["type"] = "gzip" + n5_config["level"] = _compressor_config["level"] + n5_config["useZlib"] = False else: - n5_config.update({k: v for k, v in _compressor_config.items() if k != 'type'}) + n5_config.update({k: v for k, v in _compressor_config.items() if k != "type"}) return n5_config def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]: - codec_id = compressor_config['type'] - zarr_config = {'id': codec_id} + codec_id = compressor_config["type"] + zarr_config = {"id": codec_id} - if codec_id == 'bzip2': + if codec_id == "bzip2": - zarr_config['id'] = 'bz2' - zarr_config['level'] = compressor_config['blockSize'] + zarr_config["id"] = "bz2" + zarr_config["level"] = compressor_config["blockSize"] - elif codec_id == 'blosc': + elif codec_id == "blosc": - zarr_config['cname'] = compressor_config['cname'] - zarr_config['clevel'] = compressor_config['clevel'] - zarr_config['shuffle'] = compressor_config['shuffle'] - zarr_config['blocksize'] = compressor_config['blocksize'] + zarr_config["cname"] = compressor_config["cname"] + zarr_config["clevel"] = compressor_config["clevel"] + zarr_config["shuffle"] = compressor_config["shuffle"] + zarr_config["blocksize"] = compressor_config["blocksize"] - elif codec_id == 'lzma': + elif codec_id == "lzma": - zarr_config['format'] = compressor_config['format'] - zarr_config['check'] = compressor_config['check'] - zarr_config['preset'] = compressor_config['preset'] - zarr_config['filters'] = compressor_config['filters'] + zarr_config["format"] = compressor_config["format"] + zarr_config["check"] = compressor_config["check"] + zarr_config["preset"] = compressor_config["preset"] + zarr_config["filters"] = compressor_config["filters"] - elif codec_id == 'xz': + elif codec_id == "xz": - zarr_config['id'] = 'lzma' - zarr_config['format'] = 1 # lzma.FORMAT_XZ - zarr_config['check'] = -1 - zarr_config['preset'] = compressor_config['preset'] - zarr_config['filters'] = None + zarr_config["id"] = "lzma" + zarr_config["format"] = 1 # lzma.FORMAT_XZ + zarr_config["check"] = -1 + zarr_config["preset"] 
= compressor_config["preset"] + zarr_config["filters"] = None - elif codec_id == 'gzip': + elif codec_id == "gzip": - if 'useZlib' in compressor_config and compressor_config['useZlib']: - zarr_config['id'] = 'zlib' - zarr_config['level'] = compressor_config['level'] + if "useZlib" in compressor_config and compressor_config["useZlib"]: + zarr_config["id"] = "zlib" + zarr_config["level"] = compressor_config["level"] else: - zarr_config['id'] = 'gzip' - zarr_config['level'] = compressor_config['level'] + zarr_config["id"] = "gzip" + zarr_config["level"] = compressor_config["level"] - elif codec_id == 'raw': + elif codec_id == "raw": return None else: - zarr_config.update({k: v for k, v in compressor_config.items() if k != 'type'}) + zarr_config.update({k: v for k, v in compressor_config.items() if k != "type"}) return zarr_config class N5ChunkWrapper(Codec): - codec_id = 'n5_wrapper' + codec_id = "n5_wrapper" def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): self.dtype = np.dtype(dtype) self.chunk_shape = tuple(chunk_shape) # is the dtype a little endian format? - self._little_endian = ( - self.dtype.byteorder == '<' or - (self.dtype.byteorder == '=' and sys.byteorder == 'little') + self._little_endian = self.dtype.byteorder == "<" or ( + self.dtype.byteorder == "=" and sys.byteorder == "little" ) if compressor: @@ -853,9 +848,7 @@ def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): raise ValueError("Only one of compressor_config or compressor should be given.") compressor_config = compressor.get_config() - if ( - compressor_config is None and compressor is None or - compressor_config['id'] == 'raw'): + if compressor_config is None and compressor is None or compressor_config["id"] == "raw": self.compressor_config = None self._compressor = None else: @@ -863,10 +856,7 @@ def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): self.compressor_config = self._compressor.get_config() def get_config(self): - config = { - 'id': self.codec_id, - 'compressor_config': self.compressor_config - } + config = {"id": self.codec_id, "compressor_config": self.compressor_config} return config def encode(self, chunk): @@ -879,7 +869,7 @@ def encode(self, chunk): if self._compressor: return header + self._compressor.encode(chunk) else: - return header + chunk.tobytes(order='A') + return header + chunk.tobytes(order="A") def decode(self, chunk, out=None) -> bytes: @@ -889,10 +879,9 @@ def decode(self, chunk, out=None) -> bytes: if out is not None: # out should only be used if we read a complete chunk - assert chunk_shape == self.chunk_shape, ( - "Expected chunk of shape {}, found {}".format( - self.chunk_shape, - chunk_shape)) + assert chunk_shape == self.chunk_shape, "Expected chunk of shape {}, found {}".format( + self.chunk_shape, chunk_shape + ) if self._compressor: self._compressor.decode(chunk, out) @@ -927,25 +916,21 @@ def decode(self, chunk, out=None) -> bytes: @staticmethod def _create_header(chunk): - mode = struct.pack('>H', 0) - num_dims = struct.pack('>H', len(chunk.shape)) - shape = b''.join( - struct.pack('>I', d) - for d in chunk.shape[::-1] - ) + mode = struct.pack(">H", 0) + num_dims = struct.pack(">H", len(chunk.shape)) + shape = b"".join(struct.pack(">I", d) for d in chunk.shape[::-1]) return mode + num_dims + shape @staticmethod def _read_header(chunk): - num_dims = struct.unpack('>H', chunk[2:4])[0] + num_dims = struct.unpack(">H", chunk[2:4])[0] shape = tuple( - struct.unpack('>I', chunk[i:i+4])[0] - for i 
in range(4, num_dims*4 + 4, 4) + struct.unpack(">I", chunk[i : i + 4])[0] for i in range(4, num_dims * 4 + 4, 4) )[::-1] - len_header = 4 + num_dims*4 + len_header = 4 + num_dims * 4 return len_header, shape @@ -962,7 +947,7 @@ def _from_big_endian(self, data): if not self._little_endian: return data - a = np.frombuffer(data, self.dtype.newbyteorder('>')) + a = np.frombuffer(data, self.dtype.newbyteorder(">")) return a.astype(self.dtype) diff --git a/zarr/storage.py b/zarr/storage.py index ef1bd64955..37a821fc5a 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -36,11 +36,7 @@ import time from numcodecs.abc import Codec -from numcodecs.compat import ( - ensure_bytes, - ensure_text, - ensure_contiguous_ndarray_like -) +from numcodecs.compat import ensure_bytes, ensure_text, ensure_contiguous_ndarray_like from numcodecs.registry import codec_registry from zarr.context import Context @@ -53,45 +49,58 @@ ReadOnlyError, ) from zarr.meta import encode_array_metadata, encode_group_metadata -from zarr.util import (buffer_size, json_loads, nolock, normalize_chunks, - normalize_dimension_separator, - normalize_dtype, normalize_fill_value, normalize_order, - normalize_shape, normalize_storage_path, retry_call, - ensure_contiguous_ndarray_or_bytes) +from zarr.util import ( + buffer_size, + json_loads, + nolock, + normalize_chunks, + normalize_dimension_separator, + normalize_dtype, + normalize_fill_value, + normalize_order, + normalize_shape, + normalize_storage_path, + retry_call, + ensure_contiguous_ndarray_or_bytes, +) from zarr._storage.absstore import ABSStore # noqa: F401 -from zarr._storage.store import (_get_hierarchy_metadata, # noqa: F401 - _get_metadata_suffix, - _listdir_from_keys, - _rename_from_keys, - _rename_metadata_v3, - _rmdir_from_keys, - _rmdir_from_keys_v3, - _path_to_prefix, - _prefix_to_array_key, - _prefix_to_group_key, - array_meta_key, - attrs_key, - data_root, - group_meta_key, - meta_root, - DEFAULT_ZARR_VERSION, - BaseStore, - Store) +from zarr._storage.store import ( # noqa: F401 + _get_hierarchy_metadata, + _get_metadata_suffix, + _listdir_from_keys, + _rename_from_keys, + _rename_metadata_v3, + _rmdir_from_keys, + _rmdir_from_keys_v3, + _path_to_prefix, + _prefix_to_array_key, + _prefix_to_group_key, + array_meta_key, + attrs_key, + data_root, + group_meta_key, + meta_root, + DEFAULT_ZARR_VERSION, + BaseStore, + Store, +) __doctest_requires__ = { - ('RedisStore', 'RedisStore.*'): ['redis'], - ('MongoDBStore', 'MongoDBStore.*'): ['pymongo'], - ('LRUStoreCache', 'LRUStoreCache.*'): ['s3fs'], + ("RedisStore", "RedisStore.*"): ["redis"], + ("MongoDBStore", "MongoDBStore.*"): ["pymongo"], + ("LRUStoreCache", "LRUStoreCache.*"): ["s3fs"], } try: # noinspection PyUnresolvedReferences from zarr.codecs import Blosc + default_compressor = Blosc() except ImportError: # pragma: no cover from zarr.codecs import Zlib + default_compressor = Zlib() @@ -113,7 +122,7 @@ def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> b path = normalize_storage_path(path) prefix = _path_to_prefix(path) key = _prefix_to_group_key(store, prefix) - store_version = getattr(store, '_store_version', 2) + store_version = getattr(store, "_store_version", 2) if store_version == 2 or explicit_only: return key in store else: @@ -122,9 +131,9 @@ def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> b # for v3, need to also handle implicit groups sfx = _get_metadata_suffix(store) # type: ignore - implicit_prefix = key.replace('.group' + sfx, '') - if not 
implicit_prefix.endswith('/'): - implicit_prefix += '/' + implicit_prefix = key.replace(".group" + sfx, "") + if not implicit_prefix.endswith("/"): + implicit_prefix += "/" if store.list_prefix(implicit_prefix): # type: ignore return True return False @@ -132,7 +141,7 @@ def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> b def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseStore: # default to v2 store for backward compatibility - zarr_version = getattr(store, '_store_version', 2) + zarr_version = getattr(store, "_store_version", 2) if zarr_version != 2: raise ValueError("store must be a version 2 store") if store is None: @@ -142,23 +151,27 @@ def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseS store = os.fspath(store) if FSStore._fsspec_installed(): import fsspec + if isinstance(store, fsspec.FSMap): - return FSStore(store.root, - fs=store.fs, - mode=mode, - check=store.check, - create=store.create, - missing_exceptions=store.missing_exceptions, - **(storage_options or {})) + return FSStore( + store.root, + fs=store.fs, + mode=mode, + check=store.check, + create=store.create, + missing_exceptions=store.missing_exceptions, + **(storage_options or {}), + ) if isinstance(store, str): if "://" in store or "::" in store: return FSStore(store, mode=mode, **(storage_options or {})) elif storage_options: raise ValueError("storage_options passed with non-fsspec path") - if store.endswith('.zip'): + if store.endswith(".zip"): return ZipStore(store, mode=mode) - elif store.endswith('.n5'): + elif store.endswith(".n5"): from zarr.n5 import N5Store + return N5Store(store) else: return DirectoryStore(store) @@ -167,8 +180,9 @@ def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseS return store -def normalize_store_arg(store: Any, storage_options=None, mode="r", *, - zarr_version=None) -> BaseStore: +def normalize_store_arg( + store: Any, storage_options=None, mode="r", *, zarr_version=None +) -> BaseStore: if zarr_version is None: # default to v2 store for backward compatibility zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) @@ -176,6 +190,7 @@ def normalize_store_arg(store: Any, storage_options=None, mode="r", *, normalize_store = _normalize_store_arg_v2 elif zarr_version == 3: from zarr._storage.v3 import _normalize_store_arg_v3 + normalize_store = _normalize_store_arg_v3 else: raise ValueError("zarr_version must be either 2 or 3") @@ -187,7 +202,7 @@ def rmdir(store: StoreLike, path: Path = None): this will be called, otherwise will fall back to implementation via the `Store` interface.""" path = normalize_storage_path(path) - store_version = getattr(store, '_store_version', 2) + store_version = getattr(store, "_store_version", 2) if hasattr(store, "rmdir") and store.is_erasable(): # type: ignore # pass through store.rmdir(path) # type: ignore @@ -205,7 +220,7 @@ def rename(store: Store, src_path: Path, dst_path: Path): `Store` interface.""" src_path = normalize_storage_path(src_path) dst_path = normalize_storage_path(dst_path) - if hasattr(store, 'rename'): + if hasattr(store, "rename"): # pass through store.rename(src_path, dst_path) else: @@ -218,7 +233,7 @@ def listdir(store: BaseStore, path: Path = None): method, this will be called, otherwise will fall back to implementation via the `MutableMapping` interface.""" path = normalize_storage_path(path) - if hasattr(store, 'listdir'): + if hasattr(store, "listdir"): # pass through return store.listdir(path) # type: 
ignore else: @@ -237,14 +252,14 @@ def _getsize(store: BaseStore, path: Path = None) -> int: v = store[path] size = buffer_size(v) else: - path = '' if path is None else normalize_storage_path(path) + path = "" if path is None else normalize_storage_path(path) size = 0 - store_version = getattr(store, '_store_version', 2) + store_version = getattr(store, "_store_version", 2) if store_version == 3: - if path == '': + if path == "": # have to list the root folders without trailing / in this case - members = store.list_prefix(data_root.rstrip('/')) # type: ignore - members += store.list_prefix(meta_root.rstrip('/')) # type: ignore + members = store.list_prefix(data_root.rstrip("/")) # type: ignore + members += store.list_prefix(meta_root.rstrip("/")) # type: ignore else: members = store.list_prefix(data_root + path) # type: ignore members += store.list_prefix(meta_root + path) # type: ignore @@ -270,7 +285,7 @@ def _getsize(store: BaseStore, path: Path = None) -> int: def getsize(store: BaseStore, path: Path = None) -> int: """Compute size of stored items for a given path. If `store` provides a `getsize` method, this will be called, otherwise will return -1.""" - if hasattr(store, 'getsize'): + if hasattr(store, "getsize"): # pass through path = normalize_storage_path(path) return store.getsize(path) # type: ignore @@ -288,12 +303,11 @@ def _require_parent_group( ): # assume path is normalized if path: - segments = path.split('/') + segments = path.split("/") for i in range(len(segments)): - p = '/'.join(segments[:i]) + p = "/".join(segments[:i]) if contains_array(store, p): - _init_group_metadata(store, path=p, chunk_store=chunk_store, - overwrite=overwrite) + _init_group_metadata(store, path=p, chunk_store=chunk_store, overwrite=overwrite) elif not contains_group(store, p): _init_group_metadata(store, path=p, chunk_store=chunk_store) @@ -425,23 +439,31 @@ def init_array( # ensure parent group initialized store_version = getattr(store, "_store_version", 2) if store_version < 3: - _require_parent_group(path, store=store, chunk_store=chunk_store, - overwrite=overwrite) + _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) - if store_version == 3 and 'zarr.json' not in store: + if store_version == 3 and "zarr.json" not in store: # initialize with default zarr.json entry level metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore + store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore if not compressor: # compatibility with legacy tests using compressor=[] compressor = None - _init_array_metadata(store, shape=shape, chunks=chunks, dtype=dtype, - compressor=compressor, fill_value=fill_value, - order=order, overwrite=overwrite, path=path, - chunk_store=chunk_store, filters=filters, - object_codec=object_codec, - dimension_separator=dimension_separator, - storage_transformers=storage_transformers) + _init_array_metadata( + store, + shape=shape, + chunks=chunks, + dtype=dtype, + compressor=compressor, + fill_value=fill_value, + order=order, + overwrite=overwrite, + path=path, + chunk_store=chunk_store, + filters=filters, + object_codec=object_codec, + dimension_separator=dimension_separator, + storage_transformers=storage_transformers, + ) def _init_array_metadata( @@ -461,7 +483,7 @@ def _init_array_metadata( storage_transformers=(), ): - store_version = getattr(store, '_store_version', 2) + store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) @@ 
-486,11 +508,11 @@ def _init_array_metadata( if chunk_store is not None: chunk_store.erase_prefix(data_prefix) # type: ignore - if '/' in path: + if "/" in path: # path is a subfolder of an existing array, remove that array - parent_path = '/'.join(path.split('/')[:-1]) + parent_path = "/".join(path.split("/")[:-1]) sfx = _get_metadata_suffix(store) # type: ignore - array_key = meta_root + parent_path + '.array' + sfx + array_key = meta_root + parent_path + ".array" + sfx if array_key in store: store.erase(array_key) # type: ignore @@ -500,9 +522,9 @@ def _init_array_metadata( elif contains_group(store, path, explicit_only=False): raise ContainsGroupError(path) elif store_version == 3: - if '/' in path: + if "/" in path: # cannot create an array within an existing array path - parent_path = '/'.join(path.split('/')[:-1]) + parent_path = "/".join(path.split("/")[:-1]) if contains_array(store, parent_path): raise ContainsArrayError(path) @@ -523,10 +545,10 @@ def _init_array_metadata( if shape == (): # no point in compressing a 0-dimensional array, only a single value compressor = None - elif compressor == 'none': + elif compressor == "none": # compatibility compressor = None - elif compressor == 'default': + elif compressor == "default": compressor = default_compressor # obtain compressor config @@ -556,16 +578,19 @@ def _init_array_metadata( if object_codec is None: if not filters: # there are no filters so we can be sure there is no object codec - raise ValueError('missing object_codec for object array') + raise ValueError("missing object_codec for object array") else: # one of the filters may be an object codec, issue a warning rather # than raise an error to maintain backwards-compatibility - warnings.warn('missing object_codec for object array; this will raise a ' - 'ValueError in version 3.0', FutureWarning) + warnings.warn( + "missing object_codec for object array; this will raise a " + "ValueError in version 3.0", + FutureWarning, + ) else: filters_config.insert(0, object_codec.get_config()) elif object_codec is not None: - warnings.warn('an object_codec is only needed for object arrays') + warnings.warn("an object_codec is only needed for object arrays") # use null to indicate no filters if not filters_config: @@ -574,32 +599,34 @@ def _init_array_metadata( # initialize metadata # TODO: don't store redundant dimension_separator for v3? 
_compressor = compressor_config if store_version == 2 else compressor - meta = dict(shape=shape, compressor=_compressor, - fill_value=fill_value, - dimension_separator=dimension_separator) + meta = dict( + shape=shape, + compressor=_compressor, + fill_value=fill_value, + dimension_separator=dimension_separator, + ) if store_version < 3: - meta.update(dict(chunks=chunks, dtype=dtype, order=order, - filters=filters_config)) + meta.update(dict(chunks=chunks, dtype=dtype, order=order, filters=filters_config)) assert not storage_transformers else: if dimension_separator is None: dimension_separator = "/" if filters_config: - attributes = {'filters': filters_config} + attributes = {"filters": filters_config} else: attributes = {} meta.update( - dict(chunk_grid=dict(type="regular", - chunk_shape=chunks, - separator=dimension_separator), - chunk_memory_layout=order, - data_type=dtype, - attributes=attributes, - storage_transformers=storage_transformers) + dict( + chunk_grid=dict(type="regular", chunk_shape=chunks, separator=dimension_separator), + chunk_memory_layout=order, + data_type=dtype, + attributes=attributes, + storage_transformers=storage_transformers, + ) ) key = _prefix_to_array_key(store, _path_to_prefix(path)) - if hasattr(store, '_metadata_class'): + if hasattr(store, "_metadata_class"): store[key] = store._metadata_class.encode_array_metadata(meta) # type: ignore else: store[key] = encode_array_metadata(meta) @@ -635,19 +662,17 @@ def init_group( # normalize path path = normalize_storage_path(path) - store_version = getattr(store, '_store_version', 2) + store_version = getattr(store, "_store_version", 2) if store_version < 3: # ensure parent group initialized - _require_parent_group(path, store=store, chunk_store=chunk_store, - overwrite=overwrite) + _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) - if store_version == 3 and 'zarr.json' not in store: + if store_version == 3 and "zarr.json" not in store: # initialize with default zarr.json entry level metadata - store['zarr.json'] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore + store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore # initialise metadata - _init_group_metadata(store=store, overwrite=overwrite, path=path, - chunk_store=chunk_store) + _init_group_metadata(store=store, overwrite=overwrite, path=path, chunk_store=chunk_store) if store_version == 3: # TODO: Should initializing a v3 group also create a corresponding @@ -663,7 +688,7 @@ def _init_group_metadata( chunk_store: Optional[StoreLike] = None, ): - store_version = getattr(store, '_store_version', 2) + store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) # guard conditions @@ -694,9 +719,9 @@ def _init_group_metadata( raise ContainsArrayError(path) elif contains_group(store, path): raise ContainsGroupError(path) - elif store_version == 3 and '/' in path: + elif store_version == 3 and "/" in path: # cannot create a group overlapping with an existing array name - parent_path = '/'.join(path.split('/')[:-1]) + parent_path = "/".join(path.split("/")[:-1]) if contains_array(store, parent_path): raise ContainsArrayError(path) @@ -704,11 +729,11 @@ def _init_group_metadata( # N.B., currently no metadata properties are needed, however there may # be in future if store_version == 3: - meta = {'attributes': {}} # type: ignore + meta = {"attributes": {}} # type: ignore else: meta = {} # type: ignore key = _prefix_to_group_key(store, 
_path_to_prefix(path)) - if hasattr(store, '_metadata_class'): + if hasattr(store, "_metadata_class"): store[key] = store._metadata_class.encode_group_metadata(meta) # type: ignore else: store[key] = encode_group_metadata(meta) @@ -718,7 +743,7 @@ def _dict_store_keys(d: Dict, prefix="", cls=dict): for k in d.keys(): v = d[k] if isinstance(v, cls): - yield from _dict_store_keys(v, prefix + k + '/', cls) + yield from _dict_store_keys(v, prefix + k + "/", cls) else: yield prefix + k @@ -814,7 +839,7 @@ def __setstate__(self, state): def _get_parent(self, item: str): parent = self.root # split the item - segments = item.split('/') + segments = item.split("/") # find the parent container for k in segments[:-1]: parent = parent[k] @@ -825,7 +850,7 @@ def _get_parent(self, item: str): def _require_parent(self, item): parent = self.root # split the item - segments = item.split('/') + segments = item.split("/") # require the parent container for k in segments[:-1]: try: @@ -874,11 +899,7 @@ def __contains__(self, item: str): # type: ignore[override] return not isinstance(value, self.cls) def __eq__(self, other): - return ( - isinstance(other, MemoryStore) and - self.root == other.root and - self.cls == other.cls - ) + return isinstance(other, MemoryStore) and self.root == other.root and self.cls == other.cls def keys(self): yield from _dict_store_keys(self.root, cls=self.cls) @@ -963,12 +984,13 @@ def clear(self): class DictStore(MemoryStore): - def __init__(self, *args, **kwargs): - warnings.warn("DictStore has been renamed to MemoryStore in 2.4.0 and " - "will be removed in the future. Please use MemoryStore.", - DeprecationWarning, - stacklevel=2) + warnings.warn( + "DictStore has been renamed to MemoryStore in 2.4.0 and " + "will be removed in the future. Please use MemoryStore.", + DeprecationWarning, + stacklevel=2, + ) super().__init__(*args, **kwargs) @@ -1048,7 +1070,7 @@ def _normalize_key(self, key): @staticmethod def _fromfile(fn): - """ Read data from a file + """Read data from a file Parameters ---------- @@ -1060,12 +1082,12 @@ def _fromfile(fn): Subclasses should overload this method to specify any custom file reading logic. """ - with open(fn, 'rb') as f: + with open(fn, "rb") as f: return f.read() @staticmethod def _tofile(a, fn): - """ Write data to a file + """Write data to a file Parameters ---------- @@ -1079,7 +1101,7 @@ def _tofile(a, fn): Subclasses should overload this method to specify any custom file writing logic. """ - with open(fn, mode='wb') as f: + with open(fn, mode="wb") as f: f.write(a) def __getitem__(self, key): @@ -1116,7 +1138,7 @@ def __setitem__(self, key, value): # write to temporary file # note we're not using tempfile.NamedTemporaryFile to avoid restrictive file permissions - temp_name = file_name + '.' + uuid.uuid4().hex + '.partial' + temp_name = file_name + "." 
+ uuid.uuid4().hex + ".partial" temp_path = os.path.join(dir_path, temp_name) try: self._tofile(value, temp_path) @@ -1149,10 +1171,7 @@ def __contains__(self, key): return os.path.isfile(file_path) def __eq__(self, other): - return ( - isinstance(other, DirectoryStore) and - self.path == other.path - ) + return isinstance(other, DirectoryStore) and self.path == other.path def keys(self): if os.path.exists(self.path): @@ -1184,8 +1203,11 @@ def dir_path(self, path=None): return dir_path def listdir(self, path=None): - return self._nested_listdir(path) if self._dimension_separator == "/" else \ - self._flat_listdir(path) + return ( + self._nested_listdir(path) + if self._dimension_separator == "/" + else self._flat_listdir(path) + ) def _flat_listdir(self, path=None): dir_path = self.dir_path(path) @@ -1208,9 +1230,9 @@ def _nested_listdir(self, path=None): for file_name in file_names: file_path = os.path.join(dir_path, file_name) rel_path = file_path.split(root_path + os.path.sep)[1] - new_children.append(rel_path.replace( - os.path.sep, - self._dimension_separator or '.')) + new_children.append( + rel_path.replace(os.path.sep, self._dimension_separator or ".") + ) else: new_children.append(entry) return sorted(new_children) @@ -1256,21 +1278,21 @@ def clear(self): shutil.rmtree(self.path) -def atexit_rmtree(path, - isdir=os.path.isdir, - rmtree=shutil.rmtree): # pragma: no cover +def atexit_rmtree(path, isdir=os.path.isdir, rmtree=shutil.rmtree): # pragma: no cover """Ensure directory removal at interpreter exit.""" if isdir(path): rmtree(path) # noinspection PyShadowingNames -def atexit_rmglob(path, - glob=glob.glob, - isdir=os.path.isdir, - isfile=os.path.isfile, - remove=os.remove, - rmtree=shutil.rmtree): # pragma: no cover +def atexit_rmglob( + path, + glob=glob.glob, + isdir=os.path.isdir, + isfile=os.path.isfile, + remove=os.remove, + rmtree=shutil.rmtree, +): # pragma: no cover """Ensure removal of multiple files at interpreter exit.""" for p in glob(path): if isfile(p): @@ -1316,19 +1338,25 @@ class FSStore(Store): storage_options : passed to the fsspec implementation. Cannot be used together with fs. """ + _array_meta_key = array_meta_key _group_meta_key = group_meta_key _attrs_key = attrs_key - def __init__(self, url, normalize_keys=False, key_separator=None, - mode='w', - exceptions=(KeyError, PermissionError, IOError), - dimension_separator=None, - fs=None, - check=False, - create=False, - missing_exceptions=None, - **storage_options): + def __init__( + self, + url, + normalize_keys=False, + key_separator=None, + mode="w", + exceptions=(KeyError, PermissionError, IOError), + dimension_separator=None, + fs=None, + check=False, + create=False, + missing_exceptions=None, + **storage_options, + ): if not self._fsspec_installed(): # pragma: no cover raise ImportError("`fsspec` is required to use zarr's FSStore") import fsspec @@ -1374,13 +1402,13 @@ def _default_key_separator(self): self.key_separator = "." 
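As a rough standalone sketch of what the _normalize_key logic just below does to chunk keys (normalize_chunk_key, sep, and the metadata-name tuple are illustrative stand-ins, not zarr API):

def normalize_chunk_key(key: str, sep: str) -> str:
    # metadata documents keep their dots; only the trailing chunk
    # segment has "." rewritten to the store's key separator
    meta_names = (".zarray", ".zgroup", ".zattrs")
    key = key.strip("/")
    *bits, end = key.split("/")
    if end not in meta_names:
        end = end.replace(".", sep)
    return "/".join(bits + [end])

print(normalize_chunk_key("foo/1.2", "/"))      # foo/1/2
print(normalize_chunk_key("foo/.zarray", "/"))  # foo/.zarray

With a "/" separator this is what lets the flat chunk key "1.2" resolve to the nested object path "1/2" on the backing filesystem.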
def _normalize_key(self, key): - key = normalize_storage_path(key).lstrip('/') + key = normalize_storage_path(key).lstrip("/") if key: - *bits, end = key.split('/') + *bits, end = key.split("/") if end not in (self._array_meta_key, self._group_meta_key, self._attrs_key): - end = end.replace('.', self.key_separator) - key = '/'.join(bits + [end]) + end = end.replace(".", self.key_separator) + key = "/".join(bits + [end]) return key.lower() if self.normalize_keys else key @@ -1402,7 +1430,7 @@ def __getitem__(self, key): raise KeyError(key) from e def setitems(self, values): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() # Normalize keys and make sure the values are bytes @@ -1413,7 +1441,7 @@ def setitems(self, values): self.map.setitems(values) def __setitem__(self, key, value): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() key = self._normalize_key(key) value = ensure_contiguous_ndarray_or_bytes(value) @@ -1427,7 +1455,7 @@ def __setitem__(self, key, value): raise KeyError(key) from e def __delitem__(self, key): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() key = self._normalize_key(key) path = self.dir_path(key) @@ -1437,7 +1465,7 @@ def __delitem__(self, key): del self.map[key] def delitems(self, keys): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() # only remove the keys that exist in the store nkeys = [self._normalize_key(key) for key in keys if key in self] @@ -1450,8 +1478,7 @@ def __contains__(self, key): return key in self.map def __eq__(self, other): - return (type(self) is type(other) and self.map == other.map - and self.mode == other.mode) + return type(self) is type(other) and self.map == other.map and self.mode == other.mode def keys(self): return iter(self.map) @@ -1469,8 +1496,9 @@ def dir_path(self, path=None): def listdir(self, path=None): dir_path = self.dir_path(path) try: - children = sorted(p.rstrip('/').rsplit('/', 1)[-1] - for p in self.fs.ls(dir_path, detail=False)) + children = sorted( + p.rstrip("/").rsplit("/", 1)[-1] for p in self.fs.ls(dir_path, detail=False) + ) if self.key_separator != "/": return children else: @@ -1485,8 +1513,8 @@ def listdir(self, path=None): for file_name in self.fs.find(entry_path): file_path = os.path.join(dir_path, file_name) rel_path = file_path.split(root_path)[1] - rel_path = rel_path.lstrip('/') - new_children.append(rel_path.replace('/', '.')) + rel_path = rel_path.lstrip("/") + new_children.append(rel_path.replace("/", ".")) else: new_children.append(entry) return sorted(new_children) @@ -1496,7 +1524,7 @@ def listdir(self, path=None): return [] def rmdir(self, path=None): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() store_path = self.dir_path(path) if self.fs.isdir(store_path): @@ -1507,7 +1535,7 @@ def getsize(self, path=None): return self.fs.du(store_path, True, True) def clear(self): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() self.map.clear() @@ -1540,15 +1568,16 @@ class TempStore(DirectoryStore): """ # noinspection PyShadowingBuiltins - def __init__(self, suffix='', prefix='zarr', dir=None, normalize_keys=False, - dimension_separator=None): + def __init__( + self, suffix="", prefix="zarr", dir=None, normalize_keys=False, dimension_separator=None + ): path = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir) atexit.register(atexit_rmtree, path) super().__init__(path, normalize_keys=normalize_keys) -_prog_ckey = re.compile(r'^(\d+)(\.\d+)+$') -_prog_number = re.compile(r'^\d+$') 
+_prog_ckey = re.compile(r"^(\d+)(\.\d+)+$") +_prog_number = re.compile(r"^\d+$") class NestedDirectoryStore(DirectoryStore): @@ -1629,15 +1658,11 @@ def __init__(self, path, normalize_keys=False, dimension_separator="/"): if dimension_separator is None: dimension_separator = "/" elif dimension_separator != "/": - raise ValueError( - "NestedDirectoryStore only supports '/' as dimension_separator") + raise ValueError("NestedDirectoryStore only supports '/' as dimension_separator") self._dimension_separator = dimension_separator def __eq__(self, other): - return ( - isinstance(other, NestedDirectoryStore) and - self.path == other.path - ) + return isinstance(other, NestedDirectoryStore) and self.path == other.path # noinspection PyPep8Naming @@ -1735,8 +1760,14 @@ class also supports the context manager protocol, which ensures the ``close()`` _erasable = False - def __init__(self, path, compression=zipfile.ZIP_STORED, allowZip64=True, mode='a', - dimension_separator=None): + def __init__( + self, + path, + compression=zipfile.ZIP_STORED, + allowZip64=True, + mode="a", + dimension_separator=None, + ): # store properties path = os.path.abspath(path) @@ -1752,8 +1783,7 @@ def __init__(self, path, compression=zipfile.ZIP_STORED, allowZip64=True, mode=' self.mutex = RLock() # open zip file - self.zf = zipfile.ZipFile(path, mode=mode, compression=compression, - allowZip64=allowZip64) + self.zf = zipfile.ZipFile(path, mode=mode, compression=compression, allowZip64=allowZip64) def __getstate__(self): self.flush() @@ -1763,10 +1793,9 @@ def __setstate__(self, state): path, compression, allowZip64, mode = state # if initially opened with mode 'w' or 'x', re-open in mode 'a' so file doesn't # get clobbered - if mode in 'wx': - mode = 'a' - self.__init__(path=path, compression=compression, allowZip64=allowZip64, - mode=mode) + if mode in "wx": + mode = "a" + self.__init__(path=path, compression=compression, allowZip64=allowZip64, mode=mode) def close(self): """Closes the underlying zip file, ensuring all records are written.""" @@ -1776,14 +1805,14 @@ def close(self): def flush(self): """Closes the underlying zip file, ensuring all records are written, then re-opens the file for further modifications.""" - if self.mode != 'r': + if self.mode != "r": with self.mutex: self.zf.close() # N.B., re-open with mode 'a' regardless of initial mode so we don't wipe # what's been written - self.zf = zipfile.ZipFile(self.path, mode='a', - compression=self.compression, - allowZip64=self.allowZip64) + self.zf = zipfile.ZipFile( + self.path, mode="a", compression=self.compression, allowZip64=self.allowZip64 + ) def __enter__(self): return self @@ -1797,21 +1826,20 @@ def __getitem__(self, key): return f.read() def __setitem__(self, key, value): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() value = ensure_contiguous_ndarray_like(value).view("u1") with self.mutex: # writestr(key, value) writes with default permissions from # zipfile (600) that are too restrictive, build ZipInfo for # the key to work around limitation - keyinfo = zipfile.ZipInfo(filename=key, - date_time=time.localtime(time.time())[:6]) + keyinfo = zipfile.ZipInfo(filename=key, date_time=time.localtime(time.time())[:6]) keyinfo.compress_type = self.compression if keyinfo.filename[-1] == os.sep: - keyinfo.external_attr = 0o40775 << 16 # drwxrwxr-x - keyinfo.external_attr |= 0x10 # MS-DOS directory flag + keyinfo.external_attr = 0o40775 << 16 # drwxrwxr-x + keyinfo.external_attr |= 0x10 # MS-DOS directory flag else: - 
keyinfo.external_attr = 0o644 << 16 # ?rw-r--r-- + keyinfo.external_attr = 0o644 << 16 # ?rw-r--r-- self.zf.writestr(keyinfo, value) @@ -1820,10 +1848,10 @@ def __delitem__(self, key): def __eq__(self, other): return ( - isinstance(other, ZipStore) and - self.path == other.path and - self.compression == other.compression and - self.allowZip64 == other.allowZip64 + isinstance(other, ZipStore) + and self.path == other.path + and self.compression == other.compression + and self.allowZip64 == other.allowZip64 ) def keylist(self): @@ -1860,7 +1888,7 @@ def getsize(self, path=None): size = 0 for child in children: if path: - name = path + '/' + child + name = path + "/" + child else: name = child try: @@ -1880,14 +1908,14 @@ def getsize(self, path=None): return 0 def clear(self): - if self.mode == 'r': + if self.mode == "r": raise ReadOnlyError() with self.mutex: self.close() os.remove(self.path) - self.zf = zipfile.ZipFile(self.path, mode=self.mode, - compression=self.compression, - allowZip64=self.allowZip64) + self.zf = zipfile.ZipFile( + self.path, mode=self.mode, compression=self.compression, allowZip64=self.allowZip64 + ) def migrate_1to2(store): @@ -1909,37 +1937,38 @@ def migrate_1to2(store): # migrate metadata from zarr import meta_v1 - meta = meta_v1.decode_metadata(store['meta']) - del store['meta'] + + meta = meta_v1.decode_metadata(store["meta"]) + del store["meta"] # add empty filters - meta['filters'] = None + meta["filters"] = None # migration compression metadata - compression = meta['compression'] - if compression is None or compression == 'none': + compression = meta["compression"] + if compression is None or compression == "none": compressor_config = None else: - compression_opts = meta['compression_opts'] + compression_opts = meta["compression_opts"] codec_cls = codec_registry[compression] if isinstance(compression_opts, dict): compressor = codec_cls(**compression_opts) else: compressor = codec_cls(compression_opts) compressor_config = compressor.get_config() - meta['compressor'] = compressor_config - del meta['compression'] - del meta['compression_opts'] + meta["compressor"] = compressor_config + del meta["compression"] + del meta["compression_opts"] # store migrated metadata - if hasattr(store, '_metadata_class'): + if hasattr(store, "_metadata_class"): store[array_meta_key] = store._metadata_class.encode_array_metadata(meta) else: store[array_meta_key] = encode_array_metadata(meta) # migrate user attributes - store[attrs_key] = store['attrs'] - del store['attrs'] + store[attrs_key] = store["attrs"] + del store["attrs"] # noinspection PyShadowingBuiltins @@ -2024,11 +2053,19 @@ class DBMStore(Store): """ - def __init__(self, path, flag='c', mode=0o666, open=None, write_lock=True, - dimension_separator=None, - **open_kwargs): + def __init__( + self, + path, + flag="c", + mode=0o666, + open=None, + write_lock=True, + dimension_separator=None, + **open_kwargs, + ): if open is None: import dbm + open = dbm.open path = os.path.abspath(path) # noinspection PyArgumentList @@ -2053,27 +2090,25 @@ def __getstate__(self): except Exception: # flush may fail if db has already been closed pass - return (self.path, self.flag, self.mode, self.open, self.write_lock, - self.open_kwargs) + return (self.path, self.flag, self.mode, self.open, self.write_lock, self.open_kwargs) def __setstate__(self, state): path, flag, mode, open, write_lock, open_kws = state - if flag[0] == 'n': - flag = 'c' + flag[1:] # don't clobber an existing database - self.__init__(path=path, flag=flag, mode=mode, 
open=open, - write_lock=write_lock, **open_kws) + if flag[0] == "n": + flag = "c" + flag[1:] # don't clobber an existing database + self.__init__(path=path, flag=flag, mode=mode, open=open, write_lock=write_lock, **open_kws) def close(self): """Closes the underlying database file.""" - if hasattr(self.db, 'close'): + if hasattr(self.db, "close"): with self.write_mutex: self.db.close() def flush(self): """Synchronizes data to the underlying database file.""" - if self.flag[0] != 'r': + if self.flag[0] != "r": with self.write_mutex: - if hasattr(self.db, 'sync'): + if hasattr(self.db, "sync"): self.db.sync() else: # pragma: no cover # we don't cover this branch anymore as ndbm (oracle) is not packaged @@ -2081,8 +2116,8 @@ def flush(self): # https://github.com/conda-forge/staged-recipes/issues/4476 # fall-back, close and re-open, needed for ndbm flag = self.flag - if flag[0] == 'n': - flag = 'c' + flag[1:] # don't clobber an existing database + if flag[0] == "n": + flag = "c" + flag[1:] # don't clobber an existing database self.db.close() # noinspection PyArgumentList self.db = self.open(self.path, flag, self.mode, **self.open_kwargs) @@ -2113,11 +2148,12 @@ def __delitem__(self, key): def __eq__(self, other): return ( - isinstance(other, DBMStore) and - self.path == other.path and + isinstance(other, DBMStore) + and self.path == other.path + and # allow flag and mode to differ - self.open == other.open and - self.open_kwargs == other.open_kwargs + self.open == other.open + and self.open_kwargs == other.open_kwargs ) def keys(self): @@ -2200,28 +2236,28 @@ def __init__(self, path, buffers=True, dimension_separator=None, **kwargs): # set default memory map size to something larger than the lmdb default, which is # very likely to be too small for any moderate array (logic copied from zict) - map_size = (2**40 if sys.maxsize >= 2**32 else 2**28) - kwargs.setdefault('map_size', map_size) + map_size = 2**40 if sys.maxsize >= 2**32 else 2**28 + kwargs.setdefault("map_size", map_size) # don't initialize buffers to zero by default, shouldn't be necessary - kwargs.setdefault('meminit', False) + kwargs.setdefault("meminit", False) # decide whether to use the writemap option based on the operating system's # support for sparse files - writemap requires sparse file support otherwise # the whole# `map_size` may be reserved up front on disk (logic copied from zict) - writemap = sys.platform.startswith('linux') - kwargs.setdefault('writemap', writemap) + writemap = sys.platform.startswith("linux") + kwargs.setdefault("writemap", writemap) # decide options for when data are flushed to disk - choose to delay syncing # data to filesystem, otherwise pay a large performance penalty (zict also does # this) - kwargs.setdefault('metasync', False) - kwargs.setdefault('sync', False) - kwargs.setdefault('map_async', False) + kwargs.setdefault("metasync", False) + kwargs.setdefault("sync", False) + kwargs.setdefault("map_async", False) # set default option for number of cached transactions max_spare_txns = multiprocessing.cpu_count() - kwargs.setdefault('max_spare_txns', max_spare_txns) + kwargs.setdefault("max_spare_txns", max_spare_txns) # normalize path path = os.path.abspath(path) @@ -2312,7 +2348,7 @@ def __iter__(self): return self.keys() def __len__(self): - return self.db.stat()['entries'] + return self.db.stat()["entries"] class LRUStoreCache(Store): @@ -2364,14 +2400,30 @@ def __init__(self, store: StoreLike, max_size: int): self.hits = self.misses = 0 def __getstate__(self): - return (self._store, 
self._max_size, self._current_size, self._keys_cache, - self._contains_cache, self._listdir_cache, self._values_cache, self.hits, - self.misses) + return ( + self._store, + self._max_size, + self._current_size, + self._keys_cache, + self._contains_cache, + self._listdir_cache, + self._values_cache, + self.hits, + self.misses, + ) def __setstate__(self, state): - (self._store, self._max_size, self._current_size, self._keys_cache, - self._contains_cache, self._listdir_cache, self._values_cache, self.hits, - self.misses) = state + ( + self._store, + self._max_size, + self._current_size, + self._keys_cache, + self._contains_cache, + self._listdir_cache, + self._values_cache, + self.hits, + self.misses, + ) = state self._mutex = Lock() def __len__(self): @@ -2536,7 +2588,7 @@ def __init__(self, path, dimension_separator=None, **kwargs): self._dimension_separator = dimension_separator # normalize path - if path != ':memory:': + if path != ":memory:": path = os.path.abspath(path) # store properties @@ -2560,7 +2612,7 @@ def __init__(self, path, dimension_separator=None, **kwargs): detect_types=0, isolation_level=None, check_same_thread=check_same_thread, - **self.kwargs + **self.kwargs, ) # handle keys as `str`s @@ -2571,13 +2623,11 @@ def __init__(self, path, dimension_separator=None, **kwargs): # initialize database with our table if missing with self.lock: - self.cursor.execute( - 'CREATE TABLE IF NOT EXISTS zarr(k TEXT PRIMARY KEY, v BLOB)' - ) + self.cursor.execute("CREATE TABLE IF NOT EXISTS zarr(k TEXT PRIMARY KEY, v BLOB)") def __getstate__(self): - if self.path == ':memory:': - raise PicklingError('Cannot pickle in-memory SQLite databases') + if self.path == ":memory:": + raise PicklingError("Cannot pickle in-memory SQLite databases") return self.path, self.kwargs def __setstate__(self, state): @@ -2592,8 +2642,8 @@ def close(self): self.db.close() def __getitem__(self, key): - value = self.cursor.execute('SELECT v FROM zarr WHERE (k = ?)', (key,)) - for v, in value: + value = self.cursor.execute("SELECT v FROM zarr WHERE (k = ?)", (key,)) + for (v,) in value: return v raise KeyError(key) @@ -2602,38 +2652,36 @@ def __setitem__(self, key, value): def __delitem__(self, key): with self.lock: - self.cursor.execute('DELETE FROM zarr WHERE (k = ?)', (key,)) + self.cursor.execute("DELETE FROM zarr WHERE (k = ?)", (key,)) if self.cursor.rowcount < 1: raise KeyError(key) def __contains__(self, key): - cs = self.cursor.execute( - 'SELECT COUNT(*) FROM zarr WHERE (k = ?)', (key,) - ) - for has, in cs: + cs = self.cursor.execute("SELECT COUNT(*) FROM zarr WHERE (k = ?)", (key,)) + for (has,) in cs: has = bool(has) return has def items(self): - kvs = self.cursor.execute('SELECT k, v FROM zarr') + kvs = self.cursor.execute("SELECT k, v FROM zarr") yield from kvs def keys(self): - ks = self.cursor.execute('SELECT k FROM zarr') - for k, in ks: + ks = self.cursor.execute("SELECT k FROM zarr") + for (k,) in ks: yield k def values(self): - vs = self.cursor.execute('SELECT v FROM zarr') - for v, in vs: + vs = self.cursor.execute("SELECT v FROM zarr") + for (v,) in vs: yield v def __iter__(self): return self.keys() def __len__(self): - cs = self.cursor.execute('SELECT COUNT(*) FROM zarr') - for c, in cs: + cs = self.cursor.execute("SELECT COUNT(*) FROM zarr") + for (c,) in cs: return c def update(self, *args, **kwargs): @@ -2648,19 +2696,21 @@ def update(self, *args, **kwargs): kv_list.append((k, v)) with self.lock: - self.cursor.executemany('REPLACE INTO zarr VALUES (?, ?)', kv_list) + 
self.cursor.executemany("REPLACE INTO zarr VALUES (?, ?)", kv_list) def listdir(self, path=None): path = normalize_storage_path(path) - sep = '_' if path == '' else '/' + sep = "_" if path == "" else "/" keys = self.cursor.execute( - ''' + """ SELECT DISTINCT SUBSTR(m, 0, INSTR(m, "/")) AS l FROM ( SELECT LTRIM(SUBSTR(k, LENGTH(?) + 1), "/") || "/" AS m FROM zarr WHERE k LIKE (? || "{sep}%") ) ORDER BY l ASC - '''.format(sep=sep), - (path, path) + """.format( + sep=sep + ), + (path, path), ) keys = list(map(operator.itemgetter(0), keys)) return keys @@ -2668,35 +2718,33 @@ def listdir(self, path=None): def getsize(self, path=None): path = normalize_storage_path(path) size = self.cursor.execute( - ''' + """ SELECT COALESCE(SUM(LENGTH(v)), 0) FROM zarr WHERE k LIKE (? || "%") AND 0 == INSTR(LTRIM(SUBSTR(k, LENGTH(?) + 1), "/"), "/") - ''', - (path, path) + """, + (path, path), ) - for s, in size: + for (s,) in size: return s def rmdir(self, path=None): path = normalize_storage_path(path) if path: with self.lock: - self.cursor.execute( - 'DELETE FROM zarr WHERE k LIKE (? || "/%")', (path,) - ) + self.cursor.execute('DELETE FROM zarr WHERE k LIKE (? || "/%")', (path,)) else: self.clear() def clear(self): with self.lock: self.cursor.executescript( - ''' + """ BEGIN TRANSACTION; DROP TABLE zarr; CREATE TABLE zarr(k TEXT PRIMARY KEY, v BLOB); COMMIT TRANSACTION; - ''' + """ ) @@ -2725,11 +2773,16 @@ class MongoDBStore(Store): """ - _key = 'key' - _value = 'value' + _key = "key" + _value = "value" - def __init__(self, database='mongodb_zarr', collection='zarr_collection', - dimension_separator=None, **kwargs): + def __init__( + self, + database="mongodb_zarr", + collection="zarr_collection", + dimension_separator=None, + **kwargs, + ): import pymongo self._database = database @@ -2751,9 +2804,9 @@ def __getitem__(self, key): def __setitem__(self, key, value): value = ensure_bytes(value) - self.collection.replace_one({self._key: key}, - {self._key: key, self._value: value}, - upsert=True) + self.collection.replace_one( + {self._key: key}, {self._key: key, self._value: value}, upsert=True + ) def __delitem__(self, key): result = self.collection.delete_many({self._key: key}) @@ -2801,8 +2854,10 @@ class RedisStore(Store): Keyword arguments passed through to the `redis.Redis` function. 
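    Examples
    --------
    A minimal usage sketch (illustrative only; it assumes a Redis server is
    reachable at the given ``host``/``port``, both of which are simply passed
    through to ``redis.Redis``):

    >>> import zarr
    >>> from zarr.storage import RedisStore
    >>> store = RedisStore(prefix="zarr", host="localhost", port=6379)  # doctest: +SKIP
    >>> root = zarr.group(store=store, overwrite=True)  # doctest: +SKIP
    >>> z = root.zeros("foo", shape=(10, 10), chunks=(5, 5))  # doctest: +SKIP
    >>> z[:] = 42  # doctest: +SKIP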
""" - def __init__(self, prefix='zarr', dimension_separator=None, **kwargs): + + def __init__(self, prefix="zarr", dimension_separator=None, **kwargs): import redis + self._prefix = prefix self._kwargs = kwargs self._dimension_separator = dimension_separator @@ -2810,7 +2865,7 @@ def __init__(self, prefix='zarr', dimension_separator=None, **kwargs): self.client = redis.Redis(**kwargs) def _key(self, key): - return '{prefix}:{key}'.format(prefix=self._prefix, key=key) + return "{prefix}:{key}".format(prefix=self._prefix, key=key) def __getitem__(self, key): return self.client[self._key(key)] @@ -2825,9 +2880,8 @@ def __delitem__(self, key): raise KeyError(key) def keylist(self): - offset = len(self._key('')) # length of prefix - return [key[offset:].decode('utf-8') - for key in self.client.keys(self._key('*'))] + offset = len(self._key("")) # length of prefix + return [key[offset:].decode("utf-8") for key in self.client.keys(self._key("*"))] def keys(self): yield from self.keylist() @@ -2893,10 +2947,11 @@ def __init__(self, store: StoreLike, metadata_key=".zmetadata"): meta = json_loads(self.store[metadata_key]) # check format of consolidated metadata - consolidated_format = meta.get('zarr_consolidated_format', None) + consolidated_format = meta.get("zarr_consolidated_format", None) if consolidated_format != 1: - raise MetadataError('unsupported zarr consolidated metadata format: %s' % - consolidated_format) + raise MetadataError( + "unsupported zarr consolidated metadata format: %s" % consolidated_format + ) # decode metadata self.meta_store: Store = KVStore(meta["metadata"]) diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index d6151b4f29..7dd5b340a2 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -24,31 +24,30 @@ def _init_store(version): return KVStoreV3(dict()) -class TestAttributes(): - +class TestAttributes: def init_attributes(self, store, read_only=False, cache=True, zarr_version=2): - root = '.z' if zarr_version == 2 else meta_root - return Attributes(store, key=root + 'attrs', read_only=read_only, cache=cache) + root = ".z" if zarr_version == 2 else meta_root + return Attributes(store, key=root + "attrs", read_only=read_only, cache=cache) def test_storage(self, zarr_version): store = _init_store(zarr_version) - root = '.z' if zarr_version == 2 else meta_root - attrs_key = root + 'attrs' + root = ".z" if zarr_version == 2 else meta_root + attrs_key = root + "attrs" a = Attributes(store=store, key=attrs_key) assert isinstance(a.store, KVStore) - assert 'foo' not in a - assert 'bar' not in a + assert "foo" not in a + assert "bar" not in a assert dict() == a.asdict() - a['foo'] = 'bar' - a['baz'] = 42 + a["foo"] = "bar" + a["baz"] = 42 assert attrs_key in store assert isinstance(store[attrs_key], bytes) - d = json.loads(str(store[attrs_key], 'utf-8')) + d = json.loads(str(store[attrs_key], "utf-8")) if zarr_version == 3: - d = d['attributes'] - assert dict(foo='bar', baz=42) == d + d = d["attributes"] + assert dict(foo="bar", baz=42) == d def test_utf8_encoding(self, zarr_version): @@ -65,42 +64,42 @@ def test_utf8_encoding(self, zarr_version): # fixture data fixture = group(store=DirectoryStore(str(fixdir))) - assert fixture['utf8attrs'].attrs.asdict() == dict(foo='た') + assert fixture["utf8attrs"].attrs.asdict() == dict(foo="た") def test_get_set_del_contains(self, zarr_version): store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) - assert 'foo' not in a - a['foo'] = 'bar' - a['baz'] = 42 - assert 
'foo' in a - assert 'baz' in a - assert 'bar' == a['foo'] - assert 42 == a['baz'] - del a['foo'] - assert 'foo' not in a + assert "foo" not in a + a["foo"] = "bar" + a["baz"] = 42 + assert "foo" in a + assert "baz" in a + assert "bar" == a["foo"] + assert 42 == a["baz"] + del a["foo"] + assert "foo" not in a with pytest.raises(KeyError): # noinspection PyStatementEffect - a['foo'] + a["foo"] def test_update_put(self, zarr_version): store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) - assert 'foo' not in a - assert 'bar' not in a - assert 'baz' not in a + assert "foo" not in a + assert "bar" not in a + assert "baz" not in a - a.update(foo='spam', bar=42, baz=4.2) - assert a['foo'] == 'spam' - assert a['bar'] == 42 - assert a['baz'] == 4.2 + a.update(foo="spam", bar=42, baz=4.2) + assert a["foo"] == "spam" + assert a["bar"] == 42 + assert a["baz"] == 4.2 - a.put(dict(foo='eggs', bar=84)) - assert a['foo'] == 'eggs' - assert a['bar'] == 84 - assert 'baz' not in a + a.put(dict(foo="eggs", bar=84)) + assert a["foo"] == "eggs" + assert a["bar"] == 84 + assert "baz" not in a def test_iterators(self, zarr_version): @@ -112,182 +111,182 @@ def test_iterators(self, zarr_version): assert set() == set(a.values()) assert set() == set(a.items()) - a['foo'] = 'bar' - a['baz'] = 42 + a["foo"] = "bar" + a["baz"] = 42 assert 2 == len(a) - assert {'foo', 'baz'} == set(a) - assert {'foo', 'baz'} == set(a.keys()) - assert {'bar', 42} == set(a.values()) - assert {('foo', 'bar'), ('baz', 42)} == set(a.items()) + assert {"foo", "baz"} == set(a) + assert {"foo", "baz"} == set(a.keys()) + assert {"bar", 42} == set(a.values()) + assert {("foo", "bar"), ("baz", 42)} == set(a.items()) def test_read_only(self, zarr_version): store = _init_store(zarr_version) a = self.init_attributes(store, read_only=True, zarr_version=zarr_version) if zarr_version == 2: - store['.zattrs'] = json.dumps(dict(foo='bar', baz=42)).encode('ascii') + store[".zattrs"] = json.dumps(dict(foo="bar", baz=42)).encode("ascii") else: - store['meta/root/attrs'] = json.dumps( - dict(attributes=dict(foo='bar', baz=42)) - ).encode('ascii') - assert a['foo'] == 'bar' - assert a['baz'] == 42 + store["meta/root/attrs"] = json.dumps(dict(attributes=dict(foo="bar", baz=42))).encode( + "ascii" + ) + assert a["foo"] == "bar" + assert a["baz"] == 42 with pytest.raises(PermissionError): - a['foo'] = 'quux' + a["foo"] = "quux" with pytest.raises(PermissionError): - del a['foo'] + del a["foo"] with pytest.raises(PermissionError): - a.update(foo='quux') + a.update(foo="quux") def test_key_completions(self, zarr_version): store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) d = a._ipython_key_completions_() - assert 'foo' not in d - assert '123' not in d - assert 'baz' not in d - assert 'asdf;' not in d - a['foo'] = 42 - a['123'] = 4.2 - a['asdf;'] = 'ghjkl;' + assert "foo" not in d + assert "123" not in d + assert "baz" not in d + assert "asdf;" not in d + a["foo"] = 42 + a["123"] = 4.2 + a["asdf;"] = "ghjkl;" d = a._ipython_key_completions_() - assert 'foo' in d - assert '123' in d - assert 'asdf;' in d - assert 'baz' not in d + assert "foo" in d + assert "123" in d + assert "asdf;" in d + assert "baz" not in d def test_caching_on(self, zarr_version): # caching is turned on by default # setup store store = CountingDict() if zarr_version == 2 else CountingDictV3() - attrs_key = '.zattrs' if zarr_version == 2 else 'meta/root/attrs' - assert 0 == store.counter['__getitem__', attrs_key] - 
assert 0 == store.counter['__setitem__', attrs_key] + attrs_key = ".zattrs" if zarr_version == 2 else "meta/root/attrs" + assert 0 == store.counter["__getitem__", attrs_key] + assert 0 == store.counter["__setitem__", attrs_key] if zarr_version == 2: - store[attrs_key] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') + store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") else: - store[attrs_key] = json.dumps(dict(attributes=dict(foo='xxx', bar=42))).encode('ascii') - assert 0 == store.counter['__getitem__', attrs_key] - assert 1 == store.counter['__setitem__', attrs_key] + store[attrs_key] = json.dumps(dict(attributes=dict(foo="xxx", bar=42))).encode("ascii") + assert 0 == store.counter["__getitem__", attrs_key] + assert 1 == store.counter["__setitem__", attrs_key] # setup attributes a = self.init_attributes(store, zarr_version=zarr_version) # test __getitem__ causes all attributes to be cached - assert a['foo'] == 'xxx' - assert 1 == store.counter['__getitem__', attrs_key] - assert a['bar'] == 42 - assert 1 == store.counter['__getitem__', attrs_key] - assert a['foo'] == 'xxx' - assert 1 == store.counter['__getitem__', attrs_key] + assert a["foo"] == "xxx" + assert 1 == store.counter["__getitem__", attrs_key] + assert a["bar"] == 42 + assert 1 == store.counter["__getitem__", attrs_key] + assert a["foo"] == "xxx" + assert 1 == store.counter["__getitem__", attrs_key] # test __setitem__ updates the cache - a['foo'] = 'yyy' + a["foo"] = "yyy" get_cnt = 2 if zarr_version == 2 else 3 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 2 == store.counter['__setitem__', attrs_key] - assert a['foo'] == 'yyy' - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 2 == store.counter['__setitem__', attrs_key] + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 2 == store.counter["__setitem__", attrs_key] + assert a["foo"] == "yyy" + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 2 == store.counter["__setitem__", attrs_key] # test update() updates the cache - a.update(foo='zzz', bar=84) + a.update(foo="zzz", bar=84) get_cnt = 3 if zarr_version == 2 else 5 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] - assert a['foo'] == 'zzz' - assert a['bar'] == 84 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] + assert a["foo"] == "zzz" + assert a["bar"] == 84 + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] # test __contains__ uses the cache - assert 'foo' in a - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] - assert 'spam' not in a - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] + assert "foo" in a + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] + assert "spam" not in a + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] # test __delitem__ updates the cache - del a['bar'] + del a["bar"] get_cnt = 4 if zarr_version == 2 else 7 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 4 == store.counter['__setitem__', attrs_key] - assert 'bar' not in a - 
assert get_cnt == store.counter['__getitem__', attrs_key] - assert 4 == store.counter['__setitem__', attrs_key] + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 4 == store.counter["__setitem__", attrs_key] + assert "bar" not in a + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 4 == store.counter["__setitem__", attrs_key] # test refresh() if zarr_version == 2: - store[attrs_key] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') + store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") else: - store[attrs_key] = json.dumps(dict(attributes=dict(foo='xxx', bar=42))).encode('ascii') - assert get_cnt == store.counter['__getitem__', attrs_key] + store[attrs_key] = json.dumps(dict(attributes=dict(foo="xxx", bar=42))).encode("ascii") + assert get_cnt == store.counter["__getitem__", attrs_key] a.refresh() get_cnt = 5 if zarr_version == 2 else 8 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert a['foo'] == 'xxx' - assert get_cnt == store.counter['__getitem__', attrs_key] - assert a['bar'] == 42 - assert get_cnt == store.counter['__getitem__', attrs_key] + assert get_cnt == store.counter["__getitem__", attrs_key] + assert a["foo"] == "xxx" + assert get_cnt == store.counter["__getitem__", attrs_key] + assert a["bar"] == 42 + assert get_cnt == store.counter["__getitem__", attrs_key] def test_caching_off(self, zarr_version): # setup store store = CountingDict() if zarr_version == 2 else CountingDictV3() - attrs_key = '.zattrs' if zarr_version == 2 else 'meta/root/attrs' - assert 0 == store.counter['__getitem__', attrs_key] - assert 0 == store.counter['__setitem__', attrs_key] + attrs_key = ".zattrs" if zarr_version == 2 else "meta/root/attrs" + assert 0 == store.counter["__getitem__", attrs_key] + assert 0 == store.counter["__setitem__", attrs_key] if zarr_version == 2: - store[attrs_key] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii') + store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") else: - store[attrs_key] = json.dumps(dict(attributes=dict(foo='xxx', bar=42))).encode('ascii') - assert 0 == store.counter['__getitem__', attrs_key] - assert 1 == store.counter['__setitem__', attrs_key] + store[attrs_key] = json.dumps(dict(attributes=dict(foo="xxx", bar=42))).encode("ascii") + assert 0 == store.counter["__getitem__", attrs_key] + assert 1 == store.counter["__setitem__", attrs_key] # setup attributes a = self.init_attributes(store, cache=False, zarr_version=zarr_version) # test __getitem__ - assert a['foo'] == 'xxx' - assert 1 == store.counter['__getitem__', attrs_key] - assert a['bar'] == 42 - assert 2 == store.counter['__getitem__', attrs_key] - assert a['foo'] == 'xxx' - assert 3 == store.counter['__getitem__', attrs_key] + assert a["foo"] == "xxx" + assert 1 == store.counter["__getitem__", attrs_key] + assert a["bar"] == 42 + assert 2 == store.counter["__getitem__", attrs_key] + assert a["foo"] == "xxx" + assert 3 == store.counter["__getitem__", attrs_key] # test __setitem__ - a['foo'] = 'yyy' + a["foo"] = "yyy" get_cnt = 4 if zarr_version == 2 else 5 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 2 == store.counter['__setitem__', attrs_key] - assert a['foo'] == 'yyy' + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 2 == store.counter["__setitem__", attrs_key] + assert a["foo"] == "yyy" get_cnt = 5 if zarr_version == 2 else 6 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 2 == store.counter['__setitem__', attrs_key] + assert get_cnt 
== store.counter["__getitem__", attrs_key] + assert 2 == store.counter["__setitem__", attrs_key] # test update() - a.update(foo='zzz', bar=84) + a.update(foo="zzz", bar=84) get_cnt = 6 if zarr_version == 2 else 8 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] - assert a['foo'] == 'zzz' - assert a['bar'] == 84 + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] + assert a["foo"] == "zzz" + assert a["bar"] == 84 get_cnt = 8 if zarr_version == 2 else 10 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] # test __contains__ - assert 'foo' in a + assert "foo" in a get_cnt = 9 if zarr_version == 2 else 11 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] - assert 'spam' not in a + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] + assert "spam" not in a get_cnt = 10 if zarr_version == 2 else 12 - assert get_cnt == store.counter['__getitem__', attrs_key] - assert 3 == store.counter['__setitem__', attrs_key] + assert get_cnt == store.counter["__getitem__", attrs_key] + assert 3 == store.counter["__setitem__", attrs_key] def test_wrong_keys(self, zarr_version): store = _init_store(zarr_version) diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 45ed9c3e11..389ce90a9d 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -45,17 +45,17 @@ ) from zarr.tests.util import have_fsspec -_VERSIONS = ((2, 3) if v3_api_available else (2, )) +_VERSIONS = (2, 3) if v3_api_available else (2,) def _init_creation_kwargs(zarr_version): - kwargs = {'zarr_version': zarr_version} + kwargs = {"zarr_version": zarr_version} if zarr_version == 3: - kwargs['path'] = 'dataset' + kwargs["path"] = "dataset" return kwargs -@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_array(path_type, zarr_version): store = tempfile.mkdtemp() @@ -64,24 +64,24 @@ def test_open_array(path_type, zarr_version): kwargs = _init_creation_kwargs(zarr_version) # open array, create if doesn't exist - z = open(store, mode='a', shape=100, **kwargs) + z = open(store, mode="a", shape=100, **kwargs) assert isinstance(z, Array) assert z.shape == (100,) # open array, overwrite - z = open(store, mode='w', shape=200, **kwargs) + z = open(store, mode="w", shape=200, **kwargs) assert isinstance(z, Array) assert z.shape == (200,) # open array, read-only - z = open(store, mode='r', **kwargs) + z = open(store, mode="r", **kwargs) assert isinstance(z, Array) assert z.shape == (200,) assert z.read_only # path not found with pytest.raises(ValueError): - open('doesnotexist', mode='r') + open("doesnotexist", mode="r") @pytest.mark.parametrize("zarr_version", _VERSIONS) @@ -93,18 +93,18 @@ def test_open_group(path_type, zarr_version): kwargs = _init_creation_kwargs(zarr_version) # open group, create if doesn't exist - g = open(store, mode='a', **kwargs) - g.create_group('foo') + g = open(store, mode="a", **kwargs) + g.create_group("foo") assert isinstance(g, Group) - assert 'foo' in g + assert "foo" in g # open group, overwrite - g = open(store, mode='w', **kwargs) + g = open(store, mode="w", **kwargs) assert 
isinstance(g, Group) - assert 'foo' not in g + assert "foo" not in g # open group, read-only - g = open(store, mode='r', **kwargs) + g = open(store, mode="r", **kwargs) assert isinstance(g, Group) assert g.read_only @@ -113,13 +113,13 @@ def test_open_group(path_type, zarr_version): def test_save_errors(zarr_version): with pytest.raises(ValueError): # no arrays provided - save_group('data/group.zarr', zarr_version=zarr_version) + save_group("data/group.zarr", zarr_version=zarr_version) with pytest.raises(TypeError): # no array provided - save_array('data/group.zarr', zarr_version=zarr_version) + save_array("data/group.zarr", zarr_version=zarr_version) with pytest.raises(ValueError): # no arrays provided - save('data/group.zarr', zarr_version=zarr_version) + save("data/group.zarr", zarr_version=zarr_version) @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") @@ -128,12 +128,12 @@ def test_zarr_v3_save_multiple_unnamed(): y = np.zeros(8) store = KVStoreV3(dict()) # no path provided - save_group(store, x, y, path='dataset', zarr_version=3) + save_group(store, x, y, path="dataset", zarr_version=3) # names become arr_{i} for unnamed *args - assert data_root + 'dataset/arr_0/c0' in store - assert data_root + 'dataset/arr_1/c0' in store - assert meta_root + 'dataset/arr_0.array.json' in store - assert meta_root + 'dataset/arr_1.array.json' in store + assert data_root + "dataset/arr_0/c0" in store + assert data_root + "dataset/arr_1/c0" in store + assert meta_root + "dataset/arr_0.array.json" in store + assert meta_root + "dataset/arr_1.array.json" in store @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") @@ -141,47 +141,47 @@ def test_zarr_v3_save_errors(): x = np.ones(8) with pytest.raises(ValueError): # no path provided - save_group('data/group.zr3', x, zarr_version=3) + save_group("data/group.zr3", x, zarr_version=3) with pytest.raises(ValueError): # no path provided - save_array('data/group.zr3', x, zarr_version=3) + save_array("data/group.zr3", x, zarr_version=3) with pytest.raises(ValueError): # no path provided - save('data/group.zr3', x, zarr_version=3) + save("data/group.zr3", x, zarr_version=3) @pytest.mark.parametrize("zarr_version", _VERSIONS) def test_lazy_loader(zarr_version): foo = np.arange(100) bar = np.arange(100, 0, -1) - store = 'data/group.zarr' if zarr_version == 2 else 'data/group.zr3' + store = "data/group.zarr" if zarr_version == 2 else "data/group.zr3" kwargs = _init_creation_kwargs(zarr_version) save(store, foo=foo, bar=bar, **kwargs) loader = load(store, **kwargs) - assert 'foo' in loader - assert 'bar' in loader - assert 'baz' not in loader + assert "foo" in loader + assert "bar" in loader + assert "baz" not in loader assert len(loader) == 2 - assert sorted(loader) == ['bar', 'foo'] - assert_array_equal(foo, loader['foo']) - assert_array_equal(bar, loader['bar']) - assert 'LazyLoader: ' in repr(loader) + assert sorted(loader) == ["bar", "foo"] + assert_array_equal(foo, loader["foo"]) + assert_array_equal(bar, loader["bar"]) + assert "LazyLoader: " in repr(loader) @pytest.mark.parametrize("zarr_version", _VERSIONS) def test_load_array(zarr_version): foo = np.arange(100) bar = np.arange(100, 0, -1) - store = 'data/group.zarr' if zarr_version == 2 else 'data/group.zr3' + store = "data/group.zarr" if zarr_version == 2 else "data/group.zr3" kwargs = _init_creation_kwargs(zarr_version) save(store, foo=foo, bar=bar, **kwargs) # can also load arrays directly into a numpy array - for array_name in ['foo', 'bar']: - array_path = 'dataset/' + 
array_name if zarr_version == 3 else array_name + for array_name in ["foo", "bar"]: + array_path = "dataset/" + array_name if zarr_version == 3 else array_name array = load(store, path=array_path, zarr_version=zarr_version) assert isinstance(array, np.ndarray) - if array_name == 'foo': + if array_name == "foo": assert_array_equal(foo, array) else: assert_array_equal(bar, array) @@ -191,27 +191,25 @@ def test_load_array(zarr_version): def test_tree(zarr_version): kwargs = _init_creation_kwargs(zarr_version) g1 = zarr.group(**kwargs) - g1.create_group('foo') - g3 = g1.create_group('bar') - g3.create_group('baz') - g5 = g3.create_group('qux') - g5.create_dataset('baz', shape=100, chunks=10) + g1.create_group("foo") + g3 = g1.create_group("bar") + g3.create_group("baz") + g5 = g3.create_group("qux") + g5.create_dataset("baz", shape=100, chunks=10) assert repr(zarr.tree(g1)) == repr(g1.tree()) assert str(zarr.tree(g1)) == str(g1.tree()) -@pytest.mark.parametrize('zarr_version', _VERSIONS) -@pytest.mark.parametrize('stores_from_path', [False, True]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) +@pytest.mark.parametrize("stores_from_path", [False, True]) @pytest.mark.parametrize( - 'with_chunk_store,listable', + "with_chunk_store,listable", [(False, True), (True, True), (False, False)], - ids=['default-listable', 'with_chunk_store-listable', 'default-unlistable'] + ids=["default-listable", "with_chunk_store-listable", "default-unlistable"], ) -def test_consolidate_metadata(with_chunk_store, - zarr_version, - listable, - monkeypatch, - stores_from_path): +def test_consolidate_metadata( + with_chunk_store, zarr_version, listable, monkeypatch, stores_from_path +): # setup initial data if stores_from_path: @@ -222,7 +220,7 @@ def test_consolidate_metadata(with_chunk_store, atexit.register(atexit_rmtree, chunk_store) else: chunk_store = None - version_kwarg = {'zarr_version': zarr_version} + version_kwarg = {"zarr_version": zarr_version} else: if zarr_version == 2: store = MemoryStore() @@ -231,19 +229,19 @@ def test_consolidate_metadata(with_chunk_store, store = MemoryStoreV3() chunk_store = MemoryStoreV3() if with_chunk_store else None version_kwarg = {} - path = 'dataset' if zarr_version == 3 else None + path = "dataset" if zarr_version == 3 else None z = group(store, chunk_store=chunk_store, path=path, **version_kwarg) # Reload the actual store implementation in case str store_to_copy = z.store - z.create_group('g1') - g2 = z.create_group('g2') - g2.attrs['hello'] = 'world' - arr = g2.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8') + z.create_group("g1") + g2 = z.create_group("g2") + g2.attrs["hello"] = "world" + arr = g2.create_dataset("arr", shape=(20, 20), chunks=(5, 5), dtype="f8") assert 16 == arr.nchunks assert 0 == arr.nchunks_initialized - arr.attrs['data'] = 1 + arr.attrs["data"] = 1 arr[:] = 1.0 assert 16 == arr.nchunks_initialized @@ -259,31 +257,35 @@ def test_consolidate_metadata(with_chunk_store, consolidate_metadata(store_class, path=None) with pytest.raises(ValueError): - consolidate_metadata(store_class, path='') + consolidate_metadata(store_class, path="") # perform consolidation out = consolidate_metadata(store_class, path=path) assert isinstance(out, Group) - assert ['g1', 'g2'] == list(out) + assert ["g1", "g2"] == list(out) if not stores_from_path: if zarr_version == 2: assert isinstance(out._store, ConsolidatedMetadataStore) - assert '.zmetadata' in store - meta_keys = ['.zgroup', - 'g1/.zgroup', - 'g2/.zgroup', - 'g2/.zattrs', - 'g2/arr/.zarray', - 
'g2/arr/.zattrs'] + assert ".zmetadata" in store + meta_keys = [ + ".zgroup", + "g1/.zgroup", + "g2/.zgroup", + "g2/.zattrs", + "g2/arr/.zarray", + "g2/arr/.zattrs", + ] else: assert isinstance(out._store, ConsolidatedMetadataStoreV3) - assert 'meta/root/consolidated/.zmetadata' in store - meta_keys = ['zarr.json', - meta_root + 'dataset.group.json', - meta_root + 'dataset/g1.group.json', - meta_root + 'dataset/g2.group.json', - meta_root + 'dataset/g2/arr.array.json', - 'meta/root/consolidated.group.json'] + assert "meta/root/consolidated/.zmetadata" in store + meta_keys = [ + "zarr.json", + meta_root + "dataset.group.json", + meta_root + "dataset/g1.group.json", + meta_root + "dataset/g2.group.json", + meta_root + "dataset/g2/arr.array.json", + "meta/root/consolidated.group.json", + ] for key in meta_keys: del store[key] @@ -307,9 +309,9 @@ def test_consolidate_metadata(with_chunk_store, # open consolidated z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path, **version_kwarg) - assert ['g1', 'g2'] == list(z2) - assert 'world' == z2.g2.attrs['hello'] - assert 1 == z2.g2.arr.attrs['data'] + assert ["g1", "g2"] == list(z2) + assert "world" == z2.g2.attrs["hello"] + assert 1 == z2.g2.arr.attrs["data"] assert (z2.g2.arr[:] == 1.0).all() assert 16 == z2.g2.arr.nchunks if listable: @@ -332,32 +334,32 @@ def test_consolidate_metadata(with_chunk_store, if zarr_version == 2: cmd = ConsolidatedMetadataStore(store) with pytest.raises(PermissionError): - del cmd['.zgroup'] + del cmd[".zgroup"] with pytest.raises(PermissionError): - cmd['.zgroup'] = None + cmd[".zgroup"] = None else: cmd = ConsolidatedMetadataStoreV3(store) with pytest.raises(PermissionError): - del cmd[meta_root + 'dataset.group.json'] + del cmd[meta_root + "dataset.group.json"] with pytest.raises(PermissionError): - cmd[meta_root + 'dataset.group.json'] = None + cmd[meta_root + "dataset.group.json"] = None # test getsize on the store assert isinstance(getsize(cmd), Integral) # test new metadata are not writeable with pytest.raises(PermissionError): - z2.create_group('g3') + z2.create_group("g3") with pytest.raises(PermissionError): - z2.create_dataset('spam', shape=42, chunks=7, dtype='i4') + z2.create_dataset("spam", shape=42, chunks=7, dtype="i4") with pytest.raises(PermissionError): - del z2['g2'] + del z2["g2"] # test consolidated metadata are not writeable with pytest.raises(PermissionError): - z2.g2.attrs['hello'] = 'universe' + z2.g2.attrs["hello"] = "universe" with pytest.raises(PermissionError): - z2.g2.arr.attrs['foo'] = 'bar' + z2.g2.arr.attrs["foo"] = "bar" # test the data are writeable z2.g2.arr[:] = 2 @@ -365,24 +367,31 @@ def test_consolidate_metadata(with_chunk_store, # test invalid modes with pytest.raises(ValueError): - open_consolidated(store, chunk_store=chunk_store, mode='a', path=path) + open_consolidated(store, chunk_store=chunk_store, mode="a", path=path) with pytest.raises(ValueError): - open_consolidated(store, chunk_store=chunk_store, mode='w', path=path) + open_consolidated(store, chunk_store=chunk_store, mode="w", path=path) with pytest.raises(ValueError): - open_consolidated(store, chunk_store=chunk_store, mode='w-', path=path) + open_consolidated(store, chunk_store=chunk_store, mode="w-", path=path) # make sure keyword arguments are passed through without error open_consolidated( - store, chunk_store=chunk_store, path=path, cache_attrs=True, synchronizer=None, + store, + chunk_store=chunk_store, + path=path, + cache_attrs=True, + synchronizer=None, **version_kwarg, ) 
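For orientation, the consolidation round-trip exercised by the test above reduces to the following minimal sketch (illustrative only: the store choice and array name are invented for the example, and this shows the zarr v2 behavior that the assertions above check, writable chunk data but read-only metadata):

    import numpy as np
    import zarr
    from zarr.storage import MemoryStore

    store = MemoryStore()
    root = zarr.group(store=store)
    arr = root.create_dataset("arr", shape=(4,), chunks=(2,), dtype="i4")
    arr[:] = np.arange(4)

    # gather all .zgroup/.zarray/.zattrs keys into a single ".zmetadata" key
    zarr.consolidate_metadata(store)

    # reopen through the consolidated metadata: chunk data stays writable,
    # while group/attribute metadata is read-only
    z = zarr.open_consolidated(store)
    assert list(z) == ["arr"]
    assert (z["arr"][:] == np.arange(4)).all()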
-@pytest.mark.parametrize("options", ( - {"dimension_separator": "/"}, - {"dimension_separator": "."}, - {"dimension_separator": None}, -)) +@pytest.mark.parametrize( + "options", + ( + {"dimension_separator": "/"}, + {"dimension_separator": "."}, + {"dimension_separator": None}, + ), +) def test_save_array_separator(tmpdir, options): data = np.arange(6).reshape((3, 2)) url = tmpdir.join("test.zarr") @@ -395,9 +404,9 @@ class TestCopyStore(unittest.TestCase): def setUp(self): source = dict() - source['foo'] = b'xxx' - source['bar/baz'] = b'yyy' - source['bar/qux'] = b'zzz' + source["foo"] = b"xxx" + source["bar/baz"] = b"yyy" + source["bar/qux"] = b"zzz" self.source = source def _get_dest_store(self): @@ -414,13 +423,13 @@ def test_no_paths(self): def test_source_path(self): source = self.source # paths should be normalized - for source_path in 'bar', 'bar/', '/bar', '/bar/': + for source_path in "bar", "bar/", "/bar", "/bar/": dest = self._get_dest_store() copy_store(source, dest, source_path=source_path) assert 2 == len(dest) for key in source: - if key.startswith('bar/'): - dest_key = key.split('bar/')[1] + if key.startswith("bar/"): + dest_key = key.split("bar/")[1] assert source[key] == dest[dest_key] else: assert key not in dest @@ -428,64 +437,63 @@ def test_source_path(self): def test_dest_path(self): source = self.source # paths should be normalized - for dest_path in 'new', 'new/', '/new', '/new/': + for dest_path in "new", "new/", "/new", "/new/": dest = self._get_dest_store() copy_store(source, dest, dest_path=dest_path) assert len(source) == len(dest) for key in source: if self._version == 3: - dest_key = key[:10] + 'new/' + key[10:] + dest_key = key[:10] + "new/" + key[10:] else: - dest_key = 'new/' + key + dest_key = "new/" + key assert source[key] == dest[dest_key] def test_source_dest_path(self): source = self.source # paths should be normalized - for source_path in 'bar', 'bar/', '/bar', '/bar/': - for dest_path in 'new', 'new/', '/new', '/new/': + for source_path in "bar", "bar/", "/bar", "/bar/": + for dest_path in "new", "new/", "/new", "/new/": dest = self._get_dest_store() - copy_store(source, dest, source_path=source_path, - dest_path=dest_path) + copy_store(source, dest, source_path=source_path, dest_path=dest_path) assert 2 == len(dest) for key in source: - if key.startswith('bar/'): - dest_key = 'new/' + key.split('bar/')[1] + if key.startswith("bar/"): + dest_key = "new/" + key.split("bar/")[1] assert source[key] == dest[dest_key] else: assert key not in dest - assert ('new/' + key) not in dest + assert ("new/" + key) not in dest def test_excludes_includes(self): source = self.source # single excludes dest = self._get_dest_store() - excludes = 'f.*' + excludes = "f.*" copy_store(source, dest, excludes=excludes) assert len(dest) == 2 - root = '' if self._version == 2 else meta_root - assert root + 'foo' not in dest + root = "" if self._version == 2 else meta_root + assert root + "foo" not in dest # multiple excludes dest = self._get_dest_store() - excludes = 'b.z', '.*x' + excludes = "b.z", ".*x" copy_store(source, dest, excludes=excludes) assert len(dest) == 1 - assert root + 'foo' in dest - assert root + 'bar/baz' not in dest - assert root + 'bar/qux' not in dest + assert root + "foo" in dest + assert root + "bar/baz" not in dest + assert root + "bar/qux" not in dest # excludes and includes dest = self._get_dest_store() - excludes = 'b.*' - includes = '.*x' + excludes = "b.*" + includes = ".*x" copy_store(source, dest, excludes=excludes, includes=includes) 
assert len(dest) == 2 - assert root + 'foo' in dest - assert root + 'bar/baz' not in dest - assert root + 'bar/qux' in dest + assert root + "foo" in dest + assert root + "bar/baz" not in dest + assert root + "bar/qux" in dest def test_dry_run(self): source = self.source @@ -496,8 +504,8 @@ def test_dry_run(self): def test_if_exists(self): source = self.source dest = self._get_dest_store() - root = '' if self._version == 2 else meta_root - dest[root + 'bar/baz'] = b'mmm' + root = "" if self._version == 2 else meta_root + dest[root + "bar/baz"] = b"mmm" # default ('raise') with pytest.raises(CopyError): @@ -505,25 +513,25 @@ def test_if_exists(self): # explicit 'raise' with pytest.raises(CopyError): - copy_store(source, dest, if_exists='raise') + copy_store(source, dest, if_exists="raise") # skip - copy_store(source, dest, if_exists='skip') + copy_store(source, dest, if_exists="skip") assert 3 == len(dest) - assert dest[root + 'foo'] == b'xxx' - assert dest[root + 'bar/baz'] == b'mmm' - assert dest[root + 'bar/qux'] == b'zzz' + assert dest[root + "foo"] == b"xxx" + assert dest[root + "bar/baz"] == b"mmm" + assert dest[root + "bar/qux"] == b"zzz" # replace - copy_store(source, dest, if_exists='replace') + copy_store(source, dest, if_exists="replace") assert 3 == len(dest) - assert dest[root + 'foo'] == b'xxx' - assert dest[root + 'bar/baz'] == b'yyy' - assert dest[root + 'bar/qux'] == b'zzz' + assert dest[root + "foo"] == b"xxx" + assert dest[root + "bar/baz"] == b"yyy" + assert dest[root + "bar/qux"] == b"zzz" # invalid option with pytest.raises(ValueError): - copy_store(source, dest, if_exists='foobar') + copy_store(source, dest, if_exists="foobar") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") @@ -533,9 +541,9 @@ class TestCopyStoreV3(TestCopyStore): def setUp(self): source = KVStoreV3(dict()) - source['meta/root/foo'] = b'xxx' - source['meta/root/bar/baz'] = b'yyy' - source['meta/root/bar/qux'] = b'zzz' + source["meta/root/foo"] = b"xxx" + source["meta/root/bar/baz"] = b"yyy" + source["meta/root/bar/qux"] = b"zzz" self.source = source def _get_dest_store(self): @@ -548,12 +556,11 @@ def test_mismatched_store_versions(self): copy_store(self.source, dest) -def check_copied_array(original, copied, without_attrs=False, - expect_props=None): +def check_copied_array(original, copied, without_attrs=False, expect_props=None): # setup - source_h5py = original.__module__.startswith('h5py.') - dest_h5py = copied.__module__.startswith('h5py.') + source_h5py = original.__module__.startswith("h5py.") + dest_h5py = copied.__module__.startswith("h5py.") zarr_to_zarr = not (source_h5py or dest_h5py) h5py_to_h5py = source_h5py and dest_h5py zarr_to_h5py = not source_h5py and dest_h5py @@ -564,25 +571,32 @@ def check_copied_array(original, copied, without_attrs=False, expect_props = expect_props.copy() # common properties in zarr and h5py - for p in 'dtype', 'shape', 'chunks': + for p in "dtype", "shape", "chunks": expect_props.setdefault(p, getattr(original, p)) # zarr-specific properties if zarr_to_zarr: - for p in 'compressor', 'filters', 'order', 'fill_value': + for p in "compressor", "filters", "order", "fill_value": expect_props.setdefault(p, getattr(original, p)) # h5py-specific properties if h5py_to_h5py: - for p in ('maxshape', 'compression', 'compression_opts', 'shuffle', - 'scaleoffset', 'fletcher32', 'fillvalue'): + for p in ( + "maxshape", + "compression", + "compression_opts", + "shuffle", + "scaleoffset", + "fletcher32", + "fillvalue", + ): expect_props.setdefault(p, 
getattr(original, p)) # common properties with some name differences if h5py_to_zarr: - expect_props.setdefault('fill_value', original.fillvalue) + expect_props.setdefault("fill_value", original.fillvalue) if zarr_to_h5py: - expect_props.setdefault('fillvalue', original.fill_value) + expect_props.setdefault("fillvalue", original.fill_value) # compare properties for k, v in expect_props.items(): @@ -596,18 +610,17 @@ def check_copied_array(original, copied, without_attrs=False, for k in original.attrs.keys(): assert k not in copied.attrs else: - if dest_h5py and 'filters' in original.attrs: + if dest_h5py and "filters" in original.attrs: # special case in v3 (storing filters metadata under attributes) # we explicitly do not copy this info over to HDF5 original_attrs = original.attrs.asdict().copy() - original_attrs.pop('filters') + original_attrs.pop("filters") else: original_attrs = original.attrs assert sorted(original_attrs.items()) == sorted(copied.attrs.items()) -def check_copied_group(original, copied, without_attrs=False, expect_props=None, - shallow=False): +def check_copied_group(original, copied, without_attrs=False, expect_props=None, shallow=False): # setup if expect_props is None: @@ -617,16 +630,20 @@ def check_copied_group(original, copied, without_attrs=False, expect_props=None, # compare children for k, v in original.items(): - if hasattr(v, 'shape'): + if hasattr(v, "shape"): assert k in copied - check_copied_array(v, copied[k], without_attrs=without_attrs, - expect_props=expect_props) + check_copied_array(v, copied[k], without_attrs=without_attrs, expect_props=expect_props) elif shallow: assert k not in copied else: assert k in copied - check_copied_group(v, copied[k], without_attrs=without_attrs, - shallow=shallow, expect_props=expect_props) + check_copied_group( + v, + copied[k], + without_attrs=without_attrs, + shallow=shallow, + expect_props=expect_props, + ) # compare attrs if without_attrs: @@ -657,7 +674,7 @@ def test_copy_all(): dry_run=False, ) - assert 'subgroup' in destination_group + assert "subgroup" in destination_group assert destination_group.attrs["info"] == "group attrs" assert destination_group.subgroup.attrs["info"] == "sub attrs" @@ -670,10 +687,10 @@ def test_copy_all_v3(): copy_all used to not copy attributes as `.keys()` """ - original_group = zarr.group(store=MemoryStoreV3(), path='group1', overwrite=True) + original_group = zarr.group(store=MemoryStoreV3(), path="group1", overwrite=True) original_group.create_group("subgroup") - destination_group = zarr.group(store=MemoryStoreV3(), path='group2', overwrite=True) + destination_group = zarr.group(store=MemoryStoreV3(), path="group2", overwrite=True) # copy from memory to directory store copy_all( @@ -681,200 +698,212 @@ def test_copy_all_v3(): destination_group, dry_run=False, ) - assert 'subgroup' in destination_group + assert "subgroup" in destination_group class TestCopy: - @pytest.fixture(params=[False, True], ids=['zarr', 'hdf5']) + @pytest.fixture(params=[False, True], ids=["zarr", "hdf5"]) def source(self, request, tmpdir): def prep_source(source): - foo = source.create_group('foo') - foo.attrs['experiment'] = 'weird science' - baz = foo.create_dataset('bar/baz', data=np.arange(100), chunks=(50,)) - baz.attrs['units'] = 'metres' + foo = source.create_group("foo") + foo.attrs["experiment"] = "weird science" + baz = foo.create_dataset("bar/baz", data=np.arange(100), chunks=(50,)) + baz.attrs["units"] = "metres" if request.param: - extra_kws = dict(compression='gzip', compression_opts=3, 
fillvalue=84, - shuffle=True, fletcher32=True) + extra_kws = dict( + compression="gzip", + compression_opts=3, + fillvalue=84, + shuffle=True, + fletcher32=True, + ) else: - extra_kws = dict(compressor=Zlib(3), order='F', fill_value=42, filters=[Adler32()]) - source.create_dataset('spam', data=np.arange(100, 200).reshape(20, 5), - chunks=(10, 2), dtype='i2', **extra_kws) + extra_kws = dict(compressor=Zlib(3), order="F", fill_value=42, filters=[Adler32()]) + source.create_dataset( + "spam", + data=np.arange(100, 200).reshape(20, 5), + chunks=(10, 2), + dtype="i2", + **extra_kws, + ) return source if request.param: - h5py = pytest.importorskip('h5py') - fn = tmpdir.join('source.h5') - with h5py.File(str(fn), mode='w') as h5f: + h5py = pytest.importorskip("h5py") + fn = tmpdir.join("source.h5") + with h5py.File(str(fn), mode="w") as h5f: yield prep_source(h5f) else: yield prep_source(group()) - @pytest.fixture(params=[False, True], ids=['zarr', 'hdf5']) + @pytest.fixture(params=[False, True], ids=["zarr", "hdf5"]) def dest(self, request, tmpdir): if request.param: - h5py = pytest.importorskip('h5py') - fn = tmpdir.join('dest.h5') - with h5py.File(str(fn), mode='w') as h5f: + h5py = pytest.importorskip("h5py") + fn = tmpdir.join("dest.h5") + with h5py.File(str(fn), mode="w") as h5f: yield h5f else: yield group() def test_copy_array(self, source, dest): # copy array with default options - copy(source['foo/bar/baz'], dest) - check_copied_array(source['foo/bar/baz'], dest['baz']) - copy(source['spam'], dest) - check_copied_array(source['spam'], dest['spam']) + copy(source["foo/bar/baz"], dest) + check_copied_array(source["foo/bar/baz"], dest["baz"]) + copy(source["spam"], dest) + check_copied_array(source["spam"], dest["spam"]) def test_copy_bad_dest(self, source, dest): # try to copy to an array, dest must be a group - dest = dest.create_dataset('eggs', shape=(100,)) + dest = dest.create_dataset("eggs", shape=(100,)) with pytest.raises(ValueError): - copy(source['foo/bar/baz'], dest) + copy(source["foo/bar/baz"], dest) def test_copy_array_name(self, source, dest): # copy array with name - copy(source['foo/bar/baz'], dest, name='qux') - assert 'baz' not in dest - check_copied_array(source['foo/bar/baz'], dest['qux']) + copy(source["foo/bar/baz"], dest, name="qux") + assert "baz" not in dest + check_copied_array(source["foo/bar/baz"], dest["qux"]) def test_copy_array_create_options(self, source, dest): - dest_h5py = dest.__module__.startswith('h5py.') + dest_h5py = dest.__module__.startswith("h5py.") # copy array, provide creation options compressor = Zlib(9) create_kws = dict(chunks=(10,)) if dest_h5py: - create_kws.update(compression='gzip', compression_opts=9, - shuffle=True, fletcher32=True, fillvalue=42) + create_kws.update( + compression="gzip", compression_opts=9, shuffle=True, fletcher32=True, fillvalue=42 + ) else: - create_kws.update(compressor=compressor, fill_value=42, order='F', - filters=[Adler32()]) - copy(source['foo/bar/baz'], dest, without_attrs=True, **create_kws) - check_copied_array(source['foo/bar/baz'], dest['baz'], - without_attrs=True, expect_props=create_kws) + create_kws.update(compressor=compressor, fill_value=42, order="F", filters=[Adler32()]) + copy(source["foo/bar/baz"], dest, without_attrs=True, **create_kws) + check_copied_array( + source["foo/bar/baz"], dest["baz"], without_attrs=True, expect_props=create_kws + ) def test_copy_array_exists_array(self, source, dest): # copy array, dest array in the way - dest.create_dataset('baz', shape=(10,)) + 
dest.create_dataset("baz", shape=(10,)) # raise with pytest.raises(CopyError): # should raise by default - copy(source['foo/bar/baz'], dest) - assert (10,) == dest['baz'].shape + copy(source["foo/bar/baz"], dest) + assert (10,) == dest["baz"].shape with pytest.raises(CopyError): - copy(source['foo/bar/baz'], dest, if_exists='raise') - assert (10,) == dest['baz'].shape + copy(source["foo/bar/baz"], dest, if_exists="raise") + assert (10,) == dest["baz"].shape # skip - copy(source['foo/bar/baz'], dest, if_exists='skip') - assert (10,) == dest['baz'].shape + copy(source["foo/bar/baz"], dest, if_exists="skip") + assert (10,) == dest["baz"].shape # replace - copy(source['foo/bar/baz'], dest, if_exists='replace') - check_copied_array(source['foo/bar/baz'], dest['baz']) + copy(source["foo/bar/baz"], dest, if_exists="replace") + check_copied_array(source["foo/bar/baz"], dest["baz"]) # invalid option with pytest.raises(ValueError): - copy(source['foo/bar/baz'], dest, if_exists='foobar') + copy(source["foo/bar/baz"], dest, if_exists="foobar") def test_copy_array_exists_group(self, source, dest): # copy array, dest group in the way - dest.create_group('baz') + dest.create_group("baz") # raise with pytest.raises(CopyError): - copy(source['foo/bar/baz'], dest) - assert not hasattr(dest['baz'], 'shape') + copy(source["foo/bar/baz"], dest) + assert not hasattr(dest["baz"], "shape") with pytest.raises(CopyError): - copy(source['foo/bar/baz'], dest, if_exists='raise') - assert not hasattr(dest['baz'], 'shape') + copy(source["foo/bar/baz"], dest, if_exists="raise") + assert not hasattr(dest["baz"], "shape") # skip - copy(source['foo/bar/baz'], dest, if_exists='skip') - assert not hasattr(dest['baz'], 'shape') + copy(source["foo/bar/baz"], dest, if_exists="skip") + assert not hasattr(dest["baz"], "shape") # replace - copy(source['foo/bar/baz'], dest, if_exists='replace') - check_copied_array(source['foo/bar/baz'], dest['baz']) + copy(source["foo/bar/baz"], dest, if_exists="replace") + check_copied_array(source["foo/bar/baz"], dest["baz"]) def test_copy_array_skip_initialized(self, source, dest): - dest_h5py = dest.__module__.startswith('h5py.') + dest_h5py = dest.__module__.startswith("h5py.") - dest.create_dataset('baz', shape=(100,), chunks=(10,), dtype='i8') - assert not np.all(source['foo/bar/baz'][:] == dest['baz'][:]) + dest.create_dataset("baz", shape=(100,), chunks=(10,), dtype="i8") + assert not np.all(source["foo/bar/baz"][:] == dest["baz"][:]) if dest_h5py: with pytest.raises(ValueError): # not available with copy to h5py - copy(source['foo/bar/baz'], dest, if_exists='skip_initialized') + copy(source["foo/bar/baz"], dest, if_exists="skip_initialized") else: # copy array, dest array exists but not yet initialized - copy(source['foo/bar/baz'], dest, if_exists='skip_initialized') - check_copied_array(source['foo/bar/baz'], dest['baz']) + copy(source["foo/bar/baz"], dest, if_exists="skip_initialized") + check_copied_array(source["foo/bar/baz"], dest["baz"]) # copy array, dest array exists and initialized, will be skipped - dest['baz'][:] = np.arange(100, 200) - copy(source['foo/bar/baz'], dest, if_exists='skip_initialized') - assert_array_equal(np.arange(100, 200), dest['baz'][:]) - assert not np.all(source['foo/bar/baz'][:] == dest['baz'][:]) + dest["baz"][:] = np.arange(100, 200) + copy(source["foo/bar/baz"], dest, if_exists="skip_initialized") + assert_array_equal(np.arange(100, 200), dest["baz"][:]) + assert not np.all(source["foo/bar/baz"][:] == dest["baz"][:]) def test_copy_group(self, source, 
dest): # copy group, default options - copy(source['foo'], dest) - check_copied_group(source['foo'], dest['foo']) + copy(source["foo"], dest) + check_copied_group(source["foo"], dest["foo"]) def test_copy_group_no_name(self, source, dest): with pytest.raises(TypeError): # need a name if copy root copy(source, dest) - copy(source, dest, name='root') - check_copied_group(source, dest['root']) + copy(source, dest, name="root") + check_copied_group(source, dest["root"]) def test_copy_group_options(self, source, dest): # copy group, non-default options - copy(source['foo'], dest, name='qux', without_attrs=True) - assert 'foo' not in dest - check_copied_group(source['foo'], dest['qux'], without_attrs=True) + copy(source["foo"], dest, name="qux", without_attrs=True) + assert "foo" not in dest + check_copied_group(source["foo"], dest["qux"], without_attrs=True) def test_copy_group_shallow(self, source, dest): # copy group, shallow - copy(source, dest, name='eggs', shallow=True) - check_copied_group(source, dest['eggs'], shallow=True) + copy(source, dest, name="eggs", shallow=True) + check_copied_group(source, dest["eggs"], shallow=True) def test_copy_group_exists_group(self, source, dest): # copy group, dest groups exist - dest.create_group('foo/bar') - copy(source['foo'], dest) - check_copied_group(source['foo'], dest['foo']) + dest.create_group("foo/bar") + copy(source["foo"], dest) + check_copied_group(source["foo"], dest["foo"]) def test_copy_group_exists_array(self, source, dest): # copy group, dest array in the way - dest.create_dataset('foo/bar', shape=(10,)) + dest.create_dataset("foo/bar", shape=(10,)) # raise with pytest.raises(CopyError): - copy(source['foo'], dest) - assert dest['foo/bar'].shape == (10,) + copy(source["foo"], dest) + assert dest["foo/bar"].shape == (10,) with pytest.raises(CopyError): - copy(source['foo'], dest, if_exists='raise') - assert dest['foo/bar'].shape == (10,) + copy(source["foo"], dest, if_exists="raise") + assert dest["foo/bar"].shape == (10,) # skip - copy(source['foo'], dest, if_exists='skip') - assert dest['foo/bar'].shape == (10,) + copy(source["foo"], dest, if_exists="skip") + assert dest["foo/bar"].shape == (10,) # replace - copy(source['foo'], dest, if_exists='replace') - check_copied_group(source['foo'], dest['foo']) + copy(source["foo"], dest, if_exists="replace") + check_copied_group(source["foo"], dest["foo"]) def test_copy_group_dry_run(self, source, dest): # dry run, empty destination - n_copied, n_skipped, n_bytes_copied = \ - copy(source['foo'], dest, dry_run=True, return_stats=True) + n_copied, n_skipped, n_bytes_copied = copy( + source["foo"], dest, dry_run=True, return_stats=True + ) assert 0 == len(dest) assert 3 == n_copied assert 0 == n_skipped @@ -882,133 +911,144 @@ def test_copy_group_dry_run(self, source, dest): # dry run, array exists in destination baz = np.arange(100, 200) - dest.create_dataset('foo/bar/baz', data=baz) - assert not np.all(source['foo/bar/baz'][:] == dest['foo/bar/baz'][:]) + dest.create_dataset("foo/bar/baz", data=baz) + assert not np.all(source["foo/bar/baz"][:] == dest["foo/bar/baz"][:]) assert 1 == len(dest) # raise with pytest.raises(CopyError): - copy(source['foo'], dest, dry_run=True) + copy(source["foo"], dest, dry_run=True) assert 1 == len(dest) # skip - n_copied, n_skipped, n_bytes_copied = \ - copy(source['foo'], dest, dry_run=True, if_exists='skip', - return_stats=True) + n_copied, n_skipped, n_bytes_copied = copy( + source["foo"], dest, dry_run=True, if_exists="skip", return_stats=True + ) assert 1 == 
len(dest) assert 2 == n_copied assert 1 == n_skipped assert 0 == n_bytes_copied - assert_array_equal(baz, dest['foo/bar/baz']) + assert_array_equal(baz, dest["foo/bar/baz"]) # replace - n_copied, n_skipped, n_bytes_copied = \ - copy(source['foo'], dest, dry_run=True, if_exists='replace', - return_stats=True) + n_copied, n_skipped, n_bytes_copied = copy( + source["foo"], dest, dry_run=True, if_exists="replace", return_stats=True + ) assert 1 == len(dest) assert 3 == n_copied assert 0 == n_skipped assert 0 == n_bytes_copied - assert_array_equal(baz, dest['foo/bar/baz']) + assert_array_equal(baz, dest["foo/bar/baz"]) def test_logging(self, source, dest, tmpdir): # callable log - copy(source['foo'], dest, dry_run=True, log=print) + copy(source["foo"], dest, dry_run=True, log=print) # file name - fn = str(tmpdir.join('log_name')) - copy(source['foo'], dest, dry_run=True, log=fn) + fn = str(tmpdir.join("log_name")) + copy(source["foo"], dest, dry_run=True, log=fn) # file - with tmpdir.join('log_file').open(mode='w') as f: - copy(source['foo'], dest, dry_run=True, log=f) + with tmpdir.join("log_file").open(mode="w") as f: + copy(source["foo"], dest, dry_run=True, log=f) # bad option with pytest.raises(TypeError): - copy(source['foo'], dest, dry_run=True, log=True) + copy(source["foo"], dest, dry_run=True, log=True) @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestCopyV3(TestCopy): - - @pytest.fixture(params=['zarr', 'hdf5']) + @pytest.fixture(params=["zarr", "hdf5"]) def source(self, request, tmpdir): def prep_source(source): - foo = source.create_group('foo') - foo.attrs['experiment'] = 'weird science' - baz = foo.create_dataset('bar/baz', data=np.arange(100), chunks=(50,)) - baz.attrs['units'] = 'metres' - if request.param == 'hdf5': - extra_kws = dict(compression='gzip', compression_opts=3, fillvalue=84, - shuffle=True, fletcher32=True) + foo = source.create_group("foo") + foo.attrs["experiment"] = "weird science" + baz = foo.create_dataset("bar/baz", data=np.arange(100), chunks=(50,)) + baz.attrs["units"] = "metres" + if request.param == "hdf5": + extra_kws = dict( + compression="gzip", + compression_opts=3, + fillvalue=84, + shuffle=True, + fletcher32=True, + ) else: - extra_kws = dict(compressor=Zlib(3), order='F', fill_value=42, filters=[Adler32()]) - source.create_dataset('spam', data=np.arange(100, 200).reshape(20, 5), - chunks=(10, 2), dtype='i2', **extra_kws) + extra_kws = dict(compressor=Zlib(3), order="F", fill_value=42, filters=[Adler32()]) + source.create_dataset( + "spam", + data=np.arange(100, 200).reshape(20, 5), + chunks=(10, 2), + dtype="i2", + **extra_kws, + ) return source - if request.param == 'hdf5': - h5py = pytest.importorskip('h5py') - fn = tmpdir.join('source.h5') - with h5py.File(str(fn), mode='w') as h5f: + if request.param == "hdf5": + h5py = pytest.importorskip("h5py") + fn = tmpdir.join("source.h5") + with h5py.File(str(fn), mode="w") as h5f: yield prep_source(h5f) - elif request.param == 'zarr': - yield prep_source(group(path='group1', zarr_version=3)) + elif request.param == "zarr": + yield prep_source(group(path="group1", zarr_version=3)) # Test with various destination StoreV3 types as TestCopyV3 covers rmdir - destinations = ['hdf5', 'zarr', 'zarr_kvstore', 'zarr_directorystore', 'zarr_sqlitestore'] + destinations = ["hdf5", "zarr", "zarr_kvstore", "zarr_directorystore", "zarr_sqlitestore"] if have_fsspec: - destinations += ['zarr_fsstore'] + destinations += ["zarr_fsstore"] @pytest.fixture(params=destinations) def dest(self, 
request, tmpdir): - if request.param == 'hdf5': - h5py = pytest.importorskip('h5py') - fn = tmpdir.join('dest.h5') - with h5py.File(str(fn), mode='w') as h5f: + if request.param == "hdf5": + h5py = pytest.importorskip("h5py") + fn = tmpdir.join("dest.h5") + with h5py.File(str(fn), mode="w") as h5f: yield h5f - elif request.param == 'zarr': - yield group(path='group2', zarr_version=3) - elif request.param == 'zarr_kvstore': + elif request.param == "zarr": + yield group(path="group2", zarr_version=3) + elif request.param == "zarr_kvstore": store = KVStoreV3(dict()) - yield group(store, path='group2', zarr_version=3) - elif request.param == 'zarr_fsstore': - fn = tmpdir.join('dest.zr3') + yield group(store, path="group2", zarr_version=3) + elif request.param == "zarr_fsstore": + fn = tmpdir.join("dest.zr3") store = FSStoreV3(str(fn), auto_mkdir=True) - yield group(store, path='group2', zarr_version=3) - elif request.param == 'zarr_directorystore': - fn = tmpdir.join('dest.zr3') + yield group(store, path="group2", zarr_version=3) + elif request.param == "zarr_directorystore": + fn = tmpdir.join("dest.zr3") store = DirectoryStoreV3(str(fn)) - yield group(store, path='group2', zarr_version=3) - elif request.param == 'zarr_sqlitestore': - fn = tmpdir.join('dest.db') + yield group(store, path="group2", zarr_version=3) + elif request.param == "zarr_sqlitestore": + fn = tmpdir.join("dest.db") store = SQLiteStoreV3(str(fn)) - yield group(store, path='group2', zarr_version=3) + yield group(store, path="group2", zarr_version=3) def test_copy_array_create_options(self, source, dest): - dest_h5py = dest.__module__.startswith('h5py.') + dest_h5py = dest.__module__.startswith("h5py.") # copy array, provide creation options compressor = Zlib(9) create_kws = dict(chunks=(10,)) if dest_h5py: - create_kws.update(compression='gzip', compression_opts=9, - shuffle=True, fletcher32=True, fillvalue=42) + create_kws.update( + compression="gzip", compression_opts=9, shuffle=True, fletcher32=True, fillvalue=42 + ) else: # v3 case has no filters argument in zarr create_kws - create_kws.update(compressor=compressor, fill_value=42, order='F') - copy(source['foo/bar/baz'], dest, without_attrs=True, **create_kws) - check_copied_array(source['foo/bar/baz'], dest['baz'], - without_attrs=True, expect_props=create_kws) + create_kws.update(compressor=compressor, fill_value=42, order="F") + copy(source["foo/bar/baz"], dest, without_attrs=True, **create_kws) + check_copied_array( + source["foo/bar/baz"], dest["baz"], without_attrs=True, expect_props=create_kws + ) def test_copy_group_no_name(self, source, dest): - if source.__module__.startswith('h5py'): + if source.__module__.startswith("h5py"): with pytest.raises(TypeError): copy(source, dest) else: # For v3, dest.name will be inferred from source.name copy(source, dest) - check_copied_group(source, dest[source.name.lstrip('/')]) + check_copied_group(source, dest[source.name.lstrip("/")]) - copy(source, dest, name='root') - check_copied_group(source, dest['root']) + copy(source, dest, name="root") + check_copied_group(source, dest["root"]) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index ab1a6e8aa7..d86c3bf39b 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -10,9 +10,22 @@ import numpy as np import packaging.version import pytest -from numcodecs import (BZ2, JSON, LZ4, Blosc, Categorize, Delta, - FixedScaleOffset, GZip, MsgPack, Pickle, VLenArray, - VLenBytes, VLenUTF8, Zlib) +from numcodecs import ( + BZ2, + JSON, + LZ4, + Blosc, + 
Categorize, + Delta, + FixedScaleOffset, + GZip, + MsgPack, + Pickle, + VLenArray, + VLenBytes, + VLenUTF8, + Zlib, +) from numcodecs.compat import ensure_bytes, ensure_ndarray from numcodecs.tests.common import greetings from numpy.testing import assert_array_almost_equal, assert_array_equal @@ -65,7 +78,7 @@ class TestArray(unittest.TestCase): version = 2 - root = '' + root = "" KVStoreClass = KVStore def test_array_init(self): @@ -77,7 +90,7 @@ def test_array_init(self): assert isinstance(a, Array) assert (100,) == a.shape assert (10,) == a.chunks - assert '' == a.path + assert "" == a.path assert a.name is None assert a.basename is None assert store is a.store @@ -89,14 +102,14 @@ def test_array_init(self): # initialize at path store = self.KVStoreClass(dict()) - init_array(store, shape=100, chunks=10, path='foo/bar', dtype='')) + a2 = self.create_array(shape=1000, chunks=100, dtype=dtype.newbyteorder(">")) a2[:] = 1 x2 = a2[:] assert_array_equal(x1, x2) @@ -1543,46 +1558,52 @@ def test_endian(self): a2.store.close() def test_attributes(self): - a = self.create_array(shape=10, chunks=10, dtype='i8') - a.attrs['foo'] = 'bar' + a = self.create_array(shape=10, chunks=10, dtype="i8") + a.attrs["foo"] = "bar" assert a.attrs.key in a.store attrs = json_loads(a.store[a.attrs.key]) if self.version > 2: # in v3, attributes are in a sub-dictionary of the metadata - attrs = attrs['attributes'] - assert 'foo' in attrs and attrs['foo'] == 'bar' + attrs = attrs["attributes"] + assert "foo" in attrs and attrs["foo"] == "bar" - a.attrs['bar'] = 'foo' + a.attrs["bar"] = "foo" assert a.attrs.key in a.store attrs = json_loads(a.store[a.attrs.key]) if self.version > 2: # in v3, attributes are in a sub-dictionary of the metadata - attrs = attrs['attributes'] - assert 'foo' in attrs and attrs['foo'] == 'bar' - assert 'bar' in attrs and attrs['bar'] == 'foo' + attrs = attrs["attributes"] + assert "foo" in attrs and attrs["foo"] == "bar" + assert "bar" in attrs and attrs["bar"] == "foo" a.store.close() def test_structured_with_object(self): - a = self.create_array(fill_value=(0.0, None), - shape=10, - chunks=10, - dtype=[('x', float), ('y', object)], - object_codec=Pickle()) + a = self.create_array( + fill_value=(0.0, None), + shape=10, + chunks=10, + dtype=[("x", float), ("y", object)], + object_codec=Pickle(), + ) assert tuple(a[0]) == (0.0, None) class TestArrayWithPath(TestArray): - @staticmethod def create_array(read_only=False, **kwargs): store = KVStore(dict()) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, path='foo/bar', **kwargs) - return Array(store, path='foo/bar', read_only=read_only, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, - write_empty_chunks=write_empty_chunks) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop("write_empty_chunks", True) + init_array(store, path="foo/bar", **kwargs) + return Array( + store, + path="foo/bar", + read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks, + ) def test_nchunks_initialized(self): pass @@ -1593,42 +1614,46 @@ def expected(self): "1437428e69754b1e1a38bd7fc9e43669577620db", "6c530b6b9d73e108cc5ee7b6be3d552cc994bdbe", "4c0a76fb1222498e09dcd92f7f9221d6cea8b40e", - "05b0663ffe1785f38d3a459dec17e57a18f254af" + "05b0663ffe1785f38d3a459dec17e57a18f254af", ] 
def test_nbytes_stored(self): # MemoryStore as store z = self.create_array(shape=1000, chunks=100) - expect_nbytes_stored = sum(buffer_size(v) - for k, v in z.store.items() - if k.startswith('foo/bar/')) + expect_nbytes_stored = sum( + buffer_size(v) for k, v in z.store.items() if k.startswith("foo/bar/") + ) assert expect_nbytes_stored == z.nbytes_stored z[:] = 42 - expect_nbytes_stored = sum(buffer_size(v) - for k, v in z.store.items() - if k.startswith('foo/bar/')) + expect_nbytes_stored = sum( + buffer_size(v) for k, v in z.store.items() if k.startswith("foo/bar/") + ) assert expect_nbytes_stored == z.nbytes_stored # mess with store - z.store[z._key_prefix + 'foo'] = list(range(10)) + z.store[z._key_prefix + "foo"] = list(range(10)) assert -1 == z.nbytes_stored class TestArrayWithChunkStore(TestArray): - @staticmethod def create_array(read_only=False, **kwargs): store = KVStore(dict()) # separate chunk store chunk_store = KVStore(dict()) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop("write_empty_chunks", True) init_array(store, chunk_store=chunk_store, **kwargs) - return Array(store, read_only=read_only, chunk_store=chunk_store, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, - write_empty_chunks=write_empty_chunks) + return Array( + store, + read_only=read_only, + chunk_store=chunk_store, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks, + ) def expected(self): return [ @@ -1636,41 +1661,43 @@ def expected(self): "1437428e69754b1e1a38bd7fc9e43669577620db", "6c530b6b9d73e108cc5ee7b6be3d552cc994bdbe", "4c0a76fb1222498e09dcd92f7f9221d6cea8b40e", - "05b0663ffe1785f38d3a459dec17e57a18f254af" + "05b0663ffe1785f38d3a459dec17e57a18f254af", ] def test_nbytes_stored(self): z = self.create_array(shape=1000, chunks=100) expect_nbytes_stored = sum(buffer_size(v) for v in z.store.values()) - expect_nbytes_stored += sum(buffer_size(v) - for v in z.chunk_store.values()) + expect_nbytes_stored += sum(buffer_size(v) for v in z.chunk_store.values()) assert expect_nbytes_stored == z.nbytes_stored z[:] = 42 expect_nbytes_stored = sum(buffer_size(v) for v in z.store.values()) - expect_nbytes_stored += sum(buffer_size(v) - for v in z.chunk_store.values()) + expect_nbytes_stored += sum(buffer_size(v) for v in z.chunk_store.values()) assert expect_nbytes_stored == z.nbytes_stored # mess with store - z.chunk_store[z._key_prefix + 'foo'] = list(range(10)) + z.chunk_store[z._key_prefix + "foo"] = list(range(10)) assert -1 == z.nbytes_stored class TestArrayWithDirectoryStore(TestArray): - @staticmethod def create_array(read_only=False, **kwargs): path = mkdtemp() atexit.register(shutil.rmtree, path) store = DirectoryStore(path) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop("write_empty_chunks", True) + kwargs.setdefault("compressor", Zlib(1)) init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + 
return Array( + store, + read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks, + ) def test_nbytes_stored(self): @@ -1695,7 +1722,6 @@ def test_array_init_from_dict(): @skip_test_env_var("ZARR_TEST_ABS") class TestArrayWithABSStore(TestArray): - @staticmethod def absstore(): client = abs_container() @@ -1705,13 +1731,18 @@ def absstore(): def create_array(self, read_only=False, **kwargs): store = self.absstore() - kwargs.setdefault('compressor', Zlib(1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) + kwargs.setdefault("compressor", Zlib(1)) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop("write_empty_chunks", True) init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return Array( + store, + read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks, + ) @pytest.mark.xfail def test_nbytes_stored(self): @@ -1724,19 +1755,23 @@ def test_pickle(self): class TestArrayWithNestedDirectoryStore(TestArrayWithDirectoryStore): - @staticmethod def create_array(read_only=False, **kwargs): path = mkdtemp() atexit.register(shutil.rmtree, path) store = NestedDirectoryStore(path) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop("write_empty_chunks", True) + kwargs.setdefault("compressor", Zlib(1)) init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return Array( + store, + read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks, + ) def expected(self): return [ @@ -1749,19 +1784,23 @@ def expected(self): class TestArrayWithN5Store(TestArrayWithDirectoryStore): - @staticmethod def create_array(read_only=False, **kwargs): path = mkdtemp() atexit.register(shutil.rmtree, path) store = N5Store(path) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) + cache_metadata = kwargs.pop("cache_metadata", True) + cache_attrs = kwargs.pop("cache_attrs", True) + write_empty_chunks = kwargs.pop("write_empty_chunks", True) + kwargs.setdefault("compressor", Zlib(1)) init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return Array( + store, + read_only=read_only, + cache_metadata=cache_metadata, + cache_attrs=cache_attrs, + write_empty_chunks=write_empty_chunks, + ) def test_array_0d(self): # test behaviour for array with 0 dimensions @@ -1816,8 +1855,7 @@ def test_array_1d_fill_value(self): a = np.arange(nvalues, dtype=dtype) f = np.empty_like(a) f.fill(fill_value or 0) - z = self.create_array(shape=a.shape, chunks=100, dtype=a.dtype, - 
fill_value=fill_value) + z = self.create_array(shape=a.shape, chunks=100, dtype=a.dtype, fill_value=fill_value) z[190:310] = a[190:310] assert_array_equal(f[:190], z[:190]) @@ -1825,21 +1863,18 @@ def test_array_1d_fill_value(self): assert_array_equal(f[310:], z[310:]) with pytest.raises(ValueError): - z = self.create_array(shape=(nvalues,), chunks=100, dtype=dtype, - fill_value=1) + z = self.create_array(shape=(nvalues,), chunks=100, dtype=dtype, fill_value=1) def test_nchunks_initialized(self): fill_value = 0 - dtype = 'int' - z = self.create_array(shape=100, - chunks=10, - fill_value=fill_value, - dtype=dtype, - write_empty_chunks=True) + dtype = "int" + z = self.create_array( + shape=100, chunks=10, fill_value=fill_value, dtype=dtype, write_empty_chunks=True + ) assert 0 == z.nchunks_initialized # manually put something into the store to confuse matters - z.store['foo'] = b'bar' + z.store["foo"] = b"bar" assert 0 == z.nchunks_initialized z[:] = 42 assert 10 == z.nchunks_initialized @@ -1849,11 +1884,9 @@ def test_nchunks_initialized(self): # second round of similar tests with write_empty_chunks set to # False - z = self.create_array(shape=100, - chunks=10, - fill_value=fill_value, - dtype=dtype, - write_empty_chunks=False) + z = self.create_array( + shape=100, chunks=10, fill_value=fill_value, dtype=dtype, write_empty_chunks=False + ) z[:] = 42 assert 10 == z.nchunks_initialized # manually remove a chunk from the store @@ -1866,61 +1899,69 @@ def test_array_order(self): # N5 only supports 'C' at the moment with pytest.raises(ValueError): - self.create_array(shape=(10, 11), chunks=(10, 11), dtype='i8', - order='F') + self.create_array(shape=(10, 11), chunks=(10, 11), dtype="i8", order="F") # 1D a = np.arange(1050) - z = self.create_array(shape=a.shape, chunks=100, dtype=a.dtype, - order='C') - assert z.order == 'C' + z = self.create_array(shape=a.shape, chunks=100, dtype=a.dtype, order="C") + assert z.order == "C" assert z[:].flags.c_contiguous z[:] = a assert_array_equal(a, z[:]) # 2D a = np.arange(10000).reshape((100, 100)) - z = self.create_array(shape=a.shape, chunks=(10, 10), - dtype=a.dtype, order='C') + z = self.create_array(shape=a.shape, chunks=(10, 10), dtype=a.dtype, order="C") - assert z.order == 'C' + assert z.order == "C" assert z[:].flags.c_contiguous z[:] = a actual = z[:] assert_array_equal(a, actual) def test_structured_array(self): - d = np.array([(b'aaa', 1, 4.2), - (b'bbb', 2, 8.4), - (b'ccc', 3, 12.6)], - dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) - fill_values = None, b'', (b'zzz', 42, 16.8) + d = np.array( + [(b"aaa", 1, 4.2), (b"bbb", 2, 8.4), (b"ccc", 3, 12.6)], + dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")], + ) + fill_values = None, b"", (b"zzz", 42, 16.8) with pytest.raises(TypeError): self.check_structured_array(d, fill_values) def test_structured_array_subshapes(self): - d = np.array([(0, ((0, 1, 2), (1, 2, 3)), b'aaa'), - (1, ((1, 2, 3), (2, 3, 4)), b'bbb'), - (2, ((2, 3, 4), (3, 4, 5)), b'ccc')], - dtype=[('foo', 'i8'), ('bar', '(2, 3)f4'), ('baz', 'S3')]) - fill_values = None, b'', (0, ((0, 0, 0), (1, 1, 1)), b'zzz') + d = np.array( + [ + (0, ((0, 1, 2), (1, 2, 3)), b"aaa"), + (1, ((1, 2, 3), (2, 3, 4)), b"bbb"), + (2, ((2, 3, 4), (3, 4, 5)), b"ccc"), + ], + dtype=[("foo", "i8"), ("bar", "(2, 3)f4"), ("baz", "S3")], + ) + fill_values = None, b"", (0, ((0, 0, 0), (1, 1, 1)), b"zzz") with pytest.raises(TypeError): self.check_structured_array(d, fill_values) def test_structured_array_nested(self): - d = np.array([(0, (0, ((0, 1), (1, 2), 
(2, 3)), 0), b'aaa'), - (1, (1, ((1, 2), (2, 3), (3, 4)), 1), b'bbb'), - (2, (2, ((2, 3), (3, 4), (4, 5)), 2), b'ccc')], - dtype=[('foo', 'i8'), ('bar', [('foo', 'i4'), ('bar', '(3, 2)f4'), - ('baz', 'u1')]), ('baz', 'S3')]) - fill_values = None, b'', (0, (0, ((0, 0), (1, 1), (2, 2)), 0), b'zzz') + d = np.array( + [ + (0, (0, ((0, 1), (1, 2), (2, 3)), 0), b"aaa"), + (1, (1, ((1, 2), (2, 3), (3, 4)), 1), b"bbb"), + (2, (2, ((2, 3), (3, 4), (4, 5)), 2), b"ccc"), + ], + dtype=[ + ("foo", "i8"), + ("bar", [("foo", "i4"), ("bar", "(3, 2)f4"), ("baz", "u1")]), + ("baz", "S3"), + ], + ) + fill_values = None, b"", (0, (0, ((0, 0), (1, 1), (2, 2)), 0), b"zzz") with pytest.raises(TypeError): self.check_structured_array(d, fill_values) def test_dtypes(self): # integers - for dtype in 'u1', 'u2', 'u4', 'u8', 'i1', 'i2', 'i4', 'i8': + for dtype in "u1", "u2", "u4", "u8", "i1", "i2", "i4", "i8": z = self.create_array(shape=10, chunks=3, dtype=dtype) assert z.dtype == np.dtype(dtype) a = np.arange(z.shape[0], dtype=dtype) @@ -1928,7 +1969,7 @@ def test_dtypes(self): assert_array_equal(a, z[:]) # floats - for dtype in 'f2', 'f4', 'f8': + for dtype in "f2", "f4", "f8": z = self.create_array(shape=10, chunks=3, dtype=dtype) assert z.dtype == np.dtype(dtype) a = np.linspace(0, 1, z.shape[0], dtype=dtype) @@ -1937,9 +1978,9 @@ def test_dtypes(self): # check that datetime generic units are not allowed with pytest.raises(ValueError): - self.create_array(shape=100, dtype='M8') + self.create_array(shape=100, dtype="M8") with pytest.raises(ValueError): - self.create_array(shape=100, dtype='m8') + self.create_array(shape=100, dtype="m8") def test_object_arrays(self): @@ -1970,7 +2011,7 @@ def test_object_arrays_vlen_text(self): def test_object_arrays_vlen_bytes(self): - greetings_bytes = [g.encode('utf8') for g in greetings] + greetings_bytes = [g.encode("utf8") for g in greetings] data = np.array(greetings_bytes * 1000, dtype=object) with pytest.raises(ValueError): @@ -1982,19 +2023,19 @@ def test_object_arrays_vlen_bytes(self): def test_object_arrays_vlen_array(self): - data = np.array([np.array([1, 3, 7]), - np.array([5]), - np.array([2, 8, 12])] * 1000, dtype=object) + data = np.array( + [np.array([1, 3, 7]), np.array([5]), np.array([2, 8, 12])] * 1000, dtype=object + ) - codecs = VLenArray(int), VLenArray(' 2 and g1.store.is_erasable(): - arr_path = g1.path + '/arr1' + arr_path = g1.path + "/arr1" sfx = _get_metadata_suffix(g1.store) - array_meta_file = meta_root + arr_path + '.array' + sfx + array_meta_file = meta_root + arr_path + ".array" + sfx assert array_meta_file in g1.store - group_meta_file = meta_root + g2.path + '.group' + sfx + group_meta_file = meta_root + g2.path + ".group" + sfx assert group_meta_file in g1.store # rmdir on the array path should also remove the metadata file @@ -280,21 +308,21 @@ def test_rmdir_group_and_array_metadata_files(self): assert group_meta_file not in g1.store def _dataset_path(self, group, path): - path = path.rstrip('/') - absolute = path.startswith('/') + path = path.rstrip("/") + absolute = path.startswith("/") if absolute: dataset_path = path else: - dataset_path = '/'.join([group.path, path]) - dataset_path = dataset_path.lstrip('/') - dataset_name = '/' + dataset_path + dataset_path = "/".join([group.path, path]) + dataset_path = dataset_path.lstrip("/") + dataset_name = "/" + dataset_path return dataset_path, dataset_name def test_create_dataset(self): g = self.create_group() # create as immediate child - dpath = 'foo' + dpath = "foo" d1 = 
g.create_dataset(dpath, shape=1000, chunks=100) path, name = self._dataset_path(g, dpath) assert isinstance(d1, Array) @@ -305,32 +333,39 @@ def test_create_dataset(self): assert g.store is d1.store # create as descendant - dpath = '/a/b/c/' - d2 = g.create_dataset(dpath, shape=2000, chunks=200, dtype='i1', - compression='zlib', compression_opts=9, - fill_value=42, order='F') + dpath = "/a/b/c/" + d2 = g.create_dataset( + dpath, + shape=2000, + chunks=200, + dtype="i1", + compression="zlib", + compression_opts=9, + fill_value=42, + order="F", + ) path, name = self._dataset_path(g, dpath) assert isinstance(d2, Array) assert (2000,) == d2.shape assert (200,) == d2.chunks - assert np.dtype('i1') == d2.dtype - assert 'zlib' == d2.compressor.codec_id + assert np.dtype("i1") == d2.dtype + assert "zlib" == d2.compressor.codec_id assert 9 == d2.compressor.level assert 42 == d2.fill_value - assert 'F' == d2.order + assert "F" == d2.order assert path == d2.path assert name == d2.name assert g.store is d2.store # create with data - data = np.arange(3000, dtype='u2') - dpath = 'bar' + data = np.arange(3000, dtype="u2") + dpath = "bar" d3 = g.create_dataset(dpath, data=data, chunks=300) path, name = self._dataset_path(g, dpath) assert isinstance(d3, Array) assert (3000,) == d3.shape assert (300,) == d3.chunks - assert np.dtype('u2') == d3.dtype + assert np.dtype("u2") == d3.dtype assert_array_equal(data, d3[:]) assert path == d3.path assert name == d3.name @@ -339,35 +374,39 @@ def test_create_dataset(self): # compression arguments handling follows... # compression_opts as dict - d = g.create_dataset('aaa', shape=1000, dtype='u1', - compression='blosc', - compression_opts=dict(cname='zstd', clevel=1, shuffle=2)) - assert d.compressor.codec_id == 'blosc' - assert 'zstd' == d.compressor.cname + d = g.create_dataset( + "aaa", + shape=1000, + dtype="u1", + compression="blosc", + compression_opts=dict(cname="zstd", clevel=1, shuffle=2), + ) + assert d.compressor.codec_id == "blosc" + assert "zstd" == d.compressor.cname assert 1 == d.compressor.clevel assert 2 == d.compressor.shuffle # compression_opts as sequence - d = g.create_dataset('bbb', shape=1000, dtype='u1', - compression='blosc', - compression_opts=('zstd', 1, 2)) - assert d.compressor.codec_id == 'blosc' - assert 'zstd' == d.compressor.cname + d = g.create_dataset( + "bbb", shape=1000, dtype="u1", compression="blosc", compression_opts=("zstd", 1, 2) + ) + assert d.compressor.codec_id == "blosc" + assert "zstd" == d.compressor.cname assert 1 == d.compressor.clevel assert 2 == d.compressor.shuffle # None compression_opts - d = g.create_dataset('ccc', shape=1000, dtype='u1', compression='zlib') - assert d.compressor.codec_id == 'zlib' + d = g.create_dataset("ccc", shape=1000, dtype="u1", compression="zlib") + assert d.compressor.codec_id == "zlib" assert 1 == d.compressor.level # None compression - d = g.create_dataset('ddd', shape=1000, dtype='u1', compression=None) + d = g.create_dataset("ddd", shape=1000, dtype="u1", compression=None) assert d.compressor is None # compressor as compression - d = g.create_dataset('eee', shape=1000, dtype='u1', compression=Zlib(1)) - assert d.compressor.codec_id == 'zlib' + d = g.create_dataset("eee", shape=1000, dtype="u1", compression=Zlib(1)) + assert d.compressor.codec_id == "zlib" assert 1 == d.compressor.level g.store.close() @@ -376,25 +415,25 @@ def test_require_dataset(self): g = self.create_group() # create - dpath = 'foo' - d1 = g.require_dataset(dpath, shape=1000, chunks=100, dtype='f4') + dpath = "foo" 
+ d1 = g.require_dataset(dpath, shape=1000, chunks=100, dtype="f4") d1[:] = np.arange(1000) path, name = self._dataset_path(g, dpath) assert isinstance(d1, Array) assert (1000,) == d1.shape assert (100,) == d1.chunks - assert np.dtype('f4') == d1.dtype + assert np.dtype("f4") == d1.dtype assert path == d1.path assert name == d1.name assert g.store is d1.store assert_array_equal(np.arange(1000), d1[:]) # require - d2 = g.require_dataset(dpath, shape=1000, chunks=100, dtype='f4') + d2 = g.require_dataset(dpath, shape=1000, chunks=100, dtype="f4") assert isinstance(d2, Array) assert (1000,) == d2.shape assert (100,) == d2.chunks - assert np.dtype('f4') == d2.dtype + assert np.dtype("f4") == d2.dtype assert path == d2.path assert name == d2.name assert g.store is d2.store @@ -403,20 +442,19 @@ def test_require_dataset(self): # bad shape - use TypeError for h5py compatibility with pytest.raises(TypeError): - g.require_dataset('foo', shape=2000, chunks=100, dtype='f4') + g.require_dataset("foo", shape=2000, chunks=100, dtype="f4") # dtype matching # can cast - d3 = g.require_dataset('foo', shape=1000, chunks=100, dtype='i2') - assert np.dtype('f4') == d3.dtype + d3 = g.require_dataset("foo", shape=1000, chunks=100, dtype="i2") + assert np.dtype("f4") == d3.dtype assert d1 == d3 with pytest.raises(TypeError): # cannot cast - g.require_dataset('foo', shape=1000, chunks=100, dtype='i4') + g.require_dataset("foo", shape=1000, chunks=100, dtype="i4") with pytest.raises(TypeError): # can cast but not exact match - g.require_dataset('foo', shape=1000, chunks=100, dtype='i2', - exact=True) + g.require_dataset("foo", shape=1000, chunks=100, dtype="i2", exact=True) g.store.close() @@ -424,80 +462,76 @@ def test_create_errors(self): g = self.create_group() # array obstructs group, array - g.create_dataset('foo', shape=100, chunks=10) + g.create_dataset("foo", shape=100, chunks=10) with pytest.raises(ValueError): - g.create_group('foo/bar') + g.create_group("foo/bar") with pytest.raises(ValueError): - g.require_group('foo/bar') + g.require_group("foo/bar") with pytest.raises(ValueError): - g.create_dataset('foo/bar', shape=100, chunks=10) + g.create_dataset("foo/bar", shape=100, chunks=10) with pytest.raises(ValueError): - g.require_dataset('foo/bar', shape=100, chunks=10) + g.require_dataset("foo/bar", shape=100, chunks=10) # array obstructs group, array - g.create_dataset('a/b', shape=100, chunks=10) + g.create_dataset("a/b", shape=100, chunks=10) with pytest.raises(ValueError): - g.create_group('a/b') + g.create_group("a/b") with pytest.raises(ValueError): - g.require_group('a/b') + g.require_group("a/b") with pytest.raises(ValueError): - g.create_dataset('a/b', shape=100, chunks=10) + g.create_dataset("a/b", shape=100, chunks=10) # group obstructs array - g.create_group('c/d') + g.create_group("c/d") with pytest.raises(ValueError): - g.create_dataset('c', shape=100, chunks=10) + g.create_dataset("c", shape=100, chunks=10) with pytest.raises(ValueError): - g.require_dataset('c', shape=100, chunks=10) + g.require_dataset("c", shape=100, chunks=10) with pytest.raises(ValueError): - g.create_dataset('c/d', shape=100, chunks=10) + g.create_dataset("c/d", shape=100, chunks=10) with pytest.raises(ValueError): - g.require_dataset('c/d', shape=100, chunks=10) + g.require_dataset("c/d", shape=100, chunks=10) # h5py compatibility, accept 'fillvalue' - d = g.create_dataset('x', shape=100, chunks=10, fillvalue=42) + d = g.create_dataset("x", shape=100, chunks=10, fillvalue=42) assert 42 == d.fill_value # h5py 
compatibility, ignore 'shuffle' with pytest.warns(UserWarning, match="ignoring keyword argument 'shuffle'"): - g.create_dataset('y', shape=100, chunks=10, shuffle=True) + g.create_dataset("y", shape=100, chunks=10, shuffle=True) # read-only g = self.create_group(read_only=True) with pytest.raises(PermissionError): - g.create_group('zzz') + g.create_group("zzz") with pytest.raises(PermissionError): - g.require_group('zzz') + g.require_group("zzz") with pytest.raises(PermissionError): - g.create_dataset('zzz', shape=100, chunks=10) + g.create_dataset("zzz", shape=100, chunks=10) with pytest.raises(PermissionError): - g.require_dataset('zzz', shape=100, chunks=10) + g.require_dataset("zzz", shape=100, chunks=10) g.store.close() def test_create_overwrite(self): try: - for method_name in 'create_dataset', 'create', 'empty', 'zeros', \ - 'ones': + for method_name in "create_dataset", "create", "empty", "zeros", "ones": g = self.create_group() - getattr(g, method_name)('foo', shape=100, chunks=10) + getattr(g, method_name)("foo", shape=100, chunks=10) # overwrite array with array - d = getattr(g, method_name)('foo', shape=200, chunks=20, - overwrite=True) + d = getattr(g, method_name)("foo", shape=200, chunks=20, overwrite=True) assert (200,) == d.shape # overwrite array with group - g2 = g.create_group('foo', overwrite=True) + g2 = g.create_group("foo", overwrite=True) assert 0 == len(g2) # overwrite group with array - d = getattr(g, method_name)('foo', shape=300, chunks=30, - overwrite=True) + d = getattr(g, method_name)("foo", shape=300, chunks=30, overwrite=True) assert (300,) == d.shape # overwrite array with group - d = getattr(g, method_name)('foo/bar', shape=400, chunks=40, - overwrite=True) + d = getattr(g, method_name)("foo/bar", shape=400, chunks=40, overwrite=True) assert (400,) == d.shape - assert isinstance(g['foo'], Group) + assert isinstance(g["foo"], Group) g.store.close() except NotImplementedError: @@ -506,84 +540,84 @@ def test_create_overwrite(self): def test_getitem_contains_iterators(self): # setup g1 = self.create_group() - g2 = g1.create_group('foo/bar') + g2 = g1.create_group("foo/bar") if g1._version == 2: - d1 = g2.create_dataset('/a/b/c', shape=1000, chunks=100) + d1 = g2.create_dataset("/a/b/c", shape=1000, chunks=100) else: # v3: cannot create a dataset at the root by starting with / # instead, need to create the dataset on g1 directly - d1 = g1.create_dataset('a/b/c', shape=1000, chunks=100) + d1 = g1.create_dataset("a/b/c", shape=1000, chunks=100) d1[:] = np.arange(1000) - d2 = g1.create_dataset('foo/baz', shape=3000, chunks=300) + d2 = g1.create_dataset("foo/baz", shape=3000, chunks=300) d2[:] = np.arange(3000) # test __getitem__ - assert isinstance(g1['foo'], Group) - assert isinstance(g1['foo']['bar'], Group) - assert isinstance(g1['foo/bar'], Group) + assert isinstance(g1["foo"], Group) + assert isinstance(g1["foo"]["bar"], Group) + assert isinstance(g1["foo/bar"], Group) if g1._version == 2: - assert isinstance(g1['/foo/bar/'], Group) + assert isinstance(g1["/foo/bar/"], Group) else: # start or end with / raises KeyError # TODO: should we allow stripping of these on v3? 
with pytest.raises(KeyError): - assert isinstance(g1['/foo/bar/'], Group) - assert isinstance(g1['foo/baz'], Array) - assert g2 == g1['foo/bar'] - assert g1['foo']['bar'] == g1['foo/bar'] - assert d2 == g1['foo/baz'] - assert_array_equal(d2[:], g1['foo/baz']) - assert isinstance(g1['a'], Group) - assert isinstance(g1['a']['b'], Group) - assert isinstance(g1['a/b'], Group) - assert isinstance(g1['a']['b']['c'], Array) - assert isinstance(g1['a/b/c'], Array) - assert d1 == g1['a/b/c'] - assert g1['a']['b']['c'] == g1['a/b/c'] - assert_array_equal(d1[:], g1['a/b/c'][:]) + assert isinstance(g1["/foo/bar/"], Group) + assert isinstance(g1["foo/baz"], Array) + assert g2 == g1["foo/bar"] + assert g1["foo"]["bar"] == g1["foo/bar"] + assert d2 == g1["foo/baz"] + assert_array_equal(d2[:], g1["foo/baz"]) + assert isinstance(g1["a"], Group) + assert isinstance(g1["a"]["b"], Group) + assert isinstance(g1["a/b"], Group) + assert isinstance(g1["a"]["b"]["c"], Array) + assert isinstance(g1["a/b/c"], Array) + assert d1 == g1["a/b/c"] + assert g1["a"]["b"]["c"] == g1["a/b/c"] + assert_array_equal(d1[:], g1["a/b/c"][:]) # test __contains__ - assert 'foo' in g1 - assert 'foo/bar' in g1 - assert 'foo/baz' in g1 - assert 'bar' in g1['foo'] - assert 'a' in g1 - assert 'a/b' in g1 - assert 'a/b/c' in g1 - assert 'baz' not in g1 - assert 'a/b/c/d' not in g1 - assert 'a/z' not in g1 - assert 'quux' not in g1['foo'] + assert "foo" in g1 + assert "foo/bar" in g1 + assert "foo/baz" in g1 + assert "bar" in g1["foo"] + assert "a" in g1 + assert "a/b" in g1 + assert "a/b/c" in g1 + assert "baz" not in g1 + assert "a/b/c/d" not in g1 + assert "a/z" not in g1 + assert "quux" not in g1["foo"] # test key errors with pytest.raises(KeyError): - g1['baz'] + g1["baz"] with pytest.raises(KeyError): - g1['x/y/z'] + g1["x/y/z"] # test __len__ assert 2 == len(g1) - assert 2 == len(g1['foo']) - assert 0 == len(g1['foo/bar']) - assert 1 == len(g1['a']) - assert 1 == len(g1['a/b']) + assert 2 == len(g1["foo"]) + assert 0 == len(g1["foo/bar"]) + assert 1 == len(g1["a"]) + assert 1 == len(g1["a/b"]) # test __iter__, keys() if g1._version == 2: # currently assumes sorted by key - assert ['a', 'foo'] == list(g1) - assert ['a', 'foo'] == list(g1.keys()) - assert ['bar', 'baz'] == list(g1['foo']) - assert ['bar', 'baz'] == list(g1['foo'].keys()) + assert ["a", "foo"] == list(g1) + assert ["a", "foo"] == list(g1.keys()) + assert ["bar", "baz"] == list(g1["foo"]) + assert ["bar", "baz"] == list(g1["foo"].keys()) else: # v3 is not necessarily sorted by key - assert ['a', 'foo'] == sorted(list(g1)) - assert ['a', 'foo'] == sorted(list(g1.keys())) - assert ['bar', 'baz'] == sorted(list(g1['foo'])) - assert ['bar', 'baz'] == sorted(list(g1['foo'].keys())) - assert [] == sorted(g1['foo/bar']) - assert [] == sorted(g1['foo/bar'].keys()) + assert ["a", "foo"] == sorted(list(g1)) + assert ["a", "foo"] == sorted(list(g1.keys())) + assert ["bar", "baz"] == sorted(list(g1["foo"])) + assert ["bar", "baz"] == sorted(list(g1["foo"].keys())) + assert [] == sorted(g1["foo/bar"]) + assert [] == sorted(g1["foo/bar"].keys()) # test items(), values() # currently assumes sorted by key @@ -593,24 +627,24 @@ def test_getitem_contains_iterators(self): if g1._version == 3: # v3 are not automatically sorted by key items, values = zip(*sorted(zip(items, values), key=lambda x: x[0])) - assert 'a' == items[0][0] - assert g1['a'] == items[0][1] - assert g1['a'] == values[0] - assert 'foo' == items[1][0] - assert g1['foo'] == items[1][1] - assert g1['foo'] == values[1] - - 
items = list(g1['foo'].items()) - values = list(g1['foo'].values()) + assert "a" == items[0][0] + assert g1["a"] == items[0][1] + assert g1["a"] == values[0] + assert "foo" == items[1][0] + assert g1["foo"] == items[1][1] + assert g1["foo"] == values[1] + + items = list(g1["foo"].items()) + values = list(g1["foo"].values()) if g1._version == 3: # v3 are not automatically sorted by key items, values = zip(*sorted(zip(items, values), key=lambda x: x[0])) - assert 'bar' == items[0][0] - assert g1['foo']['bar'] == items[0][1] - assert g1['foo']['bar'] == values[0] - assert 'baz' == items[1][0] - assert g1['foo']['baz'] == items[1][1] - assert g1['foo']['baz'] == values[1] + assert "bar" == items[0][0] + assert g1["foo"]["bar"] == items[0][1] + assert g1["foo"]["bar"] == values[0] + assert "baz" == items[1][0] + assert g1["foo"]["baz"] == items[1][1] + assert g1["foo"]["baz"] == values[1] # test array_keys(), arrays(), group_keys(), groups() @@ -618,29 +652,29 @@ def test_getitem_contains_iterators(self): arrays = list(g1.arrays()) if g1._version == 2: # currently assumes sorted by key - assert ['a', 'foo'] == list(g1.group_keys()) + assert ["a", "foo"] == list(g1.group_keys()) else: - assert ['a', 'foo'] == sorted(list(g1.group_keys())) + assert ["a", "foo"] == sorted(list(g1.group_keys())) groups = sorted(groups) arrays = sorted(arrays) - assert 'a' == groups[0][0] - assert g1['a'] == groups[0][1] - assert 'foo' == groups[1][0] - assert g1['foo'] == groups[1][1] + assert "a" == groups[0][0] + assert g1["a"] == groups[0][1] + assert "foo" == groups[1][0] + assert g1["foo"] == groups[1][1] assert [] == list(g1.array_keys()) assert [] == arrays - assert ['bar'] == list(g1['foo'].group_keys()) - assert ['baz'] == list(g1['foo'].array_keys()) - groups = list(g1['foo'].groups()) - arrays = list(g1['foo'].arrays()) + assert ["bar"] == list(g1["foo"].group_keys()) + assert ["baz"] == list(g1["foo"].array_keys()) + groups = list(g1["foo"].groups()) + arrays = list(g1["foo"].arrays()) if g1._version == 3: groups = sorted(groups) arrays = sorted(arrays) - assert 'bar' == groups[0][0] - assert g1['foo']['bar'] == groups[0][1] - assert 'baz' == arrays[0][0] - assert g1['foo']['baz'] == arrays[0][1] + assert "bar" == groups[0][0] + assert g1["foo"]["bar"] == groups[0][1] + assert "baz" == arrays[0][0] + assert g1["foo"]["baz"] == arrays[0][1] # visitor collection tests items = [] @@ -666,7 +700,7 @@ def visitor4(name, obj): "foo/baz", ] if g1._version == 3: - expected_items = [g1.path + '/' + i for i in expected_items] + expected_items = [g1.path + "/" + i for i in expected_items] assert expected_items == items del items[:] @@ -676,7 +710,7 @@ def visitor4(name, obj): "foo/baz", ] if g1._version == 3: - expected_items = [g1.path + '/' + i for i in expected_items] + expected_items = [g1.path + "/" + i for i in expected_items] assert expected_items == items del items[:] @@ -753,7 +787,7 @@ def visitor0(val, *args): # noinspection PyUnusedLocal def visitor1(val, *args): name = getattr(val, "path", val) - if name.startswith('group/'): + if name.startswith("group/"): # strip the group path for v3 name = name[6:] if name == "a/b/c": @@ -779,8 +813,7 @@ def test_double_counting_group_v3(self): sub_group.create("bar", shape=10, dtype="i4") assert list(root_group.group_keys()) == sorted(group_names) assert list(root_group.groups()) == [ - (name, root_group[name]) - for name in sorted(group_names) + (name, root_group[name]) for name in sorted(group_names) ] def test_empty_getitem_contains_iterators(self): @@ -791,47 
+824,47 @@ def test_empty_getitem_contains_iterators(self): assert [] == list(g) assert [] == list(g.keys()) assert 0 == len(g) - assert 'foo' not in g + assert "foo" not in g g.store.close() def test_iterators_recurse(self): # setup g1 = self.create_group() - g2 = g1.create_group('foo/bar') - d1 = g2.create_dataset('/a/b/c', shape=1000, chunks=100) + g2 = g1.create_group("foo/bar") + d1 = g2.create_dataset("/a/b/c", shape=1000, chunks=100) d1[:] = np.arange(1000) - d2 = g1.create_dataset('foo/baz', shape=3000, chunks=300) + d2 = g1.create_dataset("foo/baz", shape=3000, chunks=300) d2[:] = np.arange(3000) - d3 = g2.create_dataset('zab', shape=2000, chunks=200) + d3 = g2.create_dataset("zab", shape=2000, chunks=200) d3[:] = np.arange(2000) # test recursive array_keys - array_keys = list(g1['foo'].array_keys(recurse=False)) - array_keys_recurse = list(g1['foo'].array_keys(recurse=True)) + array_keys = list(g1["foo"].array_keys(recurse=False)) + array_keys_recurse = list(g1["foo"].array_keys(recurse=True)) assert len(array_keys_recurse) > len(array_keys) - assert sorted(array_keys_recurse) == ['baz', 'zab'] + assert sorted(array_keys_recurse) == ["baz", "zab"] # test recursive arrays - arrays = list(g1['foo'].arrays(recurse=False)) - arrays_recurse = list(g1['foo'].arrays(recurse=True)) + arrays = list(g1["foo"].arrays(recurse=False)) + arrays_recurse = list(g1["foo"].arrays(recurse=True)) assert len(arrays_recurse) > len(arrays) - assert 'zab' == arrays_recurse[0][0] - assert g1['foo']['bar']['zab'] == arrays_recurse[0][1] + assert "zab" == arrays_recurse[0][0] + assert g1["foo"]["bar"]["zab"] == arrays_recurse[0][1] g1.store.close() def test_getattr(self): # setup g1 = self.create_group() - g2 = g1.create_group('foo') - g2.create_dataset('bar', shape=100) + g2 = g1.create_group("foo") + g2.create_dataset("bar", shape=100) # test - assert g1['foo'] == g1.foo - assert g2['bar'] == g2.bar + assert g1["foo"] == g1.foo + assert g2["bar"] == g2.bar # test that hasattr returns False instead of an exception (issue #88) - assert not hasattr(g1, 'unexistingattribute') + assert not hasattr(g1, "unexistingattribute") g1.store.close() @@ -839,46 +872,46 @@ def test_setitem(self): g = self.create_group() try: data = np.arange(100) - g['foo'] = data - assert_array_equal(data, g['foo']) + g["foo"] = data + assert_array_equal(data, g["foo"]) data = np.arange(200) - g['foo'] = data - assert_array_equal(data, g['foo']) + g["foo"] = data + assert_array_equal(data, g["foo"]) # 0d array - g['foo'] = 42 - assert () == g['foo'].shape - assert 42 == g['foo'][()] + g["foo"] = 42 + assert () == g["foo"].shape + assert 42 == g["foo"][()] except NotImplementedError: pass g.store.close() def test_delitem(self): g = self.create_group() - g.create_group('foo') - g.create_dataset('bar/baz', shape=100, chunks=10) - assert 'foo' in g - assert 'bar' in g - assert 'bar/baz' in g + g.create_group("foo") + g.create_dataset("bar/baz", shape=100, chunks=10) + assert "foo" in g + assert "bar" in g + assert "bar/baz" in g try: - del g['bar'] + del g["bar"] with pytest.raises(KeyError): - del g['xxx'] + del g["xxx"] except NotImplementedError: pass else: - assert 'foo' in g - assert 'bar' not in g - assert 'bar/baz' not in g + assert "foo" in g + assert "bar" not in g + assert "bar/baz" not in g g.store.close() def test_move(self): g = self.create_group() data = np.arange(100) - g['boo'] = data + g["boo"] = data data = np.arange(100) - g['foo'] = data + g["foo"] = data g.move("foo", "bar") assert "foo" not in g @@ -911,11 +944,11 @@ 
def test_move(self): # meta/data/bar. This is outside the `g` group located at # /meta/root/group, so bar is no longer within `g`. assert "bar" not in g - assert 'meta/root/bar.array.json' in g._store + assert "meta/root/bar.array.json" in g._store if g._chunk_store: - assert 'data/root/bar/c0' in g._chunk_store + assert "data/root/bar/c0" in g._chunk_store else: - assert 'data/root/bar/c0' in g._store + assert "data/root/bar/c0" in g._store assert isinstance(g["foo2"], Group) if g2._version == 2: assert_array_equal(data, g["bar"]) @@ -938,35 +971,35 @@ def test_move(self): def test_array_creation(self): grp = self.create_group() - a = grp.create('a', shape=100, chunks=10) + a = grp.create("a", shape=100, chunks=10) assert isinstance(a, Array) - b = grp.empty('b', shape=100, chunks=10) + b = grp.empty("b", shape=100, chunks=10) assert isinstance(b, Array) assert b.fill_value is None - c = grp.zeros('c', shape=100, chunks=10) + c = grp.zeros("c", shape=100, chunks=10) assert isinstance(c, Array) assert 0 == c.fill_value - d = grp.ones('d', shape=100, chunks=10) + d = grp.ones("d", shape=100, chunks=10) assert isinstance(d, Array) assert 1 == d.fill_value - e = grp.full('e', shape=100, chunks=10, fill_value=42) + e = grp.full("e", shape=100, chunks=10, fill_value=42) assert isinstance(e, Array) assert 42 == e.fill_value - f = grp.empty_like('f', a) + f = grp.empty_like("f", a) assert isinstance(f, Array) assert f.fill_value is None - g = grp.zeros_like('g', a) + g = grp.zeros_like("g", a) assert isinstance(g, Array) assert 0 == g.fill_value - h = grp.ones_like('h', a) + h = grp.ones_like("h", a) assert isinstance(h, Array) assert 1 == h.fill_value - i = grp.full_like('i', e) + i = grp.full_like("i", e) assert isinstance(i, Array) assert 42 == i.fill_value - j = grp.array('j', data=np.arange(100), chunks=10) + j = grp.array("j", data=np.arange(100), chunks=10) assert isinstance(j, Array) assert_array_equal(np.arange(100), j[:]) @@ -974,81 +1007,80 @@ def test_array_creation(self): grp = self.create_group(read_only=True) with pytest.raises(PermissionError): - grp.create('aa', shape=100, chunks=10) + grp.create("aa", shape=100, chunks=10) with pytest.raises(PermissionError): - grp.empty('aa', shape=100, chunks=10) + grp.empty("aa", shape=100, chunks=10) with pytest.raises(PermissionError): - grp.zeros('aa', shape=100, chunks=10) + grp.zeros("aa", shape=100, chunks=10) with pytest.raises(PermissionError): - grp.ones('aa', shape=100, chunks=10) + grp.ones("aa", shape=100, chunks=10) with pytest.raises(PermissionError): - grp.full('aa', shape=100, chunks=10, fill_value=42) + grp.full("aa", shape=100, chunks=10, fill_value=42) with pytest.raises(PermissionError): - grp.array('aa', data=np.arange(100), chunks=10) + grp.array("aa", data=np.arange(100), chunks=10) with pytest.raises(PermissionError): - grp.create('aa', shape=100, chunks=10) + grp.create("aa", shape=100, chunks=10) with pytest.raises(PermissionError): - grp.empty_like('aa', a) + grp.empty_like("aa", a) with pytest.raises(PermissionError): - grp.zeros_like('aa', a) + grp.zeros_like("aa", a) with pytest.raises(PermissionError): - grp.ones_like('aa', a) + grp.ones_like("aa", a) with pytest.raises(PermissionError): - grp.full_like('aa', a) + grp.full_like("aa", a) grp.store.close() def test_paths(self): g1 = self.create_group() - g2 = g1.create_group('foo/bar') + g2 = g1.create_group("foo/bar") if g1._version == 2: - assert g1 == g1['/'] - assert g1 == g1['//'] - assert g1 == g1['///'] - assert g1 == g2['/'] - assert g1 == g2['//'] - 
assert g1 == g2['///'] - assert g2 == g1['foo/bar'] - assert g2 == g1['/foo/bar'] - assert g2 == g1['foo/bar/'] - assert g2 == g1['//foo/bar'] - assert g2 == g1['//foo//bar//'] - assert g2 == g1['///foo///bar///'] - assert g2 == g2['/foo/bar'] + assert g1 == g1["/"] + assert g1 == g1["//"] + assert g1 == g1["///"] + assert g1 == g2["/"] + assert g1 == g2["//"] + assert g1 == g2["///"] + assert g2 == g1["foo/bar"] + assert g2 == g1["/foo/bar"] + assert g2 == g1["foo/bar/"] + assert g2 == g1["//foo/bar"] + assert g2 == g1["//foo//bar//"] + assert g2 == g1["///foo///bar///"] + assert g2 == g2["/foo/bar"] else: # the expected key format gives a match - assert g2 == g1['foo/bar'] + assert g2 == g1["foo/bar"] # TODO: Should presence of a trailing slash raise KeyError? # The spec says "the final character is not a / character" # but we currently strip trailing '/' as done for v2. - assert g2 == g1['foo/bar/'] + assert g2 == g1["foo/bar/"] # double slash also currently works (spec doesn't mention this # case, but have kept it for v2 behavior compatibility) - assert g2 == g1['foo//bar'] + assert g2 == g1["foo//bar"] # TODO, root: fix these cases # v3: leading / implies we are at the root, not within a group, # so these all raise KeyError - for path in ['/foo/bar', '//foo/bar', '//foo//bar//', - '///fooo///bar///']: + for path in ["/foo/bar", "//foo/bar", "//foo//bar//", "///fooo///bar///"]: with pytest.raises(KeyError): g1[path] with pytest.raises(ValueError): - g1['.'] + g1["."] with pytest.raises(ValueError): - g1['..'] + g1[".."] with pytest.raises(ValueError): - g1['foo/.'] + g1["foo/."] with pytest.raises(ValueError): - g1['foo/..'] + g1["foo/.."] with pytest.raises(ValueError): - g1['foo/./bar'] + g1["foo/./bar"] with pytest.raises(ValueError): - g1['foo/../bar'] + g1["foo/../bar"] g1.store.close() @@ -1056,7 +1088,7 @@ def test_pickle(self): # setup group g = self.create_group() - d = g.create_dataset('foo/bar', shape=100, chunks=10) + d = g.create_dataset("foo/bar", shape=100, chunks=10) d[:] = np.arange(100) path = g.path name = g.name @@ -1075,19 +1107,19 @@ def test_pickle(self): assert name == g2.name assert n == len(g2) assert keys == list(g2) - assert isinstance(g2['foo'], Group) - assert isinstance(g2['foo/bar'], Array) + assert isinstance(g2["foo"], Group) + assert isinstance(g2["foo/bar"], Array) g2.store.close() def test_context_manager(self): with self.create_group() as g: - d = g.create_dataset('foo/bar', shape=100, chunks=10) + d = g.create_dataset("foo/bar", shape=100, chunks=10) d[:] = np.arange(100) -@pytest.mark.parametrize('chunk_dict', [False, True]) +@pytest.mark.parametrize("chunk_dict", [False, True]) def test_group_init_from_dict(chunk_dict): if chunk_dict: store, chunk_store = dict(), dict() @@ -1106,20 +1138,25 @@ def test_group_init_from_dict(chunk_dict): # noinspection PyStatementEffect @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3(TestGroup, unittest.TestCase): - @staticmethod def create_store(): # can be overridden in sub-classes return KVStoreV3(dict()), None - def create_group(self, store=None, path='group', read_only=False, - chunk_store=None, synchronizer=None): + def create_group( + self, store=None, path="group", read_only=False, chunk_store=None, synchronizer=None + ): # can be overridden in sub-classes if store is None: store, chunk_store = self.create_store() init_group(store, path=path, chunk_store=chunk_store) - g = Group(store, path=path, read_only=read_only, - chunk_store=chunk_store, synchronizer=synchronizer) 
+ g = Group( + store, + path=path, + read_only=read_only, + chunk_store=chunk_store, + synchronizer=synchronizer, + ) return g def test_group_init_1(self): @@ -1132,13 +1169,13 @@ def test_group_init_1(self): assert chunk_store is g.chunk_store assert not g.read_only # different path/name in v3 case - assert 'group' == g.path - assert '/group' == g.name - assert 'group' == g.basename + assert "group" == g.path + assert "/group" == g.name + assert "group" == g.basename assert isinstance(g.attrs, Attributes) - g.attrs['foo'] = 'bar' - assert g.attrs['foo'] == 'bar' + g.attrs["foo"] = "bar" + assert g.attrs["foo"] == "bar" assert isinstance(g.info, InfoReporter) assert isinstance(repr(g.info), str) @@ -1147,7 +1184,7 @@ def test_group_init_1(self): def test_group_init_errors_2(self): store, chunk_store = self.create_store() - path = 'tmp' + path = "tmp" init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) # array blocks group with pytest.raises(ValueError): @@ -1156,7 +1193,6 @@ def test_group_init_errors_2(self): class TestGroupWithMemoryStore(TestGroup): - @staticmethod def create_store(): return MemoryStore(), None @@ -1165,14 +1201,12 @@ def create_store(): # noinspection PyStatementEffect @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithMemoryStore(TestGroupWithMemoryStore, TestGroupV3): - @staticmethod def create_store(): return MemoryStoreV3(), None class TestGroupWithDirectoryStore(TestGroup): - @staticmethod def create_store(): path = tempfile.mkdtemp() @@ -1183,7 +1217,6 @@ def create_store(): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithDirectoryStore(TestGroupWithDirectoryStore, TestGroupV3): - @staticmethod def create_store(): path = tempfile.mkdtemp() @@ -1194,7 +1227,6 @@ def create_store(): @skip_test_env_var("ZARR_TEST_ABS") class TestGroupWithABSStore(TestGroup): - @staticmethod def create_store(): container_client = abs_container() @@ -1211,7 +1243,6 @@ def test_pickle(self): @skip_test_env_var("ZARR_TEST_ABS") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithABSStore(TestGroupV3): - @staticmethod def create_store(): container_client = abs_container() @@ -1226,7 +1257,6 @@ def test_pickle(self): class TestGroupWithNestedDirectoryStore(TestGroup): - @staticmethod def create_store(): path = tempfile.mkdtemp() @@ -1237,7 +1267,6 @@ def create_store(): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestGroupWithFSStore(TestGroup): - @staticmethod def create_store(): path = tempfile.mkdtemp() @@ -1247,21 +1276,19 @@ def create_store(): def test_round_trip_nd(self): data = np.arange(1000).reshape(10, 10, 10) - name = 'raw' + name = "raw" store, _ = self.create_store() - f = open_group(store, mode='w') - f.create_dataset(name, data=data, chunks=(5, 5, 5), - compressor=None) + f = open_group(store, mode="w") + f.create_dataset(name, data=data, chunks=(5, 5, 5), compressor=None) assert name in f - h = open_group(store, mode='r') + h = open_group(store, mode="r") np.testing.assert_array_equal(h[name][:], data) @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithFSStore(TestGroupWithFSStore, TestGroupV3): - @staticmethod def create_store(): path = tempfile.mkdtemp() @@ -1271,80 +1298,78 @@ def create_store(): def test_round_trip_nd(self): data = np.arange(1000).reshape(10, 10, 10) - name = 'raw' + name = "raw" store, _ 
= self.create_store() - f = open_group(store, path='group', mode='w') - f.create_dataset(name, data=data, chunks=(5, 5, 5), - compressor=None) - h = open_group(store, path='group', mode='r') + f = open_group(store, path="group", mode="w") + f.create_dataset(name, data=data, chunks=(5, 5, 5), compressor=None) + h = open_group(store, path="group", mode="r") np.testing.assert_array_equal(h[name][:], data) - f = open_group(store, path='group2', mode='w') + f = open_group(store, path="group2", mode="w") data_size = data.nbytes - group_meta_size = buffer_size(store[meta_root + 'group.group.json']) - group2_meta_size = buffer_size(store[meta_root + 'group2.group.json']) - array_meta_size = buffer_size(store[meta_root + 'group/raw.array.json']) + group_meta_size = buffer_size(store[meta_root + "group.group.json"]) + group2_meta_size = buffer_size(store[meta_root + "group2.group.json"]) + array_meta_size = buffer_size(store[meta_root + "group/raw.array.json"]) assert store.getsize() == data_size + group_meta_size + group2_meta_size + array_meta_size # added case with path to complete coverage - assert store.getsize('group') == data_size + group_meta_size + array_meta_size - assert store.getsize('group2') == group2_meta_size - assert store.getsize('group/raw') == data_size + array_meta_size + assert store.getsize("group") == data_size + group_meta_size + array_meta_size + assert store.getsize("group2") == group2_meta_size + assert store.getsize("group/raw") == data_size + array_meta_size @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestGroupWithNestedFSStore(TestGroupWithFSStore): - @staticmethod def create_store(): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - store = FSStore(path, key_separator='/', auto_mkdir=True) + store = FSStore(path, key_separator="/", auto_mkdir=True) return store, None def test_inconsistent_dimension_separator(self): data = np.arange(1000).reshape(10, 10, 10) - name = 'raw' + name = "raw" store, _ = self.create_store() - f = open_group(store, mode='w') + f = open_group(store, mode="w") # cannot specify dimension_separator that conflicts with the store with pytest.raises(ValueError): - f.create_dataset(name, data=data, chunks=(5, 5, 5), - compressor=None, dimension_separator='.') + f.create_dataset( + name, data=data, chunks=(5, 5, 5), compressor=None, dimension_separator="." + ) @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithNestedFSStore(TestGroupV3WithFSStore): - @staticmethod def create_store(): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - store = FSStoreV3(path, key_separator='/', auto_mkdir=True) + store = FSStoreV3(path, key_separator="/", auto_mkdir=True) return store, None def test_inconsistent_dimension_separator(self): data = np.arange(1000).reshape(10, 10, 10) - name = 'raw' + name = "raw" store, _ = self.create_store() - f = open_group(store, path='group', mode='w') + f = open_group(store, path="group", mode="w") # cannot specify dimension_separator that conflicts with the store with pytest.raises(ValueError): - f.create_dataset(name, data=data, chunks=(5, 5, 5), - compressor=None, dimension_separator='.') + f.create_dataset( + name, data=data, chunks=(5, 5, 5), compressor=None, dimension_separator="." 
+ ) class TestGroupWithZipStore(TestGroup): - @staticmethod def create_store(): - path = mktemp(suffix='.zip') + path = mktemp(suffix=".zip") atexit.register(os.remove, path) store = ZipStore(path) return store, None @@ -1353,7 +1378,7 @@ def test_context_manager(self): with self.create_group() as g: store = g.store - d = g.create_dataset('foo/bar', shape=100, chunks=10) + d = g.create_dataset("foo/bar", shape=100, chunks=10) d[:] = np.arange(100) # Check that exiting the context manager closes the store, @@ -1369,65 +1394,59 @@ def test_move(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithZipStore(TestGroupWithZipStore, TestGroupV3): - @staticmethod def create_store(): - path = mktemp(suffix='.zip') + path = mktemp(suffix=".zip") atexit.register(os.remove, path) store = ZipStoreV3(path) return store, None class TestGroupWithDBMStore(TestGroup): - @staticmethod def create_store(): - path = mktemp(suffix='.anydbm') - atexit.register(atexit_rmglob, path + '*') - store = DBMStore(path, flag='n') + path = mktemp(suffix=".anydbm") + atexit.register(atexit_rmglob, path + "*") + store = DBMStore(path, flag="n") return store, None @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithDBMStore(TestGroupWithDBMStore, TestGroupV3): - @staticmethod def create_store(): - path = mktemp(suffix='.anydbm') - atexit.register(atexit_rmglob, path + '*') - store = DBMStoreV3(path, flag='n') + path = mktemp(suffix=".anydbm") + atexit.register(atexit_rmglob, path + "*") + store = DBMStoreV3(path, flag="n") return store, None class TestGroupWithDBMStoreBerkeleyDB(TestGroup): - @staticmethod def create_store(): bsddb3 = pytest.importorskip("bsddb3") - path = mktemp(suffix='.dbm') + path = mktemp(suffix=".dbm") atexit.register(os.remove, path) - store = DBMStore(path, flag='n', open=bsddb3.btopen) + store = DBMStore(path, flag="n", open=bsddb3.btopen) return store, None @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithDBMStoreBerkeleyDB(TestGroupWithDBMStoreBerkeleyDB, TestGroupV3): - @staticmethod def create_store(): bsddb3 = pytest.importorskip("bsddb3") - path = mktemp(suffix='.dbm') + path = mktemp(suffix=".dbm") atexit.register(os.remove, path) - store = DBMStoreV3(path, flag='n', open=bsddb3.btopen) + store = DBMStoreV3(path, flag="n", open=bsddb3.btopen) return store, None class TestGroupWithLMDBStore(TestGroup): - @staticmethod def create_store(): pytest.importorskip("lmdb") - path = mktemp(suffix='.lmdb') + path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) store = LMDBStore(path) return store, None @@ -1435,21 +1454,19 @@ def create_store(): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithLMDBStore(TestGroupWithLMDBStore, TestGroupV3): - @staticmethod def create_store(): pytest.importorskip("lmdb") - path = mktemp(suffix='.lmdb') + path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) store = LMDBStoreV3(path) return store, None class TestGroupWithSQLiteStore(TestGroup): - def create_store(self): pytest.importorskip("sqlite3") - path = mktemp(suffix='.db') + path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStore(path) return store, None @@ -1457,17 +1474,15 @@ def create_store(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithSQLiteStore(TestGroupWithSQLiteStore, TestGroupV3): - def create_store(self): pytest.importorskip("sqlite3") - path = 
mktemp(suffix='.db') + path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStoreV3(path) return store, None class TestGroupWithChunkStore(TestGroup): - @staticmethod def create_store(): return KVStore(dict()), KVStore(dict()) @@ -1482,24 +1497,23 @@ def test_chunk_store(self): assert chunk_store is g.chunk_store # create array - a = g.zeros('foo', shape=100, chunks=10) + a = g.zeros("foo", shape=100, chunks=10) assert store is a.store assert chunk_store is a.chunk_store a[:] = np.arange(100) assert_array_equal(np.arange(100), a[:]) # check store keys - expect = sorted([group_meta_key, 'foo/' + array_meta_key]) + expect = sorted([group_meta_key, "foo/" + array_meta_key]) actual = sorted(store.keys()) assert expect == actual - expect = ['foo/' + str(i) for i in range(10)] + expect = ["foo/" + str(i) for i in range(10)] actual = sorted(chunk_store.keys()) assert expect == actual @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithChunkStore(TestGroupWithChunkStore, TestGroupV3): - @staticmethod def create_store(): return KVStoreV3(dict()), KVStoreV3(dict()) @@ -1507,7 +1521,7 @@ def create_store(): def test_chunk_store(self): # setup store, chunk_store = self.create_store() - path = 'group1' + path = "group1" g = self.create_group(store, path=path, chunk_store=chunk_store) # check attributes @@ -1515,26 +1529,25 @@ def test_chunk_store(self): assert chunk_store is g.chunk_store # create array - a = g.zeros('foo', shape=100, chunks=10) + a = g.zeros("foo", shape=100, chunks=10) assert store is a.store assert chunk_store is a.chunk_store a[:] = np.arange(100) assert_array_equal(np.arange(100), a[:]) # check store keys - group_key = meta_root + path + '.group.json' - array_key = meta_root + path + '/foo' + '.array.json' - expect = sorted([group_key, array_key, 'zarr.json']) + group_key = meta_root + path + ".group.json" + array_key = meta_root + path + "/foo" + ".array.json" + expect = sorted([group_key, array_key, "zarr.json"]) actual = sorted(store.keys()) assert expect == actual - expect = [data_root + path + '/foo/c' + str(i) for i in range(10)] - expect += ['zarr.json'] + expect = [data_root + path + "/foo/c" + str(i) for i in range(10)] + expect += ["zarr.json"] actual = sorted(chunk_store.keys()) assert expect == actual class TestGroupWithStoreCache(TestGroup): - @staticmethod def create_store(): store = LRUStoreCache(dict(), max_size=None) @@ -1543,26 +1556,25 @@ def create_store(): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestGroupV3WithStoreCache(TestGroupWithStoreCache, TestGroupV3): - @staticmethod def create_store(): store = LRUStoreCacheV3(dict(), max_size=None) return store, None -@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_group(zarr_version): # test the group() convenience function # basic usage if zarr_version == 2: g = group() - assert '' == g.path - assert '/' == g.name + assert "" == g.path + assert "/" == g.name else: - g = group(path='group1', zarr_version=zarr_version) - assert 'group1' == g.path - assert '/group1' == g.name + g = group(path="group1", zarr_version=zarr_version) + assert "group1" == g.path + assert "/group1" == g.name assert isinstance(g, Group) # usage with custom store @@ -1571,7 +1583,7 @@ def test_group(zarr_version): path = None else: store = KVStoreV3(dict()) - path = 'foo' + path = "foo" g = group(store=store, path=path) assert isinstance(g, Group) assert store is g.store @@ -1582,7 
+1594,7 @@ def test_group(zarr_version): path = None else: store = KVStoreV3(dict()) - path = 'foo' + path = "foo" init_array(store, path=path, shape=100, chunks=10) with pytest.raises(ValueError): group(store, path=path) @@ -1591,8 +1603,8 @@ def test_group(zarr_version): assert store is g.store -@pytest.mark.skipif(have_fsspec is False, reason='needs fsspec') -@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_group_writeable_mode(zarr_version, tmp_path): # Regression test for https://github.com/zarr-developers/zarr-python/issues/1353 import fsspec @@ -1602,179 +1614,179 @@ def test_group_writeable_mode(zarr_version, tmp_path): assert zg.store.map == store -@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_group(zarr_version): # test the open_group() convenience function - store = 'data/group.zarr' + store = "data/group.zarr" expected_store_type = DirectoryStore if zarr_version == 2 else DirectoryStoreV3 # mode == 'w' - path = None if zarr_version == 2 else 'group1' - g = open_group(store, path=path, mode='w', zarr_version=zarr_version) + path = None if zarr_version == 2 else "group1" + g = open_group(store, path=path, mode="w", zarr_version=zarr_version) assert isinstance(g, Group) assert isinstance(g.store, expected_store_type) assert 0 == len(g) - g.create_groups('foo', 'bar') + g.create_groups("foo", "bar") assert 2 == len(g) # mode in 'r', 'r+' - open_array('data/array.zarr', shape=100, chunks=10, mode='w') - for mode in 'r', 'r+': + open_array("data/array.zarr", shape=100, chunks=10, mode="w") + for mode in "r", "r+": with pytest.raises(ValueError): - open_group('doesnotexist', mode=mode) + open_group("doesnotexist", mode=mode) with pytest.raises(ValueError): - open_group('data/array.zarr', mode=mode) - g = open_group(store, mode='r') + open_group("data/array.zarr", mode=mode) + g = open_group(store, mode="r") assert isinstance(g, Group) assert 2 == len(g) with pytest.raises(PermissionError): - g.create_group('baz') - g = open_group(store, mode='r+') + g.create_group("baz") + g = open_group(store, mode="r+") assert isinstance(g, Group) assert 2 == len(g) - g.create_groups('baz', 'quux') + g.create_groups("baz", "quux") assert 4 == len(g) # mode == 'a' shutil.rmtree(store) - g = open_group(store, path=path, mode='a', zarr_version=zarr_version) + g = open_group(store, path=path, mode="a", zarr_version=zarr_version) assert isinstance(g, Group) assert isinstance(g.store, expected_store_type) assert 0 == len(g) - g.create_groups('foo', 'bar') + g.create_groups("foo", "bar") assert 2 == len(g) if zarr_version == 2: with pytest.raises(ValueError): - open_group('data/array.zarr', mode='a', zarr_version=zarr_version) + open_group("data/array.zarr", mode="a", zarr_version=zarr_version) else: # TODO, root: should this raise an error? 
- open_group('data/array.zarr', mode='a', zarr_version=zarr_version) + open_group("data/array.zarr", mode="a", zarr_version=zarr_version) # mode in 'w-', 'x' - for mode in 'w-', 'x': + for mode in "w-", "x": shutil.rmtree(store) g = open_group(store, path=path, mode=mode, zarr_version=zarr_version) assert isinstance(g, Group) assert isinstance(g.store, expected_store_type) assert 0 == len(g) - g.create_groups('foo', 'bar') + g.create_groups("foo", "bar") assert 2 == len(g) with pytest.raises(ValueError): open_group(store, path=path, mode=mode, zarr_version=zarr_version) if zarr_version == 2: with pytest.raises(ValueError): - open_group('data/array.zarr', mode=mode) + open_group("data/array.zarr", mode=mode) # open with path - g = open_group(store, path='foo/bar', zarr_version=zarr_version) + g = open_group(store, path="foo/bar", zarr_version=zarr_version) assert isinstance(g, Group) - assert 'foo/bar' == g.path + assert "foo/bar" == g.path -@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_group_completions(zarr_version): - path = None if zarr_version == 2 else 'group1' + path = None if zarr_version == 2 else "group1" g = group(path=path, zarr_version=zarr_version) d = dir(g) - assert 'foo' not in d - assert 'bar' not in d - assert 'baz' not in d - assert 'qux' not in d - assert 'xxx' not in d - assert 'yyy' not in d - assert 'zzz' not in d - assert '123' not in d - assert '456' not in d - g.create_groups('foo', 'bar', 'baz/qux', '123') - g.zeros('xxx', shape=100) - g.zeros('yyy', shape=100) - g.zeros('zzz', shape=100) - g.zeros('456', shape=100) + assert "foo" not in d + assert "bar" not in d + assert "baz" not in d + assert "qux" not in d + assert "xxx" not in d + assert "yyy" not in d + assert "zzz" not in d + assert "123" not in d + assert "456" not in d + g.create_groups("foo", "bar", "baz/qux", "123") + g.zeros("xxx", shape=100) + g.zeros("yyy", shape=100) + g.zeros("zzz", shape=100) + g.zeros("456", shape=100) d = dir(g) - assert 'foo' in d - assert 'bar' in d - assert 'baz' in d - assert 'qux' not in d - assert 'xxx' in d - assert 'yyy' in d - assert 'zzz' in d - assert '123' not in d # not valid identifier - assert '456' not in d # not valid identifier - - -@pytest.mark.parametrize('zarr_version', _VERSIONS) + assert "foo" in d + assert "bar" in d + assert "baz" in d + assert "qux" not in d + assert "xxx" in d + assert "yyy" in d + assert "zzz" in d + assert "123" not in d # not valid identifier + assert "456" not in d # not valid identifier + + +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_group_key_completions(zarr_version): - path = None if zarr_version == 2 else 'group1' + path = None if zarr_version == 2 else "group1" g = group(path=path, zarr_version=zarr_version) d = dir(g) # noinspection PyProtectedMember k = g._ipython_key_completions_() # none of these names should be an attribute - assert 'foo' not in d - assert 'bar' not in d - assert 'baz' not in d - assert 'qux' not in d - assert 'xxx' not in d - assert 'yyy' not in d - assert 'zzz' not in d - assert '123' not in d - assert '456' not in d - assert 'asdf;' not in d + assert "foo" not in d + assert "bar" not in d + assert "baz" not in d + assert "qux" not in d + assert "xxx" not in d + assert "yyy" not in d + assert "zzz" not in d + assert "123" not in d + assert "456" not in d + assert "asdf;" not in d # none of these names should be an item - assert 'foo' not in k - assert 'bar' not in k - assert 'baz' not in k - assert 'qux' not in k - 
assert 'xxx' not in k - assert 'yyy' not in k - assert 'zzz' not in k - assert '123' not in k - assert '456' not in k - assert 'asdf;' not in k - - g.create_groups('foo', 'bar', 'baz/qux', '123') - g.zeros('xxx', shape=100) - g.zeros('yyy', shape=100) - g.zeros('zzz', shape=100) - g.zeros('456', shape=100) + assert "foo" not in k + assert "bar" not in k + assert "baz" not in k + assert "qux" not in k + assert "xxx" not in k + assert "yyy" not in k + assert "zzz" not in k + assert "123" not in k + assert "456" not in k + assert "asdf;" not in k + + g.create_groups("foo", "bar", "baz/qux", "123") + g.zeros("xxx", shape=100) + g.zeros("yyy", shape=100) + g.zeros("zzz", shape=100) + g.zeros("456", shape=100) if zarr_version == 2: - g.zeros('asdf;', shape=100) + g.zeros("asdf;", shape=100) else: # cannot have ; in key name for v3 with pytest.raises(ValueError): - g.zeros('asdf;', shape=100) + g.zeros("asdf;", shape=100) d = dir(g) # noinspection PyProtectedMember k = g._ipython_key_completions_() - assert 'foo' in d - assert 'bar' in d - assert 'baz' in d - assert 'qux' not in d - assert 'xxx' in d - assert 'yyy' in d - assert 'zzz' in d - assert '123' not in d # not valid identifier - assert '456' not in d # not valid identifier + assert "foo" in d + assert "bar" in d + assert "baz" in d + assert "qux" not in d + assert "xxx" in d + assert "yyy" in d + assert "zzz" in d + assert "123" not in d # not valid identifier + assert "456" not in d # not valid identifier if zarr_version == 2: - assert 'asdf;' not in d # not valid identifier - - assert 'foo' in k - assert 'bar' in k - assert 'baz' in k - assert 'qux' not in k - assert 'xxx' in k - assert 'yyy' in k - assert 'zzz' in k - assert '123' in k - assert '456' in k + assert "asdf;" not in d # not valid identifier + + assert "foo" in k + assert "bar" in k + assert "baz" in k + assert "qux" not in k + assert "xxx" in k + assert "yyy" in k + assert "zzz" in k + assert "123" in k + assert "456" in k if zarr_version == 2: - assert 'asdf;' in k + assert "asdf;" in k def _check_tree(g, expect_bytes, expect_text): @@ -1788,72 +1800,88 @@ def _check_tree(g, expect_bytes, expect_text): isinstance(widget, ipytree.Tree) -@pytest.mark.parametrize('zarr_version', _VERSIONS) -@pytest.mark.parametrize('at_root', [False, True]) +@pytest.mark.parametrize("zarr_version", _VERSIONS) +@pytest.mark.parametrize("at_root", [False, True]) def test_tree(zarr_version, at_root): # setup - path = None if at_root else 'group1' + path = None if at_root else "group1" g1 = group(path=path, zarr_version=zarr_version) - g2 = g1.create_group('foo') - g3 = g1.create_group('bar') - g3.create_group('baz') - g5 = g3.create_group('quux') - g5.create_dataset('baz', shape=100, chunks=10) + g2 = g1.create_group("foo") + g3 = g1.create_group("bar") + g3.create_group("baz") + g5 = g3.create_group("quux") + g5.create_dataset("baz", shape=100, chunks=10) - tree_path = '/' if at_root else path + tree_path = "/" if at_root else path # test root group if zarr_version == 2: - expect_bytes = textwrap.dedent(f"""\ + expect_bytes = textwrap.dedent( + f"""\ {tree_path} +-- bar | +-- baz | +-- quux | +-- baz (100,) float64 - +-- foo""").encode() - expect_text = textwrap.dedent(f"""\ + +-- foo""" + ).encode() + expect_text = textwrap.dedent( + f"""\ {tree_path} ├── bar │ ├── baz │ └── quux │ └── baz (100,) float64 - └── foo""") + └── foo""" + ) else: # Almost the same as for v2, but has a path name and the # subgroups are not necessarily sorted alphabetically. 
- expect_bytes = textwrap.dedent(f"""\ + expect_bytes = textwrap.dedent( + f"""\ {tree_path} +-- foo +-- bar +-- baz +-- quux - +-- baz (100,) float64""").encode() - expect_text = textwrap.dedent(f"""\ + +-- baz (100,) float64""" + ).encode() + expect_text = textwrap.dedent( + f"""\ {tree_path} ├── foo └── bar ├── baz └── quux - └── baz (100,) float64""") + └── baz (100,) float64""" + ) _check_tree(g1, expect_bytes, expect_text) # test different group - expect_bytes = textwrap.dedent("""\ - foo""").encode() - expect_text = textwrap.dedent("""\ - foo""") + expect_bytes = textwrap.dedent( + """\ + foo""" + ).encode() + expect_text = textwrap.dedent( + """\ + foo""" + ) _check_tree(g2, expect_bytes, expect_text) # test different group - expect_bytes = textwrap.dedent("""\ + expect_bytes = textwrap.dedent( + """\ bar +-- baz +-- quux - +-- baz (100,) float64""").encode() - expect_text = textwrap.dedent("""\ + +-- baz (100,) float64""" + ).encode() + expect_text = textwrap.dedent( + """\ bar ├── baz └── quux - └── baz (100,) float64""") + └── baz (100,) float64""" + ) _check_tree(g3, expect_bytes, expect_text) @@ -1866,38 +1894,38 @@ def test_group_mismatched_store_versions(): chunk_store_v2 = KVStore(dict()) chunk_store_v3 = KVStoreV3(dict()) - init_group(store_v2, path='group1', chunk_store=chunk_store_v2) - init_group(store_v3, path='group1', chunk_store=chunk_store_v3) + init_group(store_v2, path="group1", chunk_store=chunk_store_v2) + init_group(store_v3, path="group1", chunk_store=chunk_store_v3) - g1_v3 = Group(store_v3, path='group1', read_only=True, chunk_store=chunk_store_v3) + g1_v3 = Group(store_v3, path="group1", read_only=True, chunk_store=chunk_store_v3) assert isinstance(g1_v3._store, KVStoreV3) - g1_v2 = Group(store_v2, path='group1', read_only=True, chunk_store=chunk_store_v2) + g1_v2 = Group(store_v2, path="group1", read_only=True, chunk_store=chunk_store_v2) assert isinstance(g1_v2._store, KVStore) # store and chunk_store must have the same zarr protocol version with pytest.raises(ValueError): - Group(store_v3, path='group1', read_only=False, chunk_store=chunk_store_v2) + Group(store_v3, path="group1", read_only=False, chunk_store=chunk_store_v2) with pytest.raises(ValueError): - Group(store_v2, path='group1', read_only=False, chunk_store=chunk_store_v3) + Group(store_v2, path="group1", read_only=False, chunk_store=chunk_store_v3) with pytest.raises(ValueError): - open_group(store_v2, path='group1', chunk_store=chunk_store_v3) + open_group(store_v2, path="group1", chunk_store=chunk_store_v3) with pytest.raises(ValueError): - open_group(store_v3, path='group1', chunk_store=chunk_store_v2) + open_group(store_v3, path="group1", chunk_store=chunk_store_v2) # raises Value if read_only and path is not a pre-existing group with pytest.raises(ValueError): - Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) + Group(store_v3, path="group2", read_only=True, chunk_store=chunk_store_v3) with pytest.raises(ValueError): - Group(store_v3, path='group2', read_only=True, chunk_store=chunk_store_v3) + Group(store_v3, path="group2", read_only=True, chunk_store=chunk_store_v3) -@pytest.mark.parametrize('zarr_version', _VERSIONS) +@pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_group_from_paths(zarr_version): """Verify zarr_version is applied to both the store and chunk_store.""" store = tempfile.mkdtemp() chunk_store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) atexit.register(atexit_rmtree, chunk_store) - path = 'g1' + path = "g1" g = 
open_group(store, path=path, chunk_store=chunk_store, zarr_version=zarr_version) assert g._store._store_version == g._chunk_store._store_version == zarr_version diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index 61e76c63da..8a34c1e715 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -51,22 +51,20 @@ def test_replace_ellipsis(): assert (slice(None), 0) == replace_ellipsis((slice(None), 0), (100, 100)) # 2D slice - assert ((slice(None), slice(None)) == - replace_ellipsis(Ellipsis, (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis(slice(None), (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis((slice(None), slice(None)), (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis((Ellipsis, slice(None)), (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis((slice(None), Ellipsis), (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis((slice(None), Ellipsis, slice(None)), (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis((Ellipsis, slice(None), slice(None)), (100, 100))) - assert ((slice(None), slice(None)) == - replace_ellipsis((slice(None), slice(None), Ellipsis), (100, 100))) + assert (slice(None), slice(None)) == replace_ellipsis(Ellipsis, (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis(slice(None), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis((slice(None), slice(None)), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis((Ellipsis, slice(None)), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis((slice(None), Ellipsis), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis( + (slice(None), Ellipsis, slice(None)), (100, 100) + ) + assert (slice(None), slice(None)) == replace_ellipsis( + (Ellipsis, slice(None), slice(None)), (100, 100) + ) + assert (slice(None), slice(None)) == replace_ellipsis( + (slice(None), slice(None), Ellipsis), (100, 100) + ) def test_get_basic_selection_0d(): @@ -87,25 +85,25 @@ def test_get_basic_selection_0d(): assert_array_equal(a, b) # test structured array - value = (b'aaa', 1, 4.2) - a = np.array(value, dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) + value = (b"aaa", 1, 4.2) + a = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) z = zarr.create(shape=a.shape, dtype=a.dtype, fill_value=None) z[()] = value assert_array_equal(a, z.get_basic_selection(Ellipsis)) assert_array_equal(a, z[...]) assert a[()] == z.get_basic_selection(()) assert a[()] == z[()] - assert b'aaa' == z.get_basic_selection((), fields='foo') - assert b'aaa' == z['foo'] - assert a[['foo', 'bar']] == z.get_basic_selection((), fields=['foo', 'bar']) - assert a[['foo', 'bar']] == z['foo', 'bar'] + assert b"aaa" == z.get_basic_selection((), fields="foo") + assert b"aaa" == z["foo"] + assert a[["foo", "bar"]] == z.get_basic_selection((), fields=["foo", "bar"]) + assert a[["foo", "bar"]] == z["foo", "bar"] # test out param b = np.zeros_like(a) z.get_basic_selection(Ellipsis, out=b) assert_array_equal(a, b) - c = np.zeros_like(a[['foo', 'bar']]) - z.get_basic_selection(Ellipsis, out=c, fields=['foo', 'bar']) - assert_array_equal(a[['foo', 'bar']], c) + c = np.zeros_like(a[["foo", "bar"]]) + z.get_basic_selection(Ellipsis, out=c, fields=["foo", "bar"]) + assert_array_equal(a[["foo", "bar"]], c) basic_selections_1d = [ @@ -175,8 +173,8 @@ def test_get_basic_selection_0d(): slice(-1, 0, -1), # bad stuff 2.3, - 'foo', - 
b'xxx', + "foo", + b"xxx", None, (0, 0), (slice(None), slice(None)), @@ -252,8 +250,8 @@ def test_get_basic_selection_1d(): basic_selections_2d_bad = [ # bad stuff 2.3, - 'foo', - b'xxx', + "foo", + b"xxx", None, (2.3, slice(None)), # only positive step supported @@ -300,71 +298,34 @@ def test_fancy_indexing_fallback_on_get_setitem(): [0, 0, 0, 1], ], ) - np.testing.assert_array_equal( - z[[1, 2, 3], [1, 2, 3]], 1 - ) + np.testing.assert_array_equal(z[[1, 2, 3], [1, 2, 3]], 1) # test broadcasting - np.testing.assert_array_equal( - z[1, [1, 2, 3]], [1, 0, 0] - ) + np.testing.assert_array_equal(z[1, [1, 2, 3]], [1, 0, 0]) # test 1D fancy indexing z2 = zarr.zeros(5) z2[[1, 2, 3]] = 1 - np.testing.assert_array_equal( - z2, [0, 1, 1, 1, 0] - ) + np.testing.assert_array_equal(z2, [0, 1, 1, 1, 0]) -@pytest.mark.parametrize("index,expected_result", - [ - # Single iterable of integers - ( - [0, 1], - [[0, 1, 2], - [3, 4, 5]] - ), - # List first, then slice - ( - ([0, 1], slice(None)), - [[0, 1, 2], - [3, 4, 5]] - ), - # List first, then slice - ( - ([0, 1], slice(1, None)), - [[1, 2], - [4, 5]] - ), - # Slice first, then list - ( - (slice(0, 2), [0, 2]), - [[0, 2], - [3, 5]] - ), - # Slices only - ( - (slice(0, 2), slice(0, 2)), - [[0, 1], - [3, 4]] - ), - # List with repeated index - ( - ([1, 0, 1], slice(1, None)), - [[4, 5], - [1, 2], - [4, 5]] - ), - # 1D indexing - ( - ([1, 0, 1]), - [ - [3, 4, 5], - [0, 1, 2], - [3, 4, 5] - ] - ) - - ]) +@pytest.mark.parametrize( + "index,expected_result", + [ + # Single iterable of integers + ([0, 1], [[0, 1, 2], [3, 4, 5]]), + # List first, then slice + (([0, 1], slice(None)), [[0, 1, 2], [3, 4, 5]]), + # List first, then slice + (([0, 1], slice(1, None)), [[1, 2], [4, 5]]), + # Slice first, then list + ((slice(0, 2), [0, 2]), [[0, 2], [3, 5]]), + # Slices only + ((slice(0, 2), slice(0, 2)), [[0, 1], [3, 4]]), + # List with repeated index + (([1, 0, 1], slice(1, None)), [[4, 5], [1, 2], [4, 5]]), + # 1D indexing + (([1, 0, 1]), [[3, 4, 5], [0, 1, 2], [3, 4, 5]]), + ], +) def test_orthogonal_indexing_fallback_on_getitem_2d(index, expected_result): """ Tests the orthogonal indexing fallback on __getitem__ for a 2D matrix. @@ -382,34 +343,19 @@ def test_orthogonal_indexing_fallback_on_getitem_2d(index, expected_result): np.testing.assert_array_equal(z[index], expected_result) -@pytest.mark.parametrize("index,expected_result", - [ - # Single iterable of integers - ( - [0, 1], - [[[0, 1, 2], - [3, 4, 5], - [6, 7, 8]], - [[9, 10, 11], - [12, 13, 14], - [15, 16, 17]]] - ), - # One slice, two integers - ( - (slice(0, 2), 1, 1), - [4, 13] - ), - # One integer, two slices - ( - (slice(0, 2), 1, slice(0, 2)), - [[3, 4], [12, 13]] - ), - # Two slices and a list - ( - (slice(0, 2), [1, 2], slice(0, 2)), - [[[3, 4], [6, 7]], [[12, 13], [15, 16]]] - ), - ]) +@pytest.mark.parametrize( + "index,expected_result", + [ + # Single iterable of integers + ([0, 1], [[[0, 1, 2], [3, 4, 5], [6, 7, 8]], [[9, 10, 11], [12, 13, 14], [15, 16, 17]]]), + # One slice, two integers + ((slice(0, 2), 1, 1), [4, 13]), + # One integer, two slices + ((slice(0, 2), 1, slice(0, 2)), [[3, 4], [12, 13]]), + # Two slices and a list + ((slice(0, 2), [1, 2], slice(0, 2)), [[[3, 4], [6, 7]], [[12, 13], [15, 16]]]), + ], +) def test_orthogonal_indexing_fallback_on_getitem_3d(index, expected_result): """ Tests the orthogonal indexing fallback on __getitem__ for a 3D matrix. 
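# NOTE (editorial sketch, not part of the patch): the hunks above and below
# reformat tests of zarr's indexing fallbacks on __getitem__/__setitem__. As
# the parametrized cases show, an integer list mixed with slices falls back to
# orthogonal (per-axis) selection, while multiple integer lists fall back to
# pointwise (coordinate) selection, as in numpy. A minimal illustration, with
# values lifted from the test cases and assuming the zarr v2 API:
#
#   import numpy as np
#   import zarr
#
#   z = zarr.array(np.arange(6).reshape(2, 3))  # [[0, 1, 2], [3, 4, 5]]
#   z[[0, 1], 1:]    # list + slice -> orthogonal: [[1, 2], [4, 5]]
#   z[:2, [0, 2]]    # slice + list -> orthogonal: [[0, 2], [3, 5]]
#
#   z2 = zarr.zeros((4, 4))
#   z2[[1, 2, 3], [1, 2, 3]] = 1  # two lists -> pointwise, like numpy: ones
#                                 # land only at (1, 1), (2, 2), (3, 3)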
@@ -439,36 +385,14 @@ def test_orthogonal_indexing_fallback_on_getitem_3d(index, expected_result): "index,expected_result", [ # Single iterable of integers - ( - [0, 1], - [ - [1, 1, 1], - [1, 1, 1], - [0, 0, 0] - ] - ), + ([0, 1], [[1, 1, 1], [1, 1, 1], [0, 0, 0]]), # List and slice combined - ( - ([0, 1], slice(1, 3)), - [[0, 1, 1], - [0, 1, 1], - [0, 0, 0]] - ), + (([0, 1], slice(1, 3)), [[0, 1, 1], [0, 1, 1], [0, 0, 0]]), # Index repetition is ignored on setitem - ( - ([0, 1, 1, 1, 1, 1, 1], slice(1, 3)), - [[0, 1, 1], - [0, 1, 1], - [0, 0, 0]] - ), + (([0, 1, 1, 1, 1, 1, 1], slice(1, 3)), [[0, 1, 1], [0, 1, 1], [0, 0, 0]]), # Slice with step - ( - ([0, 2], slice(None, None, 2)), - [[1, 0, 1], - [0, 0, 0], - [1, 0, 1]] - ) - ] + (([0, 2], slice(None, None, 2)), [[1, 0, 1], [0, 0, 0], [1, 0, 1]]), + ], ) def test_orthogonal_indexing_fallback_on_setitem_2d(index, expected_result): """ @@ -482,12 +406,8 @@ def test_orthogonal_indexing_fallback_on_setitem_2d(index, expected_result): z = zarr.array(a) z[index] = 1 a[index] = 1 - np.testing.assert_array_equal( - z, expected_result - ) - np.testing.assert_array_equal( - z, a, err_msg="Indexing disagrees with numpy" - ) + np.testing.assert_array_equal(z, expected_result) + np.testing.assert_array_equal(z, a, err_msg="Indexing disagrees with numpy") def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): @@ -495,15 +415,11 @@ def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): with pytest.raises(IndexError): z2[[1, 2, 3], [1, 2, 3]] = 2 with pytest.raises(IndexError): - np.testing.assert_array_equal( - z2[[1, 2, 3], [1, 2, 3]], 0 - ) + np.testing.assert_array_equal(z2[[1, 2, 3], [1, 2, 3]], 0) with pytest.raises(IndexError): z2[..., [1, 2, 3]] = 2 with pytest.raises(IndexError): - np.testing.assert_array_equal( - z2[..., [1, 2, 3]], 0 - ) + np.testing.assert_array_equal(z2[..., [1, 2, 3]], 0) def test_set_basic_selection_0d(): @@ -523,8 +439,8 @@ def test_set_basic_selection_0d(): assert_array_equal(v, z) # test structured array - value = (b'aaa', 1, 4.2) - v = np.array(value, dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) + value = (b"aaa", 1, 4.2) + v = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) a = np.zeros_like(v) z = zarr.create(shape=a.shape, dtype=a.dtype, fill_value=None) @@ -538,19 +454,19 @@ def test_set_basic_selection_0d(): z[...] 
= a assert_array_equal(a, z) # with fields - z.set_basic_selection(Ellipsis, v['foo'], fields='foo') - assert v['foo'] == z['foo'] - assert a['bar'] == z['bar'] - assert a['baz'] == z['baz'] - z['bar'] = v['bar'] - assert v['foo'] == z['foo'] - assert v['bar'] == z['bar'] - assert a['baz'] == z['baz'] + z.set_basic_selection(Ellipsis, v["foo"], fields="foo") + assert v["foo"] == z["foo"] + assert a["bar"] == z["bar"] + assert a["baz"] == z["baz"] + z["bar"] = v["bar"] + assert v["foo"] == z["foo"] + assert v["bar"] == z["bar"] + assert a["baz"] == z["baz"] # multiple field assignment not supported with pytest.raises(IndexError): - z.set_basic_selection(Ellipsis, v[['foo', 'bar']], fields=['foo', 'bar']) + z.set_basic_selection(Ellipsis, v[["foo", "bar"]], fields=["foo", "bar"]) with pytest.raises(IndexError): - z[..., 'foo', 'bar'] = v[['foo', 'bar']] + z[..., "foo", "bar"] = v[["foo", "bar"]] def _test_get_orthogonal_selection(a, z, selection): @@ -610,7 +526,6 @@ def test_get_orthogonal_selection_1d_int(): [0, 3, 10, -23, -12, -1], # explicit test not sorted [3, 105, 23, 127], - ] for selection in selections: _test_get_orthogonal_selection(a, z, selection) @@ -671,7 +586,7 @@ def test_get_orthogonal_selection_2d(): # integer arrays ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) - ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * .5), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) _test_get_orthogonal_selection_2d(a, z, ix0, ix1) ix0.sort() ix1.sort() @@ -738,14 +653,14 @@ def test_get_orthogonal_selection_3d(): # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) - ix1 = np.random.binomial(1, .5, size=a.shape[1]).astype(bool) - ix2 = np.random.binomial(1, .5, size=a.shape[2]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + ix2 = np.random.binomial(1, 0.5, size=a.shape[2]).astype(bool) _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2) # integer arrays ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) - ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * .5), replace=True) - ix2 = np.random.choice(a.shape[2], size=int(a.shape[2] * .5), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) + ix2 = np.random.choice(a.shape[2], size=int(a.shape[2] * 0.5), replace=True) _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2) ix0.sort() ix1.sort() @@ -846,12 +761,12 @@ def test_set_orthogonal_selection_2d(): # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) - ix1 = np.random.binomial(1, .5, size=a.shape[1]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1) # integer arrays ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) - ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * .5), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1) ix0.sort() ix1.sort() @@ -904,14 +819,14 @@ def test_set_orthogonal_selection_3d(): # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) - ix1 = np.random.binomial(1, .5, size=a.shape[1]).astype(bool) - ix2 = np.random.binomial(1, .5, size=a.shape[2]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + ix2 = np.random.binomial(1, 0.5, size=a.shape[2]).astype(bool) 
_test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) # integer arrays ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) - ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * .5), replace=True) - ix2 = np.random.choice(a.shape[2], size=int(a.shape[2] * .5), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) + ix2 = np.random.choice(a.shape[2], size=int(a.shape[2] * 0.5), replace=True) _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) # sorted increasing @@ -939,19 +854,13 @@ def test_orthogonal_indexing_fallback_on_get_setitem(): [0, 0, 0, 1], ], ) - np.testing.assert_array_equal( - z[[1, 2, 3], [1, 2, 3]], 1 - ) + np.testing.assert_array_equal(z[[1, 2, 3], [1, 2, 3]], 1) # test broadcasting - np.testing.assert_array_equal( - z[1, [1, 2, 3]], [1, 0, 0] - ) + np.testing.assert_array_equal(z[1, [1, 2, 3]], [1, 0, 0]) # test 1D fancy indexing z2 = zarr.zeros(5) z2[[1, 2, 3]] = 1 - np.testing.assert_array_equal( - z2, [0, 1, 1, 1, 0] - ) + np.testing.assert_array_equal(z2, [0, 1, 1, 1, 0]) def _test_get_coordinate_selection(a, z, selection): @@ -969,8 +878,8 @@ def _test_get_coordinate_selection(a, z, selection): Ellipsis, # bad stuff 2.3, - 'foo', - b'xxx', + "foo", + b"xxx", None, (0, 0), (slice(None), slice(None)), @@ -1060,10 +969,8 @@ def test_get_coordinate_selection_2d(): _test_get_coordinate_selection(a, z, (ix0, ix1)) # multi-dimensional selection - ix0 = np.array([[1, 1, 2], - [2, 2, 5]]) - ix1 = np.array([[1, 3, 2], - [1, 0, 0]]) + ix0 = np.array([[1, 1, 2], [2, 2, 5]]) + ix1 = np.array([[1, 3, 2], [1, 0, 0]]) _test_get_coordinate_selection(a, z, (ix0, ix1)) with pytest.raises(IndexError): @@ -1146,10 +1053,8 @@ def test_set_coordinate_selection_2d(): _test_set_coordinate_selection(v, a, z, selection) # multi-dimensional selection - ix0 = np.array([[1, 2, 3], - [4, 5, 6]]) - ix1 = np.array([[1, 3, 2], - [2, 0, 5]]) + ix0 = np.array([[1, 2, 3], [4, 5, 6]]) + ix1 = np.array([[1, 3, 2], [2, 0, 5]]) _test_set_coordinate_selection(v, a, z, (ix0, ix1)) @@ -1196,12 +1101,12 @@ def _test_get_block_selection(a, z, selection, expected_idx): slice(3, 8, 2), # bad stuff 2.3, - 'foo', - b'xxx', + "foo", + b"xxx", None, (0, 0), (slice(None), slice(None)), - [0, 5, 3] + [0, 5, 3], ] @@ -1211,8 +1116,7 @@ def test_get_block_selection_1d(): z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) z[:] = a - for selection, expected_idx in \ - zip(block_selections_1d, block_selections_1d_array_projection): + for selection, expected_idx in zip(block_selections_1d, block_selections_1d_array_projection): _test_get_block_selection(a, z, selection, expected_idx) bad_selections = block_selections_1d_bad + [ @@ -1264,8 +1168,7 @@ def test_get_block_selection_2d(): z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) z[:] = a - for selection, expected_idx in \ - zip(block_selections_2d, block_selections_2d_array_projection): + for selection, expected_idx in zip(block_selections_2d, block_selections_2d_array_projection): _test_get_block_selection(a, z, selection, expected_idx) with pytest.raises(IndexError): @@ -1300,8 +1203,7 @@ def test_set_block_selection_1d(): a = np.empty(v.shape, dtype=v.dtype) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) - for selection, expected_idx in \ - zip(block_selections_1d, block_selections_1d_array_projection): + for selection, expected_idx in zip(block_selections_1d, block_selections_1d_array_projection): _test_set_block_selection(v, a, z, selection, expected_idx) for 
selection in block_selections_1d_bad: @@ -1317,8 +1219,7 @@ def test_set_block_selection_2d(): a = np.empty(v.shape, dtype=v.dtype) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) - for selection, expected_idx in \ - zip(block_selections_2d, block_selections_2d_array_projection): + for selection, expected_idx in zip(block_selections_2d, block_selections_2d_array_projection): _test_set_block_selection(v, a, z, selection, expected_idx) with pytest.raises(IndexError): @@ -1347,8 +1248,8 @@ def _test_get_mask_selection(a, z, selection): Ellipsis, # bad stuff 2.3, - 'foo', - b'xxx', + "foo", + b"xxx", None, (0, 0), (slice(None), slice(None)), @@ -1478,7 +1379,7 @@ def test_get_selection_out(): # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) - ix1 = np.random.binomial(1, .5, size=a.shape[1]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) selections = [ # index both axes with array (ix0, ix1), @@ -1526,22 +1427,20 @@ def test_get_selection_out(): def test_get_selections_with_fields(): - a = [('aaa', 1, 4.2), - ('bbb', 2, 8.4), - ('ccc', 3, 12.6)] - a = np.array(a, dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) + a = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] + a = np.array(a, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) z = zarr.create(shape=a.shape, chunks=2, dtype=a.dtype, fill_value=None) z[:] = a fields_fixture = [ - 'foo', - ['foo'], - ['foo', 'bar'], - ['foo', 'baz'], - ['bar', 'baz'], - ['foo', 'bar', 'baz'], - ['bar', 'foo'], - ['baz', 'bar', 'foo'], + "foo", + ["foo"], + ["foo", "bar"], + ["foo", "baz"], + ["bar", "baz"], + ["foo", "bar", "baz"], + ["bar", "foo"], + ["baz", "bar", "foo"], ] for fields in fields_fixture: @@ -1629,30 +1528,28 @@ def test_get_selections_with_fields(): # missing/bad fields with pytest.raises(IndexError): - z.get_basic_selection(Ellipsis, fields=['notafield']) + z.get_basic_selection(Ellipsis, fields=["notafield"]) with pytest.raises(IndexError): z.get_basic_selection(Ellipsis, fields=slice(None)) def test_set_selections_with_fields(): - v = [('aaa', 1, 4.2), - ('bbb', 2, 8.4), - ('ccc', 3, 12.6)] - v = np.array(v, dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) + v = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] + v = np.array(v, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) a = np.empty_like(v) z = zarr.empty_like(v, chunks=2) fields_fixture = [ - 'foo', + "foo", [], - ['foo'], - ['foo', 'bar'], - ['foo', 'baz'], - ['bar', 'baz'], - ['foo', 'bar', 'baz'], - ['bar', 'foo'], - ['baz', 'bar', 'foo'], + ["foo"], + ["foo", "bar"], + ["foo", "baz"], + ["bar", "baz"], + ["foo", "bar", "baz"], + ["bar", "foo"], + ["baz", "bar", "foo"], ] for fields in fields_fixture: @@ -1682,8 +1579,8 @@ def test_set_selections_with_fields(): key = fields # setup expectation - a[:] = ('', 0, 0) - z[:] = ('', 0, 0) + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) assert_array_equal(a, z[:]) a[key] = v[key] # total selection @@ -1691,31 +1588,31 @@ def test_set_selections_with_fields(): assert_array_equal(a, z[:]) # basic selection with slice - a[:] = ('', 0, 0) - z[:] = ('', 0, 0) + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) a[key][0:2] = v[key][0:2] z.set_basic_selection(slice(0, 2), v[key][0:2], fields=fields) assert_array_equal(a, z[:]) # orthogonal selection - a[:] = ('', 0, 0) - z[:] = ('', 0, 0) + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) ix = [0, 2] a[key][ix] = v[key][ix] z.set_orthogonal_selection(ix, v[key][ix], 
fields=fields) assert_array_equal(a, z[:]) # coordinate selection - a[:] = ('', 0, 0) - z[:] = ('', 0, 0) + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) ix = [0, 2] a[key][ix] = v[key][ix] z.set_coordinate_selection(ix, v[key][ix], fields=fields) assert_array_equal(a, z[:]) # mask selection - a[:] = ('', 0, 0) - z[:] = ('', 0, 0) + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) ix = [True, False, True] a[key][ix] = v[key][ix] z.set_mask_selection(ix, v[key][ix], fields=fields) @@ -1823,17 +1720,24 @@ def test_numpy_int_indexing(): # 1D test cases ((1070,), (50,), [("__getitem__", (slice(200, 400),))]), ((1070,), (50,), [("__getitem__", (slice(200, 400, 100),))]), - ((1070,), (50,), [ - ("__getitem__", (slice(200, 400),)), - ("__setitem__", (slice(200, 400, 100),)), - ]), - + ( + (1070,), + (50,), + [ + ("__getitem__", (slice(200, 400),)), + ("__setitem__", (slice(200, 400, 100),)), + ], + ), # 2D test cases - ((40, 50), (5, 8), [ - ("__getitem__", (slice(6, 37, 13), (slice(4, 10)))), - ("__setitem__", (slice(None), (slice(None)))), - ]), - ] + ( + (40, 50), + (5, 8), + [ + ("__getitem__", (slice(6, 37, 13), (slice(4, 10)))), + ("__setitem__", (slice(None), (slice(None)))), + ], + ), + ], ) def test_accessed_chunks(shape, chunks, ops): # Test that only the required chunks are accessed during basic selection operations @@ -1881,9 +1785,8 @@ def test_accessed_chunks(shape, chunks, ops): # don't determine if the chunk was actually partial here, just that the # counts are consistent that this might have happened if optype == "__setitem__": - assert ( - ("__getitem__", ci) not in delta_counts or - delta_counts.pop(("__getitem__", ci)) == 1 - ) + assert ("__getitem__", ci) not in delta_counts or delta_counts.pop( + ("__getitem__", ci) + ) == 1 # Check that no other chunks were accessed assert len(delta_counts) == 0 diff --git a/zarr/tests/test_info.py b/zarr/tests/test_info.py index 434d19d1f7..7fb6feb11b 100644 --- a/zarr/tests/test_info.py +++ b/zarr/tests/test_info.py @@ -5,22 +5,32 @@ from zarr.util import InfoReporter -@pytest.mark.parametrize('array_size', [10, 15000]) +@pytest.mark.parametrize("array_size", [10, 15000]) def test_info(array_size): # setup - g = zarr.group(store=dict(), chunk_store=dict(), - synchronizer=zarr.ThreadSynchronizer()) - g.create_group('foo') - z = g.zeros('bar', shape=array_size, filters=[numcodecs.Adler32()]) + g = zarr.group(store=dict(), chunk_store=dict(), synchronizer=zarr.ThreadSynchronizer()) + g.create_group("foo") + z = g.zeros("bar", shape=array_size, filters=[numcodecs.Adler32()]) # test group info items = g.info_items() keys = sorted([k for k, _ in items]) - expected_keys = sorted([ - 'Type', 'Read-only', 'Synchronizer type', 'Store type', 'Chunk store type', - 'No. members', 'No. arrays', 'No. groups', 'Arrays', 'Groups', 'Name' - ]) + expected_keys = sorted( + [ + "Type", + "Read-only", + "Synchronizer type", + "Store type", + "Chunk store type", + "No. members", + "No. arrays", + "No. groups", + "Arrays", + "Groups", + "Name", + ] + ) assert expected_keys == keys # can also get a string representation of info via the info attribute @@ -30,11 +40,26 @@ def test_info(array_size): # test array info items = z.info_items() keys = sorted([k for k, _ in items]) - expected_keys = sorted([ - 'Type', 'Data type', 'Shape', 'Chunk shape', 'Order', 'Read-only', 'Filter [0]', - 'Compressor', 'Synchronizer type', 'Store type', 'Chunk store type', 'No. bytes', - 'No. 
bytes stored', 'Storage ratio', 'Chunks initialized', 'Name' - ]) + expected_keys = sorted( + [ + "Type", + "Data type", + "Shape", + "Chunk shape", + "Order", + "Read-only", + "Filter [0]", + "Compressor", + "Synchronizer type", + "Store type", + "Chunk store type", + "No. bytes", + "No. bytes stored", + "Storage ratio", + "Chunks initialized", + "Name", + ] + ) assert expected_keys == keys # can also get a string representation of info via the info attribute diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py index a78375986e..db50560c8e 100644 --- a/zarr/tests/test_meta.py +++ b/zarr/tests/test_meta.py @@ -7,18 +7,27 @@ from zarr.codecs import Blosc, Delta, Pickle, Zlib from zarr.errors import MetadataError -from zarr.meta import (ZARR_FORMAT, decode_array_metadata, decode_dtype, - decode_group_metadata, encode_array_metadata, - encode_dtype, encode_fill_value, decode_fill_value, - get_extended_dtype_info, _v3_complex_types, - _v3_datetime_types, _default_entry_point_metadata_v3, - Metadata3) +from zarr.meta import ( + ZARR_FORMAT, + decode_array_metadata, + decode_dtype, + decode_group_metadata, + encode_array_metadata, + encode_dtype, + encode_fill_value, + decode_fill_value, + get_extended_dtype_info, + _v3_complex_types, + _v3_datetime_types, + _default_entry_point_metadata_v3, + Metadata3, +) from zarr.util import normalize_dtype, normalize_fill_value def assert_json_equal(expect, actual): if isinstance(actual, bytes): - actual = str(actual, 'ascii') + actual = str(actual, "ascii") ej = json.loads(expect) aj = json.loads(actual) assert ej == aj @@ -29,14 +38,15 @@ def test_encode_decode_array_1(): meta = dict( shape=(100,), chunks=(10,), - dtype=np.dtype('U4', 'U4", " 0: @@ -1399,8 +1416,7 @@ def s3(request): pass timeout -= 0.1 # pragma: no cover time.sleep(0.1) # pragma: no cover - s3so = dict(client_kwargs={'endpoint_url': endpoint_uri}, - use_listings_cache=False) + s3so = dict(client_kwargs={"endpoint_url": endpoint_uri}, use_listings_cache=False) s3 = s3fs.S3FileSystem(anon=False, **s3so) s3.mkdir("test") request.cls.s3so = s3so @@ -1410,7 +1426,6 @@ def s3(request): class TestNestedDirectoryStore(TestDirectoryStore): - def create_store(self, normalize_keys=False, **kwargs): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) @@ -1425,23 +1440,23 @@ def test_init_array(self): # check metadata assert array_meta_key in store meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta['zarr_format'] - assert (1000,) == meta['shape'] - assert (100,) == meta['chunks'] - assert np.dtype(None) == meta['dtype'] - assert meta['dimension_separator'] == "/" + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + assert meta["dimension_separator"] == "/" def test_chunk_nesting(self): store = self.create_store() # any path where last segment looks like a chunk key gets special handling - store[self.root + '0.0'] = b'xxx' - assert b'xxx' == store[self.root + '0.0'] + store[self.root + "0.0"] = b"xxx" + assert b"xxx" == store[self.root + "0.0"] # assert b'xxx' == store['0/0'] - store[self.root + 'foo/10.20.30'] = b'yyy' - assert b'yyy' == store[self.root + 'foo/10.20.30'] + store[self.root + "foo/10.20.30"] = b"yyy" + assert b"yyy" == store[self.root + "foo/10.20.30"] # assert b'yyy' == store['foo/10/20/30'] - store[self.root + '42'] = b'zzz' - assert b'zzz' == store[self.root + '42'] + store[self.root + "42"] = b"zzz" + assert 
b"zzz" == store[self.root + "42"] def test_listdir(self): store = self.create_store() @@ -1452,29 +1467,22 @@ def test_listdir(self): class TestNestedDirectoryStoreNone: - def test_value_error(self): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - store = NestedDirectoryStore( - path, normalize_keys=True, - dimension_separator=None) + store = NestedDirectoryStore(path, normalize_keys=True, dimension_separator=None) assert store._dimension_separator == "/" class TestNestedDirectoryStoreWithWrongValue: - def test_value_error(self): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) with pytest.raises(ValueError): - NestedDirectoryStore( - path, normalize_keys=True, - dimension_separator=".") + NestedDirectoryStore(path, normalize_keys=True, dimension_separator=".") class TestN5Store(TestNestedDirectoryStore): - def create_store(self, normalize_keys=False): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) @@ -1486,29 +1494,29 @@ def test_equal(self): store_b = N5Store(store_a.path) assert store_a == store_b - @pytest.mark.parametrize('zarr_meta_key', ['.zarray', '.zattrs', '.zgroup']) + @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) def test_del_zarr_meta_key(self, zarr_meta_key): store = self.create_store() - store[n5_attrs_key] = json_dumps({'foo': 'bar'}) + store[n5_attrs_key] = json_dumps({"foo": "bar"}) del store[zarr_meta_key] assert n5_attrs_key not in store def test_chunk_nesting(self): store = self.create_store() - store['0.0'] = b'xxx' - assert '0.0' in store - assert b'xxx' == store['0.0'] + store["0.0"] = b"xxx" + assert "0.0" in store + assert b"xxx" == store["0.0"] # assert b'xxx' == store['0/0'] - store['foo/10.20.30'] = b'yyy' - assert 'foo/10.20.30' in store - assert b'yyy' == store['foo/10.20.30'] + store["foo/10.20.30"] = b"yyy" + assert "foo/10.20.30" in store + assert b"yyy" == store["foo/10.20.30"] # N5 reverses axis order - assert b'yyy' == store['foo/30/20/10'] - del store['foo/10.20.30'] - assert 'foo/30/20/10' not in store - store['42'] = b'zzz' - assert '42' in store - assert b'zzz' == store['42'] + assert b"yyy" == store["foo/30/20/10"] + del store["foo/10.20.30"] + assert "foo/30/20/10" not in store + store["42"] = b"zzz" + assert "42" in store + assert b"zzz" == store["42"] def test_init_array(self): store = self.create_store() @@ -1517,83 +1525,85 @@ def test_init_array(self): # check metadata assert array_meta_key in store meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta['zarr_format'] - assert (1000,) == meta['shape'] - assert (100,) == meta['chunks'] - assert np.dtype(None) == meta['dtype'] + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] # N5Store wraps the actual compressor - compressor_config = meta['compressor']['compressor_config'] + compressor_config = meta["compressor"]["compressor_config"] assert default_compressor.get_config() == compressor_config # N5Store always has a fill value of 0 - assert meta['fill_value'] == 0 - assert meta['dimension_separator'] == '.' + assert meta["fill_value"] == 0 + assert meta["dimension_separator"] == "." 
# Top-level groups AND arrays should have # the n5 keyword in metadata raw_n5_meta = json.loads(store[n5_attrs_key]) - assert raw_n5_meta.get('n5', None) == N5_FORMAT + assert raw_n5_meta.get("n5", None) == N5_FORMAT def test_init_array_path(self): - path = 'foo/bar' + path = "foo/bar" store = self.create_store() init_array(store, shape=1000, chunks=100, path=path) # check metadata - key = path + '/' + array_meta_key + key = path + "/" + array_meta_key assert key in store meta = store._metadata_class.decode_array_metadata(store[key]) - assert ZARR_FORMAT == meta['zarr_format'] - assert (1000,) == meta['shape'] - assert (100,) == meta['chunks'] - assert np.dtype(None) == meta['dtype'] + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] # N5Store wraps the actual compressor - compressor_config = meta['compressor']['compressor_config'] + compressor_config = meta["compressor"]["compressor_config"] assert default_compressor.get_config() == compressor_config # N5Store always has a fill value of 0 - assert meta['fill_value'] == 0 + assert meta["fill_value"] == 0 def test_init_array_compat(self): store = self.create_store() - init_array(store, shape=1000, chunks=100, compressor='none') + init_array(store, shape=1000, chunks=100, compressor="none") meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) # N5Store wraps the actual compressor - compressor_config = meta['compressor']['compressor_config'] + compressor_config = meta["compressor"]["compressor_config"] assert compressor_config is None def test_init_array_overwrite(self): - self._test_init_array_overwrite('C') + self._test_init_array_overwrite("C") def test_init_array_overwrite_path(self): - self._test_init_array_overwrite_path('C') + self._test_init_array_overwrite_path("C") def test_init_array_overwrite_chunk_store(self): - self._test_init_array_overwrite_chunk_store('C') + self._test_init_array_overwrite_chunk_store("C") def test_init_group_overwrite(self): - self._test_init_group_overwrite('C') + self._test_init_group_overwrite("C") def test_init_group_overwrite_path(self): - self._test_init_group_overwrite_path('C') + self._test_init_group_overwrite_path("C") def test_init_group_overwrite_chunk_store(self): - self._test_init_group_overwrite_chunk_store('C') + self._test_init_group_overwrite_chunk_store("C") def test_init_group(self): store = self.create_store() init_group(store) - store['.zattrs'] = json_dumps({'foo': 'bar'}) + store[".zattrs"] = json_dumps({"foo": "bar"}) # check metadata assert group_meta_key in store assert group_meta_key in store.listdir() - assert group_meta_key in store.listdir('') + assert group_meta_key in store.listdir("") meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) - assert ZARR_FORMAT == meta['zarr_format'] + assert ZARR_FORMAT == meta["zarr_format"] def test_filters(self): - all_filters, all_errors = zip(*[ - (None, does_not_raise()), - ([], does_not_raise()), - ([AsType('f4', 'f8')], pytest.raises(ValueError)), - ]) + all_filters, all_errors = zip( + *[ + (None, does_not_raise()), + ([], does_not_raise()), + ([AsType("f4", "f8")], pytest.raises(ValueError)), + ] + ) for filters, error in zip(all_filters, all_errors): store = self.create_store() with error: @@ -1620,29 +1630,29 @@ def test_equal(self): # be run by making TestN5FSStore inherit from both TestFSStore and # TestN5Store, but a direct copy is arguably more explicit. 
- @pytest.mark.parametrize('zarr_meta_key', ['.zarray', '.zattrs', '.zgroup']) + @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) def test_del_zarr_meta_key(self, zarr_meta_key): store = self.create_store() - store[n5_attrs_key] = json_dumps({'foo': 'bar'}) + store[n5_attrs_key] = json_dumps({"foo": "bar"}) del store[zarr_meta_key] assert n5_attrs_key not in store def test_chunk_nesting(self): store = self.create_store() - store['0.0'] = b'xxx' - assert '0.0' in store - assert b'xxx' == store['0.0'] + store["0.0"] = b"xxx" + assert "0.0" in store + assert b"xxx" == store["0.0"] # assert b'xxx' == store['0/0'] - store['foo/10.20.30'] = b'yyy' - assert 'foo/10.20.30' in store - assert b'yyy' == store['foo/10.20.30'] + store["foo/10.20.30"] = b"yyy" + assert "foo/10.20.30" in store + assert b"yyy" == store["foo/10.20.30"] # N5 reverses axis order - assert b'yyy' == store['foo/30/20/10'] - del store['foo/10.20.30'] - assert 'foo/30/20/10' not in store - store['42'] = b'zzz' - assert '42' in store - assert b'zzz' == store['42'] + assert b"yyy" == store["foo/30/20/10"] + del store["foo/10.20.30"] + assert "foo/30/20/10" not in store + store["42"] = b"zzz" + assert "42" in store + assert b"zzz" == store["42"] def test_init_array(self): store = self.create_store() @@ -1651,88 +1661,90 @@ def test_init_array(self): # check metadata assert array_meta_key in store meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta['zarr_format'] - assert (1000,) == meta['shape'] - assert (100,) == meta['chunks'] - assert np.dtype(None) == meta['dtype'] + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] # N5Store wraps the actual compressor - compressor_config = meta['compressor']['compressor_config'] + compressor_config = meta["compressor"]["compressor_config"] assert default_compressor.get_config() == compressor_config # N5Store always has a fill value of 0 - assert meta['fill_value'] == 0 - assert meta['dimension_separator'] == '.' + assert meta["fill_value"] == 0 + assert meta["dimension_separator"] == "." 
# Top-level groups AND arrays should have # the n5 keyword in metadata raw_n5_meta = json.loads(store[n5_attrs_key]) - assert raw_n5_meta.get('n5', None) == N5_FORMAT + assert raw_n5_meta.get("n5", None) == N5_FORMAT def test_init_array_path(self): - path = 'foo/bar' + path = "foo/bar" store = self.create_store() init_array(store, shape=1000, chunks=100, path=path) # check metadata - key = path + '/' + array_meta_key + key = path + "/" + array_meta_key assert key in store meta = store._metadata_class.decode_array_metadata(store[key]) - assert ZARR_FORMAT == meta['zarr_format'] - assert (1000,) == meta['shape'] - assert (100,) == meta['chunks'] - assert np.dtype(None) == meta['dtype'] + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] # N5Store wraps the actual compressor - compressor_config = meta['compressor']['compressor_config'] + compressor_config = meta["compressor"]["compressor_config"] assert default_compressor.get_config() == compressor_config # N5Store always has a fill value of 0 - assert meta['fill_value'] == 0 + assert meta["fill_value"] == 0 def test_init_array_compat(self): store = self.create_store() - init_array(store, shape=1000, chunks=100, compressor='none') + init_array(store, shape=1000, chunks=100, compressor="none") meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) # N5Store wraps the actual compressor - compressor_config = meta['compressor']['compressor_config'] + compressor_config = meta["compressor"]["compressor_config"] assert compressor_config is None def test_init_array_overwrite(self): - self._test_init_array_overwrite('C') + self._test_init_array_overwrite("C") def test_init_array_overwrite_path(self): - self._test_init_array_overwrite_path('C') + self._test_init_array_overwrite_path("C") def test_init_array_overwrite_chunk_store(self): - self._test_init_array_overwrite_chunk_store('C') + self._test_init_array_overwrite_chunk_store("C") def test_init_group_overwrite(self): - self._test_init_group_overwrite('C') + self._test_init_group_overwrite("C") def test_init_group_overwrite_path(self): - self._test_init_group_overwrite_path('C') + self._test_init_group_overwrite_path("C") def test_init_group_overwrite_chunk_store(self): - self._test_init_group_overwrite_chunk_store('C') + self._test_init_group_overwrite_chunk_store("C") def test_dimension_separator(self): - with pytest.warns(UserWarning, match='dimension_separator'): - self.create_store(dimension_separator='/') + with pytest.warns(UserWarning, match="dimension_separator"): + self.create_store(dimension_separator="/") def test_init_group(self): store = self.create_store() init_group(store) - store['.zattrs'] = json_dumps({'foo': 'bar'}) + store[".zattrs"] = json_dumps({"foo": "bar"}) # check metadata assert group_meta_key in store assert group_meta_key in store.listdir() - assert group_meta_key in store.listdir('') + assert group_meta_key in store.listdir("") meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) - assert ZARR_FORMAT == meta['zarr_format'] + assert ZARR_FORMAT == meta["zarr_format"] def test_filters(self): - all_filters, all_errors = zip(*[ - (None, does_not_raise()), - ([], does_not_raise()), - ([AsType('f4', 'f8')], pytest.raises(ValueError)), - ]) + all_filters, all_errors = zip( + *[ + (None, does_not_raise()), + ([], does_not_raise()), + ([AsType("f4", "f8")], pytest.raises(ValueError)), + ] + ) for filters, error in zip(all_filters, 
all_errors): store = self.create_store() with error: @@ -1741,13 +1753,13 @@ def test_filters(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestNestedFSStore(TestNestedDirectoryStore): - def create_store(self, normalize_keys=False, path=None, **kwargs): if path is None: path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - store = FSStore(path, normalize_keys=normalize_keys, - dimension_separator='/', auto_mkdir=True, **kwargs) + store = FSStore( + path, normalize_keys=normalize_keys, dimension_separator="/", auto_mkdir=True, **kwargs + ) return store def test_numbered_groups(self): @@ -1756,7 +1768,7 @@ def test_numbered_groups(self): # Create an array store = self.create_store() group = zarr.group(store=store) - arr = group.create_dataset('0', shape=(10, 10)) + arr = group.create_dataset("0", shape=(10, 10)) arr[1] = 1 # Read it back @@ -1765,7 +1777,6 @@ def test_numbered_groups(self): class TestTempStore(StoreTests): - def create_store(self, **kwargs): skip_if_nested_chunks(**kwargs) return TempStore(**kwargs) @@ -1780,113 +1791,111 @@ class TestZipStore(StoreTests): ZipStoreClass = ZipStore def create_store(self, **kwargs): - path = mktemp(suffix='.zip') + path = mktemp(suffix=".zip") atexit.register(os.remove, path) - store = ZipStore(path, mode='w', **kwargs) + store = ZipStore(path, mode="w", **kwargs) return store def test_mode(self): - with self.ZipStoreClass('data/store.zip', mode='w') as store: - store[self.root + 'foo'] = b'bar' - store = self.ZipStoreClass('data/store.zip', mode='r') + with self.ZipStoreClass("data/store.zip", mode="w") as store: + store[self.root + "foo"] = b"bar" + store = self.ZipStoreClass("data/store.zip", mode="r") with pytest.raises(PermissionError): - store[self.root + 'foo'] = b'bar' + store[self.root + "foo"] = b"bar" with pytest.raises(PermissionError): store.clear() def test_flush(self): - store = self.ZipStoreClass('data/store.zip', mode='w') - store[self.root + 'foo'] = b'bar' + store = self.ZipStoreClass("data/store.zip", mode="w") + store[self.root + "foo"] = b"bar" store.flush() - assert store[self.root + 'foo'] == b'bar' + assert store[self.root + "foo"] == b"bar" store.close() - store = self.ZipStoreClass('data/store.zip', mode='r') + store = self.ZipStoreClass("data/store.zip", mode="r") store.flush() # no-op def test_context_manager(self): with self.create_store() as store: - store[self.root + 'foo'] = b'bar' - store[self.root + 'baz'] = b'qux' + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"qux" assert 2 == len(store) def test_pop(self): # override because not implemented store = self.create_store() - store[self.root + 'foo'] = b'bar' + store[self.root + "foo"] = b"bar" with pytest.raises(NotImplementedError): - store.pop(self.root + 'foo') + store.pop(self.root + "foo") def test_popitem(self): # override because not implemented store = self.create_store() - store[self.root + 'foo'] = b'bar' + store[self.root + "foo"] = b"bar" with pytest.raises(NotImplementedError): store.popitem() def test_permissions(self): - store = self.ZipStoreClass('data/store.zip', mode='w') - foo_key = 'foo' if self.version == 2 else self.root + 'foo' + store = self.ZipStoreClass("data/store.zip", mode="w") + foo_key = "foo" if self.version == 2 else self.root + "foo" # TODO: cannot provide key ending in / for v3 # how to create an empty folder in that case? 
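        # Background for the TODO above: a ZIP archive represents a "folder"
        # as an entry whose name ends with "/" and whose payload is empty,
        # which is why the v2 branch below stores b"" under "baz/"; since v3
        # keys may not end with "/", there is no equivalent way to create an
        # empty directory entry there.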
- baz_key = 'baz/' if self.version == 2 else self.root + 'baz' - store[foo_key] = b'bar' - store[baz_key] = b'' + baz_key = "baz/" if self.version == 2 else self.root + "baz" + store[foo_key] = b"bar" + store[baz_key] = b"" store.flush() store.close() - z = ZipFile('data/store.zip', 'r') + z = ZipFile("data/store.zip", "r") info = z.getinfo(foo_key) perm = oct(info.external_attr >> 16) - assert perm == '0o644' + assert perm == "0o644" info = z.getinfo(baz_key) perm = oct(info.external_attr >> 16) # only for posix platforms - if os.name == 'posix': + if os.name == "posix": if self.version == 2: - assert perm == '0o40775' + assert perm == "0o40775" else: # baz/ on v2, but baz on v3, so not a directory - assert perm == '0o644' + assert perm == "0o644" z.close() def test_store_and_retrieve_ndarray(self): - store = ZipStore('data/store.zip') + store = ZipStore("data/store.zip") x = np.array([[1, 2], [3, 4]]) - store['foo'] = x - y = np.frombuffer(store['foo'], dtype=x.dtype).reshape(x.shape) + store["foo"] = x + y = np.frombuffer(store["foo"], dtype=x.dtype).reshape(x.shape) assert np.array_equiv(y, x) class TestDBMStore(StoreTests): - def create_store(self, dimension_separator=None): - path = mktemp(suffix='.anydbm') - atexit.register(atexit_rmglob, path + '*') + path = mktemp(suffix=".anydbm") + atexit.register(atexit_rmglob, path + "*") # create store using default dbm implementation - store = DBMStore(path, flag='n', dimension_separator=dimension_separator) + store = DBMStore(path, flag="n", dimension_separator=dimension_separator) return store def test_context_manager(self): with self.create_store() as store: - store[self.root + 'foo'] = b'bar' - store[self.root + 'baz'] = b'qux' + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"qux" assert 2 == len(store) class TestDBMStoreDumb(TestDBMStore): - def create_store(self, **kwargs): - path = mktemp(suffix='.dumbdbm') - atexit.register(atexit_rmglob, path + '*') + path = mktemp(suffix=".dumbdbm") + atexit.register(atexit_rmglob, path + "*") import dbm.dumb as dumbdbm - store = DBMStore(path, flag='n', open=dumbdbm.open, **kwargs) + + store = DBMStore(path, flag="n", open=dumbdbm.open, **kwargs) return store class TestDBMStoreGnu(TestDBMStore): - def create_store(self, **kwargs): gdbm = pytest.importorskip("dbm.gnu") path = mktemp(suffix=".gdbm") # pragma: no cover @@ -1898,7 +1907,6 @@ def create_store(self, **kwargs): class TestDBMStoreNDBM(TestDBMStore): - def create_store(self, **kwargs): ndbm = pytest.importorskip("dbm.ndbm") path = mktemp(suffix=".ndbm") # pragma: no cover @@ -1908,20 +1916,18 @@ def create_store(self, **kwargs): class TestDBMStoreBerkeleyDB(TestDBMStore): - def create_store(self, **kwargs): bsddb3 = pytest.importorskip("bsddb3") - path = mktemp(suffix='.dbm') + path = mktemp(suffix=".dbm") atexit.register(os.remove, path) - store = DBMStore(path, flag='n', open=bsddb3.btopen, write_lock=False, **kwargs) + store = DBMStore(path, flag="n", open=bsddb3.btopen, write_lock=False, **kwargs) return store class TestLMDBStore(StoreTests): - def create_store(self, **kwargs): pytest.importorskip("lmdb") - path = mktemp(suffix='.lmdb') + path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) buffers = True store = LMDBStore(path, buffers=buffers, **kwargs) @@ -1929,43 +1935,41 @@ def create_store(self, **kwargs): def test_context_manager(self): with self.create_store() as store: - store[self.root + 'foo'] = b'bar' - store[self.root + 'baz'] = b'qux' + store[self.root + "foo"] = b"bar" + store[self.root + 
"baz"] = b"qux" assert 2 == len(store) class TestSQLiteStore(StoreTests): - def create_store(self, **kwargs): pytest.importorskip("sqlite3") - path = mktemp(suffix='.db') + path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStore(path, **kwargs) return store def test_underscore_in_name(self): - path = mktemp(suffix='.db') + path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStore(path) - store['a'] = b'aaa' - store['a_b'] = b'aa_bb' - store.rmdir('a') - assert 'a_b' in store + store["a"] = b"aaa" + store["a_b"] = b"aa_bb" + store.rmdir("a") + assert "a_b" in store class TestSQLiteStoreInMemory(TestSQLiteStore): - def create_store(self, **kwargs): pytest.importorskip("sqlite3") - store = SQLiteStore(':memory:', **kwargs) + store = SQLiteStore(":memory:", **kwargs) return store def test_pickle(self): # setup store store = self.create_store() - store[self.root + 'foo'] = b'bar' - store[self.root + 'baz'] = b'quux' + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"quux" # round-trip through pickle with pytest.raises(PicklingError): @@ -1974,11 +1978,11 @@ def test_pickle(self): @skip_test_env_var("ZARR_TEST_MONGO") class TestMongoDBStore(StoreTests): - def create_store(self, **kwargs): pytest.importorskip("pymongo") - store = MongoDBStore(host='127.0.0.1', database='zarr_tests', - collection='zarr_tests', **kwargs) + store = MongoDBStore( + host="127.0.0.1", database="zarr_tests", collection="zarr_tests", **kwargs + ) # start with an empty store store.clear() return store @@ -1986,12 +1990,11 @@ def create_store(self, **kwargs): @skip_test_env_var("ZARR_TEST_REDIS") class TestRedisStore(StoreTests): - def create_store(self, **kwargs): # TODO: this is the default host for Redis on Travis, # we probably want to generalize this though pytest.importorskip("redis") - store = RedisStore(host='localhost', port=6379, **kwargs) + store = RedisStore(host="localhost", port=6379, **kwargs) # start with an empty store store.clear() return store @@ -2011,14 +2014,14 @@ def test_cache_values_no_max_size(self): # setup store store = self.CountingClass() - foo_key = self.root + 'foo' - bar_key = self.root + 'bar' - store[foo_key] = b'xxx' - store[bar_key] = b'yyy' - assert 0 == store.counter['__getitem__', foo_key] - assert 1 == store.counter['__setitem__', foo_key] - assert 0 == store.counter['__getitem__', bar_key] - assert 1 == store.counter['__setitem__', bar_key] + foo_key = self.root + "foo" + bar_key = self.root + "bar" + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__getitem__", foo_key] + assert 1 == store.counter["__setitem__", foo_key] + assert 0 == store.counter["__getitem__", bar_key] + assert 1 == store.counter["__setitem__", bar_key] # setup cache cache = self.LRUStoreClass(store, max_size=None) @@ -2026,39 +2029,39 @@ def test_cache_values_no_max_size(self): assert 0 == cache.misses # test first __getitem__, cache miss - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] - assert 1 == store.counter['__setitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 1 == store.counter["__setitem__", foo_key] assert 0 == cache.hits assert 1 == cache.misses # test second __getitem__, cache hit - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] - assert 1 == store.counter['__setitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", 
foo_key] + assert 1 == store.counter["__setitem__", foo_key] assert 1 == cache.hits assert 1 == cache.misses # test __setitem__, __getitem__ - cache[foo_key] = b'zzz' - assert 1 == store.counter['__getitem__', foo_key] - assert 2 == store.counter['__setitem__', foo_key] + cache[foo_key] = b"zzz" + assert 1 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] # should be a cache hit - assert b'zzz' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] - assert 2 == store.counter['__setitem__', foo_key] + assert b"zzz" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] assert 2 == cache.hits assert 1 == cache.misses # manually invalidate all cached values cache.invalidate_values() - assert b'zzz' == cache[foo_key] - assert 2 == store.counter['__getitem__', foo_key] - assert 2 == store.counter['__setitem__', foo_key] + assert b"zzz" == cache[foo_key] + assert 2 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] cache.invalidate() - assert b'zzz' == cache[foo_key] - assert 3 == store.counter['__getitem__', foo_key] - assert 2 == store.counter['__setitem__', foo_key] + assert b"zzz" == cache[foo_key] + assert 3 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] # test __delitem__ del cache[foo_key] @@ -2070,104 +2073,104 @@ def test_cache_values_no_max_size(self): store[foo_key] # verify other keys untouched - assert 0 == store.counter['__getitem__', bar_key] - assert 1 == store.counter['__setitem__', bar_key] + assert 0 == store.counter["__getitem__", bar_key] + assert 1 == store.counter["__setitem__", bar_key] def test_cache_values_with_max_size(self): # setup store store = self.CountingClass() - foo_key = self.root + 'foo' - bar_key = self.root + 'bar' - store[foo_key] = b'xxx' - store[bar_key] = b'yyy' - assert 0 == store.counter['__getitem__', foo_key] - assert 0 == store.counter['__getitem__', bar_key] + foo_key = self.root + "foo" + bar_key = self.root + "bar" + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__getitem__", foo_key] + assert 0 == store.counter["__getitem__", bar_key] # setup cache - can only hold one item cache = self.LRUStoreClass(store, max_size=5) assert 0 == cache.hits assert 0 == cache.misses # test first 'foo' __getitem__, cache miss - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] assert 0 == cache.hits assert 1 == cache.misses # test second 'foo' __getitem__, cache hit - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] assert 1 == cache.hits assert 1 == cache.misses # test first 'bar' __getitem__, cache miss - assert b'yyy' == cache[bar_key] - assert 1 == store.counter['__getitem__', bar_key] + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] assert 1 == cache.hits assert 2 == cache.misses # test second 'bar' __getitem__, cache hit - assert b'yyy' == cache[bar_key] - assert 1 == store.counter['__getitem__', bar_key] + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] assert 2 == cache.hits assert 2 == cache.misses # test 'foo' __getitem__, should have been evicted, cache miss - assert b'xxx' == cache[foo_key] - assert 2 
== store.counter['__getitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 2 == store.counter["__getitem__", foo_key] assert 2 == cache.hits assert 3 == cache.misses # test 'bar' __getitem__, should have been evicted, cache miss - assert b'yyy' == cache[bar_key] - assert 2 == store.counter['__getitem__', bar_key] + assert b"yyy" == cache[bar_key] + assert 2 == store.counter["__getitem__", bar_key] assert 2 == cache.hits assert 4 == cache.misses # setup store store = self.CountingClass() - store[foo_key] = b'xxx' - store[bar_key] = b'yyy' - assert 0 == store.counter['__getitem__', foo_key] - assert 0 == store.counter['__getitem__', bar_key] + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__getitem__", foo_key] + assert 0 == store.counter["__getitem__", bar_key] # setup cache - can hold two items cache = self.LRUStoreClass(store, max_size=6) assert 0 == cache.hits assert 0 == cache.misses # test first 'foo' __getitem__, cache miss - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] assert 0 == cache.hits assert 1 == cache.misses # test second 'foo' __getitem__, cache hit - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] assert 1 == cache.hits assert 1 == cache.misses # test first 'bar' __getitem__, cache miss - assert b'yyy' == cache[bar_key] - assert 1 == store.counter['__getitem__', bar_key] + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] assert 1 == cache.hits assert 2 == cache.misses # test second 'bar' __getitem__, cache hit - assert b'yyy' == cache[bar_key] - assert 1 == store.counter['__getitem__', bar_key] + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] assert 2 == cache.hits assert 2 == cache.misses # test 'foo' __getitem__, should still be cached - assert b'xxx' == cache[foo_key] - assert 1 == store.counter['__getitem__', foo_key] + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] assert 3 == cache.hits assert 2 == cache.misses # test 'bar' __getitem__, should still be cached - assert b'yyy' == cache[bar_key] - assert 1 == store.counter['__getitem__', bar_key] + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] assert 4 == cache.hits assert 2 == cache.misses @@ -2175,78 +2178,78 @@ def test_cache_keys(self): # setup store = self.CountingClass() - foo_key = self.root + 'foo' - bar_key = self.root + 'bar' - baz_key = self.root + 'baz' - store[foo_key] = b'xxx' - store[bar_key] = b'yyy' - assert 0 == store.counter['__contains__', foo_key] - assert 0 == store.counter['__iter__'] - assert 0 == store.counter['keys'] + foo_key = self.root + "foo" + bar_key = self.root + "bar" + baz_key = self.root + "baz" + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] + assert 0 == store.counter["keys"] cache = self.LRUStoreClass(store, max_size=None) # keys should be cached on first call keys = sorted(cache.keys()) assert keys == [bar_key, foo_key] - assert 1 == store.counter['keys'] + assert 1 == store.counter["keys"] # keys should now be cached assert keys == sorted(cache.keys()) - assert 1 == store.counter['keys'] + assert 1 == store.counter["keys"] assert foo_key in cache - assert 0 
== store.counter['__contains__', foo_key] + assert 0 == store.counter["__contains__", foo_key] assert keys == sorted(cache) - assert 0 == store.counter['__iter__'] - assert 1 == store.counter['keys'] + assert 0 == store.counter["__iter__"] + assert 1 == store.counter["keys"] # cache should be cleared if store is modified - crude but simple for now - cache[baz_key] = b'zzz' + cache[baz_key] = b"zzz" keys = sorted(cache.keys()) assert keys == [bar_key, baz_key, foo_key] - assert 2 == store.counter['keys'] + assert 2 == store.counter["keys"] # keys should now be cached assert keys == sorted(cache.keys()) - assert 2 == store.counter['keys'] + assert 2 == store.counter["keys"] # manually invalidate keys cache.invalidate_keys() keys = sorted(cache.keys()) assert keys == [bar_key, baz_key, foo_key] - assert 3 == store.counter['keys'] - assert 0 == store.counter['__contains__', foo_key] - assert 0 == store.counter['__iter__'] + assert 3 == store.counter["keys"] + assert 0 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] cache.invalidate_keys() keys = sorted(cache) assert keys == [bar_key, baz_key, foo_key] - assert 4 == store.counter['keys'] - assert 0 == store.counter['__contains__', foo_key] - assert 0 == store.counter['__iter__'] + assert 4 == store.counter["keys"] + assert 0 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] cache.invalidate_keys() assert foo_key in cache - assert 5 == store.counter['keys'] - assert 0 == store.counter['__contains__', foo_key] - assert 0 == store.counter['__iter__'] + assert 5 == store.counter["keys"] + assert 0 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] # check these would get counted if called directly assert foo_key in store - assert 1 == store.counter['__contains__', foo_key] + assert 1 == store.counter["__contains__", foo_key] assert keys == sorted(store) - assert 1 == store.counter['__iter__'] + assert 1 == store.counter["__iter__"] def test_getsize(): store = KVStore(dict()) - store['foo'] = b'aaa' - store['bar'] = b'bbbb' - store['baz/quux'] = b'ccccc' + store["foo"] = b"aaa" + store["bar"] = b"bbbb" + store["baz/quux"] = b"ccccc" assert 7 == getsize(store) - assert 5 == getsize(store, 'baz') + assert 5 == getsize(store, "baz") store = KVStore(dict()) - store['boo'] = None + store["boo"] = None assert -1 == getsize(store) -@pytest.mark.parametrize('dict_store', [False, True]) +@pytest.mark.parametrize("dict_store", [False, True]) def test_migrate_1to2(dict_store): from zarr import meta_v1 @@ -2258,64 +2261,63 @@ def test_migrate_1to2(dict_store): meta = dict( shape=(100,), chunks=(10,), - dtype=np.dtype('f4'), - compression='zlib', + dtype=np.dtype("f4"), + compression="zlib", compression_opts=1, fill_value=None, - order='C' + order="C", ) meta_json = meta_v1.encode_metadata(meta) - store['meta'] = meta_json - store['attrs'] = json.dumps(dict()).encode('ascii') + store["meta"] = meta_json + store["attrs"] = json.dumps(dict()).encode("ascii") # run migration migrate_1to2(store) # check results - assert 'meta' not in store + assert "meta" not in store assert array_meta_key in store - assert 'attrs' not in store + assert "attrs" not in store assert attrs_key in store meta_migrated = decode_array_metadata(store[array_meta_key]) - assert 2 == meta_migrated['zarr_format'] + assert 2 == meta_migrated["zarr_format"] # preserved fields - for f in 'shape', 'chunks', 'dtype', 'fill_value', 'order': + for f in "shape", "chunks", "dtype", "fill_value", "order": 
assert meta[f] == meta_migrated[f] # migrate should have added empty filters field - assert meta_migrated['filters'] is None + assert meta_migrated["filters"] is None # check compression and compression_opts migrated to compressor - assert 'compression' not in meta_migrated - assert 'compression_opts' not in meta_migrated - assert meta_migrated['compressor'] == Zlib(1).get_config() + assert "compression" not in meta_migrated + assert "compression_opts" not in meta_migrated + assert meta_migrated["compressor"] == Zlib(1).get_config() # check dict compression_opts store = dict() if dict_store else KVStore(dict()) - meta['compression'] = 'blosc' - meta['compression_opts'] = dict(cname='lz4', clevel=5, shuffle=1) + meta["compression"] = "blosc" + meta["compression_opts"] = dict(cname="lz4", clevel=5, shuffle=1) meta_json = meta_v1.encode_metadata(meta) - store['meta'] = meta_json - store['attrs'] = json.dumps(dict()).encode('ascii') + store["meta"] = meta_json + store["attrs"] = json.dumps(dict()).encode("ascii") migrate_1to2(store) meta_migrated = decode_array_metadata(store[array_meta_key]) - assert 'compression' not in meta_migrated - assert 'compression_opts' not in meta_migrated - assert (meta_migrated['compressor'] == - Blosc(cname='lz4', clevel=5, shuffle=1).get_config()) + assert "compression" not in meta_migrated + assert "compression_opts" not in meta_migrated + assert meta_migrated["compressor"] == Blosc(cname="lz4", clevel=5, shuffle=1).get_config() # check 'none' compression is migrated to None (null in JSON) store = dict() if dict_store else KVStore(dict()) - meta['compression'] = 'none' + meta["compression"] = "none" meta_json = meta_v1.encode_metadata(meta) - store['meta'] = meta_json - store['attrs'] = json.dumps(dict()).encode('ascii') + store["meta"] = meta_json + store["attrs"] = json.dumps(dict()).encode("ascii") migrate_1to2(store) meta_migrated = decode_array_metadata(store[array_meta_key]) - assert 'compression' not in meta_migrated - assert 'compression_opts' not in meta_migrated - assert meta_migrated['compressor'] is None + assert "compression" not in meta_migrated + assert "compression_opts" not in meta_migrated + assert meta_migrated["compressor"] is None def test_format_compatibility(): @@ -2324,71 +2326,75 @@ def test_format_compatibility(): # read data stored with a previous minor version (which should be format-compatible). 
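    # In outline: the fixture/ directory holds data as written by an earlier
    # zarr release; the seeded RNG below regenerates the same arrays, and the
    # test checks that the current code still reads the stored copies back
    # identically, so an unintended on-disk format change surfaces as a failure.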
# fixture data - fixture = group(store=DirectoryStore('fixture')) + fixture = group(store=DirectoryStore("fixture")) # set seed to get consistent random data np.random.seed(42) arrays_chunks = [ - (np.arange(1111, dtype=' 2 else '' + prefix = meta_root if self.version > 2 else "" # setup some values - store[prefix + 'a'] = b'aaa' - store[prefix + 'b'] = b'bbb' - store[prefix + 'c/d'] = b'ddd' - store[prefix + 'c/e/f'] = b'fff' + store[prefix + "a"] = b"aaa" + store[prefix + "b"] = b"bbb" + store[prefix + "c/d"] = b"ddd" + store[prefix + "c/e/f"] = b"fff" # test iterators on store with data assert 4 == len(store) - keys = [prefix + 'a', prefix + 'b', prefix + 'c/d', prefix + 'c/e/f'] - values = [b'aaa', b'bbb', b'ddd', b'fff'] + keys = [prefix + "a", prefix + "b", prefix + "c/d", prefix + "c/e/f"] + values = [b"aaa", b"bbb", b"ddd", b"fff"] items = list(zip(keys, values)) assert set(keys) == set(store) assert set(keys) == set(store.keys()) @@ -2483,7 +2489,7 @@ class TestConsolidatedMetadataStore: @property def metadata_key(self): - return '.zmetadata' + return ".zmetadata" def test_bad_format(self): @@ -2491,7 +2497,7 @@ def test_bad_format(self): store = dict() consolidated = { # bad format version - 'zarr_consolidated_format': 0, + "zarr_consolidated_format": 0, } store[self.metadata_key] = json.dumps(consolidated).encode() @@ -2508,11 +2514,11 @@ def test_read_write(self): # setup store with consolidated metadata store = dict() consolidated = { - 'zarr_consolidated_format': 1, - 'metadata': { - 'foo': 'bar', - 'baz': 42, - } + "zarr_consolidated_format": 1, + "metadata": { + "foo": "bar", + "baz": 42, + }, } store[self.metadata_key] = json.dumps(consolidated).encode() @@ -2520,15 +2526,15 @@ def test_read_write(self): cs = self.ConsolidatedMetadataClass(store) # test __contains__, __getitem__ - for key, value in consolidated['metadata'].items(): + for key, value in consolidated["metadata"].items(): assert key in cs assert value == cs[key] # test __delitem__, __setitem__ with pytest.raises(PermissionError): - del cs['foo'] + del cs["foo"] with pytest.raises(PermissionError): - cs['bar'] = 0 + cs["bar"] = 0 with pytest.raises(PermissionError): cs["spam"] = "eggs" @@ -2558,16 +2564,16 @@ def test_normalize_store_arg(tmpdir): with pytest.raises(ValueError): normalize_store_arg(dict(), zarr_version=4) - for ext, Class in [('.zip', ZipStore), ('.n5', N5Store)]: - fn = tmpdir.join('store' + ext) - store = normalize_store_arg(str(fn), zarr_version=2, mode='w') + for ext, Class in [(".zip", ZipStore), (".n5", N5Store)]: + fn = tmpdir.join("store" + ext) + store = normalize_store_arg(str(fn), zarr_version=2, mode="w") assert isinstance(store, Class) if have_fsspec: import fsspec path = tempfile.mkdtemp() - store = normalize_store_arg("file://" + path, zarr_version=2, mode='w') + store = normalize_store_arg("file://" + path, zarr_version=2, mode="w") assert isinstance(store, FSStore) store = normalize_store_arg(fsspec.get_mapper("file://" + path)) @@ -2578,7 +2584,7 @@ def test_meta_prefix_6853(): fixture = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" meta = fixture / "meta" - if not meta.exists(): # pragma: no cover + if not meta.exists(): # pragma: no cover s = DirectoryStore(str(meta), dimension_separator=".") a = zarr.open(store=s, mode="w", shape=(2, 2), dtype="' == actual[-8:] + assert "" == actual[-8:] def test_tree_get_icon(): @@ -184,15 +198,13 @@ def test_tree_widget_missing_ipytree(): "to get the required ipytree dependency for displaying the tree " "widget. 
If using jupyterlab<3, you also need to run " "`jupyter labextension install ipytree`" - ) + ) with pytest.raises(ImportError, match=re.escape(pattern)): tree_widget(None, None, None) def test_retry_call(): - class Fixture: - def __init__(self, pass_on=1): self.c = 0 self.pass_on = pass_on @@ -217,9 +229,27 @@ def fail(x): def test_flatten(): - assert list(flatten(['0', ['1', ['2', ['3', [4, ]]]]])) == ['0', '1', '2', '3', 4] - assert list(flatten('foo')) == ['f', 'o', 'o'] - assert list(flatten(['foo'])) == ['foo'] + assert list( + flatten( + [ + "0", + [ + "1", + [ + "2", + [ + "3", + [ + 4, + ], + ], + ], + ], + ] + ) + ) == ["0", "1", "2", "3", 4] + assert list(flatten("foo")) == ["f", "o", "o"] + assert list(flatten(["foo"])) == ["foo"] def test_all_equal(): @@ -232,11 +262,11 @@ def test_all_equal(): assert all_equal(np.nan, np.array([np.nan, np.nan])) assert not all_equal(np.nan, np.array([np.nan, 1.0])) - assert all_equal({'a': -1}, np.array([{'a': -1}, {'a': -1}], dtype='object')) - assert not all_equal({'a': -1}, np.array([{'a': -1}, {'a': 2}], dtype='object')) + assert all_equal({"a": -1}, np.array([{"a": -1}, {"a": -1}], dtype="object")) + assert not all_equal({"a": -1}, np.array([{"a": -1}, {"a": 2}], dtype="object")) - assert all_equal(np.timedelta64(999, 'D'), np.array([999, 999], dtype='timedelta64[D]')) - assert not all_equal(np.timedelta64(999, 'D'), np.array([999, 998], dtype='timedelta64[D]')) + assert all_equal(np.timedelta64(999, "D"), np.array([999, 999], dtype="timedelta64[D]")) + assert not all_equal(np.timedelta64(999, "D"), np.array([999, 998], dtype="timedelta64[D]")) # all_equal(None, *) always returns False assert not all_equal(None, np.array([None, None])) diff --git a/zarr/tests/util.py b/zarr/tests/util.py index 19ac8c0bfa..b4f00f703d 100644 --- a/zarr/tests/util.py +++ b/zarr/tests/util.py @@ -11,44 +11,43 @@ class CountingDict(Store): - def __init__(self): self.wrapped = dict() self.counter = collections.Counter() def __len__(self): - self.counter['__len__'] += 1 + self.counter["__len__"] += 1 return len(self.wrapped) def keys(self): - self.counter['keys'] += 1 + self.counter["keys"] += 1 return self.wrapped.keys() def __iter__(self): - self.counter['__iter__'] += 1 + self.counter["__iter__"] += 1 return iter(self.wrapped) def __contains__(self, item): - self.counter['__contains__', item] += 1 + self.counter["__contains__", item] += 1 return item in self.wrapped def __getitem__(self, item): - self.counter['__getitem__', item] += 1 + self.counter["__getitem__", item] += 1 return self.wrapped[item] def __setitem__(self, key, value): - self.counter['__setitem__', key] += 1 + self.counter["__setitem__", key] += 1 self.wrapped[key] = value def __delitem__(self, key): - self.counter['__delitem__', key] += 1 + self.counter["__delitem__", key] += 1 del self.wrapped[key] def getitems( self, keys: Sequence[str], *, contexts: Mapping[str, Context] ) -> Mapping[str, Any]: for key in keys: - self.counter['__getitem__', key] += 1 + self.counter["__getitem__", key] += 1 return {k: self.wrapped[k] for k in keys if k in self.wrapped} @@ -57,10 +56,9 @@ class CountingDictV3(CountingDict, StoreV3): def skip_test_env_var(name): - """ Checks for environment variables indicating whether tests requiring services should be run - """ - value = os.environ.get(name, '0') - return pytest.mark.skipif(value == '0', reason='Tests not enabled via environment variable') + """Checks for environment variables indicating whether tests requiring services should be run""" + value = 
os.environ.get(name, "0") + return pytest.mark.skipif(value == "0", reason="Tests not enabled via environment variable") try: diff --git a/zarr/util.py b/zarr/util.py index 6ba20b96c2..b8b090ea70 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -5,18 +5,7 @@ from textwrap import TextWrapper import mmap import time -from typing import ( - Any, - Callable, - Dict, - Iterator, - Mapping, - Optional, - Tuple, - TypeVar, - Union, - Iterable -) +from typing import Any, Callable, Dict, Iterator, Mapping, Optional, Tuple, TypeVar, Union, Iterable import numpy as np from asciitree import BoxStyle, LeftAligned @@ -25,14 +14,14 @@ ensure_text, ensure_ndarray_like, ensure_bytes, - ensure_contiguous_ndarray_like + ensure_contiguous_ndarray_like, ) from numcodecs.ndarray_like import NDArrayLike from numcodecs.registry import codec_registry from numcodecs.blosc import cbuffer_sizes, cbuffer_metainfo -KeyType = TypeVar('KeyType') -ValueType = TypeVar('ValueType') +KeyType = TypeVar("KeyType") +ValueType = TypeVar("ValueType") def flatten(arg: Iterable) -> Iterable: @@ -45,14 +34,13 @@ def flatten(arg: Iterable) -> Iterable: # codecs to use for object dtype convenience API object_codecs = { - str.__name__: 'vlen-utf8', - bytes.__name__: 'vlen-bytes', - 'array': 'vlen-array', + str.__name__: "vlen-utf8", + bytes.__name__: "vlen-bytes", + "array": "vlen-array", } class NumberEncoder(json.JSONEncoder): - def default(self, o): # See json.JSONEncoder.default docstring for explanation # This is necessary to encode numpy dtype @@ -65,20 +53,21 @@ def default(self, o): def json_dumps(o: Any) -> bytes: """Write JSON in a consistent, human-readable way.""" - return json.dumps(o, indent=4, sort_keys=True, ensure_ascii=True, - separators=(',', ': '), cls=NumberEncoder).encode('ascii') + return json.dumps( + o, indent=4, sort_keys=True, ensure_ascii=True, separators=(",", ": "), cls=NumberEncoder + ).encode("ascii") def json_loads(s: Union[bytes, str]) -> Dict[str, Any]: """Read JSON in a consistent way.""" - return json.loads(ensure_text(s, 'utf-8')) + return json.loads(ensure_text(s, "utf-8")) def normalize_shape(shape) -> Tuple[int]: """Convenience function to normalize the `shape` argument.""" if shape is None: - raise TypeError('shape is None') + raise TypeError("shape is None") # handle 1D convenience form if isinstance(shape, numbers.Integral): @@ -91,9 +80,9 @@ def normalize_shape(shape) -> Tuple[int]: # code to guess chunk shape, adapted from h5py -CHUNK_BASE = 256*1024 # Multiplier by which chunks are adjusted -CHUNK_MIN = 128*1024 # Soft lower limit (128k) -CHUNK_MAX = 64*1024*1024 # Hard upper limit +CHUNK_BASE = 256 * 1024 # Multiplier by which chunks are adjusted +CHUNK_MIN = 128 * 1024 # Soft lower limit (128k) +CHUNK_MAX = 64 * 1024 * 1024 # Hard upper limit def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: @@ -107,12 +96,12 @@ def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: ndims = len(shape) # require chunks to have non-zero length for all dimensions - chunks = np.maximum(np.array(shape, dtype='=f8'), 1) + chunks = np.maximum(np.array(shape, dtype="=f8"), 1) # Determine the optimal chunk size in bytes using a PyTables expression. # This is kept as a float. 
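    # Worked example of the expression below: for a 1 GiB dataset,
    # dset_size / (1024. * 1024) = 1024, so
    # target_size = 256 KiB * 2**log10(1024) ~= 256 KiB * 8.06 ~= 2 MiB,
    # which already lies inside [CHUNK_MIN, CHUNK_MAX] and is used as-is;
    # the loop further down then shrinks the chunk shape toward that size.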
- dset_size = np.prod(chunks)*typesize - target_size = CHUNK_BASE * (2**np.log10(dset_size/(1024.*1024))) + dset_size = np.prod(chunks) * typesize + target_size = CHUNK_BASE * (2 ** np.log10(dset_size / (1024.0 * 1024))) if target_size > CHUNK_MAX: target_size = CHUNK_MAX @@ -126,11 +115,11 @@ def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: # 1b. We're within 50% of the target chunk size, AND # 2. The chunk is smaller than the maximum chunk size - chunk_bytes = np.prod(chunks)*typesize + chunk_bytes = np.prod(chunks) * typesize - if (chunk_bytes < target_size or - abs(chunk_bytes-target_size)/target_size < 0.5) and \ - chunk_bytes < CHUNK_MAX: + if ( + chunk_bytes < target_size or abs(chunk_bytes - target_size) / target_size < 0.5 + ) and chunk_bytes < CHUNK_MAX: break if np.prod(chunks) == 1: @@ -142,9 +131,7 @@ def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: return tuple(int(x) for x in chunks) -def normalize_chunks( - chunks: Any, shape: Tuple[int, ...], typesize: int -) -> Tuple[int, ...]: +def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]: """Convenience function to normalize the `chunks` argument for an array with the given `shape`.""" @@ -164,17 +151,16 @@ def normalize_chunks( # handle bad dimensionality if len(chunks) > len(shape): - raise ValueError('too many dimensions in chunks') + raise ValueError("too many dimensions in chunks") # handle underspecified chunks if len(chunks) < len(shape): # assume chunks across remaining dimensions - chunks += shape[len(chunks):] + chunks += shape[len(chunks) :] # handle None or -1 in chunks if -1 in chunks or None in chunks: - chunks = tuple(s if c == -1 or c is None else int(c) - for s, c in zip(shape, chunks)) + chunks = tuple(s if c == -1 or c is None else int(c) for s, c in zip(shape, chunks)) return tuple(chunks) @@ -186,30 +172,34 @@ def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype dtype = dtype.__name__ # type: ignore if isinstance(dtype, str): # allow ':' to delimit class from codec arguments - tokens = dtype.split(':') + tokens = dtype.split(":") key = tokens[0] if key in object_codecs: dtype = np.dtype(object) if object_codec is None: codec_id = object_codecs[key] if len(tokens) > 1: - args = tokens[1].split(',') + args = tokens[1].split(",") else: args = [] try: object_codec = codec_registry[codec_id](*args) except KeyError: # pragma: no cover - raise ValueError('codec %r for object type %r is not ' - 'available; please provide an ' - 'object_codec manually' % (codec_id, key)) + raise ValueError( + "codec %r for object type %r is not " + "available; please provide an " + "object_codec manually" % (codec_id, key) + ) return dtype, object_codec dtype = np.dtype(dtype) # don't allow generic datetime64 or timedelta64, require units to be specified - if dtype == np.dtype('M8') or dtype == np.dtype('m8'): - raise ValueError('datetime64 and timedelta64 dtypes with generic units ' - 'are not supported, please specify units (e.g., "M8[ns]")') + if dtype == np.dtype("M8") or dtype == np.dtype("m8"): + raise ValueError( + "datetime64 and timedelta64 dtypes with generic units " + 'are not supported, please specify units (e.g., "M8[ns]")' + ) return dtype, object_codec @@ -227,16 +217,17 @@ def is_total_slice(item, shape: Tuple[int]) -> bool: if item == slice(None): return True if isinstance(item, slice): - item = item, + item = (item,) if isinstance(item, tuple): return all( - (isinstance(s, slice) and - ((s == 
slice(None)) or - ((s.stop - s.start == l) and (s.step in [1, None])))) - for s, l in zip(item, shape) + ( + isinstance(it, slice) + and ((it == slice(None)) or ((it.stop - it.start == sh) and (it.step in [1, None]))) + ) + for it, sh in zip(item, shape) ) else: - raise TypeError('expected slice or tuple of slices, found %r' % item) + raise TypeError("expected slice or tuple of slices, found %r" % item) def normalize_resize_args(old_shape, *args): @@ -251,33 +242,32 @@ def normalize_resize_args(old_shape, *args): else: new_shape = tuple(new_shape) if len(new_shape) != len(old_shape): - raise ValueError('new shape must have same number of dimensions') + raise ValueError("new shape must have same number of dimensions") # handle None in new_shape - new_shape = tuple(s if n is None else int(n) - for s, n in zip(old_shape, new_shape)) + new_shape = tuple(s if n is None else int(n) for s, n in zip(old_shape, new_shape)) return new_shape def human_readable_size(size) -> str: if size < 2**10: - return '%s' % size + return "%s" % size elif size < 2**20: - return '%.1fK' % (size / float(2**10)) + return "%.1fK" % (size / float(2**10)) elif size < 2**30: - return '%.1fM' % (size / float(2**20)) + return "%.1fM" % (size / float(2**20)) elif size < 2**40: - return '%.1fG' % (size / float(2**30)) + return "%.1fG" % (size / float(2**30)) elif size < 2**50: - return '%.1fT' % (size / float(2**40)) + return "%.1fT" % (size / float(2**40)) else: - return '%.1fP' % (size / float(2**50)) + return "%.1fP" % (size / float(2**50)) def normalize_order(order: str) -> str: order = str(order).upper() - if order not in ['C', 'F']: + if order not in ["C", "F"]: raise ValueError("order must be either 'C' or 'F', found: %r" % order) return order @@ -286,8 +276,7 @@ def normalize_dimension_separator(sep: Optional[str]) -> Optional[str]: if sep in (".", "/", None): return sep else: - raise ValueError( - "dimension_separator must be either '.' or '/', found: %r" % sep) + raise ValueError("dimension_separator must be either '.' 
or '/', found: %r" % sep) def normalize_fill_value(fill_value, dtype: np.dtype): @@ -300,17 +289,19 @@ def normalize_fill_value(fill_value, dtype: np.dtype): # structured arrays fill_value = np.zeros((), dtype=dtype)[()] - elif dtype.kind == 'U': + elif dtype.kind == "U": # special case unicode because of encoding issues on Windows if passed through numpy # https://github.com/alimanfoo/zarr/pull/172#issuecomment-343782713 if not isinstance(fill_value, str): - raise ValueError('fill_value {!r} is not valid for dtype {}; must be a ' - 'unicode string'.format(fill_value, dtype)) + raise ValueError( + "fill_value {!r} is not valid for dtype {}; must be a " + "unicode string".format(fill_value, dtype) + ) else: try: - if isinstance(fill_value, bytes) and dtype.kind == 'V': + if isinstance(fill_value, bytes) and dtype.kind == "V": # special case for numpy 1.14 compatibility fill_value = np.array(fill_value, dtype=dtype.str).view(dtype)[()] else: @@ -318,8 +309,10 @@ def normalize_fill_value(fill_value, dtype: np.dtype): except Exception as e: # re-raise with our own error message to be helpful - raise ValueError('fill_value {!r} is not valid for dtype {}; nested ' - 'exception: {}'.format(fill_value, dtype, e)) + raise ValueError( + "fill_value {!r} is not valid for dtype {}; nested " + "exception: {}".format(fill_value, dtype, e) + ) return fill_value @@ -328,7 +321,7 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str: # handle bytes if isinstance(path, bytes): - path = str(path, 'ascii') + path = str(path, "ascii") # ensure str if path is not None and not isinstance(path, str): @@ -337,21 +330,21 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str: if path: # convert backslash to forward slash - path = path.replace('\\', '/') + path = path.replace("\\", "/") # ensure no leading slash - while len(path) > 0 and path[0] == '/': + while len(path) > 0 and path[0] == "/": path = path[1:] # ensure no trailing slash - while len(path) > 0 and path[-1] == '/': + while len(path) > 0 and path[-1] == "/": path = path[:-1] # collapse any repeated slashes previous_char = None - collapsed = '' + collapsed = "" for char in path: - if char == '/' and previous_char == '/': + if char == "/" and previous_char == "/": pass else: collapsed += char @@ -359,12 +352,12 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str: path = collapsed # don't allow path segments with just '.' or '..' - segments = path.split('/') - if any(s in {'.', '..'} for s in segments): + segments = path.split("/") + if any(s in {".", ".."} for s in segments): raise ValueError("path containing '.' or '..' segment not allowed") else: - path = '' + path = "" return path @@ -376,32 +369,34 @@ def buffer_size(v) -> int: def info_text_report(items: Dict[Any, Any]) -> str: keys = [k for k, v in items] max_key_len = max(len(k) for k in keys) - report = '' + report = "" for k, v in items: - wrapper = TextWrapper(width=80, - initial_indent=k.ljust(max_key_len) + ' : ', - subsequent_indent=' '*max_key_len + ' : ') + wrapper = TextWrapper( + width=80, + initial_indent=k.ljust(max_key_len) + " : ", + subsequent_indent=" " * max_key_len + " : ", + ) text = wrapper.fill(str(v)) - report += text + '\n' + report += text + "\n" return report def info_html_report(items) -> str: report = '' - report += '' + report += "" for k, v in items: - report += '' \ - '' \ - '' \ - '' \ - % (k, v) - report += '' - report += '
'<th style="text-align: left">%s</th>' '<td style="text-align: left">%s</td>'
' + report += ( + "" + '%s' + '%s' + "" % (k, v) + ) + report += "" + report += "" return report class InfoReporter: - def __init__(self, obj): self.obj = obj @@ -415,24 +410,22 @@ def _repr_html_(self): class TreeNode: - def __init__(self, obj, depth=0, level=None): self.obj = obj self.depth = depth self.level = level def get_children(self): - if hasattr(self.obj, 'values'): + if hasattr(self.obj, "values"): if self.level is None or self.depth < self.level: depth = self.depth + 1 - return [TreeNode(o, depth=depth, level=self.level) - for o in self.obj.values()] + return [TreeNode(o, depth=depth, level=self.level) for o in self.obj.values()] return [] def get_text(self): name = self.obj.name.split("/")[-1] or "/" - if hasattr(self.obj, 'shape'): - name += ' {} {}'.format(self.obj.shape, self.obj.dtype) + if hasattr(self.obj, "shape"): + name += " {} {}".format(self.obj.shape, self.obj.dtype) return name def get_type(self): @@ -440,7 +433,6 @@ def get_type(self): class TreeTraversal(Traversal): - def get_children(self, node): return node.get_children() @@ -451,8 +443,8 @@ def get_text(self, node): return node.get_text() -tree_group_icon = 'folder' -tree_array_icon = 'table' +tree_group_icon = "folder" +tree_array_icon = "table" def tree_get_icon(stype: str) -> str: @@ -499,37 +491,28 @@ def tree_widget(group, expand, level): class TreeViewer: - def __init__(self, group, expand=False, level=None): self.group = group self.expand = expand self.level = level - self.text_kwargs = dict( - horiz_len=2, - label_space=1, - indent=1 - ) + self.text_kwargs = dict(horiz_len=2, label_space=1, indent=1) self.bytes_kwargs = dict( - UP_AND_RIGHT="+", - HORIZONTAL="-", - VERTICAL="|", - VERTICAL_AND_RIGHT="+" + UP_AND_RIGHT="+", HORIZONTAL="-", VERTICAL="|", VERTICAL_AND_RIGHT="+" ) self.unicode_kwargs = dict( UP_AND_RIGHT="\u2514", HORIZONTAL="\u2500", VERTICAL="\u2502", - VERTICAL_AND_RIGHT="\u251C" + VERTICAL_AND_RIGHT="\u251C", ) def __bytes__(self): drawer = LeftAligned( - traverse=TreeTraversal(), - draw=BoxStyle(gfx=self.bytes_kwargs, **self.text_kwargs) + traverse=TreeTraversal(), draw=BoxStyle(gfx=self.bytes_kwargs, **self.text_kwargs) ) root = TreeNode(self.group, level=self.level) result = drawer(root) @@ -542,8 +525,7 @@ def __bytes__(self): def __unicode__(self): drawer = LeftAligned( - traverse=TreeTraversal(), - draw=BoxStyle(gfx=self.unicode_kwargs, **self.text_kwargs) + traverse=TreeTraversal(), draw=BoxStyle(gfx=self.unicode_kwargs, **self.text_kwargs) ) root = TreeNode(self.group, level=self.level) return drawer(root) @@ -557,16 +539,21 @@ def _repr_mimebundle_(self, **kwargs): def check_array_shape(param, array, shape): - if not hasattr(array, 'shape'): - raise TypeError('parameter {!r}: expected an array-like object, got {!r}' - .format(param, type(array))) + if not hasattr(array, "shape"): + raise TypeError( + "parameter {!r}: expected an array-like object, got {!r}".format(param, type(array)) + ) if array.shape != shape: - raise ValueError('parameter {!r}: expected array with shape {!r}, got {!r}' - .format(param, shape, array.shape)) + raise ValueError( + "parameter {!r}: expected array with shape {!r}, got {!r}".format( + param, shape, array.shape + ) + ) def is_valid_python_name(name): from keyword import iskeyword + return name.isidentifier() and not iskeyword(name) @@ -599,9 +586,9 @@ def __init__(self, store_key, chunk_store): self.read_blocks = set() _key_path = self.map._key_to_str(store_key) - _key_path = _key_path.split('/') + _key_path = _key_path.split("/") _chunk_path = 
[self.chunk_store._normalize_key(_key_path[-1])] - _key_path = '/'.join(_key_path[:-1] + _chunk_path) + _key_path = "/".join(_key_path[:-1] + _chunk_path) self.key_path = _key_path def prepare_chunk(self): @@ -613,21 +600,15 @@ def prepare_chunk(self): self.buff[0:16] = header self.nblocks = nbytes / blocksize self.nblocks = ( - int(self.nblocks) - if self.nblocks == int(self.nblocks) - else int(self.nblocks + 1) + int(self.nblocks) if self.nblocks == int(self.nblocks) else int(self.nblocks + 1) ) if self.nblocks == 1: self.buff = self.read_full() return - start_points_buffer = self.fs.read_block( - self.key_path, 16, int(self.nblocks * 4) - ) - self.start_points = np.frombuffer( - start_points_buffer, count=self.nblocks, dtype=np.int32 - ) + start_points_buffer = self.fs.read_block(self.key_path, 16, int(self.nblocks * 4)) + self.start_points = np.frombuffer(start_points_buffer, count=self.nblocks, dtype=np.int32) self.start_points_max = self.start_points.max() - self.buff[16: (16 + (self.nblocks * 4))] = start_points_buffer + self.buff[16 : (16 + (self.nblocks * 4))] = start_points_buffer self.n_per_block = blocksize / typesize def read_part(self, start, nitems): @@ -676,12 +657,14 @@ def read_full(self): return self.chunk_store[self.store_key] -def retry_call(callabl: Callable, - args=None, - kwargs=None, - exceptions: Tuple[Any, ...] = (), - retries: int = 10, - wait: float = 0.1) -> Any: +def retry_call( + callabl: Callable, + args=None, + kwargs=None, + exceptions: Tuple[Any, ...] = (), + retries: int = 10, + wait: float = 0.1, +) -> Any: """ Make several attempts to invoke the callable. If one of the given exceptions is raised, wait the given period of time and retry up to the given number of @@ -693,7 +676,7 @@ def retry_call(callabl: Callable, if kwargs is None: kwargs = {} - for attempt in range(1, retries+1): + for attempt in range(1, retries + 1): try: return callabl(*args, **kwargs) except exceptions: From b79f1e2959a7cb2a2d5a0d3de77aa06ef29d026e Mon Sep 17 00:00:00 2001 From: Davis Vann Bennett Date: Wed, 12 Jul 2023 22:26:47 -0400 Subject: [PATCH 0340/1078] chore: add linting commit to list of revs ignored for git blame --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..05fe3dbd8c --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# lint codebase with black and ruff +4e348d6b80c96da461fd866576c971b8a659ba15 \ No newline at end of file From b0c50e1c284c6ce693bd306b77d0a5f00df2d0e8 Mon Sep 17 00:00:00 2001 From: Davis Vann Bennett Date: Wed, 12 Jul 2023 22:49:44 -0400 Subject: [PATCH 0341/1078] docs: update release notes --- docs/release.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 46bd1f025d..5bdae7465d 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -30,6 +30,12 @@ Enhancements * **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. By :user:`Altay Sansal ` :issue:`1428` +Maintenance +~~~~~~~~~~~ + +* Style the codebase with ``ruff`` and ``black``. + By :user:`Davis Bennett` :issue:`1459` + .. 
_release_2.15.0: 2.15.0 From 1558041db6c829654f3af738f16e41f0f0bfeac1 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Fri, 14 Jul 2023 16:15:23 -0400 Subject: [PATCH 0342/1078] Refactor core tests, round 2 (#1462) * chore: refactor tests to inherit create_array from a base class * chore: widen type of shape to variable length tuple * chore: add importskip barrier for lmdb * docs: release notes * chore: remove fsstore kwargs variable * chore: fix type error in creation of fsstore * chore: add `create_filters` method to TestArray. Pop out `compressor` kwarg in create_array. --- docs/release.rst | 6 + zarr/storage.py | 2 +- zarr/tests/test_core.py | 859 +++++++++++----------------------------- zarr/util.py | 6 +- 4 files changed, 249 insertions(+), 624 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 46bd1f025d..269305c2a4 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -30,6 +30,12 @@ Enhancements * **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. By :user:`Altay Sansal ` :issue:`1428` +Maintenance +~~~~~~~~~~~ + +* Refactor the core array tests to reduce code duplication. + By :user:`Davis Bennett ` :issue:`1462`. + .. _release_2.15.0: 2.15.0 diff --git a/zarr/storage.py b/zarr/storage.py index ef1bd64955..c91f2f1cf0 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -300,7 +300,7 @@ def _require_parent_group( def init_array( store: StoreLike, - shape: Tuple[int, ...], + shape: Union[int, Tuple[int, ...]], chunks: Union[bool, int, Tuple[int, ...]] = True, dtype=None, compressor="default", diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index ab1a6e8aa7..8bf8789f56 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -3,10 +3,10 @@ import sys import pickle import shutil +from typing import Any, Literal, Optional, Tuple, Union import unittest from itertools import zip_longest from tempfile import mkdtemp - import numpy as np import packaging.version import pytest @@ -19,6 +19,7 @@ import zarr from zarr._storage.store import ( + BaseStore, v3_api_available, ) from .._storage.v3_storage_transformers import ShardingStorageTransformer, v3_sharding_available @@ -42,6 +43,7 @@ init_array, init_group, meta_root, + normalize_store_arg ) from zarr._storage.v3 import ( ABSStoreV3, @@ -62,16 +64,64 @@ # noinspection PyMethodMayBeStatic -class TestArray(unittest.TestCase): - +class TestArray(): version = 2 root = '' - KVStoreClass = KVStore + path = '' + compressor = Zlib(level=1) + filters = None + dimension_separator: Literal["/", ".", None] = None + cache_metadata = True + cache_attrs = True + partial_decompress: bool = False + write_empty_chunks = True + read_only = False + storage_transformers: Tuple[Any, ...] 
= () + + def create_store(self) -> BaseStore: + return KVStore(dict()) + + # used by child classes + def create_chunk_store(self) -> Optional[BaseStore]: + return None + + def create_storage_transformers(self, shape: Union[int, Tuple[int, ...]]) -> Tuple[Any, ...]: + return () + + def create_filters(self, dtype: Optional[str]) -> Tuple[Any, ...]: + return () + + def create_array(self, shape: Union[int, Tuple[int, ...]], **kwargs): + store = self.create_store() + chunk_store = self.create_chunk_store() + # keyword arguments for array initialization + init_array_kwargs = { + "path": kwargs.pop("path", self.path), + "compressor": kwargs.pop("compressor", self.compressor), + "chunk_store": chunk_store, + "storage_transformers": self.create_storage_transformers(shape), + "filters": kwargs.pop("filters", self.create_filters(kwargs.get("dtype", None))) + } + + # keyword arguments for array instantiation + access_array_kwargs = { + "path": init_array_kwargs["path"], + "read_only": kwargs.pop("read_only", self.read_only), + "chunk_store": chunk_store, + "cache_metadata": kwargs.pop("cache_metadata", self.cache_metadata), + "cache_attrs": kwargs.pop("cache_attrs", self.cache_attrs), + "partial_decompress": kwargs.pop("partial_decompress", self.partial_decompress), + "write_empty_chunks": kwargs.pop("write_empty_chunks", self.write_empty_chunks), + } + + init_array(store, shape, **{**init_array_kwargs, **kwargs}) + + return Array(store, **access_array_kwargs) def test_array_init(self): # normal initialization - store = self.KVStoreClass(dict()) + store = self.create_store() init_array(store, shape=100, chunks=10, dtype=" Tuple[Any, ...]: + return ( Delta(dtype=dtype), FixedScaleOffset(dtype=dtype, scale=1, offset=0), - ] - kwargs.setdefault('filters', filters) - compressor = Zlib(1) - kwargs.setdefault('compressor', compressor) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_attrs=cache_attrs, - cache_metadata=cache_metadata, write_empty_chunks=write_empty_chunks) + ) def expected(self): return [ @@ -2407,19 +2296,14 @@ def __delitem__(self, key): def __contains__(self, item): return item in self.inner + def close(self): + return self.inner.close() + class TestArrayWithCustomMapping(TestArray): - @staticmethod - def create_array(read_only=False, **kwargs): - store = CustomMapping() - kwargs.setdefault('compressor', Zlib(1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + def create_store(self): + return CustomMapping() def test_nbytes_stored(self): z = self.create_array(shape=1000, chunks=100) @@ -2429,18 +2313,6 @@ def test_nbytes_stored(self): class TestArrayNoCache(TestArray): - - @staticmethod - def create_array(read_only=False, **kwargs): - store = KVStore(dict()) - kwargs.setdefault('compressor', Zlib(level=1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, 
write_empty_chunks=write_empty_chunks) - def test_cache_metadata(self): a1 = self.create_array(shape=100, chunks=10, dtype='i1', cache_metadata=False) path = None if self.version == 2 else a1.path @@ -2504,45 +2376,30 @@ def test_object_arrays_danger(self): class TestArrayWithStoreCache(TestArray): - - @staticmethod - def create_array(read_only=False, **kwargs): - store = LRUStoreCache(dict(), max_size=None) - kwargs.setdefault('compressor', Zlib(level=1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + def create_store(self): + return LRUStoreCache(dict(), max_size=None) def test_store_has_bytes_values(self): # skip as the cache has no control over how the store provides values pass -fsspec_mapper_kwargs = { - "check": True, - "create": True, - "missing_exceptions": None -} - - @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestArrayWithFSStore(TestArray): - @staticmethod - def create_array(read_only=False, **kwargs): + compressor = Blosc() + dimension_separator: Literal[".", "/"] = "." + + def create_store(self): path = mkdtemp() atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', ".") - store = FSStore(path, key_separator=key_separator, auto_mkdir=True, **fsspec_mapper_kwargs) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Blosc()) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + key_separator = self.dimension_separator + store = FSStore(path, + key_separator=key_separator, + auto_mkdir=True, + check=True, + create=True, + missing_exceptions=None) + return store def expected(self): return [ @@ -2556,21 +2413,23 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestArrayWithFSStoreFromFilesystem(TestArray): - @staticmethod - def create_array(read_only=False, **kwargs): + compressor = Blosc() + dimension_separator = "." 
+ + def create_store(self): from fsspec.implementations.local import LocalFileSystem + fs = LocalFileSystem(auto_mkdir=True) path = mkdtemp() atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', ".") - store = FSStore(path, fs=fs, key_separator=key_separator, **fsspec_mapper_kwargs) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Blosc()) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + key_separator = self.dimension_separator + store = FSStore(path, + fs=fs, + key_separator=key_separator, + check=True, + create=True, + missing_exceptions=None) + return store def expected(self): return [ @@ -2584,24 +2443,14 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestArrayWithFSStorePartialRead(TestArray): - @staticmethod - def create_array(read_only=False, **kwargs): + compressor = Blosc(blocksize=256) + partial_decompress = True + + def create_store(self): path = mkdtemp() atexit.register(shutil.rmtree, path) store = FSStore(path) - cache_metadata = kwargs.pop("cache_metadata", True) - cache_attrs = kwargs.pop("cache_attrs", True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault("compressor", Blosc(blocksize=256)) - init_array(store, **kwargs) - return Array( - store, - read_only=read_only, - cache_metadata=cache_metadata, - cache_attrs=cache_attrs, - partial_decompress=True, - write_empty_chunks=write_empty_chunks - ) + return store def expected(self): return [ @@ -2640,21 +2489,9 @@ def test_read_from_all_blocks(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -class TestArrayWithFSStoreNested(TestArray): - - @staticmethod - def create_array(read_only=False, **kwargs): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', "/") - store = FSStore(path, key_separator=key_separator, auto_mkdir=True) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Blosc()) - init_array(store, **kwargs) - return Array(store, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) +class TestArrayWithFSStoreNested(TestArrayWithFSStore): + compressor = Blosc() + dimension_separator = "/" def expected(self): return [ @@ -2667,26 +2504,10 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -class TestArrayWithFSStoreNestedPartialRead(TestArray): - @staticmethod - def create_array(read_only=False, **kwargs): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', "/") - store = FSStore(path, key_separator=key_separator, auto_mkdir=True) - cache_metadata = kwargs.pop("cache_metadata", True) - cache_attrs = kwargs.pop("cache_attrs", True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault("compressor", Blosc()) - init_array(store, **kwargs) - return Array( - store, - read_only=read_only, - cache_metadata=cache_metadata, - cache_attrs=cache_attrs, - partial_decompress=True, - write_empty_chunks=write_empty_chunks - ) +class 
TestArrayWithFSStoreNestedPartialRead(TestArrayWithFSStore): + compressor = Blosc() + dimension_separator = "/" + partial_decompress = True def expected(self): return [ @@ -2730,10 +2551,12 @@ def test_read_from_all_blocks(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayV3(TestArray): - version = 3 root = meta_root - KVStoreClass = KVStoreV3 + path = "arr1" + + def create_store(self): + return KVStoreV3(dict()) def expected(self): # tests for array without path will not be run for v3 stores @@ -2750,25 +2573,11 @@ def expected(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithPathV3(TestArrayWithPath): - - version = 3 - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - store = KVStoreV3(dict()) - kwargs.setdefault('compressor', Zlib(level=1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, - write_empty_chunks=write_empty_chunks) +class TestArrayWithPathV3(TestArrayV3): def test_array_init(self): - store = KVStoreV3(dict()) + store = self.create_store() # can initialize an array without a path init_array(store, shape=100, chunks=10, dtype=" BaseStore: path = mkdtemp() atexit.register(shutil.rmtree, path) - store = DirectoryStoreV3(path) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, - cache_metadata=cache_metadata, cache_attrs=cache_attrs, - write_empty_chunks=write_empty_chunks) + return DirectoryStoreV3(path) def test_nbytes_stored(self): # dict as store @@ -2960,87 +2751,52 @@ def test_nbytes_stored(self): @skip_test_env_var("ZARR_TEST_ABS") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithABSStoreV3(TestArrayWithABSStore, TestArrayWithPathV3): - - @staticmethod - def absstore(): +class TestArrayWithABSStoreV3(TestArrayV3): + def create_store(self) -> ABSStoreV3: client = abs_container() store = ABSStoreV3(client=client) store.rmdir() return store - def create_array(self, array_path='arr1', read_only=False, **kwargs): - store = self.absstore() - kwargs.setdefault('compressor', Zlib(1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) - - # TODO: TestArrayWithN5StoreV3 # class TestArrayWithN5StoreV3(TestArrayWithDirectoryStoreV3): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithDBMStoreV3(TestArrayWithDBMStore, TestArrayWithPathV3): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - path = mktemp(suffix='.anydbm') - atexit.register(atexit_rmglob, path + '*') - store = DBMStoreV3(path, flag='n') - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - 
write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_attrs=cache_attrs, - cache_metadata=cache_metadata, write_empty_chunks=write_empty_chunks) +class TestArrayWithDBMStoreV3(TestArrayV3): + def create_store(self) -> DBMStoreV3: + path = mktemp(suffix=".anydbm") + atexit.register(atexit_rmglob, path + "*") + store = DBMStoreV3(path, flag="n") + return store def test_nbytes_stored(self): pass # not implemented @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithDBMStoreV3BerkeleyDB(TestArrayWithDBMStoreBerkeleyDB, TestArrayWithPathV3): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayWithDBMStoreV3BerkeleyDB(TestArrayV3): + def create_store(self) -> DBMStoreV3: bsddb3 = pytest.importorskip("bsddb3") - path = mktemp(suffix='.dbm') + path = mktemp(suffix=".dbm") atexit.register(os.remove, path) - store = DBMStoreV3(path, flag='n', open=bsddb3.btopen) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + store = DBMStoreV3(path, flag="n", open=bsddb3.btopen) + return store def test_nbytes_stored(self): pass # not implemented @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithLMDBStoreV3(TestArrayWithLMDBStore, TestArrayWithPathV3): +class TestArrayWithLMDBStoreV3(TestArrayV3): + lmdb_buffers = True - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): + def create_store(self) -> LMDBStoreV3: pytest.importorskip("lmdb") - path = mktemp(suffix='.lmdb') + path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) - store = LMDBStoreV3(path, buffers=True) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + store = LMDBStoreV3(path, buffers=self.lmdb_buffers) + return store def test_store_has_bytes_values(self): pass # returns values as memoryviews/buffers instead of bytes @@ -3050,42 +2806,21 @@ def test_nbytes_stored(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithLMDBStoreV3NoBuffers(TestArrayWithLMDBStoreNoBuffers, TestArrayWithPathV3): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - pytest.importorskip("lmdb") - path = mktemp(suffix='.lmdb') - atexit.register(atexit_rmtree, path) - store = LMDBStoreV3(path, buffers=False) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, 
write_empty_chunks=write_empty_chunks) +class TestArrayWithLMDBStoreV3NoBuffers(TestArrayWithLMDBStoreV3): + lmdb_buffers = False def test_nbytes_stored(self): pass # not implemented @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithSQLiteStoreV3(TestArrayWithPathV3, TestArrayWithSQLiteStore): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayWithSQLiteStoreV3(TestArrayV3): + def create_store(self): pytest.importorskip("sqlite3") - path = mktemp(suffix='.db') + path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStoreV3(path) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Zlib(1)) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return store def test_nbytes_stored(self): pass # not implemented @@ -3142,18 +2877,10 @@ def __contains__(self, item): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithCustomMappingV3(TestArrayWithPathV3, TestArrayWithCustomMapping): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayWithCustomMappingV3(TestArrayV3): + def create_store(self): store = CustomMappingV3() - kwargs.setdefault('compressor', Zlib(1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return store def test_nbytes_stored(self): z = self.create_array(shape=1000, chunks=100) @@ -3171,18 +2898,10 @@ def test_len(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayNoCacheV3(TestArrayWithPathV3, TestArrayNoCache): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayNoCacheV3(TestArrayWithPathV3): + def create_store(self): store = KVStoreV3(dict()) - kwargs.setdefault('compressor', Zlib(level=1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return store def test_object_arrays_danger(self): # skip this one as it only works if metadata are cached @@ -3190,18 +2909,10 @@ def test_object_arrays_danger(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithStoreCacheV3(TestArrayWithPathV3, TestArrayWithStoreCache): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayWithStoreCacheV3(TestArrayV3): + def create_store(self): store = LRUStoreCacheV3(dict(), max_size=None) - kwargs.setdefault('compressor', Zlib(level=1)) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - init_array(store, 
path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return store def test_store_has_bytes_values(self): # skip as the cache has no control over how the store provides values @@ -3210,25 +2921,22 @@ def test_store_has_bytes_values(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3(TestArrayWithPathV3, TestArrayWithFSStore): - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayWithFSStoreV3(TestArrayV3): + compressor = Blosc() + + def create_store(self): path = mkdtemp() atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', ".") + key_separator = self.dimension_separator store = FSStoreV3( path, key_separator=key_separator, auto_mkdir=True, - **fsspec_mapper_kwargs + create=True, + check=True, + missing_exceptions=None ) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Blosc()) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + return store def expected(self): return [ @@ -3242,22 +2950,21 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3FromFilesystem(TestArrayWithPathV3, TestArrayWithFSStore): - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): +class TestArrayWithFSStoreV3FromFilesystem(TestArrayWithFSStoreV3): + def create_store(self): from fsspec.implementations.local import LocalFileSystem + fs = LocalFileSystem(auto_mkdir=True) path = mkdtemp() atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', ".") - store = FSStoreV3(path, fs=fs, key_separator=key_separator, **fsspec_mapper_kwargs) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Blosc()) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) + key_separator = self.dimension_separator + store = FSStoreV3(path, + fs=fs, + key_separator=key_separator, + create=True, + check=True, + missing_exceptions=None) + return store def expected(self): return [ @@ -3271,27 +2978,8 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3PartialRead(TestArrayWithPathV3, TestArrayWithFSStorePartialRead): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - store = FSStoreV3(path) - cache_metadata = kwargs.pop("cache_metadata", True) - cache_attrs = kwargs.pop("cache_attrs", True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault("compressor", Blosc()) - init_array(store, path=array_path, **kwargs) - 
return Array( - store, - path=array_path, - read_only=read_only, - cache_metadata=cache_metadata, - cache_attrs=cache_attrs, - partial_decompress=True, - write_empty_chunks=write_empty_chunks, - ) +class TestArrayWithFSStoreV3PartialRead(TestArrayWithFSStoreV3): + partial_decompress = True def expected(self): return [ @@ -3306,33 +2994,16 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") @pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") -class TestArrayWithFSStoreV3PartialReadUncompressedSharded( - TestArrayWithPathV3, TestArrayWithFSStorePartialRead -): +class TestArrayWithFSStoreV3PartialReadUncompressedSharded(TestArrayWithFSStoreV3): + partial_decompress = True + compressor = None - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - store = FSStoreV3(path) - cache_metadata = kwargs.pop("cache_metadata", True) - cache_attrs = kwargs.pop("cache_attrs", True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', None) - num_dims = 1 if isinstance(kwargs["shape"], int) else len(kwargs["shape"]) + def create_storage_transformers(self, shape) -> Tuple[Any]: + num_dims = 1 if isinstance(shape, int) else len(shape) sharding_transformer = ShardingStorageTransformer( "indexed", chunks_per_shard=(2, ) * num_dims ) - init_array(store, path=array_path, storage_transformers=[sharding_transformer], **kwargs) - return Array( - store, - path=array_path, - read_only=read_only, - cache_metadata=cache_metadata, - cache_attrs=cache_attrs, - partial_decompress=True, - write_empty_chunks=write_empty_chunks, - ) + return (sharding_transformer,) def test_nbytes_stored(self): z = self.create_array(shape=1000, chunks=100) @@ -3359,21 +3030,8 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3Nested(TestArrayWithPathV3, TestArrayWithFSStoreNested): - - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', "/") - store = FSStoreV3(path, key_separator=key_separator, auto_mkdir=True) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', Blosc()) - init_array(store, path=array_path, **kwargs) - return Array(store, path=array_path, read_only=read_only, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) +class TestArrayWithFSStoreV3Nested(TestArrayWithFSStoreV3): + dimension_separator = "/" def expected(self): return [ @@ -3387,28 +3045,8 @@ def expected(self): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3NestedPartialRead(TestArrayWithPathV3, - TestArrayWithFSStoreNestedPartialRead): - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - key_separator = kwargs.pop('key_separator', "/") - store = FSStoreV3(path, key_separator=key_separator, auto_mkdir=True) - cache_metadata = kwargs.pop("cache_metadata", True) - 
cache_attrs = kwargs.pop("cache_attrs", True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault("compressor", Blosc()) - init_array(store, path=array_path, **kwargs) - return Array( - store, - path=array_path, - read_only=read_only, - cache_metadata=cache_metadata, - cache_attrs=cache_attrs, - partial_decompress=True, - write_empty_chunks=write_empty_chunks, - ) +class TestArrayWithFSStoreV3NestedPartialRead(TestArrayWithFSStoreV3): + dimension_separator = "/" def expected(self): return [ @@ -3423,22 +3061,10 @@ def expected(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithStorageTransformersV3(TestArrayWithChunkStoreV3): - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - store = KVStoreV3(dict()) - # separate chunk store - chunk_store = KVStoreV3(dict()) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - dummy_storage_transformer = DummyStorageTransfomer( - "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT + def create_storage_transformers(self, shape) -> Tuple[Any]: + return ( + DummyStorageTransfomer("dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT), ) - init_array(store, path=array_path, chunk_store=chunk_store, - storage_transformers=[dummy_storage_transformer], **kwargs) - return Array(store, path=array_path, read_only=read_only, - chunk_store=chunk_store, cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def expected(self): return [ @@ -3452,23 +3078,14 @@ def expected(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") @pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") -class TestArrayWithShardingStorageTransformerV3(TestArrayWithPathV3): +class TestArrayWithShardingStorageTransformerV3(TestArrayV3): + compressor = None - @staticmethod - def create_array(array_path='arr1', read_only=False, **kwargs): - store = KVStoreV3(dict()) - cache_metadata = kwargs.pop('cache_metadata', True) - cache_attrs = kwargs.pop('cache_attrs', True) - write_empty_chunks = kwargs.pop('write_empty_chunks', True) - kwargs.setdefault('compressor', None) - num_dims = 1 if isinstance(kwargs["shape"], int) else len(kwargs["shape"]) - sharding_transformer = ShardingStorageTransformer( - "indexed", chunks_per_shard=(2, ) * num_dims + def create_storage_transformers(self, shape) -> Tuple[Any]: + num_dims = (1 if isinstance(shape, int) else len(shape)) + return ( + ShardingStorageTransformer("indexed", chunks_per_shard=(2, ) * num_dims), ) - init_array(store, path=array_path, storage_transformers=[sharding_transformer], **kwargs) - return Array(store, path=array_path, read_only=read_only, - cache_metadata=cache_metadata, - cache_attrs=cache_attrs, write_empty_chunks=write_empty_chunks) def test_nbytes_stored(self): z = self.create_array(shape=1000, chunks=100) diff --git a/zarr/util.py b/zarr/util.py index 6ba20b96c2..efbb86e4c0 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -15,7 +15,8 @@ Tuple, TypeVar, Union, - Iterable + Iterable, + cast ) import numpy as np @@ -74,7 +75,7 @@ def json_loads(s: Union[bytes, str]) -> Dict[str, Any]: return json.loads(ensure_text(s, 'utf-8')) -def normalize_shape(shape) -> Tuple[int]: +def normalize_shape(shape: Union[int, Tuple[int, ...], None]) -> Tuple[int, ...]: """Convenience function to normalize the `shape` argument.""" if shape is 
None: @@ -85,6 +86,7 @@ def normalize_shape(shape) -> Tuple[int]: shape = (int(shape),) # normalize + shape = cast(Tuple[int, ...], shape) shape = tuple(int(s) for s in shape) return shape From 8fc3b4b7cdd85117ed83a88cb7f30d5691eae1a1 Mon Sep 17 00:00:00 2001 From: Lars Date: Tue, 18 Jul 2023 18:02:36 +0200 Subject: [PATCH 0343/1078] Fix typo (#1468) Just a small error in the spec. --- docs/spec/v2.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index 45e6afb320..4fcd6ded76 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -81,7 +81,7 @@ filters The following keys MAY be present within the object: dimension_separator - If present, either the string ``"."`` or ``"/""`` defining the separator placed + If present, either the string ``"."`` or ``"/"`` defining the separator placed between the dimensions of a chunk. If the value is not set, then the default MUST be assumed to be ``"."``, leading to chunk keys of the form "0.0". Arrays defined with ``"/"`` as the dimension separator can be considered to have From a5d8a958a6821c97739a1547149eb1eede299686 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Thu, 20 Jul 2023 11:41:21 -0400 Subject: [PATCH 0344/1078] chore: expose codecov token (#1473) --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index f8fe9ab379..6a32793df3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -80,7 +80,7 @@ jobs: pytest --cov=zarr --cov-config=pyproject.toml --doctest-plus --cov-report xml --cov=./ --timeout=300 - uses: codecov/codecov-action@v3 with: - #token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos + token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos #files: ./coverage1.xml,./coverage2.xml # optional #flags: unittests # optional #name: codecov-umbrella # optional From 9623d5c26a5cefbe92f9ec8af44ee53b75d9dc38 Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Thu, 20 Jul 2023 12:28:28 -0400 Subject: [PATCH 0345/1078] updated release notes for 2.16.0 (#1471) --- docs/release.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 31d4ba63c4..c09667e78d 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,11 @@ Release notes Unreleased ---------- +.. _release_2.16.0: + +2.16.0 +------ + Enhancements ~~~~~~~~~~~~ @@ -27,8 +32,8 @@ Enhancements * Add ``__contains__`` method to ``KVStore``. By :user:`Christoph Gohlke ` :issue:`1454`. - * **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. - By :user:`Altay Sansal ` :issue:`1428` +* **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. + By :user:`Altay Sansal ` :issue:`1428` Maintenance ~~~~~~~~~~~ From 55ccb3a855d52c5cb2ee9b62ecfc82571d754763 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Thu, 20 Jul 2023 11:27:31 -0700 Subject: [PATCH 0346/1078] Add "Generate release notes" to release process (#1476) cc: @rabernat Starting a handful of versions ago, GitHub added a "Generate release notes" button. This generates links to all PRs as well as to new contributors. I've updated the 2.16.0 release, but I'd be in favor of always clicking this button when making a release.
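Aside (not part of the patch series): the one-character spec fix in PATCH 0343 above is easiest to see in terms of the chunk keys it governs. Below is a minimal sketch of the two key layouts, assuming zarr 2.x behaviour; the plain dict store and variable names are illustrative only:

import zarr

for sep in (".", "/"):
    store = {}  # a plain mapping; zarr wraps it, so keys land here verbatim
    z = zarr.zeros((4, 4), chunks=(2, 2), store=store, dimension_separator=sep)
    z[:] = 1  # write data so every chunk key is materialised
    print(sep, sorted(k for k in store if not k.startswith(".")))

# Expected: "." -> ['0.0', '0.1', '1.0', '1.1']  (flat layout, the default)
#           "/" -> ['0/0', '0/1', '1/0', '1/1']  (nested layout)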
--- docs/contributing.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index dc6beb0094..e590d15d8f 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -379,6 +379,8 @@ Set the description of the release to:: replacing the correct version numbers. For pre-release versions, the URL should omit the pre-release suffix, e.g. "a1" or "rc1". +Click on "Generate release notes" to auto-fill the description. + After creating the release, the documentation will be built on https://readthedocs.io. Full releases will be available under `/stable `_ while From f3ba0b19adc74a281489a9df892041a88d3cb7fc Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 20 Jul 2023 15:19:54 -0700 Subject: [PATCH 0347/1078] Require `setuptools_scm` version `1.5.4`+ (#1477) --- docs/release.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index c09667e78d..f80d4a8c75 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,12 @@ Release notes Unreleased ---------- +Maintenance +~~~~~~~~~~~ + +* Require ``setuptools_scm`` version ``1.5.4``\+ By :user:`John A. Kirkham ` :issue:`1477`. + .. _release_2.16.0: 2.16.0 diff --git a/pyproject.toml b/pyproject.toml index 4b293b90e4..1b880cf6fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64.0.0", "setuptools-scm"] +requires = ["setuptools>=64.0.0", "setuptools-scm>1.5.4"] build-backend = "setuptools.build_meta" From 6cb3cf19bc98792e0e3a6cfc7ae626551e7ea8a1 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 27 Jul 2023 22:37:25 +0200 Subject: [PATCH 0348/1078] Delete trailing spaces (#1424) ...and add missing trailing newline for POSIX compliance. --- .git-blame-ignore-revs | 2 +- docs/_static/custom.css | 2 +- docs/acknowledgments.rst | 2 +- docs/contributing.rst | 4 ++-- docs/index.rst | 4 ++-- docs/release.rst | 2 +- pyproject.toml | 2 +- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 05fe3dbd8c..53bf4633f0 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,2 +1,2 @@ # lint codebase with black and ruff -4e348d6b80c96da461fd866576c971b8a659ba15 \ No newline at end of file +4e348d6b80c96da461fd866576c971b8a659ba15 diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 487addfbbd..87dd70e347 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -120,4 +120,4 @@ html[data-theme=dark] h1 { html[data-theme=dark] h3 { color: #0a6774; -} \ No newline at end of file +} diff --git a/docs/acknowledgments.rst b/docs/acknowledgments.rst index 36cd1f5646..4fce1e8ae4 100644 --- a/docs/acknowledgments.rst +++ b/docs/acknowledgments.rst @@ -73,4 +73,4 @@ documentation, code reviews, comments and/or ideas: * :user:`gsakkis` * :user:`hailiangzhang ` * :user:`pmav99 ` -* :user:`sbalmer ` \ No newline at end of file +* :user:`sbalmer ` diff --git a/docs/contributing.rst b/docs/contributing.rst index e590d15d8f..46fc39d960 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -213,9 +213,9 @@ Test coverage Zarr maintains 100% test coverage under the latest Python stable release (currently Python 3.8). Both unit tests and docstring doctests are included when computing coverage.
Running:: - + $ python -m pytest -v --cov=zarr --cov-config=pyproject.toml zarr - + will automatically run the test suite with coverage and produce a coverage report. This should be 100% before code can be accepted into the main code base. diff --git a/docs/index.rst b/docs/index.rst index 50060d10cc..97f5889ca5 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -20,7 +20,7 @@ Zarr-Python **Version**: |version| **Download documentation**: `Zipped HTML `_ - + **Useful links**: `Installation `_ | `Source Repository `_ | @@ -101,4 +101,4 @@ Zarr is a file storage format for chunked, compressed, N-dimensional arrays base :color: dark :click-parent: - To the contributor's guide \ No newline at end of file + To the contributor's guide diff --git a/docs/release.rst b/docs/release.rst index f80d4a8c75..b9a9417059 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -35,7 +35,7 @@ Enhancements * Allow for partial codec specification in V3 array metadata. By :user:`Joe Hamman ` :issue:`1443`. -* Add ``__contains__`` method to ``KVStore``. +* Add ``__contains__`` method to ``KVStore``. By :user:`Christoph Gohlke ` :issue:`1454`. * **Block Indexing**: Implemented blockwise (chunk blocks) indexing to ``zarr.Array``. diff --git a/pyproject.toml b/pyproject.toml index 1b880cf6fe..7d8d275c0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -116,7 +116,7 @@ follow_imports = "silent" [tool.pytest.ini_options] doctest_optionflags = [ "NORMALIZE_WHITESPACE", - "ELLIPSIS", + "ELLIPSIS", "IGNORE_EXCEPTION_DETAIL", ] addopts = [ From 6ed4d785db71d7087b276f0f6dc5b3f7cfedd7f1 Mon Sep 17 00:00:00 2001 From: Philipp Hanslovsky Date: Thu, 27 Jul 2023 17:17:09 -0400 Subject: [PATCH 0349/1078] Ensure that chunks is tuple of ints upon array creation (#1470) * Add failing test for creating group with float chunks * Fix flake8 errors * Cast chunks to tuple[int, ...] before returning * Use decorator to cast to int tuple * Fix mypy type issues * Fix black formatting * Add docstring to _as_int_tuple * Document changes in docs/release.rst * Revert to casting to tuple of ints inside normalize_chunks After discussion in #1470, this was selected as the best option --- docs/release.rst | 3 +++ zarr/tests/test_creation.py | 24 ++++++++++++++++++++++++ zarr/util.py | 3 ++- 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index b9a9417059..aa622723e8 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -50,6 +50,9 @@ Maintenance * Style the codebase with ``ruff`` and ``black``. By :user:`Davis Bennett` :issue:`1459` +* Ensure that chunks is tuple of ints upon array creation. + By :user:`Philipp Hanslovsky` :issue:`1461` + .. 
_release_2.15.0: 2.15.0 diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index 61710cc221..b44c6379fd 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -757,3 +757,27 @@ def test_create_with_storage_transformers(at_root): z = create(1000000000, chunks=True, storage_transformers=[transformer], **kwargs) assert isinstance(z.chunk_store, DummyStorageTransfomer) assert z.chunk_store.test_value == DummyStorageTransfomer.TEST_CONSTANT + + +@pytest.mark.parametrize( + ("init_shape", "init_chunks", "shape", "chunks"), + ( + ((1,), (1,), (1,), (1,)), + ((1.0,), (1.0,), (1,), (1,)), + ((1.0,), False, (1,), (1,)), + ((1.0,), True, (1,), (1,)), + ((1.0,), None, (1,), (1,)), + ), +) +def test_shape_chunk_ints(init_shape, init_chunks, shape, chunks): + g = open_group() + array = g.create_dataset("ds", shape=init_shape, chunks=init_chunks, dtype=np.uint8) + + assert all( + isinstance(s, int) for s in array.shape + ), f"Expected shape to be all ints but found {array.shape=}." + assert all( + isinstance(c, int) for c in array.chunks + ), f"Expected chunks to be all ints but found {array.chunks=}." + assert array.shape == shape, f"Expected {shape=} but found {array.shape=}." + assert array.chunks == chunks, f"Expected {chunks=} but found {array.chunks=}." diff --git a/zarr/util.py b/zarr/util.py index acd461cfd5..ea0dd9fcec 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -175,7 +175,8 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl if -1 in chunks or None in chunks: chunks = tuple(s if c == -1 or c is None else int(c) for s, c in zip(shape, chunks)) - return tuple(chunks) + chunks = tuple(int(c) for c in chunks) + return chunks def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]: From 0cedd9832111c2af0c46fda46b10bec385ea74ab Mon Sep 17 00:00:00 2001 From: jakirkham Date: Fri, 11 Aug 2023 02:53:34 -0700 Subject: [PATCH 0350/1078] Add `docs` requirements to `pyproject.toml` (#1494) * Add `docs` requirements to `pyproject.toml` * Add release note * Exclude `numpy` version `1.21.0` Move the RTD exclusion to the `numpy` dependency directly. * Require `numcodecs[msgpack]` for `docs` * Update references for installing doc dependencies --- .pyup.yml | 3 --- .readthedocs.yaml | 3 ++- docs/contributing.rst | 4 ++-- docs/release.rst | 3 +++ pyproject.toml | 12 +++++++++++- requirements_rtfd.txt | 12 ------------ 6 files changed, 18 insertions(+), 19 deletions(-) delete mode 100644 requirements_rtfd.txt diff --git a/.pyup.yml b/.pyup.yml index 04937ed0b0..a70df4e3fb 100644 --- a/.pyup.yml +++ b/.pyup.yml @@ -13,6 +13,3 @@ requirements: - requirements_dev_optional.txt: pin: True update: all - - requirements_rtfd.txt: - pin: False - update: False diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 42fc08bca2..2124f77271 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -10,6 +10,7 @@ sphinx: python: install: - - requirements: requirements_rtfd.txt - method: pip path: . + extra_requirements: + - docs diff --git a/docs/contributing.rst b/docs/contributing.rst index 46fc39d960..0420535093 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -92,8 +92,8 @@ the repository, you can do something like the following:: $ mkdir -p ~/pyenv/zarr-dev $ python -m venv ~/pyenv/zarr-dev $ source ~/pyenv/zarr-dev/bin/activate - $ pip install -r requirements_dev_minimal.txt -r requirements_dev_numpy.txt -r requirements_rtfd.txt - $ pip install -e . 
+ $ pip install -r requirements_dev_minimal.txt -r requirements_dev_numpy.txt + $ pip install -e .[docs] To verify that your development environment is working, you can run the unit tests:: diff --git a/docs/release.rst b/docs/release.rst index aa622723e8..cf1400d3f8 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -24,6 +24,9 @@ Maintenance * Require ``setuptools_scm`` version ``1.5.4``\+ By :user:`John A. Kirkham ` :issue:`1477`. +* Add ``docs`` requirements to ``pyproject.toml`` + By :user:`John A. Kirkham ` :issue:`1494`. + .. _release_2.16.0: 2.16.0 diff --git a/pyproject.toml b/pyproject.toml index 7d8d275c0e..292bfddded 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ maintainers = [ requires-python = ">=3.8" dependencies = [ 'asciitree', - 'numpy>=1.20', + 'numpy>=1.20,!=1.21.0', 'fasteners', 'numcodecs>=0.10.0', ] @@ -43,6 +43,16 @@ jupyter = [ 'ipytree>=0.2.2', 'ipywidgets>=8.0.0', ] +docs = [ + 'sphinx', + 'sphinx_design', + 'sphinx-issues', + 'sphinx-copybutton', + 'sphinx-rtd-theme', + 'pydata-sphinx-theme', + 'numpydoc', + 'numcodecs[msgpack]', +] [project.urls] "Bug Tracker" = "https://github.com/zarr-developers/zarr-python/issues" diff --git a/requirements_rtfd.txt b/requirements_rtfd.txt deleted file mode 100644 index 5d7fec369a..0000000000 --- a/requirements_rtfd.txt +++ /dev/null @@ -1,12 +0,0 @@ -asciitree -setuptools -setuptools_scm -sphinx -sphinx_design -sphinx-issues -sphinx-copybutton -sphinx-rtd-theme -pydata-sphinx-theme -numpydoc -numpy!=1.21.0 -msgpack-python==0.5.6 From 12af7f13981ad6d7497e0bb32d3175758e7a109e Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Fri, 11 Aug 2023 16:23:53 +0200 Subject: [PATCH 0351/1078] `zarr.group` now accept the `meta_array` argument (#1489) * group() now takes the meta_array * added tests --- zarr/hierarchy.py | 9 ++++++++- zarr/tests/test_meta_array.py | 7 ++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index c7cc5c6fe2..3361969f08 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -1360,7 +1360,8 @@ def group( synchronizer=None, path=None, *, - zarr_version=None + zarr_version=None, + meta_array=None ): """Create a group. @@ -1382,6 +1383,11 @@ def group( Array synchronizer. path : string, optional Group path within store. + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. 
versionadded:: 2.16.1 Returns ------- @@ -1432,6 +1438,7 @@ def group( synchronizer=synchronizer, path=path, zarr_version=zarr_version, + meta_array=meta_array, ) diff --git a/zarr/tests/test_meta_array.py b/zarr/tests/test_meta_array.py index 39394bd690..2545c6d624 100644 --- a/zarr/tests/test_meta_array.py +++ b/zarr/tests/test_meta_array.py @@ -9,7 +9,7 @@ import zarr.codecs from zarr.core import Array from zarr.creation import array, empty, full, ones, open_array, zeros -from zarr.hierarchy import open_group +from zarr.hierarchy import open_group, group from zarr.storage import DirectoryStore, MemoryStore, Store, ZipStore @@ -234,12 +234,13 @@ def test_full(module, compressor): assert np.all(np.isnan(z[:])) +@pytest.mark.parametrize("group_create_function", [group, open_group]) @pytest.mark.parametrize("module, compressor", param_module_and_compressor) @pytest.mark.parametrize("store_type", [None, DirectoryStore, MemoryStore, ZipStore]) -def test_group(tmp_path, module, compressor, store_type): +def test_group(tmp_path, group_create_function, module, compressor, store_type): xp = ensure_module(module) store = init_store(tmp_path, store_type) - g = open_group(store, meta_array=xp.empty(())) + g = group_create_function(store, meta_array=xp.empty(())) g.ones("data", shape=(10, 11), dtype=int, compressor=compressor) a = g["data"] assert a.shape == (10, 11) From f542fca7d0d42ee050e9a49d57ad0f5346f62de3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 08:44:12 +0200 Subject: [PATCH 0352/1078] Bump pypa/gh-action-pypi-publish from 1.8.7 to 1.8.10 (#1498) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.7 to 1.8.10. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.7...v1.8.10) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index a00096bb18..8956cae314 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.7 + - uses: pypa/gh-action-pypi-publish@v1.8.10 with: user: __token__ password: ${{ secrets.pypi_password }} From 7017ce16954f81a209019041c56ad5ca1e568f09 Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Wed, 16 Aug 2023 21:45:13 +0200 Subject: [PATCH 0353/1078] `LRUStoreCache`: cache "contains" by contains checks (#1499) * cache "contains" by contains checks * updated the test_cache_keys counts * release notes --- docs/release.rst | 3 +++ zarr/_storage/v3.py | 2 +- zarr/storage.py | 10 +++++----- zarr/tests/test_storage.py | 15 +++++++++------ 4 files changed, 18 insertions(+), 12 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index cf1400d3f8..188edd625f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -27,6 +27,9 @@ Maintenance * Add ``docs`` requirements to ``pyproject.toml`` By :user:`John A. Kirkham ` :issue:`1494`. +* Fixed caching issue in ``LRUStoreCache``. + By :user:`Mads R. B. Kristensen ` :issue:`1499`. + .. 
_release_2.16.0: 2.16.0 diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 1a50265c11..00dc085dac 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -509,7 +509,7 @@ def __init__(self, store, max_size: int): self._max_size = max_size self._current_size = 0 self._keys_cache = None - self._contains_cache = None + self._contains_cache = {} self._listdir_cache: Dict[Path, Any] = dict() self._values_cache: Dict[Path, Any] = OrderedDict() self._mutex = Lock() diff --git a/zarr/storage.py b/zarr/storage.py index 4f7b9905f1..b36f804ebd 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -2393,7 +2393,7 @@ def __init__(self, store: StoreLike, max_size: int): self._max_size = max_size self._current_size = 0 self._keys_cache = None - self._contains_cache = None + self._contains_cache: Dict[Any, Any] = {} self._listdir_cache: Dict[Path, Any] = dict() self._values_cache: Dict[Path, Any] = OrderedDict() self._mutex = Lock() @@ -2434,9 +2434,9 @@ def __iter__(self): def __contains__(self, key): with self._mutex: - if self._contains_cache is None: - self._contains_cache = set(self._keys()) - return key in self._contains_cache + if key not in self._contains_cache: + self._contains_cache[key] = key in self._store + return self._contains_cache[key] def clear(self): self._store.clear() @@ -2506,7 +2506,7 @@ def invalidate_keys(self): def _invalidate_keys(self): self._keys_cache = None - self._contains_cache = None + self._contains_cache.clear() self._listdir_cache.clear() def _invalidate_value(self, key): diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 9557000472..ca6a6c1a98 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -2196,7 +2196,10 @@ def test_cache_keys(self): assert keys == sorted(cache.keys()) assert 1 == store.counter["keys"] assert foo_key in cache - assert 0 == store.counter["__contains__", foo_key] + assert 1 == store.counter["__contains__", foo_key] + # the next check for `foo_key` is cached + assert foo_key in cache + assert 1 == store.counter["__contains__", foo_key] assert keys == sorted(cache) assert 0 == store.counter["__iter__"] assert 1 == store.counter["keys"] @@ -2215,23 +2218,23 @@ def test_cache_keys(self): keys = sorted(cache.keys()) assert keys == [bar_key, baz_key, foo_key] assert 3 == store.counter["keys"] - assert 0 == store.counter["__contains__", foo_key] + assert 1 == store.counter["__contains__", foo_key] assert 0 == store.counter["__iter__"] cache.invalidate_keys() keys = sorted(cache) assert keys == [bar_key, baz_key, foo_key] assert 4 == store.counter["keys"] - assert 0 == store.counter["__contains__", foo_key] + assert 1 == store.counter["__contains__", foo_key] assert 0 == store.counter["__iter__"] cache.invalidate_keys() assert foo_key in cache - assert 5 == store.counter["keys"] - assert 0 == store.counter["__contains__", foo_key] + assert 4 == store.counter["keys"] + assert 2 == store.counter["__contains__", foo_key] assert 0 == store.counter["__iter__"] # check these would get counted if called directly assert foo_key in store - assert 1 == store.counter["__contains__", foo_key] + assert 3 == store.counter["__contains__", foo_key] assert keys == sorted(store) assert 1 == store.counter["__iter__"] From cb3908b998f692c1ff8a071034cba0554c775e4e Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Fri, 18 Aug 2023 10:27:53 -0400 Subject: [PATCH 0354/1078] prepare for 2.16.1 release (#1503) --- docs/release.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/release.rst 
b/docs/release.rst index 188edd625f..bf0dcd69e9 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,11 @@ Release notes Unreleased ---------- +.. _release_2.16.1: + +2.16.1 +------ + Maintenance ~~~~~~~~~~~ From d25110ebe34eaab7eb6a57fd2259c1831ca2c3d1 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Wed, 23 Aug 2023 12:18:55 -0700 Subject: [PATCH 0355/1078] Disable pre-commit.ci's PR autofixes (#1506) --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c46115342d..c5152b10b6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,7 @@ ci: autoupdate_commit_msg: "chore: update pre-commit hooks" autofix_commit_msg: "style: pre-commit fixes" + autofix_prs: false default_stages: [commit, push] default_language_version: python: python3 From 76ba69a21018822a5a0244c03af882a09293ff28 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Aug 2023 11:18:48 +0200 Subject: [PATCH 0356/1078] Bump redis from 4.5.5 to 4.6.0 (#1446) Bumps [redis](https://github.com/redis/redis-py) from 4.5.5 to 4.6.0. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.5.5...v4.6.0) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0398d8f494..f86eb8c8e3 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.0.7 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==4.5.5 +redis==4.6.0 types-redis types-setuptools pymongo==4.4.0 From 4944e66cd847a6ab5ec3a70c2b7bc0973f707bd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Aug 2023 09:09:20 +0200 Subject: [PATCH 0357/1078] Bump ipywidgets from 8.0.7 to 8.1.0 (#1510) Bumps [ipywidgets](https://github.com/jupyter-widgets/ipywidgets) from 8.0.7 to 8.1.0. - [Release notes](https://github.com/jupyter-widgets/ipywidgets/releases) - [Commits](https://github.com/jupyter-widgets/ipywidgets/compare/8.0.7...8.1.0) --- updated-dependencies: - dependency-name: ipywidgets dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f86eb8c8e3..baa2fbc1e9 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.4.1; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.0.7 +ipywidgets==8.1.0 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From 9dea8e34d1c6fa1b90a2e828e1f80bc1baf638b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Sep 2023 11:49:05 +0200 Subject: [PATCH 0358/1078] Bump actions/checkout from 3 to 4 (#1520) Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/releases.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index bebe1ee205..7013f1784f 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -38,7 +38,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 4de5aca273..63bc97d157 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Miniconda uses: conda-incubator/setup-miniconda@v2.2.0 with: diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 6a32793df3..cdf230bc7c 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -38,7 +38,7 @@ jobs: ports: - 27017:27017 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Miniconda diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 8956cae314..238c5e457d 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -11,7 +11,7 @@ jobs: fail-fast: false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true fetch-depth: 0 diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index b17eece058..3afa8c467e 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -18,7 +18,7 @@ jobs: matrix: python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: conda-incubator/setup-miniconda@v2.2.0 From 6ec746ef1242dd9fec26b128cc0b3455d28ad6f0 Mon Sep 17 00:00:00 2001 From: Jack Kelly Date: Wed, 20 Sep 2023 17:35:36 +0100 Subject: [PATCH 0359/1078] Tiny tweak: Extend copyright notice to 2023 (#1528) --- LICENSE.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE.txt b/LICENSE.txt index d672a4f670..850a0d8772 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2015-2022 Zarr Developers +Copyright (c) 2015-2023 Zarr Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal From 5a54c95e7438779f66e4fe2491e7a9238b6a43fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Oct 2023 10:24:40 -0700 Subject: [PATCH 0360/1078] Bump pytest from 7.3.2 to 7.4.0 (#1445) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.3.2 to 7.4.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.2...7.4.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index df1ca11677..4b1208598e 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==7.1.0 # test requirements -pytest==7.3.2 +pytest==7.4.0 From a9d516bb562da1360f7a2b7ae845046924180674 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Wed, 25 Oct 2023 22:04:25 +0100 Subject: [PATCH 0361/1078] Allow black to be run on any Python version (#1549) * Allow black to be run on any Python version * docs: add release notes * docs: whitespace --------- Co-authored-by: Davis Vann Bennett --- .pre-commit-config.yaml | 1 - docs/release.rst | 6 ++++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c5152b10b6..f22dc39832 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,6 @@ repos: rev: 22.12.0 hooks: - id: black - language_version: python3.8 - repo: https://github.com/codespell-project/codespell rev: v2.2.5 hooks: diff --git a/docs/release.rst b/docs/release.rst index bf0dcd69e9..edb5422fa0 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,12 @@ Release notes Unreleased ---------- +Maintenance +~~~~~~~~~~~ + +* Allow ``black`` code formatter to be run with any Python version. + By :user:`David Stansby ` :issue:`1549` + .. _release_2.16.1: 2.16.1 From 16c2684345a659e70a23152442ab77c0809ab9b0 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 27 Oct 2023 00:12:46 +0200 Subject: [PATCH 0362/1078] =?UTF-8?q?Unnecessary=20list=20literal=20?= =?UTF-8?q?=E2=86=92=20set=20literal=20(#1534)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Unnecessary list literal → set literal Why use a list literal to initialise a set? Just use a set literal. * Add release note for previous commit Signed-off-by: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> --------- Signed-off-by: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> --- docs/release.rst | 3 +++ zarr/tests/test_storage.py | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index edb5422fa0..12e1449a80 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,6 +21,9 @@ Unreleased Maintenance ~~~~~~~~~~~ +* Initialise some sets in tests with set literals instead of list literals. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1534`. + * Allow ``black`` code formatter to be run with any Python version. 
By :user:`David Stansby ` :issue:`1549` diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index ca6a6c1a98..e87716fa47 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1166,9 +1166,9 @@ def test_deep_ndim(self): if self.version == 2: assert set(store.listdir()) == {".zgroup", "bar"} else: - assert set(store.listdir()) == set(["data", "meta", "zarr.json"]) - assert set(store.listdir("meta/root/" + path)) == set(["bar", "bar.group.json"]) - assert set(store.listdir("data/root/" + path)) == set(["bar"]) + assert set(store.listdir()) == {"data", "meta", "zarr.json"} + assert set(store.listdir("meta/root/" + path)) == {"bar", "bar.group.json"} + assert set(store.listdir("data/root/" + path)) == {"bar"} assert foo["bar"]["baz"][(0, 0, 0)] == 1 def test_not_fsspec(self): From 5eb737b02836799a0544610b87ffbc3e4e69ec01 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Thu, 26 Oct 2023 22:13:29 +0000 Subject: [PATCH 0363/1078] Minor edits in docs (#1509) --- .gitignore | 2 ++ docs/conf.py | 2 +- docs/spec.rst | 2 +- docs/spec/v1.rst | 2 +- docs/spec/v2.rst | 2 +- docs/spec/v3.rst | 6 +++--- 6 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index 4f0d523785..a6a456636d 100644 --- a/.gitignore +++ b/.gitignore @@ -77,3 +77,5 @@ zarr/version.py #doesnotexist #test_sync* data/* + +.DS_Store diff --git a/docs/conf.py b/docs/conf.py index f85ecb7454..1ffaeddef4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -68,7 +68,7 @@ # General information about the project. project = "zarr" -copyright = "2022, Zarr Developers" +copyright = "2023, Zarr Developers" author = "Zarr Developers" version = zarr.__version__ diff --git a/docs/spec.rst b/docs/spec.rst index 3c06f3228d..8aca0bbd80 100644 --- a/docs/spec.rst +++ b/docs/spec.rst @@ -4,7 +4,7 @@ Specifications ============== .. toctree:: - :maxdepth: 3 + :maxdepth: 1 spec/v3 spec/v2 diff --git a/docs/spec/v1.rst b/docs/spec/v1.rst index 8584b24e6d..13f68ef36e 100644 --- a/docs/spec/v1.rst +++ b/docs/spec/v1.rst @@ -1,6 +1,6 @@ .. _spec_v1: -Zarr storage specification version 1 +Zarr Storage Specification Version 1 ==================================== This document provides a technical specification of the protocol and diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index 4fcd6ded76..c1e12e1218 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -1,6 +1,6 @@ .. _spec_v2: -Zarr storage specification version 2 +Zarr Storage Specification Version 2 ==================================== This document provides a technical specification of the protocol and format diff --git a/docs/spec/v3.rst b/docs/spec/v3.rst index a448fbc3a1..bd8852707b 100644 --- a/docs/spec/v3.rst +++ b/docs/spec/v3.rst @@ -1,7 +1,7 @@ .. _spec_v3: -Zarr storage specification version 3 (under development) -======================================================== +Zarr Storage Specification Version 3 +======================================================= -The v3 specification has been migrated to its own website, +The V3 Specification has been migrated to its website → https://zarr-specs.readthedocs.io/. 
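Aside on the set-literal change in ``test_storage.py`` above (an illustration for context, not part of any patch in this series): a set literal builds the set directly, whereas ``set([...])`` first builds a throwaway list and then calls ``set()`` on it, so the literal is both shorter and marginally cheaper:

    # equivalent results; the literal skips the intermediate list
    assert set(["data", "meta", "zarr.json"]) == {"data", "meta", "zarr.json"}

    import dis
    dis.dis('{"a", "b"}')       # compiles to a direct BUILD_SET
    dis.dis('set(["a", "b"])')  # builds a list first, then calls set()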
From 1ed37f5fa7039cbc8849bf9b0e9aae9c890e7ced Mon Sep 17 00:00:00 2001 From: Ziwen Liu <67518483+ziw-liu@users.noreply.github.com> Date: Thu, 26 Oct 2023 16:10:10 -0700 Subject: [PATCH 0364/1078] Preserve dimension separator metadata when resizing arrays (#1540) * preserve dimension separator when resizing arrays * test dimension separator metadata after resizing * document the change * Update release.rst --------- Co-authored-by: Davis Bennett --- docs/release.rst | 5 ++++- zarr/core.py | 25 ++++++++--------------- zarr/tests/test_core.py | 44 +++++++---------------------------------- 3 files changed, 19 insertions(+), 55 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 12e1449a80..a361bbc8d9 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,11 +21,14 @@ Unreleased Maintenance ~~~~~~~~~~~ +* Preserve ``dimension_separator`` when resizing arrays. + By :user:`Ziwen Liu ` :issue:`1533`. + * Initialise some sets in tests with set literals instead of list literals. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1534`. * Allow ``black`` code formatter to be run with any Python version. - By :user:`David Stansby ` :issue:`1549` + By :user:`David Stansby ` :issue:`1549`. .. _release_2.16.1: diff --git a/zarr/core.py b/zarr/core.py index 43ccdbaf7d..2177e9055c 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -253,7 +253,6 @@ def _load_metadata_nosync(self): except KeyError: raise ArrayNotFoundError(self._path) else: - # decode and store metadata as instance members meta = self._store._metadata_class.decode_array_metadata(meta_bytes) self._meta = meta @@ -341,7 +340,14 @@ def _flush_metadata_nosync(self): filters=filters_config, ) if getattr(self._store, "_store_version", 2) == 2: - meta.update(dict(chunks=self._chunks, dtype=self._dtype, order=self._order)) + meta.update( + dict( + chunks=self._chunks, + dtype=self._dtype, + order=self._order, + dimension_separator=self._dimension_separator, + ) + ) else: meta.update( dict( @@ -1358,7 +1364,6 @@ def get_mask_selection(self, selection, out=None, fields=None): return self._get_selection(indexer=indexer, out=out, fields=fields) def _get_selection(self, indexer, out=None, fields=None): - # We iterate over all chunks which overlap the selection and thus contain data # that needs to be extracted. Each chunk is processed in turn, extracting the # necessary data and storing into the correct location in the output array. @@ -1983,7 +1988,6 @@ def _set_basic_selection_nd(self, selection, value, fields=None): self._set_selection(indexer, value, fields=fields) def _set_selection(self, indexer, value, fields=None): - # We iterate over all chunks which overlap the selection and thus contain data # that needs to be replaced. Each chunk is processed in turn, extracting the # necessary data from the value array and storing into the chunk array. @@ -2018,7 +2022,6 @@ def _set_selection(self, indexer, value, fields=None): ): # iterative approach for chunk_coords, chunk_selection, out_selection in indexer: - # extract data to store if sel_shape == (): chunk_value = value @@ -2077,7 +2080,6 @@ def _process_chunk( and not self._filters and self._dtype != object ): - dest = out[out_selection] # Assume that array-like objects that doesn't have a # `writeable` flag is writable. 
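# --- editorial illustration, not part of patch 0364 --------------------------
# The ``_flush_metadata_nosync`` hunk above now writes ``dimension_separator``
# back into the v2 array metadata, so the setting survives any operation that
# rewrites metadata, such as ``resize``. A sketch of the behaviour being fixed,
# assuming zarr 2.x:
#
#     import zarr
#     z = zarr.open("example.zarr", mode="w", shape=(4, 4), chunks=(2, 2),
#                   dtype="i4", dimension_separator="/")
#     z.resize(8, 8)                         # rewrites .zarray
#     z2 = zarr.open("example.zarr", mode="r")
#     assert z2._dimension_separator == "/"  # previously dropped on resize
# ------------------------------------------------------------------------------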
@@ -2088,7 +2090,6 @@ def _process_chunk( ) if write_direct: - # optimization: we want the whole chunk, and the destination is # contiguous, so we can decompress directly from the chunk # into the destination array @@ -2321,7 +2322,6 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): # to access the existing chunk data if is_scalar(value, self._dtype): - # setup array filled with value chunk = np.empty_like( self._meta_array, shape=self._chunks, dtype=self._dtype, order=self._order @@ -2329,7 +2329,6 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): chunk.fill(value) else: - # ensure array is contiguous chunk = value.astype(self._dtype, order=self._order, copy=False) @@ -2337,12 +2336,10 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): # partially replace the contents of this chunk try: - # obtain compressed data for chunk cdata = self.chunk_store[ckey] except KeyError: - # chunk not initialized if self._fill_value is not None: chunk = np.empty_like( @@ -2359,7 +2356,6 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): ) else: - # decode chunk chunk = self._decode_chunk(cdata) if not chunk.flags.writeable: @@ -2429,7 +2425,6 @@ def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): return chunk def _encode_chunk(self, chunk): - # apply filters if self._filters: for f in self._filters: @@ -2619,7 +2614,6 @@ def __setstate__(self, state): self.__init__(**state) def _synchronized_op(self, f, *args, **kwargs): - if self._synchronizer is None: # no synchronization lock = nolock @@ -2636,7 +2630,6 @@ def _synchronized_op(self, f, *args, **kwargs): return result def _write_op(self, f, *args, **kwargs): - # guard condition if self._read_only: raise ReadOnlyError() @@ -2676,7 +2669,6 @@ def resize(self, *args): return self._write_op(self._resize_nosync, *args) def _resize_nosync(self, *args): - # normalize new shape argument old_shape = self._shape new_shape = normalize_resize_args(old_shape, *args) @@ -2755,7 +2747,6 @@ def append(self, data, axis=0): return self._write_op(self._append_nosync, data, axis=axis) def _append_nosync(self, data, axis=0): - # ensure data is array-like if not hasattr(data, "shape"): data = np.asanyarray(data, like=self._meta_array) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 77b9415d8b..f3ca73dea8 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -132,7 +132,6 @@ def create_array(self, shape: Union[int, Tuple[int, ...]], **kwargs): return Array(store, **access_array_kwargs) def test_array_init(self): - # normal initialization store = self.create_store() init_array(store, shape=100, chunks=10, dtype=" Tuple[Any, ...]: @@ -2583,7 +2558,6 @@ def expected(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithPathV3(TestArrayV3): def test_array_init(self): - store = self.create_store() # can initialize an array without a path init_array(store, shape=100, chunks=10, dtype=" Date: Mon, 30 Oct 2023 12:03:43 +0100 Subject: [PATCH 0365/1078] =?UTF-8?q?io.open=20=E2=86=92=20open=20(#1421)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Python 3, io.open() is an alias for the builtin open() function: https://docs.python.org/3/library/io.html#io.open Co-authored-by: Davis Bennett Co-authored-by: Josh Moore --- docs/release.rst | 3 +++ zarr/convenience.py | 5 +++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git 
a/docs/release.rst b/docs/release.rst index a361bbc8d9..2f9b93a361 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,6 +21,9 @@ Unreleased Maintenance ~~~~~~~~~~~ +* Change occurrence of ``io.open()`` into ``open()``. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1421`. + * Preserve ``dimension_separator`` when resizing arrays. By :user:`Ziwen Liu ` :issue:`1533`. diff --git a/zarr/convenience.py b/zarr/convenience.py index ff236d0df2..0ee8a8d323 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -1,5 +1,4 @@ """Convenience functions for storing and loading data.""" -import io import itertools import os import re @@ -29,6 +28,8 @@ StoreLike = Union[BaseStore, MutableMapping, str, None] +_builtin_open = open # builtin open is later shadowed by a local open function + def _check_and_update_path(store: BaseStore, path): if getattr(store, "_store_version", 2) > 2 and not path: @@ -491,7 +492,7 @@ def __init__(self, log): elif callable(log): self.log_func = log elif isinstance(log, str): - self.log_file = io.open(log, mode="w") + self.log_file = _builtin_open(log, mode="w") self.needs_closing = True elif hasattr(log, "write"): self.log_file = log From 2240d612464802e6f4e6bc9c6bf8b19ef71bbc13 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:05:13 +0100 Subject: [PATCH 0366/1078] Bump pymongo from 4.4.0 to 4.5.0 (#1507) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.4.0 to 4.5.0. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.4.0...4.5.0) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index baa2fbc1e9..3ff11d93e5 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.16.0 # pyup: ignore redis==4.6.0 types-redis types-setuptools -pymongo==4.4.0 +pymongo==4.5.0 # optional test requirements coverage pytest-cov==4.1.0 From 71ce63a5d1bd27053eb59005a868581d550a6866 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 31 Oct 2023 09:32:31 +0100 Subject: [PATCH 0367/1078] Fix typo newly found by codespell (#1554) --- docs/tutorial.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index e3155acfae..f335db18d0 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1315,7 +1315,7 @@ better performance, at least when using the Blosc compression library. The optimal chunk shape will depend on how you want to access the data. E.g., for a 2-dimensional array, if you only ever take slices along the first -dimension, then chunk across the second dimenson. If you know you want to chunk +dimension, then chunk across the second dimension. 
If you know you want to chunk across an entire dimension you can use ``None`` or ``-1`` within the ``chunks`` argument, e.g.:: From 15390a1fb5f76193de048dd277ef674d21d80f41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 09:58:52 +0100 Subject: [PATCH 0368/1078] Bump actions/setup-python from 4.6.0 to 4.7.1 (#1541) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.6.0 to 4.7.1. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4.6.0...v4.7.1) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 238c5e457d..c08bfc6677 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v4.6.0 + - uses: actions/setup-python@v4.7.1 name: Install Python with: python-version: '3.8' From 254b585ef4348608dc8e3a91153ff82ca4245bb4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 10:00:20 +0100 Subject: [PATCH 0369/1078] Bump pytest-doctestplus from 0.13.0 to 1.0.0 (#1512) Bumps [pytest-doctestplus](https://github.com/scientific-python/pytest-doctestplus) from 0.13.0 to 1.0.0. - [Release notes](https://github.com/scientific-python/pytest-doctestplus/releases) - [Changelog](https://github.com/scientific-python/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/scientific-python/pytest-doctestplus/compare/v0.13.0...v1.0.0) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 3ff11d93e5..fda3fcc6e5 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==4.5.0 # optional test requirements coverage pytest-cov==4.1.0 -pytest-doctestplus==0.13.0 +pytest-doctestplus==1.0.0 pytest-timeout==2.1.0 h5py==3.9.0 fsspec==2023.6.0 From d756626759a9c0eb8a72f2c073a3bdf883b47cef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 10:01:02 +0100 Subject: [PATCH 0370/1078] Bump fasteners from 0.18 to 0.19 (#1553) Bumps [fasteners](https://github.com/harlowja/fasteners) from 0.18 to 0.19. - [Release notes](https://github.com/harlowja/fasteners/releases) - [Changelog](https://github.com/harlowja/fasteners/blob/main/CHANGELOG.md) - [Commits](https://github.com/harlowja/fasteners/compare/0.18...0.19) --- updated-dependencies: - dependency-name: fasteners dependency-type: direct:development update-type: version-update:semver-minor ... 
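Aside on the chunking guidance in the tutorial hunk above (a sketch, not part of this dependency bump): ``None`` or ``-1`` inside the ``chunks`` tuple expands to the full extent of that dimension, so slices along the remaining dimensions read whole chunks:

    import zarr
    z = zarr.zeros((10000, 10000), chunks=(100, None), dtype="i4")
    z.chunks  # -> (100, 10000): chunked along the first dimension only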
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 4b1208598e..725e95a8af 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,6 +1,6 @@ # library requirements asciitree==0.3.3 -fasteners==0.18 +fasteners==0.19 numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==7.1.0 From cce501a92a56d5ce22bd6cf5d71a72b2ae9d18fe Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 31 Oct 2023 11:09:17 +0000 Subject: [PATCH 0371/1078] Add links to numcodecs docs in tutorial (#1535) * Fix numcodecs links * Add release note --- docs/conf.py | 1 + docs/release.rst | 8 ++++++++ docs/tutorial.rst | 17 +++++++++-------- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 1ffaeddef4..e33d10b2f6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -331,6 +331,7 @@ def setup(app): intersphinx_mapping = { "python": ("https://docs.python.org/", None), "numpy": ("https://numpy.org/doc/stable/", None), + "numcodecs": ("https://numcodecs.readthedocs.io/en/stable/", None), } diff --git a/docs/release.rst b/docs/release.rst index 2f9b93a361..02552a8f93 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,12 @@ Release notes Unreleased ---------- +Docs +~~~~ + +* Add links to ``numcodecs`` docs in the tutorial. + By :user:`David Stansby ` :issue:`1535`. + Maintenance ~~~~~~~~~~~ @@ -33,6 +39,8 @@ Maintenance * Allow ``black`` code formatter to be run with any Python version. By :user:`David Stansby ` :issue:`1549`. + + .. _release_2.16.1: 2.16.1 diff --git a/docs/tutorial.rst b/docs/tutorial.rst index f335db18d0..e563c16040 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1175,8 +1175,9 @@ A fixed-length unicode dtype is also available, e.g.:: For variable-length strings, the ``object`` dtype can be used, but a codec must be provided to encode the data (see also :ref:`tutorial_objects` below). At the time of writing there are four codecs available that can encode variable length string -objects: :class:`numcodecs.VLenUTF8`, :class:`numcodecs.JSON`, :class:`numcodecs.MsgPack`. -and :class:`numcodecs.Pickle`. E.g. using ``VLenUTF8``:: +objects: :class:`numcodecs.vlen.VLenUTF8`, :class:`numcodecs.json.JSON`, +:class:`numcodecs.msgpacks.MsgPack`. and :class:`numcodecs.pickles.Pickle`. +E.g. using ``VLenUTF8``:: >>> import numcodecs >>> z = zarr.array(text_data, dtype=object, object_codec=numcodecs.VLenUTF8()) @@ -1201,8 +1202,8 @@ is a short-hand for ``dtype=object, object_codec=numcodecs.VLenUTF8()``, e.g.:: 'Helló, világ!', 'Zdravo svete!', 'เฮลโลเวิลด์'], dtype=object) Variable-length byte strings are also supported via ``dtype=object``. Again an -``object_codec`` is required, which can be one of :class:`numcodecs.VLenBytes` or -:class:`numcodecs.Pickle`. For convenience, ``dtype=bytes`` (or ``dtype=str`` on Python +``object_codec`` is required, which can be one of :class:`numcodecs.vlen.VLenBytes` or +:class:`numcodecs.pickles.Pickle`. 
For convenience, ``dtype=bytes`` (or ``dtype=str`` on Python 2.7) can be used as a short-hand for ``dtype=object, object_codec=numcodecs.VLenBytes()``, e.g.:: @@ -1218,7 +1219,7 @@ e.g.:: b'\xe0\xb9\x80\xe0\xb8\xae\xe0\xb8\xa5\xe0\xb9\x82\xe0\xb8\xa5\xe0\xb9\x80\xe0\xb8\xa7\xe0\xb8\xb4\xe0\xb8\xa5\xe0\xb8\x94\xe0\xb9\x8c'], dtype=object) If you know ahead of time all the possible string values that can occur, you could -also use the :class:`numcodecs.Categorize` codec to encode each unique string value as an +also use the :class:`numcodecs.categorize.Categorize` codec to encode each unique string value as an integer. E.g.:: >>> categorize = numcodecs.Categorize(greetings, dtype=object) @@ -1245,7 +1246,7 @@ The best codec to use will depend on what type of objects are present in the arr At the time of writing there are three codecs available that can serve as a general purpose object codec and support encoding of a mixture of object types: -:class:`numcodecs.JSON`, :class:`numcodecs.MsgPack`. and :class:`numcodecs.Pickle`. +:class:`numcodecs.json.JSON`, :class:`numcodecs.msgpacks.MsgPack`. and :class:`numcodecs.pickles.Pickle`. For example, using the JSON codec:: @@ -1258,7 +1259,7 @@ For example, using the JSON codec:: array([42, 'foo', list(['bar', 'baz', 'qux']), {'a': 1, 'b': 2.2}, None], dtype=object) Not all codecs support encoding of all object types. The -:class:`numcodecs.Pickle` codec is the most flexible, supporting encoding any type +:class:`numcodecs.pickles.Pickle` codec is the most flexible, supporting encoding any type of Python object. However, if you are sharing data with anyone other than yourself, then Pickle is not recommended as it is a potential security risk. This is because malicious code can be embedded within pickled data. The JSON and MsgPack codecs do not have any @@ -1270,7 +1271,7 @@ Ragged arrays If you need to store an array of arrays, where each member array can be of any length and stores the same primitive type (a.k.a. a ragged array), the -:class:`numcodecs.VLenArray` codec can be used, e.g.:: +:class:`numcodecs.vlen.VLenArray` codec can be used, e.g.:: >>> z = zarr.empty(4, dtype=object, object_codec=numcodecs.VLenArray(int)) >>> z From c9532b02c59921065a27c98e94070d0acc5cfca1 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 31 Oct 2023 11:56:22 +0000 Subject: [PATCH 0372/1078] Fail doc build on warnings (#1548) * Fail doc build on warnings * Add fail on warning to readthedocs * Disable navigation with keys in docs * Add release note --------- Co-authored-by: Josh Moore --- .readthedocs.yaml | 1 + docs/Makefile | 2 +- docs/conf.py | 1 + docs/index.rst | 1 + docs/release.rst | 5 +++++ 5 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 2124f77271..e8783abe0d 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,6 +7,7 @@ build: sphinx: configuration: docs/conf.py + fail_on_warning: true python: install: diff --git a/docs/Makefile b/docs/Makefile index f279d820c6..e6adc1ca8c 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -2,7 +2,7 @@ # # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = -W --keep-going SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build diff --git a/docs/conf.py b/docs/conf.py index e33d10b2f6..e338348eac 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -144,6 +144,7 @@ }, ], "collapse_navigation": True, + "navigation_with_keys": False, } # Add any paths that contain custom themes here, relative to this directory. 
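Aside, an editorial sketch rather than part of these patches: the two ``conf.py`` changes in this stretch work together — the ``numcodecs`` entry added to ``intersphinx_mapping`` is what lets the tutorial's ``:class:`numcodecs.vlen.VLenUTF8``` style roles resolve to the numcodecs site, and the theme tweak is a plain dict entry. The relevant settings after both patches look roughly like:

    # docs/conf.py (sketch of the combined result)
    intersphinx_mapping = {
        "python": ("https://docs.python.org/", None),
        "numpy": ("https://numpy.org/doc/stable/", None),
        "numcodecs": ("https://numcodecs.readthedocs.io/en/stable/", None),
    }
    html_theme_options = {
        # ...
        "collapse_navigation": True,
        "navigation_with_keys": False,  # added alongside the fail-on-warnings switch
    }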
diff --git a/docs/index.rst b/docs/index.rst index 97f5889ca5..28a2d0e6aa 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -60,6 +60,7 @@ Zarr is a file storage format for chunked, compressed, N-dimensional arrays base +++ .. button-ref:: tutorial + :ref-type: ref :expand: :color: dark :click-parent: diff --git a/docs/release.rst b/docs/release.rst index 02552a8f93..90ac434475 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,6 +21,9 @@ Unreleased Docs ~~~~ +* The documentation build now fails if there are any warnings. + By :user:`David Stansby ` :issue:`1548`. + * Add links to ``numcodecs`` docs in the tutorial. By :user:`David Stansby ` :issue:`1535`. @@ -169,10 +172,12 @@ Major changes * Improve Zarr V3 support, adding partial store read/write and storage transformers. Add new features from the `v3 spec `_: + * storage transformers * `get_partial_values` and `set_partial_values` * efficient `get_partial_values` implementation for `FSStoreV3` * sharding storage transformer + By :user:`Jonathan Striebel `; :issue:`1096`, :issue:`1111`. * N5 nows supports Blosc. From e771c51a465c1461e8d810c93b51bcbf97a945a2 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Tue, 31 Oct 2023 15:28:13 +0000 Subject: [PATCH 0373/1078] DOC: Enable offline formats (#1560) * Enable offline builds * Add link for documentation downloads --- .readthedocs.yaml | 2 ++ docs/index.rst | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index e8783abe0d..08cac8d78d 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -15,3 +15,5 @@ python: path: . extra_requirements: - docs + +formats: all diff --git a/docs/index.rst b/docs/index.rst index 28a2d0e6aa..06f79b7e7c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,7 +19,7 @@ Zarr-Python **Version**: |version| -**Download documentation**: `Zipped HTML `_ +**Download documentation**: `PDF/Zipped HTML/EPUB `_ **Useful links**: `Installation `_ | From 889d2855f288d2d215cfff5289fe86a54019f5bf Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Fri, 3 Nov 2023 15:39:31 +0530 Subject: [PATCH 0374/1078] Remove dependency (#1563) * Remove dependency * Update pyproject.toml --------- Co-authored-by: Josh Moore --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 292bfddded..6869cbf834 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,6 @@ docs = [ 'sphinx_design', 'sphinx-issues', 'sphinx-copybutton', - 'sphinx-rtd-theme', 'pydata-sphinx-theme', 'numpydoc', 'numcodecs[msgpack]', From 4ef322337cfb1a7db06024b007e7c2f37d24bc03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 16:25:10 +0100 Subject: [PATCH 0375/1078] Bump setuptools-scm from 7.1.0 to 8.0.4 (#1562) Bumps [setuptools-scm](https://github.com/pypa/setuptools_scm) from 7.1.0 to 8.0.4. - [Release notes](https://github.com/pypa/setuptools_scm/releases) - [Changelog](https://github.com/pypa/setuptools_scm/blob/main/CHANGELOG.md) - [Commits](https://github.com/pypa/setuptools_scm/compare/v7.1.0...v8.0.4) --- updated-dependencies: - dependency-name: setuptools-scm dependency-type: direct:development update-type: version-update:semver-major ... 
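Aside on this ``setuptools-scm`` bump (a sketch, not project documentation): setuptools-scm derives the package version from git tags at build time — which is why ``zarr/version.py`` is git-ignored earlier in this series — and the derived version can be inspected directly:

    from setuptools_scm import get_version
    # prints something like "2.16.2.dev5+g1234abc" on an untagged commit
    # of a checkout with tags (version string here is hypothetical)
    print(get_version(root="."))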
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 725e95a8af..b189726517 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -3,6 +3,6 @@ asciitree==0.3.3 fasteners==0.19 numcodecs==0.11.0 msgpack-python==0.5.6 -setuptools-scm==7.1.0 +setuptools-scm==8.0.4 # test requirements pytest==7.4.0 From 8a7b41500c5ce739df3f8bb60ed139e4bbac04d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 16:25:20 +0100 Subject: [PATCH 0376/1078] Bump redis from 4.6.0 to 5.0.1 (#1561) Bumps [redis](https://github.com/redis/redis-py) from 4.6.0 to 5.0.1. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.6.0...v5.0.1) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index fda3fcc6e5..584d0d838d 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.1.0 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==4.6.0 +redis==5.0.1 types-redis types-setuptools pymongo==4.5.0 From 503efafe85b82d05a438d3ab49bb64708023a300 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Thu, 23 Nov 2023 15:03:15 +0530 Subject: [PATCH 0377/1078] Update release.rst (#1574) --- docs/release.rst | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 90ac434475..9873d62896 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,15 +21,31 @@ Unreleased Docs ~~~~ +* Minor correction and changes in documentation. + By :user:`Sanket Verma ` :issue:`1509`. + +* Fix typo in documentation. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1554` + * The documentation build now fails if there are any warnings. By :user:`David Stansby ` :issue:`1548`. * Add links to ``numcodecs`` docs in the tutorial. By :user:`David Stansby ` :issue:`1535`. +* Enable offline formats for documentation builds. + By :user:`Sanket Verma ` :issue:`1551`. + +* Minor tweak to advanced indexing tutorial examples. + By :user:`Ross Barnowski ` :issue:`1550`. + + Maintenance ~~~~~~~~~~~ +* Extend copyright notice to 2023. + By :user:`Jack Kelly ` :issue:`1528`. + * Change occurrence of ``io.open()`` into ``open()``. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1421`. @@ -42,6 +58,8 @@ Maintenance * Allow ``black`` code formatter to be run with any Python version. By :user:`David Stansby ` :issue:`1549`. +* Remove ``sphinx-rtd-theme`` dependency from ``pyproject.toml``. + By :user:`Sanket Verma ` :issue:`1563`. .. 
_release_2.16.1: From 74764af32d870ecca0e92644e2b4985e568779c5 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 24 Nov 2023 16:31:31 +0000 Subject: [PATCH 0378/1078] Automatically document Array members (#1547) * Automatically document Array members * Fix Array link --- .gitignore | 1 + docs/api/core.rst | 23 ++----------------- docs/conf.py | 4 ++++ pyproject.toml | 1 + zarr/core.py | 58 ++--------------------------------------------- 5 files changed, 10 insertions(+), 77 deletions(-) diff --git a/.gitignore b/.gitignore index a6a456636d..7de405d8a0 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,7 @@ coverage.xml # Sphinx documentation docs/_build/ +docs/_autoapi/ # PyBuilder target/ diff --git a/docs/api/core.rst b/docs/api/core.rst index c4075fdb30..b310460e51 100644 --- a/docs/api/core.rst +++ b/docs/api/core.rst @@ -1,24 +1,5 @@ The Array class (``zarr.core``) =============================== -.. module:: zarr.core -.. autoclass:: Array - - .. automethod:: __getitem__ - .. automethod:: __setitem__ - .. automethod:: get_basic_selection - .. automethod:: set_basic_selection - .. automethod:: get_mask_selection - .. automethod:: set_mask_selection - .. automethod:: get_block_selection - .. automethod:: set_block_selection - .. automethod:: get_coordinate_selection - .. automethod:: set_coordinate_selection - .. automethod:: get_orthogonal_selection - .. automethod:: set_orthogonal_selection - .. automethod:: digest - .. automethod:: hexdigest - .. automethod:: resize - .. automethod:: append - .. automethod:: view - .. automethod:: astype +.. automodapi:: zarr.core + :no-heading: diff --git a/docs/conf.py b/docs/conf.py index e338348eac..318843a9fb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -42,6 +42,7 @@ "sphinx.ext.autosummary", "sphinx.ext.viewcode", "sphinx.ext.intersphinx", + "sphinx_automodapi.automodapi", "numpydoc", "sphinx_issues", "sphinx_copybutton", @@ -52,6 +53,9 @@ numpydoc_class_members_toctree = False issues_github_path = "zarr-developers/zarr-python" +automodapi_inheritance_diagram = False +automodapi_toctreedirnm = "_autoapi" + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] diff --git a/pyproject.toml b/pyproject.toml index 6869cbf834..22ea19f28f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ jupyter = [ ] docs = [ 'sphinx', + 'sphinx-automodapi', 'sphinx_design', 'sphinx-issues', 'sphinx-copybutton', diff --git a/zarr/core.py b/zarr/core.py index 2177e9055c..c07a31e95f 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -60,6 +60,8 @@ ensure_ndarray_like, ) +__all__ = ["Array"] + # noinspection PyUnresolvedReferences class Array: @@ -110,62 +112,6 @@ class Array: to users. Use `numpy.empty(())` by default. .. 
versionadded:: 2.13 - - - Attributes - ---------- - store - path - name - read_only - chunk_store - shape - chunks - dtype - compression - compression_opts - dimension_separator - fill_value - order - synchronizer - filters - attrs - size - itemsize - nbytes - nbytes_stored - cdata_shape - nchunks - nchunks_initialized - is_view - info - vindex - oindex - blocks - write_empty_chunks - meta_array - - Methods - ------- - __getitem__ - __setitem__ - get_basic_selection - set_basic_selection - get_orthogonal_selection - set_orthogonal_selection - get_mask_selection - set_mask_selection - get_coordinate_selection - set_coordinate_selection - get_block_selection - set_block_selection - digest - hexdigest - resize - append - view - astype - """ def __init__( From b93860a75760dced732273747871fa5502c310e3 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Fri, 24 Nov 2023 09:42:25 -0800 Subject: [PATCH 0379/1078] DOC: Minor tweak to advanced indexing example in tutorial (#1550) * DOC: Update advanced indexing example. Suggestion to modify the advanced indexing example so that the indices and the values in the array differ. * DOC: Fix malformed doctest comment. * DOC: Rm reference to virtualenv from contributor guide. --------- Co-authored-by: Davis Bennett --- docs/contributing.rst | 6 +++--- docs/tutorial.rst | 26 +++++++++++++------------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 0420535093..91606b7276 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -85,9 +85,9 @@ Creating a development environment To work with the Zarr source code, it is recommended to set up a Python virtual environment and install all Zarr dependencies using the same versions as are used by the core developers and continuous integration services. Assuming you have a Python -3 interpreter already installed, and have also installed the virtualenv package, and -you have cloned the Zarr source code and your current working directory is the root of -the repository, you can do something like the following:: +3 interpreter already installed, and you have cloned the Zarr source code and your +current working directory is the root of the repository, you can do something like +the following:: $ mkdir -p ~/pyenv/zarr-dev $ python -m venv ~/pyenv/zarr-dev diff --git a/docs/tutorial.rst b/docs/tutorial.rst index e563c16040..4099bac1c8 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -480,17 +480,17 @@ Indexing with coordinate arrays Items from a Zarr array can be extracted by providing an integer array of coordinates. E.g.:: - >>> z = zarr.array(np.arange(10)) + >>> z = zarr.array(np.arange(10) ** 2) >>> z[:] - array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) - >>> z.get_coordinate_selection([1, 4]) - array([1, 4]) + array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) + >>> z.get_coordinate_selection([2, 5]) + array([ 4, 25]) Coordinate arrays can also be used to update data, e.g.:: - >>> z.set_coordinate_selection([1, 4], [-1, -2]) + >>> z.set_coordinate_selection([2, 5], [-1, -2]) >>> z[:] - array([ 0, -1, 2, 3, -2, 5, 6, 7, 8, 9]) + array([ 0, 1, -1, 9, 16, -2, 36, 49, 64, 81]) For multidimensional arrays, coordinates must be provided for each dimension, e.g.:: @@ -534,17 +534,17 @@ Indexing with a mask array Items can also be extracted by providing a Boolean mask. 
E.g.:: - >>> z = zarr.array(np.arange(10)) + >>> z = zarr.array(np.arange(10) ** 2) >>> z[:] - array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) + array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> sel = np.zeros_like(z, dtype=bool) - >>> sel[1] = True - >>> sel[4] = True + >>> sel[2] = True + >>> sel[5] = True >>> z.get_mask_selection(sel) - array([1, 4]) + array([ 4, 25]) >>> z.set_mask_selection(sel, [-1, -2]) >>> z[:] - array([ 0, -1, 2, 3, -2, 5, 6, 7, 8, 9]) + array([ 0, 1, -1, 9, 16, -2, 36, 49, 64, 81]) Here's a multidimensional example:: @@ -986,7 +986,7 @@ It is also possible to initialize the filesystem outside of Zarr and then pass it through. This requires creating an :class:`zarr.storage.FSStore` object explicitly. For example:: - >>> import s3fs * doctest: +SKIP + >>> import s3fs # doctest: +SKIP >>> fs = s3fs.S3FileSystem(anon=True) # doctest: +SKIP >>> store = zarr.storage.FSStore('/zarr-demo/store', fs=fs) # doctest: +SKIP >>> g = zarr.open_group(store) # doctest: +SKIP From bdbecc7e67b0bd55b99662d213aa066a46195f1b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Nov 2023 21:11:52 +0100 Subject: [PATCH 0380/1078] Bump h5py from 3.9.0 to 3.10.0 (#1571) Bumps [h5py](https://github.com/h5py/h5py) from 3.9.0 to 3.10.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.9.0...3.10.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 584d0d838d..ff45c25944 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage pytest-cov==4.1.0 pytest-doctestplus==1.0.0 pytest-timeout==2.1.0 -h5py==3.9.0 +h5py==3.10.0 fsspec==2023.6.0 s3fs==2023.6.0 moto[server]>=4.0.8 From d641bbceb67bdf915e0afd2259d2fe6043a8c71b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Nov 2023 22:31:34 +0100 Subject: [PATCH 0381/1078] Bump fsspec from 2023.6.0 to 2023.10.0 (#1570) * Bump fsspec from 2023.6.0 to 2023.10.0 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.6.0 to 2023.10.0. - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.6.0...2023.10.0) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ... 
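Aside on the indexing examples revised above (an editorial sketch, not part of this fsspec bump): the ``get_coordinate_selection``/``get_mask_selection`` calls shown in the tutorial also have an indexer shorthand, ``z.vindex``, which accepts both coordinate arrays and Boolean masks:

    import numpy as np
    import zarr

    z = zarr.array(np.arange(10) ** 2)
    z.vindex[[2, 5]]                # same as z.get_coordinate_selection([2, 5]) -> [4, 25]
    sel = np.zeros(10, dtype=bool)
    sel[[2, 5]] = True
    z.vindex[sel]                   # mask selection through the same interface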
Signed-off-by: dependabot[bot] * Bump s3fs as well --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index ff45c25944..bf2965a8fa 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.1.0 pytest-doctestplus==1.0.0 pytest-timeout==2.1.0 h5py==3.10.0 -fsspec==2023.6.0 -s3fs==2023.6.0 +fsspec==2023.10.0 +s3fs==2023.10.0 moto[server]>=4.0.8 From 87b034c60d98514778cbe75cd94fc0ce7fe6990c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 11:16:15 +0100 Subject: [PATCH 0382/1078] Bump pytest-timeout from 2.1.0 to 2.2.0 (#1577) Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 2.1.0 to 2.2.0. - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/2.1.0...2.2.0) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index bf2965a8fa..f3ea80a546 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ pymongo==4.5.0 coverage pytest-cov==4.1.0 pytest-doctestplus==1.0.0 -pytest-timeout==2.1.0 +pytest-timeout==2.2.0 h5py==3.10.0 fsspec==2023.10.0 s3fs==2023.10.0 From 5696991f7552c21f53ab80bedd9aba33e0c6c78e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 11:16:34 +0100 Subject: [PATCH 0383/1078] Bump pytest from 7.4.0 to 7.4.3 (#1576) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.0 to 7.4.3. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.0...7.4.3) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index b189726517..e2be6eb825 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.11.0 msgpack-python==0.5.6 setuptools-scm==8.0.4 # test requirements -pytest==7.4.0 +pytest==7.4.3 From 039d3ba7ac72741b1493dbe8c5649f3cc7e1aab9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 11:18:19 +0100 Subject: [PATCH 0384/1078] Bump conda-incubator/setup-miniconda from 2.2.0 to 2.3.0 (#1575) Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 2.2.0 to 2.3.0. 
- [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v2.2.0...v2.3.0) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 63bc97d157..2c0cd45ca9 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.2.0 + uses: conda-incubator/setup-miniconda@v2.3.0 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index cdf230bc7c..aa7158f1cf 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -42,7 +42,7 @@ jobs: with: fetch-depth: 0 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.2.0 + uses: conda-incubator/setup-miniconda@v2.3.0 with: channels: conda-forge python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 3afa8c467e..78945e97aa 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v2.2.0 + - uses: conda-incubator/setup-miniconda@v2.3.0 with: auto-update-conda: true python-version: ${{ matrix.python-version }} From d40bf1270ceb439b5d4229973197cd2f5e8976ea Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Wed, 29 Nov 2023 15:01:53 +0530 Subject: [PATCH 0385/1078] Update CoC to the latest version (#1572) * Update CoC * Update email address * Remove CODE_OF_CONDUCT.md --- CODE_OF_CONDUCT.md | 46 ---------------------------------------------- 1 file changed, 46 deletions(-) delete mode 100644 CODE_OF_CONDUCT.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index f07035c69f..0000000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,46 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at zarr.conduct@gmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://www.contributor-covenant.org/version/1/4][version] - -[homepage]: https://www.contributor-covenant.org -[version]: https://www.contributor-covenant.org/version/1/4 From ea90564e5f5276a37df06f7d3dadf90faa210d7f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 09:13:27 +0100 Subject: [PATCH 0386/1078] Bump pypa/gh-action-pypi-publish from 1.8.10 to 1.8.11 (#1586) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.10 to 1.8.11. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.10...v1.8.11) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index c08bfc6677..3bd25bfbf7 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.10 + - uses: pypa/gh-action-pypi-publish@v1.8.11 with: user: __token__ password: ${{ secrets.pypi_password }} From 79e80b36b14c50c6d522f0fe0caaee0bbfbce1a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 09:13:52 +0100 Subject: [PATCH 0387/1078] Bump conda-incubator/setup-miniconda from 2.3.0 to 3.0.1 (#1587) Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 2.3.0 to 3.0.1. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v2.3.0...v3.0.1) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 2c0cd45ca9..2cc0213781 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.3.0 + uses: conda-incubator/setup-miniconda@v3.0.1 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index aa7158f1cf..0c3c49d78d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -42,7 +42,7 @@ jobs: with: fetch-depth: 0 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.3.0 + uses: conda-incubator/setup-miniconda@v3.0.1 with: channels: conda-forge python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 78945e97aa..eeee5b704d 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v2.3.0 + - uses: conda-incubator/setup-miniconda@v3.0.1 with: auto-update-conda: true python-version: ${{ matrix.python-version }} From b4c2a1981c8ba501bac87b02466f2130d05895b2 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 5 Dec 2023 10:22:13 +0100 Subject: [PATCH 0388/1078] Bootstrap v3 branch with zarrita (#1584) * Pull Zarrita into Zarr-Python @ 78274781ad64aef95772eb4b083f7ea9b7d03d06 No code changes to Zarrita were made. 
* apply zarr lint rules * zarrita -> v3 * v3/abc [wip] * use abcs plus implementation notes --- zarr/v3/__init__.py | 40 +++ zarr/v3/abc/__init__.py | 0 zarr/v3/abc/array.py | 140 ++++++++++ zarr/v3/abc/codec.py | 84 ++++++ zarr/v3/abc/group.py | 86 +++++++ zarr/v3/abc/store.py | 115 +++++++++ zarr/v3/array.py | 550 +++++++++++++++++++++++++++++++++++++++ zarr/v3/array_v2.py | 552 ++++++++++++++++++++++++++++++++++++++++ zarr/v3/codecs.py | 514 +++++++++++++++++++++++++++++++++++++ zarr/v3/common.py | 158 ++++++++++++ zarr/v3/group.py | 179 +++++++++++++ zarr/v3/group_v2.py | 218 ++++++++++++++++ zarr/v3/indexing.py | 208 +++++++++++++++ zarr/v3/metadata.py | 339 ++++++++++++++++++++++++ zarr/v3/sharding.py | 516 +++++++++++++++++++++++++++++++++++++ zarr/v3/store.py | 304 ++++++++++++++++++++++ zarr/v3/sync.py | 87 +++++++ 17 files changed, 4090 insertions(+) create mode 100644 zarr/v3/__init__.py create mode 100644 zarr/v3/abc/__init__.py create mode 100644 zarr/v3/abc/array.py create mode 100644 zarr/v3/abc/codec.py create mode 100644 zarr/v3/abc/group.py create mode 100644 zarr/v3/abc/store.py create mode 100644 zarr/v3/array.py create mode 100644 zarr/v3/array_v2.py create mode 100644 zarr/v3/codecs.py create mode 100644 zarr/v3/common.py create mode 100644 zarr/v3/group.py create mode 100644 zarr/v3/group_v2.py create mode 100644 zarr/v3/indexing.py create mode 100644 zarr/v3/metadata.py create mode 100644 zarr/v3/sharding.py create mode 100644 zarr/v3/store.py create mode 100644 zarr/v3/sync.py diff --git a/zarr/v3/__init__.py b/zarr/v3/__init__.py new file mode 100644 index 0000000000..bbf5aa0359 --- /dev/null +++ b/zarr/v3/__init__.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from typing import Union + +import zarr.v3.codecs # noqa: F401 +from zarr.v3.array import Array # noqa: F401 +from zarr.v3.array_v2 import ArrayV2 # noqa: F401 +from zarr.v3.group import Group # noqa: F401 +from zarr.v3.group_v2 import GroupV2 # noqa: F401 +from zarr.v3.metadata import RuntimeConfiguration, runtime_configuration # noqa: F401 +from zarr.v3.store import ( # noqa: F401 + LocalStore, + RemoteStore, + Store, + StoreLike, + StorePath, + make_store_path, +) +from zarr.v3.sync import sync as _sync + + +async def open_auto_async( + store: StoreLike, + runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), +) -> Union[Array, ArrayV2, Group, GroupV2]: + store_path = make_store_path(store) + try: + return await Group.open_or_array(store_path, runtime_configuration=runtime_configuration_) + except KeyError: + return await GroupV2.open_or_array(store_path, runtime_configuration_) + + +def open_auto( + store: StoreLike, + runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), +) -> Union[Array, ArrayV2, Group, GroupV2]: + return _sync( + open_auto_async(store, runtime_configuration_), + runtime_configuration_.asyncio_loop, + ) diff --git a/zarr/v3/abc/__init__.py b/zarr/v3/abc/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/zarr/v3/abc/array.py b/zarr/v3/abc/array.py new file mode 100644 index 0000000000..976aa48618 --- /dev/null +++ b/zarr/v3/abc/array.py @@ -0,0 +1,140 @@ +from __future__ import annotations +from abc import abstractproperty, abstractmethod, ABC +from typing import Tuple, Any, Dict + +import numpy as np + +from zarr.v3.abc.store import ReadStore, WriteStore +from zarr.v3.common import Selection + + +class BaseArray(ABC): + @abstractproperty + def store_path(self) -> str: # TODO: rename to `path`? 
+ """Path to this array in the underlying store.""" + ... + + @abstractproperty + def dtype(self) -> np.dtype: + """Data type of the array elements. + + Returns + ------- + dtype + array data type + """ + ... + + @abstractproperty + def ndim(self) -> int: + """Number of array dimensions (axes). + + Returns + ------- + int + number of array dimensions (axes) + """ + ... + + @abstractproperty + def shape(self) -> Tuple[int, ...]: + """Array dimensions. + + Returns + ------- + tuple of int + array dimensions + """ + ... + + @abstractproperty + def size(self) -> int: + """Number of elements in the array. + + Returns + ------- + int + number of elements in an array. + """ + + @abstractproperty + def attrs(self) -> Dict[str, Any]: + """Array attributes. + + Returns + ------- + dict + user defined attributes + """ + ... + + @abstractproperty + def info(self) -> Any: + """Report some diagnostic information about the array. + + Returns + ------- + out + """ + ... + + +class AsynchronousArray(BaseArray): + """This class can be implemented as a v2 or v3 array""" + + @classmethod + @abstractmethod + async def from_json(cls, zarr_json: Any, store: ReadStore) -> AsynchronousArray: + ... + + @classmethod + @abstractmethod + async def open(cls, store: ReadStore) -> AsynchronousArray: + ... + + @classmethod + @abstractmethod + async def create(cls, store: WriteStore, *, shape, **kwargs) -> AsynchronousArray: + ... + + @abstractmethod + async def getitem(self, selection: Selection): + ... + + @abstractmethod + async def setitem(self, selection: Selection, value: np.ndarray) -> None: + ... + + +class SynchronousArray(BaseArray): + """ + This class can be implemented as a v2 or v3 array + """ + + @classmethod + @abstractmethod + def from_json(cls, zarr_json: Any, store: ReadStore) -> SynchronousArray: + ... + + @classmethod + @abstractmethod + def open(cls, store: ReadStore) -> SynchronousArray: + ... + + @classmethod + @abstractmethod + def create(cls, store: WriteStore, *, shape, **kwargs) -> SynchronousArray: + ... + + @abstractmethod + def __getitem__(self, selection: Selection): # TODO: type as np.ndarray | scalar + ... + + @abstractmethod + def __setitem__(self, selection: Selection, value: np.ndarray) -> None: + ... + + # some day ;) + # @property + # def __array_api_version__(self) -> str: + # return "2022.12" diff --git a/zarr/v3/abc/codec.py b/zarr/v3/abc/codec.py new file mode 100644 index 0000000000..f84fc74af9 --- /dev/null +++ b/zarr/v3/abc/codec.py @@ -0,0 +1,84 @@ +# Notes: +# 1. These are missing methods described in the spec. I expected to see these method definitions: +# def compute_encoded_representation_type(self, decoded_representation_type): +# def encode(self, decoded_value): +# def decode(self, encoded_value, decoded_representation_type): +# def partial_decode(self, input_handle, decoded_representation_type, decoded_regions): +# def compute_encoded_size(self, input_size): +# 2. 
Understand why array metadata is included on all codecs + + +from __future__ import annotations + +from abc import abstractmethod, ABC +from typing import TYPE_CHECKING, Optional + +import numpy as np + +from zarr.v3.common import BytesLike + + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +class Codec(ABC): + supports_partial_decode: bool + supports_partial_encode: bool + is_fixed_size: bool + array_metadata: CoreArrayMetadata + + @abstractmethod + def compute_encoded_size(self, input_byte_length: int) -> int: + pass + + def resolve_metadata(self) -> CoreArrayMetadata: + return self.array_metadata + + +class ArrayArrayCodec(Codec): + @abstractmethod + async def decode( + self, + chunk_array: np.ndarray, + ) -> np.ndarray: + pass + + @abstractmethod + async def encode( + self, + chunk_array: np.ndarray, + ) -> Optional[np.ndarray]: + pass + + +class ArrayBytesCodec(Codec): + @abstractmethod + async def decode( + self, + chunk_array: BytesLike, + ) -> np.ndarray: + pass + + @abstractmethod + async def encode( + self, + chunk_array: np.ndarray, + ) -> Optional[BytesLike]: + pass + + +class BytesBytesCodec(Codec): + @abstractmethod + async def decode( + self, + chunk_array: BytesLike, + ) -> BytesLike: + pass + + @abstractmethod + async def encode( + self, + chunk_array: BytesLike, + ) -> Optional[BytesLike]: + pass diff --git a/zarr/v3/abc/group.py b/zarr/v3/abc/group.py new file mode 100644 index 0000000000..02de819894 --- /dev/null +++ b/zarr/v3/abc/group.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from abc import abstractproperty, ABC +from collections.abc import MutableMapping +from typing import Dict, Any + + +class BaseGroup(ABC): + @abstractproperty + def attrs(self) -> Dict[str, Any]: + """User-defined attributes.""" + ... + + @abstractproperty + def info(self) -> Any: # TODO: type this later + """Return diagnostic information about the group.""" + ... + + +class AsynchronousGroup(BaseGroup): + pass + # TODO: (considering the following api) + # store_path (rename to path?) + # nchildren - number of child groups + arrays + # children (async iterator) + # contains - check if child exists + # getitem - get child + # group_keys (async iterator) + # groups (async iterator) + # array_keys (async iterator) + # arrays (async iterator) + # visit + # visitkeys + # visitvalues + # tree + # create_group + # require_group + # create_groups + # require_groups + # create_dataset + # require_dataset + # create + # empty + # zeros + # ones + # full + # array + # empty_like + # zeros_like + # ones_like + # full_like + # move + + +class SynchronousGroup(BaseGroup, MutableMapping): + # TODO - think about if we want to keep the MutableMapping abstraction or + pass + # store_path (rename to path?) 
+ # __enter__ + # __exit__ + # group_keys + # groups + # array_keys + # arrays + # visit + # visitkeys + # visitvalues + # visititems + # tree + # create_group + # require_group + # create_groups + # require_groups + # create_dataset + # require_dataset + # create + # empty + # zeros + # ones + # full + # array + # empty_like + # zeros_like + # ones_like + # full_like + # move diff --git a/zarr/v3/abc/store.py b/zarr/v3/abc/store.py new file mode 100644 index 0000000000..5469cafe6d --- /dev/null +++ b/zarr/v3/abc/store.py @@ -0,0 +1,115 @@ +from abc import abstractmethod, ABC + +from typing import List, Tuple + + +class Store(ABC): + pass + + +class ReadStore(Store): + @abstractmethod + async def get(self, key: str) -> bytes: + """Retrieve the value associated with a given key. + + Parameters + ---------- + key : str + + Returns + ------- + bytes + """ + ... + + @abstractmethod + async def get_partial_values(self, key_ranges: List[Tuple[str, int]]) -> bytes: + """Retrieve possibly partial values from given key_ranges. + + Parameters + ---------- + key_ranges : list[tuple[str, int]] + Ordered set of key, range pairs, a key may occur multiple times with different ranges + + Returns + ------- + list[bytes] + list of values, in the order of the key_ranges, may contain null/none for missing keys + """ + ... + + +class WriteStore(ReadStore): + @abstractmethod + async def set(self, key: str, value: bytes) -> None: + """Store a (key, value) pair. + + Parameters + ---------- + key : str + value : bytes + """ + ... + + @abstractmethod + async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: + """Store values at a given key, starting at byte range_start. + + Parameters + ---------- + key_start_values : list[tuple[str, int, bytes]] + set of key, range_start, values triples, a key may occur multiple times with different + range_starts, range_starts (considering the length of the respective values) must not + specify overlapping ranges for the same key + """ + ... + + +class ListMixin: + @abstractmethod + async def list(self) -> List[str]: + """Retrieve all keys in the store. + + Returns + ------- + list[str] + """ + ... + + @abstractmethod + async def list_prefix(self, prefix: str) -> List[str]: + """Retrieve all keys in the store. + + Parameters + ---------- + prefix : str + + Returns + ------- + list[str] + """ + ... + + @abstractmethod + async def list_dir(self, prefix: str) -> List[str]: + """ + Retrieve all keys and prefixes with a given prefix and which do not contain the character + “/” after the given prefix. + + Parameters + ---------- + prefix : str + + Returns + ------- + list[str] + """ + ... + + +class ReadListStore(ReadStore, ListMixin): + pass + + +class WriteListStore(WriteStore, ListMixin): + pass diff --git a/zarr/v3/array.py b/zarr/v3/array.py new file mode 100644 index 0000000000..3c0d7eba5c --- /dev/null +++ b/zarr/v3/array.py @@ -0,0 +1,550 @@ +# Notes on what I've changed here: +# 1. Split Array into AsyncArray and Array +# 2. Inherit from abc (SynchronousArray, AsynchronousArray) +# 3. Added .size and .attrs methods +# 4. Temporarily disabled the creation of ArrayV2 +# 5. Added from_json to AsyncArray + +# Questions to consider: +# 1. Was splitting the array into two classes really necessary? +# 2. Do we really need runtime_configuration? 
Specifically, the asyncio_loop seems problematic + +from __future__ import annotations + +import json +from typing import Any, Dict, Iterable, Literal, Optional, Tuple, Union + +import numpy as np +from attr import evolve, frozen + +from zarr.v3.abc.array import SynchronousArray, AsynchronousArray + +# from zarr.v3.array_v2 import ArrayV2 +from zarr.v3.codecs import CodecMetadata, CodecPipeline, bytes_codec +from zarr.v3.common import ( + ZARR_JSON, + ChunkCoords, + Selection, + SliceSelection, + concurrent_map, +) +from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice +from zarr.v3.metadata import ( + ArrayMetadata, + DataType, + DefaultChunkKeyEncodingConfigurationMetadata, + DefaultChunkKeyEncodingMetadata, + RegularChunkGridConfigurationMetadata, + RegularChunkGridMetadata, + RuntimeConfiguration, + V2ChunkKeyEncodingConfigurationMetadata, + V2ChunkKeyEncodingMetadata, + dtype_to_data_type, +) +from zarr.v3.sharding import ShardingCodec +from zarr.v3.store import StoreLike, StorePath, make_store_path +from zarr.v3.sync import sync + + +@frozen +class AsyncArray(AsynchronousArray): + metadata: ArrayMetadata + store_path: StorePath + runtime_configuration: RuntimeConfiguration + codec_pipeline: CodecPipeline + + @classmethod + async def create( + cls, + store: StoreLike, + *, + shape: ChunkCoords, + dtype: Union[str, np.dtype], + chunk_shape: ChunkCoords, + fill_value: Optional[Any] = None, + chunk_key_encoding: Union[ + Tuple[Literal["default"], Literal[".", "/"]], + Tuple[Literal["v2"], Literal[".", "/"]], + ] = ("default", "/"), + codecs: Optional[Iterable[CodecMetadata]] = None, + dimension_names: Optional[Iterable[str]] = None, + attributes: Optional[Dict[str, Any]] = None, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + exists_ok: bool = False, + ) -> AsyncArray: + store_path = make_store_path(store) + if not exists_ok: + assert not await (store_path / ZARR_JSON).exists_async() + + data_type = ( + DataType[dtype] if isinstance(dtype, str) else DataType[dtype_to_data_type[dtype.str]] + ) + + codecs = list(codecs) if codecs is not None else [bytes_codec()] + + if fill_value is None: + if data_type == DataType.bool: + fill_value = False + else: + fill_value = 0 + + metadata = ArrayMetadata( + shape=shape, + data_type=data_type, + chunk_grid=RegularChunkGridMetadata( + configuration=RegularChunkGridConfigurationMetadata(chunk_shape=chunk_shape) + ), + chunk_key_encoding=( + V2ChunkKeyEncodingMetadata( + configuration=V2ChunkKeyEncodingConfigurationMetadata( + separator=chunk_key_encoding[1] + ) + ) + if chunk_key_encoding[0] == "v2" + else DefaultChunkKeyEncodingMetadata( + configuration=DefaultChunkKeyEncodingConfigurationMetadata( + separator=chunk_key_encoding[1] + ) + ) + ), + fill_value=fill_value, + codecs=codecs, + dimension_names=tuple(dimension_names) if dimension_names else None, + attributes=attributes or {}, + ) + runtime_configuration = runtime_configuration or RuntimeConfiguration() + + array = cls( + metadata=metadata, + store_path=store_path, + runtime_configuration=runtime_configuration, + codec_pipeline=CodecPipeline.from_metadata( + metadata.codecs, metadata.get_core_metadata(runtime_configuration) + ), + ) + + await array._save_metadata() + return array + + @classmethod + def from_json( + cls, + store_path: StorePath, + zarr_json: Any, + runtime_configuration: RuntimeConfiguration, + ) -> AsyncArray: + metadata = ArrayMetadata.from_json(zarr_json) + async_array = cls( + metadata=metadata, + store_path=store_path, + 
runtime_configuration=runtime_configuration, + codec_pipeline=CodecPipeline.from_metadata( + metadata.codecs, metadata.get_core_metadata(runtime_configuration) + ), + ) + async_array._validate_metadata() + return async_array + + @classmethod + async def open( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> AsyncArray: + store_path = make_store_path(store) + zarr_json_bytes = await (store_path / ZARR_JSON).get_async() + assert zarr_json_bytes is not None + return cls.from_json( + store_path, + json.loads(zarr_json_bytes), + runtime_configuration=runtime_configuration, + ) + + @classmethod + async def open_auto( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> AsyncArray: # TODO: Union[AsyncArray, ArrayV2] + store_path = make_store_path(store) + v3_metadata_bytes = await (store_path / ZARR_JSON).get_async() + if v3_metadata_bytes is not None: + return cls.from_json( + store_path, + json.loads(v3_metadata_bytes), + runtime_configuration=runtime_configuration or RuntimeConfiguration(), + ) + else: + raise ValueError("no v2 support yet") + # return await ArrayV2.open_async(store_path) + + @property + def ndim(self) -> int: + return len(self.metadata.shape) + + @property + def shape(self) -> ChunkCoords: + return self.metadata.shape + + @property + def size(self) -> int: + return np.prod(self.metadata.shape) + + @property + def dtype(self) -> np.dtype: + return self.metadata.dtype + + @property + def attrs(self) -> dict: + return self.metadata.attributes + + async def getitem(self, selection: Selection): + indexer = BasicIndexer( + selection, + shape=self.metadata.shape, + chunk_shape=self.metadata.chunk_grid.configuration.chunk_shape, + ) + + # setup output array + out = np.zeros( + indexer.shape, + dtype=self.metadata.dtype, + order=self.runtime_configuration.order, + ) + + # reading chunks and decoding them + await concurrent_map( + [ + (chunk_coords, chunk_selection, out_selection, out) + for chunk_coords, chunk_selection, out_selection in indexer + ], + self._read_chunk, + self.runtime_configuration.concurrency, + ) + + if out.shape: + return out + else: + return out[()] + + async def _save_metadata(self) -> None: + self._validate_metadata() + + await (self.store_path / ZARR_JSON).set_async(self.metadata.to_bytes()) + + def _validate_metadata(self) -> None: + assert len(self.metadata.shape) == len( + self.metadata.chunk_grid.configuration.chunk_shape + ), "`chunk_shape` and `shape` need to have the same number of dimensions." + assert self.metadata.dimension_names is None or len(self.metadata.shape) == len( + self.metadata.dimension_names + ), "`dimension_names` and `shape` need to have the same number of dimensions." + assert self.metadata.fill_value is not None, "`fill_value` is required." 
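+
+    # Example (illustrative sketch, not part of the zarrita port): `getitem`
+    # above fans out one `_read_chunk` call per chunk touched by the
+    # selection. With shape=(100, 100) and chunk_shape=(20, 20), the
+    # selection `array[0:30, 0:30]` touches a 2x2 block of chunks, so four
+    # reads are scheduled, bounded by `runtime_configuration.concurrency`.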
+ + async def _read_chunk( + self, + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + out: np.ndarray, + ): + chunk_key_encoding = self.metadata.chunk_key_encoding + chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) + store_path = self.store_path / chunk_key + + if len(self.codec_pipeline.codecs) == 1 and isinstance( + self.codec_pipeline.codecs[0], ShardingCodec + ): + chunk_array = await self.codec_pipeline.codecs[0].decode_partial( + store_path, chunk_selection + ) + if chunk_array is not None: + out[out_selection] = chunk_array + else: + out[out_selection] = self.metadata.fill_value + else: + chunk_bytes = await store_path.get_async() + if chunk_bytes is not None: + chunk_array = await self.codec_pipeline.decode(chunk_bytes) + tmp = chunk_array[chunk_selection] + out[out_selection] = tmp + else: + out[out_selection] = self.metadata.fill_value + + async def setitem(self, selection: Selection, value: np.ndarray) -> None: + chunk_shape = self.metadata.chunk_grid.configuration.chunk_shape + indexer = BasicIndexer( + selection, + shape=self.metadata.shape, + chunk_shape=chunk_shape, + ) + + sel_shape = indexer.shape + + # check value shape + if np.isscalar(value): + # setting a scalar value + pass + else: + if not hasattr(value, "shape"): + value = np.asarray(value, self.metadata.dtype) + assert value.shape == sel_shape + if value.dtype.name != self.metadata.dtype.name: + value = value.astype(self.metadata.dtype, order="A") + + # merging with existing data and encoding chunks + await concurrent_map( + [ + ( + value, + chunk_shape, + chunk_coords, + chunk_selection, + out_selection, + ) + for chunk_coords, chunk_selection, out_selection in indexer + ], + self._write_chunk, + self.runtime_configuration.concurrency, + ) + + async def _write_chunk( + self, + value: np.ndarray, + chunk_shape: ChunkCoords, + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + ): + chunk_key_encoding = self.metadata.chunk_key_encoding + chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) + store_path = self.store_path / chunk_key + + if is_total_slice(chunk_selection, chunk_shape): + # write entire chunks + if np.isscalar(value): + chunk_array = np.empty( + chunk_shape, + dtype=self.metadata.dtype, + ) + chunk_array.fill(value) + else: + chunk_array = value[out_selection] + await self._write_chunk_to_store(store_path, chunk_array) + + elif len(self.codec_pipeline.codecs) == 1 and isinstance( + self.codec_pipeline.codecs[0], ShardingCodec + ): + sharding_codec = self.codec_pipeline.codecs[0] + # print("encode_partial", chunk_coords, chunk_selection, repr(self)) + await sharding_codec.encode_partial( + store_path, + value[out_selection], + chunk_selection, + ) + else: + # writing partial chunks + # read chunk first + chunk_bytes = await store_path.get_async() + + # merge new value + if chunk_bytes is None: + chunk_array = np.empty( + chunk_shape, + dtype=self.metadata.dtype, + ) + chunk_array.fill(self.metadata.fill_value) + else: + chunk_array = ( + await self.codec_pipeline.decode(chunk_bytes) + ).copy() # make a writable copy + chunk_array[chunk_selection] = value[out_selection] + + await self._write_chunk_to_store(store_path, chunk_array) + + async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.ndarray): + if np.all(chunk_array == self.metadata.fill_value): + # chunks that only contain fill_value will be removed + await store_path.delete_async() + else: + chunk_bytes = await 
self.codec_pipeline.encode(chunk_array)
+            if chunk_bytes is None:
+                await store_path.delete_async()
+            else:
+                await store_path.set_async(chunk_bytes)
+
+    async def resize(self, new_shape: ChunkCoords) -> AsyncArray:
+        assert len(new_shape) == len(self.metadata.shape)
+        new_metadata = evolve(self.metadata, shape=new_shape)
+
+        # Remove all chunks outside of the new shape
+        chunk_shape = self.metadata.chunk_grid.configuration.chunk_shape
+        chunk_key_encoding = self.metadata.chunk_key_encoding
+        old_chunk_coords = set(all_chunk_coords(self.metadata.shape, chunk_shape))
+        new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape))
+
+        async def _delete_key(key: str) -> None:
+            await (self.store_path / key).delete_async()
+
+        await concurrent_map(
+            [
+                (chunk_key_encoding.encode_chunk_key(chunk_coords),)
+                for chunk_coords in old_chunk_coords.difference(new_chunk_coords)
+            ],
+            _delete_key,
+            self.runtime_configuration.concurrency,
+        )
+
+        # Write new metadata
+        await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes())
+        return evolve(self, metadata=new_metadata)
+
+    async def update_attributes(self, new_attributes: Dict[str, Any]) -> AsyncArray:
+        new_metadata = evolve(self.metadata, attributes=new_attributes)
+
+        # Write new metadata
+        await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes())
+        return evolve(self, metadata=new_metadata)
+
+    def __repr__(self):
+        return f"<AsyncArray {self.store_path} shape={self.shape} dtype={self.dtype}>"
+
+    async def info(self):
+        return NotImplemented
+
+
+@frozen
+class Array(SynchronousArray):
+    _async_array: AsyncArray
+
+    @classmethod
+    def create(
+        cls,
+        store: StoreLike,
+        *,
+        shape: ChunkCoords,
+        dtype: Union[str, np.dtype],
+        chunk_shape: ChunkCoords,
+        fill_value: Optional[Any] = None,
+        chunk_key_encoding: Union[
+            Tuple[Literal["default"], Literal[".", "/"]],
+            Tuple[Literal["v2"], Literal[".", "/"]],
+        ] = ("default", "/"),
+        codecs: Optional[Iterable[CodecMetadata]] = None,
+        dimension_names: Optional[Iterable[str]] = None,
+        attributes: Optional[Dict[str, Any]] = None,
+        runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(),
+        exists_ok: bool = False,
+    ) -> Array:
+        async_array = sync(
+            AsyncArray.create(
+                store=store,
+                shape=shape,
+                dtype=dtype,
+                chunk_shape=chunk_shape,
+                fill_value=fill_value,
+                chunk_key_encoding=chunk_key_encoding,
+                codecs=codecs,
+                dimension_names=dimension_names,
+                attributes=attributes,
+                runtime_configuration=runtime_configuration,
+                exists_ok=exists_ok,
+            ),
+            runtime_configuration.asyncio_loop,
+        )
+        return cls(async_array)
+
+    @classmethod
+    def from_json(
+        cls,
+        store_path: StorePath,
+        zarr_json: Any,
+        runtime_configuration: RuntimeConfiguration,
+    ) -> Array:
+        async_array = AsyncArray.from_json(
+            store_path=store_path, zarr_json=zarr_json, runtime_configuration=runtime_configuration
+        )
+        return cls(async_array)
+
+    @classmethod
+    def open(
+        cls,
+        store: StoreLike,
+        runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(),
+    ) -> Array:
+        async_array = sync(
+            AsyncArray.open(store, runtime_configuration=runtime_configuration),
+            runtime_configuration.asyncio_loop,
+        )
+        async_array._validate_metadata()
+        return cls(async_array)
+
+    @classmethod
+    def open_auto(
+        cls,
+        store: StoreLike,
+        runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(),
+    ) -> Array:  # TODO: Union[Array, ArrayV2]:
+        async_array = sync(
+            AsyncArray.open_auto(store, runtime_configuration),
+            runtime_configuration.asyncio_loop,
+        )
+        return cls(async_array)
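+
+    # Usage sketch (illustrative only; assumes `make_store_path` accepts a
+    # local path string):
+    #
+    #     arr = Array.create(
+    #         "data/example.zarr",
+    #         shape=(100, 100),
+    #         dtype="float64",
+    #         chunk_shape=(10, 10),
+    #     )
+    #     arr[:20, :20] = np.ones((20, 20))
+    #     assert arr[0, 0] == 1.0
+    #
+    # Each call blocks on the corresponding `AsyncArray` coroutine via `sync`.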
+
+    @property
+    def ndim(self) -> int:
+        return self._async_array.ndim
+
+    @property
+    def shape(self) -> ChunkCoords:
+        return self._async_array.shape
+
+    @property
+    def size(self) -> int:
+        return self._async_array.size
+
+    @property
+    def dtype(self) -> np.dtype:
+        return self._async_array.dtype
+
+    @property
+    def attrs(self) -> dict:
+        return self._async_array.attrs
+
+    @property
+    def store_path(self) -> str:
+        return self._async_array.store_path
+
+    def __getitem__(self, selection: Selection):
+        return sync(
+            self._async_array.getitem(selection),
+            self._async_array.runtime_configuration.asyncio_loop,
+        )
+
+    def __setitem__(self, selection: Selection, value: np.ndarray) -> None:
+        sync(
+            self._async_array.setitem(selection, value),
+            self._async_array.runtime_configuration.asyncio_loop,
+        )
+
+    def resize(self, new_shape: ChunkCoords) -> Array:
+        return sync(
+            self._async_array.resize(new_shape),
+            self._async_array.runtime_configuration.asyncio_loop,
+        )
+
+    def update_attributes(self, new_attributes: Dict[str, Any]) -> Array:
+        return sync(
+            self._async_array.update_attributes(new_attributes),
+            self._async_array.runtime_configuration.asyncio_loop,
+        )
+
+    def __repr__(self):
+        return f"<Array {self.store_path} shape={self.shape} dtype={self.dtype}>"
+
+    def info(self):
+        return sync(
+            self._async_array.info(),
+            self._async_array.runtime_configuration.asyncio_loop,
+        )
diff --git a/zarr/v3/array_v2.py b/zarr/v3/array_v2.py
new file mode 100644
index 0000000000..a2f26f01b0
--- /dev/null
+++ b/zarr/v3/array_v2.py
@@ -0,0 +1,552 @@
+from __future__ import annotations
+
+import asyncio
+import json
+from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union
+
+import numcodecs
+import numpy as np
+from attr import evolve, frozen
+from numcodecs.compat import ensure_bytes, ensure_ndarray
+
+from zarr.v3.common import (
+    ZARRAY_JSON,
+    ZATTRS_JSON,
+    BytesLike,
+    ChunkCoords,
+    Selection,
+    SliceSelection,
+    concurrent_map,
+    to_thread,
+)
+from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice
+from zarr.v3.metadata import ArrayV2Metadata, RuntimeConfiguration
+from zarr.v3.store import StoreLike, StorePath, make_store_path
+from zarr.v3.sync import sync
+
+if TYPE_CHECKING:
+    from zarr.v3.array import Array
+
+
+@frozen
+class _AsyncArrayProxy:
+    array: ArrayV2
+
+    def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy:
+        return _AsyncArraySelectionProxy(self.array, selection)
+
+
+@frozen
+class _AsyncArraySelectionProxy:
+    array: ArrayV2
+    selection: Selection
+
+    async def get(self) -> np.ndarray:
+        return await self.array.get_async(self.selection)
+
+    async def set(self, value: np.ndarray):
+        return await self.array.set_async(self.selection, value)
+
+
+@frozen
+class ArrayV2:
+    metadata: ArrayV2Metadata
+    attributes: Optional[Dict[str, Any]]
+    store_path: StorePath
+    runtime_configuration: RuntimeConfiguration
+
+    @classmethod
+    async def create_async(
+        cls,
+        store: StoreLike,
+        *,
+        shape: ChunkCoords,
+        dtype: np.dtype,
+        chunks: ChunkCoords,
+        dimension_separator: Literal[".", "/"] = ".",
+        fill_value: Optional[Union[None, int, float]] = None,
+        order: Literal["C", "F"] = "C",
+        filters: Optional[List[Dict[str, Any]]] = None,
+        compressor: Optional[Dict[str, Any]] = None,
+        attributes: Optional[Dict[str, Any]] = None,
+        exists_ok: bool = False,
+        runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(),
+    ) -> ArrayV2:
+        store_path = make_store_path(store)
+        if not exists_ok:
+            assert not await (store_path / ZARRAY_JSON).exists_async()
+
+        metadata = ArrayV2Metadata(
+            shape=shape,
+            dtype=np.dtype(dtype),
+            
chunks=chunks, + order=order, + dimension_separator=dimension_separator, + fill_value=0 if fill_value is None else fill_value, + compressor=numcodecs.get_codec(compressor).get_config() + if compressor is not None + else None, + filters=[numcodecs.get_codec(filter).get_config() for filter in filters] + if filters is not None + else None, + ) + array = cls( + metadata=metadata, + store_path=store_path, + attributes=attributes, + runtime_configuration=runtime_configuration, + ) + await array._save_metadata() + return array + + @classmethod + def create( + cls, + store: StoreLike, + *, + shape: ChunkCoords, + dtype: np.dtype, + chunks: ChunkCoords, + dimension_separator: Literal[".", "/"] = ".", + fill_value: Optional[Union[None, int, float]] = None, + order: Literal["C", "F"] = "C", + filters: Optional[List[Dict[str, Any]]] = None, + compressor: Optional[Dict[str, Any]] = None, + attributes: Optional[Dict[str, Any]] = None, + exists_ok: bool = False, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> ArrayV2: + return sync( + cls.create_async( + store, + shape=shape, + dtype=dtype, + chunks=chunks, + order=order, + dimension_separator=dimension_separator, + fill_value=0 if fill_value is None else fill_value, + compressor=compressor, + filters=filters, + attributes=attributes, + exists_ok=exists_ok, + runtime_configuration=runtime_configuration, + ), + runtime_configuration.asyncio_loop, + ) + + @classmethod + async def open_async( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> ArrayV2: + store_path = make_store_path(store) + zarray_bytes, zattrs_bytes = await asyncio.gather( + (store_path / ZARRAY_JSON).get_async(), + (store_path / ZATTRS_JSON).get_async(), + ) + assert zarray_bytes is not None + return cls.from_json( + store_path, + zarray_json=json.loads(zarray_bytes), + zattrs_json=json.loads(zattrs_bytes) if zattrs_bytes is not None else None, + runtime_configuration=runtime_configuration, + ) + + @classmethod + def open( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> ArrayV2: + return sync( + cls.open_async(store, runtime_configuration), + runtime_configuration.asyncio_loop, + ) + + @classmethod + def from_json( + cls, + store_path: StorePath, + zarray_json: Any, + zattrs_json: Optional[Any], + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> ArrayV2: + metadata = ArrayV2Metadata.from_json(zarray_json) + out = cls( + store_path=store_path, + metadata=metadata, + attributes=zattrs_json, + runtime_configuration=runtime_configuration, + ) + out._validate_metadata() + return out + + async def _save_metadata(self) -> None: + self._validate_metadata() + + await (self.store_path / ZARRAY_JSON).set_async(self.metadata.to_bytes()) + if self.attributes is not None and len(self.attributes) > 0: + await (self.store_path / ZATTRS_JSON).set_async( + json.dumps(self.attributes).encode(), + ) + else: + await (self.store_path / ZATTRS_JSON).delete_async() + + def _validate_metadata(self) -> None: + assert len(self.metadata.shape) == len( + self.metadata.chunks + ), "`chunks` and `shape` need to have the same number of dimensions." 
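+
+    # Usage sketch (illustrative only; assumes `make_store_path` accepts a
+    # local path string):
+    #
+    #     arr = ArrayV2.create(
+    #         "data/example.zarr",
+    #         shape=(100, 100),
+    #         dtype=np.dtype("float64"),
+    #         chunks=(10, 10),
+    #     )
+    #     arr[:10, :10] = np.ones((10, 10))
+    #
+    # With the default "." separator, the chunk written above is stored under
+    # the key "0.0" (see `_encode_chunk_key` below).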
+ + @property + def ndim(self) -> int: + return len(self.metadata.shape) + + @property + def shape(self) -> ChunkCoords: + return self.metadata.shape + + @property + def dtype(self) -> np.dtype: + return self.metadata.dtype + + @property + def async_(self) -> _AsyncArrayProxy: + return _AsyncArrayProxy(self) + + def __getitem__(self, selection: Selection): + return sync(self.get_async(selection), self.runtime_configuration.asyncio_loop) + + async def get_async(self, selection: Selection): + indexer = BasicIndexer( + selection, + shape=self.metadata.shape, + chunk_shape=self.metadata.chunks, + ) + + # setup output array + out = np.zeros( + indexer.shape, + dtype=self.metadata.dtype, + order=self.metadata.order, + ) + + # reading chunks and decoding them + await concurrent_map( + [ + (chunk_coords, chunk_selection, out_selection, out) + for chunk_coords, chunk_selection, out_selection in indexer + ], + self._read_chunk, + ) + + if out.shape: + return out + else: + return out[()] + + async def _read_chunk( + self, + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + out: np.ndarray, + ): + store_path = self.store_path / self._encode_chunk_key(chunk_coords) + + chunk_array = await self._decode_chunk(await store_path.get_async()) + if chunk_array is not None: + tmp = chunk_array[chunk_selection] + out[out_selection] = tmp + else: + out[out_selection] = self.metadata.fill_value + + async def _decode_chunk(self, chunk_bytes: Optional[BytesLike]) -> Optional[np.ndarray]: + if chunk_bytes is None: + return None + + if self.metadata.compressor is not None: + compressor = numcodecs.get_codec(self.metadata.compressor) + chunk_array = ensure_ndarray(await to_thread(compressor.decode, chunk_bytes)) + else: + chunk_array = ensure_ndarray(chunk_bytes) + + # ensure correct dtype + if str(chunk_array.dtype) != self.metadata.dtype: + chunk_array = chunk_array.view(self.metadata.dtype) + + # apply filters in reverse order + if self.metadata.filters is not None: + for filter_metadata in self.metadata.filters[::-1]: + filter = numcodecs.get_codec(filter_metadata) + chunk_array = await to_thread(filter.decode, chunk_array) + + # ensure correct chunk shape + if chunk_array.shape != self.metadata.chunks: + chunk_array = chunk_array.reshape( + self.metadata.chunks, + order=self.metadata.order, + ) + + return chunk_array + + def __setitem__(self, selection: Selection, value: np.ndarray) -> None: + sync(self.set_async(selection, value), self.runtime_configuration.asyncio_loop) + + async def set_async(self, selection: Selection, value: np.ndarray) -> None: + chunk_shape = self.metadata.chunks + indexer = BasicIndexer( + selection, + shape=self.metadata.shape, + chunk_shape=chunk_shape, + ) + + sel_shape = indexer.shape + + # check value shape + if np.isscalar(value): + # setting a scalar value + pass + else: + if not hasattr(value, "shape"): + value = np.asarray(value, self.metadata.dtype) + assert value.shape == sel_shape + if value.dtype != self.metadata.dtype: + value = value.astype(self.metadata.dtype, order="A") + + # merging with existing data and encoding chunks + await concurrent_map( + [ + ( + value, + chunk_shape, + chunk_coords, + chunk_selection, + out_selection, + ) + for chunk_coords, chunk_selection, out_selection in indexer + ], + self._write_chunk, + ) + + async def _write_chunk( + self, + value: np.ndarray, + chunk_shape: ChunkCoords, + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + ): + store_path = 
self.store_path / self._encode_chunk_key(chunk_coords) + + if is_total_slice(chunk_selection, chunk_shape): + # write entire chunks + if np.isscalar(value): + chunk_array = np.empty( + chunk_shape, + dtype=self.metadata.dtype, + order=self.metadata.order, + ) + chunk_array.fill(value) + else: + chunk_array = value[out_selection] + await self._write_chunk_to_store(store_path, chunk_array) + + else: + # writing partial chunks + # read chunk first + tmp = await self._decode_chunk(await store_path.get_async()) + + # merge new value + if tmp is None: + chunk_array = np.empty( + chunk_shape, + dtype=self.metadata.dtype, + order=self.metadata.order, + ) + chunk_array.fill(self.metadata.fill_value) + else: + chunk_array = tmp.copy( + order=self.metadata.order, + ) # make a writable copy + chunk_array[chunk_selection] = value[out_selection] + + await self._write_chunk_to_store(store_path, chunk_array) + + async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.ndarray): + chunk_bytes: Optional[BytesLike] + if np.all(chunk_array == self.metadata.fill_value): + # chunks that only contain fill_value will be removed + await store_path.delete_async() + else: + chunk_bytes = await self._encode_chunk(chunk_array) + if chunk_bytes is None: + await store_path.delete_async() + else: + await store_path.set_async(chunk_bytes) + + async def _encode_chunk(self, chunk_array: np.ndarray) -> Optional[BytesLike]: + chunk_array = chunk_array.ravel(order=self.metadata.order) + + if self.metadata.filters is not None: + for filter_metadata in self.metadata.filters: + filter = numcodecs.get_codec(filter_metadata) + chunk_array = await to_thread(filter.encode, chunk_array) + + if self.metadata.compressor is not None: + compressor = numcodecs.get_codec(self.metadata.compressor) + if not chunk_array.flags.c_contiguous and not chunk_array.flags.f_contiguous: + chunk_array = chunk_array.copy(order="A") + encoded_chunk_bytes = ensure_bytes(await to_thread(compressor.encode, chunk_array)) + else: + encoded_chunk_bytes = ensure_bytes(chunk_array) + + return encoded_chunk_bytes + + def _encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + chunk_identifier = self.metadata.dimension_separator.join(map(str, chunk_coords)) + return "0" if chunk_identifier == "" else chunk_identifier + + async def resize_async(self, new_shape: ChunkCoords) -> ArrayV2: + assert len(new_shape) == len(self.metadata.shape) + new_metadata = evolve(self.metadata, shape=new_shape) + + # Remove all chunks outside of the new shape + chunk_shape = self.metadata.chunks + old_chunk_coords = set(all_chunk_coords(self.metadata.shape, chunk_shape)) + new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape)) + + async def _delete_key(key: str) -> None: + await (self.store_path / key).delete_async() + + await concurrent_map( + [ + (self._encode_chunk_key(chunk_coords),) + for chunk_coords in old_chunk_coords.difference(new_chunk_coords) + ], + _delete_key, + ) + + # Write new metadata + await (self.store_path / ZARRAY_JSON).set_async(new_metadata.to_bytes()) + return evolve(self, metadata=new_metadata) + + def resize(self, new_shape: ChunkCoords) -> ArrayV2: + return sync(self.resize_async(new_shape), self.runtime_configuration.asyncio_loop) + + async def convert_to_v3_async(self) -> Array: + from sys import byteorder as sys_byteorder + + from zarr.v3.array import Array + from zarr.v3.common import ZARR_JSON + from zarr.v3.metadata import ( + ArrayMetadata, + BloscCodecConfigurationMetadata, + BloscCodecMetadata, + 
BytesCodecConfigurationMetadata,
+            BytesCodecMetadata,
+            CodecMetadata,
+            DataType,
+            GzipCodecConfigurationMetadata,
+            GzipCodecMetadata,
+            RegularChunkGridConfigurationMetadata,
+            RegularChunkGridMetadata,
+            TransposeCodecConfigurationMetadata,
+            TransposeCodecMetadata,
+            V2ChunkKeyEncodingConfigurationMetadata,
+            V2ChunkKeyEncodingMetadata,
+            blosc_shuffle_int_to_str,
+            dtype_to_data_type,
+        )
+
+        data_type = DataType[dtype_to_data_type[self.metadata.dtype.str]]
+        endian: Literal["little", "big"]
+        if self.metadata.dtype.byteorder == "=":
+            endian = sys_byteorder
+        elif self.metadata.dtype.byteorder == ">":
+            endian = "big"
+        else:
+            endian = "little"
+
+        assert (
+            self.metadata.filters is None or len(self.metadata.filters) == 0
+        ), "Filters are not supported by v3."
+
+        codecs: List[CodecMetadata] = []
+
+        if self.metadata.order == "F":
+            codecs.append(
+                TransposeCodecMetadata(configuration=TransposeCodecConfigurationMetadata(order="F"))
+            )
+        codecs.append(
+            BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian=endian))
+        )
+
+        if self.metadata.compressor is not None:
+            v2_codec = numcodecs.get_codec(self.metadata.compressor).get_config()
+            assert v2_codec["id"] in (
+                "blosc",
+                "gzip",
+            ), "Only blosc and gzip are supported by v3."
+            if v2_codec["id"] == "blosc":
+                shuffle = blosc_shuffle_int_to_str[v2_codec.get("shuffle", 0)]
+                codecs.append(
+                    BloscCodecMetadata(
+                        configuration=BloscCodecConfigurationMetadata(
+                            typesize=data_type.byte_count,
+                            cname=v2_codec["cname"],
+                            clevel=v2_codec["clevel"],
+                            shuffle=shuffle,
+                            blocksize=v2_codec.get("blocksize", 0),
+                        )
+                    )
+                )
+            elif v2_codec["id"] == "gzip":
+                codecs.append(
+                    GzipCodecMetadata(
+                        configuration=GzipCodecConfigurationMetadata(level=v2_codec.get("level", 5))
+                    )
+                )
+
+        new_metadata = ArrayMetadata(
+            shape=self.metadata.shape,
+            chunk_grid=RegularChunkGridMetadata(
+                configuration=RegularChunkGridConfigurationMetadata(
+                    chunk_shape=self.metadata.chunks
+                )
+            ),
+            data_type=data_type,
+            fill_value=0 if self.metadata.fill_value is None else self.metadata.fill_value,
+            chunk_key_encoding=V2ChunkKeyEncodingMetadata(
+                configuration=V2ChunkKeyEncodingConfigurationMetadata(
+                    separator=self.metadata.dimension_separator
+                )
+            ),
+            codecs=codecs,
+            attributes=self.attributes or {},
+        )
+
+        new_metadata_bytes = new_metadata.to_bytes()
+        await (self.store_path / ZARR_JSON).set_async(new_metadata_bytes)
+
+        return Array.from_json(
+            store_path=self.store_path,
+            zarr_json=json.loads(new_metadata_bytes),
+            runtime_configuration=self.runtime_configuration,
+        )
+
+    async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> ArrayV2:
+        await (self.store_path / ZATTRS_JSON).set_async(json.dumps(new_attributes).encode())
+        return evolve(self, attributes=new_attributes)
+
+    def update_attributes(self, new_attributes: Dict[str, Any]) -> ArrayV2:
+        return sync(
+            self.update_attributes_async(new_attributes),
+            self.runtime_configuration.asyncio_loop,
+        )
+
+    def convert_to_v3(self) -> Array:
+        return sync(self.convert_to_v3_async(), loop=self.runtime_configuration.asyncio_loop)
+
+    def __repr__(self):
+        return f"<ArrayV2 {self.store_path} shape={self.shape} dtype={self.dtype}>"
diff --git a/zarr/v3/codecs.py b/zarr/v3/codecs.py
new file mode 100644
index 0000000000..ff913c42b2
--- /dev/null
+++ b/zarr/v3/codecs.py
@@ -0,0 +1,514 @@
+from __future__ import annotations
+
+from functools import reduce
+from typing import TYPE_CHECKING, Iterable, List, Literal, Optional, Tuple, Union
+from warnings import warn
+
+import numcodecs
+import numpy as np
+from attr import 
asdict, evolve, frozen +from crc32c import crc32c +from numcodecs.blosc import Blosc +from numcodecs.gzip import GZip +from zstandard import ZstdCompressor, ZstdDecompressor + +from zarr.v3.abc.codec import Codec, ArrayArrayCodec, ArrayBytesCodec, BytesBytesCodec +from zarr.v3.common import BytesLike, to_thread +from zarr.v3.metadata import ( + BloscCodecConfigurationMetadata, + BloscCodecMetadata, + BytesCodecConfigurationMetadata, + BytesCodecMetadata, + CodecMetadata, + Crc32cCodecMetadata, + GzipCodecConfigurationMetadata, + GzipCodecMetadata, + ShardingCodecConfigurationMetadata, + ShardingCodecMetadata, + TransposeCodecConfigurationMetadata, + TransposeCodecMetadata, + ZstdCodecConfigurationMetadata, + ZstdCodecMetadata, +) + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + +# See https://zarr.readthedocs.io/en/stable/tutorial.html#configuring-blosc +numcodecs.blosc.use_threads = False + + +@frozen +class CodecPipeline: + codecs: List[Codec] + + @classmethod + def from_metadata( + cls, + codecs_metadata: Iterable[CodecMetadata], + array_metadata: CoreArrayMetadata, + ) -> CodecPipeline: + out: List[Codec] = [] + for codec_metadata in codecs_metadata or []: + if codec_metadata.name == "endian": + codec_metadata = evolve(codec_metadata, name="bytes") # type: ignore + + codec: Codec + if codec_metadata.name == "blosc": + codec = BloscCodec.from_metadata(codec_metadata, array_metadata) + elif codec_metadata.name == "gzip": + codec = GzipCodec.from_metadata(codec_metadata, array_metadata) + elif codec_metadata.name == "zstd": + codec = ZstdCodec.from_metadata(codec_metadata, array_metadata) + elif codec_metadata.name == "transpose": + codec = TransposeCodec.from_metadata(codec_metadata, array_metadata) + elif codec_metadata.name == "bytes": + codec = BytesCodec.from_metadata(codec_metadata, array_metadata) + elif codec_metadata.name == "crc32c": + codec = Crc32cCodec.from_metadata(codec_metadata, array_metadata) + elif codec_metadata.name == "sharding_indexed": + from zarr.v3.sharding import ShardingCodec + + codec = ShardingCodec.from_metadata(codec_metadata, array_metadata) + else: + raise RuntimeError(f"Unsupported codec: {codec_metadata}") + + out.append(codec) + array_metadata = codec.resolve_metadata() + CodecPipeline._validate_codecs(out, array_metadata) + return cls(out) + + @staticmethod + def _validate_codecs(codecs: List[Codec], array_metadata: CoreArrayMetadata) -> None: + from zarr.v3.sharding import ShardingCodec + + assert any( + isinstance(codec, ArrayBytesCodec) for codec in codecs + ), "Exactly one array-to-bytes codec is required." + + prev_codec: Optional[Codec] = None + for codec in codecs: + if prev_codec is not None: + assert not isinstance(codec, ArrayBytesCodec) or not isinstance( + prev_codec, ArrayBytesCodec + ), ( + f"ArrayBytesCodec '{type(codec)}' cannot follow after " + + f"ArrayBytesCodec '{type(prev_codec)}' because exactly " + + "1 ArrayBytesCodec is allowed." + ) + assert not isinstance(codec, ArrayBytesCodec) or not isinstance( + prev_codec, BytesBytesCodec + ), ( + f"ArrayBytesCodec '{type(codec)}' cannot follow after " + + f"BytesBytesCodec '{type(prev_codec)}'." + ) + assert not isinstance(codec, ArrayArrayCodec) or not isinstance( + prev_codec, ArrayBytesCodec + ), ( + f"ArrayArrayCodec '{type(codec)}' cannot follow after " + + f"ArrayBytesCodec '{type(prev_codec)}'." 
+ ) + assert not isinstance(codec, ArrayArrayCodec) or not isinstance( + prev_codec, BytesBytesCodec + ), ( + f"ArrayArrayCodec '{type(codec)}' cannot follow after " + + f"BytesBytesCodec '{type(prev_codec)}'." + ) + + if isinstance(codec, ShardingCodec): + assert len(codec.configuration.chunk_shape) == len(array_metadata.shape), ( + "The shard's `chunk_shape` and array's `shape` need to have the " + + "same number of dimensions." + ) + assert all( + s % c == 0 + for s, c in zip( + array_metadata.chunk_shape, + codec.configuration.chunk_shape, + ) + ), ( + "The array's `chunk_shape` needs to be divisible by the " + + "shard's inner `chunk_shape`." + ) + prev_codec = codec + + if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: + warn( + "Combining a `sharding_indexed` codec disables partial reads and " + + "writes, which may lead to inefficient performance." + ) + + def _array_array_codecs(self) -> List[ArrayArrayCodec]: + return [codec for codec in self.codecs if isinstance(codec, ArrayArrayCodec)] + + def _array_bytes_codec(self) -> ArrayBytesCodec: + return next(codec for codec in self.codecs if isinstance(codec, ArrayBytesCodec)) + + def _bytes_bytes_codecs(self) -> List[BytesBytesCodec]: + return [codec for codec in self.codecs if isinstance(codec, BytesBytesCodec)] + + async def decode(self, chunk_bytes: BytesLike) -> np.ndarray: + for bb_codec in self._bytes_bytes_codecs()[::-1]: + chunk_bytes = await bb_codec.decode(chunk_bytes) + + chunk_array = await self._array_bytes_codec().decode(chunk_bytes) + + for aa_codec in self._array_array_codecs()[::-1]: + chunk_array = await aa_codec.decode(chunk_array) + + return chunk_array + + async def encode(self, chunk_array: np.ndarray) -> Optional[BytesLike]: + for aa_codec in self._array_array_codecs(): + chunk_array_maybe = await aa_codec.encode(chunk_array) + if chunk_array_maybe is None: + return None + chunk_array = chunk_array_maybe + + chunk_bytes_maybe = await self._array_bytes_codec().encode(chunk_array) + if chunk_bytes_maybe is None: + return None + chunk_bytes = chunk_bytes_maybe + + for bb_codec in self._bytes_bytes_codecs(): + chunk_bytes_maybe = await bb_codec.encode(chunk_bytes) + if chunk_bytes_maybe is None: + return None + chunk_bytes = chunk_bytes_maybe + + return chunk_bytes + + def compute_encoded_size(self, byte_length: int) -> int: + return reduce(lambda acc, codec: codec.compute_encoded_size(acc), self.codecs, byte_length) + + +@frozen +class BloscCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + configuration: BloscCodecConfigurationMetadata + blosc_codec: Blosc + is_fixed_size = False + + @classmethod + def from_metadata( + cls, codec_metadata: BloscCodecMetadata, array_metadata: CoreArrayMetadata + ) -> BloscCodec: + configuration = codec_metadata.configuration + if configuration.typesize == 0: + configuration = evolve(configuration, typesize=array_metadata.data_type.byte_count) + config_dict = asdict(codec_metadata.configuration) + config_dict.pop("typesize", None) + map_shuffle_str_to_int = {"noshuffle": 0, "shuffle": 1, "bitshuffle": 2} + config_dict["shuffle"] = map_shuffle_str_to_int[config_dict["shuffle"]] + return cls( + array_metadata=array_metadata, + configuration=configuration, + blosc_codec=Blosc.from_config(config_dict), + ) + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + return await to_thread(self.blosc_codec.decode, chunk_bytes) + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + chunk_array = 
np.frombuffer(chunk_bytes, dtype=self.array_metadata.dtype) + return await to_thread(self.blosc_codec.encode, chunk_array) + + def compute_encoded_size(self, _input_byte_length: int) -> int: + raise NotImplementedError + + +@frozen +class BytesCodec(ArrayBytesCodec): + array_metadata: CoreArrayMetadata + configuration: BytesCodecConfigurationMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: BytesCodecMetadata, array_metadata: CoreArrayMetadata + ) -> BytesCodec: + assert ( + array_metadata.dtype.itemsize == 1 or codec_metadata.configuration.endian is not None + ), "The `endian` configuration needs to be specified for multi-byte data types." + return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + ) + + def _get_byteorder(self, array: np.ndarray) -> Literal["big", "little"]: + if array.dtype.byteorder == "<": + return "little" + elif array.dtype.byteorder == ">": + return "big" + else: + import sys + + return sys.byteorder + + async def decode( + self, + chunk_bytes: BytesLike, + ) -> np.ndarray: + if self.array_metadata.dtype.itemsize > 0: + if self.configuration.endian == "little": + prefix = "<" + else: + prefix = ">" + dtype = np.dtype(f"{prefix}{self.array_metadata.data_type.to_numpy_shortname()}") + else: + dtype = np.dtype(f"|{self.array_metadata.data_type.to_numpy_shortname()}") + chunk_array = np.frombuffer(chunk_bytes, dtype) + + # ensure correct chunk shape + if chunk_array.shape != self.array_metadata.chunk_shape: + chunk_array = chunk_array.reshape( + self.array_metadata.chunk_shape, + ) + return chunk_array + + async def encode( + self, + chunk_array: np.ndarray, + ) -> Optional[BytesLike]: + if chunk_array.dtype.itemsize > 1: + byteorder = self._get_byteorder(chunk_array) + if self.configuration.endian != byteorder: + new_dtype = chunk_array.dtype.newbyteorder(self.configuration.endian) + chunk_array = chunk_array.astype(new_dtype) + return chunk_array.tobytes() + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + + +@frozen +class TransposeCodec(ArrayArrayCodec): + array_metadata: CoreArrayMetadata + order: Tuple[int, ...] + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: TransposeCodecMetadata, array_metadata: CoreArrayMetadata + ) -> TransposeCodec: + configuration = codec_metadata.configuration + if configuration.order == "F": + order = tuple(array_metadata.ndim - x - 1 for x in range(array_metadata.ndim)) + + elif configuration.order == "C": + order = tuple(range(array_metadata.ndim)) + + else: + assert len(configuration.order) == array_metadata.ndim, ( + "The `order` tuple needs have as many entries as " + + f"there are dimensions in the array. Got: {configuration.order}" + ) + assert len(configuration.order) == len(set(configuration.order)), ( + "There must not be duplicates in the `order` tuple. " + + f"Got: {configuration.order}" + ) + assert all(0 <= x < array_metadata.ndim for x in configuration.order), ( + "All entries in the `order` tuple must be between 0 and " + + f"the number of dimensions in the array. 
Got: {configuration.order}" + ) + order = tuple(configuration.order) + + return cls( + array_metadata=array_metadata, + order=order, + ) + + def resolve_metadata(self) -> CoreArrayMetadata: + from zarr.v3.metadata import CoreArrayMetadata + + return CoreArrayMetadata( + shape=tuple( + self.array_metadata.shape[self.order[i]] for i in range(self.array_metadata.ndim) + ), + chunk_shape=tuple( + self.array_metadata.chunk_shape[self.order[i]] + for i in range(self.array_metadata.ndim) + ), + data_type=self.array_metadata.data_type, + fill_value=self.array_metadata.fill_value, + runtime_configuration=self.array_metadata.runtime_configuration, + ) + + async def decode( + self, + chunk_array: np.ndarray, + ) -> np.ndarray: + inverse_order = [0 for _ in range(self.array_metadata.ndim)] + for x, i in enumerate(self.order): + inverse_order[x] = i + chunk_array = chunk_array.transpose(inverse_order) + return chunk_array + + async def encode( + self, + chunk_array: np.ndarray, + ) -> Optional[np.ndarray]: + chunk_array = chunk_array.transpose(self.order) + return chunk_array + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + + +@frozen +class GzipCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + configuration: GzipCodecConfigurationMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: GzipCodecMetadata, array_metadata: CoreArrayMetadata + ) -> GzipCodec: + return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + ) + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + return await to_thread(GZip(self.configuration.level).decode, chunk_bytes) + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + return await to_thread(GZip(self.configuration.level).encode, chunk_bytes) + + def compute_encoded_size(self, _input_byte_length: int) -> int: + raise NotImplementedError + + +@frozen +class ZstdCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + configuration: ZstdCodecConfigurationMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: ZstdCodecMetadata, array_metadata: CoreArrayMetadata + ) -> ZstdCodec: + return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + ) + + def _compress(self, data: bytes) -> bytes: + ctx = ZstdCompressor( + level=self.configuration.level, write_checksum=self.configuration.checksum + ) + return ctx.compress(data) + + def _decompress(self, data: bytes) -> bytes: + ctx = ZstdDecompressor() + return ctx.decompress(data) + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + return await to_thread(self._decompress, chunk_bytes) + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + return await to_thread(self._compress, chunk_bytes) + + def compute_encoded_size(self, _input_byte_length: int) -> int: + raise NotImplementedError + + +@frozen +class Crc32cCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: Crc32cCodecMetadata, array_metadata: CoreArrayMetadata + ) -> Crc32cCodec: + return cls(array_metadata=array_metadata) + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + crc32_bytes = chunk_bytes[-4:] + inner_bytes = chunk_bytes[:-4] + + assert np.uint32(crc32c(inner_bytes)).tobytes() == bytes(crc32_bytes) + return inner_bytes + + async def encode( + self, + chunk_bytes: 
bytes, + ) -> Optional[BytesLike]: + return chunk_bytes + np.uint32(crc32c(chunk_bytes)).tobytes() + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + 4 + + +def blosc_codec( + typesize: int, + cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd", + clevel: int = 5, + shuffle: Literal["noshuffle", "shuffle", "bitshuffle"] = "noshuffle", + blocksize: int = 0, +) -> BloscCodecMetadata: + return BloscCodecMetadata( + configuration=BloscCodecConfigurationMetadata( + cname=cname, + clevel=clevel, + shuffle=shuffle, + blocksize=blocksize, + typesize=typesize, + ) + ) + + +def bytes_codec(endian: Optional[Literal["big", "little"]] = "little") -> BytesCodecMetadata: + return BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian)) + + +def transpose_codec(order: Union[Tuple[int, ...], Literal["C", "F"]]) -> TransposeCodecMetadata: + return TransposeCodecMetadata(configuration=TransposeCodecConfigurationMetadata(order)) + + +def gzip_codec(level: int = 5) -> GzipCodecMetadata: + return GzipCodecMetadata(configuration=GzipCodecConfigurationMetadata(level)) + + +def zstd_codec(level: int = 0, checksum: bool = False) -> ZstdCodecMetadata: + return ZstdCodecMetadata(configuration=ZstdCodecConfigurationMetadata(level, checksum)) + + +def crc32c_codec() -> Crc32cCodecMetadata: + return Crc32cCodecMetadata() + + +def sharding_codec( + chunk_shape: Tuple[int, ...], + codecs: Optional[List[CodecMetadata]] = None, + index_codecs: Optional[List[CodecMetadata]] = None, +) -> ShardingCodecMetadata: + codecs = codecs or [bytes_codec()] + index_codecs = index_codecs or [bytes_codec(), crc32c_codec()] + return ShardingCodecMetadata( + configuration=ShardingCodecConfigurationMetadata(chunk_shape, codecs, index_codecs) + ) diff --git a/zarr/v3/common.py b/zarr/v3/common.py new file mode 100644 index 0000000000..0e55a7c1fd --- /dev/null +++ b/zarr/v3/common.py @@ -0,0 +1,158 @@ +from __future__ import annotations + +import asyncio +import contextvars +import functools +from typing import ( + Any, + Awaitable, + Callable, + Dict, + List, + Literal, + Optional, + Tuple, + TypeVar, + Union, +) + +import numpy as np +from cattr import Converter + +ZARR_JSON = "zarr.json" +ZARRAY_JSON = ".zarray" +ZGROUP_JSON = ".zgroup" +ZATTRS_JSON = ".zattrs" + +BytesLike = Union[bytes, bytearray, memoryview] +ChunkCoords = Tuple[int, ...] +SliceSelection = Tuple[slice, ...] 
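# For illustration: a SliceSelection is the normalized tuple-of-slices form,
# e.g. the NumPy-style selection arr[0:2, 4:8] corresponds to
# (slice(0, 2), slice(4, 8)); Selection, defined next, additionally admits a
# single bare slice such as arr[0:2].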
+Selection = Union[slice, SliceSelection] + + +def make_cattr(): + from zarr.v3.metadata import ( + BloscCodecMetadata, + BytesCodecMetadata, + ChunkKeyEncodingMetadata, + CodecMetadata, + Crc32cCodecMetadata, + DefaultChunkKeyEncodingMetadata, + GzipCodecMetadata, + ShardingCodecMetadata, + TransposeCodecMetadata, + V2ChunkKeyEncodingMetadata, + ZstdCodecMetadata, + ) + + converter = Converter() + + def _structure_chunk_key_encoding_metadata(d: Dict[str, Any], _t) -> ChunkKeyEncodingMetadata: + if d["name"] == "default": + return converter.structure(d, DefaultChunkKeyEncodingMetadata) + if d["name"] == "v2": + return converter.structure(d, V2ChunkKeyEncodingMetadata) + raise KeyError + + converter.register_structure_hook( + ChunkKeyEncodingMetadata, _structure_chunk_key_encoding_metadata + ) + + def _structure_codec_metadata(d: Dict[str, Any], _t=None) -> CodecMetadata: + if d["name"] == "endian": + d["name"] = "bytes" + + if d["name"] == "blosc": + return converter.structure(d, BloscCodecMetadata) + if d["name"] == "bytes": + return converter.structure(d, BytesCodecMetadata) + if d["name"] == "transpose": + return converter.structure(d, TransposeCodecMetadata) + if d["name"] == "gzip": + return converter.structure(d, GzipCodecMetadata) + if d["name"] == "zstd": + return converter.structure(d, ZstdCodecMetadata) + if d["name"] == "sharding_indexed": + return converter.structure(d, ShardingCodecMetadata) + if d["name"] == "crc32c": + return converter.structure(d, Crc32cCodecMetadata) + raise KeyError + + converter.register_structure_hook(CodecMetadata, _structure_codec_metadata) + + converter.register_structure_hook_factory( + lambda t: str(t) == "ForwardRef('CodecMetadata')", + lambda t: _structure_codec_metadata, + ) + + def _structure_order(d: Any, _t=None) -> Union[Literal["C", "F"], Tuple[int, ...]]: + if d == "C": + return "C" + if d == "F": + return "F" + if isinstance(d, list): + return tuple(d) + raise KeyError + + converter.register_structure_hook_factory( + lambda t: str(t) == "typing.Union[typing.Literal['C', 'F'], typing.Tuple[int, ...]]", + lambda t: _structure_order, + ) + + # Needed for v2 fill_value + def _structure_fill_value(d: Any, _t=None) -> Union[None, int, float]: + if d is None: + return None + try: + return int(d) + except ValueError: + pass + try: + return float(d) + except ValueError: + pass + raise ValueError + + converter.register_structure_hook_factory( + lambda t: str(t) == "typing.Union[NoneType, int, float]", + lambda t: _structure_fill_value, + ) + + # Needed for v2 dtype + converter.register_structure_hook( + np.dtype, + lambda d, _: np.dtype(d), + ) + + return converter + + +def product(tup: ChunkCoords) -> int: + return functools.reduce(lambda x, y: x * y, tup, 1) + + +T = TypeVar("T", bound=Tuple) +V = TypeVar("V") + + +async def concurrent_map( + items: List[T], func: Callable[..., Awaitable[V]], limit: Optional[int] = None +) -> List[V]: + if limit is None: + return await asyncio.gather(*[func(*item) for item in items]) + + else: + sem = asyncio.Semaphore(limit) + + async def run(item): + async with sem: + return await func(*item) + + return await asyncio.gather(*[asyncio.ensure_future(run(item)) for item in items]) + + +async def to_thread(func, /, *args, **kwargs): + loop = asyncio.get_running_loop() + ctx = contextvars.copy_context() + func_call = functools.partial(ctx.run, func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) diff --git a/zarr/v3/group.py b/zarr/v3/group.py new file mode 100644 index 0000000000..aa43c706a5 
--- /dev/null +++ b/zarr/v3/group.py @@ -0,0 +1,179 @@ +from __future__ import annotations + +import json +from typing import Any, Dict, Literal, Optional, Union + +from attr import asdict, evolve, field, frozen + +from zarr.v3.array import Array +from zarr.v3.common import ZARR_JSON, make_cattr +from zarr.v3.metadata import RuntimeConfiguration +from zarr.v3.store import StoreLike, StorePath, make_store_path +from zarr.v3.sync import sync + + +@frozen +class GroupMetadata: + attributes: Dict[str, Any] = field(factory=dict) + zarr_format: Literal[3] = 3 + node_type: Literal["group"] = "group" + + def to_bytes(self) -> bytes: + return json.dumps(asdict(self)).encode() + + @classmethod + def from_json(cls, zarr_json: Any) -> GroupMetadata: + return make_cattr().structure(zarr_json, GroupMetadata) + + +@frozen +class Group: + metadata: GroupMetadata + store_path: StorePath + runtime_configuration: RuntimeConfiguration + + @classmethod + async def create_async( + cls, + store: StoreLike, + *, + attributes: Optional[Dict[str, Any]] = None, + exists_ok: bool = False, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Group: + store_path = make_store_path(store) + if not exists_ok: + assert not await (store_path / ZARR_JSON).exists_async() + group = cls( + metadata=GroupMetadata(attributes=attributes or {}), + store_path=store_path, + runtime_configuration=runtime_configuration, + ) + await group._save_metadata() + return group + + @classmethod + def create( + cls, + store: StoreLike, + *, + attributes: Optional[Dict[str, Any]] = None, + exists_ok: bool = False, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Group: + return sync( + cls.create_async( + store, + attributes=attributes, + exists_ok=exists_ok, + runtime_configuration=runtime_configuration, + ), + runtime_configuration.asyncio_loop, + ) + + @classmethod + async def open_async( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Group: + store_path = make_store_path(store) + zarr_json_bytes = await (store_path / ZARR_JSON).get_async() + assert zarr_json_bytes is not None + return cls.from_json(store_path, json.loads(zarr_json_bytes), runtime_configuration) + + @classmethod + def open( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Group: + return sync( + cls.open_async(store, runtime_configuration), + runtime_configuration.asyncio_loop, + ) + + @classmethod + def from_json( + cls, + store_path: StorePath, + zarr_json: Any, + runtime_configuration: RuntimeConfiguration, + ) -> Group: + group = cls( + metadata=GroupMetadata.from_json(zarr_json), + store_path=store_path, + runtime_configuration=runtime_configuration, + ) + return group + + @classmethod + async def open_or_array( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Union[Array, Group]: + store_path = make_store_path(store) + zarr_json_bytes = await (store_path / ZARR_JSON).get_async() + if zarr_json_bytes is None: + raise KeyError + zarr_json = json.loads(zarr_json_bytes) + if zarr_json["node_type"] == "group": + return cls.from_json(store_path, zarr_json, runtime_configuration) + if zarr_json["node_type"] == "array": + return Array.from_json( + store_path, zarr_json, runtime_configuration=runtime_configuration + ) + raise KeyError + + async def _save_metadata(self) -> None: + await (self.store_path / ZARR_JSON).set_async(self.metadata.to_bytes()) + + 
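    # Usage sketch (illustrative; the path and attribute values here are made
    # up, and a plain string store resolves via make_store_path from
    # zarr.v3.store):
    #
    #     group = Group.create("example.zarr", attributes={"title": "demo"})
    #     subgroup = group.create_group("images")
    #     assert isinstance(group["images"], Group)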
async def get_async(self, path: str) -> Union[Array, Group]: + return await self.__class__.open_or_array( + self.store_path / path, self.runtime_configuration + ) + + def __getitem__(self, path: str) -> Union[Array, Group]: + return sync(self.get_async(path), self.runtime_configuration.asyncio_loop) + + async def create_group_async(self, path: str, **kwargs) -> Group: + runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) + return await self.__class__.create_async( + self.store_path / path, + runtime_configuration=runtime_configuration, + **kwargs, + ) + + def create_group(self, path: str, **kwargs) -> Group: + return sync(self.create_group_async(path), self.runtime_configuration.asyncio_loop) + + async def create_array_async(self, path: str, **kwargs) -> Array: + runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) + return await Array.create_async( + self.store_path / path, + runtime_configuration=runtime_configuration, + **kwargs, + ) + + def create_array(self, path: str, **kwargs) -> Array: + return sync( + self.create_array_async(path, **kwargs), + self.runtime_configuration.asyncio_loop, + ) + + async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Group: + new_metadata = evolve(self.metadata, attributes=new_attributes) + + # Write new metadata + await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes()) + return evolve(self, metadata=new_metadata) + + def update_attributes(self, new_attributes: Dict[str, Any]) -> Group: + return sync( + self.update_attributes_async(new_attributes), + self.runtime_configuration.asyncio_loop, + ) + + def __repr__(self): + return f"" diff --git a/zarr/v3/group_v2.py b/zarr/v3/group_v2.py new file mode 100644 index 0000000000..3b1a369ae2 --- /dev/null +++ b/zarr/v3/group_v2.py @@ -0,0 +1,218 @@ +from __future__ import annotations + +import asyncio +import json +from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union + +from attr import asdict, evolve, frozen + +from zarr.v3.array_v2 import ArrayV2 +from zarr.v3.common import ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, make_cattr +from zarr.v3.metadata import RuntimeConfiguration +from zarr.v3.store import StoreLike, StorePath, make_store_path +from zarr.v3.sync import sync + +if TYPE_CHECKING: + from zarr.v3.group import Group + + +@frozen +class GroupV2Metadata: + zarr_format: Literal[2] = 2 + + def to_bytes(self) -> bytes: + return json.dumps(asdict(self)).encode() + + @classmethod + def from_json(cls, zarr_json: Any) -> GroupV2Metadata: + return make_cattr().structure(zarr_json, cls) + + +@frozen +class GroupV2: + metadata: GroupV2Metadata + store_path: StorePath + runtime_configuration: RuntimeConfiguration + attributes: Optional[Dict[str, Any]] = None + + @classmethod + async def create_async( + cls, + store: StoreLike, + *, + attributes: Optional[Dict[str, Any]] = None, + exists_ok: bool = False, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> GroupV2: + store_path = make_store_path(store) + if not exists_ok: + assert not await (store_path / ZGROUP_JSON).exists_async() + group = cls( + metadata=GroupV2Metadata(), + attributes=attributes, + store_path=store_path, + runtime_configuration=runtime_configuration, + ) + await group._save_metadata() + return group + + @classmethod + def create( + cls, + store: StoreLike, + *, + attributes: Optional[Dict[str, Any]] = None, + exists_ok: bool = False, + runtime_configuration: RuntimeConfiguration = 
RuntimeConfiguration(), + ) -> GroupV2: + return sync( + cls.create_async( + store, + attributes=attributes, + exists_ok=exists_ok, + runtime_configuration=runtime_configuration, + ), + runtime_configuration.asyncio_loop if runtime_configuration else None, + ) + + @classmethod + async def open_async( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> GroupV2: + store_path = make_store_path(store) + zgroup_bytes = await (store_path / ZGROUP_JSON).get_async() + assert zgroup_bytes is not None + zattrs_bytes = await (store_path / ZATTRS_JSON).get_async() + metadata = json.loads(zgroup_bytes) + attributes = json.loads(zattrs_bytes) if zattrs_bytes is not None else None + + return cls.from_json( + store_path, + metadata, + runtime_configuration, + attributes, + ) + + @classmethod + def open( + cls, + store_path: StorePath, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> GroupV2: + return sync( + cls.open_async(store_path, runtime_configuration), + runtime_configuration.asyncio_loop, + ) + + @classmethod + def from_json( + cls, + store_path: StorePath, + zarr_json: Any, + runtime_configuration: RuntimeConfiguration, + attributes: Optional[Dict[str, Any]] = None, + ) -> GroupV2: + group = cls( + metadata=GroupV2Metadata.from_json(zarr_json), + store_path=store_path, + runtime_configuration=runtime_configuration, + attributes=attributes, + ) + return group + + @staticmethod + async def open_or_array( + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Union[ArrayV2, GroupV2]: + store_path = make_store_path(store) + zgroup_bytes, zattrs_bytes = await asyncio.gather( + (store_path / ZGROUP_JSON).get_async(), + (store_path / ZATTRS_JSON).get_async(), + ) + attributes = json.loads(zattrs_bytes) if zattrs_bytes is not None else None + if zgroup_bytes is not None: + return GroupV2.from_json( + store_path, json.loads(zgroup_bytes), runtime_configuration, attributes + ) + zarray_bytes = await (store_path / ZARRAY_JSON).get_async() + if zarray_bytes is not None: + return ArrayV2.from_json( + store_path, json.loads(zarray_bytes), attributes, runtime_configuration + ) + raise KeyError + + async def _save_metadata(self) -> None: + await (self.store_path / ZGROUP_JSON).set_async(self.metadata.to_bytes()) + if self.attributes is not None and len(self.attributes) > 0: + await (self.store_path / ZATTRS_JSON).set_async( + json.dumps(self.attributes).encode(), + ) + else: + await (self.store_path / ZATTRS_JSON).delete_async() + + async def get_async(self, path: str) -> Union[ArrayV2, GroupV2]: + return await self.__class__.open_or_array( + self.store_path / path, self.runtime_configuration + ) + + def __getitem__(self, path: str) -> Union[ArrayV2, GroupV2]: + return sync(self.get_async(path), self.runtime_configuration.asyncio_loop) + + async def create_group_async(self, path: str, **kwargs) -> GroupV2: + runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) + return await self.__class__.create_async( + self.store_path / path, + runtime_configuration=runtime_configuration, + **kwargs, + ) + + def create_group(self, path: str, **kwargs) -> GroupV2: + return sync(self.create_group_async(path), self.runtime_configuration.asyncio_loop) + + async def create_array_async(self, path: str, **kwargs) -> ArrayV2: + runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) + return await ArrayV2.create_async( + self.store_path / 
path, + runtime_configuration=runtime_configuration, + **kwargs, + ) + + def create_array(self, path: str, **kwargs) -> ArrayV2: + return sync( + self.create_array_async(path, **kwargs), + self.runtime_configuration.asyncio_loop, + ) + + async def convert_to_v3_async(self) -> Group: + from zarr.v3.common import ZARR_JSON + from zarr.v3.group import Group, GroupMetadata + + new_metadata = GroupMetadata(attributes=self.attributes or {}) + new_metadata_bytes = new_metadata.to_bytes() + + await (self.store_path / ZARR_JSON).set_async(new_metadata_bytes) + + return Group.from_json( + store_path=self.store_path, + zarr_json=json.loads(new_metadata_bytes), + runtime_configuration=self.runtime_configuration, + ) + + async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> GroupV2: + await (self.store_path / ZATTRS_JSON).set_async(json.dumps(new_attributes).encode()) + return evolve(self, attributes=new_attributes) + + def update_attributes(self, new_attributes: Dict[str, Any]) -> GroupV2: + return sync( + self.update_attributes_async(new_attributes), + self.runtime_configuration.asyncio_loop, + ) + + def convert_to_v3(self) -> Group: + return sync(self.convert_to_v3_async(), loop=self.runtime_configuration.asyncio_loop) + + def __repr__(self): + return f"" diff --git a/zarr/v3/indexing.py b/zarr/v3/indexing.py new file mode 100644 index 0000000000..15adad111d --- /dev/null +++ b/zarr/v3/indexing.py @@ -0,0 +1,208 @@ +from __future__ import annotations + +import itertools +import math +from typing import Iterator, List, NamedTuple, Optional, Tuple + +from zarr.v3.common import ChunkCoords, Selection, SliceSelection, product + + +def _ensure_tuple(v: Selection) -> SliceSelection: + if not isinstance(v, tuple): + v = (v,) + return v + + +def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords): + raise IndexError( + "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) + ) + + +def _err_negative_step(): + raise IndexError("only slices with step >= 1 are supported") + + +def _check_selection_length(selection: SliceSelection, shape: ChunkCoords): + if len(selection) > len(shape): + _err_too_many_indices(selection, shape) + + +def _ensure_selection( + selection: Selection, + shape: ChunkCoords, +) -> SliceSelection: + selection = _ensure_tuple(selection) + + # fill out selection if not completely specified + if len(selection) < len(shape): + selection += (slice(None),) * (len(shape) - len(selection)) + + # check selection not too long + _check_selection_length(selection, shape) + + return selection + + +class _ChunkDimProjection(NamedTuple): + dim_chunk_ix: int + dim_chunk_sel: slice + dim_out_sel: Optional[slice] + + +def _ceildiv(a, b): + return math.ceil(a / b) + + +class _SliceDimIndexer: + dim_sel: slice + dim_len: int + dim_chunk_len: int + nitems: int + + start: int + stop: int + step: int + + def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int): + self.start, self.stop, self.step = dim_sel.indices(dim_len) + if self.step < 1: + _err_negative_step() + + self.dim_len = dim_len + self.dim_chunk_len = dim_chunk_len + self.nitems = max(0, _ceildiv((self.stop - self.start), self.step)) + self.nchunks = _ceildiv(self.dim_len, self.dim_chunk_len) + + def __iter__(self) -> Iterator[_ChunkDimProjection]: + # figure out the range of chunks we need to visit + dim_chunk_ix_from = self.start // self.dim_chunk_len + dim_chunk_ix_to = _ceildiv(self.stop, self.dim_chunk_len) + + # iterate over chunks in range + for dim_chunk_ix in 
range(dim_chunk_ix_from, dim_chunk_ix_to): + # compute offsets for chunk within overall array + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_limit = min(self.dim_len, (dim_chunk_ix + 1) * self.dim_chunk_len) + + # determine chunk length, accounting for trailing chunk + dim_chunk_len = dim_limit - dim_offset + + if self.start < dim_offset: + # selection starts before current chunk + dim_chunk_sel_start = 0 + remainder = (dim_offset - self.start) % self.step + if remainder: + dim_chunk_sel_start += self.step - remainder + # compute number of previous items, provides offset into output array + dim_out_offset = _ceildiv((dim_offset - self.start), self.step) + + else: + # selection starts within current chunk + dim_chunk_sel_start = self.start - dim_offset + dim_out_offset = 0 + + if self.stop > dim_limit: + # selection ends after current chunk + dim_chunk_sel_stop = dim_chunk_len + + else: + # selection ends within current chunk + dim_chunk_sel_stop = self.stop - dim_offset + + dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step) + dim_chunk_nitems = _ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) + dim_out_sel = slice(dim_out_offset, dim_out_offset + dim_chunk_nitems) + + yield _ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +class _ChunkProjection(NamedTuple): + chunk_coords: ChunkCoords + chunk_selection: SliceSelection + out_selection: SliceSelection + + +class BasicIndexer: + dim_indexers: List[_SliceDimIndexer] + shape: ChunkCoords + + def __init__( + self, + selection: Selection, + shape: Tuple[int, ...], + chunk_shape: Tuple[int, ...], + ): + # setup per-dimension indexers + self.dim_indexers = [ + _SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) + for dim_sel, dim_len, dim_chunk_len in zip( + _ensure_selection(selection, shape), shape, chunk_shape + ) + ] + self.shape = tuple(s.nitems for s in self.dim_indexers) + + def __iter__(self) -> Iterator[_ChunkProjection]: + for dim_projections in itertools.product(*self.dim_indexers): + chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) + chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) + + yield _ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +def morton_order_iter(chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: + def decode_morton(z: int, chunk_shape: ChunkCoords) -> ChunkCoords: + # Inspired by compressed morton code as implemented in Neuroglancer + # https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#compressed-morton-code + bits = tuple(math.ceil(math.log2(c)) for c in chunk_shape) + max_coords_bits = max(*bits) + input_bit = 0 + input_value = z + out = [0 for _ in range(len(chunk_shape))] + + for coord_bit in range(max_coords_bits): + for dim in range(len(chunk_shape)): + if coord_bit < bits[dim]: + bit = (input_value >> input_bit) & 1 + out[dim] |= bit << coord_bit + input_bit += 1 + return tuple(out) + + for i in range(product(chunk_shape)): + yield decode_morton(i, chunk_shape) + + +def c_order_iter(chunks_per_shard: ChunkCoords) -> Iterator[ChunkCoords]: + return itertools.product(*(range(x) for x in chunks_per_shard)) + + +def is_total_slice(item: Selection, shape: ChunkCoords): + """Determine whether `item` specifies a complete slice of array with the + given `shape`. 
Used to optimize __setitem__ operations on the Chunk + class.""" + + # N.B., assume shape is normalized + if item == slice(None): + return True + if isinstance(item, slice): + item = (item,) + if isinstance(item, tuple): + return all( + ( + isinstance(dim_sel, slice) + and ( + (dim_sel == slice(None)) + or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) + ) + ) + for dim_sel, dim_len in zip(item, shape) + ) + else: + raise TypeError("expected slice or tuple of slices, found %r" % item) + + +def all_chunk_coords(shape: ChunkCoords, chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: + return itertools.product(*(range(0, _ceildiv(s, c)) for s, c in zip(shape, chunk_shape))) diff --git a/zarr/v3/metadata.py b/zarr/v3/metadata.py new file mode 100644 index 0000000000..1fc43b19f0 --- /dev/null +++ b/zarr/v3/metadata.py @@ -0,0 +1,339 @@ +from __future__ import annotations + +import json +from asyncio import AbstractEventLoop +from enum import Enum +from typing import Any, Dict, List, Literal, Optional, Tuple, Union + +import numpy as np +from attr import asdict, field, frozen + +from zarr.v3.common import ChunkCoords, make_cattr + + +@frozen +class RuntimeConfiguration: + order: Literal["C", "F"] = "C" + concurrency: Optional[int] = None + asyncio_loop: Optional[AbstractEventLoop] = None + + +def runtime_configuration( + order: Literal["C", "F"], concurrency: Optional[int] = None +) -> RuntimeConfiguration: + return RuntimeConfiguration(order=order, concurrency=concurrency) + + +class DataType(Enum): + bool = "bool" + int8 = "int8" + int16 = "int16" + int32 = "int32" + int64 = "int64" + uint8 = "uint8" + uint16 = "uint16" + uint32 = "uint32" + uint64 = "uint64" + float32 = "float32" + float64 = "float64" + + @property + def byte_count(self) -> int: + data_type_byte_counts = { + DataType.bool: 1, + DataType.int8: 1, + DataType.int16: 2, + DataType.int32: 4, + DataType.int64: 8, + DataType.uint8: 1, + DataType.uint16: 2, + DataType.uint32: 4, + DataType.uint64: 8, + DataType.float32: 4, + DataType.float64: 8, + } + return data_type_byte_counts[self] + + def to_numpy_shortname(self) -> str: + data_type_to_numpy = { + DataType.bool: "bool", + DataType.int8: "i1", + DataType.int16: "i2", + DataType.int32: "i4", + DataType.int64: "i8", + DataType.uint8: "u1", + DataType.uint16: "u2", + DataType.uint32: "u4", + DataType.uint64: "u8", + DataType.float32: "f4", + DataType.float64: "f8", + } + return data_type_to_numpy[self] + + +dtype_to_data_type = { + "|b1": "bool", + "bool": "bool", + "|i1": "int8", + " ChunkCoords: + if chunk_key == "c": + return () + return tuple(map(int, chunk_key[1:].split(self.configuration.separator))) + + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + return self.configuration.separator.join(map(str, ("c",) + chunk_coords)) + + +@frozen +class V2ChunkKeyEncodingConfigurationMetadata: + separator: Literal[".", "/"] = "." 
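# Round-trip sketch for the two chunk-key encodings (illustrative; the "/"
# separator default of the "default" encoding is assumed from the zarr v3
# spec, while the "." default for v2 is the field defined just above and the
# class using it follows directly below):
#
#     DefaultChunkKeyEncodingMetadata().encode_chunk_key((1, 0, 2))  # "c/1/0/2"
#     DefaultChunkKeyEncodingMetadata().decode_chunk_key("c")        # ()
#     V2ChunkKeyEncodingMetadata().encode_chunk_key((1, 0, 2))       # "1.0.2"
#     V2ChunkKeyEncodingMetadata().encode_chunk_key(())              # "0"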
+ + +@frozen +class V2ChunkKeyEncodingMetadata: + configuration: V2ChunkKeyEncodingConfigurationMetadata = ( + V2ChunkKeyEncodingConfigurationMetadata() + ) + name: Literal["v2"] = "v2" + + def decode_chunk_key(self, chunk_key: str) -> ChunkCoords: + return tuple(map(int, chunk_key.split(self.configuration.separator))) + + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + chunk_identifier = self.configuration.separator.join(map(str, chunk_coords)) + return "0" if chunk_identifier == "" else chunk_identifier + + +ChunkKeyEncodingMetadata = Union[DefaultChunkKeyEncodingMetadata, V2ChunkKeyEncodingMetadata] + + +BloscShuffle = Literal["noshuffle", "shuffle", "bitshuffle"] + + +@frozen +class BloscCodecConfigurationMetadata: + typesize: int + cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd" + clevel: int = 5 + shuffle: BloscShuffle = "noshuffle" + blocksize: int = 0 + + +blosc_shuffle_int_to_str: Dict[int, BloscShuffle] = { + 0: "noshuffle", + 1: "shuffle", + 2: "bitshuffle", +} + + +@frozen +class BloscCodecMetadata: + configuration: BloscCodecConfigurationMetadata + name: Literal["blosc"] = "blosc" + + +@frozen +class BytesCodecConfigurationMetadata: + endian: Optional[Literal["big", "little"]] = "little" + + +@frozen +class BytesCodecMetadata: + configuration: BytesCodecConfigurationMetadata + name: Literal["bytes"] = "bytes" + + +@frozen +class TransposeCodecConfigurationMetadata: + order: Union[Literal["C", "F"], Tuple[int, ...]] = "C" + + +@frozen +class TransposeCodecMetadata: + configuration: TransposeCodecConfigurationMetadata + name: Literal["transpose"] = "transpose" + + +@frozen +class GzipCodecConfigurationMetadata: + level: int = 5 + + +@frozen +class GzipCodecMetadata: + configuration: GzipCodecConfigurationMetadata + name: Literal["gzip"] = "gzip" + + +@frozen +class ZstdCodecConfigurationMetadata: + level: int = 0 + checksum: bool = False + + +@frozen +class ZstdCodecMetadata: + configuration: ZstdCodecConfigurationMetadata + name: Literal["zstd"] = "zstd" + + +@frozen +class Crc32cCodecMetadata: + name: Literal["crc32c"] = "crc32c" + + +@frozen +class ShardingCodecConfigurationMetadata: + chunk_shape: ChunkCoords + codecs: List["CodecMetadata"] + index_codecs: List["CodecMetadata"] + + +@frozen +class ShardingCodecMetadata: + configuration: ShardingCodecConfigurationMetadata + name: Literal["sharding_indexed"] = "sharding_indexed" + + +CodecMetadata = Union[ + BloscCodecMetadata, + BytesCodecMetadata, + TransposeCodecMetadata, + GzipCodecMetadata, + ZstdCodecMetadata, + ShardingCodecMetadata, + Crc32cCodecMetadata, +] + + +@frozen +class CoreArrayMetadata: + shape: ChunkCoords + chunk_shape: ChunkCoords + data_type: DataType + fill_value: Any + runtime_configuration: RuntimeConfiguration + + @property + def dtype(self) -> np.dtype: + return np.dtype(self.data_type.value) + + @property + def ndim(self) -> int: + return len(self.shape) + + +@frozen +class ArrayMetadata: + shape: ChunkCoords + data_type: DataType + chunk_grid: RegularChunkGridMetadata + chunk_key_encoding: ChunkKeyEncodingMetadata + fill_value: Any + codecs: List[CodecMetadata] + attributes: Dict[str, Any] = field(factory=dict) + dimension_names: Optional[Tuple[str, ...]] = None + zarr_format: Literal[3] = 3 + node_type: Literal["array"] = "array" + + @property + def dtype(self) -> np.dtype: + return np.dtype(self.data_type.value) + + @property + def ndim(self) -> int: + return len(self.shape) + + def get_core_metadata(self, runtime_configuration: RuntimeConfiguration) 
-> CoreArrayMetadata: + return CoreArrayMetadata( + shape=self.shape, + chunk_shape=self.chunk_grid.configuration.chunk_shape, + data_type=self.data_type, + fill_value=self.fill_value, + runtime_configuration=runtime_configuration, + ) + + def to_bytes(self) -> bytes: + def _json_convert(o): + if isinstance(o, DataType): + return o.name + raise TypeError + + return json.dumps( + asdict( + self, + filter=lambda attr, value: attr.name != "dimension_names" or value is not None, + ), + default=_json_convert, + ).encode() + + @classmethod + def from_json(cls, zarr_json: Any) -> ArrayMetadata: + return make_cattr().structure(zarr_json, cls) + + +@frozen +class ArrayV2Metadata: + shape: ChunkCoords + chunks: ChunkCoords + dtype: np.dtype + fill_value: Union[None, int, float] = 0 + order: Literal["C", "F"] = "C" + filters: Optional[List[Dict[str, Any]]] = None + dimension_separator: Literal[".", "/"] = "." + compressor: Optional[Dict[str, Any]] = None + zarr_format: Literal[2] = 2 + + @property + def ndim(self) -> int: + return len(self.shape) + + def to_bytes(self) -> bytes: + def _json_convert(o): + if isinstance(o, np.dtype): + if o.fields is None: + return o.str + else: + return o.descr + raise TypeError + + return json.dumps(asdict(self), default=_json_convert).encode() + + @classmethod + def from_json(cls, zarr_json: Any) -> ArrayV2Metadata: + return make_cattr().structure(zarr_json, cls) diff --git a/zarr/v3/sharding.py b/zarr/v3/sharding.py new file mode 100644 index 0000000000..3c5b4bd12d --- /dev/null +++ b/zarr/v3/sharding.py @@ -0,0 +1,516 @@ +from __future__ import annotations + +from typing import Iterator, List, Mapping, NamedTuple, Optional, Set, Tuple + +import numpy as np +from attrs import frozen + +from zarr.v3.codecs import ArrayBytesCodec, CodecPipeline +from zarr.v3.common import ( + BytesLike, + ChunkCoords, + SliceSelection, + concurrent_map, + product, +) +from zarr.v3.indexing import ( + BasicIndexer, + c_order_iter, + is_total_slice, + morton_order_iter, +) +from zarr.v3.metadata import ( + CoreArrayMetadata, + DataType, + ShardingCodecConfigurationMetadata, + ShardingCodecMetadata, +) +from zarr.v3.store import StorePath + +MAX_UINT_64 = 2**64 - 1 + + +class _ShardIndex(NamedTuple): + # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) + offsets_and_lengths: np.ndarray + + def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords: + return tuple( + chunk_i % shard_i + for chunk_i, shard_i in zip(chunk_coords, self.offsets_and_lengths.shape) + ) + + def is_all_empty(self) -> bool: + return bool(np.array_equiv(self.offsets_and_lengths, MAX_UINT_64)) + + def get_chunk_slice(self, chunk_coords: ChunkCoords) -> Optional[Tuple[int, int]]: + localized_chunk = self._localize_chunk(chunk_coords) + chunk_start, chunk_len = self.offsets_and_lengths[localized_chunk] + if (chunk_start, chunk_len) == (MAX_UINT_64, MAX_UINT_64): + return None + else: + return (int(chunk_start), int(chunk_start + chunk_len)) + + def set_chunk_slice(self, chunk_coords: ChunkCoords, chunk_slice: Optional[slice]) -> None: + localized_chunk = self._localize_chunk(chunk_coords) + if chunk_slice is None: + self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64) + else: + self.offsets_and_lengths[localized_chunk] = ( + chunk_slice.start, + chunk_slice.stop - chunk_slice.start, + ) + + def is_dense(self, chunk_byte_length: int) -> bool: + sorted_offsets_and_lengths = sorted( + [ + (offset, length) + for offset, length in self.offsets_and_lengths + if offset != 
MAX_UINT_64 + ], + key=lambda entry: entry[0], + ) + + # Are all non-empty offsets unique? + if len( + set(offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64) + ) != len(sorted_offsets_and_lengths): + return False + + return all( + offset % chunk_byte_length == 0 and length == chunk_byte_length + for offset, length in sorted_offsets_and_lengths + ) + + @classmethod + def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardIndex: + offsets_and_lengths = np.zeros(chunks_per_shard + (2,), dtype=" _ShardProxy: + obj = cls() + obj.buf = memoryview(buf) + obj.index = await codec._decode_shard_index(obj.buf[-codec._shard_index_size() :]) + return obj + + @classmethod + def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardProxy: + index = _ShardIndex.create_empty(chunks_per_shard) + obj = cls() + obj.buf = memoryview(b"") + obj.index = index + return obj + + def __getitem__(self, chunk_coords: ChunkCoords) -> Optional[BytesLike]: + chunk_byte_slice = self.index.get_chunk_slice(chunk_coords) + if chunk_byte_slice: + return self.buf[chunk_byte_slice[0] : chunk_byte_slice[1]] + return None + + def __len__(self) -> int: + return int(self.index.offsets_and_lengths.size / 2) + + def __iter__(self) -> Iterator[ChunkCoords]: + return c_order_iter(self.index.offsets_and_lengths.shape[:-1]) + + +class _ShardBuilder(_ShardProxy): + buf: bytearray + index: _ShardIndex + + @classmethod + def merge_with_morton_order( + cls, + chunks_per_shard: ChunkCoords, + tombstones: Set[ChunkCoords], + *shard_dicts: Mapping[ChunkCoords, BytesLike], + ) -> _ShardBuilder: + obj = cls.create_empty(chunks_per_shard) + for chunk_coords in morton_order_iter(chunks_per_shard): + if tombstones is not None and chunk_coords in tombstones: + continue + for shard_dict in shard_dicts: + maybe_value = shard_dict.get(chunk_coords, None) + if maybe_value is not None: + obj.append(chunk_coords, maybe_value) + break + return obj + + @classmethod + def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardBuilder: + obj = cls() + obj.buf = bytearray() + obj.index = _ShardIndex.create_empty(chunks_per_shard) + return obj + + def append(self, chunk_coords: ChunkCoords, value: BytesLike): + chunk_start = len(self.buf) + chunk_length = len(value) + self.buf.extend(value) + self.index.set_chunk_slice(chunk_coords, slice(chunk_start, chunk_start + chunk_length)) + + def finalize(self, index_bytes: BytesLike) -> BytesLike: + self.buf.extend(index_bytes) + return self.buf + + +@frozen +class ShardingCodec(ArrayBytesCodec): + array_metadata: CoreArrayMetadata + configuration: ShardingCodecConfigurationMetadata + codec_pipeline: CodecPipeline + index_codec_pipeline: CodecPipeline + chunks_per_shard: Tuple[int, ...] 
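    # Shard binary layout implied by _ShardIndex/_ShardBuilder above (in this
    # version the index is always appended at the end of the shard, as
    # _load_shard_index_maybe's negative byte range shows):
    #
    #     shard := chunk | chunk | ... | index
    #     index := uint64 array of shape chunks_per_shard + (2,), one
    #              (offset, nbytes) pair per inner chunk, run through
    #              index_codec_pipeline; (2**64 - 1, 2**64 - 1) marks a
    #              missing chunk.
    #
    # e.g. for chunks_per_shard == (2, 2) the raw index is 2*2*2*8 = 64 bytes,
    # plus 4 bytes when a crc32c index codec is configured.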
+ + @classmethod + def from_metadata( + cls, + codec_metadata: ShardingCodecMetadata, + array_metadata: CoreArrayMetadata, + ) -> ShardingCodec: + chunks_per_shard = tuple( + s // c + for s, c in zip( + array_metadata.chunk_shape, + codec_metadata.configuration.chunk_shape, + ) + ) + # rewriting the metadata to scope it to the shard + shard_metadata = CoreArrayMetadata( + shape=array_metadata.chunk_shape, + chunk_shape=codec_metadata.configuration.chunk_shape, + data_type=array_metadata.data_type, + fill_value=array_metadata.fill_value, + runtime_configuration=array_metadata.runtime_configuration, + ) + codec_pipeline = CodecPipeline.from_metadata( + codec_metadata.configuration.codecs, shard_metadata + ) + index_codec_pipeline = CodecPipeline.from_metadata( + codec_metadata.configuration.index_codecs, + CoreArrayMetadata( + shape=chunks_per_shard + (2,), + chunk_shape=chunks_per_shard + (2,), + data_type=DataType.uint64, + fill_value=MAX_UINT_64, + runtime_configuration=array_metadata.runtime_configuration, + ), + ) + return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + codec_pipeline=codec_pipeline, + index_codec_pipeline=index_codec_pipeline, + chunks_per_shard=chunks_per_shard, + ) + + async def decode( + self, + shard_bytes: BytesLike, + ) -> np.ndarray: + # print("decode") + shard_shape = self.array_metadata.chunk_shape + chunk_shape = self.configuration.chunk_shape + + indexer = BasicIndexer( + tuple(slice(0, s) for s in shard_shape), + shape=shard_shape, + chunk_shape=chunk_shape, + ) + + # setup output array + out = np.zeros( + shard_shape, + dtype=self.array_metadata.dtype, + order=self.array_metadata.runtime_configuration.order, + ) + shard_dict = await _ShardProxy.from_bytes(shard_bytes, self) + + if shard_dict.index.is_all_empty(): + out.fill(self.array_metadata.fill_value) + return out + + # decoding chunks and writing them into the output buffer + await concurrent_map( + [ + ( + shard_dict, + chunk_coords, + chunk_selection, + out_selection, + out, + ) + for chunk_coords, chunk_selection, out_selection in indexer + ], + self._read_chunk, + self.array_metadata.runtime_configuration.concurrency, + ) + + return out + + async def decode_partial( + self, + store_path: StorePath, + selection: SliceSelection, + ) -> Optional[np.ndarray]: + # print("decode_partial") + shard_shape = self.array_metadata.chunk_shape + chunk_shape = self.configuration.chunk_shape + + indexer = BasicIndexer( + selection, + shape=shard_shape, + chunk_shape=chunk_shape, + ) + + # setup output array + out = np.zeros( + indexer.shape, + dtype=self.array_metadata.dtype, + order=self.array_metadata.runtime_configuration.order, + ) + + indexed_chunks = list(indexer) + all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks) + + # reading bytes of all requested chunks + shard_dict: Mapping[ChunkCoords, BytesLike] = {} + if self._is_total_shard(all_chunk_coords): + # read entire shard + shard_dict_maybe = await self._load_full_shard_maybe(store_path) + if shard_dict_maybe is None: + return None + shard_dict = shard_dict_maybe + else: + # read some chunks within the shard + shard_index = await self._load_shard_index_maybe(store_path) + if shard_index is None: + return None + shard_dict = {} + for chunk_coords in all_chunk_coords: + chunk_byte_slice = shard_index.get_chunk_slice(chunk_coords) + if chunk_byte_slice: + chunk_bytes = await store_path.get_async(chunk_byte_slice) + if chunk_bytes: + shard_dict[chunk_coords] = chunk_bytes + + # decoding chunks 
and writing them into the output buffer + await concurrent_map( + [ + ( + shard_dict, + chunk_coords, + chunk_selection, + out_selection, + out, + ) + for chunk_coords, chunk_selection, out_selection in indexed_chunks + ], + self._read_chunk, + self.array_metadata.runtime_configuration.concurrency, + ) + + return out + + async def _read_chunk( + self, + shard_dict: Mapping[ChunkCoords, Optional[BytesLike]], + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + out: np.ndarray, + ): + chunk_bytes = shard_dict.get(chunk_coords, None) + if chunk_bytes is not None: + chunk_array = await self.codec_pipeline.decode(chunk_bytes) + tmp = chunk_array[chunk_selection] + out[out_selection] = tmp + else: + out[out_selection] = self.array_metadata.fill_value + + async def encode( + self, + shard_array: np.ndarray, + ) -> Optional[BytesLike]: + shard_shape = self.array_metadata.chunk_shape + chunk_shape = self.configuration.chunk_shape + + indexer = list( + BasicIndexer( + tuple(slice(0, s) for s in shard_shape), + shape=shard_shape, + chunk_shape=chunk_shape, + ) + ) + + async def _write_chunk( + shard_array: np.ndarray, + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + ) -> Tuple[ChunkCoords, Optional[BytesLike]]: + if is_total_slice(chunk_selection, chunk_shape): + chunk_array = shard_array[out_selection] + else: + # handling writing partial chunks + chunk_array = np.empty( + chunk_shape, + dtype=self.array_metadata.dtype, + ) + chunk_array.fill(self.array_metadata.fill_value) + chunk_array[chunk_selection] = shard_array[out_selection] + if not np.array_equiv(chunk_array, self.array_metadata.fill_value): + return ( + chunk_coords, + await self.codec_pipeline.encode(chunk_array), + ) + return (chunk_coords, None) + + # assembling and encoding chunks within the shard + encoded_chunks: List[Tuple[ChunkCoords, Optional[BytesLike]]] = await concurrent_map( + [ + (shard_array, chunk_coords, chunk_selection, out_selection) + for chunk_coords, chunk_selection, out_selection in indexer + ], + _write_chunk, + self.array_metadata.runtime_configuration.concurrency, + ) + if len(encoded_chunks) == 0: + return None + + shard_builder = _ShardBuilder.create_empty(self.chunks_per_shard) + for chunk_coords, chunk_bytes in encoded_chunks: + if chunk_bytes is not None: + shard_builder.append(chunk_coords, chunk_bytes) + + return shard_builder.finalize(await self._encode_shard_index(shard_builder.index)) + + async def encode_partial( + self, + store_path: StorePath, + shard_array: np.ndarray, + selection: SliceSelection, + ) -> None: + # print("encode_partial") + shard_shape = self.array_metadata.chunk_shape + chunk_shape = self.configuration.chunk_shape + + old_shard_dict = ( + await self._load_full_shard_maybe(store_path) + ) or _ShardProxy.create_empty(self.chunks_per_shard) + new_shard_builder = _ShardBuilder.create_empty(self.chunks_per_shard) + tombstones: Set[ChunkCoords] = set() + + indexer = list( + BasicIndexer( + selection, + shape=shard_shape, + chunk_shape=chunk_shape, + ) + ) + + async def _write_chunk( + chunk_coords: ChunkCoords, + chunk_selection: SliceSelection, + out_selection: SliceSelection, + ) -> Tuple[ChunkCoords, Optional[BytesLike]]: + chunk_array = None + if is_total_slice(chunk_selection, self.configuration.chunk_shape): + chunk_array = shard_array[out_selection] + else: + # handling writing partial chunks + # read chunk first + chunk_bytes = old_shard_dict.get(chunk_coords, None) + + # merge new 
value + if chunk_bytes is None: + chunk_array = np.empty( + self.configuration.chunk_shape, + dtype=self.array_metadata.dtype, + ) + chunk_array.fill(self.array_metadata.fill_value) + else: + chunk_array = ( + await self.codec_pipeline.decode(chunk_bytes) + ).copy() # make a writable copy + chunk_array[chunk_selection] = shard_array[out_selection] + + if not np.array_equiv(chunk_array, self.array_metadata.fill_value): + return ( + chunk_coords, + await self.codec_pipeline.encode(chunk_array), + ) + else: + return (chunk_coords, None) + + encoded_chunks: List[Tuple[ChunkCoords, Optional[BytesLike]]] = await concurrent_map( + [ + ( + chunk_coords, + chunk_selection, + out_selection, + ) + for chunk_coords, chunk_selection, out_selection in indexer + ], + _write_chunk, + self.array_metadata.runtime_configuration.concurrency, + ) + + for chunk_coords, chunk_bytes in encoded_chunks: + if chunk_bytes is not None: + new_shard_builder.append(chunk_coords, chunk_bytes) + else: + tombstones.add(chunk_coords) + + shard_builder = _ShardBuilder.merge_with_morton_order( + self.chunks_per_shard, tombstones, new_shard_builder, old_shard_dict + ) + + if shard_builder.index.is_all_empty(): + await store_path.delete_async() + else: + await store_path.set_async( + shard_builder.finalize(await self._encode_shard_index(shard_builder.index)) + ) + + def _is_total_shard(self, all_chunk_coords: Set[ChunkCoords]) -> bool: + return len(all_chunk_coords) == product(self.chunks_per_shard) and all( + chunk_coords in all_chunk_coords for chunk_coords in c_order_iter(self.chunks_per_shard) + ) + + async def _decode_shard_index(self, index_bytes: BytesLike) -> _ShardIndex: + return _ShardIndex(await self.index_codec_pipeline.decode(index_bytes)) + + async def _encode_shard_index(self, index: _ShardIndex) -> BytesLike: + index_bytes = await self.index_codec_pipeline.encode(index.offsets_and_lengths) + assert index_bytes is not None + return index_bytes + + def _shard_index_size(self) -> int: + return self.index_codec_pipeline.compute_encoded_size(16 * product(self.chunks_per_shard)) + + async def _load_shard_index_maybe(self, store_path: StorePath) -> Optional[_ShardIndex]: + index_bytes = await store_path.get_async((-self._shard_index_size(), None)) + if index_bytes is not None: + return await self._decode_shard_index(index_bytes) + return None + + async def _load_shard_index(self, store_path: StorePath) -> _ShardIndex: + return (await self._load_shard_index_maybe(store_path)) or _ShardIndex.create_empty( + self.chunks_per_shard + ) + + async def _load_full_shard_maybe(self, store_path: StorePath) -> Optional[_ShardProxy]: + shard_bytes = await store_path.get_async() + + return await _ShardProxy.from_bytes(shard_bytes, self) if shard_bytes else None + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + self._shard_index_size() diff --git a/zarr/v3/store.py b/zarr/v3/store.py new file mode 100644 index 0000000000..f7472c68d2 --- /dev/null +++ b/zarr/v3/store.py @@ -0,0 +1,304 @@ +# TODO: +# 1. Stores should inherit from zarr.v3.abc.store classes +# 2. remove "_async" suffix from all methods? + +# Changes I've made here: +# 1. 
Make delay import of fsspec + +from __future__ import annotations + +import asyncio +import io +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +from zarr.v3.common import BytesLike, to_thread + +if TYPE_CHECKING: + from upath import UPath + from fsspec.asyn import AsyncFileSystem + + +def _dereference_path(root: str, path: str) -> str: + assert isinstance(root, str) + assert isinstance(path, str) + root = root.rstrip("/") + path = f"{root}/{path}" if root != "" else path + path = path.rstrip("/") + return path + + +class StorePath: + store: Store + path: str + + def __init__(self, store: Store, path: Optional[str] = None): + self.store = store + self.path = path or "" + + @classmethod + def from_path(cls, pth: Path) -> StorePath: + return cls(Store.from_path(pth)) + + async def get_async( + self, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + return await self.store.get_async(self.path, byte_range) + + async def set_async( + self, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + await self.store.set_async(self.path, value, byte_range) + + async def delete_async(self) -> None: + await self.store.delete_async(self.path) + + async def exists_async(self) -> bool: + return await self.store.exists_async(self.path) + + def __truediv__(self, other: str) -> StorePath: + return self.__class__(self.store, _dereference_path(self.path, other)) + + def __str__(self) -> str: + return _dereference_path(str(self.store), self.path) + + def __repr__(self) -> str: + return f"StorePath({self.store.__class__.__name__}, {repr(str(self))})" + + +class Store: + supports_partial_writes = False + + @classmethod + def from_path(cls, pth: Path) -> Store: + try: + from upath import UPath + from upath.implementations.local import PosixUPath, WindowsUPath + + if isinstance(pth, UPath) and not isinstance(pth, (PosixUPath, WindowsUPath)): + storage_options = pth._kwargs.copy() + storage_options.pop("_url", None) + return RemoteStore(str(pth), **storage_options) + except ImportError: + pass + + return LocalStore(pth) + + async def multi_get_async( + self, keys: List[Tuple[str, Optional[Tuple[int, int]]]] + ) -> List[Optional[BytesLike]]: + return await asyncio.gather(*[self.get_async(key, byte_range) for key, byte_range in keys]) + + async def get_async( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + raise NotImplementedError + + async def multi_set_async( + self, key_values: List[Tuple[str, BytesLike, Optional[Tuple[int, int]]]] + ) -> None: + await asyncio.gather( + *[self.set_async(key, value, byte_range) for key, value, byte_range in key_values] + ) + + async def set_async( + self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + raise NotImplementedError + + async def delete_async(self, key: str) -> None: + raise NotImplementedError + + async def exists_async(self, key: str) -> bool: + raise NotImplementedError + + def __truediv__(self, other: str) -> StorePath: + return StorePath(self, other) + + +class LocalStore(Store): + supports_partial_writes = True + root: Path + auto_mkdir: bool + + def __init__(self, root: Union[Path, str], auto_mkdir: bool = True): + if isinstance(root, str): + root = Path(root) + assert isinstance(root, Path) + + self.root = root + self.auto_mkdir = auto_mkdir + + def _cat_file( + self, path: Path, start: Optional[int] = None, end: Optional[int] = None + ) -> BytesLike: + if 
start is None and end is None: + return path.read_bytes() + with path.open("rb") as f: + size = f.seek(0, io.SEEK_END) + if start is not None: + if start >= 0: + f.seek(start) + else: + f.seek(max(0, size + start)) + if end is not None: + if end < 0: + end = size + end + return f.read(end - f.tell()) + return f.read() + + def _put_file( + self, + path: Path, + value: BytesLike, + start: Optional[int] = None, + ): + if self.auto_mkdir: + path.parent.mkdir(parents=True, exist_ok=True) + if start is not None: + with path.open("r+b") as f: + f.seek(start) + f.write(value) + else: + return path.write_bytes(value) + + async def get_async( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + assert isinstance(key, str) + path = self.root / key + + try: + value = await ( + to_thread(self._cat_file, path, byte_range[0], byte_range[1]) + if byte_range is not None + else to_thread(self._cat_file, path) + ) + except (FileNotFoundError, IsADirectoryError, NotADirectoryError): + return None + + return value + + async def set_async( + self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + assert isinstance(key, str) + path = self.root / key + + if byte_range is not None: + await to_thread(self._put_file, path, value, byte_range[0]) + else: + await to_thread(self._put_file, path, value) + + async def delete_async(self, key: str) -> None: + path = self.root / key + await to_thread(path.unlink, True) + + async def exists_async(self, key: str) -> bool: + path = self.root / key + return await to_thread(path.exists) + + def __str__(self) -> str: + return f"file://{self.root}" + + def __repr__(self) -> str: + return f"LocalStore({repr(str(self))})" + + +class RemoteStore(Store): + root: UPath + + def __init__(self, url: Union[UPath, str], **storage_options: Dict[str, Any]): + from upath import UPath + import fsspec + + if isinstance(url, str): + self.root = UPath(url, **storage_options) + else: + assert len(storage_options) == 0, ( + "If constructed with a UPath object, no additional " + + "storage_options are allowed." + ) + self.root = url.rstrip("/") + # test instantiate file system + fs, _ = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) + assert fs.__class__.async_impl, "FileSystem needs to support async operations." + + def make_fs(self) -> Tuple[AsyncFileSystem, str]: + import fsspec + + storage_options = self.root._kwargs.copy() + storage_options.pop("_url", None) + fs, root = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) + assert fs.__class__.async_impl, "FileSystem needs to support async operations." 
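        # (the filesystem is rebuilt on every call rather than cached;
        # presumably a fresh async fsspec instance keeps it tied to the
        # event loop of whichever coroutine is using the store)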
+ return fs, root + + async def get_async( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + assert isinstance(key, str) + fs, root = self.make_fs() + path = _dereference_path(root, key) + + try: + value = await ( + fs._cat_file(path, start=byte_range[0], end=byte_range[1]) + if byte_range + else fs._cat_file(path) + ) + except (FileNotFoundError, IsADirectoryError, NotADirectoryError): + return None + + return value + + async def set_async( + self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + assert isinstance(key, str) + fs, root = self.make_fs() + path = _dereference_path(root, key) + + # write data + if byte_range: + with fs._open(path, "r+b") as f: + f.seek(byte_range[0]) + f.write(value) + else: + await fs._pipe_file(path, value) + + async def delete_async(self, key: str) -> None: + fs, root = self.make_fs() + path = _dereference_path(root, key) + if await fs._exists(path): + await fs._rm(path) + + async def exists_async(self, key: str) -> bool: + fs, root = self.make_fs() + path = _dereference_path(root, key) + return await fs._exists(path) + + def __str__(self) -> str: + return str(self.root) + + def __repr__(self) -> str: + return f"RemoteStore({repr(str(self))})" + + +StoreLike = Union[Store, StorePath, Path, str] + + +def make_store_path(store_like: StoreLike) -> StorePath: + if isinstance(store_like, StorePath): + return store_like + elif isinstance(store_like, Store): + return StorePath(store_like) + elif isinstance(store_like, Path): + return StorePath(Store.from_path(store_like)) + elif isinstance(store_like, str): + try: + from upath import UPath + + return StorePath(Store.from_path(UPath(store_like))) + except ImportError: + return StorePath(LocalStore(Path(store_like))) + raise TypeError diff --git a/zarr/v3/sync.py b/zarr/v3/sync.py new file mode 100644 index 0000000000..ef3a6e08c0 --- /dev/null +++ b/zarr/v3/sync.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +import asyncio +import threading +from typing import Any, Coroutine, List, Optional + +# From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py + +iothread: List[Optional[threading.Thread]] = [None] # dedicated IO thread +loop: List[Optional[asyncio.AbstractEventLoop]] = [ + None +] # global event loop for any non-async instance +_lock: Optional[threading.Lock] = None # global lock placeholder +get_running_loop = asyncio.get_running_loop + + +def _get_lock() -> threading.Lock: + """Allocate or return a threading lock. + + The lock is allocated on first use to allow setting one lock per forked process. + """ + global _lock + if not _lock: + _lock = threading.Lock() + return _lock + + +async def _runner(event: threading.Event, coro: Coroutine, result_box: List[Optional[Any]]): + try: + result_box[0] = await coro + except Exception as ex: + result_box[0] = ex + finally: + event.set() + + +def sync(coro: Coroutine, loop: Optional[asyncio.AbstractEventLoop] = None): + """ + Make loop run coroutine until it returns. 
Runs in other thread + + Examples + -------- + >>> sync(async_function(), existing_loop) + """ + if loop is None: + # NB: if the loop is not running *yet*, it is OK to submit work + # and we will wait for it + loop = _get_loop() + if loop is None or loop.is_closed(): + raise RuntimeError("Loop is not running") + try: + loop0 = asyncio.events.get_running_loop() + if loop0 is loop: + raise NotImplementedError("Calling sync() from within a running loop") + except RuntimeError: + pass + result_box: List[Optional[Any]] = [None] + event = threading.Event() + asyncio.run_coroutine_threadsafe(_runner(event, coro, result_box), loop) + while True: + # this loops allows thread to get interrupted + if event.wait(1): + break + + return_result = result_box[0] + if isinstance(return_result, BaseException): + raise return_result + else: + return return_result + + +def _get_loop(): + """Create or return the default fsspec IO loop + + The loop will be running on a separate thread. + """ + if loop[0] is None: + with _get_lock(): + # repeat the check just in case the loop got filled between the + # previous two calls from another thread + if loop[0] is None: + loop[0] = asyncio.new_event_loop() + th = threading.Thread(target=loop[0].run_forever, name="zarrIO") + th.daemon = True + th.start() + iothread[0] = th + return loop[0] From 25dbeeda7d3a300569b358c157c5bd1c02ddaec3 Mon Sep 17 00:00:00 2001 From: Janick Martinez Esturo Date: Tue, 5 Dec 2023 22:31:03 +0100 Subject: [PATCH 0389/1078] * Cache result of FSStore._fsspec_installed (#1581) Prevent runtime-overhead in doing this check multiple times --- docs/release.rst | 3 +++ zarr/storage.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 9873d62896..842c36e290 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -43,6 +43,9 @@ Docs Maintenance ~~~~~~~~~~~ +* Cache result of ``FSStore._fsspec_installed()``. + By :user:`Janick Martinez Esturo ` :issue:`1581`. + * Extend copyright notice to 2023. By :user:`Jack Kelly ` :issue:`1528`. diff --git a/zarr/storage.py b/zarr/storage.py index b36f804ebd..a7426e5345 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -28,6 +28,7 @@ import zipfile from collections import OrderedDict from collections.abc import MutableMapping +from functools import lru_cache from os import scandir from pickle import PicklingError from threading import Lock, RLock @@ -1540,6 +1541,7 @@ def clear(self): self.map.clear() @classmethod + @lru_cache(maxsize=None) def _fsspec_installed(cls): """Returns true if fsspec is installed""" import importlib.util From 1a9888637c847e88126b100c5cfa99f5c654bb03 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Wed, 6 Dec 2023 16:15:35 +0100 Subject: [PATCH 0390/1078] Extensible codecs for V3 (#1588) * Pull Zarrita into Zarr-Python @ 78274781ad64aef95772eb4b083f7ea9b7d03d06 No code changes to Zarrita were made. 
* apply zarr lint rules * zarrita -> v3 * v3/abc [wip] * use abcs plus implementation notes * working on making codecs extensible * adds index_location * adds support for codec entry points * adds tests from zarrita * fixes types * Apply suggestions from code review Co-authored-by: Joe Hamman * remove test codec from pyproject.toml --------- Co-authored-by: Joseph Hamman Co-authored-by: Joe Hamman --- .pre-commit-config.yaml | 3 +- zarr/tests/test_codecs_v3.py | 989 +++++++++++++++++++++++++++++++ zarr/v3/abc/codec.py | 42 +- zarr/v3/array.py | 12 +- zarr/v3/codecs.py | 514 ---------------- zarr/v3/codecs/__init__.py | 232 ++++++++ zarr/v3/codecs/blosc.py | 99 ++++ zarr/v3/codecs/bytes.py | 105 ++++ zarr/v3/codecs/crc32c_.py | 64 ++ zarr/v3/codecs/gzip.py | 70 +++ zarr/v3/codecs/registry.py | 56 ++ zarr/v3/{ => codecs}/sharding.py | 101 +++- zarr/v3/codecs/transpose.py | 114 ++++ zarr/v3/codecs/zstd.py | 80 +++ zarr/v3/common.py | 28 +- zarr/v3/metadata.py | 106 +--- zarr/v3/store.py | 49 +- 17 files changed, 2001 insertions(+), 663 deletions(-) create mode 100644 zarr/tests/test_codecs_v3.py delete mode 100644 zarr/v3/codecs.py create mode 100644 zarr/v3/codecs/__init__.py create mode 100644 zarr/v3/codecs/blosc.py create mode 100644 zarr/v3/codecs/bytes.py create mode 100644 zarr/v3/codecs/crc32c_.py create mode 100644 zarr/v3/codecs/gzip.py create mode 100644 zarr/v3/codecs/registry.py rename zarr/v3/{ => codecs}/sharding.py (85%) create mode 100644 zarr/v3/codecs/transpose.py create mode 100644 zarr/v3/codecs/zstd.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f22dc39832..a8ee599137 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.3.0 + rev: v1.7.1 hooks: - id: mypy files: zarr @@ -35,3 +35,4 @@ repos: additional_dependencies: - types-redis - types-setuptools + - attrs diff --git a/zarr/tests/test_codecs_v3.py b/zarr/tests/test_codecs_v3.py new file mode 100644 index 0000000000..93acdb2ba1 --- /dev/null +++ b/zarr/tests/test_codecs_v3.py @@ -0,0 +1,989 @@ +from __future__ import annotations + +import json +from typing import Iterator, List, Literal, Optional +from attr import frozen + +import numpy as np +import pytest +import zarr +from zarr.v3 import codecs +from zarr.v3.array import Array, AsyncArray +from zarr.v3.common import Selection +from zarr.v3.indexing import morton_order_iter +from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation, runtime_configuration + +from zarr.v3.store import MemoryStore, Store + + +@frozen +class _AsyncArrayProxy: + array: AsyncArray + + def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy: + return _AsyncArraySelectionProxy(self.array, selection) + + +@frozen +class _AsyncArraySelectionProxy: + array: AsyncArray + selection: Selection + + async def get(self) -> np.ndarray: + return await self.array.getitem(self.selection) + + async def set(self, value: np.ndarray): + return await self.array.setitem(self.selection, value) + + +@pytest.fixture +def store() -> Iterator[Store]: + yield MemoryStore() + + +@pytest.fixture +def sample_data() -> np.ndarray: + return np.arange(0, 128 * 128 * 128, dtype="uint16").reshape((128, 128, 128), order="F") + + +@pytest.mark.parametrize( + "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] +) +def test_sharding( + store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation +): 
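+    # Writes the 128**3 fixture through a sharded layout (64**3 shards, each
+    # holding 2 x 2 x 2 inner chunks of 32**3) and reads it back, for both
+    # index locations.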
+ a = Array.create( + store / "sample", + shape=sample_data.shape, + chunk_shape=(64, 64, 64), + dtype=sample_data.dtype, + fill_value=0, + codecs=[ + codecs.sharding_codec( + (32, 32, 32), + [ + codecs.transpose_codec("F", sample_data.ndim), + codecs.bytes_codec(), + codecs.blosc_codec(typesize=sample_data.dtype.itemsize, cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + a[:, :, :] = sample_data + + read_data = a[0 : sample_data.shape[0], 0 : sample_data.shape[1], 0 : sample_data.shape[2]] + assert sample_data.shape == read_data.shape + assert np.array_equal(sample_data, read_data) + + +@pytest.mark.parametrize( + "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] +) +def test_sharding_partial( + store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation +): + a = Array.create( + store / "sample", + shape=tuple(a + 10 for a in sample_data.shape), + chunk_shape=(64, 64, 64), + dtype=sample_data.dtype, + fill_value=0, + codecs=[ + codecs.sharding_codec( + (32, 32, 32), + [ + codecs.transpose_codec("F", sample_data.ndim), + codecs.bytes_codec(), + codecs.blosc_codec(typesize=sample_data.dtype.itemsize, cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + a[10:, 10:, 10:] = sample_data + + read_data = a[0:10, 0:10, 0:10] + assert np.all(read_data == 0) + + read_data = a[10:, 10:, 10:] + assert sample_data.shape == read_data.shape + assert np.array_equal(sample_data, read_data) + + +@pytest.mark.parametrize( + "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] +) +def test_sharding_partial_read( + store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation +): + a = Array.create( + store / "sample", + shape=tuple(a + 10 for a in sample_data.shape), + chunk_shape=(64, 64, 64), + dtype=sample_data.dtype, + fill_value=1, + codecs=[ + codecs.sharding_codec( + (32, 32, 32), + [ + codecs.transpose_codec("F", sample_data.ndim), + codecs.bytes_codec(), + codecs.blosc_codec(typesize=sample_data.dtype.itemsize, cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + read_data = a[0:10, 0:10, 0:10] + assert np.all(read_data == 1) + + +@pytest.mark.parametrize( + "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] +) +def test_sharding_partial_overwrite( + store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation +): + data = sample_data[:10, :10, :10] + + a = Array.create( + store / "sample", + shape=tuple(a + 10 for a in data.shape), + chunk_shape=(64, 64, 64), + dtype=data.dtype, + fill_value=1, + codecs=[ + codecs.sharding_codec( + (32, 32, 32), + [ + codecs.transpose_codec("F", data.ndim), + codecs.bytes_codec(), + codecs.blosc_codec(typesize=data.dtype.itemsize, cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + a[:10, :10, :10] = data + + read_data = a[0:10, 0:10, 0:10] + assert np.array_equal(data, read_data) + + data = data + 10 + a[:10, :10, :10] = data + read_data = a[0:10, 0:10, 0:10] + assert np.array_equal(data, read_data) + + +@pytest.mark.parametrize( + "outer_index_location", + [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end], +) +@pytest.mark.parametrize( + "inner_index_location", + [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end], +) +def test_nested_sharding( + store: Store, + sample_data: np.ndarray, + outer_index_location: ShardingCodecIndexLocation, + inner_index_location: ShardingCodecIndexLocation, +): + a = 
Array.create( + store / "l4_sample" / "color" / "1", + shape=sample_data.shape, + chunk_shape=(64, 64, 64), + dtype=sample_data.dtype, + fill_value=0, + codecs=[ + codecs.sharding_codec( + (32, 32, 32), + [codecs.sharding_codec((16, 16, 16), index_location=inner_index_location)], + index_location=outer_index_location, + ) + ], + ) + + a[:, :, :] = sample_data + + read_data = a[0 : sample_data.shape[0], 0 : sample_data.shape[1], 0 : sample_data.shape[2]] + assert sample_data.shape == read_data.shape + assert np.array_equal(sample_data, read_data) + + +@pytest.mark.parametrize("input_order", ["F", "C"]) +@pytest.mark.parametrize("store_order", ["F", "C"]) +@pytest.mark.parametrize("runtime_write_order", ["F", "C"]) +@pytest.mark.parametrize("runtime_read_order", ["F", "C"]) +@pytest.mark.parametrize("with_sharding", [True, False]) +@pytest.mark.asyncio +async def test_order( + store: Store, + input_order: Literal["F", "C"], + store_order: Literal["F", "C"], + runtime_write_order: Literal["F", "C"], + runtime_read_order: Literal["F", "C"], + with_sharding: bool, +): + data = np.arange(0, 256, dtype="uint16").reshape((32, 8), order=input_order) + + codecs_: List[CodecMetadata] = ( + [ + codecs.sharding_codec( + (16, 8), + codecs=[codecs.transpose_codec(store_order, data.ndim), codecs.bytes_codec()], + ) + ] + if with_sharding + else [codecs.transpose_codec(store_order, data.ndim), codecs.bytes_codec()] + ) + + a = await AsyncArray.create( + store / "order", + shape=data.shape, + chunk_shape=(32, 8), + dtype=data.dtype, + fill_value=0, + chunk_key_encoding=("v2", "."), + codecs=codecs_, + runtime_configuration=runtime_configuration(runtime_write_order), + ) + + await _AsyncArrayProxy(a)[:, :].set(data) + read_data = await _AsyncArrayProxy(a)[:, :].get() + assert np.array_equal(data, read_data) + + a = await AsyncArray.open( + store / "order", + runtime_configuration=runtime_configuration(order=runtime_read_order), + ) + read_data = await _AsyncArrayProxy(a)[:, :].get() + assert np.array_equal(data, read_data) + + if runtime_read_order == "F": + assert read_data.flags["F_CONTIGUOUS"] + assert not read_data.flags["C_CONTIGUOUS"] + else: + assert not read_data.flags["F_CONTIGUOUS"] + assert read_data.flags["C_CONTIGUOUS"] + + if not with_sharding: + # Compare with zarr-python + z = zarr.create( + shape=data.shape, + chunks=(32, 8), + dtype="u2", "u2", " int: def resolve_metadata(self) -> CoreArrayMetadata: return self.array_metadata + @classmethod + @abstractmethod + def from_metadata( + cls, codec_metadata: "CodecMetadata", array_metadata: CoreArrayMetadata + ) -> Codec: + pass + + @classmethod + @abstractmethod + def get_metadata_class(cls) -> "Type[CodecMetadata]": + pass + class ArrayArrayCodec(Codec): @abstractmethod @@ -68,6 +79,27 @@ async def encode( pass +class ArrayBytesCodecPartialDecodeMixin: + @abstractmethod + async def decode_partial( + self, + store_path: StorePath, + selection: SliceSelection, + ) -> Optional[np.ndarray]: + pass + + +class ArrayBytesCodecPartialEncodeMixin: + @abstractmethod + async def encode_partial( + self, + store_path: StorePath, + chunk_array: np.ndarray, + selection: SliceSelection, + ) -> None: + pass + + class BytesBytesCodec(Codec): @abstractmethod async def decode( diff --git a/zarr/v3/array.py b/zarr/v3/array.py index 3c0d7eba5c..8c54cfd91c 100644 --- a/zarr/v3/array.py +++ b/zarr/v3/array.py @@ -18,6 +18,7 @@ from attr import evolve, frozen from zarr.v3.abc.array import SynchronousArray, AsynchronousArray +from zarr.v3.abc.codec import 
ArrayBytesCodecPartialDecodeMixin # from zarr.v3.array_v2 import ArrayV2 from zarr.v3.codecs import CodecMetadata, CodecPipeline, bytes_codec @@ -41,7 +42,7 @@ V2ChunkKeyEncodingMetadata, dtype_to_data_type, ) -from zarr.v3.sharding import ShardingCodec +from zarr.v3.codecs.sharding import ShardingCodec from zarr.v3.store import StoreLike, StorePath, make_store_path from zarr.v3.sync import sync @@ -253,7 +254,7 @@ async def _read_chunk( store_path = self.store_path / chunk_key if len(self.codec_pipeline.codecs) == 1 and isinstance( - self.codec_pipeline.codecs[0], ShardingCodec + self.codec_pipeline.codecs[0], ArrayBytesCodecPartialDecodeMixin ): chunk_array = await self.codec_pipeline.codecs[0].decode_partial( store_path, chunk_selection @@ -373,7 +374,7 @@ async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.nda else: await store_path.set_async(chunk_bytes) - async def resize(self, new_shape: ChunkCoords) -> Array: + async def resize(self, new_shape: ChunkCoords) -> AsyncArray: assert len(new_shape) == len(self.metadata.shape) new_metadata = evolve(self.metadata, shape=new_shape) @@ -472,7 +473,6 @@ def open( store: StoreLike, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Array: - async_array = sync( AsyncArray.open(store, runtime_configuration=runtime_configuration), runtime_configuration.asyncio_loop, @@ -512,6 +512,10 @@ def dtype(self) -> np.dtype: def attrs(self) -> dict: return self._async_array.attrs + @property + def metadata(self) -> ArrayMetadata: + return self._async_array.metadata + @property def store_path(self) -> str: return self._async_array.store_path diff --git a/zarr/v3/codecs.py b/zarr/v3/codecs.py deleted file mode 100644 index ff913c42b2..0000000000 --- a/zarr/v3/codecs.py +++ /dev/null @@ -1,514 +0,0 @@ -from __future__ import annotations - -from functools import reduce -from typing import TYPE_CHECKING, Iterable, List, Literal, Optional, Tuple, Union -from warnings import warn - -import numcodecs -import numpy as np -from attr import asdict, evolve, frozen -from crc32c import crc32c -from numcodecs.blosc import Blosc -from numcodecs.gzip import GZip -from zstandard import ZstdCompressor, ZstdDecompressor - -from zarr.v3.abc.codec import Codec, ArrayArrayCodec, ArrayBytesCodec, BytesBytesCodec -from zarr.v3.common import BytesLike, to_thread -from zarr.v3.metadata import ( - BloscCodecConfigurationMetadata, - BloscCodecMetadata, - BytesCodecConfigurationMetadata, - BytesCodecMetadata, - CodecMetadata, - Crc32cCodecMetadata, - GzipCodecConfigurationMetadata, - GzipCodecMetadata, - ShardingCodecConfigurationMetadata, - ShardingCodecMetadata, - TransposeCodecConfigurationMetadata, - TransposeCodecMetadata, - ZstdCodecConfigurationMetadata, - ZstdCodecMetadata, -) - -if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata - -# See https://zarr.readthedocs.io/en/stable/tutorial.html#configuring-blosc -numcodecs.blosc.use_threads = False - - -@frozen -class CodecPipeline: - codecs: List[Codec] - - @classmethod - def from_metadata( - cls, - codecs_metadata: Iterable[CodecMetadata], - array_metadata: CoreArrayMetadata, - ) -> CodecPipeline: - out: List[Codec] = [] - for codec_metadata in codecs_metadata or []: - if codec_metadata.name == "endian": - codec_metadata = evolve(codec_metadata, name="bytes") # type: ignore - - codec: Codec - if codec_metadata.name == "blosc": - codec = BloscCodec.from_metadata(codec_metadata, array_metadata) - elif codec_metadata.name == "gzip": - codec = 
GzipCodec.from_metadata(codec_metadata, array_metadata) - elif codec_metadata.name == "zstd": - codec = ZstdCodec.from_metadata(codec_metadata, array_metadata) - elif codec_metadata.name == "transpose": - codec = TransposeCodec.from_metadata(codec_metadata, array_metadata) - elif codec_metadata.name == "bytes": - codec = BytesCodec.from_metadata(codec_metadata, array_metadata) - elif codec_metadata.name == "crc32c": - codec = Crc32cCodec.from_metadata(codec_metadata, array_metadata) - elif codec_metadata.name == "sharding_indexed": - from zarr.v3.sharding import ShardingCodec - - codec = ShardingCodec.from_metadata(codec_metadata, array_metadata) - else: - raise RuntimeError(f"Unsupported codec: {codec_metadata}") - - out.append(codec) - array_metadata = codec.resolve_metadata() - CodecPipeline._validate_codecs(out, array_metadata) - return cls(out) - - @staticmethod - def _validate_codecs(codecs: List[Codec], array_metadata: CoreArrayMetadata) -> None: - from zarr.v3.sharding import ShardingCodec - - assert any( - isinstance(codec, ArrayBytesCodec) for codec in codecs - ), "Exactly one array-to-bytes codec is required." - - prev_codec: Optional[Codec] = None - for codec in codecs: - if prev_codec is not None: - assert not isinstance(codec, ArrayBytesCodec) or not isinstance( - prev_codec, ArrayBytesCodec - ), ( - f"ArrayBytesCodec '{type(codec)}' cannot follow after " - + f"ArrayBytesCodec '{type(prev_codec)}' because exactly " - + "1 ArrayBytesCodec is allowed." - ) - assert not isinstance(codec, ArrayBytesCodec) or not isinstance( - prev_codec, BytesBytesCodec - ), ( - f"ArrayBytesCodec '{type(codec)}' cannot follow after " - + f"BytesBytesCodec '{type(prev_codec)}'." - ) - assert not isinstance(codec, ArrayArrayCodec) or not isinstance( - prev_codec, ArrayBytesCodec - ), ( - f"ArrayArrayCodec '{type(codec)}' cannot follow after " - + f"ArrayBytesCodec '{type(prev_codec)}'." - ) - assert not isinstance(codec, ArrayArrayCodec) or not isinstance( - prev_codec, BytesBytesCodec - ), ( - f"ArrayArrayCodec '{type(codec)}' cannot follow after " - + f"BytesBytesCodec '{type(prev_codec)}'." - ) - - if isinstance(codec, ShardingCodec): - assert len(codec.configuration.chunk_shape) == len(array_metadata.shape), ( - "The shard's `chunk_shape` and array's `shape` need to have the " - + "same number of dimensions." - ) - assert all( - s % c == 0 - for s, c in zip( - array_metadata.chunk_shape, - codec.configuration.chunk_shape, - ) - ), ( - "The array's `chunk_shape` needs to be divisible by the " - + "shard's inner `chunk_shape`." - ) - prev_codec = codec - - if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: - warn( - "Combining a `sharding_indexed` codec disables partial reads and " - + "writes, which may lead to inefficient performance." 
- ) - - def _array_array_codecs(self) -> List[ArrayArrayCodec]: - return [codec for codec in self.codecs if isinstance(codec, ArrayArrayCodec)] - - def _array_bytes_codec(self) -> ArrayBytesCodec: - return next(codec for codec in self.codecs if isinstance(codec, ArrayBytesCodec)) - - def _bytes_bytes_codecs(self) -> List[BytesBytesCodec]: - return [codec for codec in self.codecs if isinstance(codec, BytesBytesCodec)] - - async def decode(self, chunk_bytes: BytesLike) -> np.ndarray: - for bb_codec in self._bytes_bytes_codecs()[::-1]: - chunk_bytes = await bb_codec.decode(chunk_bytes) - - chunk_array = await self._array_bytes_codec().decode(chunk_bytes) - - for aa_codec in self._array_array_codecs()[::-1]: - chunk_array = await aa_codec.decode(chunk_array) - - return chunk_array - - async def encode(self, chunk_array: np.ndarray) -> Optional[BytesLike]: - for aa_codec in self._array_array_codecs(): - chunk_array_maybe = await aa_codec.encode(chunk_array) - if chunk_array_maybe is None: - return None - chunk_array = chunk_array_maybe - - chunk_bytes_maybe = await self._array_bytes_codec().encode(chunk_array) - if chunk_bytes_maybe is None: - return None - chunk_bytes = chunk_bytes_maybe - - for bb_codec in self._bytes_bytes_codecs(): - chunk_bytes_maybe = await bb_codec.encode(chunk_bytes) - if chunk_bytes_maybe is None: - return None - chunk_bytes = chunk_bytes_maybe - - return chunk_bytes - - def compute_encoded_size(self, byte_length: int) -> int: - return reduce(lambda acc, codec: codec.compute_encoded_size(acc), self.codecs, byte_length) - - -@frozen -class BloscCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata - configuration: BloscCodecConfigurationMetadata - blosc_codec: Blosc - is_fixed_size = False - - @classmethod - def from_metadata( - cls, codec_metadata: BloscCodecMetadata, array_metadata: CoreArrayMetadata - ) -> BloscCodec: - configuration = codec_metadata.configuration - if configuration.typesize == 0: - configuration = evolve(configuration, typesize=array_metadata.data_type.byte_count) - config_dict = asdict(codec_metadata.configuration) - config_dict.pop("typesize", None) - map_shuffle_str_to_int = {"noshuffle": 0, "shuffle": 1, "bitshuffle": 2} - config_dict["shuffle"] = map_shuffle_str_to_int[config_dict["shuffle"]] - return cls( - array_metadata=array_metadata, - configuration=configuration, - blosc_codec=Blosc.from_config(config_dict), - ) - - async def decode( - self, - chunk_bytes: bytes, - ) -> BytesLike: - return await to_thread(self.blosc_codec.decode, chunk_bytes) - - async def encode( - self, - chunk_bytes: bytes, - ) -> Optional[BytesLike]: - chunk_array = np.frombuffer(chunk_bytes, dtype=self.array_metadata.dtype) - return await to_thread(self.blosc_codec.encode, chunk_array) - - def compute_encoded_size(self, _input_byte_length: int) -> int: - raise NotImplementedError - - -@frozen -class BytesCodec(ArrayBytesCodec): - array_metadata: CoreArrayMetadata - configuration: BytesCodecConfigurationMetadata - is_fixed_size = True - - @classmethod - def from_metadata( - cls, codec_metadata: BytesCodecMetadata, array_metadata: CoreArrayMetadata - ) -> BytesCodec: - assert ( - array_metadata.dtype.itemsize == 1 or codec_metadata.configuration.endian is not None - ), "The `endian` configuration needs to be specified for multi-byte data types." 
- return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - ) - - def _get_byteorder(self, array: np.ndarray) -> Literal["big", "little"]: - if array.dtype.byteorder == "<": - return "little" - elif array.dtype.byteorder == ">": - return "big" - else: - import sys - - return sys.byteorder - - async def decode( - self, - chunk_bytes: BytesLike, - ) -> np.ndarray: - if self.array_metadata.dtype.itemsize > 0: - if self.configuration.endian == "little": - prefix = "<" - else: - prefix = ">" - dtype = np.dtype(f"{prefix}{self.array_metadata.data_type.to_numpy_shortname()}") - else: - dtype = np.dtype(f"|{self.array_metadata.data_type.to_numpy_shortname()}") - chunk_array = np.frombuffer(chunk_bytes, dtype) - - # ensure correct chunk shape - if chunk_array.shape != self.array_metadata.chunk_shape: - chunk_array = chunk_array.reshape( - self.array_metadata.chunk_shape, - ) - return chunk_array - - async def encode( - self, - chunk_array: np.ndarray, - ) -> Optional[BytesLike]: - if chunk_array.dtype.itemsize > 1: - byteorder = self._get_byteorder(chunk_array) - if self.configuration.endian != byteorder: - new_dtype = chunk_array.dtype.newbyteorder(self.configuration.endian) - chunk_array = chunk_array.astype(new_dtype) - return chunk_array.tobytes() - - def compute_encoded_size(self, input_byte_length: int) -> int: - return input_byte_length - - -@frozen -class TransposeCodec(ArrayArrayCodec): - array_metadata: CoreArrayMetadata - order: Tuple[int, ...] - is_fixed_size = True - - @classmethod - def from_metadata( - cls, codec_metadata: TransposeCodecMetadata, array_metadata: CoreArrayMetadata - ) -> TransposeCodec: - configuration = codec_metadata.configuration - if configuration.order == "F": - order = tuple(array_metadata.ndim - x - 1 for x in range(array_metadata.ndim)) - - elif configuration.order == "C": - order = tuple(range(array_metadata.ndim)) - - else: - assert len(configuration.order) == array_metadata.ndim, ( - "The `order` tuple needs have as many entries as " - + f"there are dimensions in the array. Got: {configuration.order}" - ) - assert len(configuration.order) == len(set(configuration.order)), ( - "There must not be duplicates in the `order` tuple. " - + f"Got: {configuration.order}" - ) - assert all(0 <= x < array_metadata.ndim for x in configuration.order), ( - "All entries in the `order` tuple must be between 0 and " - + f"the number of dimensions in the array. 
Got: {configuration.order}" - ) - order = tuple(configuration.order) - - return cls( - array_metadata=array_metadata, - order=order, - ) - - def resolve_metadata(self) -> CoreArrayMetadata: - from zarr.v3.metadata import CoreArrayMetadata - - return CoreArrayMetadata( - shape=tuple( - self.array_metadata.shape[self.order[i]] for i in range(self.array_metadata.ndim) - ), - chunk_shape=tuple( - self.array_metadata.chunk_shape[self.order[i]] - for i in range(self.array_metadata.ndim) - ), - data_type=self.array_metadata.data_type, - fill_value=self.array_metadata.fill_value, - runtime_configuration=self.array_metadata.runtime_configuration, - ) - - async def decode( - self, - chunk_array: np.ndarray, - ) -> np.ndarray: - inverse_order = [0 for _ in range(self.array_metadata.ndim)] - for x, i in enumerate(self.order): - inverse_order[x] = i - chunk_array = chunk_array.transpose(inverse_order) - return chunk_array - - async def encode( - self, - chunk_array: np.ndarray, - ) -> Optional[np.ndarray]: - chunk_array = chunk_array.transpose(self.order) - return chunk_array - - def compute_encoded_size(self, input_byte_length: int) -> int: - return input_byte_length - - -@frozen -class GzipCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata - configuration: GzipCodecConfigurationMetadata - is_fixed_size = True - - @classmethod - def from_metadata( - cls, codec_metadata: GzipCodecMetadata, array_metadata: CoreArrayMetadata - ) -> GzipCodec: - return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - ) - - async def decode( - self, - chunk_bytes: bytes, - ) -> BytesLike: - return await to_thread(GZip(self.configuration.level).decode, chunk_bytes) - - async def encode( - self, - chunk_bytes: bytes, - ) -> Optional[BytesLike]: - return await to_thread(GZip(self.configuration.level).encode, chunk_bytes) - - def compute_encoded_size(self, _input_byte_length: int) -> int: - raise NotImplementedError - - -@frozen -class ZstdCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata - configuration: ZstdCodecConfigurationMetadata - is_fixed_size = True - - @classmethod - def from_metadata( - cls, codec_metadata: ZstdCodecMetadata, array_metadata: CoreArrayMetadata - ) -> ZstdCodec: - return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - ) - - def _compress(self, data: bytes) -> bytes: - ctx = ZstdCompressor( - level=self.configuration.level, write_checksum=self.configuration.checksum - ) - return ctx.compress(data) - - def _decompress(self, data: bytes) -> bytes: - ctx = ZstdDecompressor() - return ctx.decompress(data) - - async def decode( - self, - chunk_bytes: bytes, - ) -> BytesLike: - return await to_thread(self._decompress, chunk_bytes) - - async def encode( - self, - chunk_bytes: bytes, - ) -> Optional[BytesLike]: - return await to_thread(self._compress, chunk_bytes) - - def compute_encoded_size(self, _input_byte_length: int) -> int: - raise NotImplementedError - - -@frozen -class Crc32cCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata - is_fixed_size = True - - @classmethod - def from_metadata( - cls, codec_metadata: Crc32cCodecMetadata, array_metadata: CoreArrayMetadata - ) -> Crc32cCodec: - return cls(array_metadata=array_metadata) - - async def decode( - self, - chunk_bytes: bytes, - ) -> BytesLike: - crc32_bytes = chunk_bytes[-4:] - inner_bytes = chunk_bytes[:-4] - - assert np.uint32(crc32c(inner_bytes)).tobytes() == bytes(crc32_bytes) - return inner_bytes - - async def encode( - self, - chunk_bytes: 
bytes, - ) -> Optional[BytesLike]: - return chunk_bytes + np.uint32(crc32c(chunk_bytes)).tobytes() - - def compute_encoded_size(self, input_byte_length: int) -> int: - return input_byte_length + 4 - - -def blosc_codec( - typesize: int, - cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd", - clevel: int = 5, - shuffle: Literal["noshuffle", "shuffle", "bitshuffle"] = "noshuffle", - blocksize: int = 0, -) -> BloscCodecMetadata: - return BloscCodecMetadata( - configuration=BloscCodecConfigurationMetadata( - cname=cname, - clevel=clevel, - shuffle=shuffle, - blocksize=blocksize, - typesize=typesize, - ) - ) - - -def bytes_codec(endian: Optional[Literal["big", "little"]] = "little") -> BytesCodecMetadata: - return BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian)) - - -def transpose_codec(order: Union[Tuple[int, ...], Literal["C", "F"]]) -> TransposeCodecMetadata: - return TransposeCodecMetadata(configuration=TransposeCodecConfigurationMetadata(order)) - - -def gzip_codec(level: int = 5) -> GzipCodecMetadata: - return GzipCodecMetadata(configuration=GzipCodecConfigurationMetadata(level)) - - -def zstd_codec(level: int = 0, checksum: bool = False) -> ZstdCodecMetadata: - return ZstdCodecMetadata(configuration=ZstdCodecConfigurationMetadata(level, checksum)) - - -def crc32c_codec() -> Crc32cCodecMetadata: - return Crc32cCodecMetadata() - - -def sharding_codec( - chunk_shape: Tuple[int, ...], - codecs: Optional[List[CodecMetadata]] = None, - index_codecs: Optional[List[CodecMetadata]] = None, -) -> ShardingCodecMetadata: - codecs = codecs or [bytes_codec()] - index_codecs = index_codecs or [bytes_codec(), crc32c_codec()] - return ShardingCodecMetadata( - configuration=ShardingCodecConfigurationMetadata(chunk_shape, codecs, index_codecs) - ) diff --git a/zarr/v3/codecs/__init__.py b/zarr/v3/codecs/__init__.py new file mode 100644 index 0000000000..30a42c8ad5 --- /dev/null +++ b/zarr/v3/codecs/__init__.py @@ -0,0 +1,232 @@ +from __future__ import annotations + +from functools import reduce +from typing import ( + TYPE_CHECKING, + Iterable, + List, + Literal, + Optional, + Tuple, + Union, +) +from warnings import warn + +import numpy as np +from attr import frozen + +from zarr.v3.abc.codec import Codec, ArrayArrayCodec, ArrayBytesCodec, BytesBytesCodec +from zarr.v3.common import BytesLike +from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation +from zarr.v3.codecs.registry import get_codec_class + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.codecs.sharding import ShardingCodecMetadata + from zarr.v3.codecs.blosc import BloscCodecMetadata + from zarr.v3.codecs.bytes import BytesCodecMetadata + from zarr.v3.codecs.transpose import TransposeCodecMetadata + from zarr.v3.codecs.gzip import GzipCodecMetadata + from zarr.v3.codecs.zstd import ZstdCodecMetadata + from zarr.v3.codecs.crc32c_ import Crc32cCodecMetadata + + +@frozen +class CodecPipeline: + codecs: List[Codec] + + @classmethod + def from_metadata( + cls, + codecs_metadata: Iterable[CodecMetadata], + array_metadata: CoreArrayMetadata, + ) -> CodecPipeline: + out: List[Codec] = [] + for codec_metadata in codecs_metadata or []: + codec_cls = get_codec_class(codec_metadata.name) + codec = codec_cls.from_metadata(codec_metadata, array_metadata) + out.append(codec) + array_metadata = codec.resolve_metadata() + CodecPipeline._validate_codecs(out, array_metadata) + return cls(out) + + @staticmethod + def _validate_codecs(codecs: List[Codec], 
array_metadata: CoreArrayMetadata) -> None: + from zarr.v3.codecs.sharding import ShardingCodec + + assert any( + isinstance(codec, ArrayBytesCodec) for codec in codecs + ), "Exactly one array-to-bytes codec is required." + + prev_codec: Optional[Codec] = None + for codec in codecs: + if prev_codec is not None: + assert not isinstance(codec, ArrayBytesCodec) or not isinstance( + prev_codec, ArrayBytesCodec + ), ( + f"ArrayBytesCodec '{type(codec)}' cannot follow after " + + f"ArrayBytesCodec '{type(prev_codec)}' because exactly " + + "1 ArrayBytesCodec is allowed." + ) + assert not isinstance(codec, ArrayBytesCodec) or not isinstance( + prev_codec, BytesBytesCodec + ), ( + f"ArrayBytesCodec '{type(codec)}' cannot follow after " + + f"BytesBytesCodec '{type(prev_codec)}'." + ) + assert not isinstance(codec, ArrayArrayCodec) or not isinstance( + prev_codec, ArrayBytesCodec + ), ( + f"ArrayArrayCodec '{type(codec)}' cannot follow after " + + f"ArrayBytesCodec '{type(prev_codec)}'." + ) + assert not isinstance(codec, ArrayArrayCodec) or not isinstance( + prev_codec, BytesBytesCodec + ), ( + f"ArrayArrayCodec '{type(codec)}' cannot follow after " + + f"BytesBytesCodec '{type(prev_codec)}'." + ) + + if isinstance(codec, ShardingCodec): + assert len(codec.configuration.chunk_shape) == len(array_metadata.shape), ( + "The shard's `chunk_shape` and array's `shape` need to have the " + + "same number of dimensions." + ) + assert all( + s % c == 0 + for s, c in zip( + array_metadata.chunk_shape, + codec.configuration.chunk_shape, + ) + ), ( + "The array's `chunk_shape` needs to be divisible by the " + + "shard's inner `chunk_shape`." + ) + prev_codec = codec + + if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: + warn( + "Combining a `sharding_indexed` codec disables partial reads and " + + "writes, which may lead to inefficient performance." 
+ ) + + def _array_array_codecs(self) -> List[ArrayArrayCodec]: + return [codec for codec in self.codecs if isinstance(codec, ArrayArrayCodec)] + + def _array_bytes_codec(self) -> ArrayBytesCodec: + return next(codec for codec in self.codecs if isinstance(codec, ArrayBytesCodec)) + + def _bytes_bytes_codecs(self) -> List[BytesBytesCodec]: + return [codec for codec in self.codecs if isinstance(codec, BytesBytesCodec)] + + async def decode(self, chunk_bytes: BytesLike) -> np.ndarray: + for bb_codec in self._bytes_bytes_codecs()[::-1]: + chunk_bytes = await bb_codec.decode(chunk_bytes) + + chunk_array = await self._array_bytes_codec().decode(chunk_bytes) + + for aa_codec in self._array_array_codecs()[::-1]: + chunk_array = await aa_codec.decode(chunk_array) + + return chunk_array + + async def encode(self, chunk_array: np.ndarray) -> Optional[BytesLike]: + for aa_codec in self._array_array_codecs(): + chunk_array_maybe = await aa_codec.encode(chunk_array) + if chunk_array_maybe is None: + return None + chunk_array = chunk_array_maybe + + chunk_bytes_maybe = await self._array_bytes_codec().encode(chunk_array) + if chunk_bytes_maybe is None: + return None + chunk_bytes = chunk_bytes_maybe + + for bb_codec in self._bytes_bytes_codecs(): + chunk_bytes_maybe = await bb_codec.encode(chunk_bytes) + if chunk_bytes_maybe is None: + return None + chunk_bytes = chunk_bytes_maybe + + return chunk_bytes + + def compute_encoded_size(self, byte_length: int) -> int: + return reduce(lambda acc, codec: codec.compute_encoded_size(acc), self.codecs, byte_length) + + +def blosc_codec( + typesize: int, + cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd", + clevel: int = 5, + shuffle: Literal["noshuffle", "shuffle", "bitshuffle"] = "noshuffle", + blocksize: int = 0, +) -> "BloscCodecMetadata": + from zarr.v3.codecs.blosc import BloscCodecMetadata, BloscCodecConfigurationMetadata + + return BloscCodecMetadata( + configuration=BloscCodecConfigurationMetadata( + cname=cname, + clevel=clevel, + shuffle=shuffle, + blocksize=blocksize, + typesize=typesize, + ) + ) + + +def bytes_codec(endian: Optional[Literal["big", "little"]] = "little") -> "BytesCodecMetadata": + from zarr.v3.codecs.bytes import BytesCodecMetadata, BytesCodecConfigurationMetadata + + return BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian)) + + +def transpose_codec( + order: Union[Tuple[int, ...], Literal["C", "F"]], ndim: Optional[int] = None +) -> "TransposeCodecMetadata": + from zarr.v3.codecs.transpose import TransposeCodecMetadata, TransposeCodecConfigurationMetadata + + if order == "C" or order == "F": + assert ( + isinstance(ndim, int) and ndim > 0 + ), 'When using "C" or "F" the `ndim` argument needs to be provided.' 
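+        # Expand the shorthand into an explicit permutation, e.g. for ndim=3:
+        # "C" -> (0, 1, 2) (identity) and "F" -> (2, 1, 0) (reversed axes).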
+ if order == "C": + order = tuple(range(ndim)) + if order == "F": + order = tuple(ndim - i - 1 for i in range(ndim)) + + return TransposeCodecMetadata(configuration=TransposeCodecConfigurationMetadata(order)) + + +def gzip_codec(level: int = 5) -> "GzipCodecMetadata": + from zarr.v3.codecs.gzip import GzipCodecMetadata, GzipCodecConfigurationMetadata + + return GzipCodecMetadata(configuration=GzipCodecConfigurationMetadata(level)) + + +def zstd_codec(level: int = 0, checksum: bool = False) -> "ZstdCodecMetadata": + from zarr.v3.codecs.zstd import ZstdCodecMetadata, ZstdCodecConfigurationMetadata + + return ZstdCodecMetadata(configuration=ZstdCodecConfigurationMetadata(level, checksum)) + + +def crc32c_codec() -> "Crc32cCodecMetadata": + from zarr.v3.codecs.crc32c_ import Crc32cCodecMetadata + + return Crc32cCodecMetadata() + + +def sharding_codec( + chunk_shape: Tuple[int, ...], + codecs: Optional[List[CodecMetadata]] = None, + index_codecs: Optional[List[CodecMetadata]] = None, + index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end, +) -> "ShardingCodecMetadata": + from zarr.v3.codecs.sharding import ShardingCodecMetadata, ShardingCodecConfigurationMetadata + + codecs = codecs or [bytes_codec()] + index_codecs = index_codecs or [bytes_codec(), crc32c_codec()] + return ShardingCodecMetadata( + configuration=ShardingCodecConfigurationMetadata( + chunk_shape, codecs, index_codecs, index_location + ) + ) diff --git a/zarr/v3/codecs/blosc.py b/zarr/v3/codecs/blosc.py new file mode 100644 index 0000000000..8fb32faaa7 --- /dev/null +++ b/zarr/v3/codecs/blosc.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Dict, + Literal, + Optional, + Type, +) + +import numcodecs +import numpy as np +from attr import asdict, evolve, frozen, field +from numcodecs.blosc import Blosc + +from zarr.v3.abc.codec import BytesBytesCodec +from zarr.v3.codecs.registry import register_codec +from zarr.v3.common import BytesLike, to_thread +from zarr.v3.metadata import CodecMetadata + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +BloscShuffle = Literal["noshuffle", "shuffle", "bitshuffle"] + +# See https://zarr.readthedocs.io/en/stable/tutorial.html#configuring-blosc +numcodecs.blosc.use_threads = False + + +@frozen +class BloscCodecConfigurationMetadata: + typesize: int + cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd" + clevel: int = 5 + shuffle: BloscShuffle = "noshuffle" + blocksize: int = 0 + + +blosc_shuffle_int_to_str: Dict[int, BloscShuffle] = { + 0: "noshuffle", + 1: "shuffle", + 2: "bitshuffle", +} + + +@frozen +class BloscCodecMetadata: + configuration: BloscCodecConfigurationMetadata + name: Literal["blosc"] = field(default="blosc", init=False) + + +@frozen +class BloscCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + configuration: BloscCodecConfigurationMetadata + blosc_codec: Blosc + is_fixed_size = False + + @classmethod + def from_metadata( + cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata + ) -> BloscCodec: + assert isinstance(codec_metadata, BloscCodecMetadata) + configuration = codec_metadata.configuration + if configuration.typesize == 0: + configuration = evolve(configuration, typesize=array_metadata.data_type.byte_count) + config_dict = asdict(codec_metadata.configuration) + config_dict.pop("typesize", None) + map_shuffle_str_to_int = {"noshuffle": 0, "shuffle": 1, "bitshuffle": 2} + config_dict["shuffle"] = 
map_shuffle_str_to_int[config_dict["shuffle"]] + return cls( + array_metadata=array_metadata, + configuration=configuration, + blosc_codec=Blosc.from_config(config_dict), + ) + + @classmethod + def get_metadata_class(cls) -> Type[BloscCodecMetadata]: + return BloscCodecMetadata + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + return await to_thread(self.blosc_codec.decode, chunk_bytes) + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + chunk_array = np.frombuffer(chunk_bytes, dtype=self.array_metadata.dtype) + return await to_thread(self.blosc_codec.encode, chunk_array) + + def compute_encoded_size(self, _input_byte_length: int) -> int: + raise NotImplementedError + + +register_codec("blosc", BloscCodec) diff --git a/zarr/v3/codecs/bytes.py b/zarr/v3/codecs/bytes.py new file mode 100644 index 0000000000..80a3f155d0 --- /dev/null +++ b/zarr/v3/codecs/bytes.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Literal, + Optional, + Type, +) + +import numpy as np +from attr import frozen, field + +from zarr.v3.abc.codec import ArrayBytesCodec +from zarr.v3.codecs.registry import register_codec +from zarr.v3.common import BytesLike +from zarr.v3.metadata import CodecMetadata + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +@frozen +class BytesCodecConfigurationMetadata: + endian: Optional[Literal["big", "little"]] = "little" + + +@frozen +class BytesCodecMetadata: + configuration: BytesCodecConfigurationMetadata + name: Literal["bytes"] = field(default="bytes", init=False) + + +@frozen +class BytesCodec(ArrayBytesCodec): + array_metadata: CoreArrayMetadata + configuration: BytesCodecConfigurationMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata + ) -> BytesCodec: + assert isinstance(codec_metadata, BytesCodecMetadata) + assert ( + array_metadata.dtype.itemsize == 1 or codec_metadata.configuration.endian is not None + ), "The `endian` configuration needs to be specified for multi-byte data types." 
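+        # Single-byte dtypes carry no byte order, so `endian` may only be
+        # omitted in that case.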
+ return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + ) + + @classmethod + def get_metadata_class(cls) -> Type[BytesCodecMetadata]: + return BytesCodecMetadata + + def _get_byteorder(self, array: np.ndarray) -> Literal["big", "little"]: + if array.dtype.byteorder == "<": + return "little" + elif array.dtype.byteorder == ">": + return "big" + else: + import sys + + return sys.byteorder + + async def decode( + self, + chunk_bytes: BytesLike, + ) -> np.ndarray: + if self.array_metadata.dtype.itemsize > 0: + if self.configuration.endian == "little": + prefix = "<" + else: + prefix = ">" + dtype = np.dtype(f"{prefix}{self.array_metadata.data_type.to_numpy_shortname()}") + else: + dtype = np.dtype(f"|{self.array_metadata.data_type.to_numpy_shortname()}") + chunk_array = np.frombuffer(chunk_bytes, dtype) + + # ensure correct chunk shape + if chunk_array.shape != self.array_metadata.chunk_shape: + chunk_array = chunk_array.reshape( + self.array_metadata.chunk_shape, + ) + return chunk_array + + async def encode( + self, + chunk_array: np.ndarray, + ) -> Optional[BytesLike]: + if chunk_array.dtype.itemsize > 1: + byteorder = self._get_byteorder(chunk_array) + if self.configuration.endian != byteorder: + new_dtype = chunk_array.dtype.newbyteorder(self.configuration.endian) + chunk_array = chunk_array.astype(new_dtype) + return chunk_array.tobytes() + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + + +register_codec("bytes", BytesCodec) + +# compatibility with earlier versions of ZEP1 +register_codec("endian", BytesCodec) diff --git a/zarr/v3/codecs/crc32c_.py b/zarr/v3/codecs/crc32c_.py new file mode 100644 index 0000000000..c4fab3c9b9 --- /dev/null +++ b/zarr/v3/codecs/crc32c_.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Literal, + Optional, + Type, +) + +import numpy as np +from attr import frozen, field +from crc32c import crc32c + +from zarr.v3.abc.codec import BytesBytesCodec +from zarr.v3.codecs.registry import register_codec +from zarr.v3.common import BytesLike +from zarr.v3.metadata import CodecMetadata + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +@frozen +class Crc32cCodecMetadata: + name: Literal["crc32c"] = field(default="crc32c", init=False) + + +@frozen +class Crc32cCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata + ) -> Crc32cCodec: + assert isinstance(codec_metadata, Crc32cCodecMetadata) + return cls(array_metadata=array_metadata) + + @classmethod + def get_metadata_class(cls) -> Type[Crc32cCodecMetadata]: + return Crc32cCodecMetadata + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + crc32_bytes = chunk_bytes[-4:] + inner_bytes = chunk_bytes[:-4] + + assert np.uint32(crc32c(inner_bytes)).tobytes() == bytes(crc32_bytes) + return inner_bytes + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + return chunk_bytes + np.uint32(crc32c(chunk_bytes)).tobytes() + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + 4 + + +register_codec("crc32c", Crc32cCodec) diff --git a/zarr/v3/codecs/gzip.py b/zarr/v3/codecs/gzip.py new file mode 100644 index 0000000000..be1ebcdc9f --- /dev/null +++ b/zarr/v3/codecs/gzip.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +from typing import ( + 
TYPE_CHECKING, + Literal, + Optional, + Type, +) + +from attr import frozen, field +from numcodecs.gzip import GZip + +from zarr.v3.abc.codec import BytesBytesCodec +from zarr.v3.codecs.registry import register_codec +from zarr.v3.common import BytesLike, to_thread +from zarr.v3.metadata import CodecMetadata + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +@frozen +class GzipCodecConfigurationMetadata: + level: int = 5 + + +@frozen +class GzipCodecMetadata: + configuration: GzipCodecConfigurationMetadata + name: Literal["gzip"] = field(default="gzip", init=False) + + +@frozen +class GzipCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + configuration: GzipCodecConfigurationMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata + ) -> GzipCodec: + assert isinstance(codec_metadata, GzipCodecMetadata) + + return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + ) + + @classmethod + def get_metadata_class(cls) -> Type[GzipCodecMetadata]: + return GzipCodecMetadata + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + return await to_thread(GZip(self.configuration.level).decode, chunk_bytes) + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + return await to_thread(GZip(self.configuration.level).encode, chunk_bytes) + + def compute_encoded_size(self, _input_byte_length: int) -> int: + raise NotImplementedError + + +register_codec("gzip", GzipCodec) diff --git a/zarr/v3/codecs/registry.py b/zarr/v3/codecs/registry.py new file mode 100644 index 0000000000..642c0feebb --- /dev/null +++ b/zarr/v3/codecs/registry.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from typing import Dict, NamedTuple, Type +from importlib.metadata import EntryPoint, entry_points as get_entry_points + +from zarr.v3.abc.codec import Codec +from zarr.v3.metadata import CodecMetadata + + +class CodecRegistryItem(NamedTuple): + codec_cls: Type[Codec] + codec_metadata_cls: Type[CodecMetadata] + + +__codec_registry: Dict[str, CodecRegistryItem] = {} +__lazy_load_codecs: Dict[str, EntryPoint] = {} + + +def _collect_entrypoints() -> None: + entry_points = get_entry_points() + if hasattr(entry_points, "select"): + # If entry_points() has a select method, use that. 
Python 3.10+ + for e in entry_points.select(group="zarr.codecs"): + __lazy_load_codecs[e.name] = e + else: + # Otherwise, fallback to using get + for e in entry_points.get("zarr.codecs", []): + __lazy_load_codecs[e.name] = e + + +def register_codec(key: str, codec_cls: Type[Codec]) -> None: + __codec_registry[key] = CodecRegistryItem(codec_cls, codec_cls.get_metadata_class()) + + +def _get_codec_item(key: str) -> CodecRegistryItem: + item = __codec_registry.get(key) + if item is None: + if key in __lazy_load_codecs: + # logger.debug("Auto loading codec '%s' from entrypoint", codec_id) + cls = __lazy_load_codecs[key].load() + register_codec(key, cls) + item = __codec_registry.get(key) + if item: + return item + raise KeyError(key) + + +def get_codec_metadata_class(key: str) -> Type[CodecMetadata]: + return _get_codec_item(key).codec_metadata_cls + + +def get_codec_class(key: str) -> Type[Codec]: + return _get_codec_item(key).codec_cls + + +_collect_entrypoints() diff --git a/zarr/v3/sharding.py b/zarr/v3/codecs/sharding.py similarity index 85% rename from zarr/v3/sharding.py rename to zarr/v3/codecs/sharding.py index 3c5b4bd12d..edbe327a6b 100644 --- a/zarr/v3/sharding.py +++ b/zarr/v3/codecs/sharding.py @@ -1,11 +1,29 @@ from __future__ import annotations -from typing import Iterator, List, Mapping, NamedTuple, Optional, Set, Tuple +from typing import ( + Awaitable, + Callable, + Iterator, + List, + Literal, + Mapping, + NamedTuple, + Optional, + Set, + Tuple, + Type, +) +from attr import field, frozen import numpy as np -from attrs import frozen +from zarr.v3.abc.codec import ( + ArrayBytesCodec, + ArrayBytesCodecPartialDecodeMixin, + ArrayBytesCodecPartialEncodeMixin, +) -from zarr.v3.codecs import ArrayBytesCodec, CodecPipeline +from zarr.v3.codecs import CodecPipeline +from zarr.v3.codecs.registry import register_codec from zarr.v3.common import ( BytesLike, ChunkCoords, @@ -22,14 +40,28 @@ from zarr.v3.metadata import ( CoreArrayMetadata, DataType, - ShardingCodecConfigurationMetadata, - ShardingCodecMetadata, + CodecMetadata, + ShardingCodecIndexLocation, ) from zarr.v3.store import StorePath MAX_UINT_64 = 2**64 - 1 +@frozen +class ShardingCodecConfigurationMetadata: + chunk_shape: ChunkCoords + codecs: List["CodecMetadata"] + index_codecs: List["CodecMetadata"] + index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end + + +@frozen +class ShardingCodecMetadata: + configuration: ShardingCodecConfigurationMetadata + name: Literal["sharding_indexed"] = field(default="sharding_indexed", init=False) + + class _ShardIndex(NamedTuple): # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) offsets_and_lengths: np.ndarray @@ -49,7 +81,7 @@ def get_chunk_slice(self, chunk_coords: ChunkCoords) -> Optional[Tuple[int, int] if (chunk_start, chunk_len) == (MAX_UINT_64, MAX_UINT_64): return None else: - return (int(chunk_start), int(chunk_start + chunk_len)) + return (int(chunk_start), int(chunk_start) + int(chunk_len)) def set_chunk_slice(self, chunk_coords: ChunkCoords, chunk_slice: Optional[slice]) -> None: localized_chunk = self._localize_chunk(chunk_coords) @@ -95,9 +127,15 @@ class _ShardProxy(Mapping): @classmethod async def from_bytes(cls, buf: BytesLike, codec: ShardingCodec) -> _ShardProxy: + shard_index_size = codec._shard_index_size() obj = cls() obj.buf = memoryview(buf) - obj.index = await codec._decode_shard_index(obj.buf[-codec._shard_index_size() :]) + if codec.configuration.index_location == ShardingCodecIndexLocation.start: + shard_index_bytes 
= obj.buf[:shard_index_size] + else: + shard_index_bytes = obj.buf[-shard_index_size:] + + obj.index = await codec._decode_shard_index(shard_index_bytes) return obj @classmethod @@ -156,13 +194,27 @@ def append(self, chunk_coords: ChunkCoords, value: BytesLike): self.buf.extend(value) self.index.set_chunk_slice(chunk_coords, slice(chunk_start, chunk_start + chunk_length)) - def finalize(self, index_bytes: BytesLike) -> BytesLike: - self.buf.extend(index_bytes) - return self.buf + async def finalize( + self, + index_location: ShardingCodecIndexLocation, + index_encoder: Callable[[_ShardIndex], Awaitable[BytesLike]], + ) -> BytesLike: + index_bytes = await index_encoder(self.index) + if index_location == ShardingCodecIndexLocation.start: + self.index.offsets_and_lengths[..., 0] += len(index_bytes) + index_bytes = await index_encoder(self.index) # encode again with corrected offsets + out_buf = bytearray(index_bytes) + out_buf.extend(self.buf) + else: + out_buf = self.buf + out_buf.extend(index_bytes) + return out_buf @frozen -class ShardingCodec(ArrayBytesCodec): +class ShardingCodec( + ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin +): array_metadata: CoreArrayMetadata configuration: ShardingCodecConfigurationMetadata codec_pipeline: CodecPipeline @@ -172,9 +224,11 @@ class ShardingCodec(ArrayBytesCodec): @classmethod def from_metadata( cls, - codec_metadata: ShardingCodecMetadata, + codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata, ) -> ShardingCodec: + assert isinstance(codec_metadata, ShardingCodecMetadata) + chunks_per_shard = tuple( s // c for s, c in zip( @@ -211,6 +265,10 @@ def from_metadata( chunks_per_shard=chunks_per_shard, ) + @classmethod + def get_metadata_class(cls) -> Type[ShardingCodecMetadata]: + return ShardingCodecMetadata + async def decode( self, shard_bytes: BytesLike, @@ -260,7 +318,6 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, ) -> Optional[np.ndarray]: - # print("decode_partial") shard_shape = self.array_metadata.chunk_shape chunk_shape = self.configuration.chunk_shape @@ -390,7 +447,9 @@ async def _write_chunk( if chunk_bytes is not None: shard_builder.append(chunk_coords, chunk_bytes) - return shard_builder.finalize(await self._encode_shard_index(shard_builder.index)) + return await shard_builder.finalize( + self.configuration.index_location, self._encode_shard_index + ) async def encode_partial( self, @@ -477,7 +536,10 @@ async def _write_chunk( await store_path.delete_async() else: await store_path.set_async( - shard_builder.finalize(await self._encode_shard_index(shard_builder.index)) + await shard_builder.finalize( + self.configuration.index_location, + self._encode_shard_index, + ) ) def _is_total_shard(self, all_chunk_coords: Set[ChunkCoords]) -> bool: @@ -497,7 +559,11 @@ def _shard_index_size(self) -> int: return self.index_codec_pipeline.compute_encoded_size(16 * product(self.chunks_per_shard)) async def _load_shard_index_maybe(self, store_path: StorePath) -> Optional[_ShardIndex]: - index_bytes = await store_path.get_async((-self._shard_index_size(), None)) + shard_index_size = self._shard_index_size() + if self.configuration.index_location == ShardingCodecIndexLocation.start: + index_bytes = await store_path.get_async((0, shard_index_size)) + else: + index_bytes = await store_path.get_async((-shard_index_size, None)) if index_bytes is not None: return await self._decode_shard_index(index_bytes) return None @@ -514,3 +580,6 @@ async def _load_full_shard_maybe(self, 
store_path: StorePath) -> Optional[_Shard def compute_encoded_size(self, input_byte_length: int) -> int: return input_byte_length + self._shard_index_size() + + +register_codec("sharding_indexed", ShardingCodec) diff --git a/zarr/v3/codecs/transpose.py b/zarr/v3/codecs/transpose.py new file mode 100644 index 0000000000..d160f2a88d --- /dev/null +++ b/zarr/v3/codecs/transpose.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Literal, + Optional, + Tuple, + Type, +) + +import numpy as np +from attr import frozen, field + +from zarr.v3.abc.codec import ArrayArrayCodec +from zarr.v3.codecs.registry import register_codec +from zarr.v3.metadata import CodecMetadata + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +@frozen +class TransposeCodecConfigurationMetadata: + order: Tuple[int, ...] + + +@frozen +class TransposeCodecMetadata: + configuration: TransposeCodecConfigurationMetadata + name: Literal["transpose"] = field(default="transpose", init=False) + + +@frozen +class TransposeCodec(ArrayArrayCodec): + array_metadata: CoreArrayMetadata + order: Tuple[int, ...] + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata + ) -> TransposeCodec: + assert isinstance(codec_metadata, TransposeCodecMetadata) + + configuration = codec_metadata.configuration + # Compatibility with older version of ZEP1 + if configuration.order == "F": # type: ignore + order = tuple(array_metadata.ndim - x - 1 for x in range(array_metadata.ndim)) + + elif configuration.order == "C": # type: ignore + order = tuple(range(array_metadata.ndim)) + + else: + assert len(configuration.order) == array_metadata.ndim, ( + "The `order` tuple needs have as many entries as " + + f"there are dimensions in the array. Got: {configuration.order}" + ) + assert len(configuration.order) == len(set(configuration.order)), ( + "There must not be duplicates in the `order` tuple. " + + f"Got: {configuration.order}" + ) + assert all(0 <= x < array_metadata.ndim for x in configuration.order), ( + "All entries in the `order` tuple must be between 0 and " + + f"the number of dimensions in the array. 
Got: {configuration.order}" + ) + order = tuple(configuration.order) + + return cls( + array_metadata=array_metadata, + order=order, + ) + + @classmethod + def get_metadata_class(cls) -> Type[TransposeCodecMetadata]: + return TransposeCodecMetadata + + def resolve_metadata(self) -> CoreArrayMetadata: + from zarr.v3.metadata import CoreArrayMetadata + + return CoreArrayMetadata( + shape=tuple( + self.array_metadata.shape[self.order[i]] for i in range(self.array_metadata.ndim) + ), + chunk_shape=tuple( + self.array_metadata.chunk_shape[self.order[i]] + for i in range(self.array_metadata.ndim) + ), + data_type=self.array_metadata.data_type, + fill_value=self.array_metadata.fill_value, + runtime_configuration=self.array_metadata.runtime_configuration, + ) + + async def decode( + self, + chunk_array: np.ndarray, + ) -> np.ndarray: + inverse_order = [0 for _ in range(self.array_metadata.ndim)] + for x, i in enumerate(self.order): + inverse_order[x] = i + chunk_array = chunk_array.transpose(inverse_order) + return chunk_array + + async def encode( + self, + chunk_array: np.ndarray, + ) -> Optional[np.ndarray]: + chunk_array = chunk_array.transpose(self.order) + return chunk_array + + def compute_encoded_size(self, input_byte_length: int) -> int: + return input_byte_length + + +register_codec("transpose", TransposeCodec) diff --git a/zarr/v3/codecs/zstd.py b/zarr/v3/codecs/zstd.py new file mode 100644 index 0000000000..e66d9e0700 --- /dev/null +++ b/zarr/v3/codecs/zstd.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Literal, + Optional, + Type, +) + +from attr import frozen, field +from zstandard import ZstdCompressor, ZstdDecompressor + +from zarr.v3.abc.codec import BytesBytesCodec +from zarr.v3.codecs.registry import register_codec +from zarr.v3.common import BytesLike, to_thread +from zarr.v3.metadata import CodecMetadata + +if TYPE_CHECKING: + from zarr.v3.metadata import CoreArrayMetadata + + +@frozen +class ZstdCodecConfigurationMetadata: + level: int = 0 + checksum: bool = False + + +@frozen +class ZstdCodecMetadata: + configuration: ZstdCodecConfigurationMetadata + name: Literal["zstd"] = field(default="zstd", init=False) + + +@frozen +class ZstdCodec(BytesBytesCodec): + array_metadata: CoreArrayMetadata + configuration: ZstdCodecConfigurationMetadata + is_fixed_size = True + + @classmethod + def from_metadata( + cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata + ) -> ZstdCodec: + assert isinstance(codec_metadata, ZstdCodecMetadata) + return cls( + array_metadata=array_metadata, + configuration=codec_metadata.configuration, + ) + + @classmethod + def get_metadata_class(cls) -> Type[ZstdCodecMetadata]: + return ZstdCodecMetadata + + def _compress(self, data: bytes) -> bytes: + ctx = ZstdCompressor( + level=self.configuration.level, write_checksum=self.configuration.checksum + ) + return ctx.compress(data) + + def _decompress(self, data: bytes) -> bytes: + ctx = ZstdDecompressor() + return ctx.decompress(data) + + async def decode( + self, + chunk_bytes: bytes, + ) -> BytesLike: + return await to_thread(self._decompress, chunk_bytes) + + async def encode( + self, + chunk_bytes: bytes, + ) -> Optional[BytesLike]: + return await to_thread(self._compress, chunk_bytes) + + def compute_encoded_size(self, _input_byte_length: int) -> int: + raise NotImplementedError + + +register_codec("zstd", ZstdCodec) diff --git a/zarr/v3/common.py b/zarr/v3/common.py index 0e55a7c1fd..e91356c4e2 100644 --- a/zarr/v3/common.py +++ 
b/zarr/v3/common.py @@ -32,18 +32,12 @@ def make_cattr(): from zarr.v3.metadata import ( - BloscCodecMetadata, - BytesCodecMetadata, ChunkKeyEncodingMetadata, CodecMetadata, - Crc32cCodecMetadata, DefaultChunkKeyEncodingMetadata, - GzipCodecMetadata, - ShardingCodecMetadata, - TransposeCodecMetadata, V2ChunkKeyEncodingMetadata, - ZstdCodecMetadata, ) + from zarr.v3.codecs.registry import get_codec_metadata_class converter = Converter() @@ -59,24 +53,8 @@ def _structure_chunk_key_encoding_metadata(d: Dict[str, Any], _t) -> ChunkKeyEnc ) def _structure_codec_metadata(d: Dict[str, Any], _t=None) -> CodecMetadata: - if d["name"] == "endian": - d["name"] = "bytes" - - if d["name"] == "blosc": - return converter.structure(d, BloscCodecMetadata) - if d["name"] == "bytes": - return converter.structure(d, BytesCodecMetadata) - if d["name"] == "transpose": - return converter.structure(d, TransposeCodecMetadata) - if d["name"] == "gzip": - return converter.structure(d, GzipCodecMetadata) - if d["name"] == "zstd": - return converter.structure(d, ZstdCodecMetadata) - if d["name"] == "sharding_indexed": - return converter.structure(d, ShardingCodecMetadata) - if d["name"] == "crc32c": - return converter.structure(d, Crc32cCodecMetadata) - raise KeyError + codec_metadata_cls = get_codec_metadata_class(d["name"]) + return converter.structure(d, codec_metadata_cls) converter.register_structure_hook(CodecMetadata, _structure_codec_metadata) diff --git a/zarr/v3/metadata.py b/zarr/v3/metadata.py index 1fc43b19f0..53b300d3f8 100644 --- a/zarr/v3/metadata.py +++ b/zarr/v3/metadata.py @@ -3,7 +3,7 @@ import json from asyncio import AbstractEventLoop from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, Dict, List, Literal, Optional, Protocol, Tuple, Union import numpy as np from attr import asdict, field, frozen @@ -142,103 +142,15 @@ def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: ChunkKeyEncodingMetadata = Union[DefaultChunkKeyEncodingMetadata, V2ChunkKeyEncodingMetadata] -BloscShuffle = Literal["noshuffle", "shuffle", "bitshuffle"] - - -@frozen -class BloscCodecConfigurationMetadata: - typesize: int - cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd" - clevel: int = 5 - shuffle: BloscShuffle = "noshuffle" - blocksize: int = 0 - - -blosc_shuffle_int_to_str: Dict[int, BloscShuffle] = { - 0: "noshuffle", - 1: "shuffle", - 2: "bitshuffle", -} - - -@frozen -class BloscCodecMetadata: - configuration: BloscCodecConfigurationMetadata - name: Literal["blosc"] = "blosc" - - -@frozen -class BytesCodecConfigurationMetadata: - endian: Optional[Literal["big", "little"]] = "little" - - -@frozen -class BytesCodecMetadata: - configuration: BytesCodecConfigurationMetadata - name: Literal["bytes"] = "bytes" - - -@frozen -class TransposeCodecConfigurationMetadata: - order: Union[Literal["C", "F"], Tuple[int, ...]] = "C" - - -@frozen -class TransposeCodecMetadata: - configuration: TransposeCodecConfigurationMetadata - name: Literal["transpose"] = "transpose" - - -@frozen -class GzipCodecConfigurationMetadata: - level: int = 5 - - -@frozen -class GzipCodecMetadata: - configuration: GzipCodecConfigurationMetadata - name: Literal["gzip"] = "gzip" - - -@frozen -class ZstdCodecConfigurationMetadata: - level: int = 0 - checksum: bool = False - - -@frozen -class ZstdCodecMetadata: - configuration: ZstdCodecConfigurationMetadata - name: Literal["zstd"] = "zstd" - - -@frozen -class Crc32cCodecMetadata: - name: Literal["crc32c"] = 
"crc32c" - - -@frozen -class ShardingCodecConfigurationMetadata: - chunk_shape: ChunkCoords - codecs: List["CodecMetadata"] - index_codecs: List["CodecMetadata"] - - -@frozen -class ShardingCodecMetadata: - configuration: ShardingCodecConfigurationMetadata - name: Literal["sharding_indexed"] = "sharding_indexed" +class CodecMetadata(Protocol): + @property + def name(self) -> str: + pass -CodecMetadata = Union[ - BloscCodecMetadata, - BytesCodecMetadata, - TransposeCodecMetadata, - GzipCodecMetadata, - ZstdCodecMetadata, - ShardingCodecMetadata, - Crc32cCodecMetadata, -] +class ShardingCodecIndexLocation(Enum): + start = "start" + end = "end" @frozen @@ -290,7 +202,7 @@ def get_core_metadata(self, runtime_configuration: RuntimeConfiguration) -> Core def to_bytes(self) -> bytes: def _json_convert(o): - if isinstance(o, DataType): + if isinstance(o, Enum): return o.name raise TypeError diff --git a/zarr/v3/store.py b/zarr/v3/store.py index f7472c68d2..b6c20be41f 100644 --- a/zarr/v3/store.py +++ b/zarr/v3/store.py @@ -10,7 +10,7 @@ import asyncio import io from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, List, MutableMapping, Optional, Tuple, Union from zarr.v3.common import BytesLike, to_thread @@ -284,6 +284,53 @@ def __repr__(self) -> str: return f"RemoteStore({repr(str(self))})" +class MemoryStore(Store): + supports_partial_writes = True + store_dict: MutableMapping[str, bytes] + + def __init__(self, store_dict: Optional[MutableMapping[str, bytes]] = None): + self.store_dict = store_dict or {} + + async def get_async( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + assert isinstance(key, str) + try: + value = self.store_dict[key] + if byte_range is not None: + value = value[byte_range[0] : byte_range[1]] + return value + except KeyError: + return None + + async def set_async( + self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + assert isinstance(key, str) + + if byte_range is not None: + buf = bytearray(self.store_dict[key]) + buf[byte_range[0] : byte_range[1]] = value + self.store_dict[key] = buf + else: + self.store_dict[key] = value + + async def delete_async(self, key: str) -> None: + try: + del self.store_dict[key] + except KeyError: + pass + + async def exists_async(self, key: str) -> bool: + return key in self.store_dict + + def __str__(self) -> str: + return f"memory://{id(self.store_dict)}" + + def __repr__(self) -> str: + return f"MemoryStore({repr(str(self))})" + + StoreLike = Union[Store, StorePath, Path, str] From 8579e21c80927afbc26153153ca8eedc91a6ff6f Mon Sep 17 00:00:00 2001 From: David Stansby Date: Thu, 7 Dec 2023 21:15:55 +0000 Subject: [PATCH 0391/1078] Bump version of black in pre-commit (#1559) --- .pre-commit-config.yaml | 2 +- bench/compress_normal.py | 1 - zarr/_storage/absstore.py | 3 +- zarr/_storage/store.py | 1 - zarr/_storage/v3.py | 1 - zarr/attrs.py | 6 ---- zarr/convenience.py | 20 +++-------- zarr/creation.py | 2 -- zarr/hierarchy.py | 12 +++---- zarr/indexing.py | 25 -------------- zarr/meta.py | 1 - zarr/n5.py | 57 -------------------------------- zarr/storage.py | 5 --- zarr/tests/test_attrs.py | 6 ---- zarr/tests/test_convenience.py | 7 ---- zarr/tests/test_creation.py | 9 ----- zarr/tests/test_dim_separator.py | 1 - zarr/tests/test_filters.py | 12 ------- zarr/tests/test_hierarchy.py | 3 -- zarr/tests/test_indexing.py | 35 -------------------- 
zarr/tests/test_info.py | 1 - zarr/tests/test_meta.py | 19 ----------- zarr/tests/test_storage.py | 20 ----------- zarr/tests/test_storage_v3.py | 10 ------ zarr/tests/test_sync.py | 2 -- zarr/tests/test_util.py | 2 -- zarr/util.py | 6 ---- 27 files changed, 11 insertions(+), 258 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f22dc39832..e985d24000 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: # Respect `exclude` and `extend-exclude` settings. args: ["--force-exclude"] - repo: https://github.com/psf/black - rev: 22.12.0 + rev: 23.10.1 hooks: - id: black - repo: https://github.com/codespell-project/codespell diff --git a/bench/compress_normal.py b/bench/compress_normal.py index 9f1655541c..803d54b76b 100644 --- a/bench/compress_normal.py +++ b/bench/compress_normal.py @@ -8,7 +8,6 @@ from zarr import blosc if __name__ == "__main__": - sys.path.insert(0, "..") # setup diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index f62529f096..c9a113148c 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -87,7 +87,7 @@ def __init__( "https://{}.blob.core.windows.net/".format(account_name), container, credential=account_key, - **blob_service_kwargs + **blob_service_kwargs, ) self.client = client @@ -240,7 +240,6 @@ def __setitem__(self, key, value): super().__setitem__(key, value) def rmdir(self, path=None): - if not path: # Currently allowing clear to delete everything as in v2 diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 8daedae48f..80e4ad8f75 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -629,7 +629,6 @@ def _rmdir_from_keys(store: StoreLike, path: Optional[str] = None) -> None: def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: - meta_dir = meta_root + path meta_dir = meta_dir.rstrip("/") _rmdir_from_keys(store, meta_dir) diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 00dc085dac..32e78f7a34 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -118,7 +118,6 @@ def _get_files_and_dirs_from_path(store, path): class FSStoreV3(FSStore, StoreV3): - # FSStoreV3 doesn't use this (FSStore uses it within _normalize_key) _META_KEYS = () diff --git a/zarr/attrs.py b/zarr/attrs.py index 01fc617b3c..e967c5b853 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -26,7 +26,6 @@ class Attributes(MutableMapping): """ def __init__(self, store, key=".zattrs", read_only=False, cache=True, synchronizer=None): - self._version = getattr(store, "_store_version", 2) _Store = Store if self._version == 2 else StoreV3 self.store = _Store._ensure_store(store) @@ -73,7 +72,6 @@ def __getitem__(self, item): return self.asdict()[item] def _write_op(self, f, *args, **kwargs): - # guard condition if self.read_only: raise PermissionError("attributes are read-only") @@ -89,7 +87,6 @@ def __setitem__(self, item, value): self._write_op(self._setitem_nosync, item, value) def _setitem_nosync(self, item, value): - # load existing data d = self._get_nosync() @@ -106,7 +103,6 @@ def __delitem__(self, item): self._write_op(self._delitem_nosync, item) def _delitem_nosync(self, key): - # load existing data d = self._get_nosync() @@ -128,7 +124,6 @@ def put(self, d): self._write_op(self._put_nosync, dict(attributes=d)) def _put_nosync(self, d): - d_to_check = d if self._version == 2 else d["attributes"] if not all(isinstance(item, str) for item in d_to_check): # TODO: Raise an error for non-string keys @@ -178,7 +173,6 @@ def update(self, *args, 
**kwargs): self._write_op(self._update_nosync, *args, **kwargs) def _update_nosync(self, *args, **kwargs): - # load existing data d = self._get_nosync() diff --git a/zarr/convenience.py b/zarr/convenience.py index 0ee8a8d323..9c0deeea47 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -675,10 +675,8 @@ def copy_store( # setup logging with _LogWriter(log) as log: - # iterate over source keys for source_key in sorted(source.keys()): - # filter to keys under source path if source_store_version == 2: if not source_key.startswith(source_path): @@ -757,7 +755,7 @@ def copy( log=None, if_exists="raise", dry_run=False, - **create_kws + **create_kws, ): """Copy the `source` array or group into the `dest` group. @@ -878,7 +876,6 @@ def copy( # setup logging with _LogWriter(log) as log: - # do the copying n_copied, n_skipped, n_bytes_copied = _copy( log, @@ -890,7 +887,7 @@ def copy( without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, - **create_kws + **create_kws, ) # log a final message with a summary of what happened @@ -948,12 +945,10 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ # take action if do_copy: - # log a message about what we're going to do log("copy {} {} {}".format(source.name, source.shape, source.dtype)) if not dry_run: - # clear the way if exists: del dest[name] @@ -1038,12 +1033,10 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ # take action if do_copy: - # log action log("copy {}".format(source.name)) if not dry_run: - # clear the way if exists_array: del dest[name] @@ -1056,7 +1049,6 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ grp.attrs.update(source.attrs) else: - # setup for dry run without creating any groups in the # destination if dest is not None: @@ -1076,7 +1068,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, - **create_kws + **create_kws, ) n_copied += c n_skipped += s @@ -1099,7 +1091,7 @@ def copy_all( log=None, if_exists="raise", dry_run=False, - **create_kws + **create_kws, ): """Copy all children of the `source` group into the `dest` group. 
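
The reflow hunks above run through `copy` and `copy_all`, where the `**create_kws` passthrough is easy to miss: any extra keyword argument is forwarded to `zarr.create()` for every array that gets copied. A minimal sketch of that behaviour (not part of this patch; the group contents and the `compressor=None` keyword are illustrative):

    import numpy as np
    import zarr
    from zarr.convenience import copy_all

    source = zarr.group()
    source.create_dataset("foo/bar", data=np.arange(100), chunks=10)

    dest = zarr.group()
    # compressor=None is collected into **create_kws and applied to every
    # array created in the destination group
    n_copied, n_skipped, n_bytes_copied = copy_all(source, dest, compressor=None)
    assert "foo/bar" in dest
    assert dest["foo/bar"].compressor is None
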
@@ -1189,7 +1181,6 @@ def copy_all( # setup logging with _LogWriter(log) as log: - for k in source.keys(): c, s, b = _copy( log, @@ -1201,7 +1192,7 @@ def copy_all( without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, - **create_kws + **create_kws, ) n_copied += c n_skipped += s @@ -1262,7 +1253,6 @@ def is_zarr_key(key): return key.endswith(".zarray") or key.endswith(".zgroup") or key.endswith(".zattrs") else: - assert_zarr_v3_api_available() sfx = _get_metadata_suffix(store) # type: ignore diff --git a/zarr/creation.py b/zarr/creation.py index 726d0b5932..6227f90b7b 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -234,7 +234,6 @@ def create( def _kwargs_compat(compressor, fill_value, kwargs): - # to be compatible with h5py, as well as backwards-compatible with Zarr # 1.x, accept 'compression' and 'compression_opts' keyword arguments @@ -697,7 +696,6 @@ def open_array( def _like_args(a, kwargs): - shape, chunks = _get_shape_chunks(a) if shape is not None: kwargs.setdefault("shape", shape) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 3361969f08..1cfea89c81 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -145,7 +145,7 @@ def __init__( synchronizer=None, zarr_version=None, *, - meta_array=None + meta_array=None, ): store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) if zarr_version is None: @@ -919,7 +919,6 @@ def tree(self, expand=False, level=None): return TreeViewer(self, expand=expand, level=level) def _write_op(self, f, *args, **kwargs): - # guard condition if self._read_only: raise ReadOnlyError() @@ -1094,7 +1093,6 @@ def create_dataset(self, name, **kwargs): return self._write_op(self._create_dataset_nosync, name, **kwargs) def _create_dataset_nosync(self, name, data=None, **kwargs): - assert "mode" not in kwargs path = self._item_path(name) @@ -1138,11 +1136,9 @@ def require_dataset(self, name, shape, dtype=None, exact=False, **kwargs): ) def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, **kwargs): - path = self._item_path(name) if contains_array(self._store, path): - # array already exists at path, validate that it is the right shape and type synchronizer = kwargs.get("synchronizer", self._synchronizer) @@ -1235,7 +1231,7 @@ def _full_nosync(self, name, fill_value, **kwargs): path=path, chunk_store=self._chunk_store, fill_value=fill_value, - **kwargs + **kwargs, ) def array(self, name, data, **kwargs): @@ -1361,7 +1357,7 @@ def group( path=None, *, zarr_version=None, - meta_array=None + meta_array=None, ): """Create a group. @@ -1452,7 +1448,7 @@ def open_group( storage_options=None, *, zarr_version=None, - meta_array=None + meta_array=None, ): """Open a group using file-mode-like semantics. 
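
`open_group` above only gained a trailing comma, but a short sketch of the file-mode-like semantics its docstring refers to may help (the path and names here are hypothetical):

    import zarr

    # mode="a" opens read/write and creates the group if it does not exist
    root = zarr.open_group("data/example.zarr", mode="a")
    grp = root.require_group("measurements")
    temp = grp.require_dataset("temperature", shape=(365,), chunks=(30,), dtype="f8")
    temp[:7] = 21.5

    # mode="r" opens read-only and raises if no group exists at the path
    readonly = zarr.open_group("data/example.zarr", mode="r")
    assert "measurements" in readonly
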
diff --git a/zarr/indexing.py b/zarr/indexing.py index 487cc8b9d9..3042147ebb 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -111,7 +111,6 @@ def is_pure_orthogonal_indexing(selection, ndim): def normalize_integer_selection(dim_sel, dim_len): - # normalize type to int dim_sel = int(dim_sel) @@ -145,7 +144,6 @@ def normalize_integer_selection(dim_sel, dim_len): class IntDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): - # normalize dim_sel = normalize_integer_selection(dim_sel, dim_len) @@ -169,7 +167,6 @@ def ceildiv(a, b): class SliceDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): - # normalize self.start, self.stop, self.step = dim_sel.indices(dim_len) if self.step < 1: @@ -182,14 +179,12 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) def __iter__(self): - # figure out the range of chunks we need to visit dim_chunk_ix_from = self.start // self.dim_chunk_len dim_chunk_ix_to = ceildiv(self.stop, self.dim_chunk_len) # iterate over chunks in range for dim_chunk_ix in range(dim_chunk_ix_from, dim_chunk_ix_to): - # compute offsets for chunk within overall array dim_offset = dim_chunk_ix * self.dim_chunk_len dim_limit = min(self.dim_len, (dim_chunk_ix + 1) * self.dim_chunk_len) @@ -237,7 +232,6 @@ def check_selection_length(selection, shape): def replace_ellipsis(selection, shape): - selection = ensure_tuple(selection) # count number of ellipsis present @@ -330,14 +324,12 @@ def is_basic_selection(selection): # noinspection PyProtectedMember class BasicIndexer: def __init__(self, selection, array): - # handle ellipsis selection = replace_ellipsis(selection, array._shape) # setup per-dimension indexers dim_indexers = [] for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): - if is_integer(dim_sel): dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) @@ -358,7 +350,6 @@ def __init__(self, selection, array): def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) out_selection = tuple( @@ -370,7 +361,6 @@ def __iter__(self): class BoolArrayDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): - # check number of dimensions if not is_bool_array(dim_sel, 1): raise IndexError( @@ -402,10 +392,8 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): self.dim_chunk_ixs = np.nonzero(self.chunk_nitems)[0] def __iter__(self): - # iterate over chunks with at least one item for dim_chunk_ix in self.dim_chunk_ixs: - # find region in chunk dim_offset = dim_chunk_ix * self.dim_chunk_len dim_chunk_sel = self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] @@ -472,7 +460,6 @@ def __init__( boundscheck=True, order=Order.UNKNOWN, ): - # ensure 1d array dim_sel = np.asanyarray(dim_sel) if not is_integer_array(dim_sel, 1): @@ -526,9 +513,7 @@ def __init__( self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) def __iter__(self): - for dim_chunk_ix in self.dim_chunk_ixs: - # find region in output if dim_chunk_ix == 0: start = 0 @@ -602,7 +587,6 @@ def oindex_set(a, selection, value): # noinspection PyProtectedMember class OrthogonalIndexer: def __init__(self, selection, array): - # handle ellipsis selection = replace_ellipsis(selection, array._shape) @@ -612,7 +596,6 @@ def __init__(self, selection, array): # setup per-dimension indexers dim_indexers = [] for dim_sel, dim_len, dim_chunk_len in 
zip(selection, array._shape, array._chunks): - if is_integer(dim_sel): dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) @@ -649,7 +632,6 @@ def __init__(self, selection, array): def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) out_selection = tuple( @@ -658,7 +640,6 @@ def __iter__(self): # handle advanced indexing arrays orthogonally if self.is_advanced: - # N.B., numpy doesn't support orthogonal indexing directly as yet, # so need to work around via np.ix_. Also np.ix_ does not support a # mixture of arrays and slices or integers, so need to convert slices @@ -692,7 +673,6 @@ def __setitem__(self, selection, value): # noinspection PyProtectedMember class BlockIndexer: def __init__(self, selection, array): - # handle ellipsis selection = replace_ellipsis(selection, array._shape) @@ -794,7 +774,6 @@ def is_mask_selection(selection, array): # noinspection PyProtectedMember class CoordinateIndexer: def __init__(self, selection, array): - # some initial normalization selection = ensure_tuple(selection) selection = tuple([i] if is_integer(i) else i for i in selection) @@ -810,7 +789,6 @@ def __init__(self, selection, array): # handle wraparound, boundscheck for dim_sel, dim_len in zip(selection, array.shape): - # handle wraparound wraparound_indices(dim_sel, dim_len) @@ -861,10 +839,8 @@ def __init__(self, selection, array): self.chunk_mixs = np.unravel_index(self.chunk_rixs, array._cdata_shape) def __iter__(self): - # iterate over chunks for i, chunk_rix in enumerate(self.chunk_rixs): - chunk_coords = tuple(m[i] for m in self.chunk_mixs) if chunk_rix == 0: start = 0 @@ -891,7 +867,6 @@ def __iter__(self): # noinspection PyProtectedMember class MaskIndexer(CoordinateIndexer): def __init__(self, selection, array): - # some initial normalization selection = ensure_tuple(selection) selection = replace_lists(selection) diff --git a/zarr/meta.py b/zarr/meta.py index 48791ddf17..f23889f3ea 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -89,7 +89,6 @@ class Metadata2: @classmethod def parse_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: - # Here we allow that a store may return an already-parsed metadata object, # or a string of JSON that we will parse here. 
We allow for an already-parsed # object to accommodate a consolidated metadata store, where all the metadata for diff --git a/zarr/n5.py b/zarr/n5.py index 7e73905527..44b44e69e2 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -72,21 +72,18 @@ class N5Store(NestedDirectoryStore): def __getitem__(self, key: str) -> bytes: if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, n5_attrs_key) value = group_metadata_to_zarr(self._load_n5_attrs(key_new)) return json_dumps(value) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, n5_attrs_key) top_level = key == zarr_array_meta_key value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level) return json_dumps(value) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, n5_attrs_key) value = attrs_to_zarr(self._load_n5_attrs(key_new)) @@ -104,9 +101,7 @@ def __getitem__(self, key: str) -> bytes: return super().__getitem__(key_new) def __setitem__(self, key: str, value: Any): - if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, n5_attrs_key) n5_attrs = self._load_n5_attrs(key_new) @@ -115,7 +110,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, n5_attrs_key) top_level = key == zarr_array_meta_key n5_attrs = self._load_n5_attrs(key_new) @@ -123,7 +117,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, n5_attrs_key) n5_attrs = self._load_n5_attrs(key_new) @@ -166,9 +159,7 @@ def __delitem__(self, key: str): super().__delitem__(key_new) def __contains__(self, key): - if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, n5_attrs_key) if key_new not in self: return False @@ -176,18 +167,15 @@ def __contains__(self, key): return "dimensions" not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, n5_attrs_key) # array if attributes contain 'dimensions' return "dimensions" in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, n5_attrs_key) return self._contains_attrs(key_new) elif is_chunk_key(key): - key_new = invert_chunk_coords(key) else: key_new = key @@ -198,7 +186,6 @@ def __eq__(self, other): return isinstance(other, N5Store) and self.path == other.path def listdir(self, path: Optional[str] = None): - if path is not None: path = invert_chunk_coords(path) path = cast(str, path) @@ -208,7 +195,6 @@ def listdir(self, path: Optional[str] = None): children = super().listdir(path=path) if self._is_array(path): - # replace n5 attribute file with respective zarr attribute files children.remove(n5_attrs_key) children.append(zarr_array_meta_key) @@ -234,7 +220,6 @@ def listdir(self, path: Optional[str] = None): return sorted(new_children) elif self._is_group(path): - # replace n5 attribute file with respective zarr attribute files children.remove(n5_attrs_key) children.append(zarr_group_meta_key) @@ -244,7 +229,6 @@ def listdir(self, path: Optional[str] = None): return sorted(children) else: - return children def _load_n5_attrs(self, path: str) -> Dict[str, Any]: @@ -255,7 +239,6 @@ def _load_n5_attrs(self, path: str) -> Dict[str, Any]: return {} def _is_group(self, path: str): - if path is None: attrs_key = n5_attrs_key else: @@ -265,7 +248,6 @@ def _is_group(self, 
path: str): return len(n5_attrs) > 0 and "dimensions" not in n5_attrs def _is_array(self, path: str): - if path is None: attrs_key = n5_attrs_key else: @@ -274,7 +256,6 @@ def _is_array(self, path: str): return "dimensions" in self._load_n5_attrs(attrs_key) def _contains_attrs(self, path: str): - if path is None: attrs_key = n5_attrs_key else: @@ -376,21 +357,18 @@ def _normalize_key(self, key: str): def __getitem__(self, key: str) -> bytes: if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, self._group_meta_key) value = group_metadata_to_zarr(self._load_n5_attrs(key_new)) return json_dumps(value) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, self._array_meta_key) top_level = key == zarr_array_meta_key value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level) return json_dumps(value) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, self._attrs_key) value = attrs_to_zarr(self._load_n5_attrs(key_new)) @@ -409,7 +387,6 @@ def __getitem__(self, key: str) -> bytes: def __setitem__(self, key: str, value: Any): if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, self._group_meta_key) n5_attrs = self._load_n5_attrs(key_new) @@ -418,7 +395,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, self._array_meta_key) top_level = key == zarr_array_meta_key n5_attrs = self._load_n5_attrs(key_new) @@ -427,7 +403,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, self._attrs_key) n5_attrs = self._load_n5_attrs(key_new) @@ -456,7 +431,6 @@ def __setitem__(self, key: str, value: Any): super().__setitem__(key_new, value) def __delitem__(self, key: str): - if key.endswith(zarr_group_meta_key): key_new = key.replace(zarr_group_meta_key, self._group_meta_key) elif key.endswith(zarr_array_meta_key): @@ -471,7 +445,6 @@ def __delitem__(self, key: str): def __contains__(self, key: Any): if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, self._group_meta_key) if key_new not in self: return False @@ -479,13 +452,11 @@ def __contains__(self, key: Any): return "dimensions" not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, self._array_meta_key) # array if attributes contain 'dimensions' return "dimensions" in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, self._attrs_key) return self._contains_attrs(key_new) @@ -508,7 +479,6 @@ def listdir(self, path: Optional[str] = None): # doesn't provide. 
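
The `listdir` override being reformatted here has to re-synthesize zarr metadata keys because N5 keeps a single attributes.json per directory instead of zarr's .zarray/.zgroup/.zattrs trio. A sketch of the user-facing effect (the path is hypothetical):

    import zarr
    from zarr.n5 import N5Store

    store = N5Store("data/example.n5")
    z = zarr.zeros((100, 100), chunks=(10, 10), store=store, overwrite=True)
    z[:] = 42
    # On disk each directory holds one attributes.json; its "dimensions" entry
    # is what _is_array()/_is_group() use to tell arrays and groups apart, and
    # what listdir() uses to decide which zarr metadata keys to report.
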
children = super().listdir(path=path) if self._is_array(path): - # replace n5 attribute file with respective zarr attribute files children.remove(self._array_meta_key) children.append(zarr_array_meta_key) @@ -532,7 +502,6 @@ def listdir(self, path: Optional[str] = None): return sorted(new_children) elif self._is_group(path): - # replace n5 attribute file with respective zarr attribute files children.remove(self._group_meta_key) children.append(zarr_group_meta_key) @@ -550,7 +519,6 @@ def _load_n5_attrs(self, path: str): return {} def _is_group(self, path: Optional[str]): - if path is None: attrs_key = self._attrs_key else: @@ -560,7 +528,6 @@ def _is_group(self, path: Optional[str]): return len(n5_attrs) > 0 and "dimensions" not in n5_attrs def _is_array(self, path: Optional[str]): - if path is None: attrs_key = self._attrs_key else: @@ -569,7 +536,6 @@ def _is_array(self, path: Optional[str]): return "dimensions" in self._load_n5_attrs(attrs_key) def _contains_attrs(self, path: Optional[str]): - if path is None: attrs_key = self._attrs_key else: @@ -712,7 +678,6 @@ def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]: - if compressor_config is None: return {"type": "raw"} else: @@ -726,19 +691,16 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict n5_config = {"type": codec_id} if codec_id == "bz2": - n5_config["type"] = "bzip2" n5_config["blockSize"] = _compressor_config["level"] elif codec_id == "blosc": - n5_config["cname"] = _compressor_config["cname"] n5_config["clevel"] = _compressor_config["clevel"] n5_config["shuffle"] = _compressor_config["shuffle"] n5_config["blocksize"] = _compressor_config["blocksize"] elif codec_id == "lzma": - # Switch to XZ for N5 if we are using the default XZ format. # Note: 4 is the default, which is lzma.CHECK_CRC64. 
if _compressor_config["format"] == 1 and _compressor_config["check"] in [-1, 4]: @@ -760,50 +722,42 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict n5_config["preset"] = 6 elif codec_id == "zlib": - n5_config["type"] = "gzip" n5_config["level"] = _compressor_config["level"] n5_config["useZlib"] = True elif codec_id == "gzip": - n5_config["type"] = "gzip" n5_config["level"] = _compressor_config["level"] n5_config["useZlib"] = False else: - n5_config.update({k: v for k, v in _compressor_config.items() if k != "type"}) return n5_config def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]: - codec_id = compressor_config["type"] zarr_config = {"id": codec_id} if codec_id == "bzip2": - zarr_config["id"] = "bz2" zarr_config["level"] = compressor_config["blockSize"] elif codec_id == "blosc": - zarr_config["cname"] = compressor_config["cname"] zarr_config["clevel"] = compressor_config["clevel"] zarr_config["shuffle"] = compressor_config["shuffle"] zarr_config["blocksize"] = compressor_config["blocksize"] elif codec_id == "lzma": - zarr_config["format"] = compressor_config["format"] zarr_config["check"] = compressor_config["check"] zarr_config["preset"] = compressor_config["preset"] zarr_config["filters"] = compressor_config["filters"] elif codec_id == "xz": - zarr_config["id"] = "lzma" zarr_config["format"] = 1 # lzma.FORMAT_XZ zarr_config["check"] = -1 @@ -811,7 +765,6 @@ def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dic zarr_config["filters"] = None elif codec_id == "gzip": - if "useZlib" in compressor_config and compressor_config["useZlib"]: zarr_config["id"] = "zlib" zarr_config["level"] = compressor_config["level"] @@ -820,22 +773,18 @@ def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dic zarr_config["level"] = compressor_config["level"] elif codec_id == "raw": - return None else: - zarr_config.update({k: v for k, v in compressor_config.items() if k != "type"}) return zarr_config class N5ChunkWrapper(Codec): - codec_id = "n5_wrapper" def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): - self.dtype = np.dtype(dtype) self.chunk_shape = tuple(chunk_shape) # is the dtype a little endian format? 
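
The two translation helpers reformatted above are meant to be symmetric for the common codecs; a quick round-trip check, with illustrative values:

    from zarr.n5 import compressor_config_to_n5, compressor_config_to_zarr

    # numcodecs' Zlib maps onto N5's "gzip" type, tagged with useZlib=True
    zarr_cfg = {"id": "zlib", "level": 6}
    n5_cfg = compressor_config_to_n5(zarr_cfg)
    assert n5_cfg == {"type": "gzip", "level": 6, "useZlib": True}
    # ...and that tag is what lets the reverse mapping restore "zlib"
    assert compressor_config_to_zarr(n5_cfg) == {"id": "zlib", "level": 6}

    # "no compression" round-trips through N5's "raw" type
    assert compressor_config_to_n5(None) == {"type": "raw"}
    assert compressor_config_to_zarr({"type": "raw"}) is None
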
@@ -860,7 +809,6 @@ def get_config(self): return config def encode(self, chunk): - assert chunk.flags.c_contiguous header = self._create_header(chunk) @@ -872,12 +820,10 @@ def encode(self, chunk): return header + chunk.tobytes(order="A") def decode(self, chunk, out=None) -> bytes: - len_header, chunk_shape = self._read_header(chunk) chunk = chunk[len_header:] if out is not None: - # out should only be used if we read a complete chunk assert chunk_shape == self.chunk_shape, "Expected chunk of shape {}, found {}".format( self.chunk_shape, chunk_shape @@ -895,7 +841,6 @@ def decode(self, chunk, out=None) -> bytes: return out else: - if self._compressor: chunk = self._compressor.decode(chunk) @@ -915,7 +860,6 @@ def decode(self, chunk, out=None) -> bytes: @staticmethod def _create_header(chunk): - mode = struct.pack(">H", 0) num_dims = struct.pack(">H", len(chunk.shape)) shape = b"".join(struct.pack(">I", d) for d in chunk.shape[::-1]) @@ -924,7 +868,6 @@ def _create_header(chunk): @staticmethod def _read_header(chunk): - num_dims = struct.unpack(">H", chunk[2:4])[0] shape = tuple( struct.unpack(">I", chunk[i : i + 4])[0] for i in range(4, num_dims * 4 + 4, 4) diff --git a/zarr/storage.py b/zarr/storage.py index a7426e5345..585417f59c 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -483,7 +483,6 @@ def _init_array_metadata( dimension_separator=None, storage_transformers=(), ): - store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) @@ -688,7 +687,6 @@ def _init_group_metadata( path: Optional[str] = None, chunk_store: Optional[StoreLike] = None, ): - store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) @@ -1056,7 +1054,6 @@ class DirectoryStore(Store): """ def __init__(self, path, normalize_keys=False, dimension_separator=None): - # guard conditions path = os.path.abspath(path) if os.path.exists(path) and not os.path.isdir(path): @@ -1416,7 +1413,6 @@ def _normalize_key(self, key): def getitems( self, keys: Sequence[str], *, contexts: Mapping[str, Context] ) -> Mapping[str, Any]: - keys_transformed = [self._normalize_key(key) for key in keys] results = self.map.getitems(keys_transformed, on_error="omit") # The function calling this method may not recognize the transformed keys @@ -1770,7 +1766,6 @@ def __init__( mode="a", dimension_separator=None, ): - # store properties path = os.path.abspath(path) self.path = path diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index 7dd5b340a2..2d9553971b 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -30,7 +30,6 @@ def init_attributes(self, store, read_only=False, cache=True, zarr_version=2): return Attributes(store, key=root + "attrs", read_only=read_only, cache=cache) def test_storage(self, zarr_version): - store = _init_store(zarr_version) root = ".z" if zarr_version == 2 else meta_root attrs_key = root + "attrs" @@ -50,7 +49,6 @@ def test_storage(self, zarr_version): assert dict(foo="bar", baz=42) == d def test_utf8_encoding(self, zarr_version): - project_root = pathlib.Path(zarr.__file__).resolve().parent.parent fixdir = project_root / "fixture" testdir = fixdir / "utf8attrs" @@ -67,7 +65,6 @@ def test_utf8_encoding(self, zarr_version): assert fixture["utf8attrs"].attrs.asdict() == dict(foo="た") def test_get_set_del_contains(self, zarr_version): - store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) assert "foo" not in a @@ -84,7 +81,6 @@ def test_get_set_del_contains(self, zarr_version): a["foo"] 
def test_update_put(self, zarr_version): - store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) assert "foo" not in a @@ -102,7 +98,6 @@ def test_update_put(self, zarr_version): assert "baz" not in a def test_iterators(self, zarr_version): - store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) assert 0 == len(a) @@ -232,7 +227,6 @@ def test_caching_on(self, zarr_version): assert get_cnt == store.counter["__getitem__", attrs_key] def test_caching_off(self, zarr_version): - # setup store store = CountingDict() if zarr_version == 2 else CountingDictV3() attrs_key = ".zattrs" if zarr_version == 2 else "meta/root/attrs" diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 389ce90a9d..7d190adc2c 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -57,7 +57,6 @@ def _init_creation_kwargs(zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_array(path_type, zarr_version): - store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) @@ -86,7 +85,6 @@ def test_open_array(path_type, zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_group(path_type, zarr_version): - store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) @@ -210,7 +208,6 @@ def test_tree(zarr_version): def test_consolidate_metadata( with_chunk_store, zarr_version, listable, monkeypatch, stores_from_path ): - # setup initial data if stores_from_path: store = tempfile.mkdtemp() @@ -399,7 +396,6 @@ def test_save_array_separator(tmpdir, options): class TestCopyStore(unittest.TestCase): - _version = 2 def setUp(self): @@ -536,7 +532,6 @@ def test_if_exists(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestCopyStoreV3(TestCopyStore): - _version = 3 def setUp(self): @@ -557,7 +552,6 @@ def test_mismatched_store_versions(self): def check_copied_array(original, copied, without_attrs=False, expect_props=None): - # setup source_h5py = original.__module__.startswith("h5py.") dest_h5py = copied.__module__.startswith("h5py.") @@ -621,7 +615,6 @@ def check_copied_array(original, copied, without_attrs=False, expect_props=None) def check_copied_group(original, copied, without_attrs=False, expect_props=None, shallow=False): - # setup if expect_props is None: expect_props = dict() diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index b44c6379fd..8e586abfff 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -74,7 +74,6 @@ def _init_creation_kwargs(zarr_version, at_root=True): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_array(zarr_version, at_root): - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version kwargs = _init_creation_kwargs(zarr_version, at_root) @@ -213,7 +212,6 @@ def test_full_additional_dtypes(zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_open_array(zarr_version, at_root, dimension_separator): - store = "data/array.zarr" kwargs = _init_creation_kwargs(zarr_version, at_root) @@ -329,7 +327,6 @@ def test_open_array(zarr_version, at_root, dimension_separator): def test_open_array_none(): - # open with both store and zarr_version = None z = open_array(mode="w", shape=100, chunks=10) assert isinstance(z, Array) @@ -339,7 
+336,6 @@ def test_open_array_none(): @pytest.mark.parametrize("dimension_separator", [".", "/", None]) @pytest.mark.parametrize("zarr_version", _VERSIONS2) def test_open_array_infer_separator_from_store(zarr_version, dimension_separator): - if zarr_version == 3: StoreClass = DirectoryStoreV3 path = "data" @@ -370,7 +366,6 @@ def test_open_array_infer_separator_from_store(zarr_version, dimension_separator # TODO: N5 support for v3 @pytest.mark.parametrize("zarr_version", [None, 2]) def test_open_array_n5(zarr_version): - store = "data/array.zarr" kwargs = _init_creation_kwargs(zarr_version) @@ -409,7 +404,6 @@ def test_open_array_n5(zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_open_array_dict_store(zarr_version, at_root): - # dict will become a KVStore store = dict() kwargs = _init_creation_kwargs(zarr_version, at_root) @@ -503,7 +497,6 @@ def test_empty_like(zarr_version, at_root): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_zeros_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -529,7 +522,6 @@ def test_zeros_like(zarr_version, at_root): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_ones_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -556,7 +548,6 @@ def test_ones_like(zarr_version, at_root): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_full_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version diff --git a/zarr/tests/test_dim_separator.py b/zarr/tests/test_dim_separator.py index 987852dfd0..0a5814e65f 100644 --- a/zarr/tests/test_dim_separator.py +++ b/zarr/tests/test_dim_separator.py @@ -46,7 +46,6 @@ def dataset(tmpdir, request): static = project_root / "fixture" / suffix if not static.exists(): # pragma: no cover - if "nested" in which: # No way to reproduce the nested_legacy file via code generator = NestedDirectoryStore diff --git a/zarr/tests/test_filters.py b/zarr/tests/test_filters.py index d55be9145f..fc63cdca8d 100644 --- a/zarr/tests/test_filters.py +++ b/zarr/tests/test_filters.py @@ -30,7 +30,6 @@ def test_array_with_delta_filter(): - # setup astype = "u1" dtype = "i8" @@ -38,7 +37,6 @@ def test_array_with_delta_filter(): data = np.arange(100, dtype=dtype) for compressor in compressors: - a = array(data, chunks=10, compressor=compressor, filters=filters) # check round-trip @@ -57,7 +55,6 @@ def test_array_with_delta_filter(): def test_array_with_astype_filter(): - # setup encode_dtype = "i1" decode_dtype = "i8" @@ -68,7 +65,6 @@ def test_array_with_astype_filter(): data = np.arange(shape, dtype=decode_dtype) for compressor in compressors: - a = array(data, chunks=chunks, compressor=compressor, filters=filters) # check round-trip @@ -88,7 +84,6 @@ def test_array_with_astype_filter(): def test_array_with_scaleoffset_filter(): - # setup astype = "u1" dtype = "f8" @@ -97,7 +92,6 @@ def test_array_with_scaleoffset_filter(): data = np.linspace(1000, 1001, 34, dtype="f8") for compressor in compressors: - a = array(data, chunks=5, 
compressor=compressor, filters=filters) # check round-trip @@ -116,7 +110,6 @@ def test_array_with_scaleoffset_filter(): def test_array_with_quantize_filter(): - # setup dtype = "f8" digits = 3 @@ -125,7 +118,6 @@ def test_array_with_quantize_filter(): data = np.linspace(0, 1, 34, dtype=dtype) for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip @@ -144,14 +136,12 @@ def test_array_with_quantize_filter(): def test_array_with_packbits_filter(): - # setup flt = PackBits() filters = [flt] data = np.random.randint(0, 2, size=100, dtype=bool) for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip @@ -170,14 +160,12 @@ def test_array_with_packbits_filter(): def test_array_with_categorize_filter(): - # setup data = np.random.choice(["foo", "bar", "baz"], size=100) flt = Categorize(dtype=data.dtype, labels=["foo", "bar", "baz"]) filters = [flt] for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index cbf59c55c3..6c08d7b88a 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -1085,7 +1085,6 @@ def test_paths(self): g1.store.close() def test_pickle(self): - # setup group g = self.create_group() d = g.create_dataset("foo/bar", shape=100, chunks=10) @@ -1113,7 +1112,6 @@ def test_pickle(self): g2.store.close() def test_context_manager(self): - with self.create_group() as g: d = g.create_dataset("foo/bar", shape=100, chunks=10) d[:] = np.arange(100) @@ -1375,7 +1373,6 @@ def create_store(): return store, None def test_context_manager(self): - with self.create_group() as g: store = g.store d = g.create_dataset("foo/bar", shape=100, chunks=10) diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index 8a34c1e715..f10360e8b7 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -17,7 +17,6 @@ def test_normalize_integer_selection(): - assert 1 == normalize_integer_selection(1, 100) assert 99 == normalize_integer_selection(-1, 100) with pytest.raises(IndexError): @@ -29,7 +28,6 @@ def test_normalize_integer_selection(): def test_replace_ellipsis(): - # 1D, single item assert (0,) == replace_ellipsis(0, (100,)) @@ -68,7 +66,6 @@ def test_replace_ellipsis(): def test_get_basic_selection_0d(): - # setup a = np.array(42) z = zarr.create(shape=a.shape, dtype=a.dtype, fill_value=None) @@ -191,7 +188,6 @@ def _test_get_basic_selection(a, z, selection): # noinspection PyStatementEffect def test_get_basic_selection_1d(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -264,7 +260,6 @@ def test_get_basic_selection_1d(): # noinspection PyStatementEffect def test_get_basic_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -423,7 +418,6 @@ def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): def test_set_basic_selection_0d(): - # setup v = np.array(42) a = np.zeros_like(v) @@ -479,7 +473,6 @@ def _test_get_orthogonal_selection(a, z, selection): # noinspection PyStatementEffect def test_get_orthogonal_selection_1d_bool(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -502,7 +495,6 @@ def test_get_orthogonal_selection_1d_bool(): # noinspection PyStatementEffect def 
test_get_orthogonal_selection_1d_int(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -561,7 +553,6 @@ def _test_get_orthogonal_selection_2d(a, z, ix0, ix1): # noinspection PyStatementEffect def test_get_orthogonal_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -570,7 +561,6 @@ def test_get_orthogonal_selection_2d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -641,7 +631,6 @@ def _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2): def test_get_orthogonal_selection_3d(): - # setup a = np.arange(100000, dtype=int).reshape(200, 50, 10) z = zarr.create(shape=a.shape, chunks=(60, 20, 3), dtype=a.dtype) @@ -650,7 +639,6 @@ def test_get_orthogonal_selection_3d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -673,7 +661,6 @@ def test_get_orthogonal_selection_3d(): def test_orthogonal_indexing_edge_cases(): - a = np.arange(6).reshape(1, 2, 3) z = zarr.create(shape=a.shape, chunks=(1, 2, 3), dtype=a.dtype) z[:] = a @@ -706,7 +693,6 @@ def _test_set_orthogonal_selection(v, a, z, selection): def test_set_orthogonal_selection_1d(): - # setup v = np.arange(1050, dtype=int) a = np.empty(v.shape, dtype=int) @@ -715,7 +701,6 @@ def test_set_orthogonal_selection_1d(): # test with different degrees of sparseness np.random.seed(42) for p in 0.5, 0.1, 0.01: - # boolean arrays ix = np.random.binomial(1, p, size=a.shape[0]).astype(bool) _test_set_orthogonal_selection(v, a, z, ix) @@ -734,7 +719,6 @@ def test_set_orthogonal_selection_1d(): def _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1): - selections = [ # index both axes with array (ix0, ix1), @@ -749,7 +733,6 @@ def _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1): def test_set_orthogonal_selection_2d(): - # setup v = np.arange(10000, dtype=int).reshape(1000, 10) a = np.empty_like(v) @@ -758,7 +741,6 @@ def test_set_orthogonal_selection_2d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -780,7 +762,6 @@ def test_set_orthogonal_selection_2d(): def _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2): - selections = ( # single value (84, 42, 4), @@ -807,7 +788,6 @@ def _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2): def test_set_orthogonal_selection_3d(): - # setup v = np.arange(100000, dtype=int).reshape(200, 50, 10) a = np.empty_like(v) @@ -816,7 +796,6 @@ def test_set_orthogonal_selection_3d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -888,7 +867,6 @@ def _test_get_coordinate_selection(a, z, selection): # noinspection PyStatementEffect def test_get_coordinate_selection_1d(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -932,7 +910,6 @@ def test_get_coordinate_selection_1d(): 
def test_get_coordinate_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -1027,7 +1004,6 @@ def test_set_coordinate_selection_1d(): def test_set_coordinate_selection_2d(): - # setup v = np.arange(10000, dtype=int).reshape(1000, 10) a = np.empty_like(v) @@ -1258,7 +1234,6 @@ def _test_get_mask_selection(a, z, selection): # noinspection PyStatementEffect def test_get_mask_selection_1d(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -1285,7 +1260,6 @@ def test_get_mask_selection_1d(): # noinspection PyStatementEffect def test_get_mask_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -1318,7 +1292,6 @@ def _test_set_mask_selection(v, a, z, selection): def test_set_mask_selection_1d(): - # setup v = np.arange(1050, dtype=int) a = np.empty_like(v) @@ -1338,7 +1311,6 @@ def test_set_mask_selection_1d(): def test_set_mask_selection_2d(): - # setup v = np.arange(10000, dtype=int).reshape(1000, 10) a = np.empty_like(v) @@ -1352,7 +1324,6 @@ def test_set_mask_selection_2d(): def test_get_selection_out(): - # basic selections a = np.arange(1050) z = zarr.create(shape=1050, chunks=100, dtype=a.dtype) @@ -1426,7 +1397,6 @@ def test_get_selection_out(): def test_get_selections_with_fields(): - a = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] a = np.array(a, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) z = zarr.create(shape=a.shape, chunks=2, dtype=a.dtype, fill_value=None) @@ -1444,7 +1414,6 @@ def test_get_selections_with_fields(): ] for fields in fields_fixture: - # total selection expect = a[fields] actual = z.get_basic_selection(Ellipsis, fields=fields) @@ -1534,7 +1503,6 @@ def test_get_selections_with_fields(): def test_set_selections_with_fields(): - v = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] v = np.array(v, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) a = np.empty_like(v) @@ -1553,7 +1521,6 @@ def test_set_selections_with_fields(): ] for fields in fields_fixture: - # currently multi-field assignment is not supported in numpy, so we won't support # it either if isinstance(fields, list) and len(fields) > 1: @@ -1567,7 +1534,6 @@ def test_set_selections_with_fields(): z.set_mask_selection([True, False, True], v, fields=fields) else: - if isinstance(fields, list) and len(fields) == 1: # work around numpy does not support multi-field assignment even if there # is only one field @@ -1752,7 +1718,6 @@ def test_accessed_chunks(shape, chunks, ops): z = zarr.create(shape=shape, chunks=chunks, store=store) for ii, (optype, slices) in enumerate(ops): - # Resolve the slices into the accessed chunks for each dimension chunks_per_dim = [] for N, C, sl in zip(shape, chunks, slices): diff --git a/zarr/tests/test_info.py b/zarr/tests/test_info.py index 7fb6feb11b..96eae999f4 100644 --- a/zarr/tests/test_info.py +++ b/zarr/tests/test_info.py @@ -7,7 +7,6 @@ @pytest.mark.parametrize("array_size", [10, 15000]) def test_info(array_size): - # setup g = zarr.group(store=dict(), chunk_store=dict(), synchronizer=zarr.ThreadSynchronizer()) g.create_group("foo") diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py index db50560c8e..3e1e0f9d63 100644 --- a/zarr/tests/test_meta.py +++ b/zarr/tests/test_meta.py @@ -34,7 +34,6 @@ def assert_json_equal(expect, actual): def test_encode_decode_array_1(): - meta = dict( shape=(100,), 
chunks=(10,), @@ -76,7 +75,6 @@ def test_encode_decode_array_1(): def test_encode_decode_array_2(): - # some variations df = Delta(astype=" Tupl def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]: - # convenience API for object arrays if inspect.isclass(dtype): dtype = dtype.__name__ # type: ignore @@ -245,7 +244,6 @@ def is_total_slice(item, shape: Tuple[int]) -> bool: def normalize_resize_args(old_shape, *args): - # normalize new shape argument if len(args) == 1: new_shape = args[0] @@ -294,7 +292,6 @@ def normalize_dimension_separator(sep: Optional[str]) -> Optional[str]: def normalize_fill_value(fill_value, dtype: np.dtype): - if fill_value is None or dtype.hasobject: # no fill value pass @@ -332,7 +329,6 @@ def normalize_fill_value(fill_value, dtype: np.dtype): def normalize_storage_path(path: Union[str, bytes, None]) -> str: - # handle bytes if isinstance(path, bytes): path = str(path, "ascii") @@ -342,7 +338,6 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str: path = str(path) if path: - # convert backslash to forward slash path = path.replace("\\", "/") @@ -506,7 +501,6 @@ def tree_widget(group, expand, level): class TreeViewer: def __init__(self, group, expand=False, level=None): - self.group = group self.expand = expand self.level = level From 54e31e9814a41cd7fd81255695971ce5e700ee3e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 7 Dec 2023 22:29:28 +0100 Subject: [PATCH 0392/1078] Use list comprehension where applicable (#1555) Even if this is only a test, list comprehensions are faster than repeatedly calling append(). Also use tuple instead of list when possible. Co-authored-by: Davis Bennett --- zarr/tests/test_indexing.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index f10360e8b7..af046e9d28 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -1719,17 +1719,15 @@ def test_accessed_chunks(shape, chunks, ops): for ii, (optype, slices) in enumerate(ops): # Resolve the slices into the accessed chunks for each dimension - chunks_per_dim = [] - for N, C, sl in zip(shape, chunks, slices): - chunk_ind = np.arange(N, dtype=int)[sl] // C - chunks_per_dim.append(np.unique(chunk_ind)) + chunks_per_dim = [ + np.unique(np.arange(N, dtype=int)[sl] // C) for N, C, sl in zip(shape, chunks, slices) + ] # Combine and generate the cartesian product to determine the chunks keys that # will be accessed - chunks_accessed = [] - for comb in itertools.product(*chunks_per_dim): - chunks_accessed.append(".".join([str(ci) for ci in comb])) + chunks_accessed = ( + ".".join([str(ci) for ci in comb]) for comb in itertools.product(*chunks_per_dim) ) counts_before = store.counter.copy() # Perform the operation From 7d2c9bf5ce4c998d95630d9a1202e27e58926838 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 21:35:23 +0000 Subject: [PATCH 0393/1078] Bump numcodecs from 0.11.0 to 0.12.1 (#1580) Bumps [numcodecs](https://github.com/zarr-developers/numcodecs) from 0.11.0 to 0.12.1. 
- [Release notes](https://github.com/zarr-developers/numcodecs/releases) - [Changelog](https://github.com/zarr-developers/numcodecs/blob/main/docs/release.rst) - [Commits](https://github.com/zarr-developers/numcodecs/compare/v0.11.0...v0.12.1) --- updated-dependencies: - dependency-name: numcodecs dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index e2be6eb825..afea816d87 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -1,7 +1,7 @@ # library requirements asciitree==0.3.3 fasteners==0.19 -numcodecs==0.11.0 +numcodecs==0.12.1 msgpack-python==0.5.6 setuptools-scm==8.0.4 # test requirements From 10dee6ba0c0ce6ab29333e7a50f0afa4f6de06ca Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 7 Dec 2023 22:40:21 +0100 Subject: [PATCH 0394/1078] Use format specification mini-language to format string (#1558) Co-authored-by: Joe Hamman --- zarr/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zarr/storage.py b/zarr/storage.py index 585417f59c..5ba8071395 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -784,7 +784,7 @@ def __len__(self): return len(self._mutable_mapping) def __repr__(self): - return f"<{self.__class__.__name__}: \n{repr(self._mutable_mapping)}\n at {hex(id(self))}>" + return f"<{self.__class__.__name__}: \n{self._mutable_mapping!r}\n at {id(self):#x}>" def __eq__(self, other): if isinstance(other, KVStore): From 40a6e817b17e1fe600b188478ba38fb6978a5273 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 7 Dec 2023 22:45:50 +0100 Subject: [PATCH 0395/1078] Single startswith() call instead of multiple ones (#1556) It's faster and probably more readable. Co-authored-by: Davis Bennett Co-authored-by: Joe Hamman --- zarr/_storage/store.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 80e4ad8f75..667ca38147 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -221,9 +221,8 @@ def _validate_key(self, key: str): ) if ( - not key.startswith("data/") - and (not key.startswith("meta/")) - and (not key == "zarr.json") + not key.startswith(("data/", "meta/")) + and key != "zarr.json" # TODO: Possibly allow key == ".zmetadata" too if we write a # consolidated metadata spec corresponding to this? ): From 5954ff95803c1343d022f6181ed397c7095f4a0e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 21:46:14 +0000 Subject: [PATCH 0396/1078] Bump pymongo from 4.5.0 to 4.6.1 (#1585) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.5.0 to 4.6.1. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.5.0...4.6.1) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index f3ea80a546..5a3340a282 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.16.0 # pyup: ignore redis==5.0.1 types-redis types-setuptools -pymongo==4.5.0 +pymongo==4.6.1 # optional test requirements coverage pytest-cov==4.1.0 From 6ad7b0e2ddabdcc5087e23f003edf123d21e9a25 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 8 Dec 2023 00:07:10 +0100 Subject: [PATCH 0397/1078] Move codespell options around (#1196) Starting with codespell 2.2.2, options can be specified in `pyproject.toml` in addition to `setup.cfg`: https://github.com/codespell-project/codespell#using-a-config-file Specifying options in a config file instead of command line options in `.pre-commit-config.yaml` ensures codespell uses the same options when run as a pre-commit hook or from the command line in the repository root directory. --- .pre-commit-config.yaml | 1 - pyproject.toml | 5 +++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e985d24000..029dcda58f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,7 +21,6 @@ repos: rev: v2.2.5 hooks: - id: codespell - args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo,zar", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/pyproject.toml b/pyproject.toml index 22ea19f28f..36a0d896ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -137,3 +137,8 @@ filterwarnings = [ "ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning", "ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning", ] + + +[tool.codespell] +ignore-words-list = "ba,ihs,kake,nd,noe,nwo,te,fo,zar" +skip = 'fixture,.git' From cf32382b9a228eaaafe30ab82d05b9303824a783 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 15:19:14 +0100 Subject: [PATCH 0398/1078] Bump fsspec from 2023.10.0 to 2023.12.1 (#1600) * Bump fsspec from 2023.10.0 to 2023.12.1 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.10.0 to 2023.12.1. - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.10.0...2023.12.1) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-minor ...
Signed-off-by: dependabot[bot] * Update s3fs as well * Fix s3fs --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 5a3340a282..13385a243a 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.1.0 pytest-doctestplus==1.0.0 pytest-timeout==2.2.0 h5py==3.10.0 -fsspec==2023.10.0 -s3fs==2023.10.0 +fsspec==2023.12.1 +s3fs==2023.12.1 moto[server]>=4.0.8 From 4d79cfc84f7f3914a04d9468666685520cc21276 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 8 Dec 2023 16:41:51 +0000 Subject: [PATCH 0399/1078] Add type hints to zarr.create (#1536) * Add type hints to zarr.create * Use protocol for MetaArray * Use protocol for Synchronizer * Fix Path typing * Add release note * Fix dim separator typing * Ignore ... in coverage reporting * Fix chunk typing --------- Co-authored-by: Davis Bennett --- docs/release.rst | 6 ++++++ pyproject.toml | 1 + zarr/_storage/store.py | 3 ++- zarr/creation.py | 46 +++++++++++++++++++++++------------------ zarr/storage.py | 2 +- zarr/sync.py | 12 +++++++++-- zarr/tests/test_core.py | 8 ++++--- zarr/types.py | 13 ++++++++++++ zarr/util.py | 5 +++-- 9 files changed, 67 insertions(+), 29 deletions(-) create mode 100644 zarr/types.py diff --git a/docs/release.rst b/docs/release.rst index 842c36e290..c18e0b8c20 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,12 @@ Release notes Unreleased ---------- +Enhancements +~~~~~~~~~~~~ + +* Added type hints to ``zarr.creation.create()``. + By :user:`David Stansby ` :issue:`1536`. + Docs ~~~~ diff --git a/pyproject.toml b/pyproject.toml index 36a0d896ea..4b7fef6003 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,7 @@ Homepage = "https://github.com/zarr-developers/zarr-python" exclude_lines = [ "pragma: no cover", "pragma: ${PY_MAJOR_VERSION} no cover", + '.*\.\.\.' # Ignore "..." 
lines ] [tool.coverage.run] diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 667ca38147..09f0b68602 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -9,6 +9,7 @@ from zarr.meta import Metadata2, Metadata3 from zarr.util import normalize_storage_path from zarr.context import Context +from zarr.types import ZARR_VERSION # v2 store keys array_meta_key = ".zarray" @@ -19,7 +20,7 @@ meta_root = "meta/root/" data_root = "data/root/" -DEFAULT_ZARR_VERSION = 2 +DEFAULT_ZARR_VERSION: ZARR_VERSION = 2 v3_api_available = os.environ.get("ZARR_V3_EXPERIMENTAL_API", "0").lower() not in ["0", "false"] diff --git a/zarr/creation.py b/zarr/creation.py index 6227f90b7b..d4f570895a 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -1,7 +1,10 @@ -from typing import Optional +from collections.abc import MutableMapping +from typing import Optional, Tuple, Union, Sequence from warnings import warn import numpy as np +import numpy.typing as npt +from numcodecs.abc import Codec from numcodecs.registry import codec_registry from zarr._storage.store import DEFAULT_ZARR_VERSION @@ -19,32 +22,35 @@ normalize_storage_path, normalize_store_arg, ) +from zarr._storage.store import StorageTransformer +from zarr.sync import Synchronizer +from zarr.types import ZARR_VERSION, DIMENSION_SEPARATOR, MEMORY_ORDER, MetaArray, PathLike from zarr.util import normalize_dimension_separator def create( - shape, - chunks=True, - dtype=None, + shape: Union[int, Tuple[int, ...]], + chunks: Union[int, Tuple[int, ...], bool] = True, + dtype: Optional[npt.DTypeLike] = None, compressor="default", fill_value: Optional[int] = 0, - order="C", - store=None, - synchronizer=None, - overwrite=False, - path=None, - chunk_store=None, - filters=None, - cache_metadata=True, - cache_attrs=True, - read_only=False, - object_codec=None, - dimension_separator=None, - write_empty_chunks=True, + order: MEMORY_ORDER = "C", + store: Optional[Union[str, MutableMapping]] = None, + synchronizer: Optional[Synchronizer] = None, + overwrite: bool = False, + path: Optional[PathLike] = None, + chunk_store: Optional[MutableMapping] = None, + filters: Optional[Sequence[Codec]] = None, + cache_metadata: bool = True, + cache_attrs: bool = True, + read_only: bool = False, + object_codec: Optional[Codec] = None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, + write_empty_chunks: bool = True, *, - zarr_version=None, - meta_array=None, - storage_transformers=(), + zarr_version: Optional[ZARR_VERSION] = None, + meta_array: Optional[MetaArray] = None, + storage_transformers: Sequence[StorageTransformer] = (), **kwargs, ): """Create an array. 
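To illustrate what the new annotations on ``create()`` accept at a call site, here is a minimal sketch (not part of the patch; the store path and array parameters are hypothetical, chosen only to exercise the typed arguments):

```python
import numpy as np
import zarr

# chunks may be an int, a tuple of ints, or True (auto-chunking);
# dimension_separator is now typed as Literal[".", "/"].
z = zarr.create(
    shape=(100, 100),
    chunks=(10, 10),
    dtype=np.float64,
    dimension_separator="/",
    store="example.zarr",  # hypothetical local path
)
z[:] = 0
```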
diff --git a/zarr/storage.py b/zarr/storage.py index 5ba8071395..1c3b39862a 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -40,6 +40,7 @@ from numcodecs.compat import ensure_bytes, ensure_text, ensure_contiguous_ndarray_like from numcodecs.registry import codec_registry from zarr.context import Context +from zarr.types import PathLike as Path from zarr.errors import ( MetadataError, @@ -105,7 +106,6 @@ default_compressor = Zlib() -Path = Union[str, bytes, None] # allow MutableMapping for backwards compatibility StoreLike = Union[BaseStore, MutableMapping] diff --git a/zarr/sync.py b/zarr/sync.py index 49684a51ee..2e843f6557 100644 --- a/zarr/sync.py +++ b/zarr/sync.py @@ -1,11 +1,19 @@ import os from collections import defaultdict from threading import Lock +from typing import Protocol import fasteners -class ThreadSynchronizer: +class Synchronizer(Protocol): + """Base class for synchronizers.""" + + def __getitem__(self, item): + ... + + +class ThreadSynchronizer(Synchronizer): """Provides synchronization using thread locks.""" def __init__(self): @@ -24,7 +32,7 @@ def __setstate__(self, *args): self.__init__() -class ProcessSynchronizer: +class ProcessSynchronizer(Synchronizer): """Provides synchronization using file locks via the `fasteners `_ package. diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index f3ca73dea8..a3fde4050d 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -3,7 +3,7 @@ import sys import pickle import shutil -from typing import Any, Literal, Optional, Tuple, Union +from typing import Any, Literal, Optional, Tuple, Union, Sequence import unittest from itertools import zip_longest from tempfile import mkdtemp @@ -26,6 +26,7 @@ VLenUTF8, Zlib, ) +from numcodecs.abc import Codec from numcodecs.compat import ensure_bytes, ensure_ndarray from numcodecs.tests.common import greetings from numpy.testing import assert_array_almost_equal, assert_array_equal @@ -73,6 +74,7 @@ from zarr.tests.test_storage_v3 import DummyStorageTransfomer from zarr.util import buffer_size from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec, mktemp +from zarr.types import DIMENSION_SEPARATOR # noinspection PyMethodMayBeStatic @@ -82,8 +84,8 @@ class TestArray: root = "" path = "" compressor = Zlib(level=1) - filters = None - dimension_separator: Literal["/", ".", None] = None + filters: Optional[Sequence[Codec]] = None + dimension_separator: Optional[DIMENSION_SEPARATOR] = None cache_metadata = True cache_attrs = True partial_decompress: bool = False diff --git a/zarr/types.py b/zarr/types.py new file mode 100644 index 0000000000..1de270f25c --- /dev/null +++ b/zarr/types.py @@ -0,0 +1,13 @@ +from typing import Literal, Protocol, Union + +ZARR_VERSION = Literal[2, 3] +DIMENSION_SEPARATOR = Literal[".", "/"] +MEMORY_ORDER = Literal["C", "F"] + + +PathLike = Union[str, bytes, None] + + +class MetaArray(Protocol): + def __array_function__(self, func, types, args, kwargs): + ... 
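A side note on the typing change above: because `Synchronizer` (and `MetaArray`) are now `typing.Protocol` classes, third-party objects can also satisfy them structurally, without subclassing. A minimal sketch under that assumption (the `NullSynchronizer` class is hypothetical, for illustration only):

```python
import contextlib

from zarr.sync import Synchronizer


class NullSynchronizer:
    """Hands out no-op context managers instead of real locks."""

    def __getitem__(self, item):
        # nullcontext satisfies the "lock" contract without blocking
        return contextlib.nullcontext()


# Type-checks under mypy because Synchronizer is a Protocol:
sync: Synchronizer = NullSynchronizer()
```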
diff --git a/zarr/util.py b/zarr/util.py index df1cd9d409..f97094b93a 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -31,6 +31,7 @@ from numcodecs.ndarray_like import NDArrayLike from numcodecs.registry import codec_registry from numcodecs.blosc import cbuffer_sizes, cbuffer_metainfo +from zarr.types import DIMENSION_SEPARATOR KeyType = TypeVar("KeyType") ValueType = TypeVar("ValueType") @@ -284,9 +285,9 @@ def normalize_order(order: str) -> str: return order -def normalize_dimension_separator(sep: Optional[str]) -> Optional[str]: +def normalize_dimension_separator(sep: Optional[str]) -> Optional[DIMENSION_SEPARATOR]: if sep in (".", "/", None): - return sep + return cast(Optional[DIMENSION_SEPARATOR], sep) else: raise ValueError("dimension_separator must be either '.' or '/', found: %r" % sep) From 12abd4e434e816e9b8f19b1ceb89438fa4269737 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 18 Dec 2023 11:57:44 +0000 Subject: [PATCH 0400/1078] Remove unused mypy ignore comments (#1602) Co-authored-by: Davis Bennett --- pyproject.toml | 5 +++-- zarr/_storage/store.py | 4 ++-- zarr/_storage/v3_storage_transformers.py | 2 +- zarr/meta.py | 4 ++-- zarr/storage.py | 12 ++++++------ zarr/util.py | 2 +- 6 files changed, 15 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4b7fef6003..33e8573830 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -120,9 +120,10 @@ exclude = ''' ''' [tool.mypy] -python_version = "3.8" ignore_missing_imports = true -follow_imports = "silent" +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true [tool.pytest.ini_options] doctest_optionflags = [ diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 09f0b68602..36b596769a 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -642,10 +642,10 @@ def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: sfx = _get_metadata_suffix(store) array_meta_file = meta_dir + ".array" + sfx if array_meta_file in store: - store.erase(array_meta_file) # type: ignore + store.erase(array_meta_file) group_meta_file = meta_dir + ".group" + sfx if group_meta_file in store: - store.erase(group_meta_file) # type: ignore + store.erase(group_meta_file) def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]: diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py index ff31a7281c..3afc3823a3 100644 --- a/zarr/_storage/v3_storage_transformers.py +++ b/zarr/_storage/v3_storage_transformers.py @@ -351,7 +351,7 @@ def erase_prefix(self, prefix): def rmdir(self, path=None): path = normalize_storage_path(path) - _rmdir_from_keys_v3(self, path) # type: ignore + _rmdir_from_keys_v3(self, path) def __contains__(self, key): if self._is_data_key(key): diff --git a/zarr/meta.py b/zarr/meta.py index f23889f3ea..d9797e4754 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -234,8 +234,8 @@ def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> return np.array(v, dtype=dtype)[()] elif dtype.kind in "c": v = ( - cls.decode_fill_value(v[0], dtype.type().real.dtype), # type: ignore - cls.decode_fill_value(v[1], dtype.type().imag.dtype), # type: ignore + cls.decode_fill_value(v[0], dtype.type().real.dtype), + cls.decode_fill_value(v[1], dtype.type().imag.dtype), ) v = v[0] + 1j * v[1] return np.array(v, dtype=dtype)[()] diff --git a/zarr/storage.py b/zarr/storage.py index 1c3b39862a..aa27e98e6f 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -206,7 +206,7 @@ def 
rmdir(store: StoreLike, path: Path = None): store_version = getattr(store, "_store_version", 2) if hasattr(store, "rmdir") and store.is_erasable(): # type: ignore # pass through - store.rmdir(path) # type: ignore + store.rmdir(path) else: # slow version, delete one key at a time if store_version == 2: @@ -236,7 +236,7 @@ def listdir(store: BaseStore, path: Path = None): path = normalize_storage_path(path) if hasattr(store, "listdir"): # pass through - return store.listdir(path) # type: ignore + return store.listdir(path) else: # slow version, iterate through all keys warnings.warn( @@ -289,7 +289,7 @@ def getsize(store: BaseStore, path: Path = None) -> int: if hasattr(store, "getsize"): # pass through path = normalize_storage_path(path) - return store.getsize(path) # type: ignore + return store.getsize(path) elif isinstance(store, MutableMapping): return _getsize(store, path) else: @@ -627,7 +627,7 @@ def _init_array_metadata( key = _prefix_to_array_key(store, _path_to_prefix(path)) if hasattr(store, "_metadata_class"): - store[key] = store._metadata_class.encode_array_metadata(meta) # type: ignore + store[key] = store._metadata_class.encode_array_metadata(meta) else: store[key] = encode_array_metadata(meta) @@ -730,10 +730,10 @@ def _init_group_metadata( if store_version == 3: meta = {"attributes": {}} # type: ignore else: - meta = {} # type: ignore + meta = {} key = _prefix_to_group_key(store, _path_to_prefix(path)) if hasattr(store, "_metadata_class"): - store[key] = store._metadata_class.encode_group_metadata(meta) # type: ignore + store[key] = store._metadata_class.encode_group_metadata(meta) else: store[key] = encode_group_metadata(meta) diff --git a/zarr/util.py b/zarr/util.py index f97094b93a..54c389db69 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -183,7 +183,7 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]: # convenience API for object arrays if inspect.isclass(dtype): - dtype = dtype.__name__ # type: ignore + dtype = dtype.__name__ if isinstance(dtype, str): # allow ':' to delimit class from codec arguments tokens = dtype.split(":") From c2f5f0058d25eaa6695334e399b7b3a2d23f7a10 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 20:47:31 -0700 Subject: [PATCH 0401/1078] Bump actions/setup-python from 4.7.1 to 5.0.0 (#1605) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.7.1 to 5.0.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4.7.1...v5.0.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 3bd25bfbf7..8d8512294d 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v4.7.1 + - uses: actions/setup-python@v5.0.0 name: Install Python with: python-version: '3.8' From 490e0fe4e59f234cde85b103252acefa34927184 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 22:01:40 +0100 Subject: [PATCH 0402/1078] Bump github/codeql-action from 2 to 3 (#1609) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 7013f1784f..bb3d433629 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -42,7 +42,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -56,7 +56,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. 
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun @@ -69,4 +69,4 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 From b5f79ddfe7821cc9387fc4084bd7672f59215400 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 14:38:04 -0700 Subject: [PATCH 0403/1078] chore: update pre-commit hooks (#1448) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update pre-commit hooks updates: - https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit - [github.com/astral-sh/ruff-pre-commit: v0.0.224 → v0.1.8](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.224...v0.1.8) - [github.com/psf/black: 23.10.1 → 23.12.0](https://github.com/psf/black/compare/23.10.1...23.12.0) - [github.com/codespell-project/codespell: v2.2.5 → v2.2.6](https://github.com/codespell-project/codespell/compare/v2.2.5...v2.2.6) - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) - [github.com/pre-commit/mirrors-mypy: v1.3.0 → v1.7.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.3.0...v1.7.1) * Attempt to fix ruff * Use isinstance --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- .pre-commit-config.yaml | 14 ++++++-------- zarr/core.py | 2 +- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 029dcda58f..b4e7ab3ccf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,27 +6,25 @@ default_stages: [commit, push] default_language_version: python: python3 repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.224' + rev: 'v0.1.8' hooks: - id: ruff - # Respect `exclude` and `extend-exclude` settings. - args: ["--force-exclude"] - repo: https://github.com/psf/black - rev: 23.10.1 + rev: 23.12.0 hooks: - id: black - repo: https://github.com/codespell-project/codespell - rev: v2.2.5 + rev: v2.2.6 hooks: - id: codespell - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.3.0 + rev: v1.7.1 hooks: - id: mypy files: zarr diff --git a/zarr/core.py b/zarr/core.py index c07a31e95f..d22a9d79c3 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2536,7 +2536,7 @@ def hexdigest(self, hashname="sha1"): checksum = binascii.hexlify(self.digest(hashname=hashname)) # This is a bytes object on Python 3 and we want a str. 
- if type(checksum) is not str: + if not isinstance(checksum, str): checksum = checksum.decode("utf8") return checksum From e09ee149c4525213b07ace9eaf914ca9f552a703 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 10:01:20 -0700 Subject: [PATCH 0404/1078] chore: update pre-commit hooks (#1618) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.8 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.8...v0.1.9) - [github.com/psf/black: 23.12.0 → 23.12.1](https://github.com/psf/black/compare/23.12.0...23.12.1) - [github.com/pre-commit/mirrors-mypy: v1.7.1 → v1.8.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.1...v1.8.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b4e7ab3ccf..80d3439dc7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,11 +8,11 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.8' + rev: 'v0.1.9' hooks: - id: ruff - repo: https://github.com/psf/black - rev: 23.12.0 + rev: 23.12.1 hooks: - id: black - repo: https://github.com/codespell-project/codespell @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.1 + rev: v1.8.0 hooks: - id: mypy files: zarr From cd139895b45a2d7d347c29b703aa2f6775a1e7c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 10:24:59 -0700 Subject: [PATCH 0405/1078] Bump fsspec from 2023.12.1 to 2023.12.2 (#1606) * Bump fsspec from 2023.12.1 to 2023.12.2 Bumps [fsspec](https://github.com/fsspec/filesystem_spec) from 2023.12.1 to 2023.12.2. - [Commits](https://github.com/fsspec/filesystem_spec/compare/2023.12.1...2023.12.2) --- updated-dependencies: - dependency-name: fsspec dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Update requirements_dev_optional.txt --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- requirements_dev_optional.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 13385a243a..5916083cfc 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -18,6 +18,6 @@ pytest-cov==4.1.0 pytest-doctestplus==1.0.0 pytest-timeout==2.2.0 h5py==3.10.0 -fsspec==2023.12.1 -s3fs==2023.12.1 +fsspec==2023.12.2 +s3fs==2023.12.2 moto[server]>=4.0.8 From 5fb420fcfbabd484e663c78e55d04edd4ac9e486 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 11:32:02 +0100 Subject: [PATCH 0406/1078] Bump pytest-doctestplus from 1.0.0 to 1.1.0 (#1619) Bumps [pytest-doctestplus](https://github.com/scientific-python/pytest-doctestplus) from 1.0.0 to 1.1.0. 
- [Release notes](https://github.com/scientific-python/pytest-doctestplus/releases) - [Changelog](https://github.com/scientific-python/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/scientific-python/pytest-doctestplus/compare/v1.0.0...v1.1.0) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 5916083cfc..b4de5fd515 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==4.6.1 # optional test requirements coverage pytest-cov==4.1.0 -pytest-doctestplus==1.0.0 +pytest-doctestplus==1.1.0 pytest-timeout==2.2.0 h5py==3.10.0 fsspec==2023.12.2 From 435a7ca7306fc31dc880ed23631e3af61bf53d66 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 11:26:58 -0700 Subject: [PATCH 0407/1078] Bump pytest from 7.4.3 to 7.4.4 (#1622) --- requirements_dev_minimal.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index afea816d87..94d3fff8a6 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.12.1 msgpack-python==0.5.6 setuptools-scm==8.0.4 # test requirements -pytest==7.4.3 +pytest==7.4.4 From 6961fa9fb87ed73c85f979d84bfe65238933b5ae Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 20:31:31 -0800 Subject: [PATCH 0408/1078] chore: update pre-commit hooks (#1626) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.9 → v0.1.11](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.9...v0.1.11) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 80d3439dc7..340366ef53 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.1.9' + rev: 'v0.1.11' hooks: - id: ruff - repo: https://github.com/psf/black From ee518358d888caaabb6157c0498cb231d2ddb7a7 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 10 Jan 2024 06:47:36 -0800 Subject: [PATCH 0409/1078] Create TEAM.md (#1628) --- TEAM.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 TEAM.md diff --git a/TEAM.md b/TEAM.md new file mode 100644 index 0000000000..a95885ebe5 --- /dev/null +++ b/TEAM.md @@ -0,0 +1,25 @@ +## Active core-developers +- @joshmoore (Josh Moore) +- @jni (Juan Nunez-Iglesias) +- @rabernat (Ryan Abernathey) +- @jhamman (Joe Hamman) +- @d-v-b (Davis Bennett) +- @jakirkham (jakirkham) +- @martindurant (Martin Durant) + +## Emeritus core-developers +- @alimanfoo (Alistair Miles) +- @shoyer (Stephan Hoyer) +- @ryan-williams (Ryan Williams) +- @jrbourbeau (James Bourbeau) +- @mzjp2 (Zain Patel) +- @grlee77 (Gregory Lee) + +## Former core-developers +- @jeromekelleher (Jerome Kelleher) +- @tjcrone (Tim Crone) +- @funkey (Jan Funke) +- @shikharsg +- @Carreau (Matthias Bussonnier) +- @dazzag24 +- @WardF (Ward Fisher) From c7d66b4f8d7e9a4d50e5e01e5484ff8df612cb51 Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Wed, 10 Jan 2024 20:52:42 +0100 Subject: [PATCH 0410/1078] Drop python 3.8 and numpy 1.20 (#1557) * Drop 3.8 and add 3.12 * Try removing line_profiler * Also bump the minimal numpy to 1.21 * Drop 3.12 again * Revert "Try removing line_profiler" This reverts commit 837854bec99a9d25aece2ead9666f01690d228cc. * Update release.rst --------- Co-authored-by: Joe Hamman Co-authored-by: jakirkham --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- .github/workflows/python-package.yml | 8 ++++---- .github/workflows/releases.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- docs/release.rst | 3 +++ environment.yml | 2 +- pyproject.toml | 5 ++--- 7 files changed, 13 insertions(+), 11 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index ba05f23fcc..ec98af029e 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -27,7 +27,7 @@ body: attributes: label: Python Version description: Version of Python interpreter - placeholder: 3.8.5, 3.9, 3.10, etc. + placeholder: 3.9, 3.10, 3.11, etc. 
validations: required: true - type: input diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 0c3c49d78d..d74df9ce67 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,13 +15,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] - numpy_version: ['>=1.22.0', '==1.20.*'] + python-version: ['3.9', '3.10', '3.11'] + numpy_version: ['>=1.22.0', '==1.21.*'] exclude: - python-version: '3.10' - numpy_version: '==1.20.*' + numpy_version: '==1.21.*' - python-version: '3.11' - numpy_version: '==1.20.*' + numpy_version: '==1.21.*' services: redis: image: redis diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 8d8512294d..31a7e2770c 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -19,7 +19,7 @@ jobs: - uses: actions/setup-python@v5.0.0 name: Install Python with: - python-version: '3.8' + python-version: '3.9' - name: Install PyBuild run: | diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index eeee5b704d..5c3252c0ba 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -16,7 +16,7 @@ jobs: strategy: fail-fast: True matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.9', '3.10', '3.11'] steps: - uses: actions/checkout@v4 with: diff --git a/docs/release.rst b/docs/release.rst index c18e0b8c20..a3e0831ba4 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -49,6 +49,9 @@ Docs Maintenance ~~~~~~~~~~~ +* Drop Python 3.8 and NumPy 1.20 + By :user:`Josh Moore `; :issue:`1557`. + * Cache result of ``FSStore._fsspec_installed()``. By :user:`Janick Martinez Esturo ` :issue:`1581`. 
diff --git a/environment.yml b/environment.yml index dc99507427..ff2f9eedef 100644 --- a/environment.yml +++ b/environment.yml @@ -4,7 +4,7 @@ channels: dependencies: - wheel - numcodecs >= 0.6.4 - - numpy >= 1.20 + - numpy >= 1.21 - pip - pip: - asciitree diff --git a/pyproject.toml b/pyproject.toml index 33e8573830..a85e49e82c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,10 +10,10 @@ readme = { file = "README.md", content-type = "text/markdown" } maintainers = [ { name = "Alistair Miles", email = "alimanfoo@googlemail.com" } ] -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ 'asciitree', - 'numpy>=1.20,!=1.21.0', + 'numpy>=1.21.1', 'fasteners', 'numcodecs>=0.10.0', ] @@ -30,7 +30,6 @@ classifiers = [ 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: Unix', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', From 6ad464bb04bffa83b9665dd09caf0f8aaf6b367d Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 10 Jan 2024 11:59:46 -0800 Subject: [PATCH 0411/1078] Add Norman Rzepka to core-dev team (#1630) --- TEAM.md | 1 + 1 file changed, 1 insertion(+) diff --git a/TEAM.md b/TEAM.md index a95885ebe5..6a22d83d1f 100644 --- a/TEAM.md +++ b/TEAM.md @@ -6,6 +6,7 @@ - @d-v-b (Davis Bennett) - @jakirkham (jakirkham) - @martindurant (Martin Durant) +- @normanrz (Norman Rzepka) ## Emeritus core-developers - @alimanfoo (Alistair Miles) From a292dc43f8d0181214ded83124ebd4f85db0ff50 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 11:14:02 -0800 Subject: [PATCH 0412/1078] chore: update pre-commit hooks (#1633) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.11 → v0.1.13](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.11...v0.1.13) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 340366ef53..7d1f9254ae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.11' + rev: 'v0.1.13' hooks: - id: ruff - repo: https://github.com/psf/black From 68c87bb51d922487647fa6188392caf8c1d9a83c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 20:33:42 +0100 Subject: [PATCH 0413/1078] Bump actions/download-artifact from 3 to 4 (#1611) * Bump actions/download-artifact from 3 to 4 Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Also bump upload-artifact see https://github.com/actions/download-artifact?tab=readme-ov-file#breaking-changes > Downloading artifacts that were created from action/upload-artifact@v3 and below are not supported. 
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman Co-authored-by: Josh Moore --- .github/workflows/releases.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 31a7e2770c..250c6112c8 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -36,7 +36,7 @@ jobs: else echo "All seem good" fi - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: releases path: dist @@ -45,7 +45,7 @@ jobs: needs: [build_artifacts] runs-on: ubuntu-latest steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: releases path: dist @@ -60,7 +60,7 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: releases path: dist From 1d56da0eb54f64840b1fb0f42c72622233f2f1f6 Mon Sep 17 00:00:00 2001 From: Jeff Peck Date: Tue, 16 Jan 2024 07:00:17 -0500 Subject: [PATCH 0414/1078] Update tutorial.rst to include section about accessing Zip Files on S3 (#1615) * Update tutorial.rst to include section about accessing Zip Files on S3 Per the discussion here, add information about accessing zip files on s3: https://github.com/zarr-developers/zarr-python/discussions/1613 * Update release.rst * Implement d-v-b's suggestions --------- Co-authored-by: Davis Bennett Co-authored-by: Josh Moore --- docs/release.rst | 2 ++ docs/tutorial.rst | 25 +++++++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index a3e0831ba4..ab74a3debd 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -45,6 +45,8 @@ Docs * Minor tweak to advanced indexing tutorial examples. By :user:`Ross Barnowski ` :issue:`1550`. +* Added section about accessing zip files that are on s3. + By :user:`Jeff Peck ` :issue:`1613`. Maintenance ~~~~~~~~~~~ diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 4099bac1c8..351eef064a 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1000,6 +1000,31 @@ separately from Zarr. .. _tutorial_copy: +Accessing Zip Files on S3 +~~~~~~~~~~~~~~~~~~~~~~~~~ + +The built-in `ZipStore` will only work with paths on the local file-system; however, +it is also possible to access ``.zarr.zip`` data on the cloud. Here is an example of +accessing a zipped Zarr file on s3: + + >>> s3_path = "s3://path/to/my.zarr.zip" + >>> + >>> s3 = s3fs.S3FileSystem() + >>> f = s3.open(s3_path) + >>> fs = ZipFileSystem(f, mode="r") + >>> store = FSMap("", fs, check=False) + >>> + >>> # cache is optional, but may be a good idea depending on the situation + >>> cache = zarr.storage.LRUStoreCache(store, max_size=2**28) + >>> z = zarr.group(store=cache) + +This store can also be generated with ``fsspec``'s handler chaining, like so: + + >>> store = zarr.storage.FSStore(url=f"zip::{s3_path}", mode="r") + +This can be especially useful if you have a very large ``.zarr.zip`` file on s3 +and only need to access a small portion of it.
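+The example above assumes the following imports; ``ZipFileSystem`` and ``FSMap``
+are provided by ``fsspec`` (a sketch of one possible setup):
+
+    >>> import s3fs
+    >>> from fsspec.implementations.zip import ZipFileSystem
+    >>> from fsspec.mapping import FSMap
+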
+ Consolidating metadata ~~~~~~~~~~~~~~~~~~~~~~ From 8ac8553f25eb338d6044d1232b4a643036979486 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 16 Jan 2024 09:14:10 -0800 Subject: [PATCH 0415/1078] doc(v3): add v3 roadmap and design document (#1583) * doc(v3): add v3 roadmap and design document * Update v3-roadmap-and-design.md * updates after latest round of reviews * Update v3-roadmap-and-design.md Co-authored-by: Norman Rzepka * Update v3-roadmap-and-design.md Co-authored-by: Sanket Verma --------- Co-authored-by: Norman Rzepka Co-authored-by: Sanket Verma --- v3-roadmap-and-design.md | 429 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 429 insertions(+) create mode 100644 v3-roadmap-and-design.md diff --git a/v3-roadmap-and-design.md b/v3-roadmap-and-design.md new file mode 100644 index 0000000000..696799e56f --- /dev/null +++ b/v3-roadmap-and-design.md @@ -0,0 +1,429 @@ +# Zarr Python Roadmap + +- Status: draft +- Author: Joe Hamman +- Created On: October 31, 2023 +- Input from: + - Davis Bennett / @d-v-b + - Norman Rzepka / @normanrz + - Deepak Cherian / @dcherian + - Brian Davis / @monodeldiablo + - Oliver McCormack / @olimcc + - Ryan Abernathey / @rabernat + - Jack Kelly / @JackKelly + - Martin Durant / @martindurant + +## Introduction + +This document lays out a design proposal for version 3.0 of the [Zarr-Python](https://zarr.readthedocs.io/en/stable/) package. A specific focus of the design is to bring Zarr-Python's API up to date with the [Zarr V3 specification](https://zarr-specs.readthedocs.io/en/latest/v3/core/v3.0.html), with the hope of enabling the development of the many features and extensions that motivated the V3 Spec. The ideas presented here are expected to result in a major release of Zarr-Python (version 3.0) including a significant number of breaking API changes. +For clarity, “V3” will be used to describe the version of the Zarr specification and “3.0” will be used to describe the release tag of the Zarr-Python project. + +### Current status of V3 in Zarr-Python + +During the development of the V3 Specification, a [prototype implementation](https://github.com/zarr-developers/zarr-python/pull/898) was added to the Zarr-Python library. Since that implementation, the V3 spec evolved in significant ways and as a result, the Zarr-Python library is now out of sync with the approved spec. Downstream libraries (e.g. [Xarray](https://github.com/pydata/xarray)) have added support for this implementation and will need to migrate to the accepted spec when it's available in Zarr-Python. + +## Goals + +- Provide a complete implementation of Zarr V3 through the Zarr-Python API +- Clear the way for exciting extensions / ZEPs (i.e. [sharding](https://zarr-specs.readthedocs.io/en/latest/v3/codecs/sharding-indexed/v1.0.html), [variable chunking](https://zarr.dev/zeps/draft/ZEP0003.html), etc.) +- Provide a developer API that can be used to implement and register V3 extensions +- Improve the performance of Zarr-Python by streamlining the interface between the Store layer and higher level APIs (e.g. Groups and Arrays) +- Clean up the internal and user facing APIs +- Improve code quality and robustness (e.g. achieve 100% type hint coverage) +- Align the Zarr-Python array API with the [array API Standard](https://data-apis.org/array-api/latest/) + +## Examples of what 3.0 will enable? +1. Reading and writing V3 spec-compliant groups and arrays +2. V3 extensions including sharding and variable chunking. +3.
Improved performance by leveraging concurrency when creating/reading/writing to stores (imagine a `create_hierarchy(zarr_objects)` function). +4. User-developed extensions (e.g. storage-transformers) can be registered with Zarr-Python at runtime + +## Non-goals (of this document) + +- Implementation of any unaccepted Zarr V3 extensions +- Major revisions to the Zarr V3 spec + +## Requirements + +1. Read and write spec compliant V2 and V3 data +2. Limit unnecessary traffic to/from the store +3. Cleanly define the Array/Group/Store abstractions +4. Cleanly define how V2 will be supported going forward +5. Provide a clear roadmap to help users upgrade to 3.0 +6. Developer tools / hooks for registering extensions + +## Design + +### Async API + +Zarr-Python is an IO library. As such, supporting concurrent action against the storage layer is critical to achieving acceptable performance. Zarr-Python 2 was not designed with asynchronous computation in mind and as a result has struggled to effectively leverage the benefits of concurrency. At one point, `getitems` and `setitems` support was added to the Zarr store model but that is only used for operating on a set of chunks in a single variable. + +With Zarr-Python 3.0, we have the opportunity to revisit this design. The proposal here is as follows: + +1. The `Store` interface will be entirely async. +2. On top of the async `Store` interface, we will provide an `AsyncArray` and `AsyncGroup` interface. +3. Finally, the primary user facing API will be synchronous `Array` and `Group` classes that wrap the async equivalents. + +**Examples** + +- **Store** + + ```python + class Store: + ... + async def get(self, key: str) -> bytes: + ... + async def get_partial_values(self, key_ranges: List[Tuple[str, Tuple[int, Optional[int]]]]) -> bytes: + ... + # (no sync interface here) + ``` +- **Array** + + ```python + class AsyncArray: + ... + + async def getitem(self, selection: Selection) -> np.ndarray: + # the core logic for getitem goes here + + class Array: + _async_array: AsyncArray + + def __getitem__(self, selection: Selection) -> np.ndarray: + return sync(self._async_array.getitem(selection)) + ``` +- **Group** + + ```python + class AsyncGroup: + ... + + async def create_group(self, path: str, **kwargs) -> AsyncGroup: + # the core logic for create_group goes here + + class Group: + _async_group: AsyncGroup + + def create_group(self, path: str, **kwargs) -> Group: + return sync(self._async_group.create_group(path, **kwargs)) + ``` +**Internal Synchronization API** + +With the `Store` and core `AsyncArray`/`AsyncGroup` classes being predominantly async, Zarr-Python will need an internal API to provide a synchronous interface. The proposal here is to use the approach in [fsspec](https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py) to provide a high-level `sync` function that takes an `awaitable` and runs it in its managed IO Loop / thread. + +**FAQ** +1. Why two levels of Arrays/groups? + a. First, this is an intentional decision and departure from the current Zarrita implementation + b. The idea is that users rarely want to mix interfaces. Either they are working within an async context (currently quite rare) or they are in a typical synchronous context. + c. Splitting the two will allow us to clearly define behavior on the `AsyncObj` and simply wrap it in the `SyncObj`. +2. What if a store only has a synchronous backend? + a. First off, this is expected to be a fairly rare occurrence. Most storage backends have async interfaces. + b.
But in the event a storage backend doesn’t have an async interface, there is nothing wrong with putting synchronous code in `async` methods. There are approaches to enabling concurrent action through wrappers like AsyncIO's `loop.run_in_executor` ([ref 1](https://stackoverflow.com/questions/38865050/is-await-in-python3-cooperative-multitasking), [ref 2](https://stackoverflow.com/a/43263397/732596), [ref 3](https://bbc.github.io/cloudfit-public-docs/asyncio/asyncio-part-5.html), [ref 4](https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.loop.run_in_executor)). +3. Will Zarr help manage the async contexts encouraged by some libraries (e.g. [AioBotoCore](https://aiobotocore.readthedocs.io/en/latest/tutorial.html#using-botocore))? + a. Many async IO libraries require entering an async context before interacting with the API. We expect some experimentation to be needed here but the initial design will follow something close to what fsspec does ([example in s3fs](https://github.com/fsspec/s3fs/blob/949442693ec940b35cda3420c17a864fbe426567/s3fs/core.py#L527)). +4. Why not provide a synchronous Store interface? + a. We could, but this design is simpler. It would mean supporting it in the `AsyncGroup` and `AsyncArray` classes, which may be more trouble than it's worth. Storage backends that do not have an async API will be encouraged to wrap blocking calls in an async wrapper (e.g. `loop.run_in_executor`). + +### Store API + +The `Store` API is specified directly in the V3 specification. All V3 stores should implement this abstract API, omitting Write and List support as needed. As described above, all stores will be expected to expose the required methods as async methods. + +**Example** + +```python +class ReadWriteStore: + ... + async def get(self, key: str) -> bytes: + ... + + async def get_partial_values(self, key_ranges: List[Tuple[str, int, int]]) -> bytes: + ... + + async def set(self, key: str, value: Union[bytes, bytearray, memoryview]) -> None: + ... # required for writable stores + + async def set_partial_values(self, key_start_values: List[Tuple[str, int, Union[bytes, bytearray, memoryview]]]) -> None: + ... # required for writable stores + + async def list(self) -> List[str]: + ... # required for listable stores + + async def list_prefix(self, prefix: str) -> List[str]: + ... # required for listable stores + + async def list_dir(self, prefix: str) -> List[str]: + ... # required for listable stores + + # additional (optional methods) + async def getsize(self, prefix: str) -> int: + ... + + async def rename(self, src: str, dest: str) -> None: + ... + +``` + +Recognizing that there are many Zarr applications today that rely on the `MutableMapping` interface supported by Zarr-Python 2, a wrapper store will be developed to allow existing stores to plug directly into this API. + +### Array API + +The user facing array interface will implement a subset of the [Array API Standard](https://data-apis.org/array-api/latest/). Most of the computational parts of the Array API Standard don’t fit into Zarr right now. That’s okay. What matters most is that we ensure we can give downstream applications a compliant API. + +*Note, Zarr already does most of this so this is more about formalizing the relationship than a substantial change in API.* + +| | Included | Not Included | Unknown / Maybe possible?
| +| --- | --- | --- | --- | +| Attributes | `dtype` | `mT` | `device` | +| | `ndim` | `T` | | +| | `shape` | | | +| | `size` | | | +| Methods | `__getitem__` | `__array_namespace__` | `to_device` | +| | `__setitem__` | `__abs__` | `__bool__` | +| | `__eq__` | `__add__` | `__complex__` | +| | `__bool__` | `__and__` | `__dlpack__` | +| | | `__floordiv__` | `__dlpack_device__` | +| | | `__ge__` | `__float__` | +| | | `__gt__` | `__index__` | +| | | `__invert__` | `__int__` | +| | | `__le__` | | +| | | `__lshift__` | | +| | | `__lt__` | | +| | | `__matmul__` | | +| | | `__mod__` | | +| | | `__mul__` | | +| | | `__ne__` | | +| | | `__neg__` | | +| | | `__or__` | | +| | | `__pos__` | | +| | | `__pow__` | | +| | | `__rshift__` | | +| | | `__sub__` | | +| | | `__truediv__` | | +| | | `__xor__` | | +| Creation functions (`zarr.creation`) | `zeros` | | `arange` | +| | `zeros_like` | | `asarray` | +| | `ones` | | `eye` | +| | `ones_like` | | `from_dlpack` | +| | `full` | | `linspace` | +| | `full_like` | | `meshgrid` | +| | `empty` | | `tril` | +| | `empty_like` | | `triu` | + +In addition to the core array API defined above, the Array class should have the following Zarr specific properties: + +- `.metadata` (see Metadata Interface below) +- `.attrs` - (pull from metadata object) +- `.info` - (pull from existing property †) + +*† In Zarr-Python 2, the info property lists the store to identify initialized chunks. By default this will be turned off in 3.0 but will be configurable.* + +**Indexing** + +Zarr-Python currently supports `__getitem__` style indexing and the special `oindex` and `vindex` indexers. These are not part of the current Array API standard (see [data-apis/array-api\#669](https://github.com/data-apis/array-api/issues/669)) but they have been [proposed as a NEP](https://numpy.org/neps/nep-0021-advanced-indexing.html). Zarr-Python will maintain these in 3.0. + +We are also exploring a new high-level indexing API that will enable optimized batch/concurrent loading of many chunks. We expect this to be important to enable performant loading of data in the context of sharding. See [this discussion](https://github.com/zarr-developers/zarr-python/discussions/1569) for more detail. + +Concurrent indexing across multiple arrays will be possible using the AsyncArray API. + +**Async and Sync Array APIs** + +Most of the logic to support Zarr Arrays will live in the `AsyncArray` class. There are a few notable differences that should be called out. + +| Sync Method | Async Method | +| --- | --- | +| `__getitem__` | `getitem` | +| `__setitem__` | `setitem` | +| `__eq__` | `equals` | + +**Metadata interface** + +Zarr-Python 2.* closely mirrors the V2 spec metadata schema in the Array and Group classes. In 3.0, we plan to move the underlying metadata representation to a separate interface (e.g. `Array.metadata`). This interface will return either a `V2ArrayMetadata` or `V3ArrayMetadata` object (both will inherit from a parent `ArrayMetadataABC` class). The `V2ArrayMetadata` and `V3ArrayMetadata` classes will be responsible for producing valid JSON representations of their metadata, and yielding a consistent view to the `Array` or `Group` class. + +### Group API + +The main question is how closely we should follow the existing Zarr-Python implementation / `MutableMapping` interface. The table below shows the primary `Group` methods in Zarr-Python 2 and attempts to identify if and how they would be implemented in 3.0.
+ +| V2 Group Methods | `AsyncGroup` | `Group` | `h5py_compat.Group` | +| --- | --- | --- | --- | +| `__len__` | `length` | `__len__` | `__len__` | +| `__iter__` | `__aiter__` | `__iter__` | `__iter__` | +| `__contains__` | `contains` | `__contains__` | `__contains__` | +| `__getitem__` | `getitem` | `__getitem__` | `__getitem__` | +| `__enter__` | N/A | N/A | `__enter__` | +| `__exit__` | N/A | N/A | `__exit__` | +| `group_keys` | `group_keys` | `group_keys` | N/A | +| `groups` | `groups` | `groups` | N/A | +| `array_keys` | `array_key` | `array_keys` | N/A | +| `arrays` | `arrays`* | `arrays` | N/A | +| `visit` | ? | ? | `visit` | +| `visitkeys` | ? | ? | ? | +| `visitvalues` | ? | ? | ? | +| `visititems` | ? | ? | `visititems` | +| `tree` | `tree` | `tree` | `Both` | +| `create_group` | `create_group` | `create_group` | `create_group` | +| `require_group` | N/A | N/A | `require_group` | +| `create_groups` | ? | ? | N/A | +| `require_groups` | ? | ? | ? | +| `create_dataset` | N/A | N/A | `create_dataset` | +| `require_dataset` | N/A | N/A | `require_dataset` | +| `create` | `create_array` | `create_array` | N/A | +| `empty` | `empty` | `empty` | N/A | +| `zeros` | `zeros` | `zeros` | N/A | +| `ones` | `ones` | `ones` | N/A | +| `full` | `full` | `full` | N/A | +| `array` | `create_array` | `create_array` | N/A | +| `empty_like` | `empty_like` | `empty_like` | N/A | +| `zeros_like` | `zeros_like` | `zeros_like` | N/A | +| `ones_like` | `ones_like` | `ones_like` | N/A | +| `full_like` | `full_like` | `full_like` | N/A | +| `move` | `move` | `move` | `move` | + +**`zarr.h5py_compat.Group`** + +Zarr-Python 2.* made an attempt to align its API with that of [h5py](https://docs.h5py.org/en/stable/index.html). With 3.0, we will relax this alignment in favor of providing an explicit compatibility module (`zarr.h5py_compat`). This module will expose the `Group` and `Dataset` APIs that map to Zarr-Python’s `Group` and `Array` objects. + +### Creation API + +Zarr-Python 2.* bundles together the creation and serialization of Zarr objects. Zarr-Python 3.* will make it possible to create objects in memory separate from serializing them. This will specifically enable writing hierarchies of Zarr objects in a single batch step. For example: + +```python + +arr1 = Array(shape=(10, 10), path="foo/bar", dtype="i4", store=store) +arr2 = Array(shape=(10, 10), path="foo/spam", dtype="f8", store=store) + +arr1.save() +arr2.save() + +# or equivalently + +zarr.save_many([arr1, arr2]) +``` + +*Note: this batch creation API likely needs additional design effort prior to implementation.* + +### Plugin API + +Zarr V3 was designed to be extensible at multiple layers. Zarr-Python will support these extensions through a combination of [Abstract Base Classes](https://docs.python.org/3/library/abc.html) (ABCs) and [Entrypoints](https://packaging.python.org/en/latest/specifications/entry-points/). + +**ABCs** + +Zarr V3 will expose abstract base classes for the following objects: + +- `Store`, `ReadStore`, `ReadWriteStore`, `ReadListStore`, and `ReadWriteListStore` +- `BaseArray`, `SynchronousArray`, and `AsynchronousArray` +- `BaseGroup`, `SynchronousGroup`, and `AsynchronousGroup` +- `Codec`, `ArrayArrayCodec`, `ArrayBytesCodec`, `BytesBytesCodec` + +**Entrypoints** + +Lots more thinking is needed here, but the idea is to provide entrypoints for `data type`, `chunk grid`, `chunk key encoding`, `codecs`, `storage_transformers`, and `stores`.
These might look something like: + +``` +entry_points=""" + [zarr.codecs] + blosc_codec=codec_plugin:make_blosc_codec + zlib_codec=codec_plugin:make_zlib_codec +""" +``` + +### Python type hints and static analysis + +Target 100% Mypy coverage in 3.0 source. + +### Observability + +A persistent challenge in Zarr-Python is diagnosing problems that span many parts of the stack. To address this in 3.0, we will add a basic logging framework that can be used to debug behavior at various levels of the stack. We propose to add separate loggers for the following namespaces: + +- `array` +- `group` +- `store` +- `codec` + +These should be documented such that users know how to activate them and developers know how to use them when developing extensions. + +### Dependencies + +Today, Zarr-Python has the following required dependencies: + +```python +dependencies = [ + 'asciitree', + 'numpy>=1.20,!=1.21.0', + 'fasteners', + 'numcodecs>=0.10.0', +] +``` + +What other dependencies should be considered? + +1. Attrs - Zarrita makes extensive use of the Attrs library +2. Fsspec - Zarrita has a hard dependency on Fsspec. This could be easily relaxed though. + +## Breaking changes relative to Zarr-Python 2.* + +1. H5py compat moved to a standalone module? +2. `Group.__getitem__` support moved to `Group.members.__getitem__`? +3. Others? + +## Open questions + +1. How to treat V2 + a. Note: Zarrita currently implements separate `V2Array` and `V3Array` classes. This feels less than ideal. + b. We could easily convert metadata from v2 to the V3 Array, but what about writing? + c. Ideally, we don’t have completely separate code paths. But if it's too complicated to support both within one interface, it's probably better. +2. How and when to remove the current implementation of V3. + a. It's hidden behind a hard-to-use feature flag so we probably don't need to do anything. +3. How to model runtime configuration? +4. Which extensions belong in Zarr-Python and which belong in separate packages? + a. We don't need to take a strong position on this here. It's likely that someone will want to put Sharding in. That will be useful to develop in parallel because it will give us a good test case for the plugin interface. + +## Testing + +Zarr-Python 3.0 adds a major new dimension to Zarr: Async support. This also comes with a compatibility risk; we will need to thoroughly test support in key execution environments. Testing plan: +- Reuse the existing test suite for testing the `v3` API. + - `xfail` tests that expose breaking changes with `3.0 - breaking change` description. This will help identify additional and/or unintentional breaking changes. + - Rework tests that were only testing internal APIs. +- Add a set of functional / integration tests targeting real-world workflows in various contexts (e.g. w/ Dask) + +## Development process + +Zarr-Python 3.0 will introduce a number of new APIs and breaking changes to existing APIs. In order to facilitate ongoing support for Zarr-Python 2.*, we will take on the following development process: + +- Create a `v3` branch that can be used for developing the core functionality apart from the `main` branch. This will allow us to support ongoing work and bug fixes on the `main` branch. +- Put the `3.0` APIs inside a `zarr.v3` module. Imports from this namespace will all be new APIs that users can develop and test against once the `v3` branch is merged to `main`.
+
+### Dependencies
+
+Today, Zarr-Python has the following required dependencies:
+
+```python
+dependencies = [
+    'asciitree',
+    'numpy>=1.20,!=1.21.0',
+    'fasteners',
+    'numcodecs>=0.10.0',
+]
+```
+
+What other dependencies should be considered?
+
+1. Attrs - Zarrita makes extensive use of the Attrs library.
+2. Fsspec - Zarrita has a hard dependency on Fsspec, though this could easily be relaxed.
+
+## Breaking changes relative to Zarr-Python 2.*
+
+1. H5py compat moved to a standalone module?
+2. `Group.__getitem__` support moved to `Group.members.__getitem__`?
+3. Others?
+
+## Open questions
+
+1. How to treat V2
+    a. Note: Zarrita currently implements separate `V2Array` and `V3Array` classes. This feels less than ideal.
+    b. We could easily convert metadata from v2 to the V3 Array, but what about writing?
+    c. Ideally, we don’t have completely separate code paths. But if it's too complicated to support both within one interface, separate paths are probably better.
+2. How and when to remove the current implementation of V3.
+    a. It's hidden behind a hard-to-use feature flag, so we probably don't need to do anything.
+3. How to model runtime configuration?
+4. Which extensions belong in Zarr-Python and which belong in separate packages?
+    a. We don't need to take a strong position on this here. It's likely that someone will want to put Sharding in. That will be useful to develop in parallel because it will give us a good test case for the plugin interface.
+
+## Testing
+
+Zarr-Python 3.0 adds a major new dimension to Zarr: async support. This also comes with a compatibility risk, so we will need to thoroughly test support in key execution environments. Testing plan:
+- Reuse the existing test suite for testing the `v3` API.
+    - `xfail` tests that expose breaking changes with a `3.0 - breaking change` description. This will help identify additional and/or unintentional breaking changes.
+    - Rework tests that were only testing internal APIs.
+- Add a set of functional / integration tests targeting real-world workflows in various contexts (e.g. with Dask).
+
+## Development process
+
+Zarr-Python 3.0 will introduce a number of new APIs and breaking changes to existing APIs. In order to facilitate ongoing support for Zarr-Python 2.*, we will adopt the following development process:
+
+- Create a `v3` branch that can be used for developing the core functionality apart from the `main` branch. This will allow us to support ongoing work and bug fixes on the `main` branch.
+- Put the `3.0` APIs inside a `zarr.v3` module. Imports from this namespace will all be new APIs that users can develop and test against once the `v3` branch is merged to `main`.
+- Kickstart the process by pulling in the current state of `zarrita`, which has many of the features described in this design.
+- Release a series of 2.* releases with the `v3` namespace.
+- When `v3` is complete, move the contents of `v3` to the package root.
+
+**Milestones**
+
+Below is a set of specific milestones leading toward the completion of this process. As work begins, we expect this list to grow in specificity.
+
+1. Port the current version of Zarrita to Zarr-Python.
+2. Formalize the async interface by splitting `Array` and `Group` objects into sync and async versions.
+3. Implement "fancy" indexing operations on the `AsyncArray`.
+4. Implement an abstract base class for the `Store` interface and a wrapper `Store` to make use of existing `MutableMapping` stores.
+5. Rework the existing unit test suite to use the `v3` namespace.
+6. Develop a plugin interface for extensions.
+7. Develop a set of functional and integration tests.
+8. Work with downstream libraries (Xarray, Dask, etc.) to test new APIs.
+
+## TODOs
+
+The following subjects are not covered in detail above but perhaps should be. They are included here so they are not forgotten.
+
+1. [Store] Should Zarr provide an API for caching objects after first read/list/etc.? Read-only stores?
+2. [Array] Buffer protocol support.
+3. [Array] `meta_array` support.
+4. [Extensions] Define how Zarr-Python will consume the various plugin types.
+5. [Misc] H5py compatibility requires a bit more work and a champion to drive it forward.
+6. [Misc] Define the `chunk_store` API in 3.0.
+7. [Misc] Define the `synchronizer` API in 3.0.
+
+## References
+
+1. [Zarr-Python repository](https://github.com/zarr-developers/zarr-python)
+2. [Zarr core specification (version 3.0) — Zarr specs documentation](https://zarr-specs.readthedocs.io/en/latest/v3/core/v3.0.html#)
+3. [Zarrita repository](https://github.com/scalableminds/zarrita)
+4. [Async-Zarr](https://github.com/martindurant/async-zarr)
+5. [Zarr-Python Discussion Topic](https://github.com/zarr-developers/zarr-python/discussions/1569)

From a81db0782535ba04c32c277102a6457d118a73e8 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 22 Jan 2024 11:50:32 -0800
Subject: [PATCH 0416/1078] chore: update pre-commit hooks (#1636)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.1.13 → v0.1.14](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.13...v0.1.14)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7d1f9254ae..a7f48d7cd6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -8,7 +8,7 @@ default_language_version:
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
- rev: 'v0.1.13' + rev: 'v0.1.14' hooks: - id: ruff - repo: https://github.com/psf/black From 1be410b2f05679fcc5023d97a44c4c860cb88079 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Tue, 30 Jan 2024 21:10:24 +0100 Subject: [PATCH 0417/1078] Don't import from tests (#1601) * fix: move DummyStorageTransformer to zarr, and import it from tests instead of the other way around * test: tests use relative imports instead of importing from zarr.test * docs: release notes * docs: add new section for unreleased v3 work * docs: add heading to v3 release notes --- docs/release.rst | 17 ++++++++++++++--- zarr/_storage/v3_storage_transformers.py | 16 ++++++++++++++++ zarr/meta.py | 6 ++++-- zarr/tests/test_attrs.py | 2 +- zarr/tests/test_convenience.py | 2 +- zarr/tests/test_core.py | 10 +++++++--- zarr/tests/test_creation.py | 4 ++-- zarr/tests/test_dim_separator.py | 2 +- zarr/tests/test_hierarchy.py | 2 +- zarr/tests/test_indexing.py | 2 +- zarr/tests/test_n5.py | 2 +- zarr/tests/test_storage.py | 2 +- zarr/tests/test_storage_v3.py | 20 ++++++-------------- zarr/tests/test_sync.py | 6 +++--- 14 files changed, 59 insertions(+), 34 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 9873d62896..3ed47ff9f5 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -13,10 +13,22 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. +.. _unreleased(v3): + +Unreleased (v3) +--------------- + +Maintenance +~~~~~~~~~~~ + +* Remedy a situation where ``zarr-python`` was importing ``DummyStorageTransformer`` from the test suite. + The dependency relationship is now reversed: the test suite imports this class from ``zarr-python``. + By :user:`Davis Bennett ` :issue:`1601`. + .. _unreleased: -Unreleased ----------- +Unreleased (v2) +--------------- Docs ~~~~ @@ -61,7 +73,6 @@ Maintenance * Remove ``sphinx-rtd-theme`` dependency from ``pyproject.toml``. By :user:`Sanket Verma ` :issue:`1563`. - .. _release_2.16.1: 2.16.1 diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py index ff31a7281c..3090aea28c 100644 --- a/zarr/_storage/v3_storage_transformers.py +++ b/zarr/_storage/v3_storage_transformers.py @@ -81,6 +81,22 @@ def create_empty(cls, store: "ShardingStorageTransformer"): ) +class DummyStorageTransfomer(StorageTransformer): + """For testing purposes. 
This class was previously defined in the test suite and imported + into Zarr, but now it has been moved here and the test suite will import it like any other part + of the Zarr library.""" + + TEST_CONSTANT = "test1234" + + extension_uri = "https://purl.org/zarr/spec/storage_transformers/dummy/1.0" + valid_types = ["dummy_type"] + + def __init__(self, _type, test_value) -> None: + super().__init__(_type) + assert test_value == self.TEST_CONSTANT + self.test_value = test_value + + class ShardingStorageTransformer(StorageTransformer): # lgtm[py/missing-equals] """Implements sharding as a storage transformer, as described in the spec: https://zarr-specs.readthedocs.io/en/latest/extensions/storage-transformers/sharding/v1.0.html diff --git a/zarr/meta.py b/zarr/meta.py index 48791ddf17..bd1f4ee037 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -459,8 +459,10 @@ def _encode_storage_transformer_metadata( @classmethod def _decode_storage_transformer_metadata(cls, meta: Mapping) -> "StorageTransformer": - from zarr.tests.test_storage_v3 import DummyStorageTransfomer - from zarr._storage.v3_storage_transformers import ShardingStorageTransformer + from zarr._storage.v3_storage_transformers import ( + ShardingStorageTransformer, + DummyStorageTransfomer, + ) # This might be changed to a proper registry in the future KNOWN_STORAGE_TRANSFORMERS = [DummyStorageTransfomer, ShardingStorageTransformer] diff --git a/zarr/tests/test_attrs.py b/zarr/tests/test_attrs.py index 7dd5b340a2..a5ce4bac89 100644 --- a/zarr/tests/test_attrs.py +++ b/zarr/tests/test_attrs.py @@ -8,7 +8,7 @@ from zarr.attrs import Attributes from zarr.storage import KVStore, DirectoryStore from zarr._storage.v3 import KVStoreV3 -from zarr.tests.util import CountingDict, CountingDictV3 +from .util import CountingDict, CountingDictV3 from zarr.hierarchy import group diff --git a/zarr/tests/test_convenience.py b/zarr/tests/test_convenience.py index 389ce90a9d..0970a9e1aa 100644 --- a/zarr/tests/test_convenience.py +++ b/zarr/tests/test_convenience.py @@ -43,7 +43,7 @@ MemoryStoreV3, SQLiteStoreV3, ) -from zarr.tests.util import have_fsspec +from .util import have_fsspec _VERSIONS = (2, 3) if v3_api_available else (2,) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index f3ca73dea8..c5a43ed39d 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -35,7 +35,11 @@ BaseStore, v3_api_available, ) -from .._storage.v3_storage_transformers import ShardingStorageTransformer, v3_sharding_available +from .._storage.v3_storage_transformers import ( + DummyStorageTransfomer, + ShardingStorageTransformer, + v3_sharding_available, +) from zarr.core import Array from zarr.errors import ArrayNotFoundError, ContainsGroupError from zarr.meta import json_loads @@ -70,9 +74,9 @@ SQLiteStoreV3, StoreV3, ) -from zarr.tests.test_storage_v3 import DummyStorageTransfomer + from zarr.util import buffer_size -from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec, mktemp +from .util import abs_container, skip_test_env_var, have_fsspec, mktemp # noinspection PyMethodMayBeStatic diff --git a/zarr/tests/test_creation.py b/zarr/tests/test_creation.py index b44c6379fd..9307b81b52 100644 --- a/zarr/tests/test_creation.py +++ b/zarr/tests/test_creation.py @@ -8,6 +8,7 @@ from numpy.testing import assert_array_equal from zarr._storage.store import DEFAULT_ZARR_VERSION +from zarr._storage.v3_storage_transformers import DummyStorageTransfomer from zarr.codecs import Zlib from zarr.core import Array from zarr.creation import ( 
@@ -30,8 +31,7 @@ from zarr._storage.store import v3_api_available from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer -from zarr.tests.test_storage_v3 import DummyStorageTransfomer -from zarr.tests.util import mktemp, have_fsspec +from .util import mktemp, have_fsspec _VERSIONS = (None, 2, 3) if v3_api_available else (None, 2) diff --git a/zarr/tests/test_dim_separator.py b/zarr/tests/test_dim_separator.py index 987852dfd0..83f4d3b5b9 100644 --- a/zarr/tests/test_dim_separator.py +++ b/zarr/tests/test_dim_separator.py @@ -7,7 +7,7 @@ import zarr from zarr.core import Array from zarr.storage import DirectoryStore, NestedDirectoryStore, FSStore -from zarr.tests.util import have_fsspec +from .util import have_fsspec needs_fsspec = pytest.mark.skipif(not have_fsspec, reason="needs fsspec") diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index cbf59c55c3..3eaa4743dd 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -57,7 +57,7 @@ LRUStoreCacheV3, ) from zarr.util import InfoReporter, buffer_size -from zarr.tests.util import skip_test_env_var, have_fsspec, abs_container, mktemp +from .util import skip_test_env_var, have_fsspec, abs_container, mktemp _VERSIONS = (2, 3) if v3_api_available else (2,) diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index 8a34c1e715..1835206819 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -13,7 +13,7 @@ PartialChunkIterator, ) -from zarr.tests.util import CountingDict +from .util import CountingDict def test_normalize_integer_selection(): diff --git a/zarr/tests/test_n5.py b/zarr/tests/test_n5.py index 2602aa06c1..755d60b607 100644 --- a/zarr/tests/test_n5.py +++ b/zarr/tests/test_n5.py @@ -9,7 +9,7 @@ import json import atexit -from zarr.tests.util import have_fsspec +from .util import have_fsspec def test_make_n5_chunk_wrapper(): diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index e87716fa47..5c1d437ecb 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -59,7 +59,7 @@ ) from zarr.storage import FSStore, rename, listdir from zarr._storage.v3 import KVStoreV3 -from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp +from .util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp from zarr.util import ConstantMap, json_dumps diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index ded9296059..aeb4fe7d1b 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -10,7 +10,11 @@ import zarr from zarr._storage.store import _get_hierarchy_metadata, v3_api_available, StorageTransformer -from zarr._storage.v3_storage_transformers import ShardingStorageTransformer, v3_sharding_available +from zarr._storage.v3_storage_transformers import ( + DummyStorageTransfomer, + ShardingStorageTransformer, + v3_sharding_available, +) from zarr.core import Array from zarr.meta import _default_entry_point_metadata_v3 from zarr.storage import ( @@ -40,7 +44,7 @@ StoreV3, ZipStoreV3, ) -from zarr.tests.util import CountingDictV3, have_fsspec, skip_test_env_var, mktemp +from .util import CountingDictV3, have_fsspec, skip_test_env_var, mktemp # pytest will fail to run if the following fixtures aren't imported here from .test_storage import StoreTests as _StoreTests @@ -108,18 +112,6 @@ def keys(self): """keys""" -class DummyStorageTransfomer(StorageTransformer): - TEST_CONSTANT = 
"test1234" - - extension_uri = "https://purl.org/zarr/spec/storage_transformers/dummy/1.0" - valid_types = ["dummy_type"] - - def __init__(self, _type, test_value) -> None: - super().__init__(_type) - assert test_value == self.TEST_CONSTANT - self.test_value = test_value - - def test_ensure_store_v3(): class InvalidStore: pass diff --git a/zarr/tests/test_sync.py b/zarr/tests/test_sync.py index c28f6b081d..d066a4e8d6 100644 --- a/zarr/tests/test_sync.py +++ b/zarr/tests/test_sync.py @@ -16,9 +16,9 @@ from zarr.sync import ProcessSynchronizer, ThreadSynchronizer # zarr_version fixture must be imported although not used directly here -from zarr.tests.test_attrs import TestAttributes, zarr_version # noqa -from zarr.tests.test_core import TestArray -from zarr.tests.test_hierarchy import TestGroup +from .test_attrs import TestAttributes, zarr_version # noqa +from .test_core import TestArray +from .test_hierarchy import TestGroup class TestAttributesWithThreadSynchronizer(TestAttributes): From b82311db3400c838d52dc7088905027690e75dda Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Fri, 2 Feb 2024 21:52:03 -0500 Subject: [PATCH 0418/1078] chore: bump minimum python/numpy versions aligned with spec0000 targetting a 2024-Q2 release (#1638) --- pyproject.toml | 7 +++---- requirements_dev_numpy.txt | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 22ea19f28f..4cd3b05b65 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,10 +10,10 @@ readme = { file = "README.md", content-type = "text/markdown" } maintainers = [ { name = "Alistair Miles", email = "alimanfoo@googlemail.com" } ] -requires-python = ">=3.8" +requires-python = ">=3.10" dependencies = [ 'asciitree', - 'numpy>=1.20,!=1.21.0', + 'numpy>=1.24', 'fasteners', 'numcodecs>=0.10.0', ] @@ -30,10 +30,9 @@ classifiers = [ 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: Unix', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', ] license = { text = "MIT" } diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index a6135bd831..ee8970780a 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. 
-numpy==1.24.3 +numpy==1.26.3 From f8874bf48e464bd1f3db963eab637ff6b4ed153f Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Tue, 6 Feb 2024 04:42:26 +0100 Subject: [PATCH 0419/1078] use src layout and use `hatch` for packaging (#1592) * src layout + migrate to hatch * feat: git archival support * rm test generated files * Apply suggestions from code review * Use relative imports --------- Co-authored-by: Saransh Chopra --- .git_archival.txt | 4 ++++ .gitattributes | 1 + .gitignore | 4 ++-- .pre-commit-config.yaml | 2 +- docs/contributing.rst | 2 +- pyproject.toml | 19 +++++++----------- {zarr => src/zarr}/__init__.py | 2 +- {zarr => src/zarr}/_storage/__init__.py | 0 {zarr => src/zarr}/_storage/absstore.py | 0 {zarr => src/zarr}/_storage/store.py | 0 {zarr => src/zarr}/_storage/v3.py | 2 +- .../zarr}/_storage/v3_storage_transformers.py | 0 {zarr => src/zarr}/attrs.py | 0 {zarr => src/zarr}/codecs.py | 0 {zarr => src/zarr}/context.py | 0 {zarr => src/zarr}/convenience.py | 0 {zarr => src/zarr}/core.py | 0 {zarr => src/zarr}/creation.py | 0 {zarr => src/zarr}/errors.py | 0 {zarr => src/zarr}/hierarchy.py | 0 {zarr => src/zarr}/indexing.py | 0 {zarr => src/zarr}/meta.py | 0 {zarr => src/zarr}/meta_v1.py | 0 {zarr => src/zarr}/n5.py | 0 {zarr => src/zarr}/storage.py | 0 {zarr => src/zarr}/sync.py | 0 {zarr => src/zarr}/util.py | 0 {zarr => src/zarr}/v3/__init__.py | 0 {zarr/tests => src/zarr/v3/abc}/__init__.py | 0 {zarr => src/zarr}/v3/abc/array.py | 0 {zarr => src/zarr}/v3/abc/codec.py | 0 {zarr => src/zarr}/v3/abc/group.py | 0 {zarr => src/zarr}/v3/abc/store.py | 0 {zarr => src/zarr}/v3/array.py | 0 {zarr => src/zarr}/v3/array_v2.py | 0 {zarr => src/zarr}/v3/codecs/__init__.py | 0 {zarr => src/zarr}/v3/codecs/blosc.py | 0 {zarr => src/zarr}/v3/codecs/bytes.py | 0 {zarr => src/zarr}/v3/codecs/crc32c_.py | 0 {zarr => src/zarr}/v3/codecs/gzip.py | 0 {zarr => src/zarr}/v3/codecs/registry.py | 0 {zarr => src/zarr}/v3/codecs/sharding.py | 0 {zarr => src/zarr}/v3/codecs/transpose.py | 0 {zarr => src/zarr}/v3/codecs/zstd.py | 0 {zarr => src/zarr}/v3/common.py | 0 {zarr => src/zarr}/v3/group.py | 0 {zarr => src/zarr}/v3/group_v2.py | 0 {zarr => src/zarr}/v3/indexing.py | 0 {zarr => src/zarr}/v3/metadata.py | 0 {zarr => src/zarr}/v3/store.py | 0 {zarr => src/zarr}/v3/sync.py | 0 {zarr/v3/abc => tests}/__init__.py | 0 {zarr/tests => tests}/conftest.py | 0 {zarr/tests => tests}/data/store.zip | Bin {zarr/tests => tests}/data/store/foo | 0 {zarr/tests => tests}/test_attrs.py | 0 {zarr/tests => tests}/test_codecs_v3.py | 0 {zarr/tests => tests}/test_convenience.py | 0 {zarr/tests => tests}/test_core.py | 2 +- {zarr/tests => tests}/test_creation.py | 0 {zarr/tests => tests}/test_dim_separator.py | 0 {zarr/tests => tests}/test_filters.py | 0 {zarr/tests => tests}/test_hierarchy.py | 0 {zarr/tests => tests}/test_indexing.py | 0 {zarr/tests => tests}/test_info.py | 0 {zarr/tests => tests}/test_meta.py | 0 {zarr/tests => tests}/test_meta_array.py | 0 {zarr/tests => tests}/test_n5.py | 0 {zarr/tests => tests}/test_storage.py | 0 {zarr/tests => tests}/test_storage_v3.py | 0 {zarr/tests => tests}/test_sync.py | 0 {zarr/tests => tests}/test_util.py | 0 {zarr/tests => tests}/util.py | 0 73 files changed, 19 insertions(+), 19 deletions(-) create mode 100644 .git_archival.txt rename {zarr => src/zarr}/__init__.py (96%) rename {zarr => src/zarr}/_storage/__init__.py (100%) rename {zarr => src/zarr}/_storage/absstore.py (100%) rename {zarr => src/zarr}/_storage/store.py (100%) rename {zarr => 
src/zarr}/_storage/v3.py (99%) rename {zarr => src/zarr}/_storage/v3_storage_transformers.py (100%) rename {zarr => src/zarr}/attrs.py (100%) rename {zarr => src/zarr}/codecs.py (100%) rename {zarr => src/zarr}/context.py (100%) rename {zarr => src/zarr}/convenience.py (100%) rename {zarr => src/zarr}/core.py (100%) rename {zarr => src/zarr}/creation.py (100%) rename {zarr => src/zarr}/errors.py (100%) rename {zarr => src/zarr}/hierarchy.py (100%) rename {zarr => src/zarr}/indexing.py (100%) rename {zarr => src/zarr}/meta.py (100%) rename {zarr => src/zarr}/meta_v1.py (100%) rename {zarr => src/zarr}/n5.py (100%) rename {zarr => src/zarr}/storage.py (100%) rename {zarr => src/zarr}/sync.py (100%) rename {zarr => src/zarr}/util.py (100%) rename {zarr => src/zarr}/v3/__init__.py (100%) rename {zarr/tests => src/zarr/v3/abc}/__init__.py (100%) rename {zarr => src/zarr}/v3/abc/array.py (100%) rename {zarr => src/zarr}/v3/abc/codec.py (100%) rename {zarr => src/zarr}/v3/abc/group.py (100%) rename {zarr => src/zarr}/v3/abc/store.py (100%) rename {zarr => src/zarr}/v3/array.py (100%) rename {zarr => src/zarr}/v3/array_v2.py (100%) rename {zarr => src/zarr}/v3/codecs/__init__.py (100%) rename {zarr => src/zarr}/v3/codecs/blosc.py (100%) rename {zarr => src/zarr}/v3/codecs/bytes.py (100%) rename {zarr => src/zarr}/v3/codecs/crc32c_.py (100%) rename {zarr => src/zarr}/v3/codecs/gzip.py (100%) rename {zarr => src/zarr}/v3/codecs/registry.py (100%) rename {zarr => src/zarr}/v3/codecs/sharding.py (100%) rename {zarr => src/zarr}/v3/codecs/transpose.py (100%) rename {zarr => src/zarr}/v3/codecs/zstd.py (100%) rename {zarr => src/zarr}/v3/common.py (100%) rename {zarr => src/zarr}/v3/group.py (100%) rename {zarr => src/zarr}/v3/group_v2.py (100%) rename {zarr => src/zarr}/v3/indexing.py (100%) rename {zarr => src/zarr}/v3/metadata.py (100%) rename {zarr => src/zarr}/v3/store.py (100%) rename {zarr => src/zarr}/v3/sync.py (100%) rename {zarr/v3/abc => tests}/__init__.py (100%) rename {zarr/tests => tests}/conftest.py (100%) rename {zarr/tests => tests}/data/store.zip (100%) rename {zarr/tests => tests}/data/store/foo (100%) rename {zarr/tests => tests}/test_attrs.py (100%) rename {zarr/tests => tests}/test_codecs_v3.py (100%) rename {zarr/tests => tests}/test_convenience.py (100%) rename {zarr/tests => tests}/test_core.py (99%) rename {zarr/tests => tests}/test_creation.py (100%) rename {zarr/tests => tests}/test_dim_separator.py (100%) rename {zarr/tests => tests}/test_filters.py (100%) rename {zarr/tests => tests}/test_hierarchy.py (100%) rename {zarr/tests => tests}/test_indexing.py (100%) rename {zarr/tests => tests}/test_info.py (100%) rename {zarr/tests => tests}/test_meta.py (100%) rename {zarr/tests => tests}/test_meta_array.py (100%) rename {zarr/tests => tests}/test_n5.py (100%) rename {zarr/tests => tests}/test_storage.py (100%) rename {zarr/tests => tests}/test_storage_v3.py (100%) rename {zarr/tests => tests}/test_sync.py (100%) rename {zarr/tests => tests}/test_util.py (100%) rename {zarr/tests => tests}/util.py (100%) diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000000..8fb235d704 --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ diff --git a/.gitattributes b/.gitattributes index b6115e6e49..57eb8a8807 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,3 @@ *.py linguist-language=python *.ipynb 
linguist-documentation +.git_archival.txt export-subst diff --git a/.gitignore b/.gitignore index 7de405d8a0..53b6cd356d 100644 --- a/.gitignore +++ b/.gitignore @@ -62,8 +62,8 @@ target/ # Jupyter .ipynb_checkpoints/ -# setuptools-scm -zarr/version.py +# VCS versioning +src/zarr/_version.py # emacs *~ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a8ee599137..10aff8b4c6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: rev: v1.7.1 hooks: - id: mypy - files: zarr + files: src args: [] additional_dependencies: - types-redis diff --git a/docs/contributing.rst b/docs/contributing.rst index 91606b7276..a65b3d104d 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -335,7 +335,7 @@ of the storage specification that is currently implemented is stored under the Note that the Zarr test suite includes a data fixture and tests to try and ensure that data format compatibility is not accidentally broken. See the -:func:`test_format_compatibility` function in the :mod:`zarr.tests.test_storage` module +:func:`test_format_compatibility` function in the :mod:`tests.test_storage` module for details. When to make a release diff --git a/pyproject.toml b/pyproject.toml index 4cd3b05b65..4b3b07f23b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] -requires = ["setuptools>=64.0.0", "setuptools-scm>1.5.4"] -build-backend = "setuptools.build_meta" +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" [project] @@ -34,7 +34,7 @@ classifiers = [ 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', ] -license = { text = "MIT" } +license = "MIT" [project.optional-dependencies] jupyter = [ @@ -68,18 +68,13 @@ exclude_lines = [ [tool.coverage.run] omit = [ - "zarr/meta_v1.py", + "src/zarr/meta_v1.py", "bench/compress_normal.py", ] -[tool.setuptools] -packages = ["zarr", "zarr._storage", "zarr.tests"] -license-files = ["LICENSE.txt"] - -[tool.setuptools_scm] -version_scheme = "guess-next-dev" -local_scheme = "dirty-tag" -write_to = "zarr/version.py" +[tool.hatch] +version.source = "vcs" +build.hooks.vcs.version-file = "src/zarr/_version.py" [tool.ruff] line-length = 100 diff --git a/zarr/__init__.py b/src/zarr/__init__.py similarity index 96% rename from zarr/__init__.py rename to src/zarr/__init__.py index 6cecb40af8..b3c1e05b7e 100644 --- a/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -49,7 +49,7 @@ ZipStore, ) from zarr.sync import ProcessSynchronizer, ThreadSynchronizer -from zarr.version import version as __version__ +from zarr._version import version as __version__ # in case setuptools scm screw up and find version to be 0.0.0 assert not __version__.startswith("0.0.0") diff --git a/zarr/_storage/__init__.py b/src/zarr/_storage/__init__.py similarity index 100% rename from zarr/_storage/__init__.py rename to src/zarr/_storage/__init__.py diff --git a/zarr/_storage/absstore.py b/src/zarr/_storage/absstore.py similarity index 100% rename from zarr/_storage/absstore.py rename to src/zarr/_storage/absstore.py diff --git a/zarr/_storage/store.py b/src/zarr/_storage/store.py similarity index 100% rename from zarr/_storage/store.py rename to src/zarr/_storage/store.py diff --git a/zarr/_storage/v3.py b/src/zarr/_storage/v3.py similarity index 99% rename from zarr/_storage/v3.py rename to src/zarr/_storage/v3.py index 00dc085dac..8ab54984b7 100644 --- a/zarr/_storage/v3.py +++ b/src/zarr/_storage/v3.py @@ -163,7 +163,7 @@ def setitems(self, values): values = 
{self._normalize_key(key): val for key, val in values.items()} # initialize the /data/root/... folder corresponding to the array! - # Note: zarr.tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails + # Note: tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails # without this explicit creation of directories subdirectories = set(os.path.dirname(v) for v in values.keys()) for subdirectory in subdirectories: diff --git a/zarr/_storage/v3_storage_transformers.py b/src/zarr/_storage/v3_storage_transformers.py similarity index 100% rename from zarr/_storage/v3_storage_transformers.py rename to src/zarr/_storage/v3_storage_transformers.py diff --git a/zarr/attrs.py b/src/zarr/attrs.py similarity index 100% rename from zarr/attrs.py rename to src/zarr/attrs.py diff --git a/zarr/codecs.py b/src/zarr/codecs.py similarity index 100% rename from zarr/codecs.py rename to src/zarr/codecs.py diff --git a/zarr/context.py b/src/zarr/context.py similarity index 100% rename from zarr/context.py rename to src/zarr/context.py diff --git a/zarr/convenience.py b/src/zarr/convenience.py similarity index 100% rename from zarr/convenience.py rename to src/zarr/convenience.py diff --git a/zarr/core.py b/src/zarr/core.py similarity index 100% rename from zarr/core.py rename to src/zarr/core.py diff --git a/zarr/creation.py b/src/zarr/creation.py similarity index 100% rename from zarr/creation.py rename to src/zarr/creation.py diff --git a/zarr/errors.py b/src/zarr/errors.py similarity index 100% rename from zarr/errors.py rename to src/zarr/errors.py diff --git a/zarr/hierarchy.py b/src/zarr/hierarchy.py similarity index 100% rename from zarr/hierarchy.py rename to src/zarr/hierarchy.py diff --git a/zarr/indexing.py b/src/zarr/indexing.py similarity index 100% rename from zarr/indexing.py rename to src/zarr/indexing.py diff --git a/zarr/meta.py b/src/zarr/meta.py similarity index 100% rename from zarr/meta.py rename to src/zarr/meta.py diff --git a/zarr/meta_v1.py b/src/zarr/meta_v1.py similarity index 100% rename from zarr/meta_v1.py rename to src/zarr/meta_v1.py diff --git a/zarr/n5.py b/src/zarr/n5.py similarity index 100% rename from zarr/n5.py rename to src/zarr/n5.py diff --git a/zarr/storage.py b/src/zarr/storage.py similarity index 100% rename from zarr/storage.py rename to src/zarr/storage.py diff --git a/zarr/sync.py b/src/zarr/sync.py similarity index 100% rename from zarr/sync.py rename to src/zarr/sync.py diff --git a/zarr/util.py b/src/zarr/util.py similarity index 100% rename from zarr/util.py rename to src/zarr/util.py diff --git a/zarr/v3/__init__.py b/src/zarr/v3/__init__.py similarity index 100% rename from zarr/v3/__init__.py rename to src/zarr/v3/__init__.py diff --git a/zarr/tests/__init__.py b/src/zarr/v3/abc/__init__.py similarity index 100% rename from zarr/tests/__init__.py rename to src/zarr/v3/abc/__init__.py diff --git a/zarr/v3/abc/array.py b/src/zarr/v3/abc/array.py similarity index 100% rename from zarr/v3/abc/array.py rename to src/zarr/v3/abc/array.py diff --git a/zarr/v3/abc/codec.py b/src/zarr/v3/abc/codec.py similarity index 100% rename from zarr/v3/abc/codec.py rename to src/zarr/v3/abc/codec.py diff --git a/zarr/v3/abc/group.py b/src/zarr/v3/abc/group.py similarity index 100% rename from zarr/v3/abc/group.py rename to src/zarr/v3/abc/group.py diff --git a/zarr/v3/abc/store.py b/src/zarr/v3/abc/store.py similarity index 100% rename from zarr/v3/abc/store.py rename to src/zarr/v3/abc/store.py diff --git a/zarr/v3/array.py b/src/zarr/v3/array.py similarity index 100% 
rename from zarr/v3/array.py rename to src/zarr/v3/array.py diff --git a/zarr/v3/array_v2.py b/src/zarr/v3/array_v2.py similarity index 100% rename from zarr/v3/array_v2.py rename to src/zarr/v3/array_v2.py diff --git a/zarr/v3/codecs/__init__.py b/src/zarr/v3/codecs/__init__.py similarity index 100% rename from zarr/v3/codecs/__init__.py rename to src/zarr/v3/codecs/__init__.py diff --git a/zarr/v3/codecs/blosc.py b/src/zarr/v3/codecs/blosc.py similarity index 100% rename from zarr/v3/codecs/blosc.py rename to src/zarr/v3/codecs/blosc.py diff --git a/zarr/v3/codecs/bytes.py b/src/zarr/v3/codecs/bytes.py similarity index 100% rename from zarr/v3/codecs/bytes.py rename to src/zarr/v3/codecs/bytes.py diff --git a/zarr/v3/codecs/crc32c_.py b/src/zarr/v3/codecs/crc32c_.py similarity index 100% rename from zarr/v3/codecs/crc32c_.py rename to src/zarr/v3/codecs/crc32c_.py diff --git a/zarr/v3/codecs/gzip.py b/src/zarr/v3/codecs/gzip.py similarity index 100% rename from zarr/v3/codecs/gzip.py rename to src/zarr/v3/codecs/gzip.py diff --git a/zarr/v3/codecs/registry.py b/src/zarr/v3/codecs/registry.py similarity index 100% rename from zarr/v3/codecs/registry.py rename to src/zarr/v3/codecs/registry.py diff --git a/zarr/v3/codecs/sharding.py b/src/zarr/v3/codecs/sharding.py similarity index 100% rename from zarr/v3/codecs/sharding.py rename to src/zarr/v3/codecs/sharding.py diff --git a/zarr/v3/codecs/transpose.py b/src/zarr/v3/codecs/transpose.py similarity index 100% rename from zarr/v3/codecs/transpose.py rename to src/zarr/v3/codecs/transpose.py diff --git a/zarr/v3/codecs/zstd.py b/src/zarr/v3/codecs/zstd.py similarity index 100% rename from zarr/v3/codecs/zstd.py rename to src/zarr/v3/codecs/zstd.py diff --git a/zarr/v3/common.py b/src/zarr/v3/common.py similarity index 100% rename from zarr/v3/common.py rename to src/zarr/v3/common.py diff --git a/zarr/v3/group.py b/src/zarr/v3/group.py similarity index 100% rename from zarr/v3/group.py rename to src/zarr/v3/group.py diff --git a/zarr/v3/group_v2.py b/src/zarr/v3/group_v2.py similarity index 100% rename from zarr/v3/group_v2.py rename to src/zarr/v3/group_v2.py diff --git a/zarr/v3/indexing.py b/src/zarr/v3/indexing.py similarity index 100% rename from zarr/v3/indexing.py rename to src/zarr/v3/indexing.py diff --git a/zarr/v3/metadata.py b/src/zarr/v3/metadata.py similarity index 100% rename from zarr/v3/metadata.py rename to src/zarr/v3/metadata.py diff --git a/zarr/v3/store.py b/src/zarr/v3/store.py similarity index 100% rename from zarr/v3/store.py rename to src/zarr/v3/store.py diff --git a/zarr/v3/sync.py b/src/zarr/v3/sync.py similarity index 100% rename from zarr/v3/sync.py rename to src/zarr/v3/sync.py diff --git a/zarr/v3/abc/__init__.py b/tests/__init__.py similarity index 100% rename from zarr/v3/abc/__init__.py rename to tests/__init__.py diff --git a/zarr/tests/conftest.py b/tests/conftest.py similarity index 100% rename from zarr/tests/conftest.py rename to tests/conftest.py diff --git a/zarr/tests/data/store.zip b/tests/data/store.zip similarity index 100% rename from zarr/tests/data/store.zip rename to tests/data/store.zip diff --git a/zarr/tests/data/store/foo b/tests/data/store/foo similarity index 100% rename from zarr/tests/data/store/foo rename to tests/data/store/foo diff --git a/zarr/tests/test_attrs.py b/tests/test_attrs.py similarity index 100% rename from zarr/tests/test_attrs.py rename to tests/test_attrs.py diff --git a/zarr/tests/test_codecs_v3.py b/tests/test_codecs_v3.py similarity index 100% rename from 
zarr/tests/test_codecs_v3.py rename to tests/test_codecs_v3.py diff --git a/zarr/tests/test_convenience.py b/tests/test_convenience.py similarity index 100% rename from zarr/tests/test_convenience.py rename to tests/test_convenience.py diff --git a/zarr/tests/test_core.py b/tests/test_core.py similarity index 99% rename from zarr/tests/test_core.py rename to tests/test_core.py index c5a43ed39d..87bf72e9b0 100644 --- a/zarr/tests/test_core.py +++ b/tests/test_core.py @@ -35,7 +35,7 @@ BaseStore, v3_api_available, ) -from .._storage.v3_storage_transformers import ( +from zarr._storage.v3_storage_transformers import ( DummyStorageTransfomer, ShardingStorageTransformer, v3_sharding_available, diff --git a/zarr/tests/test_creation.py b/tests/test_creation.py similarity index 100% rename from zarr/tests/test_creation.py rename to tests/test_creation.py diff --git a/zarr/tests/test_dim_separator.py b/tests/test_dim_separator.py similarity index 100% rename from zarr/tests/test_dim_separator.py rename to tests/test_dim_separator.py diff --git a/zarr/tests/test_filters.py b/tests/test_filters.py similarity index 100% rename from zarr/tests/test_filters.py rename to tests/test_filters.py diff --git a/zarr/tests/test_hierarchy.py b/tests/test_hierarchy.py similarity index 100% rename from zarr/tests/test_hierarchy.py rename to tests/test_hierarchy.py diff --git a/zarr/tests/test_indexing.py b/tests/test_indexing.py similarity index 100% rename from zarr/tests/test_indexing.py rename to tests/test_indexing.py diff --git a/zarr/tests/test_info.py b/tests/test_info.py similarity index 100% rename from zarr/tests/test_info.py rename to tests/test_info.py diff --git a/zarr/tests/test_meta.py b/tests/test_meta.py similarity index 100% rename from zarr/tests/test_meta.py rename to tests/test_meta.py diff --git a/zarr/tests/test_meta_array.py b/tests/test_meta_array.py similarity index 100% rename from zarr/tests/test_meta_array.py rename to tests/test_meta_array.py diff --git a/zarr/tests/test_n5.py b/tests/test_n5.py similarity index 100% rename from zarr/tests/test_n5.py rename to tests/test_n5.py diff --git a/zarr/tests/test_storage.py b/tests/test_storage.py similarity index 100% rename from zarr/tests/test_storage.py rename to tests/test_storage.py diff --git a/zarr/tests/test_storage_v3.py b/tests/test_storage_v3.py similarity index 100% rename from zarr/tests/test_storage_v3.py rename to tests/test_storage_v3.py diff --git a/zarr/tests/test_sync.py b/tests/test_sync.py similarity index 100% rename from zarr/tests/test_sync.py rename to tests/test_sync.py diff --git a/zarr/tests/test_util.py b/tests/test_util.py similarity index 100% rename from zarr/tests/test_util.py rename to tests/test_util.py diff --git a/zarr/tests/util.py b/tests/util.py similarity index 100% rename from zarr/tests/util.py rename to tests/util.py From 26cd9fa652d3912041678078397c33b09f671d09 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 6 Feb 2024 08:59:41 -0800 Subject: [PATCH 0420/1078] temporarily disable mypy in v3 directory (#1649) * temporarily disable mypy in v3 directory * use hatch in release workflow * bumpy python version in docs and test action --- .github/workflows/python-package.yml | 2 +- .github/workflows/releases.yml | 17 ++++------------- .pre-commit-config.yaml | 1 + .readthedocs.yaml | 2 +- 4 files changed, 7 insertions(+), 15 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index aa7158f1cf..5775d85c24 100644 --- 
a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.10', '3.11'] numpy_version: ['>=1.22.0', '==1.20.*'] exclude: - python-version: '3.10' diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index c08bfc6677..4d9565a6ab 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -19,23 +19,14 @@ jobs: - uses: actions/setup-python@v4.7.1 name: Install Python with: - python-version: '3.8' + python-version: '3.11' - name: Install PyBuild run: | - python -m pip install 'build!=0.1' setuptools-scm - + python -m pip install --upgrade pip + pip install hatch - name: Build wheel and sdist - run: | - python -m build - git describe - pwd - if [ -f dist/zarr-0.0.0.tar.gz ]; then - echo "WRONG VERSION NUMBER" - exit 1 - else - echo "All seem good" - fi + run: hatch build - uses: actions/upload-artifact@v3 with: name: releases diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10aff8b4c6..79344604a5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,6 +31,7 @@ repos: hooks: - id: mypy files: src + exclude: ^src/zarr/v3 args: [] additional_dependencies: - types-redis diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 08cac8d78d..3a2fb6622b 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -3,7 +3,7 @@ version: 2 build: os: ubuntu-20.04 tools: - python: "3.9" + python: "3.10" sphinx: configuration: docs/conf.py From 644423c4972eb5d1c1c0acd82f279f68c6ecb176 Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Tue, 6 Feb 2024 15:24:17 -0500 Subject: [PATCH 0421/1078] create hatch test env (#1650) * create hatch test env * add other test deps and github worklow * skip bsddb3 test * Update pyproject.toml --------- Co-authored-by: Joe Hamman Co-authored-by: Davis Bennett --- .github/workflows/test-v3.yml | 32 ++++++++++++++++++++++++++++++++ pyproject.toml | 18 ++++++++++++++++++ tests/test_core.py | 1 + 3 files changed, 51 insertions(+) create mode 100644 .github/workflows/test-v3.yml diff --git a/.github/workflows/test-v3.yml b/.github/workflows/test-v3.yml new file mode 100644 index 0000000000..e0a4117290 --- /dev/null +++ b/.github/workflows/test-v3.yml @@ -0,0 +1,32 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Test V3 + +on: + push: + branches: [ v3 ] + pull_request: + branches: [ v3 ] + +jobs: + run-tests: + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + cache: 'pip' + - name: Install Hatch + run: | + python -m pip install --upgrade pip + pip install hatch + - name: Set Up Hatch Env + run: | + hatch env create + - name: Run Tests + run: | + hatch run test:run \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 4b3b07f23b..df4ed7dea1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,6 +76,24 @@ omit = [ version.source = "vcs" build.hooks.vcs.version-file = "src/zarr/_version.py" +[tool.hatch.envs.test] +extra-dependencies = [ + "attrs", + "cattrs", + "coverage", + "pytest", + "pytest-cov", + "msgpack", + "lmdb", + "zstandard", + "crc32c", + "pytest-asyncio" +] + +[tool.hatch.envs.test.scripts] +run-coverage = 
"pytest --cov-config=pyproject.toml --cov=pkg --cov=tests" +run = "run-coverage --no-cov" + [tool.ruff] line-length = 100 exclude = [ diff --git a/tests/test_core.py b/tests/test_core.py index 87bf72e9b0..e8d527c4ef 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -2040,6 +2040,7 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skip(reason="can't get bsddb3 to work on CI right now") class TestArrayWithDBMStoreBerkeleyDB(TestArray): def create_store(self): bsddb3 = pytest.importorskip("bsddb3") From 9e3e8c01d134ad2706b57882caf1b2e9147231cd Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Tue, 6 Feb 2024 22:36:03 -0500 Subject: [PATCH 0422/1078] removed unused environments and workflows (#1651) * removed unused environments and workflows * added v3 contributor guide * add shell --- .github/workflows/minimal.yml | 41 ------------- .github/workflows/python-package.yml | 88 ---------------------------- README-v3.md | 49 ++++++++++++++++ environment.yml | 14 ----- requirements_dev_minimal.txt | 8 --- requirements_dev_numpy.txt | 4 -- requirements_dev_optional.txt | 23 -------- 7 files changed, 49 insertions(+), 178 deletions(-) delete mode 100644 .github/workflows/minimal.yml delete mode 100644 .github/workflows/python-package.yml create mode 100644 README-v3.md delete mode 100644 environment.yml delete mode 100644 requirements_dev_minimal.txt delete mode 100644 requirements_dev_numpy.txt delete mode 100644 requirements_dev_optional.txt diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml deleted file mode 100644 index 2c0cd45ca9..0000000000 --- a/.github/workflows/minimal.yml +++ /dev/null @@ -1,41 +0,0 @@ -# This workflow simulates the environment found during a conda-forge build -# and makes sure that Zarr can run without fsspec and other additional libraries. -name: Minimal installation - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - minimum_build: - - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.3.0 - with: - channels: conda-forge - environment-file: environment.yml - activate-environment: minimal - - name: Tests - shell: "bash -l {0}" - env: - ZARR_V3_EXPERIMENTAL_API: 1 - ZARR_V3_SHARDING: 1 - run: | - conda activate minimal - python -m pip install . 
- pytest -svx --timeout=300 - - name: Fixture generation - shell: "bash -l {0}" - env: - ZARR_V3_EXPERIMENTAL_API: 1 - ZARR_V3_SHARDING: 1 - run: | - conda activate minimal - rm -rf fixture/ - pytest -svx --timeout=300 zarr/tests/test_dim_separator.py zarr/tests/test_storage.py - # This simulates fixture-less tests in conda and debian packaging diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml deleted file mode 100644 index 5775d85c24..0000000000 --- a/.github/workflows/python-package.yml +++ /dev/null @@ -1,88 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Linux Testing - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.10', '3.11'] - numpy_version: ['>=1.22.0', '==1.20.*'] - exclude: - - python-version: '3.10' - numpy_version: '==1.20.*' - - python-version: '3.11' - numpy_version: '==1.20.*' - services: - redis: - image: redis - # Set health checks to wait until redis has started - options: >- - --health-cmd "redis-cli ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 6379:6379 - mongodb: - image: mongo:4.4.11 - ports: - - 27017:27017 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.3.0 - with: - channels: conda-forge - python-version: ${{ matrix.python-version }} - env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true - # Runs a set of commands using the runners shell - - name: Create Conda environment with the rights deps - shell: "bash -l {0}" - run: | - conda create -n zarr-env python==${{matrix.python-version}} bsddb3 numcodecs lmdb pip nodejs flake8 mypy - conda activate zarr-env - npm install -g azurite - - name: Install dependencies - shell: "bash -l {0}" - run: | - conda activate zarr-env - python -m pip install --upgrade pip - python -m pip install -U pip setuptools wheel line_profiler - python -m pip install -rrequirements_dev_minimal.txt numpy${{matrix.numpy_version}} -rrequirements_dev_optional.txt pymongo redis - python -m pip install . - python -m pip freeze - - name: Tests - shell: "bash -l {0}" - env: - COVERAGE_FILE: .coverage.${{matrix.python-version}}.${{matrix.numpy_version}} - ZARR_TEST_ABS: 1 - ZARR_TEST_MONGO: 1 - ZARR_TEST_REDIS: 1 - ZARR_V3_EXPERIMENTAL_API: 1 - ZARR_V3_SHARDING: 1 - run: | - conda activate zarr-env - mkdir ~/blob_emulator - azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log & - pytest --cov=zarr --cov-config=pyproject.toml --doctest-plus --cov-report xml --cov=./ --timeout=300 - - uses: codecov/codecov-action@v3 - with: - token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos - #files: ./coverage1.xml,./coverage2.xml # optional - #flags: unittests # optional - #name: codecov-umbrella # optional - #fail_ci_if_error: true # optional (default = false) - verbose: true # optional (default = false) diff --git a/README-v3.md b/README-v3.md new file mode 100644 index 0000000000..8348038e5a --- /dev/null +++ b/README-v3.md @@ -0,0 +1,49 @@ +# V3 Contributor Guide + +A bare-bones guide to contributing to V3. + +Developed for the Feb. 2024 Zarr Sprint. + +## Clone V3 branch + +[Fork](https://github.com/zarr-developers/zarr-python/fork) zarr-python and clone it locally. 
+ +``` +git clone {your remote} +git remote add upstream https://github.com/zarr-developers/zarr-python +git fetch upstream +git checkout --track upstream/v3 +``` + +## Set up your environment + +Zarr uses [hatch](https://hatch.pypa.io/) for its build system. + +``` +mamba install hatch +``` + +or + +``` +pip install hatch +``` + +Then + +``` +hatch env create test +``` + +## Run the Tests + +``` +hatch run test:run +``` + +or + +``` +hatch -e test shell +pytest -v +``` \ No newline at end of file diff --git a/environment.yml b/environment.yml deleted file mode 100644 index dc99507427..0000000000 --- a/environment.yml +++ /dev/null @@ -1,14 +0,0 @@ -channels: - - conda-forge - - defaults -dependencies: - - wheel - - numcodecs >= 0.6.4 - - numpy >= 1.20 - - pip - - pip: - - asciitree - - fasteners - - pytest - - pytest-timeout - - setuptools_scm diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt deleted file mode 100644 index e2be6eb825..0000000000 --- a/requirements_dev_minimal.txt +++ /dev/null @@ -1,8 +0,0 @@ -# library requirements -asciitree==0.3.3 -fasteners==0.19 -numcodecs==0.11.0 -msgpack-python==0.5.6 -setuptools-scm==8.0.4 -# test requirements -pytest==7.4.3 diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt deleted file mode 100644 index ee8970780a..0000000000 --- a/requirements_dev_numpy.txt +++ /dev/null @@ -1,4 +0,0 @@ -# Break this out into a separate file to allow testing against -# different versions of numpy. This file should pin to the latest -# numpy version. -numpy==1.26.3 diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt deleted file mode 100644 index f3ea80a546..0000000000 --- a/requirements_dev_optional.txt +++ /dev/null @@ -1,23 +0,0 @@ -# optional library requirements -# bsddb3==6.2.6; sys_platform != 'win32' -lmdb==1.4.1; sys_platform != 'win32' -# optional library requirements for Jupyter -ipytree==0.2.2 -ipywidgets==8.1.0 -# optional library requirements for services -# don't let pyup change pinning for azure-storage-blob, need to pin to older -# version to get compatibility with azure storage emulator on appveyor (FIXME) -azure-storage-blob==12.16.0 # pyup: ignore -redis==5.0.1 -types-redis -types-setuptools -pymongo==4.5.0 -# optional test requirements -coverage -pytest-cov==4.1.0 -pytest-doctestplus==1.0.0 -pytest-timeout==2.2.0 -h5py==3.10.0 -fsspec==2023.10.0 -s3fs==2023.10.0 -moto[server]>=4.0.8 From 3bc305ed4f3db394dfc989c88596b7355dea13db Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 6 Feb 2024 20:52:51 -0800 Subject: [PATCH 0423/1078] Listable V3 Stores (#1634) * basic support for v2 and v3 groups * second rev on group apis - removed abcs for groups/arrays - improved return types in group.py - warn (temporarily) when an implicit group is found - add attributes.py with Attributes class add test file wip * progress on store interface (memory and local stores only) fixes after rebas e make all tests pass --- .gitignore | 1 + src/zarr/v3/__init__.py | 13 +- src/zarr/v3/abc/array.py | 140 - src/zarr/v3/abc/group.py | 86 - src/zarr/v3/abc/store.py | 69 +- src/zarr/v3/array.py | 33 +- src/zarr/v3/attributes.py | 32 + src/zarr/v3/codecs/sharding.py | 12 +- src/zarr/v3/config.py | 19 + src/zarr/v3/group.py | 436 ++- src/zarr/v3/group_v2.py | 218 -- src/zarr/v3/store.py | 351 --- src/zarr/v3/store/__init__.py | 5 + src/zarr/v3/store/core.py | 83 + src/zarr/v3/store/local.py | 177 ++ src/zarr/v3/store/memory.py | 86 + src/zarr/v3/store/remote.py | 95 + src/zarr/v3/sync.py | 20 + 
tests/test_codecs_v3.py | 53 +- tests/test_group_v3.py | 56 + tests/test_storage.py | 4967 ++++++++++++++++---------------- tests/test_storage_v3.py | 1373 ++++----- 22 files changed, 4193 insertions(+), 4132 deletions(-) delete mode 100644 src/zarr/v3/abc/array.py delete mode 100644 src/zarr/v3/abc/group.py create mode 100644 src/zarr/v3/attributes.py create mode 100644 src/zarr/v3/config.py delete mode 100644 src/zarr/v3/group_v2.py delete mode 100644 src/zarr/v3/store.py create mode 100644 src/zarr/v3/store/__init__.py create mode 100644 src/zarr/v3/store/core.py create mode 100644 src/zarr/v3/store/local.py create mode 100644 src/zarr/v3/store/memory.py create mode 100644 src/zarr/v3/store/remote.py create mode 100644 tests/test_group_v3.py diff --git a/.gitignore b/.gitignore index 53b6cd356d..7d32026e13 100644 --- a/.gitignore +++ b/.gitignore @@ -78,5 +78,6 @@ src/zarr/_version.py #doesnotexist #test_sync* data/* +src/fixture/ .DS_Store diff --git a/src/zarr/v3/__init__.py b/src/zarr/v3/__init__.py index bbf5aa0359..038dff89be 100644 --- a/src/zarr/v3/__init__.py +++ b/src/zarr/v3/__init__.py @@ -6,14 +6,9 @@ from zarr.v3.array import Array # noqa: F401 from zarr.v3.array_v2 import ArrayV2 # noqa: F401 from zarr.v3.group import Group # noqa: F401 -from zarr.v3.group_v2 import GroupV2 # noqa: F401 from zarr.v3.metadata import RuntimeConfiguration, runtime_configuration # noqa: F401 from zarr.v3.store import ( # noqa: F401 - LocalStore, - RemoteStore, - Store, StoreLike, - StorePath, make_store_path, ) from zarr.v3.sync import sync as _sync @@ -22,18 +17,18 @@ async def open_auto_async( store: StoreLike, runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), -) -> Union[Array, ArrayV2, Group, GroupV2]: +) -> Union[Array, ArrayV2, Group]: store_path = make_store_path(store) try: - return await Group.open_or_array(store_path, runtime_configuration=runtime_configuration_) + return await Array.open(store_path, runtime_configuration=runtime_configuration_) except KeyError: - return await GroupV2.open_or_array(store_path, runtime_configuration_) + return await Group.open(store_path, runtime_configuration=runtime_configuration_) def open_auto( store: StoreLike, runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), -) -> Union[Array, ArrayV2, Group, GroupV2]: +) -> Union[Array, ArrayV2, Group]: return _sync( open_auto_async(store, runtime_configuration_), runtime_configuration_.asyncio_loop, diff --git a/src/zarr/v3/abc/array.py b/src/zarr/v3/abc/array.py deleted file mode 100644 index 976aa48618..0000000000 --- a/src/zarr/v3/abc/array.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import annotations -from abc import abstractproperty, abstractmethod, ABC -from typing import Tuple, Any, Dict - -import numpy as np - -from zarr.v3.abc.store import ReadStore, WriteStore -from zarr.v3.common import Selection - - -class BaseArray(ABC): - @abstractproperty - def store_path(self) -> str: # TODO: rename to `path`? - """Path to this array in the underlying store.""" - ... - - @abstractproperty - def dtype(self) -> np.dtype: - """Data type of the array elements. - - Returns - ------- - dtype - array data type - """ - ... - - @abstractproperty - def ndim(self) -> int: - """Number of array dimensions (axes). - - Returns - ------- - int - number of array dimensions (axes) - """ - ... - - @abstractproperty - def shape(self) -> Tuple[int, ...]: - """Array dimensions. - - Returns - ------- - tuple of int - array dimensions - """ - ... 
- - @abstractproperty - def size(self) -> int: - """Number of elements in the array. - - Returns - ------- - int - number of elements in an array. - """ - - @abstractproperty - def attrs(self) -> Dict[str, Any]: - """Array attributes. - - Returns - ------- - dict - user defined attributes - """ - ... - - @abstractproperty - def info(self) -> Any: - """Report some diagnostic information about the array. - - Returns - ------- - out - """ - ... - - -class AsynchronousArray(BaseArray): - """This class can be implemented as a v2 or v3 array""" - - @classmethod - @abstractmethod - async def from_json(cls, zarr_json: Any, store: ReadStore) -> AsynchronousArray: - ... - - @classmethod - @abstractmethod - async def open(cls, store: ReadStore) -> AsynchronousArray: - ... - - @classmethod - @abstractmethod - async def create(cls, store: WriteStore, *, shape, **kwargs) -> AsynchronousArray: - ... - - @abstractmethod - async def getitem(self, selection: Selection): - ... - - @abstractmethod - async def setitem(self, selection: Selection, value: np.ndarray) -> None: - ... - - -class SynchronousArray(BaseArray): - """ - This class can be implemented as a v2 or v3 array - """ - - @classmethod - @abstractmethod - def from_json(cls, zarr_json: Any, store: ReadStore) -> SynchronousArray: - ... - - @classmethod - @abstractmethod - def open(cls, store: ReadStore) -> SynchronousArray: - ... - - @classmethod - @abstractmethod - def create(cls, store: WriteStore, *, shape, **kwargs) -> SynchronousArray: - ... - - @abstractmethod - def __getitem__(self, selection: Selection): # TODO: type as np.ndarray | scalar - ... - - @abstractmethod - def __setitem__(self, selection: Selection, value: np.ndarray) -> None: - ... - - # some day ;) - # @property - # def __array_api_version__(self) -> str: - # return "2022.12" diff --git a/src/zarr/v3/abc/group.py b/src/zarr/v3/abc/group.py deleted file mode 100644 index 02de819894..0000000000 --- a/src/zarr/v3/abc/group.py +++ /dev/null @@ -1,86 +0,0 @@ -from __future__ import annotations - -from abc import abstractproperty, ABC -from collections.abc import MutableMapping -from typing import Dict, Any - - -class BaseGroup(ABC): - @abstractproperty - def attrs(self) -> Dict[str, Any]: - """User-defined attributes.""" - ... - - @abstractproperty - def info(self) -> Any: # TODO: type this later - """Return diagnostic information about the group.""" - ... - - -class AsynchronousGroup(BaseGroup): - pass - # TODO: (considering the following api) - # store_path (rename to path?) - # nchildren - number of child groups + arrays - # children (async iterator) - # contains - check if child exists - # getitem - get child - # group_keys (async iterator) - # groups (async iterator) - # array_keys (async iterator) - # arrays (async iterator) - # visit - # visitkeys - # visitvalues - # tree - # create_group - # require_group - # create_groups - # require_groups - # create_dataset - # require_dataset - # create - # empty - # zeros - # ones - # full - # array - # empty_like - # zeros_like - # ones_like - # full_like - # move - - -class SynchronousGroup(BaseGroup, MutableMapping): - # TODO - think about if we want to keep the MutableMapping abstraction or - pass - # store_path (rename to path?) 
- # __enter__ - # __exit__ - # group_keys - # groups - # array_keys - # arrays - # visit - # visitkeys - # visitvalues - # visititems - # tree - # create_group - # require_group - # create_groups - # require_groups - # create_dataset - # require_dataset - # create - # empty - # zeros - # ones - # full - # array - # empty_like - # zeros_like - # ones_like - # full_like - # move diff --git a/src/zarr/v3/abc/store.py b/src/zarr/v3/abc/store.py index 5469cafe6d..ce5de279c4 100644 --- a/src/zarr/v3/abc/store.py +++ b/src/zarr/v3/abc/store.py @@ -1,20 +1,19 @@ from abc import abstractmethod, ABC -from typing import List, Tuple +from typing import List, Tuple, Optional class Store(ABC): - pass - - -class ReadStore(Store): @abstractmethod - async def get(self, key: str) -> bytes: + async def get( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[bytes]: """Retrieve the value associated with a given key. Parameters ---------- key : str + byte_range : tuple[int, Optional[int]], optional Returns ------- @@ -23,12 +22,14 @@ async def get(self, key: str) -> bytes: ... @abstractmethod - async def get_partial_values(self, key_ranges: List[Tuple[str, int]]) -> bytes: + async def get_partial_values( + self, key_ranges: List[Tuple[str, Tuple[int, int]]] + ) -> List[bytes]: """Retrieve possibly partial values from given key_ranges. Parameters ---------- - key_ranges : list[tuple[str, int]] + key_ranges : list[tuple[str, tuple[int, int]]] Ordered set of key, range pairs, a key may occur multiple times with different ranges Returns @@ -38,8 +39,26 @@ async def get_partial_values(self, key_ranges: List[Tuple[str, int]]) -> bytes: """ ... + @abstractmethod + async def exists(self, key: str) -> bool: + """Check if a key exists in the store. + + Parameters + ---------- + key : str + + Returns + ------- + bool + """ + ... + + @property + @abstractmethod + def supports_writes(self) -> bool: + """Does the store support writes?""" + ... -class WriteStore(ReadStore): @abstractmethod async def set(self, key: str, value: bytes) -> None: """Store a (key, value) pair. @@ -51,6 +70,22 @@ async def set(self, key: str, value: bytes) -> None: """ ... + @abstractmethod + async def delete(self, key: str) -> None: + """Remove a key from the store + + Parameters + ---------- + key : str + """ + ... + + @property + @abstractmethod + def supports_partial_writes(self) -> bool: + """Does the store support partial writes?""" + ... + @abstractmethod async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: """Store values at a given key, starting at byte range_start. @@ -64,8 +99,12 @@ async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes] """ ... + @property + @abstractmethod + def supports_listing(self) -> bool: + """Does the store support listing?""" + ... -class ListMixin: @abstractmethod async def list(self) -> List[str]: """Retrieve all keys in the store. @@ -78,7 +117,7 @@ async def list(self) -> List[str]: @abstractmethod async def list_prefix(self, prefix: str) -> List[str]: - """Retrieve all keys in the store. + """Retrieve all keys in the store with a given prefix. Parameters ---------- @@ -105,11 +144,3 @@ async def list_dir(self, prefix: str) -> List[str]: list[str] """ ... 
- - -class ReadListStore(ReadStore, ListMixin): - pass - - -class WriteListStore(WriteStore, ListMixin): - pass diff --git a/src/zarr/v3/array.py b/src/zarr/v3/array.py index 8c54cfd91c..d55a5aee43 100644 --- a/src/zarr/v3/array.py +++ b/src/zarr/v3/array.py @@ -1,6 +1,5 @@ # Notes on what I've changed here: # 1. Split Array into AsyncArray and Array -# 2. Inherit from abc (SynchronousArray, AsynchronousArray) # 3. Added .size and .attrs methods # 4. Temporarily disabled the creation of ArrayV2 # 5. Added from_json to AsyncArray @@ -17,9 +16,9 @@ import numpy as np from attr import evolve, frozen -from zarr.v3.abc.array import SynchronousArray, AsynchronousArray from zarr.v3.abc.codec import ArrayBytesCodecPartialDecodeMixin + # from zarr.v3.array_v2 import ArrayV2 from zarr.v3.codecs import CodecMetadata, CodecPipeline, bytes_codec from zarr.v3.common import ( @@ -48,7 +47,7 @@ @frozen -class AsyncArray(AsynchronousArray): +class AsyncArray: metadata: ArrayMetadata store_path: StorePath runtime_configuration: RuntimeConfiguration @@ -75,7 +74,7 @@ async def create( ) -> AsyncArray: store_path = make_store_path(store) if not exists_ok: - assert not await (store_path / ZARR_JSON).exists_async() + assert not await (store_path / ZARR_JSON).exists() data_type = ( DataType[dtype] if isinstance(dtype, str) else DataType[dtype_to_data_type[dtype.str]] @@ -153,7 +152,7 @@ async def open( runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> AsyncArray: store_path = make_store_path(store) - zarr_json_bytes = await (store_path / ZARR_JSON).get_async() + zarr_json_bytes = await (store_path / ZARR_JSON).get() assert zarr_json_bytes is not None return cls.from_json( store_path, @@ -168,7 +167,7 @@ async def open_auto( runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> AsyncArray: # TODO: Union[AsyncArray, ArrayV2] store_path = make_store_path(store) - v3_metadata_bytes = await (store_path / ZARR_JSON).get_async() + v3_metadata_bytes = await (store_path / ZARR_JSON).get() if v3_metadata_bytes is not None: return cls.from_json( store_path, @@ -177,7 +176,7 @@ async def open_auto( ) else: raise ValueError("no v2 support yet") - # return await ArrayV2.open_async(store_path) + # return await ArrayV2.open(store_path) @property def ndim(self) -> int: @@ -231,7 +230,7 @@ async def getitem(self, selection: Selection): async def _save_metadata(self) -> None: self._validate_metadata() - await (self.store_path / ZARR_JSON).set_async(self.metadata.to_bytes()) + await (self.store_path / ZARR_JSON).set(self.metadata.to_bytes()) def _validate_metadata(self) -> None: assert len(self.metadata.shape) == len( @@ -264,7 +263,7 @@ async def _read_chunk( else: out[out_selection] = self.metadata.fill_value else: - chunk_bytes = await store_path.get_async() + chunk_bytes = await store_path.get() if chunk_bytes is not None: chunk_array = await self.codec_pipeline.decode(chunk_bytes) tmp = chunk_array[chunk_selection] @@ -346,7 +345,7 @@ async def _write_chunk( else: # writing partial chunks # read chunk first - chunk_bytes = await store_path.get_async() + chunk_bytes = await store_path.get() # merge new value if chunk_bytes is None: @@ -366,13 +365,13 @@ async def _write_chunk( async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.ndarray): if np.all(chunk_array == self.metadata.fill_value): # chunks that only contain fill_value will be removed - await store_path.delete_async() + await store_path.delete() else: chunk_bytes = await 
self.codec_pipeline.encode(chunk_array) if chunk_bytes is None: - await store_path.delete_async() + await store_path.delete() else: - await store_path.set_async(chunk_bytes) + await store_path.set(chunk_bytes) async def resize(self, new_shape: ChunkCoords) -> AsyncArray: assert len(new_shape) == len(self.metadata.shape) @@ -385,7 +384,7 @@ async def resize(self, new_shape: ChunkCoords) -> AsyncArray: new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape)) async def _delete_key(key: str) -> None: - await (self.store_path / key).delete_async() + await (self.store_path / key).delete() await concurrent_map( [ @@ -397,14 +396,14 @@ async def _delete_key(key: str) -> None: ) # Write new metadata - await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes()) + await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes()) return evolve(self, metadata=new_metadata) async def update_attributes(self, new_attributes: Dict[str, Any]) -> Array: new_metadata = evolve(self.metadata, attributes=new_attributes) # Write new metadata - await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes()) + await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes()) return evolve(self, metadata=new_metadata) def __repr__(self): @@ -415,7 +414,7 @@ async def info(self): @frozen -class Array(SynchronousArray): +class Array: _async_array: AsyncArray @classmethod diff --git a/src/zarr/v3/attributes.py b/src/zarr/v3/attributes.py new file mode 100644 index 0000000000..edbc84d8aa --- /dev/null +++ b/src/zarr/v3/attributes.py @@ -0,0 +1,32 @@ +from __future__ import annotations +from collections.abc import MutableMapping +from typing import TYPE_CHECKING, Any, Union + +if TYPE_CHECKING: + from zarr.v3.group import Group + from zarr.v3.array import Array + + +class Attributes(MutableMapping[str, Any]): + def __init__(self, obj: Union[Array, Group]): + # key=".zattrs", read_only=False, cache=True, synchronizer=None + self._obj = obj + + def __getitem__(self, key): + return self._obj.metadata.attributes[key] + + def __setitem__(self, key, value): + new_attrs = dict(self._obj.metadata.attributes) + new_attrs[key] = value + self._obj = self._obj.update_attributes(new_attrs) + + def __delitem__(self, key): + new_attrs = dict(self._obj.metadata.attributes) + del new_attrs[key] + self._obj = self._obj.update_attributes(new_attrs) + + def __iter__(self): + return iter(self._obj.metadata.attributes) + + def __len__(self): + return len(self._obj.metadata.attributes) diff --git a/src/zarr/v3/codecs/sharding.py b/src/zarr/v3/codecs/sharding.py index edbe327a6b..12c84ade29 100644 --- a/src/zarr/v3/codecs/sharding.py +++ b/src/zarr/v3/codecs/sharding.py @@ -354,7 +354,7 @@ async def decode_partial( for chunk_coords in all_chunk_coords: chunk_byte_slice = shard_index.get_chunk_slice(chunk_coords) if chunk_byte_slice: - chunk_bytes = await store_path.get_async(chunk_byte_slice) + chunk_bytes = await store_path.get(chunk_byte_slice) if chunk_bytes: shard_dict[chunk_coords] = chunk_bytes @@ -533,9 +533,9 @@ async def _write_chunk( ) if shard_builder.index.is_all_empty(): - await store_path.delete_async() + await store_path.delete() else: - await store_path.set_async( + await store_path.set( await shard_builder.finalize( self.configuration.index_location, self._encode_shard_index, @@ -561,9 +561,9 @@ def _shard_index_size(self) -> int: async def _load_shard_index_maybe(self, store_path: StorePath) -> Optional[_ShardIndex]: shard_index_size = self._shard_index_size() if self.configuration.index_location == 
ShardingCodecIndexLocation.start: - index_bytes = await store_path.get_async((0, shard_index_size)) + index_bytes = await store_path.get((0, shard_index_size)) else: - index_bytes = await store_path.get_async((-shard_index_size, None)) + index_bytes = await store_path.get((-shard_index_size, None)) if index_bytes is not None: return await self._decode_shard_index(index_bytes) return None @@ -574,7 +574,7 @@ async def _load_shard_index(self, store_path: StorePath) -> _ShardIndex: ) async def _load_full_shard_maybe(self, store_path: StorePath) -> Optional[_ShardProxy]: - shard_bytes = await store_path.get_async() + shard_bytes = await store_path.get() return await _ShardProxy.from_bytes(shard_bytes, self) if shard_bytes else None diff --git a/src/zarr/v3/config.py b/src/zarr/v3/config.py new file mode 100644 index 0000000000..28df25899a --- /dev/null +++ b/src/zarr/v3/config.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from asyncio import AbstractEventLoop +from typing import Literal, Optional +from attr import frozen + + +@frozen +class SyncConfiguration: + concurrency: Optional[int] = None + asyncio_loop: Optional[AbstractEventLoop] = None + + +@frozen +class RuntimeConfiguration: + order: Literal["C", "F"] = "C" + # TODO: remove these in favor of the SyncConfiguration object + concurrency: Optional[int] = None + asyncio_loop: Optional[AbstractEventLoop] = None diff --git a/src/zarr/v3/group.py b/src/zarr/v3/group.py index aa43c706a5..9f53a49819 100644 --- a/src/zarr/v3/group.py +++ b/src/zarr/v3/group.py @@ -1,25 +1,38 @@ from __future__ import annotations +import asyncio import json -from typing import Any, Dict, Literal, Optional, Union +import logging +from typing import Any, Dict, Literal, Optional, Union, AsyncIterator, Iterator, List -from attr import asdict, evolve, field, frozen +from attr import asdict, field, frozen # , validators -from zarr.v3.array import Array -from zarr.v3.common import ZARR_JSON, make_cattr -from zarr.v3.metadata import RuntimeConfiguration +from zarr.v3.array import AsyncArray, Array +from zarr.v3.attributes import Attributes +from zarr.v3.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, make_cattr +from zarr.v3.config import RuntimeConfiguration, SyncConfiguration from zarr.v3.store import StoreLike, StorePath, make_store_path -from zarr.v3.sync import sync +from zarr.v3.sync import SyncMixin + +logger = logging.getLogger("zarr.group") @frozen class GroupMetadata: attributes: Dict[str, Any] = field(factory=dict) - zarr_format: Literal[3] = 3 - node_type: Literal["group"] = "group" + zarr_format: Literal[2, 3] = 3 # field(default=3, validator=validators.in_([2, 3])) + node_type: Literal["group"] = field(default="group", init=False) - def to_bytes(self) -> bytes: - return json.dumps(asdict(self)).encode() + def to_bytes(self) -> Dict[str, bytes]: + if self.zarr_format == 3: + return {ZARR_JSON: json.dumps(asdict(self)).encode()} + elif self.zarr_format == 2: + return { + ZGROUP_JSON: self.zarr_format, + ZATTRS_JSON: json.dumps(self.attributes).encode(), + } + else: + raise ValueError(f"unexpected zarr_format: {self.zarr_format}") @classmethod def from_json(cls, zarr_json: Any) -> GroupMetadata: @@ -27,25 +40,29 @@ def from_json(cls, zarr_json: Any) -> GroupMetadata: @frozen -class Group: +class AsyncGroup: metadata: GroupMetadata store_path: StorePath runtime_configuration: RuntimeConfiguration @classmethod - async def create_async( + async def create( cls, store: StoreLike, *, attributes: Optional[Dict[str, Any]] = None, 
exists_ok: bool = False, + zarr_format: Literal[2, 3] = 3, # field(default=3, validator=validators.in_([2, 3])), runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> Group: + ) -> AsyncGroup: store_path = make_store_path(store) if not exists_ok: - assert not await (store_path / ZARR_JSON).exists_async() + if zarr_format == 3: + assert not await (store_path / ZARR_JSON).exists() + elif zarr_format == 2: + assert not await (store_path / ZGROUP_JSON).exists() group = cls( - metadata=GroupMetadata(attributes=attributes or {}), + metadata=GroupMetadata(attributes=attributes or {}, zarr_format=zarr_format), store_path=store_path, runtime_configuration=runtime_configuration, ) @@ -53,45 +70,39 @@ async def create_async( return group @classmethod - def create( - cls, - store: StoreLike, - *, - attributes: Optional[Dict[str, Any]] = None, - exists_ok: bool = False, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> Group: - return sync( - cls.create_async( - store, - attributes=attributes, - exists_ok=exists_ok, - runtime_configuration=runtime_configuration, - ), - runtime_configuration.asyncio_loop, - ) - - @classmethod - async def open_async( + async def open( cls, store: StoreLike, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> Group: + zarr_format: Literal[2, 3] = 3, + ) -> AsyncGroup: store_path = make_store_path(store) - zarr_json_bytes = await (store_path / ZARR_JSON).get_async() - assert zarr_json_bytes is not None - return cls.from_json(store_path, json.loads(zarr_json_bytes), runtime_configuration) - @classmethod - def open( - cls, - store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> Group: - return sync( - cls.open_async(store, runtime_configuration), - runtime_configuration.asyncio_loop, - ) + # TODO: consider trying to autodiscover the zarr-format here + if zarr_format == 3: + # V3 groups are comprised of a zarr.json object + # (it is optional in the case of implicit groups) + zarr_json_bytes = await (store_path / ZARR_JSON).get() + zarr_json = ( + json.loads(zarr_json_bytes) if zarr_json_bytes is not None else {"zarr_format": 3} + ) + + elif zarr_format == 2: + # V2 groups are comprised of a .zgroup and .zattrs objects + # (both are optional in the case of implicit groups) + zgroup_bytes, zattrs_bytes = await asyncio.gather( + (store_path / ZGROUP_JSON).get(), (store_path / ZATTRS_JSON).get() + ) + zgroup = ( + json.loads(json.loads(zgroup_bytes)) + if zgroup_bytes is not None + else {"zarr_format": 2} + ) + zattrs = json.loads(json.loads(zattrs_bytes)) if zattrs_bytes is not None else {} + zarr_json = {**zgroup, "attributes": zattrs} + else: + raise ValueError(f"unexpected zarr_format: {zarr_format}") + return cls.from_json(store_path, zarr_json, runtime_configuration) @classmethod def from_json( @@ -107,73 +118,306 @@ def from_json( ) return group - @classmethod - async def open_or_array( - cls, - store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> Union[Array, Group]: - store_path = make_store_path(store) - zarr_json_bytes = await (store_path / ZARR_JSON).get_async() - if zarr_json_bytes is None: - raise KeyError - zarr_json = json.loads(zarr_json_bytes) - if zarr_json["node_type"] == "group": - return cls.from_json(store_path, zarr_json, runtime_configuration) - if zarr_json["node_type"] == "array": - return Array.from_json( - store_path, zarr_json, runtime_configuration=runtime_configuration + async def 
getitem( + self, + key: str, + ) -> Union[AsyncArray, AsyncGroup]: + + store_path = self.store_path / key + + if self.metadata.zarr_format == 3: + zarr_json_bytes = await (store_path / ZARR_JSON).get() + if zarr_json_bytes is None: + # implicit group? + logger.warning("group at {} is an implicit group", store_path) + zarr_json = { + "zarr_format": self.metadata.zarr_format, + "node_type": "group", + "attributes": {}, + } + else: + zarr_json = json.loads(zarr_json_bytes) + if zarr_json["node_type"] == "group": + return type(self).from_json(store_path, zarr_json, self.runtime_configuration) + if zarr_json["node_type"] == "array": + return AsyncArray.from_json( + store_path, zarr_json, runtime_configuration=self.runtime_configuration + ) + elif self.metadata.zarr_format == 2: + # Q: how do we like optimistically fetching .zgroup, .zarray, and .zattrs? + # This guarantees that we will always make at least one extra request to the store + zgroup_bytes, zarray_bytes, zattrs_bytes = await asyncio.gather( + (store_path / ZGROUP_JSON).get(), + (store_path / ZARRAY_JSON).get(), + (store_path / ZATTRS_JSON).get(), ) - raise KeyError + + # unpack the zarray, if this is None then we must be opening a group + zarray = json.loads(zarray_bytes) if zarray_bytes else None + # unpack the zattrs, this can be None if no attrs were written + zattrs = json.loads(zattrs_bytes) if zattrs_bytes is not None else {} + + if zarray is not None: + # TODO: update this once the V2 array support is part of the primary array class + zarr_json = {**zarray, "attributes": zattrs} + return AsyncArray.from_json( + store_path, zarray, runtime_configuration=self.runtime_configuration + ) + else: + if zgroup_bytes is None: + # implicit group? + logger.warning("group at {} is an implicit group", store_path) + zgroup = ( + json.loads(zgroup_bytes) + if zgroup_bytes is not None + else {"zarr_format": self.metadata.zarr_format} + ) + zarr_json = {**zgroup, "attributes": zattrs} + return type(self).from_json(store_path, zarr_json, self.runtime_configuration) + else: + raise ValueError(f"unexpected zarr_format: {self.metadata.zarr_format}") + + async def delitem(self, key: str) -> None: + store_path = self.store_path / key + if self.metadata.zarr_format == 3: + await (store_path / ZARR_JSON).delete() + elif self.metadata.zarr_format == 2: + await asyncio.gather( + (store_path / ZGROUP_JSON).delete(), # TODO: missing_ok=False + (store_path / ZATTRS_JSON).delete(), # TODO: missing_ok=True + ) + else: + raise ValueError(f"unexpected zarr_format: {self.metadata.zarr_format}") async def _save_metadata(self) -> None: - await (self.store_path / ZARR_JSON).set_async(self.metadata.to_bytes()) + to_save = self.metadata.to_bytes() + awaitables = [(self.store_path / key).set(value) for key, value in to_save.items()] + await asyncio.gather(*awaitables) - async def get_async(self, path: str) -> Union[Array, Group]: - return await self.__class__.open_or_array( - self.store_path / path, self.runtime_configuration - ) + @property + def attrs(self): + return self.metadata.attributes - def __getitem__(self, path: str) -> Union[Array, Group]: - return sync(self.get_async(path), self.runtime_configuration.asyncio_loop) + @property + def info(self): + return self.metadata.info - async def create_group_async(self, path: str, **kwargs) -> Group: + async def create_group(self, path: str, **kwargs) -> AsyncGroup: runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) - return await self.__class__.create_async( + return await 
type(self).create( self.store_path / path, runtime_configuration=runtime_configuration, **kwargs, ) - def create_group(self, path: str, **kwargs) -> Group: - return sync(self.create_group_async(path), self.runtime_configuration.asyncio_loop) - - async def create_array_async(self, path: str, **kwargs) -> Array: + async def create_array(self, path: str, **kwargs) -> AsyncArray: runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) - return await Array.create_async( + return await AsyncArray.create( self.store_path / path, runtime_configuration=runtime_configuration, **kwargs, ) - def create_array(self, path: str, **kwargs) -> Array: - return sync( - self.create_array_async(path, **kwargs), - self.runtime_configuration.asyncio_loop, - ) - - async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Group: - new_metadata = evolve(self.metadata, attributes=new_attributes) + async def update_attributes(self, new_attributes: Dict[str, Any]): + # metadata.attributes is "frozen" so we simply clear and update the dict + self.metadata.attributes.clear() + self.metadata.attributes.update(new_attributes) # Write new metadata - await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes()) - return evolve(self, metadata=new_metadata) + to_save = self.metadata.to_bytes() + if self.metadata.zarr_format == 2: + # only save the .zattrs object + await (self.store_path / ZATTRS_JSON).set(to_save[ZATTRS_JSON]) + else: + await (self.store_path / ZARR_JSON).set(to_save[ZARR_JSON]) - def update_attributes(self, new_attributes: Dict[str, Any]) -> Group: - return sync( - self.update_attributes_async(new_attributes), - self.runtime_configuration.asyncio_loop, - ) + self.metadata.attributes.clear() + self.metadata.attributes.update(new_attributes) + + return self def __repr__(self): - return f"" + return f"" + + async def nchildren(self) -> int: + raise NotImplementedError + + async def children(self) -> AsyncIterator[AsyncArray, AsyncGroup]: + raise NotImplementedError + + async def contains(self, child: str) -> bool: + raise NotImplementedError + + async def group_keys(self) -> AsyncIterator[str]: + raise NotImplementedError + + async def groups(self) -> AsyncIterator[AsyncGroup]: + raise NotImplementedError + + async def array_keys(self) -> AsyncIterator[str]: + raise NotImplementedError + + async def arrays(self) -> AsyncIterator[AsyncArray]: + raise NotImplementedError + + async def tree(self, expand=False, level=None) -> Any: + raise NotImplementedError + + async def empty(self, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def zeros(self, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def ones(self, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def full(self, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def empty_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def zeros_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def ones_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def full_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + raise NotImplementedError + + async def move(self, source: str, dest: str) -> None: + raise NotImplementedError + + +@frozen +class Group(SyncMixin): + _async_group: AsyncGroup + _sync_configuration: SyncConfiguration = field(init=True, default=SyncConfiguration()) + + @classmethod + def create( + cls, + 
store: StoreLike, + *, + attributes: Optional[Dict[str, Any]] = None, + exists_ok: bool = False, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Group: + obj = cls._sync( + AsyncGroup.create( + store, + attributes=attributes, + exists_ok=exists_ok, + runtime_configuration=runtime_configuration, + ) + ) + + return cls(obj) + + @classmethod + def open( + cls, + store: StoreLike, + runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), + ) -> Group: + obj = cls._sync(AsyncGroup.open(store, runtime_configuration)) + return cls(obj) + + def __getitem__(self, path: str) -> Union[Array, Group]: + obj = self._sync(self._async_group.getitem(path)) + if isinstance(obj, AsyncArray): + return Array(obj) + else: + return Group(obj) + + def __delitem__(self, key) -> None: + self._sync(self._async_group.delitem(key)) + + def __iter__(self): + raise NotImplementedError + + def __len__(self): + raise NotImplementedError + + def __setitem__(self, key, value): + """__setitem__ is not supported in v3""" + raise NotImplementedError + + @property + def metadata(self) -> GroupMetadata: + return self._async_group.metadata + + @property + def attrs(self) -> Attributes: + return Attributes(self) + + @property + def info(self): + return self._async_group.info + + def update_attributes(self, new_attributes: Dict[str, Any]): + self._sync(self._async_group.update_attributes(new_attributes)) + return self + + @property + def nchildren(self) -> int: + return self._sync(self._async_group.nchildren) + + @property + def children(self) -> List[Array, Group]: + _children = self._sync_iter(self._async_group.children) + return [Array(obj) if isinstance(obj, AsyncArray) else Group(obj) for obj in _children] + + def __contains__(self, child) -> bool: + return self._sync(self._async_group.contains(child)) + + def group_keys(self) -> Iterator[str]: + return self._sync_iter(self._async_group.group_keys) + + def groups(self) -> List[Group]: + # TODO: in v2 this was a generator that return key: Group + return [Group(obj) for obj in self._sync_iter(self._async_group.groups)] + + def array_keys(self) -> List[str]: + return self._sync_iter(self._async_group.array_keys) + + def arrays(self) -> List[Array]: + return [Array(obj) for obj in self._sync_iter(self._async_group.arrays)] + + def tree(self, expand=False, level=None) -> Any: + return self._sync(self._async_group.tree(expand=expand, level=level)) + + def create_group(self, name: str, **kwargs) -> Group: + return Group(self._sync(self._async_group.create_group(name, **kwargs))) + + def create_array(self, name: str, **kwargs) -> Array: + return Array(self._sync(self._async_group.create_array(name, **kwargs))) + + def empty(self, **kwargs) -> Array: + return Array(self._sync(self._async_group.empty(**kwargs))) + + def zeros(self, **kwargs) -> Array: + return Array(self._sync(self._async_group.zeros(**kwargs))) + + def ones(self, **kwargs) -> Array: + return Array(self._sync(self._async_group.ones(**kwargs))) + + def full(self, **kwargs) -> Array: + return Array(self._sync(self._async_group.full(**kwargs))) + + def empty_like(self, prototype: AsyncArray, **kwargs) -> Array: + return Array(self._sync(self._async_group.empty_like(prototype, **kwargs))) + + def zeros_like(self, prototype: AsyncArray, **kwargs) -> Array: + return Array(self._sync(self._async_group.zeros_like(prototype, **kwargs))) + + def ones_like(self, prototype: AsyncArray, **kwargs) -> Array: + return Array(self._sync(self._async_group.ones_like(prototype, **kwargs))) + + 
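The synchronous wrappers above and below all share one shape: run the corresponding AsyncGroup coroutine through SyncMixin._sync, then re-wrap the result in the synchronous facade (Array or Group). A usage sketch mirroring tests/test_group.py further down in this patch (the MemoryStore-backed StorePath is an illustrative choice):

    import numpy as np

    from zarr.v3.config import RuntimeConfiguration
    from zarr.v3.group import AsyncGroup, Group, GroupMetadata
    from zarr.v3.store import MemoryStore, StorePath

    agroup = AsyncGroup(
        metadata=GroupMetadata(),
        store_path=StorePath(MemoryStore()),
        runtime_configuration=RuntimeConfiguration(),
    )
    root = Group(agroup)                    # sync facade over the async group
    foo = root.create_group("foo")          # _sync(AsyncGroup.create_group(...)) -> Group
    arr = foo.create_array("bar", shape=(4, 4), dtype="uint16", chunk_shape=(2, 2))
    arr[:] = np.arange(16, dtype="uint16").reshape((4, 4))
    assert foo["bar"].shape == (4, 4)       # __getitem__ re-wraps the AsyncArray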
def full_like(self, prototype: AsyncArray, **kwargs) -> Array: + return Array(self._sync(self._async_group.full_like(prototype, **kwargs))) + + def move(self, source: str, dest: str) -> None: + return self._sync(self._async_group.move(source, dest)) diff --git a/src/zarr/v3/group_v2.py b/src/zarr/v3/group_v2.py deleted file mode 100644 index 3b1a369ae2..0000000000 --- a/src/zarr/v3/group_v2.py +++ /dev/null @@ -1,218 +0,0 @@ -from __future__ import annotations - -import asyncio -import json -from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union - -from attr import asdict, evolve, frozen - -from zarr.v3.array_v2 import ArrayV2 -from zarr.v3.common import ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, make_cattr -from zarr.v3.metadata import RuntimeConfiguration -from zarr.v3.store import StoreLike, StorePath, make_store_path -from zarr.v3.sync import sync - -if TYPE_CHECKING: - from zarr.v3.group import Group - - -@frozen -class GroupV2Metadata: - zarr_format: Literal[2] = 2 - - def to_bytes(self) -> bytes: - return json.dumps(asdict(self)).encode() - - @classmethod - def from_json(cls, zarr_json: Any) -> GroupV2Metadata: - return make_cattr().structure(zarr_json, cls) - - -@frozen -class GroupV2: - metadata: GroupV2Metadata - store_path: StorePath - runtime_configuration: RuntimeConfiguration - attributes: Optional[Dict[str, Any]] = None - - @classmethod - async def create_async( - cls, - store: StoreLike, - *, - attributes: Optional[Dict[str, Any]] = None, - exists_ok: bool = False, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> GroupV2: - store_path = make_store_path(store) - if not exists_ok: - assert not await (store_path / ZGROUP_JSON).exists_async() - group = cls( - metadata=GroupV2Metadata(), - attributes=attributes, - store_path=store_path, - runtime_configuration=runtime_configuration, - ) - await group._save_metadata() - return group - - @classmethod - def create( - cls, - store: StoreLike, - *, - attributes: Optional[Dict[str, Any]] = None, - exists_ok: bool = False, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> GroupV2: - return sync( - cls.create_async( - store, - attributes=attributes, - exists_ok=exists_ok, - runtime_configuration=runtime_configuration, - ), - runtime_configuration.asyncio_loop if runtime_configuration else None, - ) - - @classmethod - async def open_async( - cls, - store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> GroupV2: - store_path = make_store_path(store) - zgroup_bytes = await (store_path / ZGROUP_JSON).get_async() - assert zgroup_bytes is not None - zattrs_bytes = await (store_path / ZATTRS_JSON).get_async() - metadata = json.loads(zgroup_bytes) - attributes = json.loads(zattrs_bytes) if zattrs_bytes is not None else None - - return cls.from_json( - store_path, - metadata, - runtime_configuration, - attributes, - ) - - @classmethod - def open( - cls, - store_path: StorePath, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> GroupV2: - return sync( - cls.open_async(store_path, runtime_configuration), - runtime_configuration.asyncio_loop, - ) - - @classmethod - def from_json( - cls, - store_path: StorePath, - zarr_json: Any, - runtime_configuration: RuntimeConfiguration, - attributes: Optional[Dict[str, Any]] = None, - ) -> GroupV2: - group = cls( - metadata=GroupV2Metadata.from_json(zarr_json), - store_path=store_path, - runtime_configuration=runtime_configuration, - attributes=attributes, - ) - return 
group - - @staticmethod - async def open_or_array( - store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - ) -> Union[ArrayV2, GroupV2]: - store_path = make_store_path(store) - zgroup_bytes, zattrs_bytes = await asyncio.gather( - (store_path / ZGROUP_JSON).get_async(), - (store_path / ZATTRS_JSON).get_async(), - ) - attributes = json.loads(zattrs_bytes) if zattrs_bytes is not None else None - if zgroup_bytes is not None: - return GroupV2.from_json( - store_path, json.loads(zgroup_bytes), runtime_configuration, attributes - ) - zarray_bytes = await (store_path / ZARRAY_JSON).get_async() - if zarray_bytes is not None: - return ArrayV2.from_json( - store_path, json.loads(zarray_bytes), attributes, runtime_configuration - ) - raise KeyError - - async def _save_metadata(self) -> None: - await (self.store_path / ZGROUP_JSON).set_async(self.metadata.to_bytes()) - if self.attributes is not None and len(self.attributes) > 0: - await (self.store_path / ZATTRS_JSON).set_async( - json.dumps(self.attributes).encode(), - ) - else: - await (self.store_path / ZATTRS_JSON).delete_async() - - async def get_async(self, path: str) -> Union[ArrayV2, GroupV2]: - return await self.__class__.open_or_array( - self.store_path / path, self.runtime_configuration - ) - - def __getitem__(self, path: str) -> Union[ArrayV2, GroupV2]: - return sync(self.get_async(path), self.runtime_configuration.asyncio_loop) - - async def create_group_async(self, path: str, **kwargs) -> GroupV2: - runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) - return await self.__class__.create_async( - self.store_path / path, - runtime_configuration=runtime_configuration, - **kwargs, - ) - - def create_group(self, path: str, **kwargs) -> GroupV2: - return sync(self.create_group_async(path), self.runtime_configuration.asyncio_loop) - - async def create_array_async(self, path: str, **kwargs) -> ArrayV2: - runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) - return await ArrayV2.create_async( - self.store_path / path, - runtime_configuration=runtime_configuration, - **kwargs, - ) - - def create_array(self, path: str, **kwargs) -> ArrayV2: - return sync( - self.create_array_async(path, **kwargs), - self.runtime_configuration.asyncio_loop, - ) - - async def convert_to_v3_async(self) -> Group: - from zarr.v3.common import ZARR_JSON - from zarr.v3.group import Group, GroupMetadata - - new_metadata = GroupMetadata(attributes=self.attributes or {}) - new_metadata_bytes = new_metadata.to_bytes() - - await (self.store_path / ZARR_JSON).set_async(new_metadata_bytes) - - return Group.from_json( - store_path=self.store_path, - zarr_json=json.loads(new_metadata_bytes), - runtime_configuration=self.runtime_configuration, - ) - - async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> GroupV2: - await (self.store_path / ZATTRS_JSON).set_async(json.dumps(new_attributes).encode()) - return evolve(self, attributes=new_attributes) - - def update_attributes(self, new_attributes: Dict[str, Any]) -> GroupV2: - return sync( - self.update_attributes_async(new_attributes), - self.runtime_configuration.asyncio_loop, - ) - - def convert_to_v3(self) -> Group: - return sync(self.convert_to_v3_async(), loop=self.runtime_configuration.asyncio_loop) - - def __repr__(self): - return f"" diff --git a/src/zarr/v3/store.py b/src/zarr/v3/store.py deleted file mode 100644 index b6c20be41f..0000000000 --- a/src/zarr/v3/store.py +++ /dev/null @@ -1,351 +0,0 
@@ -# TODO: -# 1. Stores should inherit from zarr.v3.abc.store classes -# 2. remove "_async" suffix from all methods? - -# Changes I've made here: -# 1. Make delay import of fsspec - -from __future__ import annotations - -import asyncio -import io -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, MutableMapping, Optional, Tuple, Union - -from zarr.v3.common import BytesLike, to_thread - -if TYPE_CHECKING: - from upath import UPath - from fsspec.asyn import AsyncFileSystem - - -def _dereference_path(root: str, path: str) -> str: - assert isinstance(root, str) - assert isinstance(path, str) - root = root.rstrip("/") - path = f"{root}/{path}" if root != "" else path - path = path.rstrip("/") - return path - - -class StorePath: - store: Store - path: str - - def __init__(self, store: Store, path: Optional[str] = None): - self.store = store - self.path = path or "" - - @classmethod - def from_path(cls, pth: Path) -> StorePath: - return cls(Store.from_path(pth)) - - async def get_async( - self, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: - return await self.store.get_async(self.path, byte_range) - - async def set_async( - self, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None - ) -> None: - await self.store.set_async(self.path, value, byte_range) - - async def delete_async(self) -> None: - await self.store.delete_async(self.path) - - async def exists_async(self) -> bool: - return await self.store.exists_async(self.path) - - def __truediv__(self, other: str) -> StorePath: - return self.__class__(self.store, _dereference_path(self.path, other)) - - def __str__(self) -> str: - return _dereference_path(str(self.store), self.path) - - def __repr__(self) -> str: - return f"StorePath({self.store.__class__.__name__}, {repr(str(self))})" - - -class Store: - supports_partial_writes = False - - @classmethod - def from_path(cls, pth: Path) -> Store: - try: - from upath import UPath - from upath.implementations.local import PosixUPath, WindowsUPath - - if isinstance(pth, UPath) and not isinstance(pth, (PosixUPath, WindowsUPath)): - storage_options = pth._kwargs.copy() - storage_options.pop("_url", None) - return RemoteStore(str(pth), **storage_options) - except ImportError: - pass - - return LocalStore(pth) - - async def multi_get_async( - self, keys: List[Tuple[str, Optional[Tuple[int, int]]]] - ) -> List[Optional[BytesLike]]: - return await asyncio.gather(*[self.get_async(key, byte_range) for key, byte_range in keys]) - - async def get_async( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: - raise NotImplementedError - - async def multi_set_async( - self, key_values: List[Tuple[str, BytesLike, Optional[Tuple[int, int]]]] - ) -> None: - await asyncio.gather( - *[self.set_async(key, value, byte_range) for key, value, byte_range in key_values] - ) - - async def set_async( - self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None - ) -> None: - raise NotImplementedError - - async def delete_async(self, key: str) -> None: - raise NotImplementedError - - async def exists_async(self, key: str) -> bool: - raise NotImplementedError - - def __truediv__(self, other: str) -> StorePath: - return StorePath(self, other) - - -class LocalStore(Store): - supports_partial_writes = True - root: Path - auto_mkdir: bool - - def __init__(self, root: Union[Path, str], auto_mkdir: bool = True): - if isinstance(root, str): - root = Path(root) - assert isinstance(root, Path) - - 
self.root = root - self.auto_mkdir = auto_mkdir - - def _cat_file( - self, path: Path, start: Optional[int] = None, end: Optional[int] = None - ) -> BytesLike: - if start is None and end is None: - return path.read_bytes() - with path.open("rb") as f: - size = f.seek(0, io.SEEK_END) - if start is not None: - if start >= 0: - f.seek(start) - else: - f.seek(max(0, size + start)) - if end is not None: - if end < 0: - end = size + end - return f.read(end - f.tell()) - return f.read() - - def _put_file( - self, - path: Path, - value: BytesLike, - start: Optional[int] = None, - ): - if self.auto_mkdir: - path.parent.mkdir(parents=True, exist_ok=True) - if start is not None: - with path.open("r+b") as f: - f.seek(start) - f.write(value) - else: - return path.write_bytes(value) - - async def get_async( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: - assert isinstance(key, str) - path = self.root / key - - try: - value = await ( - to_thread(self._cat_file, path, byte_range[0], byte_range[1]) - if byte_range is not None - else to_thread(self._cat_file, path) - ) - except (FileNotFoundError, IsADirectoryError, NotADirectoryError): - return None - - return value - - async def set_async( - self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None - ) -> None: - assert isinstance(key, str) - path = self.root / key - - if byte_range is not None: - await to_thread(self._put_file, path, value, byte_range[0]) - else: - await to_thread(self._put_file, path, value) - - async def delete_async(self, key: str) -> None: - path = self.root / key - await to_thread(path.unlink, True) - - async def exists_async(self, key: str) -> bool: - path = self.root / key - return await to_thread(path.exists) - - def __str__(self) -> str: - return f"file://{self.root}" - - def __repr__(self) -> str: - return f"LocalStore({repr(str(self))})" - - -class RemoteStore(Store): - root: UPath - - def __init__(self, url: Union[UPath, str], **storage_options: Dict[str, Any]): - from upath import UPath - import fsspec - - if isinstance(url, str): - self.root = UPath(url, **storage_options) - else: - assert len(storage_options) == 0, ( - "If constructed with a UPath object, no additional " - + "storage_options are allowed." - ) - self.root = url.rstrip("/") - # test instantiate file system - fs, _ = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) - assert fs.__class__.async_impl, "FileSystem needs to support async operations." - - def make_fs(self) -> Tuple[AsyncFileSystem, str]: - import fsspec - - storage_options = self.root._kwargs.copy() - storage_options.pop("_url", None) - fs, root = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) - assert fs.__class__.async_impl, "FileSystem needs to support async operations." 
- return fs, root - - async def get_async( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: - assert isinstance(key, str) - fs, root = self.make_fs() - path = _dereference_path(root, key) - - try: - value = await ( - fs._cat_file(path, start=byte_range[0], end=byte_range[1]) - if byte_range - else fs._cat_file(path) - ) - except (FileNotFoundError, IsADirectoryError, NotADirectoryError): - return None - - return value - - async def set_async( - self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None - ) -> None: - assert isinstance(key, str) - fs, root = self.make_fs() - path = _dereference_path(root, key) - - # write data - if byte_range: - with fs._open(path, "r+b") as f: - f.seek(byte_range[0]) - f.write(value) - else: - await fs._pipe_file(path, value) - - async def delete_async(self, key: str) -> None: - fs, root = self.make_fs() - path = _dereference_path(root, key) - if await fs._exists(path): - await fs._rm(path) - - async def exists_async(self, key: str) -> bool: - fs, root = self.make_fs() - path = _dereference_path(root, key) - return await fs._exists(path) - - def __str__(self) -> str: - return str(self.root) - - def __repr__(self) -> str: - return f"RemoteStore({repr(str(self))})" - - -class MemoryStore(Store): - supports_partial_writes = True - store_dict: MutableMapping[str, bytes] - - def __init__(self, store_dict: Optional[MutableMapping[str, bytes]] = None): - self.store_dict = store_dict or {} - - async def get_async( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: - assert isinstance(key, str) - try: - value = self.store_dict[key] - if byte_range is not None: - value = value[byte_range[0] : byte_range[1]] - return value - except KeyError: - return None - - async def set_async( - self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None - ) -> None: - assert isinstance(key, str) - - if byte_range is not None: - buf = bytearray(self.store_dict[key]) - buf[byte_range[0] : byte_range[1]] = value - self.store_dict[key] = buf - else: - self.store_dict[key] = value - - async def delete_async(self, key: str) -> None: - try: - del self.store_dict[key] - except KeyError: - pass - - async def exists_async(self, key: str) -> bool: - return key in self.store_dict - - def __str__(self) -> str: - return f"memory://{id(self.store_dict)}" - - def __repr__(self) -> str: - return f"MemoryStore({repr(str(self))})" - - -StoreLike = Union[Store, StorePath, Path, str] - - -def make_store_path(store_like: StoreLike) -> StorePath: - if isinstance(store_like, StorePath): - return store_like - elif isinstance(store_like, Store): - return StorePath(store_like) - elif isinstance(store_like, Path): - return StorePath(Store.from_path(store_like)) - elif isinstance(store_like, str): - try: - from upath import UPath - - return StorePath(Store.from_path(UPath(store_like))) - except ImportError: - return StorePath(LocalStore(Path(store_like))) - raise TypeError diff --git a/src/zarr/v3/store/__init__.py b/src/zarr/v3/store/__init__.py new file mode 100644 index 0000000000..2268381d2a --- /dev/null +++ b/src/zarr/v3/store/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from zarr.v3.store.core import StorePath, StoreLike, make_store_path +from zarr.v3.store.remote import RemoteStore +from zarr.v3.store.local import LocalStore +from zarr.v3.store.memory import MemoryStore diff --git a/src/zarr/v3/store/core.py b/src/zarr/v3/store/core.py new file mode 100644 index 
0000000000..0ef1c8569e --- /dev/null +++ b/src/zarr/v3/store/core.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Any, Optional, Tuple, Union + +from zarr.v3.common import BytesLike +from zarr.v3.abc.store import Store + + +def _dereference_path(root: str, path: str) -> str: + assert isinstance(root, str) + assert isinstance(path, str) + root = root.rstrip("/") + path = f"{root}/{path}" if root != "" else path + path = path.rstrip("/") + return path + + +class StorePath: + store: Store + path: str + + def __init__(self, store: Store, path: Optional[str] = None): + self.store = store + self.path = path or "" + + @classmethod + def from_path(cls, pth: Path) -> StorePath: + return cls(Store.from_path(pth)) + + async def get( + self, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + return await self.store.get(self.path, byte_range) + + async def set(self, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None) -> None: + if byte_range is not None: + raise NotImplementedError("Store.set does not have partial writes yet") + await self.store.set(self.path, value) + + async def delete(self) -> None: + await self.store.delete(self.path) + + async def exists(self) -> bool: + return await self.store.exists(self.path) + + def __truediv__(self, other: str) -> StorePath: + return self.__class__(self.store, _dereference_path(self.path, other)) + + def __str__(self) -> str: + return _dereference_path(str(self.store), self.path) + + def __repr__(self) -> str: + return f"StorePath({self.store.__class__.__name__}, {repr(str(self))})" + + def __eq__(self, other: Any) -> bool: + try: + if self.store == other.store and self.path == other.path: + return True + except Exception: + pass + return False + + +StoreLike = Union[Store, StorePath, Path, str] + + +def make_store_path(store_like: StoreLike) -> StorePath: + if isinstance(store_like, StorePath): + return store_like + elif isinstance(store_like, Store): + return StorePath(store_like) + # elif isinstance(store_like, Path): + # return StorePath(Store.from_path(store_like)) + elif isinstance(store_like, str): + try: + from upath import UPath + + return StorePath(Store.from_path(UPath(store_like))) + except ImportError as e: + raise e + # return StorePath(LocalStore(Path(store_like))) + raise TypeError diff --git a/src/zarr/v3/store/local.py b/src/zarr/v3/store/local.py new file mode 100644 index 0000000000..a62eea20f7 --- /dev/null +++ b/src/zarr/v3/store/local.py @@ -0,0 +1,177 @@ +from __future__ import annotations + +import io +import shutil +from pathlib import Path +from typing import Union, Optional, List, Tuple + +from zarr.v3.abc.store import Store +from zarr.v3.common import BytesLike, concurrent_map, to_thread + + +def _get(path: Path, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> bytes: + if byte_range is not None: + start = byte_range[0] + end = (start + byte_range[1]) if byte_range[1] is not None else None + else: + return path.read_bytes() + with path.open("rb") as f: + size = f.seek(0, io.SEEK_END) + if start is not None: + if start >= 0: + f.seek(start) + else: + f.seek(max(0, size + start)) + if end is not None: + if end < 0: + end = size + end + return f.read(end - f.tell()) + return f.read() + + +def _put( + path: Path, + value: BytesLike, + start: Optional[int] = None, + auto_mkdir: bool = True, +): + if auto_mkdir: + path.parent.mkdir(parents=True, exist_ok=True) + if start is not None: + with path.open("r+b") as f: + 
f.seek(start) + f.write(value) + else: + return path.write_bytes(value) + + +class LocalStore(Store): + + supports_writes: bool = True + supports_partial_writes: bool = True + supports_listing: bool = True + + root: Path + auto_mkdir: bool + + def __init__(self, root: Union[Path, str], auto_mkdir: bool = True): + if isinstance(root, str): + root = Path(root) + assert isinstance(root, Path) + + self.root = root + self.auto_mkdir = auto_mkdir + + def __str__(self) -> str: + return f"file://{self.root}" + + def __repr__(self) -> str: + return f"LocalStore({repr(str(self))})" + + async def get( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[bytes]: + assert isinstance(key, str) + path = self.root / key + + try: + return await to_thread(_get, path, byte_range) + except (FileNotFoundError, IsADirectoryError, NotADirectoryError): + return None + + async def get_partial_values( + self, key_ranges: List[Tuple[str, Tuple[int, int]]] + ) -> List[bytes]: + args = [] + for key, byte_range in key_ranges: + assert isinstance(key, str) + path = self.root / key + if byte_range is not None: + args.append((_get, path, byte_range[0], byte_range[1])) + else: + args.append((_get, path)) + return await concurrent_map(args, to_thread, limit=None) # TODO: fix limit + + async def set(self, key: str, value: BytesLike) -> None: + assert isinstance(key, str) + path = self.root / key + await to_thread(_put, path, value) + + async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: + args = [] + for key, start, value in key_start_values: + assert isinstance(key, str) + path = self.root / key + if start is not None: + args.append((_put, path, value, start)) + else: + args.append((_put, path, value)) + await concurrent_map(args, to_thread, limit=None) # TODO: fix limit + + async def delete(self, key: str) -> None: + path = self.root / key + if path.is_dir(): # TODO: support deleting directories? shutil.rmtree? + shutil.rmtree(path) + else: + await to_thread(path.unlink, True) # Q: we may want to raise if path is missing + + async def exists(self, key: str) -> bool: + path = self.root / key + return await to_thread(path.is_file) + + async def list(self) -> List[str]: + """Retrieve all keys in the store. + + Returns + ------- + list[str] + """ + # Q: do we want to return strings or Paths? + def _list(root: Path) -> List[str]: + files = [str(p) for p in root.rglob("") if p.is_file()] + return files + + return await to_thread(_list, self.root) + + async def list_prefix(self, prefix: str) -> List[str]: + """Retrieve all keys in the store with a given prefix. + + Parameters + ---------- + prefix : str + + Returns + ------- + list[str] + """ + + def _list_prefix(root: Path, prefix: str) -> List[str]: + files = [p for p in (root / prefix).rglob("*") if p.is_file()] + return files + + return await to_thread(_list_prefix, self.root, prefix) + + async def list_dir(self, prefix: str) -> List[str]: + """ + Retrieve all keys and prefixes with a given prefix and which do not contain the character + “/” after the given prefix. 
+ + Parameters + ---------- + prefix : str + + Returns + ------- + list[str] + """ + + def _list_dir(root: Path, prefix: str) -> List[str]: + + base = root / prefix + to_strip = str(base) + "/" + try: + return [str(key).replace(to_strip, "") for key in base.iterdir()] + except (FileNotFoundError, NotADirectoryError): + return [] + + return await to_thread(_list_dir, self.root, prefix) diff --git a/src/zarr/v3/store/memory.py b/src/zarr/v3/store/memory.py new file mode 100644 index 0000000000..1370375851 --- /dev/null +++ b/src/zarr/v3/store/memory.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from typing import Optional, MutableMapping, List, Tuple + +from zarr.v3.common import BytesLike +from zarr.v3.abc.store import Store + + +# TODO: this store could easily be extended to wrap any MutuableMapping store from v2 +# When that is done, the `MemoryStore` will just be a store that wraps a dict. +class MemoryStore(Store): + supports_writes: bool = True + supports_partial_writes: bool = True + supports_listing: bool = True + + _store_dict: MutableMapping[str, bytes] + + def __init__(self, store_dict: Optional[MutableMapping[str, bytes]] = None): + self._store_dict = store_dict or {} + + def __str__(self) -> str: + return f"memory://{id(self._store_dict)}" + + def __repr__(self) -> str: + return f"MemoryStore({repr(str(self))})" + + async def get( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + assert isinstance(key, str) + try: + value = self._store_dict[key] + if byte_range is not None: + value = value[byte_range[0] : byte_range[1]] + return value + except KeyError: + return None + + async def get_partial_values( + self, key_ranges: List[Tuple[str, Tuple[int, int]]] + ) -> List[bytes]: + raise NotImplementedError + + async def exists(self, key: str) -> bool: + return key in self._store_dict + + async def set( + self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + assert isinstance(key, str) + if not isinstance(value, (bytes, bytearray, memoryview)): + raise TypeError(f"expected BytesLike, got {type(value)}") + + if byte_range is not None: + buf = bytearray(self._store_dict[key]) + buf[byte_range[0] : byte_range[1]] = value + self._store_dict[key] = buf + else: + self._store_dict[key] = value + + async def delete(self, key: str) -> None: + try: + del self._store_dict[key] + except KeyError: + pass # Q(JH): why not raise? 
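Note that delete above swallows missing keys, and LocalStore.delete takes the same stance by calling Path.unlink with missing_ok=True, so deletes are idempotent across the stores in this patch. A caller that needs strict semantics can layer its own check (strict_delete is a hypothetical helper, not part of the patch):

    async def strict_delete(store, key: str) -> None:
        # raise instead of silently ignoring a missing key
        if not await store.exists(key):
            raise KeyError(key)
        await store.delete(key)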
+ + async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: + raise NotImplementedError + + async def list(self) -> List[str]: + return list(self._store_dict.keys()) + + async def list_prefix(self, prefix: str) -> List[str]: + return [key for key in self._store_dict if key.startswith(prefix)] + + async def list_dir(self, prefix: str) -> List[str]: + if prefix == "": + return list({key.split("/", maxsplit=1)[0] for key in self._store_dict}) + else: + return list( + { + key.strip(prefix + "/").split("/")[0] + for key in self._store_dict + if (key.startswith(prefix + "/") and key != prefix) + } + ) diff --git a/src/zarr/v3/store/remote.py b/src/zarr/v3/store/remote.py new file mode 100644 index 0000000000..0e6fc84e08 --- /dev/null +++ b/src/zarr/v3/store/remote.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union + +from zarr.v3.abc.store import Store +from zarr.v3.store.core import _dereference_path +from zarr.v3.common import BytesLike + + +if TYPE_CHECKING: + from upath import UPath + from fsspec.asyn import AsyncFileSystem + + +class RemoteStore(Store): + supports_writes: bool = True + supports_partial_writes: bool = False + supports_listing: bool = True + + root: UPath + + def __init__(self, url: Union[UPath, str], **storage_options: Dict[str, Any]): + from upath import UPath + import fsspec + + if isinstance(url, str): + self.root = UPath(url, **storage_options) + else: + assert len(storage_options) == 0, ( + "If constructed with a UPath object, no additional " + + "storage_options are allowed." + ) + self.root = url.rstrip("/") + # test instantiate file system + fs, _ = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) + assert fs.__class__.async_impl, "FileSystem needs to support async operations." + + def __str__(self) -> str: + return str(self.root) + + def __repr__(self) -> str: + return f"RemoteStore({repr(str(self))})" + + def _make_fs(self) -> Tuple[AsyncFileSystem, str]: + import fsspec + + storage_options = self.root._kwargs.copy() + storage_options.pop("_url", None) + fs, root = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) + assert fs.__class__.async_impl, "FileSystem needs to support async operations." 
+ return fs, root + + async def get( + self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[BytesLike]: + assert isinstance(key, str) + fs, root = self._make_fs() + path = _dereference_path(root, key) + + try: + value = await ( + fs._cat_file(path, start=byte_range[0], end=byte_range[1]) + if byte_range + else fs._cat_file(path) + ) + except (FileNotFoundError, IsADirectoryError, NotADirectoryError): + return None + + return value + + async def set( + self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + ) -> None: + assert isinstance(key, str) + fs, root = self._make_fs() + path = _dereference_path(root, key) + + # write data + if byte_range: + with fs._open(path, "r+b") as f: + f.seek(byte_range[0]) + f.write(value) + else: + await fs._pipe_file(path, value) + + async def delete(self, key: str) -> None: + fs, root = self._make_fs() + path = _dereference_path(root, key) + if await fs._exists(path): + await fs._rm(path) + + async def exists(self, key: str) -> bool: + fs, root = self._make_fs() + path = _dereference_path(root, key) + return await fs._exists(path) diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py index ef3a6e08c0..e88c8e93f2 100644 --- a/src/zarr/v3/sync.py +++ b/src/zarr/v3/sync.py @@ -4,6 +4,9 @@ import threading from typing import Any, Coroutine, List, Optional +from zarr.v3.config import SyncConfiguration + + # From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py iothread: List[Optional[threading.Thread]] = [None] # dedicated IO thread @@ -85,3 +88,20 @@ def _get_loop(): th.start() iothread[0] = th return loop[0] + + +class SyncMixin: + + _sync_configuration: SyncConfiguration + + def _sync(self, coroutine: Coroutine): # TODO: type this + # TODO: refactor this to to take *args and **kwargs and pass those to the method + # this should allow us to better type the sync wrapper + return sync(coroutine, loop=self._sync_configuration.asyncio_loop) + + def _sync_iter(self, func: Coroutine, *args, **kwargs) -> List[Any]: # TODO: type this + async def iter_to_list() -> List[Any]: + # TODO: replace with generators so we don't materialize the entire iterator at once + return [item async for item in func(*args, **kwargs)] + + return self._sync(iter_to_list) diff --git a/tests/test_codecs_v3.py b/tests/test_codecs_v3.py index 93acdb2ba1..2b18969874 100644 --- a/tests/test_codecs_v3.py +++ b/tests/test_codecs_v3.py @@ -13,7 +13,8 @@ from zarr.v3.indexing import morton_order_iter from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation, runtime_configuration -from zarr.v3.store import MemoryStore, Store +from zarr.v3.abc.store import Store +from zarr.v3.store import MemoryStore, StorePath @frozen @@ -38,7 +39,7 @@ async def set(self, value: np.ndarray): @pytest.fixture def store() -> Iterator[Store]: - yield MemoryStore() + yield StorePath(MemoryStore()) @pytest.fixture @@ -283,7 +284,7 @@ async def test_order( fill_value=1, ) z[:, :] = data - assert await store.get_async("order/0.0") == z._store["0.0"] + assert await (store / "order/0.0").get() == z._store["0.0"] @pytest.mark.parametrize("input_order", ["F", "C"]) @@ -395,7 +396,7 @@ async def test_transpose( fill_value=1, ) z[:, :] = data - assert await store.get_async("transpose/0.0") == await store.get_async("transpose_zarr/0.0") + assert await (store / "transpose/0.0").get() == await (store / "transpose_zarr/0.0").get() def test_transpose_invalid( @@ -606,7 +607,7 @@ async def test_delete_empty_chunks(store: Store): await 
_AsyncArrayProxy(a)[:16, :16].set(np.zeros((16, 16))) await _AsyncArrayProxy(a)[:16, :16].set(data) assert np.array_equal(await _AsyncArrayProxy(a)[:16, :16].get(), data) - assert await store.get_async("delete_empty_chunks/c0/0") is None + assert await (store / "delete_empty_chunks/c0/0").get() is None @pytest.mark.asyncio @@ -630,8 +631,8 @@ async def test_delete_empty_sharded_chunks(store: Store): data = np.ones((16, 16), dtype="uint16") data[:8, :8] = 0 assert np.array_equal(data, await _AsyncArrayProxy(a)[:, :].get()) - assert await store.get_async("delete_empty_sharded_chunks/c/1/0") is None - chunk_bytes = await store.get_async("delete_empty_sharded_chunks/c/0/0") + assert await (store / "delete_empty_sharded_chunks/c/1/0").get() is None + chunk_bytes = await (store / "delete_empty_sharded_chunks/c/0/0").get() assert chunk_bytes is not None and len(chunk_bytes) == 16 * 2 + 8 * 8 * 2 + 4 @@ -661,10 +662,10 @@ async def test_zarr_compat(store: Store): assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get()) assert np.array_equal(data, z2[:16, :18]) - assert z2._store["0.0"] == await store.get_async("zarr_compat3/0.0") - assert z2._store["0.1"] == await store.get_async("zarr_compat3/0.1") - assert z2._store["1.0"] == await store.get_async("zarr_compat3/1.0") - assert z2._store["1.1"] == await store.get_async("zarr_compat3/1.1") + assert z2._store["0.0"] == await (store / "zarr_compat3/0.0").get() + assert z2._store["0.1"] == await (store / "zarr_compat3/0.1").get() + assert z2._store["1.0"] == await (store / "zarr_compat3/1.0").get() + assert z2._store["1.1"] == await (store / "zarr_compat3/1.1").get() @pytest.mark.asyncio @@ -695,10 +696,10 @@ async def test_zarr_compat_F(store: Store): assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get()) assert np.array_equal(data, z2[:16, :18]) - assert z2._store["0.0"] == await store.get_async("zarr_compatF3/0.0") - assert z2._store["0.1"] == await store.get_async("zarr_compatF3/0.1") - assert z2._store["1.0"] == await store.get_async("zarr_compatF3/1.0") - assert z2._store["1.1"] == await store.get_async("zarr_compatF3/1.1") + assert z2._store["0.0"] == await (store / "zarr_compatF3/0.0").get() + assert z2._store["0.1"] == await (store / "zarr_compatF3/0.1").get() + assert z2._store["1.0"] == await (store / "zarr_compatF3/1.0").get() + assert z2._store["1.1"] == await (store / "zarr_compatF3/1.1").get() @pytest.mark.asyncio @@ -728,7 +729,7 @@ async def test_dimension_names(store: Store): ) assert (await AsyncArray.open(store / "dimension_names2")).metadata.dimension_names is None - zarr_json_bytes = await (store / "dimension_names2" / "zarr.json").get_async() + zarr_json_bytes = await (store / "dimension_names2" / "zarr.json").get() assert zarr_json_bytes is not None assert "dimension_names" not in json.loads(zarr_json_bytes) @@ -794,7 +795,7 @@ async def test_endian(store: Store, endian: Literal["big", "little"]): fill_value=1, ) z[:, :] = data - assert await store.get_async("endian/0.0") == z._store["0.0"] + assert await (store / "endian/0.0").get() == z._store["0.0"] @pytest.mark.parametrize("dtype_input_endian", [">u2", " None: + + agroup = AsyncGroup( + metadata=GroupMetadata(), + store_path=store_path, + runtime_configuration=RuntimeConfiguration(), + ) + group = Group(agroup) + + assert agroup.metadata is group.metadata + + # create two groups + foo = group.create_group("foo") + bar = foo.create_group("bar", attributes={"baz": "qux"}) + + # create an array from the "bar" group + data = np.arange(0, 4 * 4, 
dtype="uint16").reshape((4, 4)) + arr = bar.create_array( + "baz", shape=data.shape, dtype=data.dtype, chunk_shape=(2, 2), exists_ok=True + ) + arr[:] = data + + # check the array + assert arr == bar["baz"] + assert arr.shape == data.shape + assert arr.dtype == data.dtype + + # TODO: update this once the array api settles down + # assert arr.chunk_shape == (2, 2) + + bar2 = foo["bar"] + assert dict(bar2.attrs) == {"baz": "qux"} + + # update a group's attributes + bar2.attrs.update({"name": "bar"}) + # bar.attrs was modified in-place + assert dict(bar2.attrs) == {"baz": "qux", "name": "bar"} + + # and the attrs were modified in the store + bar3 = foo["bar"] + assert dict(bar3.attrs) == {"baz": "qux", "name": "bar"} diff --git a/tests/test_storage.py b/tests/test_storage.py index 5c1d437ecb..b06c6a209a 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -1,87 +1,96 @@ -import array +# import array import atexit -import json -import os -import pathlib -import sys + +# import json +# import os +# import pathlib +# import sys import pickle -import shutil + +# import shutil import tempfile -from contextlib import contextmanager -from pickle import PicklingError -from zipfile import ZipFile -import numpy as np +# from contextlib import contextmanager +# from pickle import PicklingError +# from zipfile import ZipFile + +# import numpy as np import pytest -from numpy.testing import assert_array_almost_equal, assert_array_equal - -from numcodecs.compat import ensure_bytes - -import zarr -from zarr._storage.store import _get_hierarchy_metadata -from zarr.codecs import BZ2, AsType, Blosc, Zlib -from zarr.context import Context -from zarr.convenience import consolidate_metadata -from zarr.errors import ContainsArrayError, ContainsGroupError, MetadataError -from zarr.hierarchy import group -from zarr.meta import ZARR_FORMAT, decode_array_metadata -from zarr.n5 import N5Store, N5FSStore, N5_FORMAT, n5_attrs_key + +# from numpy.testing import assert_array_almost_equal, assert_array_equal + +# from numcodecs.compat import ensure_bytes + +# import zarr +# from zarr._storage.store import _get_hierarchy_metadata +# from zarr.codecs import BZ2, AsType, Blosc, Zlib +# from zarr.context import Context +# from zarr.convenience import consolidate_metadata +# from zarr.errors import ContainsArrayError, ContainsGroupError, MetadataError +# from zarr.hierarchy import group +# from zarr.meta import ZARR_FORMAT, decode_array_metadata + +# from zarr.n5 import N5Store, N5FSStore, N5_FORMAT, n5_attrs_key from zarr.storage import ( - ABSStore, - ConsolidatedMetadataStore, - DBMStore, - DictStore, - DirectoryStore, - KVStore, - LMDBStore, - LRUStoreCache, - MemoryStore, - MongoDBStore, - NestedDirectoryStore, - RedisStore, - SQLiteStore, - Store, - TempStore, - ZipStore, - array_meta_key, - atexit_rmglob, + # ABSStore, + # ConsolidatedMetadataStore, + # DBMStore, + # DictStore, + # DirectoryStore, + # KVStore, + # LMDBStore, + # LRUStoreCache, + # MemoryStore, + # MongoDBStore, + # NestedDirectoryStore, + # RedisStore, + # SQLiteStore, + # Store, + # TempStore, + # ZipStore, + # array_meta_key, + # atexit_rmglob, atexit_rmtree, - attrs_key, - data_root, - default_compressor, - getsize, - group_meta_key, - init_array, - init_group, - migrate_1to2, - meta_root, - normalize_store_arg, + # attrs_key, + # data_root, + # default_compressor, + # getsize, + # group_meta_key, + # init_array, + # init_group, + # migrate_1to2, + # meta_root, + # normalize_store_arg, ) -from zarr.storage import FSStore, rename, listdir 
-from zarr._storage.v3 import KVStoreV3 -from .util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp -from zarr.util import ConstantMap, json_dumps +# from zarr.storage import FSStore, rename, listdir +# from zarr._storage.v3 import KVStoreV3 +# from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp +# from zarr.util import ConstantMap, json_dumps -@contextmanager -def does_not_raise(): - yield +from zarr.v3.abc.store import Store +from zarr.v3.store import MemoryStore as KVStore, LocalStore -@pytest.fixture( - params=[ - (None, "."), - (".", "."), - ("/", "/"), - ] -) -def dimension_separator_fixture(request): - return request.param +# @contextmanager +# def does_not_raise(): +# yield + +# @pytest.fixture( +# params=[ +# (None, "."), +# (".", "."), +# ("/", "/"), +# ] +# ) +# def dimension_separator_fixture(request): +# return request.param -def skip_if_nested_chunks(**kwargs): - if kwargs.get("dimension_separator") == "/": - pytest.skip("nested chunks are unsupported") + +# def skip_if_nested_chunks(**kwargs): +# if kwargs.get("dimension_separator") == "/": +# pytest.skip("nested chunks are unsupported") def test_kvstore_repr(): @@ -92,2536 +101,2536 @@ def test_ensure_store(): class InvalidStore: pass - with pytest.raises(ValueError): - Store._ensure_store(InvalidStore()) + assert not isinstance(InvalidStore(), Store) + + # with pytest.raises(ValueError): + # Store._ensure_store(InvalidStore()) - # cannot initialize with a store from a different Zarr version - with pytest.raises(ValueError): - Store._ensure_store(KVStoreV3(dict())) + # # cannot initialize with a store from a different Zarr version + # with pytest.raises(ValueError): + # Store._ensure_store(KVStoreV3(dict())) - # cannot initialize without a store - with pytest.raises(ValueError): - Store._ensure_store(None) + # # cannot initialize without a store + # with pytest.raises(ValueError): + # Store._ensure_store(None) def test_capabilities(): s = KVStore(dict()) - assert s.is_readable() - assert s.is_listable() - assert s.is_erasable() - assert s.is_writeable() + # assert s.is_readable() # Q(JH): do we like these flags more? 
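+    # (the is_* methods above were v2-era; the v3 Store ABC instead exposes
+    # plain boolean class attributes, e.g. `store.supports_listing`, asserted below)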
+ # assert s.is_listable() + # assert s.is_erasable() + # assert s.is_writeable() + assert s.supports_writes + assert s.supports_partial_writes + assert s.supports_listing -def test_getsize_non_implemented(): - assert getsize(object()) == -1 +# def test_getsize_non_implemented(): +# assert getsize(object()) == -1 -def test_kvstore_eq(): - assert KVStore(dict()) != dict() +# def test_kvstore_eq(): +# assert KVStore(dict()) != dict() -def test_coverage_rename(): - store = dict() - store["a"] = 1 - rename(store, "a", "b") +# def test_coverage_rename(): +# store = dict() +# store["a"] = 1 +# rename(store, "a", "b") -def test_deprecated_listdir_nosotre(): - store = dict() - with pytest.warns(UserWarning, match="has no `listdir`"): - listdir(store) + +# def test_deprecated_listdir_nosotre(): +# store = dict() +# with pytest.warns(UserWarning, match="has no `listdir`"): +# listdir(store) class StoreTests: """Abstract store tests.""" - version = 2 + # version = 2 root = "" def create_store(self, **kwargs): # pragma: no cover # implement in sub-class raise NotImplementedError - def test_context_manager(self): - with self.create_store(): - pass + # def test_context_manager(self): + # with self.create_store(): + # pass - def test_get_set_del_contains(self): + @pytest.mark.asyncio + async def test_get_set_del_contains(self): store = self.create_store() - # test __contains__, __getitem__, __setitem__ + # test exists, get, set key = self.root + "foo" - assert key not in store - with pytest.raises(KeyError): - # noinspection PyStatementEffect - store[key] - store[key] = b"bar" - assert key in store - assert b"bar" == ensure_bytes(store[key]) - - # test __delitem__ (optional) + assert not await store.exists(key) + assert await store.get(key) is None + await store.set(key, b"bar") + assert await store.exists(key) + assert b"bar" == await store.get(key) + + # test delete (optional) try: - del store[key] + await store.delete(key) except NotImplementedError: pass else: - assert key not in store - with pytest.raises(KeyError): - # noinspection PyStatementEffect - store[key] - with pytest.raises(KeyError): - # noinspection PyStatementEffect - del store[key] - - store.close() - - def test_set_invalid_content(self): - store = self.create_store() - - with pytest.raises(TypeError): - store[self.root + "baz"] = list(range(5)) - - store.close() - - def test_clear(self): - store = self.create_store() - store[self.root + "foo"] = b"bar" - store[self.root + "baz"] = b"qux" - assert len(store) == 2 - store.clear() - assert len(store) == 0 - assert self.root + "foo" not in store - assert self.root + "baz" not in store + assert not await store.exists(key) + assert await store.get(key) is None - store.close() + assert await store.delete(key) is None - def test_pop(self): - store = self.create_store() - store[self.root + "foo"] = b"bar" - store[self.root + "baz"] = b"qux" - assert len(store) == 2 - v = store.pop(self.root + "foo") - assert ensure_bytes(v) == b"bar" - assert len(store) == 1 - v = store.pop(self.root + "baz") - assert ensure_bytes(v) == b"qux" - assert len(store) == 0 - with pytest.raises(KeyError): - store.pop(self.root + "xxx") - v = store.pop(self.root + "xxx", b"default") - assert v == b"default" - v = store.pop(self.root + "xxx", b"") - assert v == b"" - v = store.pop(self.root + "xxx", None) - assert v is None - - store.close() - - def test_popitem(self): - store = self.create_store() - store[self.root + "foo"] = b"bar" - k, v = store.popitem() - assert k == self.root + "foo" - assert ensure_bytes(v) == 
b"bar" - assert len(store) == 0 - with pytest.raises(KeyError): - store.popitem() - - store.close() + # store.close() - def test_writeable_values(self): + @pytest.mark.asyncio + async def test_set_invalid_content(self): store = self.create_store() - # __setitem__ should accept any value that implements buffer interface - store[self.root + "foo1"] = b"bar" - store[self.root + "foo2"] = bytearray(b"bar") - store[self.root + "foo3"] = array.array("B", b"bar") - store[self.root + "foo4"] = np.frombuffer(b"bar", dtype="u1") - - store.close() - - def test_update(self): - store = self.create_store() - assert self.root + "foo" not in store - assert self.root + "baz" not in store - - if self.version == 2: - store.update(foo=b"bar", baz=b"quux") - else: - kv = {self.root + "foo": b"bar", self.root + "baz": b"quux"} - store.update(kv) - - assert b"bar" == ensure_bytes(store[self.root + "foo"]) - assert b"quux" == ensure_bytes(store[self.root + "baz"]) - - store.close() - - def test_iterators(self): + with pytest.raises(TypeError): + await store.set(self.root + "baz", list(range(5))) + + # store.close() + + # def test_clear(self): + # store = self.create_store() + # store[self.root + "foo"] = b"bar" + # store[self.root + "baz"] = b"qux" + # assert len(store) == 2 + # store.clear() + # assert len(store) == 0 + # assert self.root + "foo" not in store + # assert self.root + "baz" not in store + + # store.close() + + # def test_pop(self): + # store = self.create_store() + # store[self.root + "foo"] = b"bar" + # store[self.root + "baz"] = b"qux" + # assert len(store) == 2 + # v = store.pop(self.root + "foo") + # assert ensure_bytes(v) == b"bar" + # assert len(store) == 1 + # v = store.pop(self.root + "baz") + # assert ensure_bytes(v) == b"qux" + # assert len(store) == 0 + # with pytest.raises(KeyError): + # store.pop(self.root + "xxx") + # v = store.pop(self.root + "xxx", b"default") + # assert v == b"default" + # v = store.pop(self.root + "xxx", b"") + # assert v == b"" + # v = store.pop(self.root + "xxx", None) + # assert v is None + + # store.close() + + # def test_popitem(self): + # store = self.create_store() + # store[self.root + "foo"] = b"bar" + # k, v = store.popitem() + # assert k == self.root + "foo" + # assert ensure_bytes(v) == b"bar" + # assert len(store) == 0 + # with pytest.raises(KeyError): + # store.popitem() + + # store.close() + + @pytest.mark.asyncio + async def test_writeable_values(self): store = self.create_store() - # test iterator methods on empty store - assert 0 == len(store) - assert set() == set(store) - assert set() == set(store.keys()) - assert set() == set(store.values()) - assert set() == set(store.items()) - - # setup some values - store[self.root + "a"] = b"aaa" - store[self.root + "b"] = b"bbb" - store[self.root + "c/d"] = b"ddd" - store[self.root + "c/e/f"] = b"fff" - - # test iterators on store with data - assert 4 == len(store) - expected = set(self.root + k for k in ["a", "b", "c/d", "c/e/f"]) - assert expected == set(store) - assert expected == set(store.keys()) - assert {b"aaa", b"bbb", b"ddd", b"fff"} == set(map(ensure_bytes, store.values())) - assert { - (self.root + "a", b"aaa"), - (self.root + "b", b"bbb"), - (self.root + "c/d", b"ddd"), - (self.root + "c/e/f", b"fff"), - } == set(map(lambda kv: (kv[0], ensure_bytes(kv[1])), store.items())) - - store.close() - - def test_pickle(self): + # set should accept any value that implements buffer interface + await store.set(self.root + "foo1", b"bar") + await store.set(self.root + "foo2", bytearray(b"bar")) + # 
TODO(v3): revisit passing numpy arrays directly to the store + # await store.set(self.root + "foo3", array.array("B", b"bar")) + # await store.set(self.root + "foo4", np.frombuffer(b"bar", dtype="u1")) + + # store.close() + + # def test_update(self): + # store = self.create_store() + # assert self.root + "foo" not in store + # assert self.root + "baz" not in store + + # if self.version == 2: + # store.update(foo=b"bar", baz=b"quux") + # else: + # kv = {self.root + "foo": b"bar", self.root + "baz": b"quux"} + # store.update(kv) + + # assert b"bar" == ensure_bytes(store[self.root + "foo"]) + # assert b"quux" == ensure_bytes(store[self.root + "baz"]) + + # store.close() + + # def test_iterators(self): + # store = self.create_store() + + # # test iterator methods on empty store + # assert 0 == len(store) + # assert set() == set(store) + # assert set() == set(store.keys()) + # assert set() == set(store.values()) + # assert set() == set(store.items()) + + # # setup some values + # store[self.root + "a"] = b"aaa" + # store[self.root + "b"] = b"bbb" + # store[self.root + "c/d"] = b"ddd" + # store[self.root + "c/e/f"] = b"fff" + + # # test iterators on store with data + # assert 4 == len(store) + # expected = set(self.root + k for k in ["a", "b", "c/d", "c/e/f"]) + # assert expected == set(store) + # assert expected == set(store.keys()) + # assert {b"aaa", b"bbb", b"ddd", b"fff"} == set(map(ensure_bytes, store.values())) + # assert { + # (self.root + "a", b"aaa"), + # (self.root + "b", b"bbb"), + # (self.root + "c/d", b"ddd"), + # (self.root + "c/e/f", b"fff"), + # } == set(map(lambda kv: (kv[0], ensure_bytes(kv[1])), store.items())) + + # store.close() + + @pytest.mark.asyncio + async def test_pickle(self): # setup store store = self.create_store() - store[self.root + "foo"] = b"bar" - store[self.root + "baz"] = b"quux" - n = len(store) - keys = sorted(store.keys()) + await store.set(self.root + "foo", b"bar") + await store.set(self.root + "baz", b"quux") + # n = len(store) + keys = sorted(await store.list()) # round-trip through pickle dump = pickle.dumps(store) - # some stores cannot be opened twice at the same time, need to close - # store before can round-trip through pickle - store.close() + # # some stores cannot be opened twice at the same time, need to close + # # store before can round-trip through pickle + # store.close() # check can still pickle after close assert dump == pickle.dumps(store) store2 = pickle.loads(dump) # verify - assert n == len(store2) - assert keys == sorted(store2.keys()) - assert b"bar" == ensure_bytes(store2[self.root + "foo"]) - assert b"quux" == ensure_bytes(store2[self.root + "baz"]) - - store2.close() - - def test_getsize(self): - store = self.create_store() - if isinstance(store, dict) or hasattr(store, "getsize"): - assert 0 == getsize(store) - store["foo"] = b"x" - assert 1 == getsize(store) - assert 1 == getsize(store, "foo") - store["bar"] = b"yy" - assert 3 == getsize(store) - assert 2 == getsize(store, "bar") - store["baz"] = bytearray(b"zzz") - assert 6 == getsize(store) - assert 3 == getsize(store, "baz") - store["quux"] = array.array("B", b"zzzz") - assert 10 == getsize(store) - assert 4 == getsize(store, "quux") - store["spong"] = np.frombuffer(b"zzzzz", dtype="u1") - assert 15 == getsize(store) - assert 5 == getsize(store, "spong") - - store.close() - - # noinspection PyStatementEffect - def test_hierarchy(self): + # assert n == len(store2) + assert keys == sorted(await store2.list()) + assert await store2.get(self.root + "foo") == b"bar" + assert 
await store2.get(self.root + "baz") == b"quux" + + # store2.close() + + # def test_getsize(self): + # store = self.create_store() + # if isinstance(store, dict) or hasattr(store, "getsize"): + # assert 0 == getsize(store) + # store["foo"] = b"x" + # assert 1 == getsize(store) + # assert 1 == getsize(store, "foo") + # store["bar"] = b"yy" + # assert 3 == getsize(store) + # assert 2 == getsize(store, "bar") + # store["baz"] = bytearray(b"zzz") + # assert 6 == getsize(store) + # assert 3 == getsize(store, "baz") + # store["quux"] = array.array("B", b"zzzz") + # assert 10 == getsize(store) + # assert 4 == getsize(store, "quux") + # store["spong"] = np.frombuffer(b"zzzzz", dtype="u1") + # assert 15 == getsize(store) + # assert 5 == getsize(store, "spong") + + # store.close() + + # # noinspection PyStatementEffect + @pytest.mark.asyncio + async def test_hierarchy(self): # setup store = self.create_store() - store[self.root + "a"] = b"aaa" - store[self.root + "b"] = b"bbb" - store[self.root + "c/d"] = b"ddd" - store[self.root + "c/e/f"] = b"fff" - store[self.root + "c/e/g"] = b"ggg" + await store.set(self.root + "a", b"aaa") + await store.set(self.root + "b", b"bbb") + await store.set(self.root + "c/d", b"ddd") + await store.set(self.root + "c/e/f", b"fff") + await store.set(self.root + "c/e/g", b"ggg") # check keys - assert self.root + "a" in store - assert self.root + "b" in store - assert self.root + "c/d" in store - assert self.root + "c/e/f" in store - assert self.root + "c/e/g" in store - assert self.root + "c" not in store - assert self.root + "c/" not in store - assert self.root + "c/e" not in store - assert self.root + "c/e/" not in store - assert self.root + "c/d/x" not in store - - # check __getitem__ - with pytest.raises(KeyError): - store[self.root + "c"] - with pytest.raises(KeyError): - store[self.root + "c/e"] - with pytest.raises(KeyError): - store[self.root + "c/d/x"] - - # test getsize (optional) - if hasattr(store, "getsize"): - # TODO: proper behavior of getsize? - # v3 returns size of all nested arrays, not just the - # size of the arrays in the current folder. 
- if self.version == 2: - assert 6 == store.getsize() - else: - assert 15 == store.getsize() - assert 3 == store.getsize("a") - assert 3 == store.getsize("b") - if self.version == 2: - assert 3 == store.getsize("c") - else: - assert 9 == store.getsize("c") - assert 3 == store.getsize("c/d") - assert 6 == store.getsize("c/e") - assert 3 == store.getsize("c/e/f") - assert 3 == store.getsize("c/e/g") - # non-existent paths - assert 0 == store.getsize("x") - assert 0 == store.getsize("a/x") - assert 0 == store.getsize("c/x") - assert 0 == store.getsize("c/x/y") - assert 0 == store.getsize("c/d/y") - assert 0 == store.getsize("c/d/y/z") - - # access item via full path - assert 3 == store.getsize(self.root + "a") - - # test listdir (optional) - if hasattr(store, "listdir"): - assert {"a", "b", "c"} == set(store.listdir(self.root)) - assert {"d", "e"} == set(store.listdir(self.root + "c")) - assert {"f", "g"} == set(store.listdir(self.root + "c/e")) + assert await store.exists(self.root + "a") + assert await store.exists(self.root + "b") + assert await store.exists(self.root + "c/d") + assert await store.exists(self.root + "c/e/f") + assert await store.exists(self.root + "c/e/g") + assert not await store.exists(self.root + "c") + assert not await store.exists(self.root + "c/") + assert not await store.exists(self.root + "c/e") + assert not await store.exists(self.root + "c/e/") + assert not await store.exists(self.root + "c/d/x") + + # check get + # with pytest.raises(KeyError): + # store[self.root + "c"] + assert await store.get(self.root + "c") is None + + # with pytest.raises(KeyError): + # store[self.root + "c/e"] + assert await store.get(self.root + "c/e") is None + # with pytest.raises(KeyError): + # store[self.root + "c/d/x"] + assert await store.get(self.root + "c/d/x") is None + + # # test getsize (optional) + # if hasattr(store, "getsize"): + # # TODO: proper behavior of getsize? + # # v3 returns size of all nested arrays, not just the + # # size of the arrays in the current folder. 
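+# # (getsize has no counterpart on the v3 Store ABC yet; the block stays
+# # parked here until that design question is settled)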
+ # if self.version == 2: + # assert 6 == store.getsize() + # else: + # assert 15 == store.getsize() + # assert 3 == store.getsize("a") + # assert 3 == store.getsize("b") + # if self.version == 2: + # assert 3 == store.getsize("c") + # else: + # assert 9 == store.getsize("c") + # assert 3 == store.getsize("c/d") + # assert 6 == store.getsize("c/e") + # assert 3 == store.getsize("c/e/f") + # assert 3 == store.getsize("c/e/g") + # # non-existent paths + # assert 0 == store.getsize("x") + # assert 0 == store.getsize("a/x") + # assert 0 == store.getsize("c/x") + # assert 0 == store.getsize("c/x/y") + # assert 0 == store.getsize("c/d/y") + # assert 0 == store.getsize("c/d/y/z") + + # # access item via full path + # assert 3 == store.getsize(self.root + "a") + + # test list_dir (optional) + if store.supports_listing: + assert set(await store.list_dir(self.root)) == {"a", "b", "c"} + assert set(await store.list_dir(self.root + "c")) == {"d", "e"} + assert set(await store.list_dir(self.root + "c/e")) == {"f", "g"} # no exception raised if path does not exist or is leaf - assert [] == store.listdir(self.root + "x") - assert [] == store.listdir(self.root + "a/x") - assert [] == store.listdir(self.root + "c/x") - assert [] == store.listdir(self.root + "c/x/y") - assert [] == store.listdir(self.root + "c/d/y") - assert [] == store.listdir(self.root + "c/d/y/z") - assert [] == store.listdir(self.root + "c/e/f") - - # test rename (optional) - if store.is_erasable(): - store.rename("c/e", "c/e2") - assert self.root + "c/d" in store - assert self.root + "c/e" not in store - assert self.root + "c/e/f" not in store - assert self.root + "c/e/g" not in store - assert self.root + "c/e2" not in store - assert self.root + "c/e2/f" in store - assert self.root + "c/e2/g" in store - store.rename("c/e2", "c/e") - assert self.root + "c/d" in store - assert self.root + "c/e2" not in store - assert self.root + "c/e2/f" not in store - assert self.root + "c/e2/g" not in store - assert self.root + "c/e" not in store - assert self.root + "c/e/f" in store - assert self.root + "c/e/g" in store - store.rename("c", "c1/c2/c3") - assert self.root + "a" in store - assert self.root + "c" not in store - assert self.root + "c/d" not in store - assert self.root + "c/e" not in store - assert self.root + "c/e/f" not in store - assert self.root + "c/e/g" not in store - assert self.root + "c1" not in store - assert self.root + "c1/c2" not in store - assert self.root + "c1/c2/c3" not in store - assert self.root + "c1/c2/c3/d" in store - assert self.root + "c1/c2/c3/e" not in store - assert self.root + "c1/c2/c3/e/f" in store - assert self.root + "c1/c2/c3/e/g" in store - store.rename("c1/c2/c3", "c") - assert self.root + "c" not in store - assert self.root + "c/d" in store - assert self.root + "c/e" not in store - assert self.root + "c/e/f" in store - assert self.root + "c/e/g" in store - assert self.root + "c1" not in store - assert self.root + "c1/c2" not in store - assert self.root + "c1/c2/c3" not in store - assert self.root + "c1/c2/c3/d" not in store - assert self.root + "c1/c2/c3/e" not in store - assert self.root + "c1/c2/c3/e/f" not in store - assert self.root + "c1/c2/c3/e/g" not in store - - # test rmdir (optional) - store.rmdir("c/e") - assert self.root + "c/d" in store - assert self.root + "c/e/f" not in store - assert self.root + "c/e/g" not in store - store.rmdir("c") - assert self.root + "c/d" not in store - store.rmdir() - assert self.root + "a" not in store - assert self.root + "b" not in store - store[self.root + "a"] 
= b"aaa" - store[self.root + "c/d"] = b"ddd" - store[self.root + "c/e/f"] = b"fff" - # no exceptions raised if path does not exist or is leaf - store.rmdir("x") - store.rmdir("a/x") - store.rmdir("c/x") - store.rmdir("c/x/y") - store.rmdir("c/d/y") - store.rmdir("c/d/y/z") - store.rmdir("c/e/f") - assert self.root + "a" in store - assert self.root + "c/d" in store - assert self.root + "c/e/f" in store - - store.close() - - def test_init_array(self, dimension_separator_fixture): - - pass_dim_sep, want_dim_sep = dimension_separator_fixture - - store = self.create_store(dimension_separator=pass_dim_sep) - init_array(store, shape=1000, chunks=100) - - # check metadata - assert array_meta_key in store - meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert (1000,) == meta["shape"] - assert (100,) == meta["chunks"] - assert np.dtype(None) == meta["dtype"] - assert default_compressor.get_config() == meta["compressor"] - assert meta["fill_value"] is None - # Missing MUST be assumed to be "." - assert meta.get("dimension_separator", ".") is want_dim_sep - - store.close() - - def test_init_array_overwrite(self): - self._test_init_array_overwrite("F") - - def test_init_array_overwrite_path(self): - self._test_init_array_overwrite_path("F") - - def test_init_array_overwrite_chunk_store(self): - self._test_init_array_overwrite_chunk_store("F") - - def test_init_group_overwrite(self): - self._test_init_group_overwrite("F") - - def test_init_group_overwrite_path(self): - self._test_init_group_overwrite_path("F") - - def test_init_group_overwrite_chunk_store(self): - self._test_init_group_overwrite_chunk_store("F") - - def _test_init_array_overwrite(self, order): - # setup - store = self.create_store() - if self.version == 2: - path = None - mkey = array_meta_key - meta = dict( - shape=(2000,), - chunks=(200,), - dtype=np.dtype("u1"), - compressor=Zlib(1).get_config(), - fill_value=0, - order=order, - filters=None, - ) - else: - path = "arr1" # no default, have to specify for v3 - mkey = meta_root + path + ".array.json" - meta = dict( - shape=(2000,), - chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), - data_type=np.dtype("u1"), - compressor=Zlib(1), - fill_value=0, - chunk_memory_layout=order, - filters=None, - ) - store[mkey] = store._metadata_class.encode_array_metadata(meta) - - # don't overwrite (default) - with pytest.raises(ContainsArrayError): - init_array(store, shape=1000, chunks=100, path=path) - - # do overwrite - try: - init_array(store, shape=1000, chunks=100, dtype="i4", overwrite=True, path=path) - except NotImplementedError: - pass - else: - assert mkey in store - meta = store._metadata_class.decode_array_metadata(store[mkey]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - assert (100,) == meta["chunks"] - assert np.dtype("i4") == meta["dtype"] - else: - assert (100,) == meta["chunk_grid"]["chunk_shape"] - assert np.dtype("i4") == meta["data_type"] - assert (1000,) == meta["shape"] - - store.close() - - def test_init_array_path(self): - path = "foo/bar" - store = self.create_store() - init_array(store, shape=1000, chunks=100, path=path) - - # check metadata - if self.version == 2: - mkey = path + "/" + array_meta_key - else: - mkey = meta_root + path + ".array.json" - assert mkey in store - meta = store._metadata_class.decode_array_metadata(store[mkey]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - assert (100,) == meta["chunks"] - assert 
np.dtype(None) == meta["dtype"] - assert default_compressor.get_config() == meta["compressor"] - else: - assert (100,) == meta["chunk_grid"]["chunk_shape"] - assert np.dtype(None) == meta["data_type"] - assert default_compressor == meta["compressor"] - assert (1000,) == meta["shape"] - assert meta["fill_value"] is None - - store.close() - - def _test_init_array_overwrite_path(self, order): - # setup - path = "foo/bar" - store = self.create_store() - if self.version == 2: - mkey = path + "/" + array_meta_key - meta = dict( - shape=(2000,), - chunks=(200,), - dtype=np.dtype("u1"), - compressor=Zlib(1).get_config(), - fill_value=0, - order=order, - filters=None, - ) - else: - mkey = meta_root + path + ".array.json" - meta = dict( - shape=(2000,), - chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), - data_type=np.dtype("u1"), - compressor=Zlib(1), - fill_value=0, - chunk_memory_layout=order, - filters=None, - ) - store[mkey] = store._metadata_class.encode_array_metadata(meta) - - # don't overwrite - with pytest.raises(ContainsArrayError): - init_array(store, shape=1000, chunks=100, path=path) - - # do overwrite - try: - init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) - except NotImplementedError: - pass - else: - if self.version == 2: - assert group_meta_key in store - assert array_meta_key not in store - assert mkey in store - # should have been overwritten - meta = store._metadata_class.decode_array_metadata(store[mkey]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - assert (100,) == meta["chunks"] - assert np.dtype("i4") == meta["dtype"] - else: - assert (100,) == meta["chunk_grid"]["chunk_shape"] - assert np.dtype("i4") == meta["data_type"] - assert (1000,) == meta["shape"] - - store.close() - - def test_init_array_overwrite_group(self): - # setup - path = "foo/bar" - store = self.create_store() - if self.version == 2: - array_key = path + "/" + array_meta_key - group_key = path + "/" + group_meta_key - else: - array_key = meta_root + path + ".array.json" - group_key = meta_root + path + ".group.json" - store[group_key] = store._metadata_class.encode_group_metadata() - - # don't overwrite - with pytest.raises(ContainsGroupError): - init_array(store, shape=1000, chunks=100, path=path) - - # do overwrite - try: - init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) - except NotImplementedError: - pass - else: - assert group_key not in store - assert array_key in store - meta = store._metadata_class.decode_array_metadata(store[array_key]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - assert (100,) == meta["chunks"] - assert np.dtype("i4") == meta["dtype"] - else: - assert (100,) == meta["chunk_grid"]["chunk_shape"] - assert np.dtype("i4") == meta["data_type"] - assert (1000,) == meta["shape"] - - store.close() - - def _test_init_array_overwrite_chunk_store(self, order): - # setup - store = self.create_store() - chunk_store = self.create_store() - - if self.version == 2: - path = None - data_path = "" - mkey = array_meta_key - meta = dict( - shape=(2000,), - chunks=(200,), - dtype=np.dtype("u1"), - compressor=None, - fill_value=0, - filters=None, - order=order, - ) - else: - path = "arr1" - data_path = data_root + "arr1/" - mkey = meta_root + path + ".array.json" - meta = dict( - shape=(2000,), - chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), - data_type=np.dtype("u1"), - compressor=None, - fill_value=0, - filters=None, - 
chunk_memory_layout=order, - ) - - store[mkey] = store._metadata_class.encode_array_metadata(meta) - - chunk_store[data_path + "0"] = b"aaa" - chunk_store[data_path + "1"] = b"bbb" - - # don't overwrite (default) - with pytest.raises(ContainsArrayError): - init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) - - # do overwrite - try: - init_array( - store, - path=path, - shape=1000, - chunks=100, - dtype="i4", - overwrite=True, - chunk_store=chunk_store, - ) - except NotImplementedError: - pass - else: - assert mkey in store - meta = store._metadata_class.decode_array_metadata(store[mkey]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - assert (100,) == meta["chunks"] - assert np.dtype("i4") == meta["dtype"] - else: - assert (100,) == meta["chunk_grid"]["chunk_shape"] - assert np.dtype("i4") == meta["data_type"] - assert (1000,) == meta["shape"] - assert data_path + "0" not in chunk_store - assert data_path + "1" not in chunk_store - - store.close() - chunk_store.close() - - def test_init_array_compat(self): - store = self.create_store() - if self.version == 2: - path = None - mkey = array_meta_key - else: - path = "arr1" - mkey = meta_root + path + ".array.json" - init_array(store, path=path, shape=1000, chunks=100, compressor="none") - meta = store._metadata_class.decode_array_metadata(store[mkey]) - if self.version == 2: - assert meta["compressor"] is None - else: - assert "compressor" not in meta - store.close() - - def test_init_group(self): - store = self.create_store() - if self.version == 2: - path = None - mkey = group_meta_key - else: - path = "foo" - mkey = meta_root + path + ".group.json" - init_group(store, path=path) - - # check metadata - assert mkey in store - meta = store._metadata_class.decode_group_metadata(store[mkey]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - else: - assert meta == {"attributes": {}} - - store.close() - - def _test_init_group_overwrite(self, order): - if self.version == 3: - pytest.skip("In v3 array and group names cannot overlap") - # setup - store = self.create_store() - store[array_meta_key] = store._metadata_class.encode_array_metadata( - dict( - shape=(2000,), - chunks=(200,), - dtype=np.dtype("u1"), - compressor=None, - fill_value=0, - order=order, - filters=None, - ) - ) - - # don't overwrite array (default) - with pytest.raises(ContainsArrayError): - init_group(store) - - # do overwrite - try: - init_group(store, overwrite=True) - except NotImplementedError: - pass - else: - assert array_meta_key not in store - assert group_meta_key in store - meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - - # don't overwrite group - with pytest.raises(ValueError): - init_group(store) - - store.close() - - def _test_init_group_overwrite_path(self, order): - # setup - path = "foo/bar" - store = self.create_store() - if self.version == 2: - meta = dict( - shape=(2000,), - chunks=(200,), - dtype=np.dtype("u1"), - compressor=None, - fill_value=0, - order=order, - filters=None, - ) - array_key = path + "/" + array_meta_key - group_key = path + "/" + group_meta_key - else: - meta = dict( - shape=(2000,), - chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), - data_type=np.dtype("u1"), - compressor=None, - fill_value=0, - filters=None, - chunk_memory_layout=order, - ) - array_key = meta_root + path + ".array.json" - group_key = meta_root + path + ".group.json" - store[array_key] = 
store._metadata_class.encode_array_metadata(meta) - - # don't overwrite - with pytest.raises(ValueError): - init_group(store, path=path) - - # do overwrite - try: - init_group(store, overwrite=True, path=path) - except NotImplementedError: - pass - else: - if self.version == 2: - assert array_meta_key not in store - assert group_meta_key in store - assert array_key not in store - assert group_key in store - # should have been overwritten - meta = store._metadata_class.decode_group_metadata(store[group_key]) - if self.version == 2: - assert ZARR_FORMAT == meta["zarr_format"] - else: - assert meta == {"attributes": {}} - - store.close() - - def _test_init_group_overwrite_chunk_store(self, order): - if self.version == 3: - pytest.skip("In v3 array and group names cannot overlap") - # setup - store = self.create_store() - chunk_store = self.create_store() - store[array_meta_key] = store._metadata_class.encode_array_metadata( - dict( - shape=(2000,), - chunks=(200,), - dtype=np.dtype("u1"), - compressor=None, - fill_value=0, - filters=None, - order=order, - ) - ) - chunk_store["foo"] = b"bar" - chunk_store["baz"] = b"quux" - - # don't overwrite array (default) - with pytest.raises(ValueError): - init_group(store, chunk_store=chunk_store) - - # do overwrite - try: - init_group(store, overwrite=True, chunk_store=chunk_store) - except NotImplementedError: - pass - else: - assert array_meta_key not in store - assert group_meta_key in store - meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert "foo" not in chunk_store - assert "baz" not in chunk_store - - # don't overwrite group - with pytest.raises(ValueError): - init_group(store) - - store.close() - chunk_store.close() + assert await store.list_dir(self.root + "x") == [] + assert await store.list_dir(self.root + "a/x") == [] + assert await store.list_dir(self.root + "c/x") == [] + assert await store.list_dir(self.root + "c/x/y") == [] + assert await store.list_dir(self.root + "c/d/y") == [] + assert await store.list_dir(self.root + "c/d/y/z") == [] + assert await store.list_dir(self.root + "c/e/f") == [] + + +# # test rename (optional) +# if store.is_erasable(): +# store.rename("c/e", "c/e2") +# assert self.root + "c/d" in store +# assert self.root + "c/e" not in store +# assert self.root + "c/e/f" not in store +# assert self.root + "c/e/g" not in store +# assert self.root + "c/e2" not in store +# assert self.root + "c/e2/f" in store +# assert self.root + "c/e2/g" in store +# store.rename("c/e2", "c/e") +# assert self.root + "c/d" in store +# assert self.root + "c/e2" not in store +# assert self.root + "c/e2/f" not in store +# assert self.root + "c/e2/g" not in store +# assert self.root + "c/e" not in store +# assert self.root + "c/e/f" in store +# assert self.root + "c/e/g" in store +# store.rename("c", "c1/c2/c3") +# assert self.root + "a" in store +# assert self.root + "c" not in store +# assert self.root + "c/d" not in store +# assert self.root + "c/e" not in store +# assert self.root + "c/e/f" not in store +# assert self.root + "c/e/g" not in store +# assert self.root + "c1" not in store +# assert self.root + "c1/c2" not in store +# assert self.root + "c1/c2/c3" not in store +# assert self.root + "c1/c2/c3/d" in store +# assert self.root + "c1/c2/c3/e" not in store +# assert self.root + "c1/c2/c3/e/f" in store +# assert self.root + "c1/c2/c3/e/g" in store +# store.rename("c1/c2/c3", "c") +# assert self.root + "c" not in store +# assert self.root + "c/d" in store +# 
assert self.root + "c/e" not in store +# assert self.root + "c/e/f" in store +# assert self.root + "c/e/g" in store +# assert self.root + "c1" not in store +# assert self.root + "c1/c2" not in store +# assert self.root + "c1/c2/c3" not in store +# assert self.root + "c1/c2/c3/d" not in store +# assert self.root + "c1/c2/c3/e" not in store +# assert self.root + "c1/c2/c3/e/f" not in store +# assert self.root + "c1/c2/c3/e/g" not in store + +# # test rmdir (optional) +# store.rmdir("c/e") +# assert self.root + "c/d" in store +# assert self.root + "c/e/f" not in store +# assert self.root + "c/e/g" not in store +# store.rmdir("c") +# assert self.root + "c/d" not in store +# store.rmdir() +# assert self.root + "a" not in store +# assert self.root + "b" not in store +# store[self.root + "a"] = b"aaa" +# store[self.root + "c/d"] = b"ddd" +# store[self.root + "c/e/f"] = b"fff" +# # no exceptions raised if path does not exist or is leaf +# store.rmdir("x") +# store.rmdir("a/x") +# store.rmdir("c/x") +# store.rmdir("c/x/y") +# store.rmdir("c/d/y") +# store.rmdir("c/d/y/z") +# store.rmdir("c/e/f") +# assert self.root + "a" in store +# assert self.root + "c/d" in store +# assert self.root + "c/e/f" in store + +# store.close() + +# def test_init_array(self, dimension_separator_fixture): + +# pass_dim_sep, want_dim_sep = dimension_separator_fixture + +# store = self.create_store(dimension_separator=pass_dim_sep) +# init_array(store, shape=1000, chunks=100) + +# # check metadata +# assert array_meta_key in store +# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunks"] +# assert np.dtype(None) == meta["dtype"] +# assert default_compressor.get_config() == meta["compressor"] +# assert meta["fill_value"] is None +# # Missing MUST be assumed to be "." 
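+# # (cf. the dimension_separator_fixture above: a store created with None
+# # must still read back with the "." default)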
+# assert meta.get("dimension_separator", ".") is want_dim_sep + +# store.close() + +# def test_init_array_overwrite(self): +# self._test_init_array_overwrite("F") + +# def test_init_array_overwrite_path(self): +# self._test_init_array_overwrite_path("F") + +# def test_init_array_overwrite_chunk_store(self): +# self._test_init_array_overwrite_chunk_store("F") + +# def test_init_group_overwrite(self): +# self._test_init_group_overwrite("F") + +# def test_init_group_overwrite_path(self): +# self._test_init_group_overwrite_path("F") + +# def test_init_group_overwrite_chunk_store(self): +# self._test_init_group_overwrite_chunk_store("F") + +# def _test_init_array_overwrite(self, order): +# # setup +# store = self.create_store() +# if self.version == 2: +# path = None +# mkey = array_meta_key +# meta = dict( +# shape=(2000,), +# chunks=(200,), +# dtype=np.dtype("u1"), +# compressor=Zlib(1).get_config(), +# fill_value=0, +# order=order, +# filters=None, +# ) +# else: +# path = "arr1" # no default, have to specify for v3 +# mkey = meta_root + path + ".array.json" +# meta = dict( +# shape=(2000,), +# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), +# data_type=np.dtype("u1"), +# compressor=Zlib(1), +# fill_value=0, +# chunk_memory_layout=order, +# filters=None, +# ) +# store[mkey] = store._metadata_class.encode_array_metadata(meta) + +# # don't overwrite (default) +# with pytest.raises(ContainsArrayError): +# init_array(store, shape=1000, chunks=100, path=path) + +# # do overwrite +# try: +# init_array(store, shape=1000, chunks=100, dtype="i4", overwrite=True, path=path) +# except NotImplementedError: +# pass +# else: +# assert mkey in store +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (100,) == meta["chunks"] +# assert np.dtype("i4") == meta["dtype"] +# else: +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype("i4") == meta["data_type"] +# assert (1000,) == meta["shape"] + +# store.close() + +# def test_init_array_path(self): +# path = "foo/bar" +# store = self.create_store() +# init_array(store, shape=1000, chunks=100, path=path) + +# # check metadata +# if self.version == 2: +# mkey = path + "/" + array_meta_key +# else: +# mkey = meta_root + path + ".array.json" +# assert mkey in store +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (100,) == meta["chunks"] +# assert np.dtype(None) == meta["dtype"] +# assert default_compressor.get_config() == meta["compressor"] +# else: +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype(None) == meta["data_type"] +# assert default_compressor == meta["compressor"] +# assert (1000,) == meta["shape"] +# assert meta["fill_value"] is None + +# store.close() + +# def _test_init_array_overwrite_path(self, order): +# # setup +# path = "foo/bar" +# store = self.create_store() +# if self.version == 2: +# mkey = path + "/" + array_meta_key +# meta = dict( +# shape=(2000,), +# chunks=(200,), +# dtype=np.dtype("u1"), +# compressor=Zlib(1).get_config(), +# fill_value=0, +# order=order, +# filters=None, +# ) +# else: +# mkey = meta_root + path + ".array.json" +# meta = dict( +# shape=(2000,), +# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), +# data_type=np.dtype("u1"), +# compressor=Zlib(1), +# fill_value=0, +# chunk_memory_layout=order, +# filters=None, +# ) +# store[mkey] = 
store._metadata_class.encode_array_metadata(meta) + +# # don't overwrite +# with pytest.raises(ContainsArrayError): +# init_array(store, shape=1000, chunks=100, path=path) + +# # do overwrite +# try: +# init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) +# except NotImplementedError: +# pass +# else: +# if self.version == 2: +# assert group_meta_key in store +# assert array_meta_key not in store +# assert mkey in store +# # should have been overwritten +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (100,) == meta["chunks"] +# assert np.dtype("i4") == meta["dtype"] +# else: +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype("i4") == meta["data_type"] +# assert (1000,) == meta["shape"] + +# store.close() + +# def test_init_array_overwrite_group(self): +# # setup +# path = "foo/bar" +# store = self.create_store() +# if self.version == 2: +# array_key = path + "/" + array_meta_key +# group_key = path + "/" + group_meta_key +# else: +# array_key = meta_root + path + ".array.json" +# group_key = meta_root + path + ".group.json" +# store[group_key] = store._metadata_class.encode_group_metadata() + +# # don't overwrite +# with pytest.raises(ContainsGroupError): +# init_array(store, shape=1000, chunks=100, path=path) + +# # do overwrite +# try: +# init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) +# except NotImplementedError: +# pass +# else: +# assert group_key not in store +# assert array_key in store +# meta = store._metadata_class.decode_array_metadata(store[array_key]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (100,) == meta["chunks"] +# assert np.dtype("i4") == meta["dtype"] +# else: +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype("i4") == meta["data_type"] +# assert (1000,) == meta["shape"] + +# store.close() + +# def _test_init_array_overwrite_chunk_store(self, order): +# # setup +# store = self.create_store() +# chunk_store = self.create_store() + +# if self.version == 2: +# path = None +# data_path = "" +# mkey = array_meta_key +# meta = dict( +# shape=(2000,), +# chunks=(200,), +# dtype=np.dtype("u1"), +# compressor=None, +# fill_value=0, +# filters=None, +# order=order, +# ) +# else: +# path = "arr1" +# data_path = data_root + "arr1/" +# mkey = meta_root + path + ".array.json" +# meta = dict( +# shape=(2000,), +# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), +# data_type=np.dtype("u1"), +# compressor=None, +# fill_value=0, +# filters=None, +# chunk_memory_layout=order, +# ) + +# store[mkey] = store._metadata_class.encode_array_metadata(meta) + +# chunk_store[data_path + "0"] = b"aaa" +# chunk_store[data_path + "1"] = b"bbb" + +# # don't overwrite (default) +# with pytest.raises(ContainsArrayError): +# init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) + +# # do overwrite +# try: +# init_array( +# store, +# path=path, +# shape=1000, +# chunks=100, +# dtype="i4", +# overwrite=True, +# chunk_store=chunk_store, +# ) +# except NotImplementedError: +# pass +# else: +# assert mkey in store +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (100,) == meta["chunks"] +# assert np.dtype("i4") == meta["dtype"] +# else: +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype("i4") == 
meta["data_type"] +# assert (1000,) == meta["shape"] +# assert data_path + "0" not in chunk_store +# assert data_path + "1" not in chunk_store + +# store.close() +# chunk_store.close() + +# def test_init_array_compat(self): +# store = self.create_store() +# if self.version == 2: +# path = None +# mkey = array_meta_key +# else: +# path = "arr1" +# mkey = meta_root + path + ".array.json" +# init_array(store, path=path, shape=1000, chunks=100, compressor="none") +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# if self.version == 2: +# assert meta["compressor"] is None +# else: +# assert "compressor" not in meta +# store.close() + +# def test_init_group(self): +# store = self.create_store() +# if self.version == 2: +# path = None +# mkey = group_meta_key +# else: +# path = "foo" +# mkey = meta_root + path + ".group.json" +# init_group(store, path=path) + +# # check metadata +# assert mkey in store +# meta = store._metadata_class.decode_group_metadata(store[mkey]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# else: +# assert meta == {"attributes": {}} + +# store.close() + +# def _test_init_group_overwrite(self, order): +# if self.version == 3: +# pytest.skip("In v3 array and group names cannot overlap") +# # setup +# store = self.create_store() +# store[array_meta_key] = store._metadata_class.encode_array_metadata( +# dict( +# shape=(2000,), +# chunks=(200,), +# dtype=np.dtype("u1"), +# compressor=None, +# fill_value=0, +# order=order, +# filters=None, +# ) +# ) + +# # don't overwrite array (default) +# with pytest.raises(ContainsArrayError): +# init_group(store) + +# # do overwrite +# try: +# init_group(store, overwrite=True) +# except NotImplementedError: +# pass +# else: +# assert array_meta_key not in store +# assert group_meta_key in store +# meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) +# assert ZARR_FORMAT == meta["zarr_format"] + +# # don't overwrite group +# with pytest.raises(ValueError): +# init_group(store) + +# store.close() + +# def _test_init_group_overwrite_path(self, order): +# # setup +# path = "foo/bar" +# store = self.create_store() +# if self.version == 2: +# meta = dict( +# shape=(2000,), +# chunks=(200,), +# dtype=np.dtype("u1"), +# compressor=None, +# fill_value=0, +# order=order, +# filters=None, +# ) +# array_key = path + "/" + array_meta_key +# group_key = path + "/" + group_meta_key +# else: +# meta = dict( +# shape=(2000,), +# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), +# data_type=np.dtype("u1"), +# compressor=None, +# fill_value=0, +# filters=None, +# chunk_memory_layout=order, +# ) +# array_key = meta_root + path + ".array.json" +# group_key = meta_root + path + ".group.json" +# store[array_key] = store._metadata_class.encode_array_metadata(meta) + +# # don't overwrite +# with pytest.raises(ValueError): +# init_group(store, path=path) + +# # do overwrite +# try: +# init_group(store, overwrite=True, path=path) +# except NotImplementedError: +# pass +# else: +# if self.version == 2: +# assert array_meta_key not in store +# assert group_meta_key in store +# assert array_key not in store +# assert group_key in store +# # should have been overwritten +# meta = store._metadata_class.decode_group_metadata(store[group_key]) +# if self.version == 2: +# assert ZARR_FORMAT == meta["zarr_format"] +# else: +# assert meta == {"attributes": {}} + +# store.close() + +# def _test_init_group_overwrite_chunk_store(self, order): +# if self.version == 3: +# pytest.skip("In v3 array 
and group names cannot overlap")
+#         # setup
+#         store = self.create_store()
+#         chunk_store = self.create_store()
+#         store[array_meta_key] = store._metadata_class.encode_array_metadata(
+#             dict(
+#                 shape=(2000,),
+#                 chunks=(200,),
+#                 dtype=np.dtype("u1"),
+#                 compressor=None,
+#                 fill_value=0,
+#                 filters=None,
+#                 order=order,
+#             )
+#         )
+#         chunk_store["foo"] = b"bar"
+#         chunk_store["baz"] = b"quux"
+
+#         # don't overwrite array (default)
+#         with pytest.raises(ValueError):
+#             init_group(store, chunk_store=chunk_store)
+
+#         # do overwrite
+#         try:
+#             init_group(store, overwrite=True, chunk_store=chunk_store)
+#         except NotImplementedError:
+#             pass
+#         else:
+#             assert array_meta_key not in store
+#             assert group_meta_key in store
+#             meta = store._metadata_class.decode_group_metadata(store[group_meta_key])
+#             assert ZARR_FORMAT == meta["zarr_format"]
+#             assert "foo" not in chunk_store
+#             assert "baz" not in chunk_store
+
+#         # don't overwrite group
+#         with pytest.raises(ValueError):
+#             init_group(store)
+
+#         store.close()
+#         chunk_store.close()
 
 
 class TestMappingStore(StoreTests):
     def create_store(self, **kwargs):
-        skip_if_nested_chunks(**kwargs)
+        # skip_if_nested_chunks(**kwargs)
         return KVStore(dict())
 
-    def test_set_invalid_content(self):
-        # Generic mappings support non-buffer types
-        pass
-
-
-def setdel_hierarchy_checks(store, root=""):
-    # these tests are for stores that are aware of hierarchy levels; this
-    # behaviour is not strictly required by Zarr but these tests are included
-    # to define behaviour of MemoryStore and DirectoryStore classes
-
-    # check __setitem__ and __delitem__ blocked by leaf
-
-    store[root + "a/b"] = b"aaa"
-    with pytest.raises(KeyError):
-        store[root + "a/b/c"] = b"xxx"
-    with pytest.raises(KeyError):
-        del store[root + "a/b/c"]
-
-    store[root + "d"] = b"ddd"
-    with pytest.raises(KeyError):
-        store[root + "d/e/f"] = b"xxx"
-    with pytest.raises(KeyError):
-        del store[root + "d/e/f"]
-
-    # test __setitem__ overwrite level
-    store[root + "x/y/z"] = b"xxx"
-    store[root + "x/y"] = b"yyy"
-    assert b"yyy" == ensure_bytes(store[root + "x/y"])
-    assert root + "x/y/z" not in store
-    store[root + "x"] = b"zzz"
-    assert b"zzz" == ensure_bytes(store[root + "x"])
-    assert root + "x/y" not in store
-
-    # test __delitem__ overwrite level
-    store[root + "r/s/t"] = b"xxx"
-    del store[root + "r/s"]
-    assert root + "r/s/t" not in store
-    store[root + "r/s"] = b"xxx"
-    del store[root + "r"]
-    assert root + "r/s" not in store
-
-
-class TestMemoryStore(StoreTests):
-    def create_store(self, **kwargs):
-        skip_if_nested_chunks(**kwargs)
-        return MemoryStore(**kwargs)
-
-    def test_store_contains_bytes(self):
-        store = self.create_store()
-        store[self.root + "foo"] = np.array([97, 98, 99, 100, 101], dtype=np.uint8)
-        assert store[self.root + "foo"] == b"abcde"
-
-    def test_setdel(self):
-        store = self.create_store()
-        setdel_hierarchy_checks(store, self.root)
-
-
-class TestDictStore(StoreTests):
-    def create_store(self, **kwargs):
-        skip_if_nested_chunks(**kwargs)
-
-        with pytest.warns(DeprecationWarning):
-            return DictStore(**kwargs)
-
-    def test_deprecated(self):
-        store = self.create_store()
-        assert isinstance(store, MemoryStore)
-
-    def test_pickle(self):
-        with pytest.warns(DeprecationWarning):
-            # pickle.load() will also trigger deprecation warning
-            super().test_pickle()
-
-
-class TestDirectoryStore(StoreTests):
-    def create_store(self, normalize_keys=False, dimension_separator=".", **kwargs):
-        path = tempfile.mkdtemp()
-        atexit.register(atexit_rmtree, path)
-        store = DirectoryStore(
-            path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs
-        )
-        return store
-
-    def test_filesystem_path(self):
-
-        # test behaviour with path that does not exist
-        path = "data/store"
-        if os.path.exists(path):
-            shutil.rmtree(path)
-        store = DirectoryStore(path)
-        # should only be created on demand
-        assert not os.path.exists(path)
-        store["foo"] = b"bar"
-        assert os.path.isdir(path)
-
-        # check correct permissions
-        # regression test for https://github.com/zarr-developers/zarr-python/issues/325
-        stat = os.stat(path)
-        mode = stat.st_mode & 0o666
-        umask = os.umask(0)
-        os.umask(umask)
-        assert mode == (0o666 & ~umask)
-
-        # test behaviour with file path
-        with tempfile.NamedTemporaryFile() as f:
-            with pytest.raises(ValueError):
-                DirectoryStore(f.name)
-
-    def test_init_pathlib(self):
-        path = tempfile.mkdtemp()
-        atexit.register(atexit_rmtree, path)
-        DirectoryStore(pathlib.Path(path))
-
-    def test_pickle_ext(self):
-        store = self.create_store()
-        store2 = pickle.loads(pickle.dumps(store))
-
-        # check path is preserved
-        assert store.path == store2.path
-
-        # check point to same underlying directory
-        assert self.root + "xxx" not in store
-        store2[self.root + "xxx"] = b"yyy"
-        assert b"yyy" == ensure_bytes(store[self.root + "xxx"])
-
-    def test_setdel(self):
-        store = self.create_store()
-        setdel_hierarchy_checks(store, self.root)
-
-    def test_normalize_keys(self):
-        store = self.create_store(normalize_keys=True)
-        store[self.root + "FOO"] = b"bar"
-        assert self.root + "FOO" in store
-        assert self.root + "foo" in store
-
-    def test_listing_keys_slash(self):
-        def mock_walker_slash(_path):
-            yield from [
-                # trailing slash in first key
-                ("root_with_slash/", ["d1", "g1"], [".zgroup"]),
-                ("root_with_slash/d1", [], [".zarray"]),
-                ("root_with_slash/g1", [], [".zgroup"]),
-            ]
-
-        res = set(DirectoryStore._keys_fast("root_with_slash/", walker=mock_walker_slash))
-        assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"}
-
-    def test_listing_keys_no_slash(self):
-        def mock_walker_no_slash(_path):
-            yield from [
-                # no trailing slash in first key
-                ("root_with_no_slash", ["d1", "g1"], [".zgroup"]),
-                ("root_with_no_slash/d1", [], [".zarray"]),
-                ("root_with_no_slash/g1", [], [".zgroup"]),
-            ]
-
-        res = set(DirectoryStore._keys_fast("root_with_no_slash", mock_walker_no_slash))
-        assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"}
-
-
-@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
-class TestFSStore(StoreTests):
-    def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs):
-
-        if path is None:
-            path = tempfile.mkdtemp()
-            atexit.register(atexit_rmtree, path)
-
-        store = FSStore(
-            path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs
-        )
-        return store
-
-    def test_init_array(self):
-        store = self.create_store()
-        init_array(store, shape=1000, chunks=100)
-
-        # check metadata
-        assert array_meta_key in store
-        meta = store._metadata_class.decode_array_metadata(store[array_meta_key])
-        assert ZARR_FORMAT == meta["zarr_format"]
-        assert (1000,) == meta["shape"]
-        assert (100,) == meta["chunks"]
-        assert np.dtype(None) == meta["dtype"]
-        assert meta["dimension_separator"] == "."
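[Editor's note: the removed test_filesystem_path above documents a subtle contract of the legacy 2.x API: DirectoryStore creates its directory only on first write. A minimal sketch of that behaviour, assuming the pre-change zarr-python 2.x imports used throughout this file:]

import os
import tempfile

from zarr.storage import DirectoryStore

path = os.path.join(tempfile.mkdtemp(), "store")  # parent exists, store dir does not
store = DirectoryStore(path)
assert not os.path.exists(path)  # nothing is created at construction time
store["foo"] = b"bar"            # the first write creates the directory
assert os.path.isdir(path)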
- - def test_dimension_separator(self): - for x in (".", "/"): - store = self.create_store(dimension_separator=x) - norm = store._normalize_key - assert ".zarray" == norm(".zarray") - assert ".zarray" == norm("/.zarray") - assert ".zgroup" == norm("/.zgroup") - assert "group/.zarray" == norm("group/.zarray") - assert "group/.zgroup" == norm("group/.zgroup") - assert "group/.zarray" == norm("/group/.zarray") - assert "group/.zgroup" == norm("/group/.zgroup") - - def test_complex(self): - path1 = tempfile.mkdtemp() - path2 = tempfile.mkdtemp() - store = self.create_store( - path="simplecache::file://" + path1, - simplecache={"same_names": True, "cache_storage": path2}, - ) - assert not store - assert not os.listdir(path1) - assert not os.listdir(path2) - store[self.root + "foo"] = b"hello" - assert "foo" in os.listdir(str(path1) + "/" + self.root) - assert self.root + "foo" in store - assert not os.listdir(str(path2)) - assert store[self.root + "foo"] == b"hello" - assert "foo" in os.listdir(str(path2)) - - def test_deep_ndim(self): - import zarr - - store = self.create_store() - path = None if self.version == 2 else "group1" - foo = zarr.open_group(store=store, path=path) - bar = foo.create_group("bar") - baz = bar.create_dataset("baz", shape=(4, 4, 4), chunks=(2, 2, 2), dtype="i8") - baz[:] = 1 - if self.version == 2: - assert set(store.listdir()) == {".zgroup", "bar"} - else: - assert set(store.listdir()) == {"data", "meta", "zarr.json"} - assert set(store.listdir("meta/root/" + path)) == {"bar", "bar.group.json"} - assert set(store.listdir("data/root/" + path)) == {"bar"} - assert foo["bar"]["baz"][(0, 0, 0)] == 1 - - def test_not_fsspec(self): - import zarr - - path = tempfile.mkdtemp() - with pytest.raises(ValueError, match="storage_options"): - zarr.open_array(path, mode="w", storage_options={"some": "kwargs"}) - with pytest.raises(ValueError, match="storage_options"): - zarr.open_group(path, mode="w", storage_options={"some": "kwargs"}) - zarr.open_array("file://" + path, mode="w", shape=(1,), dtype="f8") - - def test_create(self): - import zarr - - path1 = tempfile.mkdtemp() - path2 = tempfile.mkdtemp() - g = zarr.open_group("file://" + path1, mode="w", storage_options={"auto_mkdir": True}) - a = g.create_dataset("data", shape=(8,)) - a[:4] = [0, 1, 2, 3] - assert "data" in os.listdir(path1) - assert ".zgroup" in os.listdir(path1) - - # consolidated metadata (GH#915) - consolidate_metadata("file://" + path1) - assert ".zmetadata" in os.listdir(path1) - - g = zarr.open_group( - "simplecache::file://" + path1, - mode="r", - storage_options={"cache_storage": path2, "same_names": True}, - ) - assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] - with pytest.raises(PermissionError): - g.data[:] = 1 - - @pytest.mark.parametrize("mode,allowed", [("r", False), ("r+", True)]) - def test_modify_consolidated(self, mode, allowed): - import zarr - - url = "file://" + tempfile.mkdtemp() - - # create - root = zarr.open_group(url, mode="w") - root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") - zarr.consolidate_metadata(url) - - # reopen and modify - root = zarr.open_consolidated(url, mode=mode) - if allowed: - root["baz"][0, 0] = 7 - - root = zarr.open_consolidated(url, mode="r") - assert root["baz"][0, 0] == 7 - else: - with pytest.raises(zarr.errors.ReadOnlyError): - root["baz"][0, 0] = 7 - - @pytest.mark.parametrize("mode", ["r", "r+"]) - def test_modify_consolidated_metadata_raises(self, mode): - import zarr - - url = "file://" + tempfile.mkdtemp() - - # create - root = 
zarr.open_group(url, mode="w") - root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") - zarr.consolidate_metadata(url) + # def test_set_invalid_content(self): + # # Generic mappings support non-buffer types + # pass - # reopen and modify - root = zarr.open_consolidated(url, mode=mode) - with pytest.raises(zarr.errors.ReadOnlyError): - root["baz"].resize(100, 100) - def test_read_only(self): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = self.create_store(path=path) - store[self.root + "foo"] = b"bar" - - store = self.create_store(path=path, mode="r") - - with pytest.raises(PermissionError): - store[self.root + "foo"] = b"hex" - - with pytest.raises(PermissionError): - del store[self.root + "foo"] - - with pytest.raises(PermissionError): - store.delitems([self.root + "foo"]) +# def setdel_hierarchy_checks(store, root=""): +# # these tests are for stores that are aware of hierarchy levels; this +# # behaviour is not strictly required by Zarr but these tests are included +# # to define behaviour of MemoryStore and DirectoryStore classes - with pytest.raises(PermissionError): - store.setitems({self.root + "foo": b"baz"}) +# # check __setitem__ and __delitem__ blocked by leaf - with pytest.raises(PermissionError): - store.clear() +# store[root + "a/b"] = b"aaa" +# with pytest.raises(KeyError): +# store[root + "a/b/c"] = b"xxx" +# with pytest.raises(KeyError): +# del store[root + "a/b/c"] - with pytest.raises(PermissionError): - store.rmdir(self.root + "anydir") +# store[root + "d"] = b"ddd" +# with pytest.raises(KeyError): +# store[root + "d/e/f"] = b"xxx" +# with pytest.raises(KeyError): +# del store[root + "d/e/f"] - assert store[self.root + "foo"] == b"bar" +# # test __setitem__ overwrite level +# store[root + "x/y/z"] = b"xxx" +# store[root + "x/y"] = b"yyy" +# assert b"yyy" == ensure_bytes(store[root + "x/y"]) +# assert root + "x/y/z" not in store +# store[root + "x"] = b"zzz" +# assert b"zzz" == ensure_bytes(store[root + "x"]) +# assert root + "x/y" not in store - def test_eq(self): - store1 = self.create_store(path="anypath") - store2 = self.create_store(path="anypath") - assert store1 == store2 +# # test __delitem__ overwrite level +# store[root + "r/s/t"] = b"xxx" +# del store[root + "r/s"] +# assert root + "r/s/t" not in store +# store[root + "r/s"] = b"xxx" +# del store[root + "r"] +# assert root + "r/s" not in store - @pytest.mark.usefixtures("s3") - def test_s3(self): - import zarr - g = zarr.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) - a = g.create_dataset("data", shape=(8,)) - a[:4] = [0, 1, 2, 3] +# class TestMemoryStore(StoreTests): +# def create_store(self, **kwargs): +# skip_if_nested_chunks(**kwargs) +# return MemoryStore(**kwargs) - g = zarr.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) +# def test_store_contains_bytes(self): +# store = self.create_store() +# store[self.root + "foo"] = np.array([97, 98, 99, 100, 101], dtype=np.uint8) +# assert store[self.root + "foo"] == b"abcde" - assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] - - # test via convenience - g = zarr.open("s3://test/out.zarr", mode="r", storage_options=self.s3so) - assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] - - @pytest.mark.usefixtures("s3") - def test_s3_complex(self): - import zarr - - g = zarr.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) - expected = np.empty((8, 8, 8), dtype="int64") - expected[:] = -1 - a = g.create_dataset( - "data", shape=(8, 8, 8), 
fill_value=-1, chunks=(1, 1, 1), overwrite=True - ) - expected[0] = 0 - expected[3] = 3 - expected[6, 6, 6] = 6 - a[6, 6, 6] = 6 - a[:4] = expected[:4] - - b = g.create_dataset( - "data_f", - shape=(8,), - chunks=(1,), - dtype=[("foo", "S3"), ("bar", "i4")], - fill_value=(b"b", 1), - ) - b[:4] = (b"aaa", 2) - g2 = zarr.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) +# def test_setdel(self): +# store = self.create_store() +# setdel_hierarchy_checks(store, self.root) - assert (g2.data[:] == expected).all() - a.chunk_store.fs.invalidate_cache("test/out.zarr/data") - a[:] = 5 - assert (a[:] == 5).all() - - assert g2.data_f["foo"].tolist() == [b"aaa"] * 4 + [b"b"] * 4 - with pytest.raises(PermissionError): - g2.data[:] = 5 - - with pytest.raises(PermissionError): - g2.store.setitems({}) - - with pytest.raises(PermissionError): - # even though overwrite=True, store is read-only, so fails - g2.create_dataset( - "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True - ) - - a = g.create_dataset( - "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True - ) - assert (a[:] == -np.ones((8, 8, 8))).all() - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -class TestFSStoreWithKeySeparator(StoreTests): - def create_store(self, normalize_keys=False, key_separator=".", **kwargs): - - # Since the user is passing key_separator, that will take priority. - skip_if_nested_chunks(**kwargs) +class TestDirectoryStore(StoreTests): + def create_store(self, normalize_keys=False, dimension_separator=".", **kwargs): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - return FSStore(path, normalize_keys=normalize_keys, key_separator=key_separator) - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -class TestFSStoreFromFilesystem(StoreTests): - def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): - import fsspec - - fs = fsspec.filesystem("file") - - if path is None: - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - - with pytest.raises(ValueError): - # can't specify storage_options when passing an - # existing fs object - _ = FSStore(path, fs=fs, auto_mkdir=True) - - store = FSStore( + store = LocalStore( path, - normalize_keys=normalize_keys, - dimension_separator=dimension_separator, - fs=fs, - **kwargs, + auto_mkdir=True, + # normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs ) - - return store - - -@pytest.fixture() -def s3(request): - # writable local S3 system - import shlex - import subprocess - import time - - if "BOTO_CONFIG" not in os.environ: # pragma: no cover - os.environ["BOTO_CONFIG"] = "/dev/null" - if "AWS_ACCESS_KEY_ID" not in os.environ: # pragma: no cover - os.environ["AWS_ACCESS_KEY_ID"] = "foo" - if "AWS_SECRET_ACCESS_KEY" not in os.environ: # pragma: no cover - os.environ["AWS_SECRET_ACCESS_KEY"] = "bar" - requests = pytest.importorskip("requests") - s3fs = pytest.importorskip("s3fs") - pytest.importorskip("moto") - - port = 5555 - endpoint_uri = "http://127.0.0.1:%d/" % port - proc = subprocess.Popen( - shlex.split("moto_server s3 -p %d" % port), - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - ) - - timeout = 5 - while timeout > 0: - try: - r = requests.get(endpoint_uri) - if r.ok: - break - except Exception: # pragma: no cover - pass - timeout -= 0.1 # pragma: no cover - time.sleep(0.1) # pragma: no cover - s3so = dict(client_kwargs={"endpoint_url": endpoint_uri}, 
use_listings_cache=False) - s3 = s3fs.S3FileSystem(anon=False, **s3so) - s3.mkdir("test") - request.cls.s3so = s3so - yield - proc.terminate() - proc.wait() - - -class TestNestedDirectoryStore(TestDirectoryStore): - def create_store(self, normalize_keys=False, **kwargs): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = NestedDirectoryStore(path, normalize_keys=normalize_keys, **kwargs) - return store - - def test_init_array(self): - store = self.create_store() - assert store._dimension_separator == "/" - init_array(store, shape=1000, chunks=100) - - # check metadata - assert array_meta_key in store - meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert (1000,) == meta["shape"] - assert (100,) == meta["chunks"] - assert np.dtype(None) == meta["dtype"] - assert meta["dimension_separator"] == "/" - - def test_chunk_nesting(self): - store = self.create_store() - # any path where last segment looks like a chunk key gets special handling - store[self.root + "0.0"] = b"xxx" - assert b"xxx" == store[self.root + "0.0"] - # assert b'xxx' == store['0/0'] - store[self.root + "foo/10.20.30"] = b"yyy" - assert b"yyy" == store[self.root + "foo/10.20.30"] - # assert b'yyy' == store['foo/10/20/30'] - store[self.root + "42"] = b"zzz" - assert b"zzz" == store[self.root + "42"] - - def test_listdir(self): - store = self.create_store() - z = zarr.zeros((10, 10), chunks=(5, 5), store=store) - z[:] = 1 # write to all chunks - for k in store.listdir(): - assert store.get(k) is not None - - -class TestNestedDirectoryStoreNone: - def test_value_error(self): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = NestedDirectoryStore(path, normalize_keys=True, dimension_separator=None) - assert store._dimension_separator == "/" - - -class TestNestedDirectoryStoreWithWrongValue: - def test_value_error(self): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - with pytest.raises(ValueError): - NestedDirectoryStore(path, normalize_keys=True, dimension_separator=".") - - -class TestN5Store(TestNestedDirectoryStore): - def create_store(self, normalize_keys=False): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = N5Store(path, normalize_keys=normalize_keys) return store - def test_equal(self): - store_a = self.create_store() - store_b = N5Store(store_a.path) - assert store_a == store_b - - @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) - def test_del_zarr_meta_key(self, zarr_meta_key): - store = self.create_store() - store[n5_attrs_key] = json_dumps({"foo": "bar"}) - del store[zarr_meta_key] - assert n5_attrs_key not in store - - def test_chunk_nesting(self): - store = self.create_store() - store["0.0"] = b"xxx" - assert "0.0" in store - assert b"xxx" == store["0.0"] - # assert b'xxx' == store['0/0'] - store["foo/10.20.30"] = b"yyy" - assert "foo/10.20.30" in store - assert b"yyy" == store["foo/10.20.30"] - # N5 reverses axis order - assert b"yyy" == store["foo/30/20/10"] - del store["foo/10.20.30"] - assert "foo/30/20/10" not in store - store["42"] = b"zzz" - assert "42" in store - assert b"zzz" == store["42"] - - def test_init_array(self): - store = self.create_store() - init_array(store, shape=1000, chunks=100) - - # check metadata - assert array_meta_key in store - meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert (1000,) == meta["shape"] - assert 
(100,) == meta["chunks"] - assert np.dtype(None) == meta["dtype"] - # N5Store wraps the actual compressor - compressor_config = meta["compressor"]["compressor_config"] - assert default_compressor.get_config() == compressor_config - # N5Store always has a fill value of 0 - assert meta["fill_value"] == 0 - assert meta["dimension_separator"] == "." - # Top-level groups AND arrays should have - # the n5 keyword in metadata - raw_n5_meta = json.loads(store[n5_attrs_key]) - assert raw_n5_meta.get("n5", None) == N5_FORMAT - - def test_init_array_path(self): - path = "foo/bar" - store = self.create_store() - init_array(store, shape=1000, chunks=100, path=path) - - # check metadata - key = path + "/" + array_meta_key - assert key in store - meta = store._metadata_class.decode_array_metadata(store[key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert (1000,) == meta["shape"] - assert (100,) == meta["chunks"] - assert np.dtype(None) == meta["dtype"] - # N5Store wraps the actual compressor - compressor_config = meta["compressor"]["compressor_config"] - assert default_compressor.get_config() == compressor_config - # N5Store always has a fill value of 0 - assert meta["fill_value"] == 0 - - def test_init_array_compat(self): - store = self.create_store() - init_array(store, shape=1000, chunks=100, compressor="none") - meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - # N5Store wraps the actual compressor - compressor_config = meta["compressor"]["compressor_config"] - assert compressor_config is None - - def test_init_array_overwrite(self): - self._test_init_array_overwrite("C") - - def test_init_array_overwrite_path(self): - self._test_init_array_overwrite_path("C") - - def test_init_array_overwrite_chunk_store(self): - self._test_init_array_overwrite_chunk_store("C") - - def test_init_group_overwrite(self): - self._test_init_group_overwrite("C") - - def test_init_group_overwrite_path(self): - self._test_init_group_overwrite_path("C") - - def test_init_group_overwrite_chunk_store(self): - self._test_init_group_overwrite_chunk_store("C") - - def test_init_group(self): - store = self.create_store() - init_group(store) - store[".zattrs"] = json_dumps({"foo": "bar"}) - # check metadata - assert group_meta_key in store - assert group_meta_key in store.listdir() - assert group_meta_key in store.listdir("") - meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - - def test_filters(self): - all_filters, all_errors = zip( - *[ - (None, does_not_raise()), - ([], does_not_raise()), - ([AsType("f4", "f8")], pytest.raises(ValueError)), - ] - ) - for filters, error in zip(all_filters, all_errors): - store = self.create_store() - with error: - init_array(store, shape=1000, chunks=100, filters=filters) - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -class TestN5FSStore(TestFSStore): - def create_store(self, normalize_keys=False, path=None, **kwargs): +# def test_filesystem_path(self): + +# # test behaviour with path that does not exist +# path = "data/store" +# if os.path.exists(path): +# shutil.rmtree(path) +# store = DirectoryStore(path) +# # should only be created on demand +# assert not os.path.exists(path) +# store["foo"] = b"bar" +# assert os.path.isdir(path) + +# # check correct permissions +# # regression test for https://github.com/zarr-developers/zarr-python/issues/325 +# stat = os.stat(path) +# mode = stat.st_mode & 0o666 +# umask = os.umask(0) +# os.umask(umask) +# assert mode == (0o666 & 
~umask) + +# # test behaviour with file path +# with tempfile.NamedTemporaryFile() as f: +# with pytest.raises(ValueError): +# DirectoryStore(f.name) + +# def test_init_pathlib(self): +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# DirectoryStore(pathlib.Path(path)) + +# def test_pickle_ext(self): +# store = self.create_store() +# store2 = pickle.loads(pickle.dumps(store)) + +# # check path is preserved +# assert store.path == store2.path + +# # check point to same underlying directory +# assert self.root + "xxx" not in store +# store2[self.root + "xxx"] = b"yyy" +# assert b"yyy" == ensure_bytes(store[self.root + "xxx"]) + +# def test_setdel(self): +# store = self.create_store() +# setdel_hierarchy_checks(store, self.root) + +# def test_normalize_keys(self): +# store = self.create_store(normalize_keys=True) +# store[self.root + "FOO"] = b"bar" +# assert self.root + "FOO" in store +# assert self.root + "foo" in store + +# def test_listing_keys_slash(self): +# def mock_walker_slash(_path): +# yield from [ +# # trailing slash in first key +# ("root_with_slash/", ["d1", "g1"], [".zgroup"]), +# ("root_with_slash/d1", [], [".zarray"]), +# ("root_with_slash/g1", [], [".zgroup"]), +# ] + +# res = set(DirectoryStore._keys_fast("root_with_slash/", walker=mock_walker_slash)) +# assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"} + +# def test_listing_keys_no_slash(self): +# def mock_walker_no_slash(_path): +# yield from [ +# # no trailing slash in first key +# ("root_with_no_slash", ["d1", "g1"], [".zgroup"]), +# ("root_with_no_slash/d1", [], [".zarray"]), +# ("root_with_no_slash/g1", [], [".zgroup"]), +# ] + +# res = set(DirectoryStore._keys_fast("root_with_no_slash", mock_walker_no_slash)) +# assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"} + + +# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +# class TestFSStore(StoreTests): +# def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): + +# if path is None: +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) + +# store = FSStore( +# path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs +# ) +# return store + +# def test_init_array(self): +# store = self.create_store() +# init_array(store, shape=1000, chunks=100) + +# # check metadata +# assert array_meta_key in store +# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunks"] +# assert np.dtype(None) == meta["dtype"] +# assert meta["dimension_separator"] == "." 
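[Editor's note: the commented-out test_init_array above pins down what init_array records: a .zarray document whose dimension_separator is "." for a default FSStore. A hedged sketch of the same check against the in-memory KVStore from the legacy 2.x API; passing the separator explicitly here is my illustration, not part of the patch:]

import json

from zarr.storage import KVStore, init_array

store = KVStore(dict())
init_array(store, shape=1000, chunks=100, dimension_separator="/")
meta = json.loads(store[".zarray"])
assert meta["shape"] == [1000]             # JSON serialises the shape tuple as a list
assert meta["dimension_separator"] == "/"  # recorded in the array metadata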
+ +# def test_dimension_separator(self): +# for x in (".", "/"): +# store = self.create_store(dimension_separator=x) +# norm = store._normalize_key +# assert ".zarray" == norm(".zarray") +# assert ".zarray" == norm("/.zarray") +# assert ".zgroup" == norm("/.zgroup") +# assert "group/.zarray" == norm("group/.zarray") +# assert "group/.zgroup" == norm("group/.zgroup") +# assert "group/.zarray" == norm("/group/.zarray") +# assert "group/.zgroup" == norm("/group/.zgroup") + +# def test_complex(self): +# path1 = tempfile.mkdtemp() +# path2 = tempfile.mkdtemp() +# store = self.create_store( +# path="simplecache::file://" + path1, +# simplecache={"same_names": True, "cache_storage": path2}, +# ) +# assert not store +# assert not os.listdir(path1) +# assert not os.listdir(path2) +# store[self.root + "foo"] = b"hello" +# assert "foo" in os.listdir(str(path1) + "/" + self.root) +# assert self.root + "foo" in store +# assert not os.listdir(str(path2)) +# assert store[self.root + "foo"] == b"hello" +# assert "foo" in os.listdir(str(path2)) + +# def test_deep_ndim(self): +# import zarr + +# store = self.create_store() +# path = None if self.version == 2 else "group1" +# foo = zarr.open_group(store=store, path=path) +# bar = foo.create_group("bar") +# baz = bar.create_dataset("baz", shape=(4, 4, 4), chunks=(2, 2, 2), dtype="i8") +# baz[:] = 1 +# if self.version == 2: +# assert set(store.listdir()) == {".zgroup", "bar"} +# else: +# assert set(store.listdir()) == {"data", "meta", "zarr.json"} +# assert set(store.listdir("meta/root/" + path)) == {"bar", "bar.group.json"} +# assert set(store.listdir("data/root/" + path)) == {"bar"} +# assert foo["bar"]["baz"][(0, 0, 0)] == 1 + +# def test_not_fsspec(self): +# import zarr + +# path = tempfile.mkdtemp() +# with pytest.raises(ValueError, match="storage_options"): +# zarr.open_array(path, mode="w", storage_options={"some": "kwargs"}) +# with pytest.raises(ValueError, match="storage_options"): +# zarr.open_group(path, mode="w", storage_options={"some": "kwargs"}) +# zarr.open_array("file://" + path, mode="w", shape=(1,), dtype="f8") + +# def test_create(self): +# import zarr + +# path1 = tempfile.mkdtemp() +# path2 = tempfile.mkdtemp() +# g = zarr.open_group("file://" + path1, mode="w", storage_options={"auto_mkdir": True}) +# a = g.create_dataset("data", shape=(8,)) +# a[:4] = [0, 1, 2, 3] +# assert "data" in os.listdir(path1) +# assert ".zgroup" in os.listdir(path1) + +# # consolidated metadata (GH#915) +# consolidate_metadata("file://" + path1) +# assert ".zmetadata" in os.listdir(path1) + +# g = zarr.open_group( +# "simplecache::file://" + path1, +# mode="r", +# storage_options={"cache_storage": path2, "same_names": True}, +# ) +# assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] +# with pytest.raises(PermissionError): +# g.data[:] = 1 + +# @pytest.mark.parametrize("mode,allowed", [("r", False), ("r+", True)]) +# def test_modify_consolidated(self, mode, allowed): +# import zarr + +# url = "file://" + tempfile.mkdtemp() + +# # create +# root = zarr.open_group(url, mode="w") +# root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") +# zarr.consolidate_metadata(url) + +# # reopen and modify +# root = zarr.open_consolidated(url, mode=mode) +# if allowed: +# root["baz"][0, 0] = 7 + +# root = zarr.open_consolidated(url, mode="r") +# assert root["baz"][0, 0] == 7 +# else: +# with pytest.raises(zarr.errors.ReadOnlyError): +# root["baz"][0, 0] = 7 + +# @pytest.mark.parametrize("mode", ["r", "r+"]) +# def 
test_modify_consolidated_metadata_raises(self, mode): +# import zarr + +# url = "file://" + tempfile.mkdtemp() + +# # create +# root = zarr.open_group(url, mode="w") +# root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") +# zarr.consolidate_metadata(url) + +# # reopen and modify +# root = zarr.open_consolidated(url, mode=mode) +# with pytest.raises(zarr.errors.ReadOnlyError): +# root["baz"].resize(100, 100) + +# def test_read_only(self): +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# store = self.create_store(path=path) +# store[self.root + "foo"] = b"bar" + +# store = self.create_store(path=path, mode="r") + +# with pytest.raises(PermissionError): +# store[self.root + "foo"] = b"hex" + +# with pytest.raises(PermissionError): +# del store[self.root + "foo"] + +# with pytest.raises(PermissionError): +# store.delitems([self.root + "foo"]) + +# with pytest.raises(PermissionError): +# store.setitems({self.root + "foo": b"baz"}) + +# with pytest.raises(PermissionError): +# store.clear() + +# with pytest.raises(PermissionError): +# store.rmdir(self.root + "anydir") + +# assert store[self.root + "foo"] == b"bar" + +# def test_eq(self): +# store1 = self.create_store(path="anypath") +# store2 = self.create_store(path="anypath") +# assert store1 == store2 + +# @pytest.mark.usefixtures("s3") +# def test_s3(self): +# import zarr + +# g = zarr.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) +# a = g.create_dataset("data", shape=(8,)) +# a[:4] = [0, 1, 2, 3] + +# g = zarr.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) + +# assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] + +# # test via convenience +# g = zarr.open("s3://test/out.zarr", mode="r", storage_options=self.s3so) +# assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] + +# @pytest.mark.usefixtures("s3") +# def test_s3_complex(self): +# import zarr + +# g = zarr.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) +# expected = np.empty((8, 8, 8), dtype="int64") +# expected[:] = -1 +# a = g.create_dataset( +# "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True +# ) +# expected[0] = 0 +# expected[3] = 3 +# expected[6, 6, 6] = 6 +# a[6, 6, 6] = 6 +# a[:4] = expected[:4] + +# b = g.create_dataset( +# "data_f", +# shape=(8,), +# chunks=(1,), +# dtype=[("foo", "S3"), ("bar", "i4")], +# fill_value=(b"b", 1), +# ) +# b[:4] = (b"aaa", 2) +# g2 = zarr.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) + +# assert (g2.data[:] == expected).all() +# a.chunk_store.fs.invalidate_cache("test/out.zarr/data") +# a[:] = 5 +# assert (a[:] == 5).all() + +# assert g2.data_f["foo"].tolist() == [b"aaa"] * 4 + [b"b"] * 4 +# with pytest.raises(PermissionError): +# g2.data[:] = 5 + +# with pytest.raises(PermissionError): +# g2.store.setitems({}) + +# with pytest.raises(PermissionError): +# # even though overwrite=True, store is read-only, so fails +# g2.create_dataset( +# "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True +# ) + +# a = g.create_dataset( +# "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True +# ) +# assert (a[:] == -np.ones((8, 8, 8))).all() + + +# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +# class TestFSStoreWithKeySeparator(StoreTests): +# def create_store(self, normalize_keys=False, key_separator=".", **kwargs): + +# # Since the user is passing key_separator, that will take priority. 
+# skip_if_nested_chunks(**kwargs) + +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# return FSStore(path, normalize_keys=normalize_keys, key_separator=key_separator) + + +# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +# class TestFSStoreFromFilesystem(StoreTests): +# def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): +# import fsspec + +# fs = fsspec.filesystem("file") + +# if path is None: +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) + +# with pytest.raises(ValueError): +# # can't specify storage_options when passing an +# # existing fs object +# _ = FSStore(path, fs=fs, auto_mkdir=True) + +# store = FSStore( +# path, +# normalize_keys=normalize_keys, +# dimension_separator=dimension_separator, +# fs=fs, +# **kwargs, +# ) + +# return store + + +# @pytest.fixture() +# def s3(request): +# # writable local S3 system +# import shlex +# import subprocess +# import time + +# if "BOTO_CONFIG" not in os.environ: # pragma: no cover +# os.environ["BOTO_CONFIG"] = "/dev/null" +# if "AWS_ACCESS_KEY_ID" not in os.environ: # pragma: no cover +# os.environ["AWS_ACCESS_KEY_ID"] = "foo" +# if "AWS_SECRET_ACCESS_KEY" not in os.environ: # pragma: no cover +# os.environ["AWS_SECRET_ACCESS_KEY"] = "bar" +# requests = pytest.importorskip("requests") +# s3fs = pytest.importorskip("s3fs") +# pytest.importorskip("moto") + +# port = 5555 +# endpoint_uri = "http://127.0.0.1:%d/" % port +# proc = subprocess.Popen( +# shlex.split("moto_server s3 -p %d" % port), +# stderr=subprocess.DEVNULL, +# stdout=subprocess.DEVNULL, +# ) + +# timeout = 5 +# while timeout > 0: +# try: +# r = requests.get(endpoint_uri) +# if r.ok: +# break +# except Exception: # pragma: no cover +# pass +# timeout -= 0.1 # pragma: no cover +# time.sleep(0.1) # pragma: no cover +# s3so = dict(client_kwargs={"endpoint_url": endpoint_uri}, use_listings_cache=False) +# s3 = s3fs.S3FileSystem(anon=False, **s3so) +# s3.mkdir("test") +# request.cls.s3so = s3so +# yield +# proc.terminate() +# proc.wait() + + +# class TestNestedDirectoryStore(TestDirectoryStore): +# def create_store(self, normalize_keys=False, **kwargs): +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# store = NestedDirectoryStore(path, normalize_keys=normalize_keys, **kwargs) +# return store + +# def test_init_array(self): +# store = self.create_store() +# assert store._dimension_separator == "/" +# init_array(store, shape=1000, chunks=100) + +# # check metadata +# assert array_meta_key in store +# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunks"] +# assert np.dtype(None) == meta["dtype"] +# assert meta["dimension_separator"] == "/" + +# def test_chunk_nesting(self): +# store = self.create_store() +# # any path where last segment looks like a chunk key gets special handling +# store[self.root + "0.0"] = b"xxx" +# assert b"xxx" == store[self.root + "0.0"] +# # assert b'xxx' == store['0/0'] +# store[self.root + "foo/10.20.30"] = b"yyy" +# assert b"yyy" == store[self.root + "foo/10.20.30"] +# # assert b'yyy' == store['foo/10/20/30'] +# store[self.root + "42"] = b"zzz" +# assert b"zzz" == store[self.root + "42"] + +# def test_listdir(self): +# store = self.create_store() +# z = zarr.zeros((10, 10), chunks=(5, 5), store=store) +# z[:] = 1 # write to all chunks +# for k in store.listdir(): +# assert store.get(k) is not None + + 
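[Editor's note: the commented-out TestNestedDirectoryStore below exercises nesting: with a "/" separator, chunk key "0.0" lands at the on-disk path "0/0". A sketch against the legacy 2.x API:]

import os
import tempfile

import zarr
from zarr.storage import NestedDirectoryStore

path = tempfile.mkdtemp()
store = NestedDirectoryStore(path)  # dimension_separator is implicitly "/"
z = zarr.zeros((10, 10), chunks=(5, 5), store=store)
z[:] = 1  # write all four chunks
assert os.path.exists(os.path.join(path, "0", "0"))  # chunk "0.0", nested on disk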
+# class TestNestedDirectoryStoreNone:
+#     def test_value_error(self):
+#         path = tempfile.mkdtemp()
+#         atexit.register(atexit_rmtree, path)
+#         store = NestedDirectoryStore(path, normalize_keys=True, dimension_separator=None)
+#         assert store._dimension_separator == "/"
+
+
+# class TestNestedDirectoryStoreWithWrongValue:
+#     def test_value_error(self):
+#         path = tempfile.mkdtemp()
+#         atexit.register(atexit_rmtree, path)
+#         with pytest.raises(ValueError):
+#             NestedDirectoryStore(path, normalize_keys=True, dimension_separator=".")
+
+
+# class TestN5Store(TestNestedDirectoryStore):
+#     def create_store(self, normalize_keys=False):
+#         path = tempfile.mkdtemp()
+#         atexit.register(atexit_rmtree, path)
+#         store = N5Store(path, normalize_keys=normalize_keys)
+#         return store
+
+#     def test_equal(self):
+#         store_a = self.create_store()
+#         store_b = N5Store(store_a.path)
+#         assert store_a == store_b
+
+#     @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"])
+#     def test_del_zarr_meta_key(self, zarr_meta_key):
+#         store = self.create_store()
+#         store[n5_attrs_key] = json_dumps({"foo": "bar"})
+#         del store[zarr_meta_key]
+#         assert n5_attrs_key not in store
+
+#     def test_chunk_nesting(self):
+#         store = self.create_store()
+#         store["0.0"] = b"xxx"
+#         assert "0.0" in store
+#         assert b"xxx" == store["0.0"]
+#         # assert b'xxx' == store['0/0']
+#         store["foo/10.20.30"] = b"yyy"
+#         assert "foo/10.20.30" in store
+#         assert b"yyy" == store["foo/10.20.30"]
+#         # N5 reverses axis order
+#         assert b"yyy" == store["foo/30/20/10"]
+#         del store["foo/10.20.30"]
+#         assert "foo/30/20/10" not in store
+#         store["42"] = b"zzz"
+#         assert "42" in store
+#         assert b"zzz" == store["42"]
+
+#     def test_init_array(self):
+#         store = self.create_store()
+#         init_array(store, shape=1000, chunks=100)
+
+#         # check metadata
+#         assert array_meta_key in store
+#         meta = store._metadata_class.decode_array_metadata(store[array_meta_key])
+#         assert ZARR_FORMAT == meta["zarr_format"]
+#         assert (1000,) == meta["shape"]
+#         assert (100,) == meta["chunks"]
+#         assert np.dtype(None) == meta["dtype"]
+#         # N5Store wraps the actual compressor
+#         compressor_config = meta["compressor"]["compressor_config"]
+#         assert default_compressor.get_config() == compressor_config
+#         # N5Store always has a fill value of 0
+#         assert meta["fill_value"] == 0
+#         assert meta["dimension_separator"] == "."
+#         # Top-level groups AND arrays should have
+#         # the n5 keyword in metadata
+#         raw_n5_meta = json.loads(store[n5_attrs_key])
+#         assert raw_n5_meta.get("n5", None) == N5_FORMAT
+
+#     def test_init_array_path(self):
+#         path = "foo/bar"
+#         store = self.create_store()
+#         init_array(store, shape=1000, chunks=100, path=path)
+
+#         # check metadata
+#         key = path + "/" + array_meta_key
+#         assert key in store
+#         meta = store._metadata_class.decode_array_metadata(store[key])
+#         assert ZARR_FORMAT == meta["zarr_format"]
+#         assert (1000,) == meta["shape"]
+#         assert (100,) == meta["chunks"]
+#         assert np.dtype(None) == meta["dtype"]
+#         # N5Store wraps the actual compressor
+#         compressor_config = meta["compressor"]["compressor_config"]
+#         assert default_compressor.get_config() == compressor_config
+#         # N5Store always has a fill value of 0
+#         assert meta["fill_value"] == 0
+
+#     def test_init_array_compat(self):
+#         store = self.create_store()
+#         init_array(store, shape=1000, chunks=100, compressor="none")
+#         meta = store._metadata_class.decode_array_metadata(store[array_meta_key])
+#         # N5Store wraps the actual compressor
+#         compressor_config = meta["compressor"]["compressor_config"]
+#         assert compressor_config is None
+
+#     def test_init_array_overwrite(self):
+#         self._test_init_array_overwrite("C")
+
+#     def test_init_array_overwrite_path(self):
+#         self._test_init_array_overwrite_path("C")
+
+#     def test_init_array_overwrite_chunk_store(self):
+#         self._test_init_array_overwrite_chunk_store("C")
+
+#     def test_init_group_overwrite(self):
+#         self._test_init_group_overwrite("C")
+
+#     def test_init_group_overwrite_path(self):
+#         self._test_init_group_overwrite_path("C")
+
+#     def test_init_group_overwrite_chunk_store(self):
+#         self._test_init_group_overwrite_chunk_store("C")
+
+#     def test_init_group(self):
+#         store = self.create_store()
+#         init_group(store)
+#         store[".zattrs"] = json_dumps({"foo": "bar"})
+#         # check metadata
+#         assert group_meta_key in store
+#         assert group_meta_key in store.listdir()
+#         assert group_meta_key in store.listdir("")
+#         meta = store._metadata_class.decode_group_metadata(store[group_meta_key])
+#         assert ZARR_FORMAT == meta["zarr_format"]
+
+#     def test_filters(self):
+#         all_filters, all_errors = zip(
+#             *[
+#                 (None, does_not_raise()),
+#                 ([], does_not_raise()),
+#                 ([AsType("f4", "f8")], pytest.raises(ValueError)),
+#             ]
+#         )
+#         for filters, error in zip(all_filters, all_errors):
+#             store = self.create_store()
+#             with error:
+#                 init_array(store, shape=1000, chunks=100, filters=filters)
+
+
+# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
+# class TestN5FSStore(TestFSStore):
+#     def create_store(self, normalize_keys=False, path=None, **kwargs):
+
+#         if path is None:
+#             path = tempfile.mkdtemp()
+#             atexit.register(atexit_rmtree, path)
+
+#         store = N5FSStore(path, normalize_keys=normalize_keys, **kwargs)
+#         return store
+
+#     def test_equal(self):
+#         store_a = self.create_store()
+#         store_b = N5FSStore(store_a.path)
+#         assert store_a == store_b
+
+#     # This is copied wholesale from the N5Store tests. The same test could
+#     # be run by making TestN5FSStore inherit from both TestFSStore and
+#     # TestN5Store, but a direct copy is arguably more explicit.
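[Editor's note: the tests copied here encode the key translation at the heart of the N5 stores: N5 lays chunks out in reversed axis order, so Zarr chunk key "10.20.30" is stored under "30/20/10". A sketch with the legacy 2.x N5Store, mirroring test_chunk_nesting above:]

import tempfile

from zarr.n5 import N5Store

store = N5Store(tempfile.mkdtemp())
store["foo/10.20.30"] = b"yyy"          # Zarr-style chunk key
assert store["foo/30/20/10"] == b"yyy"  # same chunk via the N5-native, axis-reversed path
del store["foo/10.20.30"]
assert "foo/30/20/10" not in store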
+ +# @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) +# def test_del_zarr_meta_key(self, zarr_meta_key): +# store = self.create_store() +# store[n5_attrs_key] = json_dumps({"foo": "bar"}) +# del store[zarr_meta_key] +# assert n5_attrs_key not in store + +# def test_chunk_nesting(self): +# store = self.create_store() +# store["0.0"] = b"xxx" +# assert "0.0" in store +# assert b"xxx" == store["0.0"] +# # assert b'xxx' == store['0/0'] +# store["foo/10.20.30"] = b"yyy" +# assert "foo/10.20.30" in store +# assert b"yyy" == store["foo/10.20.30"] +# # N5 reverses axis order +# assert b"yyy" == store["foo/30/20/10"] +# del store["foo/10.20.30"] +# assert "foo/30/20/10" not in store +# store["42"] = b"zzz" +# assert "42" in store +# assert b"zzz" == store["42"] + +# def test_init_array(self): +# store = self.create_store() +# init_array(store, shape=1000, chunks=100) + +# # check metadata +# assert array_meta_key in store +# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunks"] +# assert np.dtype(None) == meta["dtype"] +# # N5Store wraps the actual compressor +# compressor_config = meta["compressor"]["compressor_config"] +# assert default_compressor.get_config() == compressor_config +# # N5Store always has a fill value of 0 +# assert meta["fill_value"] == 0 +# assert meta["dimension_separator"] == "." +# # Top-level groups AND arrays should have +# # the n5 keyword in metadata +# raw_n5_meta = json.loads(store[n5_attrs_key]) +# assert raw_n5_meta.get("n5", None) == N5_FORMAT + +# def test_init_array_path(self): +# path = "foo/bar" +# store = self.create_store() +# init_array(store, shape=1000, chunks=100, path=path) + +# # check metadata +# key = path + "/" + array_meta_key +# assert key in store +# meta = store._metadata_class.decode_array_metadata(store[key]) +# assert ZARR_FORMAT == meta["zarr_format"] +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunks"] +# assert np.dtype(None) == meta["dtype"] +# # N5Store wraps the actual compressor +# compressor_config = meta["compressor"]["compressor_config"] +# assert default_compressor.get_config() == compressor_config +# # N5Store always has a fill value of 0 +# assert meta["fill_value"] == 0 + +# def test_init_array_compat(self): +# store = self.create_store() +# init_array(store, shape=1000, chunks=100, compressor="none") +# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) +# # N5Store wraps the actual compressor +# compressor_config = meta["compressor"]["compressor_config"] +# assert compressor_config is None + +# def test_init_array_overwrite(self): +# self._test_init_array_overwrite("C") + +# def test_init_array_overwrite_path(self): +# self._test_init_array_overwrite_path("C") + +# def test_init_array_overwrite_chunk_store(self): +# self._test_init_array_overwrite_chunk_store("C") + +# def test_init_group_overwrite(self): +# self._test_init_group_overwrite("C") + +# def test_init_group_overwrite_path(self): +# self._test_init_group_overwrite_path("C") + +# def test_init_group_overwrite_chunk_store(self): +# self._test_init_group_overwrite_chunk_store("C") + +# def test_dimension_separator(self): + +# with pytest.warns(UserWarning, match="dimension_separator"): +# self.create_store(dimension_separator="/") + +# def test_init_group(self): +# store = self.create_store() +# init_group(store) +# store[".zattrs"] = json_dumps({"foo": "bar"}) +# # check 
metadata +# assert group_meta_key in store +# assert group_meta_key in store.listdir() +# assert group_meta_key in store.listdir("") +# meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) +# assert ZARR_FORMAT == meta["zarr_format"] + +# def test_filters(self): +# all_filters, all_errors = zip( +# *[ +# (None, does_not_raise()), +# ([], does_not_raise()), +# ([AsType("f4", "f8")], pytest.raises(ValueError)), +# ] +# ) +# for filters, error in zip(all_filters, all_errors): +# store = self.create_store() +# with error: +# init_array(store, shape=1000, chunks=100, filters=filters) + + +# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +# class TestNestedFSStore(TestNestedDirectoryStore): +# def create_store(self, normalize_keys=False, path=None, **kwargs): +# if path is None: +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# store = FSStore( +# path, normalize_keys=normalize_keys, +# dimension_separator="/", auto_mkdir=True, **kwargs +# ) +# return store + +# def test_numbered_groups(self): +# import zarr + +# # Create an array +# store = self.create_store() +# group = zarr.group(store=store) +# arr = group.create_dataset("0", shape=(10, 10)) +# arr[1] = 1 + +# # Read it back +# store = self.create_store(path=store.path) +# zarr.open_group(store.path)["0"] + + +# class TestTempStore(StoreTests): +# def create_store(self, **kwargs): +# skip_if_nested_chunks(**kwargs) +# return TempStore(**kwargs) + +# def test_setdel(self): +# store = self.create_store() +# setdel_hierarchy_checks(store, self.root) + + +# class TestZipStore(StoreTests): + +# ZipStoreClass = ZipStore + +# def create_store(self, **kwargs): +# path = mktemp(suffix=".zip") +# atexit.register(os.remove, path) +# store = ZipStore(path, mode="w", **kwargs) +# return store + +# def test_mode(self): +# with self.ZipStoreClass("data/store.zip", mode="w") as store: +# store[self.root + "foo"] = b"bar" +# store = self.ZipStoreClass("data/store.zip", mode="r") +# with pytest.raises(PermissionError): +# store[self.root + "foo"] = b"bar" +# with pytest.raises(PermissionError): +# store.clear() + +# def test_flush(self): +# store = self.ZipStoreClass("data/store.zip", mode="w") +# store[self.root + "foo"] = b"bar" +# store.flush() +# assert store[self.root + "foo"] == b"bar" +# store.close() + +# store = self.ZipStoreClass("data/store.zip", mode="r") +# store.flush() # no-op + +# def test_context_manager(self): +# with self.create_store() as store: +# store[self.root + "foo"] = b"bar" +# store[self.root + "baz"] = b"qux" +# assert 2 == len(store) + +# def test_pop(self): +# # override because not implemented +# store = self.create_store() +# store[self.root + "foo"] = b"bar" +# with pytest.raises(NotImplementedError): +# store.pop(self.root + "foo") + +# def test_popitem(self): +# # override because not implemented +# store = self.create_store() +# store[self.root + "foo"] = b"bar" +# with pytest.raises(NotImplementedError): +# store.popitem() + +# def test_permissions(self): +# store = self.ZipStoreClass("data/store.zip", mode="w") +# foo_key = "foo" if self.version == 2 else self.root + "foo" +# # TODO: cannot provide key ending in / for v3 +# # how to create an empty folder in that case? 
+# baz_key = "baz/" if self.version == 2 else self.root + "baz" +# store[foo_key] = b"bar" +# store[baz_key] = b"" + +# store.flush() +# store.close() +# z = ZipFile("data/store.zip", "r") +# info = z.getinfo(foo_key) +# perm = oct(info.external_attr >> 16) +# assert perm == "0o644" +# info = z.getinfo(baz_key) +# perm = oct(info.external_attr >> 16) +# # only for posix platforms +# if os.name == "posix": +# if self.version == 2: +# assert perm == "0o40775" +# else: +# # baz/ on v2, but baz on v3, so not a directory +# assert perm == "0o644" +# z.close() + +# def test_store_and_retrieve_ndarray(self): +# store = ZipStore("data/store.zip") +# x = np.array([[1, 2], [3, 4]]) +# store["foo"] = x +# y = np.frombuffer(store["foo"], dtype=x.dtype).reshape(x.shape) +# assert np.array_equiv(y, x) + + +# class TestDBMStore(StoreTests): +# def create_store(self, dimension_separator=None): +# path = mktemp(suffix=".anydbm") +# atexit.register(atexit_rmglob, path + "*") +# # create store using default dbm implementation +# store = DBMStore(path, flag="n", dimension_separator=dimension_separator) +# return store + +# def test_context_manager(self): +# with self.create_store() as store: +# store[self.root + "foo"] = b"bar" +# store[self.root + "baz"] = b"qux" +# assert 2 == len(store) + + +# class TestDBMStoreDumb(TestDBMStore): +# def create_store(self, **kwargs): +# path = mktemp(suffix=".dumbdbm") +# atexit.register(atexit_rmglob, path + "*") + +# import dbm.dumb as dumbdbm + +# store = DBMStore(path, flag="n", open=dumbdbm.open, **kwargs) +# return store + + +# class TestDBMStoreGnu(TestDBMStore): +# def create_store(self, **kwargs): +# gdbm = pytest.importorskip("dbm.gnu") +# path = mktemp(suffix=".gdbm") # pragma: no cover +# atexit.register(os.remove, path) # pragma: no cover +# store = DBMStore( +# path, flag="n", open=gdbm.open, write_lock=False, **kwargs +# ) # pragma: no cover +# return store # pragma: no cover + + +# class TestDBMStoreNDBM(TestDBMStore): +# def create_store(self, **kwargs): +# ndbm = pytest.importorskip("dbm.ndbm") +# path = mktemp(suffix=".ndbm") # pragma: no cover +# atexit.register(atexit_rmglob, path + "*") # pragma: no cover +# store = DBMStore(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover +# return store # pragma: no cover + + +# class TestDBMStoreBerkeleyDB(TestDBMStore): +# def create_store(self, **kwargs): +# bsddb3 = pytest.importorskip("bsddb3") +# path = mktemp(suffix=".dbm") +# atexit.register(os.remove, path) +# store = DBMStore(path, flag="n", open=bsddb3.btopen, write_lock=False, **kwargs) +# return store + + +# class TestLMDBStore(StoreTests): +# def create_store(self, **kwargs): +# pytest.importorskip("lmdb") +# path = mktemp(suffix=".lmdb") +# atexit.register(atexit_rmtree, path) +# buffers = True +# store = LMDBStore(path, buffers=buffers, **kwargs) +# return store + +# def test_context_manager(self): +# with self.create_store() as store: +# store[self.root + "foo"] = b"bar" +# store[self.root + "baz"] = b"qux" +# assert 2 == len(store) + + +# class TestSQLiteStore(StoreTests): +# def create_store(self, **kwargs): +# pytest.importorskip("sqlite3") +# path = mktemp(suffix=".db") +# atexit.register(atexit_rmtree, path) +# store = SQLiteStore(path, **kwargs) +# return store + +# def test_underscore_in_name(self): +# path = mktemp(suffix=".db") +# atexit.register(atexit_rmtree, path) +# store = SQLiteStore(path) +# store["a"] = b"aaa" +# store["a_b"] = b"aa_bb" +# store.rmdir("a") +# assert "a_b" in store + + +# class 
TestSQLiteStoreInMemory(TestSQLiteStore): +# def create_store(self, **kwargs): +# pytest.importorskip("sqlite3") +# store = SQLiteStore(":memory:", **kwargs) +# return store + +# def test_pickle(self): + +# # setup store +# store = self.create_store() +# store[self.root + "foo"] = b"bar" +# store[self.root + "baz"] = b"quux" + +# # round-trip through pickle +# with pytest.raises(PicklingError): +# pickle.dumps(store) + + +# @skip_test_env_var("ZARR_TEST_MONGO") +# class TestMongoDBStore(StoreTests): +# def create_store(self, **kwargs): +# pytest.importorskip("pymongo") +# store = MongoDBStore( +# host="127.0.0.1", database="zarr_tests", collection="zarr_tests", **kwargs +# ) +# # start with an empty store +# store.clear() +# return store + + +# @skip_test_env_var("ZARR_TEST_REDIS") +# class TestRedisStore(StoreTests): +# def create_store(self, **kwargs): +# # TODO: this is the default host for Redis on Travis, +# # we probably want to generalize this though +# pytest.importorskip("redis") +# store = RedisStore(host="localhost", port=6379, **kwargs) +# # start with an empty store +# store.clear() +# return store + + +# class TestLRUStoreCache(StoreTests): + +# CountingClass = CountingDict +# LRUStoreClass = LRUStoreCache + +# def create_store(self, **kwargs): +# # wrapper therefore no dimension_separator argument +# skip_if_nested_chunks(**kwargs) +# return self.LRUStoreClass(dict(), max_size=2**27) + +# def test_cache_values_no_max_size(self): + +# # setup store +# store = self.CountingClass() +# foo_key = self.root + "foo" +# bar_key = self.root + "bar" +# store[foo_key] = b"xxx" +# store[bar_key] = b"yyy" +# assert 0 == store.counter["__getitem__", foo_key] +# assert 1 == store.counter["__setitem__", foo_key] +# assert 0 == store.counter["__getitem__", bar_key] +# assert 1 == store.counter["__setitem__", bar_key] + +# # setup cache +# cache = self.LRUStoreClass(store, max_size=None) +# assert 0 == cache.hits +# assert 0 == cache.misses + +# # test first __getitem__, cache miss +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 1 == store.counter["__setitem__", foo_key] +# assert 0 == cache.hits +# assert 1 == cache.misses + +# # test second __getitem__, cache hit +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 1 == store.counter["__setitem__", foo_key] +# assert 1 == cache.hits +# assert 1 == cache.misses + +# # test __setitem__, __getitem__ +# cache[foo_key] = b"zzz" +# assert 1 == store.counter["__getitem__", foo_key] +# assert 2 == store.counter["__setitem__", foo_key] +# # should be a cache hit +# assert b"zzz" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 2 == store.counter["__setitem__", foo_key] +# assert 2 == cache.hits +# assert 1 == cache.misses + +# # manually invalidate all cached values +# cache.invalidate_values() +# assert b"zzz" == cache[foo_key] +# assert 2 == store.counter["__getitem__", foo_key] +# assert 2 == store.counter["__setitem__", foo_key] +# cache.invalidate() +# assert b"zzz" == cache[foo_key] +# assert 3 == store.counter["__getitem__", foo_key] +# assert 2 == store.counter["__setitem__", foo_key] + +# # test __delitem__ +# del cache[foo_key] +# with pytest.raises(KeyError): +# # noinspection PyStatementEffect +# cache[foo_key] +# with pytest.raises(KeyError): +# # noinspection PyStatementEffect +# store[foo_key] + +# # verify other keys untouched +# assert 0 == store.counter["__getitem__", bar_key] +# assert 1 == 
store.counter["__setitem__", bar_key] + +# def test_cache_values_with_max_size(self): + +# # setup store +# store = self.CountingClass() +# foo_key = self.root + "foo" +# bar_key = self.root + "bar" +# store[foo_key] = b"xxx" +# store[bar_key] = b"yyy" +# assert 0 == store.counter["__getitem__", foo_key] +# assert 0 == store.counter["__getitem__", bar_key] +# # setup cache - can only hold one item +# cache = self.LRUStoreClass(store, max_size=5) +# assert 0 == cache.hits +# assert 0 == cache.misses + +# # test first 'foo' __getitem__, cache miss +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 0 == cache.hits +# assert 1 == cache.misses + +# # test second 'foo' __getitem__, cache hit +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 1 == cache.hits +# assert 1 == cache.misses + +# # test first 'bar' __getitem__, cache miss +# assert b"yyy" == cache[bar_key] +# assert 1 == store.counter["__getitem__", bar_key] +# assert 1 == cache.hits +# assert 2 == cache.misses + +# # test second 'bar' __getitem__, cache hit +# assert b"yyy" == cache[bar_key] +# assert 1 == store.counter["__getitem__", bar_key] +# assert 2 == cache.hits +# assert 2 == cache.misses + +# # test 'foo' __getitem__, should have been evicted, cache miss +# assert b"xxx" == cache[foo_key] +# assert 2 == store.counter["__getitem__", foo_key] +# assert 2 == cache.hits +# assert 3 == cache.misses + +# # test 'bar' __getitem__, should have been evicted, cache miss +# assert b"yyy" == cache[bar_key] +# assert 2 == store.counter["__getitem__", bar_key] +# assert 2 == cache.hits +# assert 4 == cache.misses + +# # setup store +# store = self.CountingClass() +# store[foo_key] = b"xxx" +# store[bar_key] = b"yyy" +# assert 0 == store.counter["__getitem__", foo_key] +# assert 0 == store.counter["__getitem__", bar_key] +# # setup cache - can hold two items +# cache = self.LRUStoreClass(store, max_size=6) +# assert 0 == cache.hits +# assert 0 == cache.misses + +# # test first 'foo' __getitem__, cache miss +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 0 == cache.hits +# assert 1 == cache.misses + +# # test second 'foo' __getitem__, cache hit +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 1 == cache.hits +# assert 1 == cache.misses + +# # test first 'bar' __getitem__, cache miss +# assert b"yyy" == cache[bar_key] +# assert 1 == store.counter["__getitem__", bar_key] +# assert 1 == cache.hits +# assert 2 == cache.misses + +# # test second 'bar' __getitem__, cache hit +# assert b"yyy" == cache[bar_key] +# assert 1 == store.counter["__getitem__", bar_key] +# assert 2 == cache.hits +# assert 2 == cache.misses + +# # test 'foo' __getitem__, should still be cached +# assert b"xxx" == cache[foo_key] +# assert 1 == store.counter["__getitem__", foo_key] +# assert 3 == cache.hits +# assert 2 == cache.misses + +# # test 'bar' __getitem__, should still be cached +# assert b"yyy" == cache[bar_key] +# assert 1 == store.counter["__getitem__", bar_key] +# assert 4 == cache.hits +# assert 2 == cache.misses + +# def test_cache_keys(self): + +# # setup +# store = self.CountingClass() +# foo_key = self.root + "foo" +# bar_key = self.root + "bar" +# baz_key = self.root + "baz" +# store[foo_key] = b"xxx" +# store[bar_key] = b"yyy" +# assert 0 == store.counter["__contains__", foo_key] +# assert 0 == store.counter["__iter__"] +# assert 0 == store.counter["keys"] +# cache = 
self.LRUStoreClass(store, max_size=None) + +# # keys should be cached on first call +# keys = sorted(cache.keys()) +# assert keys == [bar_key, foo_key] +# assert 1 == store.counter["keys"] +# # keys should now be cached +# assert keys == sorted(cache.keys()) +# assert 1 == store.counter["keys"] +# assert foo_key in cache +# assert 1 == store.counter["__contains__", foo_key] +# # the next check for `foo_key` is cached +# assert foo_key in cache +# assert 1 == store.counter["__contains__", foo_key] +# assert keys == sorted(cache) +# assert 0 == store.counter["__iter__"] +# assert 1 == store.counter["keys"] + +# # cache should be cleared if store is modified - crude but simple for now +# cache[baz_key] = b"zzz" +# keys = sorted(cache.keys()) +# assert keys == [bar_key, baz_key, foo_key] +# assert 2 == store.counter["keys"] +# # keys should now be cached +# assert keys == sorted(cache.keys()) +# assert 2 == store.counter["keys"] + +# # manually invalidate keys +# cache.invalidate_keys() +# keys = sorted(cache.keys()) +# assert keys == [bar_key, baz_key, foo_key] +# assert 3 == store.counter["keys"] +# assert 1 == store.counter["__contains__", foo_key] +# assert 0 == store.counter["__iter__"] +# cache.invalidate_keys() +# keys = sorted(cache) +# assert keys == [bar_key, baz_key, foo_key] +# assert 4 == store.counter["keys"] +# assert 1 == store.counter["__contains__", foo_key] +# assert 0 == store.counter["__iter__"] +# cache.invalidate_keys() +# assert foo_key in cache +# assert 4 == store.counter["keys"] +# assert 2 == store.counter["__contains__", foo_key] +# assert 0 == store.counter["__iter__"] + +# # check these would get counted if called directly +# assert foo_key in store +# assert 3 == store.counter["__contains__", foo_key] +# assert keys == sorted(store) +# assert 1 == store.counter["__iter__"] + + +# def test_getsize(): +# store = KVStore(dict()) +# store["foo"] = b"aaa" +# store["bar"] = b"bbbb" +# store["baz/quux"] = b"ccccc" +# assert 7 == getsize(store) +# assert 5 == getsize(store, "baz") + +# store = KVStore(dict()) +# store["boo"] = None +# assert -1 == getsize(store) + + +# @pytest.mark.parametrize("dict_store", [False, True]) +# def test_migrate_1to2(dict_store): +# from zarr import meta_v1 + +# # N.B., version 1 did not support hierarchies, so we only have to be +# # concerned about migrating a single array at the root of the store + +# # setup +# store = dict() if dict_store else KVStore(dict()) +# meta = dict( +# shape=(100,), +# chunks=(10,), +# dtype=np.dtype("f4"), +# compression="zlib", +# compression_opts=1, +# fill_value=None, +# order="C", +# ) +# meta_json = meta_v1.encode_metadata(meta) +# store["meta"] = meta_json +# store["attrs"] = json.dumps(dict()).encode("ascii") + +# # run migration +# migrate_1to2(store) + +# # check results +# assert "meta" not in store +# assert array_meta_key in store +# assert "attrs" not in store +# assert attrs_key in store +# meta_migrated = decode_array_metadata(store[array_meta_key]) +# assert 2 == meta_migrated["zarr_format"] + +# # preserved fields +# for f in "shape", "chunks", "dtype", "fill_value", "order": +# assert meta[f] == meta_migrated[f] + +# # migrate should have added empty filters field +# assert meta_migrated["filters"] is None + +# # check compression and compression_opts migrated to compressor +# assert "compression" not in meta_migrated +# assert "compression_opts" not in meta_migrated +# assert meta_migrated["compressor"] == Zlib(1).get_config() + +# # check dict compression_opts +# store = dict() if 
dict_store else KVStore(dict())
+#     meta["compression"] = "blosc"
+#     meta["compression_opts"] = dict(cname="lz4", clevel=5, shuffle=1)
+#     meta_json = meta_v1.encode_metadata(meta)
+#     store["meta"] = meta_json
+#     store["attrs"] = json.dumps(dict()).encode("ascii")
+#     migrate_1to2(store)
+#     meta_migrated = decode_array_metadata(store[array_meta_key])
+#     assert "compression" not in meta_migrated
+#     assert "compression_opts" not in meta_migrated
+#     assert meta_migrated["compressor"] == Blosc(cname="lz4", clevel=5, shuffle=1).get_config()
+
+#     # check 'none' compression is migrated to None (null in JSON)
+#     store = dict() if dict_store else KVStore(dict())
+#     meta["compression"] = "none"
+#     meta_json = meta_v1.encode_metadata(meta)
+#     store["meta"] = meta_json
+#     store["attrs"] = json.dumps(dict()).encode("ascii")
+#     migrate_1to2(store)
+#     meta_migrated = decode_array_metadata(store[array_meta_key])
+#     assert "compression" not in meta_migrated
+#     assert "compression_opts" not in meta_migrated
+#     assert meta_migrated["compressor"] is None
+
+
+# def test_format_compatibility():
+
+#     # This test is intended to catch any unintended changes that break the ability to
+#     # read data stored with a previous minor version (which should be format-compatible).
+
+#     # fixture data
+#     fixture = group(store=DirectoryStore("fixture"))
+
+#     # set seed to get consistent random data
+#     np.random.seed(42)
+
+#     arrays_chunks = [
+#         (np.arange(1111, dtype="
+#         prefix = self.root if self.version > 2 else ""
+#         # setup some values
+#         store[prefix + "a"] = b"aaa"
+#         store[prefix + "b"] = b"bbb"
+#         store[prefix + "c/d"] = b"ddd"
+#         store[prefix + "c/e/f"] = b"fff"
+
+#         # test iterators on store with data
+#         assert 4 == len(store)
+#         keys = [prefix + "a", prefix + "b", prefix + "c/d", prefix + "c/e/f"]
+#         values = [b"aaa", b"bbb", b"ddd", b"fff"]
+#         items = list(zip(keys, values))
+#         assert set(keys) == set(store)
+#         assert set(keys) == set(store.keys())
+#         assert set(values) == set(store.values())
+#         assert set(items) == set(store.items())
-        if path is None:
-            path = tempfile.mkdtemp()
-            atexit.register(atexit_rmtree, path)
+#     def test_getsize(self):
+#         return super().test_getsize()
-        store = N5FSStore(path, normalize_keys=normalize_keys, **kwargs)
-        return store
-
-    def test_equal(self):
-        store_a = self.create_store()
-        store_b = N5FSStore(store_a.path)
-        assert store_a == store_b
+#     def test_hierarchy(self):
+#         return super().test_hierarchy()
-    # This is copied wholesale from the N5Store tests. The same test could
-    # be run by making TestN5FSStore inherit from both TestFSStore and
-    # TestN5Store, but a direct copy is arguably more explicit.
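For context on the hunk above: the commented-out `test_migrate_1to2` pins down the v1-to-v2 metadata migration, in which the separate v1 `compression`/`compression_opts` keys are folded into a single v2 `compressor` config dict. A minimal sketch of that mapping (not part of the patch), using only the numcodecs API the test itself relies on:

    from numcodecs import Blosc

    # v1 metadata carried two separate keys ...
    v1_meta = {"compression": "blosc",
               "compression_opts": dict(cname="lz4", clevel=5, shuffle=1)}
    # ... which migrate_1to2 collapses into one v2 `compressor` config dict:
    v2_compressor = Blosc(cname="lz4", clevel=5, shuffle=1).get_config()
    # e.g. {'id': 'blosc', 'cname': 'lz4', 'clevel': 5, 'shuffle': 1, 'blocksize': 0}
    # and plain "none" compression becomes compressor=None (null in JSON)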
+# @pytest.mark.skipif(sys.version_info < (3, 7), reason="attr not serializable in py36") +# def test_pickle(self): +# # internal attribute on ContainerClient isn't serializable for py36 and earlier +# super().test_pickle() - @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) - def test_del_zarr_meta_key(self, zarr_meta_key): - store = self.create_store() - store[n5_attrs_key] = json_dumps({"foo": "bar"}) - del store[zarr_meta_key] - assert n5_attrs_key not in store - - def test_chunk_nesting(self): - store = self.create_store() - store["0.0"] = b"xxx" - assert "0.0" in store - assert b"xxx" == store["0.0"] - # assert b'xxx' == store['0/0'] - store["foo/10.20.30"] = b"yyy" - assert "foo/10.20.30" in store - assert b"yyy" == store["foo/10.20.30"] - # N5 reverses axis order - assert b"yyy" == store["foo/30/20/10"] - del store["foo/10.20.30"] - assert "foo/30/20/10" not in store - store["42"] = b"zzz" - assert "42" in store - assert b"zzz" == store["42"] - - def test_init_array(self): - store = self.create_store() - init_array(store, shape=1000, chunks=100) - - # check metadata - assert array_meta_key in store - meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert (1000,) == meta["shape"] - assert (100,) == meta["chunks"] - assert np.dtype(None) == meta["dtype"] - # N5Store wraps the actual compressor - compressor_config = meta["compressor"]["compressor_config"] - assert default_compressor.get_config() == compressor_config - # N5Store always has a fill value of 0 - assert meta["fill_value"] == 0 - assert meta["dimension_separator"] == "." - # Top-level groups AND arrays should have - # the n5 keyword in metadata - raw_n5_meta = json.loads(store[n5_attrs_key]) - assert raw_n5_meta.get("n5", None) == N5_FORMAT - - def test_init_array_path(self): - path = "foo/bar" - store = self.create_store() - init_array(store, shape=1000, chunks=100, path=path) - - # check metadata - key = path + "/" + array_meta_key - assert key in store - meta = store._metadata_class.decode_array_metadata(store[key]) - assert ZARR_FORMAT == meta["zarr_format"] - assert (1000,) == meta["shape"] - assert (100,) == meta["chunks"] - assert np.dtype(None) == meta["dtype"] - # N5Store wraps the actual compressor - compressor_config = meta["compressor"]["compressor_config"] - assert default_compressor.get_config() == compressor_config - # N5Store always has a fill value of 0 - assert meta["fill_value"] == 0 - - def test_init_array_compat(self): - store = self.create_store() - init_array(store, shape=1000, chunks=100, compressor="none") - meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) - # N5Store wraps the actual compressor - compressor_config = meta["compressor"]["compressor_config"] - assert compressor_config is None - def test_init_array_overwrite(self): - self._test_init_array_overwrite("C") +# class TestConsolidatedMetadataStore: - def test_init_array_overwrite_path(self): - self._test_init_array_overwrite_path("C") +# version = 2 +# ConsolidatedMetadataClass = ConsolidatedMetadataStore - def test_init_array_overwrite_chunk_store(self): - self._test_init_array_overwrite_chunk_store("C") +# @property +# def metadata_key(self): +# return ".zmetadata" - def test_init_group_overwrite(self): - self._test_init_group_overwrite("C") +# def test_bad_format(self): - def test_init_group_overwrite_path(self): - self._test_init_group_overwrite_path("C") +# # setup store with consolidated metadata +# store = dict() 
+# consolidated = { +# # bad format version +# "zarr_consolidated_format": 0, +# } +# store[self.metadata_key] = json.dumps(consolidated).encode() - def test_init_group_overwrite_chunk_store(self): - self._test_init_group_overwrite_chunk_store("C") +# # check appropriate error is raised +# with pytest.raises(MetadataError): +# self.ConsolidatedMetadataClass(store) - def test_dimension_separator(self): +# def test_bad_store_version(self): +# with pytest.raises(ValueError): +# self.ConsolidatedMetadataClass(KVStoreV3(dict())) - with pytest.warns(UserWarning, match="dimension_separator"): - self.create_store(dimension_separator="/") +# def test_read_write(self): - def test_init_group(self): - store = self.create_store() - init_group(store) - store[".zattrs"] = json_dumps({"foo": "bar"}) - # check metadata - assert group_meta_key in store - assert group_meta_key in store.listdir() - assert group_meta_key in store.listdir("") - meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) - assert ZARR_FORMAT == meta["zarr_format"] - - def test_filters(self): - all_filters, all_errors = zip( - *[ - (None, does_not_raise()), - ([], does_not_raise()), - ([AsType("f4", "f8")], pytest.raises(ValueError)), - ] - ) - for filters, error in zip(all_filters, all_errors): - store = self.create_store() - with error: - init_array(store, shape=1000, chunks=100, filters=filters) +# # setup store with consolidated metadata +# store = dict() +# consolidated = { +# "zarr_consolidated_format": 1, +# "metadata": { +# "foo": "bar", +# "baz": 42, +# }, +# } +# store[self.metadata_key] = json.dumps(consolidated).encode() +# # create consolidated store +# cs = self.ConsolidatedMetadataClass(store) -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -class TestNestedFSStore(TestNestedDirectoryStore): - def create_store(self, normalize_keys=False, path=None, **kwargs): - if path is None: - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = FSStore( - path, normalize_keys=normalize_keys, dimension_separator="/", auto_mkdir=True, **kwargs - ) - return store +# # test __contains__, __getitem__ +# for key, value in consolidated["metadata"].items(): +# assert key in cs +# assert value == cs[key] - def test_numbered_groups(self): - import zarr +# # test __delitem__, __setitem__ +# with pytest.raises(PermissionError): +# del cs["foo"] +# with pytest.raises(PermissionError): +# cs["bar"] = 0 +# with pytest.raises(PermissionError): +# cs["spam"] = "eggs" - # Create an array - store = self.create_store() - group = zarr.group(store=store) - arr = group.create_dataset("0", shape=(10, 10)) - arr[1] = 1 - # Read it back - store = self.create_store(path=store.path) - zarr.open_group(store.path)["0"] +# # standalone test we do not want to run on each store. 
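The commented `TestConsolidatedMetadataStore` above captures the store's whole contract: reads are served from a single consolidated JSON blob, and every mutation raises `PermissionError`. A minimal sketch of that contract (not part of the patch), assuming the zarr-python 2.x `ConsolidatedMetadataStore` API that the test exercises:

    import json
    from zarr.storage import ConsolidatedMetadataStore

    # the underlying store holds one consolidated blob under ".zmetadata"
    store = {".zmetadata": json.dumps({
        "zarr_consolidated_format": 1,
        "metadata": {"foo": "bar", "baz": 42},
    }).encode()}

    cs = ConsolidatedMetadataStore(store)
    assert cs["foo"] == "bar"  # reads come straight from the consolidated metadata
    try:
        cs["spam"] = "eggs"    # any write is rejected
    except PermissionError:
        pass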
-class TestTempStore(StoreTests): - def create_store(self, **kwargs): - skip_if_nested_chunks(**kwargs) - return TempStore(**kwargs) +# def test_fill_value_change(): +# a = zarr.create((10, 10), dtype=int) - def test_setdel(self): - store = self.create_store() - setdel_hierarchy_checks(store, self.root) +# assert a[0, 0] == 0 +# a.fill_value = 1 -class TestZipStore(StoreTests): +# assert a[0, 0] == 1 - ZipStoreClass = ZipStore +# assert json.loads(a.store[".zarray"])["fill_value"] == 1 - def create_store(self, **kwargs): - path = mktemp(suffix=".zip") - atexit.register(os.remove, path) - store = ZipStore(path, mode="w", **kwargs) - return store - def test_mode(self): - with self.ZipStoreClass("data/store.zip", mode="w") as store: - store[self.root + "foo"] = b"bar" - store = self.ZipStoreClass("data/store.zip", mode="r") - with pytest.raises(PermissionError): - store[self.root + "foo"] = b"bar" - with pytest.raises(PermissionError): - store.clear() - - def test_flush(self): - store = self.ZipStoreClass("data/store.zip", mode="w") - store[self.root + "foo"] = b"bar" - store.flush() - assert store[self.root + "foo"] == b"bar" - store.close() - - store = self.ZipStoreClass("data/store.zip", mode="r") - store.flush() # no-op - - def test_context_manager(self): - with self.create_store() as store: - store[self.root + "foo"] = b"bar" - store[self.root + "baz"] = b"qux" - assert 2 == len(store) - - def test_pop(self): - # override because not implemented - store = self.create_store() - store[self.root + "foo"] = b"bar" - with pytest.raises(NotImplementedError): - store.pop(self.root + "foo") +# def test_get_hierarchy_metadata_v2(): +# # v2 stores do not have hierarchy metadata (i.e. zarr.json) +# with pytest.raises(ValueError): +# _get_hierarchy_metadata(KVStore(dict)) - def test_popitem(self): - # override because not implemented - store = self.create_store() - store[self.root + "foo"] = b"bar" - with pytest.raises(NotImplementedError): - store.popitem() - - def test_permissions(self): - store = self.ZipStoreClass("data/store.zip", mode="w") - foo_key = "foo" if self.version == 2 else self.root + "foo" - # TODO: cannot provide key ending in / for v3 - # how to create an empty folder in that case? 
- baz_key = "baz/" if self.version == 2 else self.root + "baz" - store[foo_key] = b"bar" - store[baz_key] = b"" - - store.flush() - store.close() - z = ZipFile("data/store.zip", "r") - info = z.getinfo(foo_key) - perm = oct(info.external_attr >> 16) - assert perm == "0o644" - info = z.getinfo(baz_key) - perm = oct(info.external_attr >> 16) - # only for posix platforms - if os.name == "posix": - if self.version == 2: - assert perm == "0o40775" - else: - # baz/ on v2, but baz on v3, so not a directory - assert perm == "0o644" - z.close() - - def test_store_and_retrieve_ndarray(self): - store = ZipStore("data/store.zip") - x = np.array([[1, 2], [3, 4]]) - store["foo"] = x - y = np.frombuffer(store["foo"], dtype=x.dtype).reshape(x.shape) - assert np.array_equiv(y, x) - - -class TestDBMStore(StoreTests): - def create_store(self, dimension_separator=None): - path = mktemp(suffix=".anydbm") - atexit.register(atexit_rmglob, path + "*") - # create store using default dbm implementation - store = DBMStore(path, flag="n", dimension_separator=dimension_separator) - return store - def test_context_manager(self): - with self.create_store() as store: - store[self.root + "foo"] = b"bar" - store[self.root + "baz"] = b"qux" - assert 2 == len(store) +# def test_normalize_store_arg(tmpdir): +# with pytest.raises(ValueError): +# normalize_store_arg(dict(), zarr_version=4) +# for ext, Class in [(".zip", ZipStore), (".n5", N5Store)]: +# fn = tmpdir.join("store" + ext) +# store = normalize_store_arg(str(fn), zarr_version=2, mode="w") +# assert isinstance(store, Class) -class TestDBMStoreDumb(TestDBMStore): - def create_store(self, **kwargs): - path = mktemp(suffix=".dumbdbm") - atexit.register(atexit_rmglob, path + "*") +# if have_fsspec: +# import fsspec - import dbm.dumb as dumbdbm +# path = tempfile.mkdtemp() +# store = normalize_store_arg("file://" + path, zarr_version=2, mode="w") +# assert isinstance(store, FSStore) - store = DBMStore(path, flag="n", open=dumbdbm.open, **kwargs) - return store +# store = normalize_store_arg(fsspec.get_mapper("file://" + path)) +# assert isinstance(store, FSStore) -class TestDBMStoreGnu(TestDBMStore): - def create_store(self, **kwargs): - gdbm = pytest.importorskip("dbm.gnu") - path = mktemp(suffix=".gdbm") # pragma: no cover - atexit.register(os.remove, path) # pragma: no cover - store = DBMStore( - path, flag="n", open=gdbm.open, write_lock=False, **kwargs - ) # pragma: no cover - return store # pragma: no cover +# def test_meta_prefix_6853(): +# fixture = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" +# meta = fixture / "meta" +# if not meta.exists(): # pragma: no cover +# s = DirectoryStore(str(meta), dimension_separator=".") +# a = zarr.open(store=s, mode="w", shape=(2, 2), dtype=" 2 else "" - # setup some values - store[prefix + "a"] = b"aaa" - store[prefix + "b"] = b"bbb" - store[prefix + "c/d"] = b"ddd" - store[prefix + "c/e/f"] = b"fff" - - # test iterators on store with data - assert 4 == len(store) - keys = [prefix + "a", prefix + "b", prefix + "c/d", prefix + "c/e/f"] - values = [b"aaa", b"bbb", b"ddd", b"fff"] - items = list(zip(keys, values)) - assert set(keys) == set(store) - assert set(keys) == set(store.keys()) - assert set(values) == set(store.values()) - assert set(items) == set(store.items()) - - def test_getsize(self): - return super().test_getsize() - - def test_hierarchy(self): - return super().test_hierarchy() - - @pytest.mark.skipif(sys.version_info < (3, 7), reason="attr not serializable in py36") - def test_pickle(self): - # 
internal attribute on ContainerClient isn't serializable for py36 and earlier - super().test_pickle() - - -class TestConsolidatedMetadataStore: - - version = 2 - ConsolidatedMetadataClass = ConsolidatedMetadataStore - - @property - def metadata_key(self): - return ".zmetadata" - - def test_bad_format(self): - - # setup store with consolidated metadata - store = dict() - consolidated = { - # bad format version - "zarr_consolidated_format": 0, - } - store[self.metadata_key] = json.dumps(consolidated).encode() - - # check appropriate error is raised - with pytest.raises(MetadataError): - self.ConsolidatedMetadataClass(store) - - def test_bad_store_version(self): - with pytest.raises(ValueError): - self.ConsolidatedMetadataClass(KVStoreV3(dict())) - - def test_read_write(self): - - # setup store with consolidated metadata - store = dict() - consolidated = { - "zarr_consolidated_format": 1, - "metadata": { - "foo": "bar", - "baz": 42, - }, - } - store[self.metadata_key] = json.dumps(consolidated).encode() - - # create consolidated store - cs = self.ConsolidatedMetadataClass(store) - - # test __contains__, __getitem__ - for key, value in consolidated["metadata"].items(): - assert key in cs - assert value == cs[key] - - # test __delitem__, __setitem__ - with pytest.raises(PermissionError): - del cs["foo"] - with pytest.raises(PermissionError): - cs["bar"] = 0 - with pytest.raises(PermissionError): - cs["spam"] = "eggs" - - -# standalone test we do not want to run on each store. - - -def test_fill_value_change(): - a = zarr.create((10, 10), dtype=int) - - assert a[0, 0] == 0 - - a.fill_value = 1 - - assert a[0, 0] == 1 - - assert json.loads(a.store[".zarray"])["fill_value"] == 1 - - -def test_get_hierarchy_metadata_v2(): - # v2 stores do not have hierarchy metadata (i.e. zarr.json) - with pytest.raises(ValueError): - _get_hierarchy_metadata(KVStore(dict)) - - -def test_normalize_store_arg(tmpdir): - with pytest.raises(ValueError): - normalize_store_arg(dict(), zarr_version=4) - - for ext, Class in [(".zip", ZipStore), (".n5", N5Store)]: - fn = tmpdir.join("store" + ext) - store = normalize_store_arg(str(fn), zarr_version=2, mode="w") - assert isinstance(store, Class) - - if have_fsspec: - import fsspec - - path = tempfile.mkdtemp() - store = normalize_store_arg("file://" + path, zarr_version=2, mode="w") - assert isinstance(store, FSStore) - - store = normalize_store_arg(fsspec.get_mapper("file://" + path)) - assert isinstance(store, FSStore) - - -def test_meta_prefix_6853(): - - fixture = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" - meta = fixture / "meta" - if not meta.exists(): # pragma: no cover - s = DirectoryStore(str(meta), dimension_separator=".") - a = zarr.open(store=s, mode="w", shape=(2, 2), dtype=" Date: Wed, 7 Feb 2024 14:57:30 +0100 Subject: [PATCH 0424/1078] Codecs without array metadata (#1632) * Pull Zarrita into Zarr-Python @ 78274781ad64aef95772eb4b083f7ea9b7d03d06 No code changes to Zarrita were made. 
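The central change: codecs no longer capture array metadata at construction time. Each codec is built from its own metadata only, and per-chunk details travel with every call as an `ArraySpec` (the `chunk_spec`) together with the `RuntimeConfiguration`. A sketch of the resulting calling convention, with names taken from the diff below (an illustrative fragment, not a runnable program):

    # inside an async context, reading and writing one chunk:
    chunk_spec = array.metadata.get_chunk_spec(chunk_coords)  # shape, dtype, fill_value
    chunk_array = await codec_pipeline.decode(chunk_bytes, chunk_spec, runtime_configuration)
    chunk_bytes = await codec_pipeline.encode(chunk_array, chunk_spec, runtime_configuration)

A single codec instance can therefore serve any array, and `CodecPipeline.create` partitions the codecs once into the three groups (array-to-array, array-to-bytes, bytes-to-bytes) instead of filtering on every call.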
* apply zarr lint rules * zarrita -> v3 * v3/abc [wip] * use abcs plus implementation notes * working on making codecs extensible * adds index_location * adds support for codec entry points * adds tests from zarrita * fixes types * Apply suggestions from code review Co-authored-by: Joe Hamman * remove test codec from pyproject.toml * removes array_metadata from codecs * implemented pr feedback * ChunkMetadata -> ArraySpec * split codecs in 3 groups in CodecPipeline * chunk_metadata -> chunk_spec * merge --------- Co-authored-by: Joseph Hamman Co-authored-by: Joe Hamman --- src/zarr/v3/abc/codec.py | 57 ++++--- src/zarr/v3/array.py | 62 ++++--- src/zarr/v3/array_v2.py | 45 +++-- src/zarr/v3/codecs/__init__.py | 223 +++++++++++++++++-------- src/zarr/v3/codecs/blosc.py | 56 ++++--- src/zarr/v3/codecs/bytes.py | 44 ++--- src/zarr/v3/codecs/crc32c_.py | 16 +- src/zarr/v3/codecs/gzip.py | 23 +-- src/zarr/v3/codecs/registry.py | 5 + src/zarr/v3/codecs/sharding.py | 285 +++++++++++++++++++++----------- src/zarr/v3/codecs/transpose.py | 72 ++++---- src/zarr/v3/codecs/zstd.py | 19 +-- src/zarr/v3/metadata.py | 24 ++- 13 files changed, 581 insertions(+), 350 deletions(-) diff --git a/src/zarr/v3/abc/codec.py b/src/zarr/v3/abc/codec.py index c81f2c976f..0a7c68784f 100644 --- a/src/zarr/v3/abc/codec.py +++ b/src/zarr/v3/abc/codec.py @@ -1,13 +1,3 @@ -# Notes: -# 1. These are missing methods described in the spec. I expected to see these method definitions: -# def compute_encoded_representation_type(self, decoded_representation_type): -# def encode(self, decoded_value): -# def decode(self, encoded_value, decoded_representation_type): -# def partial_decode(self, input_handle, decoded_representation_type, decoded_regions): -# def compute_encoded_size(self, input_size): -# 2. 
Understand why array metadata is included on all codecs - - from __future__ import annotations from abc import abstractmethod, ABC @@ -20,30 +10,39 @@ if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata, CodecMetadata + from zarr.v3.metadata import ( + ArraySpec, + ArrayMetadata, + DataType, + CodecMetadata, + RuntimeConfiguration, + ) class Codec(ABC): is_fixed_size: bool - array_metadata: CoreArrayMetadata + @classmethod @abstractmethod - def compute_encoded_size(self, input_byte_length: int) -> int: + def get_metadata_class(cls) -> Type[CodecMetadata]: pass - def resolve_metadata(self) -> CoreArrayMetadata: - return self.array_metadata - @classmethod @abstractmethod - def from_metadata( - cls, codec_metadata: "CodecMetadata", array_metadata: CoreArrayMetadata - ) -> Codec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> Codec: pass - @classmethod @abstractmethod - def get_metadata_class(cls) -> "Type[CodecMetadata]": + def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> int: + pass + + def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: + return chunk_spec + + def evolve(self, *, ndim: int, data_type: DataType) -> Codec: + return self + + def validate(self, array_metadata: ArrayMetadata) -> None: pass @@ -52,6 +51,8 @@ class ArrayArrayCodec(Codec): async def decode( self, chunk_array: np.ndarray, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: pass @@ -59,6 +60,8 @@ async def decode( async def encode( self, chunk_array: np.ndarray, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: pass @@ -68,6 +71,8 @@ class ArrayBytesCodec(Codec): async def decode( self, chunk_array: BytesLike, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: pass @@ -75,6 +80,8 @@ async def decode( async def encode( self, chunk_array: np.ndarray, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: pass @@ -85,6 +92,8 @@ async def decode_partial( self, store_path: StorePath, selection: SliceSelection, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: pass @@ -96,6 +105,8 @@ async def encode_partial( store_path: StorePath, chunk_array: np.ndarray, selection: SliceSelection, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> None: pass @@ -105,6 +116,8 @@ class BytesBytesCodec(Codec): async def decode( self, chunk_array: BytesLike, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> BytesLike: pass @@ -112,5 +125,7 @@ async def decode( async def encode( self, chunk_array: BytesLike, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: pass diff --git a/src/zarr/v3/array.py b/src/zarr/v3/array.py index d55a5aee43..dadde1658a 100644 --- a/src/zarr/v3/array.py +++ b/src/zarr/v3/array.py @@ -16,11 +16,10 @@ import numpy as np from attr import evolve, frozen -from zarr.v3.abc.codec import ArrayBytesCodecPartialDecodeMixin - # from zarr.v3.array_v2 import ArrayV2 from zarr.v3.codecs import CodecMetadata, CodecPipeline, bytes_codec +from zarr.v3.codecs.registry import get_codec_from_metadata from zarr.v3.common import ( ZARR_JSON, ChunkCoords, @@ -31,6 +30,7 @@ from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice from zarr.v3.metadata import ( ArrayMetadata, + ArraySpec, DataType, 
DefaultChunkKeyEncodingConfigurationMetadata, DefaultChunkKeyEncodingMetadata, @@ -41,7 +41,6 @@ V2ChunkKeyEncodingMetadata, dtype_to_data_type, ) -from zarr.v3.codecs.sharding import ShardingCodec from zarr.v3.store import StoreLike, StorePath, make_store_path from zarr.v3.sync import sync @@ -118,8 +117,11 @@ async def create( metadata=metadata, store_path=store_path, runtime_configuration=runtime_configuration, - codec_pipeline=CodecPipeline.from_metadata( - metadata.codecs, metadata.get_core_metadata(runtime_configuration) + codec_pipeline=CodecPipeline.create( + [ + get_codec_from_metadata(codec).evolve(ndim=len(shape), data_type=data_type) + for codec in codecs + ] ), ) @@ -134,13 +136,17 @@ def from_json( runtime_configuration: RuntimeConfiguration, ) -> AsyncArray: metadata = ArrayMetadata.from_json(zarr_json) + codecs = [ + get_codec_from_metadata(codec).evolve( + ndim=len(metadata.shape), data_type=metadata.data_type + ) + for codec in metadata.codecs + ] async_array = cls( metadata=metadata, store_path=store_path, runtime_configuration=runtime_configuration, - codec_pipeline=CodecPipeline.from_metadata( - metadata.codecs, metadata.get_core_metadata(runtime_configuration) - ), + codec_pipeline=CodecPipeline.create(codecs), ) async_array._validate_metadata() return async_array @@ -240,6 +246,7 @@ def _validate_metadata(self) -> None: self.metadata.dimension_names ), "`dimension_names` and `shape` need to have the same number of dimensions." assert self.metadata.fill_value is not None, "`fill_value` is required." + self.codec_pipeline.validate(self.metadata) async def _read_chunk( self, @@ -248,15 +255,14 @@ async def _read_chunk( out_selection: SliceSelection, out: np.ndarray, ): + chunk_spec = self.metadata.get_chunk_spec(chunk_coords) chunk_key_encoding = self.metadata.chunk_key_encoding chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) store_path = self.store_path / chunk_key - if len(self.codec_pipeline.codecs) == 1 and isinstance( - self.codec_pipeline.codecs[0], ArrayBytesCodecPartialDecodeMixin - ): - chunk_array = await self.codec_pipeline.codecs[0].decode_partial( - store_path, chunk_selection + if self.codec_pipeline.supports_partial_decode: + chunk_array = await self.codec_pipeline.decode_partial( + store_path, chunk_selection, chunk_spec, self.runtime_configuration ) if chunk_array is not None: out[out_selection] = chunk_array @@ -265,7 +271,9 @@ async def _read_chunk( else: chunk_bytes = await store_path.get() if chunk_bytes is not None: - chunk_array = await self.codec_pipeline.decode(chunk_bytes) + chunk_array = await self.codec_pipeline.decode( + chunk_bytes, chunk_spec, self.runtime_configuration + ) tmp = chunk_array[chunk_selection] out[out_selection] = tmp else: @@ -316,6 +324,7 @@ async def _write_chunk( chunk_selection: SliceSelection, out_selection: SliceSelection, ): + chunk_spec = self.metadata.get_chunk_spec(chunk_coords) chunk_key_encoding = self.metadata.chunk_key_encoding chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) store_path = self.store_path / chunk_key @@ -330,17 +339,16 @@ async def _write_chunk( chunk_array.fill(value) else: chunk_array = value[out_selection] - await self._write_chunk_to_store(store_path, chunk_array) + await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) - elif len(self.codec_pipeline.codecs) == 1 and isinstance( - self.codec_pipeline.codecs[0], ShardingCodec - ): - sharding_codec = self.codec_pipeline.codecs[0] + elif self.codec_pipeline.supports_partial_encode: # 
print("encode_partial", chunk_coords, chunk_selection, repr(self)) - await sharding_codec.encode_partial( + await self.codec_pipeline.encode_partial( store_path, value[out_selection], chunk_selection, + chunk_spec, + self.runtime_configuration, ) else: # writing partial chunks @@ -356,18 +364,24 @@ async def _write_chunk( chunk_array.fill(self.metadata.fill_value) else: chunk_array = ( - await self.codec_pipeline.decode(chunk_bytes) + await self.codec_pipeline.decode( + chunk_bytes, chunk_spec, self.runtime_configuration + ) ).copy() # make a writable copy chunk_array[chunk_selection] = value[out_selection] - await self._write_chunk_to_store(store_path, chunk_array) + await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) - async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.ndarray): + async def _write_chunk_to_store( + self, store_path: StorePath, chunk_array: np.ndarray, chunk_spec: ArraySpec + ): if np.all(chunk_array == self.metadata.fill_value): # chunks that only contain fill_value will be removed await store_path.delete() else: - chunk_bytes = await self.codec_pipeline.encode(chunk_array) + chunk_bytes = await self.codec_pipeline.encode( + chunk_array, chunk_spec, self.runtime_configuration + ) if chunk_bytes is None: await store_path.delete() else: diff --git a/src/zarr/v3/array_v2.py b/src/zarr/v3/array_v2.py index a2f26f01b0..dc4cbebd5e 100644 --- a/src/zarr/v3/array_v2.py +++ b/src/zarr/v3/array_v2.py @@ -20,7 +20,7 @@ to_thread, ) from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice -from zarr.v3.metadata import ArrayV2Metadata, RuntimeConfiguration +from zarr.v3.metadata import ArrayV2Metadata, CodecMetadata, RuntimeConfiguration from zarr.v3.store import StoreLike, StorePath, make_store_path from zarr.v3.sync import sync @@ -83,12 +83,14 @@ async def create_async( order=order, dimension_separator=dimension_separator, fill_value=0 if fill_value is None else fill_value, - compressor=numcodecs.get_codec(compressor).get_config() - if compressor is not None - else None, - filters=[numcodecs.get_codec(filter).get_config() for filter in filters] - if filters is not None - else None, + compressor=( + numcodecs.get_codec(compressor).get_config() if compressor is not None else None + ), + filters=( + [numcodecs.get_codec(filter).get_config() for filter in filters] + if filters is not None + else None + ), ) array = cls( metadata=metadata, @@ -441,22 +443,29 @@ async def convert_to_v3_async(self) -> Array: from zarr.v3.common import ZARR_JSON from zarr.v3.metadata import ( ArrayMetadata, + DataType, + RegularChunkGridConfigurationMetadata, + RegularChunkGridMetadata, + V2ChunkKeyEncodingConfigurationMetadata, + V2ChunkKeyEncodingMetadata, + dtype_to_data_type, + ) + from zarr.v3.codecs.blosc import ( BloscCodecConfigurationMetadata, BloscCodecMetadata, + blosc_shuffle_int_to_str, + ) + from zarr.v3.codecs.bytes import ( BytesCodecConfigurationMetadata, BytesCodecMetadata, - CodecMetadata, - DataType, + ) + from zarr.v3.codecs.gzip import ( GzipCodecConfigurationMetadata, GzipCodecMetadata, - RegularChunkGridConfigurationMetadata, - RegularChunkGridMetadata, + ) + from zarr.v3.codecs.transpose import ( TransposeCodecConfigurationMetadata, TransposeCodecMetadata, - V2ChunkKeyEncodingConfigurationMetadata, - V2ChunkKeyEncodingMetadata, - blosc_shuffle_int_to_str, - dtype_to_data_type, ) data_type = DataType[dtype_to_data_type[self.metadata.dtype.str]] @@ -476,7 +485,11 @@ async def convert_to_v3_async(self) -> Array: if 
self.metadata.order == "F": codecs.append( - TransposeCodecMetadata(configuration=TransposeCodecConfigurationMetadata(order="F")) + TransposeCodecMetadata( + configuration=TransposeCodecConfigurationMetadata( + order=tuple(reversed(range(self.metadata.ndim))) + ) + ) ) codecs.append( BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian=endian)) diff --git a/src/zarr/v3/codecs/__init__.py b/src/zarr/v3/codecs/__init__.py index 30a42c8ad5..40c71f6807 100644 --- a/src/zarr/v3/codecs/__init__.py +++ b/src/zarr/v3/codecs/__init__.py @@ -1,9 +1,9 @@ from __future__ import annotations -from functools import reduce from typing import ( TYPE_CHECKING, Iterable, + Iterator, List, Literal, Optional, @@ -11,17 +11,24 @@ Union, ) from warnings import warn +from attr import frozen import numpy as np -from attr import frozen -from zarr.v3.abc.codec import Codec, ArrayArrayCodec, ArrayBytesCodec, BytesBytesCodec -from zarr.v3.common import BytesLike -from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation -from zarr.v3.codecs.registry import get_codec_class +from zarr.v3.abc.codec import ( + ArrayBytesCodecPartialDecodeMixin, + ArrayBytesCodecPartialEncodeMixin, + Codec, + ArrayArrayCodec, + ArrayBytesCodec, + BytesBytesCodec, +) +from zarr.v3.common import BytesLike, SliceSelection +from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation, RuntimeConfiguration +from zarr.v3.store import StorePath if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import ArrayMetadata, ArraySpec from zarr.v3.codecs.sharding import ShardingCodecMetadata from zarr.v3.codecs.blosc import BloscCodecMetadata from zarr.v3.codecs.bytes import BytesCodecMetadata @@ -31,27 +38,23 @@ from zarr.v3.codecs.crc32c_ import Crc32cCodecMetadata +def _find_array_bytes_codec( + codecs: Iterable[Tuple[Codec, ArraySpec]] +) -> Tuple[ArrayBytesCodec, ArraySpec]: + for codec, array_spec in codecs: + if isinstance(codec, ArrayBytesCodec): + return (codec, array_spec) + raise KeyError + + @frozen class CodecPipeline: - codecs: List[Codec] + array_array_codecs: List[ArrayArrayCodec] + array_bytes_codec: ArrayBytesCodec + bytes_bytes_codecs: List[BytesBytesCodec] @classmethod - def from_metadata( - cls, - codecs_metadata: Iterable[CodecMetadata], - array_metadata: CoreArrayMetadata, - ) -> CodecPipeline: - out: List[Codec] = [] - for codec_metadata in codecs_metadata or []: - codec_cls = get_codec_class(codec_metadata.name) - codec = codec_cls.from_metadata(codec_metadata, array_metadata) - out.append(codec) - array_metadata = codec.resolve_metadata() - CodecPipeline._validate_codecs(out, array_metadata) - return cls(out) - - @staticmethod - def _validate_codecs(codecs: List[Codec], array_metadata: CoreArrayMetadata) -> None: + def create(cls, codecs: List[Codec]) -> CodecPipeline: from zarr.v3.codecs.sharding import ShardingCodec assert any( @@ -86,22 +89,6 @@ def _validate_codecs(codecs: List[Codec], array_metadata: CoreArrayMetadata) -> f"ArrayArrayCodec '{type(codec)}' cannot follow after " + f"BytesBytesCodec '{type(prev_codec)}'." ) - - if isinstance(codec, ShardingCodec): - assert len(codec.configuration.chunk_shape) == len(array_metadata.shape), ( - "The shard's `chunk_shape` and array's `shape` need to have the " - + "same number of dimensions." 
- ) - assert all( - s % c == 0 - for s, c in zip( - array_metadata.chunk_shape, - codec.configuration.chunk_shape, - ) - ), ( - "The array's `chunk_shape` needs to be divisible by the " - + "shard's inner `chunk_shape`." - ) prev_codec = codec if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: @@ -110,48 +97,150 @@ def _validate_codecs(codecs: List[Codec], array_metadata: CoreArrayMetadata) -> + "writes, which may lead to inefficient performance." ) - def _array_array_codecs(self) -> List[ArrayArrayCodec]: - return [codec for codec in self.codecs if isinstance(codec, ArrayArrayCodec)] - - def _array_bytes_codec(self) -> ArrayBytesCodec: - return next(codec for codec in self.codecs if isinstance(codec, ArrayBytesCodec)) - - def _bytes_bytes_codecs(self) -> List[BytesBytesCodec]: - return [codec for codec in self.codecs if isinstance(codec, BytesBytesCodec)] + return CodecPipeline( + array_array_codecs=[codec for codec in codecs if isinstance(codec, ArrayArrayCodec)], + array_bytes_codec=[codec for codec in codecs if isinstance(codec, ArrayBytesCodec)][0], + bytes_bytes_codecs=[codec for codec in codecs if isinstance(codec, BytesBytesCodec)], + ) - async def decode(self, chunk_bytes: BytesLike) -> np.ndarray: - for bb_codec in self._bytes_bytes_codecs()[::-1]: - chunk_bytes = await bb_codec.decode(chunk_bytes) + @property + def supports_partial_decode(self) -> bool: + return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance( + self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin + ) - chunk_array = await self._array_bytes_codec().decode(chunk_bytes) + @property + def supports_partial_encode(self) -> bool: + return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance( + self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin + ) - for aa_codec in self._array_array_codecs()[::-1]: - chunk_array = await aa_codec.decode(chunk_array) + def __iter__(self) -> Iterator[Codec]: + for aa_codec in self.array_array_codecs: + yield aa_codec + + yield self.array_bytes_codec + + for bb_codec in self.bytes_bytes_codecs: + yield bb_codec + + def validate(self, array_metadata: ArrayMetadata) -> None: + for codec in self: + codec.validate(array_metadata) + + def _codecs_with_resolved_metadata( + self, array_spec: ArraySpec + ) -> Tuple[ + List[Tuple[ArrayArrayCodec, ArraySpec]], + Tuple[ArrayBytesCodec, ArraySpec], + List[Tuple[BytesBytesCodec, ArraySpec]], + ]: + aa_codecs_with_spec: List[Tuple[ArrayArrayCodec, ArraySpec]] = [] + for aa_codec in self.array_array_codecs: + aa_codecs_with_spec.append((aa_codec, array_spec)) + array_spec = aa_codec.resolve_metadata(array_spec) + + ab_codec_with_spec = (self.array_bytes_codec, array_spec) + array_spec = self.array_bytes_codec.resolve_metadata(array_spec) + + bb_codecs_with_spec: List[Tuple[BytesBytesCodec, ArraySpec]] = [] + for bb_codec in self.bytes_bytes_codecs: + bb_codecs_with_spec.append((bb_codec, array_spec)) + array_spec = bb_codec.resolve_metadata(array_spec) + + return (aa_codecs_with_spec, ab_codec_with_spec, bb_codecs_with_spec) + + async def decode( + self, + chunk_bytes: BytesLike, + array_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> np.ndarray: + ( + aa_codecs_with_spec, + ab_codec_with_spec, + bb_codecs_with_spec, + ) = self._codecs_with_resolved_metadata(array_spec) + + for bb_codec, array_spec in bb_codecs_with_spec[::-1]: + chunk_bytes = await bb_codec.decode(chunk_bytes, array_spec, runtime_configuration) + + ab_codec, 
array_spec = ab_codec_with_spec + chunk_array = await ab_codec.decode(chunk_bytes, array_spec, runtime_configuration) + + for aa_codec, array_spec in aa_codecs_with_spec[::-1]: + chunk_array = await aa_codec.decode(chunk_array, array_spec, runtime_configuration) return chunk_array - async def encode(self, chunk_array: np.ndarray) -> Optional[BytesLike]: - for aa_codec in self._array_array_codecs(): - chunk_array_maybe = await aa_codec.encode(chunk_array) + async def decode_partial( + self, + store_path: StorePath, + selection: SliceSelection, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> Optional[np.ndarray]: + assert self.supports_partial_decode + assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin) + return await self.array_bytes_codec.decode_partial( + store_path, selection, chunk_spec, runtime_configuration + ) + + async def encode( + self, + chunk_array: np.ndarray, + array_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> Optional[BytesLike]: + ( + aa_codecs_with_spec, + ab_codec_with_spec, + bb_codecs_with_spec, + ) = self._codecs_with_resolved_metadata(array_spec) + + for aa_codec, array_spec in aa_codecs_with_spec: + chunk_array_maybe = await aa_codec.encode( + chunk_array, array_spec, runtime_configuration + ) if chunk_array_maybe is None: return None chunk_array = chunk_array_maybe - chunk_bytes_maybe = await self._array_bytes_codec().encode(chunk_array) + ab_codec, array_spec = ab_codec_with_spec + chunk_bytes_maybe = await ab_codec.encode(chunk_array, array_spec, runtime_configuration) if chunk_bytes_maybe is None: return None chunk_bytes = chunk_bytes_maybe - for bb_codec in self._bytes_bytes_codecs(): - chunk_bytes_maybe = await bb_codec.encode(chunk_bytes) + for bb_codec, array_spec in bb_codecs_with_spec: + chunk_bytes_maybe = await bb_codec.encode( + chunk_bytes, array_spec, runtime_configuration + ) if chunk_bytes_maybe is None: return None chunk_bytes = chunk_bytes_maybe return chunk_bytes - def compute_encoded_size(self, byte_length: int) -> int: - return reduce(lambda acc, codec: codec.compute_encoded_size(acc), self.codecs, byte_length) + async def encode_partial( + self, + store_path: StorePath, + chunk_array: np.ndarray, + selection: SliceSelection, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> None: + assert self.supports_partial_encode + assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin) + await self.array_bytes_codec.encode_partial( + store_path, chunk_array, selection, chunk_spec, runtime_configuration + ) + + def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: + for codec in self: + byte_length = codec.compute_encoded_size(byte_length, array_spec) + array_spec = codec.resolve_metadata(array_spec) + return byte_length def blosc_codec( @@ -217,14 +306,16 @@ def crc32c_codec() -> "Crc32cCodecMetadata": def sharding_codec( chunk_shape: Tuple[int, ...], - codecs: Optional[List[CodecMetadata]] = None, - index_codecs: Optional[List[CodecMetadata]] = None, + codecs: Optional[Iterable[CodecMetadata]] = None, + index_codecs: Optional[Iterable[CodecMetadata]] = None, index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end, ) -> "ShardingCodecMetadata": from zarr.v3.codecs.sharding import ShardingCodecMetadata, ShardingCodecConfigurationMetadata - codecs = codecs or [bytes_codec()] - index_codecs = index_codecs or [bytes_codec(), crc32c_codec()] + codecs = tuple(codecs) if codecs is not 
None else (bytes_codec(),) + index_codecs = ( + tuple(index_codecs) if index_codecs is not None else (bytes_codec(), crc32c_codec()) + ) return ShardingCodecMetadata( configuration=ShardingCodecConfigurationMetadata( chunk_shape, codecs, index_codecs, index_location diff --git a/src/zarr/v3/codecs/blosc.py b/src/zarr/v3/codecs/blosc.py index 8fb32faaa7..efc862e636 100644 --- a/src/zarr/v3/codecs/blosc.py +++ b/src/zarr/v3/codecs/blosc.py @@ -1,4 +1,5 @@ from __future__ import annotations +from functools import lru_cache from typing import ( TYPE_CHECKING, @@ -10,16 +11,15 @@ import numcodecs import numpy as np -from attr import asdict, evolve, frozen, field +from attr import evolve, frozen, field from numcodecs.blosc import Blosc from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec from zarr.v3.common import BytesLike, to_thread -from zarr.v3.metadata import CodecMetadata if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import ArraySpec, CodecMetadata, DataType, RuntimeConfiguration BloscShuffle = Literal["noshuffle", "shuffle", "bitshuffle"] @@ -52,47 +52,55 @@ class BloscCodecMetadata: @frozen class BloscCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata configuration: BloscCodecConfigurationMetadata - blosc_codec: Blosc is_fixed_size = False @classmethod - def from_metadata( - cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata - ) -> BloscCodec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> BloscCodec: assert isinstance(codec_metadata, BloscCodecMetadata) - configuration = codec_metadata.configuration - if configuration.typesize == 0: - configuration = evolve(configuration, typesize=array_metadata.data_type.byte_count) - config_dict = asdict(codec_metadata.configuration) - config_dict.pop("typesize", None) - map_shuffle_str_to_int = {"noshuffle": 0, "shuffle": 1, "bitshuffle": 2} - config_dict["shuffle"] = map_shuffle_str_to_int[config_dict["shuffle"]] - return cls( - array_metadata=array_metadata, - configuration=configuration, - blosc_codec=Blosc.from_config(config_dict), - ) + return cls(configuration=codec_metadata.configuration) @classmethod def get_metadata_class(cls) -> Type[BloscCodecMetadata]: return BloscCodecMetadata + def evolve(self, *, data_type: DataType, **_kwargs) -> BloscCodec: + new_codec = self + if new_codec.configuration.typesize == 0: + new_configuration = evolve(new_codec.configuration, typesize=data_type.byte_count) + new_codec = evolve(new_codec, configuration=new_configuration) + + return new_codec + + @lru_cache + def get_blosc_codec(self) -> Blosc: + map_shuffle_str_to_int = {"noshuffle": 0, "shuffle": 1, "bitshuffle": 2} + config_dict = { + "cname": self.configuration.cname, + "clevel": self.configuration.clevel, + "shuffle": map_shuffle_str_to_int[self.configuration.shuffle], + "blocksize": self.configuration.blocksize, + } + return Blosc.from_config(config_dict) + async def decode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: - return await to_thread(self.blosc_codec.decode, chunk_bytes) + return await to_thread(self.get_blosc_codec().decode, chunk_bytes) async def encode( self, chunk_bytes: bytes, + chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: - chunk_array = np.frombuffer(chunk_bytes, dtype=self.array_metadata.dtype) - return await to_thread(self.blosc_codec.encode, chunk_array) + chunk_array = 
np.frombuffer(chunk_bytes, dtype=chunk_spec.dtype) + return await to_thread(self.get_blosc_codec().encode, chunk_array) - def compute_encoded_size(self, _input_byte_length: int) -> int: + def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/v3/codecs/bytes.py b/src/zarr/v3/codecs/bytes.py index 80a3f155d0..e05ccb6abc 100644 --- a/src/zarr/v3/codecs/bytes.py +++ b/src/zarr/v3/codecs/bytes.py @@ -13,15 +13,17 @@ from zarr.v3.abc.codec import ArrayBytesCodec from zarr.v3.codecs.registry import register_codec from zarr.v3.common import BytesLike -from zarr.v3.metadata import CodecMetadata if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import CodecMetadata, ArraySpec, ArrayMetadata, RuntimeConfiguration + + +Endian = Literal["big", "little"] @frozen class BytesCodecConfigurationMetadata: - endian: Optional[Literal["big", "little"]] = "little" + endian: Optional[Endian] = "little" @frozen @@ -32,28 +34,24 @@ class BytesCodecMetadata: @frozen class BytesCodec(ArrayBytesCodec): - array_metadata: CoreArrayMetadata configuration: BytesCodecConfigurationMetadata is_fixed_size = True @classmethod - def from_metadata( - cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata - ) -> BytesCodec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> BytesCodec: assert isinstance(codec_metadata, BytesCodecMetadata) - assert ( - array_metadata.dtype.itemsize == 1 or codec_metadata.configuration.endian is not None - ), "The `endian` configuration needs to be specified for multi-byte data types." - return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - ) + return cls(configuration=codec_metadata.configuration) @classmethod def get_metadata_class(cls) -> Type[BytesCodecMetadata]: return BytesCodecMetadata - def _get_byteorder(self, array: np.ndarray) -> Literal["big", "little"]: + def validate(self, array_metadata: ArrayMetadata) -> None: + assert ( + not array_metadata.data_type.has_endianness or self.configuration.endian is not None + ), "The `endian` configuration needs to be specified for multi-byte data types." 
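One subtlety behind `_get_byteorder` in the hunk below: numpy reports the machine-native byte order as `"="`, so the explicit `"<"`/`">"` branches only fire for dtypes with a non-native order spelled out. A small sketch of that behavior, plain numpy (not part of the patch):

    import sys
    import numpy as np

    swapped = ">" if sys.byteorder == "little" else "<"
    assert np.dtype(np.int32).byteorder == "="            # native order collapses to '='
    assert np.dtype(swapped + "i4").byteorder == swapped  # non-native code is preserved
    assert np.dtype("u1").byteorder == "|"                # single-byte dtypes: not applicable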
+ + def _get_byteorder(self, array: np.ndarray) -> Endian: if array.dtype.byteorder == "<": return "little" elif array.dtype.byteorder == ">": @@ -66,27 +64,31 @@ def _get_byteorder(self, array: np.ndarray) -> Literal["big", "little"]: async def decode( self, chunk_bytes: BytesLike, + chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: - if self.array_metadata.dtype.itemsize > 0: + if chunk_spec.dtype.itemsize > 0: if self.configuration.endian == "little": prefix = "<" else: prefix = ">" - dtype = np.dtype(f"{prefix}{self.array_metadata.data_type.to_numpy_shortname()}") + dtype = np.dtype(f"{prefix}{chunk_spec.data_type.to_numpy_shortname()}") else: - dtype = np.dtype(f"|{self.array_metadata.data_type.to_numpy_shortname()}") + dtype = np.dtype(f"|{chunk_spec.data_type.to_numpy_shortname()}") chunk_array = np.frombuffer(chunk_bytes, dtype) # ensure correct chunk shape - if chunk_array.shape != self.array_metadata.chunk_shape: + if chunk_array.shape != chunk_spec.shape: chunk_array = chunk_array.reshape( - self.array_metadata.chunk_shape, + chunk_spec.shape, ) return chunk_array async def encode( self, chunk_array: np.ndarray, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: if chunk_array.dtype.itemsize > 1: byteorder = self._get_byteorder(chunk_array) @@ -95,7 +97,7 @@ async def encode( chunk_array = chunk_array.astype(new_dtype) return chunk_array.tobytes() - def compute_encoded_size(self, input_byte_length: int) -> int: + def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length diff --git a/src/zarr/v3/codecs/crc32c_.py b/src/zarr/v3/codecs/crc32c_.py index c4fab3c9b9..4f8b9c7b0b 100644 --- a/src/zarr/v3/codecs/crc32c_.py +++ b/src/zarr/v3/codecs/crc32c_.py @@ -14,10 +14,9 @@ from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec from zarr.v3.common import BytesLike -from zarr.v3.metadata import CodecMetadata if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration @frozen @@ -27,15 +26,12 @@ class Crc32cCodecMetadata: @frozen class Crc32cCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata is_fixed_size = True @classmethod - def from_metadata( - cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata - ) -> Crc32cCodec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> Crc32cCodec: assert isinstance(codec_metadata, Crc32cCodecMetadata) - return cls(array_metadata=array_metadata) + return cls() @classmethod def get_metadata_class(cls) -> Type[Crc32cCodecMetadata]: @@ -44,6 +40,8 @@ def get_metadata_class(cls) -> Type[Crc32cCodecMetadata]: async def decode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: crc32_bytes = chunk_bytes[-4:] inner_bytes = chunk_bytes[:-4] @@ -54,10 +52,12 @@ async def decode( async def encode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: return chunk_bytes + np.uint32(crc32c(chunk_bytes)).tobytes() - def compute_encoded_size(self, input_byte_length: int) -> int: + def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length + 4 diff --git a/src/zarr/v3/codecs/gzip.py b/src/zarr/v3/codecs/gzip.py index be1ebcdc9f..a3fafc1382 100644 --- a/src/zarr/v3/codecs/gzip.py +++ 
b/src/zarr/v3/codecs/gzip.py @@ -13,10 +13,9 @@ from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec from zarr.v3.common import BytesLike, to_thread -from zarr.v3.metadata import CodecMetadata if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration @frozen @@ -32,20 +31,14 @@ class GzipCodecMetadata: @frozen class GzipCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata configuration: GzipCodecConfigurationMetadata is_fixed_size = True @classmethod - def from_metadata( - cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata - ) -> GzipCodec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> GzipCodec: assert isinstance(codec_metadata, GzipCodecMetadata) - return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - ) + return cls(configuration=codec_metadata.configuration) @classmethod def get_metadata_class(cls) -> Type[GzipCodecMetadata]: @@ -54,16 +47,24 @@ def get_metadata_class(cls) -> Type[GzipCodecMetadata]: async def decode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: return await to_thread(GZip(self.configuration.level).decode, chunk_bytes) async def encode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: return await to_thread(GZip(self.configuration.level).encode, chunk_bytes) - def compute_encoded_size(self, _input_byte_length: int) -> int: + def compute_encoded_size( + self, + _input_byte_length: int, + _chunk_spec: ArraySpec, + ) -> int: raise NotImplementedError diff --git a/src/zarr/v3/codecs/registry.py b/src/zarr/v3/codecs/registry.py index 642c0feebb..bdd9a5765d 100644 --- a/src/zarr/v3/codecs/registry.py +++ b/src/zarr/v3/codecs/registry.py @@ -45,6 +45,11 @@ def _get_codec_item(key: str) -> CodecRegistryItem: raise KeyError(key) +def get_codec_from_metadata(val: CodecMetadata) -> Codec: + key = val.name + return _get_codec_item(key).codec_cls.from_metadata(val) + + def get_codec_metadata_class(key: str) -> Type[CodecMetadata]: return _get_codec_item(key).codec_metadata_cls diff --git a/src/zarr/v3/codecs/sharding.py b/src/zarr/v3/codecs/sharding.py index 12c84ade29..26020f160f 100644 --- a/src/zarr/v3/codecs/sharding.py +++ b/src/zarr/v3/codecs/sharding.py @@ -1,4 +1,5 @@ from __future__ import annotations +from functools import cached_property, lru_cache from typing import ( Awaitable, @@ -23,7 +24,7 @@ ) from zarr.v3.codecs import CodecPipeline -from zarr.v3.codecs.registry import register_codec +from zarr.v3.codecs.registry import get_codec_from_metadata, register_codec from zarr.v3.common import ( BytesLike, ChunkCoords, @@ -38,10 +39,14 @@ morton_order_iter, ) from zarr.v3.metadata import ( - CoreArrayMetadata, + ArrayMetadata, + ArraySpec, DataType, CodecMetadata, + RegularChunkGridMetadata, ShardingCodecIndexLocation, + RuntimeConfiguration, + runtime_configuration as make_runtime_configuration, ) from zarr.v3.store import StorePath @@ -51,8 +56,8 @@ @frozen class ShardingCodecConfigurationMetadata: chunk_shape: ChunkCoords - codecs: List["CodecMetadata"] - index_codecs: List["CodecMetadata"] + codecs: Tuple[CodecMetadata, ...] + index_codecs: Tuple[CodecMetadata, ...] 
index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end @@ -66,6 +71,10 @@ class _ShardIndex(NamedTuple): # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) offsets_and_lengths: np.ndarray + @property + def chunks_per_shard(self) -> ChunkCoords: + return self.offsets_and_lengths.shape[0:-1] + def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords: return tuple( chunk_i % shard_i @@ -126,8 +135,10 @@ class _ShardProxy(Mapping): buf: BytesLike @classmethod - async def from_bytes(cls, buf: BytesLike, codec: ShardingCodec) -> _ShardProxy: - shard_index_size = codec._shard_index_size() + async def from_bytes( + cls, buf: BytesLike, codec: ShardingCodec, chunks_per_shard: ChunkCoords + ) -> _ShardProxy: + shard_index_size = codec._shard_index_size(chunks_per_shard) obj = cls() obj.buf = memoryview(buf) if codec.configuration.index_location == ShardingCodecIndexLocation.start: @@ -135,7 +146,7 @@ async def from_bytes(cls, buf: BytesLike, codec: ShardingCodec) -> _ShardProxy: else: shard_index_bytes = obj.buf[-shard_index_size:] - obj.index = await codec._decode_shard_index(shard_index_bytes) + obj.index = await codec._decode_shard_index(shard_index_bytes, chunks_per_shard) return obj @classmethod @@ -215,67 +226,49 @@ async def finalize( class ShardingCodec( ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin ): - array_metadata: CoreArrayMetadata configuration: ShardingCodecConfigurationMetadata - codec_pipeline: CodecPipeline - index_codec_pipeline: CodecPipeline - chunks_per_shard: Tuple[int, ...] @classmethod def from_metadata( cls, codec_metadata: CodecMetadata, - array_metadata: CoreArrayMetadata, ) -> ShardingCodec: assert isinstance(codec_metadata, ShardingCodecMetadata) - - chunks_per_shard = tuple( - s // c - for s, c in zip( - array_metadata.chunk_shape, - codec_metadata.configuration.chunk_shape, - ) - ) - # rewriting the metadata to scope it to the shard - shard_metadata = CoreArrayMetadata( - shape=array_metadata.chunk_shape, - chunk_shape=codec_metadata.configuration.chunk_shape, - data_type=array_metadata.data_type, - fill_value=array_metadata.fill_value, - runtime_configuration=array_metadata.runtime_configuration, - ) - codec_pipeline = CodecPipeline.from_metadata( - codec_metadata.configuration.codecs, shard_metadata - ) - index_codec_pipeline = CodecPipeline.from_metadata( - codec_metadata.configuration.index_codecs, - CoreArrayMetadata( - shape=chunks_per_shard + (2,), - chunk_shape=chunks_per_shard + (2,), - data_type=DataType.uint64, - fill_value=MAX_UINT_64, - runtime_configuration=array_metadata.runtime_configuration, - ), - ) - return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - codec_pipeline=codec_pipeline, - index_codec_pipeline=index_codec_pipeline, - chunks_per_shard=chunks_per_shard, - ) + return cls(configuration=codec_metadata.configuration) @classmethod def get_metadata_class(cls) -> Type[ShardingCodecMetadata]: return ShardingCodecMetadata + def validate(self, array_metadata: ArrayMetadata) -> None: + assert len(self.configuration.chunk_shape) == array_metadata.ndim, ( + "The shard's `chunk_shape` and array's `shape` need to have the " + + "same number of dimensions." + ) + assert isinstance( + array_metadata.chunk_grid, RegularChunkGridMetadata + ), "Sharding is only compatible with regular chunk grids." 
+ assert all( + s % c == 0 + for s, c in zip( + array_metadata.chunk_grid.configuration.chunk_shape, + self.configuration.chunk_shape, + ) + ), ( + "The array's `chunk_shape` needs to be divisible by the " + + "shard's inner `chunk_shape`." + ) + async def decode( self, shard_bytes: BytesLike, + shard_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: # print("decode") - shard_shape = self.array_metadata.chunk_shape + shard_shape = shard_spec.shape chunk_shape = self.configuration.chunk_shape + chunks_per_shard = self._get_chunks_per_shard(shard_spec) indexer = BasicIndexer( tuple(slice(0, s) for s in shard_shape), @@ -286,13 +279,13 @@ async def decode( # setup output array out = np.zeros( shard_shape, - dtype=self.array_metadata.dtype, - order=self.array_metadata.runtime_configuration.order, + dtype=shard_spec.dtype, + order=runtime_configuration.order, ) - shard_dict = await _ShardProxy.from_bytes(shard_bytes, self) + shard_dict = await _ShardProxy.from_bytes(shard_bytes, self, chunks_per_shard) if shard_dict.index.is_all_empty(): - out.fill(self.array_metadata.fill_value) + out.fill(shard_spec.fill_value) return out # decoding chunks and writing them into the output buffer @@ -303,12 +296,14 @@ async def decode( chunk_coords, chunk_selection, out_selection, + shard_spec, + runtime_configuration, out, ) for chunk_coords, chunk_selection, out_selection in indexer ], self._read_chunk, - self.array_metadata.runtime_configuration.concurrency, + runtime_configuration.concurrency, ) return out @@ -317,9 +312,12 @@ async def decode_partial( self, store_path: StorePath, selection: SliceSelection, + shard_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: - shard_shape = self.array_metadata.chunk_shape + shard_shape = shard_spec.shape chunk_shape = self.configuration.chunk_shape + chunks_per_shard = self._get_chunks_per_shard(shard_spec) indexer = BasicIndexer( selection, @@ -330,8 +328,8 @@ async def decode_partial( # setup output array out = np.zeros( indexer.shape, - dtype=self.array_metadata.dtype, - order=self.array_metadata.runtime_configuration.order, + dtype=shard_spec.dtype, + order=runtime_configuration.order, ) indexed_chunks = list(indexer) @@ -339,15 +337,15 @@ async def decode_partial( # reading bytes of all requested chunks shard_dict: Mapping[ChunkCoords, BytesLike] = {} - if self._is_total_shard(all_chunk_coords): + if self._is_total_shard(all_chunk_coords, chunks_per_shard): # read entire shard - shard_dict_maybe = await self._load_full_shard_maybe(store_path) + shard_dict_maybe = await self._load_full_shard_maybe(store_path, chunks_per_shard) if shard_dict_maybe is None: return None shard_dict = shard_dict_maybe else: # read some chunks within the shard - shard_index = await self._load_shard_index_maybe(store_path) + shard_index = await self._load_shard_index_maybe(store_path, chunks_per_shard) if shard_index is None: return None shard_dict = {} @@ -366,12 +364,14 @@ async def decode_partial( chunk_coords, chunk_selection, out_selection, + shard_spec, + runtime_configuration, out, ) for chunk_coords, chunk_selection, out_selection in indexed_chunks ], self._read_chunk, - self.array_metadata.runtime_configuration.concurrency, + runtime_configuration.concurrency, ) return out @@ -382,22 +382,30 @@ async def _read_chunk( chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, + shard_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, out: np.ndarray, ): + 
chunk_spec = self._get_chunk_spec(shard_spec) chunk_bytes = shard_dict.get(chunk_coords, None) if chunk_bytes is not None: - chunk_array = await self.codec_pipeline.decode(chunk_bytes) + chunk_array = await self._codec_pipeline.decode( + chunk_bytes, chunk_spec, runtime_configuration + ) tmp = chunk_array[chunk_selection] out[out_selection] = tmp else: - out[out_selection] = self.array_metadata.fill_value + out[out_selection] = chunk_spec.fill_value async def encode( self, shard_array: np.ndarray, + shard_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: - shard_shape = self.array_metadata.chunk_shape + shard_shape = shard_spec.shape chunk_shape = self.configuration.chunk_shape + chunks_per_shard = self._get_chunks_per_shard(shard_spec) indexer = list( BasicIndexer( @@ -419,14 +427,17 @@ async def _write_chunk( # handling writing partial chunks chunk_array = np.empty( chunk_shape, - dtype=self.array_metadata.dtype, + dtype=shard_spec.dtype, ) - chunk_array.fill(self.array_metadata.fill_value) + chunk_array.fill(shard_spec.fill_value) chunk_array[chunk_selection] = shard_array[out_selection] - if not np.array_equiv(chunk_array, self.array_metadata.fill_value): + if not np.array_equiv(chunk_array, shard_spec.fill_value): + chunk_spec = self._get_chunk_spec(shard_spec) return ( chunk_coords, - await self.codec_pipeline.encode(chunk_array), + await self._codec_pipeline.encode( + chunk_array, chunk_spec, runtime_configuration + ), ) return (chunk_coords, None) @@ -437,12 +448,12 @@ async def _write_chunk( for chunk_coords, chunk_selection, out_selection in indexer ], _write_chunk, - self.array_metadata.runtime_configuration.concurrency, + runtime_configuration.concurrency, ) if len(encoded_chunks) == 0: return None - shard_builder = _ShardBuilder.create_empty(self.chunks_per_shard) + shard_builder = _ShardBuilder.create_empty(chunks_per_shard) for chunk_coords, chunk_bytes in encoded_chunks: if chunk_bytes is not None: shard_builder.append(chunk_coords, chunk_bytes) @@ -456,15 +467,19 @@ async def encode_partial( store_path: StorePath, shard_array: np.ndarray, selection: SliceSelection, + shard_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, ) -> None: # print("encode_partial") - shard_shape = self.array_metadata.chunk_shape + shard_shape = shard_spec.shape chunk_shape = self.configuration.chunk_shape + chunks_per_shard = self._get_chunks_per_shard(shard_spec) + chunk_spec = self._get_chunk_spec(shard_spec) old_shard_dict = ( - await self._load_full_shard_maybe(store_path) - ) or _ShardProxy.create_empty(self.chunks_per_shard) - new_shard_builder = _ShardBuilder.create_empty(self.chunks_per_shard) + await self._load_full_shard_maybe(store_path, chunks_per_shard) + ) or _ShardProxy.create_empty(chunks_per_shard) + new_shard_builder = _ShardBuilder.create_empty(chunks_per_shard) tombstones: Set[ChunkCoords] = set() indexer = list( @@ -492,19 +507,23 @@ async def _write_chunk( if chunk_bytes is None: chunk_array = np.empty( self.configuration.chunk_shape, - dtype=self.array_metadata.dtype, + dtype=shard_spec.dtype, ) - chunk_array.fill(self.array_metadata.fill_value) + chunk_array.fill(shard_spec.fill_value) else: chunk_array = ( - await self.codec_pipeline.decode(chunk_bytes) + await self._codec_pipeline.decode( + chunk_bytes, chunk_spec, runtime_configuration + ) ).copy() # make a writable copy chunk_array[chunk_selection] = shard_array[out_selection] - if not np.array_equiv(chunk_array, self.array_metadata.fill_value): + if not 
np.array_equiv(chunk_array, shard_spec.fill_value): return ( chunk_coords, - await self.codec_pipeline.encode(chunk_array), + await self._codec_pipeline.encode( + chunk_array, chunk_spec, runtime_configuration + ), ) else: return (chunk_coords, None) @@ -519,7 +538,7 @@ async def _write_chunk( for chunk_coords, chunk_selection, out_selection in indexer ], _write_chunk, - self.array_metadata.runtime_configuration.concurrency, + runtime_configuration.concurrency, ) for chunk_coords, chunk_bytes in encoded_chunks: @@ -529,7 +548,10 @@ async def _write_chunk( tombstones.add(chunk_coords) shard_builder = _ShardBuilder.merge_with_morton_order( - self.chunks_per_shard, tombstones, new_shard_builder, old_shard_dict + chunks_per_shard, + tombstones, + new_shard_builder, + old_shard_dict, ) if shard_builder.index.is_all_empty(): @@ -542,44 +564,107 @@ async def _write_chunk( ) ) - def _is_total_shard(self, all_chunk_coords: Set[ChunkCoords]) -> bool: - return len(all_chunk_coords) == product(self.chunks_per_shard) and all( - chunk_coords in all_chunk_coords for chunk_coords in c_order_iter(self.chunks_per_shard) + def _is_total_shard( + self, all_chunk_coords: Set[ChunkCoords], chunks_per_shard: ChunkCoords + ) -> bool: + return len(all_chunk_coords) == product(chunks_per_shard) and all( + chunk_coords in all_chunk_coords for chunk_coords in c_order_iter(chunks_per_shard) ) - async def _decode_shard_index(self, index_bytes: BytesLike) -> _ShardIndex: - return _ShardIndex(await self.index_codec_pipeline.decode(index_bytes)) + async def _decode_shard_index( + self, index_bytes: BytesLike, chunks_per_shard: ChunkCoords + ) -> _ShardIndex: + return _ShardIndex( + await self._index_codec_pipeline.decode( + index_bytes, + self._get_index_chunk_spec(chunks_per_shard), + make_runtime_configuration("C"), + ) + ) async def _encode_shard_index(self, index: _ShardIndex) -> BytesLike: - index_bytes = await self.index_codec_pipeline.encode(index.offsets_and_lengths) + index_bytes = await self._index_codec_pipeline.encode( + index.offsets_and_lengths, + self._get_index_chunk_spec(index.chunks_per_shard), + make_runtime_configuration("C"), + ) assert index_bytes is not None return index_bytes - def _shard_index_size(self) -> int: - return self.index_codec_pipeline.compute_encoded_size(16 * product(self.chunks_per_shard)) + def _shard_index_size(self, chunks_per_shard: ChunkCoords) -> int: + return self._index_codec_pipeline.compute_encoded_size( + 16 * product(chunks_per_shard), self._get_index_chunk_spec(chunks_per_shard) + ) - async def _load_shard_index_maybe(self, store_path: StorePath) -> Optional[_ShardIndex]: - shard_index_size = self._shard_index_size() + @lru_cache + def _get_index_chunk_spec(self, chunks_per_shard: ChunkCoords) -> ArraySpec: + return ArraySpec( + shape=chunks_per_shard + (2,), + data_type=DataType.uint64, + fill_value=MAX_UINT_64, + ) + + @lru_cache + def _get_chunk_spec(self, shard_spec: ArraySpec) -> ArraySpec: + return ArraySpec( + shape=self.configuration.chunk_shape, + data_type=shard_spec.data_type, + fill_value=shard_spec.fill_value, + ) + + @lru_cache + def _get_chunks_per_shard(self, shard_spec: ArraySpec) -> ChunkCoords: + return tuple( + s // c + for s, c in zip( + shard_spec.shape, + self.configuration.chunk_shape, + ) + ) + + @cached_property + def _index_codec_pipeline(self) -> CodecPipeline: + return CodecPipeline.create( + [get_codec_from_metadata(c) for c in self.configuration.index_codecs] + ) + + @cached_property + def _codec_pipeline(self) -> CodecPipeline: + return 
CodecPipeline.create([get_codec_from_metadata(c) for c in self.configuration.codecs]) + + async def _load_shard_index_maybe( + self, store_path: StorePath, chunks_per_shard: ChunkCoords + ) -> Optional[_ShardIndex]: + shard_index_size = self._shard_index_size(chunks_per_shard) if self.configuration.index_location == ShardingCodecIndexLocation.start: index_bytes = await store_path.get((0, shard_index_size)) else: index_bytes = await store_path.get((-shard_index_size, None)) if index_bytes is not None: - return await self._decode_shard_index(index_bytes) + return await self._decode_shard_index(index_bytes, chunks_per_shard) return None - async def _load_shard_index(self, store_path: StorePath) -> _ShardIndex: - return (await self._load_shard_index_maybe(store_path)) or _ShardIndex.create_empty( - self.chunks_per_shard - ) + async def _load_shard_index( + self, store_path: StorePath, chunks_per_shard: ChunkCoords + ) -> _ShardIndex: + return ( + await self._load_shard_index_maybe(store_path, chunks_per_shard) + ) or _ShardIndex.create_empty(chunks_per_shard) - async def _load_full_shard_maybe(self, store_path: StorePath) -> Optional[_ShardProxy]: + async def _load_full_shard_maybe( + self, store_path: StorePath, chunks_per_shard: ChunkCoords + ) -> Optional[_ShardProxy]: shard_bytes = await store_path.get() - return await _ShardProxy.from_bytes(shard_bytes, self) if shard_bytes else None + return ( + await _ShardProxy.from_bytes(shard_bytes, self, chunks_per_shard) + if shard_bytes + else None + ) - def compute_encoded_size(self, input_byte_length: int) -> int: - return input_byte_length + self._shard_index_size() + def compute_encoded_size(self, input_byte_length: int, shard_spec: ArraySpec) -> int: + chunks_per_shard = self._get_chunks_per_shard(shard_spec) + return input_byte_length + self._shard_index_size(chunks_per_shard) register_codec("sharding_indexed", ShardingCodec) diff --git a/src/zarr/v3/codecs/transpose.py b/src/zarr/v3/codecs/transpose.py index d160f2a88d..de6eb0a480 100644 --- a/src/zarr/v3/codecs/transpose.py +++ b/src/zarr/v3/codecs/transpose.py @@ -9,14 +9,13 @@ ) import numpy as np -from attr import frozen, field +from attr import evolve, frozen, field from zarr.v3.abc.codec import ArrayArrayCodec from zarr.v3.codecs.registry import register_codec -from zarr.v3.metadata import CodecMetadata if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration @frozen @@ -32,69 +31,60 @@ class TransposeCodecMetadata: @frozen class TransposeCodec(ArrayArrayCodec): - array_metadata: CoreArrayMetadata order: Tuple[int, ...] 
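+    # `order` may still hold the legacy "C"/"F" shorthand right after
+    # `from_metadata()`; `evolve()` below normalizes it to an explicit axis tuple.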
is_fixed_size = True @classmethod - def from_metadata( - cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata - ) -> TransposeCodec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> TransposeCodec: assert isinstance(codec_metadata, TransposeCodecMetadata) + return cls(order=codec_metadata.configuration.order) - configuration = codec_metadata.configuration + def evolve(self, *, ndim: int, **_kwargs) -> TransposeCodec: # Compatibility with older version of ZEP1 - if configuration.order == "F": # type: ignore - order = tuple(array_metadata.ndim - x - 1 for x in range(array_metadata.ndim)) + if self.order == "F": # type: ignore + order = tuple(ndim - x - 1 for x in range(ndim)) - elif configuration.order == "C": # type: ignore - order = tuple(range(array_metadata.ndim)) + elif self.order == "C": # type: ignore + order = tuple(range(ndim)) else: - assert len(configuration.order) == array_metadata.ndim, ( + assert len(self.order) == ndim, ( "The `order` tuple needs have as many entries as " - + f"there are dimensions in the array. Got: {configuration.order}" + + f"there are dimensions in the array. Got: {self.order}" ) - assert len(configuration.order) == len(set(configuration.order)), ( - "There must not be duplicates in the `order` tuple. " - + f"Got: {configuration.order}" + assert len(self.order) == len(set(self.order)), ( + "There must not be duplicates in the `order` tuple. " + f"Got: {self.order}" ) - assert all(0 <= x < array_metadata.ndim for x in configuration.order), ( + assert all(0 <= x < ndim for x in self.order), ( "All entries in the `order` tuple must be between 0 and " - + f"the number of dimensions in the array. Got: {configuration.order}" + + f"the number of dimensions in the array. Got: {self.order}" ) - order = tuple(configuration.order) + order = tuple(self.order) - return cls( - array_metadata=array_metadata, - order=order, - ) + if order != self.order: + return evolve(self, order=order) + return self @classmethod def get_metadata_class(cls) -> Type[TransposeCodecMetadata]: return TransposeCodecMetadata - def resolve_metadata(self) -> CoreArrayMetadata: - from zarr.v3.metadata import CoreArrayMetadata - - return CoreArrayMetadata( - shape=tuple( - self.array_metadata.shape[self.order[i]] for i in range(self.array_metadata.ndim) - ), - chunk_shape=tuple( - self.array_metadata.chunk_shape[self.order[i]] - for i in range(self.array_metadata.ndim) - ), - data_type=self.array_metadata.data_type, - fill_value=self.array_metadata.fill_value, - runtime_configuration=self.array_metadata.runtime_configuration, + def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: + from zarr.v3.metadata import ArraySpec + + return ArraySpec( + shape=tuple(chunk_spec.shape[self.order[i]] for i in range(chunk_spec.ndim)), + data_type=chunk_spec.data_type, + fill_value=chunk_spec.fill_value, ) async def decode( self, chunk_array: np.ndarray, + chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: - inverse_order = [0 for _ in range(self.array_metadata.ndim)] + inverse_order = [0] * chunk_spec.ndim for x, i in enumerate(self.order): inverse_order[x] = i chunk_array = chunk_array.transpose(inverse_order) @@ -103,11 +93,13 @@ async def decode( async def encode( self, chunk_array: np.ndarray, + chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: chunk_array = chunk_array.transpose(self.order) return chunk_array - def compute_encoded_size(self, input_byte_length: int) -> int: + def 
compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length diff --git a/src/zarr/v3/codecs/zstd.py b/src/zarr/v3/codecs/zstd.py index e66d9e0700..59ce1cf088 100644 --- a/src/zarr/v3/codecs/zstd.py +++ b/src/zarr/v3/codecs/zstd.py @@ -13,10 +13,9 @@ from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec from zarr.v3.common import BytesLike, to_thread -from zarr.v3.metadata import CodecMetadata if TYPE_CHECKING: - from zarr.v3.metadata import CoreArrayMetadata + from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration @frozen @@ -33,19 +32,13 @@ class ZstdCodecMetadata: @frozen class ZstdCodec(BytesBytesCodec): - array_metadata: CoreArrayMetadata configuration: ZstdCodecConfigurationMetadata is_fixed_size = True @classmethod - def from_metadata( - cls, codec_metadata: CodecMetadata, array_metadata: CoreArrayMetadata - ) -> ZstdCodec: + def from_metadata(cls, codec_metadata: CodecMetadata) -> ZstdCodec: assert isinstance(codec_metadata, ZstdCodecMetadata) - return cls( - array_metadata=array_metadata, - configuration=codec_metadata.configuration, - ) + return cls(configuration=codec_metadata.configuration) @classmethod def get_metadata_class(cls) -> Type[ZstdCodecMetadata]: @@ -64,16 +57,20 @@ def _decompress(self, data: bytes) -> bytes: async def decode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: return await to_thread(self._decompress, chunk_bytes) async def encode( self, chunk_bytes: bytes, + _chunk_spec: ArraySpec, + _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: return await to_thread(self._compress, chunk_bytes) - def compute_encoded_size(self, _input_byte_length: int) -> int: + def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/v3/metadata.py b/src/zarr/v3/metadata.py index 53b300d3f8..c6dd9f1f46 100644 --- a/src/zarr/v3/metadata.py +++ b/src/zarr/v3/metadata.py @@ -24,6 +24,10 @@ def runtime_configuration( return RuntimeConfiguration(order=order, concurrency=concurrency) +# For type checking +_bool = bool + + class DataType(Enum): bool = "bool" int8 = "int8" @@ -54,6 +58,11 @@ def byte_count(self) -> int: } return data_type_byte_counts[self] + @property + def has_endianness(self) -> _bool: + # This might change in the future, e.g. 
for a complex with 2 8-bit floats + return self.byte_count != 1 + def to_numpy_shortname(self) -> str: data_type_to_numpy = { DataType.bool: "bool", @@ -154,12 +163,10 @@ class ShardingCodecIndexLocation(Enum): @frozen -class CoreArrayMetadata: +class ArraySpec: shape: ChunkCoords - chunk_shape: ChunkCoords data_type: DataType fill_value: Any - runtime_configuration: RuntimeConfiguration @property def dtype(self) -> np.dtype: @@ -191,13 +198,14 @@ def dtype(self) -> np.dtype: def ndim(self) -> int: return len(self.shape) - def get_core_metadata(self, runtime_configuration: RuntimeConfiguration) -> CoreArrayMetadata: - return CoreArrayMetadata( - shape=self.shape, - chunk_shape=self.chunk_grid.configuration.chunk_shape, + def get_chunk_spec(self, _chunk_coords: ChunkCoords) -> ArraySpec: + assert isinstance( + self.chunk_grid, RegularChunkGridMetadata + ), "Currently, only regular chunk grid is supported" + return ArraySpec( + shape=self.chunk_grid.configuration.chunk_shape, data_type=self.data_type, fill_value=self.fill_value, - runtime_configuration=runtime_configuration, ) def to_bytes(self) -> bytes: From 6f61e92d28bf16f21c34e101f9bcf94fb344b32d Mon Sep 17 00:00:00 2001 From: Max Jones <14077947+maxrjones@users.noreply.github.com> Date: Wed, 7 Feb 2024 10:21:36 -0500 Subject: [PATCH 0425/1078] Add env variables to sprint setup instructions (#1654) --- README-v3.md | 8 ++++++++ pyproject.toml | 1 + 2 files changed, 9 insertions(+) diff --git a/README-v3.md b/README-v3.md index 8348038e5a..dd95c3c7a3 100644 --- a/README-v3.md +++ b/README-v3.md @@ -14,6 +14,14 @@ git remote add upstream https://github.com/zarr-developers/zarr-python git fetch upstream git checkout --track upstream/v3 ``` +## Set the environment variables for V3 + +There are a couple environment variables required for enabling V3: + +``` +export ZARR_V3_EXPERIMENTAL_API=1 +export ZARR_V3_SHARDING=1 +``` ## Set up your environment diff --git a/pyproject.toml b/pyproject.toml index df4ed7dea1..5a788eb1a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,6 +93,7 @@ extra-dependencies = [ [tool.hatch.envs.test.scripts] run-coverage = "pytest --cov-config=pyproject.toml --cov=pkg --cov=tests" run = "run-coverage --no-cov" +run-verbose = "run-coverage --verbose" [tool.ruff] line-length = 100 From 4ab6b3f1817b58c66ca28e80f6ba79b12d713036 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 7 Feb 2024 08:09:08 -0800 Subject: [PATCH 0426/1078] fix sync group constructors (#1652) --- src/zarr/v3/codecs/bytes.py | 2 +- src/zarr/v3/group.py | 17 ++++++++++------- tests/test_group_v3.py | 11 +++++++++++ 3 files changed, 22 insertions(+), 8 deletions(-) diff --git a/src/zarr/v3/codecs/bytes.py b/src/zarr/v3/codecs/bytes.py index e05ccb6abc..de7c750bc9 100644 --- a/src/zarr/v3/codecs/bytes.py +++ b/src/zarr/v3/codecs/bytes.py @@ -29,7 +29,7 @@ class BytesCodecConfigurationMetadata: @frozen class BytesCodecMetadata: configuration: BytesCodecConfigurationMetadata - name: Literal["bytes"] = field(default="bytes", init=False) + name: Literal["bytes"] = field(default="bytes", init=True) @frozen diff --git a/src/zarr/v3/group.py b/src/zarr/v3/group.py index 9f53a49819..a5d0e68165 100644 --- a/src/zarr/v3/group.py +++ b/src/zarr/v3/group.py @@ -12,7 +12,7 @@ from zarr.v3.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, make_cattr from zarr.v3.config import RuntimeConfiguration, SyncConfiguration from zarr.v3.store import StoreLike, StorePath, make_store_path -from zarr.v3.sync import SyncMixin +from zarr.v3.sync import 
SyncMixin, sync logger = logging.getLogger("zarr.group") @@ -20,8 +20,8 @@ @frozen class GroupMetadata: attributes: Dict[str, Any] = field(factory=dict) - zarr_format: Literal[2, 3] = 3 # field(default=3, validator=validators.in_([2, 3])) - node_type: Literal["group"] = field(default="group", init=False) + zarr_format: Literal[2, 3] = 3 + node_type: Literal["group"] = field(default="group", init=True) def to_bytes(self) -> Dict[str, bytes]: if self.zarr_format == 3: @@ -52,7 +52,7 @@ async def create( *, attributes: Optional[Dict[str, Any]] = None, exists_ok: bool = False, - zarr_format: Literal[2, 3] = 3, # field(default=3, validator=validators.in_([2, 3])), + zarr_format: Literal[2, 3] = 3, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> AsyncGroup: store_path = make_store_path(store) @@ -305,13 +305,14 @@ def create( exists_ok: bool = False, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Group: - obj = cls._sync( + obj = sync( AsyncGroup.create( store, attributes=attributes, exists_ok=exists_ok, runtime_configuration=runtime_configuration, - ) + ), + loop=runtime_configuration.asyncio_loop, ) return cls(obj) @@ -322,7 +323,9 @@ def open( store: StoreLike, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Group: - obj = cls._sync(AsyncGroup.open(store, runtime_configuration)) + obj = sync( + AsyncGroup.open(store, runtime_configuration), loop=runtime_configuration.asyncio_loop + ) return cls(obj) def __getitem__(self, path: str) -> Union[Array, Group]: diff --git a/tests/test_group_v3.py b/tests/test_group_v3.py index 4e7179376b..1498d6779b 100644 --- a/tests/test_group_v3.py +++ b/tests/test_group_v3.py @@ -54,3 +54,14 @@ def test_group(store_path) -> None: # and the attrs were modified in the store bar3 = foo["bar"] assert dict(bar3.attrs) == {"baz": "qux", "name": "bar"} + + +def test_group_sync_constructor(store_path) -> None: + + group = Group.create( + store=store_path, + attributes={"title": "test 123"}, + runtime_configuration=RuntimeConfiguration(), + ) + + assert group._async_group.metadata.attributes["title"] == "test 123" From c69ac316b8e0474ce58105ca2661e8cdfc88c6f5 Mon Sep 17 00:00:00 2001 From: Max Jones <14077947+maxrjones@users.noreply.github.com> Date: Wed, 7 Feb 2024 12:47:29 -0500 Subject: [PATCH 0427/1078] Specify docs hatch env for v3 branch (#1655) * Revert README changes * Specify docs hatch env * Add clean script * Update for readthedocs * Use hatch features --- .readthedocs.yaml | 10 +++------- README-v3.md | 8 -------- pyproject.toml | 9 +++++++++ 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 3a2fb6622b..8c791a292e 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -4,16 +4,12 @@ build: os: ubuntu-20.04 tools: python: "3.10" + commands: + - pip install hatch + - hatch run docs:rtd sphinx: configuration: docs/conf.py fail_on_warning: true -python: - install: - - method: pip - path: . 
- extra_requirements: - - docs - formats: all diff --git a/README-v3.md b/README-v3.md index dd95c3c7a3..8348038e5a 100644 --- a/README-v3.md +++ b/README-v3.md @@ -14,14 +14,6 @@ git remote add upstream https://github.com/zarr-developers/zarr-python git fetch upstream git checkout --track upstream/v3 ``` -## Set the environment variables for V3 - -There are a couple environment variables required for enabling V3: - -``` -export ZARR_V3_EXPERIMENTAL_API=1 -export ZARR_V3_SHARDING=1 -``` ## Set up your environment diff --git a/pyproject.toml b/pyproject.toml index 5a788eb1a5..0f0e4ae633 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ jupyter = [ ] docs = [ 'sphinx', + 'sphinx-autobuild>=2021.3.14', 'sphinx-automodapi', 'sphinx_design', 'sphinx-issues', @@ -95,6 +96,14 @@ run-coverage = "pytest --cov-config=pyproject.toml --cov=pkg --cov=tests" run = "run-coverage --no-cov" run-verbose = "run-coverage --verbose" +[tool.hatch.envs.docs] +features = ['docs'] + +[tool.hatch.envs.docs.scripts] +build = "sphinx-build docs/ docs/_build/" +rtd = "sphinx-build docs/ _readthedocs/html/" +serve = "sphinx-autobuild docs docs/_build --ignore 'docs/_autoapi/**/*' --host 0.0.0.0" + [tool.ruff] line-length = 100 exclude = [ From c57896305e041f402849155d74900f1c17032ac0 Mon Sep 17 00:00:00 2001 From: Max Jones <14077947+maxrjones@users.noreply.github.com> Date: Mon, 12 Feb 2024 14:12:11 -0500 Subject: [PATCH 0428/1078] Add test matrix for V3 (#1656) * Add a test matrix for Zarr V3 * Run mypy in test matrix * Simplify matrix * Update .github/workflows/test-v3.yml Co-authored-by: Joe Hamman --------- Co-authored-by: Joe Hamman --- .github/workflows/test-v3.yml | 6 +++++- pyproject.toml | 31 ++++++++++++++++++++++++------- 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/.github/workflows/test-v3.yml b/.github/workflows/test-v3.yml index e0a4117290..bdc6e99299 100644 --- a/.github/workflows/test-v3.yml +++ b/.github/workflows/test-v3.yml @@ -29,4 +29,8 @@ jobs: hatch env create - name: Run Tests run: | - hatch run test:run \ No newline at end of file + hatch run test:run + - name: Run mypy + continue-on-error: true + run: | + hatch run test:run-mypy \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 0f0e4ae633..922b10346d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,16 @@ docs = [ 'numpydoc', 'numcodecs[msgpack]', ] +extra = [ + 'attrs', + 'cattrs', + 'msgpack', + 'crc32c', + 'zstandard' +] +optional = [ + 'lmdb', +] [project.urls] "Bug Tracker" = "https://github.com/zarr-developers/zarr-python/issues" @@ -79,22 +89,29 @@ build.hooks.vcs.version-file = "src/zarr/_version.py" [tool.hatch.envs.test] extra-dependencies = [ - "attrs", - "cattrs", "coverage", "pytest", "pytest-cov", - "msgpack", - "lmdb", - "zstandard", - "crc32c", - "pytest-asyncio" + "pytest-asyncio", + "mypy", ] +features = ["extra"] + +[[tool.hatch.envs.test.matrix]] +python = ["3.10", "3.11"] +numpy = ["1.24", "1.26"] +version = ["minimal"] + +[[tool.hatch.envs.test.matrix]] +python = ["3.10", "3.11"] +numpy = ["1.24", "1.26"] +features = ["optional"] [tool.hatch.envs.test.scripts] run-coverage = "pytest --cov-config=pyproject.toml --cov=pkg --cov=tests" run = "run-coverage --no-cov" run-verbose = "run-coverage --verbose" +run-mypy = "mypy src" [tool.hatch.envs.docs] features = ['docs'] From 4f2ace4b8708cf91f4bd29ae3ed210a3e66f235c Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Wed, 14 Feb 2024 04:09:41 -0800 Subject: [PATCH 0429/1078] Fix zarr sync (#1663) This 
patch removes fasteners and disables zarr.sync which uses process and thread Co-authored-by: Wei Ouyang --- pyproject.toml | 2 +- zarr/sync.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a85e49e82c..4da3079808 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ requires-python = ">=3.9" dependencies = [ 'asciitree', 'numpy>=1.21.1', - 'fasteners', + 'fasteners; sys_platform != "emscripten"', 'numcodecs>=0.10.0', ] dynamic = [ diff --git a/zarr/sync.py b/zarr/sync.py index 2e843f6557..03046a4a32 100644 --- a/zarr/sync.py +++ b/zarr/sync.py @@ -3,8 +3,6 @@ from threading import Lock from typing import Protocol -import fasteners - class Synchronizer(Protocol): """Base class for synchronizers.""" @@ -49,6 +47,8 @@ def __init__(self, path): self.path = path def __getitem__(self, item): + import fasteners + path = os.path.join(self.path, item) lock = fasteners.InterProcessLock(path) return lock From 0b0ac8857a52653fdb500cc9da7b51b0ec8a05b5 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Thu, 15 Feb 2024 01:47:27 +0530 Subject: [PATCH 0430/1078] Update release.rst (#1621) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update release.rst * Update release.rst * Change 2.16.2 → 2.17.0 * Update moto for test_s3 * Skip bsddb3 tests to prevent warning failure * Fix more user warning tests * Fix even more user warning tests * Skip coverage for importorskips * Move to have_X skip method for deps * Update release.rst (PR#1663) * Fix test_core.py 'compile' issues * Add black formatting * Drop Windows/3.9 build due to unrelated failures * fix typo --------- Co-authored-by: Davis Bennett Co-authored-by: Josh Moore --- .github/workflows/windows-testing.yml | 2 +- docs/release.rst | 40 +++++++++++++++++++++++++++ requirements_dev_optional.txt | 2 +- zarr/tests/test_core.py | 28 +++++++++++++------ zarr/tests/test_storage.py | 2 +- zarr/tests/util.py | 24 ++++++++++++++++ 6 files changed, 87 insertions(+), 11 deletions(-) diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 5c3252c0ba..0ef7f21758 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -16,7 +16,7 @@ jobs: strategy: fail-fast: True matrix: - python-version: ['3.9', '3.10', '3.11'] + python-version: ['3.10', '3.11'] steps: - uses: actions/checkout@v4 with: diff --git a/docs/release.rst b/docs/release.rst index ab74a3debd..0f199aadd2 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,12 +18,20 @@ Release notes Unreleased ---------- +.. _release_2.17.0: + +2.17.0 +------ + Enhancements ~~~~~~~~~~~~ * Added type hints to ``zarr.creation.create()``. By :user:`David Stansby ` :issue:`1536`. +* Pyodide support: Don't require fasteners on Emscripten. + By :user:`Hood Chatham ` :issue:`1663`. + Docs ~~~~ @@ -45,9 +53,21 @@ Docs * Minor tweak to advanced indexing tutorial examples. By :user:`Ross Barnowski ` :issue:`1550`. +* Automatically document array members using sphinx-automodapi. + By :user:`David Stansby ` :issue:`1547`. + +* Add a markdown file documenting the current and former core-developer team. + By :user:`Joe Hamman ` :issue:`1628`. + +* Add Norman Rzepka to core-dev team. + By :user:`Joe Hamman ` :issue:`1630`. + * Added section about accessing zip files that are on s3. By :user:`Jeff Peck ` :issue:`1613`. +* Add V3 roadmap and design document. + By :user:`Joe Hamman ` :issue:`1583`. 
+ Maintenance ~~~~~~~~~~~ @@ -75,6 +95,26 @@ Maintenance * Remove ``sphinx-rtd-theme`` dependency from ``pyproject.toml``. By :user:`Sanket Verma ` :issue:`1563`. +* Remove ``CODE_OF_CONDUCT.md`` file from the Zarr-Python repository. + By :user:`Sanket Verma ` :issue:`1572`. + +* Bump version of black in pre-commit. + By :user:`David Stansby ` :issue:`1559`. + +* Use list comprehension where applicable. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1555`. + +* Use format specification mini-language to format string. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1558`. + +* Single startswith() call instead of multiple ones. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1556`. + +* Move codespell options around. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1196`. + +* Remove unused mypy ignore comments. + By :user:`David Stansby ` :issue:`1602`. .. _release_2.16.1: diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index b4de5fd515..d1ee5a891d 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -20,4 +20,4 @@ pytest-timeout==2.2.0 h5py==3.10.0 fsspec==2023.12.2 s3fs==2023.12.2 -moto[server]>=4.0.8 +moto[server]>=5.0.1 diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index a3fde4050d..cf15703497 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -73,7 +73,15 @@ ) from zarr.tests.test_storage_v3 import DummyStorageTransfomer from zarr.util import buffer_size -from zarr.tests.util import abs_container, skip_test_env_var, have_fsspec, mktemp +from zarr.tests.util import ( + abs_container, + have_bsddb3, + have_fsspec, + have_lmdb, + have_sqlite3, + mktemp, + skip_test_env_var, +) from zarr.types import DIMENSION_SEPARATOR # noinspection PyMethodMayBeStatic @@ -2038,9 +2046,11 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(have_bsddb3 is False, reason="needs bsddb3") class TestArrayWithDBMStoreBerkeleyDB(TestArray): def create_store(self): - bsddb3 = pytest.importorskip("bsddb3") + import bsddb3 + path = mktemp(suffix=".dbm") atexit.register(os.remove, path) store = DBMStore(path, flag="n", open=bsddb3.btopen) @@ -2050,9 +2060,9 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(have_lmdb is False, reason="needs lmdb") class TestArrayWithLMDBStore(TestArray): def create_store(self): - pytest.importorskip("lmdb") path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) store = LMDBStore(path, buffers=True) @@ -2065,9 +2075,9 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(have_lmdb is False, reason="needs lmdb") class TestArrayWithLMDBStoreNoBuffers(TestArray): def create_store(self): - pytest.importorskip("lmdb") path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) store = LMDBStore(path, buffers=False) @@ -2077,9 +2087,9 @@ def test_nbytes_stored(self): pass # not implemented +@pytest.mark.skipif(have_sqlite3 is False, reason="needs sqlite3") class TestArrayWithSQLiteStore(TestArray): def create_store(self): - pytest.importorskip("sqlite3") path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStore(path) @@ -2758,9 +2768,11 @@ def test_nbytes_stored(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +@pytest.mark.skipif(have_bsddb3 is False, reason="needs bsddb3") class TestArrayWithDBMStoreV3BerkeleyDB(TestArrayV3): def create_store(self) -> DBMStoreV3: - bsddb3 = pytest.importorskip("bsddb3") + import bsddb3 + path = 
mktemp(suffix=".dbm") atexit.register(os.remove, path) store = DBMStoreV3(path, flag="n", open=bsddb3.btopen) @@ -2771,11 +2783,11 @@ def test_nbytes_stored(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +@pytest.mark.skipif(have_lmdb is False, reason="needs lmdb") class TestArrayWithLMDBStoreV3(TestArrayV3): lmdb_buffers = True def create_store(self) -> LMDBStoreV3: - pytest.importorskip("lmdb") path = mktemp(suffix=".lmdb") atexit.register(atexit_rmtree, path) store = LMDBStoreV3(path, buffers=self.lmdb_buffers) @@ -2797,9 +2809,9 @@ def test_nbytes_stored(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") +@pytest.mark.skipif(have_sqlite3 is False, reason="needs sqlite3") class TestArrayWithSQLiteStoreV3(TestArrayV3): def create_store(self): - pytest.importorskip("sqlite3") path = mktemp(suffix=".db") atexit.register(atexit_rmtree, path) store = SQLiteStoreV3(path) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 25863749d8..e4e3d93f5f 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1396,7 +1396,7 @@ def s3(request): port = 5555 endpoint_uri = "http://127.0.0.1:%d/" % port proc = subprocess.Popen( - shlex.split("moto_server s3 -p %d" % port), + shlex.split("moto_server -p %d" % port), stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, ) diff --git a/zarr/tests/util.py b/zarr/tests/util.py index b4f00f703d..b3c3249cab 100644 --- a/zarr/tests/util.py +++ b/zarr/tests/util.py @@ -69,6 +69,30 @@ def skip_test_env_var(name): have_fsspec = False +try: + import bsddb3 # noqa: F401 + + have_bsddb3 = True +except ImportError: # pragma: no cover + have_bsddb3 = False + + +try: + import lmdb # noqa: F401 + + have_lmdb = True +except ImportError: # pragma: no cover + have_lmdb = False + + +try: + import sqlite3 # noqa: F401 + + have_sqlite3 = True +except ImportError: # pragma: no cover + have_sqlite3 = False + + def abs_container(): from azure.core.exceptions import ResourceExistsError import azure.storage.blob as asb From e50b47196eb4e4071158baba22567713ad012837 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:04:51 +0100 Subject: [PATCH 0431/1078] Bump numpy from 1.24.3 to 1.26.1 (#1543) Bumps [numpy](https://github.com/numpy/numpy) from 1.24.3 to 1.26.1. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.24.3...v1.26.1) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Davis Bennett Co-authored-by: Josh Moore Co-authored-by: Joe Hamman --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index a6135bd831..c8c5f7d7ab 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. 
-numpy==1.24.3 +numpy==1.26.1 From 81bbb2e7f28d64335d835523057041f11cdc7843 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:05:20 +0100 Subject: [PATCH 0432/1078] chore: update pre-commit hooks (#1642) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update pre-commit hooks updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.14 → v0.2.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.14...v0.2.1) - [github.com/psf/black: 23.12.1 → 24.2.0](https://github.com/psf/black/compare/23.12.1...24.2.0) * run black incl. comments for '...' --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Josh Moore --- .pre-commit-config.yaml | 4 ++-- zarr/convenience.py | 1 + zarr/core.py | 8 +++++--- zarr/indexing.py | 10 +++++----- zarr/n5.py | 1 + zarr/storage.py | 1 + zarr/sync.py | 1 + zarr/types.py | 1 + 8 files changed, 17 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7f48d7cd6..c7d4f32c68 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,11 +8,11 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.14' + rev: 'v0.2.1' hooks: - id: ruff - repo: https://github.com/psf/black - rev: 23.12.1 + rev: 24.2.0 hooks: - id: black - repo: https://github.com/codespell-project/codespell diff --git a/zarr/convenience.py b/zarr/convenience.py index 9c0deeea47..b4b8bb5293 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -1,4 +1,5 @@ """Convenience functions for storing and loading data.""" + import itertools import os import re diff --git a/zarr/core.py b/zarr/core.py index d22a9d79c3..5727afa884 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2060,9 +2060,11 @@ def _process_chunk( index_selection = PartialChunkIterator(chunk_selection, self.chunks) for start, nitems, partial_out_selection in index_selection: expected_shape = [ - len(range(*partial_out_selection[i].indices(self.chunks[0] + 1))) - if i < len(partial_out_selection) - else dim + ( + len(range(*partial_out_selection[i].indices(self.chunks[0] + 1))) + if i < len(partial_out_selection) + else dim + ) for i, dim in enumerate(self.chunks) ] if isinstance(cdata, UncompressedPartialReadBufferV3): diff --git a/zarr/indexing.py b/zarr/indexing.py index 3042147ebb..5a2b7c0eb4 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -545,11 +545,11 @@ def ix_(selection, shape): # replace slice and int as these are not supported by numpy.ix_ selection = [ - slice_to_range(dim_sel, dim_len) - if isinstance(dim_sel, slice) - else [dim_sel] - if is_integer(dim_sel) - else dim_sel + ( + slice_to_range(dim_sel, dim_len) + if isinstance(dim_sel, slice) + else [dim_sel] if is_integer(dim_sel) else dim_sel + ) for dim_sel, dim_len in zip(selection, shape) ] diff --git a/zarr/n5.py b/zarr/n5.py index 44b44e69e2..c50c18f718 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -1,5 +1,6 @@ """This module contains a storage class and codec to support the N5 format. """ + import os import struct import sys diff --git a/zarr/storage.py b/zarr/storage.py index aa27e98e6f..a26dc636db 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -14,6 +14,7 @@ path) and a `getsize` method (return the size in bytes of a given value). 
""" + import atexit import errno import glob diff --git a/zarr/sync.py b/zarr/sync.py index 03046a4a32..ba1c5df5b3 100644 --- a/zarr/sync.py +++ b/zarr/sync.py @@ -8,6 +8,7 @@ class Synchronizer(Protocol): """Base class for synchronizers.""" def __getitem__(self, item): + # see subclasses ... diff --git a/zarr/types.py b/zarr/types.py index 1de270f25c..cc29a350f5 100644 --- a/zarr/types.py +++ b/zarr/types.py @@ -10,4 +10,5 @@ class MetaArray(Protocol): def __array_function__(self, func, types, args, kwargs): + # To be extended ... From 367848836535e02eecd92a11ef734dd944285615 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:05:40 +0100 Subject: [PATCH 0433/1078] Bump ipywidgets from 8.1.0 to 8.1.1 (#1538) Bumps [ipywidgets](https://github.com/jupyter-widgets/ipywidgets) from 8.1.0 to 8.1.1. - [Release notes](https://github.com/jupyter-widgets/ipywidgets/releases) - [Commits](https://github.com/jupyter-widgets/ipywidgets/compare/8.1.0...8.1.1) --- updated-dependencies: - dependency-name: ipywidgets dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Davis Bennett Co-authored-by: Josh Moore --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index d1ee5a891d..0ac4922ce1 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.4.1; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.1.0 +ipywidgets==8.1.1 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From 720bea687b444b2082638eb7edc3bb6a4f8fa805 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:14:43 +0100 Subject: [PATCH 0434/1078] Proper argument for numpy.reshape (#1425) `numpy.reshape` not only accepts a tuple of ints, but also a simple int. Besides `(10)` is not a tuple and is identical to `10`, unlike `(10,)`. 
--- zarr/tests/test_indexing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zarr/tests/test_indexing.py b/zarr/tests/test_indexing.py index af046e9d28..a3afc101c5 100644 --- a/zarr/tests/test_indexing.py +++ b/zarr/tests/test_indexing.py @@ -1632,7 +1632,7 @@ def test_set_selections_with_fields(): ), ( (slice(0, 10, 1),), - np.arange(0, 10).reshape((10)), + np.arange(0, 10).reshape(10), [(0, 10, (slice(0, 10, 1),))], ), ((0,), np.arange(0, 100).reshape((10, 10)), [(0, 10, (slice(0, 1, 1),))]), @@ -1644,7 +1644,7 @@ def test_set_selections_with_fields(): np.arange(0, 100).reshape((10, 10)), [(0, 1, (slice(0, 1, 1), slice(0, 1, 1)))], ), - ((0,), np.arange(0, 10).reshape((10)), [(0, 1, (slice(0, 1, 1),))]), + ((0,), np.arange(0, 10).reshape(10), [(0, 1, (slice(0, 1, 1),))]), pytest.param( (slice(5, 8, 1), slice(2, 4, 1), slice(0, 5, 1)), np.arange(2, 100002).reshape((10, 1, 10000)), From 74498538c180855172573f2983207f74674cbc1c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:22:59 +0100 Subject: [PATCH 0435/1078] Bump ipywidgets from 8.1.1 to 8.1.2 (#1666) Bumps [ipywidgets](https://github.com/jupyter-widgets/ipywidgets) from 8.1.1 to 8.1.2. - [Release notes](https://github.com/jupyter-widgets/ipywidgets/releases) - [Commits](https://github.com/jupyter-widgets/ipywidgets/compare/8.1.1...8.1.2) --- updated-dependencies: - dependency-name: ipywidgets dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0ac4922ce1..e94b814173 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -3,7 +3,7 @@ lmdb==1.4.1; sys_platform != 'win32' # optional library requirements for Jupyter ipytree==0.2.2 -ipywidgets==8.1.1 +ipywidgets==8.1.2 # optional library requirements for services # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) From 3db41760e18fb0a69b5066e8c7aba9752a8c474e Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Thu, 15 Feb 2024 10:54:23 +0100 Subject: [PATCH 0436/1078] docs: ZIP-related tweaks (#1641) * docs: use 'ZIP archive' instead of 'zip file'; clarify utility of caching in s3 + ZIP example; style * docs: update release notes, correct spelling of greg lee's name in past release notes, and fix markup in past release notes * docs: use 'ZIP archive' instead of 'zip file'; clarify utility of caching in s3 + ZIP example; style * docs: update release notes, correct spelling of greg lee's name in past release notes, and fix markup in past release notes --- docs/release.rst | 20 ++++++++++---------- docs/tutorial.rst | 27 ++++++++++++++------------- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 0f199aadd2..b73dcec34f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -62,8 +62,8 @@ Docs * Add Norman Rzepka to core-dev team. By :user:`Joe Hamman ` :issue:`1630`. -* Added section about accessing zip files that are on s3. - By :user:`Jeff Peck ` :issue:`1613`. +* Added section about accessing ZIP archives on s3. 
+ By :user:`Jeff Peck ` :issue:`1613`, :issue:`1615`, and :user:`Davis Bennett ` :issue:`1641`. * Add V3 roadmap and design document. By :user:`Joe Hamman ` :issue:`1583`. @@ -157,10 +157,10 @@ Maintenance By :user:`Davis Bennett ` :issue:`1462`. * Style the codebase with ``ruff`` and ``black``. - By :user:`Davis Bennett` :issue:`1459` + By :user:`Davis Bennett ` :issue:`1459` * Ensure that chunks is tuple of ints upon array creation. - By :user:`Philipp Hanslovsky` :issue:`1461` + By :user:`Philipp Hanslovsky ` :issue:`1461` .. _release_2.15.0: @@ -548,7 +548,7 @@ Maintenance By :user:`Saransh Chopra ` :issue:`1079`. * Remove option to return None from _ensure_store. - By :user:`Greggory Lee ` :issue:`1068`. + By :user:`Gregory Lee ` :issue:`1068`. * Fix a typo of "integers". By :user:`Richard Scott ` :issue:`1056`. @@ -566,7 +566,7 @@ Enhancements Since the format is not yet finalized, the classes and functions are not automatically imported into the regular `zarr` name space. Setting the `ZARR_V3_EXPERIMENTAL_API` environment variable will activate them. - By :user:`Greggory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007` + By :user:`Gregory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007` as well as by :user:`Josh Moore ` :issue:`1032`. * **Create FSStore from an existing fsspec filesystem**. If you have created @@ -688,7 +688,7 @@ Enhancements higher-level array creation and convenience functions still accept plain Python dicts or other mutable mappings for the ``store`` argument, but will internally convert these to a ``KVStore``. - By :user:`Greggory Lee `; :issue:`839`, :issue:`789`, and :issue:`950`. + By :user:`Gregory Lee `; :issue:`839`, :issue:`789`, and :issue:`950`. * Allow to assign array ``fill_values`` and update metadata accordingly. By :user:`Ryan Abernathey `, :issue:`662`. @@ -835,7 +835,7 @@ Bug fixes ~~~~~~~~~ * Fix FSStore.listdir behavior for nested directories. - By :user:`Greggory Lee `; :issue:`802`. + By :user:`Gregory Lee `; :issue:`802`. .. _release_2.9.4: @@ -919,7 +919,7 @@ Bug fixes By :user:`Josh Moore `; :issue:`781`. * avoid NumPy 1.21.0 due to https://github.com/numpy/numpy/issues/19325 - By :user:`Greggory Lee `; :issue:`791`. + By :user:`Gregory Lee `; :issue:`791`. Maintenance ~~~~~~~~~~~ @@ -931,7 +931,7 @@ Maintenance By :user:`Elliott Sales de Andrade `; :issue:`799`. * TST: add missing assert in test_hexdigest. - By :user:`Greggory Lee `; :issue:`801`. + By :user:`Gregory Lee `; :issue:`801`. .. _release_2.8.3: diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 351eef064a..1f7accab3a 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -774,7 +774,7 @@ the following code:: Any other compatible storage class could be used in place of :class:`zarr.storage.DirectoryStore` in the code examples above. For example, -here is an array stored directly into a Zip file, via the +here is an array stored directly into a ZIP archive, via the :class:`zarr.storage.ZipStore` class:: >>> store = zarr.ZipStore('data/example.zip', mode='w') @@ -798,12 +798,12 @@ Re-open and check that data have been written:: [42, 42, 42, ..., 42, 42, 42]], dtype=int32) >>> store.close() -Note that there are some limitations on how Zip files can be used, because items -within a Zip file cannot be updated in place. This means that data in the array +Note that there are some limitations on how ZIP archives can be used, because items +within a ZIP archive cannot be updated in place. 
This means that data in the array should only be written once and write operations should be aligned with chunk boundaries. Note also that the ``close()`` method must be called after writing any data to the store, otherwise essential records will not be written to the -underlying zip file. +underlying ZIP archive. Another storage alternative is the :class:`zarr.storage.DBMStore` class, added in Zarr version 2.2. This class allows any DBM-style database to be used for @@ -846,7 +846,7 @@ respectively require the `redis-py `_ and `pymongo `_ packages to be installed. For compatibility with the `N5 `_ data format, Zarr also provides -an N5 backend (this is currently an experimental feature). Similar to the zip storage class, an +an N5 backend (this is currently an experimental feature). Similar to the ZIP storage class, an :class:`zarr.n5.N5Store` can be instantiated directly:: >>> store = zarr.N5Store('data/example.n5') @@ -1000,12 +1000,13 @@ separately from Zarr. .. _tutorial_copy: -Accessing Zip Files on S3 -~~~~~~~~~~~~~~~~~~~~~~~~~ +Accessing ZIP archives on S3 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The built-in `ZipStore` will only work with paths on the local file-system, however -it is also possible to access ``.zarr.zip`` data on the cloud. Here is an example of -accessing a zipped Zarr file on s3: +The built-in :class:`zarr.storage.ZipStore` will only work with paths on the local file-system; however +it is possible to access ZIP-archived Zarr data on the cloud via the `ZipFileSystem `_ +class from ``fsspec``. The following example demonstrates how to access +a ZIP-archived Zarr group on s3 using `s3fs `_ and ``ZipFileSystem``: >>> s3_path = "s3://path/to/my.zarr.zip" >>> @@ -1014,7 +1015,7 @@ accessing a zipped Zarr file on s3: >>> fs = ZipFileSystem(f, mode="r") >>> store = FSMap("", fs, check=False) >>> - >>> # cache is optional, but may be a good idea depending on the situation + >>> # caching may improve performance when repeatedly reading the same data >>> cache = zarr.storage.LRUStoreCache(store, max_size=2**28) >>> z = zarr.group(store=cache) @@ -1022,7 +1023,7 @@ This store can also be generated with ``fsspec``'s handler chaining, like so: >>> store = zarr.storage.FSStore(url=f"zip::{s3_path}", mode="r") -This can be especially useful if you have a very large ``.zarr.zip`` file on s3 +This can be especially useful if you have a very large ZIP-archived Zarr array or group on s3 and only need to access a small portion of it. Consolidating metadata @@ -1161,7 +1162,7 @@ re-compression, and so should be faster. E.g.:: └── spam (100,) int64 >>> new_root['foo/bar/baz'][:] array([ 0, 1, 2, ..., 97, 98, 99]) - >>> store2.close() # zip stores need to be closed + >>> store2.close() # ZIP stores need to be closed .. _tutorial_strings: From d23683d21728d9be5a978719fbb75b1cb45b4441 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Feb 2024 08:37:26 +0100 Subject: [PATCH 0437/1078] Bump numpy from 1.26.1 to 1.26.4 (#1669) Bumps [numpy](https://github.com/numpy/numpy) from 1.26.1 to 1.26.4. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/RELEASE_WALKTHROUGH.rst) - [Commits](https://github.com/numpy/numpy/compare/v1.26.1...v1.26.4) --- updated-dependencies: - dependency-name: numpy dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_numpy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_numpy.txt b/requirements_dev_numpy.txt index c8c5f7d7ab..d8d6c3d097 100644 --- a/requirements_dev_numpy.txt +++ b/requirements_dev_numpy.txt @@ -1,4 +1,4 @@ # Break this out into a separate file to allow testing against # different versions of numpy. This file should pin to the latest # numpy version. -numpy==1.26.1 +numpy==1.26.4 From 003ff33e70ce0a28411a7e9fde608354b1b8ee9b Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 16 Feb 2024 20:45:43 +0100 Subject: [PATCH 0438/1078] Change occurrences of % and format() to f-strings (#1423) Co-authored-by: Joe Hamman Co-authored-by: Josh Moore --- docs/release.rst | 3 + zarr/_storage/absstore.py | 6 +- zarr/_storage/store.py | 2 +- zarr/_storage/v3.py | 2 +- zarr/convenience.py | 44 +++++------ zarr/core.py | 20 ++--- zarr/creation.py | 4 +- zarr/errors.py | 4 +- zarr/hierarchy.py | 14 ++-- zarr/indexing.py | 37 +++++----- zarr/meta.py | 10 +-- zarr/meta_v1.py | 4 +- zarr/n5.py | 6 +- zarr/storage.py | 10 +-- zarr/tests/test_core.py | 10 +-- zarr/tests/test_meta.py | 146 ++++++++++++++----------------------- zarr/tests/test_storage.py | 6 +- zarr/util.py | 59 +++++++-------- 18 files changed, 167 insertions(+), 220 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index b73dcec34f..8ce4b2e33c 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,9 @@ Release notes Unreleased ---------- +* Change occurrences of % and format() to f-strings. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1423`. + .. _release_2.17.0: 2.17.0 diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index c9a113148c..b6b386f468 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -84,7 +84,7 @@ def __init__( blob_service_kwargs = blob_service_kwargs or {} client = ContainerClient( - "https://{}.blob.core.windows.net/".format(account_name), + f"https://{account_name}.blob.core.windows.net/", container, credential=account_key, **blob_service_kwargs, @@ -141,7 +141,7 @@ def __getitem__(self, key): try: return self.client.download_blob(blob_name).readall() except ResourceNotFoundError: - raise KeyError("Blob %s not found" % blob_name) + raise KeyError(f"Blob {blob_name} not found") def __setitem__(self, key, value): value = ensure_bytes(value) @@ -154,7 +154,7 @@ def __delitem__(self, key): try: self.client.delete_blob(self._append_path_to_prefix(key)) except ResourceNotFoundError: - raise KeyError("Blob %s not found" % key) + raise KeyError(f"Blob {key} not found") def __eq__(self, other): return ( diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 36b596769a..209f118534 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -227,7 +227,7 @@ def _validate_key(self, key: str): # TODO: Possibly allow key == ".zmetadata" too if we write a # consolidated metadata spec corresponding to this? 
): - raise ValueError("keys starts with unexpected value: `{}`".format(key)) + raise ValueError(f"key starts with unexpected value: `{key}`") if key.endswith("/"): raise ValueError("keys may not end in /") diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 32e78f7a34..56bae74361 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -569,7 +569,7 @@ def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zme consolidated_format = meta.get("zarr_consolidated_format", None) if consolidated_format != 1: raise MetadataError( - "unsupported zarr consolidated metadata format: %s" % consolidated_format + f"unsupported zarr consolidated metadata format: {consolidated_format}" ) # decode metadata diff --git a/zarr/convenience.py b/zarr/convenience.py index b4b8bb5293..7ca5d426f0 100644 --- a/zarr/convenience.py +++ b/zarr/convenience.py @@ -259,7 +259,7 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): try: grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version) for i, arr in enumerate(args): - k = "arr_{}".format(i) + k = f"arr_{i}" grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) for k, arr in kwargs.items(): grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) @@ -499,7 +499,7 @@ def __init__(self, log): self.log_file = log else: raise TypeError( - "log must be a callable function, file path or " "file-like object, found %r" % log + f"log must be a callable function, file path or file-like object, found {log!r}" ) def __enter__(self): @@ -526,9 +526,9 @@ def _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied): message = "dry run: " else: message = "all done: " - message += "{:,} copied, {:,} skipped".format(n_copied, n_skipped) + message += f"{n_copied:,} copied, {n_skipped:,} skipped" if not dry_run: - message += ", {:,} bytes copied".format(n_bytes_copied) + message += f", {n_bytes_copied:,} bytes copied" log(message) @@ -657,9 +657,7 @@ def copy_store( # check if_exists parameter valid_if_exists = ["raise", "replace", "skip"] if if_exists not in valid_if_exists: - raise ValueError( - "if_exists must be one of {!r}; found {!r}".format(valid_if_exists, if_exists) - ) + raise ValueError(f"if_exists must be one of {valid_if_exists!r}; found {if_exists!r}") # setup counting variables n_copied = n_skipped = n_bytes_copied = 0 @@ -720,20 +718,20 @@ def copy_store( if if_exists != "replace": if dest_key in dest: if if_exists == "raise": - raise CopyError("key {!r} exists in destination".format(dest_key)) + raise CopyError(f"key {dest_key!r} exists in destination") elif if_exists == "skip": do_copy = False # take action if do_copy: - log("copy {}".format(descr)) + log(f"copy {descr}") if not dry_run: data = source[source_key] n_bytes_copied += buffer_size(data) dest[dest_key] = data n_copied += 1 else: - log("skip {}".format(descr)) + log(f"skip {descr}") n_skipped += 1 # log a final message with a summary of what happened @@ -744,7 +742,7 @@ def copy_store( def _check_dest_is_group(dest): if not hasattr(dest, "create_dataset"): - raise ValueError("dest must be a group, got {!r}".format(dest)) + raise ValueError(f"dest must be a group, got {dest!r}") def copy( @@ -910,11 +908,9 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ # check if_exists parameter valid_if_exists = ["raise", "replace", "skip", "skip_initialized"] if if_exists not in valid_if_exists: - raise ValueError( - "if_exists must be one of 
{!r}; found {!r}".format(valid_if_exists, if_exists) - ) + raise ValueError(f"if_exists must be one of {valid_if_exists!r}; found {if_exists!r}") if dest_h5py and if_exists == "skip_initialized": - raise ValueError("{!r} can only be used when copying to zarr".format(if_exists)) + raise ValueError(f"{if_exists!r} can only be used when copying to zarr") # determine name to copy to if name is None: @@ -934,9 +930,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ exists = dest is not None and name in dest if exists: if if_exists == "raise": - raise CopyError( - "an object {!r} already exists in destination " "{!r}".format(name, dest.name) - ) + raise CopyError(f"an object {name!r} already exists in destination {dest.name!r}") elif if_exists == "skip": do_copy = False elif if_exists == "skip_initialized": @@ -947,7 +941,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ # take action if do_copy: # log a message about what we're going to do - log("copy {} {} {}".format(source.name, source.shape, source.dtype)) + log(f"copy {source.name} {source.shape} {source.dtype}") if not dry_run: # clear the way @@ -1015,7 +1009,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ n_copied += 1 else: - log("skip {} {} {}".format(source.name, source.shape, source.dtype)) + log(f"skip {source.name} {source.shape} {source.dtype}") n_skipped += 1 elif root or not shallow: @@ -1026,16 +1020,14 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ exists_array = dest is not None and name in dest and hasattr(dest[name], "shape") if exists_array: if if_exists == "raise": - raise CopyError( - "an array {!r} already exists in destination " "{!r}".format(name, dest.name) - ) + raise CopyError(f"an array {name!r} already exists in destination {dest.name!r}") elif if_exists == "skip": do_copy = False # take action if do_copy: # log action - log("copy {}".format(source.name)) + log(f"copy {source.name}") if not dry_run: # clear the way @@ -1078,7 +1070,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ n_copied += 1 else: - log("skip {}".format(source.name)) + log(f"skip {source.name}") n_skipped += 1 return n_copied, n_skipped, n_bytes_copied @@ -1327,7 +1319,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** store, storage_options=kwargs.get("storage_options"), mode=mode, zarr_version=zarr_version ) if mode not in {"r", "r+"}: - raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}".format(mode)) + raise ValueError(f"invalid mode, expected either 'r' or 'r+'; found {mode!r}") path = kwargs.pop("path", None) if store._store_version == 2: diff --git a/zarr/core.py b/zarr/core.py index 5727afa884..c3184c6652 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2396,11 +2396,11 @@ def _encode_chunk(self, chunk): def __repr__(self): t = type(self) - r = "<{}.{}".format(t.__module__, t.__name__) + r = f"<{t.__module__}.{t.__name__}" if self.name: - r += " %r" % self.name - r += " %s" % str(self.shape) - r += " %s" % self.dtype + r += f" {self.name!r}" + r += f" {str(self.shape)}" + r += f" {self.dtype}" if self._read_only: r += " read-only" r += ">" @@ -2436,11 +2436,11 @@ def info_items(self): def _info_items_nosync(self): def typestr(o): - return "{}.{}".format(type(o).__module__, type(o).__name__) + return f"{type(o).__module__}.{type(o).__name__}" def bytestr(n): if n > 2**10: - return "{} ({})".format(n, 
human_readable_size(n)) + return f"{n} ({human_readable_size(n)})" else: return str(n) @@ -2451,7 +2451,7 @@ def bytestr(n): items += [("Name", self.name)] items += [ ("Type", typestr(self)), - ("Data type", "%s" % self.dtype), + ("Data type", str(self.dtype)), ("Shape", str(self.shape)), ("Chunk shape", str(self.chunks)), ("Order", self.order), @@ -2461,7 +2461,7 @@ def bytestr(n): # filters if self.filters: for i, f in enumerate(self.filters): - items += [("Filter [%s]" % i, repr(f))] + items += [(f"Filter [{i}]", repr(f))] # compressor items += [("Compressor", repr(self.compressor))] @@ -2478,9 +2478,9 @@ def bytestr(n): if self.nbytes_stored > 0: items += [ ("No. bytes stored", bytestr(self.nbytes_stored)), - ("Storage ratio", "%.1f" % (self.nbytes / self.nbytes_stored)), + ("Storage ratio", f"{self.nbytes / self.nbytes_stored:.1f}"), ] - items += [("Chunks initialized", "{}/{}".format(self.nchunks_initialized, self.nchunks))] + items += [("Chunks initialized", f"{self.nchunks_initialized}/{self.nchunks}")] return items diff --git a/zarr/creation.py b/zarr/creation.py index d4f570895a..264715b040 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -287,7 +287,7 @@ def _kwargs_compat(compressor, fill_value, kwargs): compressor = compression else: - raise ValueError("bad value for compression: %r" % compression) + raise ValueError(f"bad value for compression: {compression!r}") # handle 'fillvalue' if "fillvalue" in kwargs: @@ -297,7 +297,7 @@ def _kwargs_compat(compressor, fill_value, kwargs): # ignore other keyword arguments for k in kwargs: - warn("ignoring keyword argument %r" % k) + warn(f"ignoring keyword argument {k!r}") return compressor, fill_value diff --git a/zarr/errors.py b/zarr/errors.py index 30c9b13d39..85789fbcbf 100644 --- a/zarr/errors.py +++ b/zarr/errors.py @@ -67,9 +67,7 @@ def __init__(self): def err_too_many_indices(selection, shape): - raise IndexError( - "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) - ) + raise IndexError(f"too many indices for array; expected {len(shape)}, got {len(selection)}") class VindexInvalidSelectionError(_BaseZarrIndexError): diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 1cfea89c81..44af1d63d1 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -340,9 +340,9 @@ def __len__(self): def __repr__(self): t = type(self) - r = "<{}.{}".format(t.__module__, t.__name__) + r = f"<{t.__module__}.{t.__name__}" if self.name: - r += " %r" % self.name + r += f" {self.name!r}" if self._read_only: r += " read-only" r += ">" @@ -358,7 +358,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def info_items(self): def typestr(o): - return "{}.{}".format(type(o).__module__, type(o).__name__) + return f"{type(o).__module__}.{type(o).__name__}" items = [] @@ -1157,17 +1157,15 @@ def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, **kwargs shape = normalize_shape(shape) if shape != a.shape: raise TypeError( - "shape do not match existing array; expected {}, got {}".format(a.shape, shape) + f"shape do not match existing array; expected {a.shape}, got {shape}" ) dtype = np.dtype(dtype) if exact: if dtype != a.dtype: - raise TypeError( - "dtypes do not match exactly; expected {}, got {}".format(a.dtype, dtype) - ) + raise TypeError(f"dtypes do not match exactly; expected {a.dtype}, got {dtype}") else: if not np.can_cast(dtype, a.dtype): - raise TypeError("dtypes ({}, {}) cannot be safely cast".format(dtype, a.dtype)) + raise TypeError(f"dtypes ({dtype}, {a.dtype}) cannot be safely 
cast") return a else: diff --git a/zarr/indexing.py b/zarr/indexing.py index 5a2b7c0eb4..9889fcadad 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -338,8 +338,8 @@ def __init__(self, selection, array): else: raise IndexError( - "unsupported selection item for basic indexing; " - "expected integer or slice, got {!r}".format(type(dim_sel)) + f"unsupported selection item for basic indexing; " + f"expected integer or slice, got {type(dim_sel)!r}" ) dim_indexers.append(dim_indexer) @@ -370,8 +370,8 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): # check shape if dim_sel.shape[0] != dim_len: raise IndexError( - "Boolean array has the wrong length for dimension; " - "expected {}, got {}".format(dim_len, dim_sel.shape[0]) + f"Boolean array has the wrong length for dimension; " + f"expected {dim_len}, got { dim_sel.shape[0]}" ) # store attributes @@ -610,9 +610,9 @@ def __init__(self, selection, array): else: raise IndexError( - "unsupported selection item for orthogonal indexing; " - "expected integer, slice, integer array or Boolean " - "array, got {!r}".format(type(dim_sel)) + f"unsupported selection item for orthogonal indexing; " + f"expected integer, slice, integer array or Boolean " + f"array, got {type(dim_sel)!r}" ) dim_indexers.append(dim_indexer) @@ -698,8 +698,8 @@ def __init__(self, selection, array): if dim_sel.step not in {1, None}: raise IndexError( - "unsupported selection item for block indexing; " - "expected integer or slice with step=1, got {!r}".format(type(dim_sel)) + f"unsupported selection item for block indexing; " + f"expected integer or slice with step=1, got {type(dim_sel)!r}" ) # Can't reuse wraparound_indices because it expects a numpy array @@ -715,8 +715,8 @@ def __init__(self, selection, array): else: raise IndexError( - "unsupported selection item for block indexing; " - "expected integer or slice, got {!r}".format(type(dim_sel)) + f"unsupported selection item for block indexing; " + f"expected integer or slice, got {type(dim_sel)!r}" ) dim_indexer = SliceDimIndexer(slice_, dim_len, dim_chunk_size) @@ -782,9 +782,9 @@ def __init__(self, selection, array): # validation if not is_coordinate_selection(selection, array): raise IndexError( - "invalid coordinate selection; expected one integer " - "(coordinate) array per dimension of the target array, " - "got {!r}".format(selection) + f"invalid coordinate selection; expected one integer " + f"(coordinate) array per dimension of the target array, " + f"got {selection!r}" ) # handle wraparound, boundscheck @@ -874,8 +874,8 @@ def __init__(self, selection, array): # validation if not is_mask_selection(selection, array): raise IndexError( - "invalid mask selection; expected one Boolean (mask)" - "array with the same shape as the target array, got {!r}".format(selection) + f"invalid mask selection; expected one Boolean (mask)" + f"array with the same shape as the target array, got {selection!r}" ) # convert to indices @@ -919,8 +919,7 @@ def check_fields(fields, dtype): # check type if not isinstance(fields, (str, list, tuple)): raise IndexError( - "'fields' argument must be a string or list of strings; found " - "{!r}".format(type(fields)) + f"'fields' argument must be a string or list of strings; found " f"{type(fields)!r}" ) if fields: if dtype.names is None: @@ -933,7 +932,7 @@ def check_fields(fields, dtype): # multiple field selection out_dtype = np.dtype([(f, dtype[f]) for f in fields]) except KeyError as e: - raise IndexError("invalid 'fields' argument, field not found: {!r}".format(e)) + raise 
IndexError(f"invalid 'fields' argument, field not found: {e!r}") else: return out_dtype else: diff --git a/zarr/meta.py b/zarr/meta.py index d9797e4754..4b360270de 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -111,7 +111,7 @@ def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType # check metadata format zarr_format = meta.get("zarr_format", None) if zarr_format != cls.ZARR_FORMAT: - raise MetadataError("unsupported zarr format: %s" % zarr_format) + raise MetadataError(f"unsupported zarr format: {zarr_format}") # extract array metadata fields try: @@ -199,7 +199,7 @@ def decode_group_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType # check metadata format version zarr_format = meta.get("zarr_format", None) if zarr_format != cls.ZARR_FORMAT: - raise MetadataError("unsupported zarr format: %s" % zarr_format) + raise MetadataError(f"unsupported zarr format: {zarr_format}") meta = dict(zarr_format=zarr_format) return meta @@ -346,7 +346,7 @@ def decode_group_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType # # check metadata format version # zarr_format = meta.get("zarr_format", None) # if zarr_format != cls.ZARR_FORMAT: - # raise MetadataError("unsupported zarr format: %s" % zarr_format) + # raise MetadataError(f"unsupported zarr format: {zarr_format}") assert "attributes" in meta # meta = dict(attributes=meta['attributes']) @@ -383,7 +383,7 @@ def decode_hierarchy_metadata(cls, s: Union[MappingType, bytes, str]) -> Mapping # check metadata format # zarr_format = meta.get("zarr_format", None) # if zarr_format != "https://purl.org/zarr/spec/protocol/core/3.0": - # raise MetadataError("unsupported zarr format: %s" % zarr_format) + # raise MetadataError(f"unsupported zarr format: {zarr_format}") if set(meta.keys()) != { "zarr_format", "metadata_encoding", @@ -518,7 +518,7 @@ def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType meta["storage_transformers"] = storage_transformers except Exception as e: - raise MetadataError("error decoding metadata: %s" % e) + raise MetadataError(f"error decoding metadata: {e}") else: return meta diff --git a/zarr/meta_v1.py b/zarr/meta_v1.py index 4ac381f2ca..65bfd3488e 100644 --- a/zarr/meta_v1.py +++ b/zarr/meta_v1.py @@ -10,7 +10,7 @@ def decode_metadata(b): meta = json.loads(s) zarr_format = meta.get("zarr_format", None) if zarr_format != 1: - raise MetadataError("unsupported zarr format: %s" % zarr_format) + raise MetadataError(f"unsupported zarr format: {zarr_format}") try: meta = dict( zarr_format=meta["zarr_format"], @@ -23,7 +23,7 @@ def decode_metadata(b): order=meta["order"], ) except Exception as e: - raise MetadataError("error decoding metadata: %s" % e) + raise MetadataError(f"error decoding metadata: {e}") else: return meta diff --git a/zarr/n5.py b/zarr/n5.py index c50c18f718..fdd3d5babf 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -826,9 +826,9 @@ def decode(self, chunk, out=None) -> bytes: if out is not None: # out should only be used if we read a complete chunk - assert chunk_shape == self.chunk_shape, "Expected chunk of shape {}, found {}".format( - self.chunk_shape, chunk_shape - ) + assert ( + chunk_shape == self.chunk_shape + ), f"Expected chunk of shape {self.chunk_shape}, found {chunk_shape}" if self._compressor: self._compressor.decode(chunk, out) diff --git a/zarr/storage.py b/zarr/storage.py index a26dc636db..73a6dc9630 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -2700,14 +2700,12 @@ def listdir(self, path=None): path = 
normalize_storage_path(path) sep = "_" if path == "" else "/" keys = self.cursor.execute( - """ + f""" SELECT DISTINCT SUBSTR(m, 0, INSTR(m, "/")) AS l FROM ( SELECT LTRIM(SUBSTR(k, LENGTH(?) + 1), "/") || "/" AS m FROM zarr WHERE k LIKE (? || "{sep}%") ) ORDER BY l ASC - """.format( - sep=sep - ), + """, (path, path), ) keys = list(map(operator.itemgetter(0), keys)) @@ -2863,7 +2861,7 @@ def __init__(self, prefix="zarr", dimension_separator=None, **kwargs): self.client = redis.Redis(**kwargs) def _key(self, key): - return "{prefix}:{key}".format(prefix=self._prefix, key=key) + return f"{self._prefix}:{key}" def __getitem__(self, key): return self.client[self._key(key)] @@ -2948,7 +2946,7 @@ def __init__(self, store: StoreLike, metadata_key=".zmetadata"): consolidated_format = meta.get("zarr_consolidated_format", None) if consolidated_format != 1: raise MetadataError( - "unsupported zarr consolidated metadata format: %s" % consolidated_format + f"unsupported zarr consolidated metadata format: {consolidated_format}" ) # decode metadata diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index cf15703497..d9447c0832 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -188,7 +188,7 @@ def test_store_has_text_keys(self): for k in z.chunk_store.keys(): if not isinstance(k, expected_type): # pragma: no cover - pytest.fail("Non-text key: %s" % repr(k)) + pytest.fail(f"Non-text key: {k!r}") z.store.close() @@ -202,7 +202,7 @@ def test_store_has_binary_values(self): try: ensure_ndarray(v) except TypeError: # pragma: no cover - pytest.fail("Non-bytes-like value: %s" % repr(v)) + pytest.fail(f"Non-bytes-like value: {v!r}") z.store.close() @@ -1212,7 +1212,7 @@ def test_dtypes(self): # datetime, timedelta for base_type in "Mm": for resolution in "D", "us", "ns": - dtype = "{}8[{}]".format(base_type, resolution) + dtype = f"{base_type}8[{resolution}]" z = self.create_array(shape=100, dtype=dtype, fill_value=0) assert z.dtype == np.dtype(dtype) a = np.random.randint( @@ -1402,7 +1402,7 @@ def compare_arrays(expected, actual, item_dtype): # convenience API for item_type in "int", " Tuple[np.dtype object_codec = codec_registry[codec_id](*args) except KeyError: # pragma: no cover raise ValueError( - "codec %r for object type %r is not " - "available; please provide an " - "object_codec manually" % (codec_id, key) + f"codec {codec_id!r} for object type {key!r} is not " + f"available; please provide an object_codec manually" ) return dtype, object_codec @@ -241,7 +240,7 @@ def is_total_slice(item, shape: Tuple[int]) -> bool: for it, sh in zip(item, shape) ) else: - raise TypeError("expected slice or tuple of slices, found %r" % item) + raise TypeError(f"expected slice or tuple of slices, found {item!r}") def normalize_resize_args(old_shape, *args): @@ -265,23 +264,23 @@ def normalize_resize_args(old_shape, *args): def human_readable_size(size) -> str: if size < 2**10: - return "%s" % size + return f"{size}" elif size < 2**20: - return "%.1fK" % (size / float(2**10)) + return f"{size / float(2**10):.1f}K" elif size < 2**30: - return "%.1fM" % (size / float(2**20)) + return f"{size / float(2**20):.1f}M" elif size < 2**40: - return "%.1fG" % (size / float(2**30)) + return f"{size / float(2**30):.1f}G" elif size < 2**50: - return "%.1fT" % (size / float(2**40)) + return f"{size / float(2**40):.1f}T" else: - return "%.1fP" % (size / float(2**50)) + return f"{size / float(2**50):.1f}P" def normalize_order(order: str) -> str: order = str(order).upper() if order not in ["C", "F"]: - 
raise ValueError("order must be either 'C' or 'F', found: %r" % order) + raise ValueError(f"order must be either 'C' or 'F', found: {order!r}") return order @@ -289,7 +288,7 @@ def normalize_dimension_separator(sep: Optional[str]) -> Optional[DIMENSION_SEPA if sep in (".", "/", None): return cast(Optional[DIMENSION_SEPARATOR], sep) else: - raise ValueError("dimension_separator must be either '.' or '/', found: %r" % sep) + raise ValueError(f"dimension_separator must be either '.' or '/', found: {sep!r}") def normalize_fill_value(fill_value, dtype: np.dtype): @@ -307,8 +306,8 @@ def normalize_fill_value(fill_value, dtype: np.dtype): if not isinstance(fill_value, str): raise ValueError( - "fill_value {!r} is not valid for dtype {}; must be a " - "unicode string".format(fill_value, dtype) + f"fill_value {fill_value!r} is not valid for dtype {dtype}; " + f"must be a unicode string" ) else: @@ -322,8 +321,8 @@ def normalize_fill_value(fill_value, dtype: np.dtype): except Exception as e: # re-raise with our own error message to be helpful raise ValueError( - "fill_value {!r} is not valid for dtype {}; nested " - "exception: {}".format(fill_value, dtype, e) + f"fill_value {fill_value!r} is not valid for dtype {dtype}; " + f"nested exception: {e}" ) return fill_value @@ -396,10 +395,10 @@ def info_html_report(items) -> str: report += "" for k, v in items: report += ( - "" - '%s' - '%s' - "" % (k, v) + f"" + f'{k}' + f'{v}' + f"" ) report += "" report += "" @@ -435,7 +434,7 @@ def get_children(self): def get_text(self): name = self.obj.name.split("/")[-1] or "/" if hasattr(self.obj, "shape"): - name += " {} {}".format(self.obj.shape, self.obj.dtype) + name += f" {self.obj.shape} {self.obj.dtype}" return name def get_type(self): @@ -463,7 +462,7 @@ def tree_get_icon(stype: str) -> str: elif stype == "Group": return tree_group_icon else: - raise ValueError("Unknown type: %s" % stype) + raise ValueError(f"Unknown type: {stype}") def tree_widget_sublist(node, root=False, expand=False): @@ -487,10 +486,10 @@ def tree_widget(group, expand, level): import ipytree except ImportError as error: raise ImportError( - "{}: Run `pip install zarr[jupyter]` or `conda install ipytree`" - "to get the required ipytree dependency for displaying the tree " - "widget. If using jupyterlab<3, you also need to run " - "`jupyter labextension install ipytree`".format(error) + f"{error}: Run `pip install zarr[jupyter]` or `conda install ipytree`" + f"to get the required ipytree dependency for displaying the tree " + f"widget. 
If using jupyterlab<3, you also need to run " + f"`jupyter labextension install ipytree`" ) result = ipytree.Tree() @@ -549,14 +548,10 @@ def _repr_mimebundle_(self, **kwargs): def check_array_shape(param, array, shape): if not hasattr(array, "shape"): - raise TypeError( - "parameter {!r}: expected an array-like object, got {!r}".format(param, type(array)) - ) + raise TypeError(f"parameter {param!r}: expected an array-like object, got {type(array)!r}") if array.shape != shape: raise ValueError( - "parameter {!r}: expected array with shape {!r}, got {!r}".format( - param, shape, array.shape - ) + f"parameter {param!r}: expected array with shape {shape!r}, got {array.shape!r}" ) From 76c345071db950b2362f7588ad20da4a1af03b85 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Mon, 19 Feb 2024 14:45:11 +0100 Subject: [PATCH 0439/1078] Remove attrs (#1660) * begin removing attrs in favor of frozen dataclasses * remove runtime_configuration from corearraymetadata; rename CodecMetadata to NamedConfig; turn chunk encoding into stand-alone functions; put codecs on ArrayMetadata instead of just CodecMetadata; add runtime_configuration parameter to codec encode / decode methods; add parsers to codecs * add typing_extensions dependency * remove CodecMetadatas and data_types; move basic parsers to common * feat: base to_dict method that actually works * Making Codec classes self-contained * fixes * chunk_grids * chunk key encodings * dry-er parse_* * serialize to enums * ruff * organize imports * rm src/zarr/v3/codecs/common.py * cleanup error messages * better codec evolve * __init__ types * add validation to RuntimeConfiguration.__init__; create a validator for c / f order; add order to ArrayV2Metadata.__init__ * import Dict at runtime * fix parse_dimension_names * add tests; tweak parse_name function; adjust some exception messages * fix shapelike parser and tests; clean up imports * typing * improve typing * blacken test_common --------- Co-authored-by: Norman Rzepka --- pyproject.toml | 6 +- src/zarr/v3/__init__.py | 22 +- src/zarr/v3/abc/codec.py | 32 +-- src/zarr/v3/abc/metadata.py | 44 ++++ src/zarr/v3/array.py | 181 ++++++-------- src/zarr/v3/array_v2.py | 144 +++++------ src/zarr/v3/chunk_grids.py | 47 ++++ src/zarr/v3/chunk_key_encodings.py | 81 ++++++ src/zarr/v3/codecs/__init__.py | 328 +----------------------- src/zarr/v3/codecs/blosc.py | 174 +++++++++---- src/zarr/v3/codecs/bytes.py | 92 +++---- src/zarr/v3/codecs/crc32c_.py | 41 ++- src/zarr/v3/codecs/gzip.py | 60 ++--- src/zarr/v3/codecs/pipeline.py | 240 ++++++++++++++++++ src/zarr/v3/codecs/registry.py | 33 +-- src/zarr/v3/codecs/sharding.py | 229 +++++++++-------- src/zarr/v3/codecs/transpose.py | 87 +++---- src/zarr/v3/codecs/zstd.py | 62 ++--- src/zarr/v3/common.py | 205 ++++++++------- src/zarr/v3/config.py | 44 +++- src/zarr/v3/group.py | 82 ++++-- src/zarr/v3/metadata.py | 383 +++++++++++++++++++---------- src/zarr/v3/store/memory.py | 2 +- src/zarr/v3/sync.py | 25 +- tests/test_codecs_v3.py | 254 +++++++++++-------- tests/v3/test_common.py | 97 ++++++++ tests/v3/test_metadata.py | 60 +++++ 27 files changed, 1812 insertions(+), 1243 deletions(-) create mode 100644 src/zarr/v3/abc/metadata.py create mode 100644 src/zarr/v3/chunk_grids.py create mode 100644 src/zarr/v3/chunk_key_encodings.py create mode 100644 src/zarr/v3/codecs/pipeline.py create mode 100644 tests/v3/test_common.py create mode 100644 tests/v3/test_metadata.py diff --git a/pyproject.toml b/pyproject.toml index 922b10346d..3933376b12 100644 --- a/pyproject.toml 
+++ b/pyproject.toml @@ -92,8 +92,12 @@ extra-dependencies = [ "coverage", "pytest", "pytest-cov", + "msgpack", + "lmdb", + "zstandard", + "crc32c", "pytest-asyncio", - "mypy", + "typing_extensions" ] features = ["extra"] diff --git a/src/zarr/v3/__init__.py b/src/zarr/v3/__init__.py index 038dff89be..3441fa67be 100644 --- a/src/zarr/v3/__init__.py +++ b/src/zarr/v3/__init__.py @@ -3,10 +3,11 @@ from typing import Union import zarr.v3.codecs # noqa: F401 -from zarr.v3.array import Array # noqa: F401 -from zarr.v3.array_v2 import ArrayV2 # noqa: F401 -from zarr.v3.group import Group # noqa: F401 -from zarr.v3.metadata import RuntimeConfiguration, runtime_configuration # noqa: F401 +from zarr.v3.array import Array, AsyncArray # noqa: F401 +from zarr.v3.array_v2 import ArrayV2 +from zarr.v3.config import RuntimeConfiguration # noqa: F401 +from zarr.v3.group import AsyncGroup, Group # noqa: F401 +from zarr.v3.metadata import runtime_configuration # noqa: F401 from zarr.v3.store import ( # noqa: F401 StoreLike, make_store_path, @@ -17,19 +18,24 @@ async def open_auto_async( store: StoreLike, runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), -) -> Union[Array, ArrayV2, Group]: +) -> Union[AsyncArray, AsyncGroup]: store_path = make_store_path(store) try: - return await Array.open(store_path, runtime_configuration=runtime_configuration_) + return await AsyncArray.open(store_path, runtime_configuration=runtime_configuration_) except KeyError: - return await Group.open(store_path, runtime_configuration=runtime_configuration_) + return await AsyncGroup.open(store_path, runtime_configuration=runtime_configuration_) def open_auto( store: StoreLike, runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), ) -> Union[Array, ArrayV2, Group]: - return _sync( + object = _sync( open_auto_async(store, runtime_configuration_), runtime_configuration_.asyncio_loop, ) + if isinstance(object, AsyncArray): + return Array(object) + if isinstance(object, AsyncGroup): + return Group(object) + raise TypeError(f"Unexpected object type. 
Got {type(object)}.") diff --git a/src/zarr/v3/abc/codec.py b/src/zarr/v3/abc/codec.py index 0a7c68784f..d0e51ff894 100644 --- a/src/zarr/v3/abc/codec.py +++ b/src/zarr/v3/abc/codec.py @@ -1,37 +1,25 @@ from __future__ import annotations -from abc import abstractmethod, ABC -from typing import TYPE_CHECKING, Optional, Type +from abc import abstractmethod +from typing import TYPE_CHECKING, Optional import numpy as np +from zarr.v3.abc.metadata import Metadata -from zarr.v3.common import BytesLike, SliceSelection +from zarr.v3.common import ArraySpec from zarr.v3.store import StorePath if TYPE_CHECKING: - from zarr.v3.metadata import ( - ArraySpec, - ArrayMetadata, - DataType, - CodecMetadata, - RuntimeConfiguration, - ) + from typing_extensions import Self + from zarr.v3.common import BytesLike, SliceSelection + from zarr.v3.metadata import ArrayMetadata + from zarr.v3.config import RuntimeConfiguration -class Codec(ABC): +class Codec(Metadata): is_fixed_size: bool - @classmethod - @abstractmethod - def get_metadata_class(cls) -> Type[CodecMetadata]: - pass - - @classmethod - @abstractmethod - def from_metadata(cls, codec_metadata: CodecMetadata) -> Codec: - pass - @abstractmethod def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> int: pass @@ -39,7 +27,7 @@ def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: return chunk_spec - def evolve(self, *, ndim: int, data_type: DataType) -> Codec: + def evolve(self, array_spec: ArraySpec) -> Self: return self def validate(self, array_metadata: ArrayMetadata) -> None: diff --git a/src/zarr/v3/abc/metadata.py b/src/zarr/v3/abc/metadata.py new file mode 100644 index 0000000000..bdd2f86d59 --- /dev/null +++ b/src/zarr/v3/abc/metadata.py @@ -0,0 +1,44 @@ +from __future__ import annotations +from typing import TYPE_CHECKING, Sequence + +if TYPE_CHECKING: + from typing import Dict + from typing_extensions import Self + +from dataclasses import fields + +from zarr.v3.common import JSON + + +class Metadata: + def to_dict(self) -> JSON: + """ + Recursively serialize this model to a dictionary. + This method inspects the fields of self and calls `x.to_dict()` for any fields that + are instances of `Metadata`. Sequences of `Metadata` are similarly recursed into, and + the output of that recursion is collected in a list. + """ + ... + out_dict = {} + for field in fields(self): + key = field.name + value = getattr(self, key) + if isinstance(value, Metadata): + out_dict[field.name] = getattr(self, field.name).to_dict() + elif isinstance(value, str): + out_dict[key] = value + elif isinstance(value, Sequence): + out_dict[key] = [v.to_dict() if isinstance(v, Metadata) else v for v in value] + else: + out_dict[key] = value + + return out_dict + + @classmethod + def from_dict(cls, data: Dict[str, JSON]) -> Self: + """ + Create an instance of the model from a dictionary + """ + ... + + return cls(**data) diff --git a/src/zarr/v3/array.py b/src/zarr/v3/array.py index dadde1658a..632f7d8ec7 100644 --- a/src/zarr/v3/array.py +++ b/src/zarr/v3/array.py @@ -2,7 +2,7 @@ # 1. Split Array into AsyncArray and Array # 3. Added .size and .attrs methods # 4. Temporarily disabled the creation of ArrayV2 -# 5. Added from_json to AsyncArray +# 5. Added from_dict to AsyncArray # Questions to consider: # 1. Was splitting the array into two classes really necessary? 
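# A minimal, runnable sketch (not part of the patch itself) of the migration
# pattern this commit applies across the codebase: attrs' `@frozen` becomes the
# standard-library `@dataclass(frozen=True)`, and `attr.evolve(obj, ...)`
# becomes `dataclasses.replace(obj, ...)`. `ExampleMetadata` is a hypothetical
# stand-in, not a class defined in this patch.
from dataclasses import dataclass, replace

@dataclass(frozen=True)
class ExampleMetadata:
    shape: tuple

meta = ExampleMetadata(shape=(10, 10))
resized = replace(meta, shape=(20, 10))  # returns a new frozen instance; `meta` is unchanged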
@@ -10,47 +10,65 @@ from __future__ import annotations +from dataclasses import dataclass, replace + import json from typing import Any, Dict, Iterable, Literal, Optional, Tuple, Union import numpy as np -from attr import evolve, frozen +from zarr.v3.abc.codec import Codec # from zarr.v3.array_v2 import ArrayV2 -from zarr.v3.codecs import CodecMetadata, CodecPipeline, bytes_codec -from zarr.v3.codecs.registry import get_codec_from_metadata +from zarr.v3.codecs import BytesCodec from zarr.v3.common import ( ZARR_JSON, + ArraySpec, ChunkCoords, Selection, SliceSelection, concurrent_map, ) +from zarr.v3.config import RuntimeConfiguration + from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice -from zarr.v3.metadata import ( - ArrayMetadata, - ArraySpec, - DataType, - DefaultChunkKeyEncodingConfigurationMetadata, - DefaultChunkKeyEncodingMetadata, - RegularChunkGridConfigurationMetadata, - RegularChunkGridMetadata, - RuntimeConfiguration, - V2ChunkKeyEncodingConfigurationMetadata, - V2ChunkKeyEncodingMetadata, - dtype_to_data_type, -) +from zarr.v3.chunk_grids import RegularChunkGrid +from zarr.v3.chunk_key_encodings import DefaultChunkKeyEncoding, V2ChunkKeyEncoding +from zarr.v3.metadata import ArrayMetadata from zarr.v3.store import StoreLike, StorePath, make_store_path from zarr.v3.sync import sync -@frozen +def parse_array_metadata(data: Any): + if isinstance(data, ArrayMetadata): + return data + elif isinstance(data, dict): + return ArrayMetadata.from_dict(data) + else: + raise TypeError + + +@dataclass(frozen=True) class AsyncArray: metadata: ArrayMetadata store_path: StorePath runtime_configuration: RuntimeConfiguration - codec_pipeline: CodecPipeline + + @property + def codecs(self): + return self.metadata.codecs + + def __init__( + self, + metadata: ArrayMetadata, + store_path: StorePath, + runtime_configuration: RuntimeConfiguration, + ): + metadata_parsed = parse_array_metadata(metadata) + + object.__setattr__(self, "metadata", metadata_parsed) + object.__setattr__(self, "store_path", store_path) + object.__setattr__(self, "runtime_configuration", runtime_configuration) @classmethod async def create( @@ -65,7 +83,7 @@ async def create( Tuple[Literal["default"], Literal[".", "/"]], Tuple[Literal["v2"], Literal[".", "/"]], ] = ("default", "/"), - codecs: Optional[Iterable[CodecMetadata]] = None, + codecs: Optional[Iterable[Union[Codec, Dict[str, Any]]]] = None, dimension_names: Optional[Iterable[str]] = None, attributes: Optional[Dict[str, Any]] = None, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), @@ -75,36 +93,22 @@ async def create( if not exists_ok: assert not await (store_path / ZARR_JSON).exists() - data_type = ( - DataType[dtype] if isinstance(dtype, str) else DataType[dtype_to_data_type[dtype.str]] - ) - - codecs = list(codecs) if codecs is not None else [bytes_codec()] + codecs = list(codecs) if codecs is not None else [BytesCodec()] if fill_value is None: - if data_type == DataType.bool: + if dtype == np.dtype("bool"): fill_value = False else: fill_value = 0 metadata = ArrayMetadata( shape=shape, - data_type=data_type, - chunk_grid=RegularChunkGridMetadata( - configuration=RegularChunkGridConfigurationMetadata(chunk_shape=chunk_shape) - ), + data_type=dtype, + chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), chunk_key_encoding=( - V2ChunkKeyEncodingMetadata( - configuration=V2ChunkKeyEncodingConfigurationMetadata( - separator=chunk_key_encoding[1] - ) - ) + V2ChunkKeyEncoding(separator=chunk_key_encoding[1]) if 
chunk_key_encoding[0] == "v2" - else DefaultChunkKeyEncodingMetadata( - configuration=DefaultChunkKeyEncodingConfigurationMetadata( - separator=chunk_key_encoding[1] - ) - ) + else DefaultChunkKeyEncoding(separator=chunk_key_encoding[1]) ), fill_value=fill_value, codecs=codecs, @@ -117,38 +121,22 @@ async def create( metadata=metadata, store_path=store_path, runtime_configuration=runtime_configuration, - codec_pipeline=CodecPipeline.create( - [ - get_codec_from_metadata(codec).evolve(ndim=len(shape), data_type=data_type) - for codec in codecs - ] - ), ) await array._save_metadata() return array @classmethod - def from_json( + def from_dict( cls, store_path: StorePath, - zarr_json: Any, + data: Dict[str, Any], runtime_configuration: RuntimeConfiguration, ) -> AsyncArray: - metadata = ArrayMetadata.from_json(zarr_json) - codecs = [ - get_codec_from_metadata(codec).evolve( - ndim=len(metadata.shape), data_type=metadata.data_type - ) - for codec in metadata.codecs - ] + metadata = ArrayMetadata.from_dict(data) async_array = cls( - metadata=metadata, - store_path=store_path, - runtime_configuration=runtime_configuration, - codec_pipeline=CodecPipeline.create(codecs), + metadata=metadata, store_path=store_path, runtime_configuration=runtime_configuration ) - async_array._validate_metadata() return async_array @classmethod @@ -160,7 +148,7 @@ async def open( store_path = make_store_path(store) zarr_json_bytes = await (store_path / ZARR_JSON).get() assert zarr_json_bytes is not None - return cls.from_json( + return cls.from_dict( store_path, json.loads(zarr_json_bytes), runtime_configuration=runtime_configuration, @@ -175,7 +163,7 @@ async def open_auto( store_path = make_store_path(store) v3_metadata_bytes = await (store_path / ZARR_JSON).get() if v3_metadata_bytes is not None: - return cls.from_json( + return cls.from_dict( store_path, json.loads(v3_metadata_bytes), runtime_configuration=runtime_configuration or RuntimeConfiguration(), @@ -205,10 +193,11 @@ def attrs(self) -> dict: return self.metadata.attributes async def getitem(self, selection: Selection): + assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) indexer = BasicIndexer( selection, shape=self.metadata.shape, - chunk_shape=self.metadata.chunk_grid.configuration.chunk_shape, + chunk_shape=self.metadata.chunk_grid.chunk_shape, ) # setup output array @@ -234,20 +223,8 @@ async def getitem(self, selection: Selection): return out[()] async def _save_metadata(self) -> None: - self._validate_metadata() - await (self.store_path / ZARR_JSON).set(self.metadata.to_bytes()) - def _validate_metadata(self) -> None: - assert len(self.metadata.shape) == len( - self.metadata.chunk_grid.configuration.chunk_shape - ), "`chunk_shape` and `shape` need to have the same number of dimensions." - assert self.metadata.dimension_names is None or len(self.metadata.shape) == len( - self.metadata.dimension_names - ), "`dimension_names` and `shape` need to have the same number of dimensions." - assert self.metadata.fill_value is not None, "`fill_value` is required." 
- self.codec_pipeline.validate(self.metadata) - async def _read_chunk( self, chunk_coords: ChunkCoords, @@ -260,8 +237,8 @@ async def _read_chunk( chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) store_path = self.store_path / chunk_key - if self.codec_pipeline.supports_partial_decode: - chunk_array = await self.codec_pipeline.decode_partial( + if self.codecs.supports_partial_decode: + chunk_array = await self.codecs.decode_partial( store_path, chunk_selection, chunk_spec, self.runtime_configuration ) if chunk_array is not None: @@ -271,7 +248,7 @@ async def _read_chunk( else: chunk_bytes = await store_path.get() if chunk_bytes is not None: - chunk_array = await self.codec_pipeline.decode( + chunk_array = await self.codecs.decode( chunk_bytes, chunk_spec, self.runtime_configuration ) tmp = chunk_array[chunk_selection] @@ -280,7 +257,8 @@ async def _read_chunk( out[out_selection] = self.metadata.fill_value async def setitem(self, selection: Selection, value: np.ndarray) -> None: - chunk_shape = self.metadata.chunk_grid.configuration.chunk_shape + assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) + chunk_shape = self.metadata.chunk_grid.chunk_shape indexer = BasicIndexer( selection, shape=self.metadata.shape, @@ -341,9 +319,9 @@ async def _write_chunk( chunk_array = value[out_selection] await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) - elif self.codec_pipeline.supports_partial_encode: + elif self.codecs.supports_partial_encode: # print("encode_partial", chunk_coords, chunk_selection, repr(self)) - await self.codec_pipeline.encode_partial( + await self.codecs.encode_partial( store_path, value[out_selection], chunk_selection, @@ -364,9 +342,7 @@ async def _write_chunk( chunk_array.fill(self.metadata.fill_value) else: chunk_array = ( - await self.codec_pipeline.decode( - chunk_bytes, chunk_spec, self.runtime_configuration - ) + await self.codecs.decode(chunk_bytes, chunk_spec, self.runtime_configuration) ).copy() # make a writable copy chunk_array[chunk_selection] = value[out_selection] @@ -379,7 +355,7 @@ async def _write_chunk_to_store( # chunks that only contain fill_value will be removed await store_path.delete() else: - chunk_bytes = await self.codec_pipeline.encode( + chunk_bytes = await self.codecs.encode( chunk_array, chunk_spec, self.runtime_configuration ) if chunk_bytes is None: @@ -388,11 +364,17 @@ async def _write_chunk_to_store( await store_path.set(chunk_bytes) async def resize(self, new_shape: ChunkCoords) -> AsyncArray: - assert len(new_shape) == len(self.metadata.shape) - new_metadata = evolve(self.metadata, shape=new_shape) + if len(new_shape) != len(self.metadata.shape): + raise ValueError( + "The new shape must have the same number of dimensions " + + f"(={len(self.metadata.shape)})." 
+ ) + + new_metadata = replace(self.metadata, shape=new_shape) # Remove all chunks outside of the new shape - chunk_shape = self.metadata.chunk_grid.configuration.chunk_shape + assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) + chunk_shape = self.metadata.chunk_grid.chunk_shape chunk_key_encoding = self.metadata.chunk_key_encoding old_chunk_coords = set(all_chunk_coords(self.metadata.shape, chunk_shape)) new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape)) @@ -411,14 +393,14 @@ async def _delete_key(key: str) -> None: # Write new metadata await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes()) - return evolve(self, metadata=new_metadata) + return replace(self, metadata=new_metadata) - async def update_attributes(self, new_attributes: Dict[str, Any]) -> Array: - new_metadata = evolve(self.metadata, attributes=new_attributes) + async def update_attributes(self, new_attributes: Dict[str, Any]) -> AsyncArray: + new_metadata = replace(self.metadata, attributes=new_attributes) # Write new metadata await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes()) - return evolve(self, metadata=new_metadata) + return replace(self, metadata=new_metadata) def __repr__(self): return f"" @@ -427,7 +409,7 @@ async def info(self): return NotImplemented -@frozen +@dataclass(frozen=True) class Array: _async_array: AsyncArray @@ -444,7 +426,7 @@ def create( Tuple[Literal["default"], Literal[".", "/"]], Tuple[Literal["v2"], Literal[".", "/"]], ] = ("default", "/"), - codecs: Optional[Iterable[CodecMetadata]] = None, + codecs: Optional[Iterable[Union[Codec, Dict[str, Any]]]] = None, dimension_names: Optional[Iterable[str]] = None, attributes: Optional[Dict[str, Any]] = None, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), @@ -469,14 +451,14 @@ def create( return cls(async_array) @classmethod - def from_json( + def from_dict( cls, store_path: StorePath, - zarr_json: Any, + data: Dict[str, Any], runtime_configuration: RuntimeConfiguration, ) -> Array: - async_array = AsyncArray.from_json( - store_path=store_path, zarr_json=zarr_json, runtime_configuration=runtime_configuration + async_array = AsyncArray.from_dict( + store_path=store_path, data=data, runtime_configuration=runtime_configuration ) return cls(async_array) @@ -490,7 +472,6 @@ def open( AsyncArray.open(store, runtime_configuration=runtime_configuration), runtime_configuration.asyncio_loop, ) - async_array._validate_metadata() return cls(async_array) @classmethod @@ -530,7 +511,7 @@ def metadata(self) -> ArrayMetadata: return self._async_array.metadata @property - def store_path(self) -> str: + def store_path(self) -> StorePath: return self._async_array.store_path def __getitem__(self, selection: Selection): diff --git a/src/zarr/v3/array_v2.py b/src/zarr/v3/array_v2.py index dc4cbebd5e..f150d2dbd2 100644 --- a/src/zarr/v3/array_v2.py +++ b/src/zarr/v3/array_v2.py @@ -1,12 +1,13 @@ from __future__ import annotations import asyncio +from dataclasses import dataclass, replace import json from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union import numcodecs import numpy as np -from attr import evolve, frozen + from numcodecs.compat import ensure_bytes, ensure_ndarray from zarr.v3.common import ( @@ -19,8 +20,9 @@ concurrent_map, to_thread, ) +from zarr.v3.config import RuntimeConfiguration from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice -from zarr.v3.metadata import ArrayV2Metadata, CodecMetadata, RuntimeConfiguration +from zarr.v3.metadata import 
ArrayV2Metadata from zarr.v3.store import StoreLike, StorePath, make_store_path from zarr.v3.sync import sync @@ -28,7 +30,7 @@ from zarr.v3.array import Array -@frozen +@dataclass(frozen=True) class _AsyncArrayProxy: array: ArrayV2 @@ -36,7 +38,7 @@ def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy: return _AsyncArraySelectionProxy(self.array, selection) -@frozen +@dataclass(frozen=True) class _AsyncArraySelectionProxy: array: ArrayV2 selection: Selection @@ -48,7 +50,7 @@ async def set(self, value: np.ndarray): return await self.array.set_async(self.selection, value) -@frozen +@dataclass(frozen=True) class ArrayV2: metadata: ArrayV2Metadata attributes: Optional[Dict[str, Any]] @@ -74,7 +76,7 @@ async def create_async( ) -> ArrayV2: store_path = make_store_path(store) if not exists_ok: - assert not await (store_path / ZARRAY_JSON).exists_async() + assert not await (store_path / ZARRAY_JSON).exists() metadata = ArrayV2Metadata( shape=shape, @@ -144,11 +146,11 @@ async def open_async( ) -> ArrayV2: store_path = make_store_path(store) zarray_bytes, zattrs_bytes = await asyncio.gather( - (store_path / ZARRAY_JSON).get_async(), - (store_path / ZATTRS_JSON).get_async(), + (store_path / ZARRAY_JSON).get(), + (store_path / ZATTRS_JSON).get(), ) assert zarray_bytes is not None - return cls.from_json( + return cls.from_dict( store_path, zarray_json=json.loads(zarray_bytes), zattrs_json=json.loads(zattrs_bytes) if zattrs_bytes is not None else None, @@ -167,14 +169,14 @@ def open( ) @classmethod - def from_json( + def from_dict( cls, store_path: StorePath, zarray_json: Any, zattrs_json: Optional[Any], runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> ArrayV2: - metadata = ArrayV2Metadata.from_json(zarray_json) + metadata = ArrayV2Metadata.from_dict(zarray_json) out = cls( store_path=store_path, metadata=metadata, @@ -187,13 +189,13 @@ def from_json( async def _save_metadata(self) -> None: self._validate_metadata() - await (self.store_path / ZARRAY_JSON).set_async(self.metadata.to_bytes()) + await (self.store_path / ZARRAY_JSON).set(self.metadata.to_bytes()) if self.attributes is not None and len(self.attributes) > 0: - await (self.store_path / ZATTRS_JSON).set_async( + await (self.store_path / ZATTRS_JSON).set( json.dumps(self.attributes).encode(), ) else: - await (self.store_path / ZATTRS_JSON).delete_async() + await (self.store_path / ZATTRS_JSON).delete() def _validate_metadata(self) -> None: assert len(self.metadata.shape) == len( @@ -256,7 +258,7 @@ async def _read_chunk( ): store_path = self.store_path / self._encode_chunk_key(chunk_coords) - chunk_array = await self._decode_chunk(await store_path.get_async()) + chunk_array = await self._decode_chunk(await store_path.get()) if chunk_array is not None: tmp = chunk_array[chunk_selection] out[out_selection] = tmp @@ -357,7 +359,7 @@ async def _write_chunk( else: # writing partial chunks # read chunk first - tmp = await self._decode_chunk(await store_path.get_async()) + tmp = await self._decode_chunk(await store_path.get()) # merge new value if tmp is None: @@ -379,13 +381,13 @@ async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.nda chunk_bytes: Optional[BytesLike] if np.all(chunk_array == self.metadata.fill_value): # chunks that only contain fill_value will be removed - await store_path.delete_async() + await store_path.delete() else: chunk_bytes = await self._encode_chunk(chunk_array) if chunk_bytes is None: - await store_path.delete_async() + await store_path.delete() 
else: - await store_path.set_async(chunk_bytes) + await store_path.set(chunk_bytes) async def _encode_chunk(self, chunk_array: np.ndarray) -> Optional[BytesLike]: chunk_array = chunk_array.ravel(order=self.metadata.order) @@ -411,7 +413,7 @@ def _encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: async def resize_async(self, new_shape: ChunkCoords) -> ArrayV2: assert len(new_shape) == len(self.metadata.shape) - new_metadata = evolve(self.metadata, shape=new_shape) + new_metadata = replace(self.metadata, shape=new_shape) # Remove all chunks outside of the new shape chunk_shape = self.metadata.chunks @@ -419,7 +421,7 @@ async def resize_async(self, new_shape: ChunkCoords) -> ArrayV2: new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape)) async def _delete_key(key: str) -> None: - await (self.store_path / key).delete_async() + await (self.store_path / key).delete() await concurrent_map( [ @@ -430,8 +432,8 @@ async def _delete_key(key: str) -> None: ) # Write new metadata - await (self.store_path / ZARRAY_JSON).set_async(new_metadata.to_bytes()) - return evolve(self, metadata=new_metadata) + await (self.store_path / ZARRAY_JSON).set(new_metadata.to_bytes()) + return replace(self, metadata=new_metadata) def resize(self, new_shape: ChunkCoords) -> ArrayV2: return sync(self.resize_async(new_shape), self.runtime_configuration.asyncio_loop) @@ -439,36 +441,22 @@ def resize(self, new_shape: ChunkCoords) -> ArrayV2: async def convert_to_v3_async(self) -> Array: from sys import byteorder as sys_byteorder + from zarr.v3.abc.codec import Codec from zarr.v3.array import Array from zarr.v3.common import ZARR_JSON - from zarr.v3.metadata import ( - ArrayMetadata, - DataType, - RegularChunkGridConfigurationMetadata, - RegularChunkGridMetadata, - V2ChunkKeyEncodingConfigurationMetadata, - V2ChunkKeyEncodingMetadata, - dtype_to_data_type, - ) - from zarr.v3.codecs.blosc import ( - BloscCodecConfigurationMetadata, - BloscCodecMetadata, - blosc_shuffle_int_to_str, - ) - from zarr.v3.codecs.bytes import ( - BytesCodecConfigurationMetadata, - BytesCodecMetadata, - ) - from zarr.v3.codecs.gzip import ( - GzipCodecConfigurationMetadata, - GzipCodecMetadata, - ) - from zarr.v3.codecs.transpose import ( - TransposeCodecConfigurationMetadata, - TransposeCodecMetadata, + from zarr.v3.chunk_grids import RegularChunkGrid + from zarr.v3.chunk_key_encodings import V2ChunkKeyEncoding + from zarr.v3.metadata import ArrayMetadata, DataType + + from zarr.v3.codecs import ( + BloscCodec, + BloscShuffle, + BytesCodec, + GzipCodec, + TransposeCodec, ) - data_type = DataType[dtype_to_data_type[self.metadata.dtype.str]] + data_type = DataType.from_dtype(self.metadata.dtype) endian: Literal["little", "big"] if self.metadata.dtype.byteorder == "=": endian = sys_byteorder @@ -481,19 +469,11 @@ async def convert_to_v3_async(self) -> Array: self.metadata.filters is None or len(self.metadata.filters) == 0 ), "Filters are not supported by v3." 
- codecs: List[CodecMetadata] = [] + codecs: List[Codec] = [] if self.metadata.order == "F": - codecs.append( - TransposeCodecMetadata( - configuration=TransposeCodecConfigurationMetadata( - order=tuple(reversed(range(self.metadata.ndim))) - ) - ) - ) - codecs.append( - BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian=endian)) - ) + codecs.append(TransposeCodec(order=tuple(reversed(range(self.metadata.ndim))))) + codecs.append(BytesCodec(endian=endian)) if self.metadata.compressor is not None: v2_codec = numcodecs.get_codec(self.metadata.compressor).get_config() @@ -502,55 +482,41 @@ async def convert_to_v3_async(self) -> Array: "gzip", ), "Only blosc and gzip are supported by v3." if v2_codec["id"] == "blosc": - shuffle = blosc_shuffle_int_to_str[v2_codec.get("shuffle", 0)] codecs.append( - BloscCodecMetadata( - configuration=BloscCodecConfigurationMetadata( - typesize=data_type.byte_count, - cname=v2_codec["cname"], - clevel=v2_codec["clevel"], - shuffle=shuffle, - blocksize=v2_codec.get("blocksize", 0), - ) + BloscCodec( + typesize=data_type.byte_count, + cname=v2_codec["cname"], + clevel=v2_codec["clevel"], + shuffle=BloscShuffle.from_int(v2_codec.get("shuffle", 0)), + blocksize=v2_codec.get("blocksize", 0), ) ) elif v2_codec["id"] == "gzip": - codecs.append( - GzipCodecMetadata( - configuration=GzipCodecConfigurationMetadata(level=v2_codec.get("level", 5)) - ) - ) + codecs.append(GzipCodec(level=v2_codec.get("level", 5))) new_metadata = ArrayMetadata( shape=self.metadata.shape, - chunk_grid=RegularChunkGridMetadata( - configuration=RegularChunkGridConfigurationMetadata( - chunk_shape=self.metadata.chunks - ) - ), + chunk_grid=RegularChunkGrid(chunk_shape=self.metadata.chunks), data_type=data_type, fill_value=0 if self.metadata.fill_value is None else self.metadata.fill_value, - chunk_key_encoding=V2ChunkKeyEncodingMetadata( - configuration=V2ChunkKeyEncodingConfigurationMetadata( - separator=self.metadata.dimension_separator - ) - ), + chunk_key_encoding=V2ChunkKeyEncoding(separator=self.metadata.dimension_separator), codecs=codecs, attributes=self.attributes or {}, + dimension_names=None, ) new_metadata_bytes = new_metadata.to_bytes() - await (self.store_path / ZARR_JSON).set_async(new_metadata_bytes) + await (self.store_path / ZARR_JSON).set(new_metadata_bytes) - return Array.from_json( + return Array.from_dict( store_path=self.store_path, - zarr_json=json.loads(new_metadata_bytes), + data=json.loads(new_metadata_bytes), runtime_configuration=self.runtime_configuration, ) async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> ArrayV2: - await (self.store_path / ZATTRS_JSON).set_async(json.dumps(new_attributes).encode()) - return evolve(self, attributes=new_attributes) + await (self.store_path / ZATTRS_JSON).set(json.dumps(new_attributes).encode()) + return replace(self, attributes=new_attributes) def update_attributes(self, new_attributes: Dict[str, Any]) -> ArrayV2: return sync( diff --git a/src/zarr/v3/chunk_grids.py b/src/zarr/v3/chunk_grids.py new file mode 100644 index 0000000000..6c48323798 --- /dev/null +++ b/src/zarr/v3/chunk_grids.py @@ -0,0 +1,47 @@ +from __future__ import annotations +from typing import TYPE_CHECKING, Any, Dict +from dataclasses import dataclass +from zarr.v3.abc.metadata import Metadata + +from zarr.v3.common import ( + JSON, + ChunkCoords, + ChunkCoordsLike, + parse_named_configuration, + parse_shapelike, +) + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(frozen=True) +class 
ChunkGrid(Metadata): + @classmethod + def from_dict(cls, data: Dict[str, JSON]) -> ChunkGrid: + if isinstance(data, ChunkGrid): + return data # type: ignore + + name_parsed, _ = parse_named_configuration(data) + if name_parsed == "regular": + return RegularChunkGrid.from_dict(data) + raise ValueError(f"Unknown chunk grid. Got {name_parsed}.") + + +@dataclass(frozen=True) +class RegularChunkGrid(ChunkGrid): + chunk_shape: ChunkCoords + + def __init__(self, *, chunk_shape: ChunkCoordsLike) -> None: + chunk_shape_parsed = parse_shapelike(chunk_shape) + + object.__setattr__(self, "chunk_shape", chunk_shape_parsed) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> Self: + _, configuration_parsed = parse_named_configuration(data, "regular") + + return cls(**configuration_parsed) # type: ignore[arg-type] + + def to_dict(self) -> Dict[str, JSON]: + return {"name": "regular", "configuration": {"chunk_shape": list(self.chunk_shape)}} diff --git a/src/zarr/v3/chunk_key_encodings.py b/src/zarr/v3/chunk_key_encodings.py new file mode 100644 index 0000000000..e4339240e3 --- /dev/null +++ b/src/zarr/v3/chunk_key_encodings.py @@ -0,0 +1,81 @@ +from __future__ import annotations +from abc import abstractmethod +from typing import TYPE_CHECKING, Dict, Literal +from dataclasses import dataclass +from zarr.v3.abc.metadata import Metadata + +from zarr.v3.common import ( + JSON, + ChunkCoords, + parse_named_configuration, +) + +if TYPE_CHECKING: + pass + +SeparatorLiteral = Literal[".", "/"] + + +def parse_separator(data: JSON) -> SeparatorLiteral: + if data not in (".", "/"): + raise ValueError(f"Expected an '.' or '/' separator. Got {data} instead.") + return data # type: ignore + + +@dataclass(frozen=True) +class ChunkKeyEncoding(Metadata): + name: str + separator: SeparatorLiteral = "." + + def __init__(self, *, separator: SeparatorLiteral) -> None: + separator_parsed = parse_separator(separator) + + object.__setattr__(self, "separator", separator_parsed) + + @classmethod + def from_dict(cls, data: Dict[str, JSON]) -> ChunkKeyEncoding: + if isinstance(data, ChunkKeyEncoding): + return data # type: ignore + + name_parsed, configuration_parsed = parse_named_configuration(data) + if name_parsed == "default": + return DefaultChunkKeyEncoding(**configuration_parsed) # type: ignore[arg-type] + if name_parsed == "v2": + return V2ChunkKeyEncoding(**configuration_parsed) # type: ignore[arg-type] + raise ValueError(f"Unknown chunk key encoding. 
Got {name_parsed}.") + + def to_dict(self) -> Dict[str, JSON]: + return {"name": self.name, "configuration": {"separator": self.separator}} + + @abstractmethod + def decode_chunk_key(self, chunk_key: str) -> ChunkCoords: + pass + + @abstractmethod + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + pass + + +@dataclass(frozen=True) +class DefaultChunkKeyEncoding(ChunkKeyEncoding): + name: Literal["default"] = "default" + + def decode_chunk_key(self, chunk_key: str) -> ChunkCoords: + if chunk_key == "c": + return () + return tuple(map(int, chunk_key[1:].split(self.separator))) + + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + return self.separator.join(map(str, ("c",) + chunk_coords)) + + +@dataclass(frozen=True) +class V2ChunkKeyEncoding(ChunkKeyEncoding): + name: Literal["v2"] = "v2" + + def decode_chunk_key(self, chunk_key: str) -> ChunkCoords: + return tuple(map(int, chunk_key.split(self.separator))) + + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + chunk_identifier = self.separator.join(map(str, chunk_coords)) + return "0" if chunk_identifier == "" else chunk_identifier diff --git a/src/zarr/v3/codecs/__init__.py b/src/zarr/v3/codecs/__init__.py index 40c71f6807..474344ec25 100644 --- a/src/zarr/v3/codecs/__init__.py +++ b/src/zarr/v3/codecs/__init__.py @@ -1,323 +1,9 @@ from __future__ import annotations -from typing import ( - TYPE_CHECKING, - Iterable, - Iterator, - List, - Literal, - Optional, - Tuple, - Union, -) -from warnings import warn -from attr import frozen - -import numpy as np - -from zarr.v3.abc.codec import ( - ArrayBytesCodecPartialDecodeMixin, - ArrayBytesCodecPartialEncodeMixin, - Codec, - ArrayArrayCodec, - ArrayBytesCodec, - BytesBytesCodec, -) -from zarr.v3.common import BytesLike, SliceSelection -from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation, RuntimeConfiguration -from zarr.v3.store import StorePath - -if TYPE_CHECKING: - from zarr.v3.metadata import ArrayMetadata, ArraySpec - from zarr.v3.codecs.sharding import ShardingCodecMetadata - from zarr.v3.codecs.blosc import BloscCodecMetadata - from zarr.v3.codecs.bytes import BytesCodecMetadata - from zarr.v3.codecs.transpose import TransposeCodecMetadata - from zarr.v3.codecs.gzip import GzipCodecMetadata - from zarr.v3.codecs.zstd import ZstdCodecMetadata - from zarr.v3.codecs.crc32c_ import Crc32cCodecMetadata - - -def _find_array_bytes_codec( - codecs: Iterable[Tuple[Codec, ArraySpec]] -) -> Tuple[ArrayBytesCodec, ArraySpec]: - for codec, array_spec in codecs: - if isinstance(codec, ArrayBytesCodec): - return (codec, array_spec) - raise KeyError - - -@frozen -class CodecPipeline: - array_array_codecs: List[ArrayArrayCodec] - array_bytes_codec: ArrayBytesCodec - bytes_bytes_codecs: List[BytesBytesCodec] - - @classmethod - def create(cls, codecs: List[Codec]) -> CodecPipeline: - from zarr.v3.codecs.sharding import ShardingCodec - - assert any( - isinstance(codec, ArrayBytesCodec) for codec in codecs - ), "Exactly one array-to-bytes codec is required." - - prev_codec: Optional[Codec] = None - for codec in codecs: - if prev_codec is not None: - assert not isinstance(codec, ArrayBytesCodec) or not isinstance( - prev_codec, ArrayBytesCodec - ), ( - f"ArrayBytesCodec '{type(codec)}' cannot follow after " - + f"ArrayBytesCodec '{type(prev_codec)}' because exactly " - + "1 ArrayBytesCodec is allowed." 
- ) - assert not isinstance(codec, ArrayBytesCodec) or not isinstance( - prev_codec, BytesBytesCodec - ), ( - f"ArrayBytesCodec '{type(codec)}' cannot follow after " - + f"BytesBytesCodec '{type(prev_codec)}'." - ) - assert not isinstance(codec, ArrayArrayCodec) or not isinstance( - prev_codec, ArrayBytesCodec - ), ( - f"ArrayArrayCodec '{type(codec)}' cannot follow after " - + f"ArrayBytesCodec '{type(prev_codec)}'." - ) - assert not isinstance(codec, ArrayArrayCodec) or not isinstance( - prev_codec, BytesBytesCodec - ), ( - f"ArrayArrayCodec '{type(codec)}' cannot follow after " - + f"BytesBytesCodec '{type(prev_codec)}'." - ) - prev_codec = codec - - if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: - warn( - "Combining a `sharding_indexed` codec disables partial reads and " - + "writes, which may lead to inefficient performance." - ) - - return CodecPipeline( - array_array_codecs=[codec for codec in codecs if isinstance(codec, ArrayArrayCodec)], - array_bytes_codec=[codec for codec in codecs if isinstance(codec, ArrayBytesCodec)][0], - bytes_bytes_codecs=[codec for codec in codecs if isinstance(codec, BytesBytesCodec)], - ) - - @property - def supports_partial_decode(self) -> bool: - return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance( - self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin - ) - - @property - def supports_partial_encode(self) -> bool: - return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance( - self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin - ) - - def __iter__(self) -> Iterator[Codec]: - for aa_codec in self.array_array_codecs: - yield aa_codec - - yield self.array_bytes_codec - - for bb_codec in self.bytes_bytes_codecs: - yield bb_codec - - def validate(self, array_metadata: ArrayMetadata) -> None: - for codec in self: - codec.validate(array_metadata) - - def _codecs_with_resolved_metadata( - self, array_spec: ArraySpec - ) -> Tuple[ - List[Tuple[ArrayArrayCodec, ArraySpec]], - Tuple[ArrayBytesCodec, ArraySpec], - List[Tuple[BytesBytesCodec, ArraySpec]], - ]: - aa_codecs_with_spec: List[Tuple[ArrayArrayCodec, ArraySpec]] = [] - for aa_codec in self.array_array_codecs: - aa_codecs_with_spec.append((aa_codec, array_spec)) - array_spec = aa_codec.resolve_metadata(array_spec) - - ab_codec_with_spec = (self.array_bytes_codec, array_spec) - array_spec = self.array_bytes_codec.resolve_metadata(array_spec) - - bb_codecs_with_spec: List[Tuple[BytesBytesCodec, ArraySpec]] = [] - for bb_codec in self.bytes_bytes_codecs: - bb_codecs_with_spec.append((bb_codec, array_spec)) - array_spec = bb_codec.resolve_metadata(array_spec) - - return (aa_codecs_with_spec, ab_codec_with_spec, bb_codecs_with_spec) - - async def decode( - self, - chunk_bytes: BytesLike, - array_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, - ) -> np.ndarray: - ( - aa_codecs_with_spec, - ab_codec_with_spec, - bb_codecs_with_spec, - ) = self._codecs_with_resolved_metadata(array_spec) - - for bb_codec, array_spec in bb_codecs_with_spec[::-1]: - chunk_bytes = await bb_codec.decode(chunk_bytes, array_spec, runtime_configuration) - - ab_codec, array_spec = ab_codec_with_spec - chunk_array = await ab_codec.decode(chunk_bytes, array_spec, runtime_configuration) - - for aa_codec, array_spec in aa_codecs_with_spec[::-1]: - chunk_array = await aa_codec.decode(chunk_array, array_spec, runtime_configuration) - - return chunk_array - - async def decode_partial( - self, - store_path: StorePath, 
- selection: SliceSelection, - chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, - ) -> Optional[np.ndarray]: - assert self.supports_partial_decode - assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin) - return await self.array_bytes_codec.decode_partial( - store_path, selection, chunk_spec, runtime_configuration - ) - - async def encode( - self, - chunk_array: np.ndarray, - array_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, - ) -> Optional[BytesLike]: - ( - aa_codecs_with_spec, - ab_codec_with_spec, - bb_codecs_with_spec, - ) = self._codecs_with_resolved_metadata(array_spec) - - for aa_codec, array_spec in aa_codecs_with_spec: - chunk_array_maybe = await aa_codec.encode( - chunk_array, array_spec, runtime_configuration - ) - if chunk_array_maybe is None: - return None - chunk_array = chunk_array_maybe - - ab_codec, array_spec = ab_codec_with_spec - chunk_bytes_maybe = await ab_codec.encode(chunk_array, array_spec, runtime_configuration) - if chunk_bytes_maybe is None: - return None - chunk_bytes = chunk_bytes_maybe - - for bb_codec, array_spec in bb_codecs_with_spec: - chunk_bytes_maybe = await bb_codec.encode( - chunk_bytes, array_spec, runtime_configuration - ) - if chunk_bytes_maybe is None: - return None - chunk_bytes = chunk_bytes_maybe - - return chunk_bytes - - async def encode_partial( - self, - store_path: StorePath, - chunk_array: np.ndarray, - selection: SliceSelection, - chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, - ) -> None: - assert self.supports_partial_encode - assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin) - await self.array_bytes_codec.encode_partial( - store_path, chunk_array, selection, chunk_spec, runtime_configuration - ) - - def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: - for codec in self: - byte_length = codec.compute_encoded_size(byte_length, array_spec) - array_spec = codec.resolve_metadata(array_spec) - return byte_length - - -def blosc_codec( - typesize: int, - cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd", - clevel: int = 5, - shuffle: Literal["noshuffle", "shuffle", "bitshuffle"] = "noshuffle", - blocksize: int = 0, -) -> "BloscCodecMetadata": - from zarr.v3.codecs.blosc import BloscCodecMetadata, BloscCodecConfigurationMetadata - - return BloscCodecMetadata( - configuration=BloscCodecConfigurationMetadata( - cname=cname, - clevel=clevel, - shuffle=shuffle, - blocksize=blocksize, - typesize=typesize, - ) - ) - - -def bytes_codec(endian: Optional[Literal["big", "little"]] = "little") -> "BytesCodecMetadata": - from zarr.v3.codecs.bytes import BytesCodecMetadata, BytesCodecConfigurationMetadata - - return BytesCodecMetadata(configuration=BytesCodecConfigurationMetadata(endian)) - - -def transpose_codec( - order: Union[Tuple[int, ...], Literal["C", "F"]], ndim: Optional[int] = None -) -> "TransposeCodecMetadata": - from zarr.v3.codecs.transpose import TransposeCodecMetadata, TransposeCodecConfigurationMetadata - - if order == "C" or order == "F": - assert ( - isinstance(ndim, int) and ndim > 0 - ), 'When using "C" or "F" the `ndim` argument needs to be provided.' 
- if order == "C": - order = tuple(range(ndim)) - if order == "F": - order = tuple(ndim - i - 1 for i in range(ndim)) - - return TransposeCodecMetadata(configuration=TransposeCodecConfigurationMetadata(order)) - - -def gzip_codec(level: int = 5) -> "GzipCodecMetadata": - from zarr.v3.codecs.gzip import GzipCodecMetadata, GzipCodecConfigurationMetadata - - return GzipCodecMetadata(configuration=GzipCodecConfigurationMetadata(level)) - - -def zstd_codec(level: int = 0, checksum: bool = False) -> "ZstdCodecMetadata": - from zarr.v3.codecs.zstd import ZstdCodecMetadata, ZstdCodecConfigurationMetadata - - return ZstdCodecMetadata(configuration=ZstdCodecConfigurationMetadata(level, checksum)) - - -def crc32c_codec() -> "Crc32cCodecMetadata": - from zarr.v3.codecs.crc32c_ import Crc32cCodecMetadata - - return Crc32cCodecMetadata() - - -def sharding_codec( - chunk_shape: Tuple[int, ...], - codecs: Optional[Iterable[CodecMetadata]] = None, - index_codecs: Optional[Iterable[CodecMetadata]] = None, - index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end, -) -> "ShardingCodecMetadata": - from zarr.v3.codecs.sharding import ShardingCodecMetadata, ShardingCodecConfigurationMetadata - - codecs = tuple(codecs) if codecs is not None else (bytes_codec(),) - index_codecs = ( - tuple(index_codecs) if index_codecs is not None else (bytes_codec(), crc32c_codec()) - ) - return ShardingCodecMetadata( - configuration=ShardingCodecConfigurationMetadata( - chunk_shape, codecs, index_codecs, index_location - ) - ) +from zarr.v3.codecs.blosc import BloscCodec, BloscCname, BloscShuffle # noqa: F401 +from zarr.v3.codecs.bytes import BytesCodec, Endian # noqa: F401 +from zarr.v3.codecs.crc32c_ import Crc32cCodec # noqa: F401 +from zarr.v3.codecs.gzip import GzipCodec # noqa: F401 +from zarr.v3.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation # noqa: F401 +from zarr.v3.codecs.transpose import TransposeCodec # noqa: F401 +from zarr.v3.codecs.zstd import ZstdCodec # noqa: F401 diff --git a/src/zarr/v3/codecs/blosc.py b/src/zarr/v3/codecs/blosc.py index efc862e636..479865241f 100644 --- a/src/zarr/v3/codecs/blosc.py +++ b/src/zarr/v3/codecs/blosc.py @@ -1,85 +1,161 @@ from __future__ import annotations -from functools import lru_cache +from dataclasses import dataclass, replace +from enum import Enum +from functools import cached_property -from typing import ( - TYPE_CHECKING, - Dict, - Literal, - Optional, - Type, -) +from typing import TYPE_CHECKING, Union import numcodecs import numpy as np -from attr import evolve, frozen, field from numcodecs.blosc import Blosc from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import BytesLike, to_thread +from zarr.v3.common import parse_enum, parse_named_configuration, to_thread if TYPE_CHECKING: - from zarr.v3.metadata import ArraySpec, CodecMetadata, DataType, RuntimeConfiguration + from typing import Dict, Optional + from typing_extensions import Self + from zarr.v3.common import JSON, ArraySpec, BytesLike + from zarr.v3.config import RuntimeConfiguration -BloscShuffle = Literal["noshuffle", "shuffle", "bitshuffle"] +class BloscShuffle(Enum): + noshuffle = "noshuffle" + shuffle = "shuffle" + bitshuffle = "bitshuffle" + + @classmethod + def from_int(cls, num: int) -> BloscShuffle: + blosc_shuffle_int_to_str = { + 0: "noshuffle", + 1: "shuffle", + 2: "bitshuffle", + } + if num not in blosc_shuffle_int_to_str: + raise ValueError(f"Value must be between 0 and 2. 
Got {num}.")
+        return BloscShuffle[blosc_shuffle_int_to_str[num]]
+
+
+class BloscCname(Enum):
+    lz4 = "lz4"
+    lz4hc = "lz4hc"
+    blosclz = "blosclz"
+    zstd = "zstd"
+    snappy = "snappy"
+    zlib = "zlib"
+
+
 # See https://zarr.readthedocs.io/en/stable/tutorial.html#configuring-blosc
 numcodecs.blosc.use_threads = False
 
-@frozen
-class BloscCodecConfigurationMetadata:
-    typesize: int
-    cname: Literal["lz4", "lz4hc", "blosclz", "zstd", "snappy", "zlib"] = "zstd"
-    clevel: int = 5
-    shuffle: BloscShuffle = "noshuffle"
-    blocksize: int = 0
+def parse_typesize(data: JSON) -> int:
+    if isinstance(data, int):
+        if data > 0:
+            return data
+        else:
+            raise ValueError(
+                f"Value must be greater than 0. Got {data}, which is less than or equal to 0."
+            )
+    raise TypeError(f"Value must be an int. Got {type(data)} instead.")
 
-blosc_shuffle_int_to_str: Dict[int, BloscShuffle] = {
-    0: "noshuffle",
-    1: "shuffle",
-    2: "bitshuffle",
-}
+# todo: real validation
+def parse_clevel(data: JSON) -> int:
+    if isinstance(data, int):
+        return data
+    raise TypeError(f"Value should be an int. Got {type(data)} instead.")
 
-@frozen
-class BloscCodecMetadata:
-    configuration: BloscCodecConfigurationMetadata
-    name: Literal["blosc"] = field(default="blosc", init=False)
+def parse_blocksize(data: JSON) -> int:
+    if isinstance(data, int):
+        return data
+    raise TypeError(f"Value should be an int. Got {type(data)} instead.")
 
-@frozen
+@dataclass(frozen=True)
 class BloscCodec(BytesBytesCodec):
-    configuration: BloscCodecConfigurationMetadata
     is_fixed_size = False
 
-    @classmethod
-    def from_metadata(cls, codec_metadata: CodecMetadata) -> BloscCodec:
-        assert isinstance(codec_metadata, BloscCodecMetadata)
-        return cls(configuration=codec_metadata.configuration)
+    typesize: int
+    cname: BloscCname = BloscCname.zstd
+    clevel: int = 5
+    shuffle: BloscShuffle = BloscShuffle.noshuffle
+    blocksize: int = 0
+
+    def __init__(
+        self,
+        *,
+        typesize: Optional[int] = None,
+        cname: Union[BloscCname, str] = BloscCname.zstd,
+        clevel: int = 5,
+        shuffle: Union[BloscShuffle, str, None] = None,
+        blocksize: int = 0,
+    ) -> None:
+        typesize_parsed = parse_typesize(typesize) if typesize is not None else None
+        cname_parsed = parse_enum(cname, BloscCname)
+        clevel_parsed = parse_clevel(clevel)
+        shuffle_parsed = parse_enum(shuffle, BloscShuffle) if shuffle is not None else None
+        blocksize_parsed = parse_blocksize(blocksize)
+
+        object.__setattr__(self, "typesize", typesize_parsed)
+        object.__setattr__(self, "cname", cname_parsed)
+        object.__setattr__(self, "clevel", clevel_parsed)
+        object.__setattr__(self, "shuffle", shuffle_parsed)
+        object.__setattr__(self, "blocksize", blocksize_parsed)
 
     @classmethod
-    def get_metadata_class(cls) -> Type[BloscCodecMetadata]:
-        return BloscCodecMetadata
+    def from_dict(cls, data: Dict[str, JSON]) -> Self:
+        _, configuration_parsed = parse_named_configuration(data, "blosc")
+        return cls(**configuration_parsed)  # type: ignore[arg-type]
+
+    def to_dict(self) -> Dict[str, JSON]:
+        if self.typesize is None:
+            raise ValueError("`typesize` needs to be set for serialization.")
+        if self.shuffle is None:
+            raise ValueError("`shuffle` needs to be set for serialization.")
+        return {
+            "name": "blosc",
+            "configuration": {
+                "typesize": self.typesize,
+                "cname": self.cname,
+                "clevel": self.clevel,
+                "shuffle": self.shuffle,
+                "blocksize": self.blocksize,
+            },
+        }
 
-    def evolve(self, *, data_type: DataType, **_kwargs) -> BloscCodec:
+    def evolve(self, array_spec: ArraySpec) -> Self:
         new_codec = self
-        if
new_codec.configuration.typesize == 0: - new_configuration = evolve(new_codec.configuration, typesize=data_type.byte_count) - new_codec = evolve(new_codec, configuration=new_configuration) + if new_codec.typesize is None: + new_codec = replace(new_codec, typesize=array_spec.dtype.itemsize) + if new_codec.shuffle is None: + new_codec = replace( + new_codec, + shuffle=( + BloscShuffle.bitshuffle + if array_spec.dtype.itemsize == 1 + else BloscShuffle.shuffle + ), + ) return new_codec - @lru_cache - def get_blosc_codec(self) -> Blosc: - map_shuffle_str_to_int = {"noshuffle": 0, "shuffle": 1, "bitshuffle": 2} + @cached_property + def _blosc_codec(self) -> Blosc: + if self.shuffle is None: + raise ValueError("`shuffle` needs to be set for decoding and encoding.") + map_shuffle_str_to_int = { + BloscShuffle.noshuffle: 0, + BloscShuffle.shuffle: 1, + BloscShuffle.bitshuffle: 2, + } config_dict = { - "cname": self.configuration.cname, - "clevel": self.configuration.clevel, - "shuffle": map_shuffle_str_to_int[self.configuration.shuffle], - "blocksize": self.configuration.blocksize, + "cname": self.cname.name, + "clevel": self.clevel, + "shuffle": map_shuffle_str_to_int[self.shuffle], + "blocksize": self.blocksize, } return Blosc.from_config(config_dict) @@ -89,7 +165,7 @@ async def decode( _chunk_spec: ArraySpec, _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: - return await to_thread(self.get_blosc_codec().decode, chunk_bytes) + return await to_thread(self._blosc_codec.decode, chunk_bytes) async def encode( self, @@ -98,7 +174,7 @@ async def encode( _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: chunk_array = np.frombuffer(chunk_bytes, dtype=chunk_spec.dtype) - return await to_thread(self.get_blosc_codec().encode, chunk_array) + return await to_thread(self._blosc_codec.encode, chunk_array) def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/v3/codecs/bytes.py b/src/zarr/v3/codecs/bytes.py index de7c750bc9..f92fe5606d 100644 --- a/src/zarr/v3/codecs/bytes.py +++ b/src/zarr/v3/codecs/bytes.py @@ -1,65 +1,72 @@ from __future__ import annotations +from dataclasses import dataclass, replace +from enum import Enum +import sys -from typing import ( - TYPE_CHECKING, - Literal, - Optional, - Type, -) +from typing import TYPE_CHECKING, Dict, Optional, Union import numpy as np -from attr import frozen, field from zarr.v3.abc.codec import ArrayBytesCodec from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import BytesLike +from zarr.v3.common import parse_enum, parse_named_configuration if TYPE_CHECKING: - from zarr.v3.metadata import CodecMetadata, ArraySpec, ArrayMetadata, RuntimeConfiguration + from zarr.v3.common import JSON, ArraySpec, BytesLike + from zarr.v3.config import RuntimeConfiguration + from typing_extensions import Self -Endian = Literal["big", "little"] +class Endian(Enum): + big = "big" + little = "little" -@frozen -class BytesCodecConfigurationMetadata: - endian: Optional[Endian] = "little" +default_system_endian = Endian(sys.byteorder) -@frozen -class BytesCodecMetadata: - configuration: BytesCodecConfigurationMetadata - name: Literal["bytes"] = field(default="bytes", init=True) - - -@frozen +@dataclass(frozen=True) class BytesCodec(ArrayBytesCodec): - configuration: BytesCodecConfigurationMetadata is_fixed_size = True - @classmethod - def from_metadata(cls, codec_metadata: CodecMetadata) -> BytesCodec: - assert isinstance(codec_metadata, 
BytesCodecMetadata)
-        return cls(configuration=codec_metadata.configuration)
+    endian: Optional[Endian]
 
-    @classmethod
-    def get_metadata_class(cls) -> Type[BytesCodecMetadata]:
-        return BytesCodecMetadata
+    def __init__(self, *, endian: Union[Endian, str, None] = default_system_endian) -> None:
+        endian_parsed = None if endian is None else parse_enum(endian, Endian)
 
-    def validate(self, array_metadata: ArrayMetadata) -> None:
-        assert (
-            not array_metadata.data_type.has_endianness or self.configuration.endian is not None
-        ), "The `endian` configuration needs to be specified for multi-byte data types."
+        object.__setattr__(self, "endian", endian_parsed)
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, JSON]) -> Self:
+        _, configuration_parsed = parse_named_configuration(
+            data, "bytes", require_configuration=False
+        )
+        configuration_parsed = configuration_parsed or {}
+        return cls(**configuration_parsed)  # type: ignore[arg-type]
+
+    def to_dict(self) -> Dict[str, JSON]:
+        if self.endian is None:
+            return {"name": "bytes"}
+        else:
+            return {"name": "bytes", "configuration": {"endian": self.endian}}
+
+    def evolve(self, array_spec: ArraySpec) -> Self:
+        if array_spec.dtype.itemsize == 0:
+            if self.endian is not None:
+                return replace(self, endian=None)
+        elif self.endian is None:
+            raise ValueError(
+                "The `endian` configuration needs to be specified for multi-byte data types."
+            )
+        return self
 
     def _get_byteorder(self, array: np.ndarray) -> Endian:
         if array.dtype.byteorder == "<":
-            return "little"
+            return Endian.little
         elif array.dtype.byteorder == ">":
-            return "big"
+            return Endian.big
         else:
-            import sys
-
-            return sys.byteorder
+            return default_system_endian
 
     async def decode(
         self,
@@ -68,13 +75,13 @@ async def decode(
         _runtime_configuration: RuntimeConfiguration,
     ) -> np.ndarray:
         if chunk_spec.dtype.itemsize > 0:
-            if self.configuration.endian == "little":
+            if self.endian == Endian.little:
                 prefix = "<"
             else:
                 prefix = ">"
-            dtype = np.dtype(f"{prefix}{chunk_spec.data_type.to_numpy_shortname()}")
+            dtype = np.dtype(f"{prefix}{chunk_spec.dtype.str[1:]}")
         else:
-            dtype = np.dtype(f"|{chunk_spec.data_type.to_numpy_shortname()}")
+            dtype = np.dtype(f"|{chunk_spec.dtype.str[1:]}")
 
         chunk_array = np.frombuffer(chunk_bytes, dtype)
 
         # ensure correct chunk shape
@@ -92,8 +99,8 @@ async def encode(
     ) -> Optional[BytesLike]:
         if chunk_array.dtype.itemsize > 1:
             byteorder = self._get_byteorder(chunk_array)
-            if self.configuration.endian != byteorder:
-                new_dtype = chunk_array.dtype.newbyteorder(self.configuration.endian)
+            if self.endian is not None and self.endian != byteorder:
+                new_dtype = chunk_array.dtype.newbyteorder(self.endian.name)
                 chunk_array = chunk_array.astype(new_dtype)
         return chunk_array.tobytes()
diff --git a/src/zarr/v3/codecs/crc32c_.py b/src/zarr/v3/codecs/crc32c_.py
index 4f8b9c7b0b..555bdeae3b 100644
--- a/src/zarr/v3/codecs/crc32c_.py
+++ b/src/zarr/v3/codecs/crc32c_.py
@@ -1,41 +1,34 @@
 from __future__ import annotations
+from dataclasses import dataclass
 
-from typing import (
-    TYPE_CHECKING,
-    Literal,
-    Optional,
-    Type,
-)
+from typing import TYPE_CHECKING
 
 import numpy as np
-from attr import frozen, field
+
 from crc32c import crc32c
 
 from zarr.v3.abc.codec import BytesBytesCodec
 from zarr.v3.codecs.registry import register_codec
-from zarr.v3.common import BytesLike
+from zarr.v3.common import parse_named_configuration
 
 if TYPE_CHECKING:
-    from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration
-
+    from typing import Dict, Optional
+ from typing_extensions import Self + from zarr.v3.common import JSON, BytesLike, ArraySpec + from zarr.v3.config import RuntimeConfiguration -@frozen -class Crc32cCodecMetadata: - name: Literal["crc32c"] = field(default="crc32c", init=False) - -@frozen +@dataclass(frozen=True) class Crc32cCodec(BytesBytesCodec): is_fixed_size = True @classmethod - def from_metadata(cls, codec_metadata: CodecMetadata) -> Crc32cCodec: - assert isinstance(codec_metadata, Crc32cCodecMetadata) + def from_dict(cls, data: Dict[str, JSON]) -> Self: + parse_named_configuration(data, "crc32c", require_configuration=False) return cls() - @classmethod - def get_metadata_class(cls) -> Type[Crc32cCodecMetadata]: - return Crc32cCodecMetadata + def to_dict(self) -> Dict[str, JSON]: + return {"name": "crc32c"} async def decode( self, @@ -46,7 +39,13 @@ async def decode( crc32_bytes = chunk_bytes[-4:] inner_bytes = chunk_bytes[:-4] - assert np.uint32(crc32c(inner_bytes)).tobytes() == bytes(crc32_bytes) + computed_checksum = np.uint32(crc32c(inner_bytes)).tobytes() + stored_checksum = bytes(crc32_bytes) + if computed_checksum != stored_checksum: + raise ValueError( + "Stored and computed checksum do not match. " + + f"Stored: {stored_checksum!r}. Computed: {computed_checksum!r}." + ) return inner_bytes async def encode( diff --git a/src/zarr/v3/codecs/gzip.py b/src/zarr/v3/codecs/gzip.py index a3fafc1382..478eee90c1 100644 --- a/src/zarr/v3/codecs/gzip.py +++ b/src/zarr/v3/codecs/gzip.py @@ -1,48 +1,48 @@ from __future__ import annotations +from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - Literal, - Optional, - Type, -) +from typing import TYPE_CHECKING -from attr import frozen, field from numcodecs.gzip import GZip - from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import BytesLike, to_thread +from zarr.v3.common import parse_named_configuration, to_thread if TYPE_CHECKING: - from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration - - -@frozen -class GzipCodecConfigurationMetadata: - level: int = 5 + from typing import Optional, Dict + from typing_extensions import Self + from zarr.v3.common import JSON, ArraySpec, BytesLike + from zarr.v3.config import RuntimeConfiguration -@frozen -class GzipCodecMetadata: - configuration: GzipCodecConfigurationMetadata - name: Literal["gzip"] = field(default="gzip", init=False) +def parse_gzip_level(data: JSON) -> int: + if not isinstance(data, (int)): + raise TypeError(f"Expected int, got {type(data)}") + if data not in range(0, 10): + raise ValueError( + f"Expected an integer from the inclusive range (0, 9). Got {data} instead." 
+ ) + return data -@frozen +@dataclass(frozen=True) class GzipCodec(BytesBytesCodec): - configuration: GzipCodecConfigurationMetadata - is_fixed_size = True + is_fixed_size = False - @classmethod - def from_metadata(cls, codec_metadata: CodecMetadata) -> GzipCodec: - assert isinstance(codec_metadata, GzipCodecMetadata) + level: int = 5 - return cls(configuration=codec_metadata.configuration) + def __init__(self, *, level: int = 5) -> None: + level_parsed = parse_gzip_level(level) + + object.__setattr__(self, "level", level_parsed) @classmethod - def get_metadata_class(cls) -> Type[GzipCodecMetadata]: - return GzipCodecMetadata + def from_dict(cls, data: Dict[str, JSON]) -> Self: + _, configuration_parsed = parse_named_configuration(data, "gzip") + return cls(**configuration_parsed) # type: ignore[arg-type] + + def to_dict(self) -> Dict[str, JSON]: + return {"name": "gzip", "configuration": {"level": self.level}} async def decode( self, @@ -50,7 +50,7 @@ async def decode( _chunk_spec: ArraySpec, _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: - return await to_thread(GZip(self.configuration.level).decode, chunk_bytes) + return await to_thread(GZip(self.level).decode, chunk_bytes) async def encode( self, @@ -58,7 +58,7 @@ async def encode( _chunk_spec: ArraySpec, _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: - return await to_thread(GZip(self.configuration.level).encode, chunk_bytes) + return await to_thread(GZip(self.level).encode, chunk_bytes) def compute_encoded_size( self, diff --git a/src/zarr/v3/codecs/pipeline.py b/src/zarr/v3/codecs/pipeline.py new file mode 100644 index 0000000000..7bb872eb79 --- /dev/null +++ b/src/zarr/v3/codecs/pipeline.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Iterable +import numpy as np +from dataclasses import dataclass +from warnings import warn + +from zarr.v3.abc.codec import ( + ArrayArrayCodec, + ArrayBytesCodec, + ArrayBytesCodecPartialDecodeMixin, + ArrayBytesCodecPartialEncodeMixin, + BytesBytesCodec, + Codec, +) +from zarr.v3.abc.metadata import Metadata +from zarr.v3.codecs.registry import get_codec_class +from zarr.v3.common import parse_named_configuration + +if TYPE_CHECKING: + from typing import Iterator, List, Optional, Tuple, Union + from zarr.v3.store import StorePath + from zarr.v3.metadata import ArrayMetadata + from zarr.v3.config import RuntimeConfiguration + from zarr.v3.common import JSON, ArraySpec, BytesLike, SliceSelection + + +@dataclass(frozen=True) +class CodecPipeline(Metadata): + array_array_codecs: Tuple[ArrayArrayCodec, ...] + array_bytes_codec: ArrayBytesCodec + bytes_bytes_codecs: Tuple[BytesBytesCodec, ...] 
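+    # The codec groups are stored in encode order: array-to-array codecs run
+    # first, then the single array-to-bytes codec, then bytes-to-bytes codecs;
+    # decode() traverses the same sequence in reverse.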
+
+    @classmethod
+    def from_dict(cls, data: Iterable[Union[JSON, Codec]]) -> CodecPipeline:
+        out: List[Codec] = []
+        if not isinstance(data, Iterable):
+            raise TypeError(f"Expected iterable, got {type(data)}")
+
+        for c in data:
+            if isinstance(c, Codec):
+                out.append(c)
+            else:
+                name_parsed, _ = parse_named_configuration(c, require_configuration=False)
+                out.append(get_codec_class(name_parsed).from_dict(c))  # type: ignore[arg-type]
+        return CodecPipeline.from_list(out)
+
+    def to_dict(self) -> JSON:
+        return [c.to_dict() for c in self]
+
+    def evolve(self, array_spec: ArraySpec) -> CodecPipeline:
+        return CodecPipeline.from_list([c.evolve(array_spec) for c in self])
+
+    @classmethod
+    def from_list(cls, codecs: List[Codec]) -> CodecPipeline:
+        from zarr.v3.codecs.sharding import ShardingCodec
+
+        if not any(isinstance(codec, ArrayBytesCodec) for codec in codecs):
+            raise ValueError("Exactly one array-to-bytes codec is required.")
+
+        prev_codec: Optional[Codec] = None
+        for codec in codecs:
+            if prev_codec is not None:
+                if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, ArrayBytesCodec):
+                    raise ValueError(
+                        f"ArrayBytesCodec '{type(codec)}' cannot follow after "
+                        + f"ArrayBytesCodec '{type(prev_codec)}' because exactly "
+                        + "1 ArrayBytesCodec is allowed."
+                    )
+                if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, BytesBytesCodec):
+                    raise ValueError(
+                        f"ArrayBytesCodec '{type(codec)}' cannot follow after "
+                        + f"BytesBytesCodec '{type(prev_codec)}'."
+                    )
+                if isinstance(codec, ArrayArrayCodec) and isinstance(prev_codec, ArrayBytesCodec):
+                    raise ValueError(
+                        f"ArrayArrayCodec '{type(codec)}' cannot follow after "
+                        + f"ArrayBytesCodec '{type(prev_codec)}'."
+                    )
+                if isinstance(codec, ArrayArrayCodec) and isinstance(prev_codec, BytesBytesCodec):
+                    raise ValueError(
+                        f"ArrayArrayCodec '{type(codec)}' cannot follow after "
+                        + f"BytesBytesCodec '{type(prev_codec)}'."
+                    )
+            prev_codec = codec
+
+        if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1:
+            warn(
+                "Combining a `sharding_indexed` codec with other codecs disables partial "
+                + "reads and writes, which may lead to inefficient performance."
+ ) + + return CodecPipeline( + array_array_codecs=tuple( + codec for codec in codecs if isinstance(codec, ArrayArrayCodec) + ), + array_bytes_codec=[codec for codec in codecs if isinstance(codec, ArrayBytesCodec)][0], + bytes_bytes_codecs=tuple( + codec for codec in codecs if isinstance(codec, BytesBytesCodec) + ), + ) + + @property + def supports_partial_decode(self) -> bool: + return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance( + self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin + ) + + @property + def supports_partial_encode(self) -> bool: + return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance( + self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin + ) + + def __iter__(self) -> Iterator[Codec]: + for aa_codec in self.array_array_codecs: + yield aa_codec + + yield self.array_bytes_codec + + for bb_codec in self.bytes_bytes_codecs: + yield bb_codec + + def validate(self, array_metadata: ArrayMetadata) -> None: + for codec in self: + codec.validate(array_metadata) + + def _codecs_with_resolved_metadata( + self, array_spec: ArraySpec + ) -> Tuple[ + List[Tuple[ArrayArrayCodec, ArraySpec]], + Tuple[ArrayBytesCodec, ArraySpec], + List[Tuple[BytesBytesCodec, ArraySpec]], + ]: + aa_codecs_with_spec: List[Tuple[ArrayArrayCodec, ArraySpec]] = [] + for aa_codec in self.array_array_codecs: + aa_codecs_with_spec.append((aa_codec, array_spec)) + array_spec = aa_codec.resolve_metadata(array_spec) + + ab_codec_with_spec = (self.array_bytes_codec, array_spec) + array_spec = self.array_bytes_codec.resolve_metadata(array_spec) + + bb_codecs_with_spec: List[Tuple[BytesBytesCodec, ArraySpec]] = [] + for bb_codec in self.bytes_bytes_codecs: + bb_codecs_with_spec.append((bb_codec, array_spec)) + array_spec = bb_codec.resolve_metadata(array_spec) + + return (aa_codecs_with_spec, ab_codec_with_spec, bb_codecs_with_spec) + + async def decode( + self, + chunk_bytes: BytesLike, + array_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> np.ndarray: + ( + aa_codecs_with_spec, + ab_codec_with_spec, + bb_codecs_with_spec, + ) = self._codecs_with_resolved_metadata(array_spec) + + for bb_codec, array_spec in bb_codecs_with_spec[::-1]: + chunk_bytes = await bb_codec.decode(chunk_bytes, array_spec, runtime_configuration) + + ab_codec, array_spec = ab_codec_with_spec + chunk_array = await ab_codec.decode(chunk_bytes, array_spec, runtime_configuration) + + for aa_codec, array_spec in aa_codecs_with_spec[::-1]: + chunk_array = await aa_codec.decode(chunk_array, array_spec, runtime_configuration) + + return chunk_array + + async def decode_partial( + self, + store_path: StorePath, + selection: SliceSelection, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> Optional[np.ndarray]: + assert self.supports_partial_decode + assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin) + return await self.array_bytes_codec.decode_partial( + store_path, selection, chunk_spec, runtime_configuration + ) + + async def encode( + self, + chunk_array: np.ndarray, + array_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> Optional[BytesLike]: + ( + aa_codecs_with_spec, + ab_codec_with_spec, + bb_codecs_with_spec, + ) = self._codecs_with_resolved_metadata(array_spec) + + for aa_codec, array_spec in aa_codecs_with_spec: + chunk_array_maybe = await aa_codec.encode( + chunk_array, array_spec, runtime_configuration + ) + if chunk_array_maybe is None: + return None + 
chunk_array = chunk_array_maybe + + ab_codec, array_spec = ab_codec_with_spec + chunk_bytes_maybe = await ab_codec.encode(chunk_array, array_spec, runtime_configuration) + if chunk_bytes_maybe is None: + return None + chunk_bytes = chunk_bytes_maybe + + for bb_codec, array_spec in bb_codecs_with_spec: + chunk_bytes_maybe = await bb_codec.encode( + chunk_bytes, array_spec, runtime_configuration + ) + if chunk_bytes_maybe is None: + return None + chunk_bytes = chunk_bytes_maybe + + return chunk_bytes + + async def encode_partial( + self, + store_path: StorePath, + chunk_array: np.ndarray, + selection: SliceSelection, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> None: + assert self.supports_partial_encode + assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin) + await self.array_bytes_codec.encode_partial( + store_path, chunk_array, selection, chunk_spec, runtime_configuration + ) + + def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: + for codec in self: + byte_length = codec.compute_encoded_size(byte_length, array_spec) + array_spec = codec.resolve_metadata(array_spec) + return byte_length diff --git a/src/zarr/v3/codecs/registry.py b/src/zarr/v3/codecs/registry.py index bdd9a5765d..4cf2736685 100644 --- a/src/zarr/v3/codecs/registry.py +++ b/src/zarr/v3/codecs/registry.py @@ -1,18 +1,14 @@ from __future__ import annotations +from typing import TYPE_CHECKING -from typing import Dict, NamedTuple, Type -from importlib.metadata import EntryPoint, entry_points as get_entry_points - -from zarr.v3.abc.codec import Codec -from zarr.v3.metadata import CodecMetadata - +if TYPE_CHECKING: + from typing import Dict, Type + from zarr.v3.abc.codec import Codec -class CodecRegistryItem(NamedTuple): - codec_cls: Type[Codec] - codec_metadata_cls: Type[CodecMetadata] +from importlib.metadata import EntryPoint, entry_points as get_entry_points -__codec_registry: Dict[str, CodecRegistryItem] = {} +__codec_registry: Dict[str, Type[Codec]] = {} __lazy_load_codecs: Dict[str, EntryPoint] = {} @@ -29,10 +25,10 @@ def _collect_entrypoints() -> None: def register_codec(key: str, codec_cls: Type[Codec]) -> None: - __codec_registry[key] = CodecRegistryItem(codec_cls, codec_cls.get_metadata_class()) + __codec_registry[key] = codec_cls -def _get_codec_item(key: str) -> CodecRegistryItem: +def get_codec_class(key: str) -> Type[Codec]: item = __codec_registry.get(key) if item is None: if key in __lazy_load_codecs: @@ -45,17 +41,4 @@ def _get_codec_item(key: str) -> CodecRegistryItem: raise KeyError(key) -def get_codec_from_metadata(val: CodecMetadata) -> Codec: - key = val.name - return _get_codec_item(key).codec_cls.from_metadata(val) - - -def get_codec_metadata_class(key: str) -> Type[CodecMetadata]: - return _get_codec_item(key).codec_metadata_cls - - -def get_codec_class(key: str) -> Type[Codec]: - return _get_codec_item(key).codec_cls - - _collect_entrypoints() diff --git a/src/zarr/v3/codecs/sharding.py b/src/zarr/v3/codecs/sharding.py index 26020f160f..0385154c0f 100644 --- a/src/zarr/v3/codecs/sharding.py +++ b/src/zarr/v3/codecs/sharding.py @@ -1,37 +1,31 @@ from __future__ import annotations -from functools import cached_property, lru_cache - -from typing import ( - Awaitable, - Callable, - Iterator, - List, - Literal, - Mapping, - NamedTuple, - Optional, - Set, - Tuple, - Type, -) -from attr import field, frozen +from enum import Enum +from typing import TYPE_CHECKING, Iterable, Mapping, NamedTuple, Union +from dataclasses 
import dataclass, replace +from functools import lru_cache + import numpy as np from zarr.v3.abc.codec import ( + Codec, ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, ) - -from zarr.v3.codecs import CodecPipeline -from zarr.v3.codecs.registry import get_codec_from_metadata, register_codec +from zarr.v3.codecs.bytes import BytesCodec +from zarr.v3.codecs.crc32c_ import Crc32cCodec +from zarr.v3.codecs.pipeline import CodecPipeline +from zarr.v3.codecs.registry import register_codec from zarr.v3.common import ( - BytesLike, - ChunkCoords, - SliceSelection, + ArraySpec, + ChunkCoordsLike, concurrent_map, + parse_enum, + parse_named_configuration, + parse_shapelike, product, ) +from zarr.v3.chunk_grids import RegularChunkGrid from zarr.v3.indexing import ( BasicIndexer, c_order_iter, @@ -40,31 +34,33 @@ ) from zarr.v3.metadata import ( ArrayMetadata, - ArraySpec, - DataType, - CodecMetadata, - RegularChunkGridMetadata, - ShardingCodecIndexLocation, - RuntimeConfiguration, runtime_configuration as make_runtime_configuration, + parse_codecs, ) -from zarr.v3.store import StorePath + +if TYPE_CHECKING: + from typing import Awaitable, Callable, Dict, Iterator, List, Optional, Set, Tuple + from typing_extensions import Self + + from zarr.v3.store import StorePath + from zarr.v3.common import ( + JSON, + ChunkCoords, + BytesLike, + SliceSelection, + ) + from zarr.v3.config import RuntimeConfiguration MAX_UINT_64 = 2**64 - 1 -@frozen -class ShardingCodecConfigurationMetadata: - chunk_shape: ChunkCoords - codecs: Tuple[CodecMetadata, ...] - index_codecs: Tuple[CodecMetadata, ...] - index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end +class ShardingCodecIndexLocation(Enum): + start = "start" + end = "end" -@frozen -class ShardingCodecMetadata: - configuration: ShardingCodecConfigurationMetadata - name: Literal["sharding_indexed"] = field(default="sharding_indexed", init=False) +def parse_index_location(data: JSON) -> ShardingCodecIndexLocation: + return parse_enum(data, ShardingCodecIndexLocation) class _ShardIndex(NamedTuple): @@ -141,7 +137,7 @@ async def from_bytes( shard_index_size = codec._shard_index_size(chunks_per_shard) obj = cls() obj.buf = memoryview(buf) - if codec.configuration.index_location == ShardingCodecIndexLocation.start: + if codec.index_location == ShardingCodecIndexLocation.start: shard_index_bytes = obj.buf[:shard_index_size] else: shard_index_bytes = obj.buf[-shard_index_size:] @@ -222,42 +218,85 @@ async def finalize( return out_buf -@frozen +@dataclass(frozen=True) class ShardingCodec( ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin ): - configuration: ShardingCodecConfigurationMetadata + chunk_shape: ChunkCoords + codecs: CodecPipeline + index_codecs: CodecPipeline + index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end - @classmethod - def from_metadata( - cls, - codec_metadata: CodecMetadata, - ) -> ShardingCodec: - assert isinstance(codec_metadata, ShardingCodecMetadata) - return cls(configuration=codec_metadata.configuration) + def __init__( + self, + *, + chunk_shape: ChunkCoordsLike, + codecs: Optional[Iterable[Union[Codec, JSON]]] = None, + index_codecs: Optional[Iterable[Union[Codec, JSON]]] = None, + index_location: Optional[ShardingCodecIndexLocation] = ShardingCodecIndexLocation.end, + ) -> None: + chunk_shape_parsed = parse_shapelike(chunk_shape) + codecs_parsed = ( + parse_codecs(codecs) if codecs is not None else 
CodecPipeline.from_list([BytesCodec()]) + ) + index_codecs_parsed = ( + parse_codecs(index_codecs) + if index_codecs is not None + else CodecPipeline.from_list([BytesCodec(), Crc32cCodec()]) + ) + index_location_parsed = ( + parse_index_location(index_location) + if index_location is not None + else ShardingCodecIndexLocation.end + ) + + object.__setattr__(self, "chunk_shape", chunk_shape_parsed) + object.__setattr__(self, "codecs", codecs_parsed) + object.__setattr__(self, "index_codecs", index_codecs_parsed) + object.__setattr__(self, "index_location", index_location_parsed) @classmethod - def get_metadata_class(cls) -> Type[ShardingCodecMetadata]: - return ShardingCodecMetadata + def from_dict(cls, data: Dict[str, JSON]) -> Self: + _, configuration_parsed = parse_named_configuration(data, "sharding_indexed") + return cls(**configuration_parsed) # type: ignore[arg-type] + + def to_dict(self) -> Dict[str, JSON]: + return { + "name": "sharding_indexed", + "configuration": { + "chunk_shape": list(self.chunk_shape), + "codecs": self.codecs.to_dict(), + "index_codecs": self.index_codecs.to_dict(), + "index_location": self.index_location, + }, + } + + def evolve(self, array_spec: ArraySpec) -> Self: + shard_spec = self._get_chunk_spec(array_spec) + evolved_codecs = self.codecs.evolve(shard_spec) + if evolved_codecs != self.codecs: + return replace(self, codecs=evolved_codecs) + return self def validate(self, array_metadata: ArrayMetadata) -> None: - assert len(self.configuration.chunk_shape) == array_metadata.ndim, ( - "The shard's `chunk_shape` and array's `shape` need to have the " - + "same number of dimensions." - ) - assert isinstance( - array_metadata.chunk_grid, RegularChunkGridMetadata - ), "Sharding is only compatible with regular chunk grids." - assert all( + if len(self.chunk_shape) != array_metadata.ndim: + raise ValueError( + "The shard's `chunk_shape` and array's `shape` need to have the " + + "same number of dimensions." + ) + if not isinstance(array_metadata.chunk_grid, RegularChunkGrid): + raise ValueError("Sharding is only compatible with regular chunk grids.") + if not all( s % c == 0 for s, c in zip( - array_metadata.chunk_grid.configuration.chunk_shape, - self.configuration.chunk_shape, + array_metadata.chunk_grid.chunk_shape, + self.chunk_shape, + ) + ): + raise ValueError( + "The array's `chunk_shape` needs to be divisible by the " + + "shard's inner `chunk_shape`." ) - ), ( - "The array's `chunk_shape` needs to be divisible by the " - + "shard's inner `chunk_shape`." 
- ) async def decode( self, @@ -267,7 +306,7 @@ async def decode( ) -> np.ndarray: # print("decode") shard_shape = shard_spec.shape - chunk_shape = self.configuration.chunk_shape + chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) indexer = BasicIndexer( @@ -316,7 +355,7 @@ async def decode_partial( runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: shard_shape = shard_spec.shape - chunk_shape = self.configuration.chunk_shape + chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) indexer = BasicIndexer( @@ -389,9 +428,7 @@ async def _read_chunk( chunk_spec = self._get_chunk_spec(shard_spec) chunk_bytes = shard_dict.get(chunk_coords, None) if chunk_bytes is not None: - chunk_array = await self._codec_pipeline.decode( - chunk_bytes, chunk_spec, runtime_configuration - ) + chunk_array = await self.codecs.decode(chunk_bytes, chunk_spec, runtime_configuration) tmp = chunk_array[chunk_selection] out[out_selection] = tmp else: @@ -404,7 +441,7 @@ async def encode( runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: shard_shape = shard_spec.shape - chunk_shape = self.configuration.chunk_shape + chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) indexer = list( @@ -435,9 +472,7 @@ async def _write_chunk( chunk_spec = self._get_chunk_spec(shard_spec) return ( chunk_coords, - await self._codec_pipeline.encode( - chunk_array, chunk_spec, runtime_configuration - ), + await self.codecs.encode(chunk_array, chunk_spec, runtime_configuration), ) return (chunk_coords, None) @@ -458,9 +493,7 @@ async def _write_chunk( if chunk_bytes is not None: shard_builder.append(chunk_coords, chunk_bytes) - return await shard_builder.finalize( - self.configuration.index_location, self._encode_shard_index - ) + return await shard_builder.finalize(self.index_location, self._encode_shard_index) async def encode_partial( self, @@ -472,7 +505,7 @@ async def encode_partial( ) -> None: # print("encode_partial") shard_shape = shard_spec.shape - chunk_shape = self.configuration.chunk_shape + chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) chunk_spec = self._get_chunk_spec(shard_spec) @@ -496,7 +529,7 @@ async def _write_chunk( out_selection: SliceSelection, ) -> Tuple[ChunkCoords, Optional[BytesLike]]: chunk_array = None - if is_total_slice(chunk_selection, self.configuration.chunk_shape): + if is_total_slice(chunk_selection, self.chunk_shape): chunk_array = shard_array[out_selection] else: # handling writing partial chunks @@ -506,24 +539,20 @@ async def _write_chunk( # merge new value if chunk_bytes is None: chunk_array = np.empty( - self.configuration.chunk_shape, + self.chunk_shape, dtype=shard_spec.dtype, ) chunk_array.fill(shard_spec.fill_value) else: chunk_array = ( - await self._codec_pipeline.decode( - chunk_bytes, chunk_spec, runtime_configuration - ) + await self.codecs.decode(chunk_bytes, chunk_spec, runtime_configuration) ).copy() # make a writable copy chunk_array[chunk_selection] = shard_array[out_selection] if not np.array_equiv(chunk_array, shard_spec.fill_value): return ( chunk_coords, - await self._codec_pipeline.encode( - chunk_array, chunk_spec, runtime_configuration - ), + await self.codecs.encode(chunk_array, chunk_spec, runtime_configuration), ) else: return (chunk_coords, None) @@ -559,7 +588,7 @@ async def _write_chunk( else: await store_path.set( await shard_builder.finalize( - self.configuration.index_location, 
+ self.index_location, self._encode_shard_index, ) ) @@ -575,7 +604,7 @@ async def _decode_shard_index( self, index_bytes: BytesLike, chunks_per_shard: ChunkCoords ) -> _ShardIndex: return _ShardIndex( - await self._index_codec_pipeline.decode( + await self.index_codecs.decode( index_bytes, self._get_index_chunk_spec(chunks_per_shard), make_runtime_configuration("C"), @@ -583,7 +612,7 @@ async def _decode_shard_index( ) async def _encode_shard_index(self, index: _ShardIndex) -> BytesLike: - index_bytes = await self._index_codec_pipeline.encode( + index_bytes = await self.index_codecs.encode( index.offsets_and_lengths, self._get_index_chunk_spec(index.chunks_per_shard), make_runtime_configuration("C"), @@ -592,7 +621,7 @@ async def _encode_shard_index(self, index: _ShardIndex) -> BytesLike: return index_bytes def _shard_index_size(self, chunks_per_shard: ChunkCoords) -> int: - return self._index_codec_pipeline.compute_encoded_size( + return self.index_codecs.compute_encoded_size( 16 * product(chunks_per_shard), self._get_index_chunk_spec(chunks_per_shard) ) @@ -600,15 +629,15 @@ def _shard_index_size(self, chunks_per_shard: ChunkCoords) -> int: def _get_index_chunk_spec(self, chunks_per_shard: ChunkCoords) -> ArraySpec: return ArraySpec( shape=chunks_per_shard + (2,), - data_type=DataType.uint64, + dtype=np.dtype(" ArraySpec: return ArraySpec( - shape=self.configuration.chunk_shape, - data_type=shard_spec.data_type, + shape=self.chunk_shape, + dtype=shard_spec.dtype, fill_value=shard_spec.fill_value, ) @@ -618,25 +647,15 @@ def _get_chunks_per_shard(self, shard_spec: ArraySpec) -> ChunkCoords: s // c for s, c in zip( shard_spec.shape, - self.configuration.chunk_shape, + self.chunk_shape, ) ) - @cached_property - def _index_codec_pipeline(self) -> CodecPipeline: - return CodecPipeline.create( - [get_codec_from_metadata(c) for c in self.configuration.index_codecs] - ) - - @cached_property - def _codec_pipeline(self) -> CodecPipeline: - return CodecPipeline.create([get_codec_from_metadata(c) for c in self.configuration.codecs]) - async def _load_shard_index_maybe( self, store_path: StorePath, chunks_per_shard: ChunkCoords ) -> Optional[_ShardIndex]: shard_index_size = self._shard_index_size(chunks_per_shard) - if self.configuration.index_location == ShardingCodecIndexLocation.start: + if self.index_location == ShardingCodecIndexLocation.start: index_bytes = await store_path.get((0, shard_index_size)) else: index_bytes = await store_path.get((-shard_index_size, None)) diff --git a/src/zarr/v3/codecs/transpose.py b/src/zarr/v3/codecs/transpose.py index de6eb0a480..f214d1e7f1 100644 --- a/src/zarr/v3/codecs/transpose.py +++ b/src/zarr/v3/codecs/transpose.py @@ -1,80 +1,75 @@ from __future__ import annotations +from typing import TYPE_CHECKING, Dict, Iterable -from typing import ( - TYPE_CHECKING, - Literal, - Optional, - Tuple, - Type, -) +from dataclasses import dataclass, replace + +from zarr.v3.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration + +if TYPE_CHECKING: + from zarr.v3.config import RuntimeConfiguration + from typing import TYPE_CHECKING, Optional, Tuple + from typing_extensions import Self import numpy as np -from attr import evolve, frozen, field from zarr.v3.abc.codec import ArrayArrayCodec from zarr.v3.codecs.registry import register_codec -if TYPE_CHECKING: - from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration +def parse_transpose_order(data: JSON) -> Tuple[int]: + if not isinstance(data, Iterable): + raise TypeError(f"Expected an 
iterable. Got {data} instead.") + if not all(isinstance(a, int) for a in data): + raise TypeError(f"Expected an iterable of integers. Got {data} instead.") + return tuple(data) # type: ignore[return-value] -@frozen -class TransposeCodecConfigurationMetadata: - order: Tuple[int, ...] +@dataclass(frozen=True) +class TransposeCodec(ArrayArrayCodec): + is_fixed_size = True -@frozen -class TransposeCodecMetadata: - configuration: TransposeCodecConfigurationMetadata - name: Literal["transpose"] = field(default="transpose", init=False) + order: Tuple[int, ...] + def __init__(self, *, order: ChunkCoordsLike) -> None: + order_parsed = parse_transpose_order(order) # type: ignore[arg-type] -@frozen -class TransposeCodec(ArrayArrayCodec): - order: Tuple[int, ...] - is_fixed_size = True + object.__setattr__(self, "order", order_parsed) @classmethod - def from_metadata(cls, codec_metadata: CodecMetadata) -> TransposeCodec: - assert isinstance(codec_metadata, TransposeCodecMetadata) - return cls(order=codec_metadata.configuration.order) + def from_dict(cls, data: Dict[str, JSON]) -> Self: + _, configuration_parsed = parse_named_configuration(data, "transpose") + return cls(**configuration_parsed) # type: ignore[arg-type] - def evolve(self, *, ndim: int, **_kwargs) -> TransposeCodec: - # Compatibility with older version of ZEP1 - if self.order == "F": # type: ignore - order = tuple(ndim - x - 1 for x in range(ndim)) + def to_dict(self) -> Dict[str, JSON]: + return {"name": "transpose", "configuration": {"order": list(self.order)}} - elif self.order == "C": # type: ignore - order = tuple(range(ndim)) - - else: - assert len(self.order) == ndim, ( + def evolve(self, array_spec: ArraySpec) -> Self: + if len(self.order) != array_spec.ndim: + raise ValueError( "The `order` tuple needs have as many entries as " - + f"there are dimensions in the array. Got: {self.order}" + + f"there are dimensions in the array. Got {self.order}." ) - assert len(self.order) == len(set(self.order)), ( - "There must not be duplicates in the `order` tuple. " + f"Got: {self.order}" + if len(self.order) != len(set(self.order)): + raise ValueError( + f"There must not be duplicates in the `order` tuple. Got {self.order}." ) - assert all(0 <= x < ndim for x in self.order), ( + if not all(0 <= x < array_spec.ndim for x in self.order): + raise ValueError( "All entries in the `order` tuple must be between 0 and " - + f"the number of dimensions in the array. Got: {self.order}" + + f"the number of dimensions in the array. Got {self.order}." 
) - order = tuple(self.order) + order = tuple(self.order) if order != self.order: - return evolve(self, order=order) + return replace(self, order=order) return self - @classmethod - def get_metadata_class(cls) -> Type[TransposeCodecMetadata]: - return TransposeCodecMetadata - def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: - from zarr.v3.metadata import ArraySpec + from zarr.v3.common import ArraySpec return ArraySpec( shape=tuple(chunk_spec.shape[self.order[i]] for i in range(chunk_spec.ndim)), - data_type=chunk_spec.data_type, + dtype=chunk_spec.dtype, fill_value=chunk_spec.fill_value, ) diff --git a/src/zarr/v3/codecs/zstd.py b/src/zarr/v3/codecs/zstd.py index 59ce1cf088..774bb8bdbb 100644 --- a/src/zarr/v3/codecs/zstd.py +++ b/src/zarr/v3/codecs/zstd.py @@ -1,53 +1,59 @@ from __future__ import annotations +from typing import TYPE_CHECKING +from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - Literal, - Optional, - Type, -) -from attr import frozen, field from zstandard import ZstdCompressor, ZstdDecompressor from zarr.v3.abc.codec import BytesBytesCodec from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import BytesLike, to_thread +from zarr.v3.common import parse_named_configuration, to_thread if TYPE_CHECKING: - from zarr.v3.metadata import ArraySpec, CodecMetadata, RuntimeConfiguration + from typing import Dict, Optional + from typing_extensions import Self + from zarr.v3.config import RuntimeConfiguration + from zarr.v3.common import BytesLike, JSON, ArraySpec -@frozen -class ZstdCodecConfigurationMetadata: - level: int = 0 - checksum: bool = False +def parse_zstd_level(data: JSON) -> int: + if isinstance(data, int): + if data >= 23: + raise ValueError(f"Value must be less than or equal to 22. Got {data} instead.") + return data + raise TypeError(f"Got value with type {type(data)}, but expected an int.") -@frozen -class ZstdCodecMetadata: - configuration: ZstdCodecConfigurationMetadata - name: Literal["zstd"] = field(default="zstd", init=False) +def parse_checksum(data: JSON) -> bool: + if isinstance(data, bool): + return data + raise TypeError(f"Expected bool. 
Got {type(data)}.") -@frozen +@dataclass(frozen=True) class ZstdCodec(BytesBytesCodec): - configuration: ZstdCodecConfigurationMetadata is_fixed_size = True - @classmethod - def from_metadata(cls, codec_metadata: CodecMetadata) -> ZstdCodec: - assert isinstance(codec_metadata, ZstdCodecMetadata) - return cls(configuration=codec_metadata.configuration) + level: int = 0 + checksum: bool = False + + def __init__(self, *, level: int = 0, checksum: bool = False) -> None: + level_parsed = parse_zstd_level(level) + checksum_parsed = parse_checksum(checksum) + + object.__setattr__(self, "level", level_parsed) + object.__setattr__(self, "checksum", checksum_parsed) @classmethod - def get_metadata_class(cls) -> Type[ZstdCodecMetadata]: - return ZstdCodecMetadata + def from_dict(cls, data: Dict[str, JSON]) -> Self: + _, configuration_parsed = parse_named_configuration(data, "zstd") + return cls(**configuration_parsed) # type: ignore[arg-type] + + def to_dict(self) -> Dict[str, JSON]: + return {"name": "zstd", "configuration": {"level": self.level, "checksum": self.checksum}} def _compress(self, data: bytes) -> bytes: - ctx = ZstdCompressor( - level=self.configuration.level, write_checksum=self.configuration.checksum - ) + ctx = ZstdCompressor(level=self.level, write_checksum=self.checksum) return ctx.compress(data) def _decompress(self, data: bytes) -> bytes: diff --git a/src/zarr/v3/common.py b/src/zarr/v3/common.py index e91356c4e2..1caf83a764 100644 --- a/src/zarr/v3/common.py +++ b/src/zarr/v3/common.py @@ -1,23 +1,15 @@ from __future__ import annotations - +from typing import TYPE_CHECKING, Union, Tuple, Iterable, Dict, List, TypeVar, overload import asyncio import contextvars +from dataclasses import dataclass +from enum import Enum import functools -from typing import ( - Any, - Awaitable, - Callable, - Dict, - List, - Literal, - Optional, - Tuple, - TypeVar, - Union, -) + +if TYPE_CHECKING: + from typing import Any, Awaitable, Callable, Iterator, Optional, Type import numpy as np -from cattr import Converter ZARR_JSON = "zarr.json" ZARRAY_JSON = ".zarray" @@ -26,83 +18,10 @@ BytesLike = Union[bytes, bytearray, memoryview] ChunkCoords = Tuple[int, ...] +ChunkCoordsLike = Iterable[int] SliceSelection = Tuple[slice, ...] 
Selection = Union[slice, SliceSelection] - - -def make_cattr(): - from zarr.v3.metadata import ( - ChunkKeyEncodingMetadata, - CodecMetadata, - DefaultChunkKeyEncodingMetadata, - V2ChunkKeyEncodingMetadata, - ) - from zarr.v3.codecs.registry import get_codec_metadata_class - - converter = Converter() - - def _structure_chunk_key_encoding_metadata(d: Dict[str, Any], _t) -> ChunkKeyEncodingMetadata: - if d["name"] == "default": - return converter.structure(d, DefaultChunkKeyEncodingMetadata) - if d["name"] == "v2": - return converter.structure(d, V2ChunkKeyEncodingMetadata) - raise KeyError - - converter.register_structure_hook( - ChunkKeyEncodingMetadata, _structure_chunk_key_encoding_metadata - ) - - def _structure_codec_metadata(d: Dict[str, Any], _t=None) -> CodecMetadata: - codec_metadata_cls = get_codec_metadata_class(d["name"]) - return converter.structure(d, codec_metadata_cls) - - converter.register_structure_hook(CodecMetadata, _structure_codec_metadata) - - converter.register_structure_hook_factory( - lambda t: str(t) == "ForwardRef('CodecMetadata')", - lambda t: _structure_codec_metadata, - ) - - def _structure_order(d: Any, _t=None) -> Union[Literal["C", "F"], Tuple[int, ...]]: - if d == "C": - return "C" - if d == "F": - return "F" - if isinstance(d, list): - return tuple(d) - raise KeyError - - converter.register_structure_hook_factory( - lambda t: str(t) == "typing.Union[typing.Literal['C', 'F'], typing.Tuple[int, ...]]", - lambda t: _structure_order, - ) - - # Needed for v2 fill_value - def _structure_fill_value(d: Any, _t=None) -> Union[None, int, float]: - if d is None: - return None - try: - return int(d) - except ValueError: - pass - try: - return float(d) - except ValueError: - pass - raise ValueError - - converter.register_structure_hook_factory( - lambda t: str(t) == "typing.Union[NoneType, int, float]", - lambda t: _structure_fill_value, - ) - - # Needed for v2 dtype - converter.register_structure_hook( - np.dtype, - lambda d, _: np.dtype(d), - ) - - return converter +JSON = Union[str, None, int, float, Enum, Dict[str, "JSON"], List["JSON"], Tuple["JSON", ...]] def product(tup: ChunkCoords) -> int: @@ -134,3 +53,111 @@ async def to_thread(func, /, *args, **kwargs): ctx = contextvars.copy_context() func_call = functools.partial(ctx.run, func, *args, **kwargs) return await loop.run_in_executor(None, func_call) + + +E = TypeVar("E", bound=Enum) + + +def enum_names(enum: Type[E]) -> Iterator[str]: + for item in enum: + yield item.name + + +def parse_enum(data: JSON, cls: Type[E]) -> E: + if isinstance(data, cls): + return data + if not isinstance(data, str): + raise TypeError(f"Expected str, got {type(data)}") + if data in enum_names(cls): + return cls(data) + raise ValueError(f"Value must be one of {repr(list(enum_names(cls)))}. Got {data} instead.") + + +@dataclass(frozen=True) +class ArraySpec: + shape: ChunkCoords + dtype: np.dtype + fill_value: Any + + def __init__(self, shape, dtype, fill_value): + shape_parsed = parse_shapelike(shape) + dtype_parsed = parse_dtype(dtype) + fill_value_parsed = parse_fill_value(fill_value) + + object.__setattr__(self, "shape", shape_parsed) + object.__setattr__(self, "dtype", dtype_parsed) + object.__setattr__(self, "fill_value", fill_value_parsed) + + @property + def ndim(self) -> int: + return len(self.shape) + + +def parse_name(data: JSON, expected: Optional[str] = None) -> str: + if isinstance(data, str): + if expected is None or data == expected: + return data + raise ValueError(f"Expected '{expected}'. 
Got {data} instead.") + else: + raise TypeError(f"Expected a string, got an instance of {type(data)}.") + + +def parse_configuration(data: JSON) -> dict: + if not isinstance(data, dict): + raise TypeError(f"Expected dict, got {type(data)}") + return data + + +@overload +def parse_named_configuration( + data: JSON, expected_name: Optional[str] = None +) -> Tuple[str, Dict[str, JSON]]: + ... + + +@overload +def parse_named_configuration( + data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True +) -> Tuple[str, Optional[Dict[str, JSON]]]: + ... + + +def parse_named_configuration( + data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True +) -> Tuple[str, Optional[JSON]]: + if not isinstance(data, dict): + raise TypeError(f"Expected dict, got {type(data)}") + if "name" not in data: + raise ValueError(f"Named configuration does not have a 'name' key. Got {data}.") + name_parsed = parse_name(data["name"], expected_name) + if "configuration" in data: + configuration_parsed = parse_configuration(data["configuration"]) + elif require_configuration: + raise ValueError(f"Named configuration does not have a 'configuration' key. Got {data}.") + else: + configuration_parsed = None + return name_parsed, configuration_parsed + + +def parse_shapelike(data: Any) -> Tuple[int, ...]: + if not isinstance(data, Iterable): + raise TypeError(f"Expected an iterable. Got {data} instead.") + data_tuple = tuple(data) + if len(data_tuple) == 0: + raise ValueError("Expected at least one element. Got 0.") + if not all(isinstance(v, int) for v in data_tuple): + msg = f"Expected an iterable of integers. Got {type(data)} instead." + raise TypeError(msg) + if not all(lambda v: v > 0 for v in data_tuple): + raise ValueError(f"All values must be greater than 0. Got {data}.") + return data_tuple + + +def parse_dtype(data: Any) -> np.dtype: + # todo: real validation + return np.dtype(data) + + +def parse_fill_value(data: Any) -> Any: + # todo: real validation + return data diff --git a/src/zarr/v3/config.py b/src/zarr/v3/config.py index 28df25899a..98a25994c4 100644 --- a/src/zarr/v3/config.py +++ b/src/zarr/v3/config.py @@ -1,19 +1,53 @@ from __future__ import annotations from asyncio import AbstractEventLoop -from typing import Literal, Optional -from attr import frozen +from dataclasses import dataclass +from typing import Any, Literal, Optional -@frozen +@dataclass(frozen=True) class SyncConfiguration: concurrency: Optional[int] = None asyncio_loop: Optional[AbstractEventLoop] = None -@frozen +def parse_indexing_order(data: Any) -> Literal["C", "F"]: + if data in ("C", "F"): + return data + msg = f"Expected one of ('C', 'F'), got {data} instead." + raise ValueError(msg) + + +# todo: handle negative values? 
+def parse_concurrency(data: Any) -> int | None: + if data is None or isinstance(data, int): + return data + raise TypeError(f"Expected int or None, got {type(data)}") + + +def parse_asyncio_loop(data: Any) -> AbstractEventLoop | None: + if data is None or isinstance(data, AbstractEventLoop): + return data + raise TypeError(f"Expected AbstractEventLoop or None, got {type(data)}") + + +@dataclass(frozen=True) class RuntimeConfiguration: order: Literal["C", "F"] = "C" - # TODO: remove these in favor of the SyncConfiguration object concurrency: Optional[int] = None asyncio_loop: Optional[AbstractEventLoop] = None + + def __init__( + self, + order: Literal["C", "F"] = "C", + concurrency: Optional[int] = None, + asyncio_loop: Optional[AbstractEventLoop] = None, + ): + + order_parsed = parse_indexing_order(order) + concurrency_parsed = parse_concurrency(concurrency) + asyncio_loop_parsed = parse_asyncio_loop(asyncio_loop) + + object.__setattr__(self, "order", order_parsed) + object.__setattr__(self, "concurrency", concurrency_parsed) + object.__setattr__(self, "asyncio_loop", asyncio_loop_parsed) diff --git a/src/zarr/v3/group.py b/src/zarr/v3/group.py index a5d0e68165..acd5ca0d62 100644 --- a/src/zarr/v3/group.py +++ b/src/zarr/v3/group.py @@ -1,15 +1,15 @@ from __future__ import annotations +from dataclasses import asdict, dataclass, field, replace import asyncio import json import logging from typing import Any, Dict, Literal, Optional, Union, AsyncIterator, Iterator, List - -from attr import asdict, field, frozen # , validators +from zarr.v3.abc.metadata import Metadata from zarr.v3.array import AsyncArray, Array from zarr.v3.attributes import Attributes -from zarr.v3.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, make_cattr +from zarr.v3.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON from zarr.v3.config import RuntimeConfiguration, SyncConfiguration from zarr.v3.store import StoreLike, StorePath, make_store_path from zarr.v3.sync import SyncMixin, sync @@ -17,29 +17,56 @@ logger = logging.getLogger("zarr.group") -@frozen -class GroupMetadata: - attributes: Dict[str, Any] = field(factory=dict) +def parse_zarr_format(data: Any) -> Literal[2, 3]: + if data in (2, 3): + return data + msg = f"Invalid zarr_format. Expected one of 2 or 3. Got {data}." + raise ValueError(msg) + + +# todo: convert None to empty dict +def parse_attributes(data: Any) -> Dict[str, Any]: + if data is None: + return {} + elif isinstance(data, dict) and all(map(lambda v: isinstance(v, str), data.keys())): + return data + msg = f"Expected dict with string keys. Got {type(data)} instead." &#13;
+ raise TypeError(msg) + + +@dataclass(frozen=True) +class GroupMetadata(Metadata): + attributes: Dict[str, Any] = field(default_factory=dict) zarr_format: Literal[2, 3] = 3 - node_type: Literal["group"] = field(default="group", init=True) + node_type: Literal["group"] = field(default="group", init=False) + # todo: rename this, since it doesn't return bytes def to_bytes(self) -> Dict[str, bytes]: if self.zarr_format == 3: - return {ZARR_JSON: json.dumps(asdict(self)).encode()} - elif self.zarr_format == 2: + return {ZARR_JSON: json.dumps(self.to_dict()).encode()} + else: return { ZGROUP_JSON: self.zarr_format, ZATTRS_JSON: json.dumps(self.attributes).encode(), } - else: - raise ValueError(f"unexpected zarr_format: {self.zarr_format}") + + def __init__(self, attributes: Dict[str, Any] = None, zarr_format: Literal[2, 3] = 3): + attributes_parsed = parse_attributes(attributes) + zarr_format_parsed = parse_zarr_format(zarr_format) + + object.__setattr__(self, "attributes", attributes_parsed) + object.__setattr__(self, "zarr_format", zarr_format_parsed) @classmethod - def from_json(cls, zarr_json: Any) -> GroupMetadata: - return make_cattr().structure(zarr_json, GroupMetadata) + def from_dict(cls, data: Dict[str, Any]) -> GroupMetadata: + assert data.pop("node_type", None) in ("group", None) + return cls(**data) + + def to_dict(self) -> Dict[str, Any]: + return asdict(self) -@frozen +@dataclass(frozen=True) class AsyncGroup: metadata: GroupMetadata store_path: StorePath @@ -77,6 +104,8 @@ async def open( zarr_format: Literal[2, 3] = 3, ) -> AsyncGroup: store_path = make_store_path(store) + zarr_json_bytes = await (store_path / ZARR_JSON).get_async() + assert zarr_json_bytes is not None # TODO: consider trying to autodiscover the zarr-format here if zarr_format == 3: @@ -102,17 +131,17 @@ async def open( zarr_json = {**zgroup, "attributes": zattrs} else: raise ValueError(f"unexpected zarr_format: {zarr_format}") - return cls.from_json(store_path, zarr_json, runtime_configuration) + return cls.from_dict(store_path, zarr_json, runtime_configuration) @classmethod - def from_json( + def from_dict( cls, store_path: StorePath, - zarr_json: Any, + data: Dict[str, Any], runtime_configuration: RuntimeConfiguration, ) -> Group: group = cls( - metadata=GroupMetadata.from_json(zarr_json), + metadata=GroupMetadata.from_dict(data), store_path=store_path, runtime_configuration=runtime_configuration, ) @@ -138,9 +167,9 @@ async def getitem( else: zarr_json = json.loads(zarr_json_bytes) if zarr_json["node_type"] == "group": - return type(self).from_json(store_path, zarr_json, self.runtime_configuration) + return type(self).from_dict(store_path, zarr_json, self.runtime_configuration) if zarr_json["node_type"] == "array": - return AsyncArray.from_json( + return AsyncArray.from_dict( store_path, zarr_json, runtime_configuration=self.runtime_configuration ) elif self.metadata.zarr_format == 2: @@ -160,7 +189,7 @@ async def getitem( if zarray is not None: # TODO: update this once the V2 array support is part of the primary array class zarr_json = {**zarray, "attributes": zattrs} - return AsyncArray.from_json( + return AsyncArray.from_dict( store_path, zarray, runtime_configuration=self.runtime_configuration ) else: @@ -173,7 +202,7 @@ async def getitem( else {"zarr_format": self.metadata.zarr_format} ) zarr_json = {**zgroup, "attributes": zattrs} - return type(self).from_json(store_path, zarr_json, self.runtime_configuration) + return type(self).from_dict(store_path, zarr_json, self.runtime_configuration) else: raise 
ValueError(f"unexpected zarr_format: {self.metadata.zarr_format}") @@ -291,7 +320,7 @@ async def move(self, source: str, dest: str) -> None: raise NotImplementedError -@frozen +@dataclass(frozen=True) class Group(SyncMixin): _async_group: AsyncGroup _sync_configuration: SyncConfiguration = field(init=True, default=SyncConfiguration()) @@ -348,6 +377,13 @@ def __setitem__(self, key, value): """__setitem__ is not supported in v3""" raise NotImplementedError + async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Group: + new_metadata = replace(self.metadata, attributes=new_attributes) + + # Write new metadata + await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes()) + return replace(self, metadata=new_metadata) + @property def metadata(self) -> GroupMetadata: return self._async_group.metadata diff --git a/src/zarr/v3/metadata.py b/src/zarr/v3/metadata.py index c6dd9f1f46..de3055abdc 100644 --- a/src/zarr/v3/metadata.py +++ b/src/zarr/v3/metadata.py @@ -1,21 +1,31 @@ from __future__ import annotations - -import json -from asyncio import AbstractEventLoop from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Protocol, Tuple, Union - +from typing import TYPE_CHECKING, cast, Dict, Iterable +from dataclasses import dataclass, field +import json import numpy as np -from attr import asdict, field, frozen -from zarr.v3.common import ChunkCoords, make_cattr +from zarr.v3.chunk_grids import ChunkGrid, RegularChunkGrid +from zarr.v3.chunk_key_encodings import ChunkKeyEncoding, parse_separator -@frozen -class RuntimeConfiguration: - order: Literal["C", "F"] = "C" - concurrency: Optional[int] = None - asyncio_loop: Optional[AbstractEventLoop] = None +if TYPE_CHECKING: + from typing import Any, Literal, Union, List, Optional, Tuple + from zarr.v3.codecs.pipeline import CodecPipeline + + +from zarr.v3.abc.codec import Codec +from zarr.v3.abc.metadata import Metadata + +from zarr.v3.common import ( + JSON, + ArraySpec, + ChunkCoords, + parse_dtype, + parse_fill_value, + parse_shapelike, +) +from zarr.v3.config import RuntimeConfiguration, parse_indexing_order def runtime_configuration( @@ -79,120 +89,95 @@ def to_numpy_shortname(self) -> str: } return data_type_to_numpy[self] - -dtype_to_data_type = { - "|b1": "bool", - "bool": "bool", - "|i1": "int8", - " ChunkCoords: - if chunk_key == "c": - return () - return tuple(map(int, chunk_key[1:].split(self.configuration.separator))) - - def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: - return self.configuration.separator.join(map(str, ("c",) + chunk_coords)) - - -@frozen -class V2ChunkKeyEncodingConfigurationMetadata: - separator: Literal[".", "/"] = "." 
- - -@frozen -class V2ChunkKeyEncodingMetadata: - configuration: V2ChunkKeyEncodingConfigurationMetadata = ( - V2ChunkKeyEncodingConfigurationMetadata() - ) - name: Literal["v2"] = "v2" - - def decode_chunk_key(self, chunk_key: str) -> ChunkCoords: - return tuple(map(int, chunk_key.split(self.configuration.separator))) - - def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: - chunk_identifier = self.configuration.separator.join(map(str, chunk_coords)) - return "0" if chunk_identifier == "" else chunk_identifier - - -ChunkKeyEncodingMetadata = Union[DefaultChunkKeyEncodingMetadata, V2ChunkKeyEncodingMetadata] - - -class CodecMetadata(Protocol): - @property - def name(self) -> str: - pass - - -class ShardingCodecIndexLocation(Enum): - start = "start" - end = "end" - - -@frozen -class ArraySpec: - shape: ChunkCoords - data_type: DataType - fill_value: Any - - @property - def dtype(self) -> np.dtype: - return np.dtype(self.data_type.value) - - @property - def ndim(self) -> int: - return len(self.shape) + @classmethod + def from_dtype(cls, dtype: np.dtype) -> DataType: + dtype_to_data_type = { + "|b1": "bool", + "bool": "bool", + "|i1": "int8", + " None: + if isinstance(self.chunk_grid, RegularChunkGrid) and len(self.shape) != len( + self.chunk_grid.chunk_shape + ): + raise ValueError( + "`chunk_shape` and `shape` need to have the same number of dimensions." + ) + if self.dimension_names is not None and len(self.shape) != len(self.dimension_names): + raise ValueError( + "`dimension_names` and `shape` need to have the same number of dimensions." + ) + if self.fill_value is None: + raise ValueError("`fill_value` is required.") + self.codecs.validate(self) @property def dtype(self) -> np.dtype: - return np.dtype(self.data_type.value) + return self.data_type @property def ndim(self) -> int: @@ -200,35 +185,57 @@ def ndim(self) -> int: def get_chunk_spec(self, _chunk_coords: ChunkCoords) -> ArraySpec: assert isinstance( - self.chunk_grid, RegularChunkGridMetadata + self.chunk_grid, RegularChunkGrid ), "Currently, only regular chunk grid is supported" return ArraySpec( - shape=self.chunk_grid.configuration.chunk_shape, - data_type=self.data_type, + shape=self.chunk_grid.chunk_shape, + dtype=self.dtype, fill_value=self.fill_value, ) def to_bytes(self) -> bytes: def _json_convert(o): + if isinstance(o, np.dtype): + return str(o) if isinstance(o, Enum): return o.name + # this serializes numcodecs compressors + # todo: implement to_dict for codecs + elif hasattr(o, "get_config"): + return o.get_config() raise TypeError return json.dumps( - asdict( - self, - filter=lambda attr, value: attr.name != "dimension_names" or value is not None, - ), + self.to_dict(), default=_json_convert, ).encode() @classmethod - def from_json(cls, zarr_json: Any) -> ArrayMetadata: - return make_cattr().structure(zarr_json, cls) + def from_dict(cls, data: Dict[str, Any]) -> ArrayMetadata: + # check that the zarr_format attribute is correct + _ = parse_zarr_format_v3(data.pop("zarr_format")) + # check that the node_type attribute is correct + _ = parse_node_type_array(data.pop("node_type")) + + dimension_names = data.pop("dimension_names", None) + + return cls(**data, dimension_names=dimension_names) + + def to_dict(self) -> Dict[str, Any]: + out_dict = super().to_dict() + + if not isinstance(out_dict, dict): + raise TypeError(f"Expected dict. 
Got {type(out_dict)}.") + # if `dimension_names` is `None`, we do not include it in + # the metadata document + if out_dict["dimension_names"] is None: + out_dict.pop("dimension_names") + return out_dict -@frozen -class ArrayV2Metadata: + +@dataclass(frozen=True) +class ArrayV2Metadata(Metadata): shape: ChunkCoords chunks: ChunkCoords dtype: np.dtype @@ -237,7 +244,47 @@ class ArrayV2Metadata: filters: Optional[List[Dict[str, Any]]] = None dimension_separator: Literal[".", "/"] = "." compressor: Optional[Dict[str, Any]] = None - zarr_format: Literal[2] = 2 + attributes: Optional[Dict[str, Any]] = field(default_factory=dict) + zarr_format: Literal[2] = field(init=False, default=2) + + def __init__( + self, + *, + shape: ChunkCoords, + dtype: np.dtype, + chunks: ChunkCoords, + fill_value: Any, + order: Literal["C", "F"], + dimension_separator: Literal[".", "/"] = ".", + compressor: Optional[Dict[str, Any]] = None, + filters: Optional[List[Dict[str, Any]]] = None, + attributes: Optional[Dict[str, JSON]] = None, + ): + """ + Metadata for a Zarr version 2 array. + """ + shape_parsed = parse_shapelike(shape) + data_type_parsed = parse_dtype(dtype) + chunks_parsed = parse_shapelike(chunks) + compressor_parsed = parse_compressor(compressor) + order_parsed = parse_indexing_order(order) + dimension_separator_parsed = parse_separator(dimension_separator) + filters_parsed = parse_filters(filters) + fill_value_parsed = parse_fill_value(fill_value) + attributes_parsed = parse_attributes(attributes) + + object.__setattr__(self, "shape", shape_parsed) + object.__setattr__(self, "dtype", data_type_parsed) + object.__setattr__(self, "chunks", chunks_parsed) + object.__setattr__(self, "compressor", compressor_parsed) + object.__setattr__(self, "order", order_parsed) + object.__setattr__(self, "dimension_separator", dimension_separator_parsed) + object.__setattr__(self, "filters", filters_parsed) + object.__setattr__(self, "fill_value", fill_value_parsed) + object.__setattr__(self, "attributes", attributes_parsed) + + # ensure that the metadata document is consistent + _ = parse_v2_metadata(self) @property def ndim(self) -> int: @@ -252,8 +299,78 @@ def _json_convert(o): return o.descr raise TypeError - return json.dumps(asdict(self), default=_json_convert).encode() + return json.dumps(self.to_dict(), default=_json_convert).encode() @classmethod - def from_json(cls, zarr_json: Any) -> ArrayV2Metadata: - return make_cattr().structure(zarr_json, cls) + def from_dict(cls, data: Dict[str, Any]) -> ArrayV2Metadata: + # check that the zarr_format attribute is correct + _ = parse_zarr_format_v2(data.pop("zarr_format")) + return cls(**data) + + +def parse_dimension_names(data: Any) -> Tuple[str, ...] | None: + if data is None: + return data + if isinstance(data, Iterable) and all([isinstance(x, str) for x in data]): + return tuple(data) + msg = f"Expected either None or an iterable of str, got {type(data)}" + raise TypeError(msg) + + +# todo: real validation +def parse_attributes(data: Any) -> Dict[str, JSON]: + if data is None: + return {} + + data_json = cast(Dict[str, JSON], data) + return data_json + + +# todo: move to its own module and drop _v3 suffix +# todo: consider folding all the literal parsing into a single function +# that takes 2 arguments
def parse_zarr_format_v3(data: Any) -> Literal[3]: + if data == 3: + return data + raise ValueError(f"Invalid value. Expected 3. &#13;
Got {data}.") + + +# todo: move to its own module and drop _v2 suffix +def parse_zarr_format_v2(data: Any) -> Literal[2]: + if data == 2: + return data + raise ValueError(f"Invalid value. Expected 2. Got {data}.") + + +def parse_node_type_array(data: Any) -> Literal["array"]: + if data == "array": + return data + raise ValueError(f"Invalid value. Expected 'array'. Got {data}.") + + +# todo: real validation +def parse_filters(data: Any) -> List[Codec]: + return data + + +# todo: real validation +def parse_compressor(data: Any) -> Codec: + return data + + +def parse_v2_metadata(data: ArrayV2Metadata) -> ArrayV2Metadata: + if (l_chunks := len(data.chunks)) != (l_shape := len(data.shape)): + msg = ( + f"The `shape` and `chunks` attributes must have the same length. " + f"`chunks` has length {l_chunks}, but `shape` has length {l_shape}." + ) + raise ValueError(msg) + return data + + +def parse_codecs(data: Iterable[Union[Codec, JSON]]) -> CodecPipeline: + from zarr.v3.codecs.pipeline import CodecPipeline + + if not isinstance(data, Iterable): + raise TypeError(f"Expected iterable, got {type(data)}") + return CodecPipeline.from_dict(data) diff --git a/src/zarr/v3/store/memory.py b/src/zarr/v3/store/memory.py index 1370375851..afacfa4321 100644 --- a/src/zarr/v3/store/memory.py +++ b/src/zarr/v3/store/memory.py @@ -49,7 +49,7 @@ async def set( ) -> None: assert isinstance(key, str) if not isinstance(value, (bytes, bytearray, memoryview)): - raise TypeError(f"expected BytesLike, got {type(value)}") + raise TypeError(f"Expected BytesLike. Got {type(value)}.") if byte_range is not None: buf = bytearray(self._store_dict[key]) diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py index e88c8e93f2..f0996c019e 100644 --- a/src/zarr/v3/sync.py +++ b/src/zarr/v3/sync.py @@ -2,7 +2,16 @@ import asyncio import threading -from typing import Any, Coroutine, List, Optional +from typing import ( + Any, + AsyncIterator, + Callable, + Coroutine, + List, + Optional, + TypeVar, +) +from typing_extensions import ParamSpec from zarr.v3.config import SyncConfiguration @@ -90,18 +99,24 @@ def _get_loop(): return loop[0] +P = ParamSpec("P") +T = TypeVar("T") + + class SyncMixin: _sync_configuration: SyncConfiguration - def _sync(self, coroutine: Coroutine): # TODO: type this + def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: # TODO: refactor this to to take *args and **kwargs and pass those to the method # this should allow us to better type the sync wrapper return sync(coroutine, loop=self._sync_configuration.asyncio_loop) - def _sync_iter(self, func: Coroutine, *args, **kwargs) -> List[Any]: # TODO: type this - async def iter_to_list() -> List[Any]: + def _sync_iter( + self, func: Callable[P, AsyncIterator[T]], *args: P.args, **kwargs: P.kwargs + ) -> List[T]: + async def iter_to_list() -> List[T]: # TODO: replace with generators so we don't materialize the entire iterator at once return [item async for item in func(*args, **kwargs)] - return self._sync(iter_to_list) + return self._sync(iter_to_list()) diff --git a/tests/test_codecs_v3.py b/tests/test_codecs_v3.py index 2b18969874..333c2094bf 100644 --- a/tests/test_codecs_v3.py +++ b/tests/test_codecs_v3.py @@ -1,23 +1,33 @@ from __future__ import annotations +from dataclasses import dataclass import json -from typing import Iterator, List, Literal, Optional -from attr import frozen +from typing import Iterator, List, Literal, Optional, Tuple + import numpy as np import pytest import zarr -from zarr.v3 import codecs +from zarr.v3.abc.codec import Codec from 
zarr.v3.array import Array, AsyncArray from zarr.v3.common import Selection from zarr.v3.indexing import morton_order_iter -from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation, runtime_configuration +from zarr.v3.codecs import ( + ShardingCodec, + ShardingCodecIndexLocation, + BloscCodec, + BytesCodec, + GzipCodec, + TransposeCodec, + ZstdCodec, +) +from zarr.v3.metadata import runtime_configuration from zarr.v3.abc.store import Store from zarr.v3.store import MemoryStore, StorePath -@frozen +@dataclass(frozen=True) class _AsyncArrayProxy: array: AsyncArray @@ -25,7 +35,7 @@ def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy: return _AsyncArraySelectionProxy(self.array, selection) -@frozen +@dataclass(frozen=True) class _AsyncArraySelectionProxy: array: AsyncArray selection: Selection @@ -47,9 +57,14 @@ def sample_data() -> np.ndarray: return np.arange(0, 128 * 128 * 128, dtype="uint16").reshape((128, 128, 128), order="F") -@pytest.mark.parametrize( - "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] -) +def order_from_dim(order: Literal["F", "C"], ndim: int) -> Tuple[int, ...]: + if order == "F": + return tuple(ndim - x - 1 for x in range(ndim)) + else: + return tuple(range(ndim)) + + +@pytest.mark.parametrize("index_location", ["start", "end"]) def test_sharding( store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation ): @@ -60,12 +75,12 @@ def test_sharding( dtype=sample_data.dtype, fill_value=0, codecs=[ - codecs.sharding_codec( - (32, 32, 32), - [ - codecs.transpose_codec("F", sample_data.ndim), - codecs.bytes_codec(), - codecs.blosc_codec(typesize=sample_data.dtype.itemsize, cname="lz4"), + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + TransposeCodec(order=order_from_dim("F", sample_data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), ], index_location=index_location, ) @@ -79,9 +94,7 @@ def test_sharding( assert np.array_equal(sample_data, read_data) -@pytest.mark.parametrize( - "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] -) +@pytest.mark.parametrize("index_location", ["start", "end"]) def test_sharding_partial( store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation ): @@ -92,12 +105,12 @@ def test_sharding_partial( dtype=sample_data.dtype, fill_value=0, codecs=[ - codecs.sharding_codec( - (32, 32, 32), - [ - codecs.transpose_codec("F", sample_data.ndim), - codecs.bytes_codec(), - codecs.blosc_codec(typesize=sample_data.dtype.itemsize, cname="lz4"), + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + TransposeCodec(order=order_from_dim("F", sample_data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), ], index_location=index_location, ) @@ -114,9 +127,7 @@ def test_sharding_partial( assert np.array_equal(sample_data, read_data) -@pytest.mark.parametrize( - "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] -) +@pytest.mark.parametrize("index_location", ["start", "end"]) def test_sharding_partial_read( store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation ): @@ -127,12 +138,12 @@ def test_sharding_partial_read( dtype=sample_data.dtype, fill_value=1, codecs=[ - codecs.sharding_codec( - (32, 32, 32), - [ - codecs.transpose_codec("F", sample_data.ndim), - codecs.bytes_codec(), - codecs.blosc_codec(typesize=sample_data.dtype.itemsize, cname="lz4"), + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + 
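# shard-internal pipeline: transpose to F order, serialize with BytesCodec, then Blosc-compress +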
TransposeCodec(order=order_from_dim("F", sample_data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), ], index_location=index_location, ) @@ -143,9 +154,7 @@ def test_sharding_partial_read( assert np.all(read_data == 1) -@pytest.mark.parametrize( - "index_location", [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end] -) +@pytest.mark.parametrize("index_location", ["start", "end"]) def test_sharding_partial_overwrite( store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation ): @@ -158,12 +167,12 @@ def test_sharding_partial_overwrite( dtype=data.dtype, fill_value=1, codecs=[ - codecs.sharding_codec( - (32, 32, 32), - [ - codecs.transpose_codec("F", data.ndim), - codecs.bytes_codec(), - codecs.blosc_codec(typesize=data.dtype.itemsize, cname="lz4"), + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + TransposeCodec(order=order_from_dim("F", data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), ], index_location=index_location, ) @@ -183,11 +192,11 @@ def test_sharding_partial_overwrite( @pytest.mark.parametrize( "outer_index_location", - [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end], + ["start", "end"], ) @pytest.mark.parametrize( "inner_index_location", - [ShardingCodecIndexLocation.start, ShardingCodecIndexLocation.end], + ["start", "end"], ) def test_nested_sharding( store: Store, @@ -202,9 +211,11 @@ def test_nested_sharding( dtype=sample_data.dtype, fill_value=0, codecs=[ - codecs.sharding_codec( - (32, 32, 32), - [codecs.sharding_codec((16, 16, 16), index_location=inner_index_location)], + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + ShardingCodec(chunk_shape=(16, 16, 16), index_location=inner_index_location) + ], index_location=outer_index_location, ) ], @@ -233,15 +244,15 @@ async def test_order( ): data = np.arange(0, 256, dtype="uint16").reshape((32, 8), order=input_order) - codecs_: List[CodecMetadata] = ( + codecs_: List[Codec] = ( [ - codecs.sharding_codec( - (16, 8), - codecs=[codecs.transpose_codec(store_order, data.ndim), codecs.bytes_codec()], + ShardingCodec( + chunk_shape=(16, 8), + codecs=[TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()], ) ] if with_sharding - else [codecs.transpose_codec(store_order, data.ndim), codecs.bytes_codec()] + else [TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()] ) a = await AsyncArray.create( @@ -300,9 +311,7 @@ def test_order_implicit( ): data = np.arange(0, 256, dtype="uint16").reshape((16, 16), order=input_order) - codecs_: Optional[List[CodecMetadata]] = ( - [codecs.sharding_codec((8, 8))] if with_sharding else None - ) + codecs_: Optional[List[Codec]] = [ShardingCodec(chunk_shape=(8, 8))] if with_sharding else None a = Array.create( store / "order_implicit", @@ -345,15 +354,15 @@ async def test_transpose( ): data = np.arange(0, 256, dtype="uint16").reshape((1, 32, 8), order=input_order) - codecs_: List[CodecMetadata] = ( + codecs_: List[Codec] = ( [ - codecs.sharding_codec( - (1, 16, 8), - codecs=[codecs.transpose_codec((2, 1, 0)), codecs.bytes_codec()], + ShardingCodec( + chunk_shape=(1, 16, 8), + codecs=[TransposeCodec(order=(2, 1, 0)), BytesCodec()], ) ] if with_sharding - else [codecs.transpose_codec((2, 1, 0)), codecs.bytes_codec()] + else [TransposeCodec(order=(2, 1, 0)), BytesCodec()] ) a = await AsyncArray.create( @@ -405,7 +414,7 @@ def test_transpose_invalid( data = np.arange(0, 256, dtype="uint16").reshape((1, 32, 8)) for order in [(1, 0), (3, 2, 1), (3, 3, 1)]: - with 
pytest.raises(AssertionError): + with pytest.raises(ValueError): Array.create( store / "transpose_invalid", shape=data.shape, @@ -413,7 +422,7 @@ def test_transpose_invalid( dtype=data.dtype, fill_value=0, chunk_key_encoding=("v2", "."), - codecs=[codecs.transpose_codec(order), codecs.bytes_codec()], + codecs=[TransposeCodec(order=order), BytesCodec()], ) @@ -437,12 +446,12 @@ def test_open_sharding(store: Store): dtype="int32", fill_value=0, codecs=[ - codecs.sharding_codec( - (8, 8), - [ - codecs.transpose_codec("F", 2), - codecs.bytes_codec(), - codecs.blosc_codec(typesize=4), + ShardingCodec( + chunk_shape=(8, 8), + codecs=[ + TransposeCodec(order=order_from_dim("F", 2)), + BytesCodec(), + BloscCodec(), ], ) ], @@ -580,11 +589,11 @@ def test_write_partial_sharded_chunks(store: Store): dtype=data.dtype, fill_value=1, codecs=[ - codecs.sharding_codec( + ShardingCodec( chunk_shape=(10, 10), codecs=[ - codecs.bytes_codec(), - codecs.blosc_codec(typesize=data.dtype.itemsize), + BytesCodec(), + BloscCodec(), ], ) ], @@ -618,7 +627,7 @@ async def test_delete_empty_sharded_chunks(store: Store): chunk_shape=(8, 16), dtype="uint16", fill_value=1, - codecs=[codecs.sharding_codec(chunk_shape=(8, 8))], + codecs=[ShardingCodec(chunk_shape=(8, 8))], ) await _AsyncArrayProxy(a)[:, :].set(np.zeros((16, 16))) await _AsyncArrayProxy(a)[8:, :].set(np.ones((8, 16))) @@ -679,7 +688,7 @@ async def test_zarr_compat_F(store: Store): dtype=data.dtype, chunk_key_encoding=("v2", "."), fill_value=1, - codecs=[codecs.transpose_codec("F", data.ndim), codecs.bytes_codec()], + codecs=[TransposeCodec(order=order_from_dim("F", data.ndim)), BytesCodec()], ) z2 = zarr.create( @@ -743,7 +752,7 @@ def test_gzip(store: Store): chunk_shape=(16, 16), dtype=data.dtype, fill_value=0, - codecs=[codecs.bytes_codec(), codecs.gzip_codec()], + codecs=[BytesCodec(), GzipCodec()], ) a[:, :] = data @@ -760,7 +769,7 @@ def test_zstd(store: Store, checksum: bool): chunk_shape=(16, 16), dtype=data.dtype, fill_value=0, - codecs=[codecs.bytes_codec(), codecs.zstd_codec(level=0, checksum=checksum)], + codecs=[BytesCodec(), ZstdCodec(level=0, checksum=checksum)], ) a[:, :] = data @@ -779,7 +788,7 @@ async def test_endian(store: Store, endian: Literal["big", "little"]): dtype=data.dtype, fill_value=0, chunk_key_encoding=("v2", "."), - codecs=[codecs.bytes_codec(endian)], + codecs=[BytesCodec(endian=endian)], ) await _AsyncArrayProxy(a)[:, :].set(data) @@ -815,7 +824,7 @@ async def test_endian_write( dtype="uint16", fill_value=0, chunk_key_encoding=("v2", "."), - codecs=[codecs.bytes_codec(dtype_store_endian)], + codecs=[BytesCodec(endian=dtype_store_endian)], ) await _AsyncArrayProxy(a)[:, :].set(data) @@ -835,7 +844,7 @@ async def test_endian_write( def test_invalid_metadata(store: Store): - with pytest.raises(AssertionError): + with pytest.raises(ValueError): Array.create( store / "invalid_chunk_shape", shape=(16, 16, 16), @@ -844,7 +853,7 @@ def test_invalid_metadata(store: Store): fill_value=0, ) - with pytest.raises(AssertionError): + with pytest.raises(ValueError): Array.create( store / "invalid_endian", shape=(16, 16), @@ -852,12 +861,12 @@ def test_invalid_metadata(store: Store): dtype=np.dtype("uint8"), fill_value=0, codecs=[ - codecs.bytes_codec("big"), - codecs.transpose_codec("F", 2), + BytesCodec(endian="big"), + TransposeCodec(order=order_from_dim("F", 2)), ], ) - with pytest.raises(AssertionError): + with pytest.raises(TypeError): Array.create( store / "invalid_order", shape=(16, 16), @@ -865,12 +874,12 @@ def 
test_invalid_metadata(store: Store): dtype=np.dtype("uint8"), fill_value=0, codecs=[ - codecs.bytes_codec(), - codecs.transpose_codec("F"), + BytesCodec(), + TransposeCodec(order="F"), ], ) - with pytest.raises(AssertionError): + with pytest.raises(ValueError): Array.create( store / "invalid_missing_bytes_codec", shape=(16, 16), @@ -878,11 +887,11 @@ def test_invalid_metadata(store: Store): dtype=np.dtype("uint8"), fill_value=0, codecs=[ - codecs.transpose_codec("F", 2), + TransposeCodec(order=order_from_dim("F", 2)), ], ) - with pytest.raises(AssertionError): + with pytest.raises(ValueError): Array.create( store / "invalid_inner_chunk_shape", shape=(16, 16), @@ -890,10 +899,10 @@ def test_invalid_metadata(store: Store): dtype=np.dtype("uint8"), fill_value=0, codecs=[ - codecs.sharding_codec(chunk_shape=(8,)), + ShardingCodec(chunk_shape=(8,)), ], ) - with pytest.raises(AssertionError): + with pytest.raises(ValueError): Array.create( store / "invalid_inner_chunk_shape", shape=(16, 16), @@ -901,7 +910,7 @@ def test_invalid_metadata(store: Store): dtype=np.dtype("uint8"), fill_value=0, codecs=[ - codecs.sharding_codec(chunk_shape=(8, 7)), + ShardingCodec(chunk_shape=(8, 7)), ], ) @@ -913,8 +922,8 @@ def test_invalid_metadata(store: Store): dtype=np.dtype("uint8"), fill_value=0, codecs=[ - codecs.sharding_codec(chunk_shape=(8, 8)), - codecs.gzip_codec(), + ShardingCodec(chunk_shape=(8, 8)), + GzipCodec(), ], ) @@ -933,17 +942,62 @@ async def test_resize(store: Store): ) await _AsyncArrayProxy(a)[:16, :18].set(data) - assert await (store / "resize/0.0").get() is not None - assert await (store / "resize/0.1").get() is not None - assert await (store / "resize/1.0").get() is not None - assert await (store / "resize/1.1").get() is not None + assert await (store / "resize" / "0.0").get() is not None + assert await (store / "resize" / "0.1").get() is not None + assert await (store / "resize" / "1.0").get() is not None + assert await (store / "resize" / "1.1").get() is not None a = await a.resize((10, 12)) assert a.metadata.shape == (10, 12) - assert await (store / "resize/0.0").get() is not None - assert await (store / "resize/0.1").get() is not None - assert await (store / "resize/1.0").get() is None - assert await (store / "resize/1.1").get() is None + assert await (store / "resize" / "0.0").get() is not None + assert await (store / "resize" / "0.1").get() is not None + assert await (store / "resize" / "1.0").get() is None + assert await (store / "resize" / "1.1").get() is None + + +@pytest.mark.asyncio +async def test_blosc_evolve(store: Store): + await AsyncArray.create( + store / "blosc_evolve_u1", + shape=(16, 16), + chunk_shape=(16, 16), + dtype="uint8", + fill_value=0, + codecs=[BytesCodec(), BloscCodec()], + ) + + zarr_json = json.loads(await (store / "blosc_evolve_u1" / "zarr.json").get()) + blosc_configuration_json = zarr_json["codecs"][1]["configuration"] + assert blosc_configuration_json["typesize"] == 1 + assert blosc_configuration_json["shuffle"] == "bitshuffle" + + await AsyncArray.create( + store / "blosc_evolve_u2", + shape=(16, 16), + chunk_shape=(16, 16), + dtype="uint16", + fill_value=0, + codecs=[BytesCodec(), BloscCodec()], + ) + + zarr_json = json.loads(await (store / "blosc_evolve_u2" / "zarr.json").get()) + blosc_configuration_json = zarr_json["codecs"][1]["configuration"] + assert blosc_configuration_json["typesize"] == 2 + assert blosc_configuration_json["shuffle"] == "shuffle" + + await AsyncArray.create( + store / "sharding_blosc_evolve", + shape=(16, 16), + 
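# the same typesize/shuffle evolution should apply inside a sharding codec's sub-pipeline +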
chunk_shape=(16, 16), + dtype="uint16", + fill_value=0, + codecs=[ShardingCodec(chunk_shape=(16, 16), codecs=[BytesCodec(), BloscCodec()])], + ) + + zarr_json = json.loads(await (store / "sharding_blosc_evolve" / "zarr.json").get()) + blosc_configuration_json = zarr_json["codecs"][0]["configuration"]["codecs"][1]["configuration"] + assert blosc_configuration_json["typesize"] == 2 + assert blosc_configuration_json["shuffle"] == "shuffle" def test_exists_ok(store: Store): diff --git a/tests/v3/test_common.py b/tests/v3/test_common.py new file mode 100644 index 0000000000..33e91d793f --- /dev/null +++ b/tests/v3/test_common.py @@ -0,0 +1,97 @@ +from __future__ import annotations +from typing import TYPE_CHECKING, Iterable + +if TYPE_CHECKING: + from typing import Literal, Any, Tuple + +import numpy as np +from zarr.v3.config import parse_indexing_order +from zarr.v3.common import parse_dtype, parse_name, parse_shapelike, product +import pytest + + +@pytest.mark.parametrize("data", [(0, 0, 0, 0), (1, 3, 4, 5, 6), (2, 4)]) +def test_product(data: Tuple[int, ...]): + assert product(data) == np.prod(data) + + +# todo: test +def test_concurrent_map(): + ... + + +# todo: test +def test_to_thread(): + ... + + +# todo: test +def test_enum_names(): + ... + + +# todo: test +def test_parse_enum(): + ... + + +@pytest.mark.parametrize("data", [("foo", "bar"), (10, 11)]) +def test_parse_name_invalid(data: Tuple[Any, Any]): + observed, expected = data + if isinstance(observed, str): + with pytest.raises(ValueError, match=f"Expected '{expected}'. Got {observed} instead."): + parse_name(observed, expected) + else: + with pytest.raises( + TypeError, match=f"Expected a string, got an instance of {type(observed)}." + ): + parse_name(observed, expected) + + +@pytest.mark.parametrize("data", [("foo", "foo"), ("10", "10")]) +def test_parse_name_valid(data: Tuple[Any, Any]): + observed, expected = data + assert parse_name(observed, expected) == observed + + +@pytest.mark.parametrize("data", [0, 1, "hello", "f"]) +def test_parse_indexing_order_invalid(data): + with pytest.raises(ValueError, match="Expected one of"): + parse_indexing_order(data) + + +@pytest.mark.parametrize("data", ["C", "F"]) +def test_parse_indexing_order_valid(data: Literal["C", "F"]): + assert parse_indexing_order(data) == data + + +@pytest.mark.parametrize("data", [10, ("0", 1, 2, 3), {"0": "0"}, []]) +def test_parse_shapelike_invalid(data: Any): + if isinstance(data, Iterable): + if len(data) == 0: + with pytest.raises(ValueError, match="Expected at least one element."): + parse_shapelike(data) + else: + with pytest.raises(TypeError, match="Expected an iterable of integers"): + parse_shapelike(data) + else: + with pytest.raises(TypeError, match="Expected an iterable."): + parse_shapelike(data) + + +@pytest.mark.parametrize("data", [range(1, 10), [1, 2, 3], (3, 4, 5)]) +def test_parse_shapelike_valid(data: Iterable[Any]): + assert parse_shapelike(data) == tuple(data) + + +# todo: more dtypes +@pytest.mark.parametrize("data", [("uint8", np.uint8), ("float64", np.float64)]) +def test_parse_dtype(data: Tuple[str, np.dtype]): + unparsed, parsed = data + assert parse_dtype(unparsed) == parsed + + +# todo: figure out what it means to test this +def test_parse_fill_value(): + ... &#13;
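Taken together, these files show the pattern this patch adopts everywhere: each @dataclass(frozen=True) class runs its inputs through small parse_* helpers in a hand-written __init__ and stores the parsed results with object.__setattr__, because frozen dataclasses reject ordinary attribute assignment. A minimal, self-contained sketch of that pattern follows; the Example class and parse_positive_int helper are illustrative only and not part of the patch:

from dataclasses import dataclass

def parse_positive_int(data):
    # hypothetical parser in the style of parse_zstd_level / parse_checksum
    if not isinstance(data, int):
        raise TypeError(f"Expected int. Got {type(data)} instead.")
    if data < 1:
        raise ValueError(f"Expected a positive integer. Got {data} instead.")
    return data

@dataclass(frozen=True)
class Example:
    level: int

    def __init__(self, *, level: int = 1) -> None:
        # dataclasses only generate an __init__ when the class body does not
        # define one, so this hand-written constructor takes precedence
        level_parsed = parse_positive_int(level)
        # plain `self.level = ...` would raise FrozenInstanceError here
        object.__setattr__(self, "level", level_parsed)

Compared with the removed cattrs-based make_cattr converter, validation now lives next to the class that needs it, and from_dict/to_dict round-trip the same parsed values.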
diff --git a/tests/v3/test_metadata.py b/tests/v3/test_metadata.py new file mode 100644 index 0000000000..c7ca0f2e1a --- /dev/null +++ b/tests/v3/test_metadata.py @@ -0,0 +1,60 @@ +from __future__ import annotations +import pytest +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Sequence, Any + +from zarr.v3.metadata import parse_dimension_names, parse_zarr_format_v2, parse_zarr_format_v3 + +# todo: test +def test_datatype_enum(): + ... + + +# todo: test +# this will almost certainly be a collection of tests +def test_array_metadata_v3(): + ... + + +# todo: test +# this will almost certainly be a collection of tests +def test_array_metadata_v2(): + ... + + +@pytest.mark.parametrize("data", [None, ("a", "b", "c"), ["a", "a", "a"]]) +def parse_dimension_names_valid(data: Sequence[str] | None) -> None: + assert parse_dimension_names(data) == data + + +@pytest.mark.parametrize("data", [(), [1, 2, "a"], {"foo": 10}]) +def parse_dimension_names_invalid(data: Any) -> None: + with pytest.raises(TypeError, match="Expected either None or an iterable of str,"): + parse_dimension_names(data) + + +# todo: test +def test_parse_attributes() -> None: + ... + + +def test_parse_zarr_format_v3_valid() -> None: + assert parse_zarr_format_v3(3) == 3 + + +@pytest.mark.parametrize("data", [None, 1, 2, 4, 5, "3"]) +def test_parse_zarr_format_v3_invalid(data: Any) -> None: + with pytest.raises(ValueError, match=f"Invalid value. Expected 3. Got {data}"): + parse_zarr_format_v3(data) + + +def test_parse_zarr_format_v2_valid() -> None: + assert parse_zarr_format_v2(2) == 2 + + +@pytest.mark.parametrize("data", [None, 1, 3, 4, 5, "3"]) +def test_parse_zarr_format_v2_invalid(data: Any) -> None: + with pytest.raises(ValueError, match=f"Invalid value. Expected 2. Got {data}"): + parse_zarr_format_v2(data) From f80f697c2612cf41c5bdb158a602c1ae8a737e70 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 18:28:10 +0530 Subject: [PATCH 0440/1078] chore: update pre-commit hooks (#1672) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.2.1 → v0.2.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.1...v0.2.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c7d4f32c68..41b65f1d02 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.2.1' + rev: 'v0.2.2' hooks: - id: ruff - repo: https://github.com/psf/black From 54bc90c8682472cc40fba35ec6b313cb1f046c34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 09:57:06 -0800 Subject: [PATCH 0441/1078] Bump pymongo from 4.6.1 to 4.6.2 (#1674) Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.6.1 to 4.6.2. &#13;
- [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/4.6.2/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.6.1...4.6.2) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index e94b814173..85f6fccffc 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.16.0 # pyup: ignore redis==5.0.1 types-redis types-setuptools -pymongo==4.6.1 +pymongo==4.6.2 # optional test requirements coverage pytest-cov==4.1.0 From 70a15bbe595031ad24b82ca5cee9468a8229e775 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 10:09:11 -0800 Subject: [PATCH 0442/1078] Bump conda-incubator/setup-miniconda from 3.0.1 to 3.0.2 (#1677) Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 3.0.1 to 3.0.2. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v3.0.1...v3.0.2) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index 2cc0213781..d95b2bc540 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3.0.1 + uses: conda-incubator/setup-miniconda@v3.0.2 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index d74df9ce67..946b7efa7d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -42,7 +42,7 @@ jobs: with: fetch-depth: 0 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3.0.1 + uses: conda-incubator/setup-miniconda@v3.0.2 with: channels: conda-forge python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 0ef7f21758..85e5c3e6b6 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v3.0.1 + - uses: conda-incubator/setup-miniconda@v3.0.2 with: auto-update-conda: true python-version: ${{ matrix.python-version }} From ec4d2162828c2616a388dda2bdbcf40c8747a36d Mon Sep 17 00:00:00 2001 From: Josh Moore Date: Tue, 27 Feb 2024 15:26:29 +0100 Subject: [PATCH 0443/1078] Update 
config.yml with Zulip --- .github/ISSUE_TEMPLATE/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 9cb5ec9a78..907121f858 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -3,8 +3,8 @@ contact_links: - name: ✨ Propose a new major feature url: https://github.com/zarr-developers/zarr-specs about: A new major feature should be discussed in the Zarr specifications repository. - - name: ❓ Discuss something on gitter - url: https://gitter.im/zarr-developers/community + - name: ❓ Discuss something on Zulip + url: https://ossci.zulipchat.com/ about: For questions like "How do I do X with Zarr?", you can move to our Gitter channel. - name: ❓ Discuss something on GitHub Discussions url: https://github.com/zarr-developers/zarr-python/discussions From a0e5559c38bf1a9d7c1a70a81f51f5eece5701c2 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 27 Feb 2024 17:04:50 +0100 Subject: [PATCH 0444/1078] Type dimension separator (#1620) Co-authored-by: Davis Bennett --- zarr/_storage/absstore.py | 5 ++- zarr/_storage/v3.py | 7 ++-- zarr/_storage/v3_storage_transformers.py | 3 +- zarr/creation.py | 2 +- zarr/storage.py | 45 +++++++++++++++++------- 5 files changed, 44 insertions(+), 18 deletions(-) diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index b6b386f468..217b2a29e0 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -1,9 +1,12 @@ """This module contains storage classes related to Azure Blob Storage (ABS)""" +from typing import Optional import warnings + from numcodecs.compat import ensure_bytes from zarr.util import normalize_storage_path from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, Store, StoreV3 +from zarr.types import DIMENSION_SEPARATOR __doctest_requires__ = { ("ABSStore", "ABSStore.*"): ["azure.storage.blob"], @@ -67,7 +70,7 @@ def __init__( account_name=None, account_key=None, blob_service_kwargs=None, - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, client=None, ): self._dimension_separator = dimension_separator diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 56bae74361..4987f820cf 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -3,13 +3,14 @@ from collections import OrderedDict from collections.abc import MutableMapping from threading import Lock -from typing import Union, Dict, Any +from typing import Union, Dict, Any, Optional from zarr.errors import ( MetadataError, ReadOnlyError, ) from zarr.util import buffer_size, json_loads, normalize_storage_path +from zarr.types import DIMENSION_SEPARATOR from zarr._storage.absstore import ABSStoreV3 # noqa: F401 from zarr._storage.store import ( # noqa: F401 @@ -224,7 +225,9 @@ def get_partial_values(self, key_ranges): class MemoryStoreV3(MemoryStore, StoreV3): - def __init__(self, root=None, cls=dict, dimension_separator=None): + def __init__( + self, root=None, cls=dict, dimension_separator: Optional[DIMENSION_SEPARATOR] = None + ): if root is None: self.root = cls() else: diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py index 3afc3823a3..37e56f8ecd 100644 --- a/zarr/_storage/v3_storage_transformers.py +++ b/zarr/_storage/v3_storage_transformers.py @@ -8,6 +8,7 @@ from zarr._storage.store import StorageTransformer, StoreV3, _rmdir_from_keys_v3 from zarr.util import normalize_storage_path +from zarr.types import 
DIMENSION_SEPARATOR MAX_UINT_64 = 2**64 - 1 @@ -118,7 +119,7 @@ def _copy_for_array(self, array, inner_store): return transformer_copy @property - def dimension_separator(self) -> str: + def dimension_separator(self) -> DIMENSION_SEPARATOR: assert ( self._dimension_separator is not None ), "dimension_separator is not initialized, first get a copy via _copy_for_array." diff --git a/zarr/creation.py b/zarr/creation.py index 264715b040..c541531d54 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -470,7 +470,7 @@ def open_array( write_empty_chunks=True, *, zarr_version=None, - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, meta_array=None, **kwargs, ): diff --git a/zarr/storage.py b/zarr/storage.py index 73a6dc9630..f6903d29b2 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -41,7 +41,8 @@ from numcodecs.compat import ensure_bytes, ensure_text, ensure_contiguous_ndarray_like from numcodecs.registry import codec_registry from zarr.context import Context -from zarr.types import PathLike as Path +from zarr.types import PathLike as Path, DIMENSION_SEPARATOR +from zarr.util import NoLock from zarr.errors import ( MetadataError, @@ -327,7 +328,7 @@ def init_array( chunk_store: Optional[StoreLike] = None, filters=None, object_codec=None, - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, storage_transformers=(), ): """Initialize an array store with the given configuration. Note that this is a low-level @@ -481,7 +482,7 @@ def _init_array_metadata( chunk_store: Optional[StoreLike] = None, filters=None, object_codec=None, - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, storage_transformers=(), ): store_version = getattr(store, "_store_version", 2) @@ -1054,7 +1055,9 @@ class DirectoryStore(Store): """ - def __init__(self, path, normalize_keys=False, dimension_separator=None): + def __init__( + self, path, normalize_keys=False, dimension_separator: Optional[DIMENSION_SEPARATOR] = None + ): # guard conditions path = os.path.abspath(path) if os.path.exists(path) and not os.path.isdir(path): @@ -1349,7 +1352,7 @@ def __init__( key_separator=None, mode="w", exceptions=(KeyError, PermissionError, IOError), - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, fs=None, check=False, create=False, @@ -1568,7 +1571,12 @@ class TempStore(DirectoryStore): # noinspection PyShadowingBuiltins def __init__( - self, suffix="", prefix="zarr", dir=None, normalize_keys=False, dimension_separator=None + self, + suffix="", + prefix="zarr", + dir=None, + normalize_keys=False, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, ): path = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir) atexit.register(atexit_rmtree, path) @@ -1652,7 +1660,9 @@ class NestedDirectoryStore(DirectoryStore): """ - def __init__(self, path, normalize_keys=False, dimension_separator="/"): + def __init__( + self, path, normalize_keys=False, dimension_separator: Optional[DIMENSION_SEPARATOR] = "/" + ): super().__init__(path, normalize_keys=normalize_keys) if dimension_separator is None: dimension_separator = "/" @@ -1765,7 +1775,7 @@ def __init__( compression=zipfile.ZIP_STORED, allowZip64=True, mode="a", - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, ): # store properties path = os.path.abspath(path) @@ -2058,7 +2068,7 @@ def __init__( mode=0o666, open=None, write_lock=True, - dimension_separator=None, + dimension_separator: 
Optional[DIMENSION_SEPARATOR] = None, **open_kwargs, ): if open is None: @@ -2073,6 +2083,7 @@ def __init__( self.mode = mode self.open = open self.write_lock = write_lock + self.write_mutex: Union[Lock, NoLock] if write_lock: # This may not be required as some dbm implementations manage their own # locks, but err on the side of caution. @@ -2229,7 +2240,13 @@ class LMDBStore(Store): """ - def __init__(self, path, buffers=True, dimension_separator=None, **kwargs): + def __init__( + self, + path, + buffers=True, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, + **kwargs, + ): import lmdb # set default memory map size to something larger than the lmdb default, which is @@ -2580,7 +2597,7 @@ class SQLiteStore(Store): >>> store.close() # don't forget to call this when you're done """ - def __init__(self, path, dimension_separator=None, **kwargs): + def __init__(self, path, dimension_separator: Optional[DIMENSION_SEPARATOR] = None, **kwargs): import sqlite3 self._dimension_separator = dimension_separator @@ -2776,7 +2793,7 @@ def __init__( self, database="mongodb_zarr", collection="zarr_collection", - dimension_separator=None, + dimension_separator: Optional[DIMENSION_SEPARATOR] = None, **kwargs, ): import pymongo @@ -2851,7 +2868,9 @@ class RedisStore(Store): """ - def __init__(self, prefix="zarr", dimension_separator=None, **kwargs): + def __init__( + self, prefix="zarr", dimension_separator: Optional[DIMENSION_SEPARATOR] = None, **kwargs + ): import redis self._prefix = prefix From 99e03c684729b188457024a53afc45cb1b160027 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Wed, 28 Feb 2024 18:22:28 +0530 Subject: [PATCH 0445/1078] Replace Gitter with new Zulip Chat link (#1685) * Replace Gitter with Zulip * Replace Gitter with Zulip in remaining places --- .github/ISSUE_TEMPLATE/config.yml | 2 +- README.md | 6 +++--- docs/index.rst | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 907121f858..9ceaab2ae7 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -5,7 +5,7 @@ contact_links: about: A new major feature should be discussed in the Zarr specifications repository. - name: ❓ Discuss something on Zulip url: https://ossci.zulipchat.com/ - about: For questions like "How do I do X with Zarr?", you can move to our Gitter channel. + about: For questions like "How do I do X with Zarr?", you can move to our Zulip Chat. - name: ❓ Discuss something on GitHub Discussions url: https://github.com/zarr-developers/zarr-python/discussions about: For questions like "How do I do X with Zarr?", you can move to GitHub Discussions. diff --git a/README.md b/README.md index b035ffa597..e379c9719f 100644 --- a/README.md +++ b/README.md @@ -70,10 +70,10 @@ - Gitter + Zulip - - + + diff --git a/docs/index.rst b/docs/index.rst index 06f79b7e7c..a5dbfbc5bf 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,7 +25,7 @@ Zarr-Python `Installation `_ | `Source Repository `_ | `Issue Tracker `_ | -`Gitter `_ +`Zulip Chat `_ Zarr is a file storage format for chunked, compressed, N-dimensional arrays based on an open-source specification. From 67d5d82317451c9072a187efd6d638e718cdaced Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 2 Mar 2024 02:03:35 +0530 Subject: [PATCH 0446/1078] Bump redis from 5.0.1 to 5.0.2 (#1688) Bumps [redis](https://github.com/redis/redis-py) from 5.0.1 to 5.0.2. 
- [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v5.0.1...v5.0.2) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 85f6fccffc..c3d747a47e 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.1.2 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==5.0.1 +redis==5.0.2 types-redis types-setuptools pymongo==4.6.2 From 9c2a412d70ed717165966bc47615bdef195d68c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:48:15 +0100 Subject: [PATCH 0447/1078] Bump pypa/gh-action-pypi-publish from 1.8.11 to 1.8.12 (#1691) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.11 to 1.8.12. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.11...v1.8.12) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 250c6112c8..6d417042b5 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.11 + - uses: pypa/gh-action-pypi-publish@v1.8.12 with: user: __token__ password: ${{ secrets.pypi_password }} From 237f934f5ac7d7a04c6b144f97e54776eda628c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 01:02:48 +0530 Subject: [PATCH 0448/1078] Bump pytest-doctestplus from 1.1.0 to 1.2.0 (#1693) Bumps [pytest-doctestplus](https://github.com/scientific-python/pytest-doctestplus) from 1.1.0 to 1.2.0. - [Release notes](https://github.com/scientific-python/pytest-doctestplus/releases) - [Changelog](https://github.com/scientific-python/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/scientific-python/pytest-doctestplus/compare/v1.1.0...v1.2.0) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index c3d747a47e..0f4493b1d4 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==4.6.2 # optional test requirements coverage pytest-cov==4.1.0 -pytest-doctestplus==1.1.0 +pytest-doctestplus==1.2.0 pytest-timeout==2.2.0 h5py==3.10.0 fsspec==2023.12.2 From 240bb824b86a68aa0eb6f03fecbe5439882607ca Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Wed, 6 Mar 2024 19:48:22 +0530 Subject: [PATCH 0449/1078] Fix RTD build (#1694) --- .readthedocs.yaml | 4 +++- docs/index.rst | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 08cac8d78d..e45cae1b45 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -16,4 +16,6 @@ python: extra_requirements: - docs -formats: all +formats: + - htmlzip + - pdf diff --git a/docs/index.rst b/docs/index.rst index a5dbfbc5bf..cf54e261af 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,7 +19,7 @@ Zarr-Python **Version**: |version| -**Download documentation**: `PDF/Zipped HTML/EPUB `_ +**Download documentation**: `PDF/Zipped HTML `_ **Useful links**: `Installation `_ | From a1fbedb18c1fc70f026c423fafca6d84ad88ce53 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Thu, 7 Mar 2024 00:53:47 +0530 Subject: [PATCH 0450/1078] Update release.rst for v2.17.1 (#1673) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update release.rst for v2.17.1 * Change the copyright year from 2023 → 2024. * Update release.rst for v2.17.1 --- LICENSE.txt | 2 +- docs/conf.py | 2 +- docs/release.rst | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/LICENSE.txt b/LICENSE.txt index 850a0d8772..a4de1c39d3 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2015-2023 Zarr Developers +Copyright (c) 2015-2024 Zarr Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/docs/conf.py b/docs/conf.py index 318843a9fb..048e77f51d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -72,7 +72,7 @@ # General information about the project. project = "zarr" -copyright = "2023, Zarr Developers" +copyright = "2024, Zarr Developers" author = "Zarr Developers" version = zarr.__version__ diff --git a/docs/release.rst b/docs/release.rst index 8ce4b2e33c..037432ca58 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,9 +18,41 @@ Release notes Unreleased ---------- +.. _release_2.17.1: + +2.17.1 +------ + +Enhancements +~~~~~~~~~~~~ + * Change occurrences of % and format() to f-strings. By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1423`. +* Proper argument for numpy.reshape. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1425`. + +* Add typing to dimension separator arguments. + By :user:`David Stansby ` :issue:`1620`. + +Docs +~~~~ + +* ZIP related tweaks. + By :user:`Davis Bennett ` :issue:`1641`. + +Maintenance +~~~~~~~~~~~ + +* Update config.yml with Zulip. + By :user:`Josh Moore `. + +* Replace Gitter with the new Zulip Chat link. + By :user:`Sanket Verma ` :issue:`1685`. + +* Fix RTD build. + By :user:`Sanket Verma ` :issue:`1694`. + .. 
_release_2.17.0: 2.17.0 From d986f8973eafbf847179d8c3f7e16451d0fcd63d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 17:40:55 -0800 Subject: [PATCH 0451/1078] Bump pytest-timeout from 2.2.0 to 2.3.1 (#1697) Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 2.2.0 to 2.3.1. - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/2.2.0...2.3.1) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 0f4493b1d4..b14381dd6e 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -16,7 +16,7 @@ pymongo==4.6.2 coverage pytest-cov==4.1.0 pytest-doctestplus==1.2.0 -pytest-timeout==2.2.0 +pytest-timeout==2.3.1 h5py==3.10.0 fsspec==2023.12.2 s3fs==2023.12.2 From d642da6320793c64dc227cf8062a9936d0fd398e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 10 Mar 2024 14:26:24 -0700 Subject: [PATCH 0452/1078] Bump conda-incubator/setup-miniconda from 3.0.2 to 3.0.3 (#1690) Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 3.0.2 to 3.0.3. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v3.0.2...v3.0.3) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sanket Verma --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index d95b2bc540..dba6918514 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3.0.2 + uses: conda-incubator/setup-miniconda@v3.0.3 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 946b7efa7d..fd2603ff95 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -42,7 +42,7 @@ jobs: with: fetch-depth: 0 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3.0.2 + uses: conda-incubator/setup-miniconda@v3.0.3 with: channels: conda-forge python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index 85e5c3e6b6..d580ef3f0e 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v3.0.2 + - uses: conda-incubator/setup-miniconda@v3.0.3 with: auto-update-conda: true python-version: ${{ matrix.python-version }} From 029cff71b86871cde76c7909cfecd28764953377 Mon Sep 17 00:00:00 2001 From: "Daniel Jahn (dahn)" Date: Sun, 10 Mar 2024 22:37:36 +0100 Subject: [PATCH 0453/1078] docs(tutorial.rst): fix link to GCSMap (#1689) --- docs/tutorial.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 1f7accab3a..214dd4f63f 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -868,7 +868,7 @@ implementations of the ``MutableMapping`` interface for Amazon S3 (`S3Map Distributed File System (`HDFSMap `_) and Google Cloud Storage (`GCSMap -`_), which +`_), which can be used with Zarr. Here is an example using S3Map to read an array created previously:: From 9fc4981ddfb0e032fcc76fa6585b5a66dc5d2f06 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Mon, 11 Mar 2024 03:19:28 +0530 Subject: [PATCH 0454/1078] Update installation.rst stating version support policy (#1665) * Update installation.rst stating version support policy * Update docs/installation.rst Co-authored-by: Joe Hamman * Update docs/installation.rst --------- Co-authored-by: Joe Hamman --- docs/installation.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/installation.rst b/docs/installation.rst index 8553d451cb..35865c764d 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -6,6 +6,11 @@ Zarr depends on NumPy. It is generally best to `install NumPy appropriate for your operating system and Python distribution. Other dependencies should be installed automatically if using one of the installation methods below. +Note: Zarr has endorsed `Scientific-Python SPEC 0 `_ and now follows the version support window as outlined below: + +- Python: 36 months after initial release +- Core package dependencies (e.g. 
NumPy): 24 months after initial release + Install Zarr from PyPI:: $ pip install zarr From f58065b221452acd70235902ad59d920da6fb02f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 08:47:15 -0700 Subject: [PATCH 0455/1078] Bump pypa/gh-action-pypi-publish from 1.8.12 to 1.8.14 (#1700) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.12 to 1.8.14. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.12...v1.8.14) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 6d417042b5..fe168d2862 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -64,7 +64,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.12 + - uses: pypa/gh-action-pypi-publish@v1.8.14 with: user: __token__ password: ${{ secrets.pypi_password }} From bbac25472e0a781dc5c0256d26a481eacb27390b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:58:21 +0100 Subject: [PATCH 0456/1078] Bump pytest-doctestplus from 1.2.0 to 1.2.1 (#1699) Bumps [pytest-doctestplus](https://github.com/scientific-python/pytest-doctestplus) from 1.2.0 to 1.2.1. - [Release notes](https://github.com/scientific-python/pytest-doctestplus/releases) - [Changelog](https://github.com/scientific-python/pytest-doctestplus/blob/main/CHANGES.rst) - [Commits](https://github.com/scientific-python/pytest-doctestplus/compare/v1.2.0...v1.2.1) --- updated-dependencies: - dependency-name: pytest-doctestplus dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sanket Verma --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index b14381dd6e..62b257ea70 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -15,7 +15,7 @@ pymongo==4.6.2 # optional test requirements coverage pytest-cov==4.1.0 -pytest-doctestplus==1.2.0 +pytest-doctestplus==1.2.1 pytest-timeout==2.3.1 h5py==3.10.0 fsspec==2023.12.2 From 6fe553df925c224fcc0a12ecdd074997ce9e56f7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 17:53:29 +0100 Subject: [PATCH 0457/1078] Bump redis from 5.0.2 to 5.0.3 (#1698) Bumps [redis](https://github.com/redis/redis-py) from 5.0.2 to 5.0.3. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v5.0.2...v5.0.3) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sanket Verma --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 62b257ea70..7ff673cebd 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.1.2 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==5.0.2 +redis==5.0.3 types-redis types-setuptools pymongo==4.6.2 From f4f0b42d5ced9f777709fb87ff06ff09fbaa3055 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 25 Mar 2024 09:44:40 -0700 Subject: [PATCH 0458/1078] Add Python 3.12 to CI (#1719) * Update python-package.yml * bump numpy versions * bump min python version * Update release.rst --- .github/workflows/python-package.yml | 10 ++++++---- docs/release.rst | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index fd2603ff95..2f9166ae96 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,13 +15,15 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.9', '3.10', '3.11'] - numpy_version: ['>=1.22.0', '==1.21.*'] + python-version: ['3.9', '3.10', '3.11', '3.12'] + numpy_version: ['>=1.24.0', '==1.23.*'] exclude: - python-version: '3.10' - numpy_version: '==1.21.*' + numpy_version: '==1.23.*' - python-version: '3.11' - numpy_version: '==1.21.*' + numpy_version: '==1.23.*' + - python-version: '3.12' + numpy_version: '==1.23.*' services: redis: image: redis diff --git a/docs/release.rst b/docs/release.rst index 037432ca58..5c4da710b2 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -53,6 +53,12 @@ Maintenance * Fix RTD build. By :user:`Sanket Verma ` :issue:`1694`. +* Add CI test environment for Python 3.12 + By :user:`Joe Hamman ` :issue:`1719`. + +* Bump minimum supported NumPy version to 1.23 (per spec 0000) + By :user:`Joe Hamman ` :issue:`1719`. + .. _release_2.17.0: 2.17.0 diff --git a/pyproject.toml b/pyproject.toml index 4da3079808..0be79f990e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ maintainers = [ requires-python = ">=3.9" dependencies = [ 'asciitree', - 'numpy>=1.21.1', + 'numpy>=1.23', 'fasteners; sys_platform != "emscripten"', 'numcodecs>=0.10.0', ] From 04e862cfcaf58aeb5e13e40b66007f44caa9bcae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Mar 2024 19:02:07 +0100 Subject: [PATCH 0459/1078] Bump pytest-cov from 4.1.0 to 5.0.0 (#1722) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.1.0 to 5.0.0. - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.1.0...v5.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 7ff673cebd..a3411acd67 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -14,7 +14,7 @@ types-setuptools pymongo==4.6.2 # optional test requirements coverage -pytest-cov==4.1.0 +pytest-cov==5.0.0 pytest-doctestplus==1.2.1 pytest-timeout==2.3.1 h5py==3.10.0 From 0e58b79dd38911a80648f5a4cbf01439fe90c7a5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 00:27:27 +0530 Subject: [PATCH 0460/1078] chore: update pre-commit hooks (#1708) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.2.2 → v0.3.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.2...v0.3.3) - [github.com/psf/black: 24.2.0 → 24.3.0](https://github.com/psf/black/compare/24.2.0...24.3.0) - [github.com/pre-commit/mirrors-mypy: v1.8.0 → v1.9.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.8.0...v1.9.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Sanket Verma --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 41b65f1d02..46aadb554b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,11 +8,11 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.2.2' + rev: 'v0.3.3' hooks: - id: ruff - repo: https://github.com/psf/black - rev: 24.2.0 + rev: 24.3.0 hooks: - id: black - repo: https://github.com/codespell-project/codespell @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.8.0 + rev: v1.9.0 hooks: - id: mypy files: zarr From 8d0d910f27453157558d201e7a6a84bb565c87ad Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Mar 2024 13:46:34 -0700 Subject: [PATCH 0461/1078] chore: update pre-commit hooks (#1723) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.3 → v0.3.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.3...v0.3.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46aadb554b..6c2762f34d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.3.3' + rev: 'v0.3.4' hooks: - id: ruff - repo: https://github.com/psf/black From 2534413e2f8b56d9c64744419628a464d639f1dc Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 26 Mar 2024 18:44:50 -0700 Subject: [PATCH 0462/1078] Fix release notes (following #1719) (#1725) --- docs/release.rst | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 5c4da710b2..116393d417 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,15 @@ Release notes Unreleased ---------- +Maintenance +~~~~~~~~~~~ + +* Add CI test environment for Python 3.12 + By :user:`Joe Hamman ` :issue:`1719`. + +* Bump minimum supported NumPy version to 1.23 (per spec 0000) + By :user:`Joe Hamman ` :issue:`1719`. + .. _release_2.17.1: 2.17.1 @@ -53,12 +62,6 @@ Maintenance * Fix RTD build. By :user:`Sanket Verma ` :issue:`1694`. -* Add CI test environment for Python 3.12 - By :user:`Joe Hamman ` :issue:`1719`. - -* Bump minimum supported NumPy version to 1.23 (per spec 0000) - By :user:`Joe Hamman ` :issue:`1719`. - .. _release_2.17.0: 2.17.0 From bad8dd0240861c48e768c265911aec2aec24481c Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Wed, 27 Mar 2024 12:42:33 -0600 Subject: [PATCH 0463/1078] Override ipython repr methods. (#1724) Closes #1716 This avoids expensive lookups against object stores. --- docs/release.rst | 6 ++++++ zarr/hierarchy.py | 41 ++++++++++++++++++++++++++++++++++++ zarr/tests/test_hierarchy.py | 21 ++++++++++++++++++ 3 files changed, 68 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 116393d417..fd48a53b38 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,12 @@ Release notes Unreleased ---------- +Enhancements +~~~~~~~~~~~~ + +* Override IPython ``_repr_*_`` methods to avoid expensive lookups against object stores. + By :user:`Deepak Cherian ` :issue:`1716`. + Maintenance ~~~~~~~~~~~ diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 44af1d63d1..c88892c932 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -515,6 +515,13 @@ def _delitem_nosync(self, item): raise KeyError(item) def __getattr__(self, item): + # https://github.com/jupyter/notebook/issues/2014 + # Save a possibly expensive lookup (for e.g. against cloud stores) + # Note: The _ipython_display_ method is required to display the right info as a side-effect. + # It is simpler to pretend it doesn't exist. + if item in ["_ipython_canary_method_should_not_exist_", "_ipython_display_"]: + raise AttributeError + # allow access to group members via dot notation try: return self.__getitem__(item) @@ -1331,6 +1338,40 @@ def move(self, source, dest): self._write_op(self._move_nosync, source, dest) + # Override ipython repr methods, GH1716 + # https://ipython.readthedocs.io/en/stable/config/integrating.html#custom-methods + # " If the methods don’t exist, the standard repr() is used. If a method exists and + # returns None, it is treated the same as if it does not exist." 
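# --- Illustration only; the lines below are NOT part of the patch above. ---
# A minimal, self-contained sketch of the IPython behaviour quoted in the
# comment above: a ``_repr_*_`` method that exists but returns None is treated
# exactly as if it were missing, so IPython falls back to the plain ``repr()``
# and no expensive object-store lookup is triggered. ``LazyDisplay`` is a
# hypothetical example class, not a name from this codebase.
class LazyDisplay:
    def __repr__(self) -> str:
        return "LazyDisplay()"  # cheap textual repr; touches no storage

    def _repr_html_(self):
        return None  # IPython: same as if _repr_html_ were not defined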
+ def _repr_html_(self): + return None + + def _repr_latex_(self): + return None + + def _repr_mimebundle_(self, **kwargs): + return None + + def _repr_svg_(self): + return None + + def _repr_png_(self): + return None + + def _repr_jpeg_(self): + return None + + def _repr_markdown_(self): + return None + + def _repr_javascript_(self): + return None + + def _repr_pdf_(self): + return None + + def _repr_json_(self): + return None + def _normalize_store_arg(store, *, storage_options=None, mode="r", zarr_version=None): if zarr_version is None: diff --git a/zarr/tests/test_hierarchy.py b/zarr/tests/test_hierarchy.py index 6c08d7b88a..161e1eb813 100644 --- a/zarr/tests/test_hierarchy.py +++ b/zarr/tests/test_hierarchy.py @@ -1,4 +1,5 @@ import atexit +import operator import os import sys import pickle @@ -87,6 +88,26 @@ def create_group( ) return g + def test_ipython_repr_methods(self): + g = self.create_group() + for method in [ + "html", + "json", + "javascript", + "markdown", + "svg", + "png", + "jpeg", + "latex", + "pdf", + "mimebundle", + ]: + assert operator.methodcaller(f"_repr_{method}_")(g) is None + with pytest.raises(AttributeError): + g._ipython_display_() + with pytest.raises(AttributeError): + g._ipython_canary_method_should_not_exist_() + def test_group_init_1(self): store, chunk_store = self.create_store() g = self.create_group(store, chunk_store=chunk_store) From 37e0a1a0c22f552daf6fd94fec5474a9b92db33d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 16:40:01 +0530 Subject: [PATCH 0464/1078] Bump pymongo from 4.6.2 to 4.6.3 (#1729) --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index a3411acd67..809d1c0eee 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -11,7 +11,7 @@ azure-storage-blob==12.16.0 # pyup: ignore redis==5.0.3 types-redis types-setuptools -pymongo==4.6.2 +pymongo==4.6.3 # optional test requirements coverage pytest-cov==5.0.0 From cb5b77a7fcf8312bc28d7970a31395c5690f5898 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Fri, 29 Mar 2024 05:37:50 +0530 Subject: [PATCH 0465/1078] Remove v1 and v2 specification (#1582) * Remove v1 and v2 specification * fix warning --------- Co-authored-by: Davis Bennett --- docs/release.rst | 4 +- docs/spec/v1.rst | 267 +--------------------- docs/spec/v2.rst | 562 +---------------------------------------------- docs/spec/v3.rst | 2 +- 4 files changed, 7 insertions(+), 828 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index fd48a53b38..9c75dc4feb 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -1625,11 +1625,11 @@ Bug fixes Documentation ~~~~~~~~~~~~~ -* Some changes have been made to the :ref:`spec_v2` document to clarify +* Some changes have been made to the Zarr Specification v2 document to clarify ambiguities and add some missing information. These changes do not break compatibility with any of the material as previously implemented, and so the changes have been made in-place in the document without incrementing the document version number. See the - section on :ref:`spec_v2_changes` in the specification document for more information. + section on changes in the specification document for more information. * A new :ref:`tutorial_indexing` section has been added to the tutorial. * A new :ref:`tutorial_strings` section has been added to the tutorial (:issue:`135`, :issue:`175`). 
diff --git a/docs/spec/v1.rst b/docs/spec/v1.rst index 13f68ef36e..27a0490e0a 100644 --- a/docs/spec/v1.rst +++ b/docs/spec/v1.rst @@ -3,268 +3,5 @@ Zarr Storage Specification Version 1 ==================================== -This document provides a technical specification of the protocol and -format used for storing a Zarr array. The key words "MUST", "MUST -NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", -"RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be -interpreted as described in `RFC 2119 -`_. - -Status ------- - -This specification is deprecated. See :ref:`spec` for the latest version. - -Storage -------- - -A Zarr array can be stored in any storage system that provides a -key/value interface, where a key is an ASCII string and a value is an -arbitrary sequence of bytes, and the supported operations are read -(get the sequence of bytes associated with a given key), write (set -the sequence of bytes associated with a given key) and delete (remove -a key/value pair). - -For example, a directory in a file system can provide this interface, -where keys are file names, values are file contents, and files can be -read, written or deleted via the operating system. Equally, an S3 -bucket can provide this interface, where keys are resource names, -values are resource contents, and resources can be read, written or -deleted via HTTP. - -Below an "array store" refers to any system implementing this -interface. - -Metadata --------- - -Each array requires essential configuration metadata to be stored, -enabling correct interpretation of the stored data. This metadata is -encoded using JSON and stored as the value of the 'meta' key within an -array store. - -The metadata resource is a JSON object. The following keys MUST be -present within the object: - -zarr_format - An integer defining the version of the storage specification to which the - array store adheres. -shape - A list of integers defining the length of each dimension of the array. -chunks - A list of integers defining the length of each dimension of a chunk of the - array. Note that all chunks within a Zarr array have the same shape. -dtype - A string or list defining a valid data type for the array. See also - the subsection below on data type encoding. -compression - A string identifying the primary compression library used to compress - each chunk of the array. -compression_opts - An integer, string or dictionary providing options to the primary - compression library. -fill_value - A scalar value providing the default value to use for uninitialized - portions of the array. -order - Either 'C' or 'F', defining the layout of bytes within each chunk of the - array. 'C' means row-major order, i.e., the last dimension varies fastest; - 'F' means column-major order, i.e., the first dimension varies fastest. - -Other keys MAY be present within the metadata object however they MUST -NOT alter the interpretation of the required fields defined above. - -For example, the JSON object below defines a 2-dimensional array of -64-bit little-endian floating point numbers with 10000 rows and 10000 -columns, divided into chunks of 1000 rows and 1000 columns (so there -will be 100 chunks in total arranged in a 10 by 10 grid). Within each -chunk the data are laid out in C contiguous order, and each chunk is -compressed using the Blosc compression library:: - - { - "chunks": [ - 1000, - 1000 - ], - "compression": "blosc", - "compression_opts": { - "clevel": 5, - "cname": "lz4", - "shuffle": 1 - }, - "dtype": "`_. 
The -format consists of 3 parts: a character describing the byteorder of -the data (``<``: little-endian, ``>``: big-endian, ``|``: -not-relevant), a character code giving the basic type of the array, -and an integer providing the number of bytes the type uses. The byte -order MUST be specified. E.g., ``"i4"``, ``"|b1"`` and -``"|S12"`` are valid data types. - -Structure data types (i.e., with multiple named fields) are encoded as -a list of two-element lists, following `NumPy array protocol type -descriptions (descr) -`_. -For example, the JSON list ``[["r", "|u1"], ["g", "|u1"], ["b", -"|u1"]]`` defines a data type composed of three single-byte unsigned -integers labelled 'r', 'g' and 'b'. - -Chunks ------- - -Each chunk of the array is compressed by passing the raw bytes for the -chunk through the primary compression library to obtain a new sequence -of bytes comprising the compressed chunk data. No header is added to -the compressed bytes or any other modification made. The internal -structure of the compressed bytes will depend on which primary -compressor was used. For example, the `Blosc compressor -`_ -produces a sequence of bytes that begins with a 16-byte header -followed by compressed data. - -The compressed sequence of bytes for each chunk is stored under a key -formed from the index of the chunk within the grid of chunks -representing the array. To form a string key for a chunk, the indices -are converted to strings and concatenated with the period character -('.') separating each index. For example, given an array with shape -(10000, 10000) and chunk shape (1000, 1000) there will be 100 chunks -laid out in a 10 by 10 grid. The chunk with indices (0, 0) provides -data for rows 0-999 and columns 0-999 and is stored under the key -'0.0'; the chunk with indices (2, 4) provides data for rows 2000-2999 -and columns 4000-4999 and is stored under the key '2.4'; etc. - -There is no need for all chunks to be present within an array -store. If a chunk is not present then it is considered to be in an -uninitialized state. An uninitialized chunk MUST be treated as if it -was uniformly filled with the value of the 'fill_value' field in the -array metadata. If the 'fill_value' field is ``null`` then the -contents of the chunk are undefined. - -Note that all chunks in an array have the same shape. If the length of -any array dimension is not exactly divisible by the length of the -corresponding chunk dimension then some chunks will overhang the edge -of the array. The contents of any chunk region falling outside the -array are undefined. - -Attributes ----------- - -Each array can also be associated with custom attributes, which are -simple key/value items with application-specific meaning. Custom -attributes are encoded as a JSON object and stored under the 'attrs' -key within an array store. Even if the attributes are empty, the -'attrs' key MUST be present within an array store. - -For example, the JSON object below encodes three attributes named -'foo', 'bar' and 'baz':: - - { - "foo": 42, - "bar": "apples", - "baz": [1, 2, 3, 4] - } - -Example -------- - -Below is an example of storing a Zarr array, using a directory on the -local file system as storage. - -Initialize the store:: - - >>> import zarr - >>> store = zarr.DirectoryStore('example.zarr') - >>> zarr.init_store(store, shape=(20, 20), chunks=(10, 10), - ... dtype='i4', fill_value=42, compression='zlib', - ... 
compression_opts=1, overwrite=True) - -No chunks are initialized yet, so only the 'meta' and 'attrs' keys -have been set:: - - >>> import os - >>> sorted(os.listdir('example.zarr')) - ['attrs', 'meta'] - -Inspect the array metadata:: - - >>> print(open('example.zarr/meta').read()) - { - "chunks": [ - 10, - 10 - ], - "compression": "zlib", - "compression_opts": 1, - "dtype": ">> print(open('example.zarr/attrs').read()) - {} - -Set some data:: - - >>> z = zarr.Array(store) - >>> z[0:10, 0:10] = 1 - >>> sorted(os.listdir('example.zarr')) - ['0.0', 'attrs', 'meta'] - -Set some more data:: - - >>> z[0:10, 10:20] = 2 - >>> z[10:20, :] = 3 - >>> sorted(os.listdir('example.zarr')) - ['0.0', '0.1', '1.0', '1.1', 'attrs', 'meta'] - -Manually decompress a single chunk for illustration:: - - >>> import zlib - >>> b = zlib.decompress(open('example.zarr/0.0', 'rb').read()) - >>> import numpy as np - >>> a = np.frombuffer(b, dtype='>> a - array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1], dtype=int32) - -Modify the array attributes:: - - >>> z.attrs['foo'] = 42 - >>> z.attrs['bar'] = 'apples' - >>> z.attrs['baz'] = [1, 2, 3, 4] - >>> print(open('example.zarr/attrs').read()) - { - "bar": "apples", - "baz": [ - 1, - 2, - 3, - 4 - ], - "foo": 42 - } +The V1 Specification has been migrated to its website → +https://zarr-specs.readthedocs.io/. diff --git a/docs/spec/v2.rst b/docs/spec/v2.rst index c1e12e1218..deb6d46ce6 100644 --- a/docs/spec/v2.rst +++ b/docs/spec/v2.rst @@ -3,563 +3,5 @@ Zarr Storage Specification Version 2 ==================================== -This document provides a technical specification of the protocol and format -used for storing Zarr arrays. The key words "MUST", "MUST NOT", "REQUIRED", -"SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and -"OPTIONAL" in this document are to be interpreted as described in `RFC 2119 -`_. - -Status ------- - -This specification is the latest version. See :ref:`spec` for previous -versions. - -.. _spec_v2_storage: - -Storage -------- - -A Zarr array can be stored in any storage system that provides a key/value -interface, where a key is an ASCII string and a value is an arbitrary sequence -of bytes, and the supported operations are read (get the sequence of bytes -associated with a given key), write (set the sequence of bytes associated with -a given key) and delete (remove a key/value pair). - -For example, a directory in a file system can provide this interface, where -keys are file names, values are file contents, and files can be read, written -or deleted via the operating system. Equally, an S3 bucket can provide this -interface, where keys are resource names, values are resource contents, and -resources can be read, written or deleted via HTTP. - -Below an "array store" refers to any system implementing this interface. - -.. _spec_v2_array: - -Arrays ------- - -.. _spec_v2_array_metadata: - -Metadata -~~~~~~~~ - -Each array requires essential configuration metadata to be stored, enabling -correct interpretation of the stored data. This metadata is encoded using JSON -and stored as the value of the ".zarray" key within an array store. - -The metadata resource is a JSON object. 
The following keys MUST be present -within the object: - -zarr_format - An integer defining the version of the storage specification to which the - array store adheres. -shape - A list of integers defining the length of each dimension of the array. -chunks - A list of integers defining the length of each dimension of a chunk of the - array. Note that all chunks within a Zarr array have the same shape. -dtype - A string or list defining a valid data type for the array. See also - the subsection below on data type encoding. -compressor - A JSON object identifying the primary compression codec and providing - configuration parameters, or ``null`` if no compressor is to be used. - The object MUST contain an ``"id"`` key identifying the codec to be used. -fill_value - A scalar value providing the default value to use for uninitialized - portions of the array, or ``null`` if no fill_value is to be used. -order - Either "C" or "F", defining the layout of bytes within each chunk of the - array. "C" means row-major order, i.e., the last dimension varies fastest; - "F" means column-major order, i.e., the first dimension varies fastest. -filters - A list of JSON objects providing codec configurations, or ``null`` if no - filters are to be applied. Each codec configuration object MUST contain a - ``"id"`` key identifying the codec to be used. - -The following keys MAY be present within the object: - -dimension_separator - If present, either the string ``"."`` or ``"/"`` defining the separator placed - between the dimensions of a chunk. If the value is not set, then the - default MUST be assumed to be ``"."``, leading to chunk keys of the form "0.0". - Arrays defined with ``"/"`` as the dimension separator can be considered to have - nested, or hierarchical, keys of the form "0/0" that SHOULD where possible - produce a directory-like structure. - -Other keys SHOULD NOT be present within the metadata object and SHOULD be -ignored by implementations. - -For example, the JSON object below defines a 2-dimensional array of 64-bit -little-endian floating point numbers with 10000 rows and 10000 columns, divided -into chunks of 1000 rows and 1000 columns (so there will be 100 chunks in total -arranged in a 10 by 10 grid). Within each chunk the data are laid out in C -contiguous order. Each chunk is encoded using a delta filter and compressed -using the Blosc compression library prior to storage:: - - { - "chunks": [ - 1000, - 1000 - ], - "compressor": { - "id": "blosc", - "cname": "lz4", - "clevel": 5, - "shuffle": 1 - }, - "dtype": "`. The format -consists of 3 parts: - -* One character describing the byteorder of the data (``"<"``: little-endian; - ``">"``: big-endian; ``"|"``: not-relevant) -* One character code giving the basic type of the array (``"b"``: Boolean (integer - type where all values are only True or False); ``"i"``: integer; ``"u"``: unsigned - integer; ``"f"``: floating point; ``"c"``: complex floating point; ``"m"``: timedelta; - ``"M"``: datetime; ``"S"``: string (fixed-length sequence of char); ``"U"``: unicode - (fixed-length sequence of Py_UNICODE); ``"V"``: other (void * – each item is a - fixed-size chunk of memory)) -* An integer specifying the number of bytes the type uses. - -The byte order MUST be specified. E.g., ``"i4"``, ``"|b1"`` and -``"|S12"`` are valid data type encodings. - -For datetime64 ("M") and timedelta64 ("m") data types, these MUST also include the -units within square brackets. 
A list of valid units and their definitions are given in -the :ref:`NumPy documentation on Datetimes and Timedeltas -`. -For example, ``"`. Each -sub-list has the form ``[fieldname, datatype, shape]`` where ``shape`` -is optional. ``fieldname`` is a string, ``datatype`` is a string -specifying a simple data type (see above), and ``shape`` is a list of -integers specifying subarray shape. For example, the JSON list below -defines a data type composed of three single-byte unsigned integer -fields named "r", "g" and "b":: - - [["r", "|u1"], ["g", "|u1"], ["b", "|u1"]] - -For example, the JSON list below defines a data type composed of three -fields named "x", "y" and "z", where "x" and "y" each contain 32-bit -floats, and each item in "z" is a 2 by 2 array of floats:: - - [["x", "`_ -produces a sequence of bytes that begins with a 16-byte header followed by -compressed data. - -The compressed sequence of bytes for each chunk is stored under a key formed -from the index of the chunk within the grid of chunks representing the array. -To form a string key for a chunk, the indices are converted to strings and -concatenated with the period character (".") separating each index. For -example, given an array with shape (10000, 10000) and chunk shape (1000, 1000) -there will be 100 chunks laid out in a 10 by 10 grid. The chunk with indices -(0, 0) provides data for rows 0-999 and columns 0-999 and is stored under the -key "0.0"; the chunk with indices (2, 4) provides data for rows 2000-2999 and -columns 4000-4999 and is stored under the key "2.4"; etc. - -There is no need for all chunks to be present within an array store. If a chunk -is not present then it is considered to be in an uninitialized state. An -uninitialized chunk MUST be treated as if it was uniformly filled with the value -of the "fill_value" field in the array metadata. If the "fill_value" field is -``null`` then the contents of the chunk are undefined. - -Note that all chunks in an array have the same shape. If the length of any -array dimension is not exactly divisible by the length of the corresponding -chunk dimension then some chunks will overhang the edge of the array. The -contents of any chunk region falling outside the array are undefined. - -.. _spec_v2_array_filters: - -Filters -~~~~~~~ - -Optionally a sequence of one or more filters can be used to transform chunk -data prior to compression. When storing data, filters are applied in the order -specified in array metadata to encode data, then the encoded data are passed to -the primary compressor. When retrieving data, stored chunk data are -decompressed by the primary compressor then decoded using filters in the -reverse order. - -.. _spec_v2_hierarchy: - -Hierarchies ------------ - -.. _spec_v2_hierarchy_paths: - -Logical storage paths -~~~~~~~~~~~~~~~~~~~~~ - -Multiple arrays can be stored in the same array store by associating each array -with a different logical path. A logical path is simply an ASCII string. The -logical path is used to form a prefix for keys used by the array. For example, -if an array is stored at logical path "foo/bar" then the array metadata will be -stored under the key "foo/bar/.zarray", the user-defined attributes will be -stored under the key "foo/bar/.zattrs", and the chunks will be stored under -keys like "foo/bar/0.0", "foo/bar/0.1", etc. 
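(Illustration, not part of the patch above: the removed specification text just
quoted explains how a logical path becomes a key prefix and how chunk indices
are joined with "." to form chunk keys. The sketch below, with hypothetical
helper names and a simplified normalization, shows one way that rule plays out.)

    # Sketch of Zarr v2 chunk-key formation per the spec text above.
    # Normalization here is simplified (no error check for "." / ".." segments).
    def chunk_key(logical_path: str, indices: tuple) -> str:
        # backslash -> slash, then strip leading/trailing and collapse repeats
        segments = [s for s in logical_path.replace("\\", "/").split("/") if s]
        prefix = "/".join(segments) + "/" if segments else ""
        return prefix + ".".join(str(i) for i in indices)

    assert chunk_key("foo/bar", (0, 0)) == "foo/bar/0.0"
    assert chunk_key("//foo//bar/", (2, 4)) == "foo/bar/2.4"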
- -To ensure consistent behaviour across different storage systems, logical paths -MUST be normalized as follows: - -* Replace all backward slash characters ("\\\\") with forward slash characters - ("/") -* Strip any leading "/" characters -* Strip any trailing "/" characters -* Collapse any sequence of more than one "/" character into a single "/" - character - -The key prefix is then obtained by appending a single "/" character to the -normalized logical path. - -After normalization, if splitting a logical path by the "/" character results -in any path segment equal to the string "." or the string ".." then an error -MUST be raised. - -N.B., how the underlying array store processes requests to store values under -keys containing the "/" character is entirely up to the store implementation -and is not constrained by this specification. E.g., an array store could simply -treat all keys as opaque ASCII strings; equally, an array store could map -logical paths onto some kind of hierarchical storage (e.g., directories on a -file system). - -.. _spec_v2_hierarchy_groups: - -Groups -~~~~~~ - -Arrays can be organized into groups which can also contain other groups. A -group is created by storing group metadata under the ".zgroup" key under some -logical path. E.g., a group exists at the root of an array store if the -".zgroup" key exists in the store, and a group exists at logical path "foo/bar" -if the "foo/bar/.zgroup" key exists in the store. - -If the user requests a group to be created under some logical path, then groups -MUST also be created at all ancestor paths. E.g., if the user requests group -creation at path "foo/bar" then groups MUST be created at path "foo" and the -root of the store, if they don't already exist. - -If the user requests an array to be created under some logical path, then -groups MUST also be created at all ancestor paths. E.g., if the user requests -array creation at path "foo/bar/baz" then groups must be created at path -"foo/bar", path "foo", and the root of the store, if they don't already exist. - -The group metadata resource is a JSON object. The following keys MUST be present -within the object: - -zarr_format - An integer defining the version of the storage specification to which the - array store adheres. - -Other keys MUST NOT be present within the metadata object. - -The members of a group are arrays and groups stored under logical paths that -are direct children of the parent group's logical path. E.g., if groups exist -under the logical paths "foo" and "foo/bar" and an array exists at logical path -"foo/baz" then the members of the group at path "foo" are the group at path -"foo/bar" and the array at path "foo/baz". - -.. _spec_v2_attrs: - -Attributes ----------- - -An array or group can be associated with custom attributes, which are arbitrary -key/value pairs with application-specific meaning. Custom attributes are encoded -as a JSON object and stored under the ".zattrs" key within an array store. The -".zattrs" key does not have to be present, and if it is absent the attributes -should be treated as empty. - -For example, the JSON object below encodes three attributes named -"foo", "bar" and "baz":: - - { - "foo": 42, - "bar": "apples", - "baz": [1, 2, 3, 4] - } - -.. _spec_v2_examples: - -Examples --------- - -Storing a single array -~~~~~~~~~~~~~~~~~~~~~~ - -Below is an example of storing a Zarr array, using a directory on the -local file system as storage. 
- -Create an array:: - - >>> import zarr - >>> store = zarr.DirectoryStore('data/example.zarr') - >>> a = zarr.create(shape=(20, 20), chunks=(10, 10), dtype='i4', - ... fill_value=42, compressor=zarr.Zlib(level=1), - ... store=store, overwrite=True) - -No chunks are initialized yet, so only the ".zarray" and ".zattrs" keys -have been set in the store:: - - >>> import os - >>> sorted(os.listdir('data/example.zarr')) - ['.zarray'] - -Inspect the array metadata:: - - >>> print(open('data/example.zarr/.zarray').read()) - { - "chunks": [ - 10, - 10 - ], - "compressor": { - "id": "zlib", - "level": 1 - }, - "dtype": ">> a[0:10, 0:10] = 1 - >>> sorted(os.listdir('data/example.zarr')) - ['.zarray', '0.0'] - -Set some more data:: - - >>> a[0:10, 10:20] = 2 - >>> a[10:20, :] = 3 - >>> sorted(os.listdir('data/example.zarr')) - ['.zarray', '0.0', '0.1', '1.0', '1.1'] - -Manually decompress a single chunk for illustration:: - - >>> import zlib - >>> buf = zlib.decompress(open('data/example.zarr/0.0', 'rb').read()) - >>> import numpy as np - >>> chunk = np.frombuffer(buf, dtype='>> chunk - array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1], dtype=int32) - -Modify the array attributes:: - - >>> a.attrs['foo'] = 42 - >>> a.attrs['bar'] = 'apples' - >>> a.attrs['baz'] = [1, 2, 3, 4] - >>> sorted(os.listdir('data/example.zarr')) - ['.zarray', '.zattrs', '0.0', '0.1', '1.0', '1.1'] - >>> print(open('data/example.zarr/.zattrs').read()) - { - "bar": "apples", - "baz": [ - 1, - 2, - 3, - 4 - ], - "foo": 42 - } - -Storing multiple arrays in a hierarchy -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Below is an example of storing multiple Zarr arrays organized into a group -hierarchy, using a directory on the local file system as storage. This storage -implementation maps logical paths onto directory paths on the file system, -however this is an implementation choice and is not required. 
- -Setup the store:: - - >>> import zarr - >>> store = zarr.DirectoryStore('data/group.zarr') - -Create the root group:: - - >>> root_grp = zarr.group(store, overwrite=True) - -The metadata resource for the root group has been created:: - - >>> import os - >>> sorted(os.listdir('data/group.zarr')) - ['.zgroup'] - -Inspect the group metadata:: - - >>> print(open('data/group.zarr/.zgroup').read()) - { - "zarr_format": 2 - } - -Create a sub-group:: - - >>> sub_grp = root_grp.create_group('foo') - -What has been stored:: - - >>> sorted(os.listdir('data/group.zarr')) - ['.zgroup', 'foo'] - >>> sorted(os.listdir('data/group.zarr/foo')) - ['.zgroup'] - -Create an array within the sub-group:: - - >>> a = sub_grp.create_dataset('bar', shape=(20, 20), chunks=(10, 10)) - >>> a[:] = 42 - -Set a custom attributes:: - - >>> a.attrs['comment'] = 'answer to life, the universe and everything' - -What has been stored:: - - >>> sorted(os.listdir('data/group.zarr')) - ['.zgroup', 'foo'] - >>> sorted(os.listdir('data/group.zarr/foo')) - ['.zgroup', 'bar'] - >>> sorted(os.listdir('data/group.zarr/foo/bar')) - ['.zarray', '.zattrs', '0.0', '0.1', '1.0', '1.1'] - -Here is the same example using a Zip file as storage:: - - >>> store = zarr.ZipStore('data/group.zip', mode='w') - >>> root_grp = zarr.group(store) - >>> sub_grp = root_grp.create_group('foo') - >>> a = sub_grp.create_dataset('bar', shape=(20, 20), chunks=(10, 10)) - >>> a[:] = 42 - >>> a.attrs['comment'] = 'answer to life, the universe and everything' - >>> store.close() - -What has been stored:: - - >>> import zipfile - >>> zf = zipfile.ZipFile('data/group.zip', mode='r') - >>> for name in sorted(zf.namelist()): - ... print(name) - .zgroup - foo/.zgroup - foo/bar/.zarray - foo/bar/.zattrs - foo/bar/0.0 - foo/bar/0.1 - foo/bar/1.0 - foo/bar/1.1 - -.. _spec_v2_changes: - -Changes -------- - -Version 2 clarifications -~~~~~~~~~~~~~~~~~~~~~~~~ - -The following changes have been made to the version 2 specification since it was -initially published to clarify ambiguities and add some missing information. - -* The specification now describes how bytes fill values should be encoded and - decoded for arrays with a fixed-length byte string data type (:issue:`165`, - :issue:`176`). - -* The specification now clarifies that units must be specified for datetime64 and - timedelta64 data types (:issue:`85`, :issue:`215`). - -* The specification now clarifies that the '.zattrs' key does not have to be present for - either arrays or groups, and if absent then custom attributes should be treated as - empty. - -* The specification now describes how structured datatypes with - subarray shapes and/or with nested structured data types are encoded - in array metadata (:issue:`111`, :issue:`296`). - -* Clarified the key/value pairs of custom attributes as "arbitrary" rather than - "simple". - -Changes from version 1 to version 2 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The following changes were made between version 1 and version 2 of this specification: - -* Added support for storing multiple arrays in the same store and organising - arrays into hierarchies using groups. -* Array metadata is now stored under the ".zarray" key instead of the "meta" - key. -* Custom attributes are now stored under the ".zattrs" key instead of the - "attrs" key. -* Added support for filters. -* Changed encoding of "fill_value" field within array metadata. -* Changed encoding of compressor information within array metadata to be - consistent with representation of filter information. 
+The V2 Specification has been migrated to its website → +https://zarr-specs.readthedocs.io/. diff --git a/docs/spec/v3.rst b/docs/spec/v3.rst index bd8852707b..3d39f35ba6 100644 --- a/docs/spec/v3.rst +++ b/docs/spec/v3.rst @@ -1,7 +1,7 @@ .. _spec_v3: Zarr Storage Specification Version 3 -======================================================= +==================================== The V3 Specification has been migrated to its website → https://zarr-specs.readthedocs.io/. From 97cb3a7361ab763d6c2a7ab1128780f9142bd6aa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 23:46:31 +0200 Subject: [PATCH 0466/1078] chore: update pre-commit hooks (#1738) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.4 → v0.3.5](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.4...v0.3.5) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6c2762f34d..24ff72a12f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.3.4' + rev: 'v0.3.5' hooks: - id: ruff - repo: https://github.com/psf/black From 6ef6714bc63d66c409cf5780791d6349081fcd45 Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Mon, 1 Apr 2024 23:13:42 -0600 Subject: [PATCH 0467/1078] Optimize Array.info and Group.info (#1733) * Optimize Array.info. Avoid repeated computes of the same value (getsize) * Don't have InfoReporter query items twice. Apparently IPython will run both __repr__ and _repr_html_ so we were calling `getsize` twice. * Group too * Apply suggestions from code review Co-authored-by: Joe Hamman --------- Co-authored-by: Joe Hamman --- docs/release.rst | 2 ++ zarr/core.py | 13 +++++++------ zarr/hierarchy.py | 3 +-- zarr/util.py | 7 +++---- 4 files changed, 13 insertions(+), 12 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 9c75dc4feb..736838cdaf 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,6 +21,8 @@ Unreleased Enhancements ~~~~~~~~~~~~ +* Optimize ``Array.info`` so that it calls `getsize` only once. + By :user:`Deepak Cherian `. * Override IPython ``_repr_*_`` methods to avoid expensive lookups against object stores. By :user:`Deepak Cherian ` :issue:`1716`. diff --git a/zarr/core.py b/zarr/core.py index c3184c6652..07f38cd96d 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -176,7 +176,6 @@ def __init__( ) # initialize info reporter - self._info_reporter = InfoReporter(self) # initialize indexing helpers self._oindex = OIndex(self) @@ -2429,7 +2428,7 @@ def info(self): Chunks initialized : 0/10 """ - return self._info_reporter + return InfoReporter(self) def info_items(self): return self._synchronized_op(self._info_items_nosync) @@ -2471,14 +2470,16 @@ def bytestr(n): items += [("Synchronizer type", typestr(self._synchronizer))] # storage info + nbytes = self.nbytes + nbytes_stored = self.nbytes_stored items += [("Store type", typestr(self._store))] if self._chunk_store is not None: items += [("Chunk store type", typestr(self._chunk_store))] - items += [("No. bytes", bytestr(self.nbytes))] - if self.nbytes_stored > 0: + items += [("No. 
bytes", bytestr(nbytes))] + if nbytes_stored > 0: items += [ - ("No. bytes stored", bytestr(self.nbytes_stored)), - ("Storage ratio", f"{self.nbytes / self.nbytes_stored:.1f}"), + ("No. bytes stored", bytestr(nbytes_stored)), + ("Storage ratio", f"{nbytes / nbytes_stored:.1f}"), ] items += [("Chunks initialized", f"{self.nchunks_initialized}/{self.nchunks}")] diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index c88892c932..c5f7a37bc6 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -211,7 +211,6 @@ def __init__( ) # setup info - self._info = InfoReporter(self) @property def store(self): @@ -266,7 +265,7 @@ def attrs(self): @property def info(self): """Return diagnostic information about the group.""" - return self._info + return InfoReporter(self) @property def meta_array(self): diff --git a/zarr/util.py b/zarr/util.py index dc8aff0edf..848f1ed114 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -408,14 +408,13 @@ def info_html_report(items) -> str: class InfoReporter: def __init__(self, obj): self.obj = obj + self.items = self.obj.info_items() def __repr__(self): - items = self.obj.info_items() - return info_text_report(items) + return info_text_report(self.items) def _repr_html_(self): - items = self.obj.info_items() - return info_html_report(items) + return info_html_report(self.items) class TreeNode: From 9864b40731a11ed826f6b69e6b9e381a12168096 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 05:29:11 +0000 Subject: [PATCH 0468/1078] Bump actions/setup-python from 5.0.0 to 5.1.0 (#1736) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 5.0.0 to 5.1.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v5.0.0...v5.1.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index fe168d2862..8ac76c899b 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v5.0.0 + - uses: actions/setup-python@v5.1.0 name: Install Python with: python-version: '3.9' From aa9a0d5e1874217bcaa15340860cb8b606944fb6 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Apr 2024 19:45:48 +0200 Subject: [PATCH 0469/1078] Couple fixes (#1737) * Use `is` when comparing `type` of two objects * Unnecessary `None` provided as default --- zarr/_storage/store.py | 2 +- zarr/tests/test_core.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 209f118534..0a08080548 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -462,7 +462,7 @@ def inner_store(self) -> Union["StorageTransformer", StoreV3]: def __eq__(self, other): return ( - type(self) == type(other) + type(self) is type(other) and self._inner_store == other._inner_store and self.get_config() == other.get_config() ) diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index d9447c0832..730f724314 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -123,7 +123,7 @@ def create_array(self, shape: Union[int, Tuple[int, ...]], **kwargs): "compressor": kwargs.pop("compressor", self.compressor), "chunk_store": chunk_store, "storage_transformers": self.create_storage_transformers(shape), - "filters": kwargs.pop("filters", self.create_filters(kwargs.get("dtype", None))), + "filters": kwargs.pop("filters", self.create_filters(kwargs.get("dtype"))), } # keyword arguments for array instantiation From d0fb8758483dd811234300eea7a1f24909bb929f Mon Sep 17 00:00:00 2001 From: David Stansby Date: Wed, 3 Apr 2024 20:18:18 +0200 Subject: [PATCH 0470/1078] Fix tests with Pytest 8 (#1714) * Bump pytest version * Use editable install when testing --------- Co-authored-by: Sanket Verma --- .github/workflows/python-package.yml | 2 +- requirements_dev_minimal.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 2f9166ae96..a37fa3c63a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -64,7 +64,7 @@ jobs: python -m pip install --upgrade pip python -m pip install -U pip setuptools wheel line_profiler python -m pip install -rrequirements_dev_minimal.txt numpy${{matrix.numpy_version}} -rrequirements_dev_optional.txt pymongo redis - python -m pip install . + python -m pip install -e . 
python -m pip freeze - name: Tests shell: "bash -l {0}" diff --git a/requirements_dev_minimal.txt b/requirements_dev_minimal.txt index 94d3fff8a6..5d156db655 100644 --- a/requirements_dev_minimal.txt +++ b/requirements_dev_minimal.txt @@ -5,4 +5,4 @@ numcodecs==0.12.1 msgpack-python==0.5.6 setuptools-scm==8.0.4 # test requirements -pytest==7.4.4 +pytest==8.1.1 From bcb7684b5a15d4ead21e305301581e02193e8677 Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Thu, 4 Apr 2024 16:17:08 -0600 Subject: [PATCH 0471/1078] Avoid redundant __contains__ (#1739) Let's try grabbing the array.json and group.json files, and check for `*NotFoundError`, instead of using contains first. Co-authored-by: Davis Bennett --- zarr/hierarchy.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index c5f7a37bc6..0067eaebb5 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -27,6 +27,7 @@ from zarr.errors import ( ContainsArrayError, ContainsGroupError, + ArrayNotFoundError, GroupNotFoundError, ReadOnlyError, ) @@ -457,7 +458,7 @@ def __getitem__(self, item): """ path = self._item_path(item) - if contains_array(self._store, path): + try: return Array( self._store, read_only=self._read_only, @@ -468,7 +469,10 @@ def __getitem__(self, item): zarr_version=self._version, meta_array=self._meta_array, ) - elif contains_group(self._store, path, explicit_only=True): + except ArrayNotFoundError: + pass + + try: return Group( self._store, read_only=self._read_only, @@ -479,7 +483,10 @@ def __getitem__(self, item): zarr_version=self._version, meta_array=self._meta_array, ) - elif self._version == 3: + except GroupNotFoundError: + pass + + if self._version == 3: implicit_group = meta_root + path + "/" # non-empty folder in the metadata path implies an implicit group if self._store.list_prefix(implicit_group): From 0cfd2be24feb7cd86002b9ab22cd65f65ed85e3a Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Thu, 4 Apr 2024 20:13:23 -0600 Subject: [PATCH 0472/1078] Array & Group: Use already loaded attributes to populate cache. (#1734) * Array: Use already loaded attributes to populate cache. * Group: Use already loaded attributes to populate cache. * Fix * Add release note --- docs/release.rst | 4 ++++ zarr/attrs.py | 6 ++++-- zarr/core.py | 7 ++++++- zarr/hierarchy.py | 7 ++++++- 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 736838cdaf..346d673d68 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -21,8 +21,12 @@ Unreleased Enhancements ~~~~~~~~~~~~ +* [v3] Reuse the download array metadata when creating an ``Array``. + By :user:`Deepak Cherian `. + * Optimize ``Array.info`` so that it calls `getsize` only once. By :user:`Deepak Cherian `. + * Override IPython ``_repr_*_`` methods to avoid expensive lookups against object stores. By :user:`Deepak Cherian ` :issue:`1716`. 
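The attrs.py, core.py and hierarchy.py hunks that follow thread a new
``cached_dict`` parameter through ``Attributes``, so an attributes dict that was
already parsed as part of the array or group metadata can seed the cache, and the
first ``.attrs`` access needs no extra store request. A minimal sketch of the
effect (illustrative only; it uses the v2 ``KVStore`` for brevity, whereas the
patch itself wires this up for v3 metadata)::

    from zarr.attrs import Attributes
    from zarr.storage import KVStore

    store = KVStore({})  # empty store: any read through it would raise KeyError
    # seed the cache with an attributes dict we already hold
    attrs = Attributes(store, cache=True, cached_dict={"units": "m"})
    assert attrs["units"] == "m"  # served from the seeded cache, no store I/O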
diff --git a/zarr/attrs.py b/zarr/attrs.py index e967c5b853..2afcaf295e 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -25,14 +25,16 @@ class Attributes(MutableMapping): """ - def __init__(self, store, key=".zattrs", read_only=False, cache=True, synchronizer=None): + def __init__( + self, store, key=".zattrs", read_only=False, cache=True, synchronizer=None, cached_dict=None + ): self._version = getattr(store, "_store_version", 2) _Store = Store if self._version == 2 else StoreV3 self.store = _Store._ensure_store(store) self.key = key self.read_only = read_only self.cache = cache - self._cached_asdict = None + self._cached_asdict = cached_dict if cache else None self.synchronizer = synchronizer def _get_nosync(self): diff --git a/zarr/core.py b/zarr/core.py index 07f38cd96d..1bd081acee 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -172,7 +172,12 @@ def __init__( # initialize attributes akey = _prefix_to_attrs_key(self._store, self._key_prefix) self._attrs = Attributes( - store, key=akey, read_only=read_only, synchronizer=synchronizer, cache=cache_attrs + store, + key=akey, + read_only=read_only, + synchronizer=synchronizer, + cache=cache_attrs, + cached_dict=self._meta["attributes"] if self._version == 3 else None, ) # initialize info reporter diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 0067eaebb5..0fb07dd620 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -208,7 +208,12 @@ def __init__( # object can still be created. akey = mkey self._attrs = Attributes( - store, key=akey, read_only=read_only, cache=cache_attrs, synchronizer=synchronizer + store, + key=akey, + read_only=read_only, + cache=cache_attrs, + synchronizer=synchronizer, + cached_dict=self._meta["attributes"] if self._version == 3 and self._meta else None, ) # setup info From 62910bcebd3a583cea79af2077fa1d1798845797 Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Thu, 4 Apr 2024 20:14:09 -0600 Subject: [PATCH 0473/1078] Optimize attribute setting (#1741) * Optimize attribute setting * Add release note --------- Co-authored-by: Davis Bennett --- docs/release.rst | 3 +++ zarr/attrs.py | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 346d673d68..b011b0986b 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -20,6 +20,9 @@ Unreleased Enhancements ~~~~~~~~~~~~ +* [v3] Dramatically reduce number of ``__contains_`` requests in favor of optimistically calling `__getitem__` + and handling any error that may arise. + By :user:`Deepak Cherian `. * [v3] Reuse the download array metadata when creating an ``Array``. By :user:`Deepak Cherian `. diff --git a/zarr/attrs.py b/zarr/attrs.py index 2afcaf295e..af9a5f1d30 100644 --- a/zarr/attrs.py +++ b/zarr/attrs.py @@ -151,19 +151,20 @@ def _put_nosync(self, d): if self.cache: self._cached_asdict = d else: - if self.key in self.store: + try: + meta_unparsed = self.store[self.key] # Cannot write the attributes directly to JSON, but have to # store it within the pre-existing attributes key of the v3 # metadata. # Note: this changes the store.counter result in test_caching_on! 
- meta = self.store._metadata_class.parse_metadata(self.store[self.key]) + meta = self.store._metadata_class.parse_metadata(meta_unparsed) if "attributes" in meta and "filters" in meta["attributes"]: # need to preserve any existing "filters" attribute d["attributes"]["filters"] = meta["attributes"]["filters"] meta["attributes"] = d["attributes"] - else: + except KeyError: meta = d self.store[self.key] = json_dumps(meta) if self.cache: From 5fde3a29ca0a9f1a005eb5000d1e8585d4ca0bde Mon Sep 17 00:00:00 2001 From: Ian Carroll Date: Fri, 5 Apr 2024 13:15:10 -0400 Subject: [PATCH 0474/1078] Make sure fs exceptions are raised if not MissingFs exceptions (clone) (#1604) * Make sure fs exceptions are raised if not Missing * lint * add missing argument in tests, lint * clear memory filesystem during test * improve commenting * add memory_store fixture, getitems performance * Update release.rst * improve FSStore.test_exception coverage --------- Co-authored-by: Martin Durant Co-authored-by: Joe Hamman Co-authored-by: Josh Moore Co-authored-by: Sanket Verma --- docs/release.rst | 3 +++ zarr/storage.py | 22 +++++++++++++++++----- zarr/tests/test_storage.py | 25 +++++++++++++++++++++++++ 3 files changed, 45 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index b011b0986b..da802651c2 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -41,6 +41,9 @@ Maintenance * Bump minimum supported NumPy version to 1.23 (per spec 0000) By :user:`Joe Hamman ` :issue:`1719`. + +* FSStore now raises rather than return bad data. + By :user:`Martin Durant ` and :user:`Ian Carroll ` :issue:`1604`. .. _release_2.17.1: diff --git a/zarr/storage.py b/zarr/storage.py index f6903d29b2..10f55f0ba3 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -1417,11 +1417,23 @@ def _normalize_key(self, key): def getitems( self, keys: Sequence[str], *, contexts: Mapping[str, Context] ) -> Mapping[str, Any]: - keys_transformed = [self._normalize_key(key) for key in keys] - results = self.map.getitems(keys_transformed, on_error="omit") - # The function calling this method may not recognize the transformed keys - # So we send the values returned by self.map.getitems back into the original key space. - return {keys[keys_transformed.index(rk)]: rv for rk, rv in results.items()} + keys_transformed = {self._normalize_key(key): key for key in keys} + results_transformed = self.map.getitems(list(keys_transformed), on_error="return") + results = {} + for k, v in results_transformed.items(): + if isinstance(v, self.exceptions): + # Cause recognized exceptions to prompt a KeyError in the + # function calling this method + continue + elif isinstance(v, Exception): + # Raise any other exception + raise v + else: + # The function calling this method may not recognize the transformed + # keys, so we send the values returned by self.map.getitems back into + # the original key space. 
+ results[keys_transformed[k]] = v + return results def __getitem__(self, key): key = self._normalize_key(key) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index 358d043ad6..ae8a56fa61 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1098,6 +1098,12 @@ def mock_walker_no_slash(_path): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestFSStore(StoreTests): + @pytest.fixture + def memory_store(self): + store = FSStore("memory://") + yield store + store.fs.store.clear() + def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): if path is None: path = tempfile.mkdtemp() @@ -1337,6 +1343,25 @@ def test_s3_complex(self): ) assert (a[:] == -np.ones((8, 8, 8))).all() + def test_exceptions(self, memory_store): + fs = memory_store.fs + group = zarr.open(memory_store, mode="w") + x = group.create_dataset("x", data=[1, 2, 3]) + y = group.create_dataset("y", data=1) + fs.store["/x/0"] = None + fs.store["/y/0"] = None + # no exception from FSStore.getitems getting KeyError + assert group.store.getitems(["foo"], contexts={}) == {} + # exception from FSStore.getitems getting AttributeError + with pytest.raises(Exception): + group.store.getitems(["x/0"], contexts={}) + # exception from FSStore.getitems getting AttributeError + with pytest.raises(Exception): + x[...] + # exception from FSStore.__getitem__ getting AttributeError + with pytest.raises(Exception): + y[...] + @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class TestFSStoreWithKeySeparator(StoreTests): From 1631c109c655c71221eab6d964063abd015011f2 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Fri, 5 Apr 2024 13:53:33 -0700 Subject: [PATCH 0475/1078] chore(release): update changelog for 2.17.2 (#1775) --- docs/release.rst | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index da802651c2..60d01b8244 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -13,18 +13,18 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. -.. _unreleased: +.. _release_2.17.2: -Unreleased ----------- +2.17.2 +------ Enhancements ~~~~~~~~~~~~ -* [v3] Dramatically reduce number of ``__contains_`` requests in favor of optimistically calling `__getitem__` +* [v3] Dramatically reduce number of ``__contains__`` requests in favor of optimistically calling `__getitem__` and handling any error that may arise. By :user:`Deepak Cherian `. -* [v3] Reuse the download array metadata when creating an ``Array``. +* [v3] Reuse the downloaded array metadata when creating an ``Array``. By :user:`Deepak Cherian `. * Optimize ``Array.info`` so that it calls `getsize` only once. @@ -33,6 +33,9 @@ Enhancements * Override IPython ``_repr_*_`` methods to avoid expensive lookups against object stores. By :user:`Deepak Cherian ` :issue:`1716`. +* FSStore now raises rather than return bad data. + By :user:`Martin Durant ` and :user:`Ian Carroll ` :issue:`1604`. + Maintenance ~~~~~~~~~~~ @@ -42,8 +45,6 @@ Maintenance * Bump minimum supported NumPy version to 1.23 (per spec 0000) By :user:`Joe Hamman ` :issue:`1719`. -* FSStore now raises rather than return bad data. - By :user:`Martin Durant ` and :user:`Ian Carroll ` :issue:`1604`. .. 
_release_2.17.1: From 15a9747f2b41681d6cec403ca12edb9cecd6b3bb Mon Sep 17 00:00:00 2001 From: "Daniel Jahn (dahn)" Date: Sat, 6 Apr 2024 10:48:13 +0200 Subject: [PATCH 0476/1078] Resolve Mypy errors in `v3` branch (#1692) * refactor(v3): Using appropriate types * fix(v3): Typing fixes + minor code fixes * fix(v3): _sync_iter works with coroutines * docs(v3/store/core.py): clearer comment * fix(metadata.py): Use Any outside TYPE_CHECKING for Pydantic * fix(zarr/v3): correct zarr format + remove unused method * fix(v3/store/core.py): Potential suggestion on handling str store_like * refactor(zarr/v3): Add more typing * ci(.pre-commit-config.yaml): zarr v3 mypy checks turned on in pre-commit --- .pre-commit-config.yaml | 1 - src/zarr/v3/abc/metadata.py | 3 +- src/zarr/v3/array.py | 2 +- src/zarr/v3/chunk_grids.py | 2 +- src/zarr/v3/chunk_key_encodings.py | 6 ++-- src/zarr/v3/codecs/transpose.py | 8 +++--- src/zarr/v3/group.py | 44 ++++++++++++++++++------------ src/zarr/v3/metadata.py | 6 ++-- src/zarr/v3/store/core.py | 15 ++-------- src/zarr/v3/store/local.py | 2 +- src/zarr/v3/sync.py | 8 ++---- 11 files changed, 47 insertions(+), 50 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79344604a5..10aff8b4c6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,6 @@ repos: hooks: - id: mypy files: src - exclude: ^src/zarr/v3 args: [] additional_dependencies: - types-redis diff --git a/src/zarr/v3/abc/metadata.py b/src/zarr/v3/abc/metadata.py index bdd2f86d59..4fcabf72a1 100644 --- a/src/zarr/v3/abc/metadata.py +++ b/src/zarr/v3/abc/metadata.py @@ -5,11 +5,12 @@ from typing import Dict from typing_extensions import Self -from dataclasses import fields +from dataclasses import fields, dataclass from zarr.v3.common import JSON +@dataclass(frozen=True) class Metadata: def to_dict(self) -> JSON: """ diff --git a/src/zarr/v3/array.py b/src/zarr/v3/array.py index 632f7d8ec7..c0a00a624e 100644 --- a/src/zarr/v3/array.py +++ b/src/zarr/v3/array.py @@ -182,7 +182,7 @@ def shape(self) -> ChunkCoords: @property def size(self) -> int: - return np.prod(self.metadata.shape) + return np.prod(self.metadata.shape).item() @property def dtype(self) -> np.dtype: diff --git a/src/zarr/v3/chunk_grids.py b/src/zarr/v3/chunk_grids.py index 6c48323798..b0a2a7bb36 100644 --- a/src/zarr/v3/chunk_grids.py +++ b/src/zarr/v3/chunk_grids.py @@ -20,7 +20,7 @@ class ChunkGrid(Metadata): @classmethod def from_dict(cls, data: Dict[str, JSON]) -> ChunkGrid: if isinstance(data, ChunkGrid): - return data # type: ignore + return data name_parsed, _ = parse_named_configuration(data) if name_parsed == "regular": diff --git a/src/zarr/v3/chunk_key_encodings.py b/src/zarr/v3/chunk_key_encodings.py index e4339240e3..9889a2f04a 100644 --- a/src/zarr/v3/chunk_key_encodings.py +++ b/src/zarr/v3/chunk_key_encodings.py @@ -1,6 +1,6 @@ from __future__ import annotations from abc import abstractmethod -from typing import TYPE_CHECKING, Dict, Literal +from typing import TYPE_CHECKING, Dict, Literal, cast from dataclasses import dataclass from zarr.v3.abc.metadata import Metadata @@ -19,7 +19,7 @@ def parse_separator(data: JSON) -> SeparatorLiteral: if data not in (".", "/"): raise ValueError(f"Expected an '.' or '/' separator.
Got {data} instead.") - return data # type: ignore + return cast(SeparatorLiteral, data) @dataclass(frozen=True) @@ -35,7 +35,7 @@ def __init__(self, *, separator: SeparatorLiteral) -> None: @classmethod def from_dict(cls, data: Dict[str, JSON]) -> ChunkKeyEncoding: if isinstance(data, ChunkKeyEncoding): - return data # type: ignore + return data name_parsed, configuration_parsed = parse_named_configuration(data) if name_parsed == "default": diff --git a/src/zarr/v3/codecs/transpose.py b/src/zarr/v3/codecs/transpose.py index f214d1e7f1..b663230e35 100644 --- a/src/zarr/v3/codecs/transpose.py +++ b/src/zarr/v3/codecs/transpose.py @@ -1,5 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Dict, Iterable +from typing import TYPE_CHECKING, Dict, Iterable, Union, cast from dataclasses import dataclass, replace @@ -16,12 +16,12 @@ from zarr.v3.codecs.registry import register_codec -def parse_transpose_order(data: JSON) -> Tuple[int]: +def parse_transpose_order(data: Union[JSON, Iterable[int]]) -> Tuple[int, ...]: if not isinstance(data, Iterable): raise TypeError(f"Expected an iterable. Got {data} instead.") if not all(isinstance(a, int) for a in data): raise TypeError(f"Expected an iterable of integers. Got {data} instead.") - return tuple(data) # type: ignore[return-value] + return tuple(cast(Iterable[int], data)) @dataclass(frozen=True) @@ -31,7 +31,7 @@ class TransposeCodec(ArrayArrayCodec): order: Tuple[int, ...] def __init__(self, *, order: ChunkCoordsLike) -> None: - order_parsed = parse_transpose_order(order) # type: ignore[arg-type] + order_parsed = parse_transpose_order(order) object.__setattr__(self, "order", order_parsed) diff --git a/src/zarr/v3/group.py b/src/zarr/v3/group.py index acd5ca0d62..0012a77a81 100644 --- a/src/zarr/v3/group.py +++ b/src/zarr/v3/group.py @@ -4,7 +4,7 @@ import asyncio import json import logging -from typing import Any, Dict, Literal, Optional, Union, AsyncIterator, Iterator, List +from typing import Any, Dict, Literal, Optional, Union, AsyncIterator, List from zarr.v3.abc.metadata import Metadata from zarr.v3.array import AsyncArray, Array @@ -46,11 +46,11 @@ def to_bytes(self) -> Dict[str, bytes]: return {ZARR_JSON: json.dumps(self.to_dict()).encode()} else: return { - ZGROUP_JSON: self.zarr_format, + ZGROUP_JSON: json.dumps({"zarr_format": 2}).encode(), ZATTRS_JSON: json.dumps(self.attributes).encode(), } - def __init__(self, attributes: Dict[str, Any] = None, zarr_format: Literal[2, 3] = 3): + def __init__(self, attributes: Optional[Dict[str, Any]] = None, zarr_format: Literal[2, 3] = 3): attributes_parsed = parse_attributes(attributes) zarr_format_parsed = parse_zarr_format(zarr_format) @@ -104,7 +104,7 @@ async def open( zarr_format: Literal[2, 3] = 3, ) -> AsyncGroup: store_path = make_store_path(store) - zarr_json_bytes = await (store_path / ZARR_JSON).get_async() + zarr_json_bytes = await (store_path / ZARR_JSON).get() assert zarr_json_bytes is not None # TODO: consider trying to autodiscover the zarr-format here @@ -139,7 +139,7 @@ def from_dict( store_path: StorePath, data: Dict[str, Any], runtime_configuration: RuntimeConfiguration, - ) -> Group: + ) -> AsyncGroup: group = cls( metadata=GroupMetadata.from_dict(data), store_path=store_path, @@ -168,10 +168,12 @@ async def getitem( zarr_json = json.loads(zarr_json_bytes) if zarr_json["node_type"] == "group": return type(self).from_dict(store_path, zarr_json, self.runtime_configuration) - if zarr_json["node_type"] == "array": + elif zarr_json["node_type"] == "array": 
return AsyncArray.from_dict( store_path, zarr_json, runtime_configuration=self.runtime_configuration ) + else: + raise ValueError(f"unexpected node_type: {zarr_json['node_type']}") elif self.metadata.zarr_format == 2: # Q: how do we like optimistically fetching .zgroup, .zarray, and .zattrs? # This guarantees that we will always make at least one extra request to the store @@ -271,7 +273,7 @@ def __repr__(self): async def nchildren(self) -> int: raise NotImplementedError - async def children(self) -> AsyncIterator[AsyncArray, AsyncGroup]: + async def children(self) -> AsyncIterator[Union[AsyncArray, AsyncGroup]]: raise NotImplementedError async def contains(self, child: str) -> bool: @@ -381,8 +383,12 @@ async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Group new_metadata = replace(self.metadata, attributes=new_attributes) # Write new metadata - await (self.store_path / ZARR_JSON).set_async(new_metadata.to_bytes()) - return replace(self, metadata=new_metadata) + to_save = new_metadata.to_bytes() + awaitables = [(self.store_path / key).set(value) for key, value in to_save.items()] + await asyncio.gather(*awaitables) + + async_group = replace(self._async_group, metadata=new_metadata) + return replace(self, _async_group=async_group) @property def metadata(self) -> GroupMetadata: @@ -396,34 +402,38 @@ def attrs(self) -> Attributes: def info(self): return self._async_group.info + @property + def store_path(self) -> StorePath: + return self._async_group.store_path + def update_attributes(self, new_attributes: Dict[str, Any]): self._sync(self._async_group.update_attributes(new_attributes)) return self @property def nchildren(self) -> int: - return self._sync(self._async_group.nchildren) + return self._sync(self._async_group.nchildren()) @property - def children(self) -> List[Array, Group]: - _children = self._sync_iter(self._async_group.children) + def children(self) -> List[Union[Array, Group]]: + _children = self._sync_iter(self._async_group.children()) return [Array(obj) if isinstance(obj, AsyncArray) else Group(obj) for obj in _children] def __contains__(self, child) -> bool: return self._sync(self._async_group.contains(child)) - def group_keys(self) -> Iterator[str]: - return self._sync_iter(self._async_group.group_keys) + def group_keys(self) -> List[str]: + return self._sync_iter(self._async_group.group_keys()) def groups(self) -> List[Group]: # TODO: in v2 this was a generator that return key: Group - return [Group(obj) for obj in self._sync_iter(self._async_group.groups)] + return [Group(obj) for obj in self._sync_iter(self._async_group.groups())] def array_keys(self) -> List[str]: - return self._sync_iter(self._async_group.array_keys) + return self._sync_iter(self._async_group.array_keys()) def arrays(self) -> List[Array]: - return [Array(obj) for obj in self._sync_iter(self._async_group.arrays)] + return [Array(obj) for obj in self._sync_iter(self._async_group.arrays())] def tree(self, expand=False, level=None) -> Any: return self._sync(self._async_group.tree(expand=expand, level=level)) diff --git a/src/zarr/v3/metadata.py b/src/zarr/v3/metadata.py index de3055abdc..a5e8927311 100644 --- a/src/zarr/v3/metadata.py +++ b/src/zarr/v3/metadata.py @@ -1,6 +1,6 @@ from __future__ import annotations from enum import Enum -from typing import TYPE_CHECKING, cast, Dict, Iterable +from typing import TYPE_CHECKING, cast, Dict, Iterable, Any from dataclasses import dataclass, field import json import numpy as np @@ -10,7 +10,7 @@ if TYPE_CHECKING: - from typing import Any, 
Literal, Union, List, Optional, Tuple + from typing import Literal, Union, List, Optional, Tuple from zarr.v3.codecs.pipeline import CodecPipeline @@ -244,7 +244,7 @@ class ArrayV2Metadata(Metadata): filters: Optional[List[Dict[str, Any]]] = None dimension_separator: Literal[".", "/"] = "." compressor: Optional[Dict[str, Any]] = None - attributes: Optional[Dict[str, Any]] = field(default_factory=dict) + attributes: Optional[Dict[str, Any]] = cast(Dict[str, Any], field(default_factory=dict)) zarr_format: Literal[2] = field(init=False, default=2) def __init__( diff --git a/src/zarr/v3/store/core.py b/src/zarr/v3/store/core.py index 0ef1c8569e..16714d9e30 100644 --- a/src/zarr/v3/store/core.py +++ b/src/zarr/v3/store/core.py @@ -5,6 +5,7 @@ from zarr.v3.common import BytesLike from zarr.v3.abc.store import Store +from zarr.v3.store.local import LocalStore def _dereference_path(root: str, path: str) -> str: @@ -24,10 +25,6 @@ def __init__(self, store: Store, path: Optional[str] = None): self.store = store self.path = path or "" - @classmethod - def from_path(cls, pth: Path) -> StorePath: - return cls(Store.from_path(pth)) - async def get( self, byte_range: Optional[Tuple[int, Optional[int]]] = None ) -> Optional[BytesLike]: @@ -70,14 +67,6 @@ def make_store_path(store_like: StoreLike) -> StorePath: return store_like elif isinstance(store_like, Store): return StorePath(store_like) - # elif isinstance(store_like, Path): - # return StorePath(Store.from_path(store_like)) elif isinstance(store_like, str): - try: - from upath import UPath - - return StorePath(Store.from_path(UPath(store_like))) - except ImportError as e: - raise e - # return StorePath(LocalStore(Path(store_like))) + return StorePath(LocalStore(Path(store_like))) raise TypeError diff --git a/src/zarr/v3/store/local.py b/src/zarr/v3/store/local.py index a62eea20f7..c3da110450 100644 --- a/src/zarr/v3/store/local.py +++ b/src/zarr/v3/store/local.py @@ -146,7 +146,7 @@ async def list_prefix(self, prefix: str) -> List[str]: """ def _list_prefix(root: Path, prefix: str) -> List[str]: - files = [p for p in (root / prefix).rglob("*") if p.is_file()] + files = [str(p) for p in (root / prefix).rglob("*") if p.is_file()] return files return await to_thread(_list_prefix, self.root, prefix) diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py index f0996c019e..fcc8e7b275 100644 --- a/src/zarr/v3/sync.py +++ b/src/zarr/v3/sync.py @@ -5,7 +5,6 @@ from typing import ( Any, AsyncIterator, - Callable, Coroutine, List, Optional, @@ -112,11 +111,10 @@ def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: # this should allow us to better type the sync wrapper return sync(coroutine, loop=self._sync_configuration.asyncio_loop) - def _sync_iter( - self, func: Callable[P, AsyncIterator[T]], *args: P.args, **kwargs: P.kwargs - ) -> List[T]: + def _sync_iter(self, coroutine: Coroutine[Any, Any, AsyncIterator[T]]) -> List[T]: async def iter_to_list() -> List[T]: # TODO: replace with generators so we don't materialize the entire iterator at once - return [item async for item in func(*args, **kwargs)] + async_iterator = await coroutine + return [item async for item in async_iterator] return self._sync(iter_to_list()) From 6105ef203e3e5c5390aa01db613bc42e8fb98c1a Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Sat, 6 Apr 2024 11:50:52 -0700 Subject: [PATCH 0477/1078] chore(docs): reset release notes as unreleased (#1776) --- docs/release.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 
60d01b8244..75193bc3e3 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -13,6 +13,23 @@ Release notes # to document your changes. On releases it will be # re-indented so that it does not show up in the notes. +.. _unreleased: + +Unreleased +---------- + +Enhancements +~~~~~~~~~~~~ + + +Docs +~~~~ + + +Maintenance +~~~~~~~~~~~ + + .. _release_2.17.2: 2.17.2 From 4e6cca2c08992b57326db5d519d3c380d0ea5e16 Mon Sep 17 00:00:00 2001 From: Max Jones <14077947+maxrjones@users.noreply.github.com> Date: Mon, 8 Apr 2024 12:29:26 -0400 Subject: [PATCH 0478/1078] Specify hatch envs using GitHub actions matrix for v3 tests (#1728) * Specify v3 hatch envs using GitHub actions matrix * Update .github/workflows/test-v3.yml Co-authored-by: Joe Hamman * Update .github/workflows/test-v3.yml Co-authored-by: Joe Hamman * test on 3.12 too * no 3.12 --------- Co-authored-by: Joe Hamman Co-authored-by: Joe Hamman --- .github/workflows/test-v3.yml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-v3.yml b/.github/workflows/test-v3.yml index bdc6e99299..e767541c75 100644 --- a/.github/workflows/test-v3.yml +++ b/.github/workflows/test-v3.yml @@ -10,15 +10,22 @@ on: branches: [ v3 ] jobs: - run-tests: + test: + name: py=${{ matrix.python-version }}, np=${{ matrix.numpy-version }}, deps=${{ matrix.dependency-set }} runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.10', '3.11'] + numpy-version: ['1.24', '1.26'] + dependency-set: ["minimal", "optional"] + steps: - uses: actions/checkout@v3 - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: ${{ matrix.python-version }} cache: 'pip' - name: Install Hatch run: | @@ -29,8 +36,8 @@ jobs: hatch env create - name: Run Tests run: | - hatch run test:run + hatch env run --env test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} run - name: Run mypy continue-on-error: true run: | - hatch run test:run-mypy \ No newline at end of file + hatch run test:run-mypy From 77292b12d1e644f31f81a78973b09d4b6f2ed16c Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Mon, 8 Apr 2024 19:42:20 +0200 Subject: [PATCH 0479/1078] black -> ruff format + cleanup (#1639) * black -> ruff + cleanup * format * Preserve git blame * pre-commit fix --- .flake8 | 2 -- .git-blame-ignore-revs | 2 ++ .pre-commit-config.yaml | 17 ++++------- bench/compress_normal.py | 1 - pyproject.toml | 18 ++--------- src/zarr/_storage/absstore.py | 3 +- src/zarr/_storage/store.py | 1 - src/zarr/_storage/v3.py | 1 - src/zarr/attrs.py | 6 ---- src/zarr/convenience.py | 20 +++--------- src/zarr/core.py | 2 +- src/zarr/creation.py | 2 -- src/zarr/hierarchy.py | 46 ++++++++++++++-------------- src/zarr/indexing.py | 35 ++++----------------- src/zarr/meta.py | 1 - src/zarr/n5.py | 57 ----------------------------------- src/zarr/storage.py | 9 +----- src/zarr/util.py | 16 ++++------ src/zarr/v3/config.py | 1 - src/zarr/v3/group.py | 1 - src/zarr/v3/store/local.py | 3 +- src/zarr/v3/sync.py | 1 - tests/test_attrs.py | 6 ---- tests/test_convenience.py | 7 ----- tests/test_creation.py | 9 ------ tests/test_dim_separator.py | 1 - tests/test_filters.py | 12 -------- tests/test_group_v3.py | 2 -- tests/test_hierarchy.py | 3 -- tests/test_indexing.py | 35 --------------------- tests/test_info.py | 1 - tests/test_meta.py | 27 ++--------------- tests/test_storage.py | 1 - tests/test_sync.py | 2 -- tests/test_util.py | 2 -- tests/v3/test_metadata.py | 1 + 36 files changed, 58 
insertions(+), 296 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 7da1f9608e..0000000000 --- a/.flake8 +++ /dev/null @@ -1,2 +0,0 @@ -[flake8] -max-line-length = 100 diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 53bf4633f0..9e0316032f 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,2 +1,4 @@ # lint codebase with black and ruff 4e348d6b80c96da461fd866576c971b8a659ba15 +# migrate from black to ruff format +22cea005629913208a85799372e045f353744add diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10aff8b4c6..d4aee4ce86 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,27 +7,22 @@ default_language_version: python: python3 repos: - repo: https://github.com/charliermarsh/ruff-pre-commit - # Ruff version. - rev: 'v0.0.224' + rev: 'v0.2.1' hooks: - id: ruff - # Respect `exclude` and `extend-exclude` settings. - args: ["--force-exclude"] - - repo: https://github.com/psf/black - rev: 22.12.0 - hooks: - - id: black + args: ["--fix", "--show-fixes"] + - id: ruff-format - repo: https://github.com/codespell-project/codespell - rev: v2.2.5 + rev: v2.2.6 hooks: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo,zar", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.1 + rev: v1.8.0 hooks: - id: mypy files: src diff --git a/bench/compress_normal.py b/bench/compress_normal.py index 9f1655541c..803d54b76b 100644 --- a/bench/compress_normal.py +++ b/bench/compress_normal.py @@ -8,7 +8,6 @@ from zarr import blosc if __name__ == "__main__": - sys.path.insert(0, "..") # setup diff --git a/pyproject.toml b/pyproject.toml index 3933376b12..9f21a84aee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -127,7 +127,8 @@ serve = "sphinx-autobuild docs docs/_build --ignore 'docs/_autoapi/**/*' --host [tool.ruff] line-length = 100 -exclude = [ +force-exclude = true +extend-exclude = [ ".bzr", ".direnv", ".eggs", @@ -146,21 +147,6 @@ exclude = [ "docs" ] -[tool.black] -line-length = 100 -exclude = ''' -/( - \.git - | \.mypy_cache - | \.venv - | _build - | buck-out - | build - | dist - | docs -)/ -''' - [tool.mypy] python_version = "3.8" ignore_missing_imports = true diff --git a/src/zarr/_storage/absstore.py b/src/zarr/_storage/absstore.py index f62529f096..c9a113148c 100644 --- a/src/zarr/_storage/absstore.py +++ b/src/zarr/_storage/absstore.py @@ -87,7 +87,7 @@ def __init__( "https://{}.blob.core.windows.net/".format(account_name), container, credential=account_key, - **blob_service_kwargs + **blob_service_kwargs, ) self.client = client @@ -240,7 +240,6 @@ def __setitem__(self, key, value): super().__setitem__(key, value) def rmdir(self, path=None): - if not path: # Currently allowing clear to delete everything as in v2 diff --git a/src/zarr/_storage/store.py b/src/zarr/_storage/store.py index 8daedae48f..80e4ad8f75 100644 --- a/src/zarr/_storage/store.py +++ b/src/zarr/_storage/store.py @@ -629,7 +629,6 @@ def _rmdir_from_keys(store: StoreLike, path: Optional[str] = None) -> None: def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: - meta_dir = meta_root + path meta_dir = meta_dir.rstrip("/") _rmdir_from_keys(store, meta_dir) diff --git a/src/zarr/_storage/v3.py b/src/zarr/_storage/v3.py index 8ab54984b7..d3cbc58235 100644 --- a/src/zarr/_storage/v3.py +++ b/src/zarr/_storage/v3.py @@ -118,7 +118,6 @@ def 
_get_files_and_dirs_from_path(store, path): class FSStoreV3(FSStore, StoreV3): - # FSStoreV3 doesn't use this (FSStore uses it within _normalize_key) _META_KEYS = () diff --git a/src/zarr/attrs.py b/src/zarr/attrs.py index 01fc617b3c..e967c5b853 100644 --- a/src/zarr/attrs.py +++ b/src/zarr/attrs.py @@ -26,7 +26,6 @@ class Attributes(MutableMapping): """ def __init__(self, store, key=".zattrs", read_only=False, cache=True, synchronizer=None): - self._version = getattr(store, "_store_version", 2) _Store = Store if self._version == 2 else StoreV3 self.store = _Store._ensure_store(store) @@ -73,7 +72,6 @@ def __getitem__(self, item): return self.asdict()[item] def _write_op(self, f, *args, **kwargs): - # guard condition if self.read_only: raise PermissionError("attributes are read-only") @@ -89,7 +87,6 @@ def __setitem__(self, item, value): self._write_op(self._setitem_nosync, item, value) def _setitem_nosync(self, item, value): - # load existing data d = self._get_nosync() @@ -106,7 +103,6 @@ def __delitem__(self, item): self._write_op(self._delitem_nosync, item) def _delitem_nosync(self, key): - # load existing data d = self._get_nosync() @@ -128,7 +124,6 @@ def put(self, d): self._write_op(self._put_nosync, dict(attributes=d)) def _put_nosync(self, d): - d_to_check = d if self._version == 2 else d["attributes"] if not all(isinstance(item, str) for item in d_to_check): # TODO: Raise an error for non-string keys @@ -178,7 +173,6 @@ def update(self, *args, **kwargs): self._write_op(self._update_nosync, *args, **kwargs) def _update_nosync(self, *args, **kwargs): - # load existing data d = self._get_nosync() diff --git a/src/zarr/convenience.py b/src/zarr/convenience.py index 0ee8a8d323..9c0deeea47 100644 --- a/src/zarr/convenience.py +++ b/src/zarr/convenience.py @@ -675,10 +675,8 @@ def copy_store( # setup logging with _LogWriter(log) as log: - # iterate over source keys for source_key in sorted(source.keys()): - # filter to keys under source path if source_store_version == 2: if not source_key.startswith(source_path): @@ -757,7 +755,7 @@ def copy( log=None, if_exists="raise", dry_run=False, - **create_kws + **create_kws, ): """Copy the `source` array or group into the `dest` group. 
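For context on the function being reflowed here: ``zarr.copy`` is the public
convenience API for copying an array or group between hierarchies, and the
changes in this commit are formatting-only. A usage sketch (illustrative, with
made-up names)::

    import zarr

    source = zarr.group()
    source.create_dataset("foo", shape=(10, 10), chunks=(5, 5), dtype="i4")
    dest = zarr.group()

    # returns an (n_copied, n_skipped, n_bytes_copied) tuple
    n_copied, n_skipped, n_bytes = zarr.copy(source["foo"], dest)
    assert "foo" in dest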
@@ -878,7 +876,6 @@ def copy( # setup logging with _LogWriter(log) as log: - # do the copying n_copied, n_skipped, n_bytes_copied = _copy( log, @@ -890,7 +887,7 @@ def copy( without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, - **create_kws + **create_kws, ) # log a final message with a summary of what happened @@ -948,12 +945,10 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ # take action if do_copy: - # log a message about what we're going to do log("copy {} {} {}".format(source.name, source.shape, source.dtype)) if not dry_run: - # clear the way if exists: del dest[name] @@ -1038,12 +1033,10 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ # take action if do_copy: - # log action log("copy {}".format(source.name)) if not dry_run: - # clear the way if exists_array: del dest[name] @@ -1056,7 +1049,6 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ grp.attrs.update(source.attrs) else: - # setup for dry run without creating any groups in the # destination if dest is not None: @@ -1076,7 +1068,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, - **create_kws + **create_kws, ) n_copied += c n_skipped += s @@ -1099,7 +1091,7 @@ def copy_all( log=None, if_exists="raise", dry_run=False, - **create_kws + **create_kws, ): """Copy all children of the `source` group into the `dest` group. @@ -1189,7 +1181,6 @@ def copy_all( # setup logging with _LogWriter(log) as log: - for k in source.keys(): c, s, b = _copy( log, @@ -1201,7 +1192,7 @@ def copy_all( without_attrs=without_attrs, if_exists=if_exists, dry_run=dry_run, - **create_kws + **create_kws, ) n_copied += c n_skipped += s @@ -1262,7 +1253,6 @@ def is_zarr_key(key): return key.endswith(".zarray") or key.endswith(".zgroup") or key.endswith(".zattrs") else: - assert_zarr_v3_api_available() sfx = _get_metadata_suffix(store) # type: ignore diff --git a/src/zarr/core.py b/src/zarr/core.py index c07a31e95f..d22a9d79c3 100644 --- a/src/zarr/core.py +++ b/src/zarr/core.py @@ -2536,7 +2536,7 @@ def hexdigest(self, hashname="sha1"): checksum = binascii.hexlify(self.digest(hashname=hashname)) # This is a bytes object on Python 3 and we want a str. 
- if type(checksum) is not str: + if not isinstance(checksum, str): checksum = checksum.decode("utf8") return checksum diff --git a/src/zarr/creation.py b/src/zarr/creation.py index 726d0b5932..6227f90b7b 100644 --- a/src/zarr/creation.py +++ b/src/zarr/creation.py @@ -234,7 +234,6 @@ def create( def _kwargs_compat(compressor, fill_value, kwargs): - # to be compatible with h5py, as well as backwards-compatible with Zarr # 1.x, accept 'compression' and 'compression_opts' keyword arguments @@ -697,7 +696,6 @@ def open_array( def _like_args(a, kwargs): - shape, chunks = _get_shape_chunks(a) if shape is not None: kwargs.setdefault("shape", shape) diff --git a/src/zarr/hierarchy.py b/src/zarr/hierarchy.py index 3361969f08..1c9848e647 100644 --- a/src/zarr/hierarchy.py +++ b/src/zarr/hierarchy.py @@ -145,7 +145,7 @@ def __init__( synchronizer=None, zarr_version=None, *, - meta_array=None + meta_array=None, ): store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) if zarr_version is None: @@ -591,7 +591,25 @@ def groups(self): for key in sorted(listdir(self._store, self._path)): path = self._key_prefix + key if contains_group(self._store, path, explicit_only=False): - yield key, Group( + yield ( + key, + Group( + self._store, + path=path, + read_only=self._read_only, + chunk_store=self._chunk_store, + cache_attrs=self.attrs.cache, + synchronizer=self._synchronizer, + zarr_version=self._version, + ), + ) + + else: + for key in self.group_keys(): + path = self._key_prefix + key + yield ( + key, + Group( self._store, path=path, read_only=self._read_only, @@ -599,19 +617,7 @@ def groups(self): cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, zarr_version=self._version, - ) - - else: - for key in self.group_keys(): - path = self._key_prefix + key - yield key, Group( - self._store, - path=path, - read_only=self._read_only, - chunk_store=self._chunk_store, - cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, - zarr_version=self._version, + ), ) def array_keys(self, recurse=False): @@ -919,7 +925,6 @@ def tree(self, expand=False, level=None): return TreeViewer(self, expand=expand, level=level) def _write_op(self, f, *args, **kwargs): - # guard condition if self._read_only: raise ReadOnlyError() @@ -1094,7 +1099,6 @@ def create_dataset(self, name, **kwargs): return self._write_op(self._create_dataset_nosync, name, **kwargs) def _create_dataset_nosync(self, name, data=None, **kwargs): - assert "mode" not in kwargs path = self._item_path(name) @@ -1138,11 +1142,9 @@ def require_dataset(self, name, shape, dtype=None, exact=False, **kwargs): ) def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, **kwargs): - path = self._item_path(name) if contains_array(self._store, path): - # array already exists at path, validate that it is the right shape and type synchronizer = kwargs.get("synchronizer", self._synchronizer) @@ -1235,7 +1237,7 @@ def _full_nosync(self, name, fill_value, **kwargs): path=path, chunk_store=self._chunk_store, fill_value=fill_value, - **kwargs + **kwargs, ) def array(self, name, data, **kwargs): @@ -1361,7 +1363,7 @@ def group( path=None, *, zarr_version=None, - meta_array=None + meta_array=None, ): """Create a group. @@ -1452,7 +1454,7 @@ def open_group( storage_options=None, *, zarr_version=None, - meta_array=None + meta_array=None, ): """Open a group using file-mode-like semantics. 
diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 487cc8b9d9..b72d5a255d 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -111,7 +111,6 @@ def is_pure_orthogonal_indexing(selection, ndim): def normalize_integer_selection(dim_sel, dim_len): - # normalize type to int dim_sel = int(dim_sel) @@ -145,7 +144,6 @@ def normalize_integer_selection(dim_sel, dim_len): class IntDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): - # normalize dim_sel = normalize_integer_selection(dim_sel, dim_len) @@ -169,7 +167,6 @@ def ceildiv(a, b): class SliceDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): - # normalize self.start, self.stop, self.step = dim_sel.indices(dim_len) if self.step < 1: @@ -182,14 +179,12 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) def __iter__(self): - # figure out the range of chunks we need to visit dim_chunk_ix_from = self.start // self.dim_chunk_len dim_chunk_ix_to = ceildiv(self.stop, self.dim_chunk_len) # iterate over chunks in range for dim_chunk_ix in range(dim_chunk_ix_from, dim_chunk_ix_to): - # compute offsets for chunk within overall array dim_offset = dim_chunk_ix * self.dim_chunk_len dim_limit = min(self.dim_len, (dim_chunk_ix + 1) * self.dim_chunk_len) @@ -237,7 +232,6 @@ def check_selection_length(selection, shape): def replace_ellipsis(selection, shape): - selection = ensure_tuple(selection) # count number of ellipsis present @@ -330,14 +324,12 @@ def is_basic_selection(selection): # noinspection PyProtectedMember class BasicIndexer: def __init__(self, selection, array): - # handle ellipsis selection = replace_ellipsis(selection, array._shape) # setup per-dimension indexers dim_indexers = [] for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): - if is_integer(dim_sel): dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) @@ -358,7 +350,6 @@ def __init__(self, selection, array): def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) out_selection = tuple( @@ -370,7 +361,6 @@ def __iter__(self): class BoolArrayDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): - # check number of dimensions if not is_bool_array(dim_sel, 1): raise IndexError( @@ -380,8 +370,9 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): # check shape if dim_sel.shape[0] != dim_len: raise IndexError( - "Boolean array has the wrong length for dimension; " - "expected {}, got {}".format(dim_len, dim_sel.shape[0]) + "Boolean array has the wrong length for dimension; " "expected {}, got {}".format( + dim_len, dim_sel.shape[0] + ) ) # store attributes @@ -402,10 +393,8 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): self.dim_chunk_ixs = np.nonzero(self.chunk_nitems)[0] def __iter__(self): - # iterate over chunks with at least one item for dim_chunk_ix in self.dim_chunk_ixs: - # find region in chunk dim_offset = dim_chunk_ix * self.dim_chunk_len dim_chunk_sel = self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] @@ -472,7 +461,6 @@ def __init__( boundscheck=True, order=Order.UNKNOWN, ): - # ensure 1d array dim_sel = np.asanyarray(dim_sel) if not is_integer_array(dim_sel, 1): @@ -526,9 +514,7 @@ def __init__( self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) def __iter__(self): - for dim_chunk_ix in self.dim_chunk_ixs: - # find region in output if 
dim_chunk_ix == 0: start = 0 @@ -602,7 +588,6 @@ def oindex_set(a, selection, value): # noinspection PyProtectedMember class OrthogonalIndexer: def __init__(self, selection, array): - # handle ellipsis selection = replace_ellipsis(selection, array._shape) @@ -612,7 +597,6 @@ def __init__(self, selection, array): # setup per-dimension indexers dim_indexers = [] for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): - if is_integer(dim_sel): dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) @@ -649,7 +633,6 @@ def __init__(self, selection, array): def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) out_selection = tuple( @@ -658,7 +641,6 @@ def __iter__(self): # handle advanced indexing arrays orthogonally if self.is_advanced: - # N.B., numpy doesn't support orthogonal indexing directly as yet, # so need to work around via np.ix_. Also np.ix_ does not support a # mixture of arrays and slices or integers, so need to convert slices @@ -692,7 +674,6 @@ def __setitem__(self, selection, value): # noinspection PyProtectedMember class BlockIndexer: def __init__(self, selection, array): - # handle ellipsis selection = replace_ellipsis(selection, array._shape) @@ -794,7 +775,6 @@ def is_mask_selection(selection, array): # noinspection PyProtectedMember class CoordinateIndexer: def __init__(self, selection, array): - # some initial normalization selection = ensure_tuple(selection) selection = tuple([i] if is_integer(i) else i for i in selection) @@ -810,7 +790,6 @@ def __init__(self, selection, array): # handle wraparound, boundscheck for dim_sel, dim_len in zip(selection, array.shape): - # handle wraparound wraparound_indices(dim_sel, dim_len) @@ -861,10 +840,8 @@ def __init__(self, selection, array): self.chunk_mixs = np.unravel_index(self.chunk_rixs, array._cdata_shape) def __iter__(self): - # iterate over chunks for i, chunk_rix in enumerate(self.chunk_rixs): - chunk_coords = tuple(m[i] for m in self.chunk_mixs) if chunk_rix == 0: start = 0 @@ -891,7 +868,6 @@ def __iter__(self): # noinspection PyProtectedMember class MaskIndexer(CoordinateIndexer): def __init__(self, selection, array): - # some initial normalization selection = ensure_tuple(selection) selection = replace_lists(selection) @@ -944,8 +920,9 @@ def check_fields(fields, dtype): # check type if not isinstance(fields, (str, list, tuple)): raise IndexError( - "'fields' argument must be a string or list of strings; found " - "{!r}".format(type(fields)) + "'fields' argument must be a string or list of strings; found " "{!r}".format( + type(fields) + ) ) if fields: if dtype.names is None: diff --git a/src/zarr/meta.py b/src/zarr/meta.py index bd1f4ee037..80f9017456 100644 --- a/src/zarr/meta.py +++ b/src/zarr/meta.py @@ -89,7 +89,6 @@ class Metadata2: @classmethod def parse_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: - # Here we allow that a store may return an already-parsed metadata object, # or a string of JSON that we will parse here. 
We allow for an already-parsed # object to accommodate a consolidated metadata store, where all the metadata for diff --git a/src/zarr/n5.py b/src/zarr/n5.py index 7e73905527..44b44e69e2 100644 --- a/src/zarr/n5.py +++ b/src/zarr/n5.py @@ -72,21 +72,18 @@ class N5Store(NestedDirectoryStore): def __getitem__(self, key: str) -> bytes: if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, n5_attrs_key) value = group_metadata_to_zarr(self._load_n5_attrs(key_new)) return json_dumps(value) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, n5_attrs_key) top_level = key == zarr_array_meta_key value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level) return json_dumps(value) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, n5_attrs_key) value = attrs_to_zarr(self._load_n5_attrs(key_new)) @@ -104,9 +101,7 @@ def __getitem__(self, key: str) -> bytes: return super().__getitem__(key_new) def __setitem__(self, key: str, value: Any): - if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, n5_attrs_key) n5_attrs = self._load_n5_attrs(key_new) @@ -115,7 +110,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, n5_attrs_key) top_level = key == zarr_array_meta_key n5_attrs = self._load_n5_attrs(key_new) @@ -123,7 +117,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, n5_attrs_key) n5_attrs = self._load_n5_attrs(key_new) @@ -166,9 +159,7 @@ def __delitem__(self, key: str): super().__delitem__(key_new) def __contains__(self, key): - if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, n5_attrs_key) if key_new not in self: return False @@ -176,18 +167,15 @@ def __contains__(self, key): return "dimensions" not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, n5_attrs_key) # array if attributes contain 'dimensions' return "dimensions" in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, n5_attrs_key) return self._contains_attrs(key_new) elif is_chunk_key(key): - key_new = invert_chunk_coords(key) else: key_new = key @@ -198,7 +186,6 @@ def __eq__(self, other): return isinstance(other, N5Store) and self.path == other.path def listdir(self, path: Optional[str] = None): - if path is not None: path = invert_chunk_coords(path) path = cast(str, path) @@ -208,7 +195,6 @@ def listdir(self, path: Optional[str] = None): children = super().listdir(path=path) if self._is_array(path): - # replace n5 attribute file with respective zarr attribute files children.remove(n5_attrs_key) children.append(zarr_array_meta_key) @@ -234,7 +220,6 @@ def listdir(self, path: Optional[str] = None): return sorted(new_children) elif self._is_group(path): - # replace n5 attribute file with respective zarr attribute files children.remove(n5_attrs_key) children.append(zarr_group_meta_key) @@ -244,7 +229,6 @@ def listdir(self, path: Optional[str] = None): return sorted(children) else: - return children def _load_n5_attrs(self, path: str) -> Dict[str, Any]: @@ -255,7 +239,6 @@ def _load_n5_attrs(self, path: str) -> Dict[str, Any]: return {} def _is_group(self, path: str): - if path is None: attrs_key = n5_attrs_key else: @@ -265,7 +248,6 @@ def 
_is_group(self, path: str): return len(n5_attrs) > 0 and "dimensions" not in n5_attrs def _is_array(self, path: str): - if path is None: attrs_key = n5_attrs_key else: @@ -274,7 +256,6 @@ def _is_array(self, path: str): return "dimensions" in self._load_n5_attrs(attrs_key) def _contains_attrs(self, path: str): - if path is None: attrs_key = n5_attrs_key else: @@ -376,21 +357,18 @@ def _normalize_key(self, key: str): def __getitem__(self, key: str) -> bytes: if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, self._group_meta_key) value = group_metadata_to_zarr(self._load_n5_attrs(key_new)) return json_dumps(value) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, self._array_meta_key) top_level = key == zarr_array_meta_key value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level) return json_dumps(value) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, self._attrs_key) value = attrs_to_zarr(self._load_n5_attrs(key_new)) @@ -409,7 +387,6 @@ def __getitem__(self, key: str) -> bytes: def __setitem__(self, key: str, value: Any): if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, self._group_meta_key) n5_attrs = self._load_n5_attrs(key_new) @@ -418,7 +395,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, self._array_meta_key) top_level = key == zarr_array_meta_key n5_attrs = self._load_n5_attrs(key_new) @@ -427,7 +403,6 @@ def __setitem__(self, key: str, value: Any): value = json_dumps(n5_attrs) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, self._attrs_key) n5_attrs = self._load_n5_attrs(key_new) @@ -456,7 +431,6 @@ def __setitem__(self, key: str, value: Any): super().__setitem__(key_new, value) def __delitem__(self, key: str): - if key.endswith(zarr_group_meta_key): key_new = key.replace(zarr_group_meta_key, self._group_meta_key) elif key.endswith(zarr_array_meta_key): @@ -471,7 +445,6 @@ def __delitem__(self, key: str): def __contains__(self, key: Any): if key.endswith(zarr_group_meta_key): - key_new = key.replace(zarr_group_meta_key, self._group_meta_key) if key_new not in self: return False @@ -479,13 +452,11 @@ def __contains__(self, key: Any): return "dimensions" not in self._load_n5_attrs(key_new) elif key.endswith(zarr_array_meta_key): - key_new = key.replace(zarr_array_meta_key, self._array_meta_key) # array if attributes contain 'dimensions' return "dimensions" in self._load_n5_attrs(key_new) elif key.endswith(zarr_attrs_key): - key_new = key.replace(zarr_attrs_key, self._attrs_key) return self._contains_attrs(key_new) @@ -508,7 +479,6 @@ def listdir(self, path: Optional[str] = None): # doesn't provide. 
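To make the metadata-key translation in these N5 stores concrete: every zarr metadata key for a node is redirected to that node's single N5 attributes file, and array-ness is inferred from the attributes themselves. The paths below are purely illustrative.

# hypothetical node "foo"; all three zarr metadata keys map to one N5 file
zarr_to_n5_keys = {
    "foo/.zarray": "foo/attributes.json",  # array metadata
    "foo/.zgroup": "foo/attributes.json",  # group metadata
    "foo/.zattrs": "foo/attributes.json",  # user attributes
}
# whether "foo" is an array or a group is then decided by the presence of a
# "dimensions" entry in the loaded attributes, as _is_array/_is_group do above.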
children = super().listdir(path=path) if self._is_array(path): - # replace n5 attribute file with respective zarr attribute files children.remove(self._array_meta_key) children.append(zarr_array_meta_key) @@ -532,7 +502,6 @@ def listdir(self, path: Optional[str] = None): return sorted(new_children) elif self._is_group(path): - # replace n5 attribute file with respective zarr attribute files children.remove(self._group_meta_key) children.append(zarr_group_meta_key) @@ -550,7 +519,6 @@ def _load_n5_attrs(self, path: str): return {} def _is_group(self, path: Optional[str]): - if path is None: attrs_key = self._attrs_key else: @@ -560,7 +528,6 @@ def _is_group(self, path: Optional[str]): return len(n5_attrs) > 0 and "dimensions" not in n5_attrs def _is_array(self, path: Optional[str]): - if path is None: attrs_key = self._attrs_key else: @@ -569,7 +536,6 @@ def _is_array(self, path: Optional[str]): return "dimensions" in self._load_n5_attrs(attrs_key) def _contains_attrs(self, path: Optional[str]): - if path is None: attrs_key = self._attrs_key else: @@ -712,7 +678,6 @@ def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]: - if compressor_config is None: return {"type": "raw"} else: @@ -726,19 +691,16 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict n5_config = {"type": codec_id} if codec_id == "bz2": - n5_config["type"] = "bzip2" n5_config["blockSize"] = _compressor_config["level"] elif codec_id == "blosc": - n5_config["cname"] = _compressor_config["cname"] n5_config["clevel"] = _compressor_config["clevel"] n5_config["shuffle"] = _compressor_config["shuffle"] n5_config["blocksize"] = _compressor_config["blocksize"] elif codec_id == "lzma": - # Switch to XZ for N5 if we are using the default XZ format. # Note: 4 is the default, which is lzma.CHECK_CRC64. 
if _compressor_config["format"] == 1 and _compressor_config["check"] in [-1, 4]: @@ -760,50 +722,42 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict n5_config["preset"] = 6 elif codec_id == "zlib": - n5_config["type"] = "gzip" n5_config["level"] = _compressor_config["level"] n5_config["useZlib"] = True elif codec_id == "gzip": - n5_config["type"] = "gzip" n5_config["level"] = _compressor_config["level"] n5_config["useZlib"] = False else: - n5_config.update({k: v for k, v in _compressor_config.items() if k != "type"}) return n5_config def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]: - codec_id = compressor_config["type"] zarr_config = {"id": codec_id} if codec_id == "bzip2": - zarr_config["id"] = "bz2" zarr_config["level"] = compressor_config["blockSize"] elif codec_id == "blosc": - zarr_config["cname"] = compressor_config["cname"] zarr_config["clevel"] = compressor_config["clevel"] zarr_config["shuffle"] = compressor_config["shuffle"] zarr_config["blocksize"] = compressor_config["blocksize"] elif codec_id == "lzma": - zarr_config["format"] = compressor_config["format"] zarr_config["check"] = compressor_config["check"] zarr_config["preset"] = compressor_config["preset"] zarr_config["filters"] = compressor_config["filters"] elif codec_id == "xz": - zarr_config["id"] = "lzma" zarr_config["format"] = 1 # lzma.FORMAT_XZ zarr_config["check"] = -1 @@ -811,7 +765,6 @@ def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dic zarr_config["filters"] = None elif codec_id == "gzip": - if "useZlib" in compressor_config and compressor_config["useZlib"]: zarr_config["id"] = "zlib" zarr_config["level"] = compressor_config["level"] @@ -820,22 +773,18 @@ def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dic zarr_config["level"] = compressor_config["level"] elif codec_id == "raw": - return None else: - zarr_config.update({k: v for k, v in compressor_config.items() if k != "type"}) return zarr_config class N5ChunkWrapper(Codec): - codec_id = "n5_wrapper" def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): - self.dtype = np.dtype(dtype) self.chunk_shape = tuple(chunk_shape) # is the dtype a little endian format? 
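A sketch of the round-trip implied by compressor_config_to_n5 and compressor_config_to_zarr above, using zlib as the example; the level value is illustrative.

zarr_config = {"id": "zlib", "level": 5}
# compressor_config_to_n5 maps this onto N5's gzip codec, flagged as zlib:
n5_config = {"type": "gzip", "level": 5, "useZlib": True}
# compressor_config_to_zarr inverts the mapping, recovering
# {"id": "zlib", "level": 5} from the config above.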
@@ -860,7 +809,6 @@ def get_config(self): return config def encode(self, chunk): - assert chunk.flags.c_contiguous header = self._create_header(chunk) @@ -872,12 +820,10 @@ def encode(self, chunk): return header + chunk.tobytes(order="A") def decode(self, chunk, out=None) -> bytes: - len_header, chunk_shape = self._read_header(chunk) chunk = chunk[len_header:] if out is not None: - # out should only be used if we read a complete chunk assert chunk_shape == self.chunk_shape, "Expected chunk of shape {}, found {}".format( self.chunk_shape, chunk_shape @@ -895,7 +841,6 @@ def decode(self, chunk, out=None) -> bytes: return out else: - if self._compressor: chunk = self._compressor.decode(chunk) @@ -915,7 +860,6 @@ def decode(self, chunk, out=None) -> bytes: @staticmethod def _create_header(chunk): - mode = struct.pack(">H", 0) num_dims = struct.pack(">H", len(chunk.shape)) shape = b"".join(struct.pack(">I", d) for d in chunk.shape[::-1]) @@ -924,7 +868,6 @@ def _create_header(chunk): @staticmethod def _read_header(chunk): - num_dims = struct.unpack(">H", chunk[2:4])[0] shape = tuple( struct.unpack(">I", chunk[i : i + 4])[0] for i in range(4, num_dims * 4 + 4, 4) diff --git a/src/zarr/storage.py b/src/zarr/storage.py index b36f804ebd..e7bd0c4cf4 100644 --- a/src/zarr/storage.py +++ b/src/zarr/storage.py @@ -482,7 +482,6 @@ def _init_array_metadata( dimension_separator=None, storage_transformers=(), ): - store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) @@ -687,7 +686,6 @@ def _init_group_metadata( path: Optional[str] = None, chunk_store: Optional[StoreLike] = None, ): - store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) @@ -1055,7 +1053,6 @@ class DirectoryStore(Store): """ def __init__(self, path, normalize_keys=False, dimension_separator=None): - # guard conditions path = os.path.abspath(path) if os.path.exists(path) and not os.path.isdir(path): @@ -1415,7 +1412,6 @@ def _normalize_key(self, key): def getitems( self, keys: Sequence[str], *, contexts: Mapping[str, Context] ) -> Mapping[str, Any]: - keys_transformed = [self._normalize_key(key) for key in keys] results = self.map.getitems(keys_transformed, on_error="omit") # The function calling this method may not recognize the transformed keys @@ -1768,7 +1764,6 @@ def __init__( mode="a", dimension_separator=None, ): - # store properties path = os.path.abspath(path) self.path = path @@ -2707,9 +2702,7 @@ def listdir(self, path=None): SELECT LTRIM(SUBSTR(k, LENGTH(?) + 1), "/") || "/" AS m FROM zarr WHERE k LIKE (? 
|| "{sep}%") ) ORDER BY l ASC - """.format( - sep=sep - ), + """.format(sep=sep), (path, path), ) keys = list(map(operator.itemgetter(0), keys)) diff --git a/src/zarr/util.py b/src/zarr/util.py index ea0dd9fcec..270a444524 100644 --- a/src/zarr/util.py +++ b/src/zarr/util.py @@ -180,7 +180,6 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]: - # convenience API for object arrays if inspect.isclass(dtype): dtype = dtype.__name__ # type: ignore @@ -245,7 +244,6 @@ def is_total_slice(item, shape: Tuple[int]) -> bool: def normalize_resize_args(old_shape, *args): - # normalize new shape argument if len(args) == 1: new_shape = args[0] @@ -294,7 +292,6 @@ def normalize_dimension_separator(sep: Optional[str]) -> Optional[str]: def normalize_fill_value(fill_value, dtype: np.dtype): - if fill_value is None or dtype.hasobject: # no fill value pass @@ -309,8 +306,9 @@ def normalize_fill_value(fill_value, dtype: np.dtype): if not isinstance(fill_value, str): raise ValueError( - "fill_value {!r} is not valid for dtype {}; must be a " - "unicode string".format(fill_value, dtype) + "fill_value {!r} is not valid for dtype {}; must be a " "unicode string".format( + fill_value, dtype + ) ) else: @@ -324,15 +322,15 @@ def normalize_fill_value(fill_value, dtype: np.dtype): except Exception as e: # re-raise with our own error message to be helpful raise ValueError( - "fill_value {!r} is not valid for dtype {}; nested " - "exception: {}".format(fill_value, dtype, e) + "fill_value {!r} is not valid for dtype {}; nested " "exception: {}".format( + fill_value, dtype, e + ) ) return fill_value def normalize_storage_path(path: Union[str, bytes, None]) -> str: - # handle bytes if isinstance(path, bytes): path = str(path, "ascii") @@ -342,7 +340,6 @@ def normalize_storage_path(path: Union[str, bytes, None]) -> str: path = str(path) if path: - # convert backslash to forward slash path = path.replace("\\", "/") @@ -506,7 +503,6 @@ def tree_widget(group, expand, level): class TreeViewer: def __init__(self, group, expand=False, level=None): - self.group = group self.expand = expand self.level = level diff --git a/src/zarr/v3/config.py b/src/zarr/v3/config.py index 98a25994c4..cebe5c1b09 100644 --- a/src/zarr/v3/config.py +++ b/src/zarr/v3/config.py @@ -43,7 +43,6 @@ def __init__( concurrency: Optional[int] = None, asyncio_loop: Optional[AbstractEventLoop] = None, ): - order_parsed = parse_indexing_order(order) concurrency_parsed = parse_concurrency(concurrency) asyncio_loop_parsed = parse_asyncio_loop(asyncio_loop) diff --git a/src/zarr/v3/group.py b/src/zarr/v3/group.py index 0012a77a81..fcd2fea215 100644 --- a/src/zarr/v3/group.py +++ b/src/zarr/v3/group.py @@ -151,7 +151,6 @@ async def getitem( self, key: str, ) -> Union[AsyncArray, AsyncGroup]: - store_path = self.store_path / key if self.metadata.zarr_format == 3: diff --git a/src/zarr/v3/store/local.py b/src/zarr/v3/store/local.py index c3da110450..5d22b30e9a 100644 --- a/src/zarr/v3/store/local.py +++ b/src/zarr/v3/store/local.py @@ -46,7 +46,6 @@ def _put( class LocalStore(Store): - supports_writes: bool = True supports_partial_writes: bool = True supports_listing: bool = True @@ -126,6 +125,7 @@ async def list(self) -> List[str]: ------- list[str] """ + # Q: do we want to return strings or Paths? 
def _list(root: Path) -> List[str]: files = [str(p) for p in root.rglob("") if p.is_file()] @@ -166,7 +166,6 @@ async def list_dir(self, prefix: str) -> List[str]: """ def _list_dir(root: Path, prefix: str) -> List[str]: - base = root / prefix to_strip = str(base) + "/" try: diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py index fcc8e7b275..2e94a815cc 100644 --- a/src/zarr/v3/sync.py +++ b/src/zarr/v3/sync.py @@ -103,7 +103,6 @@ def _get_loop(): class SyncMixin: - _sync_configuration: SyncConfiguration def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: diff --git a/tests/test_attrs.py b/tests/test_attrs.py index a5ce4bac89..7e3377f664 100644 --- a/tests/test_attrs.py +++ b/tests/test_attrs.py @@ -30,7 +30,6 @@ def init_attributes(self, store, read_only=False, cache=True, zarr_version=2): return Attributes(store, key=root + "attrs", read_only=read_only, cache=cache) def test_storage(self, zarr_version): - store = _init_store(zarr_version) root = ".z" if zarr_version == 2 else meta_root attrs_key = root + "attrs" @@ -50,7 +49,6 @@ def test_storage(self, zarr_version): assert dict(foo="bar", baz=42) == d def test_utf8_encoding(self, zarr_version): - project_root = pathlib.Path(zarr.__file__).resolve().parent.parent fixdir = project_root / "fixture" testdir = fixdir / "utf8attrs" @@ -67,7 +65,6 @@ def test_utf8_encoding(self, zarr_version): assert fixture["utf8attrs"].attrs.asdict() == dict(foo="た") def test_get_set_del_contains(self, zarr_version): - store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) assert "foo" not in a @@ -84,7 +81,6 @@ def test_get_set_del_contains(self, zarr_version): a["foo"] def test_update_put(self, zarr_version): - store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) assert "foo" not in a @@ -102,7 +98,6 @@ def test_update_put(self, zarr_version): assert "baz" not in a def test_iterators(self, zarr_version): - store = _init_store(zarr_version) a = self.init_attributes(store, zarr_version=zarr_version) assert 0 == len(a) @@ -232,7 +227,6 @@ def test_caching_on(self, zarr_version): assert get_cnt == store.counter["__getitem__", attrs_key] def test_caching_off(self, zarr_version): - # setup store store = CountingDict() if zarr_version == 2 else CountingDictV3() attrs_key = ".zattrs" if zarr_version == 2 else "meta/root/attrs" diff --git a/tests/test_convenience.py b/tests/test_convenience.py index 0970a9e1aa..7cb4db7a35 100644 --- a/tests/test_convenience.py +++ b/tests/test_convenience.py @@ -57,7 +57,6 @@ def _init_creation_kwargs(zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_array(path_type, zarr_version): - store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) @@ -86,7 +85,6 @@ def test_open_array(path_type, zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) def test_open_group(path_type, zarr_version): - store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) @@ -210,7 +208,6 @@ def test_tree(zarr_version): def test_consolidate_metadata( with_chunk_store, zarr_version, listable, monkeypatch, stores_from_path ): - # setup initial data if stores_from_path: store = tempfile.mkdtemp() @@ -399,7 +396,6 @@ def test_save_array_separator(tmpdir, options): class TestCopyStore(unittest.TestCase): - _version = 2 def setUp(self): @@ -536,7 +532,6 @@ def test_if_exists(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class 
TestCopyStoreV3(TestCopyStore): - _version = 3 def setUp(self): @@ -557,7 +552,6 @@ def test_mismatched_store_versions(self): def check_copied_array(original, copied, without_attrs=False, expect_props=None): - # setup source_h5py = original.__module__.startswith("h5py.") dest_h5py = copied.__module__.startswith("h5py.") @@ -621,7 +615,6 @@ def check_copied_array(original, copied, without_attrs=False, expect_props=None) def check_copied_group(original, copied, without_attrs=False, expect_props=None, shallow=False): - # setup if expect_props is None: expect_props = dict() diff --git a/tests/test_creation.py b/tests/test_creation.py index 9307b81b52..27ce00bc8a 100644 --- a/tests/test_creation.py +++ b/tests/test_creation.py @@ -74,7 +74,6 @@ def _init_creation_kwargs(zarr_version, at_root=True): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_array(zarr_version, at_root): - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version kwargs = _init_creation_kwargs(zarr_version, at_root) @@ -213,7 +212,6 @@ def test_full_additional_dtypes(zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_open_array(zarr_version, at_root, dimension_separator): - store = "data/array.zarr" kwargs = _init_creation_kwargs(zarr_version, at_root) @@ -329,7 +327,6 @@ def test_open_array(zarr_version, at_root, dimension_separator): def test_open_array_none(): - # open with both store and zarr_version = None z = open_array(mode="w", shape=100, chunks=10) assert isinstance(z, Array) @@ -339,7 +336,6 @@ def test_open_array_none(): @pytest.mark.parametrize("dimension_separator", [".", "/", None]) @pytest.mark.parametrize("zarr_version", _VERSIONS2) def test_open_array_infer_separator_from_store(zarr_version, dimension_separator): - if zarr_version == 3: StoreClass = DirectoryStoreV3 path = "data" @@ -370,7 +366,6 @@ def test_open_array_infer_separator_from_store(zarr_version, dimension_separator # TODO: N5 support for v3 @pytest.mark.parametrize("zarr_version", [None, 2]) def test_open_array_n5(zarr_version): - store = "data/array.zarr" kwargs = _init_creation_kwargs(zarr_version) @@ -409,7 +404,6 @@ def test_open_array_n5(zarr_version): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_open_array_dict_store(zarr_version, at_root): - # dict will become a KVStore store = dict() kwargs = _init_creation_kwargs(zarr_version, at_root) @@ -503,7 +497,6 @@ def test_empty_like(zarr_version, at_root): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_zeros_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -529,7 +522,6 @@ def test_zeros_like(zarr_version, at_root): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_ones_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version @@ -556,7 +548,6 @@ def test_ones_like(zarr_version, at_root): @pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) def test_full_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) 
expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version diff --git a/tests/test_dim_separator.py b/tests/test_dim_separator.py index 83f4d3b5b9..4276d1829d 100644 --- a/tests/test_dim_separator.py +++ b/tests/test_dim_separator.py @@ -46,7 +46,6 @@ def dataset(tmpdir, request): static = project_root / "fixture" / suffix if not static.exists(): # pragma: no cover - if "nested" in which: # No way to reproduce the nested_legacy file via code generator = NestedDirectoryStore diff --git a/tests/test_filters.py b/tests/test_filters.py index d55be9145f..fc63cdca8d 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -30,7 +30,6 @@ def test_array_with_delta_filter(): - # setup astype = "u1" dtype = "i8" @@ -38,7 +37,6 @@ def test_array_with_delta_filter(): data = np.arange(100, dtype=dtype) for compressor in compressors: - a = array(data, chunks=10, compressor=compressor, filters=filters) # check round-trip @@ -57,7 +55,6 @@ def test_array_with_delta_filter(): def test_array_with_astype_filter(): - # setup encode_dtype = "i1" decode_dtype = "i8" @@ -68,7 +65,6 @@ def test_array_with_astype_filter(): data = np.arange(shape, dtype=decode_dtype) for compressor in compressors: - a = array(data, chunks=chunks, compressor=compressor, filters=filters) # check round-trip @@ -88,7 +84,6 @@ def test_array_with_astype_filter(): def test_array_with_scaleoffset_filter(): - # setup astype = "u1" dtype = "f8" @@ -97,7 +92,6 @@ def test_array_with_scaleoffset_filter(): data = np.linspace(1000, 1001, 34, dtype="f8") for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip @@ -116,7 +110,6 @@ def test_array_with_scaleoffset_filter(): def test_array_with_quantize_filter(): - # setup dtype = "f8" digits = 3 @@ -125,7 +118,6 @@ def test_array_with_quantize_filter(): data = np.linspace(0, 1, 34, dtype=dtype) for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip @@ -144,14 +136,12 @@ def test_array_with_quantize_filter(): def test_array_with_packbits_filter(): - # setup flt = PackBits() filters = [flt] data = np.random.randint(0, 2, size=100, dtype=bool) for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip @@ -170,14 +160,12 @@ def test_array_with_packbits_filter(): def test_array_with_categorize_filter(): - # setup data = np.random.choice(["foo", "bar", "baz"], size=100) flt = Categorize(dtype=data.dtype, labels=["foo", "bar", "baz"]) filters = [flt] for compressor in compressors: - a = array(data, chunks=5, compressor=compressor, filters=filters) # check round-trip diff --git a/tests/test_group_v3.py b/tests/test_group_v3.py index 1498d6779b..f5b5dde86d 100644 --- a/tests/test_group_v3.py +++ b/tests/test_group_v3.py @@ -14,7 +14,6 @@ def store_path(tmpdir): def test_group(store_path) -> None: - agroup = AsyncGroup( metadata=GroupMetadata(), store_path=store_path, @@ -57,7 +56,6 @@ def test_group(store_path) -> None: def test_group_sync_constructor(store_path) -> None: - group = Group.create( store=store_path, attributes={"title": "test 123"}, diff --git a/tests/test_hierarchy.py b/tests/test_hierarchy.py index 3eaa4743dd..6d4b1ff54c 100644 --- a/tests/test_hierarchy.py +++ b/tests/test_hierarchy.py @@ -1085,7 +1085,6 @@ def test_paths(self): g1.store.close() def test_pickle(self): - # setup group g = self.create_group() d = g.create_dataset("foo/bar", shape=100, chunks=10) @@ -1113,7 
+1112,6 @@ def test_pickle(self): g2.store.close() def test_context_manager(self): - with self.create_group() as g: d = g.create_dataset("foo/bar", shape=100, chunks=10) d[:] = np.arange(100) @@ -1375,7 +1373,6 @@ def create_store(): return store, None def test_context_manager(self): - with self.create_group() as g: store = g.store d = g.create_dataset("foo/bar", shape=100, chunks=10) diff --git a/tests/test_indexing.py b/tests/test_indexing.py index 1835206819..d441f3b8fa 100644 --- a/tests/test_indexing.py +++ b/tests/test_indexing.py @@ -17,7 +17,6 @@ def test_normalize_integer_selection(): - assert 1 == normalize_integer_selection(1, 100) assert 99 == normalize_integer_selection(-1, 100) with pytest.raises(IndexError): @@ -29,7 +28,6 @@ def test_normalize_integer_selection(): def test_replace_ellipsis(): - # 1D, single item assert (0,) == replace_ellipsis(0, (100,)) @@ -68,7 +66,6 @@ def test_replace_ellipsis(): def test_get_basic_selection_0d(): - # setup a = np.array(42) z = zarr.create(shape=a.shape, dtype=a.dtype, fill_value=None) @@ -191,7 +188,6 @@ def _test_get_basic_selection(a, z, selection): # noinspection PyStatementEffect def test_get_basic_selection_1d(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -264,7 +260,6 @@ def test_get_basic_selection_1d(): # noinspection PyStatementEffect def test_get_basic_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -423,7 +418,6 @@ def test_fancy_indexing_doesnt_mix_with_implicit_slicing(): def test_set_basic_selection_0d(): - # setup v = np.array(42) a = np.zeros_like(v) @@ -479,7 +473,6 @@ def _test_get_orthogonal_selection(a, z, selection): # noinspection PyStatementEffect def test_get_orthogonal_selection_1d_bool(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -502,7 +495,6 @@ def test_get_orthogonal_selection_1d_bool(): # noinspection PyStatementEffect def test_get_orthogonal_selection_1d_int(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -561,7 +553,6 @@ def _test_get_orthogonal_selection_2d(a, z, ix0, ix1): # noinspection PyStatementEffect def test_get_orthogonal_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -570,7 +561,6 @@ def test_get_orthogonal_selection_2d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -641,7 +631,6 @@ def _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2): def test_get_orthogonal_selection_3d(): - # setup a = np.arange(100000, dtype=int).reshape(200, 50, 10) z = zarr.create(shape=a.shape, chunks=(60, 20, 3), dtype=a.dtype) @@ -650,7 +639,6 @@ def test_get_orthogonal_selection_3d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -673,7 +661,6 @@ def test_get_orthogonal_selection_3d(): def test_orthogonal_indexing_edge_cases(): - a = np.arange(6).reshape(1, 2, 3) z = zarr.create(shape=a.shape, chunks=(1, 2, 3), dtype=a.dtype) z[:] = a @@ -706,7 +693,6 @@ def 
_test_set_orthogonal_selection(v, a, z, selection): def test_set_orthogonal_selection_1d(): - # setup v = np.arange(1050, dtype=int) a = np.empty(v.shape, dtype=int) @@ -715,7 +701,6 @@ def test_set_orthogonal_selection_1d(): # test with different degrees of sparseness np.random.seed(42) for p in 0.5, 0.1, 0.01: - # boolean arrays ix = np.random.binomial(1, p, size=a.shape[0]).astype(bool) _test_set_orthogonal_selection(v, a, z, ix) @@ -734,7 +719,6 @@ def test_set_orthogonal_selection_1d(): def _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1): - selections = [ # index both axes with array (ix0, ix1), @@ -749,7 +733,6 @@ def _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1): def test_set_orthogonal_selection_2d(): - # setup v = np.arange(10000, dtype=int).reshape(1000, 10) a = np.empty_like(v) @@ -758,7 +741,6 @@ def test_set_orthogonal_selection_2d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -780,7 +762,6 @@ def test_set_orthogonal_selection_2d(): def _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2): - selections = ( # single value (84, 42, 4), @@ -807,7 +788,6 @@ def _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2): def test_set_orthogonal_selection_3d(): - # setup v = np.arange(100000, dtype=int).reshape(200, 50, 10) a = np.empty_like(v) @@ -816,7 +796,6 @@ def test_set_orthogonal_selection_3d(): np.random.seed(42) # test with different degrees of sparseness for p in 0.5, 0.1, 0.01: - # boolean arrays ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) @@ -888,7 +867,6 @@ def _test_get_coordinate_selection(a, z, selection): # noinspection PyStatementEffect def test_get_coordinate_selection_1d(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -932,7 +910,6 @@ def test_get_coordinate_selection_1d(): def test_get_coordinate_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -1027,7 +1004,6 @@ def test_set_coordinate_selection_1d(): def test_set_coordinate_selection_2d(): - # setup v = np.arange(10000, dtype=int).reshape(1000, 10) a = np.empty_like(v) @@ -1258,7 +1234,6 @@ def _test_get_mask_selection(a, z, selection): # noinspection PyStatementEffect def test_get_mask_selection_1d(): - # setup a = np.arange(1050, dtype=int) z = zarr.create(shape=a.shape, chunks=100, dtype=a.dtype) @@ -1285,7 +1260,6 @@ def test_get_mask_selection_1d(): # noinspection PyStatementEffect def test_get_mask_selection_2d(): - # setup a = np.arange(10000, dtype=int).reshape(1000, 10) z = zarr.create(shape=a.shape, chunks=(300, 3), dtype=a.dtype) @@ -1318,7 +1292,6 @@ def _test_set_mask_selection(v, a, z, selection): def test_set_mask_selection_1d(): - # setup v = np.arange(1050, dtype=int) a = np.empty_like(v) @@ -1338,7 +1311,6 @@ def test_set_mask_selection_1d(): def test_set_mask_selection_2d(): - # setup v = np.arange(10000, dtype=int).reshape(1000, 10) a = np.empty_like(v) @@ -1352,7 +1324,6 @@ def test_set_mask_selection_2d(): def test_get_selection_out(): - # basic selections a = np.arange(1050) z = zarr.create(shape=1050, chunks=100, dtype=a.dtype) @@ -1426,7 +1397,6 @@ def test_get_selection_out(): def test_get_selections_with_fields(): - a = [("aaa", 1, 4.2), ("bbb", 2, 8.4), 
("ccc", 3, 12.6)] a = np.array(a, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) z = zarr.create(shape=a.shape, chunks=2, dtype=a.dtype, fill_value=None) @@ -1444,7 +1414,6 @@ def test_get_selections_with_fields(): ] for fields in fields_fixture: - # total selection expect = a[fields] actual = z.get_basic_selection(Ellipsis, fields=fields) @@ -1534,7 +1503,6 @@ def test_get_selections_with_fields(): def test_set_selections_with_fields(): - v = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] v = np.array(v, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) a = np.empty_like(v) @@ -1553,7 +1521,6 @@ def test_set_selections_with_fields(): ] for fields in fields_fixture: - # currently multi-field assignment is not supported in numpy, so we won't support # it either if isinstance(fields, list) and len(fields) > 1: @@ -1567,7 +1534,6 @@ def test_set_selections_with_fields(): z.set_mask_selection([True, False, True], v, fields=fields) else: - if isinstance(fields, list) and len(fields) == 1: # work around numpy does not support multi-field assignment even if there # is only one field @@ -1752,7 +1718,6 @@ def test_accessed_chunks(shape, chunks, ops): z = zarr.create(shape=shape, chunks=chunks, store=store) for ii, (optype, slices) in enumerate(ops): - # Resolve the slices into the accessed chunks for each dimension chunks_per_dim = [] for N, C, sl in zip(shape, chunks, slices): diff --git a/tests/test_info.py b/tests/test_info.py index 7fb6feb11b..96eae999f4 100644 --- a/tests/test_info.py +++ b/tests/test_info.py @@ -7,7 +7,6 @@ @pytest.mark.parametrize("array_size", [10, 15000]) def test_info(array_size): - # setup g = zarr.group(store=dict(), chunk_store=dict(), synchronizer=zarr.ThreadSynchronizer()) g.create_group("foo") diff --git a/tests/test_meta.py b/tests/test_meta.py index db50560c8e..50f51929ef 100644 --- a/tests/test_meta.py +++ b/tests/test_meta.py @@ -34,7 +34,6 @@ def assert_json_equal(expect, actual): def test_encode_decode_array_1(): - meta = dict( shape=(100,), chunks=(10,), @@ -76,7 +75,6 @@ def test_encode_decode_array_1(): def test_encode_decode_array_2(): - # some variations df = Delta(astype=" Date: Mon, 8 Apr 2024 13:43:18 -0400 Subject: [PATCH 0480/1078] Remove outdated dev install docs from installation.rst and link to contributing.rst (#1643) Co-authored-by: Joe Hamman --- docs/installation.rst | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index 8553d451cb..3d4ac41072 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -19,13 +19,4 @@ latest GitHub main:: $ pip install git+https://github.com/zarr-developers/zarr-python.git -To work with Zarr source code in development, install from GitHub:: - - $ git clone --recursive https://github.com/zarr-developers/zarr-python.git - $ cd zarr-python - $ python -m pip install -e . - -To verify that Zarr has been fully installed, run the test suite:: - - $ pip install pytest - $ python -m pytest -v --pyargs zarr +To work with Zarr source code in development, see `Contributing `_. 
\ No newline at end of file From 3a73950160e9c26bdad2e141c42711b762823b82 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Thu, 11 Apr 2024 19:48:57 +0100 Subject: [PATCH 0481/1078] Allow dmypy to be run (#1780) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9f21a84aee..77b7dcd66f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -150,7 +150,7 @@ extend-exclude = [ [tool.mypy] python_version = "3.8" ignore_missing_imports = true -follow_imports = "silent" +namespace_packages = false [tool.pytest.ini_options] doctest_optionflags = [ From 3a9d968dd856efdb3d56cd73ad527baac6ffeded Mon Sep 17 00:00:00 2001 From: David Stansby Date: Thu, 11 Apr 2024 19:50:31 +0100 Subject: [PATCH 0482/1078] Remove unused typing ignore comments (#1781) Co-authored-by: Davis Bennett --- pyproject.toml | 5 +++++ src/zarr/_storage/store.py | 4 ++-- src/zarr/_storage/v3_storage_transformers.py | 2 +- src/zarr/meta.py | 4 ++-- src/zarr/storage.py | 12 ++++++------ src/zarr/util.py | 2 +- 6 files changed, 17 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 77b7dcd66f..966065655f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -152,6 +152,11 @@ python_version = "3.8" ignore_missing_imports = true namespace_packages = false +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true + + [tool.pytest.ini_options] doctest_optionflags = [ "NORMALIZE_WHITESPACE", diff --git a/src/zarr/_storage/store.py b/src/zarr/_storage/store.py index 80e4ad8f75..9911cfa12d 100644 --- a/src/zarr/_storage/store.py +++ b/src/zarr/_storage/store.py @@ -642,10 +642,10 @@ def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: sfx = _get_metadata_suffix(store) array_meta_file = meta_dir + ".array" + sfx if array_meta_file in store: - store.erase(array_meta_file) # type: ignore + store.erase(array_meta_file) group_meta_file = meta_dir + ".group" + sfx if group_meta_file in store: - store.erase(group_meta_file) # type: ignore + store.erase(group_meta_file) def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]: diff --git a/src/zarr/_storage/v3_storage_transformers.py b/src/zarr/_storage/v3_storage_transformers.py index 3090aea28c..cb11cea52e 100644 --- a/src/zarr/_storage/v3_storage_transformers.py +++ b/src/zarr/_storage/v3_storage_transformers.py @@ -367,7 +367,7 @@ def erase_prefix(self, prefix): def rmdir(self, path=None): path = normalize_storage_path(path) - _rmdir_from_keys_v3(self, path) # type: ignore + _rmdir_from_keys_v3(self, path) def __contains__(self, key): if self._is_data_key(key): diff --git a/src/zarr/meta.py b/src/zarr/meta.py index 80f9017456..3a5435a174 100644 --- a/src/zarr/meta.py +++ b/src/zarr/meta.py @@ -234,8 +234,8 @@ def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> return np.array(v, dtype=dtype)[()] elif dtype.kind in "c": v = ( - cls.decode_fill_value(v[0], dtype.type().real.dtype), # type: ignore - cls.decode_fill_value(v[1], dtype.type().imag.dtype), # type: ignore + cls.decode_fill_value(v[0], dtype.type().real.dtype), + cls.decode_fill_value(v[1], dtype.type().imag.dtype), ) v = v[0] + 1j * v[1] return np.array(v, dtype=dtype)[()] diff --git a/src/zarr/storage.py b/src/zarr/storage.py index e7bd0c4cf4..e3a43d26c8 100644 --- a/src/zarr/storage.py +++ b/src/zarr/storage.py @@ -205,7 +205,7 @@ def rmdir(store: StoreLike, path: Path = None): store_version = getattr(store, "_store_version", 2) if 
hasattr(store, "rmdir") and store.is_erasable(): # type: ignore # pass through - store.rmdir(path) # type: ignore + store.rmdir(path) else: # slow version, delete one key at a time if store_version == 2: @@ -235,7 +235,7 @@ def listdir(store: BaseStore, path: Path = None): path = normalize_storage_path(path) if hasattr(store, "listdir"): # pass through - return store.listdir(path) # type: ignore + return store.listdir(path) else: # slow version, iterate through all keys warnings.warn( @@ -288,7 +288,7 @@ def getsize(store: BaseStore, path: Path = None) -> int: if hasattr(store, "getsize"): # pass through path = normalize_storage_path(path) - return store.getsize(path) # type: ignore + return store.getsize(path) elif isinstance(store, MutableMapping): return _getsize(store, path) else: @@ -626,7 +626,7 @@ def _init_array_metadata( key = _prefix_to_array_key(store, _path_to_prefix(path)) if hasattr(store, "_metadata_class"): - store[key] = store._metadata_class.encode_array_metadata(meta) # type: ignore + store[key] = store._metadata_class.encode_array_metadata(meta) else: store[key] = encode_array_metadata(meta) @@ -729,10 +729,10 @@ def _init_group_metadata( if store_version == 3: meta = {"attributes": {}} # type: ignore else: - meta = {} # type: ignore + meta = {} key = _prefix_to_group_key(store, _path_to_prefix(path)) if hasattr(store, "_metadata_class"): - store[key] = store._metadata_class.encode_group_metadata(meta) # type: ignore + store[key] = store._metadata_class.encode_group_metadata(meta) else: store[key] = encode_group_metadata(meta) diff --git a/src/zarr/util.py b/src/zarr/util.py index 270a444524..35ecc64bba 100644 --- a/src/zarr/util.py +++ b/src/zarr/util.py @@ -182,7 +182,7 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]: # convenience API for object arrays if inspect.isclass(dtype): - dtype = dtype.__name__ # type: ignore + dtype = dtype.__name__ if isinstance(dtype, str): # allow ':' to delimit class from codec arguments tokens = dtype.split(":") From ce6fcbbee24f4429a740963cec8b239a0b76cd34 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 12 Apr 2024 10:42:09 +0100 Subject: [PATCH 0483/1078] Check untyped defs (#1784) Co-authored-by: Davis Bennett --- pyproject.toml | 20 +++++++++++++++++++- src/zarr/attrs.py | 3 ++- src/zarr/n5.py | 5 ++--- src/zarr/v3/sync.py | 5 +++-- 4 files changed, 26 insertions(+), 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 966065655f..b67f5fec94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -148,7 +148,7 @@ extend-exclude = [ ] [tool.mypy] -python_version = "3.8" +python_version = "3.10" ignore_missing_imports = true namespace_packages = false @@ -157,6 +157,24 @@ warn_redundant_casts = true warn_unused_ignores = true +check_untyped_defs = true + +[[tool.mypy.overrides]] +module = [ + "zarr._storage.store", + "zarr._storage.v3_storage_transformers", + "zarr.v3.group", + "zarr.core", + "zarr.hierarchy", + "zarr.indexing", + "zarr.storage", + "zarr.sync", + "zarr.util", + "tests.*", +] +check_untyped_defs = false + + [tool.pytest.ini_options] doctest_optionflags = [ "NORMALIZE_WHITESPACE", diff --git a/src/zarr/attrs.py b/src/zarr/attrs.py index e967c5b853..e589bc9022 100644 --- a/src/zarr/attrs.py +++ b/src/zarr/attrs.py @@ -1,3 +1,4 @@ +from typing import Any import warnings from collections.abc import MutableMapping @@ -39,7 +40,7 @@ def _get_nosync(self): try: data = 
self.store[self.key] except KeyError: - d = dict() + d: dict[str, Any] = dict() if self._version > 2: d["attributes"] = {} else: diff --git a/src/zarr/n5.py b/src/zarr/n5.py index 44b44e69e2..79bab20576 100644 --- a/src/zarr/n5.py +++ b/src/zarr/n5.py @@ -325,10 +325,9 @@ class N5FSStore(FSStore): def __init__(self, *args, **kwargs): if "dimension_separator" in kwargs: - kwargs.pop("dimension_separator") warnings.warn("Keyword argument `dimension_separator` will be ignored") - dimension_separator = "." - super().__init__(*args, dimension_separator=dimension_separator, **kwargs) + kwargs["dimension_separator"] = "." + super().__init__(*args, **kwargs) @staticmethod def _swap_separator(key: str): diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py index 2e94a815cc..41dfeadba9 100644 --- a/src/zarr/v3/sync.py +++ b/src/zarr/v3/sync.py @@ -90,8 +90,9 @@ def _get_loop(): # repeat the check just in case the loop got filled between the # previous two calls from another thread if loop[0] is None: - loop[0] = asyncio.new_event_loop() - th = threading.Thread(target=loop[0].run_forever, name="zarrIO") + new_loop = asyncio.new_event_loop() + loop[0] = new_loop + th = threading.Thread(target=new_loop.run_forever, name="zarrIO") th.daemon = True th.start() iothread[0] = th From a33ebf2a141c8c1ff5e9ac70870e0de4450bd577 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 19 Apr 2024 04:58:14 +0100 Subject: [PATCH 0484/1078] [v3] Enable some more strict mypy options (#1793) * Enable two more strict mypy options * Disallow untyped decorators --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b67f5fec94..e0ca815d95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -155,9 +155,11 @@ namespace_packages = false warn_unused_configs = true warn_redundant_casts = true warn_unused_ignores = true - +strict_equality = true +strict_concatenate = true check_untyped_defs = true +disallow_untyped_decorators = true [[tool.mypy.overrides]] module = [ From 136daec953772478c62886ddddfa9db155b6d2cd Mon Sep 17 00:00:00 2001 From: Charoula-Kyriakides <92877693+Charoula-Kyriakides@users.noreply.github.com> Date: Sun, 21 Apr 2024 12:22:18 +0100 Subject: [PATCH 0485/1078] Implement __eq__ for LocalStore (#1792) * Fix equality comparison in the LocalStore class (#1744) by implementing an __eq__ method * Update src/zarr/v3/store/local.py: changed the annotation of `other` from 'LocalStore' to object in __eq__ Co-authored-by: Davis Bennett --------- Co-authored-by: Davis Bennett --- src/zarr/v3/store/local.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/zarr/v3/store/local.py b/src/zarr/v3/store/local.py index 5d22b30e9a..8f02b904c0 100644 --- a/src/zarr/v3/store/local.py +++ b/src/zarr/v3/store/local.py @@ -67,6 +67,9 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"LocalStore({repr(str(self))})" + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) and self.root == other.root + async def get( self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None ) -> Optional[bytes]: From 472ab13a3a84c89b2aecaf7120b5bd70e3418f67 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Mon, 22 Apr 2024 14:34:58 +0200 Subject: [PATCH 0486/1078] Move v3 tests --- tests/{test_codecs_v3.py => v3/test_codecs.py} | 0 tests/{test_group_v3.py => v3/test_group.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename tests/{test_codecs_v3.py => v3/test_codecs.py} (100%) rename tests/{test_group_v3.py => v3/test_group.py}
(100%) diff --git a/tests/test_codecs_v3.py b/tests/v3/test_codecs.py similarity index 100% rename from tests/test_codecs_v3.py rename to tests/v3/test_codecs.py diff --git a/tests/test_group_v3.py b/tests/v3/test_group.py similarity index 100% rename from tests/test_group_v3.py rename to tests/v3/test_group.py From 368b170f09ea837aea18b375084ecf27912caabb Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Mon, 22 Apr 2024 14:38:56 +0200 Subject: [PATCH 0487/1078] Remove old v3 (#1742) * chore: add deprecation warnings to v3 classes / functions * Resolve Mypy errors in `v3` branch (#1692) * refactor(v3): Using appropriate types * fix(v3): Typing fixes + minor code fixes * fix(v3): _sync_iter works with coroutines * docs(v3/store/core.py): clearer comment * fix(metadata.py): Use Any outside TYPE_CHECKING for Pydantic * fix(zarr/v3): correct zarr format + remove unused method * fix(v3/store/core.py): Potential suggestion on handling str store_like * refactor(zarr/v3): Add more typing * ci(.pre-commit-config.yaml): zarr v3 mypy checks turned on in pre-commit * Specify hatch envs using GitHub actions matrix for v3 tests (#1728) * Specify v3 hatch envs using GitHub actions matrix * Update .github/workflows/test-v3.yml Co-authored-by: Joe Hamman * Update .github/workflows/test-v3.yml Co-authored-by: Joe Hamman * test on 3.12 too * no 3.12 --------- Co-authored-by: Joe Hamman Co-authored-by: Joe Hamman * black -> ruff format + cleanup (#1639) * black -> ruff + cleanup * format * Preserve git blame * pre-commit fix * Remove outdated dev install docs from installation.rst and link to contributing.rst (#1643) Co-authored-by: Joe Hamman * chore: remove old v3 implementation * chore: remove more version-conditional logic * chore: remove v3_storage_transformers.py again --------- Co-authored-by: Daniel Jahn (dahn) Co-authored-by: Max Jones <14077947+maxrjones@users.noreply.github.com> Co-authored-by: Joe Hamman Co-authored-by: Joe Hamman Co-authored-by: Saransh Chopra Co-authored-by: Alden Keefe Sampson --- src/zarr/__init__.py | 16 - src/zarr/_storage/absstore.py | 55 +- src/zarr/_storage/store.py | 485 +------------- src/zarr/_storage/v3.py | 625 ----------------- src/zarr/_storage/v3_storage_transformers.py | 382 ----------- src/zarr/attrs.py | 67 +- src/zarr/convenience.py | 108 +-- src/zarr/core.py | 172 ++--- src/zarr/creation.py | 54 +- src/zarr/hierarchy.py | 235 +------ src/zarr/meta.py | 279 +------- src/zarr/storage.py | 209 +----- tests/test_attrs.py | 125 ++-- tests/test_convenience.py | 347 ++-------- tests/test_core.py | 666 +------------------ tests/test_creation.py | 215 ++---- tests/test_hierarchy.py | 638 +++--------------- tests/test_meta.py | 142 ---- tests/test_sync.py | 12 +- tests/util.py | 5 - 20 files changed, 424 insertions(+), 4413 deletions(-) delete mode 100644 src/zarr/_storage/v3.py delete mode 100644 src/zarr/_storage/v3_storage_transformers.py diff --git a/src/zarr/__init__.py b/src/zarr/__init__.py index b3c1e05b7e..601b1295ab 100644 --- a/src/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -31,7 +31,6 @@ from zarr.errors import CopyError, MetadataError from zarr.hierarchy import Group, group, open_group from zarr.n5 import N5Store, N5FSStore -from zarr._storage.store import v3_api_available from zarr.storage import ( ABSStore, DBMStore, @@ -53,18 +52,3 @@ # in case setuptools scm screw up and find version to be 0.0.0 assert not __version__.startswith("0.0.0") -if v3_api_available: - from zarr._storage.v3 import ( - ABSStoreV3, - DBMStoreV3, - KVStoreV3,
DirectoryStoreV3, - LMDBStoreV3, - LRUStoreCacheV3, - MemoryStoreV3, - MongoDBStoreV3, - RedisStoreV3, - SQLiteStoreV3, - ZipStoreV3, - ) diff --git a/src/zarr/_storage/absstore.py b/src/zarr/_storage/absstore.py index c9a113148c..d8e292535c 100644 --- a/src/zarr/_storage/absstore.py +++ b/src/zarr/_storage/absstore.py @@ -3,7 +3,7 @@ import warnings from numcodecs.compat import ensure_bytes from zarr.util import normalize_storage_path -from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, Store, StoreV3 +from zarr._storage.store import Store __doctest_requires__ = { ("ABSStore", "ABSStore.*"): ["azure.storage.blob"], @@ -222,56 +222,3 @@ def getsize(self, path=None): def clear(self): self.rmdir() - - -class ABSStoreV3(ABSStore, StoreV3): - def list(self): - return list(self.keys()) - - def __eq__(self, other): - return ( - isinstance(other, ABSStoreV3) - and self.client == other.client - and self.prefix == other.prefix - ) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def rmdir(self, path=None): - if not path: - # Currently allowing clear to delete everything as in v2 - - # If we disallow an empty path then we will need to modify - # TestABSStoreV3 to have the create_store method use a prefix. - ABSStore.rmdir(self, "") - return - - meta_dir = meta_root + path - meta_dir = meta_dir.rstrip("/") - ABSStore.rmdir(self, meta_dir) - - # remove data folder - data_dir = data_root + path - data_dir = data_dir.rstrip("/") - ABSStore.rmdir(self, data_dir) - - # remove metadata files - sfx = _get_metadata_suffix(self) - array_meta_file = meta_dir + ".array" + sfx - if array_meta_file in self: - del self[array_meta_file] - group_meta_file = meta_dir + ".group" + sfx - if group_meta_file in self: - del self[group_meta_file] - - # TODO: adapt the v2 getsize method to work for v3 - # For now, calling the generic keys-based _getsize - def getsize(self, path=None): - from zarr.storage import _getsize # avoid circular import - - return _getsize(self, path) - - -ABSStoreV3.__doc__ = ABSStore.__doc__ diff --git a/src/zarr/_storage/store.py b/src/zarr/_storage/store.py index 9911cfa12d..911af20fda 100644 --- a/src/zarr/_storage/store.py +++ b/src/zarr/_storage/store.py @@ -1,36 +1,18 @@ -import abc -import os -from collections import defaultdict from collections.abc import MutableMapping -from copy import copy -from string import ascii_letters, digits -from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union +from typing import Any, List, Mapping, Optional, Sequence, Union -from zarr.meta import Metadata2, Metadata3 +from zarr.meta import Metadata2 from zarr.util import normalize_storage_path from zarr.context import Context + # v2 store keys array_meta_key = ".zarray" group_meta_key = ".zgroup" attrs_key = ".zattrs" -# v3 paths -meta_root = "meta/root/" -data_root = "data/root/" - DEFAULT_ZARR_VERSION = 2 -v3_api_available = os.environ.get("ZARR_V3_EXPERIMENTAL_API", "0").lower() not in ["0", "false"] - - -def assert_zarr_v3_api_available(): - if not v3_api_available: - raise NotImplementedError( - "# V3 reading and writing is experimental! To enable support, set:\n" - "ZARR_V3_EXPERIMENTAL_API=1" - ) # pragma: no cover - class BaseStore(MutableMapping): """Abstract base class for store implementations. 
@@ -182,371 +164,6 @@ def rmdir(self, path: str = "") -> None: _rmdir_from_keys(self, path) -class StoreV3(BaseStore): - _store_version = 3 - _metadata_class = Metadata3 - _valid_key_characters = set(ascii_letters + digits + "/.-_") - - def _valid_key(self, key: str) -> bool: - """ - Verify that a key conforms to the specification. - - A key is any string containing only character in the range a-z, A-Z, - 0-9, or in the set /.-_ it will return True if that's the case, False - otherwise. - """ - if not isinstance(key, str) or not key.isascii(): - return False - if set(key) - self._valid_key_characters: - return False - return True - - def _validate_key(self, key: str): - """ - Verify that a key conforms to the v3 specification. - - A key is any string containing only character in the range a-z, A-Z, - 0-9, or in the set /.-_ it will return True if that's the case, False - otherwise. - - In spec v3, keys can only start with the prefix meta/, data/ or be - exactly zarr.json and should not end with /. This should not be exposed - to the user, and is a store implementation detail, so this method will - raise a ValueError in that case. - """ - if not self._valid_key(key): - raise ValueError( - f"Keys must be ascii strings and may only contain the " - f"characters {''.join(sorted(self._valid_key_characters))}" - ) - - if ( - not key.startswith("data/") - and (not key.startswith("meta/")) - and (not key == "zarr.json") - # TODO: Possibly allow key == ".zmetadata" too if we write a - # consolidated metadata spec corresponding to this? - ): - raise ValueError("keys starts with unexpected value: `{}`".format(key)) - - if key.endswith("/"): - raise ValueError("keys may not end in /") - - def list_prefix(self, prefix): - if prefix.startswith("/"): - raise ValueError("prefix must not begin with /") - # TODO: force prefix to end with /? - return [k for k in self.list() if k.startswith(prefix)] - - def erase(self, key): - self.__delitem__(key) - - def erase_prefix(self, prefix): - assert prefix.endswith("/") - - if prefix == "/": - all_keys = self.list() - else: - all_keys = self.list_prefix(prefix) - for key in all_keys: - self.erase(key) - - def list_dir(self, prefix): - """ - TODO: carefully test this with trailing/leading slashes - """ - if prefix: # allow prefix = "" ? - assert prefix.endswith("/") - - all_keys = self.list_prefix(prefix) - len_prefix = len(prefix) - keys = [] - prefixes = [] - for k in all_keys: - trail = k[len_prefix:] - if "/" not in trail: - keys.append(prefix + trail) - else: - prefixes.append(prefix + trail.split("/", maxsplit=1)[0] + "/") - return keys, list(set(prefixes)) - - def list(self): - return list(self.keys()) - - def __contains__(self, key): - return key in self.list() - - @abc.abstractmethod - def __setitem__(self, key, value): - """Set a value.""" - - @abc.abstractmethod - def __getitem__(self, key): - """Get a value.""" - - @abc.abstractmethod - def rmdir(self, path=None): - """Remove a data path and all its subkeys and related metadata. - Expects a path without the data or meta root prefix.""" - - @property - def supports_efficient_get_partial_values(self): - return False - - def get_partial_values( - self, key_ranges: Sequence[Tuple[str, Tuple[int, Optional[int]]]] - ) -> List[Union[bytes, memoryview, bytearray]]: - """Get multiple partial values. - key_ranges can be an iterable of key, range pairs, - where a range specifies two integers range_start and range_length - as a tuple, (range_start, range_length). 
- range_length may be None to indicate to read until the end. - range_start may be negative to start reading range_start bytes - from the end of the file. - A key may occur multiple times with different ranges. - Inserts None for missing keys into the returned list.""" - results: List[Union[bytes, memoryview, bytearray]] = [None] * len(key_ranges) # type: ignore[list-item] # noqa: E501 - indexed_ranges_by_key: Dict[str, List[Tuple[int, Tuple[int, Optional[int]]]]] = defaultdict( - list - ) - for i, (key, range_) in enumerate(key_ranges): - indexed_ranges_by_key[key].append((i, range_)) - for key, indexed_ranges in indexed_ranges_by_key.items(): - try: - value = self[key] - except KeyError: # pragma: no cover - continue - for i, (range_from, range_length) in indexed_ranges: - if range_length is None: - results[i] = value[range_from:] - else: - results[i] = value[range_from : range_from + range_length] - return results - - def supports_efficient_set_partial_values(self): - return False - - def set_partial_values(self, key_start_values): - """Set multiple partial values. - key_start_values can be an iterable of key, start and value triplets - as tuples, (key, start, value), where start defines the offset in bytes. - A key may occur multiple times with different starts and non-overlapping values. - Also, start may only be beyond the current value if other values fill the gap. - start may be negative to start writing start bytes from the current - end of the file, ending the file with the new value.""" - unique_keys = set(next(zip(*key_start_values))) - values = {} - for key in unique_keys: - old_value = self.get(key) - values[key] = None if old_value is None else bytearray(old_value) - for key, start, value in key_start_values: - if values[key] is None: - assert start == 0 - values[key] = value - else: - if start > len(values[key]): # pragma: no cover - raise ValueError( - f"Cannot set value at start {start}, " - + f"since it is beyond the data at key {key}, " - + f"having length {len(values[key])}." - ) - if start < 0: - values[key][start:] = value - else: - values[key][start : start + len(value)] = value - for key, value in values.items(): - self[key] = value - - def clear(self): - """Remove all items from store.""" - self.erase_prefix("/") - - def __eq__(self, other): - return NotImplemented - - @staticmethod - def _ensure_store(store): - """ - We want to make sure internally that zarr stores are always a class - with a specific interface derived from ``Store``, which is slightly - different than ``MutableMapping``. - - We'll do this conversion in a few places automatically - """ - from zarr._storage.v3 import KVStoreV3 # avoid circular import - - if store is None: - return None - elif isinstance(store, StoreV3): - return store - elif isinstance(store, Store): - raise ValueError(f"cannot initialize a v3 store with a v{store._store_version} store") - elif isinstance(store, MutableMapping): - return KVStoreV3(store) - else: - for attr in [ - "keys", - "values", - "get", - "__setitem__", - "__getitem__", - "__delitem__", - "__contains__", - ]: - if not hasattr(store, attr): - break - else: - return KVStoreV3(store) - - raise ValueError( - "v3 stores must be subclasses of StoreV3, " - "if your store exposes the MutableMapping interface wrap it in " - f"Zarr.storage.KVStoreV3. Got {store}" - ) - - -class StorageTransformer(MutableMapping, abc.ABC): - """Base class for storage transformers. 
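Stepping back to the get_partial_values and set_partial_values docstrings deleted above: the byte-range conventions they describe amount to ordinary Python slicing. A minimal self-contained illustration:

def read_range(value: bytes, range_from: int, range_length) -> bytes:
    # a None length means "read to the end of the value"
    if range_length is None:
        return value[range_from:]
    return value[range_from : range_from + range_length]

assert read_range(b"0123456789", 2, 3) == b"234"
# a negative start reads relative to the end of the value
assert read_range(b"0123456789", -4, None) == b"6789"

# writing works the same way on a mutable buffer:
buf = bytearray(b"abcdef")
buf[2 : 2 + len(b"XY")] = b"XY"  # write at a byte offset
assert bytes(buf) == b"abXYef"
# a negative start writes from the current end, ending the value with the new bytes
buf[-2:] = b"WXYZ"
assert bytes(buf) == b"abXYWXYZ"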
The methods simply pass on the data as-is - and should be overwritten by sub-classes.""" - - _store_version = 3 - _metadata_class = Metadata3 - - def __init__(self, _type) -> None: - if _type not in self.valid_types: # pragma: no cover - raise ValueError( - f"Storage transformer cannot be initialized with type {_type}, " - + f"must be one of {list(self.valid_types)}." - ) - self.type = _type - self._inner_store = None - - def _copy_for_array(self, array, inner_store): - transformer_copy = copy(self) - transformer_copy._inner_store = inner_store - return transformer_copy - - @abc.abstractproperty - def extension_uri(self): - pass # pragma: no cover - - @abc.abstractproperty - def valid_types(self): - pass # pragma: no cover - - def get_config(self): - """Return a dictionary holding configuration parameters for this - storage transformer. All values must be compatible with JSON encoding.""" - # Override in sub-class if need special encoding of config values. - # By default, assume all non-private members are configuration - # parameters except for type . - return {k: v for k, v in self.__dict__.items() if not k.startswith("_") and k != "type"} - - @classmethod - def from_config(cls, _type, config): - """Instantiate storage transformer from a configuration object.""" - # override in sub-class if need special decoding of config values - - # by default, assume constructor accepts configuration parameters as - # keyword arguments without any special decoding - return cls(_type, **config) - - @property - def inner_store(self) -> Union["StorageTransformer", StoreV3]: - assert ( - self._inner_store is not None - ), "inner_store is not initialized, first get a copy via _copy_for_array." - return self._inner_store - - # The following implementations are usually fine to keep as-is: - - def __eq__(self, other): - return ( - type(self) == type(other) - and self._inner_store == other._inner_store - and self.get_config() == other.get_config() - ) - - def erase(self, key): - self.__delitem__(key) - - def list(self): - return list(self.keys()) - - def list_dir(self, prefix): - return StoreV3.list_dir(self, prefix) - - def is_readable(self): - return self.inner_store.is_readable() - - def is_writeable(self): - return self.inner_store.is_writeable() - - def is_listable(self): - return self.inner_store.is_listable() - - def is_erasable(self): - return self.inner_store.is_erasable() - - def clear(self): - return self.inner_store.clear() - - def __enter__(self): - return self.inner_store.__enter__() - - def __exit__(self, exc_type, exc_value, traceback): - return self.inner_store.__exit__(exc_type, exc_value, traceback) - - def close(self) -> None: - return self.inner_store.close() - - # The following implementations might need to be re-implemented - # by subclasses implementing storage transformers: - - def rename(self, src_path: str, dst_path: str) -> None: - return self.inner_store.rename(src_path, dst_path) - - def list_prefix(self, prefix): - return self.inner_store.list_prefix(prefix) - - def erase_prefix(self, prefix): - return self.inner_store.erase_prefix(prefix) - - def rmdir(self, path=None): - return self.inner_store.rmdir(path) - - def __contains__(self, key): - return self.inner_store.__contains__(key) - - def __setitem__(self, key, value): - return self.inner_store.__setitem__(key, value) - - def __getitem__(self, key): - return self.inner_store.__getitem__(key) - - def __delitem__(self, key): - return self.inner_store.__delitem__(key) - - def __iter__(self): - return self.inner_store.__iter__() - 
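# A minimal sketch of the delegation pattern the surrounding (deleted) methods
# follow: every MutableMapping operation is forwarded verbatim to the wrapped
# inner store, so a transformer only has to override the operations it wants
# to intercept. PassthroughTransformer is illustrative only, not part of
# zarr's API.
class PassthroughTransformer:
    def __init__(self, inner_store):
        self._inner_store = inner_store  # the wrapped store

    def __getitem__(self, key):
        return self._inner_store[key]

    def __setitem__(self, key, value):
        self._inner_store[key] = value

    def __delitem__(self, key):
        del self._inner_store[key]

    def __iter__(self):
        return iter(self._inner_store)

    def __len__(self):
        return len(self._inner_store)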
- def __len__(self): - return self.inner_store.__len__() - - @property - def supports_efficient_get_partial_values(self): - return self.inner_store.supports_efficient_get_partial_values - - def get_partial_values(self, key_ranges): - return self.inner_store.get_partial_values(key_ranges) - - def supports_efficient_set_partial_values(self): - return self.inner_store.supports_efficient_set_partial_values() - - def set_partial_values(self, key_start_values): - return self.inner_store.set_partial_values(key_start_values) - - # allow MutableMapping for backwards compatibility StoreLike = Union[BaseStore, MutableMapping] @@ -560,40 +177,6 @@ def _path_to_prefix(path: Optional[str]) -> str: return prefix -def _get_hierarchy_metadata(store: StoreV3) -> Mapping[str, Any]: - version = getattr(store, "_store_version", 2) - if version < 3: - raise ValueError("zarr.json hierarchy metadata not stored for " f"zarr v{version} stores") - if "zarr.json" not in store: - raise ValueError("zarr.json metadata not found in store") - return store._metadata_class.decode_hierarchy_metadata(store["zarr.json"]) - - -def _get_metadata_suffix(store: StoreV3) -> str: - if "zarr.json" in store: - return _get_hierarchy_metadata(store)["metadata_key_suffix"] - return ".json" - - -def _rename_metadata_v3(store: StoreV3, src_path: str, dst_path: str) -> bool: - """Rename source or group metadata file associated with src_path.""" - any_renamed = False - sfx = _get_metadata_suffix(store) - src_path = src_path.rstrip("/") - dst_path = dst_path.rstrip("/") - _src_array_json = meta_root + src_path + ".array" + sfx - if _src_array_json in store: - new_key = meta_root + dst_path + ".array" + sfx - store[new_key] = store.pop(_src_array_json) - any_renamed = True - _src_group_json = meta_root + src_path + ".group" + sfx - if _src_group_json in store: - new_key = meta_root + dst_path + ".group" + sfx - store[new_key] = store.pop(_src_group_json) - any_renamed = True - return any_renamed - - def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None: # assume path already normalized src_prefix = _path_to_prefix(src_path) @@ -605,19 +188,7 @@ def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None: new_key = dst_prefix + key.lstrip(src_prefix) store[new_key] = store.pop(key) else: - any_renamed = False - for root_prefix in [meta_root, data_root]: - _src_prefix = root_prefix + src_prefix - _dst_prefix = root_prefix + dst_prefix - for key in store.list_prefix(_src_prefix): # type: ignore - new_key = _dst_prefix + key[len(_src_prefix) :] - store[new_key] = store.pop(key) - any_renamed = True - any_meta_renamed = _rename_metadata_v3(store, src_path, dst_path) # type: ignore - any_renamed = any_meta_renamed or any_renamed - - if not any_renamed: - raise ValueError(f"no item {src_path} found to rename") + raise NotImplementedError("This function only supports Zarr version 2.") def _rmdir_from_keys(store: StoreLike, path: Optional[str] = None) -> None: @@ -628,26 +199,6 @@ def _rmdir_from_keys(store: StoreLike, path: Optional[str] = None) -> None: del store[key] -def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: - meta_dir = meta_root + path - meta_dir = meta_dir.rstrip("/") - _rmdir_from_keys(store, meta_dir) - - # remove data folder - data_dir = data_root + path - data_dir = data_dir.rstrip("/") - _rmdir_from_keys(store, data_dir) - - # remove metadata files - sfx = _get_metadata_suffix(store) - array_meta_file = meta_dir + ".array" + sfx - if array_meta_file in store: - 
store.erase(array_meta_file) - group_meta_file = meta_dir + ".group" + sfx - if group_meta_file in store: - store.erase(group_meta_file) - - def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]: # assume path already normalized prefix = _path_to_prefix(path) @@ -661,37 +212,15 @@ def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str def _prefix_to_array_key(store: StoreLike, prefix: str) -> str: - if getattr(store, "_store_version", 2) == 3: - sfx = _get_metadata_suffix(store) # type: ignore - if prefix: - key = meta_root + prefix.rstrip("/") + ".array" + sfx - else: - key = meta_root[:-1] + ".array" + sfx - else: - key = prefix + array_meta_key + key = prefix + array_meta_key return key def _prefix_to_group_key(store: StoreLike, prefix: str) -> str: - if getattr(store, "_store_version", 2) == 3: - sfx = _get_metadata_suffix(store) # type: ignore - if prefix: - key = meta_root + prefix.rstrip("/") + ".group" + sfx - else: - key = meta_root[:-1] + ".group" + sfx - else: - key = prefix + group_meta_key + key = prefix + group_meta_key return key def _prefix_to_attrs_key(store: StoreLike, prefix: str) -> str: - if getattr(store, "_store_version", 2) == 3: - # for v3, attributes are stored in the array metadata - sfx = _get_metadata_suffix(store) # type: ignore - if prefix: - key = meta_root + prefix.rstrip("/") + ".array" + sfx - else: - key = meta_root[:-1] + ".array" + sfx - else: - key = prefix + attrs_key + key = prefix + attrs_key return key diff --git a/src/zarr/_storage/v3.py b/src/zarr/_storage/v3.py deleted file mode 100644 index d3cbc58235..0000000000 --- a/src/zarr/_storage/v3.py +++ /dev/null @@ -1,625 +0,0 @@ -import os -import shutil -from collections import OrderedDict -from collections.abc import MutableMapping -from threading import Lock -from typing import Union, Dict, Any - -from zarr.errors import ( - MetadataError, - ReadOnlyError, -) -from zarr.util import buffer_size, json_loads, normalize_storage_path - -from zarr._storage.absstore import ABSStoreV3 # noqa: F401 -from zarr._storage.store import ( # noqa: F401 - _get_hierarchy_metadata, - _get_metadata_suffix, - _listdir_from_keys, - _rename_from_keys, - _rename_metadata_v3, - _rmdir_from_keys, - _rmdir_from_keys_v3, - _path_to_prefix, - _prefix_to_array_key, - _prefix_to_group_key, - array_meta_key, - attrs_key, - data_root, - group_meta_key, - meta_root, - BaseStore, - Store, - StoreV3, -) -from zarr.storage import ( - DBMStore, - ConsolidatedMetadataStore, - DirectoryStore, - FSStore, - KVStore, - LMDBStore, - LRUStoreCache, - MemoryStore, - MongoDBStore, - RedisStore, - SQLiteStore, - ZipStore, - _getsize, -) - -__doctest_requires__ = { - ("RedisStore", "RedisStore.*"): ["redis"], - ("MongoDBStore", "MongoDBStore.*"): ["pymongo"], - ("LRUStoreCache", "LRUStoreCache.*"): ["s3fs"], -} - - -try: - # noinspection PyUnresolvedReferences - from zarr.codecs import Blosc - - default_compressor = Blosc() -except ImportError: # pragma: no cover - from zarr.codecs import Zlib - - default_compressor = Zlib() - - -Path = Union[str, bytes, None] -# allow MutableMapping for backwards compatibility -StoreLike = Union[BaseStore, MutableMapping] - - -class RmdirV3: - """Mixin class that can be used to ensure override of any existing v2 rmdir class.""" - - def rmdir(self, path: str = "") -> None: - path = normalize_storage_path(path) - _rmdir_from_keys_v3(self, path) # type: ignore - - -class KVStoreV3(RmdirV3, KVStore, StoreV3): - def list(self): - return 
list(self._mutable_mapping.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def __eq__(self, other): - return isinstance(other, KVStoreV3) and self._mutable_mapping == other._mutable_mapping - - -KVStoreV3.__doc__ = KVStore.__doc__ - - -def _get_files_and_dirs_from_path(store, path): - path = normalize_storage_path(path) - - files = [] - # add array metadata file if present - array_key = _prefix_to_array_key(store, path) - if array_key in store: - files.append(os.path.join(store.path, array_key)) - - # add group metadata file if present - group_key = _prefix_to_group_key(store, path) - if group_key in store: - files.append(os.path.join(store.path, group_key)) - - dirs = [] - # add array and group folders if present - for d in [data_root + path, meta_root + path]: - dir_path = os.path.join(store.path, d) - if os.path.exists(dir_path): - dirs.append(dir_path) - return files, dirs - - -class FSStoreV3(FSStore, StoreV3): - # FSStoreV3 doesn't use this (FSStore uses it within _normalize_key) - _META_KEYS = () - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def _default_key_separator(self): - if self.key_separator is None: - self.key_separator = "/" - - def list(self): - return list(self.keys()) - - def _normalize_key(self, key): - key = normalize_storage_path(key).lstrip("/") - return key.lower() if self.normalize_keys else key - - def getsize(self, path=None): - size = 0 - if path is None or path == "": - # size of both the data and meta subdirs - dirs = [] - for d in ["data/root", "meta/root"]: - dir_path = os.path.join(self.path, d) - if os.path.exists(dir_path): - dirs.append(dir_path) - elif path in self: - # access individual element by full path - return buffer_size(self[path]) - else: - files, dirs = _get_files_and_dirs_from_path(self, path) - for file in files: - size += os.path.getsize(file) - for d in dirs: - size += self.fs.du(d, total=True, maxdepth=None) - return size - - def setitems(self, values): - if self.mode == "r": - raise ReadOnlyError() - values = {self._normalize_key(key): val for key, val in values.items()} - - # initialize the /data/root/... folder corresponding to the array! - # Note: tests.test_core_v3.TestArrayWithFSStoreV3PartialRead fails - # without this explicit creation of directories - subdirectories = set(os.path.dirname(v) for v in values.keys()) - for subdirectory in subdirectories: - data_dir = os.path.join(self.path, subdirectory) - if not self.fs.exists(data_dir): - self.fs.mkdir(data_dir) - - self.map.setitems(values) - - def rmdir(self, path=None): - if self.mode == "r": - raise ReadOnlyError() - if path: - for base in [meta_root, data_root]: - store_path = self.dir_path(base + path) - if self.fs.isdir(store_path): - self.fs.rm(store_path, recursive=True) - - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip("/") - array_meta_file = meta_dir + ".array" + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + ".group" + sfx - self.pop(group_meta_file, None) - else: - store_path = self.dir_path(path) - if self.fs.isdir(store_path): - self.fs.rm(store_path, recursive=True) - - @property - def supports_efficient_get_partial_values(self): - return True - - def get_partial_values(self, key_ranges): - """Get multiple partial values. 
- key_ranges can be an iterable of key, range pairs, - where a range specifies two integers range_start and range_length - as a tuple, (range_start, range_length). - range_length may be None to indicate to read until the end. - range_start may be negative to start reading range_start bytes - from the end of the file. - A key may occur multiple times with different ranges. - Inserts None for missing keys into the returned list.""" - results = [] - for key, (range_start, range_length) in key_ranges: - key = self._normalize_key(key) - path = self.dir_path(key) - try: - if range_start is None or range_length is None: - end = None - else: - end = range_start + range_length - result = self.fs.cat_file(path, start=range_start, end=end) - except self.map.missing_exceptions: - result = None - results.append(result) - return results - - -class MemoryStoreV3(MemoryStore, StoreV3): - def __init__(self, root=None, cls=dict, dimension_separator=None): - if root is None: - self.root = cls() - else: - self.root = root - self.cls = cls - self.write_mutex = Lock() - self._dimension_separator = dimension_separator # TODO: modify for v3? - - def __eq__(self, other): - return ( - isinstance(other, MemoryStoreV3) and self.root == other.root and self.cls == other.cls - ) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def list(self): - return list(self.keys()) - - def getsize(self, path: Path = None): - return _getsize(self, path) - - def rename(self, src_path: Path, dst_path: Path): - src_path = normalize_storage_path(src_path) - dst_path = normalize_storage_path(dst_path) - - any_renamed = False - for base in [meta_root, data_root]: - if self.list_prefix(base + src_path): - src_parent, src_key = self._get_parent(base + src_path) - dst_parent, dst_key = self._require_parent(base + dst_path) - - if src_key in src_parent: - dst_parent[dst_key] = src_parent.pop(src_key) - - if base == meta_root: - # check for and move corresponding metadata - sfx = _get_metadata_suffix(self) - src_meta = src_key + ".array" + sfx - if src_meta in src_parent: - dst_meta = dst_key + ".array" + sfx - dst_parent[dst_meta] = src_parent.pop(src_meta) - src_meta = src_key + ".group" + sfx - if src_meta in src_parent: - dst_meta = dst_key + ".group" + sfx - dst_parent[dst_meta] = src_parent.pop(src_meta) - any_renamed = True - any_renamed = _rename_metadata_v3(self, src_path, dst_path) or any_renamed - if not any_renamed: - raise ValueError(f"no item {src_path} found to rename") - - def rmdir(self, path: Path = None): - path = normalize_storage_path(path) - if path: - for base in [meta_root, data_root]: - try: - parent, key = self._get_parent(base + path) - value = parent[key] - except KeyError: - continue - else: - if isinstance(value, self.cls): - del parent[key] - - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip("/") - array_meta_file = meta_dir + ".array" + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + ".group" + sfx - self.pop(group_meta_file, None) - else: - # clear out root - self.root = self.cls() - - -MemoryStoreV3.__doc__ = MemoryStore.__doc__ - - -class DirectoryStoreV3(DirectoryStore, StoreV3): - def list(self): - return list(self.keys()) - - def __eq__(self, other): - return isinstance(other, DirectoryStoreV3) and self.path == other.path - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def getsize(self, path: Path = None): - return 
_getsize(self, path) - - def rename(self, src_path, dst_path, metadata_key_suffix=".json"): - store_src_path = normalize_storage_path(src_path) - store_dst_path = normalize_storage_path(dst_path) - - dir_path = self.path - any_existed = False - for root_prefix in ["meta", "data"]: - src_path = os.path.join(dir_path, root_prefix, "root", store_src_path) - if os.path.exists(src_path): - any_existed = True - dst_path = os.path.join(dir_path, root_prefix, "root", store_dst_path) - os.renames(src_path, dst_path) - - for suffix in [".array" + metadata_key_suffix, ".group" + metadata_key_suffix]: - src_meta = os.path.join(dir_path, "meta", "root", store_src_path + suffix) - if os.path.exists(src_meta): - any_existed = True - dst_meta = os.path.join(dir_path, "meta", "root", store_dst_path + suffix) - dst_dir = os.path.dirname(dst_meta) - if not os.path.exists(dst_dir): - os.makedirs(dst_dir) - os.rename(src_meta, dst_meta) - if not any_existed: - raise FileNotFoundError("nothing found at src_path") - - def rmdir(self, path=None): - store_path = normalize_storage_path(path) - dir_path = self.path - if store_path: - for base in [meta_root, data_root]: - dir_path = os.path.join(dir_path, base + store_path) - if os.path.isdir(dir_path): - shutil.rmtree(dir_path) - - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip("/") - array_meta_file = meta_dir + ".array" + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + ".group" + sfx - self.pop(group_meta_file, None) - - elif os.path.isdir(dir_path): - shutil.rmtree(dir_path) - - -DirectoryStoreV3.__doc__ = DirectoryStore.__doc__ - - -class ZipStoreV3(ZipStore, StoreV3): - def list(self): - return list(self.keys()) - - def __eq__(self, other): - return ( - isinstance(other, ZipStore) - and self.path == other.path - and self.compression == other.compression - and self.allowZip64 == other.allowZip64 - ) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def getsize(self, path=None): - path = normalize_storage_path(path) - with self.mutex: - children = self.list_prefix(data_root + path) - children += self.list_prefix(meta_root + path) - print(f"path={path}, children={children}") - if children: - size = 0 - for name in children: - info = self.zf.getinfo(name) - size += info.compress_size - return size - elif path in self: - info = self.zf.getinfo(path) - return info.compress_size - else: - return 0 - - -ZipStoreV3.__doc__ = ZipStore.__doc__ - - -class RedisStoreV3(RmdirV3, RedisStore, StoreV3): - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -RedisStoreV3.__doc__ = RedisStore.__doc__ - - -class MongoDBStoreV3(RmdirV3, MongoDBStore, StoreV3): - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -MongoDBStoreV3.__doc__ = MongoDBStore.__doc__ - - -class DBMStoreV3(RmdirV3, DBMStore, StoreV3): - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -DBMStoreV3.__doc__ = DBMStore.__doc__ - - -class LMDBStoreV3(RmdirV3, LMDBStore, StoreV3): - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -LMDBStoreV3.__doc__ = LMDBStore.__doc__ - - -class 
SQLiteStoreV3(SQLiteStore, StoreV3): - def list(self): - return list(self.keys()) - - def getsize(self, path=None): - # TODO: why does the query below not work in this case? - # For now fall back to the default _getsize implementation - # size = 0 - # for _path in [data_root + path, meta_root + path]: - # c = self.cursor.execute( - # ''' - # SELECT COALESCE(SUM(LENGTH(v)), 0) FROM zarr - # WHERE k LIKE (? || "%") AND - # 0 == INSTR(LTRIM(SUBSTR(k, LENGTH(?) + 1), "/"), "/") - # ''', - # (_path, _path) - # ) - # for item_size, in c: - # size += item_size - # return size - - # fallback to default implementation for now - return _getsize(self, path) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - def rmdir(self, path=None): - path = normalize_storage_path(path) - if path: - for base in [meta_root, data_root]: - with self.lock: - self.cursor.execute('DELETE FROM zarr WHERE k LIKE (? || "/%")', (base + path,)) - # remove any associated metadata files - sfx = _get_metadata_suffix(self) - meta_dir = (meta_root + path).rstrip("/") - array_meta_file = meta_dir + ".array" + sfx - self.pop(array_meta_file, None) - group_meta_file = meta_dir + ".group" + sfx - self.pop(group_meta_file, None) - else: - self.clear() - - -SQLiteStoreV3.__doc__ = SQLiteStore.__doc__ - - -class LRUStoreCacheV3(RmdirV3, LRUStoreCache, StoreV3): - def __init__(self, store, max_size: int): - self._store = StoreV3._ensure_store(store) - self._max_size = max_size - self._current_size = 0 - self._keys_cache = None - self._contains_cache = {} - self._listdir_cache: Dict[Path, Any] = dict() - self._values_cache: Dict[Path, Any] = OrderedDict() - self._mutex = Lock() - self.hits = self.misses = 0 - - def list(self): - return list(self.keys()) - - def __setitem__(self, key, value): - self._validate_key(key) - super().__setitem__(key, value) - - -LRUStoreCacheV3.__doc__ = LRUStoreCache.__doc__ - - -class ConsolidatedMetadataStoreV3(ConsolidatedMetadataStore, StoreV3): - """A layer over other storage, where the metadata has been consolidated into - a single key. - - The purpose of this class, is to be able to get all of the metadata for - a given array in a single read operation from the underlying storage. - See :func:`zarr.convenience.consolidate_metadata` for how to create this - single metadata key. - - This class loads from the one key, and stores the data in a dict, so that - accessing the keys no longer requires operations on the backend store. - - This class is read-only, and attempts to change the array metadata will - fail, but changing the data is possible. If the backend storage is changed - directly, then the metadata stored here could become obsolete, and - :func:`zarr.convenience.consolidate_metadata` should be called again and the class - re-invoked. The use case is for write once, read many times. - - .. note:: This is an experimental feature. - - Parameters - ---------- - store: Store - Containing the zarr array. - metadata_key: str - The target in the store where all of the metadata are stored. We - assume JSON encoding. 
- - See Also - -------- - zarr.convenience.consolidate_metadata, zarr.convenience.open_consolidated - - """ - - def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zmetadata"): - self.store = StoreV3._ensure_store(store) - - # retrieve consolidated metadata - meta = json_loads(self.store[metadata_key]) - - # check format of consolidated metadata - consolidated_format = meta.get("zarr_consolidated_format", None) - if consolidated_format != 1: - raise MetadataError( - "unsupported zarr consolidated metadata format: %s" % consolidated_format - ) - - # decode metadata - self.meta_store: Store = KVStoreV3(meta["metadata"]) - - def rmdir(self, key): - raise ReadOnlyError() - - -def _normalize_store_arg_v3(store: Any, storage_options=None, mode="r") -> BaseStore: - # default to v2 store for backward compatibility - zarr_version = getattr(store, "_store_version", 3) - if zarr_version != 3: - raise ValueError("store must be a version 3 store") - if store is None: - store = KVStoreV3(dict()) - # add default zarr.json metadata - store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) - return store - if isinstance(store, os.PathLike): - store = os.fspath(store) - if FSStore._fsspec_installed(): - import fsspec - - if isinstance(store, fsspec.FSMap): - return FSStoreV3( - store.root, - fs=store.fs, - mode=mode, - check=store.check, - create=store.create, - missing_exceptions=store.missing_exceptions, - **(storage_options or {}), - ) - if isinstance(store, str): - if "://" in store or "::" in store: - store = FSStoreV3(store, mode=mode, **(storage_options or {})) - elif storage_options: - raise ValueError("storage_options passed with non-fsspec path") - elif store.endswith(".zip"): - store = ZipStoreV3(store, mode=mode) - elif store.endswith(".n5"): - raise NotImplementedError("N5Store not yet implemented for V3") - # return N5StoreV3(store) - else: - store = DirectoryStoreV3(store) - else: - store = StoreV3._ensure_store(store) - - if "zarr.json" not in store: - # add default zarr.json metadata - store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) - return store diff --git a/src/zarr/_storage/v3_storage_transformers.py b/src/zarr/_storage/v3_storage_transformers.py deleted file mode 100644 index cb11cea52e..0000000000 --- a/src/zarr/_storage/v3_storage_transformers.py +++ /dev/null @@ -1,382 +0,0 @@ -import functools -import itertools -import os -from typing import NamedTuple, Tuple, Optional, Union, Iterator - -from numcodecs.compat import ensure_bytes -import numpy as np - -from zarr._storage.store import StorageTransformer, StoreV3, _rmdir_from_keys_v3 -from zarr.util import normalize_storage_path - - -MAX_UINT_64 = 2**64 - 1 - - -v3_sharding_available = os.environ.get("ZARR_V3_SHARDING", "0").lower() not in ["0", "false"] - - -def assert_zarr_v3_sharding_available(): - if not v3_sharding_available: - raise NotImplementedError( - "Using V3 sharding is experimental and not yet finalized! 
To enable support, set:\n" - "ZARR_V3_SHARDING=1" - ) # pragma: no cover - - -class _ShardIndex(NamedTuple): - store: "ShardingStorageTransformer" - # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) - offsets_and_lengths: np.ndarray - - def __localize_chunk__(self, chunk: Tuple[int, ...]) -> Tuple[int, ...]: - return tuple( - chunk_i % shard_i for chunk_i, shard_i in zip(chunk, self.store.chunks_per_shard) - ) - - def is_all_empty(self) -> bool: - return np.array_equiv(self.offsets_and_lengths, MAX_UINT_64) - - def get_chunk_slice(self, chunk: Tuple[int, ...]) -> Optional[slice]: - localized_chunk = self.__localize_chunk__(chunk) - chunk_start, chunk_len = self.offsets_and_lengths[localized_chunk] - if (chunk_start, chunk_len) == (MAX_UINT_64, MAX_UINT_64): - return None - else: - return slice(int(chunk_start), int(chunk_start + chunk_len)) - - def set_chunk_slice(self, chunk: Tuple[int, ...], chunk_slice: Optional[slice]) -> None: - localized_chunk = self.__localize_chunk__(chunk) - if chunk_slice is None: - self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64) - else: - self.offsets_and_lengths[localized_chunk] = ( - chunk_slice.start, - chunk_slice.stop - chunk_slice.start, - ) - - def to_bytes(self) -> bytes: - return self.offsets_and_lengths.tobytes(order="C") - - @classmethod - def from_bytes( - cls, buffer: Union[bytes, bytearray], store: "ShardingStorageTransformer" - ) -> "_ShardIndex": - try: - return cls( - store=store, - offsets_and_lengths=np.frombuffer(bytearray(buffer), dtype=" None: - super().__init__(_type) - assert test_value == self.TEST_CONSTANT - self.test_value = test_value - - -class ShardingStorageTransformer(StorageTransformer): # lgtm[py/missing-equals] - """Implements sharding as a storage transformer, as described in the spec: - https://zarr-specs.readthedocs.io/en/latest/extensions/storage-transformers/sharding/v1.0.html - https://purl.org/zarr/spec/storage_transformers/sharding/1.0 - """ - - extension_uri = "https://purl.org/zarr/spec/storage_transformers/sharding/1.0" - valid_types = ["indexed"] - - def __init__(self, _type, chunks_per_shard) -> None: - assert_zarr_v3_sharding_available() - super().__init__(_type) - if isinstance(chunks_per_shard, int): - chunks_per_shard = (chunks_per_shard,) - else: - chunks_per_shard = tuple(int(i) for i in chunks_per_shard) - if chunks_per_shard == (): - chunks_per_shard = (1,) - self.chunks_per_shard = chunks_per_shard - self._num_chunks_per_shard = functools.reduce(lambda x, y: x * y, chunks_per_shard, 1) - self._dimension_separator = None - self._data_key_prefix = None - - def _copy_for_array(self, array, inner_store): - transformer_copy = super()._copy_for_array(array, inner_store) - transformer_copy._dimension_separator = array._dimension_separator - transformer_copy._data_key_prefix = array._data_key_prefix - if len(array._shape) > len(self.chunks_per_shard): - # The array shape might be longer when initialized with subdtypes. - # subdtypes dimensions come last, therefore padding chunks_per_shard - # with ones, effectively disabling sharding on the unlisted dimensions. - transformer_copy.chunks_per_shard += (1,) * ( - len(array._shape) - len(self.chunks_per_shard) - ) - return transformer_copy - - @property - def dimension_separator(self) -> str: - assert ( - self._dimension_separator is not None - ), "dimension_separator is not initialized, first get a copy via _copy_for_array." 
- return self._dimension_separator - - def _is_data_key(self, key: str) -> bool: - assert ( - self._data_key_prefix is not None - ), "data_key_prefix is not initialized, first get a copy via _copy_for_array." - return key.startswith(self._data_key_prefix) - - def _key_to_shard(self, chunk_key: str) -> Tuple[str, Tuple[int, ...]]: - prefix, _, chunk_string = chunk_key.rpartition("c") - chunk_subkeys = ( - tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,) - ) - shard_key_tuple = ( - subkey // shard_i for subkey, shard_i in zip(chunk_subkeys, self.chunks_per_shard) - ) - shard_key = prefix + "c" + self.dimension_separator.join(map(str, shard_key_tuple)) - return shard_key, chunk_subkeys - - def _get_index_from_store(self, shard_key: str) -> _ShardIndex: - # At the end of each shard 2*64bit per chunk for offset and length define the index: - index_bytes = self.inner_store.get_partial_values( - [(shard_key, (-16 * self._num_chunks_per_shard, None))] - )[0] - if index_bytes is None: - raise KeyError(shard_key) - return _ShardIndex.from_bytes( - index_bytes, - self, - ) - - def _get_index_from_buffer(self, buffer: Union[bytes, bytearray]) -> _ShardIndex: - # At the end of each shard 2*64bit per chunk for offset and length define the index: - return _ShardIndex.from_bytes(buffer[-16 * self._num_chunks_per_shard :], self) - - def _get_chunks_in_shard(self, shard_key: str) -> Iterator[Tuple[int, ...]]: - _, _, chunk_string = shard_key.rpartition("c") - shard_key_tuple = ( - tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,) - ) - for chunk_offset in itertools.product(*(range(i) for i in self.chunks_per_shard)): - yield tuple( - shard_key_i * shards_i + offset_i - for shard_key_i, offset_i, shards_i in zip( - shard_key_tuple, chunk_offset, self.chunks_per_shard - ) - ) - - def __getitem__(self, key): - if self._is_data_key(key): - if self.supports_efficient_get_partial_values: - # Use the partial implementation, which fetches the index separately - value = self.get_partial_values([(key, (0, None))])[0] - if value is None: - raise KeyError(key) - else: - return value - shard_key, chunk_subkey = self._key_to_shard(key) - try: - full_shard_value = self.inner_store[shard_key] - except KeyError: - raise KeyError(key) - index = self._get_index_from_buffer(full_shard_value) - chunk_slice = index.get_chunk_slice(chunk_subkey) - if chunk_slice is not None: - return full_shard_value[chunk_slice] - else: - raise KeyError(key) - else: - return self.inner_store.__getitem__(key) - - def __setitem__(self, key, value): - value = ensure_bytes(value) - if self._is_data_key(key): - shard_key, chunk_subkey = self._key_to_shard(key) - chunks_to_read = set(self._get_chunks_in_shard(shard_key)) - chunks_to_read.remove(chunk_subkey) - new_content = {chunk_subkey: value} - try: - if self.supports_efficient_get_partial_values: - index = self._get_index_from_store(shard_key) - full_shard_value = None - else: - full_shard_value = self.inner_store[shard_key] - index = self._get_index_from_buffer(full_shard_value) - except KeyError: - index = _ShardIndex.create_empty(self) - else: - chunk_slices = [ - (chunk_to_read, index.get_chunk_slice(chunk_to_read)) - for chunk_to_read in chunks_to_read - ] - valid_chunk_slices = [ - (chunk_to_read, chunk_slice) - for chunk_to_read, chunk_slice in chunk_slices - if chunk_slice is not None - ] - # use get_partial_values if less than half of the available chunks must be read: - # (This can be changed when 
set_partial_values can be used efficiently.) - use_partial_get = ( - self.supports_efficient_get_partial_values - and len(valid_chunk_slices) < len(chunk_slices) / 2 - ) - - if use_partial_get: - chunk_values = self.inner_store.get_partial_values( - [ - ( - shard_key, - ( - chunk_slice.start, - chunk_slice.stop - chunk_slice.start, - ), - ) - for _, chunk_slice in valid_chunk_slices - ] - ) - for chunk_value, (chunk_to_read, _) in zip(chunk_values, valid_chunk_slices): - new_content[chunk_to_read] = chunk_value - else: - if full_shard_value is None: - full_shard_value = self.inner_store[shard_key] - for chunk_to_read, chunk_slice in valid_chunk_slices: - if chunk_slice is not None: - new_content[chunk_to_read] = full_shard_value[chunk_slice] - - shard_content = b"" - for chunk_subkey, chunk_content in new_content.items(): - chunk_slice = slice(len(shard_content), len(shard_content) + len(chunk_content)) - index.set_chunk_slice(chunk_subkey, chunk_slice) - shard_content += chunk_content - # Appending the index at the end of the shard: - shard_content += index.to_bytes() - self.inner_store[shard_key] = shard_content - else: # pragma: no cover - self.inner_store[key] = value - - def __delitem__(self, key): - if self._is_data_key(key): - shard_key, chunk_subkey = self._key_to_shard(key) - try: - index = self._get_index_from_store(shard_key) - except KeyError: - raise KeyError(key) - - index.set_chunk_slice(chunk_subkey, None) - - if index.is_all_empty(): - del self.inner_store[shard_key] - else: - index_bytes = index.to_bytes() - self.inner_store.set_partial_values([(shard_key, -len(index_bytes), index_bytes)]) - else: # pragma: no cover - del self.inner_store[key] - - def _shard_key_to_original_keys(self, key: str) -> Iterator[str]: - if self._is_data_key(key): - index = self._get_index_from_store(key) - prefix, _, _ = key.rpartition("c") - for chunk_tuple in self._get_chunks_in_shard(key): - if index.get_chunk_slice(chunk_tuple) is not None: - yield prefix + "c" + self.dimension_separator.join(map(str, chunk_tuple)) - else: - yield key - - def __iter__(self) -> Iterator[str]: - for key in self.inner_store: - yield from self._shard_key_to_original_keys(key) - - def __len__(self): - return sum(1 for _ in self.keys()) - - def get_partial_values(self, key_ranges): - if self.supports_efficient_get_partial_values: - transformed_key_ranges = [] - cached_indices = {} - none_indices = [] - for i, (key, range_) in enumerate(key_ranges): - if self._is_data_key(key): - shard_key, chunk_subkey = self._key_to_shard(key) - try: - index = cached_indices[shard_key] - except KeyError: - try: - index = self._get_index_from_store(shard_key) - except KeyError: - none_indices.append(i) - continue - cached_indices[shard_key] = index - chunk_slice = index.get_chunk_slice(chunk_subkey) - if chunk_slice is None: - none_indices.append(i) - continue - range_start, range_length = range_ - if range_length is None: - range_length = chunk_slice.stop - chunk_slice.start - transformed_key_ranges.append( - (shard_key, (range_start + chunk_slice.start, range_length)) - ) - else: # pragma: no cover - transformed_key_ranges.append((key, range_)) - values = self.inner_store.get_partial_values(transformed_key_ranges) - for i in none_indices: - values.insert(i, None) - return values - else: - return StoreV3.get_partial_values(self, key_ranges) - - def supports_efficient_set_partial_values(self): - return False - - def set_partial_values(self, key_start_values): - # This does not yet implement efficient set_partial_values - 
StoreV3.set_partial_values(self, key_start_values) - - def rename(self, src_path: str, dst_path: str) -> None: - StoreV3.rename(self, src_path, dst_path) # type: ignore[arg-type] - - def list_prefix(self, prefix): - return StoreV3.list_prefix(self, prefix) - - def erase_prefix(self, prefix): - if self._is_data_key(prefix): - StoreV3.erase_prefix(self, prefix) - else: - self.inner_store.erase_prefix(prefix) - - def rmdir(self, path=None): - path = normalize_storage_path(path) - _rmdir_from_keys_v3(self, path) - - def __contains__(self, key): - if self._is_data_key(key): - shard_key, chunk_subkeys = self._key_to_shard(key) - try: - index = self._get_index_from_store(shard_key) - except KeyError: - return False - chunk_slice = index.get_chunk_slice(chunk_subkeys) - return chunk_slice is not None - else: - return self._inner_store.__contains__(key) diff --git a/src/zarr/attrs.py b/src/zarr/attrs.py index e589bc9022..65f0423ecd 100644 --- a/src/zarr/attrs.py +++ b/src/zarr/attrs.py @@ -2,7 +2,7 @@ import warnings from collections.abc import MutableMapping -from zarr._storage.store import Store, StoreV3 +from zarr._storage.store import Store from zarr.util import json_dumps @@ -27,8 +27,7 @@ class Attributes(MutableMapping): """ def __init__(self, store, key=".zattrs", read_only=False, cache=True, synchronizer=None): - self._version = getattr(store, "_store_version", 2) - _Store = Store if self._version == 2 else StoreV3 + _Store = Store self.store = _Store._ensure_store(store) self.key = key self.read_only = read_only @@ -41,8 +40,6 @@ def _get_nosync(self): data = self.store[self.key] except KeyError: d: dict[str, Any] = dict() - if self._version > 2: - d["attributes"] = {} else: d = self.store._metadata_class.parse_metadata(data) return d @@ -52,8 +49,6 @@ def asdict(self): if self.cache and self._cached_asdict is not None: return self._cached_asdict d = self._get_nosync() - if self._version == 3: - d = d["attributes"] if self.cache: self._cached_asdict = d return d @@ -61,10 +56,7 @@ def asdict(self): def refresh(self): """Refresh cached attributes from the store.""" if self.cache: - if self._version == 2: - self._cached_asdict = self._get_nosync() - else: - self._cached_asdict = self._get_nosync()["attributes"] + self._cached_asdict = self._get_nosync() def __contains__(self, x): return x in self.asdict() @@ -92,10 +84,8 @@ def _setitem_nosync(self, item, value): d = self._get_nosync() # set key value - if self._version == 2: - d[item] = value - else: - d["attributes"][item] = value + + d[item] = value # _put modified data self._put_nosync(d) @@ -108,10 +98,7 @@ def _delitem_nosync(self, key): d = self._get_nosync() # delete key value - if self._version == 2: - del d[key] - else: - del d["attributes"][key] + del d[key] # _put modified data self._put_nosync(d) @@ -119,13 +106,10 @@ def _delitem_nosync(self, key): def put(self, d): """Overwrite all attributes with the key/value pairs in the provided dictionary `d` in a single operation.""" - if self._version == 2: - self._write_op(self._put_nosync, d) - else: - self._write_op(self._put_nosync, dict(attributes=d)) + self._write_op(self._put_nosync, d) def _put_nosync(self, d): - d_to_check = d if self._version == 2 else d["attributes"] + d_to_check = d if not all(isinstance(item, str) for item in d_to_check): # TODO: Raise an error for non-string keys # raise TypeError("attribute keys must be strings") @@ -140,33 +124,11 @@ def _put_nosync(self, d): except TypeError as ex: # pragma: no cover raise TypeError("attribute keys can not be 
stringified") from ex - if self._version == 2: - d = d_to_check - else: - d["attributes"] = d_to_check + d = d_to_check - if self._version == 2: - self.store[self.key] = json_dumps(d) - if self.cache: - self._cached_asdict = d - else: - if self.key in self.store: - # Cannot write the attributes directly to JSON, but have to - # store it within the pre-existing attributes key of the v3 - # metadata. - - # Note: this changes the store.counter result in test_caching_on! - - meta = self.store._metadata_class.parse_metadata(self.store[self.key]) - if "attributes" in meta and "filters" in meta["attributes"]: - # need to preserve any existing "filters" attribute - d["attributes"]["filters"] = meta["attributes"]["filters"] - meta["attributes"] = d["attributes"] - else: - meta = d - self.store[self.key] = json_dumps(meta) - if self.cache: - self._cached_asdict = d["attributes"] + self.store[self.key] = json_dumps(d) + if self.cache: + self._cached_asdict = d # noinspection PyMethodOverriding def update(self, *args, **kwargs): @@ -178,10 +140,7 @@ def _update_nosync(self, *args, **kwargs): d = self._get_nosync() # update - if self._version == 2: - d.update(*args, **kwargs) - else: - d["attributes"].update(*args, **kwargs) + d.update(*args, **kwargs) # _put modified data self._put_nosync(d) diff --git a/src/zarr/convenience.py b/src/zarr/convenience.py index 9c0deeea47..615a019dc3 100644 --- a/src/zarr/convenience.py +++ b/src/zarr/convenience.py @@ -3,8 +3,6 @@ import os import re from collections.abc import Mapping, MutableMapping - -from zarr._storage.store import data_root, meta_root, assert_zarr_v3_api_available from zarr.core import Array from zarr.creation import array as _create_array from zarr.creation import open_array @@ -14,14 +12,12 @@ from zarr.hierarchy import open_group from zarr.meta import json_dumps, json_loads from zarr.storage import ( - _get_metadata_suffix, contains_array, contains_group, normalize_store_arg, BaseStore, ConsolidatedMetadataStore, ) -from zarr._storage.v3 import ConsolidatedMetadataStoreV3 from zarr.util import TreeViewer, buffer_size, normalize_storage_path from typing import Union @@ -38,7 +34,7 @@ def _check_and_update_path(store: BaseStore, path): # noinspection PyShadowingBuiltins -def open(store: StoreLike = None, mode: str = "a", *, zarr_version=None, path=None, **kwargs): +def open(store: StoreLike = None, mode: str = "a", *, path=None, **kwargs): """Convenience function to open a group or array using file-mode-like semantics. Parameters @@ -50,10 +46,6 @@ def open(store: StoreLike = None, mode: str = "a", *, zarr_version=None, path=No read/write (must exist); 'a' means read/write (create if doesn't exist); 'w' means create (overwrite if exists); 'w-' means create (fail if exists). - zarr_version : {2, 3, None}, optional - The zarr protocol version to use. The default value of None will attempt - to infer the version from `store` if possible, otherwise it will fall - back to 2. path : str or None, optional The path within the store to open. 
**kwargs @@ -101,10 +93,7 @@ def open(store: StoreLike = None, mode: str = "a", *, zarr_version=None, path=No # we pass storage options explicitly, since normalize_store_arg might construct # a store if the input is a fsspec-compatible URL _store: BaseStore = normalize_store_arg( - store, - storage_options=kwargs.pop("storage_options", {}), - mode=mode, - zarr_version=zarr_version, + store, storage_options=kwargs.pop("storage_options", {}), mode=mode ) # path = _check_and_update_path(_store, path) path = normalize_storage_path(path) @@ -135,7 +124,7 @@ def _might_close(path): return isinstance(path, (str, os.PathLike)) -def save_array(store: StoreLike, arr, *, zarr_version=None, path=None, **kwargs): +def save_array(store: StoreLike, arr, *, path=None, **kwargs): """Convenience function to save a NumPy array to the local file system, following a similar API to the NumPy save() function. @@ -145,10 +134,6 @@ def save_array(store: StoreLike, arr, *, zarr_version=None, path=None, **kwargs) Store or path to directory in file system or name of zip file. arr : ndarray NumPy array with data to save. - zarr_version : {2, 3, None}, optional - The zarr protocol version to use when saving. The default value of None - will attempt to infer the version from `store` if possible, otherwise - it will fall back to 2. path : str or None, optional The path within the store where the array will be saved. kwargs @@ -173,19 +158,17 @@ def save_array(store: StoreLike, arr, *, zarr_version=None, path=None, **kwargs) """ may_need_closing = _might_close(store) - _store: BaseStore = normalize_store_arg(store, mode="w", zarr_version=zarr_version) + _store: BaseStore = normalize_store_arg(store, mode="w") path = _check_and_update_path(_store, path) try: - _create_array( - arr, store=_store, overwrite=True, zarr_version=zarr_version, path=path, **kwargs - ) + _create_array(arr, store=_store, overwrite=True, path=path, **kwargs) finally: if may_need_closing: # needed to ensure zip file records are written _store.close() -def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): +def save_group(store: StoreLike, *args, path=None, **kwargs): """Convenience function to save several NumPy arrays to the local file system, following a similar API to the NumPy savez()/savez_compressed() functions. @@ -195,10 +178,6 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): Store or path to directory in file system or name of zip file. args : ndarray NumPy arrays with data to save. - zarr_version : {2, 3, None}, optional - The zarr protocol version to use when saving. The default value of None - will attempt to infer the version from `store` if possible, otherwise - it will fall back to 2. path : str or None, optional Path within the store where the group will be saved. 
kwargs @@ -253,22 +232,22 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): raise ValueError("at least one array must be provided") # handle polymorphic store arg may_need_closing = _might_close(store) - _store: BaseStore = normalize_store_arg(store, mode="w", zarr_version=zarr_version) + _store: BaseStore = normalize_store_arg(store, mode="w") path = _check_and_update_path(_store, path) try: - grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version) + grp = _create_group(_store, path=path, overwrite=True) for i, arr in enumerate(args): k = "arr_{}".format(i) - grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) + grp.create_dataset(k, data=arr, overwrite=True) for k, arr in kwargs.items(): - grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version) + grp.create_dataset(k, data=arr, overwrite=True) finally: if may_need_closing: # needed to ensure zip file records are written _store.close() -def save(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): +def save(store: StoreLike, *args, path=None, **kwargs): """Convenience function to save an array or group of arrays to the local file system. Parameters @@ -277,10 +256,6 @@ def save(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): Store or path to directory in file system or name of zip file. args : ndarray NumPy arrays with data to save. - zarr_version : {2, 3, None}, optional - The zarr protocol version to use when saving. The default value of None - will attempt to infer the version from `store` if possible, otherwise - it will fall back to 2. path : str or None, optional The path within the group where the arrays will be saved. kwargs @@ -349,9 +324,9 @@ def save(store: StoreLike, *args, zarr_version=None, path=None, **kwargs): if len(args) == 0 and len(kwargs) == 0: raise ValueError("at least one array must be provided") if len(args) == 1 and len(kwargs) == 0: - save_array(store, args[0], zarr_version=zarr_version, path=path) + save_array(store, args[0], path=path) else: - save_group(store, *args, zarr_version=zarr_version, path=path, **kwargs) + save_group(store, *args, path=path, **kwargs) class LazyLoader(Mapping): @@ -383,17 +358,13 @@ def __repr__(self): return r -def load(store: StoreLike, zarr_version=None, path=None): +def load(store: StoreLike, path=None): """Load data from an array or group into memory. Parameters ---------- store : MutableMapping or string Store or path to directory in file system or name of zip file. - zarr_version : {2, 3, None}, optional - The zarr protocol version to use when loading. The default value of - None will attempt to infer the version from `store` if possible, - otherwise it will fall back to 2. path : str or None, optional The path within the store from which to load. @@ -415,7 +386,7 @@ def load(store: StoreLike, zarr_version=None, path=None): """ # handle polymorphic store arg - _store = normalize_store_arg(store, zarr_version=zarr_version) + _store = normalize_store_arg(store) path = _check_and_update_path(_store, path) if contains_array(_store, path=path): return Array(store=_store, path=path)[...] 
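With the `zarr_version` keyword removed in the hunks above, `save_array`, `save_group`, `save`, and `load` now operate on Zarr v2 stores exclusively. A minimal round-trip sketch under that assumption (the path `example.zarr` and the array contents are illustrative only):

import numpy as np
import zarr

data = np.arange(12).reshape((3, 4))
# save_array normalizes the path into a store and writes v2 metadata
# (.zarray) alongside the chunk files
zarr.save_array("example.zarr", data)
# load reads the stored array back into memory as a NumPy array
loaded = zarr.load("example.zarr")
assert (loaded == data).all()

Because `normalize_store_arg` no longer accepts a `zarr_version` argument, any store passed to these functions is treated as a v2 store.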
@@ -669,9 +640,7 @@ def copy_store( raise ValueError("zarr stores must share the same protocol version") if source_store_version > 2: - nchar_root = len(meta_root) - # code below assumes len(meta_root) === len(data_root) - assert len(data_root) == nchar_root + raise NotImplementedError("This function only supports Zarr version 2.") # setup logging with _LogWriter(log) as log: @@ -682,10 +651,7 @@ def copy_store( if not source_key.startswith(source_path): continue elif source_store_version == 3: - # skip 'meta/root/' or 'data/root/' at start of source_key - if not source_key[nchar_root:].startswith(source_path): - continue - + raise NotImplementedError("This function only supports Zarr version 2.") # process excludes and includes exclude = False for prog in excludes: @@ -705,10 +671,7 @@ def copy_store( key_suffix = source_key[len(source_path) :] dest_key = dest_path + key_suffix elif source_store_version == 3: - # nchar_root is length of 'meta/root/' or 'data/root/' - key_suffix = source_key[nchar_root + len(source_path) :] - dest_key = source_key[:nchar_root] + dest_path + key_suffix - + raise NotImplementedError("This function only supports Zarr version 2.") # create a descriptive label for this operation descr = source_key if dest_key != source_key: @@ -1177,8 +1140,6 @@ def copy_all( # setup counting variables n_copied = n_skipped = n_bytes_copied = 0 - zarr_version = getattr(source, "_version", 2) - # setup logging with _LogWriter(log) as log: for k in source.keys(): @@ -1197,8 +1158,8 @@ def copy_all( n_copied += c n_skipped += s n_bytes_copied += b - if zarr_version == 2: - dest.attrs.update(**source.attrs) + + dest.attrs.update(**source.attrs) # log a final message with a summary of what happened _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied) @@ -1253,23 +1214,7 @@ def is_zarr_key(key): return key.endswith(".zarray") or key.endswith(".zgroup") or key.endswith(".zattrs") else: - assert_zarr_v3_api_available() - - sfx = _get_metadata_suffix(store) # type: ignore - - def is_zarr_key(key): - return ( - key.endswith(".array" + sfx) or key.endswith(".group" + sfx) or key == "zarr.json" - ) - - # cannot create a group without a path in v3 - # so create /meta/root/consolidated group to store the metadata - if "consolidated" not in store: - _create_group(store, path="consolidated") - if not metadata_key.startswith("meta/root/"): - metadata_key = "meta/root/consolidated/" + metadata_key - # path = 'consolidated' - + raise NotImplementedError("This function only supports Zarr version 2.") out = { "zarr_consolidated_format": 1, "metadata": {key: json_loads(store[key]) for key in store if is_zarr_key(key)}, @@ -1321,10 +1266,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** """ # normalize parameters - zarr_version = kwargs.get("zarr_version") - store = normalize_store_arg( - store, storage_options=kwargs.get("storage_options"), mode=mode, zarr_version=zarr_version - ) + store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode) if mode not in {"r", "r+"}: raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}".format(mode)) @@ -1332,11 +1274,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** if store._store_version == 2: ConsolidatedStoreClass = ConsolidatedMetadataStore else: - assert_zarr_v3_api_available() - ConsolidatedStoreClass = ConsolidatedMetadataStoreV3 - # default is to store within 'consolidated' group on v3 - if not 
metadata_key.startswith("meta/root/"): - metadata_key = "meta/root/consolidated/" + metadata_key + raise NotImplementedError("This function only supports Zarr version 2.") # setup metadata store meta_store = ConsolidatedStoreClass(store, metadata_key=metadata_key) diff --git a/src/zarr/core.py b/src/zarr/core.py index d22a9d79c3..06dcb32063 100644 --- a/src/zarr/core.py +++ b/src/zarr/core.py @@ -10,7 +10,7 @@ import numpy as np from numcodecs.compat import ensure_bytes -from zarr._storage.store import _prefix_to_attrs_key, assert_zarr_v3_api_available +from zarr._storage.store import _prefix_to_attrs_key from zarr.attrs import Attributes from zarr.codecs import AsType, get_codec from zarr.context import Context @@ -36,7 +36,6 @@ pop_fields, ) from zarr.storage import ( - _get_hierarchy_metadata, _prefix_to_array_key, KVStore, getsize, @@ -45,6 +44,7 @@ ) from zarr.util import ( ConstantMap, + UncompressedPartialReadBufferV3, all_equal, InfoReporter, check_array_shape, @@ -56,7 +56,6 @@ normalize_shape, normalize_storage_path, PartialReadBuffer, - UncompressedPartialReadBufferV3, ensure_ndarray_like, ) @@ -125,21 +124,14 @@ def __init__( cache_attrs=True, partial_decompress=False, write_empty_chunks=True, - zarr_version=None, meta_array=None, ): # N.B., expect at this point store is fully initialized with all # configuration metadata fully specified and normalized - - store = normalize_store_arg(store, zarr_version=zarr_version) - if zarr_version is None: - zarr_version = store._store_version - - if zarr_version != 2: - assert_zarr_v3_api_available() + store = normalize_store_arg(store) if chunk_store is not None: - chunk_store = normalize_store_arg(chunk_store, zarr_version=zarr_version) + chunk_store = normalize_store_arg(chunk_store) self._store = store self._chunk_store = chunk_store @@ -159,12 +151,6 @@ def __init__( self._meta_array = np.empty_like(meta_array, shape=()) else: self._meta_array = np.empty(()) - self._version = zarr_version - if self._version == 3: - self._data_key_prefix = "data/root/" + self._key_prefix - self._data_path = "data/root/" + self._path - self._hierarchy_metadata = _get_hierarchy_metadata(store=self._store) - self._metadata_key_suffix = self._hierarchy_metadata["metadata_key_suffix"] # initialize metadata self._load_metadata() @@ -205,26 +191,19 @@ def _load_metadata_nosync(self): self._shape = meta["shape"] self._fill_value = meta["fill_value"] dimension_separator = meta.get("dimension_separator", None) - if self._version == 2: - self._chunks = meta["chunks"] - self._dtype = meta["dtype"] - self._order = meta["order"] - if dimension_separator is None: - try: - dimension_separator = self._store._dimension_separator - except (AttributeError, KeyError): - pass - - # Fallback for any stores which do not choose a default - if dimension_separator is None: - dimension_separator = "." - else: - self._chunks = meta["chunk_grid"]["chunk_shape"] - self._dtype = meta["data_type"] - self._order = meta["chunk_memory_layout"] - chunk_separator = meta["chunk_grid"]["separator"] + + self._chunks = meta["chunks"] + self._dtype = meta["dtype"] + self._order = meta["order"] + if dimension_separator is None: + try: + dimension_separator = self._store._dimension_separator + except (AttributeError, KeyError): + pass + + # Fallback for any stores which do not choose a default if dimension_separator is None: - dimension_separator = meta.get("dimension_separator", chunk_separator) + dimension_separator = "." 
self._dimension_separator = dimension_separator @@ -232,32 +211,17 @@ def _load_metadata_nosync(self): compressor = meta.get("compressor", None) if compressor is None: self._compressor = None - elif self._version == 2: - self._compressor = get_codec(compressor) else: - self._compressor = compressor + self._compressor = get_codec(compressor) # setup filters - if self._version == 2: - filters = meta.get("filters", []) - else: - # TODO: storing filters under attributes for now since the v3 - # array metadata does not have a 'filters' attribute. - filters = meta["attributes"].get("filters", []) + + filters = meta.get("filters", []) + if filters: filters = [get_codec(config) for config in filters] self._filters = filters - if self._version == 3: - storage_transformers = meta.get("storage_transformers", []) - if storage_transformers: - transformed_store = self._chunk_store or self._store - for storage_transformer in storage_transformers[::-1]: - transformed_store = storage_transformer._copy_for_array( - self, transformed_store - ) - self._transformed_chunk_store = transformed_store - def _refresh_metadata(self): if not self._cache_metadata: self._load_metadata() @@ -278,35 +242,22 @@ def _flush_metadata_nosync(self): filters_config = [f.get_config() for f in self._filters] else: filters_config = None - _compressor = compressor_config if self._version == 2 else self._compressor + _compressor = compressor_config meta = dict( shape=self._shape, compressor=_compressor, fill_value=self._fill_value, filters=filters_config, ) - if getattr(self._store, "_store_version", 2) == 2: - meta.update( - dict( - chunks=self._chunks, - dtype=self._dtype, - order=self._order, - dimension_separator=self._dimension_separator, - ) - ) - else: - meta.update( - dict( - chunk_grid=dict( - type="regular", - chunk_shape=self._chunks, - separator=self._dimension_separator, - ), - data_type=self._dtype, - chunk_memory_layout=self._order, - attributes=self.attrs.asdict(), - ) + + meta.update( + dict( + chunks=self._chunks, + dtype=self._dtype, + order=self._order, + dimension_separator=self._dimension_separator, ) + ) mkey = _prefix_to_array_key(self._store, self._key_prefix) self._store[mkey] = self._store._metadata_class.encode_array_metadata(meta) @@ -496,28 +447,11 @@ def nchunks(self): def nchunks_initialized(self): """The number of chunks that have been initialized with some data.""" - # count chunk keys - if self._version == 3: - # # key pattern for chunk keys - # prog = re.compile(r'\.'.join([r'c\d+'] * min(1, self.ndim))) - # # get chunk keys, excluding the prefix - # members = self.chunk_store.list_prefix(self._data_path) - # members = [k.split(self._data_key_prefix)[1] for k in members] - # # count the chunk keys - # return sum(1 for k in members if prog.match(k)) - - # key pattern for chunk keys - prog = re.compile(self._data_key_prefix + r"c\d+") # TODO: ndim == 0 case? 
- # get chunk keys, excluding the prefix - members = self.chunk_store.list_prefix(self._data_path) - # count the chunk keys - return sum(1 for k in members if prog.match(k)) - else: - # key pattern for chunk keys - prog = re.compile(r"\.".join([r"\d+"] * min(1, self.ndim))) + # key pattern for chunk keys + prog = re.compile(r"\.".join([r"\d+"] * min(1, self.ndim))) - # count chunk keys - return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) + # count chunk keys + return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) # backwards compatibility initialized = nchunks_initialized @@ -2044,8 +1978,6 @@ def _process_chunk( cdata = cdata.read_full() self._compressor.decode(cdata, dest) else: - if isinstance(cdata, UncompressedPartialReadBufferV3): - cdata = cdata.read_full() chunk = ensure_ndarray_like(cdata).view(self._dtype) chunk = chunk.reshape(self._chunks, order=self._order) np.copyto(dest, chunk) @@ -2065,21 +1997,13 @@ def _process_chunk( else dim for i, dim in enumerate(self.chunks) ] - if isinstance(cdata, UncompressedPartialReadBufferV3): - chunk_partial = self._decode_chunk( - cdata.read_part(start, nitems), - start=start, - nitems=nitems, - expected_shape=expected_shape, - ) - else: - cdata.read_part(start, nitems) - chunk_partial = self._decode_chunk( - cdata.buff, - start=start, - nitems=nitems, - expected_shape=expected_shape, - ) + cdata.read_part(start, nitems) + chunk_partial = self._decode_chunk( + cdata.buff, + start=start, + nitems=nitems, + expected_shape=expected_shape, + ) tmp[partial_out_selection] = chunk_partial out[out_selection] = tmp[chunk_selection] return @@ -2318,19 +2242,7 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None): return chunk def _chunk_key(self, chunk_coords): - if self._version == 3: - # _chunk_key() corresponds to data_key(P, i, j, ...) example in the spec - # where P = self._key_prefix, i, j, ... = chunk_coords - # e.g. c0/2/3 for 3d array with chunk index (0, 2, 3) - # https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/core/v3.0.html#regular-grids - return ( - "data/root/" - + self._key_prefix - + "c" - + self._dimension_separator.join(map(str, chunk_coords)) - ) - else: - return self._key_prefix + self._dimension_separator.join(map(str, chunk_coords)) + return self._key_prefix + self._dimension_separator.join(map(str, chunk_coords)) def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None): # decompress @@ -2552,7 +2464,6 @@ def __getstate__(self): "cache_attrs": self._attrs.cache, "partial_decompress": self._partial_decompress, "write_empty_chunks": self._write_empty_chunks, - "zarr_version": self._version, "meta_array": self._meta_array, } @@ -2860,7 +2771,6 @@ def view( read_only=read_only, synchronizer=synchronizer, cache_metadata=True, - zarr_version=self._version, ) a._is_view = True diff --git a/src/zarr/creation.py b/src/zarr/creation.py index 6227f90b7b..c93178c0e7 100644 --- a/src/zarr/creation.py +++ b/src/zarr/creation.py @@ -4,7 +4,6 @@ import numpy as np from numcodecs.registry import codec_registry -from zarr._storage.store import DEFAULT_ZARR_VERSION from zarr.core import Array from zarr.errors import ( ArrayNotFoundError, @@ -42,9 +41,7 @@ def create( dimension_separator=None, write_empty_chunks=True, *, - zarr_version=None, meta_array=None, - storage_transformers=(), **kwargs, ): """Create an array. @@ -109,21 +106,6 @@ def create( .. 
versionadded:: 2.11 - storage_transformers : sequence of StorageTransformers, optional - Setting storage transformers, changes the storage structure and behaviour - of data coming from the underlying store. The transformers are applied in the - order of the given sequence. Supplying an empty sequence is the same as omitting - the argument or setting it to None. May only be set when using zarr_version 3. - - .. versionadded:: 2.13 - - zarr_version : {None, 2, 3}, optional - The zarr protocol version of the created array. If None, it will be - inferred from ``store`` or ``chunk_store`` if they are provided, - otherwise defaulting to 2. - - .. versionadded:: 2.12 - meta_array : array-like, optional An array instance to use for determining arrays to create and return to users. Use `numpy.empty(())` by default. @@ -173,12 +155,9 @@ def create( """ - if zarr_version is None and store is None: - zarr_version = getattr(chunk_store, "_store_version", DEFAULT_ZARR_VERSION) # handle polymorphic store arg - store = normalize_store_arg(store, zarr_version=zarr_version, mode="w") - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) + store = normalize_store_arg(store, mode="w") # API compatibility with h5py compressor, fill_value = _kwargs_compat(compressor, fill_value, kwargs) @@ -196,9 +175,6 @@ def create( ) dimension_separator = normalize_dimension_separator(dimension_separator) - if zarr_version > 2 and path is None: - path = "/" - # initialize array metadata init_array( store, @@ -214,7 +190,6 @@ def create( filters=filters, object_codec=object_codec, dimension_separator=dimension_separator, - storage_transformers=storage_transformers, ) # instantiate array @@ -463,7 +438,6 @@ def open_array( partial_decompress=False, write_empty_chunks=True, *, - zarr_version=None, dimension_separator=None, meta_array=None, **kwargs, @@ -531,15 +505,10 @@ def open_array( .. versionadded:: 2.11 - zarr_version : {None, 2, 3}, optional - The zarr protocol version of the array to be opened. If None, it will - be inferred from ``store`` or ``chunk_store`` if they are provided, - otherwise defaulting to 2. dimension_separator : {None, '.', '/'}, optional Can be used to specify whether the array is in a flat ('.') or nested ('/') format. If None, the appropriate value will be read from `store` - when present. Otherwise, defaults to '.' when ``zarr_version == 2`` - and `/` otherwise. + when present. Otherwise, defaults to '.'. meta_array : array-like, optional An array instance to use for determining arrays to create and return to users. Use `numpy.empty(())` by default. 
@@ -579,28 +548,18 @@ def open_array( # w- or x : create, fail if exists # a : read/write if exists, create otherwise (default) - if zarr_version is None and store is None: - zarr_version = getattr(chunk_store, "_store_version", DEFAULT_ZARR_VERSION) - # handle polymorphic store arg - store = normalize_store_arg( - store, storage_options=storage_options, mode=mode, zarr_version=zarr_version - ) - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) + store = normalize_store_arg(store, storage_options=storage_options, mode=mode) + if chunk_store is not None: - chunk_store = normalize_store_arg( - chunk_store, storage_options=storage_options, mode=mode, zarr_version=zarr_version - ) + chunk_store = normalize_store_arg(chunk_store, storage_options=storage_options, mode=mode) # respect the dimension separator specified in a store, if present if dimension_separator is None: if hasattr(store, "_dimension_separator"): dimension_separator = store._dimension_separator else: - dimension_separator = "." if zarr_version == 2 else "/" - - if zarr_version == 3 and path is None: - path = "array" # TODO: raise ValueError instead? + dimension_separator = "." path = normalize_storage_path(path) @@ -709,7 +668,6 @@ def _like_args(a, kwargs): kwargs.setdefault("compressor", a.compressor) kwargs.setdefault("order", a.order) kwargs.setdefault("filters", a.filters) - kwargs.setdefault("zarr_version", a._version) else: kwargs.setdefault("compressor", "default") kwargs.setdefault("order", "C") diff --git a/src/zarr/hierarchy.py b/src/zarr/hierarchy.py index 1c9848e647..e30d2d7996 100644 --- a/src/zarr/hierarchy.py +++ b/src/zarr/hierarchy.py @@ -3,13 +3,6 @@ import numpy as np -from zarr._storage.store import ( - _get_metadata_suffix, - data_root, - meta_root, - DEFAULT_ZARR_VERSION, - assert_zarr_v3_api_available, -) from zarr.attrs import Attributes from zarr.core import Array from zarr.creation import ( @@ -31,21 +24,20 @@ ReadOnlyError, ) from zarr.storage import ( - _get_hierarchy_metadata, _prefix_to_group_key, BaseStore, MemoryStore, + group_meta_key, attrs_key, contains_array, contains_group, - group_meta_key, init_group, listdir, normalize_store_arg, rename, rmdir, ) -from zarr._storage.v3 import MemoryStoreV3 + from zarr.util import ( InfoReporter, TreeViewer, @@ -143,19 +135,12 @@ def __init__( chunk_store=None, cache_attrs=True, synchronizer=None, - zarr_version=None, *, meta_array=None, ): - store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version) - if zarr_version is None: - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) - - if zarr_version != 2: - assert_zarr_v3_api_available() - + store: BaseStore = _normalize_store_arg(store) if chunk_store is not None: - chunk_store: BaseStore = _normalize_store_arg(chunk_store, zarr_version=zarr_version) + chunk_store: BaseStore = _normalize_store_arg(chunk_store) self._store = store self._chunk_store = chunk_store self._path = normalize_storage_path(path) @@ -169,12 +154,6 @@ def __init__( self._meta_array = np.empty_like(meta_array, shape=()) else: self._meta_array = np.empty(()) - self._version = zarr_version - if self._version == 3: - self._data_key_prefix = data_root + self._key_prefix - self._data_path = data_root + self._path - self._hierarchy_metadata = _get_hierarchy_metadata(store=self._store) - self._metadata_key_suffix = _get_metadata_suffix(store=self._store) # guard conditions if contains_array(store, path=self._path): @@ -187,25 +166,13 @@ def __init__( assert not 
mkey.endswith("root/.group") meta_bytes = store[mkey] except KeyError: - if self._version == 2: - raise GroupNotFoundError(path) - else: - implicit_prefix = meta_root + self._key_prefix - if self._store.list_prefix(implicit_prefix): - # implicit group does not have any metadata - self._meta = None - else: - raise GroupNotFoundError(path) + raise GroupNotFoundError(path) else: self._meta = self._store._metadata_class.decode_group_metadata(meta_bytes) # setup attributes - if self._version == 2: - akey = self._key_prefix + attrs_key - else: - # Note: mkey doesn't actually exist for implicit groups, but the - # object can still be created. - akey = mkey + akey = self._key_prefix + attrs_key + self._attrs = Attributes( store, key=akey, read_only=read_only, cache=cache_attrs, synchronizer=synchronizer ) @@ -304,35 +271,11 @@ def __iter__(self): quux """ - if getattr(self._store, "_store_version", 2) == 2: - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_array(self._store, path) or contains_group(self._store, path): - yield key - else: - # TODO: Should this iterate over data folders and/or metadata - # folders and/or metadata files - - dir_path = meta_root + self._key_prefix - name_start = len(dir_path) - keys, prefixes = self._store.list_dir(dir_path) - - # yield any groups or arrays - sfx = self._metadata_key_suffix - for key in keys: - len_suffix = len(".group") + len(sfx) # same for .array - if key.endswith((".group" + sfx, ".array" + sfx)): - yield key[name_start:-len_suffix] - - # also yield any implicit groups - for prefix in prefixes: - prefix = prefix.rstrip("/") - # only implicit if there is no .group.sfx file - if prefix + ".group" + sfx not in self._store: - yield prefix[name_start:] - - # Note: omit data/root/ to avoid duplicate listings - # any group in data/root/ must has an entry in meta/root/ + + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + if contains_array(self._store, path) or contains_group(self._store, path): + yield key def __len__(self): """Number of members.""" @@ -400,7 +343,6 @@ def __getstate__(self): "chunk_store": self._chunk_store, "cache_attrs": self._attrs.cache, "synchronizer": self._synchronizer, - "zarr_version": self._version, "meta_array": self._meta_array, } @@ -466,7 +408,6 @@ def __getitem__(self, item): chunk_store=self._chunk_store, synchronizer=self._synchronizer, cache_attrs=self.attrs.cache, - zarr_version=self._version, meta_array=self._meta_array, ) elif contains_group(self._store, path, explicit_only=True): @@ -477,25 +418,8 @@ def __getitem__(self, item): chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, - zarr_version=self._version, meta_array=self._meta_array, ) - elif self._version == 3: - implicit_group = meta_root + path + "/" - # non-empty folder in the metadata path implies an implicit group - if self._store.list_prefix(implicit_group): - return Group( - self._store, - read_only=self._read_only, - path=path, - chunk_store=self._chunk_store, - cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, - zarr_version=self._version, - meta_array=self._meta_array, - ) - else: - raise KeyError(item) else: raise KeyError(item) @@ -546,29 +470,11 @@ def group_keys(self): ['bar', 'foo'] """ - if self._version == 2: - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_group(self._store, path): - yield key - else: - dir_name = meta_root + self._path - group_sfx 
= ".group" + self._metadata_key_suffix - # The fact that we call sorted means this can't be a streaming generator. - # The keys are already in memory. - all_keys = sorted(listdir(self._store, dir_name)) - for key in all_keys: - if key.endswith(group_sfx): - key = key[: -len(group_sfx)] - if key in all_keys: - # otherwise we will double count this group - continue - path = self._key_prefix + key - if path.endswith(".array" + self._metadata_key_suffix): - # skip array keys - continue - if contains_group(self._store, path, explicit_only=False): - yield key + + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + if contains_group(self._store, path): + yield key def groups(self): """Return an iterator over (name, value) pairs for groups only. @@ -587,26 +493,10 @@ def groups(self): foo """ - if self._version == 2: - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_group(self._store, path, explicit_only=False): - yield ( - key, - Group( - self._store, - path=path, - read_only=self._read_only, - chunk_store=self._chunk_store, - cache_attrs=self.attrs.cache, - synchronizer=self._synchronizer, - zarr_version=self._version, - ), - ) - else: - for key in self.group_keys(): - path = self._key_prefix + key + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + if contains_group(self._store, path, explicit_only=False): yield ( key, Group( @@ -616,7 +506,6 @@ def groups(self): chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, - zarr_version=self._version, ), ) @@ -671,34 +560,14 @@ def arrays(self, recurse=False): return self._array_iter(keys_only=False, method="arrays", recurse=recurse) def _array_iter(self, keys_only, method, recurse): - if self._version == 2: - for key in sorted(listdir(self._store, self._path)): - path = self._key_prefix + key - if contains_array(self._store, path): - _key = key.rstrip("/") - yield _key if keys_only else (_key, self[key]) - elif recurse and contains_group(self._store, path): - group = self[key] - yield from getattr(group, method)(recurse=recurse) - else: - dir_name = meta_root + self._path - array_sfx = ".array" + self._metadata_key_suffix - group_sfx = ".group" + self._metadata_key_suffix - - for key in sorted(listdir(self._store, dir_name)): - if key.endswith(array_sfx): - key = key[: -len(array_sfx)] - _key = key.rstrip("/") - yield _key if keys_only else (_key, self[key]) - - path = self._key_prefix + key - assert not path.startswith("meta/") - if key.endswith(group_sfx): - # skip group metadata keys - continue - elif recurse and contains_group(self._store, path): - group = self[key] - yield from getattr(group, method)(recurse=recurse) + for key in sorted(listdir(self._store, self._path)): + path = self._key_prefix + key + if contains_array(self._store, path): + _key = key.rstrip("/") + yield _key if keys_only else (_key, self[key]) + elif recurse and contains_group(self._store, path): + group = self[key] + yield from getattr(group, method)(recurse=recurse) def visitvalues(self, func): """Run ``func`` on each object. 
@@ -978,7 +847,6 @@ def _create_group_nosync(self, name, overwrite=False): chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, - zarr_version=self._version, ) def create_groups(self, *names, **kwargs): @@ -1028,7 +896,6 @@ def _require_group_nosync(self, name, overwrite=False): chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer, - zarr_version=self._version, ) def require_groups(self, *names): @@ -1340,18 +1207,10 @@ def move(self, source, dest): self._write_op(self._move_nosync, source, dest) -def _normalize_store_arg(store, *, storage_options=None, mode="r", zarr_version=None): - if zarr_version is None: - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) - - if zarr_version != 2: - assert_zarr_v3_api_available() - +def _normalize_store_arg(store, *, storage_options=None, mode="r"): if store is None: - return MemoryStore() if zarr_version == 2 else MemoryStoreV3() - return normalize_store_arg( - store, storage_options=storage_options, mode=mode, zarr_version=zarr_version - ) + return MemoryStore() + return normalize_store_arg(store, storage_options=storage_options, mode=mode) def group( @@ -1362,7 +1221,6 @@ def group( synchronizer=None, path=None, *, - zarr_version=None, meta_array=None, ): """Create a group. @@ -1414,20 +1272,11 @@ def group( """ # handle polymorphic store arg - store = _normalize_store_arg(store, zarr_version=zarr_version, mode="w") - if zarr_version is None: - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) - - if zarr_version != 2: - assert_zarr_v3_api_available() + store = _normalize_store_arg(store, mode="w") path = normalize_storage_path(path) - requires_init = None - if zarr_version == 2: - requires_init = overwrite or not contains_group(store) - elif zarr_version == 3: - requires_init = overwrite or not contains_group(store, path) + requires_init = overwrite or not contains_group(store) if requires_init: init_group(store, overwrite=overwrite, chunk_store=chunk_store, path=path) @@ -1439,7 +1288,6 @@ def group( cache_attrs=cache_attrs, synchronizer=synchronizer, path=path, - zarr_version=zarr_version, meta_array=meta_array, ) @@ -1453,7 +1301,6 @@ def open_group( chunk_store=None, storage_options=None, *, - zarr_version=None, meta_array=None, ): """Open a group using file-mode-like semantics. 
@@ -1507,21 +1354,10 @@ def open_group( """ # handle polymorphic store arg - store = _normalize_store_arg( - store, storage_options=storage_options, mode=mode, zarr_version=zarr_version - ) - if zarr_version is None: - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) - - if zarr_version != 2: - assert_zarr_v3_api_available() + store = _normalize_store_arg(store, storage_options=storage_options, mode=mode) if chunk_store is not None: - chunk_store = _normalize_store_arg( - chunk_store, storage_options=storage_options, mode=mode, zarr_version=zarr_version - ) - if getattr(chunk_store, "_store_version", DEFAULT_ZARR_VERSION) != zarr_version: - raise ValueError("zarr_version of store and chunk_store must match") # pragma: no cover + chunk_store = _normalize_store_arg(chunk_store, storage_options=storage_options, mode=mode) path = normalize_storage_path(path) @@ -1560,6 +1396,5 @@ def open_group( synchronizer=synchronizer, path=path, chunk_store=chunk_store, - zarr_version=zarr_version, meta_array=meta_array, ) diff --git a/src/zarr/meta.py b/src/zarr/meta.py index 3a5435a174..7cca228a14 100644 --- a/src/zarr/meta.py +++ b/src/zarr/meta.py @@ -2,31 +2,21 @@ import itertools from collections.abc import Mapping -import numcodecs import numpy as np -from numcodecs.abc import Codec from zarr.errors import MetadataError from zarr.util import json_dumps, json_loads -from typing import cast, Union, Any, List, Mapping as MappingType, Optional, TYPE_CHECKING +from typing import cast, Union, Any, List, Mapping as MappingType, TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover - from zarr._storage.store import StorageTransformer + pass ZARR_FORMAT = 2 -ZARR_FORMAT_v3 = 3 # FLOAT_FILLS = {"NaN": np.nan, "Infinity": np.PINF, "-Infinity": np.NINF} -_default_entry_point_metadata_v3 = { - "zarr_format": "https://purl.org/zarr/spec/protocol/core/3.0", - "metadata_encoding": "https://purl.org/zarr/spec/protocol/core/3.0", - "metadata_key_suffix": ".json", - "extensions": [], -} - _v3_core_types = set("".join(d) for d in itertools.product("<>", ("u", "i", "f"), ("2", "4", "8"))) _v3_core_types = {"bool", "i1", "u1"} | _v3_core_types @@ -301,271 +291,6 @@ def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> return v -class Metadata3(Metadata2): - ZARR_FORMAT = ZARR_FORMAT_v3 - - @classmethod - def decode_dtype(cls, d, validate=True): - if isinstance(d, dict): - # extract the type from the extension info - try: - d = d["type"] - except KeyError: - raise KeyError("Extended dtype info must provide a key named 'type'.") - d = cls._decode_dtype_descr(d) - dtype = np.dtype(d) - if validate: - if dtype.str in (_v3_core_types | {"|b1", "|u1", "|i1"}): - # it is a core dtype of the v3 spec - pass - else: - # will raise if this is not a recognized extended dtype - get_extended_dtype_info(dtype) - return dtype - - @classmethod - def encode_dtype(cls, d): - s = d.str - if s == "|b1": - return "bool" - elif s == "|u1": - return "u1" - elif s == "|i1": - return "i1" - elif s in _v3_core_types: - return Metadata2.encode_dtype(d) - else: - # Check if this dtype corresponds to a supported extension to - # the v3 protocol. 
- return get_extended_dtype_info(np.dtype(d)) - - @classmethod - def decode_group_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: - meta = cls.parse_metadata(s) - # 1 / 0 - # # check metadata format version - # zarr_format = meta.get("zarr_format", None) - # if zarr_format != cls.ZARR_FORMAT: - # raise MetadataError("unsupported zarr format: %s" % zarr_format) - - assert "attributes" in meta - # meta = dict(attributes=meta['attributes']) - return meta - - # return json.loads(s) - - @classmethod - def encode_group_metadata(cls, meta=None) -> bytes: - # The ZARR_FORMAT should not be in the group metadata, but in the - # entry point metadata instead - # meta = dict(zarr_format=cls.ZARR_FORMAT) - if meta is None: - meta = {"attributes": {}} - meta = dict(attributes=meta.get("attributes", {})) - return json_dumps(meta) - - @classmethod - def encode_hierarchy_metadata(cls, meta=None) -> bytes: - if meta is None: - meta = _default_entry_point_metadata_v3 - elif set(meta.keys()) != { - "zarr_format", - "metadata_encoding", - "metadata_key_suffix", - "extensions", - }: - raise ValueError(f"Unexpected keys in metadata. meta={meta}") - return json_dumps(meta) - - @classmethod - def decode_hierarchy_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: - meta = cls.parse_metadata(s) - # check metadata format - # zarr_format = meta.get("zarr_format", None) - # if zarr_format != "https://purl.org/zarr/spec/protocol/core/3.0": - # raise MetadataError("unsupported zarr format: %s" % zarr_format) - if set(meta.keys()) != { - "zarr_format", - "metadata_encoding", - "metadata_key_suffix", - "extensions", - }: - raise ValueError(f"Unexpected keys in metadata. meta={meta}") - return meta - - @classmethod - def _encode_codec_metadata(cls, codec: Codec) -> Optional[Mapping]: - if codec is None: - return None - - # only support gzip for now - config = codec.get_config() - del config["id"] - uri = "https://purl.org/zarr/spec/codec/" - if isinstance(codec, numcodecs.GZip): - uri = uri + "gzip/1.0" - elif isinstance(codec, numcodecs.Zlib): - uri = uri + "zlib/1.0" - elif isinstance(codec, numcodecs.Blosc): - uri = uri + "blosc/1.0" - elif isinstance(codec, numcodecs.BZ2): - uri = uri + "bz2/1.0" - elif isinstance(codec, numcodecs.LZ4): - uri = uri + "lz4/1.0" - elif isinstance(codec, numcodecs.LZMA): - uri = uri + "lzma/1.0" - meta = { - "codec": uri, - "configuration": config, - } - return meta - - @classmethod - def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]: - if meta is None: - return None - - uri = "https://purl.org/zarr/spec/codec/" - conf = meta["configuration"] - if meta["codec"].startswith(uri + "gzip/"): - conf["id"] = "gzip" - elif meta["codec"].startswith(uri + "zlib/"): - conf["id"] = "zlib" - elif meta["codec"].startswith(uri + "blosc/"): - conf["id"] = "blosc" - elif meta["codec"].startswith(uri + "bz2/"): - conf["id"] = "bz2" - elif meta["codec"].startswith(uri + "lz4/"): - conf["id"] = "lz4" - elif meta["codec"].startswith(uri + "lzma/"): - conf["id"] = "lzma" - else: - raise NotImplementedError - - codec = numcodecs.get_codec(conf) - - return codec - - @classmethod - def _encode_storage_transformer_metadata( - cls, storage_transformer: "StorageTransformer" - ) -> Optional[Mapping]: - return { - "extension": storage_transformer.extension_uri, - "type": storage_transformer.type, - "configuration": storage_transformer.get_config(), - } - - @classmethod - def _decode_storage_transformer_metadata(cls, meta: Mapping) -> 
"StorageTransformer": - from zarr._storage.v3_storage_transformers import ( - ShardingStorageTransformer, - DummyStorageTransfomer, - ) - - # This might be changed to a proper registry in the future - KNOWN_STORAGE_TRANSFORMERS = [DummyStorageTransfomer, ShardingStorageTransformer] - - conf = meta.get("configuration", {}) - extension_uri = meta["extension"] - transformer_type = meta["type"] - - for StorageTransformerCls in KNOWN_STORAGE_TRANSFORMERS: - if StorageTransformerCls.extension_uri == extension_uri: - break - else: # pragma: no cover - raise NotImplementedError - - return StorageTransformerCls.from_config(transformer_type, conf) - - @classmethod - def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType[str, Any]: - meta = cls.parse_metadata(s) - - # extract array metadata fields - try: - dtype = cls.decode_dtype(meta["data_type"]) - if dtype.hasobject: - import numcodecs - - object_codec = numcodecs.get_codec(meta["attributes"]["filters"][0]) - else: - object_codec = None - fill_value = cls.decode_fill_value(meta["fill_value"], dtype, object_codec) - # TODO: remove dimension_separator? - - compressor = cls._decode_codec_metadata(meta.get("compressor", None)) - storage_transformers = meta.get("storage_transformers", ()) - storage_transformers = [ - cls._decode_storage_transformer_metadata(i) for i in storage_transformers - ] - extensions = meta.get("extensions", []) - meta = dict( - shape=tuple(meta["shape"]), - chunk_grid=dict( - type=meta["chunk_grid"]["type"], - chunk_shape=tuple(meta["chunk_grid"]["chunk_shape"]), - separator=meta["chunk_grid"]["separator"], - ), - data_type=dtype, - fill_value=fill_value, - chunk_memory_layout=meta["chunk_memory_layout"], - attributes=meta["attributes"], - extensions=extensions, - ) - # compressor field should be absent when there is no compression - if compressor: - meta["compressor"] = compressor - if storage_transformers: - meta["storage_transformers"] = storage_transformers - - except Exception as e: - raise MetadataError("error decoding metadata: %s" % e) - else: - return meta - - @classmethod - def encode_array_metadata(cls, meta: MappingType[str, Any]) -> bytes: - dtype = meta["data_type"] - sdshape = () - if dtype.subdtype is not None: - dtype, sdshape = dtype.subdtype - dimension_separator = meta.get("dimension_separator") - if dtype.hasobject: - import numcodecs - - object_codec = numcodecs.get_codec(meta["attributes"]["filters"][0]) - else: - object_codec = None - - compressor = cls._encode_codec_metadata(meta.get("compressor", None)) - storage_transformers = meta.get("storage_transformers", ()) - storage_transformers = [ - cls._encode_storage_transformer_metadata(i) for i in storage_transformers - ] - extensions = meta.get("extensions", []) - meta = dict( - shape=meta["shape"] + sdshape, - chunk_grid=dict( - type=meta["chunk_grid"]["type"], - chunk_shape=tuple(meta["chunk_grid"]["chunk_shape"]), - separator=meta["chunk_grid"]["separator"], - ), - data_type=cls.encode_dtype(dtype), - fill_value=encode_fill_value(meta["fill_value"], dtype, object_codec), - chunk_memory_layout=meta["chunk_memory_layout"], - attributes=meta.get("attributes", {}), - extensions=extensions, - ) - if compressor: - meta["compressor"] = compressor - if dimension_separator: - meta["dimension_separator"] = dimension_separator - if storage_transformers: - meta["storage_transformers"] = storage_transformers - return json_dumps(meta) - - parse_metadata = Metadata2.parse_metadata decode_array_metadata = Metadata2.decode_array_metadata 
encode_array_metadata = Metadata2.encode_array_metadata diff --git a/src/zarr/storage.py b/src/zarr/storage.py index e3a43d26c8..a7bd22a6b9 100644 --- a/src/zarr/storage.py +++ b/src/zarr/storage.py @@ -35,7 +35,6 @@ import uuid import time -from numcodecs.abc import Codec from numcodecs.compat import ensure_bytes, ensure_text, ensure_contiguous_ndarray_like from numcodecs.registry import codec_registry from zarr.context import Context @@ -66,21 +65,15 @@ from zarr._storage.absstore import ABSStore # noqa: F401 from zarr._storage.store import ( # noqa: F401 - _get_hierarchy_metadata, - _get_metadata_suffix, _listdir_from_keys, _rename_from_keys, - _rename_metadata_v3, _rmdir_from_keys, - _rmdir_from_keys_v3, _path_to_prefix, _prefix_to_array_key, _prefix_to_group_key, array_meta_key, attrs_key, - data_root, group_meta_key, - meta_root, DEFAULT_ZARR_VERSION, BaseStore, Store, @@ -122,28 +115,10 @@ def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> b path = normalize_storage_path(path) prefix = _path_to_prefix(path) key = _prefix_to_group_key(store, prefix) - store_version = getattr(store, "_store_version", 2) - if store_version == 2 or explicit_only: - return key in store - else: - if key in store: - return True - # for v3, need to also handle implicit groups - - sfx = _get_metadata_suffix(store) # type: ignore - implicit_prefix = key.replace(".group" + sfx, "") - if not implicit_prefix.endswith("/"): - implicit_prefix += "/" - if store.list_prefix(implicit_prefix): # type: ignore - return True - return False + return key in store -def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseStore: - # default to v2 store for backward compatibility - zarr_version = getattr(store, "_store_version", 2) - if zarr_version != 2: - raise ValueError("store must be a version 2 store") +def normalize_store_arg(store: Any, storage_options=None, mode="r") -> BaseStore: if store is None: store = KVStore(dict()) return store @@ -180,38 +155,17 @@ def _normalize_store_arg_v2(store: Any, storage_options=None, mode="r") -> BaseS return store -def normalize_store_arg( - store: Any, storage_options=None, mode="r", *, zarr_version=None -) -> BaseStore: - if zarr_version is None: - # default to v2 store for backward compatibility - zarr_version = getattr(store, "_store_version", DEFAULT_ZARR_VERSION) - if zarr_version == 2: - normalize_store = _normalize_store_arg_v2 - elif zarr_version == 3: - from zarr._storage.v3 import _normalize_store_arg_v3 - - normalize_store = _normalize_store_arg_v3 - else: - raise ValueError("zarr_version must be either 2 or 3") - return normalize_store(store, storage_options, mode) - - def rmdir(store: StoreLike, path: Path = None): """Remove all items under the given path. 
If `store` provides a `rmdir` method, this will be called, otherwise will fall back to implementation via the `Store` interface.""" path = normalize_storage_path(path) - store_version = getattr(store, "_store_version", 2) if hasattr(store, "rmdir") and store.is_erasable(): # type: ignore # pass through store.rmdir(path) else: # slow version, delete one key at a time - if store_version == 2: - _rmdir_from_keys(store, path) - else: - _rmdir_from_keys_v3(store, path) # type: ignore + _rmdir_from_keys(store, path) def rename(store: Store, src_path: Path, dst_path: Path): @@ -254,21 +208,10 @@ def _getsize(store: BaseStore, path: Path = None) -> int: else: path = "" if path is None else normalize_storage_path(path) size = 0 - store_version = getattr(store, "_store_version", 2) - if store_version == 3: - if path == "": - # have to list the root folders without trailing / in this case - members = store.list_prefix(data_root.rstrip("/")) # type: ignore - members += store.list_prefix(meta_root.rstrip("/")) # type: ignore - else: - members = store.list_prefix(data_root + path) # type: ignore - members += store.list_prefix(meta_root + path) # type: ignore - # also include zarr.json? - # members += ['zarr.json'] - else: - members = listdir(store, path) - prefix = _path_to_prefix(path) - members = [prefix + k for k in members] + + members = listdir(store, path) + prefix = _path_to_prefix(path) + members = [prefix + k for k in members] for k in members: try: v = store[k] @@ -437,13 +380,8 @@ def init_array( path = normalize_storage_path(path) # ensure parent group initialized - store_version = getattr(store, "_store_version", 2) - if store_version < 3: - _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) - if store_version == 3 and "zarr.json" not in store: - # initialize with default zarr.json entry level metadata - store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore + _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) if not compressor: # compatibility with legacy tests using compressor=[] @@ -482,50 +420,20 @@ def _init_array_metadata( dimension_separator=None, storage_transformers=(), ): - store_version = getattr(store, "_store_version", 2) - path = normalize_storage_path(path) # guard conditions if overwrite: - if store_version == 2: - # attempt to delete any pre-existing array in store - rmdir(store, path) - if chunk_store is not None: - rmdir(chunk_store, path) - else: - group_meta_key = _prefix_to_group_key(store, _path_to_prefix(path)) - array_meta_key = _prefix_to_array_key(store, _path_to_prefix(path)) - data_prefix = data_root + _path_to_prefix(path) - - # attempt to delete any pre-existing array in store - if array_meta_key in store: - store.erase(array_meta_key) # type: ignore - if group_meta_key in store: - store.erase(group_meta_key) # type: ignore - store.erase_prefix(data_prefix) # type: ignore - if chunk_store is not None: - chunk_store.erase_prefix(data_prefix) # type: ignore - - if "/" in path: - # path is a subfolder of an existing array, remove that array - parent_path = "/".join(path.split("/")[:-1]) - sfx = _get_metadata_suffix(store) # type: ignore - array_key = meta_root + parent_path + ".array" + sfx - if array_key in store: - store.erase(array_key) # type: ignore + # attempt to delete any pre-existing array in store + rmdir(store, path) + if chunk_store is not None: + rmdir(chunk_store, path) if not overwrite: if contains_array(store, path): raise 
ContainsArrayError(path) - elif contains_group(store, path, explicit_only=False): + if contains_group(store, path, explicit_only=False): raise ContainsGroupError(path) - elif store_version == 3: - if "/" in path: - # cannot create an array within an existing array path - parent_path = "/".join(path.split("/")[:-1]) - if contains_array(store, parent_path): - raise ContainsArrayError(path) # normalize metadata dtype, object_codec = normalize_dtype(dtype, object_codec) @@ -536,7 +444,7 @@ def _init_array_metadata( fill_value = normalize_fill_value(fill_value, dtype) # optional array metadata - if dimension_separator is None and store_version == 2: + if dimension_separator is None: dimension_separator = getattr(store, "_dimension_separator", None) dimension_separator = normalize_dimension_separator(dimension_separator) @@ -553,16 +461,10 @@ def _init_array_metadata( # obtain compressor config compressor_config = None if compressor: - if store_version == 2: - try: - compressor_config = compressor.get_config() - except AttributeError as e: - raise BadCompressorError(compressor) from e - elif not isinstance(compressor, Codec): - raise ValueError("expected a numcodecs Codec for compressor") - # TODO: alternatively, could autoconvert str to a Codec - # e.g. 'zlib' -> numcodec.Zlib object - # compressor = numcodecs.get_codec({'id': compressor}) + try: + compressor_config = compressor.get_config() + except AttributeError as e: + raise BadCompressorError(compressor) from e # obtain filters config if filters: @@ -596,33 +498,16 @@ def _init_array_metadata( filters_config = None # type: ignore # initialize metadata - # TODO: don't store redundant dimension_separator for v3? - _compressor = compressor_config if store_version == 2 else compressor + _compressor = compressor_config meta = dict( shape=shape, compressor=_compressor, fill_value=fill_value, dimension_separator=dimension_separator, ) - if store_version < 3: - meta.update(dict(chunks=chunks, dtype=dtype, order=order, filters=filters_config)) - assert not storage_transformers - else: - if dimension_separator is None: - dimension_separator = "/" - if filters_config: - attributes = {"filters": filters_config} - else: - attributes = {} - meta.update( - dict( - chunk_grid=dict(type="regular", chunk_shape=chunks, separator=dimension_separator), - chunk_memory_layout=order, - data_type=dtype, - attributes=attributes, - storage_transformers=storage_transformers, - ) - ) + + meta.update(dict(chunks=chunks, dtype=dtype, order=order, filters=filters_config)) + assert not storage_transformers key = _prefix_to_array_key(store, _path_to_prefix(path)) if hasattr(store, "_metadata_class"): @@ -661,24 +546,11 @@ def init_group( # normalize path path = normalize_storage_path(path) - store_version = getattr(store, "_store_version", 2) - if store_version < 3: - # ensure parent group initialized - _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) - - if store_version == 3 and "zarr.json" not in store: - # initialize with default zarr.json entry level metadata - store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore + _require_parent_group(path, store=store, chunk_store=chunk_store, overwrite=overwrite) # initialise metadata _init_group_metadata(store=store, overwrite=overwrite, path=path, chunk_store=chunk_store) - if store_version == 3: - # TODO: Should initializing a v3 group also create a corresponding - # empty folder under data/root/? 
I think probably not until there - # is actual data written there. - pass - def _init_group_metadata( store: StoreLike, @@ -686,50 +558,25 @@ def _init_group_metadata( path: Optional[str] = None, chunk_store: Optional[StoreLike] = None, ): - store_version = getattr(store, "_store_version", 2) path = normalize_storage_path(path) # guard conditions if overwrite: - if store_version == 2: - # attempt to delete any pre-existing items in store - rmdir(store, path) - if chunk_store is not None: - rmdir(chunk_store, path) - else: - group_meta_key = _prefix_to_group_key(store, _path_to_prefix(path)) - array_meta_key = _prefix_to_array_key(store, _path_to_prefix(path)) - data_prefix = data_root + _path_to_prefix(path) - meta_prefix = meta_root + _path_to_prefix(path) - - # attempt to delete any pre-existing array in store - if array_meta_key in store: - store.erase(array_meta_key) # type: ignore - if group_meta_key in store: - store.erase(group_meta_key) # type: ignore - store.erase_prefix(data_prefix) # type: ignore - store.erase_prefix(meta_prefix) # type: ignore - if chunk_store is not None: - chunk_store.erase_prefix(data_prefix) # type: ignore + # attempt to delete any pre-existing items in store + rmdir(store, path) + if chunk_store is not None: + rmdir(chunk_store, path) if not overwrite: if contains_array(store, path): raise ContainsArrayError(path) elif contains_group(store, path): raise ContainsGroupError(path) - elif store_version == 3 and "/" in path: - # cannot create a group overlapping with an existing array name - parent_path = "/".join(path.split("/")[:-1]) - if contains_array(store, parent_path): - raise ContainsArrayError(path) # initialize metadata # N.B., currently no metadata properties are needed, however there may # be in future - if store_version == 3: - meta = {"attributes": {}} # type: ignore - else: - meta = {} + meta: dict[str, Any] = {} key = _prefix_to_group_key(store, _path_to_prefix(path)) if hasattr(store, "_metadata_class"): store[key] = store._metadata_class.encode_group_metadata(meta) diff --git a/tests/test_attrs.py b/tests/test_attrs.py index 7e3377f664..2575163840 100644 --- a/tests/test_attrs.py +++ b/tests/test_attrs.py @@ -4,34 +4,24 @@ import pytest import zarr -from zarr._storage.store import meta_root from zarr.attrs import Attributes from zarr.storage import KVStore, DirectoryStore -from zarr._storage.v3 import KVStoreV3 -from .util import CountingDict, CountingDictV3 +from .util import CountingDict from zarr.hierarchy import group -@pytest.fixture(params=[2, 3]) -def zarr_version(request): - return request.param - - -def _init_store(version): - """Use a plain dict() for v2, but KVStoreV3 otherwise.""" - if version == 2: - return dict() - return KVStoreV3(dict()) +def _init_store(): + return dict() class TestAttributes: - def init_attributes(self, store, read_only=False, cache=True, zarr_version=2): - root = ".z" if zarr_version == 2 else meta_root + def init_attributes(self, store, read_only=False, cache=True): + root = ".z" return Attributes(store, key=root + "attrs", read_only=read_only, cache=cache) - def test_storage(self, zarr_version): - store = _init_store(zarr_version) - root = ".z" if zarr_version == 2 else meta_root + def test_storage(self): + store = _init_store() + root = ".z" attrs_key = root + "attrs" a = Attributes(store=store, key=attrs_key) assert isinstance(a.store, KVStore) @@ -44,11 +34,9 @@ def test_storage(self, zarr_version): assert attrs_key in store assert isinstance(store[attrs_key], bytes) d = json.loads(str(store[attrs_key], 
"utf-8")) - if zarr_version == 3: - d = d["attributes"] assert dict(foo="bar", baz=42) == d - def test_utf8_encoding(self, zarr_version): + def test_utf8_encoding(self): project_root = pathlib.Path(zarr.__file__).resolve().parent.parent fixdir = project_root / "fixture" testdir = fixdir / "utf8attrs" @@ -64,9 +52,9 @@ def test_utf8_encoding(self, zarr_version): fixture = group(store=DirectoryStore(str(fixdir))) assert fixture["utf8attrs"].attrs.asdict() == dict(foo="た") - def test_get_set_del_contains(self, zarr_version): - store = _init_store(zarr_version) - a = self.init_attributes(store, zarr_version=zarr_version) + def test_get_set_del_contains(self): + store = _init_store() + a = self.init_attributes(store) assert "foo" not in a a["foo"] = "bar" a["baz"] = 42 @@ -80,9 +68,9 @@ def test_get_set_del_contains(self, zarr_version): # noinspection PyStatementEffect a["foo"] - def test_update_put(self, zarr_version): - store = _init_store(zarr_version) - a = self.init_attributes(store, zarr_version=zarr_version) + def test_update_put(self): + store = _init_store() + a = self.init_attributes(store) assert "foo" not in a assert "bar" not in a assert "baz" not in a @@ -97,9 +85,9 @@ def test_update_put(self, zarr_version): assert a["bar"] == 84 assert "baz" not in a - def test_iterators(self, zarr_version): - store = _init_store(zarr_version) - a = self.init_attributes(store, zarr_version=zarr_version) + def test_iterators(self): + store = _init_store() + a = self.init_attributes(store) assert 0 == len(a) assert set() == set(a) assert set() == set(a.keys()) @@ -115,15 +103,10 @@ def test_iterators(self, zarr_version): assert {"bar", 42} == set(a.values()) assert {("foo", "bar"), ("baz", 42)} == set(a.items()) - def test_read_only(self, zarr_version): - store = _init_store(zarr_version) - a = self.init_attributes(store, read_only=True, zarr_version=zarr_version) - if zarr_version == 2: - store[".zattrs"] = json.dumps(dict(foo="bar", baz=42)).encode("ascii") - else: - store["meta/root/attrs"] = json.dumps(dict(attributes=dict(foo="bar", baz=42))).encode( - "ascii" - ) + def test_read_only(self): + store = _init_store() + a = self.init_attributes(store, read_only=True) + store[".zattrs"] = json.dumps(dict(foo="bar", baz=42)).encode("ascii") assert a["foo"] == "bar" assert a["baz"] == 42 with pytest.raises(PermissionError): @@ -133,9 +116,9 @@ def test_read_only(self, zarr_version): with pytest.raises(PermissionError): a.update(foo="quux") - def test_key_completions(self, zarr_version): - store = _init_store(zarr_version) - a = self.init_attributes(store, zarr_version=zarr_version) + def test_key_completions(self): + store = _init_store() + a = self.init_attributes(store) d = a._ipython_key_completions_() assert "foo" not in d assert "123" not in d @@ -150,23 +133,20 @@ def test_key_completions(self, zarr_version): assert "asdf;" in d assert "baz" not in d - def test_caching_on(self, zarr_version): + def test_caching_on(self): # caching is turned on by default # setup store - store = CountingDict() if zarr_version == 2 else CountingDictV3() - attrs_key = ".zattrs" if zarr_version == 2 else "meta/root/attrs" + store = CountingDict() + attrs_key = ".zattrs" assert 0 == store.counter["__getitem__", attrs_key] assert 0 == store.counter["__setitem__", attrs_key] - if zarr_version == 2: - store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") - else: - store[attrs_key] = json.dumps(dict(attributes=dict(foo="xxx", bar=42))).encode("ascii") + store[attrs_key] = json.dumps(dict(foo="xxx", 
bar=42)).encode("ascii") assert 0 == store.counter["__getitem__", attrs_key] assert 1 == store.counter["__setitem__", attrs_key] # setup attributes - a = self.init_attributes(store, zarr_version=zarr_version) + a = self.init_attributes(store) # test __getitem__ causes all attributes to be cached assert a["foo"] == "xxx" @@ -178,7 +158,7 @@ def test_caching_on(self, zarr_version): # test __setitem__ updates the cache a["foo"] = "yyy" - get_cnt = 2 if zarr_version == 2 else 3 + get_cnt = 2 assert get_cnt == store.counter["__getitem__", attrs_key] assert 2 == store.counter["__setitem__", attrs_key] assert a["foo"] == "yyy" @@ -187,7 +167,7 @@ def test_caching_on(self, zarr_version): # test update() updates the cache a.update(foo="zzz", bar=84) - get_cnt = 3 if zarr_version == 2 else 5 + get_cnt = 3 assert get_cnt == store.counter["__getitem__", attrs_key] assert 3 == store.counter["__setitem__", attrs_key] assert a["foo"] == "zzz" @@ -205,7 +185,7 @@ def test_caching_on(self, zarr_version): # test __delitem__ updates the cache del a["bar"] - get_cnt = 4 if zarr_version == 2 else 7 + get_cnt = 4 assert get_cnt == store.counter["__getitem__", attrs_key] assert 4 == store.counter["__setitem__", attrs_key] assert "bar" not in a @@ -213,35 +193,28 @@ def test_caching_on(self, zarr_version): assert 4 == store.counter["__setitem__", attrs_key] # test refresh() - if zarr_version == 2: - store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") - else: - store[attrs_key] = json.dumps(dict(attributes=dict(foo="xxx", bar=42))).encode("ascii") + store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") assert get_cnt == store.counter["__getitem__", attrs_key] a.refresh() - get_cnt = 5 if zarr_version == 2 else 8 + get_cnt = 5 assert get_cnt == store.counter["__getitem__", attrs_key] assert a["foo"] == "xxx" assert get_cnt == store.counter["__getitem__", attrs_key] assert a["bar"] == 42 assert get_cnt == store.counter["__getitem__", attrs_key] - def test_caching_off(self, zarr_version): + def test_caching_off(self): # setup store - store = CountingDict() if zarr_version == 2 else CountingDictV3() - attrs_key = ".zattrs" if zarr_version == 2 else "meta/root/attrs" + store = CountingDict() + attrs_key = ".zattrs" assert 0 == store.counter["__getitem__", attrs_key] assert 0 == store.counter["__setitem__", attrs_key] - - if zarr_version == 2: - store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") - else: - store[attrs_key] = json.dumps(dict(attributes=dict(foo="xxx", bar=42))).encode("ascii") + store[attrs_key] = json.dumps(dict(foo="xxx", bar=42)).encode("ascii") assert 0 == store.counter["__getitem__", attrs_key] assert 1 == store.counter["__setitem__", attrs_key] # setup attributes - a = self.init_attributes(store, cache=False, zarr_version=zarr_version) + a = self.init_attributes(store, cache=False) # test __getitem__ assert a["foo"] == "xxx" @@ -253,38 +226,38 @@ def test_caching_off(self, zarr_version): # test __setitem__ a["foo"] = "yyy" - get_cnt = 4 if zarr_version == 2 else 5 + get_cnt = 4 assert get_cnt == store.counter["__getitem__", attrs_key] assert 2 == store.counter["__setitem__", attrs_key] assert a["foo"] == "yyy" - get_cnt = 5 if zarr_version == 2 else 6 + get_cnt = 5 assert get_cnt == store.counter["__getitem__", attrs_key] assert 2 == store.counter["__setitem__", attrs_key] # test update() a.update(foo="zzz", bar=84) - get_cnt = 6 if zarr_version == 2 else 8 + get_cnt = 6 assert get_cnt == store.counter["__getitem__", attrs_key] assert 3 == 
store.counter["__setitem__", attrs_key] assert a["foo"] == "zzz" assert a["bar"] == 84 - get_cnt = 8 if zarr_version == 2 else 10 + get_cnt = 8 assert get_cnt == store.counter["__getitem__", attrs_key] assert 3 == store.counter["__setitem__", attrs_key] # test __contains__ assert "foo" in a - get_cnt = 9 if zarr_version == 2 else 11 + get_cnt = 9 assert get_cnt == store.counter["__getitem__", attrs_key] assert 3 == store.counter["__setitem__", attrs_key] assert "spam" not in a - get_cnt = 10 if zarr_version == 2 else 12 + get_cnt = 10 assert get_cnt == store.counter["__getitem__", attrs_key] assert 3 == store.counter["__setitem__", attrs_key] - def test_wrong_keys(self, zarr_version): - store = _init_store(zarr_version) - a = self.init_attributes(store, zarr_version=zarr_version) + def test_wrong_keys(self): + store = _init_store() + a = self.init_attributes(store) warning_msg = "only attribute keys of type 'string' will be allowed in the future" diff --git a/tests/test_convenience.py b/tests/test_convenience.py index 7cb4db7a35..d50533e847 100644 --- a/tests/test_convenience.py +++ b/tests/test_convenience.py @@ -27,53 +27,29 @@ from zarr.storage import ( ConsolidatedMetadataStore, FSStore, - KVStore, MemoryStore, atexit_rmtree, - data_root, - meta_root, getsize, ) -from zarr._storage.store import v3_api_available -from zarr._storage.v3 import ( - ConsolidatedMetadataStoreV3, - DirectoryStoreV3, - FSStoreV3, - KVStoreV3, - MemoryStoreV3, - SQLiteStoreV3, -) -from .util import have_fsspec - -_VERSIONS = (2, 3) if v3_api_available else (2,) - - -def _init_creation_kwargs(zarr_version): - kwargs = {"zarr_version": zarr_version} - if zarr_version == 3: - kwargs["path"] = "dataset" - return kwargs -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_open_array(path_type, zarr_version): +def test_open_array(path_type): store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) - kwargs = _init_creation_kwargs(zarr_version) # open array, create if doesn't exist - z = open(store, mode="a", shape=100, **kwargs) + z = open(store, mode="a", shape=100) assert isinstance(z, Array) assert z.shape == (100,) # open array, overwrite - z = open(store, mode="w", shape=200, **kwargs) + z = open(store, mode="w", shape=200) assert isinstance(z, Array) assert z.shape == (200,) # open array, read-only - z = open(store, mode="r", **kwargs) + z = open(store, mode="r") assert isinstance(z, Array) assert z.shape == (200,) assert z.read_only @@ -83,79 +59,46 @@ def test_open_array(path_type, zarr_version): open("doesnotexist", mode="r") -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_open_group(path_type, zarr_version): +def test_open_group(path_type): store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) store = path_type(store) - kwargs = _init_creation_kwargs(zarr_version) # open group, create if doesn't exist - g = open(store, mode="a", **kwargs) + g = open(store, mode="a") g.create_group("foo") assert isinstance(g, Group) assert "foo" in g # open group, overwrite - g = open(store, mode="w", **kwargs) + g = open(store, mode="w") assert isinstance(g, Group) assert "foo" not in g # open group, read-only - g = open(store, mode="r", **kwargs) + g = open(store, mode="r") assert isinstance(g, Group) assert g.read_only -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_save_errors(zarr_version): +def test_save_errors(): with pytest.raises(ValueError): # no arrays provided - save_group("data/group.zarr", zarr_version=zarr_version) + 
save_group("data/group.zarr") with pytest.raises(TypeError): # no array provided - save_array("data/group.zarr", zarr_version=zarr_version) + save_array("data/group.zarr") with pytest.raises(ValueError): # no arrays provided - save("data/group.zarr", zarr_version=zarr_version) - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -def test_zarr_v3_save_multiple_unnamed(): - x = np.ones(8) - y = np.zeros(8) - store = KVStoreV3(dict()) - # no path provided - save_group(store, x, y, path="dataset", zarr_version=3) - # names become arr_{i} for unnamed *args - assert data_root + "dataset/arr_0/c0" in store - assert data_root + "dataset/arr_1/c0" in store - assert meta_root + "dataset/arr_0.array.json" in store - assert meta_root + "dataset/arr_1.array.json" in store - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -def test_zarr_v3_save_errors(): - x = np.ones(8) - with pytest.raises(ValueError): - # no path provided - save_group("data/group.zr3", x, zarr_version=3) - with pytest.raises(ValueError): - # no path provided - save_array("data/group.zr3", x, zarr_version=3) - with pytest.raises(ValueError): - # no path provided - save("data/group.zr3", x, zarr_version=3) + save("data/group.zarr") -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_lazy_loader(zarr_version): +def test_lazy_loader(): foo = np.arange(100) bar = np.arange(100, 0, -1) - store = "data/group.zarr" if zarr_version == 2 else "data/group.zr3" - kwargs = _init_creation_kwargs(zarr_version) - save(store, foo=foo, bar=bar, **kwargs) - loader = load(store, **kwargs) + store = "data/group.zarr" + save(store, foo=foo, bar=bar) + loader = load(store) assert "foo" in loader assert "bar" in loader assert "baz" not in loader @@ -166,18 +109,16 @@ def test_lazy_loader(zarr_version): assert "LazyLoader: " in repr(loader) -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_load_array(zarr_version): +def test_load_array(): foo = np.arange(100) bar = np.arange(100, 0, -1) - store = "data/group.zarr" if zarr_version == 2 else "data/group.zr3" - kwargs = _init_creation_kwargs(zarr_version) - save(store, foo=foo, bar=bar, **kwargs) + store = "data/group.zarr" + save(store, foo=foo, bar=bar) # can also load arrays directly into a numpy array for array_name in ["foo", "bar"]: - array_path = "dataset/" + array_name if zarr_version == 3 else array_name - array = load(store, path=array_path, zarr_version=zarr_version) + array_path = array_name + array = load(store, path=array_path) assert isinstance(array, np.ndarray) if array_name == "foo": assert_array_equal(foo, array) @@ -185,10 +126,8 @@ def test_load_array(zarr_version): assert_array_equal(bar, array) -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_tree(zarr_version): - kwargs = _init_creation_kwargs(zarr_version) - g1 = zarr.group(**kwargs) +def test_tree(): + g1 = zarr.group() g1.create_group("foo") g3 = g1.create_group("bar") g3.create_group("baz") @@ -198,16 +137,13 @@ def test_tree(zarr_version): assert str(zarr.tree(g1)) == str(g1.tree()) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("stores_from_path", [False, True]) @pytest.mark.parametrize( "with_chunk_store,listable", [(False, True), (True, True), (False, False)], ids=["default-listable", "with_chunk_store-listable", "default-unlistable"], ) -def test_consolidate_metadata( - with_chunk_store, zarr_version, listable, monkeypatch, stores_from_path -): +def test_consolidate_metadata(with_chunk_store, listable, monkeypatch, 
stores_from_path): # setup initial data if stores_from_path: store = tempfile.mkdtemp() @@ -217,17 +153,11 @@ def test_consolidate_metadata( atexit.register(atexit_rmtree, chunk_store) else: chunk_store = None - version_kwarg = {"zarr_version": zarr_version} else: - if zarr_version == 2: - store = MemoryStore() - chunk_store = MemoryStore() if with_chunk_store else None - elif zarr_version == 3: - store = MemoryStoreV3() - chunk_store = MemoryStoreV3() if with_chunk_store else None - version_kwarg = {} - path = "dataset" if zarr_version == 3 else None - z = group(store, chunk_store=chunk_store, path=path, **version_kwarg) + store = MemoryStore() + chunk_store = MemoryStore() if with_chunk_store else None + path = None + z = group(store, chunk_store=chunk_store, path=path) # Reload the actual store implementation in case a str path was used store_to_copy = z.store @@ -248,41 +178,22 @@ def test_consolidate_metadata( else: store_class = store - if zarr_version == 3: - # error on v3 if path not provided - with pytest.raises(ValueError): - consolidate_metadata(store_class, path=None) - - with pytest.raises(ValueError): - consolidate_metadata(store_class, path="") - # perform consolidation out = consolidate_metadata(store_class, path=path) assert isinstance(out, Group) assert ["g1", "g2"] == list(out) if not stores_from_path: - if zarr_version == 2: - assert isinstance(out._store, ConsolidatedMetadataStore) - assert ".zmetadata" in store - meta_keys = [ - ".zgroup", - "g1/.zgroup", - "g2/.zgroup", - "g2/.zattrs", - "g2/arr/.zarray", - "g2/arr/.zattrs", - ] - else: - assert isinstance(out._store, ConsolidatedMetadataStoreV3) - assert "meta/root/consolidated/.zmetadata" in store - meta_keys = [ - "zarr.json", - meta_root + "dataset.group.json", - meta_root + "dataset/g1.group.json", - meta_root + "dataset/g2.group.json", - meta_root + "dataset/g2/arr.array.json", - "meta/root/consolidated.group.json", - ] + assert isinstance(out._store, ConsolidatedMetadataStore) + assert ".zmetadata" in store + meta_keys = [ + ".zgroup", + "g1/.zgroup", + "g2/.zgroup", + "g2/.zattrs", + "g2/arr/.zarray", + "g2/arr/.zattrs", + ] + for key in meta_keys: del store[key] @@ -293,11 +204,7 @@ def test_consolidate_metadata( monkeypatch.setattr(fs_memory.MemoryFileSystem, "isdir", lambda x, y: False) monkeypatch.delattr(fs_memory.MemoryFileSystem, "ls") fs = fs_memory.MemoryFileSystem() - if zarr_version == 2: - store_to_open = FSStore("", fs=fs) - else: - store_to_open = FSStoreV3("", fs=fs) - + store_to_open = FSStore("", fs=fs) # copy original store to new unlistable store store_to_open.update(store_to_copy) @@ -305,7 +212,7 @@ def test_consolidate_metadata( store_to_open = store # open consolidated - z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path, **version_kwarg) + z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path) assert ["g1", "g2"] == list(z2) assert "world" == z2.g2.attrs["hello"] assert 1 == z2.g2.arr.attrs["data"] @@ -320,26 +227,17 @@ def test_consolidate_metadata( if stores_from_path: # path string is not a BaseStore subclass so cannot be used to # initialize a ConsolidatedMetadataStore.
- if zarr_version == 2: - with pytest.raises(ValueError): - cmd = ConsolidatedMetadataStore(store) - elif zarr_version == 3: - with pytest.raises(ValueError): - cmd = ConsolidatedMetadataStoreV3(store) + + with pytest.raises(ValueError): + cmd = ConsolidatedMetadataStore(store) else: # tests del/write on the store - if zarr_version == 2: - cmd = ConsolidatedMetadataStore(store) - with pytest.raises(PermissionError): - del cmd[".zgroup"] - with pytest.raises(PermissionError): - cmd[".zgroup"] = None - else: - cmd = ConsolidatedMetadataStoreV3(store) - with pytest.raises(PermissionError): - del cmd[meta_root + "dataset.group.json"] - with pytest.raises(PermissionError): - cmd[meta_root + "dataset.group.json"] = None + + cmd = ConsolidatedMetadataStore(store) + with pytest.raises(PermissionError): + del cmd[".zgroup"] + with pytest.raises(PermissionError): + cmd[".zgroup"] = None # test getsize on the store assert isinstance(getsize(cmd), Integral) @@ -377,7 +275,6 @@ def test_consolidate_metadata( path=path, cache_attrs=True, synchronizer=None, - **version_kwarg, ) @@ -469,7 +366,7 @@ def test_excludes_includes(self): copy_store(source, dest, excludes=excludes) assert len(dest) == 2 - root = "" if self._version == 2 else meta_root + root = "" assert root + "foo" not in dest # multiple excludes @@ -500,7 +397,7 @@ def test_dry_run(self): def test_if_exists(self): source = self.source dest = self._get_dest_store() - root = "" if self._version == 2 else meta_root + root = "" dest[root + "bar/baz"] = b"mmm" # default ('raise') @@ -530,27 +427,6 @@ def test_if_exists(self): copy_store(source, dest, if_exists="foobar") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestCopyStoreV3(TestCopyStore): - _version = 3 - - def setUp(self): - source = KVStoreV3(dict()) - source["meta/root/foo"] = b"xxx" - source["meta/root/bar/baz"] = b"yyy" - source["meta/root/bar/qux"] = b"zzz" - self.source = source - - def _get_dest_store(self): - return KVStoreV3(dict()) - - def test_mismatched_store_versions(self): - # cannot copy between stores of mixed Zarr versions - dest = KVStore(dict()) - with pytest.raises(ValueError): - copy_store(self.source, dest) - - def check_copied_array(original, copied, without_attrs=False, expect_props=None): # setup source_h5py = original.__module__.startswith("h5py.") @@ -672,28 +548,6 @@ def test_copy_all(): assert destination_group.subgroup.attrs["info"] == "sub attrs" -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -def test_copy_all_v3(): - """ - https://github.com/zarr-developers/zarr-python/issues/269 - - copy_all used to not copy attributes as `.keys()` - - """ - original_group = zarr.group(store=MemoryStoreV3(), path="group1", overwrite=True) - original_group.create_group("subgroup") - - destination_group = zarr.group(store=MemoryStoreV3(), path="group2", overwrite=True) - - # copy from memory to directory store - copy_all( - original_group, - destination_group, - dry_run=False, - ) - assert "subgroup" in destination_group - - class TestCopy: @pytest.fixture(params=[False, True], ids=["zarr", "hdf5"]) def source(self, request, tmpdir): @@ -948,100 +802,3 @@ def test_logging(self, source, dest, tmpdir): # bad option with pytest.raises(TypeError): copy(source["foo"], dest, dry_run=True, log=True) - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestCopyV3(TestCopy): - @pytest.fixture(params=["zarr", "hdf5"]) - def source(self, request, tmpdir): - def prep_source(source): - foo = 
source.create_group("foo") - foo.attrs["experiment"] = "weird science" - baz = foo.create_dataset("bar/baz", data=np.arange(100), chunks=(50,)) - baz.attrs["units"] = "metres" - if request.param == "hdf5": - extra_kws = dict( - compression="gzip", - compression_opts=3, - fillvalue=84, - shuffle=True, - fletcher32=True, - ) - else: - extra_kws = dict(compressor=Zlib(3), order="F", fill_value=42, filters=[Adler32()]) - source.create_dataset( - "spam", - data=np.arange(100, 200).reshape(20, 5), - chunks=(10, 2), - dtype="i2", - **extra_kws, - ) - return source - - if request.param == "hdf5": - h5py = pytest.importorskip("h5py") - fn = tmpdir.join("source.h5") - with h5py.File(str(fn), mode="w") as h5f: - yield prep_source(h5f) - elif request.param == "zarr": - yield prep_source(group(path="group1", zarr_version=3)) - - # Test with various destination StoreV3 types as TestCopyV3 covers rmdir - destinations = ["hdf5", "zarr", "zarr_kvstore", "zarr_directorystore", "zarr_sqlitestore"] - if have_fsspec: - destinations += ["zarr_fsstore"] - - @pytest.fixture(params=destinations) - def dest(self, request, tmpdir): - if request.param == "hdf5": - h5py = pytest.importorskip("h5py") - fn = tmpdir.join("dest.h5") - with h5py.File(str(fn), mode="w") as h5f: - yield h5f - elif request.param == "zarr": - yield group(path="group2", zarr_version=3) - elif request.param == "zarr_kvstore": - store = KVStoreV3(dict()) - yield group(store, path="group2", zarr_version=3) - elif request.param == "zarr_fsstore": - fn = tmpdir.join("dest.zr3") - store = FSStoreV3(str(fn), auto_mkdir=True) - yield group(store, path="group2", zarr_version=3) - elif request.param == "zarr_directorystore": - fn = tmpdir.join("dest.zr3") - store = DirectoryStoreV3(str(fn)) - yield group(store, path="group2", zarr_version=3) - elif request.param == "zarr_sqlitestore": - fn = tmpdir.join("dest.db") - store = SQLiteStoreV3(str(fn)) - yield group(store, path="group2", zarr_version=3) - - def test_copy_array_create_options(self, source, dest): - dest_h5py = dest.__module__.startswith("h5py.") - - # copy array, provide creation options - compressor = Zlib(9) - create_kws = dict(chunks=(10,)) - if dest_h5py: - create_kws.update( - compression="gzip", compression_opts=9, shuffle=True, fletcher32=True, fillvalue=42 - ) - else: - # v3 case has no filters argument in zarr create_kws - create_kws.update(compressor=compressor, fill_value=42, order="F") - copy(source["foo/bar/baz"], dest, without_attrs=True, **create_kws) - check_copied_array( - source["foo/bar/baz"], dest["baz"], without_attrs=True, expect_props=create_kws - ) - - def test_copy_group_no_name(self, source, dest): - if source.__module__.startswith("h5py"): - with pytest.raises(TypeError): - copy(source, dest) - else: - # For v3, dest.name will be inferred from source.name - copy(source, dest) - check_copied_group(source, dest[source.name.lstrip("/")]) - - copy(source, dest, name="root") - check_copied_group(source, dest["root"]) diff --git a/tests/test_core.py b/tests/test_core.py index e8d527c4ef..6303371793 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -33,15 +33,9 @@ import zarr from zarr._storage.store import ( BaseStore, - v3_api_available, -) -from zarr._storage.v3_storage_transformers import ( - DummyStorageTransfomer, - ShardingStorageTransformer, - v3_sharding_available, ) + from zarr.core import Array -from zarr.errors import ArrayNotFoundError, ContainsGroupError from zarr.meta import json_loads from zarr.n5 import N5Store, N5FSStore, n5_keywords from 
zarr.storage import ( @@ -56,24 +50,10 @@ SQLiteStore, atexit_rmglob, atexit_rmtree, - data_root, init_array, init_group, - meta_root, normalize_store_arg, ) -from zarr._storage.v3 import ( - ABSStoreV3, - DBMStoreV3, - DirectoryStoreV3, - FSStoreV3, - KVStoreV3, - LMDBStoreV3, - LRUStoreCacheV3, - RmdirV3, - SQLiteStoreV3, - StoreV3, -) from zarr.util import buffer_size from .util import abs_container, skip_test_env_var, have_fsspec, mktemp @@ -82,7 +62,6 @@ class TestArray: - version = 2 root = "" path = "" compressor = Zlib(level=1) @@ -139,7 +118,7 @@ def test_array_init(self): # normal initialization store = self.create_store() init_array(store, shape=100, chunks=10, dtype=" 2: - # in v3, attributes are in a sub-dictionary of the metadata - attrs = attrs["attributes"] assert "foo" in attrs and attrs["foo"] == "bar" a.attrs["bar"] = "foo" assert a.attrs.key in a.store attrs = json_loads(a.store[a.attrs.key]) - if self.version > 2: - # in v3, attributes are in a sub-dictionary of the metadata - attrs = attrs["attributes"] assert "foo" in attrs and attrs["foo"] == "bar" assert "bar" in attrs and attrs["bar"] == "foo" a.store.close() @@ -2298,7 +2256,7 @@ def test_nbytes_stored(self): class TestArrayNoCache(TestArray): def test_cache_metadata(self): a1 = self.create_array(shape=100, chunks=10, dtype="i1", cache_metadata=False) - path = None if self.version == 2 else a1.path + path = None a2 = Array(a1.store, path=path, cache_metadata=True) assert a1.shape == a2.shape assert a1.size == a2.size @@ -2339,7 +2297,7 @@ def test_cache_metadata(self): def test_cache_attrs(self): a1 = self.create_array(shape=100, chunks=10, dtype="i1", cache_attrs=False) - path = None if self.version == 2 else "arr1" + path = None a2 = Array(a1.store, path=path, cache_attrs=True) assert a1.attrs.asdict() == a2.attrs.asdict() @@ -2460,7 +2418,7 @@ def test_read_nitems_less_than_blocksize_from_multiple_chunks(self): """ z = self.create_array(shape=1000000, chunks=100_000) z[40_000:80_000] = 1 - path = None if self.version == 2 else z.path + path = None b = Array(z.store, path=path, read_only=True, partial_decompress=True) assert (b[40_000:80_000] == 1).all() @@ -2470,7 +2428,7 @@ def test_read_from_all_blocks(self): """ z = self.create_array(shape=1000000, chunks=100_000) z[2:99_000] = 1 - path = None if self.version == 2 else z.path + path = None b = Array(z.store, path=path, read_only=True, partial_decompress=True) assert (b[2:99_000] == 1).all() @@ -2517,7 +2475,7 @@ def test_read_nitems_less_than_blocksize_from_multiple_chunks(self): """ z = self.create_array(shape=1000000, chunks=100_000) z[40_000:80_000] = 1 - path = None if self.version == 2 else z.path + path = None b = Array(z.store, path=path, read_only=True, partial_decompress=True) assert (b[40_000:80_000] == 1).all() @@ -2527,607 +2485,11 @@ def test_read_from_all_blocks(self): """ z = self.create_array(shape=1000000, chunks=100_000) z[2:99_000] = 1 - path = None if self.version == 2 else z.path + path = None b = Array(z.store, path=path, read_only=True, partial_decompress=True) assert (b[2:99_000] == 1).all() -#### -# StoreV3 test classes inheriting from the above below this point -#### - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayV3(TestArray): - version = 3 - root = meta_root - path = "arr1" - - def create_store(self): - return KVStoreV3(dict()) - - def expected(self): - # tests for array without path will not be run for v3 stores - assert self.version == 3 - return [ - 
"73ab8ace56719a5c9308c3754f5e2d57bc73dc20", - "5fb3d02b8f01244721582929b3cad578aec5cea5", - "26b098bedb640846e18dc2fbc1c27684bb02b532", - "799a458c287d431d747bec0728987ca4fe764549", - "c780221df84eb91cb62f633f12d3f1eaa9cee6bd", - ] - - # TODO: fix test_nbytes_stored - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithPathV3(TestArrayV3): - def test_array_init(self): - store = self.create_store() - # can initialize an array without a path - init_array(store, shape=100, chunks=10, dtype=" BaseStore: - path = mkdtemp() - atexit.register(shutil.rmtree, path) - return DirectoryStoreV3(path) - - def test_nbytes_stored(self): - # dict as store - z = self.create_array(shape=1000, chunks=100) - expect_nbytes_stored = sum(buffer_size(v) for k, v in z.store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - z[:] = 42 - expect_nbytes_stored = sum(buffer_size(v) for k, v in z.store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - - -@skip_test_env_var("ZARR_TEST_ABS") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithABSStoreV3(TestArrayV3): - def create_store(self) -> ABSStoreV3: - client = abs_container() - store = ABSStoreV3(client=client) - store.rmdir() - return store - - -# TODO: TestArrayWithN5StoreV3 -# class TestArrayWithN5StoreV3(TestArrayWithDirectoryStoreV3): - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithDBMStoreV3(TestArrayV3): - def create_store(self) -> DBMStoreV3: - path = mktemp(suffix=".anydbm") - atexit.register(atexit_rmglob, path + "*") - store = DBMStoreV3(path, flag="n") - return store - - def test_nbytes_stored(self): - pass # not implemented - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithDBMStoreV3BerkeleyDB(TestArrayV3): - def create_store(self) -> DBMStoreV3: - bsddb3 = pytest.importorskip("bsddb3") - path = mktemp(suffix=".dbm") - atexit.register(os.remove, path) - store = DBMStoreV3(path, flag="n", open=bsddb3.btopen) - return store - - def test_nbytes_stored(self): - pass # not implemented - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithLMDBStoreV3(TestArrayV3): - lmdb_buffers = True - - def create_store(self) -> LMDBStoreV3: - pytest.importorskip("lmdb") - path = mktemp(suffix=".lmdb") - atexit.register(atexit_rmtree, path) - store = LMDBStoreV3(path, buffers=self.lmdb_buffers) - return store - - def test_store_has_bytes_values(self): - pass # returns values as memoryviews/buffers instead of bytes - - def test_nbytes_stored(self): - pass # not implemented - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithLMDBStoreV3NoBuffers(TestArrayWithLMDBStoreV3): - lmdb_buffers = False - - def test_nbytes_stored(self): - pass # not implemented - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithSQLiteStoreV3(TestArrayV3): - def create_store(self): - pytest.importorskip("sqlite3") - path = mktemp(suffix=".db") - atexit.register(atexit_rmtree, path) - store = SQLiteStoreV3(path) - return store - - def test_nbytes_stored(self): - pass # not implemented - - -# skipped adding V3 equivalents for compressors (no change in v3): -# TestArrayWithNoCompressor -# TestArrayWithBZ2Compressor -# TestArrayWithBloscCompressor -# TestArrayWithLZMACompressor - -# skipped test with filters (v3 protocol removed filters) -# TestArrayWithFilters - - -# 
custom store, does not support getsize() -# Note: this custom mapping doesn't actually have all methods in the -# v3 spec (e.g. erase), but they aren't needed here. - - -class CustomMappingV3(RmdirV3, StoreV3): - def __init__(self): - self.inner = KVStoreV3(dict()) - - def __iter__(self): - return iter(self.keys()) - - def __len__(self): - return len(self.inner) - - def keys(self): - return self.inner.keys() - - def values(self): - return self.inner.values() - - def get(self, item, default=None): - try: - return self.inner[item] - except KeyError: - return default - - def __getitem__(self, item): - return self.inner[item] - - def __setitem__(self, item, value): - self.inner[item] = ensure_bytes(value) - - def __delitem__(self, key): - del self.inner[key] - - def __contains__(self, item): - return item in self.inner - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithCustomMappingV3(TestArrayV3): - def create_store(self): - store = CustomMappingV3() - return store - - def test_nbytes_stored(self): - z = self.create_array(shape=1000, chunks=100) - expect_nbytes_stored = sum(buffer_size(v) for k, v in z.store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - z[:] = 42 - expect_nbytes_stored = sum(buffer_size(v) for k, v in z.store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - - def test_len(self): - # dict as store - z = self.create_array(shape=1000, chunks=100) - assert len(z._store) == 2 - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayNoCacheV3(TestArrayWithPathV3): - def create_store(self): - store = KVStoreV3(dict()) - return store - - def test_object_arrays_danger(self): - # skip this one as it only works if metadata are cached - pass - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithStoreCacheV3(TestArrayV3): - def create_store(self): - store = LRUStoreCacheV3(dict(), max_size=None) - return store - - def test_store_has_bytes_values(self): - # skip as the cache has no control over how the store provides values - pass - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3(TestArrayV3): - compressor = Blosc() - - def create_store(self): - path = mkdtemp() - atexit.register(shutil.rmtree, path) - key_separator = self.dimension_separator - store = FSStoreV3( - path, - key_separator=key_separator, - auto_mkdir=True, - create=True, - check=True, - missing_exceptions=None, - ) - return store - - def expected(self): - return [ - "1509abec4285494b61cd3e8d21f44adc3cf8ddf6", - "7cfb82ec88f7ecb7ab20ae3cb169736bc76332b8", - "b663857bb89a8ab648390454954a9cdd453aa24b", - "21e90fa927d09cbaf0e3b773130e2dc05d18ff9b", - "e8c1fdd18b5c2ee050b59d0c8c95d07db642459c", - ] - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3FromFilesystem(TestArrayWithFSStoreV3): - def create_store(self): - from fsspec.implementations.local import LocalFileSystem - - fs = LocalFileSystem(auto_mkdir=True) - path = mkdtemp() - atexit.register(shutil.rmtree, path) - key_separator = self.dimension_separator - store = FSStoreV3( - path, - fs=fs, - key_separator=key_separator, - create=True, - check=True, - missing_exceptions=None, - ) - return store - - def expected(self): - return [ - 
"1509abec4285494b61cd3e8d21f44adc3cf8ddf6", - "7cfb82ec88f7ecb7ab20ae3cb169736bc76332b8", - "b663857bb89a8ab648390454954a9cdd453aa24b", - "21e90fa927d09cbaf0e3b773130e2dc05d18ff9b", - "e8c1fdd18b5c2ee050b59d0c8c95d07db642459c", - ] - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3PartialRead(TestArrayWithFSStoreV3): - partial_decompress = True - - def expected(self): - return [ - "1509abec4285494b61cd3e8d21f44adc3cf8ddf6", - "7cfb82ec88f7ecb7ab20ae3cb169736bc76332b8", - "b663857bb89a8ab648390454954a9cdd453aa24b", - "21e90fa927d09cbaf0e3b773130e2dc05d18ff9b", - "e8c1fdd18b5c2ee050b59d0c8c95d07db642459c", - ] - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -@pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") -class TestArrayWithFSStoreV3PartialReadUncompressedSharded(TestArrayWithFSStoreV3): - partial_decompress = True - compressor = None - - def create_storage_transformers(self, shape) -> Tuple[Any]: - num_dims = 1 if isinstance(shape, int) else len(shape) - sharding_transformer = ShardingStorageTransformer( - "indexed", chunks_per_shard=(2,) * num_dims - ) - return (sharding_transformer,) - - def test_nbytes_stored(self): - z = self.create_array(shape=1000, chunks=100) - expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - z[:] = 42 - expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - - def test_supports_efficient_get_set_partial_values(self): - z = self.create_array(shape=100, chunks=10) - assert z.chunk_store.supports_efficient_get_partial_values - assert not z.chunk_store.supports_efficient_set_partial_values() - - def expected(self): - return [ - "90109fc2a4e17efbcb447003ea1c08828b91f71e", - "2b73519f7260dba3ddce0d2b70041888856fec6b", - "bca5798be2ed71d444f3045b05432d937682b7dd", - "9ff1084501e28520e577662a6e3073f1116c76a2", - "882a97cad42417f90f111d0cb916a21579650467", - ] - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3Nested(TestArrayWithFSStoreV3): - dimension_separator = "/" - - def expected(self): - return [ - "1509abec4285494b61cd3e8d21f44adc3cf8ddf6", - "7cfb82ec88f7ecb7ab20ae3cb169736bc76332b8", - "b663857bb89a8ab648390454954a9cdd453aa24b", - "21e90fa927d09cbaf0e3b773130e2dc05d18ff9b", - "e8c1fdd18b5c2ee050b59d0c8c95d07db642459c", - ] - - -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithFSStoreV3NestedPartialRead(TestArrayWithFSStoreV3): - dimension_separator = "/" - - def expected(self): - return [ - "1509abec4285494b61cd3e8d21f44adc3cf8ddf6", - "7cfb82ec88f7ecb7ab20ae3cb169736bc76332b8", - "b663857bb89a8ab648390454954a9cdd453aa24b", - "21e90fa927d09cbaf0e3b773130e2dc05d18ff9b", - "e8c1fdd18b5c2ee050b59d0c8c95d07db642459c", - ] - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestArrayWithStorageTransformersV3(TestArrayWithChunkStoreV3): - def create_storage_transformers(self, shape) -> Tuple[Any]: - return ( - DummyStorageTransfomer("dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT), - ) - - def 
expected(self): - return [ - "3fb9a4f8233b09ad02067b6b7fc9fd5caa405c7d", - "89c8eb364beb84919fc9153d2c1ed2696274ec18", - "73307055c3aec095dd1232c38d793ef82a06bd97", - "6152c09255a5efa43b1a115546e35affa00c138c", - "2f8802fc391f67f713302e84fad4fd8f1366d6c2", - ] - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -@pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") -class TestArrayWithShardingStorageTransformerV3(TestArrayV3): - compressor = None - - def create_storage_transformers(self, shape) -> Tuple[Any]: - num_dims = 1 if isinstance(shape, int) else len(shape) - return (ShardingStorageTransformer("indexed", chunks_per_shard=(2,) * num_dims),) - - def test_nbytes_stored(self): - z = self.create_array(shape=1000, chunks=100) - expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - z[:] = 42 - expect_nbytes_stored = sum(buffer_size(v) for k, v in z._store.items() if k != "zarr.json") - assert expect_nbytes_stored == z.nbytes_stored - - # mess with store - z.store[data_root + z._key_prefix + "foo"] = list(range(10)) - assert -1 == z.nbytes_stored - - def test_keys_inner_store(self): - z = self.create_array(shape=1000, chunks=100) - assert z.chunk_store.keys() == z._store.keys() - meta_keys = set(z.store.keys()) - z[:] = 42 - assert len(z.chunk_store.keys() - meta_keys) == 10 - # inner store should have half the data keys, - # since chunks_per_shard is 2: - assert len(z._store.keys() - meta_keys) == 5 - - def test_supports_efficient_get_set_partial_values(self): - z = self.create_array(shape=100, chunks=10) - assert not z.chunk_store.supports_efficient_get_partial_values - assert not z.chunk_store.supports_efficient_set_partial_values() - - def expected(self): - return [ - "90109fc2a4e17efbcb447003ea1c08828b91f71e", - "2b73519f7260dba3ddce0d2b70041888856fec6b", - "bca5798be2ed71d444f3045b05432d937682b7dd", - "9ff1084501e28520e577662a6e3073f1116c76a2", - "882a97cad42417f90f111d0cb916a21579650467", - ] - - -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -def test_array_mismatched_store_versions(): - store_v3 = KVStoreV3(dict()) - store_v2 = KVStore(dict()) - - # separate chunk store - chunk_store_v2 = KVStore(dict()) - chunk_store_v3 = KVStoreV3(dict()) - - init_kwargs = dict(shape=100, chunks=10, dtype="""" diff --git a/tests/test_creation.py b/tests/test_creation.py index 27ce00bc8a..369d755700 100644 --- a/tests/test_creation.py +++ b/tests/test_creation.py @@ -7,8 +7,6 @@ import pytest from numpy.testing import assert_array_equal -from zarr._storage.store import DEFAULT_ZARR_VERSION -from zarr._storage.v3_storage_transformers import DummyStorageTransfomer from zarr.codecs import Zlib from zarr.core import Array from zarr.creation import ( @@ -28,14 +26,12 @@ from zarr.hierarchy import open_group from zarr.n5 import N5Store from zarr.storage import DirectoryStore, KVStore -from zarr._storage.store import v3_api_available -from zarr._storage.v3 import DirectoryStoreV3, KVStoreV3 from zarr.sync import ThreadSynchronizer from .util import mktemp, have_fsspec -_VERSIONS = (None, 2, 3) if v3_api_available else (None, 2) -_VERSIONS2 = (2, 3) if v3_api_available else (2,) +_VERSIONS = (None, 2) +_VERSIONS2 = (2,) # something bcolz-like @@ -64,25 +60,22 @@ def __getitem__(self, item): return self.data[item] -def _init_creation_kwargs(zarr_version, at_root=True): - kwargs = {"zarr_version": zarr_version} +def _init_creation_kwargs(at_root=True): + 
kwargs = {} if not at_root: kwargs["path"] = "array" return kwargs -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_array(zarr_version, at_root): - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version - kwargs = _init_creation_kwargs(zarr_version, at_root) +def test_array(at_root): + kwargs = _init_creation_kwargs(at_root) # with numpy array a = np.arange(100) z = array(a, chunks=10, **kwargs) assert a.shape == z.shape assert a.dtype == z.dtype - assert z._store._store_version == expected_zarr_version assert_array_equal(a, z[:]) # with array-like @@ -131,39 +124,35 @@ def test_array(zarr_version, at_root): assert np.dtype("i8") == z.dtype -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_empty(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) +def test_empty(at_root): + kwargs = _init_creation_kwargs(at_root) z = empty(100, chunks=10, **kwargs) assert (100,) == z.shape assert (10,) == z.chunks -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_zeros(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) +def test_zeros(at_root): + kwargs = _init_creation_kwargs(at_root) z = zeros(100, chunks=10, **kwargs) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.zeros(100), z[:]) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_ones(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) +def test_ones(at_root): + kwargs = _init_creation_kwargs(at_root) z = ones(100, chunks=10, **kwargs) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.ones(100), z[:]) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_full(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) +def test_full(at_root): + kwargs = _init_creation_kwargs(at_root) z = full(100, chunks=10, fill_value=42, dtype="i4", **kwargs) assert (100,) == z.shape assert (10,) == z.chunks @@ -174,10 +163,9 @@ def test_full(zarr_version, at_root): assert np.all(np.isnan(z[:])) -@pytest.mark.parametrize("zarr_version", [None, 2]) # TODO -def test_full_additional_dtypes(zarr_version): +def test_full_additional_dtypes(): """Test additional types that aren't part of the base v3 spec.""" - kwargs = _init_creation_kwargs(zarr_version) + kwargs = _init_creation_kwargs() # NaT z = full(100, chunks=10, fill_value="NaT", dtype="M8[s]", **kwargs) assert np.all(np.isnat(z[:])) @@ -209,11 +197,10 @@ def test_full_additional_dtypes(zarr_version): @pytest.mark.parametrize("dimension_separator", [".", "/", None]) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_open_array(zarr_version, at_root, dimension_separator): +def test_open_array(at_root, dimension_separator): store = "data/array.zarr" - kwargs = _init_creation_kwargs(zarr_version, at_root) + kwargs = _init_creation_kwargs(at_root) # mode == 'w' z = open_array( @@ -221,23 +208,19 @@ def test_open_array(zarr_version, at_root, dimension_separator): ) z[:] = 42 assert isinstance(z, Array) - if z._store._store_version == 2: - assert isinstance(z.store, DirectoryStore) - else: - assert isinstance(z.store, DirectoryStoreV3) + + assert isinstance(z.store, 
DirectoryStore) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.full(100, fill_value=42), z[:]) if dimension_separator is None: - assert z._dimension_separator == "/" if zarr_version == 3 else "." + assert z._dimension_separator == "." else: assert z._dimension_separator == dimension_separator # mode in 'r', 'r+' group_kwargs = kwargs.copy() - if zarr_version == 3: - group_kwargs["path"] = "group" open_group("data/group.zarr", mode="w", **group_kwargs) for mode in "r", "r+": with pytest.raises(ValueError): @@ -246,10 +229,7 @@ def test_open_array(zarr_version, at_root, dimension_separator): open_array("data/group.zarr", mode=mode) z = open_array(store, mode="r", **kwargs) assert isinstance(z, Array) - if z._store._store_version == 2: - assert isinstance(z.store, DirectoryStore) - else: - assert isinstance(z.store, DirectoryStoreV3) + assert isinstance(z.store, DirectoryStore) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.full(100, fill_value=42), z[:]) @@ -257,10 +237,7 @@ def test_open_array(zarr_version, at_root, dimension_separator): z[:] = 43 z = open_array(store, mode="r+", **kwargs) assert isinstance(z, Array) - if z._store._store_version == 2: - assert isinstance(z.store, DirectoryStore) - else: - assert isinstance(z.store, DirectoryStoreV3) + assert isinstance(z.store, DirectoryStore) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.full(100, fill_value=42), z[:]) @@ -272,18 +249,12 @@ def test_open_array(zarr_version, at_root, dimension_separator): z = open_array(store, mode="a", shape=100, chunks=10, **kwargs) z[:] = 42 assert isinstance(z, Array) - if z._store._store_version == 2: - assert isinstance(z.store, DirectoryStore) - else: - assert isinstance(z.store, DirectoryStoreV3) + assert isinstance(z.store, DirectoryStore) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.full(100, fill_value=42), z[:]) - expected_error = TypeError if zarr_version == 3 else ValueError - # v3 path does not conflict, but will raise TypeError without shape kwarg - with pytest.raises(expected_error): - # array would end up at data/group.zarr/meta/root/array.array.json + with pytest.raises(ValueError): open_array("data/group.zarr", mode="a", **kwargs) # mode in 'w-', 'x' @@ -292,18 +263,14 @@ def test_open_array(zarr_version, at_root, dimension_separator): z = open_array(store, mode=mode, shape=100, chunks=10, **kwargs) z[:] = 42 assert isinstance(z, Array) - if z._store._store_version == 2: - assert isinstance(z.store, DirectoryStore) - else: - assert isinstance(z.store, DirectoryStoreV3) + assert isinstance(z.store, DirectoryStore) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.full(100, fill_value=42), z[:]) with pytest.raises(ValueError): open_array(store, mode=mode, **kwargs) - expected_error = TypeError if zarr_version == 3 else ValueError - # v3 path does not conflict, but will raise TypeError without shape kwarg - with pytest.raises(expected_error): + + with pytest.raises(ValueError): open_array("data/group.zarr", mode=mode, **kwargs) # with synchronizer @@ -327,21 +294,15 @@ def test_open_array(zarr_version, at_root, dimension_separator): def test_open_array_none(): - # open with both store and zarr_version = None + # open with store = None z = open_array(mode="w", shape=100, chunks=10) assert isinstance(z, Array) - assert z._version == 2 @pytest.mark.parametrize("dimension_separator", [".", "/", None]) -@pytest.mark.parametrize("zarr_version", _VERSIONS2) -def 
test_open_array_infer_separator_from_store(zarr_version, dimension_separator): - if zarr_version == 3: - StoreClass = DirectoryStoreV3 - path = "data" - else: - StoreClass = DirectoryStore - path = None +def test_open_array_infer_separator_from_store(dimension_separator): + StoreClass = DirectoryStore + path = None store = StoreClass("data/array.zarr", dimension_separator=dimension_separator) # Note: no dimension_separator kwarg to open_array @@ -349,25 +310,20 @@ def test_open_array_infer_separator_from_store(zarr_version, dimension_separator z = open_array(store, path=path, mode="w", shape=100, chunks=10) z[:] = 42 assert isinstance(z, Array) - if z._store._store_version == 2: - assert isinstance(z.store, DirectoryStore) - else: - assert isinstance(z.store, DirectoryStoreV3) + assert isinstance(z.store, DirectoryStore) assert (100,) == z.shape assert (10,) == z.chunks assert_array_equal(np.full(100, fill_value=42), z[:]) if dimension_separator is None: - assert z._dimension_separator == "/" if zarr_version == 3 else "." + assert z._dimension_separator == "." else: assert z._dimension_separator == dimension_separator -# TODO: N5 support for v3 -@pytest.mark.parametrize("zarr_version", [None, 2]) -def test_open_array_n5(zarr_version): +def test_open_array_n5(): store = "data/array.zarr" - kwargs = _init_creation_kwargs(zarr_version) + kwargs = _init_creation_kwargs() # for N5 store store = "data/array.n5" @@ -381,8 +337,6 @@ def test_open_array_n5(zarr_version): store = "data/group.n5" group_kwargs = kwargs.copy() - # if zarr_version == 3: - # group_kwargs['path'] = 'group' z = open_group(store, mode="w", **group_kwargs) i = z.create_group("inner") a = i.zeros("array", shape=100, chunks=10) @@ -401,13 +355,12 @@ def test_open_array_n5(zarr_version): assert_array_equal(np.full(100, fill_value=42), a[:]) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_open_array_dict_store(zarr_version, at_root): +def test_open_array_dict_store(at_root): # dict will become a KVStore store = dict() - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_store_type = KVStoreV3 if zarr_version == 3 else KVStore + kwargs = _init_creation_kwargs(at_root) + expected_store_type = KVStore # mode == 'w' z = open_array(store, mode="w", shape=100, chunks=10, **kwargs) @@ -419,11 +372,10 @@ def test_open_array_dict_store(zarr_version, at_root): assert_array_equal(np.full(100, fill_value=42), z[:]) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_create_in_dict(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_store_type = KVStoreV3 if zarr_version == 3 else KVStore +def test_create_in_dict(at_root): + kwargs = _init_creation_kwargs(at_root) + expected_store_type = KVStore for func in [empty, zeros, ones]: a = func(100, store=dict(), **kwargs) @@ -434,27 +386,23 @@ def test_create_in_dict(zarr_version, at_root): @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_create_writeable_mode(zarr_version, at_root, tmp_path): +def test_create_writeable_mode(at_root, tmp_path): # Regression test for https://github.com/zarr-developers/zarr-python/issues/1306 import fsspec - kwargs = _init_creation_kwargs(zarr_version, at_root) + kwargs = _init_creation_kwargs(at_root) store = fsspec.get_mapper(str(tmp_path)) z = create(100, 
store=store, **kwargs) assert z.store.map == store -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_empty_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version +def test_empty_like(at_root): + kwargs = _init_creation_kwargs(at_root) # zarr array z = empty(100, chunks=10, dtype="f4", compressor=Zlib(5), order="F", **kwargs) - # zarr_version will be inferred from z, but have to specify a path in v3 z2 = empty_like(z, path=kwargs.get("path")) assert z.shape == z2.shape assert z.chunks == z2.chunks @@ -462,7 +410,6 @@ def test_empty_like(zarr_version, at_root): assert z.compressor.get_config() == z2.compressor.get_config() assert z.fill_value == z2.fill_value assert z.order == z2.order - assert z._store._store_version == z2._store._store_version == expected_zarr_version # numpy array a = np.empty(100, dtype="f4") @@ -471,7 +418,6 @@ def test_empty_like(zarr_version, at_root): assert (100,) == z3.chunks assert a.dtype == z3.dtype assert z3.fill_value is None - assert z3._store._store_version == expected_zarr_version # something slightly silly a = [0] * 100 @@ -494,11 +440,9 @@ def test_empty_like(zarr_version, at_root): assert isinstance(z.chunks, tuple) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_zeros_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version +def test_zeros_like(at_root): + kwargs = _init_creation_kwargs(at_root) # zarr array z = zeros(100, chunks=10, dtype="f4", compressor=Zlib(5), order="F", **kwargs) @@ -509,7 +453,7 @@ def test_zeros_like(zarr_version, at_root): assert z.compressor.get_config() == z2.compressor.get_config() assert z.fill_value == z2.fill_value assert z.order == z2.order - assert z._store._store_version == z2._store._store_version == expected_zarr_version + # numpy array a = np.empty(100, dtype="f4") z3 = zeros_like(a, chunks=10, **kwargs) @@ -519,11 +463,9 @@ def test_zeros_like(zarr_version, at_root): assert 0 == z3.fill_value -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_ones_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version +def test_ones_like(at_root): + kwargs = _init_creation_kwargs(at_root) # zarr array z = ones(100, chunks=10, dtype="f4", compressor=Zlib(5), order="F", **kwargs) @@ -534,7 +476,7 @@ def test_ones_like(zarr_version, at_root): assert z.compressor.get_config() == z2.compressor.get_config() assert z.fill_value == z2.fill_value assert z.order == z2.order - assert z._store._store_version == z2._store._store_version == expected_zarr_version + # numpy array a = np.empty(100, dtype="f4") z3 = ones_like(a, chunks=10, **kwargs) @@ -542,14 +484,11 @@ def test_ones_like(zarr_version, at_root): assert (10,) == z3.chunks assert a.dtype == z3.dtype assert 1 == z3.fill_value - assert z3._store._store_version == expected_zarr_version -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_full_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version 
is None else zarr_version +def test_full_like(at_root): + kwargs = _init_creation_kwargs(at_root) z = full(100, chunks=10, dtype="f4", compressor=Zlib(5), fill_value=42, order="F", **kwargs) z2 = full_like(z, path=kwargs.get("path")) @@ -559,7 +498,7 @@ def test_full_like(zarr_version, at_root): assert z.compressor.get_config() == z2.compressor.get_config() assert z.fill_value == z2.fill_value assert z.order == z2.order - assert z._store._store_version == z2._store._store_version == expected_zarr_version + # numpy array a = np.empty(100, dtype="f4") z3 = full_like(a, chunks=10, fill_value=42, **kwargs) @@ -567,17 +506,15 @@ def test_full_like(zarr_version, at_root): assert (10,) == z3.chunks assert a.dtype == z3.dtype assert 42 == z3.fill_value - assert z3._store._store_version == expected_zarr_version + with pytest.raises(TypeError): # fill_value missing full_like(a, chunks=10, **kwargs) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_open_like(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version +def test_open_like(at_root): + kwargs = _init_creation_kwargs(at_root) # zarr array path = mktemp() @@ -590,24 +527,21 @@ def test_open_like(zarr_version, at_root): assert z.compressor.get_config() == z2.compressor.get_config() assert z.fill_value == z2.fill_value assert z.order == z2.order - assert z._store._store_version == z2._store._store_version == expected_zarr_version + # numpy array path = mktemp() atexit.register(shutil.rmtree, path) a = np.empty(100, dtype="f4") - z3 = open_like(a, path, chunks=10, zarr_version=zarr_version) + z3 = open_like(a, path, chunks=10) assert a.shape == z3.shape assert (10,) == z3.chunks assert a.dtype == z3.dtype assert 0 == z3.fill_value - assert z3._store._store_version == expected_zarr_version -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_create(zarr_version, at_root): - kwargs = _init_creation_kwargs(zarr_version, at_root) - expected_zarr_version = DEFAULT_ZARR_VERSION if zarr_version is None else zarr_version +def test_create(at_root): + kwargs = _init_creation_kwargs(at_root) # defaults z = create(100, **kwargs) @@ -617,7 +551,6 @@ def test_create(zarr_version, at_root): assert np.dtype(None) == z.dtype assert "blosc" == z.compressor.codec_id assert 0 == z.fill_value - assert z._store._store_version == expected_zarr_version # all specified z = create(100, chunks=10, dtype="i4", compressor=Zlib(1), fill_value=42, order="F", **kwargs) @@ -629,7 +562,6 @@ def test_create(zarr_version, at_root): assert 1 == z.compressor.level assert 42 == z.fill_value assert "F" == z.order - assert z._store._store_version == expected_zarr_version # with synchronizer synchronizer = ThreadSynchronizer() @@ -638,7 +570,6 @@ def test_create(zarr_version, at_root): assert (100,) == z.shape assert (10,) == z.chunks assert synchronizer is z.synchronizer - assert z._store._store_version == expected_zarr_version # don't allow string as compressor arg with pytest.raises(ValueError): @@ -671,9 +602,8 @@ def test_create(zarr_version, at_root): assert z.chunks == z.shape -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_compression_args(zarr_version): - kwargs = _init_creation_kwargs(zarr_version) +def test_compression_args(): + kwargs = _init_creation_kwargs() with warnings.catch_warnings(): warnings.simplefilter("default") @@ 
-704,12 +634,11 @@ def test_compression_args(zarr_version): create(100, compressor=Zlib(9), compression_opts=1, **kwargs) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_create_read_only(zarr_version, at_root): +def test_create_read_only(at_root): # https://github.com/alimanfoo/zarr/issues/151 - kwargs = _init_creation_kwargs(zarr_version, at_root) + kwargs = _init_creation_kwargs(at_root) # create an array initially read-only, then enable writing z = create(100, read_only=True, **kwargs) @@ -738,18 +667,6 @@ def test_json_dumps_chunks_numpy_dtype(): assert np.all(z[...] == 0) -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -@pytest.mark.parametrize("at_root", [False, True]) -def test_create_with_storage_transformers(at_root): - kwargs = _init_creation_kwargs(zarr_version=3, at_root=at_root) - transformer = DummyStorageTransfomer( - "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT - ) - z = create(1000000000, chunks=True, storage_transformers=[transformer], **kwargs) - assert isinstance(z.chunk_store, DummyStorageTransfomer) - assert z.chunk_store.test_value == DummyStorageTransfomer.TEST_CONSTANT - - @pytest.mark.parametrize( ("init_shape", "init_chunks", "shape", "chunks"), ( diff --git a/tests/test_hierarchy.py b/tests/test_hierarchy.py index 6d4b1ff54c..8cd51cc940 100644 --- a/tests/test_hierarchy.py +++ b/tests/test_hierarchy.py @@ -18,7 +18,6 @@ from numcodecs import Zlib from numpy.testing import assert_array_equal -from zarr._storage.store import _get_metadata_suffix, v3_api_available from zarr.attrs import Attributes from zarr.core import Array from zarr.creation import open_array @@ -38,29 +37,13 @@ array_meta_key, atexit_rmglob, atexit_rmtree, - data_root, group_meta_key, init_array, init_group, - meta_root, ) -from zarr._storage.v3 import ( - ABSStoreV3, - KVStoreV3, - DirectoryStoreV3, - MemoryStoreV3, - FSStoreV3, - ZipStoreV3, - DBMStoreV3, - LMDBStoreV3, - SQLiteStoreV3, - LRUStoreCacheV3, -) -from zarr.util import InfoReporter, buffer_size -from .util import skip_test_env_var, have_fsspec, abs_container, mktemp - -_VERSIONS = (2, 3) if v3_api_available else (2,) +from zarr.util import InfoReporter +from .util import skip_test_env_var, have_fsspec, abs_container, mktemp # noinspection PyStatementEffect @@ -148,10 +131,7 @@ def _subgroup_path(self, group, path): def test_create_group(self): g1 = self.create_group() - if g1._version == 2: - path, name = "", "/" - else: - path, name = "group", "/group" + path, name = "", "/" # check root group assert path == g1.path assert name == g1.name @@ -205,12 +185,8 @@ def __str__(self): # test bad keys with pytest.raises(ValueError): g1.create_group("foo") # already exists - if g1._version == 2: - with pytest.raises(ValueError): - g1.create_group("a/b/c") # already exists - elif g1._version == 3: - # for v3 'group/a/b/c' does not already exist - g1.create_group("a/b/c") + with pytest.raises(ValueError): + g1.create_group("a/b/c") # already exists with pytest.raises(ValueError): g4.create_group("/a/b/c") # already exists with pytest.raises(ValueError): @@ -260,16 +236,7 @@ def test_require_group(self): assert g5.store is g5a.store # test path normalization - if g1._version == 2: - assert g1.require_group("quux") == g1.require_group("/quux/") - elif g1._version: - # These are not equal in v3! 
- # 'quux' will be within the group: - # meta/root/group/quux.group.json - # '/quux/' will be outside of the group at: - # meta/root/quux.group.json - assert g1.require_group("quux") != g1.require_group("/quux/") - + assert g1.require_group("quux") == g1.require_group("/quux/") # multi g6, g7 = g1.require_groups("y", "z") assert isinstance(g6, Group) @@ -289,24 +256,9 @@ def test_rmdir_group_and_array_metadata_files(self): g1.create_dataset("arr1", shape=(100,), chunks=(10,), dtype=np.uint8) # create level 1 child group - g2 = g1.create_group("foo") + _ = g1.create_group("foo") g1.create_dataset("arr2", shape=(100,), chunks=(10,), dtype=np.uint8) - if g1._version > 2 and g1.store.is_erasable(): - arr_path = g1.path + "/arr1" - sfx = _get_metadata_suffix(g1.store) - array_meta_file = meta_root + arr_path + ".array" + sfx - assert array_meta_file in g1.store - group_meta_file = meta_root + g2.path + ".group" + sfx - assert group_meta_file in g1.store - - # rmdir on the array path should also remove the metadata file - g1.store.rmdir(arr_path) - assert array_meta_file not in g1.store - # rmdir on the group path should also remove its metadata file - g1.store.rmdir(g2.path) - assert group_meta_file not in g1.store - def _dataset_path(self, group, path): path = path.rstrip("/") absolute = path.startswith("/") @@ -541,12 +493,9 @@ def test_getitem_contains_iterators(self): # setup g1 = self.create_group() g2 = g1.create_group("foo/bar") - if g1._version == 2: - d1 = g2.create_dataset("/a/b/c", shape=1000, chunks=100) - else: - # v3: cannot create a dataset at the root by starting with / - # instead, need to create the dataset on g1 directly - d1 = g1.create_dataset("a/b/c", shape=1000, chunks=100) + + d1 = g2.create_dataset("/a/b/c", shape=1000, chunks=100) + d1[:] = np.arange(1000) d2 = g1.create_dataset("foo/baz", shape=3000, chunks=300) d2[:] = np.arange(3000) @@ -555,13 +504,7 @@ def test_getitem_contains_iterators(self): assert isinstance(g1["foo"], Group) assert isinstance(g1["foo"]["bar"], Group) assert isinstance(g1["foo/bar"], Group) - if g1._version == 2: - assert isinstance(g1["/foo/bar/"], Group) - else: - # start or end with / raises KeyError - # TODO: should we allow stripping of these on v3? 
- with pytest.raises(KeyError): - assert isinstance(g1["/foo/bar/"], Group) + assert isinstance(g1["/foo/bar/"], Group) assert isinstance(g1["foo/baz"], Array) assert g2 == g1["foo/bar"] assert g1["foo"]["bar"] == g1["foo/bar"] @@ -604,18 +547,12 @@ def test_getitem_contains_iterators(self): # test __iter__, keys() - if g1._version == 2: - # currently assumes sorted by key - assert ["a", "foo"] == list(g1) - assert ["a", "foo"] == list(g1.keys()) - assert ["bar", "baz"] == list(g1["foo"]) - assert ["bar", "baz"] == list(g1["foo"].keys()) - else: - # v3 is not necessarily sorted by key - assert ["a", "foo"] == sorted(list(g1)) - assert ["a", "foo"] == sorted(list(g1.keys())) - assert ["bar", "baz"] == sorted(list(g1["foo"])) - assert ["bar", "baz"] == sorted(list(g1["foo"].keys())) + # currently assumes sorted by key + assert ["a", "foo"] == list(g1) + assert ["a", "foo"] == list(g1.keys()) + assert ["bar", "baz"] == list(g1["foo"]) + assert ["bar", "baz"] == list(g1["foo"].keys()) + assert [] == sorted(g1["foo/bar"]) assert [] == sorted(g1["foo/bar"].keys()) @@ -624,9 +561,6 @@ def test_getitem_contains_iterators(self): items = list(g1.items()) values = list(g1.values()) - if g1._version == 3: - # v3 are not automatically sorted by key - items, values = zip(*sorted(zip(items, values), key=lambda x: x[0])) assert "a" == items[0][0] assert g1["a"] == items[0][1] assert g1["a"] == values[0] @@ -636,9 +570,6 @@ def test_getitem_contains_iterators(self): items = list(g1["foo"].items()) values = list(g1["foo"].values()) - if g1._version == 3: - # v3 are not automatically sorted by key - items, values = zip(*sorted(zip(items, values), key=lambda x: x[0])) assert "bar" == items[0][0] assert g1["foo"]["bar"] == items[0][1] assert g1["foo"]["bar"] == values[0] @@ -650,13 +581,8 @@ def test_getitem_contains_iterators(self): groups = list(g1.groups()) arrays = list(g1.arrays()) - if g1._version == 2: - # currently assumes sorted by key - assert ["a", "foo"] == list(g1.group_keys()) - else: - assert ["a", "foo"] == sorted(list(g1.group_keys())) - groups = sorted(groups) - arrays = sorted(arrays) + # currently assumes sorted by key + assert ["a", "foo"] == list(g1.group_keys()) assert "a" == groups[0][0] assert g1["a"] == groups[0][1] assert "foo" == groups[1][0] @@ -668,9 +594,6 @@ def test_getitem_contains_iterators(self): assert ["baz"] == list(g1["foo"].array_keys()) groups = list(g1["foo"].groups()) arrays = list(g1["foo"].arrays()) - if g1._version == 3: - groups = sorted(groups) - arrays = sorted(arrays) assert "bar" == groups[0][0] assert g1["foo"]["bar"] == groups[0][1] assert "baz" == arrays[0][0] @@ -699,8 +622,6 @@ def visitor4(name, obj): "foo/bar", "foo/baz", ] - if g1._version == 3: - expected_items = [g1.path + "/" + i for i in expected_items] assert expected_items == items del items[:] @@ -709,8 +630,6 @@ def visitor4(name, obj): "foo/bar", "foo/baz", ] - if g1._version == 3: - expected_items = [g1.path + "/" + i for i in expected_items] assert expected_items == items del items[:] @@ -937,28 +856,10 @@ def test_move(self): g2.move("bar", "/bar") assert "foo2" in g assert "foo2/bar" not in g - if g2._version == 2: - assert "bar" in g - else: - # The `g2.move` call above moved bar to meta/root/bar and - # meta/data/bar. This is outside the `g` group located at - # /meta/root/group, so bar is no longer within `g`. 
- assert "bar" not in g - assert "meta/root/bar.array.json" in g._store - if g._chunk_store: - assert "data/root/bar/c0" in g._chunk_store - else: - assert "data/root/bar/c0" in g._store + assert "bar" in g assert isinstance(g["foo2"], Group) - if g2._version == 2: - assert_array_equal(data, g["bar"]) - else: - # TODO: How to access element created outside of group.path in v3? - # One option is to make a Hierarchy class representing the - # root. Currently Group requires specification of `path`, - # but the path of the root would be just '' which is not - # currently allowed. - pass + + assert_array_equal(data, g["bar"]) with pytest.raises(ValueError): g2.move("bar", "bar2") @@ -1035,39 +936,19 @@ def test_paths(self): g1 = self.create_group() g2 = g1.create_group("foo/bar") - if g1._version == 2: - assert g1 == g1["/"] - assert g1 == g1["//"] - assert g1 == g1["///"] - assert g1 == g2["/"] - assert g1 == g2["//"] - assert g1 == g2["///"] - assert g2 == g1["foo/bar"] - assert g2 == g1["/foo/bar"] - assert g2 == g1["foo/bar/"] - assert g2 == g1["//foo/bar"] - assert g2 == g1["//foo//bar//"] - assert g2 == g1["///foo///bar///"] - assert g2 == g2["/foo/bar"] - else: - # the expected key format gives a match - assert g2 == g1["foo/bar"] - - # TODO: Should presence of a trailing slash raise KeyError? - # The spec says "the final character is not a / character" - # but we currently strip trailing '/' as done for v2. - assert g2 == g1["foo/bar/"] - - # double slash also currently works (spec doesn't mention this - # case, but have kept it for v2 behavior compatibility) - assert g2 == g1["foo//bar"] - - # TODO, root: fix these cases - # v3: leading / implies we are at the root, not within a group, - # so these all raise KeyError - for path in ["/foo/bar", "//foo/bar", "//foo//bar//", "///fooo///bar///"]: - with pytest.raises(KeyError): - g1[path] + assert g1 == g1["/"] + assert g1 == g1["//"] + assert g1 == g1["///"] + assert g1 == g2["/"] + assert g1 == g2["//"] + assert g1 == g2["///"] + assert g2 == g1["foo/bar"] + assert g2 == g1["/foo/bar"] + assert g2 == g1["foo/bar/"] + assert g2 == g1["//foo/bar"] + assert g2 == g1["//foo//bar//"] + assert g2 == g1["///foo///bar///"] + assert g2 == g2["/foo/bar"] with pytest.raises(ValueError): g1["."] @@ -1133,77 +1014,12 @@ def test_group_init_from_dict(chunk_dict): assert chunk_store is not g.chunk_store -# noinspection PyStatementEffect -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3(TestGroup, unittest.TestCase): - @staticmethod - def create_store(): - # can be overridden in sub-classes - return KVStoreV3(dict()), None - - def create_group( - self, store=None, path="group", read_only=False, chunk_store=None, synchronizer=None - ): - # can be overridden in sub-classes - if store is None: - store, chunk_store = self.create_store() - init_group(store, path=path, chunk_store=chunk_store) - g = Group( - store, - path=path, - read_only=read_only, - chunk_store=chunk_store, - synchronizer=synchronizer, - ) - return g - - def test_group_init_1(self): - store, chunk_store = self.create_store() - g = self.create_group(store, chunk_store=chunk_store) - assert store is g.store - if chunk_store is None: - assert store is g.chunk_store - else: - assert chunk_store is g.chunk_store - assert not g.read_only - # different path/name in v3 case - assert "group" == g.path - assert "/group" == g.name - assert "group" == g.basename - - assert isinstance(g.attrs, Attributes) - g.attrs["foo"] = "bar" - assert g.attrs["foo"] == "bar" - - 
assert isinstance(g.info, InfoReporter) - assert isinstance(repr(g.info), str) - assert isinstance(g.info._repr_html_(), str) - store.close() - - def test_group_init_errors_2(self): - store, chunk_store = self.create_store() - path = "tmp" - init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) - # array blocks group - with pytest.raises(ValueError): - Group(store, path=path, chunk_store=chunk_store) - store.close() - - class TestGroupWithMemoryStore(TestGroup): @staticmethod def create_store(): return MemoryStore(), None -# noinspection PyStatementEffect -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithMemoryStore(TestGroupWithMemoryStore, TestGroupV3): - @staticmethod - def create_store(): - return MemoryStoreV3(), None - - class TestGroupWithDirectoryStore(TestGroup): @staticmethod def create_store(): @@ -1213,16 +1029,6 @@ def create_store(): return store, None -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithDirectoryStore(TestGroupWithDirectoryStore, TestGroupV3): - @staticmethod - def create_store(): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = DirectoryStoreV3(path) - return store, None - - @skip_test_env_var("ZARR_TEST_ABS") class TestGroupWithABSStore(TestGroup): @staticmethod @@ -1238,22 +1044,6 @@ def test_pickle(self): super().test_pickle() -@skip_test_env_var("ZARR_TEST_ABS") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithABSStore(TestGroupV3): - @staticmethod - def create_store(): - container_client = abs_container() - store = ABSStoreV3(client=container_client) - store.rmdir() - return store, None - - @pytest.mark.skipif(sys.version_info < (3, 7), reason="attr not serializable in py36") - def test_pickle(self): - # internal attribute on ContainerClient isn't serializable for py36 and earlier - super().test_pickle() - - class TestGroupWithNestedDirectoryStore(TestGroup): @staticmethod def create_store(): @@ -1284,39 +1074,6 @@ def test_round_trip_nd(self): np.testing.assert_array_equal(h[name][:], data) -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithFSStore(TestGroupWithFSStore, TestGroupV3): - @staticmethod - def create_store(): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = FSStoreV3(path) - return store, None - - def test_round_trip_nd(self): - data = np.arange(1000).reshape(10, 10, 10) - name = "raw" - - store, _ = self.create_store() - f = open_group(store, path="group", mode="w") - f.create_dataset(name, data=data, chunks=(5, 5, 5), compressor=None) - h = open_group(store, path="group", mode="r") - np.testing.assert_array_equal(h[name][:], data) - - f = open_group(store, path="group2", mode="w") - - data_size = data.nbytes - group_meta_size = buffer_size(store[meta_root + "group.group.json"]) - group2_meta_size = buffer_size(store[meta_root + "group2.group.json"]) - array_meta_size = buffer_size(store[meta_root + "group/raw.array.json"]) - assert store.getsize() == data_size + group_meta_size + group2_meta_size + array_meta_size - # added case with path to complete coverage - assert store.getsize("group") == data_size + group_meta_size + array_meta_size - assert store.getsize("group2") == group2_meta_size - assert store.getsize("group/raw") == data_size + array_meta_size - - @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") class 
TestGroupWithNestedFSStore(TestGroupWithFSStore): @staticmethod @@ -1340,30 +1097,6 @@ def test_inconsistent_dimension_separator(self): ) -@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithNestedFSStore(TestGroupV3WithFSStore): - @staticmethod - def create_store(): - path = tempfile.mkdtemp() - atexit.register(atexit_rmtree, path) - store = FSStoreV3(path, key_separator="/", auto_mkdir=True) - return store, None - - def test_inconsistent_dimension_separator(self): - data = np.arange(1000).reshape(10, 10, 10) - name = "raw" - - store, _ = self.create_store() - f = open_group(store, path="group", mode="w") - - # cannot specify dimension_separator that conflicts with the store - with pytest.raises(ValueError): - f.create_dataset( - name, data=data, chunks=(5, 5, 5), compressor=None, dimension_separator="." - ) - - class TestGroupWithZipStore(TestGroup): @staticmethod def create_store(): @@ -1389,16 +1122,6 @@ def test_move(self): pass -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithZipStore(TestGroupWithZipStore, TestGroupV3): - @staticmethod - def create_store(): - path = mktemp(suffix=".zip") - atexit.register(os.remove, path) - store = ZipStoreV3(path) - return store, None - - class TestGroupWithDBMStore(TestGroup): @staticmethod def create_store(): @@ -1408,16 +1131,6 @@ def create_store(): return store, None -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithDBMStore(TestGroupWithDBMStore, TestGroupV3): - @staticmethod - def create_store(): - path = mktemp(suffix=".anydbm") - atexit.register(atexit_rmglob, path + "*") - store = DBMStoreV3(path, flag="n") - return store, None - - class TestGroupWithDBMStoreBerkeleyDB(TestGroup): @staticmethod def create_store(): @@ -1428,17 +1141,6 @@ def create_store(): return store, None -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithDBMStoreBerkeleyDB(TestGroupWithDBMStoreBerkeleyDB, TestGroupV3): - @staticmethod - def create_store(): - bsddb3 = pytest.importorskip("bsddb3") - path = mktemp(suffix=".dbm") - atexit.register(os.remove, path) - store = DBMStoreV3(path, flag="n", open=bsddb3.btopen) - return store, None - - class TestGroupWithLMDBStore(TestGroup): @staticmethod def create_store(): @@ -1449,17 +1151,6 @@ def create_store(): return store, None -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithLMDBStore(TestGroupWithLMDBStore, TestGroupV3): - @staticmethod - def create_store(): - pytest.importorskip("lmdb") - path = mktemp(suffix=".lmdb") - atexit.register(atexit_rmtree, path) - store = LMDBStoreV3(path) - return store, None - - class TestGroupWithSQLiteStore(TestGroup): def create_store(self): pytest.importorskip("sqlite3") @@ -1469,16 +1160,6 @@ def create_store(self): return store, None -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithSQLiteStore(TestGroupWithSQLiteStore, TestGroupV3): - def create_store(self): - pytest.importorskip("sqlite3") - path = mktemp(suffix=".db") - atexit.register(atexit_rmtree, path) - store = SQLiteStoreV3(path) - return store, None - - class TestGroupWithChunkStore(TestGroup): @staticmethod def create_store(): @@ -1509,41 +1190,6 @@ def test_chunk_store(self): assert expect == actual -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class 
TestGroupV3WithChunkStore(TestGroupWithChunkStore, TestGroupV3): - @staticmethod - def create_store(): - return KVStoreV3(dict()), KVStoreV3(dict()) - - def test_chunk_store(self): - # setup - store, chunk_store = self.create_store() - path = "group1" - g = self.create_group(store, path=path, chunk_store=chunk_store) - - # check attributes - assert store is g.store - assert chunk_store is g.chunk_store - - # create array - a = g.zeros("foo", shape=100, chunks=10) - assert store is a.store - assert chunk_store is a.chunk_store - a[:] = np.arange(100) - assert_array_equal(np.arange(100), a[:]) - - # check store keys - group_key = meta_root + path + ".group.json" - array_key = meta_root + path + "/foo" + ".array.json" - expect = sorted([group_key, array_key, "zarr.json"]) - actual = sorted(store.keys()) - assert expect == actual - expect = [data_root + path + "/foo/c" + str(i) for i in range(10)] - expect += ["zarr.json"] - actual = sorted(chunk_store.keys()) - assert expect == actual - - class TestGroupWithStoreCache(TestGroup): @staticmethod def create_store(): @@ -1551,58 +1197,8 @@ def create_store(): return store, None -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -class TestGroupV3WithStoreCache(TestGroupWithStoreCache, TestGroupV3): - @staticmethod - def create_store(): - store = LRUStoreCacheV3(dict(), max_size=None) - return store, None - - -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_group(zarr_version): - # test the group() convenience function - - # basic usage - if zarr_version == 2: - g = group() - assert "" == g.path - assert "/" == g.name - else: - g = group(path="group1", zarr_version=zarr_version) - assert "group1" == g.path - assert "/group1" == g.name - assert isinstance(g, Group) - - # usage with custom store - if zarr_version == 2: - store = KVStore(dict()) - path = None - else: - store = KVStoreV3(dict()) - path = "foo" - g = group(store=store, path=path) - assert isinstance(g, Group) - assert store is g.store - - # overwrite behaviour - if zarr_version == 2: - store = KVStore(dict()) - path = None - else: - store = KVStoreV3(dict()) - path = "foo" - init_array(store, path=path, shape=100, chunks=10) - with pytest.raises(ValueError): - group(store, path=path) - g = group(store, path=path, overwrite=True) - assert isinstance(g, Group) - assert store is g.store - - @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_group_writeable_mode(zarr_version, tmp_path): +def test_group_writeable_mode(tmp_path): # Regression test for https://github.com/zarr-developers/zarr-python/issues/1353 import fsspec @@ -1611,17 +1207,16 @@ def test_group_writeable_mode(zarr_version, tmp_path): assert zg.store.map == store -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_open_group(zarr_version): +def test_open_group(): # test the open_group() convenience function store = "data/group.zarr" - expected_store_type = DirectoryStore if zarr_version == 2 else DirectoryStoreV3 + expected_store_type = DirectoryStore # mode == 'w' - path = None if zarr_version == 2 else "group1" - g = open_group(store, path=path, mode="w", zarr_version=zarr_version) + path = None + g = open_group(store, path=path, mode="w") assert isinstance(g, Group) assert isinstance(g.store, expected_store_type) assert 0 == len(g) @@ -1648,44 +1243,39 @@ def test_open_group(zarr_version): # mode == 'a' shutil.rmtree(store) - g = open_group(store, path=path, mode="a", zarr_version=zarr_version) + g = 
open_group(store, path=path, mode="a") assert isinstance(g, Group) assert isinstance(g.store, expected_store_type) assert 0 == len(g) g.create_groups("foo", "bar") assert 2 == len(g) - if zarr_version == 2: - with pytest.raises(ValueError): - open_group("data/array.zarr", mode="a", zarr_version=zarr_version) - else: - # TODO, root: should this raise an error? - open_group("data/array.zarr", mode="a", zarr_version=zarr_version) + + with pytest.raises(ValueError): + open_group("data/array.zarr", mode="a") # mode in 'w-', 'x' for mode in "w-", "x": shutil.rmtree(store) - g = open_group(store, path=path, mode=mode, zarr_version=zarr_version) + g = open_group(store, path=path, mode=mode) assert isinstance(g, Group) assert isinstance(g.store, expected_store_type) assert 0 == len(g) g.create_groups("foo", "bar") assert 2 == len(g) with pytest.raises(ValueError): - open_group(store, path=path, mode=mode, zarr_version=zarr_version) - if zarr_version == 2: - with pytest.raises(ValueError): - open_group("data/array.zarr", mode=mode) + open_group(store, path=path, mode=mode) + with pytest.raises(ValueError): + open_group("data/array.zarr", mode=mode) # open with path - g = open_group(store, path="foo/bar", zarr_version=zarr_version) + g = open_group(store, path="foo/bar") assert isinstance(g, Group) assert "foo/bar" == g.path -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_group_completions(zarr_version): - path = None if zarr_version == 2 else "group1" - g = group(path=path, zarr_version=zarr_version) +def test_group_completions(): + path = None + g = group(path=path) d = dir(g) assert "foo" not in d assert "bar" not in d @@ -1713,10 +1303,9 @@ def test_group_completions(zarr_version): assert "456" not in d # not valid identifier -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_group_key_completions(zarr_version): - path = None if zarr_version == 2 else "group1" - g = group(path=path, zarr_version=zarr_version) +def test_group_key_completions(): + path = None + g = group(path=path) d = dir(g) # noinspection PyProtectedMember k = g._ipython_key_completions_() @@ -1750,12 +1339,7 @@ def test_group_key_completions(zarr_version): g.zeros("yyy", shape=100) g.zeros("zzz", shape=100) g.zeros("456", shape=100) - if zarr_version == 2: - g.zeros("asdf;", shape=100) - else: - # cannot have ; in key name for v3 - with pytest.raises(ValueError): - g.zeros("asdf;", shape=100) + g.zeros("asdf;", shape=100) d = dir(g) # noinspection PyProtectedMember @@ -1770,8 +1354,7 @@ def test_group_key_completions(zarr_version): assert "zzz" in d assert "123" not in d # not valid identifier assert "456" not in d # not valid identifier - if zarr_version == 2: - assert "asdf;" not in d # not valid identifier + assert "asdf;" not in d # not valid identifier assert "foo" in k assert "bar" in k @@ -1782,8 +1365,7 @@ def test_group_key_completions(zarr_version): assert "zzz" in k assert "123" in k assert "456" in k - if zarr_version == 2: - assert "asdf;" in k + assert "asdf;" in k def _check_tree(g, expect_bytes, expect_text): @@ -1797,12 +1379,11 @@ def _check_tree(g, expect_bytes, expect_text): isinstance(widget, ipytree.Tree) -@pytest.mark.parametrize("zarr_version", _VERSIONS) @pytest.mark.parametrize("at_root", [False, True]) -def test_tree(zarr_version, at_root): +def test_tree(at_root): # setup path = None if at_root else "group1" - g1 = group(path=path, zarr_version=zarr_version) + g1 = group(path=path) g2 = g1.create_group("foo") g3 = g1.create_group("bar") g3.create_group("baz") @@ -1811,46 
+1392,25 @@ def test_tree(zarr_version, at_root): tree_path = "/" if at_root else path # test root group - if zarr_version == 2: - expect_bytes = textwrap.dedent( - f"""\ - {tree_path} - +-- bar - | +-- baz - | +-- quux - | +-- baz (100,) float64 - +-- foo""" - ).encode() - expect_text = textwrap.dedent( - f"""\ - {tree_path} - ├── bar - │ ├── baz - │ └── quux - │ └── baz (100,) float64 - └── foo""" - ) - else: - # Almost the same as for v2, but has a path name and the - # subgroups are not necessarily sorted alphabetically. - expect_bytes = textwrap.dedent( - f"""\ - {tree_path} - +-- foo - +-- bar - +-- baz - +-- quux - +-- baz (100,) float64""" - ).encode() - expect_text = textwrap.dedent( - f"""\ - {tree_path} - ├── foo - └── bar - ├── baz - └── quux - └── baz (100,) float64""" - ) + + expect_bytes = textwrap.dedent( + f"""\ + {tree_path} + +-- bar + | +-- baz + | +-- quux + | +-- baz (100,) float64 + +-- foo""" + ).encode() + expect_text = textwrap.dedent( + f"""\ + {tree_path} + ├── bar + │ ├── baz + │ └── quux + │ └── baz (100,) float64 + └── foo""" + ) _check_tree(g1, expect_bytes, expect_text) # test different group @@ -1882,47 +1442,11 @@ def test_tree(zarr_version, at_root): _check_tree(g3, expect_bytes, expect_text) -@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") -def test_group_mismatched_store_versions(): - store_v3 = KVStoreV3(dict()) - store_v2 = KVStore(dict()) - - # separate chunk store - chunk_store_v2 = KVStore(dict()) - chunk_store_v3 = KVStoreV3(dict()) - - init_group(store_v2, path="group1", chunk_store=chunk_store_v2) - init_group(store_v3, path="group1", chunk_store=chunk_store_v3) - - g1_v3 = Group(store_v3, path="group1", read_only=True, chunk_store=chunk_store_v3) - assert isinstance(g1_v3._store, KVStoreV3) - g1_v2 = Group(store_v2, path="group1", read_only=True, chunk_store=chunk_store_v2) - assert isinstance(g1_v2._store, KVStore) - - # store and chunk_store must have the same zarr protocol version - with pytest.raises(ValueError): - Group(store_v3, path="group1", read_only=False, chunk_store=chunk_store_v2) - with pytest.raises(ValueError): - Group(store_v2, path="group1", read_only=False, chunk_store=chunk_store_v3) - with pytest.raises(ValueError): - open_group(store_v2, path="group1", chunk_store=chunk_store_v3) - with pytest.raises(ValueError): - open_group(store_v3, path="group1", chunk_store=chunk_store_v2) - - # raises Value if read_only and path is not a pre-existing group - with pytest.raises(ValueError): - Group(store_v3, path="group2", read_only=True, chunk_store=chunk_store_v3) - with pytest.raises(ValueError): - Group(store_v3, path="group2", read_only=True, chunk_store=chunk_store_v3) - - -@pytest.mark.parametrize("zarr_version", _VERSIONS) -def test_open_group_from_paths(zarr_version): +def test_open_group_from_paths(): """Verify zarr_version is applied to both the store and chunk_store.""" store = tempfile.mkdtemp() chunk_store = tempfile.mkdtemp() atexit.register(atexit_rmtree, store) atexit.register(atexit_rmtree, chunk_store) path = "g1" - g = open_group(store, path=path, chunk_store=chunk_store, zarr_version=zarr_version) - assert g._store._store_version == g._chunk_store._store_version == zarr_version + _ = open_group(store, path=path, chunk_store=chunk_store) diff --git a/tests/test_meta.py b/tests/test_meta.py index 50f51929ef..089afec781 100644 --- a/tests/test_meta.py +++ b/tests/test_meta.py @@ -1,5 +1,4 @@ import base64 -import copy import json import numpy as np @@ -16,11 +15,6 @@ encode_dtype, 
    encode_fill_value,
    decode_fill_value,
-    get_extended_dtype_info,
-    _v3_complex_types,
-    _v3_datetime_types,
-    _default_entry_point_metadata_v3,
-    Metadata3,
 )
 from zarr.util import normalize_dtype, normalize_fill_value
@@ -285,77 +279,6 @@ def test_encode_decode_array_dtype_shape():
     assert meta_dec["filters"] is None


-def test_encode_decode_array_dtype_shape_v3():
-    meta = dict(
-        shape=(100,),
-        chunk_grid=dict(type="regular", chunk_shape=(10,), separator=("/")),
-        data_type=np.dtype("(10, 10)<U4", "

From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 22 Apr 2024 18:58:44 +0200
Subject: [PATCH 0488/1078] Bump codecov/codecov-action from 3 to 4 (#1647)

* Bump codecov/codecov-action from 3 to 4

Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4.
- [Release notes](https://github.com/codecov/codecov-action/releases)
- [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/codecov/codecov-action/compare/v3...v4)

---
updated-dependencies:
- dependency-name: codecov/codecov-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot]

* Set codecov env

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Josh Moore
---
 .github/workflows/python-package.yml | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index a37fa3c63a..8ff6e9a2eb 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -80,11 +80,8 @@ jobs:
         mkdir ~/blob_emulator
         azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log &
         pytest --cov=zarr --cov-config=pyproject.toml --doctest-plus --cov-report xml --cov=./ --timeout=300
-      - uses: codecov/codecov-action@v3
+      - uses: codecov/codecov-action@v4
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
-          #files: ./coverage1.xml,./coverage2.xml # optional
-          #flags: unittests # optional
-          #name: codecov-umbrella # optional
-          #fail_ci_if_error: true # optional (default = false)
           verbose: true # optional (default = false)

From f046322478c5a08d77f0bf0fde14fe0e447f83d4 Mon Sep 17 00:00:00 2001
From: David Stansby
Date: Mon, 22 Apr 2024 19:39:36 +0100
Subject: [PATCH 0489/1078] Disallow generic Any typing (#1794)

Co-authored-by: Davis Bennett
---
 pyproject.toml             | 16 ++++++++++++++++
 src/zarr/_storage/store.py |  4 ++--
 src/zarr/attrs.py          |  2 +-
 src/zarr/convenience.py    |  4 ++--
 src/zarr/hierarchy.py      |  3 ++-
 src/zarr/storage.py        |  6 +++---
 src/zarr/util.py           |  9 +++++----
 src/zarr/v3/common.py      | 10 +++++-----
 src/zarr/v3/metadata.py    | 11 ++++++-----
 9 files changed, 42 insertions(+), 23 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index e0ca815d95..00c6333aa5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -160,6 +160,7 @@ strict_concatenate = true

 check_untyped_defs = true
 disallow_untyped_decorators = true
+disallow_any_generics = true

 [[tool.mypy.overrides]]
 module = [
@@ -176,6 +177,21 @@ module = [
 ]
 check_untyped_defs = false

+[[tool.mypy.overrides]]
+module = [
+    "zarr.v3.abc.codec",
+    "zarr.v3.codecs.bytes",
+    "zarr.v3.codecs.pipeline",
+    "zarr.v3.codecs.sharding",
+    "zarr.v3.codecs.transpose",
+    "zarr.v3.array_v2",
+    "zarr.v3.array",
+    "zarr.v3.sync",
+    "zarr.convenience",
+    "zarr.meta",
+]
+disallow_any_generics = false
+
 [tool.pytest.ini_options]
doctest_optionflags
= [ diff --git a/src/zarr/_storage/store.py b/src/zarr/_storage/store.py index 911af20fda..6e13b08cc7 100644 --- a/src/zarr/_storage/store.py +++ b/src/zarr/_storage/store.py @@ -14,7 +14,7 @@ DEFAULT_ZARR_VERSION = 2 -class BaseStore(MutableMapping): +class BaseStore(MutableMapping[str, Any]): """Abstract base class for store implementations. This is a thin wrapper over MutableMapping that provides methods to check @@ -165,7 +165,7 @@ def rmdir(self, path: str = "") -> None: # allow MutableMapping for backwards compatibility -StoreLike = Union[BaseStore, MutableMapping] +StoreLike = Union[BaseStore, MutableMapping[str, Any]] def _path_to_prefix(path: Optional[str]) -> str: diff --git a/src/zarr/attrs.py b/src/zarr/attrs.py index 65f0423ecd..89cfefc22e 100644 --- a/src/zarr/attrs.py +++ b/src/zarr/attrs.py @@ -6,7 +6,7 @@ from zarr.util import json_dumps -class Attributes(MutableMapping): +class Attributes(MutableMapping[str, Any]): """Class providing access to user attributes on an array or group. Should not be instantiated directly, will be available via the `.attrs` property of an array or group. diff --git a/src/zarr/convenience.py b/src/zarr/convenience.py index 615a019dc3..b357c26c55 100644 --- a/src/zarr/convenience.py +++ b/src/zarr/convenience.py @@ -20,9 +20,9 @@ ) from zarr.util import TreeViewer, buffer_size, normalize_storage_path -from typing import Union +from typing import Any, Union -StoreLike = Union[BaseStore, MutableMapping, str, None] +StoreLike = Union[BaseStore, MutableMapping[str, Any], str, None] _builtin_open = open # builtin open is later shadowed by a local open function diff --git a/src/zarr/hierarchy.py b/src/zarr/hierarchy.py index e30d2d7996..9044c1681e 100644 --- a/src/zarr/hierarchy.py +++ b/src/zarr/hierarchy.py @@ -1,5 +1,6 @@ from collections.abc import MutableMapping from itertools import islice +from typing import Any import numpy as np @@ -48,7 +49,7 @@ ) -class Group(MutableMapping): +class Group(MutableMapping[str, Any]): """Instantiate a group from an initialized store. 
Parameters diff --git a/src/zarr/storage.py b/src/zarr/storage.py index a7bd22a6b9..b98cee99dd 100644 --- a/src/zarr/storage.py +++ b/src/zarr/storage.py @@ -99,7 +99,7 @@ Path = Union[str, bytes, None] # allow MutableMapping for backwards compatibility -StoreLike = Union[BaseStore, MutableMapping] +StoreLike = Union[BaseStore, MutableMapping[str, Any]] def contains_array(store: StoreLike, path: Path = None) -> bool: @@ -202,7 +202,7 @@ def listdir(store: BaseStore, path: Path = None): def _getsize(store: BaseStore, path: Path = None) -> int: # compute from size of values - if path and path in store: + if isinstance(path, str) and path in store: v = store[path] size = buffer_size(v) else: @@ -584,7 +584,7 @@ def _init_group_metadata( store[key] = encode_group_metadata(meta) -def _dict_store_keys(d: Dict, prefix="", cls=dict): +def _dict_store_keys(d: dict[str, Any], prefix="", cls=dict): for k in d.keys(): v = d[k] if isinstance(v, cls): diff --git a/src/zarr/util.py b/src/zarr/util.py index 35ecc64bba..0588e1a558 100644 --- a/src/zarr/util.py +++ b/src/zarr/util.py @@ -20,6 +20,7 @@ ) import numpy as np +import numpy.typing as npt from asciitree import BoxStyle, LeftAligned from asciitree.traversal import Traversal from numcodecs.compat import ( @@ -36,7 +37,7 @@ ValueType = TypeVar("ValueType") -def flatten(arg: Iterable) -> Iterable: +def flatten(arg: Iterable[Any]) -> Iterable[Any]: for element in arg: if isinstance(element, Iterable) and not isinstance(element, (str, bytes)): yield from flatten(element) @@ -179,7 +180,7 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl return chunks -def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]: +def normalize_dtype(dtype: Union[str, npt.DTypeLike], object_codec) -> Tuple[np.dtype[Any], Any]: # convenience API for object arrays if inspect.isclass(dtype): dtype = dtype.__name__ @@ -291,7 +292,7 @@ def normalize_dimension_separator(sep: Optional[str]) -> Optional[str]: raise ValueError("dimension_separator must be either '.' or '/', found: %r" % sep) -def normalize_fill_value(fill_value, dtype: np.dtype): +def normalize_fill_value(fill_value, dtype: np.dtype[Any]): if fill_value is None or dtype.hasobject: # no fill value pass @@ -668,7 +669,7 @@ def read_full(self): def retry_call( - callabl: Callable, + callabl: Callable[..., Any], args=None, kwargs=None, exceptions: Tuple[Any, ...] 
= (), diff --git a/src/zarr/v3/common.py b/src/zarr/v3/common.py index 1caf83a764..9f0a5fdb63 100644 --- a/src/zarr/v3/common.py +++ b/src/zarr/v3/common.py @@ -1,5 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Union, Tuple, Iterable, Dict, List, TypeVar, overload +from typing import TYPE_CHECKING, Union, Tuple, Iterable, Dict, List, TypeVar, overload, Any import asyncio import contextvars from dataclasses import dataclass @@ -28,7 +28,7 @@ def product(tup: ChunkCoords) -> int: return functools.reduce(lambda x, y: x * y, tup, 1) -T = TypeVar("T", bound=Tuple) +T = TypeVar("T", bound=Tuple[Any, ...]) V = TypeVar("V") @@ -76,7 +76,7 @@ def parse_enum(data: JSON, cls: Type[E]) -> E: @dataclass(frozen=True) class ArraySpec: shape: ChunkCoords - dtype: np.dtype + dtype: np.dtype[Any] fill_value: Any def __init__(self, shape, dtype, fill_value): @@ -102,7 +102,7 @@ def parse_name(data: JSON, expected: Optional[str] = None) -> str: raise TypeError(f"Expected a string, got an instance of {type(data)}.") -def parse_configuration(data: JSON) -> dict: +def parse_configuration(data: JSON) -> JSON: if not isinstance(data, dict): raise TypeError(f"Expected dict, got {type(data)}") return data @@ -153,7 +153,7 @@ def parse_shapelike(data: Any) -> Tuple[int, ...]: return data_tuple -def parse_dtype(data: Any) -> np.dtype: +def parse_dtype(data: Any) -> np.dtype[Any]: # todo: real validation return np.dtype(data) diff --git a/src/zarr/v3/metadata.py b/src/zarr/v3/metadata.py index a5e8927311..573b8484f0 100644 --- a/src/zarr/v3/metadata.py +++ b/src/zarr/v3/metadata.py @@ -4,6 +4,7 @@ from dataclasses import dataclass, field import json import numpy as np +import numpy.typing as npt from zarr.v3.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.v3.chunk_key_encodings import ChunkKeyEncoding, parse_separator @@ -90,7 +91,7 @@ def to_numpy_shortname(self) -> str: return data_type_to_numpy[self] @classmethod - def from_dtype(cls, dtype: np.dtype) -> DataType: + def from_dtype(cls, dtype: np.dtype[Any]) -> DataType: dtype_to_data_type = { "|b1": "bool", "bool": "bool", @@ -111,7 +112,7 @@ def from_dtype(cls, dtype: np.dtype) -> DataType: @dataclass(frozen=True) class ArrayMetadata(Metadata): shape: ChunkCoords - data_type: np.dtype + data_type: np.dtype[Any] chunk_grid: ChunkGrid chunk_key_encoding: ChunkKeyEncoding fill_value: Any @@ -176,7 +177,7 @@ def _validate_metadata(self) -> None: self.codecs.validate(self) @property - def dtype(self) -> np.dtype: + def dtype(self) -> np.dtype[Any]: return self.data_type @property @@ -238,7 +239,7 @@ def to_dict(self) -> Dict[str, Any]: class ArrayV2Metadata(Metadata): shape: ChunkCoords chunks: ChunkCoords - dtype: np.dtype + dtype: np.dtype[Any] fill_value: Union[None, int, float] = 0 order: Literal["C", "F"] = "C" filters: Optional[List[Dict[str, Any]]] = None @@ -251,7 +252,7 @@ def __init__( self, *, shape: ChunkCoords, - dtype: np.dtype, + dtype: npt.DTypeLike, chunks: ChunkCoords, fill_value: Any, order: Literal["C", "F"], From b98f6941912da99dcd8350c49a5306e74af05e14 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Mon, 22 Apr 2024 21:26:31 +0200 Subject: [PATCH 0490/1078] Update release.rst for v2.17.2 (#1778) * Update release.rst for v2.17.2 * Minor edits --------- Co-authored-by: Joe Hamman --- docs/release.rst | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 75193bc3e3..3b5ab631df 100644 --- a/docs/release.rst +++ 
b/docs/release.rst @@ -37,15 +37,16 @@ Maintenance Enhancements ~~~~~~~~~~~~ + * [v3] Dramatically reduce number of ``__contains__`` requests in favor of optimistically calling `__getitem__` and handling any error that may arise. - By :user:`Deepak Cherian `. + By :user:`Deepak Cherian ` :issue:`1741`. * [v3] Reuse the downloaded array metadata when creating an ``Array``. - By :user:`Deepak Cherian `. + By :user:`Deepak Cherian ` :issue:`1734`. * Optimize ``Array.info`` so that it calls `getsize` only once. - By :user:`Deepak Cherian `. + By :user:`Deepak Cherian ` :issue:`1733`. * Override IPython ``_repr_*_`` methods to avoid expensive lookups against object stores. By :user:`Deepak Cherian ` :issue:`1716`. @@ -53,6 +54,21 @@ Enhancements * FSStore now raises rather than return bad data. By :user:`Martin Durant ` and :user:`Ian Carroll ` :issue:`1604`. +* Avoid redundant ``__contains__``. + By :user:`Deepak Cherian ` :issue:`1739`. + +Docs +~~~~ + +* Fix link to GCSMap in ``tutorial.rst``. + By :user:`Daniel Jahn ` :issue:`1689`. + +* Endorse `SPEC0000 `_ and state version support policy in ``installation.rst``. + By :user:`Sanket Verma ` :issue:`1665`. + +* Migrate v1 and v2 specification to `Zarr-Specs `_. + By :user:`Sanket Verma ` :issue:`1582`. + Maintenance ~~~~~~~~~~~ @@ -61,7 +77,12 @@ Maintenance * Bump minimum supported NumPy version to 1.23 (per spec 0000) By :user:`Joe Hamman ` :issue:`1719`. - + +* Minor fixes: Using ``is`` instead of ``type`` and removing unnecessary ``None``. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1737`. + +* Fix tests failure related to Pytest 8. + By :user:`David Stansby ` :issue:`1714`. .. _release_2.17.1: From dd3dd96c7a5b7891bfa6be6ff9e989b49d00b0f5 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 22 Apr 2024 12:45:07 -0700 Subject: [PATCH 0491/1078] Deprecate the experimental v3 implementation (#1802) * deprecate(exp-v3): Add a future warning about the pending removal of the experimental v3 implementation * ignore warning * add test --- pyproject.toml | 1 + zarr/_storage/store.py | 13 +++++++++++++ zarr/tests/test_storage_v3.py | 20 +++++++++++++++++++- 3 files changed, 33 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0be79f990e..904c974424 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -137,6 +137,7 @@ filterwarnings = [ "error:::zarr.*", "ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning", "ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning", + "ignore:The experimental Zarr V3 implementation in this version .*:FutureWarning", ] diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 0a08080548..69986ecadd 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -1,5 +1,6 @@ import abc import os +import warnings from collections import defaultdict from collections.abc import MutableMapping from copy import copy @@ -23,9 +24,21 @@ DEFAULT_ZARR_VERSION: ZARR_VERSION = 2 v3_api_available = os.environ.get("ZARR_V3_EXPERIMENTAL_API", "0").lower() not in ["0", "false"] +_has_warned_about_v3 = False # to avoid printing the warning multiple times def assert_zarr_v3_api_available(): + # we issue a warning about the experimental v3 implementation when it is first used + global _has_warned_about_v3 + if v3_api_available and not _has_warned_about_v3: + warnings.warn( + "The experimental Zarr V3 implementation in this version of Zarr-Python is not " + "in alignment with the final V3 specification. 
This version will be removed in " + "Zarr-Python 3 in favor of a spec compliant version.", + FutureWarning, + stacklevel=1, + ) + _has_warned_about_v3 = True if not v3_api_available: raise NotImplementedError( "# V3 reading and writing is experimental! To enable support, set:\n" diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index e15b2db743..c096f9cb02 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -4,12 +4,18 @@ import inspect import os import tempfile +import warnings import numpy as np import pytest import zarr -from zarr._storage.store import _get_hierarchy_metadata, v3_api_available, StorageTransformer +from zarr._storage.store import ( + _get_hierarchy_metadata, + assert_zarr_v3_api_available, + v3_api_available, + StorageTransformer, +) from zarr._storage.v3_storage_transformers import ShardingStorageTransformer, v3_sharding_available from zarr.core import Array from zarr.meta import _default_entry_point_metadata_v3 @@ -668,6 +674,18 @@ def test_top_level_imports(): assert not hasattr(zarr, store_name) # pragma: no cover +def test_assert_zarr_v3_api_available_warns_once(): + import zarr._storage.store + + zarr._storage.store._has_warned_about_v3 = False + warnings.resetwarnings() + with pytest.warns() as record: + assert_zarr_v3_api_available() + assert_zarr_v3_api_available() + assert len(record) == 1 + assert "The experimental Zarr V3 implementation" in str(record[0].message) + + def _get_public_and_dunder_methods(some_class): return set( name From 0a29fb3e40e57dd8de62f786a18c807982d5a2da Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 20:26:11 +0000 Subject: [PATCH 0492/1078] chore: update pre-commit hooks (#1779) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.5 → v0.3.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.5...v0.3.7) - [github.com/psf/black: 24.3.0 → 24.4.0](https://github.com/psf/black/compare/24.3.0...24.4.0) - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v4.6.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v4.6.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 24ff72a12f..0aa13b31a3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,11 +8,11 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.3.5' + rev: 'v0.4.1' hooks: - id: ruff - repo: https://github.com/psf/black - rev: 24.3.0 + rev: 24.4.0 hooks: - id: black - repo: https://github.com/codespell-project/codespell @@ -20,7 +20,7 @@ repos: hooks: - id: codespell - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy From 9d046ea0d2878af7d15b3de3ec3036fe31661340 Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Mon, 22 Apr 2024 14:34:17 -0600 Subject: [PATCH 0493/1078] Fix `is_total_slice` for size-1 dimensions (#1800) Closes #1730 Co-authored-by: Ryan Abernathey Co-authored-by: Joe Hamman --- docs/release.rst | 2 ++ zarr/tests/test_util.py | 9 +++++++++ zarr/util.py | 13 +++++++++++-- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 3b5ab631df..07c2a47e7c 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -20,6 +20,8 @@ Unreleased Enhancements ~~~~~~~~~~~~ +* Performance improvement for reading and writing chunks if any of the dimensions is size 1. :issue:`1730` + By :user:`Deepak Cherian `. Docs diff --git a/zarr/tests/test_util.py b/zarr/tests/test_util.py index 1f7efc9214..d908c7b2d7 100644 --- a/zarr/tests/test_util.py +++ b/zarr/tests/test_util.py @@ -89,6 +89,15 @@ def test_is_total_slice(): assert not is_total_slice((slice(0, 50), slice(0, 50)), (100, 100)) assert not is_total_slice((slice(0, 100, 2), slice(0, 100)), (100, 100)) + # size-1 dimension edge-case + # https://github.com/zarr-developers/zarr-python/issues/1730 + assert is_total_slice((slice(0, 1),), (1,)) + # this is an equivalent selection (without a slice) + assert is_total_slice((0,), (1,)) + # same for multidimensional selection + assert is_total_slice((slice(0, 1), slice(0, 10)), (1, 10)) + assert is_total_slice((0, slice(0, 10)), (1, 10)) + with pytest.raises(TypeError): is_total_slice("foo", (100,)) diff --git a/zarr/util.py b/zarr/util.py index 848f1ed114..e58aed80ab 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -234,8 +234,17 @@ def is_total_slice(item, shape: Tuple[int]) -> bool: if isinstance(item, tuple): return all( ( - isinstance(it, slice) - and ((it == slice(None)) or ((it.stop - it.start == sh) and (it.step in [1, None]))) + ( + isinstance(it, slice) + and ( + (it == slice(None)) + or ((it.stop - it.start == sh) and (it.step in [1, None])) + ) + ) + # The only scalar edge case, indexing with int 0 along a size-1 dimension + # is identical to a total slice + # https://github.com/zarr-developers/zarr-python/issues/1730 + or (isinstance(it, int) and it == 0 and sh == 1) ) for it, sh in zip(item, shape) ) From f7993db7883fc0c132e1c5cdd6fe2cd9a0a6c9d9 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 22 Apr 2024 17:09:44 -0700 Subject: [PATCH 0494/1078] V3 update pre commit (#1808) --- .pre-commit-config.yaml | 6 +++--- src/zarr/convenience.py | 1 + src/zarr/n5.py | 4 ++-- src/zarr/storage.py | 1 + src/zarr/util.py | 2 +- src/zarr/v3/common.py | 6 ++---- tests/v3/test_common.py | 15 +++++---------- tests/v3/test_metadata.py | 12 ++++-------- 8 files changed, 19 insertions(+), 28 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d4aee4ce86..3fec787b74 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: 'v0.2.1' + rev: 'v0.4.1' hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -18,11 +18,11 @@ 
repos: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo,zar", "-S", "fixture"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.8.0 + rev: v1.9.0 hooks: - id: mypy files: src diff --git a/src/zarr/convenience.py b/src/zarr/convenience.py index b357c26c55..e4bbade527 100644 --- a/src/zarr/convenience.py +++ b/src/zarr/convenience.py @@ -1,4 +1,5 @@ """Convenience functions for storing and loading data.""" + import itertools import os import re diff --git a/src/zarr/n5.py b/src/zarr/n5.py index 79bab20576..1293d1739b 100644 --- a/src/zarr/n5.py +++ b/src/zarr/n5.py @@ -1,5 +1,5 @@ -"""This module contains a storage class and codec to support the N5 format. -""" +"""This module contains a storage class and codec to support the N5 format.""" + import os import struct import sys diff --git a/src/zarr/storage.py b/src/zarr/storage.py index b98cee99dd..ae596756f8 100644 --- a/src/zarr/storage.py +++ b/src/zarr/storage.py @@ -14,6 +14,7 @@ path) and a `getsize` method (return the size in bytes of a given value). """ + import atexit import errno import glob diff --git a/src/zarr/util.py b/src/zarr/util.py index 0588e1a558..8751b39cdc 100644 --- a/src/zarr/util.py +++ b/src/zarr/util.py @@ -518,7 +518,7 @@ def __init__(self, group, expand=False, level=None): UP_AND_RIGHT="\u2514", HORIZONTAL="\u2500", VERTICAL="\u2502", - VERTICAL_AND_RIGHT="\u251C", + VERTICAL_AND_RIGHT="\u251c", ) def __bytes__(self): diff --git a/src/zarr/v3/common.py b/src/zarr/v3/common.py index 9f0a5fdb63..6940ec3fe3 100644 --- a/src/zarr/v3/common.py +++ b/src/zarr/v3/common.py @@ -111,15 +111,13 @@ def parse_configuration(data: JSON) -> JSON: @overload def parse_named_configuration( data: JSON, expected_name: Optional[str] = None -) -> Tuple[str, Dict[str, JSON]]: - ... +) -> Tuple[str, Dict[str, JSON]]: ... @overload def parse_named_configuration( data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True -) -> Tuple[str, Optional[Dict[str, JSON]]]: - ... +) -> Tuple[str, Optional[Dict[str, JSON]]]: ... def parse_named_configuration( diff --git a/tests/v3/test_common.py b/tests/v3/test_common.py index 33e91d793f..d9af59a38e 100644 --- a/tests/v3/test_common.py +++ b/tests/v3/test_common.py @@ -17,23 +17,19 @@ def test_product(data: Tuple[int, ...]): # todo: test -def test_concurrent_map(): - ... +def test_concurrent_map(): ... # todo: test -def test_to_thread(): - ... +def test_to_thread(): ... # todo: test -def test_enum_names(): - ... +def test_enum_names(): ... # todo: test -def test_parse_enum(): - ... +def test_parse_enum(): ... @pytest.mark.parametrize("data", [("foo", "bar"), (10, 11)]) @@ -93,5 +89,4 @@ def parse_dtype(data: Tuple[str, np.dtype]): # todo: figure out what it means to test this -def test_parse_fill_value(): - ... +def test_parse_fill_value(): ... diff --git a/tests/v3/test_metadata.py b/tests/v3/test_metadata.py index e477842259..6dacf2c700 100644 --- a/tests/v3/test_metadata.py +++ b/tests/v3/test_metadata.py @@ -9,20 +9,17 @@ # todo: test -def test_datatype_enum(): - ... +def test_datatype_enum(): ... # todo: test # this will almost certainly be a collection of tests -def test_array_metadata_v3(): - ... +def test_array_metadata_v3(): ... # todo: test # this will almost certainly be a collection of tests -def test_array_metadata_v2(): - ... +def test_array_metadata_v2(): ... 
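# A note on the stub rewrites above: the updated hooks apply the one-line
# "dummy implementations" style that Black 24 stabilized and that Ruff's
# formatter mirrors, in which a function body consisting only of "..." is
# collapsed onto the "def" line. The two spellings are equivalent; a sketch
# with a hypothetical test name:
#
#     def test_placeholder():
#         ...
#
#     def test_placeholder(): ...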
@pytest.mark.parametrize("data", [None, ("a", "b", "c"), ["a", "a", "a"]]) @@ -37,8 +34,7 @@ def parse_dimension_names_invalid(data: Any) -> None: # todo: test -def test_parse_attributes() -> None: - ... +def test_parse_attributes() -> None: ... def test_parse_zarr_format_v3_valid() -> None: From d1a0d99670d32947c33c3300f7338223a0addc15 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 23 Apr 2024 07:47:35 -0700 Subject: [PATCH 0495/1078] V3 reorg (#1809) * move v2 code/tests to v2 directories * move zarr v3 code to root * add extra doc deps * import __version__ in root __init__ --- docs/api.rst | 1 - docs/api/attrs.rst | 6 +- docs/api/codecs.rst | 8 +- docs/api/convenience.rst | 6 +- docs/api/core.rst | 6 +- docs/api/creation.rst | 6 +- docs/api/hierarchy.rst | 6 +- docs/api/n5.rst | 6 +- docs/api/storage.rst | 6 +- docs/api/sync.rst | 6 +- docs/api/v3.rst | 77 -- pyproject.toml | 42 +- src/zarr/__init__.py | 89 +- src/zarr/{_storage => abc}/__init__.py | 0 src/zarr/{v3 => }/abc/codec.py | 12 +- src/zarr/{v3 => }/abc/metadata.py | 2 +- src/zarr/{v3 => }/abc/store.py | 0 src/zarr/{v3 => }/array.py | 24 +- src/zarr/{v3 => }/array_v2.py | 28 +- src/zarr/{v3 => }/attributes.py | 4 +- src/zarr/{v3 => }/chunk_grids.py | 4 +- src/zarr/{v3 => }/chunk_key_encodings.py | 4 +- src/zarr/codecs/__init__.py | 9 + src/zarr/{v3 => }/codecs/blosc.py | 10 +- src/zarr/{v3 => }/codecs/bytes.py | 10 +- src/zarr/{v3 => }/codecs/crc32c_.py | 10 +- src/zarr/{v3 => }/codecs/gzip.py | 10 +- src/zarr/{v3 => }/codecs/pipeline.py | 18 +- src/zarr/{v3 => }/codecs/registry.py | 2 +- src/zarr/{v3 => }/codecs/sharding.py | 24 +- src/zarr/{v3 => }/codecs/transpose.py | 10 +- src/zarr/{v3 => }/codecs/zstd.py | 10 +- src/zarr/{v3 => }/common.py | 0 src/zarr/{v3 => }/config.py | 0 src/zarr/{v3 => }/group.py | 16 +- src/zarr/indexing.py | 1086 ++---------------- src/zarr/{v3 => }/metadata.py | 16 +- src/zarr/store/__init__.py | 5 + src/zarr/{v3 => }/store/core.py | 6 +- src/zarr/{v3 => }/store/local.py | 4 +- src/zarr/{v3 => }/store/memory.py | 4 +- src/zarr/{v3 => }/store/remote.py | 6 +- src/zarr/sync.py | 138 ++- src/zarr/v2/__init__.py | 54 + src/zarr/{v3/abc => v2/_storage}/__init__.py | 0 src/zarr/{ => v2}/_storage/absstore.py | 4 +- src/zarr/{ => v2}/_storage/store.py | 10 +- src/zarr/{ => v2}/attrs.py | 4 +- src/zarr/{ => v2}/codecs.py | 0 src/zarr/{ => v2}/context.py | 0 src/zarr/{ => v2}/convenience.py | 142 +-- src/zarr/{ => v2}/core.py | 94 +- src/zarr/{ => v2}/creation.py | 62 +- src/zarr/{ => v2}/errors.py | 0 src/zarr/{ => v2}/hierarchy.py | 138 +-- src/zarr/v2/indexing.py | 1080 +++++++++++++++++ src/zarr/{ => v2}/meta.py | 4 +- src/zarr/{ => v2}/meta_v1.py | 2 +- src/zarr/{ => v2}/n5.py | 12 +- src/zarr/{ => v2}/storage.py | 114 +- src/zarr/v2/sync.py | 48 + src/zarr/{ => v2}/util.py | 0 src/zarr/v3/__init__.py | 41 - src/zarr/v3/codecs/__init__.py | 9 - src/zarr/v3/indexing.py | 208 ---- src/zarr/v3/store/__init__.py | 5 - src/zarr/v3/sync.py | 120 -- tests/{ => v2}/__init__.py | 0 tests/{ => v2}/conftest.py | 5 + tests/{ => v2}/data/store.zip | Bin tests/{ => v2}/data/store/foo | 0 tests/{ => v2}/test_attrs.py | 11 +- tests/{ => v2}/test_convenience.py | 12 +- tests/{ => v2}/test_core.py | 20 +- tests/{ => v2}/test_creation.py | 14 +- tests/{ => v2}/test_dim_separator.py | 18 +- tests/{ => v2}/test_filters.py | 2 +- tests/{ => v2}/test_hierarchy.py | 12 +- tests/{ => v2}/test_indexing.py | 94 +- tests/{ => v2}/test_info.py | 6 +- tests/{ => v2}/test_meta.py | 8 +- tests/{ => v2}/test_meta_array.py | 12 
+- tests/{ => v2}/test_n5.py | 6 +- tests/{ => v2}/test_storage.py | 6 +- tests/{ => v2}/test_storage_v3.py | 0 tests/{ => v2}/test_sync.py | 10 +- tests/{ => v2}/test_util.py | 4 +- tests/{ => v2}/util.py | 4 +- tests/v3/test_codecs.py | 30 +- tests/v3/test_common.py | 6 +- tests/v3/test_group.py | 6 +- tests/v3/test_metadata.py | 2 +- 92 files changed, 2055 insertions(+), 2121 deletions(-) delete mode 100644 docs/api/v3.rst rename src/zarr/{_storage => abc}/__init__.py (100%) rename src/zarr/{v3 => }/abc/codec.py (90%) rename src/zarr/{v3 => }/abc/metadata.py (97%) rename src/zarr/{v3 => }/abc/store.py (100%) rename src/zarr/{v3 => }/array.py (97%) rename src/zarr/{v3 => }/array_v2.py (96%) rename src/zarr/{v3 => }/attributes.py (92%) rename src/zarr/{v3 => }/chunk_grids.py (94%) rename src/zarr/{v3 => }/chunk_key_encodings.py (97%) create mode 100644 src/zarr/codecs/__init__.py rename src/zarr/{v3 => }/codecs/blosc.py (95%) rename src/zarr/{v3 => }/codecs/bytes.py (92%) rename src/zarr/{v3 => }/codecs/crc32c_.py (86%) rename src/zarr/{v3 => }/codecs/gzip.py (87%) rename src/zarr/{v3 => }/codecs/pipeline.py (95%) rename src/zarr/{v3 => }/codecs/registry.py (96%) rename src/zarr/{v3 => }/codecs/sharding.py (97%) rename src/zarr/{v3 => }/codecs/transpose.py (92%) rename src/zarr/{v3 => }/codecs/zstd.py (89%) rename src/zarr/{v3 => }/common.py (100%) rename src/zarr/{v3 => }/config.py (100%) rename src/zarr/{v3 => }/group.py (97%) rename src/zarr/{v3 => }/metadata.py (96%) create mode 100644 src/zarr/store/__init__.py rename src/zarr/{v3 => }/store/core.py (94%) rename src/zarr/{v3 => }/store/local.py (98%) rename src/zarr/{v3 => }/store/memory.py (97%) rename src/zarr/{v3 => }/store/remote.py (96%) create mode 100644 src/zarr/v2/__init__.py rename src/zarr/{v3/abc => v2/_storage}/__init__.py (100%) rename src/zarr/{ => v2}/_storage/absstore.py (98%) rename src/zarr/{ => v2}/_storage/store.py (96%) rename src/zarr/{ => v2}/attrs.py (98%) rename src/zarr/{ => v2}/codecs.py (100%) rename src/zarr/{ => v2}/context.py (100%) rename src/zarr/{ => v2}/convenience.py (91%) rename src/zarr/{ => v2}/core.py (97%) rename src/zarr/{ => v2}/creation.py (92%) rename src/zarr/{ => v2}/errors.py (100%) rename src/zarr/{ => v2}/hierarchy.py (93%) create mode 100644 src/zarr/v2/indexing.py rename src/zarr/{ => v2}/meta.py (99%) rename src/zarr/{ => v2}/meta_v1.py (97%) rename src/zarr/{ => v2}/n5.py (98%) rename src/zarr/{ => v2}/storage.py (96%) create mode 100644 src/zarr/v2/sync.py rename src/zarr/{ => v2}/util.py (100%) delete mode 100644 src/zarr/v3/__init__.py delete mode 100644 src/zarr/v3/codecs/__init__.py delete mode 100644 src/zarr/v3/indexing.py delete mode 100644 src/zarr/v3/store/__init__.py delete mode 100644 src/zarr/v3/sync.py rename tests/{ => v2}/__init__.py (100%) rename tests/{ => v2}/conftest.py (60%) rename tests/{ => v2}/data/store.zip (100%) rename tests/{ => v2}/data/store/foo (100%) rename tests/{ => v2}/test_attrs.py (97%) rename tests/{ => v2}/test_convenience.py (99%) rename tests/{ => v2}/test_core.py (99%) rename tests/{ => v2}/test_creation.py (98%) rename tests/{ => v2}/test_dim_separator.py (87%) rename tests/{ => v2}/test_filters.py (99%) rename tests/{ => v2}/test_hierarchy.py (99%) rename tests/{ => v2}/test_indexing.py (95%) rename tests/{ => v2}/test_info.py (91%) rename tests/{ => v2}/test_meta.py (98%) rename tests/{ => v2}/test_meta_array.py (95%) rename tests/{ => v2}/test_n5.py (93%) rename tests/{ => v2}/test_storage.py (99%) rename tests/{ => 
v2}/test_storage_v3.py (100%) rename tests/{ => v2}/test_sync.py (97%) rename tests/{ => v2}/test_util.py (99%) rename tests/{ => v2}/util.py (97%) diff --git a/docs/api.rst b/docs/api.rst index e200dd908d..2b6e7ea516 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -13,7 +13,6 @@ API reference api/codecs api/attrs api/sync - api/v3 Indices and tables ------------------ diff --git a/docs/api/attrs.rst b/docs/api/attrs.rst index f95e63af3a..067b45fac6 100644 --- a/docs/api/attrs.rst +++ b/docs/api/attrs.rst @@ -1,6 +1,6 @@ -The Attributes class (``zarr.attrs``) -===================================== -.. module:: zarr.attrs +The Attributes class (``zarr.v2.attrs``) +======================================== +.. module:: zarr.v2.attrs .. autoclass:: Attributes diff --git a/docs/api/codecs.rst b/docs/api/codecs.rst index b50f747d74..454c5ccd20 100644 --- a/docs/api/codecs.rst +++ b/docs/api/codecs.rst @@ -1,5 +1,5 @@ -Compressors and filters (``zarr.codecs``) -========================================= +Compressors and filters (``zarr.v2.codecs``) +============================================ .. module:: zarr.codecs This module contains compressor and filter classes for use with Zarr. Please note that this module @@ -8,9 +8,9 @@ onwards, all codec classes have been moved to a separate package called Numcodec packages (Zarr and Numcodecs_) are designed to be used together. For example, a Numcodecs_ codec class can be used as a compressor for a Zarr array:: - >>> import zarr + >>> import zarr.v2 >>> from numcodecs import Blosc - >>> z = zarr.zeros(1000000, compressor=Blosc(cname='zstd', clevel=1, shuffle=Blosc.SHUFFLE)) + >>> z = zarr.v2.zeros(1000000, compressor=Blosc(cname='zstd', clevel=1, shuffle=Blosc.SHUFFLE)) Codec classes can also be used as filters. See the tutorial section on :ref:`tutorial_filters` for more information. diff --git a/docs/api/convenience.rst b/docs/api/convenience.rst index a70a90ce7c..1ff26452fa 100644 --- a/docs/api/convenience.rst +++ b/docs/api/convenience.rst @@ -1,6 +1,6 @@ -Convenience functions (``zarr.convenience``) -============================================ -.. automodule:: zarr.convenience +Convenience functions (``zarr.v2.convenience``) +=============================================== +.. automodule:: zarr.v2.convenience .. autofunction:: open .. autofunction:: save .. autofunction:: load diff --git a/docs/api/core.rst b/docs/api/core.rst index b310460e51..aacd03e2a5 100644 --- a/docs/api/core.rst +++ b/docs/api/core.rst @@ -1,5 +1,5 @@ -The Array class (``zarr.core``) -=============================== +The Array class (``zarr.v2.core``) +================================== -.. automodapi:: zarr.core +.. automodapi:: zarr.v2.core :no-heading: diff --git a/docs/api/creation.rst b/docs/api/creation.rst index 66422c0670..ad0a2ead49 100644 --- a/docs/api/creation.rst +++ b/docs/api/creation.rst @@ -1,6 +1,6 @@ -Array creation (``zarr.creation``) -================================== -.. module:: zarr.creation +Array creation (``zarr.v2.creation``) +===================================== +.. module:: zarr.v2.creation .. autofunction:: create .. autofunction:: empty .. autofunction:: zeros diff --git a/docs/api/hierarchy.rst b/docs/api/hierarchy.rst index 11a5575144..5d9280af1e 100644 --- a/docs/api/hierarchy.rst +++ b/docs/api/hierarchy.rst @@ -1,6 +1,6 @@ -Groups (``zarr.hierarchy``) -=========================== -.. module:: zarr.hierarchy +Groups (``zarr.v2.hierarchy``) +============================== +.. module:: zarr.v2.hierarchy .. autofunction:: group .. 
autofunction:: open_group diff --git a/docs/api/n5.rst b/docs/api/n5.rst index b6a8d8c61e..22e490bad4 100644 --- a/docs/api/n5.rst +++ b/docs/api/n5.rst @@ -1,5 +1,5 @@ -N5 (``zarr.n5``) -================ -.. automodule:: zarr.n5 +N5 (``zarr.v2.n5``) +=================== +.. automodule:: zarr.v2.n5 .. autoclass:: N5Store diff --git a/docs/api/storage.rst b/docs/api/storage.rst index 4321837449..d0ebd8a429 100644 --- a/docs/api/storage.rst +++ b/docs/api/storage.rst @@ -1,6 +1,6 @@ -Storage (``zarr.storage``) -========================== -.. automodule:: zarr.storage +Storage (``zarr.v2.storage``) +============================= +.. automodule:: zarr.v2.storage .. autoclass:: MemoryStore .. autoclass:: DirectoryStore diff --git a/docs/api/sync.rst b/docs/api/sync.rst index a139805e78..ff961543af 100644 --- a/docs/api/sync.rst +++ b/docs/api/sync.rst @@ -1,6 +1,6 @@ -Synchronization (``zarr.sync``) -=============================== -.. module:: zarr.sync +Synchronization (``zarr.v2.sync``) +================================== +.. module:: zarr.v2.sync .. autoclass:: ThreadSynchronizer .. autoclass:: ProcessSynchronizer diff --git a/docs/api/v3.rst b/docs/api/v3.rst deleted file mode 100644 index 7665b2ddd1..0000000000 --- a/docs/api/v3.rst +++ /dev/null @@ -1,77 +0,0 @@ -V3 Specification Implementation(``zarr._storage.v3``) -===================================================== - -This module contains the implementation of the `Zarr V3 Specification `_. - -.. warning:: - Since Zarr Python 2.12 release, this module provides experimental infrastructure for reading and - writing the upcoming V3 spec of the Zarr format. Users wishing to prepare for the migration can set - the environment variable ``ZARR_V3_EXPERIMENTAL_API=1`` to begin experimenting, however data - written with this API should be expected to become stale, as the implementation will still change. - -The new ``zarr._store.v3`` package has the necessary classes and functions for evaluating Zarr V3. -Since the design is not finalised, the classes and functions are not automatically imported into -the regular Zarr namespace. - -Code snippet for creating Zarr V3 arrays:: - - >>> import zarr - >>> z = zarr.create((10000, 10000), - >>> chunks=(100, 100), - >>> dtype='f8', - >>> compressor='default', - >>> path='path-where-you-want-zarr-v3-array', - >>> zarr_version=3) - -Further, you can use `z.info` to see details about the array you just created:: - - >>> z.info - Name : path-where-you-want-zarr-v3-array - Type : zarr.core.Array - Data type : float64 - Shape : (10000, 10000) - Chunk shape : (100, 100) - Order : C - Read-only : False - Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0) - Store type : zarr._storage.v3.KVStoreV3 - No. bytes : 800000000 (762.9M) - No. bytes stored : 557 - Storage ratio : 1436265.7 - Chunks initialized : 0/10000 - -You can also check ``Store type`` here (which indicates Zarr V3). - -.. module:: zarr._storage.v3 - -.. autoclass:: RmdirV3 -.. autoclass:: KVStoreV3 -.. autoclass:: FSStoreV3 -.. autoclass:: MemoryStoreV3 -.. autoclass:: DirectoryStoreV3 -.. autoclass:: ZipStoreV3 -.. autoclass:: RedisStoreV3 -.. autoclass:: MongoDBStoreV3 -.. autoclass:: DBMStoreV3 -.. autoclass:: LMDBStoreV3 -.. autoclass:: SQLiteStoreV3 -.. autoclass:: LRUStoreCacheV3 -.. autoclass:: ConsolidatedMetadataStoreV3 - -In v3 `storage transformers `_ -can be set via ``zarr.create(…, storage_transformers=[…])``. 
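For illustration, the call pattern described above looked roughly like this
under the experimental API (the transformer arguments here are examples only,
and both environment flags mentioned on this page must be enabled)::

    >>> import zarr
    >>> from zarr._storage.v3_storage_transformers import ShardingStorageTransformer
    >>> sharding = ShardingStorageTransformer("indexed", chunks_per_shard=2)
    >>> z = zarr.create((10000, 10000),
    >>>                 chunks=(100, 100),
    >>>                 dtype='f8',
    >>>                 storage_transformers=[sharding],
    >>>                 zarr_version=3)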
-The experimental sharding storage transformer can be tested by setting -the environment variable ``ZARR_V3_SHARDING=1``. Data written with this flag -enabled should be expected to become stale until -`ZEP 2 `_ is approved -and fully implemented. - -.. module:: zarr._storage.v3_storage_transformers - -.. autoclass:: ShardingStorageTransformer - -The abstract base class for storage transformers is - -.. module:: zarr._storage.store - -.. autoclass:: StorageTransformer diff --git a/pyproject.toml b/pyproject.toml index 00c6333aa5..3dcda98980 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,10 @@ docs = [ 'pydata-sphinx-theme', 'numpydoc', 'numcodecs[msgpack]', + "msgpack", + "lmdb", + "zstandard", + "crc32c", ] extra = [ 'attrs', @@ -164,31 +168,31 @@ disallow_any_generics = true [[tool.mypy.overrides]] module = [ - "zarr._storage.store", - "zarr._storage.v3_storage_transformers", - "zarr.v3.group", - "zarr.core", - "zarr.hierarchy", - "zarr.indexing", - "zarr.storage", - "zarr.sync", - "zarr.util", + "zarr.v2._storage.store", + "zarr.v2._storage.v3_storage_transformers", + "zarr.group", + "zarr.v2.core", + "zarr.v2.hierarchy", + "zarr.v2.indexing", + "zarr.v2.storage", + "zarr.v2.sync", + "zarr.v2.util", "tests.*", ] check_untyped_defs = false [[tool.mypy.overrides]] module = [ - "zarr.v3.abc.codec", - "zarr.v3.codecs.bytes", - "zarr.v3.codecs.pipeline", - "zarr.v3.codecs.sharding", - "zarr.v3.codecs.transpose", - "zarr.v3.array_v2", - "zarr.v3.array", - "zarr.v3.sync", - "zarr.convenience", - "zarr.meta", + "zarr.abc.codec", + "zarr.codecs.bytes", + "zarr.codecs.pipeline", + "zarr.codecs.sharding", + "zarr.codecs.transpose", + "zarr.array_v2", + "zarr.array", + "zarr.sync", + "zarr.v2.convenience", + "zarr.v2.meta", ] disallow_any_generics = false diff --git a/src/zarr/__init__.py b/src/zarr/__init__.py index 601b1295ab..9ae9dc54c4 100644 --- a/src/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -1,54 +1,45 @@ -# flake8: noqa -from zarr.codecs import * -from zarr.convenience import ( - consolidate_metadata, - copy, - copy_all, - copy_store, - load, - open, - open_consolidated, - save, - save_array, - save_group, - tree, -) -from zarr.core import Array -from zarr.creation import ( - array, - create, - empty, - empty_like, - full, - full_like, - ones, - ones_like, - open_array, - open_like, - zeros, - zeros_like, -) -from zarr.errors import CopyError, MetadataError -from zarr.hierarchy import Group, group, open_group -from zarr.n5 import N5Store, N5FSStore -from zarr.storage import ( - ABSStore, - DBMStore, - DictStore, - DirectoryStore, - KVStore, - LMDBStore, - LRUStoreCache, - MemoryStore, - MongoDBStore, - NestedDirectoryStore, - RedisStore, - SQLiteStore, - TempStore, - ZipStore, +from __future__ import annotations + +from typing import Union + +import zarr.codecs # noqa: F401 +from zarr.array import Array, AsyncArray # noqa: F401 +from zarr.array_v2 import ArrayV2 +from zarr.config import RuntimeConfiguration # noqa: F401 +from zarr.group import AsyncGroup, Group # noqa: F401 +from zarr.metadata import runtime_configuration # noqa: F401 +from zarr.store import ( # noqa: F401 + StoreLike, + make_store_path, ) -from zarr.sync import ProcessSynchronizer, ThreadSynchronizer +from zarr.sync import sync as _sync from zarr._version import version as __version__ # in case setuptools scm screw up and find version to be 0.0.0 assert not __version__.startswith("0.0.0") + + +async def open_auto_async( + store: StoreLike, + runtime_configuration_: RuntimeConfiguration = 
RuntimeConfiguration(), +) -> Union[AsyncArray, AsyncGroup]: + store_path = make_store_path(store) + try: + return await AsyncArray.open(store_path, runtime_configuration=runtime_configuration_) + except KeyError: + return await AsyncGroup.open(store_path, runtime_configuration=runtime_configuration_) + + +def open_auto( + store: StoreLike, + runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), +) -> Union[Array, ArrayV2, Group]: + object = _sync( + open_auto_async(store, runtime_configuration_), + runtime_configuration_.asyncio_loop, + ) + if isinstance(object, AsyncArray): + return Array(object) + if isinstance(object, AsyncGroup): + return Group(object) + raise TypeError(f"Unexpected object type. Got {type(object)}.") diff --git a/src/zarr/_storage/__init__.py b/src/zarr/abc/__init__.py similarity index 100% rename from src/zarr/_storage/__init__.py rename to src/zarr/abc/__init__.py diff --git a/src/zarr/v3/abc/codec.py b/src/zarr/abc/codec.py similarity index 90% rename from src/zarr/v3/abc/codec.py rename to src/zarr/abc/codec.py index d0e51ff894..1abc21b30b 100644 --- a/src/zarr/v3/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -4,17 +4,17 @@ from typing import TYPE_CHECKING, Optional import numpy as np -from zarr.v3.abc.metadata import Metadata +from zarr.abc.metadata import Metadata -from zarr.v3.common import ArraySpec -from zarr.v3.store import StorePath +from zarr.common import ArraySpec +from zarr.store import StorePath if TYPE_CHECKING: from typing_extensions import Self - from zarr.v3.common import BytesLike, SliceSelection - from zarr.v3.metadata import ArrayMetadata - from zarr.v3.config import RuntimeConfiguration + from zarr.common import BytesLike, SliceSelection + from zarr.metadata import ArrayMetadata + from zarr.config import RuntimeConfiguration class Codec(Metadata): diff --git a/src/zarr/v3/abc/metadata.py b/src/zarr/abc/metadata.py similarity index 97% rename from src/zarr/v3/abc/metadata.py rename to src/zarr/abc/metadata.py index 4fcabf72a1..f27b37cba4 100644 --- a/src/zarr/v3/abc/metadata.py +++ b/src/zarr/abc/metadata.py @@ -7,7 +7,7 @@ from dataclasses import fields, dataclass -from zarr.v3.common import JSON +from zarr.common import JSON @dataclass(frozen=True) diff --git a/src/zarr/v3/abc/store.py b/src/zarr/abc/store.py similarity index 100% rename from src/zarr/v3/abc/store.py rename to src/zarr/abc/store.py diff --git a/src/zarr/v3/array.py b/src/zarr/array.py similarity index 97% rename from src/zarr/v3/array.py rename to src/zarr/array.py index c0a00a624e..b739b310d4 100644 --- a/src/zarr/v3/array.py +++ b/src/zarr/array.py @@ -16,12 +16,12 @@ from typing import Any, Dict, Iterable, Literal, Optional, Tuple, Union import numpy as np -from zarr.v3.abc.codec import Codec +from zarr.abc.codec import Codec -# from zarr.v3.array_v2 import ArrayV2 -from zarr.v3.codecs import BytesCodec -from zarr.v3.common import ( +# from zarr.array_v2 import ArrayV2 +from zarr.codecs import BytesCodec +from zarr.common import ( ZARR_JSON, ArraySpec, ChunkCoords, @@ -29,14 +29,14 @@ SliceSelection, concurrent_map, ) -from zarr.v3.config import RuntimeConfiguration - -from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice -from zarr.v3.chunk_grids import RegularChunkGrid -from zarr.v3.chunk_key_encodings import DefaultChunkKeyEncoding, V2ChunkKeyEncoding -from zarr.v3.metadata import ArrayMetadata -from zarr.v3.store import StoreLike, StorePath, make_store_path -from zarr.v3.sync import sync +from zarr.config import RuntimeConfiguration 
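# A minimal usage sketch for the open_auto helper defined above in
# src/zarr/__init__.py, assuming a store already populated at the
# hypothetical path "data.zarr"; the array is tried first, with a
# fallback to a group when no array metadata is found.
import zarr

node = zarr.open_auto("data.zarr")
print(type(node))  # zarr.Array if array metadata resolved, else zarr.Group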
+ +from zarr.indexing import BasicIndexer, all_chunk_coords, is_total_slice +from zarr.chunk_grids import RegularChunkGrid +from zarr.chunk_key_encodings import DefaultChunkKeyEncoding, V2ChunkKeyEncoding +from zarr.metadata import ArrayMetadata +from zarr.store import StoreLike, StorePath, make_store_path +from zarr.sync import sync def parse_array_metadata(data: Any): diff --git a/src/zarr/v3/array_v2.py b/src/zarr/array_v2.py similarity index 96% rename from src/zarr/v3/array_v2.py rename to src/zarr/array_v2.py index f150d2dbd2..8c2cd3faec 100644 --- a/src/zarr/v3/array_v2.py +++ b/src/zarr/array_v2.py @@ -10,7 +10,7 @@ from numcodecs.compat import ensure_bytes, ensure_ndarray -from zarr.v3.common import ( +from zarr.common import ( ZARRAY_JSON, ZATTRS_JSON, BytesLike, @@ -20,14 +20,14 @@ concurrent_map, to_thread, ) -from zarr.v3.config import RuntimeConfiguration -from zarr.v3.indexing import BasicIndexer, all_chunk_coords, is_total_slice -from zarr.v3.metadata import ArrayV2Metadata -from zarr.v3.store import StoreLike, StorePath, make_store_path -from zarr.v3.sync import sync +from zarr.config import RuntimeConfiguration +from zarr.indexing import BasicIndexer, all_chunk_coords, is_total_slice +from zarr.metadata import ArrayV2Metadata +from zarr.store import StoreLike, StorePath, make_store_path +from zarr.sync import sync if TYPE_CHECKING: - from zarr.v3.array import Array + from zarr.array import Array @dataclass(frozen=True) @@ -441,14 +441,14 @@ def resize(self, new_shape: ChunkCoords) -> ArrayV2: async def convert_to_v3_async(self) -> Array: from sys import byteorder as sys_byteorder - from zarr.v3.abc.codec import Codec - from zarr.v3.array import Array - from zarr.v3.common import ZARR_JSON - from zarr.v3.chunk_grids import RegularChunkGrid - from zarr.v3.chunk_key_encodings import V2ChunkKeyEncoding - from zarr.v3.metadata import ArrayMetadata, DataType + from zarr.abc.codec import Codec + from zarr.array import Array + from zarr.common import ZARR_JSON + from zarr.chunk_grids import RegularChunkGrid + from zarr.chunk_key_encodings import V2ChunkKeyEncoding + from zarr.metadata import ArrayMetadata, DataType - from zarr.v3.codecs import ( + from zarr.codecs import ( BloscCodec, BloscShuffle, BytesCodec, diff --git a/src/zarr/v3/attributes.py b/src/zarr/attributes.py similarity index 92% rename from src/zarr/v3/attributes.py rename to src/zarr/attributes.py index edbc84d8aa..8086e18d7b 100644 --- a/src/zarr/v3/attributes.py +++ b/src/zarr/attributes.py @@ -3,8 +3,8 @@ from typing import TYPE_CHECKING, Any, Union if TYPE_CHECKING: - from zarr.v3.group import Group - from zarr.v3.array import Array + from zarr.group import Group + from zarr.array import Array class Attributes(MutableMapping[str, Any]): diff --git a/src/zarr/v3/chunk_grids.py b/src/zarr/chunk_grids.py similarity index 94% rename from src/zarr/v3/chunk_grids.py rename to src/zarr/chunk_grids.py index b0a2a7bb36..73557f6e4b 100644 --- a/src/zarr/v3/chunk_grids.py +++ b/src/zarr/chunk_grids.py @@ -1,9 +1,9 @@ from __future__ import annotations from typing import TYPE_CHECKING, Any, Dict from dataclasses import dataclass -from zarr.v3.abc.metadata import Metadata +from zarr.abc.metadata import Metadata -from zarr.v3.common import ( +from zarr.common import ( JSON, ChunkCoords, ChunkCoordsLike, diff --git a/src/zarr/v3/chunk_key_encodings.py b/src/zarr/chunk_key_encodings.py similarity index 97% rename from src/zarr/v3/chunk_key_encodings.py rename to src/zarr/chunk_key_encodings.py index 9889a2f04a..ebc7654dde 
100644 --- a/src/zarr/v3/chunk_key_encodings.py +++ b/src/zarr/chunk_key_encodings.py @@ -2,9 +2,9 @@ from abc import abstractmethod from typing import TYPE_CHECKING, Dict, Literal, cast from dataclasses import dataclass -from zarr.v3.abc.metadata import Metadata +from zarr.abc.metadata import Metadata -from zarr.v3.common import ( +from zarr.common import ( JSON, ChunkCoords, parse_named_configuration, diff --git a/src/zarr/codecs/__init__.py b/src/zarr/codecs/__init__.py new file mode 100644 index 0000000000..8fa0c9f7b0 --- /dev/null +++ b/src/zarr/codecs/__init__.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from zarr.codecs.blosc import BloscCodec, BloscCname, BloscShuffle # noqa: F401 +from zarr.codecs.bytes import BytesCodec, Endian # noqa: F401 +from zarr.codecs.crc32c_ import Crc32cCodec # noqa: F401 +from zarr.codecs.gzip import GzipCodec # noqa: F401 +from zarr.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation # noqa: F401 +from zarr.codecs.transpose import TransposeCodec # noqa: F401 +from zarr.codecs.zstd import ZstdCodec # noqa: F401 diff --git a/src/zarr/v3/codecs/blosc.py b/src/zarr/codecs/blosc.py similarity index 95% rename from src/zarr/v3/codecs/blosc.py rename to src/zarr/codecs/blosc.py index 479865241f..374375e6c2 100644 --- a/src/zarr/v3/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -9,15 +9,15 @@ import numpy as np from numcodecs.blosc import Blosc -from zarr.v3.abc.codec import BytesBytesCodec -from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import parse_enum, parse_named_configuration, to_thread +from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.registry import register_codec +from zarr.common import parse_enum, parse_named_configuration, to_thread if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.v3.common import JSON, ArraySpec, BytesLike - from zarr.v3.config import RuntimeConfiguration + from zarr.common import JSON, ArraySpec, BytesLike + from zarr.config import RuntimeConfiguration class BloscShuffle(Enum): diff --git a/src/zarr/v3/codecs/bytes.py b/src/zarr/codecs/bytes.py similarity index 92% rename from src/zarr/v3/codecs/bytes.py rename to src/zarr/codecs/bytes.py index f92fe5606d..1b872ac6c6 100644 --- a/src/zarr/v3/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -7,13 +7,13 @@ import numpy as np -from zarr.v3.abc.codec import ArrayBytesCodec -from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import parse_enum, parse_named_configuration +from zarr.abc.codec import ArrayBytesCodec +from zarr.codecs.registry import register_codec +from zarr.common import parse_enum, parse_named_configuration if TYPE_CHECKING: - from zarr.v3.common import JSON, ArraySpec, BytesLike - from zarr.v3.config import RuntimeConfiguration + from zarr.common import JSON, ArraySpec, BytesLike + from zarr.config import RuntimeConfiguration from typing_extensions import Self diff --git a/src/zarr/v3/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py similarity index 86% rename from src/zarr/v3/codecs/crc32c_.py rename to src/zarr/codecs/crc32c_.py index 555bdeae3b..04d5b88d70 100644 --- a/src/zarr/v3/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -7,15 +7,15 @@ from crc32c import crc32c -from zarr.v3.abc.codec import BytesBytesCodec -from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import parse_named_configuration +from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.registry import register_codec +from zarr.common 
import parse_named_configuration if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.v3.common import JSON, BytesLike, ArraySpec - from zarr.v3.config import RuntimeConfiguration + from zarr.common import JSON, BytesLike, ArraySpec + from zarr.config import RuntimeConfiguration @dataclass(frozen=True) diff --git a/src/zarr/v3/codecs/gzip.py b/src/zarr/codecs/gzip.py similarity index 87% rename from src/zarr/v3/codecs/gzip.py rename to src/zarr/codecs/gzip.py index 478eee90c1..f75f5b743e 100644 --- a/src/zarr/v3/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -4,15 +4,15 @@ from typing import TYPE_CHECKING from numcodecs.gzip import GZip -from zarr.v3.abc.codec import BytesBytesCodec -from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import parse_named_configuration, to_thread +from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.registry import register_codec +from zarr.common import parse_named_configuration, to_thread if TYPE_CHECKING: from typing import Optional, Dict from typing_extensions import Self - from zarr.v3.common import JSON, ArraySpec, BytesLike - from zarr.v3.config import RuntimeConfiguration + from zarr.common import JSON, ArraySpec, BytesLike + from zarr.config import RuntimeConfiguration def parse_gzip_level(data: JSON) -> int: diff --git a/src/zarr/v3/codecs/pipeline.py b/src/zarr/codecs/pipeline.py similarity index 95% rename from src/zarr/v3/codecs/pipeline.py rename to src/zarr/codecs/pipeline.py index 7bb872eb79..4908ee8057 100644 --- a/src/zarr/v3/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from warnings import warn -from zarr.v3.abc.codec import ( +from zarr.abc.codec import ( ArrayArrayCodec, ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, @@ -13,16 +13,16 @@ BytesBytesCodec, Codec, ) -from zarr.v3.abc.metadata import Metadata -from zarr.v3.codecs.registry import get_codec_class -from zarr.v3.common import parse_named_configuration +from zarr.abc.metadata import Metadata +from zarr.codecs.registry import get_codec_class +from zarr.common import parse_named_configuration if TYPE_CHECKING: from typing import Iterator, List, Optional, Tuple, Union - from zarr.v3.store import StorePath - from zarr.v3.metadata import ArrayMetadata - from zarr.v3.config import RuntimeConfiguration - from zarr.v3.common import JSON, ArraySpec, BytesLike, SliceSelection + from zarr.store import StorePath + from zarr.metadata import ArrayMetadata + from zarr.config import RuntimeConfiguration + from zarr.common import JSON, ArraySpec, BytesLike, SliceSelection @dataclass(frozen=True) @@ -53,7 +53,7 @@ def evolve(self, array_spec: ArraySpec) -> CodecPipeline: @classmethod def from_list(cls, codecs: List[Codec]) -> CodecPipeline: - from zarr.v3.codecs.sharding import ShardingCodec + from zarr.codecs.sharding import ShardingCodec if not any(isinstance(codec, ArrayBytesCodec) for codec in codecs): raise ValueError("Exactly one array-to-bytes codec is required.") diff --git a/src/zarr/v3/codecs/registry.py b/src/zarr/codecs/registry.py similarity index 96% rename from src/zarr/v3/codecs/registry.py rename to src/zarr/codecs/registry.py index 4cf2736685..140e1372ef 100644 --- a/src/zarr/v3/codecs/registry.py +++ b/src/zarr/codecs/registry.py @@ -3,7 +3,7 @@ if TYPE_CHECKING: from typing import Dict, Type - from zarr.v3.abc.codec import Codec + from zarr.abc.codec import Codec from importlib.metadata import EntryPoint, entry_points as get_entry_points diff 
--git a/src/zarr/v3/codecs/sharding.py b/src/zarr/codecs/sharding.py similarity index 97% rename from src/zarr/v3/codecs/sharding.py rename to src/zarr/codecs/sharding.py index 0385154c0f..948e46f132 100644 --- a/src/zarr/v3/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -6,17 +6,17 @@ import numpy as np -from zarr.v3.abc.codec import ( +from zarr.abc.codec import ( Codec, ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, ) -from zarr.v3.codecs.bytes import BytesCodec -from zarr.v3.codecs.crc32c_ import Crc32cCodec -from zarr.v3.codecs.pipeline import CodecPipeline -from zarr.v3.codecs.registry import register_codec -from zarr.v3.common import ( +from zarr.codecs.bytes import BytesCodec +from zarr.codecs.crc32c_ import Crc32cCodec +from zarr.codecs.pipeline import CodecPipeline +from zarr.codecs.registry import register_codec +from zarr.common import ( ArraySpec, ChunkCoordsLike, concurrent_map, @@ -25,14 +25,14 @@ parse_shapelike, product, ) -from zarr.v3.chunk_grids import RegularChunkGrid -from zarr.v3.indexing import ( +from zarr.chunk_grids import RegularChunkGrid +from zarr.indexing import ( BasicIndexer, c_order_iter, is_total_slice, morton_order_iter, ) -from zarr.v3.metadata import ( +from zarr.metadata import ( ArrayMetadata, runtime_configuration as make_runtime_configuration, parse_codecs, @@ -42,14 +42,14 @@ from typing import Awaitable, Callable, Dict, Iterator, List, Optional, Set, Tuple from typing_extensions import Self - from zarr.v3.store import StorePath - from zarr.v3.common import ( + from zarr.store import StorePath + from zarr.common import ( JSON, ChunkCoords, BytesLike, SliceSelection, ) - from zarr.v3.config import RuntimeConfiguration + from zarr.config import RuntimeConfiguration MAX_UINT_64 = 2**64 - 1 diff --git a/src/zarr/v3/codecs/transpose.py b/src/zarr/codecs/transpose.py similarity index 92% rename from src/zarr/v3/codecs/transpose.py rename to src/zarr/codecs/transpose.py index b663230e35..c63327f6fc 100644 --- a/src/zarr/v3/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -3,17 +3,17 @@ from dataclasses import dataclass, replace -from zarr.v3.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration +from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration if TYPE_CHECKING: - from zarr.v3.config import RuntimeConfiguration + from zarr.config import RuntimeConfiguration from typing import TYPE_CHECKING, Optional, Tuple from typing_extensions import Self import numpy as np -from zarr.v3.abc.codec import ArrayArrayCodec -from zarr.v3.codecs.registry import register_codec +from zarr.abc.codec import ArrayArrayCodec +from zarr.codecs.registry import register_codec def parse_transpose_order(data: Union[JSON, Iterable[int]]) -> Tuple[int, ...]: @@ -65,7 +65,7 @@ def evolve(self, array_spec: ArraySpec) -> Self: return self def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: - from zarr.v3.common import ArraySpec + from zarr.common import ArraySpec return ArraySpec( shape=tuple(chunk_spec.shape[self.order[i]] for i in range(chunk_spec.ndim)), diff --git a/src/zarr/v3/codecs/zstd.py b/src/zarr/codecs/zstd.py similarity index 89% rename from src/zarr/v3/codecs/zstd.py rename to src/zarr/codecs/zstd.py index 774bb8bdbb..41db850ab6 100644 --- a/src/zarr/v3/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -5,15 +5,15 @@ from zstandard import ZstdCompressor, ZstdDecompressor -from zarr.v3.abc.codec import BytesBytesCodec -from zarr.v3.codecs.registry import 
register_codec -from zarr.v3.common import parse_named_configuration, to_thread +from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.registry import register_codec +from zarr.common import parse_named_configuration, to_thread if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.v3.config import RuntimeConfiguration - from zarr.v3.common import BytesLike, JSON, ArraySpec + from zarr.config import RuntimeConfiguration + from zarr.common import BytesLike, JSON, ArraySpec def parse_zstd_level(data: JSON) -> int: diff --git a/src/zarr/v3/common.py b/src/zarr/common.py similarity index 100% rename from src/zarr/v3/common.py rename to src/zarr/common.py diff --git a/src/zarr/v3/config.py b/src/zarr/config.py similarity index 100% rename from src/zarr/v3/config.py rename to src/zarr/config.py diff --git a/src/zarr/v3/group.py b/src/zarr/group.py similarity index 97% rename from src/zarr/v3/group.py rename to src/zarr/group.py index fcd2fea215..aff24ed0d9 100644 --- a/src/zarr/v3/group.py +++ b/src/zarr/group.py @@ -5,14 +5,14 @@ import json import logging from typing import Any, Dict, Literal, Optional, Union, AsyncIterator, List -from zarr.v3.abc.metadata import Metadata - -from zarr.v3.array import AsyncArray, Array -from zarr.v3.attributes import Attributes -from zarr.v3.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON -from zarr.v3.config import RuntimeConfiguration, SyncConfiguration -from zarr.v3.store import StoreLike, StorePath, make_store_path -from zarr.v3.sync import SyncMixin, sync +from zarr.abc.metadata import Metadata + +from zarr.array import AsyncArray, Array +from zarr.attributes import Attributes +from zarr.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON +from zarr.config import RuntimeConfiguration, SyncConfiguration +from zarr.store import StoreLike, StorePath, make_store_path +from zarr.sync import SyncMixin, sync logger = logging.getLogger("zarr.group") diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index b72d5a255d..7c1a4df226 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -1,187 +1,83 @@ -import collections +from __future__ import annotations + import itertools import math -import numbers - -import numpy as np - - -from zarr.errors import ( - ArrayIndexError, - NegativeStepError, - err_too_many_indices, - VindexInvalidSelectionError, - BoundsCheckError, -) - - -def is_integer(x): - """True if x is an integer (both pure Python or NumPy). - - Note that Python's bool is considered an integer too. - """ - return isinstance(x, numbers.Integral) - - -def is_integer_list(x): - """True if x is a list of integers. +from typing import Iterator, List, NamedTuple, Optional, Tuple - This function assumes ie *does not check* that all elements of the list - have the same type. Mixed type lists will result in other errors that will - bubble up anyway. 
- """ - return isinstance(x, list) and len(x) > 0 and is_integer(x[0]) - - -def is_integer_array(x, ndim=None): - t = not np.isscalar(x) and hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype.kind in "ui" - if ndim is not None: - t = t and len(x.shape) == ndim - return t - - -def is_bool_array(x, ndim=None): - t = hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype == bool - if ndim is not None: - t = t and len(x.shape) == ndim - return t - - -def is_scalar(value, dtype): - if np.isscalar(value): - return True - if isinstance(value, tuple) and dtype.names and len(value) == len(dtype.names): - return True - return False +from zarr.common import ChunkCoords, Selection, SliceSelection, product -def is_pure_fancy_indexing(selection, ndim): - """Check whether a selection contains only scalars or integer array-likes. - - Parameters - ---------- - selection : tuple, slice, or scalar - A valid selection value for indexing into arrays. - - Returns - ------- - is_pure : bool - True if the selection is a pure fancy indexing expression (ie not mixed - with boolean or slices). - """ - if ndim == 1: - if is_integer_list(selection) or is_integer_array(selection): - return True - # if not, we go through the normal path below, because a 1-tuple - # of integers is also allowed. - no_slicing = ( - isinstance(selection, tuple) - and len(selection) == ndim - and not (any(isinstance(elem, slice) or elem is Ellipsis for elem in selection)) - ) - return ( - no_slicing - and all( - is_integer(elem) or is_integer_list(elem) or is_integer_array(elem) - for elem in selection - ) - and any(is_integer_list(elem) or is_integer_array(elem) for elem in selection) - ) - - -def is_pure_orthogonal_indexing(selection, ndim): - if not ndim: - return False +def _ensure_tuple(v: Selection) -> SliceSelection: + if not isinstance(v, tuple): + v = (v,) + return v - # Case 1: Selection is a single iterable of integers - if is_integer_list(selection) or is_integer_array(selection, ndim=1): - return True - # Case two: selection contains either zero or one integer iterables. - # All other selection elements are slices or integers - return ( - isinstance(selection, tuple) - and len(selection) == ndim - and sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 - and all( - is_integer_list(elem) or is_integer_array(elem) or isinstance(elem, (int, slice)) - for elem in selection - ) +def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords): + raise IndexError( + "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) ) -def normalize_integer_selection(dim_sel, dim_len): - # normalize type to int - dim_sel = int(dim_sel) +def _err_negative_step(): + raise IndexError("only slices with step >= 1 are supported") - # handle wraparound - if dim_sel < 0: - dim_sel = dim_len + dim_sel - # handle out of bounds - if dim_sel >= dim_len or dim_sel < 0: - raise BoundsCheckError(dim_len) +def _check_selection_length(selection: SliceSelection, shape: ChunkCoords): + if len(selection) > len(shape): + _err_too_many_indices(selection, shape) - return dim_sel +def _ensure_selection( + selection: Selection, + shape: ChunkCoords, +) -> SliceSelection: + selection = _ensure_tuple(selection) -ChunkDimProjection = collections.namedtuple( - "ChunkDimProjection", ("dim_chunk_ix", "dim_chunk_sel", "dim_out_sel") -) -"""A mapping from chunk to output array for a single dimension. 
+ # fill out selection if not completely specified + if len(selection) < len(shape): + selection += (slice(None),) * (len(shape) - len(selection)) -Parameters ----------- -dim_chunk_ix - Index of chunk. -dim_chunk_sel - Selection of items from chunk array. -dim_out_sel - Selection of items in target (output) array. + # check selection not too long + _check_selection_length(selection, shape) -""" + return selection -class IntDimIndexer: - def __init__(self, dim_sel, dim_len, dim_chunk_len): - # normalize - dim_sel = normalize_integer_selection(dim_sel, dim_len) +class _ChunkDimProjection(NamedTuple): + dim_chunk_ix: int + dim_chunk_sel: slice + dim_out_sel: Optional[slice] - # store attributes - self.dim_sel = dim_sel - self.dim_len = dim_len - self.dim_chunk_len = dim_chunk_len - self.nitems = 1 - def __iter__(self): - dim_chunk_ix = self.dim_sel // self.dim_chunk_len - dim_offset = dim_chunk_ix * self.dim_chunk_len - dim_chunk_sel = self.dim_sel - dim_offset - dim_out_sel = None - yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) +def _ceildiv(a, b): + return math.ceil(a / b) -def ceildiv(a, b): - return math.ceil(a / b) +class _SliceDimIndexer: + dim_sel: slice + dim_len: int + dim_chunk_len: int + nitems: int + start: int + stop: int + step: int -class SliceDimIndexer: - def __init__(self, dim_sel, dim_len, dim_chunk_len): - # normalize + def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int): self.start, self.stop, self.step = dim_sel.indices(dim_len) if self.step < 1: - raise NegativeStepError() + _err_negative_step() - # store attributes self.dim_len = dim_len self.dim_chunk_len = dim_chunk_len - self.nitems = max(0, ceildiv((self.stop - self.start), self.step)) - self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) + self.nitems = max(0, _ceildiv((self.stop - self.start), self.step)) + self.nchunks = _ceildiv(self.dim_len, self.dim_chunk_len) - def __iter__(self): + def __iter__(self) -> Iterator[_ChunkDimProjection]: # figure out the range of chunks we need to visit dim_chunk_ix_from = self.start // self.dim_chunk_len - dim_chunk_ix_to = ceildiv(self.stop, self.dim_chunk_len) + dim_chunk_ix_to = _ceildiv(self.stop, self.dim_chunk_len) # iterate over chunks in range for dim_chunk_ix in range(dim_chunk_ix_from, dim_chunk_ix_to): @@ -199,7 +95,7 @@ def __iter__(self): if remainder: dim_chunk_sel_start += self.step - remainder # compute number of previous items, provides offset into output array - dim_out_offset = ceildiv((dim_offset - self.start), self.step) + dim_out_offset = _ceildiv((dim_offset - self.start), self.step) else: # selection starts within current chunk @@ -215,522 +111,38 @@ def __iter__(self): dim_chunk_sel_stop = self.stop - dim_offset dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step) - dim_chunk_nitems = ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) - - # If there are no elements on the selection within this chunk, then skip - if dim_chunk_nitems == 0: - continue - + dim_chunk_nitems = _ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) dim_out_sel = slice(dim_out_offset, dim_out_offset + dim_chunk_nitems) - yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) - - -def check_selection_length(selection, shape): - if len(selection) > len(shape): - err_too_many_indices(selection, shape) - - -def replace_ellipsis(selection, shape): - selection = ensure_tuple(selection) - - # count number of ellipsis present - n_ellipsis = sum(1 for i in selection if i is Ellipsis) - - 
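# A quick sketch of the selection-normalization helpers added above:
# _ensure_tuple wraps a bare selection in a tuple, and _ensure_selection
# pads it with full slices, one per remaining dimension.
assert _ensure_tuple(slice(1, 3)) == (slice(1, 3),)
assert _ensure_selection(slice(1, 3), shape=(4, 5)) == (slice(1, 3), slice(None))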
if n_ellipsis > 1: - # more than 1 is an error - raise IndexError("an index can only have a single ellipsis ('...')") + yield _ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) - elif n_ellipsis == 1: - # locate the ellipsis, count how many items to left and right - n_items_l = selection.index(Ellipsis) # items to left of ellipsis - n_items_r = len(selection) - (n_items_l + 1) # items to right of ellipsis - n_items = len(selection) - 1 # all non-ellipsis items - if n_items >= len(shape): - # ellipsis does nothing, just remove it - selection = tuple(i for i in selection if i != Ellipsis) +class _ChunkProjection(NamedTuple): + chunk_coords: ChunkCoords + chunk_selection: SliceSelection + out_selection: SliceSelection - else: - # replace ellipsis with as many slices are needed for number of dims - new_item = selection[:n_items_l] + ((slice(None),) * (len(shape) - n_items)) - if n_items_r: - new_item += selection[-n_items_r:] - selection = new_item - - # fill out selection if not completely specified - if len(selection) < len(shape): - selection += (slice(None),) * (len(shape) - len(selection)) - - # check selection not too long - check_selection_length(selection, shape) - - return selection - - -def replace_lists(selection): - return tuple( - np.asarray(dim_sel) if isinstance(dim_sel, list) else dim_sel for dim_sel in selection - ) - - -def ensure_tuple(v): - if not isinstance(v, tuple): - v = (v,) - return v - -ChunkProjection = collections.namedtuple( - "ChunkProjection", ("chunk_coords", "chunk_selection", "out_selection") -) -"""A mapping of items from chunk to output array. Can be used to extract items from the -chunk array for loading into an output array. Can also be used to extract items from a -value array for setting/updating in a chunk array. - -Parameters ----------- -chunk_coords - Indices of chunk. -chunk_selection - Selection of items from chunk array. -out_selection - Selection of items in target (output) array. 
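# A worked example of the new _SliceDimIndexer above: the slice 2:7 over
# a dimension of length 10 with chunk length 4 touches chunks 0 and 1.
for p in _SliceDimIndexer(slice(2, 7), dim_len=10, dim_chunk_len=4):
    print(p.dim_chunk_ix, p.dim_chunk_sel, p.dim_out_sel)
# chunk 0: slice(2, 4, 1) -> output slice(0, 2)  (global items 2..3)
# chunk 1: slice(0, 3, 1) -> output slice(2, 5)  (global items 4..6)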
- -""" - - -def is_slice(s): - return isinstance(s, slice) - - -def is_contiguous_slice(s): - return is_slice(s) and (s.step is None or s.step == 1) - - -def is_positive_slice(s): - return is_slice(s) and (s.step is None or s.step >= 1) - - -def is_contiguous_selection(selection): - selection = ensure_tuple(selection) - return all((is_integer_array(s) or is_contiguous_slice(s) or s == Ellipsis) for s in selection) - - -def is_basic_selection(selection): - selection = ensure_tuple(selection) - return all(is_integer(s) or is_positive_slice(s) for s in selection) - - -# noinspection PyProtectedMember class BasicIndexer: - def __init__(self, selection, array): - # handle ellipsis - selection = replace_ellipsis(selection, array._shape) - - # setup per-dimension indexers - dim_indexers = [] - for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): - if is_integer(dim_sel): - dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) - - elif is_slice(dim_sel): - dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) - - else: - raise IndexError( - "unsupported selection item for basic indexing; " - "expected integer or slice, got {!r}".format(type(dim_sel)) - ) - - dim_indexers.append(dim_indexer) - - self.dim_indexers = dim_indexers - self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) - self.drop_axes = None - - def __iter__(self): - for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) - chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) - out_selection = tuple( - p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None - ) - - yield ChunkProjection(chunk_coords, chunk_selection, out_selection) - - -class BoolArrayDimIndexer: - def __init__(self, dim_sel, dim_len, dim_chunk_len): - # check number of dimensions - if not is_bool_array(dim_sel, 1): - raise IndexError( - "Boolean arrays in an orthogonal selection must " "be 1-dimensional only" - ) - - # check shape - if dim_sel.shape[0] != dim_len: - raise IndexError( - "Boolean array has the wrong length for dimension; " "expected {}, got {}".format( - dim_len, dim_sel.shape[0] - ) - ) - - # store attributes - self.dim_sel = dim_sel - self.dim_len = dim_len - self.dim_chunk_len = dim_chunk_len - self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) - - # precompute number of selected items for each chunk - self.chunk_nitems = np.zeros(self.nchunks, dtype="i8") - for dim_chunk_ix in range(self.nchunks): - dim_offset = dim_chunk_ix * self.dim_chunk_len - self.chunk_nitems[dim_chunk_ix] = np.count_nonzero( - self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] - ) - self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) - self.nitems = self.chunk_nitems_cumsum[-1] - self.dim_chunk_ixs = np.nonzero(self.chunk_nitems)[0] - - def __iter__(self): - # iterate over chunks with at least one item - for dim_chunk_ix in self.dim_chunk_ixs: - # find region in chunk - dim_offset = dim_chunk_ix * self.dim_chunk_len - dim_chunk_sel = self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] - - # pad out if final chunk - if dim_chunk_sel.shape[0] < self.dim_chunk_len: - tmp = np.zeros(self.dim_chunk_len, dtype=bool) - tmp[: dim_chunk_sel.shape[0]] = dim_chunk_sel - dim_chunk_sel = tmp - - # find region in output - if dim_chunk_ix == 0: - start = 0 - else: - start = self.chunk_nitems_cumsum[dim_chunk_ix - 1] - stop = self.chunk_nitems_cumsum[dim_chunk_ix] - dim_out_sel = 
slice(start, stop) - - yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) - - -class Order: - UNKNOWN = 0 - INCREASING = 1 - DECREASING = 2 - UNORDERED = 3 - - @staticmethod - def check(a): - diff = np.diff(a) - diff_positive = diff >= 0 - n_diff_positive = np.count_nonzero(diff_positive) - all_increasing = n_diff_positive == len(diff_positive) - any_increasing = n_diff_positive > 0 - if all_increasing: - order = Order.INCREASING - elif any_increasing: - order = Order.UNORDERED - else: - order = Order.DECREASING - return order - - -def wraparound_indices(x, dim_len): - loc_neg = x < 0 - if np.any(loc_neg): - x[loc_neg] = x[loc_neg] + dim_len - - -def boundscheck_indices(x, dim_len): - if np.any(x < 0) or np.any(x >= dim_len): - raise BoundsCheckError(dim_len) - - -class IntArrayDimIndexer: - """Integer array selection against a single dimension.""" + dim_indexers: List[_SliceDimIndexer] + shape: ChunkCoords def __init__( self, - dim_sel, - dim_len, - dim_chunk_len, - wraparound=True, - boundscheck=True, - order=Order.UNKNOWN, + selection: Selection, + shape: Tuple[int, ...], + chunk_shape: Tuple[int, ...], ): - # ensure 1d array - dim_sel = np.asanyarray(dim_sel) - if not is_integer_array(dim_sel, 1): - raise IndexError( - "integer arrays in an orthogonal selection must be " "1-dimensional only" - ) - - # handle wraparound - if wraparound: - wraparound_indices(dim_sel, dim_len) - - # handle out of bounds - if boundscheck: - boundscheck_indices(dim_sel, dim_len) - - # store attributes - self.dim_len = dim_len - self.dim_chunk_len = dim_chunk_len - self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) - self.nitems = len(dim_sel) - - # determine which chunk is needed for each selection item - # note: for dense integer selections, the division operation here is the - # bottleneck - dim_sel_chunk = dim_sel // dim_chunk_len - - # determine order of indices - if order == Order.UNKNOWN: - order = Order.check(dim_sel) - self.order = order - - if self.order == Order.INCREASING: - self.dim_sel = dim_sel - self.dim_out_sel = None - elif self.order == Order.DECREASING: - self.dim_sel = dim_sel[::-1] - # TODO should be possible to do this without creating an arange - self.dim_out_sel = np.arange(self.nitems - 1, -1, -1) - else: - # sort indices to group by chunk - self.dim_out_sel = np.argsort(dim_sel_chunk) - self.dim_sel = np.take(dim_sel, self.dim_out_sel) - - # precompute number of selected items for each chunk - self.chunk_nitems = np.bincount(dim_sel_chunk, minlength=self.nchunks) - - # find chunks that we need to visit - self.dim_chunk_ixs = np.nonzero(self.chunk_nitems)[0] - - # compute offsets into the output array - self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) - - def __iter__(self): - for dim_chunk_ix in self.dim_chunk_ixs: - # find region in output - if dim_chunk_ix == 0: - start = 0 - else: - start = self.chunk_nitems_cumsum[dim_chunk_ix - 1] - stop = self.chunk_nitems_cumsum[dim_chunk_ix] - if self.order == Order.INCREASING: - dim_out_sel = slice(start, stop) - else: - dim_out_sel = self.dim_out_sel[start:stop] - - # find region in chunk - dim_offset = dim_chunk_ix * self.dim_chunk_len - dim_chunk_sel = self.dim_sel[start:stop] - dim_offset - - yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) - - -def slice_to_range(s: slice, l: int): # noqa: E741 - return range(*s.indices(l)) - - -def ix_(selection, shape): - """Convert an orthogonal selection to a numpy advanced (fancy) selection, like numpy.ix_ - but with support for slices and single 
ints.""" - - # normalisation - selection = replace_ellipsis(selection, shape) - - # replace slice and int as these are not supported by numpy.ix_ - selection = [ - slice_to_range(dim_sel, dim_len) - if isinstance(dim_sel, slice) - else [dim_sel] - if is_integer(dim_sel) - else dim_sel - for dim_sel, dim_len in zip(selection, shape) - ] - - # now get numpy to convert to a coordinate selection - selection = np.ix_(*selection) - - return selection - - -def oindex(a, selection): - """Implementation of orthogonal indexing with slices and ints.""" - selection = replace_ellipsis(selection, a.shape) - drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s)) - selection = ix_(selection, a.shape) - result = a[selection] - if drop_axes: - result = result.squeeze(axis=drop_axes) - return result - - -def oindex_set(a, selection, value): - selection = replace_ellipsis(selection, a.shape) - drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s)) - selection = ix_(selection, a.shape) - if not np.isscalar(value) and drop_axes: - value = np.asanyarray(value) - value_selection = [slice(None)] * len(a.shape) - for i in drop_axes: - value_selection[i] = np.newaxis - value_selection = tuple(value_selection) - value = value[value_selection] - a[selection] = value - - -# noinspection PyProtectedMember -class OrthogonalIndexer: - def __init__(self, selection, array): - # handle ellipsis - selection = replace_ellipsis(selection, array._shape) - - # normalize list to array - selection = replace_lists(selection) - # setup per-dimension indexers - dim_indexers = [] - for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): - if is_integer(dim_sel): - dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) - - elif isinstance(dim_sel, slice): - dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) - - elif is_integer_array(dim_sel): - dim_indexer = IntArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) - - elif is_bool_array(dim_sel): - dim_indexer = BoolArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) - - else: - raise IndexError( - "unsupported selection item for orthogonal indexing; " - "expected integer, slice, integer array or Boolean " - "array, got {!r}".format(type(dim_sel)) - ) - - dim_indexers.append(dim_indexer) - - self.array = array - self.dim_indexers = dim_indexers - self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) - self.is_advanced = not is_basic_selection(selection) - if self.is_advanced: - self.drop_axes = tuple( - i - for i, dim_indexer in enumerate(self.dim_indexers) - if isinstance(dim_indexer, IntDimIndexer) - ) - else: - self.drop_axes = None - - def __iter__(self): - for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) - chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) - out_selection = tuple( - p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + self.dim_indexers = [ + _SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) + for dim_sel, dim_len, dim_chunk_len in zip( + _ensure_selection(selection, shape), shape, chunk_shape ) - - # handle advanced indexing arrays orthogonally - if self.is_advanced: - # N.B., numpy doesn't support orthogonal indexing directly as yet, - # so need to work around via np.ix_. Also np.ix_ does not support a - # mixture of arrays and slices or integers, so need to convert slices - # and integers into ranges. 
- chunk_selection = ix_(chunk_selection, self.array._chunks) - - # special case for non-monotonic indices - if not is_basic_selection(out_selection): - out_selection = ix_(out_selection, self.shape) - - yield ChunkProjection(chunk_coords, chunk_selection, out_selection) - - -class OIndex: - def __init__(self, array): - self.array = array - - def __getitem__(self, selection): - fields, selection = pop_fields(selection) - selection = ensure_tuple(selection) - selection = replace_lists(selection) - return self.array.get_orthogonal_selection(selection, fields=fields) - - def __setitem__(self, selection, value): - fields, selection = pop_fields(selection) - selection = ensure_tuple(selection) - selection = replace_lists(selection) - return self.array.set_orthogonal_selection(selection, value, fields=fields) - - -# noinspection PyProtectedMember -class BlockIndexer: - def __init__(self, selection, array): - # handle ellipsis - selection = replace_ellipsis(selection, array._shape) - - # normalize list to array - selection = replace_lists(selection) - - # setup per-dimension indexers - dim_indexers = [] - for dim_sel, dim_len, dim_chunk_size in zip(selection, array._shape, array._chunks): - dim_numchunks = int(np.ceil(dim_len / dim_chunk_size)) - - if is_integer(dim_sel): - if dim_sel < 0: - dim_sel = dim_numchunks + dim_sel - - start = dim_sel * dim_chunk_size - stop = start + dim_chunk_size - slice_ = slice(start, stop) - - elif is_slice(dim_sel): - start = dim_sel.start if dim_sel.start is not None else 0 - stop = dim_sel.stop if dim_sel.stop is not None else dim_numchunks - - if dim_sel.step not in {1, None}: - raise IndexError( - "unsupported selection item for block indexing; " - "expected integer or slice with step=1, got {!r}".format(type(dim_sel)) - ) - - # Can't reuse wraparound_indices because it expects a numpy array - # We have integers here. 
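# For example, with dim_numchunks = 10 and dim_chunk_size = 5, a block
# index of -1 normalizes to chunk 9 and selects array coordinates 45:50:
# start = (-1 + 10) * 5 = 45, stop = 45 + 5 = 50.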
- if start < 0: - start = dim_numchunks + start - if stop < 0: - stop = dim_numchunks + stop - - start = start * dim_chunk_size - stop = stop * dim_chunk_size - slice_ = slice(start, stop) - - else: - raise IndexError( - "unsupported selection item for block indexing; " - "expected integer or slice, got {!r}".format(type(dim_sel)) - ) - - dim_indexer = SliceDimIndexer(slice_, dim_len, dim_chunk_size) - dim_indexers.append(dim_indexer) - - if start >= dim_len or start < 0: - raise BoundsCheckError(dim_len) - - self.dim_indexers = dim_indexers + ] self.shape = tuple(s.nitems for s in self.dim_indexers) - self.drop_axes = None - def __iter__(self): + def __iter__(self) -> Iterator[_ChunkProjection]: for dim_projections in itertools.product(*self.dim_indexers): chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) @@ -738,343 +150,59 @@ def __iter__(self): p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None ) - yield ChunkProjection(chunk_coords, chunk_selection, out_selection) - - -class BlockIndex: - def __init__(self, array): - self.array = array - - def __getitem__(self, selection): - fields, selection = pop_fields(selection) - selection = ensure_tuple(selection) - selection = replace_lists(selection) - return self.array.get_block_selection(selection, fields=fields) + yield _ChunkProjection(chunk_coords, chunk_selection, out_selection) - def __setitem__(self, selection, value): - fields, selection = pop_fields(selection) - selection = ensure_tuple(selection) - selection = replace_lists(selection) - return self.array.set_block_selection(selection, value, fields=fields) +def morton_order_iter(chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: + def decode_morton(z: int, chunk_shape: ChunkCoords) -> ChunkCoords: + # Inspired by compressed morton code as implemented in Neuroglancer + # https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#compressed-morton-code + bits = tuple(math.ceil(math.log2(c)) for c in chunk_shape) + max_coords_bits = max(*bits) + input_bit = 0 + input_value = z + out = [0 for _ in range(len(chunk_shape))] -# noinspection PyProtectedMember -def is_coordinate_selection(selection, array): - return (len(selection) == len(array._shape)) and all( - is_integer(dim_sel) or is_integer_array(dim_sel) for dim_sel in selection - ) - - -# noinspection PyProtectedMember -def is_mask_selection(selection, array): - return ( - len(selection) == 1 and is_bool_array(selection[0]) and selection[0].shape == array._shape - ) - - -# noinspection PyProtectedMember -class CoordinateIndexer: - def __init__(self, selection, array): - # some initial normalization - selection = ensure_tuple(selection) - selection = tuple([i] if is_integer(i) else i for i in selection) - selection = replace_lists(selection) - - # validation - if not is_coordinate_selection(selection, array): - raise IndexError( - "invalid coordinate selection; expected one integer " - "(coordinate) array per dimension of the target array, " - "got {!r}".format(selection) - ) - - # handle wraparound, boundscheck - for dim_sel, dim_len in zip(selection, array.shape): - # handle wraparound - wraparound_indices(dim_sel, dim_len) - - # handle out of bounds - boundscheck_indices(dim_sel, dim_len) - - # compute chunk index for each point in the selection - chunks_multi_index = tuple( - dim_sel // dim_chunk_len for (dim_sel, dim_chunk_len) in zip(selection, array._chunks) - ) + for coord_bit in 
range(max_coords_bits): + for dim in range(len(chunk_shape)): + if coord_bit < bits[dim]: + bit = (input_value >> input_bit) & 1 + out[dim] |= bit << coord_bit + input_bit += 1 + return tuple(out) - # broadcast selection - this will raise error if array dimensions don't match - selection = np.broadcast_arrays(*selection) - chunks_multi_index = np.broadcast_arrays(*chunks_multi_index) + for i in range(product(chunk_shape)): + yield decode_morton(i, chunk_shape) - # remember shape of selection, because we will flatten indices for processing - self.sel_shape = selection[0].shape if selection[0].shape else (1,) - # flatten selection - selection = [dim_sel.reshape(-1) for dim_sel in selection] - chunks_multi_index = [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index] - - # ravel chunk indices - chunks_raveled_indices = np.ravel_multi_index(chunks_multi_index, dims=array._cdata_shape) - - # group points by chunk - if np.any(np.diff(chunks_raveled_indices) < 0): - # optimisation, only sort if needed - sel_sort = np.argsort(chunks_raveled_indices) - selection = tuple(dim_sel[sel_sort] for dim_sel in selection) - else: - sel_sort = None - - # store attributes - self.selection = selection - self.sel_sort = sel_sort - self.shape = selection[0].shape if selection[0].shape else (1,) - self.drop_axes = None - self.array = array - - # precompute number of selected items for each chunk - self.chunk_nitems = np.bincount(chunks_raveled_indices, minlength=array.nchunks) - self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) - # locate the chunks we need to process - self.chunk_rixs = np.nonzero(self.chunk_nitems)[0] - - # unravel chunk indices - self.chunk_mixs = np.unravel_index(self.chunk_rixs, array._cdata_shape) - - def __iter__(self): - # iterate over chunks - for i, chunk_rix in enumerate(self.chunk_rixs): - chunk_coords = tuple(m[i] for m in self.chunk_mixs) - if chunk_rix == 0: - start = 0 - else: - start = self.chunk_nitems_cumsum[chunk_rix - 1] - stop = self.chunk_nitems_cumsum[chunk_rix] - if self.sel_sort is None: - out_selection = slice(start, stop) - else: - out_selection = self.sel_sort[start:stop] - - chunk_offsets = tuple( - dim_chunk_ix * dim_chunk_len - for dim_chunk_ix, dim_chunk_len in zip(chunk_coords, self.array._chunks) - ) - chunk_selection = tuple( - dim_sel[start:stop] - dim_chunk_offset - for (dim_sel, dim_chunk_offset) in zip(self.selection, chunk_offsets) - ) - - yield ChunkProjection(chunk_coords, chunk_selection, out_selection) - - -# noinspection PyProtectedMember -class MaskIndexer(CoordinateIndexer): - def __init__(self, selection, array): - # some initial normalization - selection = ensure_tuple(selection) - selection = replace_lists(selection) - - # validation - if not is_mask_selection(selection, array): - raise IndexError( - "invalid mask selection; expected one Boolean (mask)" - "array with the same shape as the target array, got {!r}".format(selection) - ) +def c_order_iter(chunks_per_shard: ChunkCoords) -> Iterator[ChunkCoords]: + return itertools.product(*(range(x) for x in chunks_per_shard)) - # convert to indices - selection = np.nonzero(selection[0]) - # delegate the rest to superclass - super().__init__(selection, array) +def is_total_slice(item: Selection, shape: ChunkCoords): + """Determine whether `item` specifies a complete slice of array with the + given `shape`. 
Used to optimize __setitem__ operations on the Chunk + class.""" - -class VIndex: - def __init__(self, array): - self.array = array - - def __getitem__(self, selection): - fields, selection = pop_fields(selection) - selection = ensure_tuple(selection) - selection = replace_lists(selection) - if is_coordinate_selection(selection, self.array): - return self.array.get_coordinate_selection(selection, fields=fields) - elif is_mask_selection(selection, self.array): - return self.array.get_mask_selection(selection, fields=fields) - else: - raise VindexInvalidSelectionError(selection) - - def __setitem__(self, selection, value): - fields, selection = pop_fields(selection) - selection = ensure_tuple(selection) - selection = replace_lists(selection) - if is_coordinate_selection(selection, self.array): - self.array.set_coordinate_selection(selection, value, fields=fields) - elif is_mask_selection(selection, self.array): - self.array.set_mask_selection(selection, value, fields=fields) - else: - raise VindexInvalidSelectionError(selection) - - -def check_fields(fields, dtype): - # early out - if fields is None: - return dtype - # check type - if not isinstance(fields, (str, list, tuple)): - raise IndexError( - "'fields' argument must be a string or list of strings; found " "{!r}".format( - type(fields) + # N.B., assume shape is normalized + if item == slice(None): + return True + if isinstance(item, slice): + item = (item,) + if isinstance(item, tuple): + return all( + ( + isinstance(dim_sel, slice) + and ( + (dim_sel == slice(None)) + or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) + ) ) + for dim_sel, dim_len in zip(item, shape) ) - if fields: - if dtype.names is None: - raise IndexError("invalid 'fields' argument, array does not have any fields") - try: - if isinstance(fields, str): - # single field selection - out_dtype = dtype[fields] - else: - # multiple field selection - out_dtype = np.dtype([(f, dtype[f]) for f in fields]) - except KeyError as e: - raise IndexError("invalid 'fields' argument, field not found: {!r}".format(e)) - else: - return out_dtype else: - return dtype - - -def check_no_multi_fields(fields): - if isinstance(fields, list): - if len(fields) == 1: - return fields[0] - elif len(fields) > 1: - raise IndexError("multiple fields are not supported for this operation") - return fields - - -def pop_fields(selection): - if isinstance(selection, str): - # single field selection - fields = selection - selection = () - elif not isinstance(selection, tuple): - # single selection item, no fields - fields = None - # leave selection as-is - else: - # multiple items, split fields from selection items - fields = [f for f in selection if isinstance(f, str)] - fields = fields[0] if len(fields) == 1 else fields - selection = tuple(s for s in selection if not isinstance(s, str)) - selection = selection[0] if len(selection) == 1 else selection - return fields, selection - - -def make_slice_selection(selection): - ls = [] - for dim_selection in selection: - if is_integer(dim_selection): - ls.append(slice(int(dim_selection), int(dim_selection) + 1, 1)) - elif isinstance(dim_selection, np.ndarray): - if len(dim_selection) == 1: - ls.append(slice(int(dim_selection[0]), int(dim_selection[0]) + 1, 1)) - else: - raise ArrayIndexError() - else: - ls.append(dim_selection) - return ls + raise TypeError("expected slice or tuple of slices, found %r" % item) -class PartialChunkIterator: - """Iterator to retrieve the specific coordinates of requested data - from within a compressed 
chunk. - - Parameters - ---------- - selection : tuple - tuple of slice objects to take from the chunk - arr_shape : shape of chunk to select data from - - Attributes - ----------- - arr_shape - selection - - Returns - ------- - Tuple with 3 elements: - - start: int - elements offset in the chunk to read from - nitems: int - number of elements to read in the chunk from start - partial_out_selection: list of slices - indices of a temporary empty array of size `Array._chunks` to assign - the decompressed data to after the partial read. - - Notes - ----- - An array is flattened when compressed with blosc, so this iterator takes - the wanted selection of an array and determines the wanted coordinates - of the flattened, compressed data to be read and then decompressed. The - decompressed data is then placed in a temporary empty array of size - `Array._chunks` at the indices yielded as partial_out_selection. - Once all the slices yielded by this iterator have been read, decompressed - and written to the temporary array, the wanted slice of the chunk can be - indexed from the temporary array and written to the out_selection slice - of the out array. - - """ - - def __init__(self, selection, arr_shape): - selection = make_slice_selection(selection) - self.arr_shape = arr_shape - - # number of selection dimensions can't be greater than the number of chunk dimensions - if len(selection) > len(self.arr_shape): - raise ValueError( - "Selection has more dimensions then the array:\n" - f"selection dimensions = {len(selection)}\n" - f"array dimensions = {len(self.arr_shape)}" - ) - - # any selection can not be out of the range of the chunk - selection_shape = np.empty(self.arr_shape)[tuple(selection)].shape - if any( - selection_dim < 0 or selection_dim > arr_dim - for selection_dim, arr_dim in zip(selection_shape, self.arr_shape) - ): - raise IndexError( - "a selection index is out of range for the dimension" - ) # pragma: no cover - - for i, dim_size in enumerate(self.arr_shape[::-1]): - index = len(self.arr_shape) - (i + 1) - if index <= len(selection) - 1: - slice_size = selection_shape[index] - if slice_size == dim_size and index > 0: - selection.pop() - else: - break - - chunk_loc_slices = [] - last_dim_slice = None if selection[-1].step > 1 else selection.pop() - for arr_shape_i, sl in zip(arr_shape, selection): - dim_chunk_loc_slices = [] - assert isinstance(sl, slice) - for x in slice_to_range(sl, arr_shape_i): - dim_chunk_loc_slices.append(slice(x, x + 1, 1)) - chunk_loc_slices.append(dim_chunk_loc_slices) - if last_dim_slice: - chunk_loc_slices.append([last_dim_slice]) - self.chunk_loc_slices = list(itertools.product(*chunk_loc_slices)) - - def __iter__(self): - chunk1 = self.chunk_loc_slices[0] - nitems = (chunk1[-1].stop - chunk1[-1].start) * np.prod( - self.arr_shape[len(chunk1) :], dtype=int - ) - for partial_out_selection in self.chunk_loc_slices: - start = 0 - for i, sl in enumerate(partial_out_selection): - start += sl.start * np.prod(self.arr_shape[i + 1 :], dtype=int) - yield start, nitems, partial_out_selection +def all_chunk_coords(shape: ChunkCoords, chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: + return itertools.product(*(range(0, _ceildiv(s, c)) for s, c in zip(shape, chunk_shape))) diff --git a/src/zarr/v3/metadata.py b/src/zarr/metadata.py similarity index 96% rename from src/zarr/v3/metadata.py rename to src/zarr/metadata.py index 573b8484f0..8eba9a0b5a 100644 --- a/src/zarr/v3/metadata.py +++ b/src/zarr/metadata.py @@ -6,19 +6,19 @@ import numpy as np import 
numpy.typing as npt -from zarr.v3.chunk_grids import ChunkGrid, RegularChunkGrid -from zarr.v3.chunk_key_encodings import ChunkKeyEncoding, parse_separator +from zarr.chunk_grids import ChunkGrid, RegularChunkGrid +from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator if TYPE_CHECKING: from typing import Literal, Union, List, Optional, Tuple - from zarr.v3.codecs.pipeline import CodecPipeline + from zarr.codecs.pipeline import CodecPipeline -from zarr.v3.abc.codec import Codec -from zarr.v3.abc.metadata import Metadata +from zarr.abc.codec import Codec +from zarr.abc.metadata import Metadata -from zarr.v3.common import ( +from zarr.common import ( JSON, ArraySpec, ChunkCoords, @@ -26,7 +26,7 @@ parse_fill_value, parse_shapelike, ) -from zarr.v3.config import RuntimeConfiguration, parse_indexing_order +from zarr.config import RuntimeConfiguration, parse_indexing_order def runtime_configuration( @@ -370,7 +370,7 @@ def parse_v2_metadata(data: ArrayV2Metadata) -> ArrayV2Metadata: def parse_codecs(data: Iterable[Union[Codec, JSON]]) -> CodecPipeline: - from zarr.v3.codecs.pipeline import CodecPipeline + from zarr.codecs.pipeline import CodecPipeline if not isinstance(data, Iterable): raise TypeError(f"Expected iterable, got {type(data)}") diff --git a/src/zarr/store/__init__.py b/src/zarr/store/__init__.py new file mode 100644 index 0000000000..b1c3a5f720 --- /dev/null +++ b/src/zarr/store/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from zarr.store.core import StorePath, StoreLike, make_store_path +from zarr.store.remote import RemoteStore +from zarr.store.local import LocalStore +from zarr.store.memory import MemoryStore diff --git a/src/zarr/v3/store/core.py b/src/zarr/store/core.py similarity index 94% rename from src/zarr/v3/store/core.py rename to src/zarr/store/core.py index 16714d9e30..29506aa619 100644 --- a/src/zarr/v3/store/core.py +++ b/src/zarr/store/core.py @@ -3,9 +3,9 @@ from pathlib import Path from typing import Any, Optional, Tuple, Union -from zarr.v3.common import BytesLike -from zarr.v3.abc.store import Store -from zarr.v3.store.local import LocalStore +from zarr.common import BytesLike +from zarr.abc.store import Store +from zarr.store.local import LocalStore def _dereference_path(root: str, path: str) -> str: diff --git a/src/zarr/v3/store/local.py b/src/zarr/store/local.py similarity index 98% rename from src/zarr/v3/store/local.py rename to src/zarr/store/local.py index 8f02b904c0..1e9e880875 100644 --- a/src/zarr/v3/store/local.py +++ b/src/zarr/store/local.py @@ -5,8 +5,8 @@ from pathlib import Path from typing import Union, Optional, List, Tuple -from zarr.v3.abc.store import Store -from zarr.v3.common import BytesLike, concurrent_map, to_thread +from zarr.abc.store import Store +from zarr.common import BytesLike, concurrent_map, to_thread def _get(path: Path, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> bytes: diff --git a/src/zarr/v3/store/memory.py b/src/zarr/store/memory.py similarity index 97% rename from src/zarr/v3/store/memory.py rename to src/zarr/store/memory.py index afacfa4321..9661b6cea7 100644 --- a/src/zarr/v3/store/memory.py +++ b/src/zarr/store/memory.py @@ -2,8 +2,8 @@ from typing import Optional, MutableMapping, List, Tuple -from zarr.v3.common import BytesLike -from zarr.v3.abc.store import Store +from zarr.common import BytesLike +from zarr.abc.store import Store # TODO: this store could easily be extended to wrap any MutableMapping store from v2 diff --git a/src/zarr/v3/store/remote.py b/src/zarr/store/remote.py
similarity index 96% rename from src/zarr/v3/store/remote.py rename to src/zarr/store/remote.py index 0e6fc84e08..c42cf3f56d 100644 --- a/src/zarr/v3/store/remote.py +++ b/src/zarr/store/remote.py @@ -2,9 +2,9 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union -from zarr.v3.abc.store import Store -from zarr.v3.store.core import _dereference_path -from zarr.v3.common import BytesLike +from zarr.abc.store import Store +from zarr.store.core import _dereference_path +from zarr.common import BytesLike if TYPE_CHECKING: diff --git a/src/zarr/sync.py b/src/zarr/sync.py index 49684a51ee..d9665b4c58 100644 --- a/src/zarr/sync.py +++ b/src/zarr/sync.py @@ -1,48 +1,120 @@ -import os -from collections import defaultdict -from threading import Lock +from __future__ import annotations -import fasteners +import asyncio +import threading +from typing import ( + Any, + AsyncIterator, + Coroutine, + List, + Optional, + TypeVar, +) +from typing_extensions import ParamSpec +from zarr.config import SyncConfiguration -class ThreadSynchronizer: - """Provides synchronization using thread locks.""" - def __init__(self): - self.mutex = Lock() - self.locks = defaultdict(Lock) +# From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py - def __getitem__(self, item): - with self.mutex: - return self.locks[item] +iothread: List[Optional[threading.Thread]] = [None] # dedicated IO thread +loop: List[Optional[asyncio.AbstractEventLoop]] = [ + None +] # global event loop for any non-async instance +_lock: Optional[threading.Lock] = None # global lock placeholder +get_running_loop = asyncio.get_running_loop - def __getstate__(self): - return True - def __setstate__(self, *args): - # reinitialize from scratch - self.__init__() +def _get_lock() -> threading.Lock: + """Allocate or return a threading lock. + + The lock is allocated on first use to allow setting one lock per forked process. + """ + global _lock + if not _lock: + _lock = threading.Lock() + return _lock -class ProcessSynchronizer: - """Provides synchronization using file locks via the - `fasteners `_ - package. +async def _runner(event: threading.Event, coro: Coroutine, result_box: List[Optional[Any]]): + try: + result_box[0] = await coro + except Exception as ex: + result_box[0] = ex + finally: + event.set() - Parameters - ---------- - path : string - Path to a directory on a file system that is shared by all processes. - N.B., this should be a *different* path to where you store the array. +def sync(coro: Coroutine, loop: Optional[asyncio.AbstractEventLoop] = None): + """ + Make loop run coroutine until it returns. 
Runs in other thread + + Examples + -------- + >>> sync(async_function(), existing_loop) """ + if loop is None: + # NB: if the loop is not running *yet*, it is OK to submit work + # and we will wait for it + loop = _get_loop() + if loop is None or loop.is_closed(): + raise RuntimeError("Loop is not running") + try: + loop0 = asyncio.events.get_running_loop() + if loop0 is loop: + raise NotImplementedError("Calling sync() from within a running loop") + except RuntimeError: + pass + result_box: List[Optional[Any]] = [None] + event = threading.Event() + asyncio.run_coroutine_threadsafe(_runner(event, coro, result_box), loop) + while True: + # this loops allows thread to get interrupted + if event.wait(1): + break + + return_result = result_box[0] + if isinstance(return_result, BaseException): + raise return_result + else: + return return_result + + +def _get_loop(): + """Create or return the default fsspec IO loop + + The loop will be running on a separate thread. + """ + if loop[0] is None: + with _get_lock(): + # repeat the check just in case the loop got filled between the + # previous two calls from another thread + if loop[0] is None: + new_loop = asyncio.new_event_loop() + loop[0] = new_loop + th = threading.Thread(target=new_loop.run_forever, name="zarrIO") + th.daemon = True + th.start() + iothread[0] = th + return loop[0] + + +P = ParamSpec("P") +T = TypeVar("T") + + +class SyncMixin: + _sync_configuration: SyncConfiguration - def __init__(self, path): - self.path = path + def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: + # TODO: refactor this to to take *args and **kwargs and pass those to the method + # this should allow us to better type the sync wrapper + return sync(coroutine, loop=self._sync_configuration.asyncio_loop) - def __getitem__(self, item): - path = os.path.join(self.path, item) - lock = fasteners.InterProcessLock(path) - return lock + def _sync_iter(self, coroutine: Coroutine[Any, Any, AsyncIterator[T]]) -> List[T]: + async def iter_to_list() -> List[T]: + # TODO: replace with generators so we don't materialize the entire iterator at once + async_iterator = await coroutine + return [item async for item in async_iterator] - # pickling and unpickling should be handled automatically + return self._sync(iter_to_list()) diff --git a/src/zarr/v2/__init__.py b/src/zarr/v2/__init__.py new file mode 100644 index 0000000000..27c7595580 --- /dev/null +++ b/src/zarr/v2/__init__.py @@ -0,0 +1,54 @@ +# flake8: noqa +from zarr.v2.codecs import * +from zarr.v2.convenience import ( + consolidate_metadata, + copy, + copy_all, + copy_store, + load, + open, + open_consolidated, + save, + save_array, + save_group, + tree, +) +from zarr.v2.core import Array +from zarr.v2.creation import ( + array, + create, + empty, + empty_like, + full, + full_like, + ones, + ones_like, + open_array, + open_like, + zeros, + zeros_like, +) +from zarr.v2.errors import CopyError, MetadataError +from zarr.v2.hierarchy import Group, group, open_group +from zarr.v2.n5 import N5Store, N5FSStore +from zarr.v2.storage import ( + ABSStore, + DBMStore, + DictStore, + DirectoryStore, + KVStore, + LMDBStore, + LRUStoreCache, + MemoryStore, + MongoDBStore, + NestedDirectoryStore, + RedisStore, + SQLiteStore, + TempStore, + ZipStore, +) +from zarr.v2.sync import ProcessSynchronizer, ThreadSynchronizer +from zarr._version import version as __version__ + +# in case setuptools scm screw up and find version to be 0.0.0 +assert not __version__.startswith("0.0.0") diff --git a/src/zarr/v3/abc/__init__.py 
b/src/zarr/v2/_storage/__init__.py similarity index 100% rename from src/zarr/v3/abc/__init__.py rename to src/zarr/v2/_storage/__init__.py diff --git a/src/zarr/_storage/absstore.py b/src/zarr/v2/_storage/absstore.py similarity index 98% rename from src/zarr/_storage/absstore.py rename to src/zarr/v2/_storage/absstore.py index d8e292535c..ee03d44bd4 100644 --- a/src/zarr/_storage/absstore.py +++ b/src/zarr/v2/_storage/absstore.py @@ -2,8 +2,8 @@ import warnings from numcodecs.compat import ensure_bytes -from zarr.util import normalize_storage_path -from zarr._storage.store import Store +from zarr.v2.util import normalize_storage_path +from zarr.v2._storage.store import Store __doctest_requires__ = { ("ABSStore", "ABSStore.*"): ["azure.storage.blob"], diff --git a/src/zarr/_storage/store.py b/src/zarr/v2/_storage/store.py similarity index 96% rename from src/zarr/_storage/store.py rename to src/zarr/v2/_storage/store.py index 6e13b08cc7..ec1dbf0565 100644 --- a/src/zarr/_storage/store.py +++ b/src/zarr/v2/_storage/store.py @@ -1,9 +1,9 @@ from collections.abc import MutableMapping from typing import Any, List, Mapping, Optional, Sequence, Union -from zarr.meta import Metadata2 -from zarr.util import normalize_storage_path -from zarr.context import Context +from zarr.v2.meta import Metadata2 +from zarr.v2.util import normalize_storage_path +from zarr.v2.context import Context # v2 store keys @@ -83,7 +83,7 @@ def _ensure_store(store: Any): We'll do this conversion in a few places automatically """ - from zarr.storage import KVStore # avoid circular import + from zarr.v2.storage import KVStore # avoid circular import if isinstance(store, BaseStore): if not store._store_version == 2: @@ -111,7 +111,7 @@ def _ensure_store(store: Any): raise ValueError( "Starting with Zarr 2.11.0, stores must be subclasses of " "BaseStore, if your store exposes the MutableMapping interface " - f"wrap it in Zarr.storage.KVStore. Got {store}" + f"wrap it in zarr.v2.storage.KVStore. 
Got {store}" ) def getitems( diff --git a/src/zarr/attrs.py b/src/zarr/v2/attrs.py similarity index 98% rename from src/zarr/attrs.py rename to src/zarr/v2/attrs.py index 89cfefc22e..af23d43b9e 100644 --- a/src/zarr/attrs.py +++ b/src/zarr/v2/attrs.py @@ -2,8 +2,8 @@ import warnings from collections.abc import MutableMapping -from zarr._storage.store import Store -from zarr.util import json_dumps +from zarr.v2._storage.store import Store +from zarr.v2.util import json_dumps class Attributes(MutableMapping[str, Any]): diff --git a/src/zarr/codecs.py b/src/zarr/v2/codecs.py similarity index 100% rename from src/zarr/codecs.py rename to src/zarr/v2/codecs.py diff --git a/src/zarr/context.py b/src/zarr/v2/context.py similarity index 100% rename from src/zarr/context.py rename to src/zarr/v2/context.py diff --git a/src/zarr/convenience.py b/src/zarr/v2/convenience.py similarity index 91% rename from src/zarr/convenience.py rename to src/zarr/v2/convenience.py index e4bbade527..6355a11af9 100644 --- a/src/zarr/convenience.py +++ b/src/zarr/v2/convenience.py @@ -4,22 +4,22 @@ import os import re from collections.abc import Mapping, MutableMapping -from zarr.core import Array -from zarr.creation import array as _create_array -from zarr.creation import open_array -from zarr.errors import CopyError, PathNotFoundError -from zarr.hierarchy import Group -from zarr.hierarchy import group as _create_group -from zarr.hierarchy import open_group -from zarr.meta import json_dumps, json_loads -from zarr.storage import ( +from zarr.v2.core import Array +from zarr.v2.creation import array as _create_array +from zarr.v2.creation import open_array +from zarr.v2.errors import CopyError, PathNotFoundError +from zarr.v2.hierarchy import Group +from zarr.v2.hierarchy import group as _create_group +from zarr.v2.hierarchy import open_group +from zarr.v2.meta import json_dumps, json_loads +from zarr.v2.storage import ( contains_array, contains_group, normalize_store_arg, BaseStore, ConsolidatedMetadataStore, ) -from zarr.util import TreeViewer, buffer_size, normalize_storage_path +from zarr.v2.util import TreeViewer, buffer_size, normalize_storage_path from typing import Any, Union @@ -50,17 +50,17 @@ def open(store: StoreLike = None, mode: str = "a", *, path=None, **kwargs): path : str or None, optional The path within the store to open. **kwargs - Additional parameters are passed through to :func:`zarr.creation.open_array` or - :func:`zarr.hierarchy.open_group`. + Additional parameters are passed through to :func:`zarr.v2.creation.open_array` or + :func:`zarr.v2.hierarchy.open_group`. Returns ------- - z : :class:`zarr.core.Array` or :class:`zarr.hierarchy.Group` + z : :class:`zarr.v2.core.Array` or :class:`zarr.v2.hierarchy.Group` Array or group, depending on what exists in the given store. 
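The rewritten ``zarr/sync.py`` earlier in this patch centers on one pattern: ``_get_loop()`` lazily starts a single daemon thread running an event loop forever, and ``sync()`` submits coroutines to that loop from ordinary synchronous code. A condensed, self-contained sketch of the same idea; the patched version additionally waits on a ``threading.Event`` in one-second ticks so the blocked thread stays interruptible::

    import asyncio
    import threading

    _loop = asyncio.new_event_loop()
    threading.Thread(target=_loop.run_forever, name="demoIO", daemon=True).start()

    def sync(coro):
        """Run *coro* on the background loop and block until it completes."""
        future = asyncio.run_coroutine_threadsafe(coro, _loop)
        return future.result()  # re-raises any exception from the coroutine

    async def double(x: int) -> int:
        await asyncio.sleep(0)  # stand-in for real async store I/O
        return 2 * x

    print(sync(double(21)))  # 42
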
See Also -------- - zarr.creation.open_array, zarr.hierarchy.open_group + zarr.v2.creation.open_array, zarr.v2.hierarchy.open_group Examples -------- @@ -69,24 +69,24 @@ def open(store: StoreLike = None, mode: str = "a", *, path=None, **kwargs): >>> import zarr >>> store = 'data/example.zarr' - >>> zw = zarr.open(store, mode='w', shape=100, dtype='i4') # open new array + >>> zw = zarr.v2.open(store, mode='w', shape=100, dtype='i4') # open new array >>> zw - - >>> za = zarr.open(store, mode='a') # open existing array for reading and writing + + >>> za = zarr.v2.open(store, mode='a') # open existing array for reading and writing >>> za - - >>> zr = zarr.open(store, mode='r') # open existing array read-only + + >>> zr = zarr.v2.open(store, mode='r') # open existing array read-only >>> zr - - >>> gw = zarr.open(store, mode='w') # open new group, overwriting previous data + + >>> gw = zarr.v2.open(store, mode='w') # open new group, overwriting previous data >>> gw - - >>> ga = zarr.open(store, mode='a') # open existing group for reading and writing + + >>> ga = zarr.v2.open(store, mode='a') # open existing group for reading and writing >>> ga - - >>> gr = zarr.open(store, mode='r') # open existing group read-only + + >>> gr = zarr.v2.open(store, mode='r') # open existing group read-only >>> gr - + """ @@ -147,14 +147,14 @@ def save_array(store: StoreLike, arr, *, path=None, **kwargs): >>> import zarr >>> import numpy as np >>> arr = np.arange(10000) - >>> zarr.save_array('data/example.zarr', arr) - >>> zarr.load('data/example.zarr') + >>> zarr.v2.save_array('data/example.zarr', arr) + >>> zarr.v2.load('data/example.zarr') array([ 0, 1, 2, ..., 9997, 9998, 9999]) Save an array to a single file (uses a :class:`ZipStore`):: - >>> zarr.save_array('data/example.zip', arr) - >>> zarr.load('data/example.zip') + >>> zarr.v2.save_array('data/example.zip', arr) + >>> zarr.v2.load('data/example.zip') array([ 0, 1, 2, ..., 9997, 9998, 9999]) """ @@ -193,8 +193,8 @@ def save_group(store: StoreLike, *args, path=None, **kwargs): >>> import numpy as np >>> a1 = np.arange(10000) >>> a2 = np.arange(10000, 0, -1) - >>> zarr.save_group('data/example.zarr', a1, a2) - >>> loader = zarr.load('data/example.zarr') + >>> zarr.v2.save_group('data/example.zarr', a1, a2) + >>> loader = zarr.v2.load('data/example.zarr') >>> loader >>> loader['arr_0'] @@ -204,8 +204,8 @@ def save_group(store: StoreLike, *args, path=None, **kwargs): Save several arrays using named keyword arguments:: - >>> zarr.save_group('data/example.zarr', foo=a1, bar=a2) - >>> loader = zarr.load('data/example.zarr') + >>> zarr.v2.save_group('data/example.zarr', foo=a1, bar=a2) + >>> loader = zarr.v2.load('data/example.zarr') >>> loader >>> loader['foo'] @@ -215,8 +215,8 @@ def save_group(store: StoreLike, *args, path=None, **kwargs): Store several arrays in a single zip file (uses a :class:`ZipStore`):: - >>> zarr.save_group('data/example.zip', foo=a1, bar=a2) - >>> loader = zarr.load('data/example.zip') + >>> zarr.v2.save_group('data/example.zip', foo=a1, bar=a2) + >>> loader = zarr.v2.load('data/example.zip') >>> loader >>> loader['foo'] @@ -269,14 +269,14 @@ def save(store: StoreLike, *args, path=None, **kwargs): >>> import zarr >>> import numpy as np >>> arr = np.arange(10000) - >>> zarr.save('data/example.zarr', arr) - >>> zarr.load('data/example.zarr') + >>> zarr.v2.save('data/example.zarr', arr) + >>> zarr.v2.load('data/example.zarr') array([ 0, 1, 2, ..., 9997, 9998, 9999]) Save an array to a Zip file (uses a :class:`ZipStore`):: - >>> 
zarr.save('data/example.zip', arr) - >>> zarr.load('data/example.zip') + >>> zarr.v2.save('data/example.zip', arr) + >>> zarr.v2.load('data/example.zip') array([ 0, 1, 2, ..., 9997, 9998, 9999]) Save several arrays to a directory on the file system (uses a @@ -286,8 +286,8 @@ def save(store: StoreLike, *args, path=None, **kwargs): >>> import numpy as np >>> a1 = np.arange(10000) >>> a2 = np.arange(10000, 0, -1) - >>> zarr.save('data/example.zarr', a1, a2) - >>> loader = zarr.load('data/example.zarr') + >>> zarr.v2.save('data/example.zarr', a1, a2) + >>> loader = zarr.v2.load('data/example.zarr') >>> loader >>> loader['arr_0'] @@ -297,8 +297,8 @@ def save(store: StoreLike, *args, path=None, **kwargs): Save several arrays using named keyword arguments:: - >>> zarr.save('data/example.zarr', foo=a1, bar=a2) - >>> loader = zarr.load('data/example.zarr') + >>> zarr.v2.save('data/example.zarr', foo=a1, bar=a2) + >>> loader = zarr.v2.load('data/example.zarr') >>> loader >>> loader['foo'] @@ -308,8 +308,8 @@ def save(store: StoreLike, *args, path=None, **kwargs): Store several arrays in a single zip file (uses a :class:`ZipStore`):: - >>> zarr.save('data/example.zip', foo=a1, bar=a2) - >>> loader = zarr.load('data/example.zip') + >>> zarr.v2.save('data/example.zip', foo=a1, bar=a2) + >>> loader = zarr.v2.load('data/example.zip') >>> loader >>> loader['foo'] @@ -413,7 +413,7 @@ def tree(grp, expand=False, level=None): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> g4 = g3.create_group('baz') @@ -428,9 +428,9 @@ def tree(grp, expand=False, level=None): └── foo >>> import h5py >>> h5f = h5py.File('data/example.h5', mode='w') - >>> zarr.copy_all(g1, h5f) + >>> zarr.v2.copy_all(g1, h5f) (5, 0, 800) - >>> zarr.tree(h5f) + >>> zarr.v2.tree(h5f) / ├── bar │ ├── baz @@ -440,7 +440,7 @@ def tree(grp, expand=False, level=None): See Also -------- - zarr.hierarchy.Group.tree + zarr.v2.hierarchy.Group.tree Notes ----- @@ -564,8 +564,8 @@ def copy_store( -------- >>> import zarr - >>> store1 = zarr.DirectoryStore('data/example.zarr') - >>> root = zarr.group(store1, overwrite=True) + >>> store1 = zarr.v2.DirectoryStore('data/example.zarr') + >>> root = zarr.v2.group(store1, overwrite=True) >>> foo = root.create_group('foo') >>> bar = foo.create_group('bar') >>> baz = bar.create_dataset('baz', shape=100, chunks=50, dtype='i8') @@ -577,8 +577,8 @@ def copy_store( └── bar └── baz (100,) int64 >>> from sys import stdout - >>> store2 = zarr.ZipStore('data/example.zip', mode='w') - >>> zarr.copy_store(store1, store2, log=stdout) + >>> store2 = zarr.v2.ZipStore('data/example.zip', mode='w') + >>> zarr.v2.copy_store(store1, store2, log=stdout) copy .zgroup copy foo/.zgroup copy foo/bar/.zgroup @@ -587,7 +587,7 @@ def copy_store( copy foo/bar/baz/1 all done: 6 copied, 0 skipped, 566 bytes copied (6, 0, 566) - >>> new_root = zarr.group(store2) + >>> new_root = zarr.v2.group(store2) >>> new_root.tree() / └── foo @@ -772,15 +772,15 @@ def copy( >>> foo = source.create_group('foo') >>> baz = foo.create_dataset('bar/baz', data=np.arange(100), chunks=(50,)) >>> spam = source.create_dataset('spam', data=np.arange(100, 200), chunks=(30,)) - >>> zarr.tree(source) + >>> zarr.v2.tree(source) / ├── foo │ └── bar │ └── baz (100,) int64 └── spam (100,) int64 - >>> dest = zarr.group() + >>> dest = zarr.v2.group() >>> from sys import stdout - >>> zarr.copy(source['foo'], dest, log=stdout) + >>> zarr.v2.copy(source['foo'], dest, 
log=stdout) copy /foo copy /foo/bar copy /foo/bar/baz (100,) int64 @@ -797,29 +797,29 @@ def copy( the destination. Here are some examples of these options, also using ``dry_run=True`` to find out what would happen without actually copying anything:: - >>> source = zarr.group() - >>> dest = zarr.group() + >>> source = zarr.v2.group() + >>> dest = zarr.v2.group() >>> baz = source.create_dataset('foo/bar/baz', data=np.arange(100)) >>> spam = source.create_dataset('foo/spam', data=np.arange(1000)) >>> existing_spam = dest.create_dataset('foo/spam', data=np.arange(1000)) >>> from sys import stdout >>> try: - ... zarr.copy(source['foo'], dest, log=stdout, dry_run=True) - ... except zarr.CopyError as e: + ... zarr.v2.copy(source['foo'], dest, log=stdout, dry_run=True) + ... except zarr.v2.CopyError as e: ... print(e) ... copy /foo copy /foo/bar copy /foo/bar/baz (100,) int64 an object 'spam' already exists in destination '/foo' - >>> zarr.copy(source['foo'], dest, log=stdout, if_exists='replace', dry_run=True) + >>> zarr.v2.copy(source['foo'], dest, log=stdout, if_exists='replace', dry_run=True) copy /foo copy /foo/bar copy /foo/bar/baz (100,) int64 copy /foo/spam (1000,) int64 dry run: 4 copied, 0 skipped (4, 0, 0) - >>> zarr.copy(source['foo'], dest, log=stdout, if_exists='skip', dry_run=True) + >>> zarr.v2.copy(source['foo'], dest, log=stdout, if_exists='skip', dry_run=True) copy /foo copy /foo/bar copy /foo/bar/baz (100,) int64 @@ -1104,15 +1104,15 @@ def copy_all( >>> foo = source.create_group('foo') >>> baz = foo.create_dataset('bar/baz', data=np.arange(100), chunks=(50,)) >>> spam = source.create_dataset('spam', data=np.arange(100, 200), chunks=(30,)) - >>> zarr.tree(source) + >>> zarr.v2.tree(source) / ├── foo │ └── bar │ └── baz (100,) int64 └── spam (100,) int64 - >>> dest = zarr.group() + >>> dest = zarr.v2.group() >>> import sys - >>> zarr.copy_all(source, dest, log=sys.stdout) + >>> zarr.v2.copy_all(source, dest, log=sys.stdout) copy /foo copy /foo/bar copy /foo/bar/baz (100,) int64 @@ -1197,7 +1197,7 @@ def consolidate_metadata(store: BaseStore, metadata_key=".zmetadata", *, path="" Returns ------- - g : :class:`zarr.hierarchy.Group` + g : :class:`zarr.v2.hierarchy.Group` Group instance, opened with the new consolidated metadata. See Also @@ -1252,12 +1252,12 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", ** changes to metadata including creation of new arrays or group are not allowed. **kwargs - Additional parameters are passed through to :func:`zarr.creation.open_array` or - :func:`zarr.hierarchy.open_group`. + Additional parameters are passed through to :func:`zarr.v2.creation.open_array` or + :func:`zarr.v2.hierarchy.open_group`. Returns ------- - g : :class:`zarr.hierarchy.Group` + g : :class:`zarr.v2.hierarchy.Group` Group instance, opened with the consolidated metadata. 
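Taken together, ``consolidate_metadata`` and ``open_consolidated`` above trade one extra write for cheap reads: all hierarchy metadata lands in a single ``.zmetadata`` key that later readers can fetch in one request. A usage sketch under the ``zarr.v2`` namespace this patch introduces (the store path is illustrative)::

    import zarr.v2 as zarr

    store = zarr.DirectoryStore('data/consolidated.zarr')  # path is illustrative
    root = zarr.group(store=store, overwrite=True)
    root.create_dataset('baz', shape=(100,), chunks=(50,), dtype='i8')

    zarr.consolidate_metadata(store)             # writes the single '.zmetadata' key
    g = zarr.open_consolidated(store, mode='r')  # one read recovers the hierarchy
    print(g['baz'].shape)                        # (100,)
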
See Also diff --git a/src/zarr/core.py b/src/zarr/v2/core.py similarity index 97% rename from src/zarr/core.py rename to src/zarr/v2/core.py index 06dcb32063..273d2857e8 100644 --- a/src/zarr/core.py +++ b/src/zarr/v2/core.py @@ -10,12 +10,12 @@ import numpy as np from numcodecs.compat import ensure_bytes -from zarr._storage.store import _prefix_to_attrs_key -from zarr.attrs import Attributes -from zarr.codecs import AsType, get_codec -from zarr.context import Context -from zarr.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError -from zarr.indexing import ( +from zarr.v2._storage.store import _prefix_to_attrs_key +from zarr.v2.attrs import Attributes +from zarr.v2.codecs import AsType, get_codec +from zarr.v2.context import Context +from zarr.v2.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError +from zarr.v2.indexing import ( BasicIndexer, CoordinateIndexer, MaskIndexer, @@ -35,14 +35,14 @@ is_scalar, pop_fields, ) -from zarr.storage import ( +from zarr.v2.storage import ( _prefix_to_array_key, KVStore, getsize, listdir, normalize_store_arg, ) -from zarr.util import ( +from zarr.v2.util import ( ConstantMap, UncompressedPartialReadBufferV3, all_equal, @@ -535,7 +535,7 @@ def islice(self, start=None, end=None): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100)) + >>> z = zarr.v2.array(np.arange(100)) Iterate over part of the array: >>> for value in z.islice(25, 30): value; @@ -604,7 +604,7 @@ def __getitem__(self, selection): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100)) + >>> z = zarr.v2.array(np.arange(100)) Retrieve a single item:: @@ -631,7 +631,7 @@ def __getitem__(self, selection): Setup a 2-dimensional array:: - >>> z = zarr.array(np.arange(100).reshape(10, 10)) + >>> z = zarr.v2.array(np.arange(100).reshape(10, 10)) Retrieve an item:: @@ -688,7 +688,7 @@ def __getitem__(self, selection): ... (b'bbb', 2, 8.4), ... (b'ccc', 3, 12.6)], ... dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) - >>> z = zarr.array(a) + >>> z = zarr.v2.array(a) >>> z['foo'] array([b'aaa', b'bbb', b'ccc'], dtype='|S3') @@ -755,7 +755,7 @@ def get_basic_selection(self, selection=Ellipsis, out=None, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100)) + >>> z = zarr.v2.array(np.arange(100)) Retrieve a single item:: @@ -777,7 +777,7 @@ def get_basic_selection(self, selection=Ellipsis, out=None, fields=None): Setup a 2-dimensional array:: - >>> z = zarr.array(np.arange(100).reshape(10, 10)) + >>> z = zarr.v2.array(np.arange(100).reshape(10, 10)) Retrieve an item:: @@ -820,7 +820,7 @@ def get_basic_selection(self, selection=Ellipsis, out=None, fields=None): ... (b'bbb', 2, 8.4), ... (b'ccc', 3, 12.6)], ... 
dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) - >>> z = zarr.array(a) + >>> z = zarr.v2.array(a) >>> z.get_basic_selection(slice(2), fields='foo') array([b'aaa', b'bbb'], dtype='|S3') @@ -926,7 +926,7 @@ def get_orthogonal_selection(self, selection, out=None, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100).reshape(10, 10)) + >>> z = zarr.v2.array(np.arange(100).reshape(10, 10)) Retrieve rows and columns via any combination of int, slice, integer array and/or Boolean array:: @@ -1034,7 +1034,7 @@ def get_coordinate_selection(self, selection, out=None, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100).reshape(10, 10)) + >>> z = zarr.v2.array(np.arange(100).reshape(10, 10)) Retrieve items by specifying their coordinates:: @@ -1115,7 +1115,7 @@ def get_block_selection(self, selection, out=None, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100).reshape(10, 10), chunks=(3, 3)) + >>> z = zarr.v2.array(np.arange(100).reshape(10, 10), chunks=(3, 3)) Retrieve items by specifying their block coordinates:: @@ -1201,7 +1201,7 @@ def get_mask_selection(self, selection, out=None, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.array(np.arange(100).reshape(10, 10)) + >>> z = zarr.v2.array(np.arange(100).reshape(10, 10)) Retrieve items by specifying a mask:: @@ -1299,7 +1299,7 @@ def __setitem__(self, selection, value): Setup a 1-dimensional array:: >>> import zarr - >>> z = zarr.zeros(100, dtype=int) + >>> z = zarr.v2.zeros(100, dtype=int) Set all array elements to the same scalar value:: @@ -1316,7 +1316,7 @@ def __setitem__(self, selection, value): Setup a 2-dimensional array:: - >>> z = zarr.zeros((5, 5), dtype=int) + >>> z = zarr.v2.zeros((5, 5), dtype=int) Set all array elements to the same scalar value:: @@ -1339,7 +1339,7 @@ def __setitem__(self, selection, value): ... (b'bbb', 2, 8.4), ... (b'ccc', 3, 12.6)], ... dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) - >>> z = zarr.array(a) + >>> z = zarr.v2.array(a) >>> z['foo'] = b'zzz' >>> z[...] array([(b'zzz', 1, 4.2), (b'zzz', 2, 8.4), (b'zzz', 3, 12.6)], @@ -1401,7 +1401,7 @@ def set_basic_selection(self, selection, value, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.zeros(100, dtype=int) + >>> z = zarr.v2.zeros(100, dtype=int) Set all array elements to the same scalar value:: @@ -1418,7 +1418,7 @@ def set_basic_selection(self, selection, value, fields=None): Setup a 2-dimensional array:: - >>> z = zarr.zeros((5, 5), dtype=int) + >>> z = zarr.v2.zeros((5, 5), dtype=int) Set all array elements to the same scalar value:: @@ -1442,7 +1442,7 @@ def set_basic_selection(self, selection, value, fields=None): ... (b'bbb', 2, 8.4), ... (b'ccc', 3, 12.6)], ... 
dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')]) - >>> z = zarr.array(a) + >>> z = zarr.v2.array(a) >>> z.set_basic_selection(slice(0, 2), b'zzz', fields='foo') >>> z[:] array([(b'zzz', 1, 4.2), (b'zzz', 2, 8.4), (b'ccc', 3, 12.6)], @@ -1497,7 +1497,7 @@ def set_orthogonal_selection(self, selection, value, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.zeros((5, 5), dtype=int) + >>> z = zarr.v2.zeros((5, 5), dtype=int) Set data for a selection of rows:: @@ -1588,7 +1588,7 @@ def set_coordinate_selection(self, selection, value, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.zeros((5, 5), dtype=int) + >>> z = zarr.v2.zeros((5, 5), dtype=int) Set data for a selection of items:: @@ -1671,7 +1671,7 @@ def set_block_selection(self, selection, value, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.zeros((6, 6), dtype=int, chunks=2) + >>> z = zarr.v2.zeros((6, 6), dtype=int, chunks=2) Set data for a selection of items:: @@ -1756,7 +1756,7 @@ def set_mask_selection(self, selection, value, fields=None): >>> import zarr >>> import numpy as np - >>> z = zarr.zeros((5, 5), dtype=int) + >>> z = zarr.v2.zeros((5, 5), dtype=int) Set data for a selection of items:: @@ -2323,16 +2323,16 @@ def info(self): Examples -------- >>> import zarr - >>> z = zarr.zeros(1000000, chunks=100000, dtype='i4') + >>> z = zarr.v2.zeros(1000000, chunks=100000, dtype='i4') >>> z.info - Type : zarr.core.Array + Type : zarr.v2.core.Array Data type : int32 Shape : (1000000,) Chunk shape : (100000,) Order : C Read-only : False Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0) - Store type : zarr.storage.KVStore + Store type : zarr.v2.storage.KVStore No. bytes : 4000000 (3.8M) No. bytes stored : 320 Storage ratio : 12500.0 @@ -2402,13 +2402,13 @@ def digest(self, hashname="sha1"): -------- >>> import binascii >>> import zarr - >>> z = zarr.empty(shape=(10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.empty(shape=(10000, 10000), chunks=(1000, 1000)) >>> binascii.hexlify(z.digest()) b'041f90bc7a571452af4f850a8ca2c6cddfa8a1ac' - >>> z = zarr.zeros(shape=(10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.zeros(shape=(10000, 10000), chunks=(1000, 1000)) >>> binascii.hexlify(z.digest()) b'7162d416d26a68063b66ed1f30e0a866e4abed60' - >>> z = zarr.zeros(shape=(10000, 10000), dtype="u1", chunks=(1000, 1000)) + >>> z = zarr.v2.zeros(shape=(10000, 10000), dtype="u1", chunks=(1000, 1000)) >>> binascii.hexlify(z.digest()) b'cb387af37410ae5a3222e893cf3373e4e4f22816' """ @@ -2434,13 +2434,13 @@ def hexdigest(self, hashname="sha1"): Examples -------- >>> import zarr - >>> z = zarr.empty(shape=(10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.empty(shape=(10000, 10000), chunks=(1000, 1000)) >>> z.hexdigest() '041f90bc7a571452af4f850a8ca2c6cddfa8a1ac' - >>> z = zarr.zeros(shape=(10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.zeros(shape=(10000, 10000), chunks=(1000, 1000)) >>> z.hexdigest() '7162d416d26a68063b66ed1f30e0a866e4abed60' - >>> z = zarr.zeros(shape=(10000, 10000), dtype="u1", chunks=(1000, 1000)) + >>> z = zarr.v2.zeros(shape=(10000, 10000), dtype="u1", chunks=(1000, 1000)) >>> z.hexdigest() 'cb387af37410ae5a3222e893cf3373e4e4f22816' """ @@ -2500,7 +2500,7 @@ def resize(self, *args): Examples -------- >>> import zarr - >>> z = zarr.zeros(shape=(10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.zeros(shape=(10000, 10000), chunks=(1000, 1000)) >>> z.shape (10000, 10000) >>> z.resize(20000, 10000) @@ -2590,7 +2590,7 @@ def 
append(self, data, axis=0): >>> import numpy as np >>> import zarr >>> a = np.arange(10000000, dtype='i4').reshape(10000, 1000) - >>> z = zarr.array(a, chunks=(1000, 100)) + >>> z = zarr.v2.array(a, chunks=(1000, 100)) >>> z.shape (10000, 1000) >>> z.append(a) @@ -2686,10 +2686,10 @@ def view( >>> np.random.seed(42) >>> labels = ['female', 'male'] >>> data = np.random.choice(labels, size=10000) - >>> filters = [zarr.Categorize(labels=labels, + >>> filters = [zarr.v2.Categorize(labels=labels, ... dtype=data.dtype, ... astype='u1')] - >>> a = zarr.array(data, chunks=1000, filters=filters) + >>> a = zarr.v2.array(data, chunks=1000, filters=filters) >>> a[:] array(['female', 'male', 'female', ..., 'male', 'male', 'female'], dtype='>> data = np.random.randint(0, 2, size=10000, dtype='u1') - >>> a = zarr.array(data, chunks=1000) + >>> a = zarr.v2.array(data, chunks=1000) >>> a[:] array([0, 0, 1, ..., 1, 0, 0], dtype=uint8) >>> v = a.view(dtype=bool) @@ -2727,7 +2727,7 @@ def view( data is interpreted correctly: >>> data = np.arange(10000, dtype='u2') - >>> a = zarr.array(data, chunks=1000) + >>> a = zarr.v2.array(data, chunks=1000) >>> a[:10] array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=uint16) >>> v = a.view(dtype='u1', shape=20000, chunks=2000) @@ -2738,7 +2738,7 @@ def view( Change fill value for uninitialized chunks: - >>> a = zarr.full(10000, chunks=1000, fill_value=-1, dtype='i1') + >>> a = zarr.v2.full(10000, chunks=1000, fill_value=-1, dtype='i1') >>> a[:] array([-1, -1, -1, ..., -1, -1, -1], dtype=int8) >>> v = a.view(fill_value=42) @@ -2747,7 +2747,7 @@ def view( Note that resizing or appending to views is not permitted: - >>> a = zarr.empty(10000) + >>> a = zarr.v2.empty(10000) >>> v = a.view() >>> try: ... v.resize(20000) @@ -2820,7 +2820,7 @@ def astype(self, dtype): >>> import zarr >>> import numpy as np >>> data = np.arange(100, dtype=np.uint8) - >>> a = zarr.array(data, chunks=10) + >>> a = zarr.v2.array(data, chunks=10) >>> a[:] array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, diff --git a/src/zarr/creation.py b/src/zarr/v2/creation.py similarity index 92% rename from src/zarr/creation.py rename to src/zarr/v2/creation.py index c93178c0e7..d0ba00603d 100644 --- a/src/zarr/creation.py +++ b/src/zarr/v2/creation.py @@ -4,13 +4,13 @@ import numpy as np from numcodecs.registry import codec_registry -from zarr.core import Array -from zarr.errors import ( +from zarr.v2.core import Array +from zarr.v2.errors import ( ArrayNotFoundError, ContainsArrayError, ContainsGroupError, ) -from zarr.storage import ( +from zarr.v2.storage import ( contains_array, contains_group, default_compressor, @@ -18,7 +18,7 @@ normalize_storage_path, normalize_store_arg, ) -from zarr.util import normalize_dimension_separator +from zarr.v2.util import normalize_dimension_separator def create( @@ -114,7 +114,7 @@ def create( Returns ------- - z : zarr.core.Array + z : zarr.v2.core.Array Examples -------- @@ -122,37 +122,37 @@ def create( Create an array with default settings:: >>> import zarr - >>> z = zarr.create((10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.create((10000, 10000), chunks=(1000, 1000)) >>> z - + Create an array with different some different configuration options:: >>> from numcodecs import Blosc >>> compressor = Blosc(cname='zstd', clevel=1, shuffle=Blosc.BITSHUFFLE) - >>> z = zarr.create((10000, 10000), chunks=(1000, 1000), dtype='i1', order='F', + >>> z = zarr.v2.create((10000, 10000), chunks=(1000, 1000), 
dtype='i1', order='F', ... compressor=compressor) >>> z - + To create an array with object dtype requires a filter that can handle Python object encoding, e.g., `MsgPack` or `Pickle` from `numcodecs`:: >>> from numcodecs import MsgPack - >>> z = zarr.create((10000, 10000), chunks=(1000, 1000), dtype=object, + >>> z = zarr.v2.create((10000, 10000), chunks=(1000, 1000), dtype=object, ... object_codec=MsgPack()) >>> z - + Example with some filters, and also storing chunks separately from metadata:: >>> from numcodecs import Quantize, Adler32 >>> store, chunk_store = dict(), dict() - >>> z = zarr.create((10000, 10000), chunks=(1000, 1000), dtype='f8', + >>> z = zarr.v2.create((10000, 10000), chunks=(1000, 1000), dtype='f8', ... filters=[Quantize(digits=2, dtype='f8'), Adler32()], ... store=store, chunk_store=chunk_store) >>> z - + """ @@ -274,7 +274,7 @@ def _kwargs_compat(compressor, fill_value, kwargs): def empty(shape, **kwargs): """Create an empty array. - For parameter definitions see :func:`zarr.creation.create`. + For parameter definitions see :func:`zarr.v2.creation.create`. Notes ----- @@ -290,14 +290,14 @@ def zeros(shape, **kwargs): """Create an array, with zero being used as the default value for uninitialized portions of the array. - For parameter definitions see :func:`zarr.creation.create`. + For parameter definitions see :func:`zarr.v2.creation.create`. Examples -------- >>> import zarr - >>> z = zarr.zeros((10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.zeros((10000, 10000), chunks=(1000, 1000)) >>> z - + >>> z[:2, :2] array([[0., 0.], [0., 0.]]) @@ -311,14 +311,14 @@ def ones(shape, **kwargs): """Create an array, with one being used as the default value for uninitialized portions of the array. - For parameter definitions see :func:`zarr.creation.create`. + For parameter definitions see :func:`zarr.v2.creation.create`. Examples -------- >>> import zarr - >>> z = zarr.ones((10000, 10000), chunks=(1000, 1000)) + >>> z = zarr.v2.ones((10000, 10000), chunks=(1000, 1000)) >>> z - + >>> z[:2, :2] array([[1., 1.], [1., 1.]]) @@ -332,14 +332,14 @@ def full(shape, fill_value, **kwargs): """Create an array, with `fill_value` being used as the default value for uninitialized portions of the array. - For parameter definitions see :func:`zarr.creation.create`. + For parameter definitions see :func:`zarr.v2.creation.create`. Examples -------- >>> import zarr - >>> z = zarr.full((10000, 10000), chunks=(1000, 1000), fill_value=42) + >>> z = zarr.v2.full((10000, 10000), chunks=(1000, 1000), fill_value=42) >>> z - + >>> z[:2, :2] array([[42., 42.], [42., 42.]]) @@ -370,16 +370,16 @@ def array(data, **kwargs): """Create an array filled with `data`. The `data` argument should be a NumPy array or array-like object. For - other parameter definitions see :func:`zarr.creation.create`. + other parameter definitions see :func:`zarr.v2.creation.create`. Examples -------- >>> import numpy as np >>> import zarr >>> a = np.arange(100000000).reshape(10000, 10000) - >>> z = zarr.array(a, chunks=(1000, 1000)) + >>> z = zarr.v2.array(a, chunks=(1000, 1000)) >>> z - + """ @@ -517,20 +517,20 @@ def open_array( Returns ------- - z : zarr.core.Array + z : zarr.v2.core.Array Examples -------- >>> import numpy as np >>> import zarr - >>> z1 = zarr.open_array('data/example.zarr', mode='w', shape=(10000, 10000), + >>> z1 = zarr.v2.open_array('data/example.zarr', mode='w', shape=(10000, 10000), ... 
chunks=(1000, 1000), fill_value=0) >>> z1[:] = np.arange(100000000).reshape(10000, 10000) >>> z1 - - >>> z2 = zarr.open_array('data/example.zarr', mode='r') + + >>> z2 = zarr.v2.open_array('data/example.zarr', mode='r') >>> z2 - + >>> np.all(z1[:] == z2[:]) True diff --git a/src/zarr/errors.py b/src/zarr/v2/errors.py similarity index 100% rename from src/zarr/errors.py rename to src/zarr/v2/errors.py diff --git a/src/zarr/hierarchy.py b/src/zarr/v2/hierarchy.py similarity index 93% rename from src/zarr/hierarchy.py rename to src/zarr/v2/hierarchy.py index 9044c1681e..acd65750e3 100644 --- a/src/zarr/hierarchy.py +++ b/src/zarr/v2/hierarchy.py @@ -4,9 +4,9 @@ import numpy as np -from zarr.attrs import Attributes -from zarr.core import Array -from zarr.creation import ( +from zarr.v2.attrs import Attributes +from zarr.v2.core import Array +from zarr.v2.creation import ( array, create, empty, @@ -18,13 +18,13 @@ zeros, zeros_like, ) -from zarr.errors import ( +from zarr.v2.errors import ( ContainsArrayError, ContainsGroupError, GroupNotFoundError, ReadOnlyError, ) -from zarr.storage import ( +from zarr.v2.storage import ( _prefix_to_group_key, BaseStore, MemoryStore, @@ -39,7 +39,7 @@ rmdir, ) -from zarr.util import ( +from zarr.v2.util import ( InfoReporter, TreeViewer, is_valid_python_name, @@ -259,7 +259,7 @@ def __iter__(self): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) @@ -363,7 +363,7 @@ def __contains__(self, item): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> d1 = g1.create_dataset('bar', shape=100, chunks=10) >>> 'foo' in g1 @@ -390,14 +390,14 @@ def __getitem__(self, item): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> d1 = g1.create_dataset('foo/bar/baz', shape=100, chunks=10) >>> g1['foo'] - + >>> g1['foo/bar'] - + >>> g1['foo/bar/baz'] - + """ path = self._item_path(item) @@ -462,7 +462,7 @@ def group_keys(self): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) @@ -483,15 +483,15 @@ def groups(self): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) >>> d2 = g1.create_dataset('quux', shape=200, chunks=20) >>> for n, v in g1.groups(): ... print(n, type(v)) - bar - foo + bar + foo """ @@ -523,7 +523,7 @@ def array_keys(self, recurse=False): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) @@ -547,15 +547,15 @@ def arrays(self, recurse=False): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) >>> d2 = g1.create_dataset('quux', shape=200, chunks=20) >>> for n, v in g1.arrays(): ... 
print(n, type(v)) - baz - quux + baz + quux """ return self._array_iter(keys_only=False, method="arrays", recurse=recurse) @@ -580,7 +580,7 @@ def visitvalues(self, func): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> g4 = g3.create_group('baz') @@ -588,13 +588,13 @@ def visitvalues(self, func): >>> def print_visitor(obj): ... print(obj) >>> g1.visitvalues(print_visitor) - - - - + + + + >>> g3.visitvalues(print_visitor) - - + + """ @@ -619,7 +619,7 @@ def visit(self, func): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> g4 = g3.create_group('baz') @@ -649,11 +649,11 @@ def visit(self, func): It is created as follows: - >>> root = zarr.group() + >>> root = zarr.v2.group() >>> foo = root.create_group("foo") >>> bar = root.create_group("bar") >>> root.create_group("aaa").create_group("bbb").create_group("ccc").create_group("aaa") - + For ``find``, the first path that matches a given pattern (for example "aaa") is returned. Note that a non-None value is returned in the visit @@ -725,7 +725,7 @@ def visititems(self, func): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> g4 = g3.create_group('baz') @@ -733,13 +733,13 @@ def visititems(self, func): >>> def print_visitor(name, obj): ... print((name, obj)) >>> g1.visititems(print_visitor) - ('bar', ) - ('bar/baz', ) - ('bar/quux', ) - ('foo', ) + ('bar', ) + ('bar/baz', ) + ('bar/quux', ) + ('foo', ) >>> g3.visititems(print_visitor) - ('baz', ) - ('quux', ) + ('baz', ) + ('quux', ) """ @@ -759,7 +759,7 @@ def tree(self, expand=False, level=None): Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> g4 = g3.create_group('baz') @@ -821,12 +821,12 @@ def create_group(self, name, overwrite=False): Returns ------- - g : zarr.hierarchy.Group + g : zarr.v2.hierarchy.Group Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> g4 = g1.create_group('baz/quux') @@ -866,12 +866,12 @@ def require_group(self, name, overwrite=False): Returns ------- - g : zarr.hierarchy.Group + g : zarr.v2.hierarchy.Group Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> g2 = g1.require_group('foo') >>> g3 = g1.require_group('foo') >>> g2 == g3 @@ -929,7 +929,7 @@ def create_dataset(self, name, **kwargs): Default value to use for uninitialized portions of the array. order : {'C', 'F'}, optional Memory layout to be used within each chunk. - synchronizer : zarr.sync.ArraySynchronizer, optional + synchronizer : zarr.v2.sync.ArraySynchronizer, optional Array synchronizer. filters : sequence of Codecs, optional Sequence of filters to use to encode chunk data prior to @@ -946,20 +946,20 @@ def create_dataset(self, name, **kwargs): Returns ------- - a : zarr.core.Array + a : zarr.v2.core.Array Examples -------- >>> import zarr - >>> g1 = zarr.group() + >>> g1 = zarr.v2.group() >>> d1 = g1.create_dataset('foo', shape=(10000, 10000), ... chunks=(1000, 1000)) >>> d1 - + >>> d2 = g1.create_dataset('bar/baz/qux', shape=(100, 100, 100), ... 
chunks=(100, 10, 10)) >>> d2 - + """ assert "mode" not in kwargs @@ -989,7 +989,7 @@ def require_dataset(self, name, shape, dtype=None, exact=False, **kwargs): Arrays are known as "datasets" in HDF5 terminology. For compatibility with h5py, Zarr groups also implement the create_dataset() method. - Other `kwargs` are as per :func:`zarr.hierarchy.Group.create_dataset`. + Other `kwargs` are as per :func:`zarr.v2.hierarchy.Group.create_dataset`. Parameters ---------- @@ -1049,7 +1049,7 @@ def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, **kwargs def create(self, name, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.create`.""" + :func:`zarr.v2.creation.create`.""" return self._write_op(self._create_nosync, name, **kwargs) def _create_nosync(self, name, **kwargs): @@ -1060,7 +1060,7 @@ def _create_nosync(self, name, **kwargs): def empty(self, name, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.empty`.""" + :func:`zarr.v2.creation.empty`.""" return self._write_op(self._empty_nosync, name, **kwargs) def _empty_nosync(self, name, **kwargs): @@ -1071,7 +1071,7 @@ def _empty_nosync(self, name, **kwargs): def zeros(self, name, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.zeros`.""" + :func:`zarr.v2.creation.zeros`.""" return self._write_op(self._zeros_nosync, name, **kwargs) def _zeros_nosync(self, name, **kwargs): @@ -1082,7 +1082,7 @@ def _zeros_nosync(self, name, **kwargs): def ones(self, name, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.ones`.""" + :func:`zarr.v2.creation.ones`.""" return self._write_op(self._ones_nosync, name, **kwargs) def _ones_nosync(self, name, **kwargs): @@ -1093,7 +1093,7 @@ def _ones_nosync(self, name, **kwargs): def full(self, name, fill_value, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.full`.""" + :func:`zarr.v2.creation.full`.""" return self._write_op(self._full_nosync, name, fill_value, **kwargs) def _full_nosync(self, name, fill_value, **kwargs): @@ -1110,7 +1110,7 @@ def _full_nosync(self, name, fill_value, **kwargs): def array(self, name, data, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.array`.""" + :func:`zarr.v2.creation.array`.""" return self._write_op(self._array_nosync, name, data, **kwargs) def _array_nosync(self, name, data, **kwargs): @@ -1121,7 +1121,7 @@ def _array_nosync(self, name, data, **kwargs): def empty_like(self, name, data, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.empty_like`.""" + :func:`zarr.v2.creation.empty_like`.""" return self._write_op(self._empty_like_nosync, name, data, **kwargs) def _empty_like_nosync(self, name, data, **kwargs): @@ -1134,7 +1134,7 @@ def _empty_like_nosync(self, name, data, **kwargs): def zeros_like(self, name, data, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.zeros_like`.""" + :func:`zarr.v2.creation.zeros_like`.""" return self._write_op(self._zeros_like_nosync, name, data, **kwargs) def _zeros_like_nosync(self, name, data, **kwargs): @@ -1147,7 +1147,7 @@ def _zeros_like_nosync(self, name, data, **kwargs): def ones_like(self, name, data, **kwargs): """Create an array. 
Keyword arguments as per - :func:`zarr.creation.ones_like`.""" + :func:`zarr.v2.creation.ones_like`.""" return self._write_op(self._ones_like_nosync, name, data, **kwargs) def _ones_like_nosync(self, name, data, **kwargs): @@ -1160,7 +1160,7 @@ def _ones_like_nosync(self, name, data, **kwargs): def full_like(self, name, data, **kwargs): """Create an array. Keyword arguments as per - :func:`zarr.creation.full_like`.""" + :func:`zarr.v2.creation.full_like`.""" return self._write_op(self._full_like_nosync, name, data, **kwargs) def _full_like_nosync(self, name, data, **kwargs): @@ -1252,23 +1252,23 @@ def group( Returns ------- - g : zarr.hierarchy.Group + g : zarr.v2.hierarchy.Group Examples -------- Create a group in memory:: >>> import zarr - >>> g = zarr.group() + >>> g = zarr.v2.group() >>> g - + Create a group with a different store:: - >>> store = zarr.DirectoryStore('data/example.zarr') - >>> g = zarr.group(store=store, overwrite=True) + >>> store = zarr.v2.DirectoryStore('data/example.zarr') + >>> g = zarr.v2.group(store=store, overwrite=True) >>> g - + """ @@ -1336,19 +1336,19 @@ def open_group( Returns ------- - g : zarr.hierarchy.Group + g : zarr.v2.hierarchy.Group Examples -------- >>> import zarr - >>> root = zarr.open_group('data/example.zarr', mode='w') + >>> root = zarr.v2.open_group('data/example.zarr', mode='w') >>> foo = root.create_group('foo') >>> bar = root.create_group('bar') >>> root - - >>> root2 = zarr.open_group('data/example.zarr', mode='a') + + >>> root2 = zarr.v2.open_group('data/example.zarr', mode='a') >>> root2 - + >>> root == root2 True diff --git a/src/zarr/v2/indexing.py b/src/zarr/v2/indexing.py new file mode 100644 index 0000000000..1c11409d05 --- /dev/null +++ b/src/zarr/v2/indexing.py @@ -0,0 +1,1080 @@ +import collections +import itertools +import math +import numbers + +import numpy as np + + +from zarr.v2.errors import ( + ArrayIndexError, + NegativeStepError, + err_too_many_indices, + VindexInvalidSelectionError, + BoundsCheckError, +) + + +def is_integer(x): + """True if x is an integer (both pure Python or NumPy). + + Note that Python's bool is considered an integer too. + """ + return isinstance(x, numbers.Integral) + + +def is_integer_list(x): + """True if x is a list of integers. + + This function assumes ie *does not check* that all elements of the list + have the same type. Mixed type lists will result in other errors that will + bubble up anyway. + """ + return isinstance(x, list) and len(x) > 0 and is_integer(x[0]) + + +def is_integer_array(x, ndim=None): + t = not np.isscalar(x) and hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype.kind in "ui" + if ndim is not None: + t = t and len(x.shape) == ndim + return t + + +def is_bool_array(x, ndim=None): + t = hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype == bool + if ndim is not None: + t = t and len(x.shape) == ndim + return t + + +def is_scalar(value, dtype): + if np.isscalar(value): + return True + if isinstance(value, tuple) and dtype.names and len(value) == len(dtype.names): + return True + return False + + +def is_pure_fancy_indexing(selection, ndim): + """Check whether a selection contains only scalars or integer array-likes. + + Parameters + ---------- + selection : tuple, slice, or scalar + A valid selection value for indexing into arrays. + + Returns + ------- + is_pure : bool + True if the selection is a pure fancy indexing expression (ie not mixed + with boolean or slices). 
+ """ + if ndim == 1: + if is_integer_list(selection) or is_integer_array(selection): + return True + # if not, we go through the normal path below, because a 1-tuple + # of integers is also allowed. + no_slicing = ( + isinstance(selection, tuple) + and len(selection) == ndim + and not (any(isinstance(elem, slice) or elem is Ellipsis for elem in selection)) + ) + return ( + no_slicing + and all( + is_integer(elem) or is_integer_list(elem) or is_integer_array(elem) + for elem in selection + ) + and any(is_integer_list(elem) or is_integer_array(elem) for elem in selection) + ) + + +def is_pure_orthogonal_indexing(selection, ndim): + if not ndim: + return False + + # Case 1: Selection is a single iterable of integers + if is_integer_list(selection) or is_integer_array(selection, ndim=1): + return True + + # Case two: selection contains either zero or one integer iterables. + # All other selection elements are slices or integers + return ( + isinstance(selection, tuple) + and len(selection) == ndim + and sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 + and all( + is_integer_list(elem) or is_integer_array(elem) or isinstance(elem, (int, slice)) + for elem in selection + ) + ) + + +def normalize_integer_selection(dim_sel, dim_len): + # normalize type to int + dim_sel = int(dim_sel) + + # handle wraparound + if dim_sel < 0: + dim_sel = dim_len + dim_sel + + # handle out of bounds + if dim_sel >= dim_len or dim_sel < 0: + raise BoundsCheckError(dim_len) + + return dim_sel + + +ChunkDimProjection = collections.namedtuple( + "ChunkDimProjection", ("dim_chunk_ix", "dim_chunk_sel", "dim_out_sel") +) +"""A mapping from chunk to output array for a single dimension. + +Parameters +---------- +dim_chunk_ix + Index of chunk. +dim_chunk_sel + Selection of items from chunk array. +dim_out_sel + Selection of items in target (output) array. 
+ +""" + + +class IntDimIndexer: + def __init__(self, dim_sel, dim_len, dim_chunk_len): + # normalize + dim_sel = normalize_integer_selection(dim_sel, dim_len) + + # store attributes + self.dim_sel = dim_sel + self.dim_len = dim_len + self.dim_chunk_len = dim_chunk_len + self.nitems = 1 + + def __iter__(self): + dim_chunk_ix = self.dim_sel // self.dim_chunk_len + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_chunk_sel = self.dim_sel - dim_offset + dim_out_sel = None + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +def ceildiv(a, b): + return math.ceil(a / b) + + +class SliceDimIndexer: + def __init__(self, dim_sel, dim_len, dim_chunk_len): + # normalize + self.start, self.stop, self.step = dim_sel.indices(dim_len) + if self.step < 1: + raise NegativeStepError() + + # store attributes + self.dim_len = dim_len + self.dim_chunk_len = dim_chunk_len + self.nitems = max(0, ceildiv((self.stop - self.start), self.step)) + self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) + + def __iter__(self): + # figure out the range of chunks we need to visit + dim_chunk_ix_from = self.start // self.dim_chunk_len + dim_chunk_ix_to = ceildiv(self.stop, self.dim_chunk_len) + + # iterate over chunks in range + for dim_chunk_ix in range(dim_chunk_ix_from, dim_chunk_ix_to): + # compute offsets for chunk within overall array + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_limit = min(self.dim_len, (dim_chunk_ix + 1) * self.dim_chunk_len) + + # determine chunk length, accounting for trailing chunk + dim_chunk_len = dim_limit - dim_offset + + if self.start < dim_offset: + # selection starts before current chunk + dim_chunk_sel_start = 0 + remainder = (dim_offset - self.start) % self.step + if remainder: + dim_chunk_sel_start += self.step - remainder + # compute number of previous items, provides offset into output array + dim_out_offset = ceildiv((dim_offset - self.start), self.step) + + else: + # selection starts within current chunk + dim_chunk_sel_start = self.start - dim_offset + dim_out_offset = 0 + + if self.stop > dim_limit: + # selection ends after current chunk + dim_chunk_sel_stop = dim_chunk_len + + else: + # selection ends within current chunk + dim_chunk_sel_stop = self.stop - dim_offset + + dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step) + dim_chunk_nitems = ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) + + # If there are no elements on the selection within this chunk, then skip + if dim_chunk_nitems == 0: + continue + + dim_out_sel = slice(dim_out_offset, dim_out_offset + dim_chunk_nitems) + + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +def check_selection_length(selection, shape): + if len(selection) > len(shape): + err_too_many_indices(selection, shape) + + +def replace_ellipsis(selection, shape): + selection = ensure_tuple(selection) + + # count number of ellipsis present + n_ellipsis = sum(1 for i in selection if i is Ellipsis) + + if n_ellipsis > 1: + # more than 1 is an error + raise IndexError("an index can only have a single ellipsis ('...')") + + elif n_ellipsis == 1: + # locate the ellipsis, count how many items to left and right + n_items_l = selection.index(Ellipsis) # items to left of ellipsis + n_items_r = len(selection) - (n_items_l + 1) # items to right of ellipsis + n_items = len(selection) - 1 # all non-ellipsis items + + if n_items >= len(shape): + # ellipsis does nothing, just remove it + selection = tuple(i for i in selection if i != Ellipsis) + + else: + # replace 
ellipsis with as many slices are needed for number of dims + new_item = selection[:n_items_l] + ((slice(None),) * (len(shape) - n_items)) + if n_items_r: + new_item += selection[-n_items_r:] + selection = new_item + + # fill out selection if not completely specified + if len(selection) < len(shape): + selection += (slice(None),) * (len(shape) - len(selection)) + + # check selection not too long + check_selection_length(selection, shape) + + return selection + + +def replace_lists(selection): + return tuple( + np.asarray(dim_sel) if isinstance(dim_sel, list) else dim_sel for dim_sel in selection + ) + + +def ensure_tuple(v): + if not isinstance(v, tuple): + v = (v,) + return v + + +ChunkProjection = collections.namedtuple( + "ChunkProjection", ("chunk_coords", "chunk_selection", "out_selection") +) +"""A mapping of items from chunk to output array. Can be used to extract items from the +chunk array for loading into an output array. Can also be used to extract items from a +value array for setting/updating in a chunk array. + +Parameters +---------- +chunk_coords + Indices of chunk. +chunk_selection + Selection of items from chunk array. +out_selection + Selection of items in target (output) array. + +""" + + +def is_slice(s): + return isinstance(s, slice) + + +def is_contiguous_slice(s): + return is_slice(s) and (s.step is None or s.step == 1) + + +def is_positive_slice(s): + return is_slice(s) and (s.step is None or s.step >= 1) + + +def is_contiguous_selection(selection): + selection = ensure_tuple(selection) + return all((is_integer_array(s) or is_contiguous_slice(s) or s == Ellipsis) for s in selection) + + +def is_basic_selection(selection): + selection = ensure_tuple(selection) + return all(is_integer(s) or is_positive_slice(s) for s in selection) + + +# noinspection PyProtectedMember +class BasicIndexer: + def __init__(self, selection, array): + # handle ellipsis + selection = replace_ellipsis(selection, array._shape) + + # setup per-dimension indexers + dim_indexers = [] + for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): + if is_integer(dim_sel): + dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif is_slice(dim_sel): + dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) + + else: + raise IndexError( + "unsupported selection item for basic indexing; " + "expected integer or slice, got {!r}".format(type(dim_sel)) + ) + + dim_indexers.append(dim_indexer) + + self.dim_indexers = dim_indexers + self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) + self.drop_axes = None + + def __iter__(self): + for dim_projections in itertools.product(*self.dim_indexers): + chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) + chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) + + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +class BoolArrayDimIndexer: + def __init__(self, dim_sel, dim_len, dim_chunk_len): + # check number of dimensions + if not is_bool_array(dim_sel, 1): + raise IndexError( + "Boolean arrays in an orthogonal selection must " "be 1-dimensional only" + ) + + # check shape + if dim_sel.shape[0] != dim_len: + raise IndexError( + "Boolean array has the wrong length for dimension; " "expected {}, got {}".format( + dim_len, dim_sel.shape[0] + ) + ) + + # store attributes + self.dim_sel = dim_sel + self.dim_len = dim_len + self.dim_chunk_len = 
dim_chunk_len + self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) + + # precompute number of selected items for each chunk + self.chunk_nitems = np.zeros(self.nchunks, dtype="i8") + for dim_chunk_ix in range(self.nchunks): + dim_offset = dim_chunk_ix * self.dim_chunk_len + self.chunk_nitems[dim_chunk_ix] = np.count_nonzero( + self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] + ) + self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) + self.nitems = self.chunk_nitems_cumsum[-1] + self.dim_chunk_ixs = np.nonzero(self.chunk_nitems)[0] + + def __iter__(self): + # iterate over chunks with at least one item + for dim_chunk_ix in self.dim_chunk_ixs: + # find region in chunk + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_chunk_sel = self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] + + # pad out if final chunk + if dim_chunk_sel.shape[0] < self.dim_chunk_len: + tmp = np.zeros(self.dim_chunk_len, dtype=bool) + tmp[: dim_chunk_sel.shape[0]] = dim_chunk_sel + dim_chunk_sel = tmp + + # find region in output + if dim_chunk_ix == 0: + start = 0 + else: + start = self.chunk_nitems_cumsum[dim_chunk_ix - 1] + stop = self.chunk_nitems_cumsum[dim_chunk_ix] + dim_out_sel = slice(start, stop) + + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +class Order: + UNKNOWN = 0 + INCREASING = 1 + DECREASING = 2 + UNORDERED = 3 + + @staticmethod + def check(a): + diff = np.diff(a) + diff_positive = diff >= 0 + n_diff_positive = np.count_nonzero(diff_positive) + all_increasing = n_diff_positive == len(diff_positive) + any_increasing = n_diff_positive > 0 + if all_increasing: + order = Order.INCREASING + elif any_increasing: + order = Order.UNORDERED + else: + order = Order.DECREASING + return order + + +def wraparound_indices(x, dim_len): + loc_neg = x < 0 + if np.any(loc_neg): + x[loc_neg] = x[loc_neg] + dim_len + + +def boundscheck_indices(x, dim_len): + if np.any(x < 0) or np.any(x >= dim_len): + raise BoundsCheckError(dim_len) + + +class IntArrayDimIndexer: + """Integer array selection against a single dimension.""" + + def __init__( + self, + dim_sel, + dim_len, + dim_chunk_len, + wraparound=True, + boundscheck=True, + order=Order.UNKNOWN, + ): + # ensure 1d array + dim_sel = np.asanyarray(dim_sel) + if not is_integer_array(dim_sel, 1): + raise IndexError( + "integer arrays in an orthogonal selection must be " "1-dimensional only" + ) + + # handle wraparound + if wraparound: + wraparound_indices(dim_sel, dim_len) + + # handle out of bounds + if boundscheck: + boundscheck_indices(dim_sel, dim_len) + + # store attributes + self.dim_len = dim_len + self.dim_chunk_len = dim_chunk_len + self.nchunks = ceildiv(self.dim_len, self.dim_chunk_len) + self.nitems = len(dim_sel) + + # determine which chunk is needed for each selection item + # note: for dense integer selections, the division operation here is the + # bottleneck + dim_sel_chunk = dim_sel // dim_chunk_len + + # determine order of indices + if order == Order.UNKNOWN: + order = Order.check(dim_sel) + self.order = order + + if self.order == Order.INCREASING: + self.dim_sel = dim_sel + self.dim_out_sel = None + elif self.order == Order.DECREASING: + self.dim_sel = dim_sel[::-1] + # TODO should be possible to do this without creating an arange + self.dim_out_sel = np.arange(self.nitems - 1, -1, -1) + else: + # sort indices to group by chunk + self.dim_out_sel = np.argsort(dim_sel_chunk) + self.dim_sel = np.take(dim_sel, self.dim_out_sel) + + # precompute number of selected items for each chunk + 
self.chunk_nitems = np.bincount(dim_sel_chunk, minlength=self.nchunks) + + # find chunks that we need to visit + self.dim_chunk_ixs = np.nonzero(self.chunk_nitems)[0] + + # compute offsets into the output array + self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) + + def __iter__(self): + for dim_chunk_ix in self.dim_chunk_ixs: + # find region in output + if dim_chunk_ix == 0: + start = 0 + else: + start = self.chunk_nitems_cumsum[dim_chunk_ix - 1] + stop = self.chunk_nitems_cumsum[dim_chunk_ix] + if self.order == Order.INCREASING: + dim_out_sel = slice(start, stop) + else: + dim_out_sel = self.dim_out_sel[start:stop] + + # find region in chunk + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_chunk_sel = self.dim_sel[start:stop] - dim_offset + + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +def slice_to_range(s: slice, l: int): # noqa: E741 + return range(*s.indices(l)) + + +def ix_(selection, shape): + """Convert an orthogonal selection to a numpy advanced (fancy) selection, like numpy.ix_ + but with support for slices and single ints.""" + + # normalisation + selection = replace_ellipsis(selection, shape) + + # replace slice and int as these are not supported by numpy.ix_ + selection = [ + slice_to_range(dim_sel, dim_len) + if isinstance(dim_sel, slice) + else [dim_sel] + if is_integer(dim_sel) + else dim_sel + for dim_sel, dim_len in zip(selection, shape) + ] + + # now get numpy to convert to a coordinate selection + selection = np.ix_(*selection) + + return selection + + +def oindex(a, selection): + """Implementation of orthogonal indexing with slices and ints.""" + selection = replace_ellipsis(selection, a.shape) + drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s)) + selection = ix_(selection, a.shape) + result = a[selection] + if drop_axes: + result = result.squeeze(axis=drop_axes) + return result + + +def oindex_set(a, selection, value): + selection = replace_ellipsis(selection, a.shape) + drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s)) + selection = ix_(selection, a.shape) + if not np.isscalar(value) and drop_axes: + value = np.asanyarray(value) + value_selection = [slice(None)] * len(a.shape) + for i in drop_axes: + value_selection[i] = np.newaxis + value_selection = tuple(value_selection) + value = value[value_selection] + a[selection] = value + + +# noinspection PyProtectedMember +class OrthogonalIndexer: + def __init__(self, selection, array): + # handle ellipsis + selection = replace_ellipsis(selection, array._shape) + + # normalize list to array + selection = replace_lists(selection) + + # setup per-dimension indexers + dim_indexers = [] + for dim_sel, dim_len, dim_chunk_len in zip(selection, array._shape, array._chunks): + if is_integer(dim_sel): + dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif isinstance(dim_sel, slice): + dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif is_integer_array(dim_sel): + dim_indexer = IntArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif is_bool_array(dim_sel): + dim_indexer = BoolArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) + + else: + raise IndexError( + "unsupported selection item for orthogonal indexing; " + "expected integer, slice, integer array or Boolean " + "array, got {!r}".format(type(dim_sel)) + ) + + dim_indexers.append(dim_indexer) + + self.array = array + self.dim_indexers = dim_indexers + self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) + 
self.is_advanced = not is_basic_selection(selection) + if self.is_advanced: + self.drop_axes = tuple( + i + for i, dim_indexer in enumerate(self.dim_indexers) + if isinstance(dim_indexer, IntDimIndexer) + ) + else: + self.drop_axes = None + + def __iter__(self): + for dim_projections in itertools.product(*self.dim_indexers): + chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) + chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) + + # handle advanced indexing arrays orthogonally + if self.is_advanced: + # N.B., numpy doesn't support orthogonal indexing directly as yet, + # so need to work around via np.ix_. Also np.ix_ does not support a + # mixture of arrays and slices or integers, so need to convert slices + # and integers into ranges. + chunk_selection = ix_(chunk_selection, self.array._chunks) + + # special case for non-monotonic indices + if not is_basic_selection(out_selection): + out_selection = ix_(out_selection, self.shape) + + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +class OIndex: + def __init__(self, array): + self.array = array + + def __getitem__(self, selection): + fields, selection = pop_fields(selection) + selection = ensure_tuple(selection) + selection = replace_lists(selection) + return self.array.get_orthogonal_selection(selection, fields=fields) + + def __setitem__(self, selection, value): + fields, selection = pop_fields(selection) + selection = ensure_tuple(selection) + selection = replace_lists(selection) + return self.array.set_orthogonal_selection(selection, value, fields=fields) + + +# noinspection PyProtectedMember +class BlockIndexer: + def __init__(self, selection, array): + # handle ellipsis + selection = replace_ellipsis(selection, array._shape) + + # normalize list to array + selection = replace_lists(selection) + + # setup per-dimension indexers + dim_indexers = [] + for dim_sel, dim_len, dim_chunk_size in zip(selection, array._shape, array._chunks): + dim_numchunks = int(np.ceil(dim_len / dim_chunk_size)) + + if is_integer(dim_sel): + if dim_sel < 0: + dim_sel = dim_numchunks + dim_sel + + start = dim_sel * dim_chunk_size + stop = start + dim_chunk_size + slice_ = slice(start, stop) + + elif is_slice(dim_sel): + start = dim_sel.start if dim_sel.start is not None else 0 + stop = dim_sel.stop if dim_sel.stop is not None else dim_numchunks + + if dim_sel.step not in {1, None}: + raise IndexError( + "unsupported selection item for block indexing; " + "expected integer or slice with step=1, got {!r}".format(type(dim_sel)) + ) + + # Can't reuse wraparound_indices because it expects a numpy array + # We have integers here. 
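+                # For example, with dim_numchunks = 10, a block selection of
+                # slice(-2, None) resolves to start = 10 + (-2) = 8 and
+                # stop = 10, i.e. the final two chunks along this dimension.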
+ if start < 0: + start = dim_numchunks + start + if stop < 0: + stop = dim_numchunks + stop + + start = start * dim_chunk_size + stop = stop * dim_chunk_size + slice_ = slice(start, stop) + + else: + raise IndexError( + "unsupported selection item for block indexing; " + "expected integer or slice, got {!r}".format(type(dim_sel)) + ) + + dim_indexer = SliceDimIndexer(slice_, dim_len, dim_chunk_size) + dim_indexers.append(dim_indexer) + + if start >= dim_len or start < 0: + raise BoundsCheckError(dim_len) + + self.dim_indexers = dim_indexers + self.shape = tuple(s.nitems for s in self.dim_indexers) + self.drop_axes = None + + def __iter__(self): + for dim_projections in itertools.product(*self.dim_indexers): + chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) + chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) + + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +class BlockIndex: + def __init__(self, array): + self.array = array + + def __getitem__(self, selection): + fields, selection = pop_fields(selection) + selection = ensure_tuple(selection) + selection = replace_lists(selection) + return self.array.get_block_selection(selection, fields=fields) + + def __setitem__(self, selection, value): + fields, selection = pop_fields(selection) + selection = ensure_tuple(selection) + selection = replace_lists(selection) + return self.array.set_block_selection(selection, value, fields=fields) + + +# noinspection PyProtectedMember +def is_coordinate_selection(selection, array): + return (len(selection) == len(array._shape)) and all( + is_integer(dim_sel) or is_integer_array(dim_sel) for dim_sel in selection + ) + + +# noinspection PyProtectedMember +def is_mask_selection(selection, array): + return ( + len(selection) == 1 and is_bool_array(selection[0]) and selection[0].shape == array._shape + ) + + +# noinspection PyProtectedMember +class CoordinateIndexer: + def __init__(self, selection, array): + # some initial normalization + selection = ensure_tuple(selection) + selection = tuple([i] if is_integer(i) else i for i in selection) + selection = replace_lists(selection) + + # validation + if not is_coordinate_selection(selection, array): + raise IndexError( + "invalid coordinate selection; expected one integer " + "(coordinate) array per dimension of the target array, " + "got {!r}".format(selection) + ) + + # handle wraparound, boundscheck + for dim_sel, dim_len in zip(selection, array.shape): + # handle wraparound + wraparound_indices(dim_sel, dim_len) + + # handle out of bounds + boundscheck_indices(dim_sel, dim_len) + + # compute chunk index for each point in the selection + chunks_multi_index = tuple( + dim_sel // dim_chunk_len for (dim_sel, dim_chunk_len) in zip(selection, array._chunks) + ) + + # broadcast selection - this will raise error if array dimensions don't match + selection = np.broadcast_arrays(*selection) + chunks_multi_index = np.broadcast_arrays(*chunks_multi_index) + + # remember shape of selection, because we will flatten indices for processing + self.sel_shape = selection[0].shape if selection[0].shape else (1,) + + # flatten selection + selection = [dim_sel.reshape(-1) for dim_sel in selection] + chunks_multi_index = [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index] + + # ravel chunk indices + chunks_raveled_indices = np.ravel_multi_index(chunks_multi_index, dims=array._cdata_shape) + + # group points by chunk + if 
np.any(np.diff(chunks_raveled_indices) < 0): + # optimisation, only sort if needed + sel_sort = np.argsort(chunks_raveled_indices) + selection = tuple(dim_sel[sel_sort] for dim_sel in selection) + else: + sel_sort = None + + # store attributes + self.selection = selection + self.sel_sort = sel_sort + self.shape = selection[0].shape if selection[0].shape else (1,) + self.drop_axes = None + self.array = array + + # precompute number of selected items for each chunk + self.chunk_nitems = np.bincount(chunks_raveled_indices, minlength=array.nchunks) + self.chunk_nitems_cumsum = np.cumsum(self.chunk_nitems) + # locate the chunks we need to process + self.chunk_rixs = np.nonzero(self.chunk_nitems)[0] + + # unravel chunk indices + self.chunk_mixs = np.unravel_index(self.chunk_rixs, array._cdata_shape) + + def __iter__(self): + # iterate over chunks + for i, chunk_rix in enumerate(self.chunk_rixs): + chunk_coords = tuple(m[i] for m in self.chunk_mixs) + if chunk_rix == 0: + start = 0 + else: + start = self.chunk_nitems_cumsum[chunk_rix - 1] + stop = self.chunk_nitems_cumsum[chunk_rix] + if self.sel_sort is None: + out_selection = slice(start, stop) + else: + out_selection = self.sel_sort[start:stop] + + chunk_offsets = tuple( + dim_chunk_ix * dim_chunk_len + for dim_chunk_ix, dim_chunk_len in zip(chunk_coords, self.array._chunks) + ) + chunk_selection = tuple( + dim_sel[start:stop] - dim_chunk_offset + for (dim_sel, dim_chunk_offset) in zip(self.selection, chunk_offsets) + ) + + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +# noinspection PyProtectedMember +class MaskIndexer(CoordinateIndexer): + def __init__(self, selection, array): + # some initial normalization + selection = ensure_tuple(selection) + selection = replace_lists(selection) + + # validation + if not is_mask_selection(selection, array): + raise IndexError( + "invalid mask selection; expected one Boolean (mask)" + "array with the same shape as the target array, got {!r}".format(selection) + ) + + # convert to indices + selection = np.nonzero(selection[0]) + + # delegate the rest to superclass + super().__init__(selection, array) + + +class VIndex: + def __init__(self, array): + self.array = array + + def __getitem__(self, selection): + fields, selection = pop_fields(selection) + selection = ensure_tuple(selection) + selection = replace_lists(selection) + if is_coordinate_selection(selection, self.array): + return self.array.get_coordinate_selection(selection, fields=fields) + elif is_mask_selection(selection, self.array): + return self.array.get_mask_selection(selection, fields=fields) + else: + raise VindexInvalidSelectionError(selection) + + def __setitem__(self, selection, value): + fields, selection = pop_fields(selection) + selection = ensure_tuple(selection) + selection = replace_lists(selection) + if is_coordinate_selection(selection, self.array): + self.array.set_coordinate_selection(selection, value, fields=fields) + elif is_mask_selection(selection, self.array): + self.array.set_mask_selection(selection, value, fields=fields) + else: + raise VindexInvalidSelectionError(selection) + + +def check_fields(fields, dtype): + # early out + if fields is None: + return dtype + # check type + if not isinstance(fields, (str, list, tuple)): + raise IndexError( + "'fields' argument must be a string or list of strings; found " "{!r}".format( + type(fields) + ) + ) + if fields: + if dtype.names is None: + raise IndexError("invalid 'fields' argument, array does not have any fields") + try: + if isinstance(fields, 
str): + # single field selection + out_dtype = dtype[fields] + else: + # multiple field selection + out_dtype = np.dtype([(f, dtype[f]) for f in fields]) + except KeyError as e: + raise IndexError("invalid 'fields' argument, field not found: {!r}".format(e)) + else: + return out_dtype + else: + return dtype + + +def check_no_multi_fields(fields): + if isinstance(fields, list): + if len(fields) == 1: + return fields[0] + elif len(fields) > 1: + raise IndexError("multiple fields are not supported for this operation") + return fields + + +def pop_fields(selection): + if isinstance(selection, str): + # single field selection + fields = selection + selection = () + elif not isinstance(selection, tuple): + # single selection item, no fields + fields = None + # leave selection as-is + else: + # multiple items, split fields from selection items + fields = [f for f in selection if isinstance(f, str)] + fields = fields[0] if len(fields) == 1 else fields + selection = tuple(s for s in selection if not isinstance(s, str)) + selection = selection[0] if len(selection) == 1 else selection + return fields, selection + + +def make_slice_selection(selection): + ls = [] + for dim_selection in selection: + if is_integer(dim_selection): + ls.append(slice(int(dim_selection), int(dim_selection) + 1, 1)) + elif isinstance(dim_selection, np.ndarray): + if len(dim_selection) == 1: + ls.append(slice(int(dim_selection[0]), int(dim_selection[0]) + 1, 1)) + else: + raise ArrayIndexError() + else: + ls.append(dim_selection) + return ls + + +class PartialChunkIterator: + """Iterator to retrieve the specific coordinates of requested data + from within a compressed chunk. + + Parameters + ---------- + selection : tuple + tuple of slice objects to take from the chunk + arr_shape : shape of chunk to select data from + + Attributes + ----------- + arr_shape + selection + + Returns + ------- + Tuple with 3 elements: + + start: int + elements offset in the chunk to read from + nitems: int + number of elements to read in the chunk from start + partial_out_selection: list of slices + indices of a temporary empty array of size `Array._chunks` to assign + the decompressed data to after the partial read. + + Notes + ----- + An array is flattened when compressed with blosc, so this iterator takes + the wanted selection of an array and determines the wanted coordinates + of the flattened, compressed data to be read and then decompressed. The + decompressed data is then placed in a temporary empty array of size + `Array._chunks` at the indices yielded as partial_out_selection. + Once all the slices yielded by this iterator have been read, decompressed + and written to the temporary array, the wanted slice of the chunk can be + indexed from the temporary array and written to the out_selection slice + of the out array. 
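+
+    Examples
+    --------
+    A minimal sketch of the iteration, assuming a contiguous selection over
+    the leading axis of a (4, 4) chunk (output shown indicatively; exact
+    integer types may vary with the numpy version)::
+
+        >>> pci = PartialChunkIterator((slice(0, 2, 1),), (4, 4))
+        >>> list(pci)  # doctest: +SKIP
+        [(0, 8, (slice(0, 2, 1),))]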
+ + """ + + def __init__(self, selection, arr_shape): + selection = make_slice_selection(selection) + self.arr_shape = arr_shape + + # number of selection dimensions can't be greater than the number of chunk dimensions + if len(selection) > len(self.arr_shape): + raise ValueError( + "Selection has more dimensions then the array:\n" + f"selection dimensions = {len(selection)}\n" + f"array dimensions = {len(self.arr_shape)}" + ) + + # any selection can not be out of the range of the chunk + selection_shape = np.empty(self.arr_shape)[tuple(selection)].shape + if any( + selection_dim < 0 or selection_dim > arr_dim + for selection_dim, arr_dim in zip(selection_shape, self.arr_shape) + ): + raise IndexError( + "a selection index is out of range for the dimension" + ) # pragma: no cover + + for i, dim_size in enumerate(self.arr_shape[::-1]): + index = len(self.arr_shape) - (i + 1) + if index <= len(selection) - 1: + slice_size = selection_shape[index] + if slice_size == dim_size and index > 0: + selection.pop() + else: + break + + chunk_loc_slices = [] + last_dim_slice = None if selection[-1].step > 1 else selection.pop() + for arr_shape_i, sl in zip(arr_shape, selection): + dim_chunk_loc_slices = [] + assert isinstance(sl, slice) + for x in slice_to_range(sl, arr_shape_i): + dim_chunk_loc_slices.append(slice(x, x + 1, 1)) + chunk_loc_slices.append(dim_chunk_loc_slices) + if last_dim_slice: + chunk_loc_slices.append([last_dim_slice]) + self.chunk_loc_slices = list(itertools.product(*chunk_loc_slices)) + + def __iter__(self): + chunk1 = self.chunk_loc_slices[0] + nitems = (chunk1[-1].stop - chunk1[-1].start) * np.prod( + self.arr_shape[len(chunk1) :], dtype=int + ) + for partial_out_selection in self.chunk_loc_slices: + start = 0 + for i, sl in enumerate(partial_out_selection): + start += sl.start * np.prod(self.arr_shape[i + 1 :], dtype=int) + yield start, nitems, partial_out_selection diff --git a/src/zarr/meta.py b/src/zarr/v2/meta.py similarity index 99% rename from src/zarr/meta.py rename to src/zarr/v2/meta.py index 7cca228a14..ee9cc57389 100644 --- a/src/zarr/meta.py +++ b/src/zarr/v2/meta.py @@ -4,8 +4,8 @@ import numpy as np -from zarr.errors import MetadataError -from zarr.util import json_dumps, json_loads +from zarr.v2.errors import MetadataError +from zarr.v2.util import json_dumps, json_loads from typing import cast, Union, Any, List, Mapping as MappingType, TYPE_CHECKING diff --git a/src/zarr/meta_v1.py b/src/zarr/v2/meta_v1.py similarity index 97% rename from src/zarr/meta_v1.py rename to src/zarr/v2/meta_v1.py index 4ac381f2ca..881b9191eb 100644 --- a/src/zarr/meta_v1.py +++ b/src/zarr/v2/meta_v1.py @@ -2,7 +2,7 @@ import numpy as np -from zarr.errors import MetadataError +from zarr.v2.errors import MetadataError def decode_metadata(b): diff --git a/src/zarr/n5.py b/src/zarr/v2/n5.py similarity index 98% rename from src/zarr/n5.py rename to src/zarr/v2/n5.py index 1293d1739b..92b0f37924 100644 --- a/src/zarr/n5.py +++ b/src/zarr/v2/n5.py @@ -11,12 +11,12 @@ from numcodecs.compat import ndarray_copy from numcodecs.registry import get_codec, register_codec -from .meta import ZARR_FORMAT, json_dumps, json_loads -from .storage import FSStore -from .storage import NestedDirectoryStore, _prog_ckey, _prog_number, normalize_storage_path -from .storage import array_meta_key as zarr_array_meta_key -from .storage import attrs_key as zarr_attrs_key -from .storage import group_meta_key as zarr_group_meta_key +from zarr.v2.meta import ZARR_FORMAT, json_dumps, json_loads +from zarr.v2.storage 
import FSStore +from zarr.v2.storage import NestedDirectoryStore, _prog_ckey, _prog_number, normalize_storage_path +from zarr.v2.storage import array_meta_key as zarr_array_meta_key +from zarr.v2.storage import attrs_key as zarr_attrs_key +from zarr.v2.storage import group_meta_key as zarr_group_meta_key N5_FORMAT = "2.0.0" diff --git a/src/zarr/storage.py b/src/zarr/v2/storage.py similarity index 96% rename from src/zarr/storage.py rename to src/zarr/v2/storage.py index ae596756f8..56deeeb555 100644 --- a/src/zarr/storage.py +++ b/src/zarr/v2/storage.py @@ -38,9 +38,9 @@ from numcodecs.compat import ensure_bytes, ensure_text, ensure_contiguous_ndarray_like from numcodecs.registry import codec_registry -from zarr.context import Context +from zarr.v2.context import Context -from zarr.errors import ( +from zarr.v2.errors import ( MetadataError, BadCompressorError, ContainsArrayError, @@ -48,8 +48,8 @@ FSPathExistNotDir, ReadOnlyError, ) -from zarr.meta import encode_array_metadata, encode_group_metadata -from zarr.util import ( +from zarr.v2.meta import encode_array_metadata, encode_group_metadata +from zarr.v2.util import ( buffer_size, json_loads, nolock, @@ -64,8 +64,8 @@ ensure_contiguous_ndarray_or_bytes, ) -from zarr._storage.absstore import ABSStore # noqa: F401 -from zarr._storage.store import ( # noqa: F401 +from zarr.v2._storage.absstore import ABSStore # noqa: F401 +from zarr.v2._storage.store import ( # noqa: F401 _listdir_from_keys, _rename_from_keys, _rmdir_from_keys, @@ -89,11 +89,11 @@ try: # noinspection PyUnresolvedReferences - from zarr.codecs import Blosc + from zarr.v2.codecs import Blosc default_compressor = Blosc() except ImportError: # pragma: no cover - from zarr.codecs import Zlib + from zarr.v2.codecs import Zlib default_compressor = Zlib() @@ -146,7 +146,7 @@ def normalize_store_arg(store: Any, storage_options=None, mode="r") -> BaseStore if store.endswith(".zip"): return ZipStore(store, mode=mode) elif store.endswith(".n5"): - from zarr.n5 import N5Store + from zarr.v2.n5 import N5Store return N5Store(store) else: @@ -310,7 +310,7 @@ def init_array( -------- Initialize an array store:: - >>> from zarr.storage import init_array, KVStore + >>> from zarr.v2.storage import init_array, KVStore >>> store = KVStore(dict()) >>> init_array(store, shape=(10000, 10000), chunks=(1000, 1000)) >>> sorted(store.keys()) @@ -649,16 +649,16 @@ class MemoryStore(Store): This is the default class used when creating a group. E.g.:: >>> import zarr - >>> g = zarr.group() + >>> g = zarr.v2.group() >>> type(g.store) - + Note that the default class when creating an array is the built-in :class:`KVStore` class, i.e.:: - >>> z = zarr.zeros(100) + >>> z = zarr.v2.zeros(100) >>> type(z.store) - + Notes ----- @@ -860,8 +860,8 @@ class DirectoryStore(Store): Store a single array:: >>> import zarr - >>> store = zarr.DirectoryStore('data/array.zarr') - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> store = zarr.v2.DirectoryStore('data/array.zarr') + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) >>> z[...] 
= 42 Each chunk of the array is stored as a separate file on the file system, @@ -873,8 +873,8 @@ class DirectoryStore(Store): Store a group:: - >>> store = zarr.DirectoryStore('data/group.zarr') - >>> root = zarr.group(store=store, overwrite=True) + >>> store = zarr.v2.DirectoryStore('data/group.zarr') + >>> root = zarr.v2.group(store=store, overwrite=True) >>> foo = root.create_group('foo') >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) >>> bar[...] = 42 @@ -1447,8 +1447,8 @@ class NestedDirectoryStore(DirectoryStore): Store a single array:: >>> import zarr - >>> store = zarr.NestedDirectoryStore('data/array.zarr') - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> store = zarr.v2.NestedDirectoryStore('data/array.zarr') + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) >>> z[...] = 42 Each chunk of the array is stored as a separate file on the file system, @@ -1464,8 +1464,8 @@ class NestedDirectoryStore(DirectoryStore): Store a group:: - >>> store = zarr.NestedDirectoryStore('data/group.zarr') - >>> root = zarr.group(store=store, overwrite=True) + >>> store = zarr.v2.NestedDirectoryStore('data/group.zarr') + >>> root = zarr.v2.group(store=store, overwrite=True) >>> foo = root.create_group('foo') >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) >>> bar[...] = 42 @@ -1536,15 +1536,15 @@ class ZipStore(Store): Store a single array:: >>> import zarr - >>> store = zarr.ZipStore('data/array.zip', mode='w') - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store) + >>> store = zarr.v2.ZipStore('data/array.zip', mode='w') + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store) >>> z[...] = 42 >>> store.close() # don't forget to call this when you're done Store a group:: - >>> store = zarr.ZipStore('data/group.zip', mode='w') - >>> root = zarr.group(store=store) + >>> store = zarr.v2.ZipStore('data/group.zip', mode='w') + >>> root = zarr.v2.group(store=store) >>> foo = root.create_group('foo') >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) >>> bar[...] = 42 @@ -1555,8 +1555,8 @@ class ZipStore(Store): class also supports the context manager protocol, which ensures the ``close()`` method is called on leaving the context, e.g.:: - >>> with zarr.ZipStore('data/array.zip', mode='w') as store: - ... z = zarr.zeros((10, 10), chunks=(5, 5), store=store) + >>> with zarr.v2.ZipStore('data/array.zip', mode='w') as store: + ... z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store) ... z[...] = 42 ... # no need to call store.close() @@ -1569,8 +1569,8 @@ class also supports the context manager protocol, which ensures the ``close()`` triggered if you attempt to write data to a Zarr array more than once, e.g.:: - >>> store = zarr.ZipStore('data/example.zip', mode='w') - >>> z = zarr.zeros(100, chunks=10, store=store) + >>> store = zarr.v2.ZipStore('data/example.zip', mode='w') + >>> z = zarr.v2.zeros(100, chunks=10, store=store) >>> # first write OK ... z[...] = 42 >>> # second write generates warnings @@ -1581,22 +1581,22 @@ class also supports the context manager protocol, which ensures the ``close()`` once to a Zarr array, but the write operations are not aligned with chunk boundaries, e.g.:: - >>> store = zarr.ZipStore('data/example.zip', mode='w') - >>> z = zarr.zeros(100, chunks=10, store=store) + >>> store = zarr.v2.ZipStore('data/example.zip', mode='w') + >>> z = zarr.v2.zeros(100, chunks=10, store=store) >>> z[5:15] = 42 >>> # write overlaps chunk previously written, generates warnings ... 
z[15:25] = 42 # doctest: +SKIP To avoid creating duplicate entries, only write data once, and align writes with chunk boundaries. This alignment is done automatically if you call - ``z[...] = ...`` or create an array from existing data via :func:`zarr.array`. + ``z[...] = ...`` or create an array from existing data via :func:`zarr.v2.array`. Alternatively, use a :class:`DirectoryStore` when writing the data, then manually Zip the directory and use the Zip file for subsequent reads. Take note that the files in the Zip file must be relative to the root of the Zarr archive. You may find it easier to create such a Zip file with ``7z``, e.g.:: - 7z a -tzip archive.zarr.zip archive.zarr/. + 7z a -tzip archive.zarr.v2.zip archive.zarr/. Safe to write in multiple threads but not in multiple processes. @@ -1841,15 +1841,15 @@ class DBMStore(Store): Store a single array:: >>> import zarr - >>> store = zarr.DBMStore('data/array.db') - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> store = zarr.v2.DBMStore('data/array.db') + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) >>> z[...] = 42 >>> store.close() # don't forget to call this when you're done Store a group:: - >>> store = zarr.DBMStore('data/group.db') - >>> root = zarr.group(store=store, overwrite=True) + >>> store = zarr.v2.DBMStore('data/group.db') + >>> root = zarr.v2.group(store=store, overwrite=True) >>> foo = root.create_group('foo') >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) >>> bar[...] = 42 @@ -1860,8 +1860,8 @@ class DBMStore(Store): DBMStore class also supports the context manager protocol, which ensures the ``close()`` method is called on leaving the context, e.g.:: - >>> with zarr.DBMStore('data/array.db') as store: - ... z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> with zarr.v2.DBMStore('data/array.db') as store: + ... z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) ... z[...] = 42 ... # no need to call store.close() @@ -1871,8 +1871,8 @@ class DBMStore(Store): Berkeley DB database can be used:: >>> import bsddb3 - >>> store = zarr.DBMStore('data/array.bdb', open=bsddb3.btopen) - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> store = zarr.v2.DBMStore('data/array.bdb', open=bsddb3.btopen) + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) >>> z[...] = 42 >>> store.close() @@ -2040,15 +2040,15 @@ class LMDBStore(Store): Store a single array:: >>> import zarr - >>> store = zarr.LMDBStore('data/array.mdb') - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> store = zarr.v2.LMDBStore('data/array.mdb') + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) >>> z[...] = 42 >>> store.close() # don't forget to call this when you're done Store a group:: - >>> store = zarr.LMDBStore('data/group.mdb') - >>> root = zarr.group(store=store, overwrite=True) + >>> store = zarr.v2.LMDBStore('data/group.mdb') + >>> root = zarr.v2.group(store=store, overwrite=True) >>> foo = root.create_group('foo') >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) >>> bar[...] = 42 @@ -2059,8 +2059,8 @@ class LMDBStore(Store): DBMStore class also supports the context manager protocol, which ensures the ``close()`` method is called on leaving the context, e.g.:: - >>> with zarr.LMDBStore('data/array.mdb') as store: - ... 
z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> with zarr.v2.LMDBStore('data/array.mdb') as store: + ... z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) ... z[...] = 42 ... # no need to call store.close() @@ -2216,8 +2216,8 @@ class LRUStoreCache(Store): >>> import zarr >>> s3 = s3fs.S3FileSystem(anon=True, client_kwargs=dict(region_name='eu-west-2')) >>> store = s3fs.S3Map(root='zarr-demo/store', s3=s3, check=False) - >>> cache = zarr.LRUStoreCache(store, max_size=2**28) - >>> root = zarr.group(store=cache) # doctest: +REMOTE_DATA + >>> cache = zarr.v2.LRUStoreCache(store, max_size=2**28) + >>> root = zarr.v2.group(store=cache) # doctest: +REMOTE_DATA >>> z = root['foo/bar/baz'] # doctest: +REMOTE_DATA >>> from timeit import timeit >>> # first data access is relatively slow, retrieved from store @@ -2410,15 +2410,15 @@ class SQLiteStore(Store): Store a single array:: >>> import zarr - >>> store = zarr.SQLiteStore('data/array.sqldb') - >>> z = zarr.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) + >>> store = zarr.v2.SQLiteStore('data/array.sqldb') + >>> z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store, overwrite=True) >>> z[...] = 42 >>> store.close() # don't forget to call this when you're done Store a group:: - >>> store = zarr.SQLiteStore('data/group.sqldb') - >>> root = zarr.group(store=store, overwrite=True) + >>> store = zarr.v2.SQLiteStore('data/group.sqldb') + >>> root = zarr.v2.group(store=store, overwrite=True) >>> foo = root.create_group('foo') >>> bar = foo.zeros('bar', shape=(10, 10), chunks=(5, 5)) >>> bar[...] = 42 @@ -2751,7 +2751,7 @@ class ConsolidatedMetadataStore(Store): The purpose of this class, is to be able to get all of the metadata for a given array in a single read operation from the underlying storage. - See :func:`zarr.convenience.consolidate_metadata` for how to create this + See :func:`zarr.v2.convenience.consolidate_metadata` for how to create this single metadata key. This class loads from the one key, and stores the data in a dict, so that @@ -2760,7 +2760,7 @@ class ConsolidatedMetadataStore(Store): This class is read-only, and attempts to change the array metadata will fail, but changing the data is possible. If the backend storage is changed directly, then the metadata stored here could become obsolete, and - :func:`zarr.convenience.consolidate_metadata` should be called again and the class + :func:`zarr.v2.convenience.consolidate_metadata` should be called again and the class re-invoked. The use case is for write once, read many times. .. 
versionadded:: 2.3 @@ -2777,7 +2777,7 @@ class ConsolidatedMetadataStore(Store): See Also -------- - zarr.convenience.consolidate_metadata, zarr.convenience.open_consolidated + zarr.v2.convenience.consolidate_metadata, zarr.v2.convenience.open_consolidated """ diff --git a/src/zarr/v2/sync.py b/src/zarr/v2/sync.py new file mode 100644 index 0000000000..49684a51ee --- /dev/null +++ b/src/zarr/v2/sync.py @@ -0,0 +1,48 @@ +import os +from collections import defaultdict +from threading import Lock + +import fasteners + + +class ThreadSynchronizer: + """Provides synchronization using thread locks.""" + + def __init__(self): + self.mutex = Lock() + self.locks = defaultdict(Lock) + + def __getitem__(self, item): + with self.mutex: + return self.locks[item] + + def __getstate__(self): + return True + + def __setstate__(self, *args): + # reinitialize from scratch + self.__init__() + + +class ProcessSynchronizer: + """Provides synchronization using file locks via the + `fasteners `_ + package. + + Parameters + ---------- + path : string + Path to a directory on a file system that is shared by all processes. + N.B., this should be a *different* path to where you store the array. + + """ + + def __init__(self, path): + self.path = path + + def __getitem__(self, item): + path = os.path.join(self.path, item) + lock = fasteners.InterProcessLock(path) + return lock + + # pickling and unpickling should be handled automatically diff --git a/src/zarr/util.py b/src/zarr/v2/util.py similarity index 100% rename from src/zarr/util.py rename to src/zarr/v2/util.py diff --git a/src/zarr/v3/__init__.py b/src/zarr/v3/__init__.py deleted file mode 100644 index 3441fa67be..0000000000 --- a/src/zarr/v3/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - -from typing import Union - -import zarr.v3.codecs # noqa: F401 -from zarr.v3.array import Array, AsyncArray # noqa: F401 -from zarr.v3.array_v2 import ArrayV2 -from zarr.v3.config import RuntimeConfiguration # noqa: F401 -from zarr.v3.group import AsyncGroup, Group # noqa: F401 -from zarr.v3.metadata import runtime_configuration # noqa: F401 -from zarr.v3.store import ( # noqa: F401 - StoreLike, - make_store_path, -) -from zarr.v3.sync import sync as _sync - - -async def open_auto_async( - store: StoreLike, - runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), -) -> Union[AsyncArray, AsyncGroup]: - store_path = make_store_path(store) - try: - return await AsyncArray.open(store_path, runtime_configuration=runtime_configuration_) - except KeyError: - return await AsyncGroup.open(store_path, runtime_configuration=runtime_configuration_) - - -def open_auto( - store: StoreLike, - runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), -) -> Union[Array, ArrayV2, Group]: - object = _sync( - open_auto_async(store, runtime_configuration_), - runtime_configuration_.asyncio_loop, - ) - if isinstance(object, AsyncArray): - return Array(object) - if isinstance(object, AsyncGroup): - return Group(object) - raise TypeError(f"Unexpected object type. 
Got {type(object)}.") diff --git a/src/zarr/v3/codecs/__init__.py b/src/zarr/v3/codecs/__init__.py deleted file mode 100644 index 474344ec25..0000000000 --- a/src/zarr/v3/codecs/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import annotations - -from zarr.v3.codecs.blosc import BloscCodec, BloscCname, BloscShuffle # noqa: F401 -from zarr.v3.codecs.bytes import BytesCodec, Endian # noqa: F401 -from zarr.v3.codecs.crc32c_ import Crc32cCodec # noqa: F401 -from zarr.v3.codecs.gzip import GzipCodec # noqa: F401 -from zarr.v3.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation # noqa: F401 -from zarr.v3.codecs.transpose import TransposeCodec # noqa: F401 -from zarr.v3.codecs.zstd import ZstdCodec # noqa: F401 diff --git a/src/zarr/v3/indexing.py b/src/zarr/v3/indexing.py deleted file mode 100644 index 15adad111d..0000000000 --- a/src/zarr/v3/indexing.py +++ /dev/null @@ -1,208 +0,0 @@ -from __future__ import annotations - -import itertools -import math -from typing import Iterator, List, NamedTuple, Optional, Tuple - -from zarr.v3.common import ChunkCoords, Selection, SliceSelection, product - - -def _ensure_tuple(v: Selection) -> SliceSelection: - if not isinstance(v, tuple): - v = (v,) - return v - - -def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords): - raise IndexError( - "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) - ) - - -def _err_negative_step(): - raise IndexError("only slices with step >= 1 are supported") - - -def _check_selection_length(selection: SliceSelection, shape: ChunkCoords): - if len(selection) > len(shape): - _err_too_many_indices(selection, shape) - - -def _ensure_selection( - selection: Selection, - shape: ChunkCoords, -) -> SliceSelection: - selection = _ensure_tuple(selection) - - # fill out selection if not completely specified - if len(selection) < len(shape): - selection += (slice(None),) * (len(shape) - len(selection)) - - # check selection not too long - _check_selection_length(selection, shape) - - return selection - - -class _ChunkDimProjection(NamedTuple): - dim_chunk_ix: int - dim_chunk_sel: slice - dim_out_sel: Optional[slice] - - -def _ceildiv(a, b): - return math.ceil(a / b) - - -class _SliceDimIndexer: - dim_sel: slice - dim_len: int - dim_chunk_len: int - nitems: int - - start: int - stop: int - step: int - - def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int): - self.start, self.stop, self.step = dim_sel.indices(dim_len) - if self.step < 1: - _err_negative_step() - - self.dim_len = dim_len - self.dim_chunk_len = dim_chunk_len - self.nitems = max(0, _ceildiv((self.stop - self.start), self.step)) - self.nchunks = _ceildiv(self.dim_len, self.dim_chunk_len) - - def __iter__(self) -> Iterator[_ChunkDimProjection]: - # figure out the range of chunks we need to visit - dim_chunk_ix_from = self.start // self.dim_chunk_len - dim_chunk_ix_to = _ceildiv(self.stop, self.dim_chunk_len) - - # iterate over chunks in range - for dim_chunk_ix in range(dim_chunk_ix_from, dim_chunk_ix_to): - # compute offsets for chunk within overall array - dim_offset = dim_chunk_ix * self.dim_chunk_len - dim_limit = min(self.dim_len, (dim_chunk_ix + 1) * self.dim_chunk_len) - - # determine chunk length, accounting for trailing chunk - dim_chunk_len = dim_limit - dim_offset - - if self.start < dim_offset: - # selection starts before current chunk - dim_chunk_sel_start = 0 - remainder = (dim_offset - self.start) % self.step - if remainder: - dim_chunk_sel_start += self.step - remainder - 
# compute number of previous items, provides offset into output array - dim_out_offset = _ceildiv((dim_offset - self.start), self.step) - - else: - # selection starts within current chunk - dim_chunk_sel_start = self.start - dim_offset - dim_out_offset = 0 - - if self.stop > dim_limit: - # selection ends after current chunk - dim_chunk_sel_stop = dim_chunk_len - - else: - # selection ends within current chunk - dim_chunk_sel_stop = self.stop - dim_offset - - dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step) - dim_chunk_nitems = _ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) - dim_out_sel = slice(dim_out_offset, dim_out_offset + dim_chunk_nitems) - - yield _ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) - - -class _ChunkProjection(NamedTuple): - chunk_coords: ChunkCoords - chunk_selection: SliceSelection - out_selection: SliceSelection - - -class BasicIndexer: - dim_indexers: List[_SliceDimIndexer] - shape: ChunkCoords - - def __init__( - self, - selection: Selection, - shape: Tuple[int, ...], - chunk_shape: Tuple[int, ...], - ): - # setup per-dimension indexers - self.dim_indexers = [ - _SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) - for dim_sel, dim_len, dim_chunk_len in zip( - _ensure_selection(selection, shape), shape, chunk_shape - ) - ] - self.shape = tuple(s.nitems for s in self.dim_indexers) - - def __iter__(self) -> Iterator[_ChunkProjection]: - for dim_projections in itertools.product(*self.dim_indexers): - chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) - chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) - out_selection = tuple( - p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None - ) - - yield _ChunkProjection(chunk_coords, chunk_selection, out_selection) - - -def morton_order_iter(chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: - def decode_morton(z: int, chunk_shape: ChunkCoords) -> ChunkCoords: - # Inspired by compressed morton code as implemented in Neuroglancer - # https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#compressed-morton-code - bits = tuple(math.ceil(math.log2(c)) for c in chunk_shape) - max_coords_bits = max(*bits) - input_bit = 0 - input_value = z - out = [0 for _ in range(len(chunk_shape))] - - for coord_bit in range(max_coords_bits): - for dim in range(len(chunk_shape)): - if coord_bit < bits[dim]: - bit = (input_value >> input_bit) & 1 - out[dim] |= bit << coord_bit - input_bit += 1 - return tuple(out) - - for i in range(product(chunk_shape)): - yield decode_morton(i, chunk_shape) - - -def c_order_iter(chunks_per_shard: ChunkCoords) -> Iterator[ChunkCoords]: - return itertools.product(*(range(x) for x in chunks_per_shard)) - - -def is_total_slice(item: Selection, shape: ChunkCoords): - """Determine whether `item` specifies a complete slice of array with the - given `shape`. 
Used to optimize __setitem__ operations on the Chunk - class.""" - - # N.B., assume shape is normalized - if item == slice(None): - return True - if isinstance(item, slice): - item = (item,) - if isinstance(item, tuple): - return all( - ( - isinstance(dim_sel, slice) - and ( - (dim_sel == slice(None)) - or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) - ) - ) - for dim_sel, dim_len in zip(item, shape) - ) - else: - raise TypeError("expected slice or tuple of slices, found %r" % item) - - -def all_chunk_coords(shape: ChunkCoords, chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: - return itertools.product(*(range(0, _ceildiv(s, c)) for s, c in zip(shape, chunk_shape))) diff --git a/src/zarr/v3/store/__init__.py b/src/zarr/v3/store/__init__.py deleted file mode 100644 index 2268381d2a..0000000000 --- a/src/zarr/v3/store/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# flake8: noqa -from zarr.v3.store.core import StorePath, StoreLike, make_store_path -from zarr.v3.store.remote import RemoteStore -from zarr.v3.store.local import LocalStore -from zarr.v3.store.memory import MemoryStore diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py deleted file mode 100644 index 41dfeadba9..0000000000 --- a/src/zarr/v3/sync.py +++ /dev/null @@ -1,120 +0,0 @@ -from __future__ import annotations - -import asyncio -import threading -from typing import ( - Any, - AsyncIterator, - Coroutine, - List, - Optional, - TypeVar, -) -from typing_extensions import ParamSpec - -from zarr.v3.config import SyncConfiguration - - -# From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py - -iothread: List[Optional[threading.Thread]] = [None] # dedicated IO thread -loop: List[Optional[asyncio.AbstractEventLoop]] = [ - None -] # global event loop for any non-async instance -_lock: Optional[threading.Lock] = None # global lock placeholder -get_running_loop = asyncio.get_running_loop - - -def _get_lock() -> threading.Lock: - """Allocate or return a threading lock. - - The lock is allocated on first use to allow setting one lock per forked process. - """ - global _lock - if not _lock: - _lock = threading.Lock() - return _lock - - -async def _runner(event: threading.Event, coro: Coroutine, result_box: List[Optional[Any]]): - try: - result_box[0] = await coro - except Exception as ex: - result_box[0] = ex - finally: - event.set() - - -def sync(coro: Coroutine, loop: Optional[asyncio.AbstractEventLoop] = None): - """ - Make loop run coroutine until it returns. Runs in other thread - - Examples - -------- - >>> sync(async_function(), existing_loop) - """ - if loop is None: - # NB: if the loop is not running *yet*, it is OK to submit work - # and we will wait for it - loop = _get_loop() - if loop is None or loop.is_closed(): - raise RuntimeError("Loop is not running") - try: - loop0 = asyncio.events.get_running_loop() - if loop0 is loop: - raise NotImplementedError("Calling sync() from within a running loop") - except RuntimeError: - pass - result_box: List[Optional[Any]] = [None] - event = threading.Event() - asyncio.run_coroutine_threadsafe(_runner(event, coro, result_box), loop) - while True: - # this loops allows thread to get interrupted - if event.wait(1): - break - - return_result = result_box[0] - if isinstance(return_result, BaseException): - raise return_result - else: - return return_result - - -def _get_loop(): - """Create or return the default fsspec IO loop - - The loop will be running on a separate thread. 
- """ - if loop[0] is None: - with _get_lock(): - # repeat the check just in case the loop got filled between the - # previous two calls from another thread - if loop[0] is None: - new_loop = asyncio.new_event_loop() - loop[0] = new_loop - th = threading.Thread(target=new_loop.run_forever, name="zarrIO") - th.daemon = True - th.start() - iothread[0] = th - return loop[0] - - -P = ParamSpec("P") -T = TypeVar("T") - - -class SyncMixin: - _sync_configuration: SyncConfiguration - - def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: - # TODO: refactor this to to take *args and **kwargs and pass those to the method - # this should allow us to better type the sync wrapper - return sync(coroutine, loop=self._sync_configuration.asyncio_loop) - - def _sync_iter(self, coroutine: Coroutine[Any, Any, AsyncIterator[T]]) -> List[T]: - async def iter_to_list() -> List[T]: - # TODO: replace with generators so we don't materialize the entire iterator at once - async_iterator = await coroutine - return [item async for item in async_iterator] - - return self._sync(iter_to_list()) diff --git a/tests/__init__.py b/tests/v2/__init__.py similarity index 100% rename from tests/__init__.py rename to tests/v2/__init__.py diff --git a/tests/conftest.py b/tests/v2/conftest.py similarity index 60% rename from tests/conftest.py rename to tests/v2/conftest.py index aa73b8691e..a7a445c640 100644 --- a/tests/conftest.py +++ b/tests/v2/conftest.py @@ -6,3 +6,8 @@ @pytest.fixture(params=[str, pathlib.Path]) def path_type(request): return request.param + + +@pytest.fixture +def project_root(request): + return request.config.rootpath diff --git a/tests/data/store.zip b/tests/v2/data/store.zip similarity index 100% rename from tests/data/store.zip rename to tests/v2/data/store.zip diff --git a/tests/data/store/foo b/tests/v2/data/store/foo similarity index 100% rename from tests/data/store/foo rename to tests/v2/data/store/foo diff --git a/tests/test_attrs.py b/tests/v2/test_attrs.py similarity index 97% rename from tests/test_attrs.py rename to tests/v2/test_attrs.py index 2575163840..b477b8befe 100644 --- a/tests/test_attrs.py +++ b/tests/v2/test_attrs.py @@ -1,13 +1,11 @@ import json -import pathlib import pytest -import zarr -from zarr.attrs import Attributes -from zarr.storage import KVStore, DirectoryStore +from zarr.v2.attrs import Attributes +from zarr.v2.storage import KVStore, DirectoryStore from .util import CountingDict -from zarr.hierarchy import group +from zarr.v2.hierarchy import group def _init_store(): @@ -36,8 +34,7 @@ def test_storage(self): d = json.loads(str(store[attrs_key], "utf-8")) assert dict(foo="bar", baz=42) == d - def test_utf8_encoding(self): - project_root = pathlib.Path(zarr.__file__).resolve().parent.parent + def test_utf8_encoding(self, project_root): fixdir = project_root / "fixture" testdir = fixdir / "utf8attrs" if not testdir.exists(): # pragma: no cover diff --git a/tests/test_convenience.py b/tests/v2/test_convenience.py similarity index 99% rename from tests/test_convenience.py rename to tests/v2/test_convenience.py index d50533e847..f558a8800f 100644 --- a/tests/test_convenience.py +++ b/tests/v2/test_convenience.py @@ -8,8 +8,8 @@ from numcodecs import Adler32, Zlib from numpy.testing import assert_array_equal -import zarr -from zarr.convenience import ( +import zarr.v2 as zarr +from zarr.v2.convenience import ( consolidate_metadata, copy, copy_store, @@ -21,10 +21,10 @@ save_array, copy_all, ) -from zarr.core import Array -from zarr.errors import CopyError -from zarr.hierarchy 
import Group, group -from zarr.storage import ( +from zarr.v2.core import Array +from zarr.v2.errors import CopyError +from zarr.v2.hierarchy import Group, group +from zarr.v2.storage import ( ConsolidatedMetadataStore, FSStore, MemoryStore, diff --git a/tests/test_core.py b/tests/v2/test_core.py similarity index 99% rename from tests/test_core.py rename to tests/v2/test_core.py index 6303371793..197461d129 100644 --- a/tests/test_core.py +++ b/tests/v2/test_core.py @@ -30,15 +30,15 @@ from numcodecs.tests.common import greetings from numpy.testing import assert_array_almost_equal, assert_array_equal -import zarr -from zarr._storage.store import ( +import zarr.v2 +from zarr.v2._storage.store import ( BaseStore, ) -from zarr.core import Array -from zarr.meta import json_loads -from zarr.n5 import N5Store, N5FSStore, n5_keywords -from zarr.storage import ( +from zarr.v2.core import Array +from zarr.v2.meta import json_loads +from zarr.v2.n5 import N5Store, N5FSStore, n5_keywords +from zarr.v2.storage import ( ABSStore, DBMStore, DirectoryStore, @@ -55,7 +55,7 @@ normalize_store_arg, ) -from zarr.util import buffer_size +from zarr.v2.util import buffer_size from .util import abs_container, skip_test_env_var, have_fsspec, mktemp # noinspection PyMethodMayBeStatic @@ -721,7 +721,7 @@ def test_resize_2d(self): # checks that resizing preserves metadata if self.dimension_separator == "/": - z_ = zarr.open(z.store) + z_ = zarr.v2.open(z.store) if hasattr(z_, "dimension_separator"): assert z_.dimension_separator == self.dimension_separator z_.store.close() @@ -2495,7 +2495,7 @@ def test_issue_1279(tmpdir): """See """ data = np.arange(25).reshape((5, 5)) - ds = zarr.create( + ds = zarr.v2.create( shape=data.shape, chunks=(5, 5), dtype=data.dtype, @@ -2506,7 +2506,7 @@ def test_issue_1279(tmpdir): ds[:] = data - ds_reopened = zarr.open_array(store=FSStore(url=str(tmpdir), mode="r")) + ds_reopened = zarr.v2.open_array(store=FSStore(url=str(tmpdir), mode="r")) written_data = ds_reopened[:] assert_array_equal(data, written_data) diff --git a/tests/test_creation.py b/tests/v2/test_creation.py similarity index 98% rename from tests/test_creation.py rename to tests/v2/test_creation.py index 369d755700..08073a8ac3 100644 --- a/tests/test_creation.py +++ b/tests/v2/test_creation.py @@ -7,9 +7,9 @@ import pytest from numpy.testing import assert_array_equal -from zarr.codecs import Zlib -from zarr.core import Array -from zarr.creation import ( +from zarr.v2.codecs import Zlib +from zarr.v2.core import Array +from zarr.v2.creation import ( array, create, empty, @@ -23,10 +23,10 @@ zeros, zeros_like, ) -from zarr.hierarchy import open_group -from zarr.n5 import N5Store -from zarr.storage import DirectoryStore, KVStore -from zarr.sync import ThreadSynchronizer +from zarr.v2.hierarchy import open_group +from zarr.v2.n5 import N5Store +from zarr.v2.storage import DirectoryStore, KVStore +from zarr.v2.sync import ThreadSynchronizer from .util import mktemp, have_fsspec diff --git a/tests/test_dim_separator.py b/tests/v2/test_dim_separator.py similarity index 87% rename from tests/test_dim_separator.py rename to tests/v2/test_dim_separator.py index 4276d1829d..b0e9d0ecc8 100644 --- a/tests/test_dim_separator.py +++ b/tests/v2/test_dim_separator.py @@ -4,9 +4,9 @@ from numpy.testing import assert_array_equal from functools import partial -import zarr -from zarr.core import Array -from zarr.storage import DirectoryStore, NestedDirectoryStore, FSStore +import zarr.v2 +from zarr.v2.core import Array +from zarr.v2.storage 
import DirectoryStore, NestedDirectoryStore, FSStore from .util import have_fsspec @@ -41,7 +41,7 @@ def dataset(tmpdir, request): kwargs = {} if which.startswith("static"): - project_root = pathlib.Path(zarr.__file__).resolve().parent.parent + project_root = pathlib.Path(zarr.v2.__file__).resolve().parent.parent suffix = which[len("static_") :] static = project_root / "fixture" / suffix @@ -59,7 +59,7 @@ def dataset(tmpdir, request): # store the data - should be one-time operation s = generator(str(static)) - a = zarr.open(store=s, mode="w", shape=(2, 2), dtype=" CuPyCPUCompressor: if compressor: - compressor = getattr(zarr.codecs, compressor)() + compressor = getattr(zarr.v2.codecs, compressor)() return CuPyCPUCompressor(compressor) diff --git a/tests/test_n5.py b/tests/v2/test_n5.py similarity index 93% rename from tests/test_n5.py rename to tests/v2/test_n5.py index 755d60b607..238e9b2c6e 100644 --- a/tests/test_n5.py +++ b/tests/v2/test_n5.py @@ -1,8 +1,8 @@ import pytest -from zarr.n5 import N5ChunkWrapper, N5FSStore -from zarr.creation import create -from zarr.storage import atexit_rmtree +from zarr.v2.n5 import N5ChunkWrapper, N5FSStore +from zarr.v2.creation import create +from zarr.v2.storage import atexit_rmtree from numcodecs import GZip import numpy as np from typing import Tuple diff --git a/tests/test_storage.py b/tests/v2/test_storage.py similarity index 99% rename from tests/test_storage.py rename to tests/v2/test_storage.py index 5d82b879ad..b6877aa713 100644 --- a/tests/test_storage.py +++ b/tests/v2/test_storage.py @@ -31,7 +31,7 @@ # from zarr.meta import ZARR_FORMAT, decode_array_metadata # from zarr.n5 import N5Store, N5FSStore, N5_FORMAT, n5_attrs_key -from zarr.storage import ( +from zarr.v2.storage import ( # ABSStore, # ConsolidatedMetadataStore, # DBMStore, @@ -68,8 +68,8 @@ # from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp # from zarr.util import ConstantMap, json_dumps -from zarr.v3.abc.store import Store -from zarr.v3.store import MemoryStore as KVStore, LocalStore +from zarr.abc.store import Store +from zarr.store import MemoryStore as KVStore, LocalStore # @contextmanager diff --git a/tests/test_storage_v3.py b/tests/v2/test_storage_v3.py similarity index 100% rename from tests/test_storage_v3.py rename to tests/v2/test_storage_v3.py diff --git a/tests/test_sync.py b/tests/v2/test_sync.py similarity index 97% rename from tests/test_sync.py rename to tests/v2/test_sync.py index 9d805ee2c1..8bf1304dc2 100644 --- a/tests/test_sync.py +++ b/tests/v2/test_sync.py @@ -9,11 +9,11 @@ import numpy as np from numpy.testing import assert_array_equal -from zarr.attrs import Attributes -from zarr.core import Array -from zarr.hierarchy import Group -from zarr.storage import DirectoryStore, KVStore, atexit_rmtree, init_array, init_group -from zarr.sync import ProcessSynchronizer, ThreadSynchronizer +from zarr.v2.attrs import Attributes +from zarr.v2.core import Array +from zarr.v2.hierarchy import Group +from zarr.v2.storage import DirectoryStore, KVStore, atexit_rmtree, init_array, init_group +from zarr.v2.sync import ProcessSynchronizer, ThreadSynchronizer # zarr_version fixture must be imported although not used directly here from .test_attrs import TestAttributes # noqa diff --git a/tests/test_util.py b/tests/v2/test_util.py similarity index 99% rename from tests/test_util.py rename to tests/v2/test_util.py index 1f7efc9214..35c355693a 100644 --- a/tests/test_util.py +++ b/tests/v2/test_util.py @@ -4,8 +4,8 @@ import 
numpy as np import pytest -from zarr.core import Array -from zarr.util import ( +from zarr.v2.core import Array +from zarr.v2.util import ( ConstantMap, all_equal, flatten, diff --git a/tests/util.py b/tests/v2/util.py similarity index 97% rename from tests/util.py rename to tests/v2/util.py index 8e53bf3b63..12c5e379f6 100644 --- a/tests/util.py +++ b/tests/v2/util.py @@ -2,9 +2,9 @@ import os import tempfile from typing import Any, Mapping, Sequence -from zarr.context import Context +from zarr.v2.context import Context -from zarr.storage import Store +from zarr.v2.storage import Store import pytest diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index 333c2094bf..ffd225668b 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -7,12 +7,12 @@ import numpy as np import pytest -import zarr -from zarr.v3.abc.codec import Codec -from zarr.v3.array import Array, AsyncArray -from zarr.v3.common import Selection -from zarr.v3.indexing import morton_order_iter -from zarr.v3.codecs import ( +import zarr.v2 +from zarr.abc.codec import Codec +from zarr.array import Array, AsyncArray +from zarr.common import Selection +from zarr.indexing import morton_order_iter +from zarr.codecs import ( ShardingCodec, ShardingCodecIndexLocation, BloscCodec, @@ -21,10 +21,10 @@ TransposeCodec, ZstdCodec, ) -from zarr.v3.metadata import runtime_configuration +from zarr.metadata import runtime_configuration -from zarr.v3.abc.store import Store -from zarr.v3.store import MemoryStore, StorePath +from zarr.abc.store import Store +from zarr.store import MemoryStore, StorePath @dataclass(frozen=True) @@ -286,7 +286,7 @@ async def test_order( if not with_sharding: # Compare with zarr-python - z = zarr.create( + z = zarr.v2.create( shape=data.shape, chunks=(32, 8), dtype=" Date: Wed, 24 Apr 2024 13:11:49 +0200 Subject: [PATCH 0496/1078] [v3] Sync with futures (#1804) * fix: return Array from resize and update_attributes instead of AsyncArray * test(sync): add tests for sync module * clear up wait usage * _get_loop must return a loop * chore: clean up type hints * feat: add timeout to sync and sync config class, and add a test * chore: reword docstring * chore: adjust line length for the linter * update after v3 reorg merge * improve tests using asyncmock --------- Co-authored-by: Joseph Hamman --- src/zarr/array.py | 16 ++++-- src/zarr/config.py | 1 + src/zarr/group.py | 24 ++++++-- src/zarr/sync.py | 89 +++++++++++++++------------- src/zarr/v3/sync.py | 131 ++++++++++++++++++++++++++++++++++++++++++ tests/v3/test_sync.py | 127 ++++++++++++++++++++++++++++++++++++++++ 6 files changed, 337 insertions(+), 51 deletions(-) create mode 100644 src/zarr/v3/sync.py create mode 100644 tests/v3/test_sync.py diff --git a/src/zarr/array.py b/src/zarr/array.py index b739b310d4..c1263230c0 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -527,15 +527,19 @@ def __setitem__(self, selection: Selection, value: np.ndarray) -> None: ) def resize(self, new_shape: ChunkCoords) -> Array: - return sync( - self._async_array.resize(new_shape), - self._async_array.runtime_configuration.asyncio_loop, + return type(self)( + sync( + self._async_array.resize(new_shape), + self._async_array.runtime_configuration.asyncio_loop, + ) ) def update_attributes(self, new_attributes: Dict[str, Any]) -> Array: - return sync( - self._async_array.update_attributes(new_attributes), - self._async_array.runtime_configuration.asyncio_loop, + return type(self)( + sync( + self._async_array.update_attributes(new_attributes), + 
self._async_array.runtime_configuration.asyncio_loop, + ) ) def __repr__(self): diff --git a/src/zarr/config.py b/src/zarr/config.py index cebe5c1b09..cd4d82597b 100644 --- a/src/zarr/config.py +++ b/src/zarr/config.py @@ -9,6 +9,7 @@ class SyncConfiguration: concurrency: Optional[int] = None asyncio_loop: Optional[AbstractEventLoop] = None + timeout: float | None = None def parse_indexing_order(data: Any) -> Literal["C", "F"]: diff --git a/src/zarr/group.py b/src/zarr/group.py index aff24ed0d9..cd2c00dc11 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -415,24 +415,36 @@ def nchildren(self) -> int: @property def children(self) -> List[Union[Array, Group]]: - _children = self._sync_iter(self._async_group.children()) - return [Array(obj) if isinstance(obj, AsyncArray) else Group(obj) for obj in _children] + raise NotImplementedError + # Uncomment with AsyncGroup implements this method + # _children: List[Union[AsyncArray, AsyncGroup]] = self._sync_iter( + # self._async_group.children() + # ) + # return [Array(obj) if isinstance(obj, AsyncArray) else Group(obj) for obj in _children] def __contains__(self, child) -> bool: return self._sync(self._async_group.contains(child)) def group_keys(self) -> List[str]: - return self._sync_iter(self._async_group.group_keys()) + raise NotImplementedError + # uncomment with AsyncGroup implements this method + # return self._sync_iter(self._async_group.group_keys()) def groups(self) -> List[Group]: # TODO: in v2 this was a generator that return key: Group - return [Group(obj) for obj in self._sync_iter(self._async_group.groups())] + raise NotImplementedError + # uncomment with AsyncGroup implements this method + # return [Group(obj) for obj in self._sync_iter(self._async_group.groups())] def array_keys(self) -> List[str]: - return self._sync_iter(self._async_group.array_keys()) + # uncomment with AsyncGroup implements this method + # return self._sync_iter(self._async_group.array_keys()) + raise NotImplementedError def arrays(self) -> List[Array]: - return [Array(obj) for obj in self._sync_iter(self._async_group.arrays())] + raise NotImplementedError + # uncomment with AsyncGroup implements this method + # return [Array(obj) for obj in self._sync_iter(self._async_group.arrays())] def tree(self, expand=False, level=None) -> Any: return self._sync(self._async_group.tree(expand=expand, level=level)) diff --git a/src/zarr/sync.py b/src/zarr/sync.py index d9665b4c58..a152030e89 100644 --- a/src/zarr/sync.py +++ b/src/zarr/sync.py @@ -1,30 +1,34 @@ from __future__ import annotations +from typing import TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from typing import Any, AsyncIterator, Coroutine import asyncio +from concurrent.futures import wait import threading -from typing import ( - Any, - AsyncIterator, - Coroutine, - List, - Optional, - TypeVar, -) + from typing_extensions import ParamSpec from zarr.config import SyncConfiguration +P = ParamSpec("P") +T = TypeVar("T") # From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py -iothread: List[Optional[threading.Thread]] = [None] # dedicated IO thread -loop: List[Optional[asyncio.AbstractEventLoop]] = [ +iothread: list[threading.Thread | None] = [None] # dedicated IO thread +loop: list[asyncio.AbstractEventLoop | None] = [ None ] # global event loop for any non-async instance -_lock: Optional[threading.Lock] = None # global lock placeholder +_lock: threading.Lock | None = None # global lock placeholder get_running_loop = asyncio.get_running_loop +class SyncError(Exception): + pass + 
+
+
 def _get_lock() -> threading.Lock:
     """Allocate or return a threading lock.
@@ -36,16 +40,22 @@ def _get_lock() -> threading.Lock:
     return _lock
 
 
-async def _runner(event: threading.Event, coro: Coroutine, result_box: List[Optional[Any]]):
+async def _runner(coro: Coroutine[Any, Any, T]) -> T | BaseException:
+    """
+    Await a coroutine and return the result of running it. If awaiting the coroutine raises an
+    exception, the exception will be returned.
+    """
     try:
-        result_box[0] = await coro
+        return await coro
     except Exception as ex:
-        result_box[0] = ex
-    finally:
-        event.set()
+        return ex
 
 
-def sync(coro: Coroutine, loop: Optional[asyncio.AbstractEventLoop] = None):
+def sync(
+    coro: Coroutine[Any, Any, T],
+    loop: asyncio.AbstractEventLoop | None = None,
+    timeout: float | None = None,
+) -> T:
     """
     Make loop run coroutine until it returns. Runs in other thread
 
@@ -57,30 +67,32 @@ def sync(coro: Coroutine, loop: Optional[asyncio.AbstractEventLoop] = None):
         # NB: if the loop is not running *yet*, it is OK to submit work
         # and we will wait for it
         loop = _get_loop()
-    if loop is None or loop.is_closed():
+    if not isinstance(loop, asyncio.AbstractEventLoop):
+        raise TypeError(f"loop cannot be of type {type(loop)}")
+    if loop.is_closed():
         raise RuntimeError("Loop is not running")
     try:
         loop0 = asyncio.events.get_running_loop()
         if loop0 is loop:
-            raise NotImplementedError("Calling sync() from within a running loop")
+            raise SyncError("Calling sync() from within a running loop")
     except RuntimeError:
         pass
-    result_box: List[Optional[Any]] = [None]
-    event = threading.Event()
-    asyncio.run_coroutine_threadsafe(_runner(event, coro, result_box), loop)
-    while True:
-        # this loop allows the thread to get interrupted
-        if event.wait(1):
-            break
-
-    return_result = result_box[0]
+
+    future = asyncio.run_coroutine_threadsafe(_runner(coro), loop)
+
+    finished, unfinished = wait([future], return_when=asyncio.ALL_COMPLETED, timeout=timeout)
+    if len(unfinished) > 0:
+        raise asyncio.TimeoutError(f"Coroutine {coro} failed to finish within {timeout}s")
+    assert len(finished) == 1
+    return_result = list(finished)[0].result()
+
     if isinstance(return_result, BaseException):
         raise return_result
     else:
         return return_result
 
 
-def _get_loop():
+def _get_loop() -> asyncio.AbstractEventLoop:
     """Create or return the default fsspec IO loop
 
     The loop will be running on a separate thread.
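The rewrite above replaces the event/result-box handshake with a concurrent.futures future, which is what makes the new `timeout` parameter possible: `sync()` now schedules the coroutine on the dedicated I/O loop thread with `asyncio.run_coroutine_threadsafe` and blocks on the resulting future with `concurrent.futures.wait`. A minimal sketch of the resulting calling convention — the `slow()` coroutine is hypothetical, while the `zarr.sync` import path and the `asyncio.TimeoutError` behaviour come from this patch and the test module it adds:

import asyncio
from zarr.sync import sync

async def slow(x: int) -> int:
    # stands in for a slow I/O-bound operation running on the "zarrIO" loop thread
    await asyncio.sleep(0.1)
    return x * 2

# blocks the calling thread until the coroutine finishes on the loop thread
assert sync(slow(21)) == 42

# with the new timeout parameter, an unfinished coroutine raises asyncio.TimeoutError
try:
    sync(slow(21), timeout=0.001)
except asyncio.TimeoutError:
    pass

Note that on timeout the coroutine is not cancelled; only the waiting thread gives up, and the coroutine may still complete on the loop thread in the background.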
@@ -96,25 +108,24 @@ def _get_loop():
                 th.daemon = True
                 th.start()
                 iothread[0] = th
+    assert loop[0] is not None
     return loop[0]
 
 
-P = ParamSpec("P")
-T = TypeVar("T")
-
-
 class SyncMixin:
     _sync_configuration: SyncConfiguration
 
     def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T:
         # TODO: refactor this to take *args and **kwargs and pass those to the method
         # this should allow us to better type the sync wrapper
-        return sync(coroutine, loop=self._sync_configuration.asyncio_loop)
-
-    def _sync_iter(self, coroutine: Coroutine[Any, Any, AsyncIterator[T]]) -> List[T]:
-        async def iter_to_list() -> List[T]:
-            # TODO: replace with generators so we don't materialize the entire iterator at once
-            async_iterator = await coroutine
+        return sync(
+            coroutine,
+            loop=self._sync_configuration.asyncio_loop,
+            timeout=self._sync_configuration.timeout,
+        )
+
+    def _sync_iter(self, async_iterator: AsyncIterator[T]) -> list[T]:
+        async def iter_to_list() -> list[T]:
             return [item async for item in async_iterator]
 
         return self._sync(iter_to_list())
diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py
new file mode 100644
index 0000000000..2838f68172
--- /dev/null
+++ b/src/zarr/v3/sync.py
@@ -0,0 +1,131 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING, TypeVar
+
+if TYPE_CHECKING:
+    from typing import Any, AsyncIterator, Coroutine
+
+import asyncio
+from concurrent.futures import wait
+import threading
+
+from typing_extensions import ParamSpec
+
+from zarr.v3.config import SyncConfiguration
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+# From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py
+
+iothread: list[threading.Thread | None] = [None]  # dedicated IO thread
+loop: list[asyncio.AbstractEventLoop | None] = [
+    None
+]  # global event loop for any non-async instance
+_lock: threading.Lock | None = None  # global lock placeholder
+get_running_loop = asyncio.get_running_loop
+
+
+class SyncError(Exception):
+    pass
+
+
+def _get_lock() -> threading.Lock:
+    """Allocate or return a threading lock.
+
+    The lock is allocated on first use to allow setting one lock per forked process.
+    """
+    global _lock
+    if not _lock:
+        _lock = threading.Lock()
+    return _lock
+
+
+async def _runner(coro: Coroutine[Any, Any, T]) -> T | BaseException:
+    """
+    Await a coroutine and return the result of running it. If awaiting the coroutine raises an
+    exception, the exception will be returned.
+    """
+    try:
+        return await coro
+    except Exception as ex:
+        return ex
+
+
+def sync(
+    coro: Coroutine[Any, Any, T],
+    loop: asyncio.AbstractEventLoop | None = None,
+    timeout: float | None = None,
+) -> T:
+    """
+    Make loop run coroutine until it returns. Runs in other thread
+
+    Examples
+    --------
+    >>> sync(async_function(), existing_loop)
+    """
+    if loop is None:
+        # NB: if the loop is not running *yet*, it is OK to submit work
+        # and we will wait for it
+        loop = _get_loop()
+    if not isinstance(loop, asyncio.AbstractEventLoop):
+        raise TypeError(f"loop cannot be of type {type(loop)}")
+    if loop.is_closed():
+        raise RuntimeError("Loop is not running")
+    try:
+        loop0 = asyncio.events.get_running_loop()
+        if loop0 is loop:
+            raise SyncError("Calling sync() from within a running loop")
+    except RuntimeError:
+        pass
+
+    future = asyncio.run_coroutine_threadsafe(_runner(coro), loop)
+
+    finished, unfinished = wait([future], return_when=asyncio.ALL_COMPLETED, timeout=timeout)
+    if len(unfinished) > 0:
+        raise asyncio.TimeoutError(f"Coroutine {coro} failed to finish within {timeout}s")
+    assert len(finished) == 1
+    return_result = list(finished)[0].result()
+
+    if isinstance(return_result, BaseException):
+        raise return_result
+    else:
+        return return_result
+
+
+def _get_loop() -> asyncio.AbstractEventLoop:
+    """Create or return the default fsspec IO loop
+
+    The loop will be running on a separate thread.
+    """
+    if loop[0] is None:
+        with _get_lock():
+            # repeat the check just in case the loop got filled between the
+            # previous two calls from another thread
+            if loop[0] is None:
+                new_loop = asyncio.new_event_loop()
+                loop[0] = new_loop
+                th = threading.Thread(target=new_loop.run_forever, name="zarrIO")
+                th.daemon = True
+                th.start()
+                iothread[0] = th
+    assert loop[0] is not None
+    return loop[0]
+
+
+class SyncMixin:
+    _sync_configuration: SyncConfiguration
+
+    def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T:
+        # TODO: refactor this to take *args and **kwargs and pass those to the method
+        # this should allow us to better type the sync wrapper
+        return sync(
+            coroutine,
+            loop=self._sync_configuration.asyncio_loop,
+            timeout=self._sync_configuration.timeout,
+        )
+
+    def _sync_iter(self, async_iterator: AsyncIterator[T]) -> list[T]:
+        async def iter_to_list() -> list[T]:
+            return [item async for item in async_iterator]
+
+        return self._sync(iter_to_list())
diff --git a/tests/v3/test_sync.py b/tests/v3/test_sync.py
new file mode 100644
index 0000000000..8f644745d2
--- /dev/null
+++ b/tests/v3/test_sync.py
@@ -0,0 +1,127 @@
+from collections.abc import AsyncGenerator
+import asyncio
+import time
+from unittest.mock import patch, AsyncMock
+
+from zarr.sync import sync, _get_loop, _get_lock, SyncError, SyncMixin
+from zarr.config import SyncConfiguration
+
+import pytest
+
+
+@pytest.fixture(params=[True, False])
+def sync_loop(request) -> asyncio.AbstractEventLoop | None:
+    if request.param is True:
+        return _get_loop()
+
+    if request.param is False:
+        return None
+
+
+def test_get_loop() -> None:
+    # test that calling _get_loop() twice returns the same loop
+    loop = _get_loop()
+    loop2 = _get_loop()
+    assert loop is loop2
+
+
+def test_get_lock() -> None:
+    # test that calling _get_lock() twice returns the same lock
+    lock = _get_lock()
+    lock2 = _get_lock()
+    assert lock is lock2
+
+
+def test_sync(sync_loop: asyncio.AbstractEventLoop | None) -> None:
+    foo = AsyncMock(return_value="foo")
+    assert sync(foo(), loop=sync_loop) == "foo"
+    foo.assert_awaited_once()
+
+
+def test_sync_raises(sync_loop: asyncio.AbstractEventLoop | None) -> None:
+    foo = AsyncMock(side_effect=ValueError("foo-bar"))
+    with pytest.raises(ValueError, match="foo-bar"):
+        sync(foo(), loop=sync_loop)
+    foo.assert_awaited_once()
+
+
+def 
test_sync_timeout() -> None: + duration = 0.002 + + async def foo() -> None: + time.sleep(duration) + + with pytest.raises(asyncio.TimeoutError): + sync(foo(), timeout=duration / 2) + + +def test_sync_raises_if_no_coroutine(sync_loop: asyncio.AbstractEventLoop | None) -> None: + def foo() -> str: + return "foo" + + with pytest.raises(TypeError): + sync(foo(), loop=sync_loop) + + +@pytest.mark.filterwarnings("ignore:coroutine.*was never awaited") +def test_sync_raises_if_loop_is_closed() -> None: + loop = _get_loop() + + foo = AsyncMock(return_value="foo") + with patch.object(loop, "is_closed", return_value=True): + with pytest.raises(RuntimeError): + sync(foo(), loop=loop) + foo.assert_not_awaited() + + +@pytest.mark.filterwarnings("ignore:coroutine.*was never awaited") +def test_sync_raises_if_calling_sync_from_within_a_running_loop( + sync_loop: asyncio.AbstractEventLoop | None, +) -> None: + def foo() -> str: + # technically, this should be an async function but doing that + # yields a warning because it is never awaited by the inner function + return "foo" + + async def bar() -> str: + return sync(foo(), loop=sync_loop) + + with pytest.raises(SyncError): + sync(bar(), loop=sync_loop) + + +@pytest.mark.filterwarnings("ignore:coroutine.*was never awaited") +def test_sync_raises_if_loop_is_invalid_type() -> None: + foo = AsyncMock(return_value="foo") + with pytest.raises(TypeError): + sync(foo(), loop=1) + foo.assert_not_awaited() + + +def test_sync_mixin(sync_loop) -> None: + class AsyncFoo: + def __init__(self) -> None: + pass + + async def foo(self) -> str: + return "foo" + + async def bar(self) -> AsyncGenerator: + for i in range(10): + yield i + + class SyncFoo(SyncMixin): + def __init__(self, async_foo: AsyncFoo) -> None: + self._async_foo = async_foo + self._sync_configuration = SyncConfiguration(asyncio_loop=sync_loop) + + def foo(self) -> str: + return self._sync(self._async_foo.foo()) + + def bar(self) -> list[int]: + return self._sync_iter(self._async_foo.bar()) + + async_foo = AsyncFoo() + foo = SyncFoo(async_foo) + assert foo.foo() == "foo" + assert foo.bar() == list(range(10)) From 57d6acefc8260bf5d8ba165f0bf7d4b52c46f249 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Wed, 24 Apr 2024 18:44:21 +0200 Subject: [PATCH 0497/1078] implement `group.members` (#1726) * feat: functional .children method for groups * changes necessary for correctly generating list of children * add stand-alone test for group.children * give type hints a glow-up * test: use separate assert statements to avoid platform-dependent ordering issues * test: put fixtures in conftest, add MemoryStore fixture * docs: release notes * test: remove prematurely-added mock s3 fixture * fix: Rename children to members; AsyncGroup.members yields tuples of (name, AsyncArray / AsyncGroup) pairs; Group.members repackages these into a dict. 
* fix: make Group.members return a tuple of str, Array | Group pairs * fix: revert changes to synchronization code; this is churn that we need to deal with * make mypy happy * feat: implement member-specific iteration methods in asyncgroup * chore: clean up some post-merge issues * chore: remove extra directory added by test code --------- Co-authored-by: Joseph Hamman --- docs/release.rst | 6 ++ src/zarr/group.py | 180 ++++++++++++++++++++++++++++------------- tests/v2/conftest.py | 1 - tests/v3/conftest.py | 32 ++++++++ tests/v3/test_group.py | 60 ++++++++++++-- 5 files changed, 215 insertions(+), 64 deletions(-) create mode 100644 tests/v3/conftest.py diff --git a/docs/release.rst b/docs/release.rst index 3ed47ff9f5..b78e709c0e 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,12 @@ Release notes Unreleased (v3) --------------- +Enhancements +~~~~~~~~~~~~ + +* Implement listing of the sub-arrays and sub-groups for a V3 ``Group``. + By :user:`Davis Bennett ` :issue:`1726`. + Maintenance ~~~~~~~~~~~ diff --git a/src/zarr/group.py b/src/zarr/group.py index cd2c00dc11..c40b5f9a34 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -1,10 +1,18 @@ from __future__ import annotations +from typing import TYPE_CHECKING from dataclasses import asdict, dataclass, field, replace import asyncio import json import logging -from typing import Any, Dict, Literal, Optional, Union, AsyncIterator, List + +if TYPE_CHECKING: + from typing import ( + Any, + AsyncGenerator, + Literal, + AsyncIterator, + ) from zarr.abc.metadata import Metadata from zarr.array import AsyncArray, Array @@ -25,7 +33,7 @@ def parse_zarr_format(data: Any) -> Literal[2, 3]: # todo: convert None to empty dict -def parse_attributes(data: Any) -> Dict[str, Any]: +def parse_attributes(data: Any) -> dict[str, Any]: if data is None: return {} elif isinstance(data, dict) and all(map(lambda v: isinstance(v, str), data.keys())): @@ -36,12 +44,12 @@ def parse_attributes(data: Any) -> Dict[str, Any]: @dataclass(frozen=True) class GroupMetadata(Metadata): - attributes: Dict[str, Any] = field(default_factory=dict) + attributes: dict[str, Any] = field(default_factory=dict) zarr_format: Literal[2, 3] = 3 node_type: Literal["group"] = field(default="group", init=False) # todo: rename this, since it doesn't return bytes - def to_bytes(self) -> Dict[str, bytes]: + def to_bytes(self) -> dict[str, bytes]: if self.zarr_format == 3: return {ZARR_JSON: json.dumps(self.to_dict()).encode()} else: @@ -50,7 +58,7 @@ def to_bytes(self) -> Dict[str, bytes]: ZATTRS_JSON: json.dumps(self.attributes).encode(), } - def __init__(self, attributes: Optional[Dict[str, Any]] = None, zarr_format: Literal[2, 3] = 3): + def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: Literal[2, 3] = 3): attributes_parsed = parse_attributes(attributes) zarr_format_parsed = parse_zarr_format(zarr_format) @@ -58,11 +66,11 @@ def __init__(self, attributes: Optional[Dict[str, Any]] = None, zarr_format: Lit object.__setattr__(self, "zarr_format", zarr_format_parsed) @classmethod - def from_dict(cls, data: Dict[str, Any]) -> GroupMetadata: + def from_dict(cls, data: dict[str, Any]) -> GroupMetadata: assert data.pop("node_type", None) in ("group", None) return cls(**data) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: return asdict(self) @@ -70,14 +78,14 @@ def to_dict(self) -> Dict[str, Any]: class AsyncGroup: metadata: GroupMetadata store_path: StorePath - runtime_configuration: RuntimeConfiguration + 
runtime_configuration: RuntimeConfiguration = RuntimeConfiguration()
 
     @classmethod
     async def create(
         cls,
         store: StoreLike,
         *,
-        attributes: Optional[Dict[str, Any]] = None,
+        attributes: dict[str, Any] = {},
         exists_ok: bool = False,
         zarr_format: Literal[2, 3] = 3,
         runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(),
@@ -89,7 +97,7 @@ async def create(
         elif zarr_format == 2:
             assert not await (store_path / ZGROUP_JSON).exists()
         group = cls(
-            metadata=GroupMetadata(attributes=attributes or {}, zarr_format=zarr_format),
+            metadata=GroupMetadata(attributes=attributes, zarr_format=zarr_format),
             store_path=store_path,
             runtime_configuration=runtime_configuration,
         )
@@ -137,7 +145,7 @@ async def open(
     def from_dict(
         cls,
         store_path: StorePath,
-        data: Dict[str, Any],
+        data: dict[str, Any],
         runtime_configuration: RuntimeConfiguration,
     ) -> AsyncGroup:
         group = cls(
@@ -150,14 +158,24 @@ def from_dict(
     async def getitem(
         self,
         key: str,
-    ) -> Union[AsyncArray, AsyncGroup]:
+    ) -> AsyncArray | AsyncGroup:
         store_path = self.store_path / key
 
+        # Note:
+        # in zarr-python v2, we first check if `key` references an Array, else if `key` references
+        # a group, using standalone `contains_array` and `contains_group` functions. These functions
+        # are reusable, but for v3 they would perform redundant I/O operations.
+        # Not clear how much of that strategy we want to keep here.
+
+        # if `key` names an object in storage, it cannot be an array or group
+        if await store_path.exists():
+            raise KeyError(key)
+
         if self.metadata.zarr_format == 3:
             zarr_json_bytes = await (store_path / ZARR_JSON).get()
             if zarr_json_bytes is None:
                 # implicit group?
-                logger.warning("group at {} is an implicit group", store_path)
+                logger.warning("group at %s is an implicit group", store_path)
                 zarr_json = {
                     "zarr_format": self.metadata.zarr_format,
                     "node_type": "group",
@@ -196,7 +214,7 @@ async def getitem(
         else:
             if zgroup_bytes is None:
                 # implicit group?
-                logger.warning("group at {} is an implicit group", store_path)
+                logger.warning("group at %s is an implicit group", store_path)
             zgroup = (
                 json.loads(zgroup_bytes)
                 if zgroup_bytes is not None
@@ -248,7 +266,7 @@ async def create_array(self, path: str, **kwargs) -> AsyncArray:
             **kwargs,
         )
 
-    async def update_attributes(self, new_attributes: Dict[str, Any]):
+    async def update_attributes(self, new_attributes: dict[str, Any]):
         # metadata.attributes is "frozen" so we simply clear and update the dict
         self.metadata.attributes.clear()
         self.metadata.attributes.update(new_attributes)
@@ -269,26 +287,68 @@ async def update_attributes(self, new_attributes: dict[str, Any]):
     def __repr__(self):
         return f""
 
-    async def nchildren(self) -> int:
-        raise NotImplementedError
-
-    async def children(self) -> AsyncIterator[Union[AsyncArray, AsyncGroup]]:
-        raise NotImplementedError
-
-    async def contains(self, child: str) -> bool:
+    async def nmembers(self) -> int:
         raise NotImplementedError
 
-    async def group_keys(self) -> AsyncIterator[str]:
-        raise NotImplementedError
+    async def members(self) -> AsyncGenerator[tuple[str, AsyncArray | AsyncGroup], None]:
+        """
+        Returns an AsyncGenerator over the arrays and groups contained in this group.
+        This method requires that `store_path.store` supports directory listing.
+
+        The results are not guaranteed to be ordered.
+        """
+        if not self.store_path.store.supports_listing:
+            msg = (
+                f"The store associated with this group ({type(self.store_path.store)}) "
+                "does not support listing, "
+                "specifically via the `list_dir` method. 
" + "This function requires a store that supports listing." + ) - async def groups(self) -> AsyncIterator[AsyncGroup]: - raise NotImplementedError + raise ValueError(msg) + subkeys = await self.store_path.store.list_dir(self.store_path.path) + # would be nice to make these special keys accessible programmatically, + # and scoped to specific zarr versions + subkeys_filtered = filter(lambda v: v not in ("zarr.json", ".zgroup", ".zattrs"), subkeys) + # is there a better way to schedule this? + for subkey in subkeys_filtered: + try: + yield (subkey, await self.getitem(subkey)) + except KeyError: + # keyerror is raised when `subkey` names an object (in the object storage sense), + # as opposed to a prefix, in the store under the prefix associated with this group + # in which case `subkey` cannot be the name of a sub-array or sub-group. + logger.warning( + "Object at %s is not recognized as a component of a Zarr hierarchy.", subkey + ) + pass - async def array_keys(self) -> AsyncIterator[str]: + async def contains(self, member: str) -> bool: raise NotImplementedError + # todo: decide if this method should be separate from `groups` + async def group_keys(self) -> AsyncGenerator[str, None]: + async for key, value in self.members(): + if isinstance(value, AsyncGroup): + yield key + + # todo: decide if this method should be separate from `group_keys` + async def groups(self) -> AsyncGenerator[AsyncGroup, None]: + async for key, value in self.members(): + if isinstance(value, AsyncGroup): + yield value + + # todo: decide if this method should be separate from `arrays` + async def array_keys(self) -> AsyncGenerator[str, None]: + async for key, value in self.members(): + if isinstance(value, AsyncArray): + yield key + + # todo: decide if this method should be separate from `array_keys` async def arrays(self) -> AsyncIterator[AsyncArray]: - raise NotImplementedError + async for key, value in self.members(): + if isinstance(value, AsyncArray): + yield value async def tree(self, expand=False, level=None) -> Any: raise NotImplementedError @@ -331,7 +391,7 @@ def create( cls, store: StoreLike, *, - attributes: Optional[Dict[str, Any]] = None, + attributes: dict[str, Any] = {}, exists_ok: bool = False, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Group: @@ -358,7 +418,7 @@ def open( ) return cls(obj) - def __getitem__(self, path: str) -> Union[Array, Group]: + def __getitem__(self, path: str) -> Array | Group: obj = self._sync(self._async_group.getitem(path)) if isinstance(obj, AsyncArray): return Array(obj) @@ -378,7 +438,7 @@ def __setitem__(self, key, value): """__setitem__ is not supported in v3""" raise NotImplementedError - async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Group: + async def update_attributes_async(self, new_attributes: dict[str, Any]) -> Group: new_metadata = replace(self.metadata, attributes=new_attributes) # Write new metadata @@ -389,6 +449,10 @@ async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Group async_group = replace(self._async_group, metadata=new_metadata) return replace(self, _async_group=async_group) + @property + def store_path(self) -> StorePath: + return self._async_group.store_path + @property def metadata(self) -> GroupMetadata: return self._async_group.metadata @@ -401,50 +465,54 @@ def attrs(self) -> Attributes: def info(self): return self._async_group.info - @property - def store_path(self) -> StorePath: - return self._async_group.store_path - - def update_attributes(self, 
new_attributes: Dict[str, Any]): + def update_attributes(self, new_attributes: dict[str, Any]): self._sync(self._async_group.update_attributes(new_attributes)) return self @property - def nchildren(self) -> int: - return self._sync(self._async_group.nchildren()) + def nmembers(self) -> int: + return self._sync(self._async_group.nmembers()) @property - def children(self) -> List[Union[Array, Group]]: - raise NotImplementedError - # Uncomment with AsyncGroup implements this method - # _children: List[Union[AsyncArray, AsyncGroup]] = self._sync_iter( - # self._async_group.children() - # ) - # return [Array(obj) if isinstance(obj, AsyncArray) else Group(obj) for obj in _children] + def members(self) -> tuple[tuple[str, Array | Group], ...]: + """ + Return the sub-arrays and sub-groups of this group as a `tuple` of (name, array | group) + pairs + """ + _members: list[tuple[str, AsyncArray | AsyncGroup]] = self._sync_iter( + self._async_group.members() + ) + ret: list[tuple[str, Array | Group]] = [] + for key, value in _members: + if isinstance(value, AsyncArray): + ret.append((key, Array(value))) + else: + ret.append((key, Group(value))) + return tuple(ret) - def __contains__(self, child) -> bool: - return self._sync(self._async_group.contains(child)) + def __contains__(self, member) -> bool: + return self._sync(self._async_group.contains(member)) - def group_keys(self) -> List[str]: - raise NotImplementedError + def group_keys(self) -> list[str]: # uncomment with AsyncGroup implements this method # return self._sync_iter(self._async_group.group_keys()) + raise NotImplementedError - def groups(self) -> List[Group]: + def groups(self) -> list[Group]: # TODO: in v2 this was a generator that return key: Group - raise NotImplementedError # uncomment with AsyncGroup implements this method # return [Group(obj) for obj in self._sync_iter(self._async_group.groups())] + raise NotImplementedError - def array_keys(self) -> List[str]: + def array_keys(self) -> list[str]: # uncomment with AsyncGroup implements this method - # return self._sync_iter(self._async_group.array_keys()) + # return self._sync_iter(self._async_group.array_keys) raise NotImplementedError - def arrays(self) -> List[Array]: - raise NotImplementedError + def arrays(self) -> list[Array]: # uncomment with AsyncGroup implements this method - # return [Array(obj) for obj in self._sync_iter(self._async_group.arrays())] + # return [Array(obj) for obj in self._sync_iter(self._async_group.arrays)] + raise NotImplementedError def tree(self, expand=False, level=None) -> Any: return self._sync(self._async_group.tree(expand=expand, level=level)) diff --git a/tests/v2/conftest.py b/tests/v2/conftest.py index a7a445c640..c84cdfa439 100644 --- a/tests/v2/conftest.py +++ b/tests/v2/conftest.py @@ -1,5 +1,4 @@ import pathlib - import pytest diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py new file mode 100644 index 0000000000..3dc55c0298 --- /dev/null +++ b/tests/v3/conftest.py @@ -0,0 +1,32 @@ +import pathlib +import pytest + +from zarr.store import LocalStore, StorePath, MemoryStore, RemoteStore + + +@pytest.fixture(params=[str, pathlib.Path]) +def path_type(request): + return request.param + + +# todo: harmonize this with local_store fixture +@pytest.fixture +def store_path(tmpdir): + store = LocalStore(str(tmpdir)) + p = StorePath(store) + return p + + +@pytest.fixture(scope="function") +def local_store(tmpdir): + return LocalStore(str(tmpdir)) + + +@pytest.fixture(scope="function") +def remote_store(): + return RemoteStore() + + 
+@pytest.fixture(scope="function") +def memory_store(): + return MemoryStore() diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 1150469db1..941256bdd2 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -1,3 +1,10 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + +from zarr.sync import sync + +if TYPE_CHECKING: + from zarr.store import MemoryStore, LocalStore import pytest import numpy as np @@ -6,21 +13,60 @@ from zarr.config import RuntimeConfiguration -@pytest.fixture -def store_path(tmpdir): - store = LocalStore(str(tmpdir)) - p = StorePath(store) - return p +# todo: put RemoteStore in here +@pytest.mark.parametrize("store_type", ("local_store", "memory_store")) +def test_group_members(store_type, request): + """ + Test that `Group.members` returns correct values, i.e. the arrays and groups + (explicit and implicit) contained in that group. + """ + + store: LocalStore | MemoryStore = request.getfixturevalue(store_type) + path = "group" + agroup = AsyncGroup( + metadata=GroupMetadata(), + store_path=StorePath(store=store, path=path), + ) + group = Group(agroup) + members_expected = {} + members_expected["subgroup"] = group.create_group("subgroup") + # make a sub-sub-subgroup, to ensure that the children calculation doesn't go + # too deep in the hierarchy + _ = members_expected["subgroup"].create_group("subsubgroup") -def test_group(store_path) -> None: + members_expected["subarray"] = group.create_array( + "subarray", shape=(100,), dtype="uint8", chunk_shape=(10,), exists_ok=True + ) + + # add an extra object to the domain of the group. + # the list of children should ignore this object. + sync(store.set(f"{path}/extra_object", b"000000")) + # add an extra object under a directory-like prefix in the domain of the group. 
+ # this creates an implicit group called implicit_subgroup + sync(store.set(f"{path}/implicit_subgroup/extra_object", b"000000")) + # make the implicit subgroup + members_expected["implicit_subgroup"] = Group( + AsyncGroup( + metadata=GroupMetadata(), + store_path=StorePath(store=store, path=f"{path}/implicit_subgroup"), + ) + ) + members_observed = group.members + # members are not guaranteed to be ordered, so sort before comparing + assert sorted(dict(members_observed)) == sorted(members_expected) + + +@pytest.mark.parametrize("store_type", (("local_store",))) +def test_group(store_type, request) -> None: + store = request.getfixturevalue(store_type) + store_path = StorePath(store) agroup = AsyncGroup( metadata=GroupMetadata(), store_path=store_path, runtime_configuration=RuntimeConfiguration(), ) group = Group(agroup) - assert agroup.metadata is group.metadata # create two groups From 096c900a76d08eb6bde86f5583eda697781710c8 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 24 Apr 2024 15:32:00 -0700 Subject: [PATCH 0498/1078] remove windows testing on v3 branch (#1817) --- .github/workflows/{test-v3.yml => test.yml} | 0 .github/workflows/windows-testing.yml | 61 --------------------- 2 files changed, 61 deletions(-) rename .github/workflows/{test-v3.yml => test.yml} (100%) delete mode 100644 .github/workflows/windows-testing.yml diff --git a/.github/workflows/test-v3.yml b/.github/workflows/test.yml similarity index 100% rename from .github/workflows/test-v3.yml rename to .github/workflows/test.yml diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml deleted file mode 100644 index 78945e97aa..0000000000 --- a/.github/workflows/windows-testing.yml +++ /dev/null @@ -1,61 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - windows: - name: Windows Tests - runs-on: "windows-latest" - strategy: - fail-fast: True - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v2.3.0 - with: - auto-update-conda: true - python-version: ${{ matrix.python-version }} - channels: conda-forge - env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true - - name: Create Conda environment with the rights deps - shell: bash -l {0} - run: | - conda create -n zarr-env python==${{matrix.python-version}} numcodecs pip nodejs - - name: Install dependencies - shell: bash -l {0} - run: | - conda activate zarr-env - python -m pip install --upgrade pip - python -m pip install -U pip setuptools wheel - python -m pip install -r requirements_dev_numpy.txt -r requirements_dev_minimal.txt -r requirements_dev_optional.txt - python -m pip install . 
- python -m pip freeze
- npm install -g azurite
- name: Run Tests
- shell: bash -l {0}
- run: |
- conda activate zarr-env
- mkdir ~/blob_emulator
- azurite -l ~/blob_emulator --debug debug.log 2>&1 > stdouterr.log &
- pytest -sv --timeout=300
- env:
- ZARR_TEST_ABS: 1
- ZARR_V3_EXPERIMENTAL_API: 1
- ZARR_V3_SHARDING: 1
- - name: Conda info
- shell: bash -l {0}
- run: conda info
- - name: Conda list
- shell: pwsh
- run: conda list

From 60b53b059e0d5b4fd57dab74520af0184da32006 Mon Sep 17 00:00:00 2001
From: Joe Hamman 
Date: Thu, 25 Apr 2024 06:14:40 -0700
Subject: [PATCH 0499/1078] add note to the top of the release page noting the
 plan for 2.18.* and 3.0 (#1816)

---
 docs/release.rst | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docs/release.rst b/docs/release.rst
index 07c2a47e7c..1c4e52f7c0 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -13,6 +13,11 @@ Release notes
     # to document your changes. On releases it will be
     # re-indented so that it does not show up in the notes.
 
+.. note::
+   Zarr-Python 2.18.* is expected to be the final release in the 2.* series. Work on Zarr-Python 3.0 is underway.
+   See `GH1777 `_ for more details on the upcoming
+   3.0 release.
+
 .. _unreleased:

Unreleased

From f4f6e8692026201223c04850bbe13e0551471c39 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 29 Apr 2024 13:12:24 +0200
Subject: [PATCH 0500/1078] Bump conda-incubator/setup-miniconda from 3.0.3 to
 3.0.4 (#1824)

Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 3.0.3 to 3.0.4.
- [Release notes](https://github.com/conda-incubator/setup-miniconda/releases)
- [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md)
- [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v3.0.3...v3.0.4)

---
updated-dependencies:
- dependency-name: conda-incubator/setup-miniconda
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/minimal.yml | 2 +- .github/workflows/python-package.yml | 2 +- .github/workflows/windows-testing.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml index dba6918514..b5b2f48d62 100644 --- a/.github/workflows/minimal.yml +++ b/.github/workflows/minimal.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3.0.3 + uses: conda-incubator/setup-miniconda@v3.0.4 with: channels: conda-forge environment-file: environment.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 8ff6e9a2eb..f53cb2d9a9 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -44,7 +44,7 @@ jobs: with: fetch-depth: 0 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3.0.3 + uses: conda-incubator/setup-miniconda@v3.0.4 with: channels: conda-forge python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/windows-testing.yml b/.github/workflows/windows-testing.yml index d580ef3f0e..ab86831aae 100644 --- a/.github/workflows/windows-testing.yml +++ b/.github/workflows/windows-testing.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v3.0.3 + - uses: conda-incubator/setup-miniconda@v3.0.4 with: auto-update-conda: true python-version: ${{ matrix.python-version }} From 29b4acc45ed7b1f865c31bfa5d7beda4b3b9269a Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 30 Apr 2024 09:17:05 -0700 Subject: [PATCH 0501/1078] dep(docs): deprecate experimental v3 support in docs (#1807) * dep(docs): deprecate experimental v3 support in docs * Apply suggestions from code review Co-authored-by: Josh Moore Co-authored-by: Sanket Verma --------- Co-authored-by: Josh Moore Co-authored-by: Sanket Verma --- docs/api/v3.rst | 9 ++++----- docs/release.rst | 6 ++++++ zarr/convenience.py | 25 +++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 5 deletions(-) diff --git a/docs/api/v3.rst b/docs/api/v3.rst index 7665b2ddd1..3503e3fe81 100644 --- a/docs/api/v3.rst +++ b/docs/api/v3.rst @@ -1,13 +1,12 @@ V3 Specification Implementation(``zarr._storage.v3``) ===================================================== -This module contains the implementation of the `Zarr V3 Specification `_. +This module contains an experimental implementation of the `Zarr V3 Specification `_. .. warning:: - Since Zarr Python 2.12 release, this module provides experimental infrastructure for reading and - writing the upcoming V3 spec of the Zarr format. Users wishing to prepare for the migration can set - the environment variable ``ZARR_V3_EXPERIMENTAL_API=1`` to begin experimenting, however data - written with this API should be expected to become stale, as the implementation will still change. + The experimental v3 implementation included in Zarr Python >2.12,<3 is not aligned with the final + V3 specification. This version is deprecated and will be removed in Zarr Python 3.0 in favor of a + spec compliant version. The new ``zarr._store.v3`` package has the necessary classes and functions for evaluating Zarr V3. 
Since the design is not finalised, the classes and functions are not automatically imported into
diff --git a/docs/release.rst b/docs/release.rst
index 1c4e52f7c0..811ede3d58 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -36,6 +36,12 @@ Docs
 Maintenance
 ~~~~~~~~~~~
 
+Deprecations
+~~~~~~~~~~~~
+
+* Deprecate experimental v3 support by issuing a `FutureWarning`.
+  Also updated docs to warn about using the experimental v3 version.
+  By :user:`Joe Hamman ` :issue:`1802` and :issue:`1807`.
 
 .. _release_2.17.2:
 
 2.17.2
diff --git a/zarr/convenience.py b/zarr/convenience.py
index 7ca5d426f0..bd284e0844 100644
--- a/zarr/convenience.py
+++ b/zarr/convenience.py
@@ -55,6 +55,11 @@ def open(store: StoreLike = None, mode: str = "a", *, zarr_version=None, path=No
         The zarr protocol version to use. The default value of None will attempt
         to infer the version from `store` if possible, otherwise it will fall back to 2.
+
+        .. warning:: `zarr_version=3` is currently using the experimental Zarr V3
+            implementation. This implementation is not in sync with the final specification
+            and will be replaced with a spec compliant version in version 3.0.
+
     path : str or None, optional
         The path within the store to open.
     **kwargs
@@ -150,6 +155,11 @@ def save_array(store: StoreLike, arr, *, zarr_version=None, path=None, **kwargs)
         The zarr protocol version to use when saving. The default value of None
         will attempt to infer the version from `store` if possible, otherwise
         it will fall back to 2.
+
+        .. warning:: `zarr_version=3` is currently using the experimental Zarr V3
+            implementation. This implementation is not in sync with the final specification
+            and will be replaced with a spec compliant version in version 3.0.
+
     path : str or None, optional
         The path within the store where the array will be saved.
     kwargs
@@ -200,6 +210,11 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs):
         The zarr protocol version to use when saving. The default value of None
         will attempt to infer the version from `store` if possible, otherwise
         it will fall back to 2.
+
+        .. warning:: `zarr_version=3` is currently using the experimental Zarr V3
+            implementation. This implementation is not in sync with the final specification
+            and will be replaced with a spec compliant version in version 3.0.
+
     path : str or None, optional
         Path within the store where the group will be saved.
     kwargs
@@ -282,6 +297,11 @@ def save(store: StoreLike, *args, zarr_version=None, path=None, **kwargs):
         The zarr protocol version to use when saving. The default value of None
         will attempt to infer the version from `store` if possible, otherwise
         it will fall back to 2.
+
+        .. warning:: `zarr_version=3` is currently using the experimental Zarr V3
+            implementation. This implementation is not in sync with the final specification
+            and will be replaced with a spec compliant version in version 3.0.
+
     path : str or None, optional
         The path within the group where the arrays will be saved.
     kwargs
@@ -395,6 +415,11 @@ def load(store: StoreLike, zarr_version=None, path=None):
         The zarr protocol version to use when loading. The default value of None
         will attempt to infer the version from `store` if possible, otherwise
         it will fall back to 2.
+
+        .. warning:: `zarr_version=3` is currently using the experimental Zarr V3
+            implementation. This implementation is not in sync with the final specification
+            and will be replaced with a spec compliant version in version 3.0.
+ path : str or None, optional The path within the store from which to load. From 5889e96ca22cdc2f5a1878048b3a859af8743d6c Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 30 Apr 2024 17:29:03 +0100 Subject: [PATCH 0502/1078] Disallow incomplete type definitions (#1814) --- pyproject.toml | 9 +++++++++ src/zarr/array.py | 12 ++++++------ src/zarr/codecs/sharding.py | 4 ++-- src/zarr/indexing.py | 6 +++--- src/zarr/store/local.py | 3 ++- 5 files changed, 22 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3dcda98980..8244947a1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -166,6 +166,8 @@ check_untyped_defs = true disallow_untyped_decorators = true disallow_any_generics = true +disallow_incomplete_defs = true + [[tool.mypy.overrides]] module = [ "zarr.v2._storage.store", @@ -196,6 +198,13 @@ module = [ ] disallow_any_generics = false +[[tool.mypy.overrides]] +module = [ + "zarr.v2.*", + "zarr.array_v2", + "zarr.group" +] +disallow_incomplete_defs = false [tool.pytest.ini_options] doctest_optionflags = [ diff --git a/src/zarr/array.py b/src/zarr/array.py index c1263230c0..18e26b64dd 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -39,7 +39,7 @@ from zarr.sync import sync -def parse_array_metadata(data: Any): +def parse_array_metadata(data: Any) -> ArrayMetadata: if isinstance(data, ArrayMetadata): return data elif isinstance(data, dict): @@ -192,7 +192,7 @@ def dtype(self) -> np.dtype: def attrs(self) -> dict: return self.metadata.attributes - async def getitem(self, selection: Selection): + async def getitem(self, selection: Selection) -> np.ndarray: assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) indexer = BasicIndexer( selection, @@ -231,7 +231,7 @@ async def _read_chunk( chunk_selection: SliceSelection, out_selection: SliceSelection, out: np.ndarray, - ): + ) -> None: chunk_spec = self.metadata.get_chunk_spec(chunk_coords) chunk_key_encoding = self.metadata.chunk_key_encoding chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) @@ -301,7 +301,7 @@ async def _write_chunk( chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, - ): + ) -> None: chunk_spec = self.metadata.get_chunk_spec(chunk_coords) chunk_key_encoding = self.metadata.chunk_key_encoding chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) @@ -350,7 +350,7 @@ async def _write_chunk( async def _write_chunk_to_store( self, store_path: StorePath, chunk_array: np.ndarray, chunk_spec: ArraySpec - ): + ) -> None: if np.all(chunk_array == self.metadata.fill_value): # chunks that only contain fill_value will be removed await store_path.delete() @@ -514,7 +514,7 @@ def metadata(self) -> ArrayMetadata: def store_path(self) -> StorePath: return self._async_array.store_path - def __getitem__(self, selection: Selection): + def __getitem__(self, selection: Selection) -> np.ndarray: return sync( self._async_array.getitem(selection), self._async_array.runtime_configuration.asyncio_loop, diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index 948e46f132..d4f8b7dfc9 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -195,7 +195,7 @@ def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardBuilder: obj.index = _ShardIndex.create_empty(chunks_per_shard) return obj - def append(self, chunk_coords: ChunkCoords, value: BytesLike): + def append(self, chunk_coords: ChunkCoords, value: BytesLike) -> None: chunk_start = len(self.buf) chunk_length = len(value) 
self.buf.extend(value) @@ -424,7 +424,7 @@ async def _read_chunk( shard_spec: ArraySpec, runtime_configuration: RuntimeConfiguration, out: np.ndarray, - ): + ) -> None: chunk_spec = self._get_chunk_spec(shard_spec) chunk_bytes = shard_dict.get(chunk_coords, None) if chunk_bytes is not None: diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 7c1a4df226..75bed63384 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -13,7 +13,7 @@ def _ensure_tuple(v: Selection) -> SliceSelection: return v -def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords): +def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords) -> None: raise IndexError( "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) ) @@ -23,7 +23,7 @@ def _err_negative_step(): raise IndexError("only slices with step >= 1 are supported") -def _check_selection_length(selection: SliceSelection, shape: ChunkCoords): +def _check_selection_length(selection: SliceSelection, shape: ChunkCoords) -> None: if len(selection) > len(shape): _err_too_many_indices(selection, shape) @@ -179,7 +179,7 @@ def c_order_iter(chunks_per_shard: ChunkCoords) -> Iterator[ChunkCoords]: return itertools.product(*(range(x) for x in chunks_per_shard)) -def is_total_slice(item: Selection, shape: ChunkCoords): +def is_total_slice(item: Selection, shape: ChunkCoords) -> bool: """Determine whether `item` specifies a complete slice of array with the given `shape`. Used to optimize __setitem__ operations on the Chunk class.""" diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 1e9e880875..73e3c6c0e1 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -34,13 +34,14 @@ def _put( value: BytesLike, start: Optional[int] = None, auto_mkdir: bool = True, -): +) -> int | None: if auto_mkdir: path.parent.mkdir(parents=True, exist_ok=True) if start is not None: with path.open("r+b") as f: f.seek(start) f.write(value) + return None else: return path.write_bytes(value) From c1323c4dd0c41d2bf94b59e3ac3090f5c040ba23 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Thu, 2 May 2024 02:01:53 +0200 Subject: [PATCH 0503/1078] fix: add mypy to test dependencies (#1789) Co-authored-by: Joe Hamman --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8244947a1e..76a3c668d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,8 @@ extra-dependencies = [ "zstandard", "crc32c", "pytest-asyncio", - "typing_extensions" + "typing_extensions", + "mypy" ] features = ["extra"] From 7fa17b8e1350e8fd254ea530c7806b484fe1fc60 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 May 2024 00:26:02 +0000 Subject: [PATCH 0504/1078] Bump h5py from 3.10.0 to 3.11.0 (#1786) Bumps [h5py](https://github.com/h5py/h5py) from 3.10.0 to 3.11.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.10.0...3.11.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 809d1c0eee..951594460a 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -17,7 +17,7 @@ coverage pytest-cov==5.0.0 pytest-doctestplus==1.2.1 pytest-timeout==2.3.1 -h5py==3.10.0 +h5py==3.11.0 fsspec==2023.12.2 s3fs==2023.12.2 moto[server]>=5.0.1 From 9331430dfb1e348b8b2e5c6290cbce0956fd0cf1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 May 2024 00:26:56 +0000 Subject: [PATCH 0505/1078] Bump redis from 5.0.3 to 5.0.4 (#1810) Bumps [redis](https://github.com/redis/redis-py) from 5.0.3 to 5.0.4. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v5.0.3...v5.0.4) --- updated-dependencies: - dependency-name: redis dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_dev_optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_dev_optional.txt b/requirements_dev_optional.txt index 951594460a..3456cca21a 100644 --- a/requirements_dev_optional.txt +++ b/requirements_dev_optional.txt @@ -8,7 +8,7 @@ ipywidgets==8.1.2 # don't let pyup change pinning for azure-storage-blob, need to pin to older # version to get compatibility with azure storage emulator on appveyor (FIXME) azure-storage-blob==12.16.0 # pyup: ignore -redis==5.0.3 +redis==5.0.4 types-redis types-setuptools pymongo==4.6.3 From f44fd091da8a6ee4c5df5c8f1347bfbf94f4ceea Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 3 May 2024 18:59:18 +0100 Subject: [PATCH 0506/1078] Disallow untyped calls (#1811) * Simplify mypy override lists * Disallow untyped calls --- pyproject.toml | 27 +++++++++++++++++---------- src/zarr/common.py | 2 +- src/zarr/indexing.py | 4 ++-- 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 76a3c668d3..9887c824ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -168,24 +168,19 @@ disallow_untyped_decorators = true disallow_any_generics = true disallow_incomplete_defs = true +disallow_untyped_calls = true [[tool.mypy.overrides]] module = [ - "zarr.v2._storage.store", - "zarr.v2._storage.v3_storage_transformers", + "zarr.v2.*", "zarr.group", - "zarr.v2.core", - "zarr.v2.hierarchy", - "zarr.v2.indexing", - "zarr.v2.storage", - "zarr.v2.sync", - "zarr.v2.util", "tests.*", ] check_untyped_defs = false [[tool.mypy.overrides]] module = [ + "zarr.v2.*", "zarr.abc.codec", "zarr.codecs.bytes", "zarr.codecs.pipeline", @@ -194,8 +189,6 @@ module = [ "zarr.array_v2", "zarr.array", "zarr.sync", - "zarr.v2.convenience", - "zarr.v2.meta", ] disallow_any_generics = false @@ -207,6 +200,20 @@ module = [ ] disallow_incomplete_defs = false +[[tool.mypy.overrides]] +module = [ + "zarr.v2.*", + "zarr.array_v2", + "zarr.array", + "zarr.common", + "zarr.store.local", + "zarr.codecs.blosc", + "zarr.codecs.gzip", + "zarr.codecs.zstd", +] +disallow_untyped_calls = false + + [tool.pytest.ini_options] doctest_optionflags = [ "NORMALIZE_WHITESPACE", diff --git 
a/src/zarr/common.py b/src/zarr/common.py index 6940ec3fe3..7d8431f97e 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -79,7 +79,7 @@ class ArraySpec: dtype: np.dtype[Any] fill_value: Any - def __init__(self, shape, dtype, fill_value): + def __init__(self, shape: ChunkCoords, dtype: np.dtype[Any], fill_value: Any) -> None: shape_parsed = parse_shapelike(shape) dtype_parsed = parse_dtype(dtype) fill_value_parsed = parse_fill_value(fill_value) diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 75bed63384..9f324eb5ea 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -19,7 +19,7 @@ def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords) -> None ) -def _err_negative_step(): +def _err_negative_step() -> None: raise IndexError("only slices with step >= 1 are supported") @@ -50,7 +50,7 @@ class _ChunkDimProjection(NamedTuple): dim_out_sel: Optional[slice] -def _ceildiv(a, b): +def _ceildiv(a: float, b: float) -> int: return math.ceil(a / b) From 19a28dfc54763c9a49e0bba2594d3124130f69a2 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 6 May 2024 08:27:56 -0700 Subject: [PATCH 0507/1078] chore(ci): add numpy 2 release candidate to test matrix (#1828) * chore(ci): add numpy 2 release candidate to test matrix * also add to pyproject.toml * list env in ci * specify numpy in matrix deps * add list-env * use np.inf * more inf fixes --- .github/workflows/test.yml | 5 +++-- pyproject.toml | 8 ++++++-- src/zarr/v2/meta.py | 4 ++-- tests/v2/test_meta.py | 4 ++-- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e767541c75..fee7380511 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -17,7 +17,7 @@ jobs: strategy: matrix: python-version: ['3.10', '3.11'] - numpy-version: ['1.24', '1.26'] + numpy-version: ['1.24', '1.26', '2.0.0rc1'] dependency-set: ["minimal", "optional"] steps: @@ -33,7 +33,8 @@ jobs: pip install hatch - name: Set Up Hatch Env run: | - hatch env create + hatch env create test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} + hatch env run -e test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} list-env - name: Run Tests run: | hatch env run --env test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} run diff --git a/pyproject.toml b/pyproject.toml index 9887c824ca..b60b78dcdc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,6 +92,9 @@ version.source = "vcs" build.hooks.vcs.version-file = "src/zarr/_version.py" [tool.hatch.envs.test] +dependencies = [ + "numpy~={matrix:numpy}", +] extra-dependencies = [ "coverage", "pytest", @@ -108,12 +111,12 @@ features = ["extra"] [[tool.hatch.envs.test.matrix]] python = ["3.10", "3.11"] -numpy = ["1.24", "1.26"] +numpy = ["1.24", "1.26", "2.0.0rc1"] version = ["minimal"] [[tool.hatch.envs.test.matrix]] python = ["3.10", "3.11"] -numpy = ["1.24", "1.26"] +numpy = ["1.24", "1.26", "2.0.0rc1"] features = ["optional"] [tool.hatch.envs.test.scripts] @@ -121,6 +124,7 @@ run-coverage = "pytest --cov-config=pyproject.toml --cov=pkg --cov=tests" run = "run-coverage --no-cov" run-verbose = "run-coverage --verbose" run-mypy = "mypy src" +list-env = "pip list" [tool.hatch.envs.docs] features = ['docs'] diff --git a/src/zarr/v2/meta.py b/src/zarr/v2/meta.py index ee9cc57389..2f2f3b9487 100644 --- a/src/zarr/v2/meta.py +++ b/src/zarr/v2/meta.py @@ -217,9 +217,9 @@ def 
decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> if v == "NaN": return np.nan elif v == "Infinity": - return np.PINF + return np.inf elif v == "-Infinity": - return np.NINF + return -np.inf else: return np.array(v, dtype=dtype)[()] elif dtype.kind in "c": diff --git a/tests/v2/test_meta.py b/tests/v2/test_meta.py index e6531adfb5..b7c00ec64c 100644 --- a/tests/v2/test_meta.py +++ b/tests/v2/test_meta.py @@ -325,8 +325,8 @@ def test_encode_decode_array_structured(): def test_encode_decode_fill_values_nan(): fills = ( (np.nan, "NaN", np.isnan), - (np.NINF, "-Infinity", np.isneginf), - (np.PINF, "Infinity", np.isposinf), + (-np.inf, "-Infinity", np.isneginf), + (np.inf, "Infinity", np.isposinf), ) for v, s, f in fills: From 2f2914456e391ec77032223e9587b1d3ef5608aa Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 6 May 2024 15:33:09 -0700 Subject: [PATCH 0508/1078] deprecate(stores): add deprecation warnings to stores that we plan to remove in v3 (#1801) * deprecate(stores): add deprecation warnings to DBMStore, LMDBStore, SQLiteStore, MongoDBStore, RedisStore, and ABSStore * filter warnings in pytest config * more deprecation warnings in docstrings * add release note --- docs/release.rst | 7 ++++ pyproject.toml | 1 + zarr/_storage/absstore.py | 15 ++++++++- zarr/_storage/store.py | 5 +++ zarr/storage.py | 68 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 95 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index 811ede3d58..5184ab5f9f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -43,6 +43,13 @@ Deprecations Also updated docs to warn about using the experimental v3 version. By :user:`Joe Hamman ` :issue:`1802` and :issue: `1807`. +Deprecations +~~~~~~~~~~~~ +* Deprecate the following stores: :class:`zarr.storage.DBMStore`, :class:`zarr.storage.LMDBStore`, + :class:`zarr.storage.SQLiteStore`, :class:`zarr.storage.MongoDBStore`, :class:`zarr.storage.RedisStore`, + and :class:`zarr.storage.ABSStore`. These stores are slated to be removed from Zarr-Python in version 3.0. + By :user:`Joe Hamman ` :issue:`1801`. + .. 
_release_2.17.2: 2.17.2 diff --git a/pyproject.toml b/pyproject.toml index 904c974424..f2356480bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -137,6 +137,7 @@ filterwarnings = [ "error:::zarr.*", "ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning", "ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning", + "ignore:The .* is deprecated and will be removed in a Zarr-Python version 3*:FutureWarning", "ignore:The experimental Zarr V3 implementation in this version .*:FutureWarning", ] diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index 217b2a29e0..5d2606f2f2 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -5,7 +5,14 @@ from numcodecs.compat import ensure_bytes from zarr.util import normalize_storage_path -from zarr._storage.store import _get_metadata_suffix, data_root, meta_root, Store, StoreV3 +from zarr._storage.store import ( + _get_metadata_suffix, + data_root, + meta_root, + Store, + StoreV3, + V3_DEPRECATION_MESSAGE, +) from zarr.types import DIMENSION_SEPARATOR __doctest_requires__ = { @@ -73,6 +80,12 @@ def __init__( dimension_separator: Optional[DIMENSION_SEPARATOR] = None, client=None, ): + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=3, + ) + self._dimension_separator = dimension_separator self.prefix = normalize_storage_path(prefix) if client is None: diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index 69986ecadd..dba29d13c0 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -26,6 +26,11 @@ v3_api_available = os.environ.get("ZARR_V3_EXPERIMENTAL_API", "0").lower() not in ["0", "false"] _has_warned_about_v3 = False # to avoid printing the warning multiple times +V3_DEPRECATION_MESSAGE = ( + "The {store} is deprecated and will be removed in a Zarr-Python version 3, see " + "https://github.com/zarr-developers/zarr-python/issues/1274 for more information." +) + def assert_zarr_v3_api_available(): # we issue a warning about the experimental v3 implementation when it is first used diff --git a/zarr/storage.py b/zarr/storage.py index 10f55f0ba3..772fa7646a 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -88,6 +88,7 @@ DEFAULT_ZARR_VERSION, BaseStore, Store, + V3_DEPRECATION_MESSAGE, ) __doctest_requires__ = { @@ -1604,6 +1605,12 @@ class NestedDirectoryStore(DirectoryStore): special handling for chunk keys so that chunk files for multidimensional arrays are stored in a nested directory tree. + .. deprecated:: 2.18.0 + NestedDirectoryStore will be removed in Zarr-Python 3.0 where controlling + the chunk key encoding will be supported as part of the array metadata. See + `GH1274 `_ + for more information. + Parameters ---------- path : string @@ -1675,6 +1682,13 @@ class NestedDirectoryStore(DirectoryStore): def __init__( self, path, normalize_keys=False, dimension_separator: Optional[DIMENSION_SEPARATOR] = "/" ): + + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=2, + ) + super().__init__(path, normalize_keys=normalize_keys) if dimension_separator is None: dimension_separator = "/" @@ -1995,6 +2009,11 @@ def migrate_1to2(store): class DBMStore(Store): """Storage class using a DBM-style database. + .. deprecated:: 2.18.0 + DBMStore will be removed in Zarr-Python 3.0. See + `GH1274 `_ + for more information. 
+ Parameters ---------- path : string @@ -2083,6 +2102,12 @@ def __init__( dimension_separator: Optional[DIMENSION_SEPARATOR] = None, **open_kwargs, ): + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=2, + ) + if open is None: import dbm @@ -2200,6 +2225,10 @@ class LMDBStore(Store): """Storage class using LMDB. Requires the `lmdb `_ package to be installed. + .. deprecated:: 2.18.0 + LMDBStore will be removed in Zarr-Python 3.0. See + `GH1274 `_ + for more information. Parameters ---------- @@ -2261,6 +2290,12 @@ def __init__( ): import lmdb + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=2, + ) + # set default memory map size to something larger than the lmdb default, which is # very likely to be too small for any moderate array (logic copied from zict) map_size = 2**40 if sys.maxsize >= 2**32 else 2**28 @@ -2580,6 +2615,11 @@ def __delitem__(self, key): class SQLiteStore(Store): """Storage class using SQLite. + .. deprecated:: 2.18.0 + SQLiteStore will be removed in Zarr-Python 3.0. See + `GH1274 `_ + for more information. + Parameters ---------- path : string @@ -2612,6 +2652,12 @@ class SQLiteStore(Store): def __init__(self, path, dimension_separator: Optional[DIMENSION_SEPARATOR] = None, **kwargs): import sqlite3 + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=2, + ) + self._dimension_separator = dimension_separator # normalize path @@ -2778,6 +2824,11 @@ class MongoDBStore(Store): .. note:: This is an experimental feature. + .. deprecated:: 2.18.0 + MongoDBStore will be removed in Zarr-Python 3.0. See + `GH1274 `_ + for more information. + Requires the `pymongo `_ package to be installed. @@ -2810,6 +2861,12 @@ def __init__( ): import pymongo + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=2, + ) + self._database = database self._collection = collection self._dimension_separator = dimension_separator @@ -2866,6 +2923,11 @@ class RedisStore(Store): .. note:: This is an experimental feature. + .. deprecated:: 2.18.0 + RedisStore will be removed in Zarr-Python 3.0. See + `GH1274 `_ + for more information. + Requires the `redis `_ package to be installed. 
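For downstream code that needs time to migrate off these stores, the FutureWarning introduced here can be filtered with the standard warnings machinery, the same way this commit's pyproject.toml change silences it for the test suite. A minimal sketch; the store class and database path are illustrative only:

    import warnings

    from zarr.storage import SQLiteStore

    with warnings.catch_warnings():
        # Regex matches the V3_DEPRECATION_MESSAGE template added above.
        warnings.filterwarnings(
            "ignore",
            message="The .* is deprecated and will be removed in a Zarr-Python version 3",
            category=FutureWarning,
        )
        store = SQLiteStore("example.sqlite")  # constructed without the warning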
@@ -2885,6 +2947,12 @@ def __init__( ): import redis + warnings.warn( + V3_DEPRECATION_MESSAGE.format(store=self.__class__.__name__), + FutureWarning, + stacklevel=2, + ) + self._prefix = prefix self._kwargs = kwargs self._dimension_separator = dimension_separator From dd0ea50bcf5b061e0b8b18e927f8770279c33492 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Tue, 7 May 2024 01:27:37 +0200 Subject: [PATCH 0509/1078] fix dependencies (#1840) --- pyproject.toml | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b60b78dcdc..37a1e817c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,9 @@ dependencies = [ 'numpy>=1.24', 'fasteners', 'numcodecs>=0.10.0', + 'crc32c', + 'zstandard', + 'typing_extensions', ] dynamic = [ "version", @@ -54,15 +57,9 @@ docs = [ 'numcodecs[msgpack]', "msgpack", "lmdb", - "zstandard", - "crc32c", ] extra = [ - 'attrs', - 'cattrs', 'msgpack', - 'crc32c', - 'zstandard' ] optional = [ 'lmdb', @@ -101,10 +98,7 @@ extra-dependencies = [ "pytest-cov", "msgpack", "lmdb", - "zstandard", - "crc32c", "pytest-asyncio", - "typing_extensions", "mypy" ] features = ["extra"] From 863c3c0838adc1f65eb5fcddb6cf8fd65480acb5 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 6 May 2024 21:56:37 -0700 Subject: [PATCH 0510/1078] use np.inf instead of PINF/NINF (#1842) * use np.inf instead of PINF/NINF * update release notes --- docs/release.rst | 12 +++++------- zarr/meta.py | 4 ++-- zarr/tests/test_meta.py | 4 ++-- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 5184ab5f9f..0cb1777210 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -25,9 +25,8 @@ Unreleased Enhancements ~~~~~~~~~~~~ -* Performance improvement for reading and writing chunks if any of the dimensions is size 1. :issue:`1730` - By :user:`Deepak Cherian `. - +* Performance improvement for reading and writing chunks if any of the dimensions is size 1. + By :user:`Deepak Cherian ` :issue:`1730`. Docs ~~~~ @@ -35,16 +34,15 @@ Docs Maintenance ~~~~~~~~~~~ +* Minor updates to use `np.inf` instead of `np.PINF` / `np.NINF` in preparation for NumPy 2.0.0 release. + By :user:`Joe Hamman ` :issue:`1842`. Deprecations ~~~~~~~~~~~~ * Deprecate experimental v3 support by issuing a `FutureWarning`. Also updated docs to warn about using the experimental v3 version. - By :user:`Joe Hamman ` :issue:`1802` and :issue: `1807`. - -Deprecations -~~~~~~~~~~~~ + By :user:`Joe Hamman ` :issue:`1802` and :issue:`1807`. * Deprecate the following stores: :class:`zarr.storage.DBMStore`, :class:`zarr.storage.LMDBStore`, :class:`zarr.storage.SQLiteStore`, :class:`zarr.storage.MongoDBStore`, :class:`zarr.storage.RedisStore`, and :class:`zarr.storage.ABSStore`. These stores are slated to be removed from Zarr-Python in version 3.0. 
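np.PINF and np.NINF were removed outright in NumPy 2.0, while np.inf has existed across the whole 1.x series, so the one-for-one substitution below runs unchanged on both major versions. A small standalone check, separate from the patch itself:

    import numpy as np

    # The plain float infinities are drop-in replacements for the
    # removed np.PINF / np.NINF aliases.
    assert np.isposinf(np.inf)
    assert np.isneginf(-np.inf)
    assert np.inf == float("inf") and -np.inf == float("-inf")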
diff --git a/zarr/meta.py b/zarr/meta.py index 4b360270de..747d8bec8a 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -227,9 +227,9 @@ def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> if v == "NaN": return np.nan elif v == "Infinity": - return np.PINF + return np.inf elif v == "-Infinity": - return np.NINF + return -np.inf else: return np.array(v, dtype=dtype)[()] elif dtype.kind in "c": diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py index c85d3f923f..54347835d7 100644 --- a/zarr/tests/test_meta.py +++ b/zarr/tests/test_meta.py @@ -382,8 +382,8 @@ def test_encode_decode_array_structured(): def test_encode_decode_fill_values_nan(): fills = ( (np.nan, "NaN", np.isnan), - (np.NINF, "-Infinity", np.isneginf), - (np.PINF, "Infinity", np.isposinf), + (-np.inf, "-Infinity", np.isneginf), + (np.inf, "Infinity", np.isposinf), ) for v, s, f in fills: From 360eb53f636c33bf255682f53078e91bf6e24094 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 22:40:05 -0700 Subject: [PATCH 0511/1078] chore: update pre-commit hooks (#1825) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.1 → v0.4.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.1...v0.4.3) - [github.com/psf/black: 24.4.0 → 24.4.2](https://github.com/psf/black/compare/24.4.0...24.4.2) - [github.com/pre-commit/mirrors-mypy: v1.9.0 → v1.10.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.9.0...v1.10.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0aa13b31a3..747cb86688 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,11 +8,11 @@ default_language_version: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.4.1' + rev: 'v0.4.3' hooks: - id: ruff - repo: https://github.com/psf/black - rev: 24.4.0 + rev: 24.4.2 hooks: - id: black - repo: https://github.com/codespell-project/codespell @@ -24,7 +24,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.9.0 + rev: v1.10.0 hooks: - id: mypy files: zarr From 7e944f099172fa1b7413c7b54a7c7685a6538d4f Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 7 May 2024 15:39:15 +0100 Subject: [PATCH 0512/1078] Add pytest to mypy dependencies (#1846) --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3fec787b74..dcbccf6e97 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,3 +31,4 @@ repos: - types-redis - types-setuptools - attrs + - pytest From 270aff18dc780a0a9e7409455277aae017190765 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 7 May 2024 17:29:34 +0200 Subject: [PATCH 0513/1078] Enable ruff/bugbear rules (B) and fix issues (#1702) * Enable ruff/bugbear rules (B) As suggested by Repo-Review. * Fix ruff/bugbear issue (B007) B007 Loop control variable `key` not used within loop body https://docs.astral.sh/ruff/rules/unused-loop-control-variable/ * Fix ruff/bugbear issue (B015) B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it. 
https://docs.astral.sh/ruff/rules/useless-comparison/ * Fix ruff/bugbear issues (B028) B028 No explicit `stacklevel` keyword argument found https://docs.astral.sh/ruff/rules/no-explicit-stacklevel/ * Fix ruff/bugbear issues (B904) B904 Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling https://docs.astral.sh/ruff/rules/raise-without-from-inside-except/ * Document changes in docs/release.rst * Disable ruff/bugbear rule (B017) B017 `pytest.raises(Exception)` should be considered evil https://docs.astral.sh/ruff/rules/assert-raises-exception/ --------- Co-authored-by: Joe Hamman --- docs/release.rst | 3 +++ pyproject.toml | 5 +++++ zarr/_storage/absstore.py | 8 ++++---- zarr/_storage/v3_storage_transformers.py | 8 ++++---- zarr/core.py | 4 ++-- zarr/creation.py | 2 +- zarr/hierarchy.py | 10 +++++----- zarr/indexing.py | 2 +- zarr/meta.py | 6 +++--- zarr/meta_v1.py | 2 +- zarr/n5.py | 22 +++++++++++++++++----- zarr/storage.py | 16 ++++++++++------ zarr/tests/test_meta.py | 2 +- zarr/tests/test_storage.py | 6 +++--- zarr/tests/test_storage_v3.py | 2 +- zarr/util.py | 12 ++++++------ 16 files changed, 67 insertions(+), 43 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 0cb1777210..e2bc40bf99 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -34,6 +34,9 @@ Docs Maintenance ~~~~~~~~~~~ +* Enable ruff/bugbear rules (B) and fix issues. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1702`. + * Minor updates to use `np.inf` instead of `np.PINF` / `np.NINF` in preparation for NumPy 2.0.0 release. By :user:`Joe Hamman ` :issue:`1842`. diff --git a/pyproject.toml b/pyproject.toml index f2356480bd..dacd45ec2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,6 +103,11 @@ exclude = [ "docs" ] +[tool.ruff.lint] +extend-select = [ + "B" +] + [tool.black] line-length = 100 exclude = ''' diff --git a/zarr/_storage/absstore.py b/zarr/_storage/absstore.py index 5d2606f2f2..1e49754f38 100644 --- a/zarr/_storage/absstore.py +++ b/zarr/_storage/absstore.py @@ -156,8 +156,8 @@ def __getitem__(self, key): blob_name = self._append_path_to_prefix(key) try: return self.client.download_blob(blob_name).readall() - except ResourceNotFoundError: - raise KeyError(f"Blob {blob_name} not found") + except ResourceNotFoundError as e: + raise KeyError(f"Blob {blob_name} not found") from e def __setitem__(self, key, value): value = ensure_bytes(value) @@ -169,8 +169,8 @@ def __delitem__(self, key): try: self.client.delete_blob(self._append_path_to_prefix(key)) - except ResourceNotFoundError: - raise KeyError(f"Blob {key} not found") + except ResourceNotFoundError as e: + raise KeyError(f"Blob {key} not found") from e def __eq__(self, other): return ( diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py index 37e56f8ecd..00467d44f9 100644 --- a/zarr/_storage/v3_storage_transformers.py +++ b/zarr/_storage/v3_storage_transformers.py @@ -183,8 +183,8 @@ def __getitem__(self, key): shard_key, chunk_subkey = self._key_to_shard(key) try: full_shard_value = self.inner_store[shard_key] - except KeyError: - raise KeyError(key) + except KeyError as e: + raise KeyError(key) from e index = self._get_index_from_buffer(full_shard_value) chunk_slice = index.get_chunk_slice(chunk_subkey) if chunk_slice is not None: @@ -265,8 +265,8 @@ def __delitem__(self, key): shard_key, chunk_subkey = self._key_to_shard(key) try: index = self._get_index_from_store(shard_key) - except 
KeyError: - raise KeyError(key) + except KeyError as e: + raise KeyError(key) from e index.set_chunk_slice(chunk_subkey, None) diff --git a/zarr/core.py b/zarr/core.py index 1bd081acee..6aa86b6465 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -200,8 +200,8 @@ def _load_metadata_nosync(self): try: mkey = _prefix_to_array_key(self._store, self._key_prefix) meta_bytes = self._store[mkey] - except KeyError: - raise ArrayNotFoundError(self._path) + except KeyError as e: + raise ArrayNotFoundError(self._path) from e else: # decode and store metadata as instance members meta = self._store._metadata_class.decode_array_metadata(meta_bytes) diff --git a/zarr/creation.py b/zarr/creation.py index c541531d54..9b2b1d6d4c 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -297,7 +297,7 @@ def _kwargs_compat(compressor, fill_value, kwargs): # ignore other keyword arguments for k in kwargs: - warn(f"ignoring keyword argument {k!r}") + warn(f"ignoring keyword argument {k!r}", stacklevel=2) return compressor, fill_value diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py index 0fb07dd620..8894a5ed57 100644 --- a/zarr/hierarchy.py +++ b/zarr/hierarchy.py @@ -187,16 +187,16 @@ def __init__( mkey = _prefix_to_group_key(self._store, self._key_prefix) assert not mkey.endswith("root/.group") meta_bytes = store[mkey] - except KeyError: + except KeyError as e: if self._version == 2: - raise GroupNotFoundError(path) + raise GroupNotFoundError(path) from e else: implicit_prefix = meta_root + self._key_prefix if self._store.list_prefix(implicit_prefix): # implicit group does not have any metadata self._meta = None else: - raise GroupNotFoundError(path) + raise GroupNotFoundError(path) from e else: self._meta = self._store._metadata_class.decode_group_metadata(meta_bytes) @@ -536,8 +536,8 @@ def __getattr__(self, item): # allow access to group members via dot notation try: return self.__getitem__(item) - except KeyError: - raise AttributeError + except KeyError as e: + raise AttributeError from e def __dir__(self): # noinspection PyUnresolvedReferences diff --git a/zarr/indexing.py b/zarr/indexing.py index 9889fcadad..2f2402fe27 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -932,7 +932,7 @@ def check_fields(fields, dtype): # multiple field selection out_dtype = np.dtype([(f, dtype[f]) for f in fields]) except KeyError as e: - raise IndexError(f"invalid 'fields' argument, field not found: {e!r}") + raise IndexError(f"invalid 'fields' argument, field not found: {e!r}") from e else: return out_dtype else: diff --git a/zarr/meta.py b/zarr/meta.py index 747d8bec8a..5430ab305d 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -310,8 +310,8 @@ def decode_dtype(cls, d, validate=True): # extract the type from the extension info try: d = d["type"] - except KeyError: - raise KeyError("Extended dtype info must provide a key named 'type'.") + except KeyError as e: + raise KeyError("Extended dtype info must provide a key named 'type'.") from e d = cls._decode_dtype_descr(d) dtype = np.dtype(d) if validate: @@ -518,7 +518,7 @@ def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType meta["storage_transformers"] = storage_transformers except Exception as e: - raise MetadataError(f"error decoding metadata: {e}") + raise MetadataError(f"error decoding metadata: {e}") from e else: return meta diff --git a/zarr/meta_v1.py b/zarr/meta_v1.py index 65bfd3488e..714f55f477 100644 --- a/zarr/meta_v1.py +++ b/zarr/meta_v1.py @@ -23,7 +23,7 @@ def decode_metadata(b): order=meta["order"], ) except Exception as 
e: - raise MetadataError(f"error decoding metadata: {e}") + raise MetadataError(f"error decoding metadata: {e}") from e else: return meta diff --git a/zarr/n5.py b/zarr/n5.py index fdd3d5babf..3d3e9afa26 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -125,7 +125,11 @@ def __setitem__(self, key: str, value: Any): for k in n5_keywords: if k in zarr_attrs: - warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning) + warnings.warn( + f"Attribute {k} is a reserved N5 keyword", + UserWarning, + stacklevel=2, + ) # remove previous user attributes for k in list(n5_attrs.keys()): @@ -327,7 +331,10 @@ class N5FSStore(FSStore): def __init__(self, *args, **kwargs): if "dimension_separator" in kwargs: kwargs.pop("dimension_separator") - warnings.warn("Keyword argument `dimension_separator` will be ignored") + warnings.warn( + "Keyword argument `dimension_separator` will be ignored", + stacklevel=2, + ) dimension_separator = "." super().__init__(*args, dimension_separator=dimension_separator, **kwargs) @@ -411,7 +418,11 @@ def __setitem__(self, key: str, value: Any): for k in n5_keywords: if k in zarr_attrs.keys(): - warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning) + warnings.warn( + f"Attribute {k} is a reserved N5 keyword", + UserWarning, + stacklevel=2, + ) # replace previous user attributes for k in list(n5_attrs.keys()): @@ -597,8 +608,8 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic array_metadata["n5"] = N5_FORMAT try: dtype = np.dtype(array_metadata["dataType"]) - except TypeError: - raise TypeError(f"Data type {array_metadata['dataType']} is not supported by N5") + except TypeError as e: + raise TypeError(f"Data type {array_metadata['dataType']} is not supported by N5") from e array_metadata["dataType"] = dtype.name array_metadata["dimensions"] = array_metadata["dimensions"][::-1] @@ -711,6 +722,7 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict "Not all N5 implementations support lzma compression (yet). 
You " "might not be able to open the dataset with another N5 library.", RuntimeWarning, + stacklevel=2, ) n5_config["format"] = _compressor_config["format"] n5_config["check"] = _compressor_config["check"] diff --git a/zarr/storage.py b/zarr/storage.py index 772fa7646a..f412870f75 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -589,11 +589,15 @@ def _init_array_metadata( "missing object_codec for object array; this will raise a " "ValueError in version 3.0", FutureWarning, + stacklevel=2, ) else: filters_config.insert(0, object_codec.get_config()) elif object_codec is not None: - warnings.warn("an object_codec is only needed for object arrays") + warnings.warn( + "an object_codec is only needed for object arrays", + stacklevel=2, + ) # use null to indicate no filters if not filters_config: @@ -869,8 +873,8 @@ def __getitem__(self, item: str): parent, key = self._get_parent(item) try: value = parent[key] - except KeyError: - raise KeyError(item) + except KeyError as e: + raise KeyError(item) from e else: if isinstance(value, self.cls): raise KeyError(item) @@ -888,8 +892,8 @@ def __delitem__(self, item: str): parent, key = self._get_parent(item) try: del parent[key] - except KeyError: - raise KeyError(item) + except KeyError as e: + raise KeyError(item) from e def __contains__(self, item: str): # type: ignore[override] try: @@ -1137,7 +1141,7 @@ def __setitem__(self, key, value): os.makedirs(dir_path) except OSError as e: if e.errno != errno.EEXIST: - raise KeyError(key) + raise KeyError(key) from e # write to temporary file # note we're not using tempfile.NamedTemporaryFile to avoid restrictive file permissions diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py index 54347835d7..f9010d6788 100644 --- a/zarr/tests/test_meta.py +++ b/zarr/tests/test_meta.py @@ -601,7 +601,7 @@ def test_metadata3_exceptions(): required = ["zarr_format", "metadata_encoding", "metadata_key_suffix", "extensions"] for key in required: meta = copy.copy(_default_entry_point_metadata_v3) - meta.pop("zarr_format") + meta.pop(key) with pytest.raises(ValueError): # cannot encode metadata that is missing a required key Metadata3.encode_hierarchy_metadata(meta) diff --git a/zarr/tests/test_storage.py b/zarr/tests/test_storage.py index ae8a56fa61..da690f5959 100644 --- a/zarr/tests/test_storage.py +++ b/zarr/tests/test_storage.py @@ -1353,13 +1353,13 @@ def test_exceptions(self, memory_store): # no exception from FSStore.getitems getting KeyError assert group.store.getitems(["foo"], contexts={}) == {} # exception from FSStore.getitems getting AttributeError - with pytest.raises(Exception): + with pytest.raises(Exception): # noqa: B017 group.store.getitems(["x/0"], contexts={}) # exception from FSStore.getitems getting AttributeError - with pytest.raises(Exception): + with pytest.raises(Exception): # noqa: B017 x[...] # exception from FSStore.__getitem__ getting AttributeError - with pytest.raises(Exception): + with pytest.raises(Exception): # noqa: B017 y[...] 
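The recurring B904 fix in this diff chains the originating exception with `from e`, so a deliberately translated error records the underlying cause (`__cause__`) rather than appearing as a second failure raised while handling the first. A standalone sketch of the pattern, with illustrative names:

    def lookup(mapping: dict, item: str) -> bytes:
        try:
            return mapping[item]
        except KeyError as e:
            # `raise ... from e` marks this as an intentional translation,
            # keeping the original KeyError visible in the traceback.
            raise KeyError(f"member not found: {item}") from e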
diff --git a/zarr/tests/test_storage_v3.py b/zarr/tests/test_storage_v3.py index c096f9cb02..e8675786e0 100644 --- a/zarr/tests/test_storage_v3.py +++ b/zarr/tests/test_storage_v3.py @@ -286,7 +286,7 @@ def test_rename_nonexisting(self): def test_get_partial_values(self): store = self.create_store() - store.supports_efficient_get_partial_values in [True, False] + assert store.supports_efficient_get_partial_values in [True, False] store[data_root + "foo"] = b"abcdefg" store[data_root + "baz"] = b"z" assert [b"a"] == store.get_partial_values([(data_root + "foo", (0, 1))]) diff --git a/zarr/util.py b/zarr/util.py index e58aed80ab..8a96f92c24 100644 --- a/zarr/util.py +++ b/zarr/util.py @@ -198,11 +198,11 @@ def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype args = [] try: object_codec = codec_registry[codec_id](*args) - except KeyError: # pragma: no cover + except KeyError as e: # pragma: no cover raise ValueError( f"codec {codec_id!r} for object type {key!r} is not " f"available; please provide an object_codec manually" - ) + ) from e return dtype, object_codec dtype = np.dtype(dtype) @@ -332,7 +332,7 @@ def normalize_fill_value(fill_value, dtype: np.dtype): raise ValueError( f"fill_value {fill_value!r} is not valid for dtype {dtype}; " f"nested exception: {e}" - ) + ) from e return fill_value @@ -492,13 +492,13 @@ def tree_widget_sublist(node, root=False, expand=False): def tree_widget(group, expand, level): try: import ipytree - except ImportError as error: + except ImportError as e: raise ImportError( - f"{error}: Run `pip install zarr[jupyter]` or `conda install ipytree`" + f"{e}: Run `pip install zarr[jupyter]` or `conda install ipytree`" f"to get the required ipytree dependency for displaying the tree " f"widget. If using jupyterlab<3, you also need to run " f"`jupyter labextension install ipytree`" - ) + ) from e result = ipytree.Tree() root = TreeNode(group, level=level) From 4f5ca4bd3669048779e60e94d4d6d979d1252121 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 7 May 2024 08:30:49 -0700 Subject: [PATCH 0514/1078] chore(pre-commit): update pre-commit versions and remove attrs dep mypy (#1848) section --- .pre-commit-config.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dcbccf6e97..70812439ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: 'v0.4.1' + rev: 'v0.4.3' hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -22,7 +22,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.9.0 + rev: v1.10.0 hooks: - id: mypy files: src @@ -30,5 +30,4 @@ repos: additional_dependencies: - types-redis - types-setuptools - - attrs - pytest From 8d483c98322ce5e22fa223f94f07a2deab532090 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 7 May 2024 08:52:21 -0700 Subject: [PATCH 0515/1078] Remove implicit groups (#1827) * wip: more group members work * remove implicit groups refactor Group.getitem and Group.open to better handle loading members for v2/v3 * tidy --- src/zarr/codecs/bytes.py | 1 - src/zarr/group.py | 112 +++++++++++++++++++++------------------ tests/v3/test_group.py | 14 ++--- 3 files changed, 65 insertions(+), 62 deletions(-) diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index 1b872ac6c6..aa24c3167e 100644 --- a/src/zarr/codecs/bytes.py +++ 
b/src/zarr/codecs/bytes.py @@ -82,7 +82,6 @@ async def decode( dtype = np.dtype(f"{prefix}{chunk_spec.dtype.str[1:]}") else: dtype = np.dtype(f"|{chunk_spec.dtype.str[1:]}") - print(dtype) chunk_array = np.frombuffer(chunk_bytes, dtype) # ensure correct chunk shape diff --git a/src/zarr/group.py b/src/zarr/group.py index c40b5f9a34..c9b729f4c9 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -109,37 +109,52 @@ async def open( cls, store: StoreLike, runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), - zarr_format: Literal[2, 3] = 3, + zarr_format: Literal[2, 3, None] = 3, ) -> AsyncGroup: store_path = make_store_path(store) - zarr_json_bytes = await (store_path / ZARR_JSON).get() - assert zarr_json_bytes is not None - - # TODO: consider trying to autodiscover the zarr-format here - if zarr_format == 3: - # V3 groups are comprised of a zarr.json object - # (it is optional in the case of implicit groups) - zarr_json_bytes = await (store_path / ZARR_JSON).get() - zarr_json = ( - json.loads(zarr_json_bytes) if zarr_json_bytes is not None else {"zarr_format": 3} - ) - elif zarr_format == 2: - # V2 groups are comprised of a .zgroup and .zattrs objects - # (both are optional in the case of implicit groups) + if zarr_format == 2: zgroup_bytes, zattrs_bytes = await asyncio.gather( (store_path / ZGROUP_JSON).get(), (store_path / ZATTRS_JSON).get() ) - zgroup = ( - json.loads(json.loads(zgroup_bytes)) - if zgroup_bytes is not None - else {"zarr_format": 2} + if zgroup_bytes is None: + raise KeyError(store_path) # filenotfounderror? + elif zarr_format == 3: + zarr_json_bytes = await (store_path / ZARR_JSON).get() + if zarr_json_bytes is None: + raise KeyError(store_path) # filenotfounderror? + elif zarr_format is None: + zarr_json_bytes, zgroup_bytes, zattrs_bytes = await asyncio.gather( + (store_path / ZARR_JSON).get(), + (store_path / ZGROUP_JSON).get(), + (store_path / ZATTRS_JSON).get(), ) - zattrs = json.loads(json.loads(zattrs_bytes)) if zattrs_bytes is not None else {} - zarr_json = {**zgroup, "attributes": zattrs} + if zarr_json_bytes is not None and zgroup_bytes is not None: + # TODO: revisit this exception type + # alternatively, we could warn and favor v3 + raise ValueError("Both zarr.json and .zgroup objects exist") + if zarr_json_bytes is None and zgroup_bytes is None: + raise KeyError(store_path) # filenotfounderror? + # set zarr_format based on which keys were found + if zarr_json_bytes is not None: + zarr_format = 3 + else: + zarr_format = 2 else: raise ValueError(f"unexpected zarr_format: {zarr_format}") - return cls.from_dict(store_path, zarr_json, runtime_configuration) + + if zarr_format == 2: + # V2 groups are comprised of a .zgroup and .zattrs objects + assert zgroup_bytes is not None + zgroup = json.loads(zgroup_bytes) + zattrs = json.loads(zattrs_bytes) if zattrs_bytes is not None else {} + group_metadata = {**zgroup, "attributes": zattrs} + else: + # V3 groups are comprised of a zarr.json object + assert zarr_json_bytes is not None + group_metadata = json.loads(zarr_json_bytes) + + return cls.from_dict(store_path, group_metadata, runtime_configuration) @classmethod def from_dict( @@ -174,13 +189,7 @@ async def getitem( if self.metadata.zarr_format == 3: zarr_json_bytes = await (store_path / ZARR_JSON).get() if zarr_json_bytes is None: - # implicit group? 
- logger.warning("group at %s is an implicit group", store_path) - zarr_json = { - "zarr_format": self.metadata.zarr_format, - "node_type": "group", - "attributes": {}, - } + raise KeyError(key) else: zarr_json = json.loads(zarr_json_bytes) if zarr_json["node_type"] == "group": @@ -200,6 +209,9 @@ async def getitem( (store_path / ZATTRS_JSON).get(), ) + if zgroup_bytes is None and zarray_bytes is None: + raise KeyError(key) + # unpack the zarray, if this is None then we must be opening a group zarray = json.loads(zarray_bytes) if zarray_bytes else None # unpack the zattrs, this can be None if no attrs were written @@ -212,9 +224,6 @@ async def getitem( store_path, zarray, runtime_configuration=self.runtime_configuration ) else: - if zgroup_bytes is None: - # implicit group? - logger.warning("group at %s is an implicit group", store_path) zgroup = ( json.loads(zgroup_bytes) if zgroup_bytes is not None @@ -288,7 +297,12 @@ def __repr__(self): return f"" async def nmembers(self) -> int: - raise NotImplementedError + # TODO: consider using aioitertools.builtins.sum for this + # return await aioitertools.builtins.sum((1 async for _ in self.members()), start=0) + n = 0 + async for _ in self.members(): + n += 1 + return n async def members(self) -> AsyncGenerator[tuple[str, AsyncArray | AsyncGroup], None]: """ @@ -321,10 +335,14 @@ async def members(self) -> AsyncGenerator[tuple[str, AsyncArray | AsyncGroup], N logger.warning( "Object at %s is not recognized as a component of a Zarr hierarchy.", subkey ) - pass async def contains(self, member: str) -> bool: - raise NotImplementedError + # TODO: this can be made more efficient. + try: + await self.getitem(member) + return True + except KeyError: + return False # todo: decide if this method should be separate from `groups` async def group_keys(self) -> AsyncGenerator[str, None]: @@ -493,26 +511,18 @@ def members(self) -> tuple[tuple[str, Array | Group], ...]: def __contains__(self, member) -> bool: return self._sync(self._async_group.contains(member)) - def group_keys(self) -> list[str]: - # uncomment with AsyncGroup implements this method - # return self._sync_iter(self._async_group.group_keys()) - raise NotImplementedError + def group_keys(self) -> tuple[str, ...]: + return tuple(self._sync_iter(self._async_group.group_keys())) - def groups(self) -> list[Group]: + def groups(self) -> tuple[Group, ...]: # TODO: in v2 this was a generator that return key: Group - # uncomment with AsyncGroup implements this method - # return [Group(obj) for obj in self._sync_iter(self._async_group.groups())] - raise NotImplementedError + return tuple(Group(obj) for obj in self._sync_iter(self._async_group.groups())) - def array_keys(self) -> list[str]: - # uncomment with AsyncGroup implements this method - # return self._sync_iter(self._async_group.array_keys) - raise NotImplementedError + def array_keys(self) -> tuple[str, ...]: + return tuple(self._sync_iter(self._async_group.array_keys())) - def arrays(self) -> list[Array]: - # uncomment with AsyncGroup implements this method - # return [Array(obj) for obj in self._sync_iter(self._async_group.arrays)] - raise NotImplementedError + def arrays(self) -> tuple[Array, ...]: + return tuple(Array(obj) for obj in self._sync_iter(self._async_group.arrays())) def tree(self, expand=False, level=None) -> Any: return self._sync(self._async_group.tree(expand=expand, level=level)) diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 941256bdd2..cf5c147c39 100644 --- a/tests/v3/test_group.py +++ 
b/tests/v3/test_group.py @@ -41,17 +41,11 @@ def test_group_members(store_type, request): # add an extra object to the domain of the group. # the list of children should ignore this object. - sync(store.set(f"{path}/extra_object", b"000000")) + sync(store.set(f"{path}/extra_object-1", b"000000")) # add an extra object under a directory-like prefix in the domain of the group. - # this creates an implicit group called implicit_subgroup - sync(store.set(f"{path}/implicit_subgroup/extra_object", b"000000")) - # make the implicit subgroup - members_expected["implicit_subgroup"] = Group( - AsyncGroup( - metadata=GroupMetadata(), - store_path=StorePath(store=store, path=f"{path}/implicit_subgroup"), - ) - ) + # this creates a directory with a random key in it + # this should not show up as a member + sync(store.set(f"{path}/extra_directory/extra_object-2", b"000000")) members_observed = group.members # members are not guaranteed to be ordered, so sort before comparing assert sorted(dict(members_observed)) == sorted(members_expected) From 3499acb6a57c824eb54db30ec5ab497de695060d Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 7 May 2024 09:26:13 -0700 Subject: [PATCH 0516/1078] feature(store): list_* -> AsyncGenerators (#1844) * feature(store): list_* -> AsyncGenerators - add zarr.testing.store module to support downstream use cases - set pytest-asyncio mode to auto - * revert changes to v2/test_storage.py * fix v2 import --- pyproject.toml | 1 + src/zarr/abc/store.py | 15 +- src/zarr/group.py | 16 +- src/zarr/store/local.py | 86 +- src/zarr/store/memory.py | 37 +- src/zarr/testing/__init__.py | 9 + src/zarr/testing/store.py | 81 + src/zarr/v2/storage.py | 2 +- tests/v2/test_storage.py | 4839 +++++++++++++++++----------------- tests/v3/test_storage.py | 18 + 10 files changed, 2550 insertions(+), 2554 deletions(-) create mode 100644 src/zarr/testing/__init__.py create mode 100644 src/zarr/testing/store.py create mode 100644 tests/v3/test_storage.py diff --git a/pyproject.toml b/pyproject.toml index 37a1e817c6..93888a205c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -213,6 +213,7 @@ disallow_untyped_calls = false [tool.pytest.ini_options] +asyncio_mode = "auto" doctest_optionflags = [ "NORMALIZE_WHITESPACE", "ELLIPSIS", diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index ce5de279c4..d92f8d4e2e 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -1,5 +1,6 @@ from abc import abstractmethod, ABC +from collections.abc import AsyncGenerator from typing import List, Tuple, Optional @@ -24,7 +25,7 @@ async def get( @abstractmethod async def get_partial_values( self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[bytes]: + ) -> List[Optional[bytes]]: """Retrieve possibly partial values from given key_ranges. Parameters @@ -106,17 +107,17 @@ def supports_listing(self) -> bool: ... @abstractmethod - async def list(self) -> List[str]: + def list(self) -> AsyncGenerator[str, None]: """Retrieve all keys in the store. Returns ------- - list[str] + AsyncGenerator[str, None] """ ... @abstractmethod - async def list_prefix(self, prefix: str) -> List[str]: + def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]: """Retrieve all keys in the store with a given prefix. Parameters @@ -125,12 +126,12 @@ async def list_prefix(self, prefix: str) -> List[str]: Returns ------- - list[str] + AsyncGenerator[str, None] """ ... 
@abstractmethod - async def list_dir(self, prefix: str) -> List[str]: + def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: """ Retrieve all keys and prefixes with a given prefix and which do not contain the character “/” after the given prefix. @@ -141,6 +142,6 @@ async def list_dir(self, prefix: str) -> List[str]: Returns ------- - list[str] + AsyncGenerator[str, None] """ ... diff --git a/src/zarr/group.py b/src/zarr/group.py index c9b729f4c9..4da059c814 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -320,20 +320,20 @@ async def members(self) -> AsyncGenerator[tuple[str, AsyncArray | AsyncGroup], N ) raise ValueError(msg) - subkeys = await self.store_path.store.list_dir(self.store_path.path) # would be nice to make these special keys accessible programmatically, # and scoped to specific zarr versions - subkeys_filtered = filter(lambda v: v not in ("zarr.json", ".zgroup", ".zattrs"), subkeys) - # is there a better way to schedule this? - for subkey in subkeys_filtered: + _skip_keys = ("zarr.json", ".zgroup", ".zattrs") + async for key in self.store_path.store.list_dir(self.store_path.path): + if key in _skip_keys: + continue try: - yield (subkey, await self.getitem(subkey)) + yield (key, await self.getitem(key)) except KeyError: - # keyerror is raised when `subkey` names an object (in the object storage sense), + # keyerror is raised when `key` names an object (in the object storage sense), # as opposed to a prefix, in the store under the prefix associated with this group - # in which case `subkey` cannot be the name of a sub-array or sub-group. + # in which case `key` cannot be the name of a sub-array or sub-group. logger.warning( - "Object at %s is not recognized as a component of a Zarr hierarchy.", subkey + "Object at %s is not recognized as a component of a Zarr hierarchy.", key ) async def contains(self, member: str) -> bool: diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 73e3c6c0e1..dde28d5214 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -2,6 +2,7 @@ import io import shutil +from collections.abc import AsyncGenerator from pathlib import Path from typing import Union, Optional, List, Tuple @@ -10,8 +11,24 @@ def _get(path: Path, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> bytes: + """ + Fetch a contiguous region of bytes from a file. + Parameters + ---------- + path: Path + The file to read bytes from. + byte_range: Optional[Tuple[int, Optional[int]]] = None + The range of bytes to read. If `byte_range` is `None`, then the entire file will be read. + If `byte_range` is a tuple, the first value specifies the index of the first byte to read, + and the second value specifies the total number of bytes to read. If the total value is + `None`, then the entire file after the first byte will be read. + """ if byte_range is not None: - start = byte_range[0] + if byte_range[0] is None: + start = 0 + else: + start = byte_range[0] + end = (start + byte_range[1]) if byte_range[1] is not None else None else: return path.read_bytes() @@ -84,21 +101,28 @@ async def get( async def get_partial_values( self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[bytes]: + ) -> List[Optional[bytes]]: + """ + Read byte ranges from multiple keys. + Parameters + ---------- + key_ranges: List[Tuple[str, Tuple[int, int]]] + A list of (key, (start, length)) tuples. The first element of the tuple is the name of + the key in storage to fetch bytes from. The second element the tuple defines the byte + range to retrieve. 
These values are arguments to `get`, as this method wraps + concurrent invocation of `get`. + """ args = [] for key, byte_range in key_ranges: assert isinstance(key, str) path = self.root / key - if byte_range is not None: - args.append((_get, path, byte_range[0], byte_range[1])) - else: - args.append((_get, path)) + args.append((_get, path, byte_range)) return await concurrent_map(args, to_thread, limit=None) # TODO: fix limit async def set(self, key: str, value: BytesLike) -> None: assert isinstance(key, str) path = self.root / key - await to_thread(_put, path, value) + await to_thread(_put, path, value, auto_mkdir=self.auto_mkdir) async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: args = [] @@ -122,22 +146,19 @@ async def exists(self, key: str) -> bool: path = self.root / key return await to_thread(path.is_file) - async def list(self) -> List[str]: + async def list(self) -> AsyncGenerator[str, None]: """Retrieve all keys in the store. Returns ------- - list[str] + AsyncGenerator[str, None] """ + to_strip = str(self.root) + "/" + for p in list(self.root.rglob("*")): + if p.is_file(): + yield str(p).replace(to_strip, "") - # Q: do we want to return strings or Paths? - def _list(root: Path) -> List[str]: - files = [str(p) for p in root.rglob("") if p.is_file()] - return files - - return await to_thread(_list, self.root) - - async def list_prefix(self, prefix: str) -> List[str]: + async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]: """Retrieve all keys in the store with a given prefix. Parameters @@ -146,16 +167,15 @@ async def list_prefix(self, prefix: str) -> List[str]: Returns ------- - list[str] + AsyncGenerator[str, None] """ - def _list_prefix(root: Path, prefix: str) -> List[str]: - files = [str(p) for p in (root / prefix).rglob("*") if p.is_file()] - return files + to_strip = str(self.root) + "/" + for p in (self.root / prefix).rglob("*"): + if p.is_file(): + yield str(p).replace(to_strip, "") - return await to_thread(_list_prefix, self.root, prefix) - - async def list_dir(self, prefix: str) -> List[str]: + async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: """ Retrieve all keys and prefixes with a given prefix and which do not contain the character “/” after the given prefix. 
@@ -166,15 +186,15 @@ async def list_dir(self, prefix: str) -> List[str]: Returns ------- - list[str] + AsyncGenerator[str, None] """ - def _list_dir(root: Path, prefix: str) -> List[str]: - base = root / prefix - to_strip = str(base) + "/" - try: - return [str(key).replace(to_strip, "") for key in base.iterdir()] - except (FileNotFoundError, NotADirectoryError): - return [] + base = self.root / prefix + to_strip = str(base) + "/" - return await to_thread(_list_dir, self.root, prefix) + try: + key_iter = base.iterdir() + for key in key_iter: + yield str(key).replace(to_strip, "") + except (FileNotFoundError, NotADirectoryError): + pass diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index 9661b6cea7..2a09bc2dd5 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -1,8 +1,9 @@ from __future__ import annotations +from collections.abc import AsyncGenerator from typing import Optional, MutableMapping, List, Tuple -from zarr.common import BytesLike +from zarr.common import BytesLike, concurrent_map from zarr.abc.store import Store @@ -38,8 +39,9 @@ async def get( async def get_partial_values( self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[bytes]: - raise NotImplementedError + ) -> List[Optional[BytesLike]]: + vals = await concurrent_map(key_ranges, self.get, limit=None) + return vals async def exists(self, key: str) -> bool: return key in self._store_dict @@ -67,20 +69,23 @@ async def delete(self, key: str) -> None: async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: raise NotImplementedError - async def list(self) -> List[str]: - return list(self._store_dict.keys()) + async def list(self) -> AsyncGenerator[str, None]: + for key in self._store_dict: + yield key + + async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]: + for key in self._store_dict: + if key.startswith(prefix): + yield key - async def list_prefix(self, prefix: str) -> List[str]: - return [key for key in self._store_dict if key.startswith(prefix)] + async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: + if prefix.endswith("/"): + prefix = prefix[:-1] - async def list_dir(self, prefix: str) -> List[str]: if prefix == "": - return list({key.split("/", maxsplit=1)[0] for key in self._store_dict}) + for key in self._store_dict: + yield key.split("/", maxsplit=1)[0] else: - return list( - { - key.strip(prefix + "/").split("/")[0] - for key in self._store_dict - if (key.startswith(prefix + "/") and key != prefix) - } - ) + for key in self._store_dict: + if key.startswith(prefix + "/") and key != prefix: + yield key.strip(prefix + "/").split("/")[0] diff --git a/src/zarr/testing/__init__.py b/src/zarr/testing/__init__.py new file mode 100644 index 0000000000..9b622b43cd --- /dev/null +++ b/src/zarr/testing/__init__.py @@ -0,0 +1,9 @@ +import importlib.util +import warnings + +if importlib.util.find_spec("pytest") is not None: + from zarr.testing.store import StoreTests +else: + warnings.warn("pytest not installed, skipping test suite") + +__all__ = ["StoreTests"] diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py new file mode 100644 index 0000000000..601ef7f393 --- /dev/null +++ b/src/zarr/testing/store.py @@ -0,0 +1,81 @@ +import pytest + +from zarr.abc.store import Store + + +class StoreTests: + store_cls: type[Store] + + @pytest.fixture(scope="function") + def store(self) -> Store: + return self.store_cls() + + def test_store_type(self, store: Store) -> None: + assert isinstance(store, Store) + 
assert isinstance(store, self.store_cls) + + def test_store_repr(self, store: Store) -> None: + assert repr(store) + + def test_store_capabilities(self, store: Store) -> None: + assert store.supports_writes + assert store.supports_partial_writes + assert store.supports_listing + + @pytest.mark.parametrize("key", ["c/0", "foo/c/0.0", "foo/0/0"]) + @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) + async def test_set_get_bytes_roundtrip(self, store: Store, key: str, data: bytes) -> None: + await store.set(key, data) + assert await store.get(key) == data + + @pytest.mark.parametrize("key", ["foo/c/0"]) + @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) + async def test_get_partial_values(self, store: Store, key: str, data: bytes) -> None: + # put all of the data + await store.set(key, data) + # read back just part of it + vals = await store.get_partial_values([(key, (0, 2))]) + assert vals == [data[0:2]] + + # read back multiple parts of it at once + vals = await store.get_partial_values([(key, (0, 2)), (key, (2, 4))]) + assert vals == [data[0:2], data[2:4]] + + async def test_exists(self, store: Store) -> None: + assert not await store.exists("foo") + await store.set("foo/zarr.json", b"bar") + assert await store.exists("foo/zarr.json") + + async def test_delete(self, store: Store) -> None: + await store.set("foo/zarr.json", b"bar") + assert await store.exists("foo/zarr.json") + await store.delete("foo/zarr.json") + assert not await store.exists("foo/zarr.json") + + async def test_list(self, store: Store) -> None: + assert [k async for k in store.list()] == [] + await store.set("foo/zarr.json", b"bar") + keys = [k async for k in store.list()] + assert keys == ["foo/zarr.json"], keys + + expected = ["foo/zarr.json"] + for i in range(10): + key = f"foo/c/{i}" + expected.append(key) + await store.set(f"foo/c/{i}", i.to_bytes(length=3, byteorder="little")) + + async def test_list_prefix(self, store: Store) -> None: + # TODO: we currently don't use list_prefix anywhere + pass + + async def test_list_dir(self, store: Store) -> None: + assert [k async for k in store.list_dir("")] == [] + assert [k async for k in store.list_dir("foo")] == [] + await store.set("foo/zarr.json", b"bar") + await store.set("foo/c/1", b"\x01") + + keys = [k async for k in store.list_dir("foo")] + assert keys == ["zarr.json", "c"], keys + + keys = [k async for k in store.list_dir("foo/")] + assert keys == ["zarr.json", "c"], keys diff --git a/src/zarr/v2/storage.py b/src/zarr/v2/storage.py index 56deeeb555..dd0b090a81 100644 --- a/src/zarr/v2/storage.py +++ b/src/zarr/v2/storage.py @@ -1779,7 +1779,7 @@ def migrate_1to2(store): """ # migrate metadata - from zarr import meta_v1 + from zarr.v2 import meta_v1 meta = meta_v1.decode_metadata(store["meta"]) del store["meta"] diff --git a/tests/v2/test_storage.py b/tests/v2/test_storage.py index b6877aa713..17b80e6a5c 100644 --- a/tests/v2/test_storage.py +++ b/tests/v2/test_storage.py @@ -1,96 +1,84 @@ -# import array +import array import atexit - -# import json -# import os -# import pathlib -# import sys +import json +import os +import pathlib +import sys import pickle - -# import shutil +import shutil import tempfile +from contextlib import contextmanager +from pickle import PicklingError +from zipfile import ZipFile -# from contextlib import contextmanager -# from pickle import PicklingError -# from zipfile import ZipFile - -# import numpy as np +import numpy as np import pytest - -# from numpy.testing import assert_array_almost_equal, 
assert_array_equal - -# from numcodecs.compat import ensure_bytes - -# import zarr -# from zarr._storage.store import _get_hierarchy_metadata -# from zarr.codecs import BZ2, AsType, Blosc, Zlib -# from zarr.context import Context -# from zarr.convenience import consolidate_metadata -# from zarr.errors import ContainsArrayError, ContainsGroupError, MetadataError -# from zarr.hierarchy import group -# from zarr.meta import ZARR_FORMAT, decode_array_metadata - -# from zarr.n5 import N5Store, N5FSStore, N5_FORMAT, n5_attrs_key +from numpy.testing import assert_array_almost_equal, assert_array_equal + +from numcodecs.compat import ensure_bytes + +import zarr.v2 +from zarr.v2 import meta_v1 +from zarr.v2.codecs import BZ2, AsType, Blosc, Zlib +from zarr.v2.context import Context +from zarr.v2.convenience import consolidate_metadata +from zarr.v2.errors import ContainsArrayError, ContainsGroupError, MetadataError +from zarr.v2.hierarchy import group +from zarr.v2.meta import ZARR_FORMAT, decode_array_metadata +from zarr.v2.n5 import N5Store, N5FSStore, N5_FORMAT, n5_attrs_key from zarr.v2.storage import ( - # ABSStore, - # ConsolidatedMetadataStore, - # DBMStore, - # DictStore, - # DirectoryStore, - # KVStore, - # LMDBStore, - # LRUStoreCache, - # MemoryStore, - # MongoDBStore, - # NestedDirectoryStore, - # RedisStore, - # SQLiteStore, - # Store, - # TempStore, - # ZipStore, - # array_meta_key, - # atexit_rmglob, + ABSStore, + ConsolidatedMetadataStore, + DBMStore, + DictStore, + DirectoryStore, + KVStore, + LMDBStore, + LRUStoreCache, + MemoryStore, + MongoDBStore, + NestedDirectoryStore, + RedisStore, + SQLiteStore, + Store, + TempStore, + ZipStore, + array_meta_key, + atexit_rmglob, atexit_rmtree, - # attrs_key, - # data_root, - # default_compressor, - # getsize, - # group_meta_key, - # init_array, - # init_group, - # migrate_1to2, - # meta_root, - # normalize_store_arg, + attrs_key, + default_compressor, + getsize, + group_meta_key, + init_array, + init_group, + migrate_1to2, + normalize_store_arg, ) +from zarr.v2.storage import FSStore, rename, listdir +from .util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp +from zarr.v2.util import ConstantMap, json_dumps -# from zarr.storage import FSStore, rename, listdir -# from zarr._storage.v3 import KVStoreV3 -# from zarr.tests.util import CountingDict, have_fsspec, skip_test_env_var, abs_container, mktemp -# from zarr.util import ConstantMap, json_dumps - -from zarr.abc.store import Store -from zarr.store import MemoryStore as KVStore, LocalStore +@contextmanager +def does_not_raise(): + yield -# @contextmanager -# def does_not_raise(): -# yield - -# @pytest.fixture( -# params=[ -# (None, "."), -# (".", "."), -# ("/", "/"), -# ] -# ) -# def dimension_separator_fixture(request): -# return request.param +@pytest.fixture( + params=[ + (None, "."), + (".", "."), + ("/", "/"), + ] +) +def dimension_separator_fixture(request): + return request.param -# def skip_if_nested_chunks(**kwargs): -# if kwargs.get("dimension_separator") == "/": -# pytest.skip("nested chunks are unsupported") +def skip_if_nested_chunks(**kwargs): + if kwargs.get("dimension_separator") == "/": + pytest.skip("nested chunks are unsupported") def test_kvstore_repr(): @@ -101,2535 +89,2408 @@ def test_ensure_store(): class InvalidStore: pass - assert not isinstance(InvalidStore(), Store) + with pytest.raises(ValueError): + Store._ensure_store(InvalidStore()) - # with pytest.raises(ValueError): - # Store._ensure_store(InvalidStore()) - - # # cannot 
initialize with a store from a different Zarr version - # with pytest.raises(ValueError): - # Store._ensure_store(KVStoreV3(dict())) - - # # cannot initialize without a store - # with pytest.raises(ValueError): - # Store._ensure_store(None) + # cannot initialize without a store + with pytest.raises(ValueError): + Store._ensure_store(None) def test_capabilities(): s = KVStore(dict()) - # assert s.is_readable() # Q(JH): do we like these flags more? - # assert s.is_listable() - # assert s.is_erasable() - # assert s.is_writeable() - - assert s.supports_writes - assert s.supports_partial_writes - assert s.supports_listing + assert s.is_readable() + assert s.is_listable() + assert s.is_erasable() + assert s.is_writeable() -# def test_getsize_non_implemented(): -# assert getsize(object()) == -1 +def test_getsize_non_implemented(): + assert getsize(object()) == -1 -# def test_kvstore_eq(): -# assert KVStore(dict()) != dict() +def test_kvstore_eq(): + assert KVStore(dict()) != dict() -# def test_coverage_rename(): -# store = dict() -# store["a"] = 1 -# rename(store, "a", "b") +def test_coverage_rename(): + store = dict() + store["a"] = 1 + rename(store, "a", "b") -# def test_deprecated_listdir_nosotre(): -# store = dict() -# with pytest.warns(UserWarning, match="has no `listdir`"): -# listdir(store) +def test_deprecated_listdir_nosotre(): + store = dict() + with pytest.warns(UserWarning, match="has no `listdir`"): + listdir(store) class StoreTests: """Abstract store tests.""" - # version = 2 + version = 2 root = "" def create_store(self, **kwargs): # pragma: no cover # implement in sub-class raise NotImplementedError - # def test_context_manager(self): - # with self.create_store(): - # pass + def test_context_manager(self): + with self.create_store(): + pass - @pytest.mark.asyncio - async def test_get_set_del_contains(self): + def test_get_set_del_contains(self): store = self.create_store() - # test exists, get, set + # test __contains__, __getitem__, __setitem__ key = self.root + "foo" - assert not await store.exists(key) - assert await store.get(key) is None - await store.set(key, b"bar") - assert await store.exists(key) - assert b"bar" == await store.get(key) - - # test delete (optional) + assert key not in store + with pytest.raises(KeyError): + # noinspection PyStatementEffect + store[key] + store[key] = b"bar" + assert key in store + assert b"bar" == ensure_bytes(store[key]) + + # test __delitem__ (optional) try: - await store.delete(key) + del store[key] except NotImplementedError: pass else: - assert not await store.exists(key) - assert await store.get(key) is None + assert key not in store + with pytest.raises(KeyError): + # noinspection PyStatementEffect + store[key] + with pytest.raises(KeyError): + # noinspection PyStatementEffect + del store[key] - assert await store.delete(key) is None + store.close() - # store.close() - - @pytest.mark.asyncio - async def test_set_invalid_content(self): + def test_set_invalid_content(self): store = self.create_store() with pytest.raises(TypeError): - await store.set(self.root + "baz", list(range(5))) - - # store.close() - - # def test_clear(self): - # store = self.create_store() - # store[self.root + "foo"] = b"bar" - # store[self.root + "baz"] = b"qux" - # assert len(store) == 2 - # store.clear() - # assert len(store) == 0 - # assert self.root + "foo" not in store - # assert self.root + "baz" not in store - - # store.close() - - # def test_pop(self): - # store = self.create_store() - # store[self.root + "foo"] = b"bar" - # store[self.root + "baz"] = 
b"qux" - # assert len(store) == 2 - # v = store.pop(self.root + "foo") - # assert ensure_bytes(v) == b"bar" - # assert len(store) == 1 - # v = store.pop(self.root + "baz") - # assert ensure_bytes(v) == b"qux" - # assert len(store) == 0 - # with pytest.raises(KeyError): - # store.pop(self.root + "xxx") - # v = store.pop(self.root + "xxx", b"default") - # assert v == b"default" - # v = store.pop(self.root + "xxx", b"") - # assert v == b"" - # v = store.pop(self.root + "xxx", None) - # assert v is None - - # store.close() - - # def test_popitem(self): - # store = self.create_store() - # store[self.root + "foo"] = b"bar" - # k, v = store.popitem() - # assert k == self.root + "foo" - # assert ensure_bytes(v) == b"bar" - # assert len(store) == 0 - # with pytest.raises(KeyError): - # store.popitem() - - # store.close() - - @pytest.mark.asyncio - async def test_writeable_values(self): + store[self.root + "baz"] = list(range(5)) + + store.close() + + def test_clear(self): store = self.create_store() + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"qux" + assert len(store) == 2 + store.clear() + assert len(store) == 0 + assert self.root + "foo" not in store + assert self.root + "baz" not in store - # set should accept any value that implements buffer interface - await store.set(self.root + "foo1", b"bar") - await store.set(self.root + "foo2", bytearray(b"bar")) - # TODO(v3): revisit passing numpy arrays directly to the store - # await store.set(self.root + "foo3", array.array("B", b"bar")) - # await store.set(self.root + "foo4", np.frombuffer(b"bar", dtype="u1")) - - # store.close() - - # def test_update(self): - # store = self.create_store() - # assert self.root + "foo" not in store - # assert self.root + "baz" not in store - - # if self.version == 2: - # store.update(foo=b"bar", baz=b"quux") - # else: - # kv = {self.root + "foo": b"bar", self.root + "baz": b"quux"} - # store.update(kv) - - # assert b"bar" == ensure_bytes(store[self.root + "foo"]) - # assert b"quux" == ensure_bytes(store[self.root + "baz"]) - - # store.close() - - # def test_iterators(self): - # store = self.create_store() - - # # test iterator methods on empty store - # assert 0 == len(store) - # assert set() == set(store) - # assert set() == set(store.keys()) - # assert set() == set(store.values()) - # assert set() == set(store.items()) - - # # setup some values - # store[self.root + "a"] = b"aaa" - # store[self.root + "b"] = b"bbb" - # store[self.root + "c/d"] = b"ddd" - # store[self.root + "c/e/f"] = b"fff" - - # # test iterators on store with data - # assert 4 == len(store) - # expected = set(self.root + k for k in ["a", "b", "c/d", "c/e/f"]) - # assert expected == set(store) - # assert expected == set(store.keys()) - # assert {b"aaa", b"bbb", b"ddd", b"fff"} == set(map(ensure_bytes, store.values())) - # assert { - # (self.root + "a", b"aaa"), - # (self.root + "b", b"bbb"), - # (self.root + "c/d", b"ddd"), - # (self.root + "c/e/f", b"fff"), - # } == set(map(lambda kv: (kv[0], ensure_bytes(kv[1])), store.items())) - - # store.close() - - @pytest.mark.asyncio - async def test_pickle(self): + store.close() + + def test_pop(self): + store = self.create_store() + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"qux" + assert len(store) == 2 + v = store.pop(self.root + "foo") + assert ensure_bytes(v) == b"bar" + assert len(store) == 1 + v = store.pop(self.root + "baz") + assert ensure_bytes(v) == b"qux" + assert len(store) == 0 + with pytest.raises(KeyError): + store.pop(self.root + "xxx") + v = 
store.pop(self.root + "xxx", b"default") + assert v == b"default" + v = store.pop(self.root + "xxx", b"") + assert v == b"" + v = store.pop(self.root + "xxx", None) + assert v is None + + store.close() + + def test_popitem(self): + store = self.create_store() + store[self.root + "foo"] = b"bar" + k, v = store.popitem() + assert k == self.root + "foo" + assert ensure_bytes(v) == b"bar" + assert len(store) == 0 + with pytest.raises(KeyError): + store.popitem() + + store.close() + + def test_writeable_values(self): + store = self.create_store() + + # __setitem__ should accept any value that implements buffer interface + store[self.root + "foo1"] = b"bar" + store[self.root + "foo2"] = bytearray(b"bar") + store[self.root + "foo3"] = array.array("B", b"bar") + store[self.root + "foo4"] = np.frombuffer(b"bar", dtype="u1") + + store.close() + + def test_update(self): + store = self.create_store() + assert self.root + "foo" not in store + assert self.root + "baz" not in store + + if self.version == 2: + store.update(foo=b"bar", baz=b"quux") + else: + kv = {self.root + "foo": b"bar", self.root + "baz": b"quux"} + store.update(kv) + + assert b"bar" == ensure_bytes(store[self.root + "foo"]) + assert b"quux" == ensure_bytes(store[self.root + "baz"]) + + store.close() + + def test_iterators(self): + store = self.create_store() + + # test iterator methods on empty store + assert 0 == len(store) + assert set() == set(store) + assert set() == set(store.keys()) + assert set() == set(store.values()) + assert set() == set(store.items()) + + # setup some values + store[self.root + "a"] = b"aaa" + store[self.root + "b"] = b"bbb" + store[self.root + "c/d"] = b"ddd" + store[self.root + "c/e/f"] = b"fff" + + # test iterators on store with data + assert 4 == len(store) + expected = set(self.root + k for k in ["a", "b", "c/d", "c/e/f"]) + assert expected == set(store) + assert expected == set(store.keys()) + assert {b"aaa", b"bbb", b"ddd", b"fff"} == set(map(ensure_bytes, store.values())) + assert { + (self.root + "a", b"aaa"), + (self.root + "b", b"bbb"), + (self.root + "c/d", b"ddd"), + (self.root + "c/e/f", b"fff"), + } == set(map(lambda kv: (kv[0], ensure_bytes(kv[1])), store.items())) + + store.close() + + def test_pickle(self): # setup store store = self.create_store() - await store.set(self.root + "foo", b"bar") - await store.set(self.root + "baz", b"quux") - # n = len(store) - keys = sorted(await store.list()) + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"quux" + n = len(store) + keys = sorted(store.keys()) # round-trip through pickle dump = pickle.dumps(store) - # # some stores cannot be opened twice at the same time, need to close - # # store before can round-trip through pickle - # store.close() + # some stores cannot be opened twice at the same time, need to close + # store before can round-trip through pickle + store.close() # check can still pickle after close assert dump == pickle.dumps(store) store2 = pickle.loads(dump) # verify - # assert n == len(store2) - assert keys == sorted(await store2.list()) - assert await store2.get(self.root + "foo") == b"bar" - assert await store2.get(self.root + "baz") == b"quux" - - # store2.close() - - # def test_getsize(self): - # store = self.create_store() - # if isinstance(store, dict) or hasattr(store, "getsize"): - # assert 0 == getsize(store) - # store["foo"] = b"x" - # assert 1 == getsize(store) - # assert 1 == getsize(store, "foo") - # store["bar"] = b"yy" - # assert 3 == getsize(store) - # assert 2 == getsize(store, "bar") - # 
store["baz"] = bytearray(b"zzz") - # assert 6 == getsize(store) - # assert 3 == getsize(store, "baz") - # store["quux"] = array.array("B", b"zzzz") - # assert 10 == getsize(store) - # assert 4 == getsize(store, "quux") - # store["spong"] = np.frombuffer(b"zzzzz", dtype="u1") - # assert 15 == getsize(store) - # assert 5 == getsize(store, "spong") - - # store.close() - - # # noinspection PyStatementEffect - @pytest.mark.asyncio - async def test_hierarchy(self): + assert n == len(store2) + assert keys == sorted(store2.keys()) + assert b"bar" == ensure_bytes(store2[self.root + "foo"]) + assert b"quux" == ensure_bytes(store2[self.root + "baz"]) + + store2.close() + + def test_getsize(self): + store = self.create_store() + if isinstance(store, dict) or hasattr(store, "getsize"): + assert 0 == getsize(store) + store["foo"] = b"x" + assert 1 == getsize(store) + assert 1 == getsize(store, "foo") + store["bar"] = b"yy" + assert 3 == getsize(store) + assert 2 == getsize(store, "bar") + store["baz"] = bytearray(b"zzz") + assert 6 == getsize(store) + assert 3 == getsize(store, "baz") + store["quux"] = array.array("B", b"zzzz") + assert 10 == getsize(store) + assert 4 == getsize(store, "quux") + store["spong"] = np.frombuffer(b"zzzzz", dtype="u1") + assert 15 == getsize(store) + assert 5 == getsize(store, "spong") + + store.close() + + # noinspection PyStatementEffect + def test_hierarchy(self): # setup store = self.create_store() - await store.set(self.root + "a", b"aaa") - await store.set(self.root + "b", b"bbb") - await store.set(self.root + "c/d", b"ddd") - await store.set(self.root + "c/e/f", b"fff") - await store.set(self.root + "c/e/g", b"ggg") + store[self.root + "a"] = b"aaa" + store[self.root + "b"] = b"bbb" + store[self.root + "c/d"] = b"ddd" + store[self.root + "c/e/f"] = b"fff" + store[self.root + "c/e/g"] = b"ggg" # check keys - assert await store.exists(self.root + "a") - assert await store.exists(self.root + "b") - assert await store.exists(self.root + "c/d") - assert await store.exists(self.root + "c/e/f") - assert await store.exists(self.root + "c/e/g") - assert not await store.exists(self.root + "c") - assert not await store.exists(self.root + "c/") - assert not await store.exists(self.root + "c/e") - assert not await store.exists(self.root + "c/e/") - assert not await store.exists(self.root + "c/d/x") - - # check get - # with pytest.raises(KeyError): - # store[self.root + "c"] - assert await store.get(self.root + "c") is None - - # with pytest.raises(KeyError): - # store[self.root + "c/e"] - assert await store.get(self.root + "c/e") is None - # with pytest.raises(KeyError): - # store[self.root + "c/d/x"] - assert await store.get(self.root + "c/d/x") is None - - # # test getsize (optional) - # if hasattr(store, "getsize"): - # # TODO: proper behavior of getsize? - # # v3 returns size of all nested arrays, not just the - # # size of the arrays in the current folder. 
- # if self.version == 2: - # assert 6 == store.getsize() - # else: - # assert 15 == store.getsize() - # assert 3 == store.getsize("a") - # assert 3 == store.getsize("b") - # if self.version == 2: - # assert 3 == store.getsize("c") - # else: - # assert 9 == store.getsize("c") - # assert 3 == store.getsize("c/d") - # assert 6 == store.getsize("c/e") - # assert 3 == store.getsize("c/e/f") - # assert 3 == store.getsize("c/e/g") - # # non-existent paths - # assert 0 == store.getsize("x") - # assert 0 == store.getsize("a/x") - # assert 0 == store.getsize("c/x") - # assert 0 == store.getsize("c/x/y") - # assert 0 == store.getsize("c/d/y") - # assert 0 == store.getsize("c/d/y/z") - - # # access item via full path - # assert 3 == store.getsize(self.root + "a") - - # test list_dir (optional) - if store.supports_listing: - assert set(await store.list_dir(self.root)) == {"a", "b", "c"} - assert set(await store.list_dir(self.root + "c")) == {"d", "e"} - assert set(await store.list_dir(self.root + "c/e")) == {"f", "g"} + assert self.root + "a" in store + assert self.root + "b" in store + assert self.root + "c/d" in store + assert self.root + "c/e/f" in store + assert self.root + "c/e/g" in store + assert self.root + "c" not in store + assert self.root + "c/" not in store + assert self.root + "c/e" not in store + assert self.root + "c/e/" not in store + assert self.root + "c/d/x" not in store + + # check __getitem__ + with pytest.raises(KeyError): + store[self.root + "c"] + with pytest.raises(KeyError): + store[self.root + "c/e"] + with pytest.raises(KeyError): + store[self.root + "c/d/x"] + + # test getsize (optional) + if hasattr(store, "getsize"): + # TODO: proper behavior of getsize? + # v3 returns size of all nested arrays, not just the + # size of the arrays in the current folder. 
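The getsize assertions just below encode a subtle rule: v2 counts only the bytes of values sitting directly under a prefix, while v3 sums the whole subtree. As a point of reference, here is a minimal sketch of the v2-style accounting they describe, using a plain dict in place of a real store (the `getsize` helper and its layout are illustrative only, not zarr's implementation):

store = {
    "a": b"aaa",        # 3 bytes
    "b": b"bbb",        # 3 bytes
    "c/d": b"ddd",      # 3 bytes
    "c/e/f": b"fff",    # 3 bytes
    "c/e/g": b"ggg",    # 3 bytes
}

def getsize(store, path=""):
    # Sum the byte lengths of values that sit *directly* under `path`
    # (children only, not grandchildren), mirroring the v2 behaviour
    # asserted below: getsize() == 6 because only "a" and "b" are
    # immediate members of the root.
    prefix = path + "/" if path else ""
    total = 0
    for key, value in store.items():
        if key == path:
            return len(value)  # leaf: report its own size
        if key.startswith(prefix) and "/" not in key[len(prefix):]:
            total += len(value)
    return total

assert getsize(store) == 6         # "a" + "b"; "c" is a directory
assert getsize(store, "c") == 3    # only "c/d" is a direct child
assert getsize(store, "c/e") == 6  # "c/e/f" + "c/e/g"
assert getsize(store, "a") == 3    # leaf value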
+ if self.version == 2: + assert 6 == store.getsize() + else: + assert 15 == store.getsize() + assert 3 == store.getsize("a") + assert 3 == store.getsize("b") + if self.version == 2: + assert 3 == store.getsize("c") + else: + assert 9 == store.getsize("c") + assert 3 == store.getsize("c/d") + assert 6 == store.getsize("c/e") + assert 3 == store.getsize("c/e/f") + assert 3 == store.getsize("c/e/g") + # non-existent paths + assert 0 == store.getsize("x") + assert 0 == store.getsize("a/x") + assert 0 == store.getsize("c/x") + assert 0 == store.getsize("c/x/y") + assert 0 == store.getsize("c/d/y") + assert 0 == store.getsize("c/d/y/z") + + # access item via full path + assert 3 == store.getsize(self.root + "a") + + # test listdir (optional) + if hasattr(store, "listdir"): + assert {"a", "b", "c"} == set(store.listdir(self.root)) + assert {"d", "e"} == set(store.listdir(self.root + "c")) + assert {"f", "g"} == set(store.listdir(self.root + "c/e")) # no exception raised if path does not exist or is leaf - assert await store.list_dir(self.root + "x") == [] - assert await store.list_dir(self.root + "a/x") == [] - assert await store.list_dir(self.root + "c/x") == [] - assert await store.list_dir(self.root + "c/x/y") == [] - assert await store.list_dir(self.root + "c/d/y") == [] - assert await store.list_dir(self.root + "c/d/y/z") == [] - assert await store.list_dir(self.root + "c/e/f") == [] - - -# # test rename (optional) -# if store.is_erasable(): -# store.rename("c/e", "c/e2") -# assert self.root + "c/d" in store -# assert self.root + "c/e" not in store -# assert self.root + "c/e/f" not in store -# assert self.root + "c/e/g" not in store -# assert self.root + "c/e2" not in store -# assert self.root + "c/e2/f" in store -# assert self.root + "c/e2/g" in store -# store.rename("c/e2", "c/e") -# assert self.root + "c/d" in store -# assert self.root + "c/e2" not in store -# assert self.root + "c/e2/f" not in store -# assert self.root + "c/e2/g" not in store -# assert self.root + "c/e" not in store -# assert self.root + "c/e/f" in store -# assert self.root + "c/e/g" in store -# store.rename("c", "c1/c2/c3") -# assert self.root + "a" in store -# assert self.root + "c" not in store -# assert self.root + "c/d" not in store -# assert self.root + "c/e" not in store -# assert self.root + "c/e/f" not in store -# assert self.root + "c/e/g" not in store -# assert self.root + "c1" not in store -# assert self.root + "c1/c2" not in store -# assert self.root + "c1/c2/c3" not in store -# assert self.root + "c1/c2/c3/d" in store -# assert self.root + "c1/c2/c3/e" not in store -# assert self.root + "c1/c2/c3/e/f" in store -# assert self.root + "c1/c2/c3/e/g" in store -# store.rename("c1/c2/c3", "c") -# assert self.root + "c" not in store -# assert self.root + "c/d" in store -# assert self.root + "c/e" not in store -# assert self.root + "c/e/f" in store -# assert self.root + "c/e/g" in store -# assert self.root + "c1" not in store -# assert self.root + "c1/c2" not in store -# assert self.root + "c1/c2/c3" not in store -# assert self.root + "c1/c2/c3/d" not in store -# assert self.root + "c1/c2/c3/e" not in store -# assert self.root + "c1/c2/c3/e/f" not in store -# assert self.root + "c1/c2/c3/e/g" not in store - -# # test rmdir (optional) -# store.rmdir("c/e") -# assert self.root + "c/d" in store -# assert self.root + "c/e/f" not in store -# assert self.root + "c/e/g" not in store -# store.rmdir("c") -# assert self.root + "c/d" not in store -# store.rmdir() -# assert self.root + "a" not in store -# assert self.root + 
"b" not in store -# store[self.root + "a"] = b"aaa" -# store[self.root + "c/d"] = b"ddd" -# store[self.root + "c/e/f"] = b"fff" -# # no exceptions raised if path does not exist or is leaf -# store.rmdir("x") -# store.rmdir("a/x") -# store.rmdir("c/x") -# store.rmdir("c/x/y") -# store.rmdir("c/d/y") -# store.rmdir("c/d/y/z") -# store.rmdir("c/e/f") -# assert self.root + "a" in store -# assert self.root + "c/d" in store -# assert self.root + "c/e/f" in store - -# store.close() - -# def test_init_array(self, dimension_separator_fixture): - -# pass_dim_sep, want_dim_sep = dimension_separator_fixture - -# store = self.create_store(dimension_separator=pass_dim_sep) -# init_array(store, shape=1000, chunks=100) - -# # check metadata -# assert array_meta_key in store -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# assert default_compressor.get_config() == meta["compressor"] -# assert meta["fill_value"] is None -# # Missing MUST be assumed to be "." -# assert meta.get("dimension_separator", ".") is want_dim_sep - -# store.close() - -# def test_init_array_overwrite(self): -# self._test_init_array_overwrite("F") - -# def test_init_array_overwrite_path(self): -# self._test_init_array_overwrite_path("F") - -# def test_init_array_overwrite_chunk_store(self): -# self._test_init_array_overwrite_chunk_store("F") - -# def test_init_group_overwrite(self): -# self._test_init_group_overwrite("F") - -# def test_init_group_overwrite_path(self): -# self._test_init_group_overwrite_path("F") - -# def test_init_group_overwrite_chunk_store(self): -# self._test_init_group_overwrite_chunk_store("F") - -# def _test_init_array_overwrite(self, order): -# # setup -# store = self.create_store() -# if self.version == 2: -# path = None -# mkey = array_meta_key -# meta = dict( -# shape=(2000,), -# chunks=(200,), -# dtype=np.dtype("u1"), -# compressor=Zlib(1).get_config(), -# fill_value=0, -# order=order, -# filters=None, -# ) -# else: -# path = "arr1" # no default, have to specify for v3 -# mkey = meta_root + path + ".array.json" -# meta = dict( -# shape=(2000,), -# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), -# data_type=np.dtype("u1"), -# compressor=Zlib(1), -# fill_value=0, -# chunk_memory_layout=order, -# filters=None, -# ) -# store[mkey] = store._metadata_class.encode_array_metadata(meta) - -# # don't overwrite (default) -# with pytest.raises(ContainsArrayError): -# init_array(store, shape=1000, chunks=100, path=path) - -# # do overwrite -# try: -# init_array(store, shape=1000, chunks=100, dtype="i4", overwrite=True, path=path) -# except NotImplementedError: -# pass -# else: -# assert mkey in store -# meta = store._metadata_class.decode_array_metadata(store[mkey]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (100,) == meta["chunks"] -# assert np.dtype("i4") == meta["dtype"] -# else: -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype("i4") == meta["data_type"] -# assert (1000,) == meta["shape"] - -# store.close() - -# def test_init_array_path(self): -# path = "foo/bar" -# store = self.create_store() -# init_array(store, shape=1000, chunks=100, path=path) - -# # check metadata -# if self.version == 2: -# mkey = path + "/" + array_meta_key -# else: -# mkey = meta_root + path + ".array.json" -# assert mkey in store -# meta = 
store._metadata_class.decode_array_metadata(store[mkey]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# assert default_compressor.get_config() == meta["compressor"] -# else: -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype(None) == meta["data_type"] -# assert default_compressor == meta["compressor"] -# assert (1000,) == meta["shape"] -# assert meta["fill_value"] is None - -# store.close() - -# def _test_init_array_overwrite_path(self, order): -# # setup -# path = "foo/bar" -# store = self.create_store() -# if self.version == 2: -# mkey = path + "/" + array_meta_key -# meta = dict( -# shape=(2000,), -# chunks=(200,), -# dtype=np.dtype("u1"), -# compressor=Zlib(1).get_config(), -# fill_value=0, -# order=order, -# filters=None, -# ) -# else: -# mkey = meta_root + path + ".array.json" -# meta = dict( -# shape=(2000,), -# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), -# data_type=np.dtype("u1"), -# compressor=Zlib(1), -# fill_value=0, -# chunk_memory_layout=order, -# filters=None, -# ) -# store[mkey] = store._metadata_class.encode_array_metadata(meta) - -# # don't overwrite -# with pytest.raises(ContainsArrayError): -# init_array(store, shape=1000, chunks=100, path=path) - -# # do overwrite -# try: -# init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) -# except NotImplementedError: -# pass -# else: -# if self.version == 2: -# assert group_meta_key in store -# assert array_meta_key not in store -# assert mkey in store -# # should have been overwritten -# meta = store._metadata_class.decode_array_metadata(store[mkey]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (100,) == meta["chunks"] -# assert np.dtype("i4") == meta["dtype"] -# else: -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype("i4") == meta["data_type"] -# assert (1000,) == meta["shape"] - -# store.close() - -# def test_init_array_overwrite_group(self): -# # setup -# path = "foo/bar" -# store = self.create_store() -# if self.version == 2: -# array_key = path + "/" + array_meta_key -# group_key = path + "/" + group_meta_key -# else: -# array_key = meta_root + path + ".array.json" -# group_key = meta_root + path + ".group.json" -# store[group_key] = store._metadata_class.encode_group_metadata() - -# # don't overwrite -# with pytest.raises(ContainsGroupError): -# init_array(store, shape=1000, chunks=100, path=path) - -# # do overwrite -# try: -# init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) -# except NotImplementedError: -# pass -# else: -# assert group_key not in store -# assert array_key in store -# meta = store._metadata_class.decode_array_metadata(store[array_key]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (100,) == meta["chunks"] -# assert np.dtype("i4") == meta["dtype"] -# else: -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype("i4") == meta["data_type"] -# assert (1000,) == meta["shape"] - -# store.close() - -# def _test_init_array_overwrite_chunk_store(self, order): -# # setup -# store = self.create_store() -# chunk_store = self.create_store() - -# if self.version == 2: -# path = None -# data_path = "" -# mkey = array_meta_key -# meta = dict( -# shape=(2000,), -# chunks=(200,), -# dtype=np.dtype("u1"), -# compressor=None, -# fill_value=0, -# filters=None, -# order=order, -# ) -# else: -# path = "arr1" -# 
data_path = data_root + "arr1/" -# mkey = meta_root + path + ".array.json" -# meta = dict( -# shape=(2000,), -# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), -# data_type=np.dtype("u1"), -# compressor=None, -# fill_value=0, -# filters=None, -# chunk_memory_layout=order, -# ) - -# store[mkey] = store._metadata_class.encode_array_metadata(meta) - -# chunk_store[data_path + "0"] = b"aaa" -# chunk_store[data_path + "1"] = b"bbb" - -# # don't overwrite (default) -# with pytest.raises(ContainsArrayError): -# init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) - -# # do overwrite -# try: -# init_array( -# store, -# path=path, -# shape=1000, -# chunks=100, -# dtype="i4", -# overwrite=True, -# chunk_store=chunk_store, -# ) -# except NotImplementedError: -# pass -# else: -# assert mkey in store -# meta = store._metadata_class.decode_array_metadata(store[mkey]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (100,) == meta["chunks"] -# assert np.dtype("i4") == meta["dtype"] -# else: -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype("i4") == meta["data_type"] -# assert (1000,) == meta["shape"] -# assert data_path + "0" not in chunk_store -# assert data_path + "1" not in chunk_store - -# store.close() -# chunk_store.close() - -# def test_init_array_compat(self): -# store = self.create_store() -# if self.version == 2: -# path = None -# mkey = array_meta_key -# else: -# path = "arr1" -# mkey = meta_root + path + ".array.json" -# init_array(store, path=path, shape=1000, chunks=100, compressor="none") -# meta = store._metadata_class.decode_array_metadata(store[mkey]) -# if self.version == 2: -# assert meta["compressor"] is None -# else: -# assert "compressor" not in meta -# store.close() - -# def test_init_group(self): -# store = self.create_store() -# if self.version == 2: -# path = None -# mkey = group_meta_key -# else: -# path = "foo" -# mkey = meta_root + path + ".group.json" -# init_group(store, path=path) - -# # check metadata -# assert mkey in store -# meta = store._metadata_class.decode_group_metadata(store[mkey]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# else: -# assert meta == {"attributes": {}} - -# store.close() - -# def _test_init_group_overwrite(self, order): -# if self.version == 3: -# pytest.skip("In v3 array and group names cannot overlap") -# # setup -# store = self.create_store() -# store[array_meta_key] = store._metadata_class.encode_array_metadata( -# dict( -# shape=(2000,), -# chunks=(200,), -# dtype=np.dtype("u1"), -# compressor=None, -# fill_value=0, -# order=order, -# filters=None, -# ) -# ) - -# # don't overwrite array (default) -# with pytest.raises(ContainsArrayError): -# init_group(store) - -# # do overwrite -# try: -# init_group(store, overwrite=True) -# except NotImplementedError: -# pass -# else: -# assert array_meta_key not in store -# assert group_meta_key in store -# meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] - -# # don't overwrite group -# with pytest.raises(ValueError): -# init_group(store) - -# store.close() - -# def _test_init_group_overwrite_path(self, order): -# # setup -# path = "foo/bar" -# store = self.create_store() -# if self.version == 2: -# meta = dict( -# shape=(2000,), -# chunks=(200,), -# dtype=np.dtype("u1"), -# compressor=None, -# fill_value=0, -# order=order, -# filters=None, -# ) -# array_key = path + "/" + array_meta_key -# group_key = path + 
"/" + group_meta_key -# else: -# meta = dict( -# shape=(2000,), -# chunk_grid=dict(type="regular", chunk_shape=(200,), separator=("/")), -# data_type=np.dtype("u1"), -# compressor=None, -# fill_value=0, -# filters=None, -# chunk_memory_layout=order, -# ) -# array_key = meta_root + path + ".array.json" -# group_key = meta_root + path + ".group.json" -# store[array_key] = store._metadata_class.encode_array_metadata(meta) - -# # don't overwrite -# with pytest.raises(ValueError): -# init_group(store, path=path) - -# # do overwrite -# try: -# init_group(store, overwrite=True, path=path) -# except NotImplementedError: -# pass -# else: -# if self.version == 2: -# assert array_meta_key not in store -# assert group_meta_key in store -# assert array_key not in store -# assert group_key in store -# # should have been overwritten -# meta = store._metadata_class.decode_group_metadata(store[group_key]) -# if self.version == 2: -# assert ZARR_FORMAT == meta["zarr_format"] -# else: -# assert meta == {"attributes": {}} - -# store.close() - -# def _test_init_group_overwrite_chunk_store(self, order): -# if self.version == 3: -# pytest.skip("In v3 array and group names cannot overlap") -# # setup -# store = self.create_store() -# chunk_store = self.create_store() -# store[array_meta_key] = store._metadata_class.encode_array_metadata( -# dict( -# shape=(2000,), -# chunks=(200,), -# dtype=np.dtype("u1"), -# compressor=None, -# fill_value=0, -# filters=None, -# order=order, -# ) -# ) -# chunk_store["foo"] = b"bar" -# chunk_store["baz"] = b"quux" - -# # don't overwrite array (default) -# with pytest.raises(ValueError): -# init_group(store, chunk_store=chunk_store) - -# # do overwrite -# try: -# init_group(store, overwrite=True, chunk_store=chunk_store) -# except NotImplementedError: -# pass -# else: -# assert array_meta_key not in store -# assert group_meta_key in store -# meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert "foo" not in chunk_store -# assert "baz" not in chunk_store - -# # don't overwrite group -# with pytest.raises(ValueError): -# init_group(store) - -# store.close() -# chunk_store.close() + assert [] == store.listdir(self.root + "x") + assert [] == store.listdir(self.root + "a/x") + assert [] == store.listdir(self.root + "c/x") + assert [] == store.listdir(self.root + "c/x/y") + assert [] == store.listdir(self.root + "c/d/y") + assert [] == store.listdir(self.root + "c/d/y/z") + assert [] == store.listdir(self.root + "c/e/f") + + # test rename (optional) + if store.is_erasable(): + store.rename("c/e", "c/e2") + assert self.root + "c/d" in store + assert self.root + "c/e" not in store + assert self.root + "c/e/f" not in store + assert self.root + "c/e/g" not in store + assert self.root + "c/e2" not in store + assert self.root + "c/e2/f" in store + assert self.root + "c/e2/g" in store + store.rename("c/e2", "c/e") + assert self.root + "c/d" in store + assert self.root + "c/e2" not in store + assert self.root + "c/e2/f" not in store + assert self.root + "c/e2/g" not in store + assert self.root + "c/e" not in store + assert self.root + "c/e/f" in store + assert self.root + "c/e/g" in store + store.rename("c", "c1/c2/c3") + assert self.root + "a" in store + assert self.root + "c" not in store + assert self.root + "c/d" not in store + assert self.root + "c/e" not in store + assert self.root + "c/e/f" not in store + assert self.root + "c/e/g" not in store + assert self.root + "c1" not in store + assert self.root + "c1/c2" not 
in store + assert self.root + "c1/c2/c3" not in store + assert self.root + "c1/c2/c3/d" in store + assert self.root + "c1/c2/c3/e" not in store + assert self.root + "c1/c2/c3/e/f" in store + assert self.root + "c1/c2/c3/e/g" in store + store.rename("c1/c2/c3", "c") + assert self.root + "c" not in store + assert self.root + "c/d" in store + assert self.root + "c/e" not in store + assert self.root + "c/e/f" in store + assert self.root + "c/e/g" in store + assert self.root + "c1" not in store + assert self.root + "c1/c2" not in store + assert self.root + "c1/c2/c3" not in store + assert self.root + "c1/c2/c3/d" not in store + assert self.root + "c1/c2/c3/e" not in store + assert self.root + "c1/c2/c3/e/f" not in store + assert self.root + "c1/c2/c3/e/g" not in store + + # test rmdir (optional) + store.rmdir("c/e") + assert self.root + "c/d" in store + assert self.root + "c/e/f" not in store + assert self.root + "c/e/g" not in store + store.rmdir("c") + assert self.root + "c/d" not in store + store.rmdir() + assert self.root + "a" not in store + assert self.root + "b" not in store + store[self.root + "a"] = b"aaa" + store[self.root + "c/d"] = b"ddd" + store[self.root + "c/e/f"] = b"fff" + # no exceptions raised if path does not exist or is leaf + store.rmdir("x") + store.rmdir("a/x") + store.rmdir("c/x") + store.rmdir("c/x/y") + store.rmdir("c/d/y") + store.rmdir("c/d/y/z") + store.rmdir("c/e/f") + assert self.root + "a" in store + assert self.root + "c/d" in store + assert self.root + "c/e/f" in store + + store.close() + + def test_init_array(self, dimension_separator_fixture): + pass_dim_sep, want_dim_sep = dimension_separator_fixture + + store = self.create_store(dimension_separator=pass_dim_sep) + init_array(store, shape=1000, chunks=100) + + # check metadata + assert array_meta_key in store + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + assert default_compressor.get_config() == meta["compressor"] + assert meta["fill_value"] is None + # Missing MUST be assumed to be "." 
+ assert meta.get("dimension_separator", ".") is want_dim_sep + + store.close() + + def test_init_array_overwrite(self): + self._test_init_array_overwrite("F") + + def test_init_array_overwrite_path(self): + self._test_init_array_overwrite_path("F") + + def test_init_array_overwrite_chunk_store(self): + self._test_init_array_overwrite_chunk_store("F") + + def test_init_group_overwrite(self): + self._test_init_group_overwrite("F") + + def test_init_group_overwrite_path(self): + self._test_init_group_overwrite_path("F") + + def test_init_group_overwrite_chunk_store(self): + self._test_init_group_overwrite_chunk_store("F") + + def _test_init_array_overwrite(self, order): + # setup + store = self.create_store() + path = None + mkey = array_meta_key + meta = dict( + shape=(2000,), + chunks=(200,), + dtype=np.dtype("u1"), + compressor=Zlib(1).get_config(), + fill_value=0, + order=order, + filters=None, + ) + + store[mkey] = store._metadata_class.encode_array_metadata(meta) + + # don't overwrite (default) + with pytest.raises(ContainsArrayError): + init_array(store, shape=1000, chunks=100, path=path) + + # do overwrite + try: + init_array(store, shape=1000, chunks=100, dtype="i4", overwrite=True, path=path) + except NotImplementedError: + pass + else: + assert mkey in store + meta = store._metadata_class.decode_array_metadata(store[mkey]) + if self.version == 2: + assert ZARR_FORMAT == meta["zarr_format"] + assert (100,) == meta["chunks"] + assert np.dtype("i4") == meta["dtype"] + else: + assert (100,) == meta["chunk_grid"]["chunk_shape"] + assert np.dtype("i4") == meta["data_type"] + assert (1000,) == meta["shape"] + + store.close() + + def test_init_array_path(self): + path = "foo/bar" + store = self.create_store() + init_array(store, shape=1000, chunks=100, path=path) + + # check metadata + mkey = path + "/" + array_meta_key + assert mkey in store + meta = store._metadata_class.decode_array_metadata(store[mkey]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + assert default_compressor.get_config() == meta["compressor"] + assert (1000,) == meta["shape"] + assert meta["fill_value"] is None + + store.close() + + def _test_init_array_overwrite_path(self, order): + # setup + path = "foo/bar" + store = self.create_store() + mkey = path + "/" + array_meta_key + meta = dict( + shape=(2000,), + chunks=(200,), + dtype=np.dtype("u1"), + compressor=Zlib(1).get_config(), + fill_value=0, + order=order, + filters=None, + ) + store[mkey] = store._metadata_class.encode_array_metadata(meta) + + # don't overwrite + with pytest.raises(ContainsArrayError): + init_array(store, shape=1000, chunks=100, path=path) + + # do overwrite + try: + init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) + except NotImplementedError: + pass + else: + if self.version == 2: + assert group_meta_key in store + assert array_meta_key not in store + assert mkey in store + # should have been overwritten + meta = store._metadata_class.decode_array_metadata(store[mkey]) + if self.version == 2: + assert ZARR_FORMAT == meta["zarr_format"] + assert (100,) == meta["chunks"] + assert np.dtype("i4") == meta["dtype"] + else: + assert (100,) == meta["chunk_grid"]["chunk_shape"] + assert np.dtype("i4") == meta["data_type"] + assert (1000,) == meta["shape"] + + store.close() + + def test_init_array_overwrite_group(self): + # setup + path = "foo/bar" + store = self.create_store() + array_key = path + "/" + array_meta_key + group_key = path + "/" + 
group_meta_key + store[group_key] = store._metadata_class.encode_group_metadata() + + # don't overwrite + with pytest.raises(ContainsGroupError): + init_array(store, shape=1000, chunks=100, path=path) + + # do overwrite + try: + init_array(store, shape=1000, chunks=100, dtype="i4", path=path, overwrite=True) + except NotImplementedError: + pass + else: + assert group_key not in store + assert array_key in store + meta = store._metadata_class.decode_array_metadata(store[array_key]) + if self.version == 2: + assert ZARR_FORMAT == meta["zarr_format"] + assert (100,) == meta["chunks"] + assert np.dtype("i4") == meta["dtype"] + else: + assert (100,) == meta["chunk_grid"]["chunk_shape"] + assert np.dtype("i4") == meta["data_type"] + assert (1000,) == meta["shape"] + + store.close() + + def _test_init_array_overwrite_chunk_store(self, order): + # setup + store = self.create_store() + chunk_store = self.create_store() + + path = None + data_path = "" + mkey = array_meta_key + meta = dict( + shape=(2000,), + chunks=(200,), + dtype=np.dtype("u1"), + compressor=None, + fill_value=0, + filters=None, + order=order, + ) + + store[mkey] = store._metadata_class.encode_array_metadata(meta) + + chunk_store[data_path + "0"] = b"aaa" + chunk_store[data_path + "1"] = b"bbb" + + # don't overwrite (default) + with pytest.raises(ContainsArrayError): + init_array(store, path=path, shape=1000, chunks=100, chunk_store=chunk_store) + + # do overwrite + try: + init_array( + store, + path=path, + shape=1000, + chunks=100, + dtype="i4", + overwrite=True, + chunk_store=chunk_store, + ) + except NotImplementedError: + pass + else: + assert mkey in store + meta = store._metadata_class.decode_array_metadata(store[mkey]) + if self.version == 2: + assert ZARR_FORMAT == meta["zarr_format"] + assert (100,) == meta["chunks"] + assert np.dtype("i4") == meta["dtype"] + else: + assert (100,) == meta["chunk_grid"]["chunk_shape"] + assert np.dtype("i4") == meta["data_type"] + assert (1000,) == meta["shape"] + assert data_path + "0" not in chunk_store + assert data_path + "1" not in chunk_store + + store.close() + chunk_store.close() + + def test_init_array_compat(self): + store = self.create_store() + path = None + mkey = array_meta_key + init_array(store, path=path, shape=1000, chunks=100, compressor="none") + meta = store._metadata_class.decode_array_metadata(store[mkey]) + if self.version == 2: + assert meta["compressor"] is None + else: + assert "compressor" not in meta + store.close() + + def test_init_group(self): + store = self.create_store() + path = None + mkey = group_meta_key + init_group(store, path=path) + + # check metadata + assert mkey in store + meta = store._metadata_class.decode_group_metadata(store[mkey]) + if self.version == 2: + assert ZARR_FORMAT == meta["zarr_format"] + else: + assert meta == {"attributes": {}} + + store.close() + + def _test_init_group_overwrite(self, order): + # setup + store = self.create_store() + store[array_meta_key] = store._metadata_class.encode_array_metadata( + dict( + shape=(2000,), + chunks=(200,), + dtype=np.dtype("u1"), + compressor=None, + fill_value=0, + order=order, + filters=None, + ) + ) + + # don't overwrite array (default) + with pytest.raises(ContainsArrayError): + init_group(store) + + # do overwrite + try: + init_group(store, overwrite=True) + except NotImplementedError: + pass + else: + assert array_meta_key not in store + assert group_meta_key in store + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) + assert ZARR_FORMAT == 
meta["zarr_format"] + + # don't overwrite group + with pytest.raises(ValueError): + init_group(store) + + store.close() + + def _test_init_group_overwrite_path(self, order): + # setup + path = "foo/bar" + store = self.create_store() + meta = dict( + shape=(2000,), + chunks=(200,), + dtype=np.dtype("u1"), + compressor=None, + fill_value=0, + order=order, + filters=None, + ) + array_key = path + "/" + array_meta_key + group_key = path + "/" + group_meta_key + store[array_key] = store._metadata_class.encode_array_metadata(meta) + + # don't overwrite + with pytest.raises(ValueError): + init_group(store, path=path) + + # do overwrite + try: + init_group(store, overwrite=True, path=path) + except NotImplementedError: + pass + else: + if self.version == 2: + assert array_meta_key not in store + assert group_meta_key in store + assert array_key not in store + assert group_key in store + # should have been overwritten + meta = store._metadata_class.decode_group_metadata(store[group_key]) + if self.version == 2: + assert ZARR_FORMAT == meta["zarr_format"] + else: + assert meta == {"attributes": {}} + + store.close() + + def _test_init_group_overwrite_chunk_store(self, order): + # setup + store = self.create_store() + chunk_store = self.create_store() + store[array_meta_key] = store._metadata_class.encode_array_metadata( + dict( + shape=(2000,), + chunks=(200,), + dtype=np.dtype("u1"), + compressor=None, + fill_value=0, + filters=None, + order=order, + ) + ) + chunk_store["foo"] = b"bar" + chunk_store["baz"] = b"quux" + + # don't overwrite array (default) + with pytest.raises(ValueError): + init_group(store, chunk_store=chunk_store) + + # do overwrite + try: + init_group(store, overwrite=True, chunk_store=chunk_store) + except NotImplementedError: + pass + else: + assert array_meta_key not in store + assert group_meta_key in store + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert "foo" not in chunk_store + assert "baz" not in chunk_store + + # don't overwrite group + with pytest.raises(ValueError): + init_group(store) + + store.close() + chunk_store.close() class TestMappingStore(StoreTests): def create_store(self, **kwargs): - # skip_if_nested_chunks(**kwargs) + skip_if_nested_chunks(**kwargs) return KVStore(dict()) - # def test_set_invalid_content(self): - # # Generic mappings support non-buffer types - # pass - - -# def setdel_hierarchy_checks(store, root=""): -# # these tests are for stores that are aware of hierarchy levels; this -# # behaviour is not strictly required by Zarr but these tests are included -# # to define behaviour of MemoryStore and DirectoryStore classes + def test_set_invalid_content(self): + # Generic mappings support non-buffer types + pass -# # check __setitem__ and __delitem__ blocked by leaf -# store[root + "a/b"] = b"aaa" -# with pytest.raises(KeyError): -# store[root + "a/b/c"] = b"xxx" -# with pytest.raises(KeyError): -# del store[root + "a/b/c"] +def setdel_hierarchy_checks(store, root=""): + # these tests are for stores that are aware of hierarchy levels; this + # behaviour is not strictly required by Zarr but these tests are included + # to define behaviour of MemoryStore and DirectoryStore classes + + # check __setitem__ and __delitem__ blocked by leaf + + store[root + "a/b"] = b"aaa" + with pytest.raises(KeyError): + store[root + "a/b/c"] = b"xxx" + with pytest.raises(KeyError): + del store[root + "a/b/c"] + + store[root + "d"] = b"ddd" + with pytest.raises(KeyError): + store[root + 
"d/e/f"] = b"xxx" + with pytest.raises(KeyError): + del store[root + "d/e/f"] + + # test __setitem__ overwrite level + store[root + "x/y/z"] = b"xxx" + store[root + "x/y"] = b"yyy" + assert b"yyy" == ensure_bytes(store[root + "x/y"]) + assert root + "x/y/z" not in store + store[root + "x"] = b"zzz" + assert b"zzz" == ensure_bytes(store[root + "x"]) + assert root + "x/y" not in store + + # test __delitem__ overwrite level + store[root + "r/s/t"] = b"xxx" + del store[root + "r/s"] + assert root + "r/s/t" not in store + store[root + "r/s"] = b"xxx" + del store[root + "r"] + assert root + "r/s" not in store + + +class TestMemoryStore(StoreTests): + def create_store(self, **kwargs): + skip_if_nested_chunks(**kwargs) + return MemoryStore(**kwargs) -# store[root + "d"] = b"ddd" -# with pytest.raises(KeyError): -# store[root + "d/e/f"] = b"xxx" -# with pytest.raises(KeyError): -# del store[root + "d/e/f"] + def test_store_contains_bytes(self): + store = self.create_store() + store[self.root + "foo"] = np.array([97, 98, 99, 100, 101], dtype=np.uint8) + assert store[self.root + "foo"] == b"abcde" -# # test __setitem__ overwrite level -# store[root + "x/y/z"] = b"xxx" -# store[root + "x/y"] = b"yyy" -# assert b"yyy" == ensure_bytes(store[root + "x/y"]) -# assert root + "x/y/z" not in store -# store[root + "x"] = b"zzz" -# assert b"zzz" == ensure_bytes(store[root + "x"]) -# assert root + "x/y" not in store + def test_setdel(self): + store = self.create_store() + setdel_hierarchy_checks(store, self.root) -# # test __delitem__ overwrite level -# store[root + "r/s/t"] = b"xxx" -# del store[root + "r/s"] -# assert root + "r/s/t" not in store -# store[root + "r/s"] = b"xxx" -# del store[root + "r"] -# assert root + "r/s" not in store +class TestDictStore(StoreTests): + def create_store(self, **kwargs): + skip_if_nested_chunks(**kwargs) -# class TestMemoryStore(StoreTests): -# def create_store(self, **kwargs): -# skip_if_nested_chunks(**kwargs) -# return MemoryStore(**kwargs) + with pytest.warns(DeprecationWarning): + return DictStore(**kwargs) -# def test_store_contains_bytes(self): -# store = self.create_store() -# store[self.root + "foo"] = np.array([97, 98, 99, 100, 101], dtype=np.uint8) -# assert store[self.root + "foo"] == b"abcde" + def test_deprecated(self): + store = self.create_store() + assert isinstance(store, MemoryStore) -# def test_setdel(self): -# store = self.create_store() -# setdel_hierarchy_checks(store, self.root) + def test_pickle(self): + with pytest.warns(DeprecationWarning): + # pickle.load() will also trigger deprecation warning + super().test_pickle() class TestDirectoryStore(StoreTests): def create_store(self, normalize_keys=False, dimension_separator=".", **kwargs): path = tempfile.mkdtemp() atexit.register(atexit_rmtree, path) - store = LocalStore( + store = DirectoryStore( + path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs + ) + return store + + def test_filesystem_path(self): + # test behaviour with path that does not exist + path = "data/store" + if os.path.exists(path): + shutil.rmtree(path) + store = DirectoryStore(path) + # should only be created on demand + assert not os.path.exists(path) + store["foo"] = b"bar" + assert os.path.isdir(path) + + # check correct permissions + # regression test for https://github.com/zarr-developers/zarr-python/issues/325 + stat = os.stat(path) + mode = stat.st_mode & 0o666 + umask = os.umask(0) + os.umask(umask) + assert mode == (0o666 & ~umask) + + # test behaviour with file path + with 
tempfile.NamedTemporaryFile() as f: + with pytest.raises(ValueError): + DirectoryStore(f.name) + + def test_init_pathlib(self): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + DirectoryStore(pathlib.Path(path)) + + def test_pickle_ext(self): + store = self.create_store() + store2 = pickle.loads(pickle.dumps(store)) + + # check path is preserved + assert store.path == store2.path + + # check point to same underlying directory + assert self.root + "xxx" not in store + store2[self.root + "xxx"] = b"yyy" + assert b"yyy" == ensure_bytes(store[self.root + "xxx"]) + + def test_setdel(self): + store = self.create_store() + setdel_hierarchy_checks(store, self.root) + + def test_normalize_keys(self): + store = self.create_store(normalize_keys=True) + store[self.root + "FOO"] = b"bar" + assert self.root + "FOO" in store + assert self.root + "foo" in store + + def test_listing_keys_slash(self): + def mock_walker_slash(_path): + yield from [ + # trailing slash in first key + ("root_with_slash/", ["d1", "g1"], [".zgroup"]), + ("root_with_slash/d1", [], [".zarray"]), + ("root_with_slash/g1", [], [".zgroup"]), + ] + + res = set(DirectoryStore._keys_fast("root_with_slash/", walker=mock_walker_slash)) + assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"} + + def test_listing_keys_no_slash(self): + def mock_walker_no_slash(_path): + yield from [ + # no trailing slash in first key + ("root_with_no_slash", ["d1", "g1"], [".zgroup"]), + ("root_with_no_slash/d1", [], [".zarray"]), + ("root_with_no_slash/g1", [], [".zgroup"]), + ] + + res = set(DirectoryStore._keys_fast("root_with_no_slash", mock_walker_no_slash)) + assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"} + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestFSStore(StoreTests): + def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + + store = FSStore( + path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs + ) + return store + + def test_init_array(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100) + + # check metadata + assert array_meta_key in store + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + assert meta["dimension_separator"] == "." 
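The test_dimension_separator test that follows pins down FSStore's key normalization: a leading "/" is stripped so absolute-looking keys resolve to the same entry, while interior structure is left untouched, whichever separator is in use. A rough standalone sketch of that rule, assuming a bare helper function rather than the store's actual _normalize_key method:

def normalize_key(key: str) -> str:
    # Drop any leading "/" so that "/group/.zarray" and "group/.zarray"
    # address the same object; interior separators are preserved as-is.
    return key.lstrip("/")

assert normalize_key(".zarray") == ".zarray"
assert normalize_key("/.zgroup") == ".zgroup"
assert normalize_key("/group/.zarray") == "group/.zarray"
assert normalize_key("group/.zgroup") == "group/.zgroup"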
+ + def test_dimension_separator(self): + for x in (".", "/"): + store = self.create_store(dimension_separator=x) + norm = store._normalize_key + assert ".zarray" == norm(".zarray") + assert ".zarray" == norm("/.zarray") + assert ".zgroup" == norm("/.zgroup") + assert "group/.zarray" == norm("group/.zarray") + assert "group/.zgroup" == norm("group/.zgroup") + assert "group/.zarray" == norm("/group/.zarray") + assert "group/.zgroup" == norm("/group/.zgroup") + + def test_complex(self): + path1 = tempfile.mkdtemp() + path2 = tempfile.mkdtemp() + store = self.create_store( + path="simplecache::file://" + path1, + simplecache={"same_names": True, "cache_storage": path2}, + ) + assert not store + assert not os.listdir(path1) + assert not os.listdir(path2) + store[self.root + "foo"] = b"hello" + assert "foo" in os.listdir(str(path1) + "/" + self.root) + assert self.root + "foo" in store + assert not os.listdir(str(path2)) + assert store[self.root + "foo"] == b"hello" + assert "foo" in os.listdir(str(path2)) + + def test_deep_ndim(self): + import zarr.v2 + + store = self.create_store() + path = None if self.version == 2 else "group1" + foo = zarr.v2.open_group(store=store, path=path) + bar = foo.create_group("bar") + baz = bar.create_dataset("baz", shape=(4, 4, 4), chunks=(2, 2, 2), dtype="i8") + baz[:] = 1 + if self.version == 2: + assert set(store.listdir()) == {".zgroup", "bar"} + else: + assert set(store.listdir()) == {"data", "meta", "zarr.v2.json"} + assert set(store.listdir("meta/root/" + path)) == {"bar", "bar.group.json"} + assert set(store.listdir("data/root/" + path)) == {"bar"} + assert foo["bar"]["baz"][(0, 0, 0)] == 1 + + def test_not_fsspec(self): + path = tempfile.mkdtemp() + with pytest.raises(ValueError, match="storage_options"): + zarr.v2.open_array(path, mode="w", storage_options={"some": "kwargs"}) + with pytest.raises(ValueError, match="storage_options"): + zarr.v2.open_group(path, mode="w", storage_options={"some": "kwargs"}) + zarr.v2.open_array("file://" + path, mode="w", shape=(1,), dtype="f8") + + def test_create(self): + path1 = tempfile.mkdtemp() + path2 = tempfile.mkdtemp() + g = zarr.v2.open_group("file://" + path1, mode="w", storage_options={"auto_mkdir": True}) + a = g.create_dataset("data", shape=(8,)) + a[:4] = [0, 1, 2, 3] + assert "data" in os.listdir(path1) + assert ".zgroup" in os.listdir(path1) + + # consolidated metadata (GH#915) + consolidate_metadata("file://" + path1) + assert ".zmetadata" in os.listdir(path1) + + g = zarr.v2.open_group( + "simplecache::file://" + path1, + mode="r", + storage_options={"cache_storage": path2, "same_names": True}, + ) + assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] + with pytest.raises(PermissionError): + g.data[:] = 1 + + @pytest.mark.parametrize("mode,allowed", [("r", False), ("r+", True)]) + def test_modify_consolidated(self, mode, allowed): + url = "file://" + tempfile.mkdtemp() + + # create + root = zarr.v2.open_group(url, mode="w") + root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") + zarr.v2.consolidate_metadata(url) + + # reopen and modify + root = zarr.v2.open_consolidated(url, mode=mode) + if allowed: + root["baz"][0, 0] = 7 + + root = zarr.v2.open_consolidated(url, mode="r") + assert root["baz"][0, 0] == 7 + else: + with pytest.raises(zarr.v2.errors.ReadOnlyError): + root["baz"][0, 0] = 7 + + @pytest.mark.parametrize("mode", ["r", "r+"]) + def test_modify_consolidated_metadata_raises(self, mode): + url = "file://" + tempfile.mkdtemp() + + # create + root = zarr.v2.open_group(url, 
mode="w") + root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") + zarr.v2.consolidate_metadata(url) + + # reopen and modify + root = zarr.v2.open_consolidated(url, mode=mode) + with pytest.raises(zarr.v2.errors.ReadOnlyError): + root["baz"].resize(100, 100) + + def test_read_only(self): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = self.create_store(path=path) + store[self.root + "foo"] = b"bar" + + store = self.create_store(path=path, mode="r") + + with pytest.raises(PermissionError): + store[self.root + "foo"] = b"hex" + + with pytest.raises(PermissionError): + del store[self.root + "foo"] + + with pytest.raises(PermissionError): + store.delitems([self.root + "foo"]) + + with pytest.raises(PermissionError): + store.setitems({self.root + "foo": b"baz"}) + + with pytest.raises(PermissionError): + store.clear() + + with pytest.raises(PermissionError): + store.rmdir(self.root + "anydir") + + assert store[self.root + "foo"] == b"bar" + + def test_eq(self): + store1 = self.create_store(path="anypath") + store2 = self.create_store(path="anypath") + assert store1 == store2 + + @pytest.mark.usefixtures("s3") + def test_s3(self): + g = zarr.v2.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) + a = g.create_dataset("data", shape=(8,)) + a[:4] = [0, 1, 2, 3] + + g = zarr.v2.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) + + assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] + + # test via convenience + g = zarr.v2.open("s3://test/out.zarr", mode="r", storage_options=self.s3so) + assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] + + @pytest.mark.usefixtures("s3") + def test_s3_complex(self): + g = zarr.v2.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) + expected = np.empty((8, 8, 8), dtype="int64") + expected[:] = -1 + a = g.create_dataset( + "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True + ) + expected[0] = 0 + expected[3] = 3 + expected[6, 6, 6] = 6 + a[6, 6, 6] = 6 + a[:4] = expected[:4] + + b = g.create_dataset( + "data_f", + shape=(8,), + chunks=(1,), + dtype=[("foo", "S3"), ("bar", "i4")], + fill_value=(b"b", 1), + ) + b[:4] = (b"aaa", 2) + g2 = zarr.v2.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) + + assert (g2.data[:] == expected).all() + a.chunk_store.fs.invalidate_cache("test/out.zarr/data") + a[:] = 5 + assert (a[:] == 5).all() + + assert g2.data_f["foo"].tolist() == [b"aaa"] * 4 + [b"b"] * 4 + with pytest.raises(PermissionError): + g2.data[:] = 5 + + with pytest.raises(PermissionError): + g2.store.setitems({}) + + with pytest.raises(PermissionError): + # even though overwrite=True, store is read-only, so fails + g2.create_dataset( + "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True + ) + + a = g.create_dataset( + "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True + ) + assert (a[:] == -np.ones((8, 8, 8))).all() + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestFSStoreWithKeySeparator(StoreTests): + def create_store(self, normalize_keys=False, key_separator=".", **kwargs): + # Since the user is passing key_separator, that will take priority. 
+ skip_if_nested_chunks(**kwargs) + + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + return FSStore(path, normalize_keys=normalize_keys, key_separator=key_separator) + + +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestFSStoreFromFilesystem(StoreTests): + def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): + import fsspec + + fs = fsspec.filesystem("file") + + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + + with pytest.raises(ValueError): + # can't specify storage_options when passing an + # existing fs object + _ = FSStore(path, fs=fs, auto_mkdir=True) + + store = FSStore( path, - auto_mkdir=True, - # normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs + normalize_keys=normalize_keys, + dimension_separator=dimension_separator, + fs=fs, + **kwargs, ) + + return store + + +@pytest.fixture() +def s3(request): + # writable local S3 system + import shlex + import subprocess + import time + + if "BOTO_CONFIG" not in os.environ: # pragma: no cover + os.environ["BOTO_CONFIG"] = "/dev/null" + if "AWS_ACCESS_KEY_ID" not in os.environ: # pragma: no cover + os.environ["AWS_ACCESS_KEY_ID"] = "foo" + if "AWS_SECRET_ACCESS_KEY" not in os.environ: # pragma: no cover + os.environ["AWS_SECRET_ACCESS_KEY"] = "bar" + requests = pytest.importorskip("requests") + s3fs = pytest.importorskip("s3fs") + pytest.importorskip("moto") + + port = 5555 + endpoint_uri = "http://127.0.0.1:%d/" % port + proc = subprocess.Popen( + shlex.split("moto_server s3 -p %d" % port), + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + ) + + timeout = 5 + while timeout > 0: + try: + r = requests.get(endpoint_uri) + if r.ok: + break + except Exception: # pragma: no cover + pass + timeout -= 0.1 # pragma: no cover + time.sleep(0.1) # pragma: no cover + s3so = dict(client_kwargs={"endpoint_url": endpoint_uri}, use_listings_cache=False) + s3 = s3fs.S3FileSystem(anon=False, **s3so) + s3.mkdir("test") + request.cls.s3so = s3so + yield + proc.terminate() + proc.wait() + + +class TestNestedDirectoryStore(TestDirectoryStore): + def create_store(self, normalize_keys=False, **kwargs): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = NestedDirectoryStore(path, normalize_keys=normalize_keys, **kwargs) return store + def test_init_array(self): + store = self.create_store() + assert store._dimension_separator == "/" + init_array(store, shape=1000, chunks=100) + + # check metadata + assert array_meta_key in store + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + assert meta["dimension_separator"] == "/" + + def test_chunk_nesting(self): + store = self.create_store() + # any path where last segment looks like a chunk key gets special handling + store[self.root + "0.0"] = b"xxx" + assert b"xxx" == store[self.root + "0.0"] + # assert b'xxx' == store['0/0'] + store[self.root + "foo/10.20.30"] = b"yyy" + assert b"yyy" == store[self.root + "foo/10.20.30"] + # assert b'yyy' == store['foo/10/20/30'] + store[self.root + "42"] = b"zzz" + assert b"zzz" == store[self.root + "42"] + + def test_listdir(self): + store = self.create_store() + z = zarr.v2.zeros((10, 10), chunks=(5, 5), store=store) + z[:] = 1 # write to all chunks + for k in store.listdir(): + assert store.get(k) is not None -# def 
test_filesystem_path(self): - -# # test behaviour with path that does not exist -# path = "data/store" -# if os.path.exists(path): -# shutil.rmtree(path) -# store = DirectoryStore(path) -# # should only be created on demand -# assert not os.path.exists(path) -# store["foo"] = b"bar" -# assert os.path.isdir(path) - -# # check correct permissions -# # regression test for https://github.com/zarr-developers/zarr-python/issues/325 -# stat = os.stat(path) -# mode = stat.st_mode & 0o666 -# umask = os.umask(0) -# os.umask(umask) -# assert mode == (0o666 & ~umask) - -# # test behaviour with file path -# with tempfile.NamedTemporaryFile() as f: -# with pytest.raises(ValueError): -# DirectoryStore(f.name) - -# def test_init_pathlib(self): -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# DirectoryStore(pathlib.Path(path)) - -# def test_pickle_ext(self): -# store = self.create_store() -# store2 = pickle.loads(pickle.dumps(store)) - -# # check path is preserved -# assert store.path == store2.path - -# # check point to same underlying directory -# assert self.root + "xxx" not in store -# store2[self.root + "xxx"] = b"yyy" -# assert b"yyy" == ensure_bytes(store[self.root + "xxx"]) - -# def test_setdel(self): -# store = self.create_store() -# setdel_hierarchy_checks(store, self.root) - -# def test_normalize_keys(self): -# store = self.create_store(normalize_keys=True) -# store[self.root + "FOO"] = b"bar" -# assert self.root + "FOO" in store -# assert self.root + "foo" in store - -# def test_listing_keys_slash(self): -# def mock_walker_slash(_path): -# yield from [ -# # trailing slash in first key -# ("root_with_slash/", ["d1", "g1"], [".zgroup"]), -# ("root_with_slash/d1", [], [".zarray"]), -# ("root_with_slash/g1", [], [".zgroup"]), -# ] - -# res = set(DirectoryStore._keys_fast("root_with_slash/", walker=mock_walker_slash)) -# assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"} - -# def test_listing_keys_no_slash(self): -# def mock_walker_no_slash(_path): -# yield from [ -# # no trailing slash in first key -# ("root_with_no_slash", ["d1", "g1"], [".zgroup"]), -# ("root_with_no_slash/d1", [], [".zarray"]), -# ("root_with_no_slash/g1", [], [".zgroup"]), -# ] - -# res = set(DirectoryStore._keys_fast("root_with_no_slash", mock_walker_no_slash)) -# assert res == {".zgroup", "g1/.zgroup", "d1/.zarray"} - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestFSStore(StoreTests): -# def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): - -# if path is None: -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) - -# store = FSStore( -# path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs -# ) -# return store - -# def test_init_array(self): -# store = self.create_store() -# init_array(store, shape=1000, chunks=100) - -# # check metadata -# assert array_meta_key in store -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# assert meta["dimension_separator"] == "." 
- -# def test_dimension_separator(self): -# for x in (".", "/"): -# store = self.create_store(dimension_separator=x) -# norm = store._normalize_key -# assert ".zarray" == norm(".zarray") -# assert ".zarray" == norm("/.zarray") -# assert ".zgroup" == norm("/.zgroup") -# assert "group/.zarray" == norm("group/.zarray") -# assert "group/.zgroup" == norm("group/.zgroup") -# assert "group/.zarray" == norm("/group/.zarray") -# assert "group/.zgroup" == norm("/group/.zgroup") - -# def test_complex(self): -# path1 = tempfile.mkdtemp() -# path2 = tempfile.mkdtemp() -# store = self.create_store( -# path="simplecache::file://" + path1, -# simplecache={"same_names": True, "cache_storage": path2}, -# ) -# assert not store -# assert not os.listdir(path1) -# assert not os.listdir(path2) -# store[self.root + "foo"] = b"hello" -# assert "foo" in os.listdir(str(path1) + "/" + self.root) -# assert self.root + "foo" in store -# assert not os.listdir(str(path2)) -# assert store[self.root + "foo"] == b"hello" -# assert "foo" in os.listdir(str(path2)) - -# def test_deep_ndim(self): -# import zarr - -# store = self.create_store() -# path = None if self.version == 2 else "group1" -# foo = zarr.open_group(store=store, path=path) -# bar = foo.create_group("bar") -# baz = bar.create_dataset("baz", shape=(4, 4, 4), chunks=(2, 2, 2), dtype="i8") -# baz[:] = 1 -# if self.version == 2: -# assert set(store.listdir()) == {".zgroup", "bar"} -# else: -# assert set(store.listdir()) == {"data", "meta", "zarr.json"} -# assert set(store.listdir("meta/root/" + path)) == {"bar", "bar.group.json"} -# assert set(store.listdir("data/root/" + path)) == {"bar"} -# assert foo["bar"]["baz"][(0, 0, 0)] == 1 - -# def test_not_fsspec(self): -# import zarr - -# path = tempfile.mkdtemp() -# with pytest.raises(ValueError, match="storage_options"): -# zarr.open_array(path, mode="w", storage_options={"some": "kwargs"}) -# with pytest.raises(ValueError, match="storage_options"): -# zarr.open_group(path, mode="w", storage_options={"some": "kwargs"}) -# zarr.open_array("file://" + path, mode="w", shape=(1,), dtype="f8") - -# def test_create(self): -# import zarr - -# path1 = tempfile.mkdtemp() -# path2 = tempfile.mkdtemp() -# g = zarr.open_group("file://" + path1, mode="w", storage_options={"auto_mkdir": True}) -# a = g.create_dataset("data", shape=(8,)) -# a[:4] = [0, 1, 2, 3] -# assert "data" in os.listdir(path1) -# assert ".zgroup" in os.listdir(path1) - -# # consolidated metadata (GH#915) -# consolidate_metadata("file://" + path1) -# assert ".zmetadata" in os.listdir(path1) - -# g = zarr.open_group( -# "simplecache::file://" + path1, -# mode="r", -# storage_options={"cache_storage": path2, "same_names": True}, -# ) -# assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] -# with pytest.raises(PermissionError): -# g.data[:] = 1 - -# @pytest.mark.parametrize("mode,allowed", [("r", False), ("r+", True)]) -# def test_modify_consolidated(self, mode, allowed): -# import zarr - -# url = "file://" + tempfile.mkdtemp() - -# # create -# root = zarr.open_group(url, mode="w") -# root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") -# zarr.consolidate_metadata(url) - -# # reopen and modify -# root = zarr.open_consolidated(url, mode=mode) -# if allowed: -# root["baz"][0, 0] = 7 - -# root = zarr.open_consolidated(url, mode="r") -# assert root["baz"][0, 0] == 7 -# else: -# with pytest.raises(zarr.errors.ReadOnlyError): -# root["baz"][0, 0] = 7 - -# @pytest.mark.parametrize("mode", ["r", "r+"]) -# def 
test_modify_consolidated_metadata_raises(self, mode): -# import zarr - -# url = "file://" + tempfile.mkdtemp() - -# # create -# root = zarr.open_group(url, mode="w") -# root.zeros("baz", shape=(10000, 10000), chunks=(1000, 1000), dtype="i4") -# zarr.consolidate_metadata(url) - -# # reopen and modify -# root = zarr.open_consolidated(url, mode=mode) -# with pytest.raises(zarr.errors.ReadOnlyError): -# root["baz"].resize(100, 100) - -# def test_read_only(self): -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# store = self.create_store(path=path) -# store[self.root + "foo"] = b"bar" - -# store = self.create_store(path=path, mode="r") - -# with pytest.raises(PermissionError): -# store[self.root + "foo"] = b"hex" - -# with pytest.raises(PermissionError): -# del store[self.root + "foo"] - -# with pytest.raises(PermissionError): -# store.delitems([self.root + "foo"]) - -# with pytest.raises(PermissionError): -# store.setitems({self.root + "foo": b"baz"}) - -# with pytest.raises(PermissionError): -# store.clear() - -# with pytest.raises(PermissionError): -# store.rmdir(self.root + "anydir") - -# assert store[self.root + "foo"] == b"bar" - -# def test_eq(self): -# store1 = self.create_store(path="anypath") -# store2 = self.create_store(path="anypath") -# assert store1 == store2 - -# @pytest.mark.usefixtures("s3") -# def test_s3(self): -# import zarr - -# g = zarr.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) -# a = g.create_dataset("data", shape=(8,)) -# a[:4] = [0, 1, 2, 3] - -# g = zarr.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) - -# assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] - -# # test via convenience -# g = zarr.open("s3://test/out.zarr", mode="r", storage_options=self.s3so) -# assert g.data[:].tolist() == [0, 1, 2, 3, 0, 0, 0, 0] - -# @pytest.mark.usefixtures("s3") -# def test_s3_complex(self): -# import zarr - -# g = zarr.open_group("s3://test/out.zarr", mode="w", storage_options=self.s3so) -# expected = np.empty((8, 8, 8), dtype="int64") -# expected[:] = -1 -# a = g.create_dataset( -# "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True -# ) -# expected[0] = 0 -# expected[3] = 3 -# expected[6, 6, 6] = 6 -# a[6, 6, 6] = 6 -# a[:4] = expected[:4] - -# b = g.create_dataset( -# "data_f", -# shape=(8,), -# chunks=(1,), -# dtype=[("foo", "S3"), ("bar", "i4")], -# fill_value=(b"b", 1), -# ) -# b[:4] = (b"aaa", 2) -# g2 = zarr.open_group("s3://test/out.zarr", mode="r", storage_options=self.s3so) - -# assert (g2.data[:] == expected).all() -# a.chunk_store.fs.invalidate_cache("test/out.zarr/data") -# a[:] = 5 -# assert (a[:] == 5).all() - -# assert g2.data_f["foo"].tolist() == [b"aaa"] * 4 + [b"b"] * 4 -# with pytest.raises(PermissionError): -# g2.data[:] = 5 - -# with pytest.raises(PermissionError): -# g2.store.setitems({}) - -# with pytest.raises(PermissionError): -# # even though overwrite=True, store is read-only, so fails -# g2.create_dataset( -# "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True -# ) - -# a = g.create_dataset( -# "data", shape=(8, 8, 8), fill_value=-1, chunks=(1, 1, 1), overwrite=True -# ) -# assert (a[:] == -np.ones((8, 8, 8))).all() - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestFSStoreWithKeySeparator(StoreTests): -# def create_store(self, normalize_keys=False, key_separator=".", **kwargs): - -# # Since the user is passing key_separator, that will take priority. 
-# skip_if_nested_chunks(**kwargs) - -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# return FSStore(path, normalize_keys=normalize_keys, key_separator=key_separator) - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestFSStoreFromFilesystem(StoreTests): -# def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): -# import fsspec - -# fs = fsspec.filesystem("file") - -# if path is None: -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) - -# with pytest.raises(ValueError): -# # can't specify storage_options when passing an -# # existing fs object -# _ = FSStore(path, fs=fs, auto_mkdir=True) - -# store = FSStore( -# path, -# normalize_keys=normalize_keys, -# dimension_separator=dimension_separator, -# fs=fs, -# **kwargs, -# ) - -# return store - - -# @pytest.fixture() -# def s3(request): -# # writable local S3 system -# import shlex -# import subprocess -# import time - -# if "BOTO_CONFIG" not in os.environ: # pragma: no cover -# os.environ["BOTO_CONFIG"] = "/dev/null" -# if "AWS_ACCESS_KEY_ID" not in os.environ: # pragma: no cover -# os.environ["AWS_ACCESS_KEY_ID"] = "foo" -# if "AWS_SECRET_ACCESS_KEY" not in os.environ: # pragma: no cover -# os.environ["AWS_SECRET_ACCESS_KEY"] = "bar" -# requests = pytest.importorskip("requests") -# s3fs = pytest.importorskip("s3fs") -# pytest.importorskip("moto") - -# port = 5555 -# endpoint_uri = "http://127.0.0.1:%d/" % port -# proc = subprocess.Popen( -# shlex.split("moto_server s3 -p %d" % port), -# stderr=subprocess.DEVNULL, -# stdout=subprocess.DEVNULL, -# ) - -# timeout = 5 -# while timeout > 0: -# try: -# r = requests.get(endpoint_uri) -# if r.ok: -# break -# except Exception: # pragma: no cover -# pass -# timeout -= 0.1 # pragma: no cover -# time.sleep(0.1) # pragma: no cover -# s3so = dict(client_kwargs={"endpoint_url": endpoint_uri}, use_listings_cache=False) -# s3 = s3fs.S3FileSystem(anon=False, **s3so) -# s3.mkdir("test") -# request.cls.s3so = s3so -# yield -# proc.terminate() -# proc.wait() - - -# class TestNestedDirectoryStore(TestDirectoryStore): -# def create_store(self, normalize_keys=False, **kwargs): -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# store = NestedDirectoryStore(path, normalize_keys=normalize_keys, **kwargs) -# return store - -# def test_init_array(self): -# store = self.create_store() -# assert store._dimension_separator == "/" -# init_array(store, shape=1000, chunks=100) - -# # check metadata -# assert array_meta_key in store -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# assert meta["dimension_separator"] == "/" - -# def test_chunk_nesting(self): -# store = self.create_store() -# # any path where last segment looks like a chunk key gets special handling -# store[self.root + "0.0"] = b"xxx" -# assert b"xxx" == store[self.root + "0.0"] -# # assert b'xxx' == store['0/0'] -# store[self.root + "foo/10.20.30"] = b"yyy" -# assert b"yyy" == store[self.root + "foo/10.20.30"] -# # assert b'yyy' == store['foo/10/20/30'] -# store[self.root + "42"] = b"zzz" -# assert b"zzz" == store[self.root + "42"] - -# def test_listdir(self): -# store = self.create_store() -# z = zarr.zeros((10, 10), chunks=(5, 5), store=store) -# z[:] = 1 # write to all chunks -# for k in store.listdir(): -# assert store.get(k) is not None - - 
-# class TestNestedDirectoryStoreNone: -# def test_value_error(self): -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# store = NestedDirectoryStore(path, normalize_keys=True, dimension_separator=None) -# assert store._dimension_separator == "/" - - -# class TestNestedDirectoryStoreWithWrongValue: -# def test_value_error(self): -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# with pytest.raises(ValueError): -# NestedDirectoryStore(path, normalize_keys=True, dimension_separator=".") - - -# class TestN5Store(TestNestedDirectoryStore): -# def create_store(self, normalize_keys=False): -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# store = N5Store(path, normalize_keys=normalize_keys) -# return store - -# def test_equal(self): -# store_a = self.create_store() -# store_b = N5Store(store_a.path) -# assert store_a == store_b - -# @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) -# def test_del_zarr_meta_key(self, zarr_meta_key): -# store = self.create_store() -# store[n5_attrs_key] = json_dumps({"foo": "bar"}) -# del store[zarr_meta_key] -# assert n5_attrs_key not in store - -# def test_chunk_nesting(self): -# store = self.create_store() -# store["0.0"] = b"xxx" -# assert "0.0" in store -# assert b"xxx" == store["0.0"] -# # assert b'xxx' == store['0/0'] -# store["foo/10.20.30"] = b"yyy" -# assert "foo/10.20.30" in store -# assert b"yyy" == store["foo/10.20.30"] -# # N5 reverses axis order -# assert b"yyy" == store["foo/30/20/10"] -# del store["foo/10.20.30"] -# assert "foo/30/20/10" not in store -# store["42"] = b"zzz" -# assert "42" in store -# assert b"zzz" == store["42"] - -# def test_init_array(self): -# store = self.create_store() -# init_array(store, shape=1000, chunks=100) - -# # check metadata -# assert array_meta_key in store -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# # N5Store wraps the actual compressor -# compressor_config = meta["compressor"]["compressor_config"] -# assert default_compressor.get_config() == compressor_config -# # N5Store always has a fill value of 0 -# assert meta["fill_value"] == 0 -# assert meta["dimension_separator"] == "." 
-# # Top-level groups AND arrays should have -# # the n5 keyword in metadata -# raw_n5_meta = json.loads(store[n5_attrs_key]) -# assert raw_n5_meta.get("n5", None) == N5_FORMAT - -# def test_init_array_path(self): -# path = "foo/bar" -# store = self.create_store() -# init_array(store, shape=1000, chunks=100, path=path) - -# # check metadata -# key = path + "/" + array_meta_key -# assert key in store -# meta = store._metadata_class.decode_array_metadata(store[key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# # N5Store wraps the actual compressor -# compressor_config = meta["compressor"]["compressor_config"] -# assert default_compressor.get_config() == compressor_config -# # N5Store always has a fill value of 0 -# assert meta["fill_value"] == 0 - -# def test_init_array_compat(self): -# store = self.create_store() -# init_array(store, shape=1000, chunks=100, compressor="none") -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# # N5Store wraps the actual compressor -# compressor_config = meta["compressor"]["compressor_config"] -# assert compressor_config is None - -# def test_init_array_overwrite(self): -# self._test_init_array_overwrite("C") - -# def test_init_array_overwrite_path(self): -# self._test_init_array_overwrite_path("C") - -# def test_init_array_overwrite_chunk_store(self): -# self._test_init_array_overwrite_chunk_store("C") - -# def test_init_group_overwrite(self): -# self._test_init_group_overwrite("C") - -# def test_init_group_overwrite_path(self): -# self._test_init_group_overwrite_path("C") - -# def test_init_group_overwrite_chunk_store(self): -# self._test_init_group_overwrite_chunk_store("C") - -# def test_init_group(self): -# store = self.create_store() -# init_group(store) -# store[".zattrs"] = json_dumps({"foo": "bar"}) -# # check metadata -# assert group_meta_key in store -# assert group_meta_key in store.listdir() -# assert group_meta_key in store.listdir("") -# meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] - -# def test_filters(self): -# all_filters, all_errors = zip( -# *[ -# (None, does_not_raise()), -# ([], does_not_raise()), -# ([AsType("f4", "f8")], pytest.raises(ValueError)), -# ] -# ) -# for filters, error in zip(all_filters, all_errors): -# store = self.create_store() -# with error: -# init_array(store, shape=1000, chunks=100, filters=filters) - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestN5FSStore(TestFSStore): -# def create_store(self, normalize_keys=False, path=None, **kwargs): - -# if path is None: -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) - -# store = N5FSStore(path, normalize_keys=normalize_keys, **kwargs) -# return store - -# def test_equal(self): -# store_a = self.create_store() -# store_b = N5FSStore(store_a.path) -# assert store_a == store_b - -# # This is copied wholesale from the N5Store tests. The same test could -# # be run by making TestN5FSStore inherit from both TestFSStore and -# # TestN5Store, but a direct copy is arguably more explicit. 
- -# @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) -# def test_del_zarr_meta_key(self, zarr_meta_key): -# store = self.create_store() -# store[n5_attrs_key] = json_dumps({"foo": "bar"}) -# del store[zarr_meta_key] -# assert n5_attrs_key not in store - -# def test_chunk_nesting(self): -# store = self.create_store() -# store["0.0"] = b"xxx" -# assert "0.0" in store -# assert b"xxx" == store["0.0"] -# # assert b'xxx' == store['0/0'] -# store["foo/10.20.30"] = b"yyy" -# assert "foo/10.20.30" in store -# assert b"yyy" == store["foo/10.20.30"] -# # N5 reverses axis order -# assert b"yyy" == store["foo/30/20/10"] -# del store["foo/10.20.30"] -# assert "foo/30/20/10" not in store -# store["42"] = b"zzz" -# assert "42" in store -# assert b"zzz" == store["42"] - -# def test_init_array(self): -# store = self.create_store() -# init_array(store, shape=1000, chunks=100) - -# # check metadata -# assert array_meta_key in store -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# # N5Store wraps the actual compressor -# compressor_config = meta["compressor"]["compressor_config"] -# assert default_compressor.get_config() == compressor_config -# # N5Store always has a fill value of 0 -# assert meta["fill_value"] == 0 -# assert meta["dimension_separator"] == "." -# # Top-level groups AND arrays should have -# # the n5 keyword in metadata -# raw_n5_meta = json.loads(store[n5_attrs_key]) -# assert raw_n5_meta.get("n5", None) == N5_FORMAT - -# def test_init_array_path(self): -# path = "foo/bar" -# store = self.create_store() -# init_array(store, shape=1000, chunks=100, path=path) - -# # check metadata -# key = path + "/" + array_meta_key -# assert key in store -# meta = store._metadata_class.decode_array_metadata(store[key]) -# assert ZARR_FORMAT == meta["zarr_format"] -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunks"] -# assert np.dtype(None) == meta["dtype"] -# # N5Store wraps the actual compressor -# compressor_config = meta["compressor"]["compressor_config"] -# assert default_compressor.get_config() == compressor_config -# # N5Store always has a fill value of 0 -# assert meta["fill_value"] == 0 - -# def test_init_array_compat(self): -# store = self.create_store() -# init_array(store, shape=1000, chunks=100, compressor="none") -# meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) -# # N5Store wraps the actual compressor -# compressor_config = meta["compressor"]["compressor_config"] -# assert compressor_config is None - -# def test_init_array_overwrite(self): -# self._test_init_array_overwrite("C") - -# def test_init_array_overwrite_path(self): -# self._test_init_array_overwrite_path("C") - -# def test_init_array_overwrite_chunk_store(self): -# self._test_init_array_overwrite_chunk_store("C") - -# def test_init_group_overwrite(self): -# self._test_init_group_overwrite("C") - -# def test_init_group_overwrite_path(self): -# self._test_init_group_overwrite_path("C") - -# def test_init_group_overwrite_chunk_store(self): -# self._test_init_group_overwrite_chunk_store("C") - -# def test_dimension_separator(self): - -# with pytest.warns(UserWarning, match="dimension_separator"): -# self.create_store(dimension_separator="/") - -# def test_init_group(self): -# store = self.create_store() -# init_group(store) -# store[".zattrs"] = json_dumps({"foo": "bar"}) -# # check 
metadata -# assert group_meta_key in store -# assert group_meta_key in store.listdir() -# assert group_meta_key in store.listdir("") -# meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) -# assert ZARR_FORMAT == meta["zarr_format"] - -# def test_filters(self): -# all_filters, all_errors = zip( -# *[ -# (None, does_not_raise()), -# ([], does_not_raise()), -# ([AsType("f4", "f8")], pytest.raises(ValueError)), -# ] -# ) -# for filters, error in zip(all_filters, all_errors): -# store = self.create_store() -# with error: -# init_array(store, shape=1000, chunks=100, filters=filters) - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestNestedFSStore(TestNestedDirectoryStore): -# def create_store(self, normalize_keys=False, path=None, **kwargs): -# if path is None: -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# store = FSStore( -# path, normalize_keys=normalize_keys, -# dimension_separator="/", auto_mkdir=True, **kwargs -# ) -# return store - -# def test_numbered_groups(self): -# import zarr - -# # Create an array -# store = self.create_store() -# group = zarr.group(store=store) -# arr = group.create_dataset("0", shape=(10, 10)) -# arr[1] = 1 - -# # Read it back -# store = self.create_store(path=store.path) -# zarr.open_group(store.path)["0"] - - -# class TestTempStore(StoreTests): -# def create_store(self, **kwargs): -# skip_if_nested_chunks(**kwargs) -# return TempStore(**kwargs) - -# def test_setdel(self): -# store = self.create_store() -# setdel_hierarchy_checks(store, self.root) - - -# class TestZipStore(StoreTests): - -# ZipStoreClass = ZipStore - -# def create_store(self, **kwargs): -# path = mktemp(suffix=".zip") -# atexit.register(os.remove, path) -# store = ZipStore(path, mode="w", **kwargs) -# return store - -# def test_mode(self): -# with self.ZipStoreClass("data/store.zip", mode="w") as store: -# store[self.root + "foo"] = b"bar" -# store = self.ZipStoreClass("data/store.zip", mode="r") -# with pytest.raises(PermissionError): -# store[self.root + "foo"] = b"bar" -# with pytest.raises(PermissionError): -# store.clear() - -# def test_flush(self): -# store = self.ZipStoreClass("data/store.zip", mode="w") -# store[self.root + "foo"] = b"bar" -# store.flush() -# assert store[self.root + "foo"] == b"bar" -# store.close() - -# store = self.ZipStoreClass("data/store.zip", mode="r") -# store.flush() # no-op - -# def test_context_manager(self): -# with self.create_store() as store: -# store[self.root + "foo"] = b"bar" -# store[self.root + "baz"] = b"qux" -# assert 2 == len(store) - -# def test_pop(self): -# # override because not implemented -# store = self.create_store() -# store[self.root + "foo"] = b"bar" -# with pytest.raises(NotImplementedError): -# store.pop(self.root + "foo") - -# def test_popitem(self): -# # override because not implemented -# store = self.create_store() -# store[self.root + "foo"] = b"bar" -# with pytest.raises(NotImplementedError): -# store.popitem() - -# def test_permissions(self): -# store = self.ZipStoreClass("data/store.zip", mode="w") -# foo_key = "foo" if self.version == 2 else self.root + "foo" -# # TODO: cannot provide key ending in / for v3 -# # how to create an empty folder in that case? 
-# baz_key = "baz/" if self.version == 2 else self.root + "baz" -# store[foo_key] = b"bar" -# store[baz_key] = b"" - -# store.flush() -# store.close() -# z = ZipFile("data/store.zip", "r") -# info = z.getinfo(foo_key) -# perm = oct(info.external_attr >> 16) -# assert perm == "0o644" -# info = z.getinfo(baz_key) -# perm = oct(info.external_attr >> 16) -# # only for posix platforms -# if os.name == "posix": -# if self.version == 2: -# assert perm == "0o40775" -# else: -# # baz/ on v2, but baz on v3, so not a directory -# assert perm == "0o644" -# z.close() - -# def test_store_and_retrieve_ndarray(self): -# store = ZipStore("data/store.zip") -# x = np.array([[1, 2], [3, 4]]) -# store["foo"] = x -# y = np.frombuffer(store["foo"], dtype=x.dtype).reshape(x.shape) -# assert np.array_equiv(y, x) - - -# class TestDBMStore(StoreTests): -# def create_store(self, dimension_separator=None): -# path = mktemp(suffix=".anydbm") -# atexit.register(atexit_rmglob, path + "*") -# # create store using default dbm implementation -# store = DBMStore(path, flag="n", dimension_separator=dimension_separator) -# return store - -# def test_context_manager(self): -# with self.create_store() as store: -# store[self.root + "foo"] = b"bar" -# store[self.root + "baz"] = b"qux" -# assert 2 == len(store) - - -# class TestDBMStoreDumb(TestDBMStore): -# def create_store(self, **kwargs): -# path = mktemp(suffix=".dumbdbm") -# atexit.register(atexit_rmglob, path + "*") - -# import dbm.dumb as dumbdbm - -# store = DBMStore(path, flag="n", open=dumbdbm.open, **kwargs) -# return store - - -# class TestDBMStoreGnu(TestDBMStore): -# def create_store(self, **kwargs): -# gdbm = pytest.importorskip("dbm.gnu") -# path = mktemp(suffix=".gdbm") # pragma: no cover -# atexit.register(os.remove, path) # pragma: no cover -# store = DBMStore( -# path, flag="n", open=gdbm.open, write_lock=False, **kwargs -# ) # pragma: no cover -# return store # pragma: no cover - - -# class TestDBMStoreNDBM(TestDBMStore): -# def create_store(self, **kwargs): -# ndbm = pytest.importorskip("dbm.ndbm") -# path = mktemp(suffix=".ndbm") # pragma: no cover -# atexit.register(atexit_rmglob, path + "*") # pragma: no cover -# store = DBMStore(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover -# return store # pragma: no cover - - -# class TestDBMStoreBerkeleyDB(TestDBMStore): -# def create_store(self, **kwargs): -# bsddb3 = pytest.importorskip("bsddb3") -# path = mktemp(suffix=".dbm") -# atexit.register(os.remove, path) -# store = DBMStore(path, flag="n", open=bsddb3.btopen, write_lock=False, **kwargs) -# return store - - -# class TestLMDBStore(StoreTests): -# def create_store(self, **kwargs): -# pytest.importorskip("lmdb") -# path = mktemp(suffix=".lmdb") -# atexit.register(atexit_rmtree, path) -# buffers = True -# store = LMDBStore(path, buffers=buffers, **kwargs) -# return store - -# def test_context_manager(self): -# with self.create_store() as store: -# store[self.root + "foo"] = b"bar" -# store[self.root + "baz"] = b"qux" -# assert 2 == len(store) - - -# class TestSQLiteStore(StoreTests): -# def create_store(self, **kwargs): -# pytest.importorskip("sqlite3") -# path = mktemp(suffix=".db") -# atexit.register(atexit_rmtree, path) -# store = SQLiteStore(path, **kwargs) -# return store - -# def test_underscore_in_name(self): -# path = mktemp(suffix=".db") -# atexit.register(atexit_rmtree, path) -# store = SQLiteStore(path) -# store["a"] = b"aaa" -# store["a_b"] = b"aa_bb" -# store.rmdir("a") -# assert "a_b" in store - - -# class 
TestSQLiteStoreInMemory(TestSQLiteStore): -# def create_store(self, **kwargs): -# pytest.importorskip("sqlite3") -# store = SQLiteStore(":memory:", **kwargs) -# return store - -# def test_pickle(self): - -# # setup store -# store = self.create_store() -# store[self.root + "foo"] = b"bar" -# store[self.root + "baz"] = b"quux" - -# # round-trip through pickle -# with pytest.raises(PicklingError): -# pickle.dumps(store) - - -# @skip_test_env_var("ZARR_TEST_MONGO") -# class TestMongoDBStore(StoreTests): -# def create_store(self, **kwargs): -# pytest.importorskip("pymongo") -# store = MongoDBStore( -# host="127.0.0.1", database="zarr_tests", collection="zarr_tests", **kwargs -# ) -# # start with an empty store -# store.clear() -# return store - - -# @skip_test_env_var("ZARR_TEST_REDIS") -# class TestRedisStore(StoreTests): -# def create_store(self, **kwargs): -# # TODO: this is the default host for Redis on Travis, -# # we probably want to generalize this though -# pytest.importorskip("redis") -# store = RedisStore(host="localhost", port=6379, **kwargs) -# # start with an empty store -# store.clear() -# return store - - -# class TestLRUStoreCache(StoreTests): - -# CountingClass = CountingDict -# LRUStoreClass = LRUStoreCache - -# def create_store(self, **kwargs): -# # wrapper therefore no dimension_separator argument -# skip_if_nested_chunks(**kwargs) -# return self.LRUStoreClass(dict(), max_size=2**27) - -# def test_cache_values_no_max_size(self): - -# # setup store -# store = self.CountingClass() -# foo_key = self.root + "foo" -# bar_key = self.root + "bar" -# store[foo_key] = b"xxx" -# store[bar_key] = b"yyy" -# assert 0 == store.counter["__getitem__", foo_key] -# assert 1 == store.counter["__setitem__", foo_key] -# assert 0 == store.counter["__getitem__", bar_key] -# assert 1 == store.counter["__setitem__", bar_key] - -# # setup cache -# cache = self.LRUStoreClass(store, max_size=None) -# assert 0 == cache.hits -# assert 0 == cache.misses - -# # test first __getitem__, cache miss -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 1 == store.counter["__setitem__", foo_key] -# assert 0 == cache.hits -# assert 1 == cache.misses - -# # test second __getitem__, cache hit -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 1 == store.counter["__setitem__", foo_key] -# assert 1 == cache.hits -# assert 1 == cache.misses - -# # test __setitem__, __getitem__ -# cache[foo_key] = b"zzz" -# assert 1 == store.counter["__getitem__", foo_key] -# assert 2 == store.counter["__setitem__", foo_key] -# # should be a cache hit -# assert b"zzz" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 2 == store.counter["__setitem__", foo_key] -# assert 2 == cache.hits -# assert 1 == cache.misses - -# # manually invalidate all cached values -# cache.invalidate_values() -# assert b"zzz" == cache[foo_key] -# assert 2 == store.counter["__getitem__", foo_key] -# assert 2 == store.counter["__setitem__", foo_key] -# cache.invalidate() -# assert b"zzz" == cache[foo_key] -# assert 3 == store.counter["__getitem__", foo_key] -# assert 2 == store.counter["__setitem__", foo_key] - -# # test __delitem__ -# del cache[foo_key] -# with pytest.raises(KeyError): -# # noinspection PyStatementEffect -# cache[foo_key] -# with pytest.raises(KeyError): -# # noinspection PyStatementEffect -# store[foo_key] - -# # verify other keys untouched -# assert 0 == store.counter["__getitem__", bar_key] -# assert 1 == 
store.counter["__setitem__", bar_key] - -# def test_cache_values_with_max_size(self): - -# # setup store -# store = self.CountingClass() -# foo_key = self.root + "foo" -# bar_key = self.root + "bar" -# store[foo_key] = b"xxx" -# store[bar_key] = b"yyy" -# assert 0 == store.counter["__getitem__", foo_key] -# assert 0 == store.counter["__getitem__", bar_key] -# # setup cache - can only hold one item -# cache = self.LRUStoreClass(store, max_size=5) -# assert 0 == cache.hits -# assert 0 == cache.misses - -# # test first 'foo' __getitem__, cache miss -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 0 == cache.hits -# assert 1 == cache.misses - -# # test second 'foo' __getitem__, cache hit -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 1 == cache.hits -# assert 1 == cache.misses - -# # test first 'bar' __getitem__, cache miss -# assert b"yyy" == cache[bar_key] -# assert 1 == store.counter["__getitem__", bar_key] -# assert 1 == cache.hits -# assert 2 == cache.misses - -# # test second 'bar' __getitem__, cache hit -# assert b"yyy" == cache[bar_key] -# assert 1 == store.counter["__getitem__", bar_key] -# assert 2 == cache.hits -# assert 2 == cache.misses - -# # test 'foo' __getitem__, should have been evicted, cache miss -# assert b"xxx" == cache[foo_key] -# assert 2 == store.counter["__getitem__", foo_key] -# assert 2 == cache.hits -# assert 3 == cache.misses - -# # test 'bar' __getitem__, should have been evicted, cache miss -# assert b"yyy" == cache[bar_key] -# assert 2 == store.counter["__getitem__", bar_key] -# assert 2 == cache.hits -# assert 4 == cache.misses - -# # setup store -# store = self.CountingClass() -# store[foo_key] = b"xxx" -# store[bar_key] = b"yyy" -# assert 0 == store.counter["__getitem__", foo_key] -# assert 0 == store.counter["__getitem__", bar_key] -# # setup cache - can hold two items -# cache = self.LRUStoreClass(store, max_size=6) -# assert 0 == cache.hits -# assert 0 == cache.misses - -# # test first 'foo' __getitem__, cache miss -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 0 == cache.hits -# assert 1 == cache.misses - -# # test second 'foo' __getitem__, cache hit -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 1 == cache.hits -# assert 1 == cache.misses - -# # test first 'bar' __getitem__, cache miss -# assert b"yyy" == cache[bar_key] -# assert 1 == store.counter["__getitem__", bar_key] -# assert 1 == cache.hits -# assert 2 == cache.misses - -# # test second 'bar' __getitem__, cache hit -# assert b"yyy" == cache[bar_key] -# assert 1 == store.counter["__getitem__", bar_key] -# assert 2 == cache.hits -# assert 2 == cache.misses - -# # test 'foo' __getitem__, should still be cached -# assert b"xxx" == cache[foo_key] -# assert 1 == store.counter["__getitem__", foo_key] -# assert 3 == cache.hits -# assert 2 == cache.misses - -# # test 'bar' __getitem__, should still be cached -# assert b"yyy" == cache[bar_key] -# assert 1 == store.counter["__getitem__", bar_key] -# assert 4 == cache.hits -# assert 2 == cache.misses - -# def test_cache_keys(self): - -# # setup -# store = self.CountingClass() -# foo_key = self.root + "foo" -# bar_key = self.root + "bar" -# baz_key = self.root + "baz" -# store[foo_key] = b"xxx" -# store[bar_key] = b"yyy" -# assert 0 == store.counter["__contains__", foo_key] -# assert 0 == store.counter["__iter__"] -# assert 0 == store.counter["keys"] -# cache = 
self.LRUStoreClass(store, max_size=None) - -# # keys should be cached on first call -# keys = sorted(cache.keys()) -# assert keys == [bar_key, foo_key] -# assert 1 == store.counter["keys"] -# # keys should now be cached -# assert keys == sorted(cache.keys()) -# assert 1 == store.counter["keys"] -# assert foo_key in cache -# assert 1 == store.counter["__contains__", foo_key] -# # the next check for `foo_key` is cached -# assert foo_key in cache -# assert 1 == store.counter["__contains__", foo_key] -# assert keys == sorted(cache) -# assert 0 == store.counter["__iter__"] -# assert 1 == store.counter["keys"] - -# # cache should be cleared if store is modified - crude but simple for now -# cache[baz_key] = b"zzz" -# keys = sorted(cache.keys()) -# assert keys == [bar_key, baz_key, foo_key] -# assert 2 == store.counter["keys"] -# # keys should now be cached -# assert keys == sorted(cache.keys()) -# assert 2 == store.counter["keys"] - -# # manually invalidate keys -# cache.invalidate_keys() -# keys = sorted(cache.keys()) -# assert keys == [bar_key, baz_key, foo_key] -# assert 3 == store.counter["keys"] -# assert 1 == store.counter["__contains__", foo_key] -# assert 0 == store.counter["__iter__"] -# cache.invalidate_keys() -# keys = sorted(cache) -# assert keys == [bar_key, baz_key, foo_key] -# assert 4 == store.counter["keys"] -# assert 1 == store.counter["__contains__", foo_key] -# assert 0 == store.counter["__iter__"] -# cache.invalidate_keys() -# assert foo_key in cache -# assert 4 == store.counter["keys"] -# assert 2 == store.counter["__contains__", foo_key] -# assert 0 == store.counter["__iter__"] - -# # check these would get counted if called directly -# assert foo_key in store -# assert 3 == store.counter["__contains__", foo_key] -# assert keys == sorted(store) -# assert 1 == store.counter["__iter__"] - - -# def test_getsize(): -# store = KVStore(dict()) -# store["foo"] = b"aaa" -# store["bar"] = b"bbbb" -# store["baz/quux"] = b"ccccc" -# assert 7 == getsize(store) -# assert 5 == getsize(store, "baz") - -# store = KVStore(dict()) -# store["boo"] = None -# assert -1 == getsize(store) - - -# @pytest.mark.parametrize("dict_store", [False, True]) -# def test_migrate_1to2(dict_store): -# from zarr import meta_v1 - -# # N.B., version 1 did not support hierarchies, so we only have to be -# # concerned about migrating a single array at the root of the store - -# # setup -# store = dict() if dict_store else KVStore(dict()) -# meta = dict( -# shape=(100,), -# chunks=(10,), -# dtype=np.dtype("f4"), -# compression="zlib", -# compression_opts=1, -# fill_value=None, -# order="C", -# ) -# meta_json = meta_v1.encode_metadata(meta) -# store["meta"] = meta_json -# store["attrs"] = json.dumps(dict()).encode("ascii") - -# # run migration -# migrate_1to2(store) - -# # check results -# assert "meta" not in store -# assert array_meta_key in store -# assert "attrs" not in store -# assert attrs_key in store -# meta_migrated = decode_array_metadata(store[array_meta_key]) -# assert 2 == meta_migrated["zarr_format"] - -# # preserved fields -# for f in "shape", "chunks", "dtype", "fill_value", "order": -# assert meta[f] == meta_migrated[f] - -# # migrate should have added empty filters field -# assert meta_migrated["filters"] is None - -# # check compression and compression_opts migrated to compressor -# assert "compression" not in meta_migrated -# assert "compression_opts" not in meta_migrated -# assert meta_migrated["compressor"] == Zlib(1).get_config() - -# # check dict compression_opts -# store = dict() if 
dict_store else KVStore(dict()) -# meta["compression"] = "blosc" -# meta["compression_opts"] = dict(cname="lz4", clevel=5, shuffle=1) -# meta_json = meta_v1.encode_metadata(meta) -# store["meta"] = meta_json -# store["attrs"] = json.dumps(dict()).encode("ascii") -# migrate_1to2(store) -# meta_migrated = decode_array_metadata(store[array_meta_key]) -# assert "compression" not in meta_migrated -# assert "compression_opts" not in meta_migrated -# assert meta_migrated["compressor"] == Blosc(cname="lz4", clevel=5, shuffle=1).get_config() - -# # check 'none' compression is migrated to None (null in JSON) -# store = dict() if dict_store else KVStore(dict()) -# meta["compression"] = "none" -# meta_json = meta_v1.encode_metadata(meta) -# store["meta"] = meta_json -# store["attrs"] = json.dumps(dict()).encode("ascii") -# migrate_1to2(store) -# meta_migrated = decode_array_metadata(store[array_meta_key]) -# assert "compression" not in meta_migrated -# assert "compression_opts" not in meta_migrated -# assert meta_migrated["compressor"] is None - - -# def test_format_compatibility(): - -# # This test is intended to catch any unintended changes that break the ability to -# # read data stored with a previous minor version (which should be format-compatible). - -# # fixture data -# fixture = group(store=DirectoryStore("fixture")) - -# # set seed to get consistent random data -# np.random.seed(42) - -# arrays_chunks = [ -# (np.arange(1111, dtype=" 2 else "" -# # setup some values -# store[prefix + "a"] = b"aaa" -# store[prefix + "b"] = b"bbb" -# store[prefix + "c/d"] = b"ddd" -# store[prefix + "c/e/f"] = b"fff" - -# # test iterators on store with data -# assert 4 == len(store) -# keys = [prefix + "a", prefix + "b", prefix + "c/d", prefix + "c/e/f"] -# values = [b"aaa", b"bbb", b"ddd", b"fff"] -# items = list(zip(keys, values)) -# assert set(keys) == set(store) -# assert set(keys) == set(store.keys()) -# assert set(values) == set(store.values()) -# assert set(items) == set(store.items()) -# def test_getsize(self): -# return super().test_getsize() +class TestNestedDirectoryStoreNone: + def test_value_error(self): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = NestedDirectoryStore(path, normalize_keys=True, dimension_separator=None) + assert store._dimension_separator == "/" -# def test_hierarchy(self): -# return super().test_hierarchy() -# @pytest.mark.skipif(sys.version_info < (3, 7), reason="attr not serializable in py36") -# def test_pickle(self): -# # internal attribute on ContainerClient isn't serializable for py36 and earlier -# super().test_pickle() +class TestNestedDirectoryStoreWithWrongValue: + def test_value_error(self): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + with pytest.raises(ValueError): + NestedDirectoryStore(path, normalize_keys=True, dimension_separator=".") -# class TestConsolidatedMetadataStore: +class TestN5Store(TestNestedDirectoryStore): + def create_store(self, normalize_keys=False): + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = N5Store(path, normalize_keys=normalize_keys) + return store -# version = 2 -# ConsolidatedMetadataClass = ConsolidatedMetadataStore + def test_equal(self): + store_a = self.create_store() + store_b = N5Store(store_a.path) + assert store_a == store_b -# @property -# def metadata_key(self): -# return ".zmetadata" + @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) + def test_del_zarr_meta_key(self, zarr_meta_key): + store = self.create_store() + 
store[n5_attrs_key] = json_dumps({"foo": "bar"}) + del store[zarr_meta_key] + assert n5_attrs_key not in store -# def test_bad_format(self): + def test_chunk_nesting(self): + store = self.create_store() + store["0.0"] = b"xxx" + assert "0.0" in store + assert b"xxx" == store["0.0"] + # assert b'xxx' == store['0/0'] + store["foo/10.20.30"] = b"yyy" + assert "foo/10.20.30" in store + assert b"yyy" == store["foo/10.20.30"] + # N5 reverses axis order + assert b"yyy" == store["foo/30/20/10"] + del store["foo/10.20.30"] + assert "foo/30/20/10" not in store + store["42"] = b"zzz" + assert "42" in store + assert b"zzz" == store["42"] + + def test_init_array(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100) + + # check metadata + assert array_meta_key in store + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + # N5Store wraps the actual compressor + compressor_config = meta["compressor"]["compressor_config"] + assert default_compressor.get_config() == compressor_config + # N5Store always has a fill value of 0 + assert meta["fill_value"] == 0 + assert meta["dimension_separator"] == "." + # Top-level groups AND arrays should have + # the n5 keyword in metadata + raw_n5_meta = json.loads(store[n5_attrs_key]) + assert raw_n5_meta.get("n5", None) == N5_FORMAT + + def test_init_array_path(self): + path = "foo/bar" + store = self.create_store() + init_array(store, shape=1000, chunks=100, path=path) + + # check metadata + key = path + "/" + array_meta_key + assert key in store + meta = store._metadata_class.decode_array_metadata(store[key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + # N5Store wraps the actual compressor + compressor_config = meta["compressor"]["compressor_config"] + assert default_compressor.get_config() == compressor_config + # N5Store always has a fill value of 0 + assert meta["fill_value"] == 0 + + def test_init_array_compat(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100, compressor="none") + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + # N5Store wraps the actual compressor + compressor_config = meta["compressor"]["compressor_config"] + assert compressor_config is None -# # setup store with consolidated metadata -# store = dict() -# consolidated = { -# # bad format version -# "zarr_consolidated_format": 0, -# } -# store[self.metadata_key] = json.dumps(consolidated).encode() + def test_init_array_overwrite(self): + self._test_init_array_overwrite("C") -# # check appropriate error is raised -# with pytest.raises(MetadataError): -# self.ConsolidatedMetadataClass(store) + def test_init_array_overwrite_path(self): + self._test_init_array_overwrite_path("C") -# def test_bad_store_version(self): -# with pytest.raises(ValueError): -# self.ConsolidatedMetadataClass(KVStoreV3(dict())) + def test_init_array_overwrite_chunk_store(self): + self._test_init_array_overwrite_chunk_store("C") -# def test_read_write(self): + def test_init_group_overwrite(self): + self._test_init_group_overwrite("C") -# # setup store with consolidated metadata -# store = dict() -# consolidated = { -# "zarr_consolidated_format": 1, -# "metadata": { -# "foo": "bar", -# "baz": 42, -# }, -# } -# store[self.metadata_key] = 
json.dumps(consolidated).encode() + def test_init_group_overwrite_path(self): + self._test_init_group_overwrite_path("C") -# # create consolidated store -# cs = self.ConsolidatedMetadataClass(store) + def test_init_group_overwrite_chunk_store(self): + self._test_init_group_overwrite_chunk_store("C") -# # test __contains__, __getitem__ -# for key, value in consolidated["metadata"].items(): -# assert key in cs -# assert value == cs[key] + def test_init_group(self): + store = self.create_store() + init_group(store) + store[".zattrs"] = json_dumps({"foo": "bar"}) + # check metadata + assert group_meta_key in store + assert group_meta_key in store.listdir() + assert group_meta_key in store.listdir("") + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + + def test_filters(self): + all_filters, all_errors = zip( + *[ + (None, does_not_raise()), + ([], does_not_raise()), + ([AsType("f4", "f8")], pytest.raises(ValueError)), + ] + ) + for filters, error in zip(all_filters, all_errors): + store = self.create_store() + with error: + init_array(store, shape=1000, chunks=100, filters=filters) -# # test __delitem__, __setitem__ -# with pytest.raises(PermissionError): -# del cs["foo"] -# with pytest.raises(PermissionError): -# cs["bar"] = 0 -# with pytest.raises(PermissionError): -# cs["spam"] = "eggs" +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestN5FSStore(TestFSStore): + def create_store(self, normalize_keys=False, path=None, **kwargs): + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) -# # standalone test we do not want to run on each store. + store = N5FSStore(path, normalize_keys=normalize_keys, **kwargs) + return store + def test_equal(self): + store_a = self.create_store() + store_b = N5FSStore(store_a.path) + assert store_a == store_b -# def test_fill_value_change(): -# a = zarr.create((10, 10), dtype=int) + # This is copied wholesale from the N5Store tests. The same test could + # be run by making TestN5FSStore inherit from both TestFSStore and + # TestN5Store, but a direct copy is arguably more explicit. 
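+ # (Illustrative note, not itself asserted here: the N5 key transposition
+ # exercised by test_chunk_nesting below means a zarr chunk key such as
+ # "foo/10.20.30" appears in the store with the axis order reversed,
+ # i.e. as "foo/30/20/10".)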
-# assert a[0, 0] == 0 + @pytest.mark.parametrize("zarr_meta_key", [".zarray", ".zattrs", ".zgroup"]) + def test_del_zarr_meta_key(self, zarr_meta_key): + store = self.create_store() + store[n5_attrs_key] = json_dumps({"foo": "bar"}) + del store[zarr_meta_key] + assert n5_attrs_key not in store -# a.fill_value = 1 + def test_chunk_nesting(self): + store = self.create_store() + store["0.0"] = b"xxx" + assert "0.0" in store + assert b"xxx" == store["0.0"] + # assert b'xxx' == store['0/0'] + store["foo/10.20.30"] = b"yyy" + assert "foo/10.20.30" in store + assert b"yyy" == store["foo/10.20.30"] + # N5 reverses axis order + assert b"yyy" == store["foo/30/20/10"] + del store["foo/10.20.30"] + assert "foo/30/20/10" not in store + store["42"] = b"zzz" + assert "42" in store + assert b"zzz" == store["42"] + + def test_init_array(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100) + + # check metadata + assert array_meta_key in store + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + # N5Store wraps the actual compressor + compressor_config = meta["compressor"]["compressor_config"] + assert default_compressor.get_config() == compressor_config + # N5Store always has a fill value of 0 + assert meta["fill_value"] == 0 + assert meta["dimension_separator"] == "." + # Top-level groups AND arrays should have + # the n5 keyword in metadata + raw_n5_meta = json.loads(store[n5_attrs_key]) + assert raw_n5_meta.get("n5", None) == N5_FORMAT + + def test_init_array_path(self): + path = "foo/bar" + store = self.create_store() + init_array(store, shape=1000, chunks=100, path=path) + + # check metadata + key = path + "/" + array_meta_key + assert key in store + meta = store._metadata_class.decode_array_metadata(store[key]) + assert ZARR_FORMAT == meta["zarr_format"] + assert (1000,) == meta["shape"] + assert (100,) == meta["chunks"] + assert np.dtype(None) == meta["dtype"] + # N5Store wraps the actual compressor + compressor_config = meta["compressor"]["compressor_config"] + assert default_compressor.get_config() == compressor_config + # N5Store always has a fill value of 0 + assert meta["fill_value"] == 0 + + def test_init_array_compat(self): + store = self.create_store() + init_array(store, shape=1000, chunks=100, compressor="none") + meta = store._metadata_class.decode_array_metadata(store[array_meta_key]) + # N5Store wraps the actual compressor + compressor_config = meta["compressor"]["compressor_config"] + assert compressor_config is None -# assert a[0, 0] == 1 + def test_init_array_overwrite(self): + self._test_init_array_overwrite("C") -# assert json.loads(a.store[".zarray"])["fill_value"] == 1 + def test_init_array_overwrite_path(self): + self._test_init_array_overwrite_path("C") + def test_init_array_overwrite_chunk_store(self): + self._test_init_array_overwrite_chunk_store("C") -# def test_get_hierarchy_metadata_v2(): -# # v2 stores do not have hierarchy metadata (i.e. 
zarr.json) -# with pytest.raises(ValueError): -# _get_hierarchy_metadata(KVStore(dict)) + def test_init_group_overwrite(self): + self._test_init_group_overwrite("C") + def test_init_group_overwrite_path(self): + self._test_init_group_overwrite_path("C") -# def test_normalize_store_arg(tmpdir): -# with pytest.raises(ValueError): -# normalize_store_arg(dict(), zarr_version=4) + def test_init_group_overwrite_chunk_store(self): + self._test_init_group_overwrite_chunk_store("C") -# for ext, Class in [(".zip", ZipStore), (".n5", N5Store)]: -# fn = tmpdir.join("store" + ext) -# store = normalize_store_arg(str(fn), zarr_version=2, mode="w") -# assert isinstance(store, Class) + def test_dimension_separator(self): + with pytest.warns(UserWarning, match="dimension_separator"): + self.create_store(dimension_separator="/") -# if have_fsspec: -# import fsspec + def test_init_group(self): + store = self.create_store() + init_group(store) + store[".zattrs"] = json_dumps({"foo": "bar"}) + # check metadata + assert group_meta_key in store + assert group_meta_key in store.listdir() + assert group_meta_key in store.listdir("") + meta = store._metadata_class.decode_group_metadata(store[group_meta_key]) + assert ZARR_FORMAT == meta["zarr_format"] + + def test_filters(self): + all_filters, all_errors = zip( + *[ + (None, does_not_raise()), + ([], does_not_raise()), + ([AsType("f4", "f8")], pytest.raises(ValueError)), + ] + ) + for filters, error in zip(all_filters, all_errors): + store = self.create_store() + with error: + init_array(store, shape=1000, chunks=100, filters=filters) -# path = tempfile.mkdtemp() -# store = normalize_store_arg("file://" + path, zarr_version=2, mode="w") -# assert isinstance(store, FSStore) -# store = normalize_store_arg(fsspec.get_mapper("file://" + path)) -# assert isinstance(store, FSStore) +@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +class TestNestedFSStore(TestNestedDirectoryStore): + def create_store(self, normalize_keys=False, path=None, **kwargs): + if path is None: + path = tempfile.mkdtemp() + atexit.register(atexit_rmtree, path) + store = FSStore( + path, normalize_keys=normalize_keys, dimension_separator="/", auto_mkdir=True, **kwargs + ) + return store + def test_numbered_groups(self): + # Create an array + store = self.create_store() + group = zarr.v2.group(store=store) + arr = group.create_dataset("0", shape=(10, 10)) + arr[1] = 1 -# def test_meta_prefix_6853(): + # Read it back + store = self.create_store(path=store.path) + zarr.v2.open_group(store.path)["0"] -# fixture = pathlib.Path(zarr.__file__).resolve().parent.parent / "fixture" -# meta = fixture / "meta" -# if not meta.exists(): # pragma: no cover -# s = DirectoryStore(str(meta), dimension_separator=".") -# a = zarr.open(store=s, mode="w", shape=(2, 2), dtype="> 16) + assert perm == "0o644" + info = z.getinfo(baz_key) + perm = oct(info.external_attr >> 16) + # only for posix platforms + if os.name == "posix": + if self.version == 2: + assert perm == "0o40775" + else: + # baz/ on v2, but baz on v3, so not a directory + assert perm == "0o644" + z.close() + + def test_store_and_retrieve_ndarray(self): + store = ZipStore("data/store.zip") + x = np.array([[1, 2], [3, 4]]) + store["foo"] = x + y = np.frombuffer(store["foo"], dtype=x.dtype).reshape(x.shape) + assert np.array_equiv(y, x) + + +class TestDBMStore(StoreTests): + def create_store(self, dimension_separator=None): + path = mktemp(suffix=".anydbm") + atexit.register(atexit_rmglob, path + "*") + # create store using default dbm 
implementation + store = DBMStore(path, flag="n", dimension_separator=dimension_separator) + return store -# store = MyStore() -# z = zarr.create(shape=(10,), chunks=1, store=store) + def test_context_manager(self): + with self.create_store() as store: + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"qux" + assert 2 == len(store) -# # By default, not contexts are given to the store's getitems() -# z[0] -# assert len(store.last_contexts) == 0 -# # Setting a non-default meta_array, will create contexts for the store's getitems() -# z._meta_array = "my_meta_array" -# z[0] -# assert store.last_contexts == {"0": {"meta_array": "my_meta_array"}} -# assert isinstance(store.last_contexts, ConstantMap) -# # Accseeing different chunks should trigger different key request -# z[1] -# assert store.last_contexts == {"1": {"meta_array": "my_meta_array"}} -# assert isinstance(store.last_contexts, ConstantMap) -# z[2:4] -# expected = ConstantMap(["2", "3"], Context({"meta_array": "my_meta_array"})) -# assert store.last_contexts == expected -# assert isinstance(store.last_contexts, ConstantMap) +class TestDBMStoreDumb(TestDBMStore): + def create_store(self, **kwargs): + path = mktemp(suffix=".dumbdbm") + atexit.register(atexit_rmglob, path + "*") + + import dbm.dumb as dumbdbm + + store = DBMStore(path, flag="n", open=dumbdbm.open, **kwargs) + return store + + +class TestDBMStoreGnu(TestDBMStore): + def create_store(self, **kwargs): + gdbm = pytest.importorskip("dbm.gnu") + path = mktemp(suffix=".gdbm") # pragma: no cover + atexit.register(os.remove, path) # pragma: no cover + store = DBMStore( + path, flag="n", open=gdbm.open, write_lock=False, **kwargs + ) # pragma: no cover + return store # pragma: no cover + + +class TestDBMStoreNDBM(TestDBMStore): + def create_store(self, **kwargs): + ndbm = pytest.importorskip("dbm.ndbm") + path = mktemp(suffix=".ndbm") # pragma: no cover + atexit.register(atexit_rmglob, path + "*") # pragma: no cover + store = DBMStore(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover + return store # pragma: no cover + + +class TestDBMStoreBerkeleyDB(TestDBMStore): + def create_store(self, **kwargs): + bsddb3 = pytest.importorskip("bsddb3") + path = mktemp(suffix=".dbm") + atexit.register(os.remove, path) + store = DBMStore(path, flag="n", open=bsddb3.btopen, write_lock=False, **kwargs) + return store + + +class TestLMDBStore(StoreTests): + def create_store(self, **kwargs): + pytest.importorskip("lmdb") + path = mktemp(suffix=".lmdb") + atexit.register(atexit_rmtree, path) + buffers = True + store = LMDBStore(path, buffers=buffers, **kwargs) + return store + + def test_context_manager(self): + with self.create_store() as store: + store[self.root + "foo"] = b"bar" + store[self.root + "baz"] = b"qux" + assert 2 == len(store) + + +class TestSQLiteStore(StoreTests): + def create_store(self, **kwargs): + pytest.importorskip("sqlite3") + path = mktemp(suffix=".db") + atexit.register(atexit_rmtree, path) + store = SQLiteStore(path, **kwargs) + return store + + def test_underscore_in_name(self): + path = mktemp(suffix=".db") + atexit.register(atexit_rmtree, path) + store = SQLiteStore(path) + store["a"] = b"aaa" + store["a_b"] = b"aa_bb" + store.rmdir("a") + assert "a_b" in store + + +class TestSQLiteStoreInMemory(TestSQLiteStore): + def create_store(self, **kwargs): + pytest.importorskip("sqlite3") + store = SQLiteStore(":memory:", **kwargs) + return store + + def test_pickle(self): + # setup store + store = self.create_store() + store[self.root + "foo"] = 
b"bar" + store[self.root + "baz"] = b"quux" + + # round-trip through pickle + with pytest.raises(PicklingError): + pickle.dumps(store) + + +@skip_test_env_var("ZARR_TEST_MONGO") +class TestMongoDBStore(StoreTests): + def create_store(self, **kwargs): + pytest.importorskip("pymongo") + store = MongoDBStore( + host="127.0.0.1", database="zarr_tests", collection="zarr_tests", **kwargs + ) + # start with an empty store + store.clear() + return store + + +@skip_test_env_var("ZARR_TEST_REDIS") +class TestRedisStore(StoreTests): + def create_store(self, **kwargs): + # TODO: this is the default host for Redis on Travis, + # we probably want to generalize this though + pytest.importorskip("redis") + store = RedisStore(host="localhost", port=6379, **kwargs) + # start with an empty store + store.clear() + return store + + +class TestLRUStoreCache(StoreTests): + CountingClass = CountingDict + LRUStoreClass = LRUStoreCache + + def create_store(self, **kwargs): + # wrapper therefore no dimension_separator argument + skip_if_nested_chunks(**kwargs) + return self.LRUStoreClass(dict(), max_size=2**27) + + def test_cache_values_no_max_size(self): + # setup store + store = self.CountingClass() + foo_key = self.root + "foo" + bar_key = self.root + "bar" + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__getitem__", foo_key] + assert 1 == store.counter["__setitem__", foo_key] + assert 0 == store.counter["__getitem__", bar_key] + assert 1 == store.counter["__setitem__", bar_key] + + # setup cache + cache = self.LRUStoreClass(store, max_size=None) + assert 0 == cache.hits + assert 0 == cache.misses + + # test first __getitem__, cache miss + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 1 == store.counter["__setitem__", foo_key] + assert 0 == cache.hits + assert 1 == cache.misses + + # test second __getitem__, cache hit + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 1 == store.counter["__setitem__", foo_key] + assert 1 == cache.hits + assert 1 == cache.misses + + # test __setitem__, __getitem__ + cache[foo_key] = b"zzz" + assert 1 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] + # should be a cache hit + assert b"zzz" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] + assert 2 == cache.hits + assert 1 == cache.misses + + # manually invalidate all cached values + cache.invalidate_values() + assert b"zzz" == cache[foo_key] + assert 2 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] + cache.invalidate() + assert b"zzz" == cache[foo_key] + assert 3 == store.counter["__getitem__", foo_key] + assert 2 == store.counter["__setitem__", foo_key] + + # test __delitem__ + del cache[foo_key] + with pytest.raises(KeyError): + # noinspection PyStatementEffect + cache[foo_key] + with pytest.raises(KeyError): + # noinspection PyStatementEffect + store[foo_key] + + # verify other keys untouched + assert 0 == store.counter["__getitem__", bar_key] + assert 1 == store.counter["__setitem__", bar_key] + + def test_cache_values_with_max_size(self): + # setup store + store = self.CountingClass() + foo_key = self.root + "foo" + bar_key = self.root + "bar" + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__getitem__", foo_key] + assert 0 == store.counter["__getitem__", bar_key] + # setup cache - can only hold one item + 
cache = self.LRUStoreClass(store, max_size=5) + assert 0 == cache.hits + assert 0 == cache.misses + + # test first 'foo' __getitem__, cache miss + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 0 == cache.hits + assert 1 == cache.misses + + # test second 'foo' __getitem__, cache hit + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 1 == cache.hits + assert 1 == cache.misses + + # test first 'bar' __getitem__, cache miss + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] + assert 1 == cache.hits + assert 2 == cache.misses + + # test second 'bar' __getitem__, cache hit + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] + assert 2 == cache.hits + assert 2 == cache.misses + + # test 'foo' __getitem__, should have been evicted, cache miss + assert b"xxx" == cache[foo_key] + assert 2 == store.counter["__getitem__", foo_key] + assert 2 == cache.hits + assert 3 == cache.misses + + # test 'bar' __getitem__, should have been evicted, cache miss + assert b"yyy" == cache[bar_key] + assert 2 == store.counter["__getitem__", bar_key] + assert 2 == cache.hits + assert 4 == cache.misses + + # setup store + store = self.CountingClass() + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__getitem__", foo_key] + assert 0 == store.counter["__getitem__", bar_key] + # setup cache - can hold two items + cache = self.LRUStoreClass(store, max_size=6) + assert 0 == cache.hits + assert 0 == cache.misses + + # test first 'foo' __getitem__, cache miss + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 0 == cache.hits + assert 1 == cache.misses + + # test second 'foo' __getitem__, cache hit + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 1 == cache.hits + assert 1 == cache.misses + + # test first 'bar' __getitem__, cache miss + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] + assert 1 == cache.hits + assert 2 == cache.misses + + # test second 'bar' __getitem__, cache hit + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] + assert 2 == cache.hits + assert 2 == cache.misses + + # test 'foo' __getitem__, should still be cached + assert b"xxx" == cache[foo_key] + assert 1 == store.counter["__getitem__", foo_key] + assert 3 == cache.hits + assert 2 == cache.misses + + # test 'bar' __getitem__, should still be cached + assert b"yyy" == cache[bar_key] + assert 1 == store.counter["__getitem__", bar_key] + assert 4 == cache.hits + assert 2 == cache.misses + + def test_cache_keys(self): + # setup + store = self.CountingClass() + foo_key = self.root + "foo" + bar_key = self.root + "bar" + baz_key = self.root + "baz" + store[foo_key] = b"xxx" + store[bar_key] = b"yyy" + assert 0 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] + assert 0 == store.counter["keys"] + cache = self.LRUStoreClass(store, max_size=None) + + # keys should be cached on first call + keys = sorted(cache.keys()) + assert keys == [bar_key, foo_key] + assert 1 == store.counter["keys"] + # keys should now be cached + assert keys == sorted(cache.keys()) + assert 1 == store.counter["keys"] + assert foo_key in cache + assert 1 == store.counter["__contains__", foo_key] + # the next check for `foo_key` is cached + assert foo_key in cache + assert 1 == store.counter["__contains__", foo_key] 
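The hit/miss accounting asserted throughout these cache tests follows one contract: the first read of a key misses and falls through to the wrapped store, repeated reads are served from memory, and writes or explicit invalidation evict entries (eagerly once max_size would be exceeded). A minimal sketch of that contract, using only names exercised in these tests (LRUStoreCache, CountingDict, hits, misses, invalidate); treat it as an illustration, not a verbatim excerpt from the suite:

    store = CountingDict()                        # any MutableMapping-style backing store
    cache = LRUStoreCache(store, max_size=2**20)
    store["foo"] = b"xxx"                         # write straight to the backing store
    assert cache["foo"] == b"xxx"                 # first read: miss, fetched from store
    assert cache["foo"] == b"xxx"                 # second read: served from the cache
    assert (cache.hits, cache.misses) == (1, 1)
    cache.invalidate()                            # drop all cached keys and values
    assert cache["foo"] == b"xxx"                 # falls through to the store again
    assert cache.misses == 2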
+ assert keys == sorted(cache) + assert 0 == store.counter["__iter__"] + assert 1 == store.counter["keys"] + + # cache should be cleared if store is modified - crude but simple for now + cache[baz_key] = b"zzz" + keys = sorted(cache.keys()) + assert keys == [bar_key, baz_key, foo_key] + assert 2 == store.counter["keys"] + # keys should now be cached + assert keys == sorted(cache.keys()) + assert 2 == store.counter["keys"] + + # manually invalidate keys + cache.invalidate_keys() + keys = sorted(cache.keys()) + assert keys == [bar_key, baz_key, foo_key] + assert 3 == store.counter["keys"] + assert 1 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] + cache.invalidate_keys() + keys = sorted(cache) + assert keys == [bar_key, baz_key, foo_key] + assert 4 == store.counter["keys"] + assert 1 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] + cache.invalidate_keys() + assert foo_key in cache + assert 4 == store.counter["keys"] + assert 2 == store.counter["__contains__", foo_key] + assert 0 == store.counter["__iter__"] + + # check these would get counted if called directly + assert foo_key in store + assert 3 == store.counter["__contains__", foo_key] + assert keys == sorted(store) + assert 1 == store.counter["__iter__"] + + +def test_getsize(): + store = KVStore(dict()) + store["foo"] = b"aaa" + store["bar"] = b"bbbb" + store["baz/quux"] = b"ccccc" + assert 7 == getsize(store) + assert 5 == getsize(store, "baz") + + store = KVStore(dict()) + store["boo"] = None + assert -1 == getsize(store) + + +@pytest.mark.parametrize("dict_store", [False, True]) +def test_migrate_1to2(dict_store): + # N.B., version 1 did not support hierarchies, so we only have to be + # concerned about migrating a single array at the root of the store + + # setup + store = dict() if dict_store else KVStore(dict()) + meta = dict( + shape=(100,), + chunks=(10,), + dtype=np.dtype("f4"), + compression="zlib", + compression_opts=1, + fill_value=None, + order="C", + ) + meta_json = meta_v1.encode_metadata(meta) + store["meta"] = meta_json + store["attrs"] = json.dumps(dict()).encode("ascii") + + # run migration + migrate_1to2(store) + + # check results + assert "meta" not in store + assert array_meta_key in store + assert "attrs" not in store + assert attrs_key in store + meta_migrated = decode_array_metadata(store[array_meta_key]) + assert 2 == meta_migrated["zarr_format"] + + # preserved fields + for f in "shape", "chunks", "dtype", "fill_value", "order": + assert meta[f] == meta_migrated[f] + + # migrate should have added empty filters field + assert meta_migrated["filters"] is None + + # check compression and compression_opts migrated to compressor + assert "compression" not in meta_migrated + assert "compression_opts" not in meta_migrated + assert meta_migrated["compressor"] == Zlib(1).get_config() + + # check dict compression_opts + store = dict() if dict_store else KVStore(dict()) + meta["compression"] = "blosc" + meta["compression_opts"] = dict(cname="lz4", clevel=5, shuffle=1) + meta_json = meta_v1.encode_metadata(meta) + store["meta"] = meta_json + store["attrs"] = json.dumps(dict()).encode("ascii") + migrate_1to2(store) + meta_migrated = decode_array_metadata(store[array_meta_key]) + assert "compression" not in meta_migrated + assert "compression_opts" not in meta_migrated + assert meta_migrated["compressor"] == Blosc(cname="lz4", clevel=5, shuffle=1).get_config() + + # check 'none' compression is migrated to None (null in JSON) + store = dict() if dict_store 
else KVStore(dict()) + meta["compression"] = "none" + meta_json = meta_v1.encode_metadata(meta) + store["meta"] = meta_json + store["attrs"] = json.dumps(dict()).encode("ascii") + migrate_1to2(store) + meta_migrated = decode_array_metadata(store[array_meta_key]) + assert "compression" not in meta_migrated + assert "compression_opts" not in meta_migrated + assert meta_migrated["compressor"] is None + + +def test_format_compatibility(): + # This test is intended to catch any unintended changes that break the ability to + # read data stored with a previous minor version (which should be format-compatible). + + # fixture data + fixture = group(store=DirectoryStore("fixture")) + + # set seed to get consistent random data + np.random.seed(42) + + arrays_chunks = [ + (np.arange(1111, dtype=" LocalStore: + return self.store_cls(str(tmpdir)) From 056657ca5ed70aa3d77a9e2db42253fca39800b0 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 7 May 2024 14:01:44 -0700 Subject: [PATCH 0517/1078] Release notes for 2.18.0 (#1843) * doc: cleanup release notes for 2.18.0 --- docs/release.rst | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index e2bc40bf99..ba26549402 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,20 +18,16 @@ Release notes See `GH1777 `_ for more details on the upcoming 3.0 release. -.. _unreleased: +.. _release_2.18.0: -Unreleased ----------- +2.18.0 +------ Enhancements ~~~~~~~~~~~~ * Performance improvement for reading and writing chunks if any of the dimensions is size 1. By :user:`Deepak Cherian ` :issue:`1730`. -Docs -~~~~ - - Maintenance ~~~~~~~~~~~ * Enable ruff/bugbear rules (B) and fix issues. @@ -46,6 +42,7 @@ Deprecations * Deprecate experimental v3 support by issuing a `FutureWarning`. Also updated docs to warn about using the experimental v3 version. By :user:`Joe Hamman ` :issue:`1802` and :issue:`1807`. + * Deprecate the following stores: :class:`zarr.storage.DBMStore`, :class:`zarr.storage.LMDBStore`, :class:`zarr.storage.SQLiteStore`, :class:`zarr.storage.MongoDBStore`, :class:`zarr.storage.RedisStore`, and :class:`zarr.storage.ABSStore`. These stores are slated to be removed from Zarr-Python in version 3.0. From cb4230dd9a2a0c68b78384b24a2a5567df0b704a Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 7 May 2024 21:27:39 -0700 Subject: [PATCH 0518/1078] Update release.rst (#1850) --- docs/release.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index ba26549402..e2f9f3de85 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,23 @@ Release notes See `GH1777 `_ for more details on the upcoming 3.0 release. +.. _unreleased: + +Unreleased +---------- + +Enhancements +~~~~~~~~~~~~ + +Docs +~~~~ + +Maintenance +~~~~~~~~~~~ + +Deprecations +~~~~~~~~~~~~ + .. 
_release_2.18.0: 2.18.0 From fcab6505eaf15c959a0093cefe1b5b1a31f6c3ed Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Wed, 8 May 2024 17:49:43 +0200 Subject: [PATCH 0519/1078] adds test for codec entrypoint (#1835) --- src/zarr/codecs/registry.py | 13 +++------ .../entry_points.txt | 2 ++ tests/v3/package_with_entrypoint/__init__.py | 27 +++++++++++++++++++ tests/v3/test_codec_entrypoints.py | 25 +++++++++++++++++ 4 files changed, 58 insertions(+), 9 deletions(-) create mode 100644 tests/v3/package_with_entrypoint-0.1.dist-info/entry_points.txt create mode 100644 tests/v3/package_with_entrypoint/__init__.py create mode 100644 tests/v3/test_codec_entrypoints.py diff --git a/src/zarr/codecs/registry.py b/src/zarr/codecs/registry.py index 140e1372ef..7d46041255 100644 --- a/src/zarr/codecs/registry.py +++ b/src/zarr/codecs/registry.py @@ -12,16 +12,11 @@ __lazy_load_codecs: Dict[str, EntryPoint] = {} -def _collect_entrypoints() -> None: +def _collect_entrypoints() -> Dict[str, EntryPoint]: entry_points = get_entry_points() - if hasattr(entry_points, "select"): - # If entry_points() has a select method, use that. Python 3.10+ - for e in entry_points.select(group="zarr.codecs"): - __lazy_load_codecs[e.name] = e - else: - # Otherwise, fallback to using get - for e in entry_points.get("zarr.codecs", []): - __lazy_load_codecs[e.name] = e + for e in entry_points.select(group="zarr.codecs"): + __lazy_load_codecs[e.name] = e + return __lazy_load_codecs def register_codec(key: str, codec_cls: Type[Codec]) -> None: diff --git a/tests/v3/package_with_entrypoint-0.1.dist-info/entry_points.txt b/tests/v3/package_with_entrypoint-0.1.dist-info/entry_points.txt new file mode 100644 index 0000000000..2c9dc375de --- /dev/null +++ b/tests/v3/package_with_entrypoint-0.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[zarr.codecs] +test = package_with_entrypoint:TestCodec diff --git a/tests/v3/package_with_entrypoint/__init__.py b/tests/v3/package_with_entrypoint/__init__.py new file mode 100644 index 0000000000..cf7df20457 --- /dev/null +++ b/tests/v3/package_with_entrypoint/__init__.py @@ -0,0 +1,27 @@ +from numpy import ndarray +from zarr.abc.codec import ArrayBytesCodec +from zarr.common import ArraySpec, BytesLike +from zarr.config import RuntimeConfiguration + + +class TestCodec(ArrayBytesCodec): + is_fixed_size = True + + async def encode( + self, + chunk_array: ndarray, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> BytesLike | None: + pass + + async def decode( + self, + chunk_bytes: BytesLike, + chunk_spec: ArraySpec, + runtime_configuration: RuntimeConfiguration, + ) -> ndarray: + pass + + def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> int: + return input_byte_length diff --git a/tests/v3/test_codec_entrypoints.py b/tests/v3/test_codec_entrypoints.py new file mode 100644 index 0000000000..8fbf76b83d --- /dev/null +++ b/tests/v3/test_codec_entrypoints.py @@ -0,0 +1,25 @@ +import os.path +import sys + +import pytest + +import zarr.codecs.registry + + +here = os.path.abspath(os.path.dirname(__file__)) + + +@pytest.fixture() +def set_path(): + sys.path.append(here) + zarr.codecs.registry._collect_entrypoints() + yield + sys.path.remove(here) + entry_points = zarr.codecs.registry._collect_entrypoints() + entry_points.pop("test") + + +@pytest.mark.usefixtures("set_path") +def test_entrypoint_codec(): + cls = zarr.codecs.registry.get_codec_class("test") + assert cls.__name__ == "TestCodec" From 666a8b9460468b047d1cc093d5ac944b7faee6a2 Mon Sep 
17 00:00:00 2001 From: Max Jones <14077947+maxrjones@users.noreply.github.com> Date: Wed, 8 May 2024 19:30:46 -0400 Subject: [PATCH 0520/1078] Remove extra v3 sync module (#1856) --- src/zarr/v3/sync.py | 131 -------------------------------------------- 1 file changed, 131 deletions(-) delete mode 100644 src/zarr/v3/sync.py diff --git a/src/zarr/v3/sync.py b/src/zarr/v3/sync.py deleted file mode 100644 index 2838f68172..0000000000 --- a/src/zarr/v3/sync.py +++ /dev/null @@ -1,131 +0,0 @@ -from __future__ import annotations -from typing import TYPE_CHECKING, TypeVar - -if TYPE_CHECKING: - from typing import Any, AsyncIterator, Coroutine - -import asyncio -from concurrent.futures import wait -import threading - -from typing_extensions import ParamSpec - -from zarr.v3.config import SyncConfiguration - -P = ParamSpec("P") -T = TypeVar("T") - -# From https://github.com/fsspec/filesystem_spec/blob/master/fsspec/asyn.py - -iothread: list[threading.Thread | None] = [None] # dedicated IO thread -loop: list[asyncio.AbstractEventLoop | None] = [ - None -] # global event loop for any non-async instance -_lock: threading.Lock | None = None # global lock placeholder -get_running_loop = asyncio.get_running_loop - - -class SyncError(Exception): - pass - - -def _get_lock() -> threading.Lock: - """Allocate or return a threading lock. - - The lock is allocated on first use to allow setting one lock per forked process. - """ - global _lock - if not _lock: - _lock = threading.Lock() - return _lock - - -async def _runner(coro: Coroutine[Any, Any, T]) -> T | BaseException: - """ - Await a coroutine and return the result of running it. If awaiting the coroutine raises an - exception, the exception will be returned. - """ - try: - return await coro - except Exception as ex: - return ex - - -def sync( - coro: Coroutine[Any, Any, T], - loop: asyncio.AbstractEventLoop | None = None, - timeout: float | None = None, -) -> T: - """ - Make loop run coroutine until it returns. Runs in other thread - - Examples - -------- - >>> sync(async_function(), existing_loop) - """ - if loop is None: - # NB: if the loop is not running *yet*, it is OK to submit work - # and we will wait for it - loop = _get_loop() - if not isinstance(loop, asyncio.AbstractEventLoop): - raise TypeError(f"loop cannot be of type {type(loop)}") - if loop.is_closed(): - raise RuntimeError("Loop is not running") - try: - loop0 = asyncio.events.get_running_loop() - if loop0 is loop: - raise SyncError("Calling sync() from within a running loop") - except RuntimeError: - pass - - future = asyncio.run_coroutine_threadsafe(_runner(coro), loop) - - finished, unfinished = wait([future], return_when=asyncio.ALL_COMPLETED, timeout=timeout) - if len(unfinished) > 0: - raise asyncio.TimeoutError(f"Coroutine {coro} failed to finish in within {timeout}s") - assert len(finished) == 1 - return_result = list(finished)[0].result() - - if isinstance(return_result, BaseException): - raise return_result - else: - return return_result - - -def _get_loop() -> asyncio.AbstractEventLoop: - """Create or return the default fsspec IO loop - - The loop will be running on a separate thread. 
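The duplicate module being deleted here implements the same fsspec-derived bridge as the surviving src/zarr/sync.py: a single daemon thread runs a private event loop forever, and synchronous callers hand coroutines to that loop with run_coroutine_threadsafe, blocking on the resulting future. A condensed sketch of that pattern (the locking, error propagation, and timeout handling of the real module are omitted):

    import asyncio
    import threading

    # one dedicated IO thread running a private event loop forever
    _loop = asyncio.new_event_loop()
    threading.Thread(target=_loop.run_forever, name="zarrIO", daemon=True).start()

    def sync(coro):
        # submit the coroutine to the background loop; block until it completes
        return asyncio.run_coroutine_threadsafe(coro, _loop).result()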
- """ - if loop[0] is None: - with _get_lock(): - # repeat the check just in case the loop got filled between the - # previous two calls from another thread - if loop[0] is None: - new_loop = asyncio.new_event_loop() - loop[0] = new_loop - th = threading.Thread(target=new_loop.run_forever, name="zarrIO") - th.daemon = True - th.start() - iothread[0] = th - assert loop[0] is not None - return loop[0] - - -class SyncMixin: - _sync_configuration: SyncConfiguration - - def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: - # TODO: refactor this to to take *args and **kwargs and pass those to the method - # this should allow us to better type the sync wrapper - return sync( - coroutine, - loop=self._sync_configuration.asyncio_loop, - timeout=self._sync_configuration.timeout, - ) - - def _sync_iter(self, async_iterator: AsyncIterator[T]) -> list[T]: - async def iter_to_list() -> list[T]: - return [item async for item in async_iterator] - - return self._sync(iter_to_list()) From 5bb3375b385b2612ca3899b2d4d3c2e90fe4e510 Mon Sep 17 00:00:00 2001 From: Max Jones <14077947+maxrjones@users.noreply.github.com> Date: Fri, 10 May 2024 17:25:47 -0400 Subject: [PATCH 0521/1078] Use donfig for V3 configuration (#1855) * Use donfig for sync configuration * Consolidate concurrency config * Remove unused parameter * finish removing runtime config -- a few todos remain * fix order constructor * add basic tests for config state * add order property to Array * update comment in sharding codec --------- Co-authored-by: Joseph Hamman --- pyproject.toml | 1 + src/zarr/__init__.py | 12 +-- src/zarr/abc/codec.py | 9 --- src/zarr/array.py | 78 ++++++------------ src/zarr/array_v2.py | 25 ++---- src/zarr/codecs/blosc.py | 3 - src/zarr/codecs/bytes.py | 3 - src/zarr/codecs/crc32c_.py | 3 - src/zarr/codecs/gzip.py | 3 - src/zarr/codecs/pipeline.py | 29 ++----- src/zarr/codecs/sharding.py | 34 +++----- src/zarr/codecs/transpose.py | 4 +- src/zarr/codecs/zstd.py | 3 - src/zarr/common.py | 26 +++++- src/zarr/config.py | 48 ++--------- src/zarr/group.py | 34 ++------ src/zarr/metadata.py | 16 ++-- src/zarr/sync.py | 7 +- tests/v3/package_with_entrypoint/__init__.py | 3 - tests/v3/test_codecs.py | 85 ++++++++++---------- tests/v3/test_config.py | 15 ++++ tests/v3/test_group.py | 3 - tests/v3/test_sync.py | 2 - 23 files changed, 158 insertions(+), 288 deletions(-) create mode 100644 tests/v3/test_config.py diff --git a/pyproject.toml b/pyproject.toml index 93888a205c..3f5450845f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,7 @@ dependencies = [ 'crc32c', 'zstandard', 'typing_extensions', + 'donfig' ] dynamic = [ "version", diff --git a/src/zarr/__init__.py b/src/zarr/__init__.py index 9ae9dc54c4..a8aff30b52 100644 --- a/src/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -5,9 +5,8 @@ import zarr.codecs # noqa: F401 from zarr.array import Array, AsyncArray # noqa: F401 from zarr.array_v2 import ArrayV2 -from zarr.config import RuntimeConfiguration # noqa: F401 +from zarr.config import config # noqa: F401 from zarr.group import AsyncGroup, Group # noqa: F401 -from zarr.metadata import runtime_configuration # noqa: F401 from zarr.store import ( # noqa: F401 StoreLike, make_store_path, @@ -21,22 +20,19 @@ async def open_auto_async( store: StoreLike, - runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), ) -> Union[AsyncArray, AsyncGroup]: store_path = make_store_path(store) try: - return await AsyncArray.open(store_path, runtime_configuration=runtime_configuration_) + return await 
AsyncArray.open(store_path) except KeyError: - return await AsyncGroup.open(store_path, runtime_configuration=runtime_configuration_) + return await AsyncGroup.open(store_path) def open_auto( store: StoreLike, - runtime_configuration_: RuntimeConfiguration = RuntimeConfiguration(), ) -> Union[Array, ArrayV2, Group]: object = _sync( - open_auto_async(store, runtime_configuration_), - runtime_configuration_.asyncio_loop, + open_auto_async(store), ) if isinstance(object, AsyncArray): return Array(object) diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 1abc21b30b..8897cced89 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -14,7 +14,6 @@ from typing_extensions import Self from zarr.common import BytesLike, SliceSelection from zarr.metadata import ArrayMetadata - from zarr.config import RuntimeConfiguration class Codec(Metadata): @@ -40,7 +39,6 @@ async def decode( self, chunk_array: np.ndarray, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: pass @@ -49,7 +47,6 @@ async def encode( self, chunk_array: np.ndarray, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: pass @@ -60,7 +57,6 @@ async def decode( self, chunk_array: BytesLike, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: pass @@ -69,7 +65,6 @@ async def encode( self, chunk_array: np.ndarray, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: pass @@ -81,7 +76,6 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: pass @@ -94,7 +88,6 @@ async def encode_partial( chunk_array: np.ndarray, selection: SliceSelection, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> None: pass @@ -105,7 +98,6 @@ async def decode( self, chunk_array: BytesLike, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> BytesLike: pass @@ -114,6 +106,5 @@ async def encode( self, chunk_array: BytesLike, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: pass diff --git a/src/zarr/array.py b/src/zarr/array.py index 18e26b64dd..9f4ec911b4 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -6,7 +6,6 @@ # Questions to consider: # 1. Was splitting the array into two classes really necessary? -# 2. Do we really need runtime_configuration? 
Specifically, the asyncio_loop seems problematic from __future__ import annotations @@ -29,12 +28,12 @@ SliceSelection, concurrent_map, ) -from zarr.config import RuntimeConfiguration +from zarr.config import config from zarr.indexing import BasicIndexer, all_chunk_coords, is_total_slice from zarr.chunk_grids import RegularChunkGrid from zarr.chunk_key_encodings import DefaultChunkKeyEncoding, V2ChunkKeyEncoding -from zarr.metadata import ArrayMetadata +from zarr.metadata import ArrayMetadata, parse_indexing_order from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import sync @@ -52,7 +51,7 @@ def parse_array_metadata(data: Any) -> ArrayMetadata: class AsyncArray: metadata: ArrayMetadata store_path: StorePath - runtime_configuration: RuntimeConfiguration + order: Literal["C", "F"] @property def codecs(self): @@ -62,13 +61,14 @@ def __init__( self, metadata: ArrayMetadata, store_path: StorePath, - runtime_configuration: RuntimeConfiguration, + order: Literal["C", "F"] | None = None, ): metadata_parsed = parse_array_metadata(metadata) + order_parsed = parse_indexing_order(order or config.get("array.order")) object.__setattr__(self, "metadata", metadata_parsed) object.__setattr__(self, "store_path", store_path) - object.__setattr__(self, "runtime_configuration", runtime_configuration) + object.__setattr__(self, "order", order_parsed) @classmethod async def create( @@ -86,7 +86,6 @@ async def create( codecs: Optional[Iterable[Union[Codec, Dict[str, Any]]]] = None, dimension_names: Optional[Iterable[str]] = None, attributes: Optional[Dict[str, Any]] = None, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), exists_ok: bool = False, ) -> AsyncArray: store_path = make_store_path(store) @@ -115,12 +114,10 @@ async def create( dimension_names=tuple(dimension_names) if dimension_names else None, attributes=attributes or {}, ) - runtime_configuration = runtime_configuration or RuntimeConfiguration() array = cls( metadata=metadata, store_path=store_path, - runtime_configuration=runtime_configuration, ) await array._save_metadata() @@ -131,19 +128,15 @@ def from_dict( cls, store_path: StorePath, data: Dict[str, Any], - runtime_configuration: RuntimeConfiguration, ) -> AsyncArray: metadata = ArrayMetadata.from_dict(data) - async_array = cls( - metadata=metadata, store_path=store_path, runtime_configuration=runtime_configuration - ) + async_array = cls(metadata=metadata, store_path=store_path) return async_array @classmethod async def open( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> AsyncArray: store_path = make_store_path(store) zarr_json_bytes = await (store_path / ZARR_JSON).get() @@ -151,14 +144,12 @@ async def open( return cls.from_dict( store_path, json.loads(zarr_json_bytes), - runtime_configuration=runtime_configuration, ) @classmethod async def open_auto( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> AsyncArray: # TODO: Union[AsyncArray, ArrayV2] store_path = make_store_path(store) v3_metadata_bytes = await (store_path / ZARR_JSON).get() @@ -166,7 +157,6 @@ async def open_auto( return cls.from_dict( store_path, json.loads(v3_metadata_bytes), - runtime_configuration=runtime_configuration or RuntimeConfiguration(), ) else: raise ValueError("no v2 support yet") @@ -204,7 +194,7 @@ async def getitem(self, selection: Selection) -> np.ndarray: out = np.zeros( indexer.shape, dtype=self.metadata.dtype, - order=self.runtime_configuration.order, + 
order=self.order, ) # reading chunks and decoding them @@ -214,7 +204,7 @@ async def getitem(self, selection: Selection) -> np.ndarray: for chunk_coords, chunk_selection, out_selection in indexer ], self._read_chunk, - self.runtime_configuration.concurrency, + config.get("async.concurrency"), ) if out.shape: @@ -232,15 +222,13 @@ async def _read_chunk( out_selection: SliceSelection, out: np.ndarray, ) -> None: - chunk_spec = self.metadata.get_chunk_spec(chunk_coords) + chunk_spec = self.metadata.get_chunk_spec(chunk_coords, self.order) chunk_key_encoding = self.metadata.chunk_key_encoding chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) store_path = self.store_path / chunk_key if self.codecs.supports_partial_decode: - chunk_array = await self.codecs.decode_partial( - store_path, chunk_selection, chunk_spec, self.runtime_configuration - ) + chunk_array = await self.codecs.decode_partial(store_path, chunk_selection, chunk_spec) if chunk_array is not None: out[out_selection] = chunk_array else: @@ -248,9 +236,7 @@ async def _read_chunk( else: chunk_bytes = await store_path.get() if chunk_bytes is not None: - chunk_array = await self.codecs.decode( - chunk_bytes, chunk_spec, self.runtime_configuration - ) + chunk_array = await self.codecs.decode(chunk_bytes, chunk_spec) tmp = chunk_array[chunk_selection] out[out_selection] = tmp else: @@ -291,7 +277,7 @@ async def setitem(self, selection: Selection, value: np.ndarray) -> None: for chunk_coords, chunk_selection, out_selection in indexer ], self._write_chunk, - self.runtime_configuration.concurrency, + config.get("async.concurrency"), ) async def _write_chunk( @@ -302,7 +288,7 @@ async def _write_chunk( chunk_selection: SliceSelection, out_selection: SliceSelection, ) -> None: - chunk_spec = self.metadata.get_chunk_spec(chunk_coords) + chunk_spec = self.metadata.get_chunk_spec(chunk_coords, self.order) chunk_key_encoding = self.metadata.chunk_key_encoding chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) store_path = self.store_path / chunk_key @@ -326,7 +312,6 @@ async def _write_chunk( value[out_selection], chunk_selection, chunk_spec, - self.runtime_configuration, ) else: # writing partial chunks @@ -342,7 +327,7 @@ async def _write_chunk( chunk_array.fill(self.metadata.fill_value) else: chunk_array = ( - await self.codecs.decode(chunk_bytes, chunk_spec, self.runtime_configuration) + await self.codecs.decode(chunk_bytes, chunk_spec) ).copy() # make a writable copy chunk_array[chunk_selection] = value[out_selection] @@ -355,9 +340,7 @@ async def _write_chunk_to_store( # chunks that only contain fill_value will be removed await store_path.delete() else: - chunk_bytes = await self.codecs.encode( - chunk_array, chunk_spec, self.runtime_configuration - ) + chunk_bytes = await self.codecs.encode(chunk_array, chunk_spec) if chunk_bytes is None: await store_path.delete() else: @@ -388,7 +371,7 @@ async def _delete_key(key: str) -> None: for chunk_coords in old_chunk_coords.difference(new_chunk_coords) ], _delete_key, - self.runtime_configuration.concurrency, + config.get("async.concurrency"), ) # Write new metadata @@ -429,7 +412,6 @@ def create( codecs: Optional[Iterable[Union[Codec, Dict[str, Any]]]] = None, dimension_names: Optional[Iterable[str]] = None, attributes: Optional[Dict[str, Any]] = None, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), exists_ok: bool = False, ) -> Array: async_array = sync( @@ -443,10 +425,8 @@ def create( codecs=codecs, dimension_names=dimension_names, 
attributes=attributes, - runtime_configuration=runtime_configuration, exists_ok=exists_ok, ), - runtime_configuration.asyncio_loop, ) return cls(async_array) @@ -455,34 +435,25 @@ def from_dict( cls, store_path: StorePath, data: Dict[str, Any], - runtime_configuration: RuntimeConfiguration, ) -> Array: - async_array = AsyncArray.from_dict( - store_path=store_path, data=data, runtime_configuration=runtime_configuration - ) + async_array = AsyncArray.from_dict(store_path=store_path, data=data) return cls(async_array) @classmethod def open( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Array: - async_array = sync( - AsyncArray.open(store, runtime_configuration=runtime_configuration), - runtime_configuration.asyncio_loop, - ) + async_array = sync(AsyncArray.open(store)) return cls(async_array) @classmethod def open_auto( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Array: # TODO: Union[Array, ArrayV2]: async_array = sync( - AsyncArray.open_auto(store, runtime_configuration), - runtime_configuration.asyncio_loop, + AsyncArray.open_auto(store), ) return cls(async_array) @@ -514,23 +485,24 @@ def metadata(self) -> ArrayMetadata: def store_path(self) -> StorePath: return self._async_array.store_path + @property + def order(self) -> Literal["C", "F"]: + return self._async_array.order + def __getitem__(self, selection: Selection) -> np.ndarray: return sync( self._async_array.getitem(selection), - self._async_array.runtime_configuration.asyncio_loop, ) def __setitem__(self, selection: Selection, value: np.ndarray) -> None: sync( self._async_array.setitem(selection, value), - self._async_array.runtime_configuration.asyncio_loop, ) def resize(self, new_shape: ChunkCoords) -> Array: return type(self)( sync( self._async_array.resize(new_shape), - self._async_array.runtime_configuration.asyncio_loop, ) ) @@ -538,7 +510,6 @@ def update_attributes(self, new_attributes: Dict[str, Any]) -> Array: return type(self)( sync( self._async_array.update_attributes(new_attributes), - self._async_array.runtime_configuration.asyncio_loop, ) ) @@ -548,5 +519,4 @@ def __repr__(self): def info(self): return sync( self._async_array.info(), - self._async_array.runtime_configuration.asyncio_loop, ) diff --git a/src/zarr/array_v2.py b/src/zarr/array_v2.py index 8c2cd3faec..18251e7db7 100644 --- a/src/zarr/array_v2.py +++ b/src/zarr/array_v2.py @@ -20,7 +20,6 @@ concurrent_map, to_thread, ) -from zarr.config import RuntimeConfiguration from zarr.indexing import BasicIndexer, all_chunk_coords, is_total_slice from zarr.metadata import ArrayV2Metadata from zarr.store import StoreLike, StorePath, make_store_path @@ -55,7 +54,6 @@ class ArrayV2: metadata: ArrayV2Metadata attributes: Optional[Dict[str, Any]] store_path: StorePath - runtime_configuration: RuntimeConfiguration @classmethod async def create_async( @@ -72,7 +70,6 @@ async def create_async( compressor: Optional[Dict[str, Any]] = None, attributes: Optional[Dict[str, Any]] = None, exists_ok: bool = False, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> ArrayV2: store_path = make_store_path(store) if not exists_ok: @@ -98,7 +95,6 @@ async def create_async( metadata=metadata, store_path=store_path, attributes=attributes, - runtime_configuration=runtime_configuration, ) await array._save_metadata() return array @@ -118,7 +114,6 @@ def create( compressor: Optional[Dict[str, Any]] = None, attributes: Optional[Dict[str, Any]] = None, exists_ok: 
bool = False, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> ArrayV2: return sync( cls.create_async( @@ -133,16 +128,13 @@ def create( filters=filters, attributes=attributes, exists_ok=exists_ok, - runtime_configuration=runtime_configuration, ), - runtime_configuration.asyncio_loop, ) @classmethod async def open_async( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> ArrayV2: store_path = make_store_path(store) zarray_bytes, zattrs_bytes = await asyncio.gather( @@ -154,18 +146,15 @@ async def open_async( store_path, zarray_json=json.loads(zarray_bytes), zattrs_json=json.loads(zattrs_bytes) if zattrs_bytes is not None else None, - runtime_configuration=runtime_configuration, ) @classmethod def open( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> ArrayV2: return sync( - cls.open_async(store, runtime_configuration), - runtime_configuration.asyncio_loop, + cls.open_async(store), ) @classmethod @@ -174,14 +163,12 @@ def from_dict( store_path: StorePath, zarray_json: Any, zattrs_json: Optional[Any], - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> ArrayV2: metadata = ArrayV2Metadata.from_dict(zarray_json) out = cls( store_path=store_path, metadata=metadata, attributes=zattrs_json, - runtime_configuration=runtime_configuration, ) out._validate_metadata() return out @@ -219,7 +206,7 @@ def async_(self) -> _AsyncArrayProxy: return _AsyncArrayProxy(self) def __getitem__(self, selection: Selection): - return sync(self.get_async(selection), self.runtime_configuration.asyncio_loop) + return sync(self.get_async(selection)) async def get_async(self, selection: Selection): indexer = BasicIndexer( @@ -295,7 +282,7 @@ async def _decode_chunk(self, chunk_bytes: Optional[BytesLike]) -> Optional[np.n return chunk_array def __setitem__(self, selection: Selection, value: np.ndarray) -> None: - sync(self.set_async(selection, value), self.runtime_configuration.asyncio_loop) + sync(self.set_async(selection, value)) async def set_async(self, selection: Selection, value: np.ndarray) -> None: chunk_shape = self.metadata.chunks @@ -436,7 +423,7 @@ async def _delete_key(key: str) -> None: return replace(self, metadata=new_metadata) def resize(self, new_shape: ChunkCoords) -> ArrayV2: - return sync(self.resize_async(new_shape), self.runtime_configuration.asyncio_loop) + return sync(self.resize_async(new_shape)) async def convert_to_v3_async(self) -> Array: from sys import byteorder as sys_byteorder @@ -511,7 +498,6 @@ async def convert_to_v3_async(self) -> Array: return Array.from_dict( store_path=self.store_path, data=json.loads(new_metadata_bytes), - runtime_configuration=self.runtime_configuration, ) async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> ArrayV2: @@ -521,11 +507,10 @@ async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> Array def update_attributes(self, new_attributes: Dict[str, Any]) -> ArrayV2: return sync( self.update_attributes_async(new_attributes), - self.runtime_configuration.asyncio_loop, ) def convert_to_v3(self) -> Array: - return sync(self.convert_to_v3_async(), loop=self.runtime_configuration.asyncio_loop) + return sync(self.convert_to_v3_async()) def __repr__(self): return f"" diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index 374375e6c2..5ee2b7640d 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -17,7 +17,6 @@ from typing import Dict, Optional 
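The change repeated across every codec in this patch is mechanical: encode and decode drop the runtime_configuration parameter and keep only the chunk payload plus its ArraySpec (which now carries order), while blocking numcodecs work is still moved off the event loop with to_thread. A hedged sketch of a bytes-to-bytes codec written against the post-refactor signatures; the class name and the fixed GZip level are illustrative, and the import paths are assumptions inferred from the modules touched here:

    from __future__ import annotations

    from numcodecs.gzip import GZip

    from zarr.abc.codec import BytesBytesCodec
    from zarr.common import ArraySpec, BytesLike, to_thread


    class GzipLikeCodec(BytesBytesCodec):
        # illustrative only: two-argument encode/decode, no runtime_configuration
        is_fixed_size = False

        async def decode(self, chunk_bytes: bytes, _chunk_spec: ArraySpec) -> BytesLike:
            # offload the synchronous numcodecs call to a worker thread
            return await to_thread(GZip(5).decode, chunk_bytes)

        async def encode(self, chunk_bytes: bytes, _chunk_spec: ArraySpec) -> BytesLike | None:
            return await to_thread(GZip(5).encode, chunk_bytes)

        def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
            raise NotImplementedError  # compressed size is not known up front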
from typing_extensions import Self from zarr.common import JSON, ArraySpec, BytesLike - from zarr.config import RuntimeConfiguration class BloscShuffle(Enum): @@ -163,7 +162,6 @@ async def decode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: return await to_thread(self._blosc_codec.decode, chunk_bytes) @@ -171,7 +169,6 @@ async def encode( self, chunk_bytes: bytes, chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: chunk_array = np.frombuffer(chunk_bytes, dtype=chunk_spec.dtype) return await to_thread(self._blosc_codec.encode, chunk_array) diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index aa24c3167e..566b3a8df9 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -13,7 +13,6 @@ if TYPE_CHECKING: from zarr.common import JSON, ArraySpec, BytesLike - from zarr.config import RuntimeConfiguration from typing_extensions import Self @@ -72,7 +71,6 @@ async def decode( self, chunk_bytes: BytesLike, chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: if chunk_spec.dtype.itemsize > 0: if self.endian == Endian.little: @@ -95,7 +93,6 @@ async def encode( self, chunk_array: np.ndarray, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: if chunk_array.dtype.itemsize > 1: byteorder = self._get_byteorder(chunk_array) diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index 04d5b88d70..dd61b3425e 100644 --- a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -15,7 +15,6 @@ from typing import Dict, Optional from typing_extensions import Self from zarr.common import JSON, BytesLike, ArraySpec - from zarr.config import RuntimeConfiguration @dataclass(frozen=True) @@ -34,7 +33,6 @@ async def decode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: crc32_bytes = chunk_bytes[-4:] inner_bytes = chunk_bytes[:-4] @@ -52,7 +50,6 @@ async def encode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: return chunk_bytes + np.uint32(crc32c(chunk_bytes)).tobytes() diff --git a/src/zarr/codecs/gzip.py b/src/zarr/codecs/gzip.py index f75f5b743e..71dcaa6bb5 100644 --- a/src/zarr/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -12,7 +12,6 @@ from typing import Optional, Dict from typing_extensions import Self from zarr.common import JSON, ArraySpec, BytesLike - from zarr.config import RuntimeConfiguration def parse_gzip_level(data: JSON) -> int: @@ -48,7 +47,6 @@ async def decode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: return await to_thread(GZip(self.level).decode, chunk_bytes) @@ -56,7 +54,6 @@ async def encode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: return await to_thread(GZip(self.level).encode, chunk_bytes) diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index 4908ee8057..c2338f717d 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -21,7 +21,6 @@ from typing import Iterator, List, Optional, Tuple, Union from zarr.store import StorePath from zarr.metadata import ArrayMetadata - from zarr.config import RuntimeConfiguration from zarr.common import JSON, ArraySpec, BytesLike, SliceSelection @@ -151,7 +150,6 @@ async 
def decode( self, chunk_bytes: BytesLike, array_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: ( aa_codecs_with_spec, @@ -160,13 +158,13 @@ async def decode( ) = self._codecs_with_resolved_metadata(array_spec) for bb_codec, array_spec in bb_codecs_with_spec[::-1]: - chunk_bytes = await bb_codec.decode(chunk_bytes, array_spec, runtime_configuration) + chunk_bytes = await bb_codec.decode(chunk_bytes, array_spec) ab_codec, array_spec = ab_codec_with_spec - chunk_array = await ab_codec.decode(chunk_bytes, array_spec, runtime_configuration) + chunk_array = await ab_codec.decode(chunk_bytes, array_spec) for aa_codec, array_spec in aa_codecs_with_spec[::-1]: - chunk_array = await aa_codec.decode(chunk_array, array_spec, runtime_configuration) + chunk_array = await aa_codec.decode(chunk_array, array_spec) return chunk_array @@ -175,19 +173,15 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: assert self.supports_partial_decode assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin) - return await self.array_bytes_codec.decode_partial( - store_path, selection, chunk_spec, runtime_configuration - ) + return await self.array_bytes_codec.decode_partial(store_path, selection, chunk_spec) async def encode( self, chunk_array: np.ndarray, array_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: ( aa_codecs_with_spec, @@ -196,23 +190,19 @@ async def encode( ) = self._codecs_with_resolved_metadata(array_spec) for aa_codec, array_spec in aa_codecs_with_spec: - chunk_array_maybe = await aa_codec.encode( - chunk_array, array_spec, runtime_configuration - ) + chunk_array_maybe = await aa_codec.encode(chunk_array, array_spec) if chunk_array_maybe is None: return None chunk_array = chunk_array_maybe ab_codec, array_spec = ab_codec_with_spec - chunk_bytes_maybe = await ab_codec.encode(chunk_array, array_spec, runtime_configuration) + chunk_bytes_maybe = await ab_codec.encode(chunk_array, array_spec) if chunk_bytes_maybe is None: return None chunk_bytes = chunk_bytes_maybe for bb_codec, array_spec in bb_codecs_with_spec: - chunk_bytes_maybe = await bb_codec.encode( - chunk_bytes, array_spec, runtime_configuration - ) + chunk_bytes_maybe = await bb_codec.encode(chunk_bytes, array_spec) if chunk_bytes_maybe is None: return None chunk_bytes = chunk_bytes_maybe @@ -225,13 +215,10 @@ async def encode_partial( chunk_array: np.ndarray, selection: SliceSelection, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> None: assert self.supports_partial_encode assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin) - await self.array_bytes_codec.encode_partial( - store_path, chunk_array, selection, chunk_spec, runtime_configuration - ) + await self.array_bytes_codec.encode_partial(store_path, chunk_array, selection, chunk_spec) def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: for codec in self: diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index d4f8b7dfc9..e94074e63e 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -25,6 +25,7 @@ parse_shapelike, product, ) +from zarr.config import config from zarr.chunk_grids import RegularChunkGrid from zarr.indexing import ( BasicIndexer, @@ -34,7 +35,6 @@ ) from zarr.metadata import ( ArrayMetadata, - runtime_configuration as 
make_runtime_configuration, parse_codecs, ) @@ -49,7 +49,6 @@ BytesLike, SliceSelection, ) - from zarr.config import RuntimeConfiguration MAX_UINT_64 = 2**64 - 1 @@ -302,7 +301,6 @@ async def decode( self, shard_bytes: BytesLike, shard_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: # print("decode") shard_shape = shard_spec.shape @@ -319,7 +317,7 @@ async def decode( out = np.zeros( shard_shape, dtype=shard_spec.dtype, - order=runtime_configuration.order, + order=shard_spec.order, ) shard_dict = await _ShardProxy.from_bytes(shard_bytes, self, chunks_per_shard) @@ -336,13 +334,12 @@ async def decode( chunk_selection, out_selection, shard_spec, - runtime_configuration, out, ) for chunk_coords, chunk_selection, out_selection in indexer ], self._read_chunk, - runtime_configuration.concurrency, + config.get("async.concurrency"), ) return out @@ -352,7 +349,6 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, shard_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape @@ -368,7 +364,7 @@ async def decode_partial( out = np.zeros( indexer.shape, dtype=shard_spec.dtype, - order=runtime_configuration.order, + order=shard_spec.order, ) indexed_chunks = list(indexer) @@ -404,13 +400,12 @@ async def decode_partial( chunk_selection, out_selection, shard_spec, - runtime_configuration, out, ) for chunk_coords, chunk_selection, out_selection in indexed_chunks ], self._read_chunk, - runtime_configuration.concurrency, + config.get("async.concurrency"), ) return out @@ -422,13 +417,12 @@ async def _read_chunk( chunk_selection: SliceSelection, out_selection: SliceSelection, shard_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, out: np.ndarray, ) -> None: chunk_spec = self._get_chunk_spec(shard_spec) chunk_bytes = shard_dict.get(chunk_coords, None) if chunk_bytes is not None: - chunk_array = await self.codecs.decode(chunk_bytes, chunk_spec, runtime_configuration) + chunk_array = await self.codecs.decode(chunk_bytes, chunk_spec) tmp = chunk_array[chunk_selection] out[out_selection] = tmp else: @@ -438,7 +432,6 @@ async def encode( self, shard_array: np.ndarray, shard_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape @@ -472,7 +465,7 @@ async def _write_chunk( chunk_spec = self._get_chunk_spec(shard_spec) return ( chunk_coords, - await self.codecs.encode(chunk_array, chunk_spec, runtime_configuration), + await self.codecs.encode(chunk_array, chunk_spec), ) return (chunk_coords, None) @@ -483,7 +476,7 @@ async def _write_chunk( for chunk_coords, chunk_selection, out_selection in indexer ], _write_chunk, - runtime_configuration.concurrency, + config.get("async.concurrency"), ) if len(encoded_chunks) == 0: return None @@ -501,7 +494,6 @@ async def encode_partial( shard_array: np.ndarray, selection: SliceSelection, shard_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> None: # print("encode_partial") shard_shape = shard_spec.shape @@ -545,14 +537,14 @@ async def _write_chunk( chunk_array.fill(shard_spec.fill_value) else: chunk_array = ( - await self.codecs.decode(chunk_bytes, chunk_spec, runtime_configuration) + await self.codecs.decode(chunk_bytes, chunk_spec) ).copy() # make a writable copy chunk_array[chunk_selection] = shard_array[out_selection] if not np.array_equiv(chunk_array, shard_spec.fill_value): return ( 
chunk_coords, - await self.codecs.encode(chunk_array, chunk_spec, runtime_configuration), + await self.codecs.encode(chunk_array, chunk_spec), ) else: return (chunk_coords, None) @@ -567,7 +559,7 @@ async def _write_chunk( for chunk_coords, chunk_selection, out_selection in indexer ], _write_chunk, - runtime_configuration.concurrency, + config.get("async.concurrency"), ) for chunk_coords, chunk_bytes in encoded_chunks: @@ -607,7 +599,6 @@ async def _decode_shard_index( await self.index_codecs.decode( index_bytes, self._get_index_chunk_spec(chunks_per_shard), - make_runtime_configuration("C"), ) ) @@ -615,7 +606,6 @@ async def _encode_shard_index(self, index: _ShardIndex) -> BytesLike: index_bytes = await self.index_codecs.encode( index.offsets_and_lengths, self._get_index_chunk_spec(index.chunks_per_shard), - make_runtime_configuration("C"), ) assert index_bytes is not None return index_bytes @@ -631,6 +621,7 @@ def _get_index_chunk_spec(self, chunks_per_shard: ChunkCoords) -> ArraySpec: shape=chunks_per_shard + (2,), dtype=np.dtype(" ArraySpec: shape=self.chunk_shape, dtype=shard_spec.dtype, fill_value=shard_spec.fill_value, + order=shard_spec.order, ) @lru_cache diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index c63327f6fc..a13708955c 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -6,7 +6,6 @@ from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration if TYPE_CHECKING: - from zarr.config import RuntimeConfiguration from typing import TYPE_CHECKING, Optional, Tuple from typing_extensions import Self @@ -71,13 +70,13 @@ def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: shape=tuple(chunk_spec.shape[self.order[i]] for i in range(chunk_spec.ndim)), dtype=chunk_spec.dtype, fill_value=chunk_spec.fill_value, + order=chunk_spec.order, ) async def decode( self, chunk_array: np.ndarray, chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> np.ndarray: inverse_order = [0] * chunk_spec.ndim for x, i in enumerate(self.order): @@ -89,7 +88,6 @@ async def encode( self, chunk_array: np.ndarray, chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> Optional[np.ndarray]: chunk_array = chunk_array.transpose(self.order) return chunk_array diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index 41db850ab6..ad10a7fdb8 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -12,7 +12,6 @@ if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.config import RuntimeConfiguration from zarr.common import BytesLike, JSON, ArraySpec @@ -64,7 +63,6 @@ async def decode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> BytesLike: return await to_thread(self._decompress, chunk_bytes) @@ -72,7 +70,6 @@ async def encode( self, chunk_bytes: bytes, _chunk_spec: ArraySpec, - _runtime_configuration: RuntimeConfiguration, ) -> Optional[BytesLike]: return await to_thread(self._compress, chunk_bytes) diff --git a/src/zarr/common.py b/src/zarr/common.py index 7d8431f97e..7ef2fc9a61 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -1,5 +1,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Union, Tuple, Iterable, Dict, List, TypeVar, overload, Any +from typing import ( + TYPE_CHECKING, + Literal, + Union, + Tuple, + Iterable, + Dict, + List, + TypeVar, + overload, + Any, +) import asyncio import contextvars from dataclasses import 
dataclass @@ -78,15 +89,20 @@ class ArraySpec: shape: ChunkCoords dtype: np.dtype[Any] fill_value: Any + order: Literal["C", "F"] - def __init__(self, shape: ChunkCoords, dtype: np.dtype[Any], fill_value: Any) -> None: + def __init__( + self, shape: ChunkCoords, dtype: np.dtype[Any], fill_value: Any, order: Literal["C", "F"] + ) -> None: shape_parsed = parse_shapelike(shape) dtype_parsed = parse_dtype(dtype) fill_value_parsed = parse_fill_value(fill_value) + order_parsed = parse_order(order) object.__setattr__(self, "shape", shape_parsed) object.__setattr__(self, "dtype", dtype_parsed) object.__setattr__(self, "fill_value", fill_value_parsed) + object.__setattr__(self, "order", order_parsed) @property def ndim(self) -> int: @@ -159,3 +175,9 @@ def parse_dtype(data: Any) -> np.dtype[Any]: def parse_fill_value(data: Any) -> Any: # todo: real validation return data + + +def parse_order(data: Any) -> Literal["C", "F"]: + if data in ("C", "F"): + return data + raise ValueError(f"Expected one of ('C', 'F'), got {data} instead.") diff --git a/src/zarr/config.py b/src/zarr/config.py index cd4d82597b..e546cb1c23 100644 --- a/src/zarr/config.py +++ b/src/zarr/config.py @@ -1,15 +1,13 @@ from __future__ import annotations -from asyncio import AbstractEventLoop -from dataclasses import dataclass -from typing import Any, Literal, Optional +from typing import Any, Literal +from donfig import Config -@dataclass(frozen=True) -class SyncConfiguration: - concurrency: Optional[int] = None - asyncio_loop: Optional[AbstractEventLoop] = None - timeout: float | None = None +config = Config( + "zarr", + defaults=[{"array": {"order": "C"}, "async": {"concurrency": None, "timeout": None}}], +) def parse_indexing_order(data: Any) -> Literal["C", "F"]: @@ -17,37 +15,3 @@ def parse_indexing_order(data: Any) -> Literal["C", "F"]: return data msg = f"Expected one of ('C', 'F'), got {data} instead." raise ValueError(msg) - - -# todo: handle negative values? 
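With donfig, configuration becomes a process-wide singleton that call sites read lazily (the config.get("async.concurrency") calls in array.py and sharding.py above) instead of threading a RuntimeConfiguration object through every signature. A small usage sketch matching the defaults declared above and the config.set call sites in the tests at the end of this patch:

    from zarr.config import config

    assert config.get("array.order") == "C"           # default declared above
    assert config.get("async.concurrency") is None

    with config.set({"array.order": "F"}):            # donfig scoped override
        assert config.get("array.order") == "F"       # arrays created here use "F"
    assert config.get("array.order") == "C"           # default restored on exit

As a side benefit of the donfig design (presumably part of the motivation here), settings can also be overridden per process via ZARR_-prefixed environment variables rather than in code.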
-def parse_concurrency(data: Any) -> int | None: - if data is None or isinstance(data, int): - return data - raise TypeError(f"Expected int or None, got {type(data)}") - - -def parse_asyncio_loop(data: Any) -> AbstractEventLoop | None: - if data is None or isinstance(data, AbstractEventLoop): - return data - raise TypeError(f"Expected AbstractEventLoop or None, got {type(data)}") - - -@dataclass(frozen=True) -class RuntimeConfiguration: - order: Literal["C", "F"] = "C" - concurrency: Optional[int] = None - asyncio_loop: Optional[AbstractEventLoop] = None - - def __init__( - self, - order: Literal["C", "F"] = "C", - concurrency: Optional[int] = None, - asyncio_loop: Optional[AbstractEventLoop] = None, - ): - order_parsed = parse_indexing_order(order) - concurrency_parsed = parse_concurrency(concurrency) - asyncio_loop_parsed = parse_asyncio_loop(asyncio_loop) - - object.__setattr__(self, "order", order_parsed) - object.__setattr__(self, "concurrency", concurrency_parsed) - object.__setattr__(self, "asyncio_loop_parsed", asyncio_loop_parsed) diff --git a/src/zarr/group.py b/src/zarr/group.py index 4da059c814..c71860b1b6 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -18,7 +18,6 @@ from zarr.array import AsyncArray, Array from zarr.attributes import Attributes from zarr.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON -from zarr.config import RuntimeConfiguration, SyncConfiguration from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import SyncMixin, sync @@ -78,7 +77,6 @@ def to_dict(self) -> dict[str, Any]: class AsyncGroup: metadata: GroupMetadata store_path: StorePath - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration() @classmethod async def create( @@ -88,7 +86,6 @@ async def create( attributes: dict[str, Any] = {}, exists_ok: bool = False, zarr_format: Literal[2, 3] = 3, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> AsyncGroup: store_path = make_store_path(store) if not exists_ok: @@ -99,7 +96,6 @@ async def create( group = cls( metadata=GroupMetadata(attributes=attributes, zarr_format=zarr_format), store_path=store_path, - runtime_configuration=runtime_configuration, ) await group._save_metadata() return group @@ -108,7 +104,6 @@ async def create( async def open( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), zarr_format: Literal[2, 3, None] = 3, ) -> AsyncGroup: store_path = make_store_path(store) @@ -154,19 +149,17 @@ async def open( assert zarr_json_bytes is not None group_metadata = json.loads(zarr_json_bytes) - return cls.from_dict(store_path, group_metadata, runtime_configuration) + return cls.from_dict(store_path, group_metadata) @classmethod def from_dict( cls, store_path: StorePath, data: dict[str, Any], - runtime_configuration: RuntimeConfiguration, ) -> AsyncGroup: group = cls( metadata=GroupMetadata.from_dict(data), store_path=store_path, - runtime_configuration=runtime_configuration, ) return group @@ -193,11 +186,9 @@ async def getitem( else: zarr_json = json.loads(zarr_json_bytes) if zarr_json["node_type"] == "group": - return type(self).from_dict(store_path, zarr_json, self.runtime_configuration) + return type(self).from_dict(store_path, zarr_json) elif zarr_json["node_type"] == "array": - return AsyncArray.from_dict( - store_path, zarr_json, runtime_configuration=self.runtime_configuration - ) + return AsyncArray.from_dict(store_path, zarr_json) else: raise ValueError(f"unexpected node_type: {zarr_json['node_type']}") elif 
self.metadata.zarr_format == 2: @@ -220,9 +211,7 @@ async def getitem( if zarray is not None: # TODO: update this once the V2 array support is part of the primary array class zarr_json = {**zarray, "attributes": zattrs} - return AsyncArray.from_dict( - store_path, zarray, runtime_configuration=self.runtime_configuration - ) + return AsyncArray.from_dict(store_path, zarray) else: zgroup = ( json.loads(zgroup_bytes) @@ -230,7 +219,7 @@ async def getitem( else {"zarr_format": self.metadata.zarr_format} ) zarr_json = {**zgroup, "attributes": zattrs} - return type(self).from_dict(store_path, zarr_json, self.runtime_configuration) + return type(self).from_dict(store_path, zarr_json) else: raise ValueError(f"unexpected zarr_format: {self.metadata.zarr_format}") @@ -260,18 +249,14 @@ def info(self): return self.metadata.info async def create_group(self, path: str, **kwargs) -> AsyncGroup: - runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) return await type(self).create( self.store_path / path, - runtime_configuration=runtime_configuration, **kwargs, ) async def create_array(self, path: str, **kwargs) -> AsyncArray: - runtime_configuration = kwargs.pop("runtime_configuration", self.runtime_configuration) return await AsyncArray.create( self.store_path / path, - runtime_configuration=runtime_configuration, **kwargs, ) @@ -402,7 +387,6 @@ async def move(self, source: str, dest: str) -> None: @dataclass(frozen=True) class Group(SyncMixin): _async_group: AsyncGroup - _sync_configuration: SyncConfiguration = field(init=True, default=SyncConfiguration()) @classmethod def create( @@ -411,16 +395,13 @@ def create( *, attributes: dict[str, Any] = {}, exists_ok: bool = False, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Group: obj = sync( AsyncGroup.create( store, attributes=attributes, exists_ok=exists_ok, - runtime_configuration=runtime_configuration, ), - loop=runtime_configuration.asyncio_loop, ) return cls(obj) @@ -429,11 +410,8 @@ def create( def open( cls, store: StoreLike, - runtime_configuration: RuntimeConfiguration = RuntimeConfiguration(), ) -> Group: - obj = sync( - AsyncGroup.open(store, runtime_configuration), loop=runtime_configuration.asyncio_loop - ) + obj = sync(AsyncGroup.open(store)) return cls(obj) def __getitem__(self, path: str) -> Array | Group: diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 8eba9a0b5a..3903bacd42 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -26,13 +26,7 @@ parse_fill_value, parse_shapelike, ) -from zarr.config import RuntimeConfiguration, parse_indexing_order - - -def runtime_configuration( - order: Literal["C", "F"], concurrency: Optional[int] = None -) -> RuntimeConfiguration: - return RuntimeConfiguration(order=order, concurrency=concurrency) +from zarr.config import parse_indexing_order # For type checking @@ -146,7 +140,10 @@ def __init__( attributes_parsed = parse_attributes(attributes) array_spec = ArraySpec( - shape=shape_parsed, dtype=data_type_parsed, fill_value=fill_value_parsed + shape=shape_parsed, + dtype=data_type_parsed, + fill_value=fill_value_parsed, + order="C", # TODO: order is not needed here. 
) codecs_parsed = parse_codecs(codecs).evolve(array_spec) @@ -184,7 +181,7 @@ def dtype(self) -> np.dtype[Any]: def ndim(self) -> int: return len(self.shape) - def get_chunk_spec(self, _chunk_coords: ChunkCoords) -> ArraySpec: + def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: assert isinstance( self.chunk_grid, RegularChunkGrid ), "Currently, only regular chunk grid is supported" @@ -192,6 +189,7 @@ def get_chunk_spec(self, _chunk_coords: ChunkCoords) -> ArraySpec: shape=self.chunk_grid.chunk_shape, dtype=self.dtype, fill_value=self.fill_value, + order=order, ) def to_bytes(self) -> bytes: diff --git a/src/zarr/sync.py b/src/zarr/sync.py index a152030e89..649db0be76 100644 --- a/src/zarr/sync.py +++ b/src/zarr/sync.py @@ -10,7 +10,7 @@ from typing_extensions import ParamSpec -from zarr.config import SyncConfiguration +from zarr.config import config P = ParamSpec("P") T = TypeVar("T") @@ -113,15 +113,12 @@ def _get_loop() -> asyncio.AbstractEventLoop: class SyncMixin: - _sync_configuration: SyncConfiguration - def _sync(self, coroutine: Coroutine[Any, Any, T]) -> T: # TODO: refactor this to to take *args and **kwargs and pass those to the method # this should allow us to better type the sync wrapper return sync( coroutine, - loop=self._sync_configuration.asyncio_loop, - timeout=self._sync_configuration.timeout, + timeout=config.get("async.timeout"), ) def _sync_iter(self, async_iterator: AsyncIterator[T]) -> list[T]: diff --git a/tests/v3/package_with_entrypoint/__init__.py b/tests/v3/package_with_entrypoint/__init__.py index cf7df20457..8b31733069 100644 --- a/tests/v3/package_with_entrypoint/__init__.py +++ b/tests/v3/package_with_entrypoint/__init__.py @@ -1,7 +1,6 @@ from numpy import ndarray from zarr.abc.codec import ArrayBytesCodec from zarr.common import ArraySpec, BytesLike -from zarr.config import RuntimeConfiguration class TestCodec(ArrayBytesCodec): @@ -11,7 +10,6 @@ async def encode( self, chunk_array: ndarray, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> BytesLike | None: pass @@ -19,7 +17,6 @@ async def decode( self, chunk_bytes: BytesLike, chunk_spec: ArraySpec, - runtime_configuration: RuntimeConfiguration, ) -> ndarray: pass diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index ffd225668b..e042c7f275 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -21,9 +21,9 @@ TransposeCodec, ZstdCodec, ) -from zarr.metadata import runtime_configuration from zarr.abc.store import Store +from zarr.config import config from zarr.store import MemoryStore, StorePath @@ -255,25 +255,25 @@ async def test_order( else [TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()] ) - a = await AsyncArray.create( - store / "order", - shape=data.shape, - chunk_shape=(32, 8), - dtype=data.dtype, - fill_value=0, - chunk_key_encoding=("v2", "."), - codecs=codecs_, - runtime_configuration=runtime_configuration(runtime_write_order), - ) + with config.set({"array.order": runtime_write_order}): + a = await AsyncArray.create( + store / "order", + shape=data.shape, + chunk_shape=(32, 8), + dtype=data.dtype, + fill_value=0, + chunk_key_encoding=("v2", "."), + codecs=codecs_, + ) await _AsyncArrayProxy(a)[:, :].set(data) read_data = await _AsyncArrayProxy(a)[:, :].get() assert np.array_equal(data, read_data) - a = await AsyncArray.open( - store / "order", - runtime_configuration=runtime_configuration(order=runtime_read_order), - ) + with config.set({"array.order": 
runtime_read_order}): + a = await AsyncArray.open( + store / "order", + ) read_data = await _AsyncArrayProxy(a)[:, :].get() assert np.array_equal(data, read_data) @@ -313,22 +313,22 @@ def test_order_implicit( codecs_: Optional[List[Codec]] = [ShardingCodec(chunk_shape=(8, 8))] if with_sharding else None - a = Array.create( - store / "order_implicit", - shape=data.shape, - chunk_shape=(16, 16), - dtype=data.dtype, - fill_value=0, - codecs=codecs_, - runtime_configuration=runtime_configuration(runtime_write_order), - ) + with config.set({"array.order": runtime_write_order}): + a = Array.create( + store / "order_implicit", + shape=data.shape, + chunk_shape=(16, 16), + dtype=data.dtype, + fill_value=0, + codecs=codecs_, + ) a[:, :] = data - a = Array.open( - store / "order_implicit", - runtime_configuration=runtime_configuration(order=runtime_read_order), - ) + with config.set({"array.order": runtime_read_order}): + a = Array.open( + store / "order_implicit", + ) read_data = a[:, :] assert np.array_equal(data, read_data) @@ -364,26 +364,25 @@ async def test_transpose( if with_sharding else [TransposeCodec(order=(2, 1, 0)), BytesCodec()] ) - - a = await AsyncArray.create( - store / "transpose", - shape=data.shape, - chunk_shape=(1, 32, 8), - dtype=data.dtype, - fill_value=0, - chunk_key_encoding=("v2", "."), - codecs=codecs_, - runtime_configuration=runtime_configuration(runtime_write_order), - ) + with config.set({"array.order": runtime_write_order}): + a = await AsyncArray.create( + store / "transpose", + shape=data.shape, + chunk_shape=(1, 32, 8), + dtype=data.dtype, + fill_value=0, + chunk_key_encoding=("v2", "."), + codecs=codecs_, + ) await _AsyncArrayProxy(a)[:, :].set(data) read_data = await _AsyncArrayProxy(a)[:, :].get() assert np.array_equal(data, read_data) - a = await AsyncArray.open( - store / "transpose", - runtime_configuration=runtime_configuration(runtime_read_order), - ) + with config.set({"array.order": runtime_read_order}): + a = await AsyncArray.open( + store / "transpose", + ) read_data = await _AsyncArrayProxy(a)[:, :].get() assert np.array_equal(data, read_data) diff --git a/tests/v3/test_config.py b/tests/v3/test_config.py new file mode 100644 index 0000000000..43acdec5fa --- /dev/null +++ b/tests/v3/test_config.py @@ -0,0 +1,15 @@ +from zarr.config import config + + +def test_config_defaults_set(): + # regression test for available defaults + assert config.defaults == [ + {"array": {"order": "C"}, "async": {"concurrency": None, "timeout": None}} + ] + assert config.get("array.order") == "C" + + +def test_config_defaults_can_be_overridden(): + assert config.get("array.order") == "C" + with config.set({"array.order": "F"}): + assert config.get("array.order") == "F" diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index cf5c147c39..11400ef809 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -10,7 +10,6 @@ from zarr.group import AsyncGroup, Group, GroupMetadata from zarr.store import LocalStore, StorePath -from zarr.config import RuntimeConfiguration # todo: put RemoteStore in here @@ -58,7 +57,6 @@ def test_group(store_type, request) -> None: agroup = AsyncGroup( metadata=GroupMetadata(), store_path=store_path, - runtime_configuration=RuntimeConfiguration(), ) group = Group(agroup) assert agroup.metadata is group.metadata @@ -99,7 +97,6 @@ def test_group_sync_constructor(store_path) -> None: group = Group.create( store=store_path, attributes={"title": "test 123"}, - runtime_configuration=RuntimeConfiguration(), ) assert 
group._async_group.metadata.attributes["title"] == "test 123" diff --git a/tests/v3/test_sync.py b/tests/v3/test_sync.py index 8f644745d2..ba262f521d 100644 --- a/tests/v3/test_sync.py +++ b/tests/v3/test_sync.py @@ -4,7 +4,6 @@ from unittest.mock import patch, AsyncMock from zarr.sync import sync, _get_loop, _get_lock, SyncError, SyncMixin -from zarr.config import SyncConfiguration import pytest @@ -113,7 +112,6 @@ async def bar(self) -> AsyncGenerator: class SyncFoo(SyncMixin): def __init__(self, async_foo: AsyncFoo) -> None: self._async_foo = async_foo - self._sync_configuration = SyncConfiguration(asyncio_loop=sync_loop) def foo(self) -> str: return self._sync(self._async_foo.foo()) From f632771d4845e3ed3db0ea4b6bec64820b0e8577 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 11 May 2024 06:12:11 +0100 Subject: [PATCH 0522/1078] Disallow any generics in zarr.array (#1861) Co-authored-by: Joe Hamman --- pyproject.toml | 2 -- src/zarr/array.py | 27 ++++++++++++++------------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3f5450845f..f2a8ffef8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -186,8 +186,6 @@ module = [ "zarr.codecs.sharding", "zarr.codecs.transpose", "zarr.array_v2", - "zarr.array", - "zarr.sync", ] disallow_any_generics = false diff --git a/src/zarr/array.py b/src/zarr/array.py index 9f4ec911b4..ce7a937a36 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -15,6 +15,7 @@ from typing import Any, Dict, Iterable, Literal, Optional, Tuple, Union import numpy as np +import numpy.typing as npt from zarr.abc.codec import Codec @@ -76,7 +77,7 @@ async def create( store: StoreLike, *, shape: ChunkCoords, - dtype: Union[str, np.dtype], + dtype: npt.DTypeLike, chunk_shape: ChunkCoords, fill_value: Optional[Any] = None, chunk_key_encoding: Union[ @@ -175,14 +176,14 @@ def size(self) -> int: return np.prod(self.metadata.shape).item() @property - def dtype(self) -> np.dtype: + def dtype(self) -> np.dtype[Any]: return self.metadata.dtype @property - def attrs(self) -> dict: + def attrs(self) -> dict[str, Any]: return self.metadata.attributes - async def getitem(self, selection: Selection) -> np.ndarray: + async def getitem(self, selection: Selection) -> npt.NDArray[Any]: assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) indexer = BasicIndexer( selection, @@ -220,7 +221,7 @@ async def _read_chunk( chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, - out: np.ndarray, + out: npt.NDArray[Any], ) -> None: chunk_spec = self.metadata.get_chunk_spec(chunk_coords, self.order) chunk_key_encoding = self.metadata.chunk_key_encoding @@ -242,7 +243,7 @@ async def _read_chunk( else: out[out_selection] = self.metadata.fill_value - async def setitem(self, selection: Selection, value: np.ndarray) -> None: + async def setitem(self, selection: Selection, value: npt.NDArray[Any]) -> None: assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) chunk_shape = self.metadata.chunk_grid.chunk_shape indexer = BasicIndexer( @@ -282,7 +283,7 @@ async def setitem(self, selection: Selection, value: np.ndarray) -> None: async def _write_chunk( self, - value: np.ndarray, + value: npt.NDArray[Any], chunk_shape: ChunkCoords, chunk_coords: ChunkCoords, chunk_selection: SliceSelection, @@ -334,7 +335,7 @@ async def _write_chunk( await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) async def _write_chunk_to_store( - self, store_path: StorePath, chunk_array: np.ndarray, 
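For reference, a minimal usage sketch of the donfig-backed global configuration introduced above (illustrative only; it assumes nothing beyond the APIs shown in the config diff and exercised in tests/v3/test_config.py):

    # config.set works as a scoped context manager, as the tests above use it
    from zarr.config import config  # the donfig.Config instance from this patch

    assert config.get("array.order") == "C"       # default declared in config.py
    with config.set({"array.order": "F"}):        # temporary override
        assert config.get("array.order") == "F"
    assert config.get("array.order") == "C"       # restored when the context exits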
From f632771d4845e3ed3db0ea4b6bec64820b0e8577 Mon Sep 17 00:00:00 2001
From: David Stansby
Date: Sat, 11 May 2024 06:12:11 +0100
Subject: [PATCH 0522/1078] Disallow any generics in zarr.array (#1861)

Co-authored-by: Joe Hamman
---
 pyproject.toml    |  2 --
 src/zarr/array.py | 27 ++++++++++++++-------------
 2 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 3f5450845f..f2a8ffef8f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -186,8 +186,6 @@ module = [
     "zarr.codecs.sharding",
     "zarr.codecs.transpose",
     "zarr.array_v2",
-    "zarr.array",
-    "zarr.sync",
 ]
 disallow_any_generics = false

diff --git a/src/zarr/array.py b/src/zarr/array.py
index 9f4ec911b4..ce7a937a36 100644
--- a/src/zarr/array.py
+++ b/src/zarr/array.py
@@ -15,6 +15,7 @@
 from typing import Any, Dict, Iterable, Literal, Optional, Tuple, Union

 import numpy as np
+import numpy.typing as npt

 from zarr.abc.codec import Codec

@@ -76,7 +77,7 @@ async def create(
         store: StoreLike,
         *,
         shape: ChunkCoords,
-        dtype: Union[str, np.dtype],
+        dtype: npt.DTypeLike,
         chunk_shape: ChunkCoords,
         fill_value: Optional[Any] = None,
         chunk_key_encoding: Union[
@@ -175,14 +176,14 @@ def size(self) -> int:
         return np.prod(self.metadata.shape).item()

     @property
-    def dtype(self) -> np.dtype:
+    def dtype(self) -> np.dtype[Any]:
         return self.metadata.dtype

     @property
-    def attrs(self) -> dict:
+    def attrs(self) -> dict[str, Any]:
         return self.metadata.attributes

-    async def getitem(self, selection: Selection) -> np.ndarray:
+    async def getitem(self, selection: Selection) -> npt.NDArray[Any]:
         assert isinstance(self.metadata.chunk_grid, RegularChunkGrid)
         indexer = BasicIndexer(
             selection,
@@ -220,7 +221,7 @@ async def _read_chunk(
         chunk_coords: ChunkCoords,
         chunk_selection: SliceSelection,
         out_selection: SliceSelection,
-        out: np.ndarray,
+        out: npt.NDArray[Any],
     ) -> None:
         chunk_spec = self.metadata.get_chunk_spec(chunk_coords, self.order)
         chunk_key_encoding = self.metadata.chunk_key_encoding
@@ -242,7 +243,7 @@ async def _read_chunk(
         else:
             out[out_selection] = self.metadata.fill_value

-    async def setitem(self, selection: Selection, value: np.ndarray) -> None:
+    async def setitem(self, selection: Selection, value: npt.NDArray[Any]) -> None:
         assert isinstance(self.metadata.chunk_grid, RegularChunkGrid)
         chunk_shape = self.metadata.chunk_grid.chunk_shape
         indexer = BasicIndexer(
@@ -282,7 +283,7 @@ async def setitem(self, selection: Selection, value: np.ndarray) -> None:

     async def _write_chunk(
         self,
-        value: np.ndarray,
+        value: npt.NDArray[Any],
         chunk_shape: ChunkCoords,
         chunk_coords: ChunkCoords,
         chunk_selection: SliceSelection,
@@ -334,7 +335,7 @@ async def _write_chunk(
             await self._write_chunk_to_store(store_path, chunk_array, chunk_spec)

     async def _write_chunk_to_store(
-        self, store_path: StorePath, chunk_array: np.ndarray, chunk_spec: ArraySpec
+        self, store_path: StorePath, chunk_array: npt.NDArray[Any], chunk_spec: ArraySpec
     ) -> None:
         if np.all(chunk_array == self.metadata.fill_value):
             # chunks that only contain fill_value will be removed
@@ -402,7 +403,7 @@ def create(
         store: StoreLike,
         *,
         shape: ChunkCoords,
-        dtype: Union[str, np.dtype],
+        dtype: npt.DTypeLike,
         chunk_shape: ChunkCoords,
         fill_value: Optional[Any] = None,
         chunk_key_encoding: Union[
@@ -470,11 +471,11 @@ def size(self) -> int:
         return self._async_array.size

     @property
-    def dtype(self) -> np.dtype:
+    def dtype(self) -> np.dtype[Any]:
         return self._async_array.dtype

     @property
-    def attrs(self) -> dict:
+    def attrs(self) -> dict[str, Any]:
         return self._async_array.attrs

     @property
@@ -489,12 +490,12 @@ def store_path(self) -> StorePath:
     def order(self) -> Literal["C", "F"]:
         return self._async_array.order

-    def __getitem__(self, selection: Selection) -> np.ndarray:
+    def __getitem__(self, selection: Selection) -> npt.NDArray[Any]:
         return sync(
             self._async_array.getitem(selection),
         )

-    def __setitem__(self, selection: Selection, value: np.ndarray) -> None:
+    def __setitem__(self, selection: Selection, value: npt.NDArray[Any]) -> None:
         sync(
             self._async_array.setitem(selection, value),
         )

From 8da3df61eef5a547d6deaef681fe311c3b2b3175 Mon Sep 17 00:00:00 2001
From: David Stansby
Date: Sun, 12 May 2024 17:29:29 +0100
Subject: [PATCH 0523/1078] Fix some untyped calls (#1865)

---
 pyproject.toml     | 5 -----
 src/zarr/common.py | 4 ++--
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index f2a8ffef8f..1ab68baed5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -202,11 +202,6 @@ module = [
     "zarr.v2.*",
     "zarr.array_v2",
     "zarr.array",
-    "zarr.common",
-    "zarr.store.local",
-    "zarr.codecs.blosc",
-    "zarr.codecs.gzip",
-    "zarr.codecs.zstd",
 ]
 disallow_untyped_calls = false

diff --git a/src/zarr/common.py b/src/zarr/common.py
index 7ef2fc9a61..647ddf271c 100644
--- a/src/zarr/common.py
+++ b/src/zarr/common.py
@@ -52,14 +52,14 @@ async def concurrent_map(
     else:
         sem = asyncio.Semaphore(limit)

-        async def run(item):
+        async def run(item: Tuple[Any]) -> V:
             async with sem:
                 return await func(*item)

         return await asyncio.gather(*[asyncio.ensure_future(run(item)) for item in items])


-async def to_thread(func, /, *args, **kwargs):
+async def to_thread(func: Callable[..., V], /, *args: Any, **kwargs: Any) -> V:
     loop = asyncio.get_running_loop()
     ctx = contextvars.copy_context()
     func_call = functools.partial(ctx.run, func, *args, **kwargs)
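As an illustrative aside on the concurrency helper typed in the patch above: each item tuple is unpacked into the function's positional arguments, and `limit` bounds how many coroutines run at once via an asyncio.Semaphore. A minimal sketch, assuming only the signature shown in the diff:

    import asyncio
    from zarr.common import concurrent_map

    async def add(x: int, y: int) -> int:
        return x + y

    async def main() -> None:
        # at most 2 coroutines in flight at a time because limit=2
        results = await concurrent_map([(1, 2), (3, 4), (5, 6)], add, limit=2)
        print(results)  # [3, 7, 11]

    asyncio.run(main())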
From 0bb2be223f419c73fd59fa0d6171d614c144bf8c Mon Sep 17 00:00:00 2001
From: Joe Hamman
Date: Mon, 13 May 2024 08:51:04 -0700
Subject: [PATCH 0524/1078] chore: update project settings per scientific
 python repo-review (#1863)

* chore: update project settings per scientific python repo-review suggestions

* newline

* update maintainers

---
 .github/dependabot.yml     |  8 ++++++++
 .github/workflows/test.yml |  5 +++++
 pyproject.toml             | 22 +++++++++++++++++++---
 3 files changed, 32 insertions(+), 3 deletions(-)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index d8e8d4d57a..52fe24c7a7 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,7 +5,15 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
+    groups:
+      actions:
+        patterns:
+          - "*"
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
       interval: "weekly"
+    groups:
+      actions:
+        patterns:
+          - "*"
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index fee7380511..d063186b27 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -8,6 +8,11 @@ on:
     branches: [ v3 ]
   pull_request:
     branches: [ v3 ]
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true

 jobs:
   test:
diff --git a/pyproject.toml b/pyproject.toml
index 1ab68baed5..38c97c2815 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,8 +7,18 @@ build-backend = "hatchling.build"
 name = "zarr"
 description = "An implementation of chunked, compressed, N-dimensional arrays for Python"
 readme = { file = "README.md", content-type = "text/markdown" }
+authors = [
+    { name = "Alistair Miles", email = "alimanfoo@googlemail.com" },
+]
 maintainers = [
-    { name = "Alistair Miles", email = "alimanfoo@googlemail.com" }
+    { name = "Davis Bennett", email = "davis.v.bennett@gmail.com" },
+    { name = "jakirkham" },
+    { name = "Josh Moore", email = "josh@openmicroscopy.org" },
+    { name = "Joe Hamman", email = "joe@earthmover.io" },
+    { name = "Juan Nunez-Iglesias", email = "juan.nunez-iglesias@monash.edu" },
+    { name = "Martin Durant", email = "mdurant@anaconda.com" },
+    { name = "Norman Rzepka" },
+    { name = "Ryan Abernathey" }
 ]
 requires-python = ">=3.10"
 dependencies = [
@@ -38,7 +48,8 @@ classifiers = [
     'Programming Language :: Python :: 3.11',
     'Programming Language :: Python :: 3.12',
 ]
-license = "MIT"
+license = {text = "MIT License"}
+keywords = ["Python", "compressed", "ndimensional-arrays", "zarr"]

 [project.optional-dependencies]
 jupyter = [
@@ -131,6 +142,7 @@ serve = "sphinx-autobuild docs docs/_build --ignore 'docs/_autoapi/**/*' --host

 [tool.ruff]
 line-length = 100
+src = ["src"]
 force-exclude = true
 extend-exclude = [
     ".bzr",
@@ -207,6 +219,10 @@ disallow_untyped_calls = false

 [tool.pytest.ini_options]
+minversion = "7"
+testpaths = ["tests"]
+log_cli_level = "INFO"
+xfail_strict = true
 asyncio_mode = "auto"
 doctest_optionflags = [
     "NORMALIZE_WHITESPACE",
     "ELLIPSIS",
     "IGNORE_EXCEPTION_DETAIL",
 ]
 addopts = [
-    "--durations=10",
+    "--durations=10", "-ra", "--strict-config", "--strict-markers",
 ]
 filterwarnings = [
     "error:::zarr.*",

From 8979205fbebd991974172ac23b436372911f4666 Mon Sep 17 00:00:00 2001
From: Joe Hamman
Date: Mon, 13 May 2024 16:45:41 -0700
Subject: [PATCH 0525/1078] Group dependabot updates (#1854)

---
 .github/dependabot.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index d8e8d4d57a..5a0befe9b5 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,6 +5,10 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
+    groups:
+      requirements:
+        patterns:
+          - "*"
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:

From 5a39ff6c63eee41075486a8d822d3e523fc39b9f Mon Sep 17 00:00:00 2001
From: David Stansby
Date: Tue, 14 May 2024 04:31:36 +0100
Subject: [PATCH 0526/1078] Disallow untyped defs (#1834)

---
 pyproject.toml         | 12 ++++++++++++
 src/zarr/array.py      |  7 ++++---
 src/zarr/attributes.py | 12 ++++++------
 3 files changed, 22 insertions(+), 9 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 38c97c2815..ddfe75f153 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -181,6 +181,8 @@ disallow_any_generics = true
 disallow_incomplete_defs = true
 disallow_untyped_calls = true

+disallow_untyped_defs = true
+
+[[tool.mypy.overrides]]
+module = [
+    "zarr.v2.*",
+    "zarr.array_v2",
+    "zarr.array",
+    "zarr.common",
+    "zarr.group",
+    "zarr.metadata"
+]
+disallow_untyped_defs = false

 [tool.pytest.ini_options]
diff --git a/src/zarr/array.py b/src/zarr/array.py
index ce7a937a36..128d7c58e1 100644
--- a/src/zarr/array.py
+++ b/src/zarr/array.py
@@ -21,6 +21,7 @@
 # from zarr.array_v2 import ArrayV2
 from zarr.codecs import BytesCodec
+from zarr.codecs.pipeline import CodecPipeline
 from zarr.common import (
     ZARR_JSON,
     ArraySpec,
@@ -55,7 +56,7 @@ class AsyncArray:
     order: Literal["C", "F"]

     @property
-    def codecs(self):
+    def codecs(self) -> CodecPipeline:
         return self.metadata.codecs

     def __init__(
@@ -386,7 +387,7 @@ async def update_attributes(self, new_attributes: Dict[str, Any]) -> AsyncArray:
         await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes())
         return replace(self, metadata=new_metadata)

-    def __repr__(self):
+    def __repr__(self) -> str:
         return f""

     async def info(self):
diff --git a/src/zarr/attributes.py b/src/zarr/attributes.py
index 8086e18d7b..18f6a63a55 100644
--- a/src/zarr/attributes.py
+++ b/src/zarr/attributes.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 from collections.abc import MutableMapping
-from typing import TYPE_CHECKING, Any, Union
+from typing import TYPE_CHECKING, Any, Iterator, Union

 if TYPE_CHECKING:
     from zarr.group import Group
@@ -12,21 +12,21 @@ def __init__(self, obj: Union[Array, Group]):
         # key=".zattrs", read_only=False, cache=True, synchronizer=None
         self._obj = obj

-    def __getitem__(self, key):
+    def __getitem__(self, key: str) -> Any:
         return self._obj.metadata.attributes[key]

-    def __setitem__(self, key, value):
+    def __setitem__(self, key: str, value: Any) -> None:
         new_attrs = dict(self._obj.metadata.attributes)
         new_attrs[key] = value
         self._obj = self._obj.update_attributes(new_attrs)

-    def __delitem__(self, key):
+    def __delitem__(self, key: str) -> None:
         new_attrs = dict(self._obj.metadata.attributes)
         del new_attrs[key]
         self._obj = self._obj.update_attributes(new_attrs)

-    def __iter__(self):
+    def __iter__(self) -> Iterator[str]:
         return iter(self._obj.metadata.attributes)

-    def __len__(self):
+    def __len__(self) -> int:
         return len(self._obj.metadata.attributes)

From 8264acebc5e1671c36bc434f14cac914f5a099e1 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 13 May 2024 20:33:05 -0700
Subject: [PATCH 0527/1078] chore: update pre-commit hooks (#1876)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.4.3 → v0.4.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.3...v0.4.4)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 747cb86688..be57770200 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -8,7 +8,7 @@ default_language_version:
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.4.3'
+    rev: 'v0.4.4'
    hooks:
      - id: ruff
  - repo: https://github.com/psf/black
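A short sketch of the MutableMapping behaviour typed in PATCH 0526 above (illustrative only; it assumes an in-memory store and the Group/Attributes signatures visible in these diffs): reads go through the node's metadata, while writes round-trip through update_attributes.

    from zarr.group import Group
    from zarr.store import MemoryStore

    group = Group.create(store=MemoryStore(), attributes={"title": "demo"})
    assert group.attrs["title"] == "demo"   # __getitem__ reads node metadata
    group.attrs["owner"] = "me"             # __setitem__ persists via update_attributes
    assert len(group.attrs) >= 1 and "title" in set(group.attrs)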
Got {data} instead.") @dataclass(frozen=True) diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index 29506aa619..cc017ec982 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -48,7 +48,7 @@ def __str__(self) -> str: return _dereference_path(str(self.store), self.path) def __repr__(self) -> str: - return f"StorePath({self.store.__class__.__name__}, {repr(str(self))})" + return f"StorePath({self.store.__class__.__name__}, {str(self)!r})" def __eq__(self, other: Any) -> bool: try: diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index dde28d5214..e5021b6483 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -83,7 +83,7 @@ def __str__(self) -> str: return f"file://{self.root}" def __repr__(self) -> str: - return f"LocalStore({repr(str(self))})" + return f"LocalStore({str(self)!r})" def __eq__(self, other: object) -> bool: return isinstance(other, type(self)) and self.root == other.root diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index 2a09bc2dd5..9f10ed22a3 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -23,7 +23,7 @@ def __str__(self) -> str: return f"memory://{id(self._store_dict)}" def __repr__(self) -> str: - return f"MemoryStore({repr(str(self))})" + return f"MemoryStore({str(self)!r})" async def get( self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index c42cf3f56d..fa6cd2167e 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -39,7 +39,7 @@ def __str__(self) -> str: return str(self.root) def __repr__(self) -> str: - return f"RemoteStore({repr(str(self))})" + return f"RemoteStore({str(self)!r})" def _make_fs(self) -> Tuple[AsyncFileSystem, str]: import fsspec diff --git a/src/zarr/sync.py b/src/zarr/sync.py index 649db0be76..ea765077ce 100644 --- a/src/zarr/sync.py +++ b/src/zarr/sync.py @@ -84,7 +84,7 @@ def sync( if len(unfinished) > 0: raise asyncio.TimeoutError(f"Coroutine {coro} failed to finish in within {timeout}s") assert len(finished) == 1 - return_result = list(finished)[0].result() + return_result = next(iter(finished)).result() if isinstance(return_result, BaseException): raise return_result diff --git a/src/zarr/v2/_storage/absstore.py b/src/zarr/v2/_storage/absstore.py index ee03d44bd4..c04ad240da 100644 --- a/src/zarr/v2/_storage/absstore.py +++ b/src/zarr/v2/_storage/absstore.py @@ -58,7 +58,7 @@ class ABSStore(Store): ----- In order to use this store, you must install the Microsoft Azure Storage SDK for Python, ``azure-storage-blob>=12.5.0``. 
- """ # noqa: E501 + """ def __init__( self, diff --git a/src/zarr/v2/meta.py b/src/zarr/v2/meta.py index 2f2f3b9487..70c424c8b5 100644 --- a/src/zarr/v2/meta.py +++ b/src/zarr/v2/meta.py @@ -36,37 +36,37 @@ def get_extended_dtype_info(dtype) -> dict: if dtype.str in _v3_complex_types: return dict( - extension="https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/extensions/complex-dtypes/v1.0.html", # noqa + extension="https://zarr-specs.readthedocs.io/en/core-protocol-v3.0-dev/protocol/extensions/complex-dtypes/v1.0.html", type=dtype.str, fallback=None, ) elif dtype.str == "|O": return dict( - extension="TODO: object array protocol URL", # noqa + extension="TODO: object array protocol URL", type=dtype.str, fallback=None, ) elif dtype.str.startswith("|S"): return dict( - extension="TODO: bytestring array protocol URL", # noqa + extension="TODO: bytestring array protocol URL", type=dtype.str, fallback=None, ) elif dtype.str.startswith("U"): return dict( - extension="TODO: unicode array protocol URL", # noqa + extension="TODO: unicode array protocol URL", type=dtype.str, fallback=None, ) elif dtype.str.startswith("|V"): return dict( - extension="TODO: structured array protocol URL", # noqa + extension="TODO: structured array protocol URL", type=dtype.descr, fallback=None, ) elif dtype.str in _v3_datetime_types: return dict( - extension="https://zarr-specs.readthedocs.io/en/latest/extensions/data-types/datetime/v1.0.html", # noqa + extension="https://zarr-specs.readthedocs.io/en/latest/extensions/data-types/datetime/v1.0.html", type=dtype.str, fallback=None, ) diff --git a/src/zarr/v2/n5.py b/src/zarr/v2/n5.py index 92b0f37924..4ea5e45721 100644 --- a/src/zarr/v2/n5.py +++ b/src/zarr/v2/n5.py @@ -764,7 +764,7 @@ def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dic zarr_config["filters"] = None elif codec_id == "gzip": - if "useZlib" in compressor_config and compressor_config["useZlib"]: + if compressor_config.get("useZlib"): zarr_config["id"] = "zlib" zarr_config["level"] = compressor_config["level"] else: diff --git a/src/zarr/v2/storage.py b/src/zarr/v2/storage.py index dd0b090a81..de45201fc0 100644 --- a/src/zarr/v2/storage.py +++ b/src/zarr/v2/storage.py @@ -631,7 +631,7 @@ def __len__(self): return len(self._mutable_mapping) def __repr__(self): - return f"<{self.__class__.__name__}: \n{repr(self._mutable_mapping)}\n at {hex(id(self))}>" + return f"<{self.__class__.__name__}: \n{self._mutable_mapping!r}\n at {hex(id(self))}>" def __eq__(self, other): if isinstance(other, KVStore): diff --git a/tests/v2/test_storage_v3.py b/tests/v2/test_storage_v3.py index 3d8024de70..a8bd5ca65c 100644 --- a/tests/v2/test_storage_v3.py +++ b/tests/v2/test_storage_v3.py @@ -65,7 +65,7 @@ # from .test_storage import TestSQLiteStore as _TestSQLiteStore # from .test_storage import TestSQLiteStoreInMemory as _TestSQLiteStoreInMemory # from .test_storage import TestZipStore as _TestZipStore -# from .test_storage import dimension_separator_fixture, s3, skip_if_nested_chunks # noqa +# from .test_storage import dimension_separator_fixture, s3, skip_if_nested_chunks # pytestmark = pytest.mark.skipif(not v3_api_available, reason="v3 api is not available") diff --git a/tests/v2/test_sync.py b/tests/v2/test_sync.py index 8bf1304dc2..ea6fd0523d 100644 --- a/tests/v2/test_sync.py +++ b/tests/v2/test_sync.py @@ -16,7 +16,7 @@ from zarr.v2.sync import ProcessSynchronizer, ThreadSynchronizer # zarr_version fixture must be imported although not used directly here -from 
.test_attrs import TestAttributes # noqa +from .test_attrs import TestAttributes from .test_core import TestArray from .test_hierarchy import TestGroup From f3305d92aabeb9be7a3c2b7bd4767440bb55dd5e Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Wed, 15 May 2024 17:35:48 +0200 Subject: [PATCH 0529/1078] groundwork for V3 group tests (#1743) * feat: functional .children method for groups * changes necessary for correctly generating list of children * add stand-alone test for group.children * give type hints a glow-up * test: use separate assert statements to avoid platform-dependent ordering issues * test: put fixtures in conftest, add MemoryStore fixture * docs: release notes * test: remove prematurely-added mock s3 fixture * chore: move v3 tests into v3 folder * chore: type hints * test: add schema for group method tests * chore: add type for zarr_formats * chore: remove localstore for now * test: add __init__.py to support imports from top-level conftest.py, and add some docstrings, and remove redundant def * fix: return valid JSON from GroupMetadata.to_bytes for v2 metadata * fix: don't use a type as a value * test: add getitem test * fix: replace reference to nonexistent method in with , which does exist * test: declare v3ness via directory structure, not test file name * add a docstring to _get, and pass auto_mkdir to _put * fix: add docstring to LocalStore.get_partial_values; adjust body of LocalStore.get_partial_values to properly handle the byte_range parameter of LocalStore.get. * test: add tests for localstore init, set, get, get_partial * fix: Rename children to members; AsyncGroup.members yields tuples of (name, AsyncArray / AsyncGroup) pairs; Group.members repackages these into a dict. * fix: make Group.members return a tuple of str, Array | Group pairs * fix: revert changes to synchronization code; this is churn that we need to deal with * chore: move v3 tests into v3 folder * chore: type hints * test: add schema for group method tests * chore: add type for zarr_formats * chore: remove localstore for now * test: add __init__.py to support imports from top-level conftest.py, and add some docstrings, and remove redundant def * fix: return valid JSON from GroupMetadata.to_bytes for v2 metadata * fix: don't use a type as a value * test: add getitem test * fix: replace reference to nonexistent method in with , which does exist * test: declare v3ness via directory structure, not test file name * add a docstring to _get, and pass auto_mkdir to _put * fix: add docstring to LocalStore.get_partial_values; adjust body of LocalStore.get_partial_values to properly handle the byte_range parameter of LocalStore.get. * test: add tests for localstore init, set, get, get_partial * fix: remove pre-emptive fetching from group.open * fix: use removeprefix (removes a substring) instead of strip (removes any member of a set); comment out / avoid tests that cannot pass right now; don't consider implicit groups for v2; check if prefix is present in storage before opening for Group.getitem * xfail v2 tests that are sure to fail; add delitem tests; partition xfailing tests into subtests * fix: handle byte_range[0] being None * fix: adjust test for localstore.get to check that get on nonexistent keys returns None; correctly create intermediate directories when preparing test data in test_local_store_get_partial * fix: add zarr_format parameter to array creation routines (which raises if zarr_format is not 3), and xfail the tests that will hit this condition. 
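For illustration, the two rewrites this ruff pass applies most often, shown side by side (plain Python, nothing zarr-specific assumed):

    value = "a/b"
    assert f"{value!r}" == f"{repr(value)}"      # RUF010: explicit conversion flag
    cfg = {"useZlib": True}
    assert cfg.get("useZlib") == ("useZlib" in cfg and cfg["useZlib"])  # RUF019
    finished = {"only-item"}
    assert next(iter(finished)) == list(finished)[0]  # idiom used in sync.py above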
From f3305d92aabeb9be7a3c2b7bd4767440bb55dd5e Mon Sep 17 00:00:00 2001
From: Davis Bennett
Date: Wed, 15 May 2024 17:35:48 +0200
Subject: [PATCH 0529/1078] groundwork for V3 group tests (#1743)

* feat: functional .children method for groups

* changes necessary for correctly generating list of children

* add stand-alone test for group.children

* give type hints a glow-up

* test: use separate assert statements to avoid platform-dependent ordering issues

* test: put fixtures in conftest, add MemoryStore fixture

* docs: release notes

* test: remove prematurely-added mock s3 fixture

* chore: move v3 tests into v3 folder

* chore: type hints

* test: add schema for group method tests

* chore: add type for zarr_formats

* chore: remove localstore for now

* test: add __init__.py to support imports from top-level conftest.py, and add some docstrings, and remove redundant def

* fix: return valid JSON from GroupMetadata.to_bytes for v2 metadata

* fix: don't use a type as a value

* test: add getitem test

* fix: replace a reference to a nonexistent method with one that does exist

* test: declare v3ness via directory structure, not test file name

* add a docstring to _get, and pass auto_mkdir to _put

* fix: add docstring to LocalStore.get_partial_values; adjust body of LocalStore.get_partial_values to properly handle the byte_range parameter of LocalStore.get.

* test: add tests for localstore init, set, get, get_partial

* fix: Rename children to members; AsyncGroup.members yields tuples of (name, AsyncArray / AsyncGroup) pairs; Group.members repackages these into a dict.

* fix: make Group.members return a tuple of str, Array | Group pairs

* fix: revert changes to synchronization code; this is churn that we need to deal with

* chore: move v3 tests into v3 folder

* chore: type hints

* test: add schema for group method tests

* chore: add type for zarr_formats

* chore: remove localstore for now

* test: add __init__.py to support imports from top-level conftest.py, and add some docstrings, and remove redundant def

* fix: return valid JSON from GroupMetadata.to_bytes for v2 metadata

* fix: don't use a type as a value

* test: add getitem test

* fix: replace a reference to a nonexistent method with one that does exist

* test: declare v3ness via directory structure, not test file name

* add a docstring to _get, and pass auto_mkdir to _put

* fix: add docstring to LocalStore.get_partial_values; adjust body of LocalStore.get_partial_values to properly handle the byte_range parameter of LocalStore.get.

* test: add tests for localstore init, set, get, get_partial

* fix: remove pre-emptive fetching from group.open

* fix: use removeprefix (removes a substring) instead of strip (removes any member of a set); comment out / avoid tests that cannot pass right now; don't consider implicit groups for v2; check if prefix is present in storage before opening for Group.getitem

* xfail v2 tests that are sure to fail; add delitem tests; partition xfailing tests into subtests

* fix: handle byte_range[0] being None

* fix: adjust test for localstore.get to check that get on nonexistent keys returns None; correctly create intermediate directories when preparing test data in test_local_store_get_partial

* fix: add zarr_format parameter to array creation routines (which raises if zarr_format is not 3), and xfail the tests that will hit this condition.
  add tests for create_group, create_array, and update_attributes methods of asyncgroup.

* test: add group init test

* feature(store): make list_* methods async generators (#110)

* feature(store): make list_* methods async generators

* Update src/zarr/v3/store/memory.py

* Apply suggestions from code review

- simplify code comments
- use `removeprefix` instead of `strip`

---------

Co-authored-by: Davis Bennett

* fix: define utility for converting asyncarray to array, and similar for group, largely to appease mypy

* chore: remove checks that only existed because of implicit groups

* chore: clean up docstring and modernize some type hints

* chore: move imports to top-level

* remove fixture files

* remove commented imports

* remove explicit asyncio marks; use __eq__ method of LocalStore for test

* rename test_storage to test_store

* modern type hints

---------

Co-authored-by: Joe Hamman
---
 src/zarr/abc/store.py    |   2 +-
 src/zarr/array.py        |   2 +
 src/zarr/common.py       |  29 +-
 src/zarr/group.py        |  95 +++--
 src/zarr/store/local.py  |  23 +-
 src/zarr/store/memory.py |   2 +-
 tests/v2/conftest.py     |   2 +-
 tests/v3/__init__.py     |   0
 tests/v3/conftest.py     |  52 ++-
 tests/v3/test_codecs.py  |  11 -
 tests/v3/test_group.py   | 315 +++++++++++++++-
 tests/v3/test_storage.py |  18 -
 tests/v3/test_store.py   | 797 +++++++++++++++++++++++++++++++++++++++
 13 files changed, 1241 insertions(+), 107 deletions(-)
 create mode 100644 tests/v3/__init__.py
 delete mode 100644 tests/v3/test_storage.py
 create mode 100644 tests/v3/test_store.py
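A hedged sketch of the async-generator listing API this patch settles on (per the commit message above, AsyncGroup.members lazily yields (name, AsyncArray | AsyncGroup) pairs; signatures taken from the group.py diff below):

    import asyncio
    from zarr.group import AsyncGroup
    from zarr.store import MemoryStore

    async def main() -> None:
        root = await AsyncGroup.create(MemoryStore())
        await root.create_group("subgroup")
        await root.create_array("subarray", shape=(4,), dtype="uint8", chunk_shape=(2,))
        # members is an async generator, so it is consumed with `async for`
        async for name, node in root.members():
            print(name, type(node).__name__)

    asyncio.run(main())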
diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py
index d92f8d4e2e..3d9550f733 100644
--- a/src/zarr/abc/store.py
+++ b/src/zarr/abc/store.py
@@ -1,6 +1,6 @@
 from abc import abstractmethod, ABC
-
 from collections.abc import AsyncGenerator
+
 from typing import List, Tuple, Optional
diff --git a/src/zarr/array.py b/src/zarr/array.py
index 128d7c58e1..a594b3dd11 100644
--- a/src/zarr/array.py
+++ b/src/zarr/array.py
@@ -28,6 +28,7 @@
     ChunkCoords,
     Selection,
     SliceSelection,
+    ZarrFormat,
     concurrent_map,
 )
 from zarr.config import config
@@ -89,6 +90,7 @@ async def create(
         dimension_names: Optional[Iterable[str]] = None,
         attributes: Optional[Dict[str, Any]] = None,
         exists_ok: bool = False,
+        zarr_format: ZarrFormat = 3,
     ) -> AsyncArray:
         store_path = make_store_path(store)
         if not exists_ok:
diff --git a/src/zarr/common.py b/src/zarr/common.py
index ea26cae7b1..95cb8f4a3e 100644
--- a/src/zarr/common.py
+++ b/src/zarr/common.py
@@ -5,8 +5,6 @@
     Union,
     Tuple,
     Iterable,
-    Dict,
-    List,
     TypeVar,
     overload,
     Any,
@@ -18,7 +16,7 @@
 import functools

 if TYPE_CHECKING:
-    from typing import Any, Awaitable, Callable, Iterator, Optional, Type
+    from typing import Awaitable, Callable, Iterator, Optional, Type

 import numpy as np

@@ -27,25 +25,26 @@
 ZGROUP_JSON = ".zgroup"
 ZATTRS_JSON = ".zattrs"

-BytesLike = Union[bytes, bytearray, memoryview]
-ChunkCoords = Tuple[int, ...]
+BytesLike = bytes | bytearray | memoryview
+ChunkCoords = tuple[int, ...]
 ChunkCoordsLike = Iterable[int]
-SliceSelection = Tuple[slice, ...]
+SliceSelection = tuple[slice, ...]
+Selection = slice | SliceSelection
+ZarrFormat = Literal[2, 3]
 JSON = Union[str, None, int, float, Enum, dict[str, "JSON"], list["JSON"], tuple["JSON", ...]]


 def product(tup: ChunkCoords) -> int:
     return functools.reduce(lambda x, y: x * y, tup, 1)


-T = TypeVar("T", bound=Tuple[Any, ...])
+T = TypeVar("T", bound=tuple[Any, ...])
 V = TypeVar("V")


 async def concurrent_map(
-    items: List[T], func: Callable[..., Awaitable[V]], limit: Optional[int] = None
-) -> List[V]:
+    items: list[T], func: Callable[..., Awaitable[V]], limit: Optional[int] = None
+) -> list[V]:
     if limit is None:
         return await asyncio.gather(*[func(*item) for item in items])

@@ -127,18 +126,18 @@ def parse_configuration(data: JSON) -> JSON:
 @overload
 def parse_named_configuration(
     data: JSON, expected_name: Optional[str] = None
-) -> Tuple[str, Dict[str, JSON]]: ...
+) -> tuple[str, dict[str, JSON]]: ...


 @overload
 def parse_named_configuration(
     data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True
-) -> Tuple[str, Optional[Dict[str, JSON]]]: ...
+) -> tuple[str, Optional[dict[str, JSON]]]: ...


 def parse_named_configuration(
     data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True
-) -> Tuple[str, Optional[JSON]]:
+) -> tuple[str, Optional[JSON]]:
     if not isinstance(data, dict):
         raise TypeError(f"Expected dict, got {type(data)}")
     if "name" not in data:
@@ -153,7 +152,7 @@ def parse_named_configuration(
     return name_parsed, configuration_parsed


-def parse_shapelike(data: Any) -> Tuple[int, ...]:
+def parse_shapelike(data: Any) -> tuple[int, ...]:
     if not isinstance(data, Iterable):
         raise TypeError(f"Expected an iterable. Got {data} instead.")
     data_tuple = tuple(data)
diff --git a/src/zarr/group.py b/src/zarr/group.py
index c71860b1b6..cce53d0a98 100644
--- a/src/zarr/group.py
+++ b/src/zarr/group.py
@@ -5,21 +5,19 @@
 import asyncio
 import json
 import logging
+import numpy.typing as npt

 if TYPE_CHECKING:
-    from typing import (
-        Any,
-        AsyncGenerator,
-        Literal,
-        AsyncIterator,
-    )
+    from typing import Any, AsyncGenerator, Literal, Iterable

+from zarr.abc.codec import Codec
 from zarr.abc.metadata import Metadata
 from zarr.array import AsyncArray, Array
 from zarr.attributes import Attributes
-from zarr.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON
+from zarr.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, ChunkCoords
 from zarr.store import StoreLike, StorePath, make_store_path
 from zarr.sync import SyncMixin, sync
+from typing import overload

 logger = logging.getLogger("zarr.group")

@@ -41,6 +39,26 @@ def parse_attributes(data: Any) -> dict[str, Any]:
     raise TypeError(msg)


+@overload
+def _parse_async_node(node: AsyncArray) -> Array: ...
+
+
+@overload
+def _parse_async_node(node: AsyncGroup) -> Group: ...
+
+
+def _parse_async_node(node: AsyncArray | AsyncGroup) -> Array | Group:
+    """
+    Wrap an AsyncArray in an Array, or an AsyncGroup in a Group.
+    """
+    if isinstance(node, AsyncArray):
+        return Array(node)
+    elif isinstance(node, AsyncGroup):
+        return Group(node)
+    else:
+        assert False
+
+
 @dataclass(frozen=True)
 class GroupMetadata(Metadata):
     attributes: dict[str, Any] = field(default_factory=dict)
@@ -53,7 +71,7 @@ def to_bytes(self) -> dict[str, bytes]:
             return {ZARR_JSON: json.dumps(self.to_dict()).encode()}
         else:
             return {
-                ZGROUP_JSON: json.dumps({"zarr_format": 2}).encode(),
+                ZGROUP_JSON: json.dumps({"zarr_format": self.zarr_format}).encode(),
                 ZATTRS_JSON: json.dumps(self.attributes).encode(),
             }

@@ -113,11 +131,11 @@ async def open(
                 (store_path / ZGROUP_JSON).get(), (store_path / ZATTRS_JSON).get()
             )
             if zgroup_bytes is None:
-                raise KeyError(store_path)  # filenotfounderror?
+                raise FileNotFoundError(store_path)
         elif zarr_format == 3:
             zarr_json_bytes = await (store_path / ZARR_JSON).get()
             if zarr_json_bytes is None:
-                raise KeyError(store_path)  # filenotfounderror?
+                raise FileNotFoundError(store_path)
         elif zarr_format is None:
             zarr_json_bytes, zgroup_bytes, zattrs_bytes = await asyncio.gather(
                 (store_path / ZARR_JSON).get(),
@@ -168,6 +186,7 @@ async def getitem(
         key: str,
     ) -> AsyncArray | AsyncGroup:
         store_path = self.store_path / key
+        logger.warning("key=%s, store_path=%s", key, store_path)

         # Note:
         # in zarr-python v2, we first check if `key` references an Array, else if `key` references
         # are reusable, but for v3 they would perform redundant I/O operations.
         # Not clear how much of that strategy we want to keep here.

-        # if `key` names an object in storage, it cannot be an array or group
-        if await store_path.exists():
-            raise KeyError(key)
-
         if self.metadata.zarr_format == 3:
             zarr_json_bytes = await (store_path / ZARR_JSON).get()
             if zarr_json_bytes is None:
@@ -248,16 +263,42 @@ def attrs(self):
     def info(self):
         return self.metadata.info

-    async def create_group(self, path: str, **kwargs) -> AsyncGroup:
+    async def create_group(
+        self, path: str, exists_ok: bool = False, attributes: dict[str, Any] = {}
+    ) -> AsyncGroup:
         return await type(self).create(
             self.store_path / path,
-            **kwargs,
+            attributes=attributes,
+            exists_ok=exists_ok,
+            zarr_format=self.metadata.zarr_format,
         )

-    async def create_array(self, path: str, **kwargs) -> AsyncArray:
+    async def create_array(
+        self,
+        path: str,
+        shape: ChunkCoords,
+        dtype: npt.DTypeLike,
+        chunk_shape: ChunkCoords,
+        fill_value: Any | None = None,
+        chunk_key_encoding: tuple[Literal["default"], Literal[".", "/"]]
+        | tuple[Literal["v2"], Literal[".", "/"]] = ("default", "/"),
+        codecs: Iterable[Codec | dict[str, Any]] | None = None,
+        dimension_names: Iterable[str] | None = None,
+        attributes: dict[str, Any] | None = None,
+        exists_ok: bool = False,
+    ) -> AsyncArray:
         return await AsyncArray.create(
             self.store_path / path,
-            **kwargs,
+            shape=shape,
+            dtype=dtype,
+            chunk_shape=chunk_shape,
+            fill_value=fill_value,
+            chunk_key_encoding=chunk_key_encoding,
+            codecs=codecs,
+            dimension_names=dimension_names,
+            attributes=attributes,
+            exists_ok=exists_ok,
+            zarr_format=self.metadata.zarr_format,
         )

     async def update_attributes(self, new_attributes: dict[str, Any]):
@@ -348,7 +389,7 @@ async def array_keys(self) -> AsyncGenerator[str, None]:
             yield key

     # todo: decide if this method should be separate from `array_keys`
-    async def arrays(self) -> AsyncIterator[AsyncArray]:
+    async def arrays(self) -> AsyncGenerator[AsyncArray, None]:
         async for key, value in self.members():
             if isinstance(value, AsyncArray):
                 yield value
@@ -472,19 +513,13 @@ def nmembers(self) -> int:

     @property
     def members(self) -> tuple[tuple[str, Array | Group], ...]:
         """
-        Return the sub-arrays and sub-groups of this group as a `tuple` of (name, array | group)
+        Return the sub-arrays and sub-groups of this group as a tuple of (name, array | group)
         pairs
         """
-        _members: list[tuple[str, AsyncArray | AsyncGroup]] = self._sync_iter(
-            self._async_group.members()
-        )
-        ret: list[tuple[str, Array | Group]] = []
-        for key, value in _members:
-            if isinstance(value, AsyncArray):
-                ret.append((key, Array(value)))
-            else:
-                ret.append((key, Group(value)))
-        return tuple(ret)
+        _members = self._sync_iter(self._async_group.members())
+
+        result = tuple(map(lambda kv: (kv[0], _parse_async_node(kv[1])), _members))
+        return result

     def __contains__(self, member) -> bool:
         return self._sync(self._async_group.contains(member))
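The LocalStore diff that follows documents a (start, n_bytes) byte-range convention. A hedged sketch of that contract, using the module-private helper `_get` purely for illustration (the file path here is hypothetical):

    from pathlib import Path
    from zarr.store.local import _get

    path = Path("/tmp/zarr_byte_range_demo.bin")
    path.write_bytes(bytes(range(10)))
    assert _get(path, byte_range=None) == bytes(range(10))     # whole file
    assert _get(path, byte_range=(2, 3)) == bytes([2, 3, 4])   # 3 bytes from index 2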
diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py
index e5021b6483..a3dd65979b 100644
--- a/src/zarr/store/local.py
+++ b/src/zarr/store/local.py
@@ -4,20 +4,20 @@
 import shutil
 from collections.abc import AsyncGenerator
 from pathlib import Path
-from typing import Union, Optional, List, Tuple

 from zarr.abc.store import Store
 from zarr.common import BytesLike, concurrent_map, to_thread


-def _get(path: Path, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> bytes:
+def _get(path: Path, byte_range: tuple[int, int | None] | None) -> bytes:
     """
     Fetch a contiguous region of bytes from a file.
+
     Parameters
     ----------
     path: Path
         The file to read bytes from.
-    byte_range: Optional[Tuple[int, Optional[int]]] = None
+    byte_range: tuple[int, int | None] | None = None
         The range of bytes to read. If `byte_range` is `None`, then the entire file will be read.
         If `byte_range` is a tuple, the first value specifies the index of the first byte to read,
         and the second value specifies the total number of bytes to read. If the total value is
@@ -49,7 +49,7 @@ def _get(path: Path, byte_range: Optional[Tuple[int, Optional[int]]] = None) ->
 def _put(
     path: Path,
     value: BytesLike,
-    start: Optional[int] = None,
+    start: int | None = None,
     auto_mkdir: bool = True,
 ) -> int | None:
     if auto_mkdir:
@@ -71,7 +71,7 @@ class LocalStore(Store):
     root: Path
     auto_mkdir: bool

-    def __init__(self, root: Union[Path, str], auto_mkdir: bool = True):
+    def __init__(self, root: Path | str, auto_mkdir: bool = True):
         if isinstance(root, str):
             root = Path(root)
         assert isinstance(root, Path)
@@ -88,9 +88,7 @@ def __repr__(self) -> str:
     def __eq__(self, other: object) -> bool:
         return isinstance(other, type(self)) and self.root == other.root

-    async def get(
-        self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None
-    ) -> Optional[bytes]:
+    async def get(self, key: str, byte_range: tuple[int, int | None] | None = None) -> bytes | None:
         assert isinstance(key, str)
         path = self.root / key

@@ -100,8 +98,8 @@ async def get(
             return None

     async def get_partial_values(
-        self, key_ranges: List[Tuple[str, Tuple[int, int]]]
-    ) -> List[Optional[bytes]]:
+        self, key_ranges: list[tuple[str, tuple[int, int]]]
+    ) -> list[bytes | None]:
         """
         Read byte ranges from multiple keys.

         Parameters
         ----------
         key_ranges: List[Tuple[str, Tuple[int, int]]]
             A list of (key, (start, length)) tuples. The first element of the tuple is the name of
             the key to fetch bytes from. The second element the tuple defines the byte range to
             retrieve. These values are arguments to `get`, as this method wraps concurrent
             invocation of `get`
@@ -124,7 +122,7 @@ async def set(self, key: str, value: BytesLike) -> None:
         path = self.root / key
         await to_thread(_put, path, value, auto_mkdir=self.auto_mkdir)

-    async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None:
+    async def set_partial_values(self, key_start_values: list[tuple[str, int, bytes]]) -> None:
         args = []
         for key, start, value in key_start_values:
             assert isinstance(key, str)
@@ -169,6 +167,9 @@ async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]:
         -------
         AsyncGenerator[str, None]
         """
+        for p in (self.root / prefix).rglob("*"):
+            if p.is_file():
+                yield str(p)

         to_strip = str(self.root) + "/"
         for p in (self.root / prefix).rglob("*"):
diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py
index 9f10ed22a3..9730d635d5 100644
--- a/src/zarr/store/memory.py
+++ b/src/zarr/store/memory.py
@@ -88,4 +88,4 @@ async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
         else:
             for key in self._store_dict:
                 if key.startswith(prefix + "/") and key != prefix:
-                    yield key.strip(prefix + "/").split("/")[0]
+                    yield key.removeprefix(prefix + "/").split("/")[0]
diff --git a/tests/v2/conftest.py b/tests/v2/conftest.py
index c84cdfa439..225f3fd563 100644
--- a/tests/v2/conftest.py
+++ b/tests/v2/conftest.py
@@ -1,5 +1,5 @@
-import pathlib
 import pytest
+import pathlib


 @pytest.fixture(params=[str, pathlib.Path])
diff --git a/tests/v3/__init__.py b/tests/v3/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py
index 3dc55c0298..3588048906 100644
--- a/tests/v3/conftest.py
+++ b/tests/v3/conftest.py
@@ -1,7 +1,30 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING
+
+from zarr.common import ZarrFormat
+from zarr.group import AsyncGroup
+
+if TYPE_CHECKING:
+    from typing import Any, Literal
+from dataclasses import dataclass, field
 import pathlib
+
 import pytest

-from zarr.store import LocalStore, StorePath, MemoryStore, RemoteStore
+from zarr.store import LocalStore, StorePath, MemoryStore
+from zarr.store.remote import RemoteStore
+
+
+def parse_store(
+    store: Literal["local", "memory", "remote"], path: str
+) -> LocalStore | MemoryStore | RemoteStore:
+    if store == "local":
+        return LocalStore(path)
+    if store == "memory":
+        return MemoryStore()
+    if store == "remote":
+        return RemoteStore()
+    assert False


 @pytest.fixture(params=[str, pathlib.Path])
@@ -30,3 +53,30 @@ def remote_store():
 @pytest.fixture(scope="function")
 def memory_store():
     return MemoryStore()
+
+
+@pytest.fixture(scope="function")
+def store(request: str, tmpdir):
+    param = request.param
+    return parse_store(param, str(tmpdir))
+
+
+@dataclass
+class AsyncGroupRequest:
+    zarr_format: ZarrFormat
+    store: Literal["local", "remote", "memory"]
+    attributes: dict[str, Any] = field(default_factory=dict)
+
+
+@pytest.fixture(scope="function")
+async def async_group(request: pytest.FixtureRequest, tmpdir) -> AsyncGroup:
+    param: AsyncGroupRequest = request.param
+
+    store = parse_store(param.store, str(tmpdir))
+    agroup = await AsyncGroup.create(
+        store,
+        attributes=param.attributes,
+        zarr_format=param.zarr_format,
+        exists_ok=False,
+    )
+    return agroup
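The test diffs that follow lean on the indirect-parametrization pattern the new conftest enables: pytest hands each parameter to the `store` fixture via `request.param`, and `parse_store` (defined in the conftest above) builds the matching Store. A minimal sketch of the pattern:

    import pytest

    @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
    def test_store_roundtrip(store) -> None:
        # placeholder body for illustration; the real tests exercise the store
        assert store is not None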
"C"]) @pytest.mark.parametrize("with_sharding", [True, False]) -@pytest.mark.asyncio async def test_order( store: Store, input_order: Literal["F", "C"], @@ -344,7 +343,6 @@ def test_order_implicit( @pytest.mark.parametrize("runtime_write_order", ["F", "C"]) @pytest.mark.parametrize("runtime_read_order", ["F", "C"]) @pytest.mark.parametrize("with_sharding", [True, False]) -@pytest.mark.asyncio async def test_transpose( store: Store, input_order: Literal["F", "C"], @@ -601,7 +599,6 @@ def test_write_partial_sharded_chunks(store: Store): assert np.array_equal(a[0:16, 0:16], data) -@pytest.mark.asyncio async def test_delete_empty_chunks(store: Store): data = np.ones((16, 16)) @@ -618,7 +615,6 @@ async def test_delete_empty_chunks(store: Store): assert await (store / "delete_empty_chunks/c0/0").get() is None -@pytest.mark.asyncio async def test_delete_empty_sharded_chunks(store: Store): a = await AsyncArray.create( store / "delete_empty_sharded_chunks", @@ -644,7 +640,6 @@ async def test_delete_empty_sharded_chunks(store: Store): assert chunk_bytes is not None and len(chunk_bytes) == 16 * 2 + 8 * 8 * 2 + 4 -@pytest.mark.asyncio async def test_zarr_compat(store: Store): data = np.zeros((16, 18), dtype="uint16") @@ -676,7 +671,6 @@ async def test_zarr_compat(store: Store): assert z2._store["1.1"] == await (store / "zarr_compat3/1.1").get() -@pytest.mark.asyncio async def test_zarr_compat_F(store: Store): data = np.zeros((16, 18), dtype="uint16", order="F") @@ -710,7 +704,6 @@ async def test_zarr_compat_F(store: Store): assert z2._store["1.1"] == await (store / "zarr_compatF3/1.1").get() -@pytest.mark.asyncio async def test_dimension_names(store: Store): data = np.arange(0, 256, dtype="uint16").reshape((16, 16)) @@ -776,7 +769,6 @@ def test_zstd(store: Store, checksum: bool): @pytest.mark.parametrize("endian", ["big", "little"]) -@pytest.mark.asyncio async def test_endian(store: Store, endian: Literal["big", "little"]): data = np.arange(0, 256, dtype="uint16").reshape((16, 16)) @@ -808,7 +800,6 @@ async def test_endian(store: Store, endian: Literal["big", "little"]): @pytest.mark.parametrize("dtype_input_endian", [">u2", "u2", " None: """ Test that `Group.members` returns correct values, i.e. the arrays and groups (explicit and implicit) contained in that group. 
""" - store: LocalStore | MemoryStore = request.getfixturevalue(store_type) path = "group" agroup = AsyncGroup( metadata=GroupMetadata(), @@ -50,14 +53,10 @@ def test_group_members(store_type, request): assert sorted(dict(members_observed)) == sorted(members_expected) -@pytest.mark.parametrize("store_type", (("local_store",))) -def test_group(store_type, request) -> None: - store = request.getfixturevalue(store_type) +@pytest.mark.parametrize("store", (("local", "memory")), indirect=["store"]) +def test_group(store: MemoryStore | LocalStore) -> None: store_path = StorePath(store) - agroup = AsyncGroup( - metadata=GroupMetadata(), - store_path=store_path, - ) + agroup = AsyncGroup(metadata=GroupMetadata(), store_path=store_path) group = Group(agroup) assert agroup.metadata is group.metadata @@ -93,10 +92,290 @@ def test_group(store_type, request) -> None: assert dict(bar3.attrs) == {"baz": "qux", "name": "bar"} -def test_group_sync_constructor(store_path) -> None: - group = Group.create( - store=store_path, - attributes={"title": "test 123"}, +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("exists_ok", (True, False)) +def test_group_create(store: MemoryStore | LocalStore, exists_ok: bool) -> None: + """ + Test that `Group.create` works as expected. + """ + attributes = {"foo": 100} + group = Group.create(store, attributes=attributes, exists_ok=exists_ok) + + assert group.attrs == attributes + + if not exists_ok: + with pytest.raises(AssertionError): + group = Group.create( + store, + attributes=attributes, + exists_ok=exists_ok, + ) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("zarr_format", (2, 3)) +@pytest.mark.parametrize("exists_ok", (True, False)) +async def test_asyncgroup_create( + store: MemoryStore | LocalStore, + exists_ok: bool, + zarr_format: ZarrFormat, +) -> None: + """ + Test that `AsyncGroup.create` works as expected. 
+    """
+    attributes = {"foo": 100}
+    agroup = await AsyncGroup.create(
+        store,
+        attributes=attributes,
+        exists_ok=exists_ok,
+        zarr_format=zarr_format,
+    )
+
+    assert agroup.metadata == GroupMetadata(zarr_format=zarr_format, attributes=attributes)
+    assert agroup.store_path == make_store_path(store)
+
+    if not exists_ok:
+        with pytest.raises(AssertionError):
+            agroup = await AsyncGroup.create(
+                store,
+                attributes=attributes,
+                exists_ok=exists_ok,
+                zarr_format=zarr_format,
+            )
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("zarr_format", (2, 3))
+async def test_asyncgroup_attrs(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None:
+    attributes = {"foo": 100}
+    agroup = await AsyncGroup.create(store, zarr_format=zarr_format, attributes=attributes)
+
+    assert agroup.attrs == agroup.metadata.attributes == attributes
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("zarr_format", (2, 3))
+async def test_asyncgroup_info(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None:
+    agroup = await AsyncGroup.create(  # noqa
+        store,
+        zarr_format=zarr_format,
+    )
+    pytest.xfail("Info is not implemented for metadata yet")
+    # assert agroup.info == agroup.metadata.info
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("zarr_format", (2, 3))
+async def test_asyncgroup_open(
+    store: LocalStore | MemoryStore,
+    zarr_format: ZarrFormat,
+) -> None:
+    """
+    Create an `AsyncGroup`, then ensure that we can open it using `AsyncGroup.open`
+    """
+    attributes = {"foo": 100}
+    group_w = await AsyncGroup.create(
+        store=store,
+        attributes=attributes,
+        exists_ok=False,
+        zarr_format=zarr_format,
+    )
+
+    group_r = await AsyncGroup.open(store=store, zarr_format=zarr_format)
+
+    assert group_w.attrs == group_r.attrs == attributes
+    assert group_w == group_r
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("zarr_format", (2, 3))
+async def test_asyncgroup_open_wrong_format(
+    store: LocalStore | MemoryStore,
+    zarr_format: ZarrFormat,
+) -> None:
+    _ = await AsyncGroup.create(store=store, exists_ok=False, zarr_format=zarr_format)
+
+    # try opening with the wrong zarr format
+    if zarr_format == 3:
+        zarr_format_wrong = 2
+    elif zarr_format == 2:
+        zarr_format_wrong = 3
+    else:
+        assert False
+
+    with pytest.raises(FileNotFoundError):
+        await AsyncGroup.open(store=store, zarr_format=zarr_format_wrong)
+
+
+# todo: replace the dict[str, Any] type with something a bit more specific
+# should this be async?
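+# A sketch of the round trip the next test exercises (illustrative only; the
+# exact set of metadata fields is whatever GroupMetadata defines, and the dict
+# below is one of the cases parametrized in the test):
+#
+#     data = {"zarr_format": 3, "node_type": "group", "attributes": {"foo": 100}}
+#     group = AsyncGroup.from_dict(StorePath(store=store, path="test"), data=data)
+#     assert group.metadata.attributes == {"foo": 100}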
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize( + "data", + ( + {"zarr_format": 3, "node_type": "group", "attributes": {"foo": 100}}, + {"zarr_format": 2, "attributes": {"foo": 100}}, + ), +) +def test_asyncgroup_from_dict(store: MemoryStore | LocalStore, data: dict[str, Any]) -> None: + """ + Test that we can create an AsyncGroup from a dict + """ + path = "test" + store_path = StorePath(store=store, path=path) + group = AsyncGroup.from_dict(store_path, data=data) + + assert group.metadata.zarr_format == data["zarr_format"] + assert group.metadata.attributes == data["attributes"] + + +# todo: replace this with a declarative API where we model a full hierarchy + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize( + "zarr_format", + (pytest.param(2, marks=pytest.mark.xfail(reason="V2 arrays cannot be created yet.")), 3), +) +async def test_asyncgroup_getitem(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: + """ + Create an `AsyncGroup`, then create members of that group, and ensure that we can access those + members via the `AsyncGroup.getitem` method. + """ + agroup = await AsyncGroup.create(store=store, zarr_format=zarr_format) + + sub_array_path = "sub_array" + sub_array = await agroup.create_array( + path=sub_array_path, shape=(10,), dtype="uint8", chunk_shape=(2,) ) + assert await agroup.getitem(sub_array_path) == sub_array + + sub_group_path = "sub_group" + sub_group = await agroup.create_group(sub_group_path, attributes={"foo": 100}) + assert await agroup.getitem(sub_group_path) == sub_group + + # check that asking for a nonexistent key raises KeyError + with pytest.raises(KeyError): + await agroup.getitem("foo") + + +# todo: replace this with a declarative API where we model a full hierarchy + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize( + "zarr_format", + (2, 3), +) +async def test_asyncgroup_delitem(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: + agroup = await AsyncGroup.create(store=store, zarr_format=zarr_format) + sub_array_path = "sub_array" + _ = await agroup.create_array( + path=sub_array_path, shape=(10,), dtype="uint8", chunk_shape=(2,), attributes={"foo": 100} + ) + await agroup.delitem(sub_array_path) + + # todo: clean up the code duplication here + if zarr_format == 2: + assert not await agroup.store_path.store.exists(sub_array_path + "/" + ".zarray") + assert not await agroup.store_path.store.exists(sub_array_path + "/" + ".zattrs") + elif zarr_format == 3: + assert not await agroup.store_path.store.exists(sub_array_path + "/" + "zarr.json") + else: + assert False + + sub_group_path = "sub_group" + _ = await agroup.create_group(sub_group_path, attributes={"foo": 100}) + await agroup.delitem(sub_group_path) + if zarr_format == 2: + assert not await agroup.store_path.store.exists(sub_array_path + "/" + ".zgroup") + assert not await agroup.store_path.store.exists(sub_array_path + "/" + ".zattrs") + elif zarr_format == 3: + assert not await agroup.store_path.store.exists(sub_array_path + "/" + "zarr.json") + else: + assert False + - assert group._async_group.metadata.attributes["title"] == "test 123" +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("zarr_format", (2, 3)) +async def test_asyncgroup_create_group( + store: LocalStore | MemoryStore, + zarr_format: ZarrFormat, +) -> None: + agroup = await 
AsyncGroup.create(store=store, zarr_format=zarr_format) + sub_node_path = "sub_group" + attributes = {"foo": 999} + subnode = await agroup.create_group(path=sub_node_path, attributes=attributes) + + assert isinstance(subnode, AsyncGroup) + assert subnode.attrs == attributes + assert subnode.store_path.path == sub_node_path + assert subnode.store_path.store == store + assert subnode.metadata.zarr_format == zarr_format + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize( + "zarr_format", + (pytest.param(2, marks=pytest.mark.xfail(reason="V2 arrays cannot be created yet")), 3), +) +async def test_asyncgroup_create_array( + store: LocalStore | MemoryStore, + zarr_format: ZarrFormat, +) -> None: + """ + Test that the AsyncGroup.create_array method works correctly. We ensure that array properties + specified in create_array are present on the resulting array. + """ + + agroup = await AsyncGroup.create(store=store, zarr_format=zarr_format) + + shape = (10,) + dtype = "uint8" + chunk_shape = (4,) + attributes = {"foo": 100} + + sub_node_path = "sub_array" + subnode = await agroup.create_array( + path=sub_node_path, + shape=shape, + dtype=dtype, + chunk_shape=chunk_shape, + attributes=attributes, + ) + assert isinstance(subnode, AsyncArray) + assert subnode.attrs == attributes + assert subnode.store_path.path == sub_node_path + assert subnode.store_path.store == store + assert subnode.shape == shape + assert subnode.dtype == dtype + # todo: fix the type annotation of array.metadata.chunk_grid so that we get some autocomplete + # here. + assert subnode.metadata.chunk_grid.chunk_shape == chunk_shape + assert subnode.metadata.zarr_format == zarr_format + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("zarr_format", (2, 3)) +async def test_asyncgroup_update_attributes( + store: LocalStore | MemoryStore, zarr_format: ZarrFormat +) -> None: + """ + Test that the AsyncGroup.update_attributes method works correctly. 
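+
+    Sketch of the behaviour being asserted (mirrors the body below):
+
+        agroup = await AsyncGroup.create(store, attributes={"foo": 10})
+        agroup = await agroup.update_attributes({"baz": "new"})
+        assert agroup.attrs == {"baz": "new"}  # old attributes are replaced, not merged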
+ """ + attributes_old = {"foo": 10} + attributes_new = {"baz": "new"} + agroup = await AsyncGroup.create( + store=store, zarr_format=zarr_format, attributes=attributes_old + ) + + agroup_new_attributes = await agroup.update_attributes(attributes_new) + assert agroup_new_attributes.attrs == attributes_new + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("zarr_format", (2, 3)) +async def test_group_init(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: + agroup = sync(AsyncGroup.create(store=store, zarr_format=zarr_format)) + group = Group(agroup) + assert group._async_group == agroup diff --git a/tests/v3/test_storage.py b/tests/v3/test_storage.py deleted file mode 100644 index 2761e608e2..0000000000 --- a/tests/v3/test_storage.py +++ /dev/null @@ -1,18 +0,0 @@ -import pytest - -from zarr.testing.store import StoreTests -from zarr.store.local import LocalStore -from zarr.store.memory import MemoryStore - - -class TestMemoryStore(StoreTests): - store_cls = MemoryStore - - -class TestLocalStore(StoreTests): - store_cls = LocalStore - - @pytest.fixture(scope="function") - @pytest.mark.parametrize("auto_mkdir", (True, False)) - def store(self, tmpdir) -> LocalStore: - return self.store_cls(str(tmpdir)) diff --git a/tests/v3/test_store.py b/tests/v3/test_store.py new file mode 100644 index 0000000000..e514d505ce --- /dev/null +++ b/tests/v3/test_store.py @@ -0,0 +1,797 @@ +from __future__ import annotations +from zarr.store.local import LocalStore +from pathlib import Path +import pytest + +from zarr.testing.store import StoreTests +from zarr.store.memory import MemoryStore + + +@pytest.mark.parametrize("auto_mkdir", (True, False)) +def test_local_store_init(tmpdir, auto_mkdir: bool) -> None: + tmpdir_str = str(tmpdir) + tmpdir_path = Path(tmpdir_str) + store = LocalStore(root=tmpdir_str, auto_mkdir=auto_mkdir) + + assert store.root == tmpdir_path + assert store.auto_mkdir == auto_mkdir + + # ensure that str and pathlib.Path get normalized to the same output + assert store == LocalStore(root=tmpdir_path, auto_mkdir=auto_mkdir) + + store_str = f"file://{tmpdir_str}" + assert str(store) == store_str + assert repr(store) == f"LocalStore({store_str!r})" + + +@pytest.mark.parametrize("byte_range", (None, (0, None), (1, None), (1, 2), (None, 1))) +async def test_local_store_get( + local_store, byte_range: None | tuple[int | None, int | None] +) -> None: + payload = b"\x01\x02\x03\x04" + object_name = "foo" + (local_store.root / object_name).write_bytes(payload) + observed = await local_store.get(object_name, byte_range=byte_range) + + if byte_range is None: + start = 0 + length = len(payload) + else: + maybe_start, maybe_len = byte_range + if maybe_start is None: + start = 0 + else: + start = maybe_start + + if maybe_len is None: + length = len(payload) - start + else: + length = maybe_len + + expected = payload[start : start + length] + assert observed == expected + + # test that getting from a file that doesn't exist returns None + assert await local_store.get(object_name + "_absent", byte_range=byte_range) is None + + +@pytest.mark.parametrize( + "key_ranges", + ( + [], + [("key_0", (0, 1))], + [("dir/key_0", (0, 1)), ("key_1", (0, 2))], + [("key_0", (0, 1)), ("key_1", (0, 2)), ("key_1", (0, 2))], + ), +) +async def test_local_store_get_partial( + tmpdir, key_ranges: tuple[list[tuple[str, tuple[int, int]]]] +) -> None: + store = LocalStore(str(tmpdir), auto_mkdir=True) + # use the utf-8 encoding of the key as the bytes + for 
key, _ in key_ranges: + payload = bytes(key, encoding="utf-8") + target_path: Path = store.root / key + # create the parent directories + target_path.parent.mkdir(parents=True, exist_ok=True) + # write bytes + target_path.write_bytes(payload) + + results = await store.get_partial_values(key_ranges) + for idx, observed in enumerate(results): + key, byte_range = key_ranges[idx] + expected = await store.get(key, byte_range=byte_range) + assert observed == expected + + +@pytest.mark.parametrize("path", ("foo", "foo/bar")) +@pytest.mark.parametrize("auto_mkdir", (True, False)) +async def test_local_store_set(tmpdir, path: str, auto_mkdir: bool) -> None: + store = LocalStore(str(tmpdir), auto_mkdir=auto_mkdir) + payload = b"\x01\x02\x03\x04" + + if "/" in path and not auto_mkdir: + with pytest.raises(FileNotFoundError): + await store.set(path, payload) + else: + x = await store.set(path, payload) + + # this method should not return anything + assert x is None + + assert (store.root / path).read_bytes() == payload + + +# import zarr +# from zarr._storage.store import _get_hierarchy_metadata, v3_api_available, StorageTransformer +# from zarr._storage.v3_storage_transformers import ( +# DummyStorageTransfomer, +# ShardingStorageTransformer, +# v3_sharding_available, +# ) +# from zarr.core import Array +# from zarr.meta import _default_entry_point_metadata_v3 +# from zarr.storage import ( +# atexit_rmglob, +# atexit_rmtree, +# data_root, +# default_compressor, +# getsize, +# init_array, +# meta_root, +# normalize_store_arg, +# ) +# from zarr._storage.v3 import ( +# ABSStoreV3, +# ConsolidatedMetadataStoreV3, +# DBMStoreV3, +# DirectoryStoreV3, +# FSStoreV3, +# KVStore, +# KVStoreV3, +# LMDBStoreV3, +# LRUStoreCacheV3, +# MemoryStoreV3, +# MongoDBStoreV3, +# RedisStoreV3, +# SQLiteStoreV3, +# StoreV3, +# ZipStoreV3, +# ) +# from .util import CountingDictV3, have_fsspec, skip_test_env_var, mktemp + +# # pytest will fail to run if the following fixtures aren't imported here +# from .test_storage import StoreTests as _StoreTests +# from .test_storage import TestABSStore as _TestABSStore +# from .test_storage import TestConsolidatedMetadataStore as _TestConsolidatedMetadataStore +# from .test_storage import TestDBMStore as _TestDBMStore +# from .test_storage import TestDBMStoreBerkeleyDB as _TestDBMStoreBerkeleyDB +# from .test_storage import TestDBMStoreDumb as _TestDBMStoreDumb +# from .test_storage import TestDBMStoreGnu as _TestDBMStoreGnu +# from .test_storage import TestDBMStoreNDBM as _TestDBMStoreNDBM +# from .test_storage import TestDirectoryStore as _TestDirectoryStore +# from .test_storage import TestFSStore as _TestFSStore +# from .test_storage import TestLMDBStore as _TestLMDBStore +# from .test_storage import TestLRUStoreCache as _TestLRUStoreCache +# from .test_storage import TestMemoryStore as _TestMemoryStore +# from .test_storage import TestSQLiteStore as _TestSQLiteStore +# from .test_storage import TestSQLiteStoreInMemory as _TestSQLiteStoreInMemory +# from .test_storage import TestZipStore as _TestZipStore +# from .test_storage import dimension_separator_fixture, s3, skip_if_nested_chunks + + +# pytestmark = pytest.mark.skipif(not v3_api_available, reason="v3 api is not available") + + +# @pytest.fixture( +# params=[ +# (None, "/"), +# (".", "."), +# ("/", "/"), +# ] +# ) +# def dimension_separator_fixture_v3(request): +# return request.param + + +# class DummyStore: +# # contains all methods expected of Mutable Mapping + +# def keys(self): +# """keys""" + +# def values(self): +# 
"""values""" + +# def get(self, value, default=None): +# """get""" + +# def __setitem__(self, key, value): +# """__setitem__""" + +# def __getitem__(self, key): +# """__getitem__""" + +# def __delitem__(self, key): +# """__delitem__""" + +# def __contains__(self, key): +# """__contains__""" + + +# class InvalidDummyStore: +# # does not contain expected methods of a MutableMapping + +# def keys(self): +# """keys""" + + +# def test_ensure_store_v3(): +# class InvalidStore: +# pass + +# with pytest.raises(ValueError): +# StoreV3._ensure_store(InvalidStore()) + +# # cannot initialize with a store from a different Zarr version +# with pytest.raises(ValueError): +# StoreV3._ensure_store(KVStore(dict())) + +# assert StoreV3._ensure_store(None) is None + +# # class with all methods of a MutableMapping will become a KVStoreV3 +# assert isinstance(StoreV3._ensure_store(DummyStore), KVStoreV3) + +# with pytest.raises(ValueError): +# # does not have the methods expected of a MutableMapping +# StoreV3._ensure_store(InvalidDummyStore) + + +# def test_valid_key(): +# store = KVStoreV3(dict) + +# # only ascii keys are valid +# assert not store._valid_key(5) +# assert not store._valid_key(2.8) + +# for key in store._valid_key_characters: +# assert store._valid_key(key) + +# # other characters not in store._valid_key_characters are not allowed +# assert not store._valid_key("*") +# assert not store._valid_key("~") +# assert not store._valid_key("^") + + +# def test_validate_key(): +# store = KVStoreV3(dict) + +# # zarr.json is a valid key +# store._validate_key("zarr.json") +# # but other keys not starting with meta/ or data/ are not +# with pytest.raises(ValueError): +# store._validate_key("zar.json") + +# # valid ascii keys +# for valid in [ +# meta_root + "arr1.array.json", +# data_root + "arr1.array.json", +# meta_root + "subfolder/item_1-0.group.json", +# ]: +# store._validate_key(valid) +# # but otherwise valid keys cannot end in / +# with pytest.raises(ValueError): +# assert store._validate_key(valid + "/") + +# for invalid in [0, "*", "~", "^", "&"]: +# with pytest.raises(ValueError): +# store._validate_key(invalid) + + +# class StoreV3Tests(_StoreTests): + +# version = 3 +# root = meta_root + +# def test_getsize(self): +# # TODO: determine proper getsize() behavior for v3 +# # Currently returns the combined size of entries under +# # meta/root/path and data/root/path. +# # Any path not under meta/root/ or data/root/ (including zarr.json) +# # returns size 0. 
+ +# store = self.create_store() +# if isinstance(store, dict) or hasattr(store, "getsize"): +# assert 0 == getsize(store, "zarr.json") +# store[meta_root + "foo/a"] = b"x" +# assert 1 == getsize(store) +# assert 1 == getsize(store, "foo") +# store[meta_root + "foo/b"] = b"x" +# assert 2 == getsize(store, "foo") +# assert 1 == getsize(store, "foo/b") +# store[meta_root + "bar/a"] = b"yy" +# assert 2 == getsize(store, "bar") +# store[data_root + "bar/a"] = b"zzz" +# assert 5 == getsize(store, "bar") +# store[data_root + "baz/a"] = b"zzz" +# assert 3 == getsize(store, "baz") +# assert 10 == getsize(store) +# store[data_root + "quux"] = array.array("B", b"zzzz") +# assert 14 == getsize(store) +# assert 4 == getsize(store, "quux") +# store[data_root + "spong"] = np.frombuffer(b"zzzzz", dtype="u1") +# assert 19 == getsize(store) +# assert 5 == getsize(store, "spong") +# store.close() + +# def test_init_array(self, dimension_separator_fixture_v3): + +# pass_dim_sep, want_dim_sep = dimension_separator_fixture_v3 + +# store = self.create_store() +# path = "arr1" +# transformer = DummyStorageTransfomer( +# "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT +# ) +# init_array( +# store, +# path=path, +# shape=1000, +# chunks=100, +# dimension_separator=pass_dim_sep, +# storage_transformers=[transformer], +# ) + +# # check metadata +# mkey = meta_root + path + ".array.json" +# assert mkey in store +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype(None) == meta["data_type"] +# assert default_compressor == meta["compressor"] +# assert meta["fill_value"] is None +# # Missing MUST be assumed to be "/" +# assert meta["chunk_grid"]["separator"] is want_dim_sep +# assert len(meta["storage_transformers"]) == 1 +# assert isinstance(meta["storage_transformers"][0], DummyStorageTransfomer) +# assert meta["storage_transformers"][0].test_value == DummyStorageTransfomer.TEST_CONSTANT +# store.close() + +# def test_list_prefix(self): + +# store = self.create_store() +# path = "arr1" +# init_array(store, path=path, shape=1000, chunks=100) + +# expected = [meta_root + "arr1.array.json", "zarr.json"] +# assert sorted(store.list_prefix("")) == expected + +# expected = [meta_root + "arr1.array.json"] +# assert sorted(store.list_prefix(meta_root.rstrip("/"))) == expected + +# # cannot start prefix with '/' +# with pytest.raises(ValueError): +# store.list_prefix(prefix="/" + meta_root.rstrip("/")) + +# def test_equal(self): +# store = self.create_store() +# assert store == store + +# def test_rename_nonexisting(self): +# store = self.create_store() +# if store.is_erasable(): +# with pytest.raises(ValueError): +# store.rename("a", "b") +# else: +# with pytest.raises(NotImplementedError): +# store.rename("a", "b") + +# def test_get_partial_values(self): +# store = self.create_store() +# store.supports_efficient_get_partial_values in [True, False] +# store[data_root + "foo"] = b"abcdefg" +# store[data_root + "baz"] = b"z" +# assert [b"a"] == store.get_partial_values([(data_root + "foo", (0, 1))]) +# assert [ +# b"d", +# b"b", +# b"z", +# b"abc", +# b"defg", +# b"defg", +# b"g", +# b"ef", +# ] == store.get_partial_values( +# [ +# (data_root + "foo", (3, 1)), +# (data_root + "foo", (1, 1)), +# (data_root + "baz", (0, 1)), +# (data_root + "foo", (0, 3)), +# (data_root + "foo", (3, 4)), +# (data_root + "foo", (3, None)), +# (data_root + "foo", (-1, None)), +# (data_root + "foo", (-3, 2)), +# ] 
+# ) + +# def test_set_partial_values(self): +# store = self.create_store() +# store.supports_efficient_set_partial_values() +# store[data_root + "foo"] = b"abcdefg" +# store.set_partial_values([(data_root + "foo", 0, b"hey")]) +# assert store[data_root + "foo"] == b"heydefg" + +# store.set_partial_values([(data_root + "baz", 0, b"z")]) +# assert store[data_root + "baz"] == b"z" +# store.set_partial_values( +# [ +# (data_root + "foo", 1, b"oo"), +# (data_root + "baz", 1, b"zzz"), +# (data_root + "baz", 4, b"aaaa"), +# (data_root + "foo", 6, b"done"), +# ] +# ) +# assert store[data_root + "foo"] == b"hoodefdone" +# assert store[data_root + "baz"] == b"zzzzaaaa" +# store.set_partial_values( +# [ +# (data_root + "foo", -2, b"NE"), +# (data_root + "baz", -5, b"q"), +# ] +# ) +# assert store[data_root + "foo"] == b"hoodefdoNE" +# assert store[data_root + "baz"] == b"zzzq" + + +# class TestMappingStoreV3(StoreV3Tests): +# def create_store(self, **kwargs): +# return KVStoreV3(dict()) + +# def test_set_invalid_content(self): +# # Generic mappings support non-buffer types +# pass + + +# class TestMemoryStoreV3(_TestMemoryStore, StoreV3Tests): +# def create_store(self, **kwargs): +# skip_if_nested_chunks(**kwargs) +# return MemoryStoreV3(**kwargs) + + +# class TestDirectoryStoreV3(_TestDirectoryStore, StoreV3Tests): +# def create_store(self, normalize_keys=False, **kwargs): +# # For v3, don't have to skip if nested. +# # skip_if_nested_chunks(**kwargs) + +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# store = DirectoryStoreV3(path, normalize_keys=normalize_keys, **kwargs) +# return store + +# def test_rename_nonexisting(self): +# store = self.create_store() +# with pytest.raises(FileNotFoundError): +# store.rename(meta_root + "a", meta_root + "b") + + +# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +# class TestFSStoreV3(_TestFSStore, StoreV3Tests): +# def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): + +# if path is None: +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) + +# store = FSStoreV3( +# path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs +# ) +# return store + +# def test_init_array(self): +# store = self.create_store() +# path = "arr1" +# init_array(store, path=path, shape=1000, chunks=100) + +# # check metadata +# mkey = meta_root + path + ".array.json" +# assert mkey in store +# meta = store._metadata_class.decode_array_metadata(store[mkey]) +# assert (1000,) == meta["shape"] +# assert (100,) == meta["chunk_grid"]["chunk_shape"] +# assert np.dtype(None) == meta["data_type"] +# assert meta["chunk_grid"]["separator"] == "/" + + +# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") +# class TestFSStoreV3WithKeySeparator(StoreV3Tests): +# def create_store(self, normalize_keys=False, key_separator=".", **kwargs): + +# # Since the user is passing key_separator, that will take priority. 
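+#         # (Concretely: with key_separator="." the store lays chunk keys out
+#         # as "0.0" rather than nested "0/0" paths, which is why the
+#         # nested-chunk kwargs are skipped here.)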
+# skip_if_nested_chunks(**kwargs) + +# path = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, path) +# return FSStoreV3(path, normalize_keys=normalize_keys, key_separator=key_separator) + + +# # TODO: enable once N5StoreV3 has been implemented +# # @pytest.mark.skipif(True, reason="N5StoreV3 not yet fully implemented") +# # class TestN5StoreV3(_TestN5Store, TestDirectoryStoreV3, StoreV3Tests): + + +# class TestZipStoreV3(_TestZipStore, StoreV3Tests): + +# ZipStoreClass = ZipStoreV3 + +# def create_store(self, **kwargs): +# path = mktemp(suffix=".zip") +# atexit.register(os.remove, path) +# store = ZipStoreV3(path, mode="w", **kwargs) +# return store + + +# class TestDBMStoreV3(_TestDBMStore, StoreV3Tests): +# def create_store(self, dimension_separator=None): +# path = mktemp(suffix=".anydbm") +# atexit.register(atexit_rmglob, path + "*") +# # create store using default dbm implementation +# store = DBMStoreV3(path, flag="n", dimension_separator=dimension_separator) +# return store + + +# class TestDBMStoreV3Dumb(_TestDBMStoreDumb, StoreV3Tests): +# def create_store(self, **kwargs): +# path = mktemp(suffix=".dumbdbm") +# atexit.register(atexit_rmglob, path + "*") + +# import dbm.dumb as dumbdbm + +# store = DBMStoreV3(path, flag="n", open=dumbdbm.open, **kwargs) +# return store + + +# class TestDBMStoreV3Gnu(_TestDBMStoreGnu, StoreV3Tests): +# def create_store(self, **kwargs): +# gdbm = pytest.importorskip("dbm.gnu") +# path = mktemp(suffix=".gdbm") # pragma: no cover +# atexit.register(os.remove, path) # pragma: no cover +# store = DBMStoreV3( +# path, flag="n", open=gdbm.open, write_lock=False, **kwargs +# ) # pragma: no cover +# return store # pragma: no cover + + +# class TestDBMStoreV3NDBM(_TestDBMStoreNDBM, StoreV3Tests): +# def create_store(self, **kwargs): +# ndbm = pytest.importorskip("dbm.ndbm") +# path = mktemp(suffix=".ndbm") # pragma: no cover +# atexit.register(atexit_rmglob, path + "*") # pragma: no cover +# store = DBMStoreV3(path, flag="n", open=ndbm.open, **kwargs) # pragma: no cover +# return store # pragma: no cover + + +# class TestDBMStoreV3BerkeleyDB(_TestDBMStoreBerkeleyDB, StoreV3Tests): +# def create_store(self, **kwargs): +# bsddb3 = pytest.importorskip("bsddb3") +# path = mktemp(suffix=".dbm") +# atexit.register(os.remove, path) +# store = DBMStoreV3(path, flag="n", open=bsddb3.btopen, write_lock=False, **kwargs) +# return store + + +# class TestLMDBStoreV3(_TestLMDBStore, StoreV3Tests): +# def create_store(self, **kwargs): +# pytest.importorskip("lmdb") +# path = mktemp(suffix=".lmdb") +# atexit.register(atexit_rmtree, path) +# buffers = True +# store = LMDBStoreV3(path, buffers=buffers, **kwargs) +# return store + + +# class TestSQLiteStoreV3(_TestSQLiteStore, StoreV3Tests): +# def create_store(self, **kwargs): +# pytest.importorskip("sqlite3") +# path = mktemp(suffix=".db") +# atexit.register(atexit_rmtree, path) +# store = SQLiteStoreV3(path, **kwargs) +# return store + + +# class TestSQLiteStoreV3InMemory(_TestSQLiteStoreInMemory, StoreV3Tests): +# def create_store(self, **kwargs): +# pytest.importorskip("sqlite3") +# store = SQLiteStoreV3(":memory:", **kwargs) +# return store + + +# @skip_test_env_var("ZARR_TEST_MONGO") +# class TestMongoDBStoreV3(StoreV3Tests): +# def create_store(self, **kwargs): +# pytest.importorskip("pymongo") +# store = MongoDBStoreV3( +# host="127.0.0.1", database="zarr_tests", collection="zarr_tests", **kwargs +# ) +# # start with an empty store +# store.clear() +# return store + + +# @skip_test_env_var("ZARR_TEST_REDIS") +# 
class TestRedisStoreV3(StoreV3Tests): +# def create_store(self, **kwargs): +# # TODO: this is the default host for Redis on Travis, +# # we probably want to generalize this though +# pytest.importorskip("redis") +# store = RedisStoreV3(host="localhost", port=6379, **kwargs) +# # start with an empty store +# store.clear() +# return store + + +# @pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") +# class TestStorageTransformerV3(TestMappingStoreV3): +# def create_store(self, **kwargs): +# inner_store = super().create_store(**kwargs) +# dummy_transformer = DummyStorageTransfomer( +# "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT +# ) +# sharding_transformer = ShardingStorageTransformer( +# "indexed", +# chunks_per_shard=2, +# ) +# path = "bla" +# init_array( +# inner_store, +# path=path, +# shape=1000, +# chunks=100, +# dimension_separator=".", +# storage_transformers=[dummy_transformer, sharding_transformer], +# ) +# store = Array(store=inner_store, path=path).chunk_store +# store.erase_prefix("data/root/bla/") +# store.clear() +# return store + +# def test_method_forwarding(self): +# store = self.create_store() +# inner_store = store.inner_store.inner_store +# assert store.list() == inner_store.list() +# assert store.list_dir(data_root) == inner_store.list_dir(data_root) + +# assert store.is_readable() +# assert store.is_writeable() +# assert store.is_listable() +# inner_store._readable = False +# inner_store._writeable = False +# inner_store._listable = False +# assert not store.is_readable() +# assert not store.is_writeable() +# assert not store.is_listable() + + +# class TestLRUStoreCacheV3(_TestLRUStoreCache, StoreV3Tests): + +# CountingClass = CountingDictV3 +# LRUStoreClass = LRUStoreCacheV3 + + +# @skip_test_env_var("ZARR_TEST_ABS") +# class TestABSStoreV3(_TestABSStore, StoreV3Tests): + +# ABSStoreClass = ABSStoreV3 + + +# def test_normalize_store_arg_v3(tmpdir): + +# fn = tmpdir.join("store.zip") +# store = normalize_store_arg(str(fn), zarr_version=3, mode="w") +# assert isinstance(store, ZipStoreV3) +# assert "zarr.json" in store + +# # can't pass storage_options to non-fsspec store +# with pytest.raises(ValueError): +# normalize_store_arg(str(fn), zarr_version=3, mode="w", storage_options={"some": "kwargs"}) + +# if have_fsspec: +# import fsspec + +# path = tempfile.mkdtemp() +# store = normalize_store_arg("file://" + path, zarr_version=3, mode="w") +# assert isinstance(store, FSStoreV3) +# assert "zarr.json" in store + +# store = normalize_store_arg(fsspec.get_mapper("file://" + path), zarr_version=3) +# assert isinstance(store, FSStoreV3) + +# # regression for https://github.com/zarr-developers/zarr-python/issues/1382 +# # contents of zarr.json are not important for this test +# out = {"version": 1, "refs": {"zarr.json": "{...}"}} +# store = normalize_store_arg( +# "reference://", +# storage_options={"fo": out, "remote_protocol": "memory"}, zarr_version=3 +# ) +# assert isinstance(store, FSStoreV3) + +# fn = tmpdir.join("store.n5") +# with pytest.raises(NotImplementedError): +# normalize_store_arg(str(fn), zarr_version=3, mode="w") + +# # error on zarr_version=3 with a v2 store +# with pytest.raises(ValueError): +# normalize_store_arg(KVStore(dict()), zarr_version=3, mode="w") + +# # error on zarr_version=2 with a v3 store +# with pytest.raises(ValueError): +# normalize_store_arg(KVStoreV3(dict()), zarr_version=2, mode="w") + + +# class TestConsolidatedMetadataStoreV3(_TestConsolidatedMetadataStore): + +# version = 3 +# 
ConsolidatedMetadataClass = ConsolidatedMetadataStoreV3
+
+#     @property
+#     def metadata_key(self):
+#         return meta_root + "consolidated/.zmetadata"
+
+#     def test_bad_store_version(self):
+#         with pytest.raises(ValueError):
+#             self.ConsolidatedMetadataClass(KVStore(dict()))
+
+
+# def test_get_hierarchy_metadata():
+#     store = KVStoreV3({})
+
+#     # error raised if 'zarr.json' is not in the store
+#     with pytest.raises(ValueError):
+#         _get_hierarchy_metadata(store)
+
+#     store["zarr.json"] = _default_entry_point_metadata_v3
+#     assert _get_hierarchy_metadata(store) == _default_entry_point_metadata_v3
+
+#     # ValueError if only a subset of keys are present
+#     store["zarr.json"] = {"zarr_format": "https://purl.org/zarr/spec/protocol/core/3.0"}
+#     with pytest.raises(ValueError):
+#         _get_hierarchy_metadata(store)
+
+#     # ValueError if any unexpected keys are present
+#     extra_metadata = copy.copy(_default_entry_point_metadata_v3)
+#     extra_metadata["extra_key"] = "value"
+#     store["zarr.json"] = extra_metadata
+#     with pytest.raises(ValueError):
+#         _get_hierarchy_metadata(store)
+
+
+# def test_top_level_imports():
+#     for store_name in [
+#         "ABSStoreV3",
+#         "DBMStoreV3",
+#         "KVStoreV3",
+#         "DirectoryStoreV3",
+#         "LMDBStoreV3",
+#         "LRUStoreCacheV3",
+#         "MemoryStoreV3",
+#         "MongoDBStoreV3",
+#         "RedisStoreV3",
+#         "SQLiteStoreV3",
+#         "ZipStoreV3",
+#     ]:
+#         if v3_api_available:
+#             assert hasattr(zarr, store_name)  # pragma: no cover
+#         else:
+#             assert not hasattr(zarr, store_name)  # pragma: no cover
+
+
+# def _get_public_and_dunder_methods(some_class):
+#     return set(
+#         name
+#         for name, _ in inspect.getmembers(some_class, predicate=inspect.isfunction)
+#         if not name.startswith("_") or name.startswith("__")
+#     )
+
+
+# def test_storage_transformer_interface():
+#     store_v3_methods = _get_public_and_dunder_methods(StoreV3)
+#     store_v3_methods.discard("__init__")
+#     # Note, getitems() isn't mandatory when get_partial_values() is available
+#     store_v3_methods.discard("getitems")
+#     storage_transformer_methods = _get_public_and_dunder_methods(StorageTransformer)
+#     storage_transformer_methods.discard("__init__")
+#     storage_transformer_methods.discard("get_config")
+#     assert storage_transformer_methods == store_v3_methods
+
+
+class TestMemoryStore(StoreTests):
+    store_cls = MemoryStore
+
+
+class TestLocalStore(StoreTests):
+    store_cls = LocalStore
+
+    @pytest.fixture(scope="function")
+    @pytest.mark.parametrize("auto_mkdir", (True, False))
+    def store(self, tmpdir) -> LocalStore:
+        return self.store_cls(str(tmpdir))

From 67b07fbc8653bfaa3a6feb7878d056b415cd6732 Mon Sep 17 00:00:00 2001
From: David Stansby
Date: Wed, 15 May 2024 16:54:44 +0100
Subject: [PATCH 0530/1078] Add more typing to zarr.group (#1870)

Co-authored-by: Joe Hamman
---
 src/zarr/group.py | 60 +++++++++++++++++++++++------------------------
 1 file changed, 30 insertions(+), 30 deletions(-)

diff --git a/src/zarr/group.py b/src/zarr/group.py
index cce53d0a98..f8d57e3fba 100644
--- a/src/zarr/group.py
+++ b/src/zarr/group.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Iterator
 from dataclasses import asdict, dataclass, field, replace
 import asyncio
@@ -256,7 +256,7 @@ async def _save_metadata(self) -> None:
         await asyncio.gather(*awaitables)
 
     @property
-    def attrs(self):
+    def attrs(self) -> dict[str, Any]:
         return self.metadata.attributes
 
     @property
@@ -301,7 +301,7 @@ async def create_array(
             zarr_format=self.metadata.zarr_format,
         )
 
-    async def update_attributes(self, new_attributes:
dict[str, Any]): + async def update_attributes(self, new_attributes: dict[str, Any]) -> "AsyncGroup": # metadata.attributes is "frozen" so we simply clear and update the dict self.metadata.attributes.clear() self.metadata.attributes.update(new_attributes) @@ -319,7 +319,7 @@ async def update_attributes(self, new_attributes: dict[str, Any]): return self - def __repr__(self): + def __repr__(self) -> str: return f"" async def nmembers(self) -> int: @@ -394,31 +394,31 @@ async def arrays(self) -> AsyncGenerator[AsyncArray, None]: if isinstance(value, AsyncArray): yield value - async def tree(self, expand=False, level=None) -> Any: + async def tree(self, expand: bool = False, level: int | None = None) -> Any: raise NotImplementedError - async def empty(self, **kwargs) -> AsyncArray: + async def empty(self, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def zeros(self, **kwargs) -> AsyncArray: + async def zeros(self, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def ones(self, **kwargs) -> AsyncArray: + async def ones(self, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def full(self, **kwargs) -> AsyncArray: + async def full(self, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def empty_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + async def empty_like(self, prototype: AsyncArray, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def zeros_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + async def zeros_like(self, prototype: AsyncArray, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def ones_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + async def ones_like(self, prototype: AsyncArray, **kwargs: Any) -> AsyncArray: raise NotImplementedError - async def full_like(self, prototype: AsyncArray, **kwargs) -> AsyncArray: + async def full_like(self, prototype: AsyncArray, **kwargs: Any) -> AsyncArray: raise NotImplementedError async def move(self, source: str, dest: str) -> None: @@ -462,16 +462,16 @@ def __getitem__(self, path: str) -> Array | Group: else: return Group(obj) - def __delitem__(self, key) -> None: + def __delitem__(self, key: str) -> None: self._sync(self._async_group.delitem(key)) - def __iter__(self): + def __iter__(self) -> Iterator[str]: raise NotImplementedError - def __len__(self): + def __len__(self) -> int: raise NotImplementedError - def __setitem__(self, key, value): + def __setitem__(self, key: str, value: Any) -> None: """__setitem__ is not supported in v3""" raise NotImplementedError @@ -502,7 +502,7 @@ def attrs(self) -> Attributes: def info(self): return self._async_group.info - def update_attributes(self, new_attributes: dict[str, Any]): + def update_attributes(self, new_attributes: dict[str, Any]) -> "Group": self._sync(self._async_group.update_attributes(new_attributes)) return self @@ -521,7 +521,7 @@ def members(self) -> tuple[tuple[str, Array | Group], ...]: result = tuple(map(lambda kv: (kv[0], _parse_async_node(kv[1])), _members)) return result - def __contains__(self, member) -> bool: + def __contains__(self, member: str) -> bool: return self._sync(self._async_group.contains(member)) def group_keys(self) -> tuple[str, ...]: @@ -537,37 +537,37 @@ def array_keys(self) -> tuple[str, ...]: def arrays(self) -> tuple[Array, ...]: return tuple(Array(obj) for obj in self._sync_iter(self._async_group.arrays())) - def tree(self, expand=False, level=None) -> Any: + def tree(self, expand: bool = False, level: int | None = None) -> Any: 
return self._sync(self._async_group.tree(expand=expand, level=level)) - def create_group(self, name: str, **kwargs) -> Group: + def create_group(self, name: str, **kwargs: Any) -> Group: return Group(self._sync(self._async_group.create_group(name, **kwargs))) - def create_array(self, name: str, **kwargs) -> Array: + def create_array(self, name: str, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.create_array(name, **kwargs))) - def empty(self, **kwargs) -> Array: + def empty(self, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.empty(**kwargs))) - def zeros(self, **kwargs) -> Array: + def zeros(self, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.zeros(**kwargs))) - def ones(self, **kwargs) -> Array: + def ones(self, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.ones(**kwargs))) - def full(self, **kwargs) -> Array: + def full(self, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.full(**kwargs))) - def empty_like(self, prototype: AsyncArray, **kwargs) -> Array: + def empty_like(self, prototype: AsyncArray, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.empty_like(prototype, **kwargs))) - def zeros_like(self, prototype: AsyncArray, **kwargs) -> Array: + def zeros_like(self, prototype: AsyncArray, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.zeros_like(prototype, **kwargs))) - def ones_like(self, prototype: AsyncArray, **kwargs) -> Array: + def ones_like(self, prototype: AsyncArray, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.ones_like(prototype, **kwargs))) - def full_like(self, prototype: AsyncArray, **kwargs) -> Array: + def full_like(self, prototype: AsyncArray, **kwargs: Any) -> Array: return Array(self._sync(self._async_group.full_like(prototype, **kwargs))) def move(self, source: str, dest: str) -> None: From ceee364157c21d759b89c572f5d416f4d7f682df Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Thu, 16 May 2024 11:49:52 +0200 Subject: [PATCH 0531/1078] [v3] First step to generalizes ndarray and bytes (#1826) * use Buffer * use memoryview as the underlying memory * use NDBuffer * convert to Buffer for the v2 tests * clean up * spilling * remove return_as_bytes_wrapper * remove as_ndarray * doc * clean up * as_buffer(): handle bytes like * removed sync.py again * separate Buffer and NNBuffer * impl. NDBuffer.from_numpy_array() * remove as_buffer() * remove Buffer.as_numpy_array() * impl. NDBuffer.as_buffer() * reduce the use of as_numpy_array() * impl. and use NDBuffer.all_equal * as_numpy_array(): doc * remove as_bytearray() * impl. 
Buffer.from_numpy_array() * NDArrayLike * Factory.Create * Factory.FromNumpy * doc * doc * remove the buffer factories again * NDBuffer.create(): take fill_value * getitem and setitem now use factory * doc * test * check_item_key_is_1d_contiguous * Buffer.create_zero_length() * Buffer.__add__(): use concat * Buffer.as_ndarray_like * Buffer.as_numpy_array * crc32c: use as_numpy_array * as_numpy_array_wrapper * fix import * use from __future__ import annotations * doc and clean up * doc * Apply suggestions from code review Co-authored-by: Norman Rzepka * Buffer is now backed by ArrayLike --------- Co-authored-by: Norman Rzepka --- src/zarr/abc/codec.py | 32 +-- src/zarr/abc/store.py | 13 +- src/zarr/array.py | 62 ++--- src/zarr/array_v2.py | 44 ++-- src/zarr/buffer.py | 448 +++++++++++++++++++++++++++++++++++ src/zarr/codecs/blosc.py | 22 +- src/zarr/codecs/bytes.py | 28 +-- src/zarr/codecs/crc32c_.py | 24 +- src/zarr/codecs/gzip.py | 15 +- src/zarr/codecs/pipeline.py | 17 +- src/zarr/codecs/sharding.py | 104 ++++---- src/zarr/codecs/transpose.py | 10 +- src/zarr/codecs/zstd.py | 15 +- src/zarr/group.py | 28 ++- src/zarr/metadata.py | 5 +- src/zarr/store/core.py | 8 +- src/zarr/store/local.py | 30 ++- src/zarr/store/memory.py | 24 +- src/zarr/store/remote.py | 6 +- src/zarr/testing/store.py | 23 +- tests/v3/test_buffer.py | 63 +++++ tests/v3/test_codecs.py | 14 +- tests/v3/test_group.py | 5 +- 23 files changed, 794 insertions(+), 246 deletions(-) create mode 100644 src/zarr/buffer.py create mode 100644 tests/v3/test_buffer.py diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 8897cced89..a91bd63c3b 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -3,16 +3,16 @@ from abc import abstractmethod from typing import TYPE_CHECKING, Optional -import numpy as np from zarr.abc.metadata import Metadata +from zarr.buffer import Buffer, NDBuffer from zarr.common import ArraySpec from zarr.store import StorePath if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import BytesLike, SliceSelection + from zarr.common import SliceSelection from zarr.metadata import ArrayMetadata @@ -37,17 +37,17 @@ class ArrayArrayCodec(Codec): @abstractmethod async def decode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, chunk_spec: ArraySpec, - ) -> np.ndarray: + ) -> NDBuffer: pass @abstractmethod async def encode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, chunk_spec: ArraySpec, - ) -> Optional[np.ndarray]: + ) -> Optional[NDBuffer]: pass @@ -55,17 +55,17 @@ class ArrayBytesCodec(Codec): @abstractmethod async def decode( self, - chunk_array: BytesLike, + chunk_array: Buffer, chunk_spec: ArraySpec, - ) -> np.ndarray: + ) -> NDBuffer: pass @abstractmethod async def encode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: + ) -> Optional[Buffer]: pass @@ -76,7 +76,7 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, chunk_spec: ArraySpec, - ) -> Optional[np.ndarray]: + ) -> Optional[NDBuffer]: pass @@ -85,7 +85,7 @@ class ArrayBytesCodecPartialEncodeMixin: async def encode_partial( self, store_path: StorePath, - chunk_array: np.ndarray, + chunk_array: NDBuffer, selection: SliceSelection, chunk_spec: ArraySpec, ) -> None: @@ -96,15 +96,15 @@ class BytesBytesCodec(Codec): @abstractmethod async def decode( self, - chunk_array: BytesLike, + chunk_array: Buffer, chunk_spec: ArraySpec, - ) -> BytesLike: + ) -> Buffer: pass @abstractmethod async def encode( self, - 
chunk_array: BytesLike, + chunk_array: Buffer, chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: + ) -> Optional[Buffer]: pass diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index 3d9550f733..914987cda7 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -3,12 +3,14 @@ from typing import List, Tuple, Optional +from zarr.buffer import Buffer + class Store(ABC): @abstractmethod async def get( self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[bytes]: + ) -> Optional[Buffer]: """Retrieve the value associated with a given key. Parameters @@ -18,14 +20,14 @@ async def get( Returns ------- - bytes + Buffer """ ... @abstractmethod async def get_partial_values( self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[Optional[bytes]]: + ) -> List[Optional[Buffer]]: """Retrieve possibly partial values from given key_ranges. Parameters @@ -35,8 +37,7 @@ async def get_partial_values( Returns ------- - list[bytes] - list of values, in the order of the key_ranges, may contain null/none for missing keys + list of values, in the order of the key_ranges, may contain null/none for missing keys """ ... @@ -61,7 +62,7 @@ def supports_writes(self) -> bool: ... @abstractmethod - async def set(self, key: str, value: bytes) -> None: + async def set(self, key: str, value: Buffer) -> None: """Store a (key, value) pair. Parameters diff --git a/src/zarr/array.py b/src/zarr/array.py index a594b3dd11..1567c9bbe5 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -20,6 +20,7 @@ # from zarr.array_v2 import ArrayV2 +from zarr.buffer import Buffer, Factory, NDArrayLike, NDBuffer from zarr.codecs import BytesCodec from zarr.codecs.pipeline import CodecPipeline from zarr.common import ( @@ -147,7 +148,7 @@ async def open( assert zarr_json_bytes is not None return cls.from_dict( store_path, - json.loads(zarr_json_bytes), + json.loads(zarr_json_bytes.to_bytes()), ) @classmethod @@ -160,7 +161,7 @@ async def open_auto( if v3_metadata_bytes is not None: return cls.from_dict( store_path, - json.loads(v3_metadata_bytes), + json.loads(v3_metadata_bytes.to_bytes()), ) else: raise ValueError("no v2 support yet") @@ -186,7 +187,9 @@ def dtype(self) -> np.dtype[Any]: def attrs(self) -> dict[str, Any]: return self.metadata.attributes - async def getitem(self, selection: Selection) -> npt.NDArray[Any]: + async def getitem( + self, selection: Selection, *, factory: Factory.Create = NDBuffer.create + ) -> NDArrayLike: assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) indexer = BasicIndexer( selection, @@ -195,10 +198,8 @@ async def getitem(self, selection: Selection) -> npt.NDArray[Any]: ) # setup output array - out = np.zeros( - indexer.shape, - dtype=self.metadata.dtype, - order=self.order, + out = factory( + shape=indexer.shape, dtype=self.metadata.dtype, order=self.order, fill_value=0 ) # reading chunks and decoding them @@ -210,21 +211,17 @@ async def getitem(self, selection: Selection) -> npt.NDArray[Any]: self._read_chunk, config.get("async.concurrency"), ) - - if out.shape: - return out - else: - return out[()] + return out.as_ndarray_like() async def _save_metadata(self) -> None: - await (self.store_path / ZARR_JSON).set(self.metadata.to_bytes()) + await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(self.metadata.to_bytes())) async def _read_chunk( self, chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, - out: npt.NDArray[Any], + out: NDBuffer, ) -> None: chunk_spec = 
self.metadata.get_chunk_spec(chunk_coords, self.order) chunk_key_encoding = self.metadata.chunk_key_encoding @@ -246,7 +243,12 @@ async def _read_chunk( else: out[out_selection] = self.metadata.fill_value - async def setitem(self, selection: Selection, value: npt.NDArray[Any]) -> None: + async def setitem( + self, + selection: Selection, + value: NDArrayLike, + factory: Factory.NDArrayLike = NDBuffer.from_ndarray_like, + ) -> None: assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) chunk_shape = self.metadata.chunk_grid.chunk_shape indexer = BasicIndexer( @@ -259,8 +261,7 @@ async def setitem(self, selection: Selection, value: npt.NDArray[Any]) -> None: # check value shape if np.isscalar(value): - # setting a scalar value - pass + value = np.asanyarray(value) else: if not hasattr(value, "shape"): value = np.asarray(value, self.metadata.dtype) @@ -268,6 +269,11 @@ async def setitem(self, selection: Selection, value: npt.NDArray[Any]) -> None: if value.dtype.name != self.metadata.dtype.name: value = value.astype(self.metadata.dtype, order="A") + # We accept any ndarray like object from the user and convert it + # to a NDBuffer (or subclass). From this point onwards, we only pass + # Buffer and NDBuffer between components. + value = factory(value) + # merging with existing data and encoding chunks await concurrent_map( [ @@ -286,7 +292,7 @@ async def setitem(self, selection: Selection, value: npt.NDArray[Any]) -> None: async def _write_chunk( self, - value: npt.NDArray[Any], + value: NDBuffer, chunk_shape: ChunkCoords, chunk_coords: ChunkCoords, chunk_selection: SliceSelection, @@ -300,11 +306,9 @@ async def _write_chunk( if is_total_slice(chunk_selection, chunk_shape): # write entire chunks if np.isscalar(value): - chunk_array = np.empty( - chunk_shape, - dtype=self.metadata.dtype, + chunk_array = NDBuffer.create( + shape=chunk_shape, dtype=self.metadata.dtype, fill_value=value ) - chunk_array.fill(value) else: chunk_array = value[out_selection] await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) @@ -324,11 +328,11 @@ async def _write_chunk( # merge new value if chunk_bytes is None: - chunk_array = np.empty( - chunk_shape, + chunk_array = NDBuffer.create( + shape=chunk_shape, dtype=self.metadata.dtype, + fill_value=self.metadata.fill_value, ) - chunk_array.fill(self.metadata.fill_value) else: chunk_array = ( await self.codecs.decode(chunk_bytes, chunk_spec) @@ -338,9 +342,9 @@ async def _write_chunk( await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) async def _write_chunk_to_store( - self, store_path: StorePath, chunk_array: npt.NDArray[Any], chunk_spec: ArraySpec + self, store_path: StorePath, chunk_array: NDBuffer, chunk_spec: ArraySpec ) -> None: - if np.all(chunk_array == self.metadata.fill_value): + if chunk_array.all_equal(self.metadata.fill_value): # chunks that only contain fill_value will be removed await store_path.delete() else: @@ -379,14 +383,14 @@ async def _delete_key(key: str) -> None: ) # Write new metadata - await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes()) + await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(new_metadata.to_bytes())) return replace(self, metadata=new_metadata) async def update_attributes(self, new_attributes: Dict[str, Any]) -> AsyncArray: new_metadata = replace(self.metadata, attributes=new_attributes) # Write new metadata - await (self.store_path / ZARR_JSON).set(new_metadata.to_bytes()) + await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(new_metadata.to_bytes())) return 
replace(self, metadata=new_metadata) def __repr__(self) -> str: diff --git a/src/zarr/array_v2.py b/src/zarr/array_v2.py index 18251e7db7..053d58eb1a 100644 --- a/src/zarr/array_v2.py +++ b/src/zarr/array_v2.py @@ -10,6 +10,7 @@ from numcodecs.compat import ensure_bytes, ensure_ndarray +from zarr.buffer import Buffer, NDBuffer from zarr.common import ( ZARRAY_JSON, ZATTRS_JSON, @@ -29,6 +30,13 @@ from zarr.array import Array +def as_bytearray(data: Optional[Buffer]) -> Optional[bytes]: + """Help function to convert a Buffer into bytes if not None""" + if data is None: + return data + return data.to_bytes() + + @dataclass(frozen=True) class _AsyncArrayProxy: array: ArrayV2 @@ -144,8 +152,8 @@ async def open_async( assert zarray_bytes is not None return cls.from_dict( store_path, - zarray_json=json.loads(zarray_bytes), - zattrs_json=json.loads(zattrs_bytes) if zattrs_bytes is not None else None, + zarray_json=json.loads(zarray_bytes.to_bytes()), + zattrs_json=json.loads(zattrs_bytes.to_bytes()) if zattrs_bytes is not None else None, ) @classmethod @@ -179,7 +187,7 @@ async def _save_metadata(self) -> None: await (self.store_path / ZARRAY_JSON).set(self.metadata.to_bytes()) if self.attributes is not None and len(self.attributes) > 0: await (self.store_path / ZATTRS_JSON).set( - json.dumps(self.attributes).encode(), + Buffer.from_bytes(json.dumps(self.attributes).encode()), ) else: await (self.store_path / ZATTRS_JSON).delete() @@ -216,10 +224,8 @@ async def get_async(self, selection: Selection): ) # setup output array - out = np.zeros( - indexer.shape, - dtype=self.metadata.dtype, - order=self.metadata.order, + out = NDBuffer.create( + shape=indexer.shape, dtype=self.metadata.dtype, order=self.metadata.order, fill_value=0 ) # reading chunks and decoding them @@ -245,7 +251,7 @@ async def _read_chunk( ): store_path = self.store_path / self._encode_chunk_key(chunk_coords) - chunk_array = await self._decode_chunk(await store_path.get()) + chunk_array = await self._decode_chunk(as_bytearray(await store_path.get())) if chunk_array is not None: tmp = chunk_array[chunk_selection] out[out_selection] = tmp @@ -333,12 +339,12 @@ async def _write_chunk( if is_total_slice(chunk_selection, chunk_shape): # write entire chunks if np.isscalar(value): - chunk_array = np.empty( - chunk_shape, + chunk_array = NDBuffer.create( + shape=chunk_shape, dtype=self.metadata.dtype, order=self.metadata.order, + fill_value=value, ) - chunk_array.fill(value) else: chunk_array = value[out_selection] await self._write_chunk_to_store(store_path, chunk_array) @@ -346,16 +352,16 @@ async def _write_chunk( else: # writing partial chunks # read chunk first - tmp = await self._decode_chunk(await store_path.get()) + tmp = await self._decode_chunk(as_bytearray(await store_path.get())) # merge new value if tmp is None: - chunk_array = np.empty( - chunk_shape, + chunk_array = NDBuffer.create( + shape=chunk_shape, dtype=self.metadata.dtype, order=self.metadata.order, + fill_value=self.metadata.fill_value, ) - chunk_array.fill(self.metadata.fill_value) else: chunk_array = tmp.copy( order=self.metadata.order, @@ -374,7 +380,7 @@ async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.nda if chunk_bytes is None: await store_path.delete() else: - await store_path.set(chunk_bytes) + await store_path.set(Buffer.from_bytes(chunk_bytes)) async def _encode_chunk(self, chunk_array: np.ndarray) -> Optional[BytesLike]: chunk_array = chunk_array.ravel(order=self.metadata.order) @@ -493,7 +499,7 @@ async def 
convert_to_v3_async(self) -> Array:
         )
 
         new_metadata_bytes = new_metadata.to_bytes()
-        await (self.store_path / ZARR_JSON).set(new_metadata_bytes)
+        await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(new_metadata_bytes))
 
         return Array.from_dict(
             store_path=self.store_path,
@@ -501,7 +507,9 @@ async def convert_to_v3_async(self) -> Array:
         )
 
     async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> ArrayV2:
-        await (self.store_path / ZATTRS_JSON).set(json.dumps(new_attributes).encode())
+        await (self.store_path / ZATTRS_JSON).set(
+            Buffer.from_bytes(json.dumps(new_attributes).encode())
+        )
         return replace(self, attributes=new_attributes)
 
     def update_attributes(self, new_attributes: Dict[str, Any]) -> ArrayV2:
diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py
new file mode 100644
index 0000000000..a633cc09ec
--- /dev/null
+++ b/src/zarr/buffer.py
@@ -0,0 +1,448 @@
+from __future__ import annotations
+
+import sys
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Iterable,
+    Literal,
+    Optional,
+    Protocol,
+    Tuple,
+    TypeAlias,
+)
+
+import numpy as np
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+    from zarr.codecs.bytes import Endian
+    from zarr.common import BytesLike
+
+# TODO: create a protocol for the attributes we need, for now we alias Numpy's ndarray
+# both for the array-like and ndarray-like
+ArrayLike: TypeAlias = np.ndarray
+NDArrayLike: TypeAlias = np.ndarray
+
+
+def check_item_key_is_1d_contiguous(key: Any) -> None:
+    """Raises error if `key` isn't a 1d contiguous slice"""
+    if not isinstance(key, slice):
+        raise TypeError(
+            f"Item key has incorrect type (expected slice, got {key.__class__.__name__})"
+        )
+    if not (key.step is None or key.step == 1):
+        raise ValueError("slice must be contiguous")
+
+
+class Factory:
+    class Create(Protocol):
+        def __call__(
+            self,
+            *,
+            shape: Iterable[int],
+            dtype: np.DTypeLike,
+            order: Literal["C", "F"],
+            fill_value: Optional[Any],
+        ) -> NDBuffer:
+            """Factory function to create a new NDBuffer (or subclass)
+
+            Callables implementing the `Factory.Create` protocol must create a new
+            instance of NDBuffer (or subclass) given the following parameters.
+
+            Parameters
+            ----------
+            shape
+                The shape of the new buffer
+            dtype
+                The datatype of each element in the new buffer
+            order
+                Whether to store multi-dimensional data in row-major (C-style) or
+                column-major (Fortran-style) order in memory.
+            fill_value
+                If not None, fill the new buffer with a scalar value.
+
+            Return
+            ------
+            A new NDBuffer or subclass instance
+            """
+
+    class NDArrayLike(Protocol):
+        def __call__(self, ndarray_like: NDArrayLike) -> NDBuffer:
+            """Factory function to coerce an array into an NDBuffer (or subclass)
+
+            Callables implementing the `Factory.NDArrayLike` protocol must return
+            an instance of NDBuffer (or subclass) given an ndarray-like object.
+
+            Parameters
+            ----------
+            ndarray_like
+                ndarray-like object
+
+            Return
+            ------
+            An NDBuffer or subclass instance that represents `ndarray_like`
+            """
+
+
+class Buffer:
+    """A flat contiguous memory block
+
+    We use Buffer throughout Zarr to represent a contiguous block of memory.
+
+    A Buffer is backed by an underlying array-like instance that represents
+    the memory. The memory type is unspecified; it can be regular host memory,
+    CUDA device memory, or something else. The only requirement is that the
+    array-like instance can be copied/converted to a regular Numpy array
+    (host memory).
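+
+    A small host-memory round trip as a sketch (other backings would
+    substitute their own array type)::
+
+        buf = Buffer.from_bytes(b"\x01\x02\x03\x04")
+        assert buf.to_bytes() == b"\x01\x02\x03\x04"
+        nd = buf.as_nd_buffer(dtype="uint16")  # zero-copy reinterpretation as an NDBuffer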
diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py new file mode 100644 index 0000000000..a633cc09ec --- /dev/null +++ b/src/zarr/buffer.py @@ -0,0 +1,448 @@ +from __future__ import annotations + +import sys +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterable, + Literal, + Optional, + Protocol, + Tuple, + TypeAlias, +) + +import numpy as np + +if TYPE_CHECKING: + from typing_extensions import Self + from zarr.codecs.bytes import Endian + from zarr.common import BytesLike + +# TODO: create a protocol for the attributes we need, for now we alias Numpy's ndarray +# both for the array-like and ndarray-like +ArrayLike: TypeAlias = np.ndarray +NDArrayLike: TypeAlias = np.ndarray + + +def check_item_key_is_1d_contiguous(key: Any) -> None: + """Raises error if `key` isn't a 1d contiguous slice""" + if not isinstance(key, slice): + raise TypeError( + f"Item key has incorrect type (expected slice, got {key.__class__.__name__})" + ) + if not (key.step is None or key.step == 1): + raise ValueError("slice must be contiguous") + + +class Factory: + class Create(Protocol): + def __call__( + self, + *, + shape: Iterable[int], + dtype: np.DTypeLike, + order: Literal["C", "F"], + fill_value: Optional[Any], + ) -> NDBuffer: + """Factory function to create a new NDBuffer (or subclass) + + Callables implementing the `Factory.Create` protocol must create a new + instance of NDBuffer (or subclass) given the following parameters. + + Parameters + ---------- + shape + The shape of the new buffer + dtype + The datatype of each element in the new buffer + order + Whether to store multi-dimensional data in row-major (C-style) or + column-major (Fortran-style) order in memory. + fill_value + If not None, fill the new buffer with a scalar value. + + Return + ------ + A new NDBuffer or subclass instance + """ + + class NDArrayLike(Protocol): + def __call__(self, ndarray_like: NDArrayLike) -> NDBuffer: + """Factory function to coerce an array into an NDBuffer (or subclass) + + Callables implementing the `Factory.NDArrayLike` protocol must return + an instance of NDBuffer (or subclass) given an ndarray-like object. + + Parameters + ---------- + ndarray_like + ndarray-like object + + Return + ------ + An NDBuffer or subclass instance that represents `ndarray_like` + """ + + +class Buffer: + """A flat contiguous memory block + + We use Buffer throughout Zarr to represent a contiguous block of memory. + + A Buffer is backed by an underlying array-like instance that represents + the memory. The memory type is unspecified; can be regular host memory, + CUDA device memory, or something else. The only requirement is that the + array-like instance can be copied/converted to a regular Numpy array + (host memory). + + Note + ---- + This buffer is untyped, so all indexing and sizes are in bytes. + + Parameters + ---------- + array_like + array-like object that must be 1-dim, contiguous, and byte dtype. + """ + + def __init__(self, array_like: ArrayLike): + if array_like.ndim != 1: + raise ValueError("array_like: only 1-dim allowed") + if array_like.dtype != np.dtype("b"): + raise ValueError("array_like: only byte dtype allowed") + self._data = array_like + + @classmethod + def create_zero_length(cls) -> Self: + """Create an empty buffer with length zero + + Return + ------ + New empty 0-length buffer + """ + return cls(np.array([], dtype="b")) + + @classmethod + def from_array_like(cls, array_like: NDArrayLike) -> Self: + """Create a new buffer of an array-like object + + Parameters + ---------- + array_like + array-like object that must be 1-dim, contiguous, and byte dtype. + + Return + ------ + New buffer representing `array_like` + """ + return cls(array_like) + + @classmethod + def from_bytes(cls, bytes_like: BytesLike) -> Self: + """Create a new buffer of a bytes-like object (host memory) + + Parameters + ---------- + bytes_like + bytes-like object + + Return + ------ + New buffer representing `bytes_like` + """ + return cls.from_array_like(np.frombuffer(bytes_like, dtype="b")) + + def as_array_like(self) -> NDArrayLike: + """Return the underlying array (host or device memory) of this buffer + + This will never copy data. + + Return + ------ + The underlying 1d array such as a NumPy or CuPy array. + """ + return self._data + + def as_nd_buffer(self, *, dtype: np.DTypeLike) -> NDBuffer: + """Create a new NDBuffer from this one. + + This will never copy data. + + Parameters + ---------- + dtype + The datatype of the returned buffer (reinterpretation of the bytes) + + Return + ------ + New NDBuffer representing `self.as_array_like()` + """ + return NDBuffer.from_ndarray_like(self._data.view(dtype=dtype)) + + def as_numpy_array(self) -> np.ndarray: + """Return the buffer as a NumPy array (host memory). + + Warning + ------- + Might have to copy data, consider using `.as_array_like()` instead. + + Return + ------ + NumPy array of this buffer (might be a data copy) + """ + return np.asanyarray(self._data) + + def to_bytes(self) -> bytes: + """Return the buffer as `bytes` (host memory). + + Warning + ------- + Will always copy data, only use this method for small buffers such as metadata + buffers. If possible, use `.as_numpy_array()` or `.as_array_like()` instead. + + Return + ------ + `bytes` of this buffer (data copy) + """ + return bytes(self.as_numpy_array()) + + def __getitem__(self, key: slice) -> Self: + check_item_key_is_1d_contiguous(key) + return self.__class__(self._data.__getitem__(key)) + + def __setitem__(self, key: slice, value: Any) -> None: + check_item_key_is_1d_contiguous(key) + self._data.__setitem__(key, value) + + def __len__(self) -> int: + return self._data.size + + def __add__(self, other: Buffer) -> Self: + """Concatenate two buffers""" + + other_array = other.as_array_like() + assert other_array.dtype == np.dtype("b") + return self.__class__(np.concatenate((self._data, other_array))) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, (bytes, bytearray)): + # Many of the tests compare `Buffer` with `bytes` so we + # convert the bytes to a Buffer and try again + return self == self.from_bytes(other) + if isinstance(other, Buffer): + return (self._data == other.as_array_like()).all() + raise ValueError( + f"equal operator not supported between {self.__class__} and {other.__class__}" + )
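Taken together, the `Buffer` methods above give byte-level slicing, concatenation, and comparison against raw bytes. A quick illustrative round trip, assuming the default host-memory NumPy backing:

    from zarr.buffer import Buffer

    buf = Buffer.from_bytes(b"\x01\x02\x03\x04")  # wrap host bytes
    head = buf[:2]                                # 1-d contiguous slicing returns a new Buffer
    framed = head + buf                           # __add__ concatenates into a fresh Buffer
    assert framed.to_bytes() == b"\x01\x02\x01\x02\x03\x04"
    assert buf == b"\x01\x02\x03\x04"             # __eq__ accepts raw bytes for convenience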
+ + +class NDBuffer: + """An n-dimensional memory block + + We use NDBuffer throughout Zarr to represent an n-dimensional memory block. + + An NDBuffer is backed by an underlying ndarray-like instance that represents + the memory. The memory type is unspecified; can be regular host memory, + CUDA device memory, or something else. The only requirement is that the + ndarray-like instance can be copied/converted to a regular Numpy array + (host memory). + + Note + ---- + The two buffer classes Buffer and NDBuffer are very similar. In fact, Buffer + is a special case of NDBuffer where dim=1, stride=1, and dtype="b". However, + in order to use Python's type system to differentiate between the contiguous + Buffer and the n-dim (non-contiguous) NDBuffer, we keep the definition of the + two classes separate. + + Parameters + ---------- + ndarray_like + ndarray-like object that is convertible to a regular Numpy array. + """ + + def __init__(self, array: NDArrayLike): + assert array.ndim > 0 + assert array.dtype != object + self._data = array + + @classmethod + def create( + cls, + *, + shape: Iterable[int], + dtype: np.DTypeLike, + order: Literal["C", "F"] = "C", + fill_value: Optional[Any] = None, + ) -> Self: + """Create a new buffer and its underlying ndarray-like object + + Parameters + ---------- + shape + The shape of the buffer and its underlying ndarray-like object + dtype + The datatype of the buffer and its underlying ndarray-like object + order + Whether to store multi-dimensional data in row-major (C-style) or + column-major (Fortran-style) order in memory. + fill_value + If not None, fill the new buffer with a scalar value. + + Return + ------ + New buffer representing a new ndarray_like object + + Developer Notes + --------------- + A subclass can override this method to create an ndarray-like object + other than the default Numpy array. + """ + ret = cls(np.empty(shape=shape, dtype=dtype, order=order)) + if fill_value is not None: + ret.fill(fill_value) + return ret
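`create` collapses the `np.empty(...)` plus `.fill(...)` idiom used throughout the old code paths into a single call. A short sketch of how it composes with the conversion methods defined nearby in this file (`as_nd_buffer` above, `as_buffer` below):

    from zarr.buffer import NDBuffer

    nd = NDBuffer.create(shape=(2, 3), dtype="uint16", order="C", fill_value=0)
    nd[0, :] = 7                                # normal ndarray-style assignment
    flat = nd.as_buffer()                       # reinterpret as a flat byte Buffer
    back = flat.as_nd_buffer(dtype="uint16").reshape((2, 3))
    assert back.all_equal(nd.as_numpy_array())  # lossless round trip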
+ """ + ret = cls(np.empty(shape=shape, dtype=dtype, order=order)) + if fill_value is not None: + ret.fill(fill_value) + return ret + + @classmethod + def from_ndarray_like(cls, ndarray_like: NDArrayLike) -> Self: + """Create a new buffer of a ndarray-like object + + Parameters + ---------- + ndarray_like + ndarray-like object + + Return + ------ + New buffer representing `ndarray_like` + """ + return cls(ndarray_like) + + @classmethod + def from_numpy_array(cls, array_like: np.ArrayLike) -> Self: + """Create a new buffer of Numpy array-like object + + Parameters + ---------- + array_like + Object that can be coerced into a Numpy array + + Return + ------ + New buffer representing `array_like` + """ + return cls.from_ndarray_like(np.asanyarray(array_like)) + + def as_ndarray_like(self) -> NDArrayLike: + """Return the underlying array (host or device memory) of this buffer + + This will never copy data. + + Return + ------ + The underlying array such as a NumPy or CuPy array. + """ + return self._data + + def as_buffer(self) -> Buffer: + """Create a new Buffer from this one. + + Warning + ------- + Copies data if the buffer is non-contiguous. + + Return + ------ + The new buffer (might be data copy) + """ + data = self._data + if not self._data.flags.contiguous: + data = np.ascontiguousarray(self._data) + return Buffer(data.reshape(-1).view(dtype="b")) # Flatten the array without copy + + def as_numpy_array(self) -> np.ndarray: + """Return the buffer as a NumPy array (host memory). + + Warning + ------- + Might have to copy data, consider using `.as_ndarray_like()` instead. + + Return + ------ + NumPy array of this buffer (might be a data copy) + """ + return np.asanyarray(self._data) + + @property + def dtype(self) -> np.dtype[Any]: + return self._data.dtype + + @property + def shape(self) -> Tuple[int, ...]: + return self._data.shape + + @property + def byteorder(self) -> Endian: + from zarr.codecs.bytes import Endian + + if self.dtype.byteorder == "<": + return Endian.little + elif self.dtype.byteorder == ">": + return Endian.big + else: + return Endian(sys.byteorder) + + def reshape(self, newshape: Iterable[int]) -> Self: + return self.__class__(self._data.reshape(newshape)) + + def astype(self, dtype: np.DTypeLike, order: Literal["K", "A", "C", "F"] = "K") -> Self: + return self.__class__(self._data.astype(dtype=dtype, order=order)) + + def __getitem__(self, key: Any) -> Self: + return self.__class__(np.asanyarray(self._data.__getitem__(key))) + + def __setitem__(self, key: Any, value: Any) -> None: + if isinstance(value, NDBuffer): + value = value._data + self._data.__setitem__(key, value) + + def __len__(self) -> int: + return self._data.__len__() + + def all_equal(self, other: Any) -> bool: + return bool((self._data == other).all()) + + def fill(self, value: Any) -> None: + self._data.fill(value) + + def copy(self) -> Self: + return self.__class__(self._data.copy()) + + def transpose(self, *axes: np.SupportsIndex) -> Self: + return self.__class__(self._data.transpose(*axes)) + + +def as_numpy_array_wrapper(func: Callable[[np.ndarray], bytes], buf: Buffer) -> Buffer: + """Converts the input of `func` to a numpy array and the output back to `Buffer`. + + This function is useful when calling a `func` that only support host memory such + as `GZip.decode` and `Blosc.decode`. In this case, use this wrapper to convert + the input `buf` to a Numpy array and convert the result back into a `Buffer`. 
+ + Parameters + ---------- + func + The callable that will be called with the converted `buf` as input. + `func` must return bytes, which will be converted into a `Buffer` + before returned. + buf + The buffer that will be converted to a Numpy array before given as + input to `func`. + + Return + ------ + The result of `func` converted to a `Buffer` + """ + return Buffer.from_bytes(func(buf.as_numpy_array())) diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index 5ee2b7640d..7e94575f9a 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -6,17 +6,17 @@ from typing import TYPE_CHECKING, Union import numcodecs -import numpy as np from numcodecs.blosc import Blosc from zarr.abc.codec import BytesBytesCodec +from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration, to_thread if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.common import JSON, ArraySpec, BytesLike + from zarr.common import JSON, ArraySpec class BloscShuffle(Enum): @@ -160,18 +160,22 @@ def _blosc_codec(self) -> Blosc: async def decode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> BytesLike: - return await to_thread(self._blosc_codec.decode, chunk_bytes) + ) -> Buffer: + return await to_thread(as_numpy_array_wrapper, self._blosc_codec.decode, chunk_bytes) async def encode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: - chunk_array = np.frombuffer(chunk_bytes, dtype=chunk_spec.dtype) - return await to_thread(self._blosc_codec.encode, chunk_array) + ) -> Optional[Buffer]: + # Since blosc only takes bytes, we convert the input and output of the encoding + # between bytes and Buffer + return await to_thread( + lambda chunk: Buffer.from_bytes(self._blosc_codec.encode(chunk.as_array_like())), + chunk_bytes, + ) def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index 566b3a8df9..d6a626e160 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -8,11 +8,12 @@ import numpy as np from zarr.abc.codec import ArrayBytesCodec +from zarr.buffer import Buffer, NDBuffer from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration if TYPE_CHECKING: - from zarr.common import JSON, ArraySpec, BytesLike + from zarr.common import JSON, ArraySpec from typing_extensions import Self @@ -59,19 +60,12 @@ def evolve(self, array_spec: ArraySpec) -> Self: ) return self - def _get_byteorder(self, array: np.ndarray) -> Endian: - if array.dtype.byteorder == "<": - return Endian.little - elif array.dtype.byteorder == ">": - return Endian.big - else: - return default_system_endian - async def decode( self, - chunk_bytes: BytesLike, + chunk_bytes: Buffer, chunk_spec: ArraySpec, - ) -> np.ndarray: + ) -> NDBuffer: + assert isinstance(chunk_bytes, Buffer) if chunk_spec.dtype.itemsize > 0: if self.endian == Endian.little: prefix = "<" @@ -80,7 +74,7 @@ async def decode( dtype = np.dtype(f"{prefix}{chunk_spec.dtype.str[1:]}") else: dtype = np.dtype(f"|{chunk_spec.dtype.str[1:]}") - chunk_array = np.frombuffer(chunk_bytes, dtype) + chunk_array = chunk_bytes.as_nd_buffer(dtype=dtype) # ensure correct chunk shape if chunk_array.shape != chunk_spec.shape: @@ -91,15 +85,15 @@ async def decode( async 
def encode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, _chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: + ) -> Optional[Buffer]: + assert isinstance(chunk_array, NDBuffer) if chunk_array.dtype.itemsize > 1: - byteorder = self._get_byteorder(chunk_array) - if self.endian is not None and self.endian != byteorder: + if self.endian is not None and self.endian != chunk_array.byteorder: new_dtype = chunk_array.dtype.newbyteorder(self.endian.name) chunk_array = chunk_array.astype(new_dtype) - return chunk_array.tobytes() + return chunk_array.as_buffer() def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index dd61b3425e..1daf512e43 100644 --- a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -8,13 +8,14 @@ from crc32c import crc32c from zarr.abc.codec import BytesBytesCodec +from zarr.buffer import Buffer from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.common import JSON, BytesLike, ArraySpec + from zarr.common import JSON, ArraySpec @dataclass(frozen=True) @@ -31,11 +32,12 @@ def to_dict(self) -> Dict[str, JSON]: async def decode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> BytesLike: - crc32_bytes = chunk_bytes[-4:] - inner_bytes = chunk_bytes[:-4] + ) -> Buffer: + data = chunk_bytes.as_numpy_array() + crc32_bytes = data[-4:] + inner_bytes = data[:-4] computed_checksum = np.uint32(crc32c(inner_bytes)).tobytes() stored_checksum = bytes(crc32_bytes) @@ -44,14 +46,18 @@ async def decode( "Stored and computed checksum do not match. " + f"Stored: {stored_checksum!r}. Computed: {computed_checksum!r}." 
) - return inner_bytes + return Buffer.from_array_like(inner_bytes) async def encode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: - return chunk_bytes + np.uint32(crc32c(chunk_bytes)).tobytes() + ) -> Optional[Buffer]: + data = chunk_bytes.as_numpy_array() + # Calculate the checksum and "cast" it to a numpy array + checksum = np.array([crc32c(data)], dtype=np.uint32) + # Append the checksum (as bytes) to the data + return Buffer.from_array_like(np.append(data, checksum.view("b"))) def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length + 4 diff --git a/src/zarr/codecs/gzip.py b/src/zarr/codecs/gzip.py index 71dcaa6bb5..a8d7f815aa 100644 --- a/src/zarr/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -5,13 +5,14 @@ from numcodecs.gzip import GZip from zarr.abc.codec import BytesBytesCodec +from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread if TYPE_CHECKING: from typing import Optional, Dict from typing_extensions import Self - from zarr.common import JSON, ArraySpec, BytesLike + from zarr.common import JSON, ArraySpec def parse_gzip_level(data: JSON) -> int: @@ -45,17 +46,17 @@ def to_dict(self) -> Dict[str, JSON]: async def decode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> BytesLike: - return await to_thread(GZip(self.level).decode, chunk_bytes) + ) -> Buffer: + return await to_thread(as_numpy_array_wrapper, GZip(self.level).decode, chunk_bytes) async def encode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: - return await to_thread(GZip(self.level).encode, chunk_bytes) + ) -> Optional[Buffer]: + return await to_thread(as_numpy_array_wrapper, GZip(self.level).encode, chunk_bytes) def compute_encoded_size( self, diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index da131868c4..1602eb1ef8 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -1,7 +1,6 @@ from __future__ import annotations from typing import TYPE_CHECKING, Iterable -import numpy as np from dataclasses import dataclass from warnings import warn @@ -14,6 +13,7 @@ Codec, ) from zarr.abc.metadata import Metadata +from zarr.buffer import Buffer, NDBuffer from zarr.codecs.registry import get_codec_class from zarr.common import parse_named_configuration @@ -21,7 +21,7 @@ from typing import Iterator, List, Optional, Tuple, Union from zarr.store import StorePath from zarr.metadata import ArrayMetadata - from zarr.common import JSON, ArraySpec, BytesLike, SliceSelection + from zarr.common import JSON, ArraySpec, SliceSelection @dataclass(frozen=True) @@ -148,9 +148,9 @@ def _codecs_with_resolved_metadata( async def decode( self, - chunk_bytes: BytesLike, + chunk_bytes: Buffer, array_spec: ArraySpec, - ) -> np.ndarray: + ) -> NDBuffer: ( aa_codecs_with_spec, ab_codec_with_spec, @@ -173,16 +173,16 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, chunk_spec: ArraySpec, - ) -> Optional[np.ndarray]: + ) -> Optional[NDBuffer]: assert self.supports_partial_decode assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin) return await self.array_bytes_codec.decode_partial(store_path, selection, chunk_spec) async def encode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, array_spec: ArraySpec, - ) -> Optional[BytesLike]: + 
) -> Optional[Buffer]: ( aa_codecs_with_spec, ab_codec_with_spec, @@ -207,12 +207,13 @@ async def encode( return None chunk_bytes = chunk_bytes_maybe + assert isinstance(chunk_bytes, Buffer) return chunk_bytes async def encode_partial( self, store_path: StorePath, - chunk_array: np.ndarray, + chunk_array: NDBuffer, selection: SliceSelection, chunk_spec: ArraySpec, ) -> None: diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index e94074e63e..b63d1e499b 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -37,6 +37,7 @@ ArrayMetadata, parse_codecs, ) +from zarr.buffer import Buffer, NDBuffer if TYPE_CHECKING: from typing import Awaitable, Callable, Dict, Iterator, List, Optional, Set, Tuple @@ -46,7 +47,6 @@ from zarr.common import ( JSON, ChunkCoords, - BytesLike, SliceSelection, ) @@ -127,15 +127,15 @@ def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardIndex: class _ShardProxy(Mapping): index: _ShardIndex - buf: BytesLike + buf: Buffer @classmethod async def from_bytes( - cls, buf: BytesLike, codec: ShardingCodec, chunks_per_shard: ChunkCoords + cls, buf: Buffer, codec: ShardingCodec, chunks_per_shard: ChunkCoords ) -> _ShardProxy: shard_index_size = codec._shard_index_size(chunks_per_shard) obj = cls() - obj.buf = memoryview(buf) + obj.buf = buf if codec.index_location == ShardingCodecIndexLocation.start: shard_index_bytes = obj.buf[:shard_index_size] else: @@ -148,11 +148,11 @@ async def from_bytes( def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardProxy: index = _ShardIndex.create_empty(chunks_per_shard) obj = cls() - obj.buf = memoryview(b"") + obj.buf = Buffer.create_zero_length() obj.index = index return obj - def __getitem__(self, chunk_coords: ChunkCoords) -> Optional[BytesLike]: + def __getitem__(self, chunk_coords: ChunkCoords) -> Optional[Buffer]: chunk_byte_slice = self.index.get_chunk_slice(chunk_coords) if chunk_byte_slice: return self.buf[chunk_byte_slice[0] : chunk_byte_slice[1]] @@ -166,7 +166,7 @@ def __iter__(self) -> Iterator[ChunkCoords]: class _ShardBuilder(_ShardProxy): - buf: bytearray + buf: Buffer index: _ShardIndex @classmethod @@ -174,7 +174,7 @@ def merge_with_morton_order( cls, chunks_per_shard: ChunkCoords, tombstones: Set[ChunkCoords], - *shard_dicts: Mapping[ChunkCoords, BytesLike], + *shard_dicts: Mapping[ChunkCoords, Buffer], ) -> _ShardBuilder: obj = cls.create_empty(chunks_per_shard) for chunk_coords in morton_order_iter(chunks_per_shard): @@ -190,30 +190,28 @@ def merge_with_morton_order( @classmethod def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardBuilder: obj = cls() - obj.buf = bytearray() + obj.buf = Buffer.create_zero_length() obj.index = _ShardIndex.create_empty(chunks_per_shard) return obj - def append(self, chunk_coords: ChunkCoords, value: BytesLike) -> None: + def append(self, chunk_coords: ChunkCoords, value: Buffer) -> None: chunk_start = len(self.buf) chunk_length = len(value) - self.buf.extend(value) + self.buf = self.buf + value self.index.set_chunk_slice(chunk_coords, slice(chunk_start, chunk_start + chunk_length)) async def finalize( self, index_location: ShardingCodecIndexLocation, - index_encoder: Callable[[_ShardIndex], Awaitable[BytesLike]], - ) -> BytesLike: + index_encoder: Callable[[_ShardIndex], Awaitable[Buffer]], + ) -> Buffer: index_bytes = await index_encoder(self.index) if index_location == ShardingCodecIndexLocation.start: self.index.offsets_and_lengths[..., 0] += len(index_bytes) index_bytes = await index_encoder(self.index) # encode 
again with corrected offsets - out_buf = bytearray(index_bytes) - out_buf.extend(self.buf) + out_buf = index_bytes + self.buf else: - out_buf = self.buf - out_buf.extend(index_bytes) + out_buf = self.buf + index_bytes return out_buf @@ -299,9 +297,9 @@ def validate(self, array_metadata: ArrayMetadata) -> None: async def decode( self, - shard_bytes: BytesLike, + shard_bytes: Buffer, shard_spec: ArraySpec, - ) -> np.ndarray: + ) -> NDBuffer: # print("decode") shard_shape = shard_spec.shape chunk_shape = self.chunk_shape @@ -314,10 +312,8 @@ async def decode( ) # setup output array - out = np.zeros( - shard_shape, - dtype=shard_spec.dtype, - order=shard_spec.order, + out = NDBuffer.create( + shape=shard_shape, dtype=shard_spec.dtype, order=shard_spec.order, fill_value=0 ) shard_dict = await _ShardProxy.from_bytes(shard_bytes, self, chunks_per_shard) @@ -349,7 +345,7 @@ async def decode_partial( store_path: StorePath, selection: SliceSelection, shard_spec: ArraySpec, - ) -> Optional[np.ndarray]: + ) -> Optional[NDBuffer]: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) @@ -361,17 +357,15 @@ async def decode_partial( ) # setup output array - out = np.zeros( - indexer.shape, - dtype=shard_spec.dtype, - order=shard_spec.order, + out = NDBuffer.create( + shape=indexer.shape, dtype=shard_spec.dtype, order=shard_spec.order, fill_value=0 ) indexed_chunks = list(indexer) all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks) # reading bytes of all requested chunks - shard_dict: Mapping[ChunkCoords, BytesLike] = {} + shard_dict: Mapping[ChunkCoords, Buffer] = {} if self._is_total_shard(all_chunk_coords, chunks_per_shard): # read entire shard shard_dict_maybe = await self._load_full_shard_maybe(store_path, chunks_per_shard) @@ -407,17 +401,16 @@ async def decode_partial( self._read_chunk, config.get("async.concurrency"), ) - return out async def _read_chunk( self, - shard_dict: Mapping[ChunkCoords, Optional[BytesLike]], + shard_dict: Mapping[ChunkCoords, Optional[Buffer]], chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, shard_spec: ArraySpec, - out: np.ndarray, + out: NDBuffer, ) -> None: chunk_spec = self._get_chunk_spec(shard_spec) chunk_bytes = shard_dict.get(chunk_coords, None) @@ -430,9 +423,9 @@ async def _read_chunk( async def encode( self, - shard_array: np.ndarray, + shard_array: NDBuffer, shard_spec: ArraySpec, - ) -> Optional[BytesLike]: + ) -> Optional[Buffer]: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) @@ -446,22 +439,23 @@ async def encode( ) async def _write_chunk( - shard_array: np.ndarray, + shard_array: NDBuffer, chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, - ) -> Tuple[ChunkCoords, Optional[BytesLike]]: + ) -> Tuple[ChunkCoords, Optional[Buffer]]: + assert isinstance(shard_array, NDBuffer) if is_total_slice(chunk_selection, chunk_shape): chunk_array = shard_array[out_selection] else: # handling writing partial chunks - chunk_array = np.empty( - chunk_shape, + chunk_array = NDBuffer.create( + shape=chunk_shape, dtype=shard_spec.dtype, ) chunk_array.fill(shard_spec.fill_value) chunk_array[chunk_selection] = shard_array[out_selection] - if not np.array_equiv(chunk_array, shard_spec.fill_value): + if not chunk_array.all_equal(shard_spec.fill_value): chunk_spec = self._get_chunk_spec(shard_spec) return ( chunk_coords, @@ -470,7 
+464,7 @@ async def _write_chunk( return (chunk_coords, None) # assembling and encoding chunks within the shard - encoded_chunks: List[Tuple[ChunkCoords, Optional[BytesLike]]] = await concurrent_map( + encoded_chunks: List[Tuple[ChunkCoords, Optional[Buffer]]] = await concurrent_map( [ (shard_array, chunk_coords, chunk_selection, out_selection) for chunk_coords, chunk_selection, out_selection in indexer @@ -491,7 +485,7 @@ async def _write_chunk( async def encode_partial( self, store_path: StorePath, - shard_array: np.ndarray, + shard_array: NDBuffer, selection: SliceSelection, shard_spec: ArraySpec, ) -> None: @@ -519,8 +513,7 @@ async def _write_chunk( chunk_coords: ChunkCoords, chunk_selection: SliceSelection, out_selection: SliceSelection, - ) -> Tuple[ChunkCoords, Optional[BytesLike]]: - chunk_array = None + ) -> Tuple[ChunkCoords, Optional[Buffer]]: if is_total_slice(chunk_selection, self.chunk_shape): chunk_array = shard_array[out_selection] else: @@ -530,8 +523,8 @@ async def _write_chunk( # merge new value if chunk_bytes is None: - chunk_array = np.empty( - self.chunk_shape, + chunk_array = NDBuffer.create( + shape=self.chunk_shape, dtype=shard_spec.dtype, ) chunk_array.fill(shard_spec.fill_value) @@ -541,7 +534,7 @@ async def _write_chunk( ).copy() # make a writable copy chunk_array[chunk_selection] = shard_array[out_selection] - if not np.array_equiv(chunk_array, shard_spec.fill_value): + if not chunk_array.all_equal(shard_spec.fill_value): return ( chunk_coords, await self.codecs.encode(chunk_array, chunk_spec), @@ -549,7 +542,7 @@ async def _write_chunk( else: return (chunk_coords, None) - encoded_chunks: List[Tuple[ChunkCoords, Optional[BytesLike]]] = await concurrent_map( + encoded_chunks: List[Tuple[ChunkCoords, Optional[Buffer]]] = await concurrent_map( [ ( chunk_coords, @@ -593,21 +586,24 @@ def _is_total_shard( ) async def _decode_shard_index( - self, index_bytes: BytesLike, chunks_per_shard: ChunkCoords + self, index_bytes: Buffer, chunks_per_shard: ChunkCoords ) -> _ShardIndex: return _ShardIndex( - await self.index_codecs.decode( - index_bytes, - self._get_index_chunk_spec(chunks_per_shard), - ) + ( + await self.index_codecs.decode( + index_bytes, + self._get_index_chunk_spec(chunks_per_shard), + ) + ).as_numpy_array() ) - async def _encode_shard_index(self, index: _ShardIndex) -> BytesLike: + async def _encode_shard_index(self, index: _ShardIndex) -> Buffer: index_bytes = await self.index_codecs.encode( - index.offsets_and_lengths, + NDBuffer.from_numpy_array(index.offsets_and_lengths), self._get_index_chunk_spec(index.chunks_per_shard), ) assert index_bytes is not None + assert isinstance(index_bytes, Buffer) return index_bytes def _shard_index_size(self, chunks_per_shard: ChunkCoords) -> int: diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index a13708955c..70ae30f908 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -3,13 +3,13 @@ from dataclasses import dataclass, replace +from zarr.buffer import NDBuffer from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration if TYPE_CHECKING: from typing import TYPE_CHECKING, Optional, Tuple from typing_extensions import Self -import numpy as np from zarr.abc.codec import ArrayArrayCodec from zarr.codecs.registry import register_codec @@ -75,9 +75,9 @@ def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: async def decode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, chunk_spec: ArraySpec, - ) -> np.ndarray: + ) -> 
NDBuffer: inverse_order = [0] * chunk_spec.ndim for x, i in enumerate(self.order): inverse_order[x] = i @@ -86,9 +86,9 @@ async def decode( async def encode( self, - chunk_array: np.ndarray, + chunk_array: NDBuffer, chunk_spec: ArraySpec, - ) -> Optional[np.ndarray]: + ) -> Optional[NDBuffer]: chunk_array = chunk_array.transpose(self.order) return chunk_array diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index ad10a7fdb8..0cc99a0368 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -6,13 +6,14 @@ from zstandard import ZstdCompressor, ZstdDecompressor from zarr.abc.codec import BytesBytesCodec +from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread if TYPE_CHECKING: from typing import Dict, Optional from typing_extensions import Self - from zarr.common import BytesLike, JSON, ArraySpec + from zarr.common import JSON, ArraySpec def parse_zstd_level(data: JSON) -> int: @@ -61,17 +62,17 @@ def _decompress(self, data: bytes) -> bytes: async def decode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> BytesLike: - return await to_thread(self._decompress, chunk_bytes) + ) -> Buffer: + return await to_thread(as_numpy_array_wrapper, self._decompress, chunk_bytes) async def encode( self, - chunk_bytes: bytes, + chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> Optional[BytesLike]: - return await to_thread(self._compress, chunk_bytes) + ) -> Optional[Buffer]: + return await to_thread(as_numpy_array_wrapper, self._compress, chunk_bytes) def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/group.py b/src/zarr/group.py index f8d57e3fba..d344b3db00 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -7,6 +7,8 @@ import logging import numpy.typing as npt +from zarr.buffer import Buffer + if TYPE_CHECKING: from typing import Any, AsyncGenerator, Literal, Iterable from zarr.abc.codec import Codec @@ -159,13 +161,13 @@ async def open( if zarr_format == 2: # V2 groups are comprised of a .zgroup and .zattrs objects assert zgroup_bytes is not None - zgroup = json.loads(zgroup_bytes) - zattrs = json.loads(zattrs_bytes) if zattrs_bytes is not None else {} + zgroup = json.loads(zgroup_bytes.to_bytes()) + zattrs = json.loads(zattrs_bytes.to_bytes()) if zattrs_bytes is not None else {} group_metadata = {**zgroup, "attributes": zattrs} else: # V3 groups are comprised of a zarr.json object assert zarr_json_bytes is not None - group_metadata = json.loads(zarr_json_bytes) + group_metadata = json.loads(zarr_json_bytes.to_bytes()) return cls.from_dict(store_path, group_metadata) @@ -199,7 +201,7 @@ async def getitem( if zarr_json_bytes is None: raise KeyError(key) else: - zarr_json = json.loads(zarr_json_bytes) + zarr_json = json.loads(zarr_json_bytes.to_bytes()) if zarr_json["node_type"] == "group": return type(self).from_dict(store_path, zarr_json) elif zarr_json["node_type"] == "array": @@ -219,9 +221,9 @@ async def getitem( raise KeyError(key) # unpack the zarray, if this is None then we must be opening a group - zarray = json.loads(zarray_bytes) if zarray_bytes else None + zarray = json.loads(zarray_bytes.to_bytes()) if zarray_bytes else None # unpack the zattrs, this can be None if no attrs were written - zattrs = json.loads(zattrs_bytes) if zattrs_bytes is not None else {} + zattrs = json.loads(zattrs_bytes.to_bytes()) if zattrs_bytes is not None 
else {} if zarray is not None: # TODO: update this once the V2 array support is part of the primary array class @@ -229,7 +231,7 @@ async def getitem( return AsyncArray.from_dict(store_path, zarray) else: zgroup = ( - json.loads(zgroup_bytes) + json.loads(zgroup_bytes.to_bytes()) if zgroup_bytes is not None else {"zarr_format": self.metadata.zarr_format} ) @@ -252,7 +254,9 @@ async def delitem(self, key: str) -> None: async def _save_metadata(self) -> None: to_save = self.metadata.to_bytes() - awaitables = [(self.store_path / key).set(value) for key, value in to_save.items()] + awaitables = [ + (self.store_path / key).set(Buffer.from_bytes(value)) for key, value in to_save.items() + ] await asyncio.gather(*awaitables) @property @@ -310,9 +314,9 @@ async def update_attributes(self, new_attributes: dict[str, Any]) -> "AsyncGroup to_save = self.metadata.to_bytes() if self.metadata.zarr_format == 2: # only save the .zattrs object - await (self.store_path / ZATTRS_JSON).set(to_save[ZATTRS_JSON]) + await (self.store_path / ZATTRS_JSON).set(Buffer.from_bytes(to_save[ZATTRS_JSON])) else: - await (self.store_path / ZARR_JSON).set(to_save[ZARR_JSON]) + await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(to_save[ZARR_JSON])) self.metadata.attributes.clear() self.metadata.attributes.update(new_attributes) @@ -480,7 +484,9 @@ async def update_attributes_async(self, new_attributes: dict[str, Any]) -> Group # Write new metadata to_save = new_metadata.to_bytes() - awaitables = [(self.store_path / key).set(value) for key, value in to_save.items()] + awaitables = [ + (self.store_path / key).set(Buffer.from_bytes(value)) for key, value in to_save.items() + ] await asyncio.gather(*awaitables) async_group = replace(self._async_group, metadata=new_metadata) diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 3903bacd42..098ab34b86 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -6,6 +6,7 @@ import numpy as np import numpy.typing as npt +from zarr.buffer import Buffer from zarr.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator @@ -289,7 +290,7 @@ def __init__( def ndim(self) -> int: return len(self.shape) - def to_bytes(self) -> bytes: + def to_bytes(self) -> Buffer: def _json_convert(o): if isinstance(o, np.dtype): if o.fields is None: @@ -298,7 +299,7 @@ def _json_convert(o): return o.descr raise TypeError - return json.dumps(self.to_dict(), default=_json_convert).encode() + return Buffer.from_bytes(json.dumps(self.to_dict(), default=_json_convert).encode()) @classmethod def from_dict(cls, data: Dict[str, Any]) -> ArrayV2Metadata: diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index cc017ec982..c6ffbc6c05 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -3,8 +3,8 @@ from pathlib import Path from typing import Any, Optional, Tuple, Union -from zarr.common import BytesLike from zarr.abc.store import Store +from zarr.buffer import Buffer from zarr.store.local import LocalStore @@ -25,12 +25,10 @@ def __init__(self, store: Store, path: Optional[str] = None): self.store = store self.path = path or "" - async def get( - self, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: + async def get(self, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> Optional[Buffer]: return await self.store.get(self.path, byte_range) - async def set(self, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None) -> None: + async def set(self, value: Buffer, 
byte_range: Optional[Tuple[int, int]] = None) -> None: if byte_range is not None: raise NotImplementedError("Store.set does not have partial writes yet") await self.store.set(self.path, value) diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index a3dd65979b..f27b832a39 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -6,10 +6,11 @@ from pathlib import Path from zarr.abc.store import Store -from zarr.common import BytesLike, concurrent_map, to_thread +from zarr.buffer import Buffer +from zarr.common import concurrent_map, to_thread -def _get(path: Path, byte_range: tuple[int, int | None] | None) -> bytes: +def _get(path: Path, byte_range: tuple[int, int | None] | None) -> Buffer: """ Fetch a contiguous region of bytes from a file. @@ -31,7 +32,7 @@ def _get(path: Path, byte_range: tuple[int, int | None] | None) -> bytes: end = (start + byte_range[1]) if byte_range[1] is not None else None else: - return path.read_bytes() + return Buffer.from_bytes(path.read_bytes()) with path.open("rb") as f: size = f.seek(0, io.SEEK_END) if start is not None: @@ -42,13 +43,13 @@ def _get(path: Path, byte_range: tuple[int, int | None] | None) -> bytes: if end is not None: if end < 0: end = size + end - return f.read(end - f.tell()) - return f.read() + return Buffer.from_bytes(f.read(end - f.tell())) + return Buffer.from_bytes(f.read()) def _put( path: Path, - value: BytesLike, + value: Buffer, start: int | None = None, auto_mkdir: bool = True, ) -> int | None: @@ -57,10 +58,10 @@ def _put( if start is not None: with path.open("r+b") as f: f.seek(start) - f.write(value) + f.write(value.as_numpy_array()) return None else: - return path.write_bytes(value) + return path.write_bytes(value.as_numpy_array()) class LocalStore(Store): @@ -88,7 +89,9 @@ def __repr__(self) -> str: def __eq__(self, other: object) -> bool: return isinstance(other, type(self)) and self.root == other.root - async def get(self, key: str, byte_range: tuple[int, int | None] | None = None) -> bytes | None: + async def get( + self, key: str, byte_range: tuple[int, int | None] | None = None + ) -> Buffer | None: assert isinstance(key, str) path = self.root / key @@ -99,7 +102,7 @@ async def get(self, key: str, byte_range: tuple[int, int | None] | None = None) async def get_partial_values( self, key_ranges: list[tuple[str, tuple[int, int]]] - ) -> list[bytes | None]: + ) -> list[Buffer | None]: """ Read byte ranges from multiple keys. 
Parameters @@ -117,8 +120,13 @@ async def get_partial_values( args.append((_get, path, byte_range)) return await concurrent_map(args, to_thread, limit=None) # TODO: fix limit - async def set(self, key: str, value: BytesLike) -> None: + async def set(self, key: str, value: Buffer) -> None: assert isinstance(key, str) + if isinstance(value, (bytes, bytearray)): + # TODO: to support the v2 tests, we convert bytes to Buffer here + value = Buffer.from_bytes(value) + if not isinstance(value, Buffer): + raise TypeError("LocalStore.set(): `value` must be a Buffer instance") path = self.root / key await to_thread(_put, path, value, auto_mkdir=self.auto_mkdir)
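With this change, `LocalStore` accepts `Buffer` values directly (raw bytes are still coerced to keep the v2 tests working). A hypothetical usage sketch; the store root path and the async wrapper are illustrative, not from this patch:

    from zarr.buffer import Buffer
    from zarr.store.local import LocalStore

    async def demo() -> None:
        store = LocalStore("/tmp/scratch.zarr")  # hypothetical root path
        await store.set("zarr.json", Buffer.from_bytes(b"{}"))
        buf = await store.get("zarr.json")
        assert buf is not None and buf.to_bytes() == b"{}"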
diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index 9730d635d5..c053f941ef 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -3,20 +3,21 @@ from collections.abc import AsyncGenerator from typing import Optional, MutableMapping, List, Tuple -from zarr.common import BytesLike, concurrent_map +from zarr.common import concurrent_map from zarr.abc.store import Store +from zarr.buffer import Buffer -# TODO: this store could easily be extended to wrap any MutuableMapping store from v2 +# TODO: this store could easily be extended to wrap any MutableMapping store from v2 # When that is done, the `MemoryStore` will just be a store that wraps a dict. class MemoryStore(Store): supports_writes: bool = True supports_partial_writes: bool = True supports_listing: bool = True - _store_dict: MutableMapping[str, bytes] + _store_dict: MutableMapping[str, Buffer] - def __init__(self, store_dict: Optional[MutableMapping[str, bytes]] = None): + def __init__(self, store_dict: Optional[MutableMapping[str, Buffer]] = None): self._store_dict = store_dict or {} def __str__(self) -> str: @@ -27,7 +28,7 @@ def __repr__(self) -> str: async def get( self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: + ) -> Optional[Buffer]: assert isinstance(key, str) try: value = self._store_dict[key] @@ -39,7 +40,7 @@ async def get( async def get_partial_values( self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[Optional[BytesLike]]: + ) -> List[Optional[Buffer]]: vals = await concurrent_map(key_ranges, self.get, limit=None) return vals @@ -47,14 +48,17 @@ async def exists(self, key: str) -> bool: return key in self._store_dict async def set( - self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + self, key: str, value: Buffer, byte_range: Optional[Tuple[int, int]] = None ) -> None: assert isinstance(key, str) - if not isinstance(value, (bytes, bytearray, memoryview)): - raise TypeError(f"Expected BytesLike. Got {type(value)}.") + if isinstance(value, (bytes, bytearray)): + # TODO: to support the v2 tests, we convert bytes to Buffer here + value = Buffer.from_bytes(value) + if not isinstance(value, Buffer): + raise TypeError(f"Expected Buffer. Got {type(value)}.") if byte_range is not None: - buf = bytearray(self._store_dict[key]) + buf = self._store_dict[key] buf[byte_range[0] : byte_range[1]] = value self._store_dict[key] = buf else: diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index fa6cd2167e..35fd2d60b6 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -3,8 +3,8 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union from zarr.abc.store import Store +from zarr.buffer import Buffer from zarr.store.core import _dereference_path -from zarr.common import BytesLike if TYPE_CHECKING: @@ -52,7 +52,7 @@ def _make_fs(self) -> Tuple[AsyncFileSystem, str]: async def get( self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[BytesLike]: + ) -> Optional[Buffer]: assert isinstance(key, str) fs, root = self._make_fs() path = _dereference_path(root, key) @@ -69,7 +69,7 @@ async def get( return value async def set( - self, key: str, value: BytesLike, byte_range: Optional[Tuple[int, int]] = None + self, key: str, value: Buffer, byte_range: Optional[Tuple[int, int]] = None ) -> None: assert isinstance(key, str) fs, root = self._make_fs() diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index 601ef7f393..99f8021594 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -1,6 +1,7 @@ import pytest from zarr.abc.store import Store +from zarr.buffer import Buffer class StoreTests: @@ -25,14 +26,14 @@ def test_store_capabilities(self, store: Store) -> None: @pytest.mark.parametrize("key", ["c/0", "foo/c/0.0", "foo/0/0"]) @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) async def test_set_get_bytes_roundtrip(self, store: Store, key: str, data: bytes) -> None: - await store.set(key, data) + await store.set(key, Buffer.from_bytes(data)) assert await store.get(key) == data @pytest.mark.parametrize("key", ["foo/c/0"]) @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) async def test_get_partial_values(self, store: Store, key: str, data: bytes) -> None: # put all of the data - await store.set(key, data) + await store.set(key, Buffer.from_bytes(data)) # read back just part of it vals = await store.get_partial_values([(key, (0, 2))]) assert vals == [data[0:2]] @@ -43,18 +44,18 @@ async def test_get_partial_values(self, store: Store, key: str, data: bytes) -> async def test_exists(self, store: Store) -> None: assert not await store.exists("foo") - await store.set("foo/zarr.json", b"bar") + await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) assert await store.exists("foo/zarr.json") async def test_delete(self, store: Store) -> None: - await store.set("foo/zarr.json", b"bar") + await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) assert await store.exists("foo/zarr.json") await store.delete("foo/zarr.json") assert not await store.exists("foo/zarr.json") async def test_list(self, store: Store) -> None: assert [k async for k in store.list()] == [] - await store.set("foo/zarr.json", b"bar") + await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) keys = [k async for k in store.list()] assert keys == ["foo/zarr.json"], keys @@ -62,7 +63,9 @@ async def test_list(self, store: Store) -> None: for i in range(10): key = f"foo/c/{i}" expected.append(key) - await store.set(f"foo/c/{i}", i.to_bytes(length=3, byteorder="little")) + await store.set( + f"foo/c/{i}", Buffer.from_bytes(i.to_bytes(length=3, byteorder="little")) + ) async def test_list_prefix(self, store: Store) -> None: # TODO: we currently 
don't use list_prefix anywhere @@ -71,11 +74,11 @@ async def test_list_prefix(self, store: Store) -> None: async def test_list_dir(self, store: Store) -> None: assert [k async for k in store.list_dir("")] == [] assert [k async for k in store.list_dir("foo")] == [] - await store.set("foo/zarr.json", b"bar") - await store.set("foo/c/1", b"\x01") + await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) + await store.set("foo/c/1", Buffer.from_bytes(b"\x01")) keys = [k async for k in store.list_dir("foo")] - assert keys == ["zarr.json", "c"], keys + assert set(keys) == set(["zarr.json", "c"]), keys keys = [k async for k in store.list_dir("foo/")] - assert keys == ["zarr.json", "c"], keys + assert set(keys) == set(["zarr.json", "c"]), keys diff --git a/tests/v3/test_buffer.py b/tests/v3/test_buffer.py new file mode 100644 index 0000000000..a56c768782 --- /dev/null +++ b/tests/v3/test_buffer.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Iterable, Literal, Optional + +import numpy as np +import numpy.typing as npt +import pytest + +from zarr.array import AsyncArray +from zarr.buffer import NDBuffer +from zarr.store.core import StorePath +from zarr.store.memory import MemoryStore + +if TYPE_CHECKING: + from typing_extensions import Self + + +class MyNDArrayLike(np.ndarray): + """An example of a ndarray-like class""" + + pass + + +class MyNDBuffer(NDBuffer): + """Example of a custom NDBuffer that handles MyNDArrayLike""" + + @classmethod + def create( + cls, + *, + shape: Iterable[int], + dtype: npt.DTypeLike, + order: Literal["C", "F"] = "C", + fill_value: Optional[Any] = None, + ) -> Self: + """Overwrite `NDBuffer.create` to create an MyNDArrayLike instance""" + ret = cls(MyNDArrayLike(shape=shape, dtype=dtype, order=order)) + if fill_value is not None: + ret.fill(fill_value) + return ret + + +@pytest.mark.asyncio +async def test_async_array_factory(): + store = StorePath(MemoryStore()) + expect = np.zeros((9, 9), dtype="uint16", order="F") + a = await AsyncArray.create( + store / "test_async_array", + shape=expect.shape, + chunk_shape=(5, 5), + dtype=expect.dtype, + fill_value=0, + ) + expect[1:4, 3:6] = np.ones((3, 3)) + + await a.setitem( + selection=(slice(1, 4), slice(3, 6)), + value=np.ones((3, 3)), + factory=MyNDBuffer.from_ndarray_like, + ) + got = await a.getitem(selection=(slice(0, 9), slice(0, 9)), factory=MyNDBuffer.create) + assert isinstance(got, MyNDArrayLike) + assert np.array_equal(expect, got) diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index fc209bd5e6..665e3124c0 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -294,7 +294,7 @@ async def test_order( fill_value=1, ) z[:, :] = data - assert await (store / "order/0.0").get() == z._store["0.0"] + assert (await (store / "order/0.0").get()) == z._store["0.0"] @pytest.mark.parametrize("input_order", ["F", "C"]) @@ -730,9 +730,9 @@ async def test_dimension_names(store: Store): ) assert (await AsyncArray.open(store / "dimension_names2")).metadata.dimension_names is None - zarr_json_bytes = await (store / "dimension_names2" / "zarr.json").get() - assert zarr_json_bytes is not None - assert "dimension_names" not in json.loads(zarr_json_bytes) + zarr_json_buffer = await (store / "dimension_names2" / "zarr.json").get() + assert zarr_json_buffer is not None + assert "dimension_names" not in json.loads(zarr_json_buffer.to_bytes()) def test_gzip(store: Store): @@ -954,7 +954,7 @@ async def test_blosc_evolve(store: Store): codecs=[BytesCodec(), 
BloscCodec()], ) - zarr_json = json.loads(await (store / "blosc_evolve_u1" / "zarr.json").get()) + zarr_json = json.loads((await (store / "blosc_evolve_u1" / "zarr.json").get()).to_bytes()) blosc_configuration_json = zarr_json["codecs"][1]["configuration"] assert blosc_configuration_json["typesize"] == 1 assert blosc_configuration_json["shuffle"] == "bitshuffle" @@ -968,7 +968,7 @@ async def test_blosc_evolve(store: Store): codecs=[BytesCodec(), BloscCodec()], ) - zarr_json = json.loads(await (store / "blosc_evolve_u2" / "zarr.json").get()) + zarr_json = json.loads((await (store / "blosc_evolve_u2" / "zarr.json").get()).to_bytes()) blosc_configuration_json = zarr_json["codecs"][1]["configuration"] assert blosc_configuration_json["typesize"] == 2 assert blosc_configuration_json["shuffle"] == "shuffle" @@ -982,7 +982,7 @@ async def test_blosc_evolve(store: Store): codecs=[ShardingCodec(chunk_shape=(16, 16), codecs=[BytesCodec(), BloscCodec()])], ) - zarr_json = json.loads(await (store / "sharding_blosc_evolve" / "zarr.json").get()) + zarr_json = json.loads((await (store / "sharding_blosc_evolve" / "zarr.json").get()).to_bytes()) blosc_configuration_json = zarr_json["codecs"][0]["configuration"]["codecs"][1]["configuration"] assert blosc_configuration_json["typesize"] == 2 assert blosc_configuration_json["shuffle"] == "shuffle" diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 710eb3e527..16e4ceeecf 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -1,6 +1,8 @@ from __future__ import annotations from typing import TYPE_CHECKING, Any +from zarr.buffer import Buffer +from zarr.sync import sync from zarr.array import AsyncArray from zarr.store.core import make_store_path @@ -13,7 +15,6 @@ from zarr.group import AsyncGroup, Group, GroupMetadata from zarr.store import StorePath -from zarr.sync import sync # todo: put RemoteStore in here @@ -43,7 +44,7 @@ def test_group_children(store: MemoryStore | LocalStore) -> None: # add an extra object to the domain of the group. # the list of children should ignore this object. - sync(store.set(f"{path}/extra_object-1", b"000000")) + sync(store.set(f"{path}/extra_object-1", Buffer.from_bytes(b"000000"))) # add an extra object under a directory-like prefix in the domain of the group. # this creates a directory with a random key in it # this should not show up as a member From 908e920bfd52371a4148b19d305262a804bc0201 Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Thu, 16 May 2024 16:37:54 -0600 Subject: [PATCH 0532/1078] Fix a regression with scalar indexing due to #1800 (#1875) --- docs/release.rst | 2 ++ zarr/core.py | 4 +++- zarr/indexing.py | 2 ++ zarr/tests/test_core.py | 49 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 1 deletion(-) diff --git a/docs/release.rst b/docs/release.rst index e2f9f3de85..a81be4638f 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -31,6 +31,8 @@ Docs Maintenance ~~~~~~~~~~~ +* Fix a regression when getting or setting a single value from arrays with size-1 chunks. 
+ By :user:`Deepak Cherian ` :issue:`1874` Deprecations ~~~~~~~~~~~~ diff --git a/zarr/core.py b/zarr/core.py index 6aa86b6465..b1ccd203db 100644 --- a/zarr/core.py +++ b/zarr/core.py @@ -2030,7 +2030,9 @@ def _process_chunk( and not self._filters and self._dtype != object ): - dest = out[out_selection] + # For 0D arrays out_selection = () and out[out_selection] is a scalar + # Avoid that + dest = out[out_selection] if out_selection else out # Assume that array-like objects that doesn't have a # `writeable` flag is writable. dest_is_writable = getattr(dest, "writeable", True) diff --git a/zarr/indexing.py b/zarr/indexing.py index 2f2402fe27..35c1e813b1 100644 --- a/zarr/indexing.py +++ b/zarr/indexing.py @@ -52,6 +52,8 @@ def is_scalar(value, dtype): return True if isinstance(value, tuple) and dtype.names and len(value) == len(dtype.names): return True + if dtype.kind == "O" and not isinstance(value, np.ndarray): + return True return False diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 730f724314..01a78ecd68 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -3157,3 +3157,52 @@ def test_issue_1279(tmpdir): written_data = ds_reopened[:] assert_array_equal(data, written_data) + + +def test_scalar_indexing(): + store = zarr.KVStore({}) + + store["a"] = zarr.create((3,), chunks=(1,), store=store) + store["a"][:] = [1, 2, 3] + + assert store["a"][1] == np.array(2.0) + assert store["a"][(1,)] == np.array(2.0) + + store["a"][slice(1)] = [-1] + assert store["a"][0] == np.array(-1) + + store["a"][0] = -2 + assert store["a"][0] == np.array(-2) + + store["a"][slice(1)] = (-3,) + assert store["a"][0] == np.array(-3) + + +def test_object_array_indexing(): + # regression test for #1874 + from numcodecs import MsgPack + + root = zarr.group() + arr = root.create_dataset( + name="my_dataset", + shape=0, + dtype=object, + object_codec=MsgPack(), + ) + new_items = [ + ["A", 1], + ["B", 2, "hello"], + ] + arr_add = np.empty(len(new_items), dtype=object) + arr_add[:] = new_items + arr.append(arr_add) + + # heterogeneous elements + elem = ["C", 3] + arr[0] = elem + assert arr[0] == elem + + # homogeneous elements + elem = [1, 3] + arr[1] = elem + assert arr[1] == elem From 4bd9764c1857cb563d155ad797011dfd4a44616e Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 16 May 2024 17:57:40 -0700 Subject: [PATCH 0533/1078] release notes for 2.18.1 (#1885) --- docs/release.rst | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index a81be4638f..5ca60b8166 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,25 +18,16 @@ Release notes See `GH1777 `_ for more details on the upcoming 3.0 release. -.. _unreleased: +.. _release_2.18.1: -Unreleased ----------- - -Enhancements -~~~~~~~~~~~~ - -Docs -~~~~ +2.18.1 +------ Maintenance ~~~~~~~~~~~ * Fix a regression when getting or setting a single value from arrays with size-1 chunks. By :user:`Deepak Cherian ` :issue:`1874` -Deprecations -~~~~~~~~~~~~ - .. 
_release_2.18.0: 2.18.0 From 846c08562a80ac84253a43511bc3d152b23e4b97 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Fri, 17 May 2024 09:16:30 +0200 Subject: [PATCH 0534/1078] Reworked codec pipelines (#1670) * merge * refactors CodecPipelines * fixes * adds HybridCodecPipeline * fixes * typing * typing * consistent naming * Apply suggestions from code review Co-authored-by: Davis Bennett * encode/decode are batched by default * use zarr.config for batch_size * don't use global lru_cache * removes HybridCodecPipeline * generic codec classes * default batch size = 1 * default batch size = 1 * docs * Update src/zarr/codecs/batched_codec_pipeline.py Co-authored-by: Joe Hamman * mv batched_codec_pipeline -> pipeline * Remove ArrayV2 (#1857) * adds wrapper codecs for the v2 codec pipeline * encode_chunk_key * refactor ArrayV2 away * empty zattrs * Apply suggestions from code review Co-authored-by: Davis Bennett * unify ArrayMetadata * abstract ArrayMetadata * unified Array.create * use zarr.config for batch_size * __init__.py aktualisieren Co-authored-by: Joe Hamman * ruff --------- Co-authored-by: Davis Bennett Co-authored-by: Joe Hamman * merge --------- Co-authored-by: Davis Bennett Co-authored-by: Joe Hamman --- pyproject.toml | 4 - src/zarr/__init__.py | 11 +- src/zarr/abc/codec.py | 340 ++++++++++++++++++---- src/zarr/abc/store.py | 35 ++- src/zarr/array.py | 527 ++++++++++++++++++++--------------- src/zarr/array_v2.py | 524 ---------------------------------- src/zarr/attributes.py | 13 +- src/zarr/chunk_grids.py | 12 +- src/zarr/codecs/__init__.py | 1 + src/zarr/codecs/_v2.py | 107 +++++++ src/zarr/codecs/blosc.py | 8 +- src/zarr/codecs/bytes.py | 8 +- src/zarr/codecs/crc32c_.py | 8 +- src/zarr/codecs/gzip.py | 8 +- src/zarr/codecs/mixins.py | 131 +++++++++ src/zarr/codecs/pipeline.py | 443 ++++++++++++++++++++++------- src/zarr/codecs/registry.py | 1 - src/zarr/codecs/sharding.py | 392 +++++++++++++------------- src/zarr/codecs/transpose.py | 12 +- src/zarr/codecs/zstd.py | 8 +- src/zarr/common.py | 7 +- src/zarr/config.py | 8 +- src/zarr/group.py | 89 +++--- src/zarr/indexing.py | 18 +- src/zarr/metadata.py | 196 ++++++++++--- tests/v3/test_codecs.py | 8 +- tests/v3/test_config.py | 6 +- tests/v3/test_group.py | 15 +- tests/v3/test_v2.py | 28 ++ 29 files changed, 1711 insertions(+), 1257 deletions(-) delete mode 100644 src/zarr/array_v2.py create mode 100644 src/zarr/codecs/_v2.py create mode 100644 src/zarr/codecs/mixins.py create mode 100644 tests/v3/test_v2.py diff --git a/pyproject.toml b/pyproject.toml index fca263db9a..3014f98031 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -207,10 +207,6 @@ check_untyped_defs = false module = [ "zarr.v2.*", "zarr.abc.codec", - "zarr.codecs.bytes", - "zarr.codecs.pipeline", - "zarr.codecs.sharding", - "zarr.codecs.transpose", "zarr.array_v2", ] disallow_any_generics = false diff --git a/src/zarr/__init__.py b/src/zarr/__init__.py index 65daae8f6d..00c01560f4 100644 --- a/src/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -1,10 +1,7 @@ from __future__ import annotations -from typing import Union - import zarr.codecs # noqa: F401 from zarr.array import Array, AsyncArray -from zarr.array_v2 import ArrayV2 from zarr.config import config # noqa: F401 from zarr.group import AsyncGroup, Group from zarr.store import ( @@ -18,9 +15,7 @@ assert not __version__.startswith("0.0.0") -async def open_auto_async( - store: StoreLike, -) -> Union[AsyncArray, AsyncGroup]: +async def open_auto_async(store: StoreLike) -> AsyncArray | AsyncGroup: store_path 
= make_store_path(store) try: return await AsyncArray.open(store_path) except KeyError: return await AsyncGroup.open(store_path) -def open_auto( - store: StoreLike, -) -> Union[Array, ArrayV2, Group]: +def open_auto(store: StoreLike) -> Array | Group: object = _sync( open_auto_async(store), ) diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index a91bd63c3b..1c665590bf 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -1,110 +1,348 @@ from __future__ import annotations from abc import abstractmethod -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Generic, Iterable, TypeVar from zarr.abc.metadata import Metadata - +from zarr.abc.store import ByteGetter, ByteSetter from zarr.buffer import Buffer, NDBuffer -from zarr.common import ArraySpec -from zarr.store import StorePath if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import SliceSelection + from zarr.common import ArraySpec, SliceSelection from zarr.metadata import ArrayMetadata -class Codec(Metadata): +CodecInput = TypeVar("CodecInput", bound=NDBuffer | Buffer) +CodecOutput = TypeVar("CodecOutput", bound=NDBuffer | Buffer) + + +class _Codec(Generic[CodecInput, CodecOutput], Metadata): + """Generic base class for codecs. + Please use ArrayArrayCodec, ArrayBytesCodec or BytesBytesCodec for subclassing. + + Codecs can be registered via zarr.codecs.registry. + """ + is_fixed_size: bool @abstractmethod def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> int: - pass + """Given an input byte length, this method returns the output byte length. + Raises a NotImplementedError for codecs with variable-sized outputs (e.g. compressors). + + Parameters + ---------- + input_byte_length : int + chunk_spec : ArraySpec + + Returns + ------- + int + """ + ... def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: + """Computes the spec of the chunk after it has been encoded by the codec. + This is important for codecs that change the shape, data type or fill value of a chunk. + The spec will then be used for subsequent codecs in the pipeline. + + Parameters + ---------- + chunk_spec : ArraySpec + + Returns + ------- + ArraySpec + """ return chunk_spec def evolve(self, array_spec: ArraySpec) -> Self: + """Fills in codec configuration parameters that can be automatically + inferred from the array metadata. + + Parameters + ---------- + array_spec : ArraySpec + + Returns + ------- + Self + """ return self def validate(self, array_metadata: ArrayMetadata) -> None: - pass + """Validates that the codec configuration is compatible with the array metadata. + Raises errors when the codec configuration is not compatible. + Parameters + ---------- + array_metadata : ArrayMetadata + """ + ... -class ArrayArrayCodec(Codec): @abstractmethod async def decode( self, - chunk_array: NDBuffer, - chunk_spec: ArraySpec, - ) -> NDBuffer: - pass + chunks_and_specs: Iterable[tuple[CodecOutput | None, ArraySpec]], + ) -> Iterable[CodecInput | None]: + """Decodes a batch of chunks. + Chunks can be None in which case they are ignored by the codec. + + Parameters + ---------- + chunks_and_specs : Iterable[tuple[CodecOutput | None, ArraySpec]] + Ordered set of encoded chunks with their accompanying chunk spec. + + Returns + ------- + Iterable[CodecInput | None] + """ + ... 
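# Illustration only, not part of this patch: a minimal sketch of a concrete
# codec satisfying the batched decode/encode contract declared here. The name
# NoopBytesCodec is hypothetical; BytesBytesCodec is the bytes-to-bytes base
# class defined just below. Note that None entries (missing chunks) must be
# passed through untouched.
from dataclasses import dataclass
from typing import Iterable

@dataclass(frozen=True)
class NoopBytesCodec(BytesBytesCodec):
    is_fixed_size = True

    def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> int:
        # A pass-through codec leaves the byte length unchanged.
        return input_byte_length

    async def decode(
        self, chunks_and_specs: Iterable[tuple[Buffer | None, ArraySpec]]
    ) -> Iterable[Buffer | None]:
        # Preserve order and None entries; a real codec transforms each chunk.
        return [chunk for chunk, _ in chunks_and_specs]

    async def encode(
        self, chunks_and_specs: Iterable[tuple[Buffer | None, ArraySpec]]
    ) -> Iterable[Buffer | None]:
        return [chunk for chunk, _ in chunks_and_specs]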
@abstractmethod async def encode( self, - chunk_array: NDBuffer, - chunk_spec: ArraySpec, - ) -> Optional[NDBuffer]: - pass + chunks_and_specs: Iterable[tuple[CodecInput | None, ArraySpec]], + ) -> Iterable[CodecOutput | None]: + """Encodes a batch of chunks. + Chunks can be None in which case they are ignored by the codec. + Parameters + ---------- + chunks_and_specs : Iterable[tuple[CodecInput | None, ArraySpec]] + Ordered set of to-be-encoded chunks with their accompanying chunk spec. -class ArrayBytesCodec(Codec): - @abstractmethod - async def decode( - self, - chunk_array: Buffer, - chunk_spec: ArraySpec, - ) -> NDBuffer: - pass + Returns + ------- + Iterable[CodecOutput | None] + """ + ... - @abstractmethod - async def encode( - self, - chunk_array: NDBuffer, - chunk_spec: ArraySpec, - ) -> Optional[Buffer]: - pass + +class ArrayArrayCodec(_Codec[NDBuffer, NDBuffer]): + """Base class for array-to-array codecs.""" + + ... + + +class ArrayBytesCodec(_Codec[NDBuffer, Buffer]): + """Base class for array-to-bytes codecs.""" + + ... + + +class BytesBytesCodec(_Codec[Buffer, Buffer]): + """Base class for bytes-to-bytes codecs.""" + + ... + + +Codec = ArrayArrayCodec | ArrayBytesCodec | BytesBytesCodec class ArrayBytesCodecPartialDecodeMixin: + """Mixin for array-to-bytes codecs that implement partial decoding.""" + @abstractmethod async def decode_partial( self, - store_path: StorePath, - selection: SliceSelection, - chunk_spec: ArraySpec, - ) -> Optional[NDBuffer]: - pass + batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]], + ) -> Iterable[NDBuffer | None]: + """Partially decodes a batch of chunks. + This method determines parts of a chunk from the slice selection, + fetches these parts from the store (via ByteGetter) and decodes them. + + Parameters + ---------- + batch_info : Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]] + Ordered set of information about slices of encoded chunks. + The slice selection determines which parts of the chunk will be fetched. + The ByteGetter is used to fetch the necessary bytes. + The chunk spec contains information about the construction of an array from the bytes. + + Returns + ------- + Iterable[NDBuffer | None] + """ + ... class ArrayBytesCodecPartialEncodeMixin: + """Mixin for array-to-bytes codecs that implement partial encoding.""" + @abstractmethod async def encode_partial( self, - store_path: StorePath, - chunk_array: NDBuffer, - selection: SliceSelection, - chunk_spec: ArraySpec, + batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]], ) -> None: - pass + """Partially encodes a batch of chunks. + This method determines parts of a chunk from the slice selection, encodes them and + writes these parts to the store (via ByteSetter). + If merging with existing chunk data in the store is necessary, this method will + read from the store first and perform the merge. + + Parameters + ---------- + batch_info : Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]] + Ordered set of information about slices of to-be-encoded chunks. + The slice selection determines which parts of the chunk will be encoded. + The ByteSetter is used to write the necessary bytes and fetch bytes for existing chunk data. + The chunk spec contains information about the chunk. + """ + ... + +class CodecPipeline(Metadata): + """Base class for implementing CodecPipeline. + A CodecPipeline implements the read and write paths for chunk data. 
+ On the read path, it is responsible for fetching chunks from a store (via ByteGetter), + decoding them and assembling an output array. On the write path, it encodes the chunks + and writes them to a store (via ByteSetter).""" + + @abstractmethod + def evolve(self, array_spec: ArraySpec) -> Self: + """Fills in codec configuration parameters that can be automatically + inferred from the array metadata. + + Parameters + ---------- + array_spec : ArraySpec + + Returns + ------- + Self + """ + ... + + @classmethod + @abstractmethod + def from_list(cls, codecs: list[Codec]) -> Self: + """Creates a codec pipeline from a list of codecs. + + Parameters + ---------- + codecs : list[Codec] + + Returns + ------- + Self + """ + ... + + @property + @abstractmethod + def supports_partial_decode(self) -> bool: ... + + @property + @abstractmethod + def supports_partial_encode(self) -> bool: ... + + @abstractmethod + def validate(self, array_metadata: ArrayMetadata) -> None: + """Validates that all codec configurations are compatible with the array metadata. + Raises errors when a codec configuration is not compatible. + + Parameters + ---------- + array_metadata : ArrayMetadata + """ + ... + + @abstractmethod + def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: + """Given an input byte length, this method returns the output byte length. + Raises a NotImplementedError for codecs with variable-sized outputs (e.g. compressors). + + Parameters + ---------- + byte_length : int + array_spec : ArraySpec + + Returns + ------- + int + """ + ... -class BytesBytesCodec(Codec): @abstractmethod async def decode( self, - chunk_array: Buffer, - chunk_spec: ArraySpec, - ) -> Buffer: - pass + chunk_bytes_and_specs: Iterable[tuple[Buffer | None, ArraySpec]], + ) -> Iterable[NDBuffer | None]: + """Decodes a batch of chunks. + Chunks can be None in which case they are ignored by the codec. + + Parameters + ---------- + chunk_bytes_and_specs : Iterable[tuple[Buffer | None, ArraySpec]] + Ordered set of encoded chunks with their accompanying chunk spec. + + Returns + ------- + Iterable[NDBuffer | None] + """ + ... @abstractmethod async def encode( self, - chunk_array: Buffer, - chunk_spec: ArraySpec, - ) -> Optional[Buffer]: - pass + chunk_arrays_and_specs: Iterable[tuple[NDBuffer | None, ArraySpec]], + ) -> Iterable[Buffer | None]: + """Encodes a batch of chunks. + Chunks can be None in which case they are ignored by the codec. + + Parameters + ---------- + chunk_arrays_and_specs : Iterable[tuple[NDBuffer | None, ArraySpec]] + Ordered set of to-be-encoded chunks with their accompanying chunk spec. + + Returns + ------- + Iterable[Buffer | None] + """ + ... + + @abstractmethod + async def read( + self, + batch_info: Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]], + out: NDBuffer, + ) -> None: + """Reads chunk data from the store, decodes it and writes it into an output array. + Partial decoding may be utilized if the codecs and stores support it. + + Parameters + ---------- + batch_info : Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]] + Ordered set of information about the chunks. + The first slice selection determines which parts of the chunk will be fetched. + The second slice selection determines where in the output array the chunk data will be written. + The ByteGetter is used to fetch the necessary bytes. + The chunk spec contains information about the construction of an array from the bytes. + out : NDBuffer + """ + ... 
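# Illustration only, not part of this patch: the shape of a read() call as
# issued by AsyncArray.getitem further down in this diff. The helper name
# _read_example is hypothetical; metadata, store_path, order, indexer and out
# stand in for the caller's local state.
async def _read_example(metadata, store_path, order, indexer, out) -> None:
    await metadata.codec_pipeline.read(
        [
            (
                store_path / metadata.encode_chunk_key(chunk_coords),  # ByteGetter
                metadata.get_chunk_spec(chunk_coords, order),  # ArraySpec
                chunk_selection,  # which part of the chunk to fetch
                out_selection,  # where the chunk data lands in `out`
            )
            for chunk_coords, chunk_selection, out_selection in indexer
        ],
        out,
    )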
+ + @abstractmethod + async def write( + self, + batch_info: Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]], + value: NDBuffer, + ) -> None: + """Encodes chunk data and writes it to the store. + Merges with existing chunk data by reading first, if necessary. + Partial encoding may be utilized if the codecs and stores support it. + + Parameters + ---------- + batch_info : Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]] + Ordered set of information about the chunks. + The first slice selection determines which parts of the chunk will be encoded. + The second slice selection determines where in the value array the chunk data is located. + The ByteSetter is used to fetch and write the necessary bytes. + The chunk spec contains information about the chunk. + value : NDBuffer + """ + ... diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index 914987cda7..a3a112e58e 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -1,8 +1,8 @@ from abc import abstractmethod, ABC from collections.abc import AsyncGenerator +from typing import List, Protocol, Tuple, Optional, runtime_checkable -from typing import List, Tuple, Optional - +from zarr.common import BytesLike from zarr.buffer import Buffer @@ -68,7 +68,7 @@ async def set(self, key: str, value: Buffer) -> None: Parameters ---------- key : str - value : bytes + value : Buffer """ ... @@ -89,12 +89,12 @@ def supports_partial_writes(self) -> bool: ... @abstractmethod - async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: + async def set_partial_values(self, key_start_values: list[tuple[str, int, BytesLike]]) -> None: """Store values at a given key, starting at byte range_start. Parameters ---------- - key_start_values : list[tuple[str, int, bytes]] + key_start_values : list[tuple[str, int, BytesLike]] set of key, range_start, values triples, a key may occur multiple times with different range_starts, range_starts (considering the length of the respective values) must not specify overlapping ranges for the same key @@ -146,3 +146,28 @@ def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: AsyncGenerator[str, None] """ ... + + +@runtime_checkable +class ByteGetter(Protocol): + async def get( + self, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[Buffer]: ... + + +@runtime_checkable +class ByteSetter(Protocol): + async def get( + self, byte_range: Optional[Tuple[int, Optional[int]]] = None + ) -> Optional[Buffer]: ... + + async def set(self, value: Buffer, byte_range: Optional[Tuple[int, int]] = None) -> None: ... + + async def delete(self) -> None: ... + + +async def set_or_delete(byte_setter: ByteSetter, value: Buffer | None) -> None: + if value is None: + await byte_setter.delete() + else: + await byte_setter.set(value) diff --git a/src/zarr/array.py b/src/zarr/array.py index 1567c9bbe5..61f91ab966 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -1,3 +1,5 @@ +from __future__ import annotations + # Notes on what I've changed here: # 1. Split Array into AsyncArray and Array # 3. Added .size and .attrs methods @@ -7,37 +9,38 @@ # Questions to consider: # 1. Was splitting the array into two classes really necessary? 
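# Illustration only, not part of this patch: the ByteGetter/ByteSetter
# protocols introduced in zarr.abc.store above are runtime-checkable, so any
# object with matching async get/set/delete methods qualifies; StorePath is
# the implementation actually used. A minimal in-memory sketch (the class
# name _DictByteSetter is hypothetical; byte_range handling is omitted):
class _DictByteSetter:
    def __init__(self, d: dict, key: str) -> None:
        self.d = d
        self.key = key

    async def get(self, byte_range=None):
        # Return the stored Buffer, or None if the key is missing.
        return self.d.get(self.key)

    async def set(self, value, byte_range=None) -> None:
        self.d[self.key] = value

    async def delete(self) -> None:
        self.d.pop(self.key, None)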
-from __future__ import annotations +from asyncio import gather from dataclasses import dataclass, replace import json -from typing import Any, Dict, Iterable, Literal, Optional, Tuple, Union +from typing import Any, Iterable, Literal import numpy as np import numpy.typing as npt from zarr.abc.codec import Codec +from zarr.abc.store import set_or_delete -# from zarr.array_v2 import ArrayV2 -from zarr.buffer import Buffer, Factory, NDArrayLike, NDBuffer +from zarr.attributes import Attributes +from zarr.buffer import Factory, NDArrayLike, NDBuffer from zarr.codecs import BytesCodec -from zarr.codecs.pipeline import CodecPipeline from zarr.common import ( + JSON, ZARR_JSON, - ArraySpec, + ZARRAY_JSON, + ZATTRS_JSON, ChunkCoords, Selection, - SliceSelection, ZarrFormat, concurrent_map, ) from zarr.config import config -from zarr.indexing import BasicIndexer, all_chunk_coords, is_total_slice +from zarr.indexing import BasicIndexer from zarr.chunk_grids import RegularChunkGrid -from zarr.chunk_key_encodings import DefaultChunkKeyEncoding, V2ChunkKeyEncoding -from zarr.metadata import ArrayMetadata, parse_indexing_order +from zarr.chunk_key_encodings import ChunkKeyEncoding, DefaultChunkKeyEncoding, V2ChunkKeyEncoding +from zarr.metadata import ArrayMetadata, ArrayV3Metadata, ArrayV2Metadata, parse_indexing_order from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import sync @@ -46,9 +49,11 @@ def parse_array_metadata(data: Any) -> ArrayMetadata: if isinstance(data, ArrayMetadata): return data elif isinstance(data, dict): - return ArrayMetadata.from_dict(data) - else: - raise TypeError + if data["zarr_format"] == 3: + return ArrayV3Metadata.from_dict(data) + elif data["zarr_format"] == 2: + return ArrayV2Metadata.from_dict(data) + raise TypeError @dataclass(frozen=True) @@ -57,10 +62,6 @@ class AsyncArray: store_path: StorePath order: Literal["C", "F"] - @property - def codecs(self) -> CodecPipeline: - return self.metadata.codecs - def __init__( self, metadata: ArrayMetadata, @@ -79,21 +80,116 @@ async def create( cls, store: StoreLike, *, + # v2 and v3 shape: ChunkCoords, dtype: npt.DTypeLike, - chunk_shape: ChunkCoords, - fill_value: Optional[Any] = None, - chunk_key_encoding: Union[ - Tuple[Literal["default"], Literal[".", "/"]], - Tuple[Literal["v2"], Literal[".", "/"]], - ] = ("default", "/"), - codecs: Optional[Iterable[Union[Codec, Dict[str, Any]]]] = None, - dimension_names: Optional[Iterable[str]] = None, - attributes: Optional[Dict[str, Any]] = None, - exists_ok: bool = False, zarr_format: ZarrFormat = 3, + fill_value: Any | None = None, + attributes: dict[str, JSON] | None = None, + # v3 only + chunk_shape: ChunkCoords | None = None, + chunk_key_encoding: ( + ChunkKeyEncoding + | tuple[Literal["default"], Literal[".", "/"]] + | tuple[Literal["v2"], Literal[".", "/"]] + | None + ) = None, + codecs: Iterable[Codec | dict[str, JSON]] | None = None, + dimension_names: Iterable[str] | None = None, + # v2 only + chunks: ChunkCoords | None = None, + dimension_separator: Literal[".", "/"] | None = None, + order: Literal["C", "F"] | None = None, + filters: list[dict[str, JSON]] | None = None, + compressor: dict[str, JSON] | None = None, + # runtime + exists_ok: bool = False, ) -> AsyncArray: store_path = make_store_path(store) + + if chunk_shape is None: + if chunks is None: + raise ValueError("Either chunk_shape or chunks needs to be provided.") + chunk_shape = chunks + elif chunks is not None: + raise ValueError("Only one of chunk_shape or chunks must be provided.") 
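# Illustration only, not part of this patch: the unified create() accepts the
# v3 spelling (chunk_shape=...) or the v2 spelling (chunks=...), but not both.
# The helper name _create_both_formats and the concrete arguments are
# hypothetical; `store` stands in for any StoreLike.
async def _create_both_formats(store) -> None:
    arr3 = await AsyncArray.create(
        store, shape=(100, 100), dtype="int32", chunk_shape=(10, 10)
    )
    arr2 = await AsyncArray.create(
        store, shape=(100, 100), dtype="int32", chunks=(10, 10), zarr_format=2
    )
    # Supplying neither spelling, or both at once, raises ValueError.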
+ + if zarr_format == 3: + if dimension_separator is not None: + raise ValueError( + "dimension_separator cannot be used for arrays with version 3. Use chunk_key_encoding instead." + ) + if order is not None: + raise ValueError( + "order cannot be used for arrays with version 3. Use a transpose codec instead." + ) + if filters is not None: + raise ValueError( + "filters cannot be used for arrays with version 3. Use array-to-array codecs instead." + ) + if compressor is not None: + raise ValueError( + "compressor cannot be used for arrays with version 3. Use bytes-to-bytes codecs instead." + ) + return await cls._create_v3( + store_path, + shape=shape, + dtype=dtype, + chunk_shape=chunk_shape, + fill_value=fill_value, + chunk_key_encoding=chunk_key_encoding, + codecs=codecs, + dimension_names=dimension_names, + attributes=attributes, + exists_ok=exists_ok, + ) + elif zarr_format == 2: + if codecs is not None: + raise ValueError( + "codecs cannot be used for arrays with version 2. Use filters and compressor instead." + ) + if chunk_key_encoding is not None: + raise ValueError( + "chunk_key_encoding cannot be used for arrays with version 2. Use dimension_separator instead." + ) + if dimension_names is not None: + raise ValueError("dimension_names cannot be used for arrays with version 2.") + return await cls._create_v2( + store_path, + shape=shape, + dtype=dtype, + chunks=chunk_shape, + dimension_separator=dimension_separator, + fill_value=fill_value, + order=order, + filters=filters, + compressor=compressor, + attributes=attributes, + exists_ok=exists_ok, + ) + else: + raise ValueError(f"Unsupported zarr_format. Got: {zarr_format}") + + @classmethod + async def _create_v3( + cls, + store_path: StorePath, + *, + shape: ChunkCoords, + dtype: npt.DTypeLike, + chunk_shape: ChunkCoords, + fill_value: Any | None = None, + chunk_key_encoding: ( + ChunkKeyEncoding + | tuple[Literal["default"], Literal[".", "/"]] + | tuple[Literal["v2"], Literal[".", "/"]] + | None + ) = None, + codecs: Iterable[Codec | dict[str, JSON]] | None = None, + dimension_names: Iterable[str] | None = None, + attributes: dict[str, JSON] | None = None, + exists_ok: bool = False, + ) -> AsyncArray: if not exists_ok: assert not await (store_path / ZARR_JSON).exists() @@ -105,36 +201,86 @@ async def create( else: fill_value = 0 - metadata = ArrayMetadata( - shape=shape, - data_type=dtype, - chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), - chunk_key_encoding=( + if chunk_key_encoding is None: + chunk_key_encoding = ("default", "/") + if isinstance(chunk_key_encoding, tuple): + chunk_key_encoding = ( V2ChunkKeyEncoding(separator=chunk_key_encoding[1]) if chunk_key_encoding[0] == "v2" else DefaultChunkKeyEncoding(separator=chunk_key_encoding[1]) - ), + ) + + metadata = ArrayV3Metadata( + shape=shape, + data_type=dtype, + chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), + chunk_key_encoding=chunk_key_encoding, fill_value=fill_value, codecs=codecs, dimension_names=tuple(dimension_names) if dimension_names else None, attributes=attributes or {}, ) - array = cls( - metadata=metadata, - store_path=store_path, - ) + array = cls(metadata=metadata, store_path=store_path) + + await array._save_metadata(metadata) + return array + + @classmethod + async def _create_v2( + cls, + store_path: StorePath, + *, + shape: ChunkCoords, + dtype: npt.DTypeLike, + chunks: ChunkCoords, + dimension_separator: Literal[".", "/"] | None = None, + fill_value: None | int | float = None, + order: Literal["C", "F"] | None = None, + filters: 
list[dict[str, JSON]] | None = None, + compressor: dict[str, JSON] | None = None, + attributes: dict[str, JSON] | None = None, + exists_ok: bool = False, + ) -> AsyncArray: + import numcodecs + + if not exists_ok: + assert not await (store_path / ZARRAY_JSON).exists() + + if order is None: + order = "C" + + if dimension_separator is None: + dimension_separator = "." - await array._save_metadata() + metadata = ArrayV2Metadata( + shape=shape, + dtype=np.dtype(dtype), + chunks=chunks, + order=order, + dimension_separator=dimension_separator, + fill_value=0 if fill_value is None else fill_value, + compressor=( + numcodecs.get_codec(compressor).get_config() if compressor is not None else None + ), + filters=( + [numcodecs.get_codec(filter).get_config() for filter in filters] + if filters is not None + else None + ), + attributes=attributes, + ) + array = cls(metadata=metadata, store_path=store_path) + await array._save_metadata(metadata) return array @classmethod def from_dict( cls, store_path: StorePath, - data: Dict[str, Any], + data: dict[str, JSON], ) -> AsyncArray: - metadata = ArrayMetadata.from_dict(data) + metadata = parse_array_metadata(data) async_array = cls(metadata=metadata, store_path=store_path) return async_array @@ -142,30 +288,54 @@ def from_dict( async def open( cls, store: StoreLike, + zarr_format: ZarrFormat | None = 3, ) -> AsyncArray: store_path = make_store_path(store) - zarr_json_bytes = await (store_path / ZARR_JSON).get() - assert zarr_json_bytes is not None - return cls.from_dict( - store_path, - json.loads(zarr_json_bytes.to_bytes()), - ) - @classmethod - async def open_auto( - cls, - store: StoreLike, - ) -> AsyncArray: # TODO: Union[AsyncArray, ArrayV2] - store_path = make_store_path(store) - v3_metadata_bytes = await (store_path / ZARR_JSON).get() - if v3_metadata_bytes is not None: - return cls.from_dict( - store_path, - json.loads(v3_metadata_bytes.to_bytes()), + if zarr_format == 2: + zarray_bytes, zattrs_bytes = await gather( + (store_path / ZARRAY_JSON).get(), (store_path / ZATTRS_JSON).get() ) + if zarray_bytes is None: + raise KeyError(store_path) # filenotfounderror? + elif zarr_format == 3: + zarr_json_bytes = await (store_path / ZARR_JSON).get() + if zarr_json_bytes is None: + raise KeyError(store_path) # filenotfounderror? + elif zarr_format is None: + zarr_json_bytes, zarray_bytes, zattrs_bytes = await gather( + (store_path / ZARR_JSON).get(), + (store_path / ZARRAY_JSON).get(), + (store_path / ZATTRS_JSON).get(), + ) + if zarr_json_bytes is not None and zarray_bytes is not None: + # TODO: revisit this exception type + # alternatively, we could warn and favor v3 + raise ValueError("Both zarr.json and .zarray objects exist") + if zarr_json_bytes is None and zarray_bytes is None: + raise KeyError(store_path) # filenotfounderror? 
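# Illustration only, not part of this patch: with zarr_format=None, open()
# probes for both the v3 zarr.json and the v2 .zarray objects and infers the
# format from whichever exists, e.g.
#
#     arr = await AsyncArray.open(store_path, zarr_format=None)
#     # arr.metadata is ArrayV3Metadata if zarr.json was found,
#     # ArrayV2Metadata if only .zarray was found.
#
# An explicit zarr_format of 2 or 3 fetches only that format's metadata keys.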
+ # set zarr_format based on which keys were found + if zarr_json_bytes is not None: + zarr_format = 3 + else: + zarr_format = 2 else: - raise ValueError("no v2 support yet") - # return await ArrayV2.open(store_path) + raise ValueError(f"unexpected zarr_format: {zarr_format}") + + if zarr_format == 2: + # V2 arrays are comprised of a .zarray and .zattrs objects + assert zarray_bytes is not None + zarray_dict = json.loads(zarray_bytes.to_bytes()) + zattrs_dict = json.loads(zattrs_bytes.to_bytes()) if zattrs_bytes is not None else {} + zarray_dict["attributes"] = zattrs_dict + return cls(store_path=store_path, metadata=ArrayV2Metadata.from_dict(zarray_dict)) + else: + # V3 arrays are comprised of a zarr.json object + assert zarr_json_bytes is not None + return cls( + store_path=store_path, + metadata=ArrayV3Metadata.from_dict(json.loads(zarr_json_bytes.to_bytes())), + ) @property def ndim(self) -> int: @@ -184,64 +354,45 @@ def dtype(self) -> np.dtype[Any]: return self.metadata.dtype @property - def attrs(self) -> dict[str, Any]: + def attrs(self) -> dict[str, JSON]: return self.metadata.attributes async def getitem( self, selection: Selection, *, factory: Factory.Create = NDBuffer.create ) -> NDArrayLike: - assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) indexer = BasicIndexer( selection, shape=self.metadata.shape, - chunk_shape=self.metadata.chunk_grid.chunk_shape, + chunk_grid=self.metadata.chunk_grid, ) # setup output array out = factory( - shape=indexer.shape, dtype=self.metadata.dtype, order=self.order, fill_value=0 + shape=indexer.shape, + dtype=self.metadata.dtype, + order=self.order, + fill_value=0, # TODO use fill_value ) # reading chunks and decoding them - await concurrent_map( + await self.metadata.codec_pipeline.read( [ - (chunk_coords, chunk_selection, out_selection, out) + ( + self.store_path / self.metadata.encode_chunk_key(chunk_coords), + self.metadata.get_chunk_spec(chunk_coords, self.order), + chunk_selection, + out_selection, + ) for chunk_coords, chunk_selection, out_selection in indexer ], - self._read_chunk, - config.get("async.concurrency"), + out, ) return out.as_ndarray_like() - async def _save_metadata(self) -> None: - await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(self.metadata.to_bytes())) - - async def _read_chunk( - self, - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - out: NDBuffer, - ) -> None: - chunk_spec = self.metadata.get_chunk_spec(chunk_coords, self.order) - chunk_key_encoding = self.metadata.chunk_key_encoding - chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) - store_path = self.store_path / chunk_key - - if self.codecs.supports_partial_decode: - chunk_array = await self.codecs.decode_partial(store_path, chunk_selection, chunk_spec) - if chunk_array is not None: - out[out_selection] = chunk_array - else: - out[out_selection] = self.metadata.fill_value - else: - chunk_bytes = await store_path.get() - if chunk_bytes is not None: - chunk_array = await self.codecs.decode(chunk_bytes, chunk_spec) - tmp = chunk_array[chunk_selection] - out[out_selection] = tmp - else: - out[out_selection] = self.metadata.fill_value + async def _save_metadata(self, metadata: ArrayMetadata) -> None: + to_save = metadata.to_buffer_dict() + awaitables = [set_or_delete(self.store_path / key, value) for key, value in to_save.items()] + await gather(*awaitables) async def setitem( self, @@ -249,12 +400,10 @@ async def setitem( value: NDArrayLike, factory: Factory.NDArrayLike = 
NDBuffer.from_ndarray_like, ) -> None: - assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) - chunk_shape = self.metadata.chunk_grid.chunk_shape indexer = BasicIndexer( selection, shape=self.metadata.shape, - chunk_shape=chunk_shape, + chunk_grid=self.metadata.chunk_grid, ) sel_shape = indexer.shape @@ -275,122 +424,52 @@ async def setitem( value = factory(value) # merging with existing data and encoding chunks - await concurrent_map( + await self.metadata.codec_pipeline.write( [ ( - value, - chunk_shape, - chunk_coords, + self.store_path / self.metadata.encode_chunk_key(chunk_coords), + self.metadata.get_chunk_spec(chunk_coords, self.order), chunk_selection, out_selection, ) for chunk_coords, chunk_selection, out_selection in indexer ], - self._write_chunk, - config.get("async.concurrency"), + value, ) - async def _write_chunk( - self, - value: NDBuffer, - chunk_shape: ChunkCoords, - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - ) -> None: - chunk_spec = self.metadata.get_chunk_spec(chunk_coords, self.order) - chunk_key_encoding = self.metadata.chunk_key_encoding - chunk_key = chunk_key_encoding.encode_chunk_key(chunk_coords) - store_path = self.store_path / chunk_key - - if is_total_slice(chunk_selection, chunk_shape): - # write entire chunks - if np.isscalar(value): - chunk_array = NDBuffer.create( - shape=chunk_shape, dtype=self.metadata.dtype, fill_value=value - ) - else: - chunk_array = value[out_selection] - await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) + async def resize( + self, new_shape: ChunkCoords, delete_outside_chunks: bool = True + ) -> AsyncArray: + assert len(new_shape) == len(self.metadata.shape) + new_metadata = self.metadata.update_shape(new_shape) - elif self.codecs.supports_partial_encode: - # print("encode_partial", chunk_coords, chunk_selection, repr(self)) - await self.codecs.encode_partial( - store_path, - value[out_selection], - chunk_selection, - chunk_spec, - ) - else: - # writing partial chunks - # read chunk first - chunk_bytes = await store_path.get() - - # merge new value - if chunk_bytes is None: - chunk_array = NDBuffer.create( - shape=chunk_shape, - dtype=self.metadata.dtype, - fill_value=self.metadata.fill_value, - ) - else: - chunk_array = ( - await self.codecs.decode(chunk_bytes, chunk_spec) - ).copy() # make a writable copy - chunk_array[chunk_selection] = value[out_selection] + # Remove all chunks outside of the new shape + old_chunk_coords = set(self.metadata.chunk_grid.all_chunk_coords(self.metadata.shape)) + new_chunk_coords = set(self.metadata.chunk_grid.all_chunk_coords(new_shape)) - await self._write_chunk_to_store(store_path, chunk_array, chunk_spec) + if delete_outside_chunks: - async def _write_chunk_to_store( - self, store_path: StorePath, chunk_array: NDBuffer, chunk_spec: ArraySpec - ) -> None: - if chunk_array.all_equal(self.metadata.fill_value): - # chunks that only contain fill_value will be removed - await store_path.delete() - else: - chunk_bytes = await self.codecs.encode(chunk_array, chunk_spec) - if chunk_bytes is None: - await store_path.delete() - else: - await store_path.set(chunk_bytes) + async def _delete_key(key: str) -> None: + await (self.store_path / key).delete() - async def resize(self, new_shape: ChunkCoords) -> AsyncArray: - if len(new_shape) != len(self.metadata.shape): - raise ValueError( - "The new shape must have the same number of dimensions " - + f"(={len(self.metadata.shape)})." 
+ await concurrent_map( + [ + (self.metadata.encode_chunk_key(chunk_coords),) + for chunk_coords in old_chunk_coords.difference(new_chunk_coords) + ], + _delete_key, + config.get("async.concurrency"), ) - new_metadata = replace(self.metadata, shape=new_shape) - - # Remove all chunks outside of the new shape - assert isinstance(self.metadata.chunk_grid, RegularChunkGrid) - chunk_shape = self.metadata.chunk_grid.chunk_shape - chunk_key_encoding = self.metadata.chunk_key_encoding - old_chunk_coords = set(all_chunk_coords(self.metadata.shape, chunk_shape)) - new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape)) - - async def _delete_key(key: str) -> None: - await (self.store_path / key).delete() - - await concurrent_map( - [ - (chunk_key_encoding.encode_chunk_key(chunk_coords),) - for chunk_coords in old_chunk_coords.difference(new_chunk_coords) - ], - _delete_key, - config.get("async.concurrency"), - ) - # Write new metadata - await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(new_metadata.to_bytes())) + await self._save_metadata(new_metadata) return replace(self, metadata=new_metadata) - async def update_attributes(self, new_attributes: Dict[str, Any]) -> AsyncArray: - new_metadata = replace(self.metadata, attributes=new_attributes) + async def update_attributes(self, new_attributes: dict[str, JSON]) -> AsyncArray: + new_metadata = self.metadata.update_attributes(new_attributes) # Write new metadata - await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(new_metadata.to_bytes())) + await self._save_metadata(new_metadata) return replace(self, metadata=new_metadata) def __repr__(self) -> str: @@ -409,17 +488,29 @@ def create( cls, store: StoreLike, *, + # v2 and v3 shape: ChunkCoords, dtype: npt.DTypeLike, - chunk_shape: ChunkCoords, - fill_value: Optional[Any] = None, - chunk_key_encoding: Union[ - Tuple[Literal["default"], Literal[".", "/"]], - Tuple[Literal["v2"], Literal[".", "/"]], - ] = ("default", "/"), - codecs: Optional[Iterable[Union[Codec, Dict[str, Any]]]] = None, - dimension_names: Optional[Iterable[str]] = None, - attributes: Optional[Dict[str, Any]] = None, + zarr_format: ZarrFormat = 3, + fill_value: Any | None = None, + attributes: dict[str, JSON] | None = None, + # v3 only + chunk_shape: ChunkCoords | None = None, + chunk_key_encoding: ( + ChunkKeyEncoding + | tuple[Literal["default"], Literal[".", "/"]] + | tuple[Literal["v2"], Literal[".", "/"]] + | None + ) = None, + codecs: Iterable[Codec | dict[str, JSON]] | None = None, + dimension_names: Iterable[str] | None = None, + # v2 only + chunks: ChunkCoords | None = None, + dimension_separator: Literal[".", "/"] | None = None, + order: Literal["C", "F"] | None = None, + filters: list[dict[str, JSON]] | None = None, + compressor: dict[str, JSON] | None = None, + # runtime exists_ok: bool = False, ) -> Array: async_array = sync( @@ -427,12 +518,18 @@ def create( store=store, shape=shape, dtype=dtype, - chunk_shape=chunk_shape, + zarr_format=zarr_format, + attributes=attributes, fill_value=fill_value, + chunk_shape=chunk_shape, chunk_key_encoding=chunk_key_encoding, codecs=codecs, dimension_names=dimension_names, - attributes=attributes, + chunks=chunks, + dimension_separator=dimension_separator, + order=order, + filters=filters, + compressor=compressor, exists_ok=exists_ok, ), ) @@ -442,7 +539,7 @@ def create( def from_dict( cls, store_path: StorePath, - data: Dict[str, Any], + data: dict[str, JSON], ) -> Array: async_array = AsyncArray.from_dict(store_path=store_path, data=data) return cls(async_array) @@ 
-455,16 +552,6 @@ def open( async_array = sync(AsyncArray.open(store)) return cls(async_array) - @classmethod - def open_auto( - cls, - store: StoreLike, - ) -> Array: # TODO: Union[Array, ArrayV2]: - async_array = sync( - AsyncArray.open_auto(store), - ) - return cls(async_array) - @property def ndim(self) -> int: return self._async_array.ndim @@ -482,8 +569,8 @@ def dtype(self) -> np.dtype[Any]: return self._async_array.dtype @property - def attrs(self) -> dict[str, Any]: - return self._async_array.attrs + def attrs(self) -> Attributes: + return Attributes(self) @property def metadata(self) -> ArrayMetadata: @@ -514,7 +601,7 @@ def resize(self, new_shape: ChunkCoords) -> Array: ) ) - def update_attributes(self, new_attributes: Dict[str, Any]) -> Array: + def update_attributes(self, new_attributes: dict[str, JSON]) -> Array: return type(self)( sync( self._async_array.update_attributes(new_attributes), diff --git a/src/zarr/array_v2.py b/src/zarr/array_v2.py deleted file mode 100644 index 053d58eb1a..0000000000 --- a/src/zarr/array_v2.py +++ /dev/null @@ -1,524 +0,0 @@ -from __future__ import annotations - -import asyncio -from dataclasses import dataclass, replace -import json -from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union - -import numcodecs -import numpy as np - -from numcodecs.compat import ensure_bytes, ensure_ndarray - -from zarr.buffer import Buffer, NDBuffer -from zarr.common import ( - ZARRAY_JSON, - ZATTRS_JSON, - BytesLike, - ChunkCoords, - Selection, - SliceSelection, - concurrent_map, - to_thread, -) -from zarr.indexing import BasicIndexer, all_chunk_coords, is_total_slice -from zarr.metadata import ArrayV2Metadata -from zarr.store import StoreLike, StorePath, make_store_path -from zarr.sync import sync - -if TYPE_CHECKING: - from zarr.array import Array - - -def as_bytearray(data: Optional[Buffer]) -> Optional[bytes]: - """Help function to convert a Buffer into bytes if not None""" - if data is None: - return data - return data.to_bytes() - - -@dataclass(frozen=True) -class _AsyncArrayProxy: - array: ArrayV2 - - def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy: - return _AsyncArraySelectionProxy(self.array, selection) - - -@dataclass(frozen=True) -class _AsyncArraySelectionProxy: - array: ArrayV2 - selection: Selection - - async def get(self) -> np.ndarray: - return await self.array.get_async(self.selection) - - async def set(self, value: np.ndarray): - return await self.array.set_async(self.selection, value) - - -@dataclass(frozen=True) -class ArrayV2: - metadata: ArrayV2Metadata - attributes: Optional[Dict[str, Any]] - store_path: StorePath - - @classmethod - async def create_async( - cls, - store: StoreLike, - *, - shape: ChunkCoords, - dtype: np.dtype, - chunks: ChunkCoords, - dimension_separator: Literal[".", "/"] = ".", - fill_value: Optional[Union[None, int, float]] = None, - order: Literal["C", "F"] = "C", - filters: Optional[List[Dict[str, Any]]] = None, - compressor: Optional[Dict[str, Any]] = None, - attributes: Optional[Dict[str, Any]] = None, - exists_ok: bool = False, - ) -> ArrayV2: - store_path = make_store_path(store) - if not exists_ok: - assert not await (store_path / ZARRAY_JSON).exists() - - metadata = ArrayV2Metadata( - shape=shape, - dtype=np.dtype(dtype), - chunks=chunks, - order=order, - dimension_separator=dimension_separator, - fill_value=0 if fill_value is None else fill_value, - compressor=( - numcodecs.get_codec(compressor).get_config() if compressor is not None else None - ), - filters=( - 
[numcodecs.get_codec(filter).get_config() for filter in filters] - if filters is not None - else None - ), - ) - array = cls( - metadata=metadata, - store_path=store_path, - attributes=attributes, - ) - await array._save_metadata() - return array - - @classmethod - def create( - cls, - store: StoreLike, - *, - shape: ChunkCoords, - dtype: np.dtype, - chunks: ChunkCoords, - dimension_separator: Literal[".", "/"] = ".", - fill_value: Optional[Union[None, int, float]] = None, - order: Literal["C", "F"] = "C", - filters: Optional[List[Dict[str, Any]]] = None, - compressor: Optional[Dict[str, Any]] = None, - attributes: Optional[Dict[str, Any]] = None, - exists_ok: bool = False, - ) -> ArrayV2: - return sync( - cls.create_async( - store, - shape=shape, - dtype=dtype, - chunks=chunks, - order=order, - dimension_separator=dimension_separator, - fill_value=0 if fill_value is None else fill_value, - compressor=compressor, - filters=filters, - attributes=attributes, - exists_ok=exists_ok, - ), - ) - - @classmethod - async def open_async( - cls, - store: StoreLike, - ) -> ArrayV2: - store_path = make_store_path(store) - zarray_bytes, zattrs_bytes = await asyncio.gather( - (store_path / ZARRAY_JSON).get(), - (store_path / ZATTRS_JSON).get(), - ) - assert zarray_bytes is not None - return cls.from_dict( - store_path, - zarray_json=json.loads(zarray_bytes.to_bytes()), - zattrs_json=json.loads(zattrs_bytes.to_bytes()) if zattrs_bytes is not None else None, - ) - - @classmethod - def open( - cls, - store: StoreLike, - ) -> ArrayV2: - return sync( - cls.open_async(store), - ) - - @classmethod - def from_dict( - cls, - store_path: StorePath, - zarray_json: Any, - zattrs_json: Optional[Any], - ) -> ArrayV2: - metadata = ArrayV2Metadata.from_dict(zarray_json) - out = cls( - store_path=store_path, - metadata=metadata, - attributes=zattrs_json, - ) - out._validate_metadata() - return out - - async def _save_metadata(self) -> None: - self._validate_metadata() - - await (self.store_path / ZARRAY_JSON).set(self.metadata.to_bytes()) - if self.attributes is not None and len(self.attributes) > 0: - await (self.store_path / ZATTRS_JSON).set( - Buffer.from_bytes(json.dumps(self.attributes).encode()), - ) - else: - await (self.store_path / ZATTRS_JSON).delete() - - def _validate_metadata(self) -> None: - assert len(self.metadata.shape) == len( - self.metadata.chunks - ), "`chunks` and `shape` need to have the same number of dimensions." 
- - @property - def ndim(self) -> int: - return len(self.metadata.shape) - - @property - def shape(self) -> ChunkCoords: - return self.metadata.shape - - @property - def dtype(self) -> np.dtype: - return self.metadata.dtype - - @property - def async_(self) -> _AsyncArrayProxy: - return _AsyncArrayProxy(self) - - def __getitem__(self, selection: Selection): - return sync(self.get_async(selection)) - - async def get_async(self, selection: Selection): - indexer = BasicIndexer( - selection, - shape=self.metadata.shape, - chunk_shape=self.metadata.chunks, - ) - - # setup output array - out = NDBuffer.create( - shape=indexer.shape, dtype=self.metadata.dtype, order=self.metadata.order, fill_value=0 - ) - - # reading chunks and decoding them - await concurrent_map( - [ - (chunk_coords, chunk_selection, out_selection, out) - for chunk_coords, chunk_selection, out_selection in indexer - ], - self._read_chunk, - ) - - if out.shape: - return out - else: - return out[()] - - async def _read_chunk( - self, - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - out: np.ndarray, - ): - store_path = self.store_path / self._encode_chunk_key(chunk_coords) - - chunk_array = await self._decode_chunk(as_bytearray(await store_path.get())) - if chunk_array is not None: - tmp = chunk_array[chunk_selection] - out[out_selection] = tmp - else: - out[out_selection] = self.metadata.fill_value - - async def _decode_chunk(self, chunk_bytes: Optional[BytesLike]) -> Optional[np.ndarray]: - if chunk_bytes is None: - return None - - if self.metadata.compressor is not None: - compressor = numcodecs.get_codec(self.metadata.compressor) - chunk_array = ensure_ndarray(await to_thread(compressor.decode, chunk_bytes)) - else: - chunk_array = ensure_ndarray(chunk_bytes) - - # ensure correct dtype - if str(chunk_array.dtype) != self.metadata.dtype: - chunk_array = chunk_array.view(self.metadata.dtype) - - # apply filters in reverse order - if self.metadata.filters is not None: - for filter_metadata in self.metadata.filters[::-1]: - filter = numcodecs.get_codec(filter_metadata) - chunk_array = await to_thread(filter.decode, chunk_array) - - # ensure correct chunk shape - if chunk_array.shape != self.metadata.chunks: - chunk_array = chunk_array.reshape( - self.metadata.chunks, - order=self.metadata.order, - ) - - return chunk_array - - def __setitem__(self, selection: Selection, value: np.ndarray) -> None: - sync(self.set_async(selection, value)) - - async def set_async(self, selection: Selection, value: np.ndarray) -> None: - chunk_shape = self.metadata.chunks - indexer = BasicIndexer( - selection, - shape=self.metadata.shape, - chunk_shape=chunk_shape, - ) - - sel_shape = indexer.shape - - # check value shape - if np.isscalar(value): - # setting a scalar value - pass - else: - if not hasattr(value, "shape"): - value = np.asarray(value, self.metadata.dtype) - assert value.shape == sel_shape - if value.dtype != self.metadata.dtype: - value = value.astype(self.metadata.dtype, order="A") - - # merging with existing data and encoding chunks - await concurrent_map( - [ - ( - value, - chunk_shape, - chunk_coords, - chunk_selection, - out_selection, - ) - for chunk_coords, chunk_selection, out_selection in indexer - ], - self._write_chunk, - ) - - async def _write_chunk( - self, - value: np.ndarray, - chunk_shape: ChunkCoords, - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - ): - store_path = self.store_path / self._encode_chunk_key(chunk_coords) - 
- if is_total_slice(chunk_selection, chunk_shape): - # write entire chunks - if np.isscalar(value): - chunk_array = NDBuffer.create( - shape=chunk_shape, - dtype=self.metadata.dtype, - order=self.metadata.order, - fill_value=value, - ) - else: - chunk_array = value[out_selection] - await self._write_chunk_to_store(store_path, chunk_array) - - else: - # writing partial chunks - # read chunk first - tmp = await self._decode_chunk(as_bytearray(await store_path.get())) - - # merge new value - if tmp is None: - chunk_array = NDBuffer.create( - shape=chunk_shape, - dtype=self.metadata.dtype, - order=self.metadata.order, - fill_value=self.metadata.fill_value, - ) - else: - chunk_array = tmp.copy( - order=self.metadata.order, - ) # make a writable copy - chunk_array[chunk_selection] = value[out_selection] - - await self._write_chunk_to_store(store_path, chunk_array) - - async def _write_chunk_to_store(self, store_path: StorePath, chunk_array: np.ndarray): - chunk_bytes: Optional[BytesLike] - if np.all(chunk_array == self.metadata.fill_value): - # chunks that only contain fill_value will be removed - await store_path.delete() - else: - chunk_bytes = await self._encode_chunk(chunk_array) - if chunk_bytes is None: - await store_path.delete() - else: - await store_path.set(Buffer.from_bytes(chunk_bytes)) - - async def _encode_chunk(self, chunk_array: np.ndarray) -> Optional[BytesLike]: - chunk_array = chunk_array.ravel(order=self.metadata.order) - - if self.metadata.filters is not None: - for filter_metadata in self.metadata.filters: - filter = numcodecs.get_codec(filter_metadata) - chunk_array = await to_thread(filter.encode, chunk_array) - - if self.metadata.compressor is not None: - compressor = numcodecs.get_codec(self.metadata.compressor) - if not chunk_array.flags.c_contiguous and not chunk_array.flags.f_contiguous: - chunk_array = chunk_array.copy(order="A") - encoded_chunk_bytes = ensure_bytes(await to_thread(compressor.encode, chunk_array)) - else: - encoded_chunk_bytes = ensure_bytes(chunk_array) - - return encoded_chunk_bytes - - def _encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: - chunk_identifier = self.metadata.dimension_separator.join(map(str, chunk_coords)) - return "0" if chunk_identifier == "" else chunk_identifier - - async def resize_async(self, new_shape: ChunkCoords) -> ArrayV2: - assert len(new_shape) == len(self.metadata.shape) - new_metadata = replace(self.metadata, shape=new_shape) - - # Remove all chunks outside of the new shape - chunk_shape = self.metadata.chunks - old_chunk_coords = set(all_chunk_coords(self.metadata.shape, chunk_shape)) - new_chunk_coords = set(all_chunk_coords(new_shape, chunk_shape)) - - async def _delete_key(key: str) -> None: - await (self.store_path / key).delete() - - await concurrent_map( - [ - (self._encode_chunk_key(chunk_coords),) - for chunk_coords in old_chunk_coords.difference(new_chunk_coords) - ], - _delete_key, - ) - - # Write new metadata - await (self.store_path / ZARRAY_JSON).set(new_metadata.to_bytes()) - return replace(self, metadata=new_metadata) - - def resize(self, new_shape: ChunkCoords) -> ArrayV2: - return sync(self.resize_async(new_shape)) - - async def convert_to_v3_async(self) -> Array: - from sys import byteorder as sys_byteorder - - from zarr.abc.codec import Codec - from zarr.array import Array - from zarr.common import ZARR_JSON - from zarr.chunk_grids import RegularChunkGrid - from zarr.chunk_key_encodings import V2ChunkKeyEncoding - from zarr.metadata import ArrayMetadata, DataType - - from zarr.codecs 
import ( - BloscCodec, - BloscShuffle, - BytesCodec, - GzipCodec, - TransposeCodec, - ) - - data_type = DataType.from_dtype(self.metadata.dtype) - endian: Literal["little", "big"] - if self.metadata.dtype.byteorder == "=": - endian = sys_byteorder - elif self.metadata.dtype.byteorder == ">": - endian = "big" - else: - endian = "little" - - assert ( - self.metadata.filters is None or len(self.metadata.filters) == 0 - ), "Filters are not supported by v3." - - codecs: List[Codec] = [] - - if self.metadata.order == "F": - codecs.append(TransposeCodec(order=tuple(reversed(range(self.metadata.ndim))))) - codecs.append(BytesCodec(endian=endian)) - - if self.metadata.compressor is not None: - v2_codec = numcodecs.get_codec(self.metadata.compressor).get_config() - assert v2_codec["id"] in ( - "blosc", - "gzip", - ), "Only blosc and gzip are supported by v3." - if v2_codec["id"] == "blosc": - codecs.append( - BloscCodec( - typesize=data_type.byte_count, - cname=v2_codec["cname"], - clevel=v2_codec["clevel"], - shuffle=BloscShuffle.from_int(v2_codec.get("shuffle", 0)), - blocksize=v2_codec.get("blocksize", 0), - ) - ) - elif v2_codec["id"] == "gzip": - codecs.append(GzipCodec(level=v2_codec.get("level", 5))) - - new_metadata = ArrayMetadata( - shape=self.metadata.shape, - chunk_grid=RegularChunkGrid(chunk_shape=self.metadata.chunks), - data_type=data_type, - fill_value=0 if self.metadata.fill_value is None else self.metadata.fill_value, - chunk_key_encoding=V2ChunkKeyEncoding(separator=self.metadata.dimension_separator), - codecs=codecs, - attributes=self.attributes or {}, - dimension_names=None, - ) - - new_metadata_bytes = new_metadata.to_bytes() - await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(new_metadata_bytes)) - - return Array.from_dict( - store_path=self.store_path, - data=json.loads(new_metadata_bytes), - ) - - async def update_attributes_async(self, new_attributes: Dict[str, Any]) -> ArrayV2: - await (self.store_path / ZATTRS_JSON).set( - Buffer.from_bytes(json.dumps(new_attributes).encode()) - ) - return replace(self, attributes=new_attributes) - - def update_attributes(self, new_attributes: Dict[str, Any]) -> ArrayV2: - return sync( - self.update_attributes_async(new_attributes), - ) - - def convert_to_v3(self) -> Array: - return sync(self.convert_to_v3_async()) - - def __repr__(self): - return f"" diff --git a/src/zarr/attributes.py b/src/zarr/attributes.py index 18f6a63a55..e6b26309f2 100644 --- a/src/zarr/attributes.py +++ b/src/zarr/attributes.py @@ -1,21 +1,24 @@ from __future__ import annotations + from collections.abc import MutableMapping -from typing import TYPE_CHECKING, Any, Iterator, Union +from typing import TYPE_CHECKING, Iterator + +from zarr.common import JSON if TYPE_CHECKING: from zarr.group import Group from zarr.array import Array -class Attributes(MutableMapping[str, Any]): - def __init__(self, obj: Union[Array, Group]): +class Attributes(MutableMapping[str, JSON]): + def __init__(self, obj: Array | Group): # key=".zattrs", read_only=False, cache=True, synchronizer=None self._obj = obj - def __getitem__(self, key: str) -> Any: + def __getitem__(self, key: str) -> JSON: return self._obj.metadata.attributes[key] - def __setitem__(self, key: str, value: Any) -> None: + def __setitem__(self, key: str, value: JSON) -> None: new_attrs = dict(self._obj.metadata.attributes) new_attrs[key] = value self._obj = self._obj.update_attributes(new_attrs) diff --git a/src/zarr/chunk_grids.py b/src/zarr/chunk_grids.py index 73557f6e4b..16c0df9174 100644 --- 
a/src/zarr/chunk_grids.py +++ b/src/zarr/chunk_grids.py @@ -1,5 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict +import itertools +from typing import TYPE_CHECKING, Any, Dict, Iterator from dataclasses import dataclass from zarr.abc.metadata import Metadata @@ -10,6 +11,7 @@ parse_named_configuration, parse_shapelike, ) +from zarr.indexing import _ceildiv if TYPE_CHECKING: from typing_extensions import Self @@ -27,6 +29,9 @@ def from_dict(cls, data: Dict[str, JSON]) -> ChunkGrid: return RegularChunkGrid.from_dict(data) raise ValueError(f"Unknown chunk grid. Got {name_parsed}.") + def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]: + raise NotImplementedError + @dataclass(frozen=True) class RegularChunkGrid(ChunkGrid): @@ -45,3 +50,8 @@ def from_dict(cls, data: Dict[str, Any]) -> Self: def to_dict(self) -> Dict[str, JSON]: return {"name": "regular", "configuration": {"chunk_shape": list(self.chunk_shape)}} + + def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]: + return itertools.product( + *(range(0, _ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape)) + ) diff --git a/src/zarr/codecs/__init__.py b/src/zarr/codecs/__init__.py index 8fa0c9f7b0..959a85af57 100644 --- a/src/zarr/codecs/__init__.py +++ b/src/zarr/codecs/__init__.py @@ -7,3 +7,4 @@ from zarr.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation # noqa: F401 from zarr.codecs.transpose import TransposeCodec # noqa: F401 from zarr.codecs.zstd import ZstdCodec # noqa: F401 +from zarr.codecs.pipeline import BatchedCodecPipeline # noqa: F401 diff --git a/src/zarr/codecs/_v2.py b/src/zarr/codecs/_v2.py new file mode 100644 index 0000000000..fb7122600f --- /dev/null +++ b/src/zarr/codecs/_v2.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +from dataclasses import dataclass + +from zarr.buffer import Buffer, NDBuffer +from zarr.codecs.mixins import ArrayArrayCodecBatchMixin, ArrayBytesCodecBatchMixin +from zarr.common import JSON, ArraySpec, to_thread + +import numcodecs +from numcodecs.compat import ensure_bytes, ensure_ndarray + + +@dataclass(frozen=True) +class V2Compressor(ArrayBytesCodecBatchMixin): + compressor: dict[str, JSON] | None + + is_fixed_size = False + + async def decode_single( + self, + chunk_bytes: Buffer, + chunk_spec: ArraySpec, + ) -> NDBuffer: + if chunk_bytes is None: + return None + + if self.compressor is not None: + compressor = numcodecs.get_codec(self.compressor) + chunk_numpy_array = ensure_ndarray( + await to_thread(compressor.decode, chunk_bytes.as_array_like()) + ) + else: + chunk_numpy_array = ensure_ndarray(chunk_bytes.as_array_like()) + + # ensure correct dtype + if str(chunk_numpy_array.dtype) != chunk_spec.dtype: + chunk_numpy_array = chunk_numpy_array.view(chunk_spec.dtype) + + return NDBuffer.from_numpy_array(chunk_numpy_array) + + async def encode_single( + self, + chunk_array: NDBuffer, + _chunk_spec: ArraySpec, + ) -> Buffer | None: + chunk_numpy_array = chunk_array.as_numpy_array() + if self.compressor is not None: + compressor = numcodecs.get_codec(self.compressor) + if ( + not chunk_numpy_array.flags.c_contiguous + and not chunk_numpy_array.flags.f_contiguous + ): + chunk_numpy_array = chunk_numpy_array.copy(order="A") + encoded_chunk_bytes = ensure_bytes( + await to_thread(compressor.encode, chunk_numpy_array) + ) + else: + encoded_chunk_bytes = ensure_bytes(chunk_numpy_array) + + return Buffer.from_bytes(encoded_chunk_bytes) + + def compute_encoded_size(self, 
_input_byte_length: int, _chunk_spec: ArraySpec) -> int: + raise NotImplementedError + + +@dataclass(frozen=True) +class V2Filters(ArrayArrayCodecBatchMixin): + filters: list[dict[str, JSON]] + + is_fixed_size = False + + async def decode_single( + self, + chunk_array: NDBuffer, + chunk_spec: ArraySpec, + ) -> NDBuffer: + chunk_numpy_array = chunk_array.as_numpy_array() + # apply filters in reverse order + if self.filters is not None: + for filter_metadata in self.filters[::-1]: + filter = numcodecs.get_codec(filter_metadata) + chunk_numpy_array = await to_thread(filter.decode, chunk_numpy_array) + + # ensure correct chunk shape + if chunk_numpy_array.shape != chunk_spec.shape: + chunk_numpy_array = chunk_numpy_array.reshape( + chunk_spec.shape, + order=chunk_spec.order, + ) + + return NDBuffer.from_numpy_array(chunk_numpy_array) + + async def encode_single( + self, + chunk_array: NDBuffer, + chunk_spec: ArraySpec, + ) -> NDBuffer | None: + chunk_numpy_array = chunk_array.as_numpy_array().ravel(order=chunk_spec.order) + + for filter_metadata in self.filters: + filter = numcodecs.get_codec(filter_metadata) + chunk_numpy_array = await to_thread(filter.encode, chunk_numpy_array) + + return NDBuffer.from_numpy_array(chunk_numpy_array) + + def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: + raise NotImplementedError diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index 7e94575f9a..ab3ffab479 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -8,7 +8,7 @@ import numcodecs from numcodecs.blosc import Blosc -from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration, to_thread @@ -74,7 +74,7 @@ def parse_blocksize(data: JSON) -> int: @dataclass(frozen=True) -class BloscCodec(BytesBytesCodec): +class BloscCodec(BytesBytesCodecBatchMixin): is_fixed_size = False typesize: int @@ -158,14 +158,14 @@ def _blosc_codec(self) -> Blosc: } return Blosc.from_config(config_dict) - async def decode( + async def decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, ) -> Buffer: return await to_thread(as_numpy_array_wrapper, self._blosc_codec.decode, chunk_bytes) - async def encode( + async def encode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index d6a626e160..6df78a08b8 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -7,7 +7,7 @@ import numpy as np -from zarr.abc.codec import ArrayBytesCodec +from zarr.codecs.mixins import ArrayBytesCodecBatchMixin from zarr.buffer import Buffer, NDBuffer from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration @@ -26,7 +26,7 @@ class Endian(Enum): @dataclass(frozen=True) -class BytesCodec(ArrayBytesCodec): +class BytesCodec(ArrayBytesCodecBatchMixin): is_fixed_size = True endian: Optional[Endian] @@ -60,7 +60,7 @@ def evolve(self, array_spec: ArraySpec) -> Self: ) return self - async def decode( + async def decode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, @@ -83,7 +83,7 @@ async def decode( ) return chunk_array - async def encode( + async def encode_single( self, chunk_array: NDBuffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index 1daf512e43..ab4bad65fe 100644 --- 
a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -7,7 +7,7 @@ from crc32c import crc32c -from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration @@ -19,7 +19,7 @@ @dataclass(frozen=True) -class Crc32cCodec(BytesBytesCodec): +class Crc32cCodec(BytesBytesCodecBatchMixin): is_fixed_size = True @classmethod @@ -30,7 +30,7 @@ def from_dict(cls, data: Dict[str, JSON]) -> Self: def to_dict(self) -> Dict[str, JSON]: return {"name": "crc32c"} - async def decode( + async def decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, @@ -48,7 +48,7 @@ async def decode( ) return Buffer.from_array_like(inner_bytes) - async def encode( + async def encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/gzip.py b/src/zarr/codecs/gzip.py index a8d7f815aa..6a8e30db13 100644 --- a/src/zarr/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING from numcodecs.gzip import GZip -from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread @@ -26,7 +26,7 @@ def parse_gzip_level(data: JSON) -> int: @dataclass(frozen=True) -class GzipCodec(BytesBytesCodec): +class GzipCodec(BytesBytesCodecBatchMixin): is_fixed_size = False level: int = 5 @@ -44,14 +44,14 @@ def from_dict(cls, data: Dict[str, JSON]) -> Self: def to_dict(self) -> Dict[str, JSON]: return {"name": "gzip", "configuration": {"level": self.level}} - async def decode( + async def decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, ) -> Buffer: return await to_thread(as_numpy_array_wrapper, GZip(self.level).decode, chunk_bytes) - async def encode( + async def encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/mixins.py b/src/zarr/codecs/mixins.py new file mode 100644 index 0000000000..8b0a684509 --- /dev/null +++ b/src/zarr/codecs/mixins.py @@ -0,0 +1,131 @@ +from __future__ import annotations + +from abc import abstractmethod +from typing import Awaitable, Callable, Generic, Iterable, TypeVar + + +from zarr.abc.codec import ( + ArrayArrayCodec, + ArrayBytesCodec, + ArrayBytesCodecPartialDecodeMixin, + ArrayBytesCodecPartialEncodeMixin, + ByteGetter, + ByteSetter, + BytesBytesCodec, +) +from zarr.buffer import Buffer, NDBuffer +from zarr.common import ArraySpec, SliceSelection, concurrent_map +from zarr.config import config + + +CodecInput = TypeVar("CodecInput", bound=NDBuffer | Buffer) +CodecOutput = TypeVar("CodecOutput", bound=NDBuffer | Buffer) + + +async def batching_helper( + func: Callable[[CodecInput, ArraySpec], Awaitable[CodecOutput | None]], + batch_info: Iterable[tuple[CodecInput | None, ArraySpec]], +) -> list[CodecOutput | None]: + return await concurrent_map( + [(chunk_array, chunk_spec) for chunk_array, chunk_spec in batch_info], + noop_for_none(func), + config.get("async.concurrency"), + ) + + +def noop_for_none( + func: Callable[[CodecInput, ArraySpec], Awaitable[CodecOutput | None]], +) -> Callable[[CodecInput | None, ArraySpec], Awaitable[CodecOutput | None]]: + async def wrap(chunk: CodecInput | None, chunk_spec: ArraySpec) -> CodecOutput | None: + if chunk is None: + return None + return await func(chunk, 
chunk_spec)
+
+    return wrap
+
+
+class CodecBatchMixin(Generic[CodecInput, CodecOutput]):
+    """The default interface from the Codec class expects batches of chunks.
+    However, many codec implementations operate on a single chunk at a time.
+    This mixin provides abstract decode_single and encode_single methods and
+    implements batching on top of them through concurrent processing.
+
+    Use ArrayArrayCodecBatchMixin, ArrayBytesCodecBatchMixin and BytesBytesCodecBatchMixin
+    for subclassing.
+    """
+
+    @abstractmethod
+    async def decode_single(self, chunk_data: CodecOutput, chunk_spec: ArraySpec) -> CodecInput:
+        pass
+
+    async def decode(
+        self, chunk_data_and_specs: Iterable[tuple[CodecOutput | None, ArraySpec]]
+    ) -> Iterable[CodecInput | None]:
+        return await batching_helper(self.decode_single, chunk_data_and_specs)
+
+    @abstractmethod
+    async def encode_single(
+        self, chunk_data: CodecInput, chunk_spec: ArraySpec
+    ) -> CodecOutput | None:
+        pass
+
+    async def encode(
+        self, chunk_data_and_specs: Iterable[tuple[CodecInput | None, ArraySpec]]
+    ) -> Iterable[CodecOutput | None]:
+        return await batching_helper(self.encode_single, chunk_data_and_specs)
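+
+# For illustration only (this example codec is hypothetical and not introduced
+# by this change): a subclass implements just the *_single methods and inherits
+# batching and the None passthrough from the mixin. A minimal sketch, assuming
+# chunks round-trip through numpy:
+#
+#     @dataclass(frozen=True)
+#     class NegateCodec(ArrayArrayCodecBatchMixin):
+#         is_fixed_size = True
+#
+#         async def decode_single(self, chunk: NDBuffer, spec: ArraySpec) -> NDBuffer:
+#             return NDBuffer.from_numpy_array(-chunk.as_numpy_array())
+#
+#         async def encode_single(self, chunk: NDBuffer, spec: ArraySpec) -> NDBuffer:
+#             return NDBuffer.from_numpy_array(-chunk.as_numpy_array())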
+
+
+class ArrayArrayCodecBatchMixin(CodecBatchMixin[NDBuffer, NDBuffer], ArrayArrayCodec):
+    pass
+
+
+class ArrayBytesCodecBatchMixin(CodecBatchMixin[NDBuffer, Buffer], ArrayBytesCodec):
+    pass
+
+
+class BytesBytesCodecBatchMixin(CodecBatchMixin[Buffer, Buffer], BytesBytesCodec):
+    pass
+
+
+class ArrayBytesCodecPartialDecodeBatchMixin(ArrayBytesCodecPartialDecodeMixin):
+    @abstractmethod
+    async def decode_partial_single(
+        self, byte_getter: ByteGetter, selection: SliceSelection, chunk_spec: ArraySpec
+    ) -> NDBuffer | None:
+        pass
+
+    async def decode_partial(
+        self, batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]]
+    ) -> Iterable[NDBuffer | None]:
+        return await concurrent_map(
+            [
+                (byte_getter, selection, chunk_spec)
+                for byte_getter, selection, chunk_spec in batch_info
+            ],
+            self.decode_partial_single,
+            config.get("async.concurrency"),
+        )
+
+
+class ArrayBytesCodecPartialEncodeBatchMixin(ArrayBytesCodecPartialEncodeMixin):
+    @abstractmethod
+    async def encode_partial_single(
+        self,
+        byte_setter: ByteSetter,
+        chunk_array: NDBuffer,
+        selection: SliceSelection,
+        chunk_spec: ArraySpec,
+    ) -> None:
+        pass
+
+    async def encode_partial(
+        self, batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]]
+    ) -> None:
+        await concurrent_map(
+            [
+                (byte_setter, chunk_array, selection, chunk_spec)
+                for byte_setter, chunk_array, selection, chunk_spec in batch_info
+            ],
+            self.encode_partial_single,
+            config.get("async.concurrency"),
+        )
diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py
index 1602eb1ef8..8396a0c2ce 100644
--- a/src/zarr/codecs/pipeline.py
+++ b/src/zarr/codecs/pipeline.py
@@ -1,63 +1,103 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING, Iterable
-from dataclasses import dataclass
+from itertools import islice
+from typing import TYPE_CHECKING, Iterator, TypeVar, Iterable
 from warnings import warn
+from dataclasses import dataclass
+from zarr.config import config
 from zarr.abc.codec import (
+    ByteGetter,
+    ByteSetter,
+    Codec,
+    CodecPipeline,
     ArrayArrayCodec,
     ArrayBytesCodec,
     ArrayBytesCodecPartialDecodeMixin,
     ArrayBytesCodecPartialEncodeMixin,
     BytesBytesCodec,
-    Codec,
 )
-from zarr.abc.metadata import Metadata
 from zarr.buffer import Buffer, NDBuffer
 from zarr.codecs.registry import get_codec_class
-from zarr.common import parse_named_configuration
+from zarr.common import JSON, concurrent_map, parse_named_configuration
+from zarr.indexing import is_total_slice
+from zarr.metadata import ArrayMetadata
 
 if TYPE_CHECKING:
-    from typing import Iterator, List, Optional, Tuple, Union
-    from zarr.store import StorePath
-    from zarr.metadata import ArrayMetadata
-    from zarr.common import JSON, ArraySpec, SliceSelection
+    from typing_extensions import Self
+    from zarr.common import ArraySpec, SliceSelection
+
+T = TypeVar("T")
+U = TypeVar("U")
+
+
+def _unzip2(iterable: Iterable[tuple[T, U]]) -> tuple[list[T], list[U]]:
+    out0: list[T] = []
+    out1: list[U] = []
+    for item0, item1 in iterable:
+        out0.append(item0)
+        out1.append(item1)
+    return (out0, out1)
+
+
+def batched(iterable: Iterable[T], n: int) -> Iterable[tuple[T, ...]]:
+    if n < 1:
+        raise ValueError("n must be at least one")
+    it = iter(iterable)
+    while batch := tuple(islice(it, n)):
+        yield batch
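+
+# Example of the helper above (illustrative only):
+#     >>> list(batched("ABCDE", 2))
+#     [('A', 'B'), ('C', 'D'), ('E',)]
+# This mirrors itertools.batched, which is only available from Python 3.12.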
+
+
+def resolve_batched(codec: Codec, chunk_specs: Iterable[ArraySpec]) -> Iterable[ArraySpec]:
+    return [codec.resolve_metadata(chunk_spec) for chunk_spec in chunk_specs]
 
 
 @dataclass(frozen=True)
-class CodecPipeline(Metadata):
-    array_array_codecs: Tuple[ArrayArrayCodec, ...]
+class BatchedCodecPipeline(CodecPipeline):
+    """Default codec pipeline.
+
+    This batched codec pipeline divides chunk batches into mini-batches of a
+    configurable batch size. Fetching, decoding, encoding and storing are
+    performed in lock step for each mini-batch. Multiple mini-batches are
+    processed concurrently.
+    """
+
+    array_array_codecs: tuple[ArrayArrayCodec, ...]
     array_bytes_codec: ArrayBytesCodec
-    bytes_bytes_codecs: Tuple[BytesBytesCodec, ...]
+    bytes_bytes_codecs: tuple[BytesBytesCodec, ...]
+    batch_size: int
 
     @classmethod
-    def from_dict(cls, data: Iterable[Union[JSON, Codec]]) -> CodecPipeline:
-        out: List[Codec] = []
+    def from_dict(cls, data: Iterable[JSON | Codec], *, batch_size: int | None = None) -> Self:
+        out: list[Codec] = []
         if not isinstance(data, Iterable):
             raise TypeError(f"Expected iterable, got {type(data)}")
         for c in data:
-            if isinstance(c, Codec):
+            if isinstance(
+                c, ArrayArrayCodec | ArrayBytesCodec | BytesBytesCodec
+            ):  # Can't use Codec here because of mypy limitation
                 out.append(c)
             else:
                 name_parsed, _ = parse_named_configuration(c, require_configuration=False)
                 out.append(get_codec_class(name_parsed).from_dict(c))  # type: ignore[arg-type]
-        return CodecPipeline.from_list(out)
+        return cls.from_list(out, batch_size=batch_size)
 
     def to_dict(self) -> JSON:
         return [c.to_dict() for c in self]
 
-    def evolve(self, array_spec: ArraySpec) -> CodecPipeline:
-        return CodecPipeline.from_list([c.evolve(array_spec) for c in self])
+    def evolve(self, array_spec: ArraySpec) -> Self:
+        return type(self).from_list([c.evolve(array_spec) for c in self])
 
-    @classmethod
-    def from_list(cls, codecs: List[Codec]) -> CodecPipeline:
+    @staticmethod
+    def codecs_from_list(
+        codecs: list[Codec],
+    ) -> tuple[tuple[ArrayArrayCodec, ...], ArrayBytesCodec, tuple[BytesBytesCodec, ...]]:
         from zarr.codecs.sharding import ShardingCodec
 
         if not any(isinstance(codec, ArrayBytesCodec) for codec in codecs):
             raise ValueError("Exactly one array-to-bytes codec is required.")
 
-        prev_codec: Optional[Codec] = None
+        prev_codec: Codec | None = None
         for codec in codecs:
             if prev_codec is not None:
                 if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, ArrayBytesCodec):
@@ -86,27 +126,55 @@ def from_list(cls, codecs: List[Codec]) -> CodecPipeline:
         if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1:
             warn(
                 "Combining a `sharding_indexed` codec disables partial reads and "
-                + "writes, which may lead to inefficient performance."
+                + "writes, which may lead to inefficient performance.",
+                stacklevel=3,
             )
 
-        return CodecPipeline(
-            array_array_codecs=tuple(
-                codec for codec in codecs if isinstance(codec, ArrayArrayCodec)
-            ),
-            array_bytes_codec=next(codec for codec in codecs if isinstance(codec, ArrayBytesCodec)),
-            bytes_bytes_codecs=tuple(
-                codec for codec in codecs if isinstance(codec, BytesBytesCodec)
-            ),
+        return (
+            tuple(codec for codec in codecs if isinstance(codec, ArrayArrayCodec)),
+            next(codec for codec in codecs if isinstance(codec, ArrayBytesCodec)),
+            tuple(codec for codec in codecs if isinstance(codec, BytesBytesCodec)),
+        )
+
+    @classmethod
+    def from_list(cls, codecs: list[Codec], *, batch_size: int | None = None) -> Self:
+        array_array_codecs, array_bytes_codec, bytes_bytes_codecs = cls.codecs_from_list(codecs)
+
+        return cls(
+            array_array_codecs=array_array_codecs,
+            array_bytes_codec=array_bytes_codec,
+            bytes_bytes_codecs=bytes_bytes_codecs,
+            batch_size=batch_size or config.get("codec_pipeline.batch_size"),
+        )
 
     @property
     def supports_partial_decode(self) -> bool:
+        """Determines whether the codec pipeline supports partial decoding.
+
+        Currently, partial decoding is supported only by pipelines whose sole
+        codec is an ArrayBytesCodec with partial-decoding support. This limitation
+        exists because ArrayArrayCodecs can change the slice selection, leading to
+        non-contiguous slices, and BytesBytesCodecs can change the chunk bytes such
+        that slice selections can no longer be attributed to byte ranges, which
+        renders partial decoding infeasible.
+
+        This limitation may be softened in the future."""
         return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance(
             self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin
         )
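+
+    # For intuition (illustrative, not part of this change): a pipeline whose
+    # only codec is a ShardingCodec passes the check above, while adding e.g.
+    # a GzipCodec (a BytesBytesCodec) disables partial decoding and encoding.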
 
     @property
     def supports_partial_encode(self) -> bool:
+        """Determines whether the codec pipeline supports partial encoding.
+
+        Currently, partial encoding is supported only by pipelines whose sole
+        codec is an ArrayBytesCodec with partial-encoding support. This limitation
+        exists because ArrayArrayCodecs can change the slice selection, leading to
+        non-contiguous slices, and BytesBytesCodecs can change the chunk bytes such
+        that slice selections can no longer be attributed to byte ranges, which
+        renders partial encoding infeasible.
+
+        This limitation may be softened in the future."""
         return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance(
             self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin
         )
@@ -124,105 +192,272 @@ def validate(self, array_metadata: ArrayMetadata) -> None:
         for codec in self:
             codec.validate(array_metadata)
 
-    def _codecs_with_resolved_metadata(
-        self, array_spec: ArraySpec
-    ) -> Tuple[
-        List[Tuple[ArrayArrayCodec, ArraySpec]],
-        Tuple[ArrayBytesCodec, ArraySpec],
-        List[Tuple[BytesBytesCodec, ArraySpec]],
+    def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int:
+        for codec in self:
+            byte_length = codec.compute_encoded_size(byte_length, array_spec)
+            array_spec = codec.resolve_metadata(array_spec)
+        return byte_length
+
+    def _codecs_with_resolved_metadata_batched(
+        self, chunk_specs: Iterable[ArraySpec]
+    ) -> tuple[
+        list[tuple[ArrayArrayCodec, list[ArraySpec]]],
+        tuple[ArrayBytesCodec, list[ArraySpec]],
+        list[tuple[BytesBytesCodec, list[ArraySpec]]],
     ]:
-        aa_codecs_with_spec: List[Tuple[ArrayArrayCodec, ArraySpec]] = []
+        aa_codecs_with_spec: list[tuple[ArrayArrayCodec, list[ArraySpec]]] = []
+        chunk_specs = list(chunk_specs)
         for aa_codec in self.array_array_codecs:
-            aa_codecs_with_spec.append((aa_codec, array_spec))
-            array_spec = aa_codec.resolve_metadata(array_spec)
+            aa_codecs_with_spec.append((aa_codec, chunk_specs))
+            chunk_specs = [aa_codec.resolve_metadata(chunk_spec) for chunk_spec in chunk_specs]
 
-        ab_codec_with_spec = (self.array_bytes_codec, array_spec)
-        array_spec = self.array_bytes_codec.resolve_metadata(array_spec)
+        ab_codec_with_spec = (self.array_bytes_codec, chunk_specs)
+        chunk_specs = [
+            self.array_bytes_codec.resolve_metadata(chunk_spec) for chunk_spec in chunk_specs
+        ]
 
-        bb_codecs_with_spec: List[Tuple[BytesBytesCodec, ArraySpec]] = []
+        bb_codecs_with_spec: list[tuple[BytesBytesCodec, list[ArraySpec]]] = []
         for bb_codec in self.bytes_bytes_codecs:
-            bb_codecs_with_spec.append((bb_codec, array_spec))
-            array_spec = bb_codec.resolve_metadata(array_spec)
+            bb_codecs_with_spec.append((bb_codec, chunk_specs))
+            chunk_specs = [bb_codec.resolve_metadata(chunk_spec) for chunk_spec in chunk_specs]
 
         return (aa_codecs_with_spec, ab_codec_with_spec, bb_codecs_with_spec)
 
-    async def decode(
+    async def decode_batch(
         self,
-        chunk_bytes: Buffer,
-        array_spec: ArraySpec,
-    ) -> NDBuffer:
+        chunk_bytes_and_specs: Iterable[tuple[Buffer | None, ArraySpec]],
+    ) -> Iterable[NDBuffer | None]:
+        chunk_bytes_batch: Iterable[Buffer | None]
+        chunk_bytes_batch, chunk_specs = _unzip2(chunk_bytes_and_specs)
+
         (
             aa_codecs_with_spec,
             ab_codec_with_spec,
             bb_codecs_with_spec,
-        ) = self._codecs_with_resolved_metadata(array_spec)
+        ) = self._codecs_with_resolved_metadata_batched(chunk_specs)
 
-        for bb_codec, array_spec in bb_codecs_with_spec[::-1]:
-            chunk_bytes = await bb_codec.decode(chunk_bytes, array_spec)
+        for bb_codec, chunk_spec_batch in bb_codecs_with_spec[::-1]:
+            chunk_bytes_batch = await bb_codec.decode(zip(chunk_bytes_batch, chunk_spec_batch))
 
-        ab_codec, array_spec = ab_codec_with_spec
-        chunk_array = await ab_codec.decode(chunk_bytes, array_spec)
+        ab_codec, chunk_spec_batch = ab_codec_with_spec
+        chunk_array_batch = await ab_codec.decode(zip(chunk_bytes_batch, chunk_spec_batch))
 
-        for aa_codec, array_spec in aa_codecs_with_spec[::-1]:
-            chunk_array = await aa_codec.decode(chunk_array, array_spec)
+        for aa_codec, chunk_spec_batch in aa_codecs_with_spec[::-1]:
+            chunk_array_batch = await 
aa_codec.decode(zip(chunk_array_batch, chunk_spec_batch)) - return chunk_array + return chunk_array_batch - async def decode_partial( + async def decode_partial_batch( self, - store_path: StorePath, - selection: SliceSelection, - chunk_spec: ArraySpec, - ) -> Optional[NDBuffer]: + batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]], + ) -> Iterable[NDBuffer | None]: assert self.supports_partial_decode assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin) - return await self.array_bytes_codec.decode_partial(store_path, selection, chunk_spec) + return await self.array_bytes_codec.decode_partial(batch_info) - async def encode( + async def encode_batch( self, - chunk_array: NDBuffer, - array_spec: ArraySpec, - ) -> Optional[Buffer]: - ( - aa_codecs_with_spec, - ab_codec_with_spec, - bb_codecs_with_spec, - ) = self._codecs_with_resolved_metadata(array_spec) - - for aa_codec, array_spec in aa_codecs_with_spec: - chunk_array_maybe = await aa_codec.encode(chunk_array, array_spec) - if chunk_array_maybe is None: - return None - chunk_array = chunk_array_maybe - - ab_codec, array_spec = ab_codec_with_spec - chunk_bytes_maybe = await ab_codec.encode(chunk_array, array_spec) - if chunk_bytes_maybe is None: - return None - chunk_bytes = chunk_bytes_maybe - - for bb_codec, array_spec in bb_codecs_with_spec: - chunk_bytes_maybe = await bb_codec.encode(chunk_bytes, array_spec) - if chunk_bytes_maybe is None: - return None - chunk_bytes = chunk_bytes_maybe - - assert isinstance(chunk_bytes, Buffer) - return chunk_bytes - - async def encode_partial( + chunk_arrays_and_specs: Iterable[tuple[NDBuffer | None, ArraySpec]], + ) -> Iterable[Buffer | None]: + chunk_array_batch: Iterable[NDBuffer | None] + chunk_specs: Iterable[ArraySpec] + chunk_array_batch, chunk_specs = _unzip2(chunk_arrays_and_specs) + + for aa_codec in self.array_array_codecs: + chunk_array_batch = await aa_codec.encode(zip(chunk_array_batch, chunk_specs)) + chunk_specs = resolve_batched(aa_codec, chunk_specs) + + chunk_bytes_batch = await self.array_bytes_codec.encode(zip(chunk_array_batch, chunk_specs)) + chunk_specs = resolve_batched(self.array_bytes_codec, chunk_specs) + + for bb_codec in self.bytes_bytes_codecs: + chunk_bytes_batch = await bb_codec.encode(zip(chunk_bytes_batch, chunk_specs)) + chunk_specs = resolve_batched(bb_codec, chunk_specs) + + return chunk_bytes_batch + + async def encode_partial_batch( self, - store_path: StorePath, - chunk_array: NDBuffer, - selection: SliceSelection, - chunk_spec: ArraySpec, + batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]], ) -> None: assert self.supports_partial_encode assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin) - await self.array_bytes_codec.encode_partial(store_path, chunk_array, selection, chunk_spec) + await self.array_bytes_codec.encode_partial(batch_info) - def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: - for codec in self: - byte_length = codec.compute_encoded_size(byte_length, array_spec) - array_spec = codec.resolve_metadata(array_spec) - return byte_length + async def read_batch( + self, + batch_info: Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]], + out: NDBuffer, + ) -> None: + if self.supports_partial_decode: + chunk_array_batch = await self.decode_partial_batch( + [ + (byte_getter, chunk_selection, chunk_spec) + for byte_getter, chunk_spec, chunk_selection, _ in batch_info + ] + ) + for chunk_array, (_, chunk_spec, _, 
out_selection) in zip( + chunk_array_batch, batch_info + ): + if chunk_array is not None: + out[out_selection] = chunk_array + else: + out[out_selection] = chunk_spec.fill_value + else: + chunk_bytes_batch = await concurrent_map( + [(byte_getter,) for byte_getter, _, _, _ in batch_info], + lambda byte_getter: byte_getter.get(), + config.get("async.concurrency"), + ) + chunk_array_batch = await self.decode_batch( + [ + (chunk_bytes, chunk_spec) + for chunk_bytes, (_, chunk_spec, _, _) in zip(chunk_bytes_batch, batch_info) + ], + ) + for chunk_array, (_, chunk_spec, chunk_selection, out_selection) in zip( + chunk_array_batch, batch_info + ): + if chunk_array is not None: + tmp = chunk_array[chunk_selection] + out[out_selection] = tmp + else: + out[out_selection] = chunk_spec.fill_value + + async def write_batch( + self, + batch_info: Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]], + value: NDBuffer, + ) -> None: + if self.supports_partial_encode: + await self.encode_partial_batch( + [ + (byte_setter, value[out_selection], chunk_selection, chunk_spec) + for byte_setter, chunk_spec, chunk_selection, out_selection in batch_info + ], + ) + + else: + # Read existing bytes if not total slice + async def _read_key(byte_setter: ByteSetter | None) -> Buffer | None: + if byte_setter is None: + return None + return await byte_setter.get() + + chunk_bytes_batch: Iterable[Buffer | None] + chunk_bytes_batch = await concurrent_map( + [ + (None if is_total_slice(chunk_selection, chunk_spec.shape) else byte_setter,) + for byte_setter, chunk_spec, chunk_selection, _ in batch_info + ], + _read_key, + config.get("async.concurrency"), + ) + chunk_array_batch = await self.decode_batch( + [ + (chunk_bytes, chunk_spec) + for chunk_bytes, (_, chunk_spec, _, _) in zip(chunk_bytes_batch, batch_info) + ], + ) + + def _merge_chunk_array( + existing_chunk_array: NDBuffer | None, + new_chunk_array_slice: NDBuffer, + chunk_spec: ArraySpec, + chunk_selection: SliceSelection, + ) -> NDBuffer: + if is_total_slice(chunk_selection, chunk_spec.shape): + return new_chunk_array_slice + if existing_chunk_array is None: + chunk_array = NDBuffer.create( + shape=chunk_spec.shape, + dtype=chunk_spec.dtype, + order=chunk_spec.order, + fill_value=chunk_spec.fill_value, + ) + else: + chunk_array = existing_chunk_array.copy() # make a writable copy + chunk_array[chunk_selection] = new_chunk_array_slice + return chunk_array + + chunk_array_batch = [ + _merge_chunk_array(chunk_array, value[out_selection], chunk_spec, chunk_selection) + for chunk_array, (_, chunk_spec, chunk_selection, out_selection) in zip( + chunk_array_batch, batch_info + ) + ] + + chunk_array_batch = [ + None + if chunk_array is None or chunk_array.all_equal(chunk_spec.fill_value) + else chunk_array + for chunk_array, (_, chunk_spec, _, _) in zip(chunk_array_batch, batch_info) + ] + + chunk_bytes_batch = await self.encode_batch( + [ + (chunk_array, chunk_spec) + for chunk_array, (_, chunk_spec, _, _) in zip(chunk_array_batch, batch_info) + ], + ) + + async def _write_key(byte_setter: ByteSetter, chunk_bytes: Buffer | None) -> None: + if chunk_bytes is None: + await byte_setter.delete() + else: + await byte_setter.set(chunk_bytes) + + await concurrent_map( + [ + (byte_setter, chunk_bytes) + for chunk_bytes, (byte_setter, _, _, _) in zip(chunk_bytes_batch, batch_info) + ], + _write_key, + config.get("async.concurrency"), + ) + + async def decode( + self, + chunk_bytes_and_specs: Iterable[tuple[Buffer | None, ArraySpec]], + ) -> Iterable[NDBuffer | 
None]: + output: list[NDBuffer | None] = [] + for batch_info in batched(chunk_bytes_and_specs, self.batch_size): + output.extend(await self.decode_batch(batch_info)) + return output + + async def encode( + self, + chunk_arrays_and_specs: Iterable[tuple[NDBuffer | None, ArraySpec]], + ) -> Iterable[Buffer | None]: + output: list[Buffer | None] = [] + for single_batch_info in batched(chunk_arrays_and_specs, self.batch_size): + output.extend(await self.encode_batch(single_batch_info)) + return output + + async def read( + self, + batch_info: Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]], + out: NDBuffer, + ) -> None: + await concurrent_map( + [ + (single_batch_info, out) + for single_batch_info in batched(batch_info, self.batch_size) + ], + self.read_batch, + config.get("async.concurrency"), + ) + + async def write( + self, + batch_info: Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]], + value: NDBuffer, + ) -> None: + await concurrent_map( + [ + (single_batch_info, value) + for single_batch_info in batched(batch_info, self.batch_size) + ], + self.write_batch, + config.get("async.concurrency"), + ) diff --git a/src/zarr/codecs/registry.py b/src/zarr/codecs/registry.py index 7d46041255..b981f1f36c 100644 --- a/src/zarr/codecs/registry.py +++ b/src/zarr/codecs/registry.py @@ -7,7 +7,6 @@ from importlib.metadata import EntryPoint, entry_points as get_entry_points - __codec_registry: Dict[str, Type[Codec]] = {} __lazy_load_codecs: Dict[str, EntryPoint] = {} diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index b63d1e499b..dd7cdcd0b4 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -1,56 +1,47 @@ from __future__ import annotations from enum import Enum -from typing import TYPE_CHECKING, Iterable, Mapping, NamedTuple, Union -from dataclasses import dataclass, replace +from typing import TYPE_CHECKING, Iterable, Mapping, MutableMapping, NamedTuple, Tuple, Union +from dataclasses import dataclass, field, replace from functools import lru_cache import numpy as np -from zarr.abc.codec import ( - Codec, - ArrayBytesCodec, - ArrayBytesCodecPartialDecodeMixin, - ArrayBytesCodecPartialEncodeMixin, -) +from zarr.abc.codec import ByteGetter, ByteSetter, Codec, CodecPipeline from zarr.codecs.bytes import BytesCodec from zarr.codecs.crc32c_ import Crc32cCodec -from zarr.codecs.pipeline import CodecPipeline +from zarr.codecs.mixins import ( + ArrayBytesCodecBatchMixin, + ArrayBytesCodecPartialDecodeBatchMixin, + ArrayBytesCodecPartialEncodeBatchMixin, +) +from zarr.codecs.pipeline import BatchedCodecPipeline from zarr.codecs.registry import register_codec from zarr.common import ( ArraySpec, + ChunkCoords, ChunkCoordsLike, - concurrent_map, parse_enum, parse_named_configuration, parse_shapelike, product, ) -from zarr.config import config from zarr.chunk_grids import RegularChunkGrid from zarr.indexing import ( BasicIndexer, c_order_iter, - is_total_slice, morton_order_iter, ) -from zarr.metadata import ( - ArrayMetadata, - parse_codecs, -) +from zarr.metadata import ArrayMetadata, parse_codecs from zarr.buffer import Buffer, NDBuffer if TYPE_CHECKING: - from typing import Awaitable, Callable, Dict, Iterator, List, Optional, Set, Tuple + from typing import Awaitable, Callable, Dict, Iterator, Optional, Set from typing_extensions import Self - - from zarr.store import StorePath - from zarr.common import ( - JSON, - ChunkCoords, - SliceSelection, - ) + from zarr.common import JSON, SliceSelection MAX_UINT_64 = 2**64 
- 1 +ShardMapping = Mapping[ChunkCoords, Buffer] +ShardMutableMapping = MutableMapping[ChunkCoords, Buffer] class ShardingCodecIndexLocation(Enum): @@ -62,6 +53,28 @@ def parse_index_location(data: JSON) -> ShardingCodecIndexLocation: return parse_enum(data, ShardingCodecIndexLocation) +@dataclass(frozen=True) +class _ShardingByteGetter(ByteGetter): + shard_dict: ShardMapping + chunk_coords: ChunkCoords + + async def get(self, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> Optional[Buffer]: + assert byte_range is None, "byte_range is not supported within shards" + return self.shard_dict.get(self.chunk_coords) + + +@dataclass(frozen=True) +class _ShardingByteSetter(_ShardingByteGetter, ByteSetter): + shard_dict: ShardMutableMapping + + async def set(self, value: Buffer, byte_range: Optional[Tuple[int, int]] = None) -> None: + assert byte_range is None, "byte_range is not supported within shards" + self.shard_dict[self.chunk_coords] = value + + async def delete(self) -> None: + del self.shard_dict[self.chunk_coords] + + class _ShardIndex(NamedTuple): # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) offsets_and_lengths: np.ndarray @@ -79,6 +92,9 @@ def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords: def is_all_empty(self) -> bool: return bool(np.array_equiv(self.offsets_and_lengths, MAX_UINT_64)) + def get_full_chunk_map(self) -> np.ndarray: + return self.offsets_and_lengths[..., 0] != MAX_UINT_64 + def get_chunk_slice(self, chunk_coords: ChunkCoords) -> Optional[Tuple[int, int]]: localized_chunk = self._localize_chunk(chunk_coords) chunk_start, chunk_len = self.offsets_and_lengths[localized_chunk] @@ -125,14 +141,14 @@ def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardIndex: return cls(offsets_and_lengths) -class _ShardProxy(Mapping): - index: _ShardIndex +class _ShardReader(ShardMapping): buf: Buffer + index: _ShardIndex @classmethod async def from_bytes( cls, buf: Buffer, codec: ShardingCodec, chunks_per_shard: ChunkCoords - ) -> _ShardProxy: + ) -> _ShardReader: shard_index_size = codec._shard_index_size(chunks_per_shard) obj = cls() obj.buf = buf @@ -145,18 +161,18 @@ async def from_bytes( return obj @classmethod - def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardProxy: + def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardReader: index = _ShardIndex.create_empty(chunks_per_shard) obj = cls() obj.buf = Buffer.create_zero_length() obj.index = index return obj - def __getitem__(self, chunk_coords: ChunkCoords) -> Optional[Buffer]: + def __getitem__(self, chunk_coords: ChunkCoords) -> Buffer: chunk_byte_slice = self.index.get_chunk_slice(chunk_coords) if chunk_byte_slice: return self.buf[chunk_byte_slice[0] : chunk_byte_slice[1]] - return None + raise KeyError def __len__(self) -> int: return int(self.index.offsets_and_lengths.size / 2) @@ -164,8 +180,11 @@ def __len__(self) -> int: def __iter__(self) -> Iterator[ChunkCoords]: return c_order_iter(self.index.offsets_and_lengths.shape[:-1]) + def is_empty(self) -> bool: + return self.index.is_all_empty() + -class _ShardBuilder(_ShardProxy): +class _ShardBuilder(_ShardReader, ShardMutableMapping): buf: Buffer index: _ShardIndex @@ -174,7 +193,7 @@ def merge_with_morton_order( cls, chunks_per_shard: ChunkCoords, tombstones: Set[ChunkCoords], - *shard_dicts: Mapping[ChunkCoords, Buffer], + *shard_dicts: ShardMapping, ) -> _ShardBuilder: obj = cls.create_empty(chunks_per_shard) for chunk_coords in morton_order_iter(chunks_per_shard): @@ -183,7 +202,7 @@ def 
merge_with_morton_order( for shard_dict in shard_dicts: maybe_value = shard_dict.get(chunk_coords, None) if maybe_value is not None: - obj.append(chunk_coords, maybe_value) + obj[chunk_coords] = maybe_value break return obj @@ -194,12 +213,15 @@ def create_empty(cls, chunks_per_shard: ChunkCoords) -> _ShardBuilder: obj.index = _ShardIndex.create_empty(chunks_per_shard) return obj - def append(self, chunk_coords: ChunkCoords, value: Buffer) -> None: + def __setitem__(self, chunk_coords: ChunkCoords, value: Buffer) -> None: chunk_start = len(self.buf) chunk_length = len(value) self.buf = self.buf + value self.index.set_chunk_slice(chunk_coords, slice(chunk_start, chunk_start + chunk_length)) + def __delitem__(self, chunk_coords: ChunkCoords) -> None: + raise NotImplementedError + async def finalize( self, index_location: ShardingCodecIndexLocation, @@ -215,9 +237,58 @@ async def finalize( return out_buf +@dataclass(frozen=True) +class _MergingShardBuilder(ShardMutableMapping): + old_dict: _ShardReader + new_dict: _ShardBuilder + tombstones: Set[ChunkCoords] = field(default_factory=set) + + def __getitem__(self, chunk_coords: ChunkCoords) -> Buffer: + chunk_bytes_maybe = self.new_dict.get(chunk_coords) + if chunk_bytes_maybe is not None: + return chunk_bytes_maybe + return self.old_dict[chunk_coords] + + def __setitem__(self, chunk_coords: ChunkCoords, value: Buffer) -> None: + self.new_dict[chunk_coords] = value + + def __delitem__(self, chunk_coords: ChunkCoords) -> None: + self.tombstones.add(chunk_coords) + + def __len__(self) -> int: + return self.old_dict.__len__() + + def __iter__(self) -> Iterator[ChunkCoords]: + return self.old_dict.__iter__() + + def is_empty(self) -> bool: + full_chunk_coords_map = self.old_dict.index.get_full_chunk_map() + full_chunk_coords_map = np.logical_or( + full_chunk_coords_map, self.new_dict.index.get_full_chunk_map() + ) + for tombstone in self.tombstones: + full_chunk_coords_map[tombstone] = False + return bool(np.array_equiv(full_chunk_coords_map, False)) + + async def finalize( + self, + index_location: ShardingCodecIndexLocation, + index_encoder: Callable[[_ShardIndex], Awaitable[Buffer]], + ) -> Buffer: + shard_builder = _ShardBuilder.merge_with_morton_order( + self.new_dict.index.chunks_per_shard, + self.tombstones, + self.new_dict, + self.old_dict, + ) + return await shard_builder.finalize(index_location, index_encoder) + + @dataclass(frozen=True) class ShardingCodec( - ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin + ArrayBytesCodecBatchMixin, + ArrayBytesCodecPartialDecodeBatchMixin, + ArrayBytesCodecPartialEncodeBatchMixin, ): chunk_shape: ChunkCoords codecs: CodecPipeline @@ -234,12 +305,14 @@ def __init__( ) -> None: chunk_shape_parsed = parse_shapelike(chunk_shape) codecs_parsed = ( - parse_codecs(codecs) if codecs is not None else CodecPipeline.from_list([BytesCodec()]) + parse_codecs(codecs) + if codecs is not None + else BatchedCodecPipeline.from_list([BytesCodec()]) ) index_codecs_parsed = ( parse_codecs(index_codecs) if index_codecs is not None - else CodecPipeline.from_list([BytesCodec(), Crc32cCodec()]) + else BatchedCodecPipeline.from_list([BytesCodec(), Crc32cCodec()]) ) index_location_parsed = ( parse_index_location(index_location) @@ -252,6 +325,11 @@ def __init__( object.__setattr__(self, "index_codecs", index_codecs_parsed) object.__setattr__(self, "index_location", index_location_parsed) + # Use instance-local lru_cache to avoid memory leaks + object.__setattr__(self, "_get_chunk_spec", 
lru_cache()(self._get_chunk_spec)) + object.__setattr__(self, "_get_index_chunk_spec", lru_cache()(self._get_index_chunk_spec)) + object.__setattr__(self, "_get_chunks_per_shard", lru_cache()(self._get_chunks_per_shard)) + @classmethod def from_dict(cls, data: Dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "sharding_indexed") @@ -295,65 +373,63 @@ def validate(self, array_metadata: ArrayMetadata) -> None: + "shard's inner `chunk_shape`." ) - async def decode( + async def decode_single( self, shard_bytes: Buffer, shard_spec: ArraySpec, ) -> NDBuffer: - # print("decode") shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) + chunk_spec = self._get_chunk_spec(shard_spec) indexer = BasicIndexer( tuple(slice(0, s) for s in shard_shape), shape=shard_shape, - chunk_shape=chunk_shape, + chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), ) # setup output array out = NDBuffer.create( shape=shard_shape, dtype=shard_spec.dtype, order=shard_spec.order, fill_value=0 ) - shard_dict = await _ShardProxy.from_bytes(shard_bytes, self, chunks_per_shard) + shard_dict = await _ShardReader.from_bytes(shard_bytes, self, chunks_per_shard) if shard_dict.index.is_all_empty(): out.fill(shard_spec.fill_value) return out # decoding chunks and writing them into the output buffer - await concurrent_map( + await self.codecs.read( [ ( - shard_dict, - chunk_coords, + _ShardingByteGetter(shard_dict, chunk_coords), + chunk_spec, chunk_selection, out_selection, - shard_spec, - out, ) for chunk_coords, chunk_selection, out_selection in indexer ], - self._read_chunk, - config.get("async.concurrency"), + out, ) return out - async def decode_partial( + async def decode_partial_single( self, - store_path: StorePath, + byte_getter: ByteGetter, selection: SliceSelection, shard_spec: ArraySpec, ) -> Optional[NDBuffer]: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) + chunk_spec = self._get_chunk_spec(shard_spec) indexer = BasicIndexer( selection, shape=shard_shape, - chunk_shape=chunk_shape, + chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), ) # setup output array @@ -365,63 +441,42 @@ async def decode_partial( all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks) # reading bytes of all requested chunks - shard_dict: Mapping[ChunkCoords, Buffer] = {} + shard_dict: ShardMapping = {} if self._is_total_shard(all_chunk_coords, chunks_per_shard): # read entire shard - shard_dict_maybe = await self._load_full_shard_maybe(store_path, chunks_per_shard) + shard_dict_maybe = await self._load_full_shard_maybe(byte_getter, chunks_per_shard) if shard_dict_maybe is None: return None shard_dict = shard_dict_maybe else: # read some chunks within the shard - shard_index = await self._load_shard_index_maybe(store_path, chunks_per_shard) + shard_index = await self._load_shard_index_maybe(byte_getter, chunks_per_shard) if shard_index is None: return None shard_dict = {} for chunk_coords in all_chunk_coords: chunk_byte_slice = shard_index.get_chunk_slice(chunk_coords) if chunk_byte_slice: - chunk_bytes = await store_path.get(chunk_byte_slice) + chunk_bytes = await byte_getter.get(chunk_byte_slice) if chunk_bytes: shard_dict[chunk_coords] = chunk_bytes # decoding chunks and writing them into the output buffer - await concurrent_map( + await self.codecs.read( [ ( - shard_dict, - chunk_coords, + _ShardingByteGetter(shard_dict, chunk_coords), + 
chunk_spec, chunk_selection, out_selection, - shard_spec, - out, ) - for chunk_coords, chunk_selection, out_selection in indexed_chunks + for chunk_coords, chunk_selection, out_selection in indexer ], - self._read_chunk, - config.get("async.concurrency"), + out, ) return out - async def _read_chunk( - self, - shard_dict: Mapping[ChunkCoords, Optional[Buffer]], - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - shard_spec: ArraySpec, - out: NDBuffer, - ) -> None: - chunk_spec = self._get_chunk_spec(shard_spec) - chunk_bytes = shard_dict.get(chunk_coords, None) - if chunk_bytes is not None: - chunk_array = await self.codecs.decode(chunk_bytes, chunk_spec) - tmp = chunk_array[chunk_selection] - out[out_selection] = tmp - else: - out[out_selection] = chunk_spec.fill_value - - async def encode( + async def encode_single( self, shard_array: NDBuffer, shard_spec: ArraySpec, @@ -429,150 +484,77 @@ async def encode( shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) + chunk_spec = self._get_chunk_spec(shard_spec) indexer = list( BasicIndexer( tuple(slice(0, s) for s in shard_shape), shape=shard_shape, - chunk_shape=chunk_shape, + chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), ) ) - async def _write_chunk( - shard_array: NDBuffer, - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - ) -> Tuple[ChunkCoords, Optional[Buffer]]: - assert isinstance(shard_array, NDBuffer) - if is_total_slice(chunk_selection, chunk_shape): - chunk_array = shard_array[out_selection] - else: - # handling writing partial chunks - chunk_array = NDBuffer.create( - shape=chunk_shape, - dtype=shard_spec.dtype, - ) - chunk_array.fill(shard_spec.fill_value) - chunk_array[chunk_selection] = shard_array[out_selection] - if not chunk_array.all_equal(shard_spec.fill_value): - chunk_spec = self._get_chunk_spec(shard_spec) - return ( - chunk_coords, - await self.codecs.encode(chunk_array, chunk_spec), - ) - return (chunk_coords, None) + shard_builder = _ShardBuilder.create_empty(chunks_per_shard) - # assembling and encoding chunks within the shard - encoded_chunks: List[Tuple[ChunkCoords, Optional[Buffer]]] = await concurrent_map( + await self.codecs.write( [ - (shard_array, chunk_coords, chunk_selection, out_selection) + ( + _ShardingByteSetter(shard_builder, chunk_coords), + chunk_spec, + chunk_selection, + out_selection, + ) for chunk_coords, chunk_selection, out_selection in indexer ], - _write_chunk, - config.get("async.concurrency"), + shard_array, ) - if len(encoded_chunks) == 0: - return None - - shard_builder = _ShardBuilder.create_empty(chunks_per_shard) - for chunk_coords, chunk_bytes in encoded_chunks: - if chunk_bytes is not None: - shard_builder.append(chunk_coords, chunk_bytes) return await shard_builder.finalize(self.index_location, self._encode_shard_index) - async def encode_partial( + async def encode_partial_single( self, - store_path: StorePath, + byte_setter: ByteSetter, shard_array: NDBuffer, selection: SliceSelection, shard_spec: ArraySpec, ) -> None: - # print("encode_partial") shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) chunk_spec = self._get_chunk_spec(shard_spec) - old_shard_dict = ( - await self._load_full_shard_maybe(store_path, chunks_per_shard) - ) or _ShardProxy.create_empty(chunks_per_shard) - new_shard_builder = _ShardBuilder.create_empty(chunks_per_shard) - 
tombstones: Set[ChunkCoords] = set() + shard_dict = _MergingShardBuilder( + await self._load_full_shard_maybe(byte_setter, chunks_per_shard) + or _ShardReader.create_empty(chunks_per_shard), + _ShardBuilder.create_empty(chunks_per_shard), + ) indexer = list( BasicIndexer( selection, shape=shard_shape, - chunk_shape=chunk_shape, + chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), ) ) - async def _write_chunk( - chunk_coords: ChunkCoords, - chunk_selection: SliceSelection, - out_selection: SliceSelection, - ) -> Tuple[ChunkCoords, Optional[Buffer]]: - if is_total_slice(chunk_selection, self.chunk_shape): - chunk_array = shard_array[out_selection] - else: - # handling writing partial chunks - # read chunk first - chunk_bytes = old_shard_dict.get(chunk_coords, None) - - # merge new value - if chunk_bytes is None: - chunk_array = NDBuffer.create( - shape=self.chunk_shape, - dtype=shard_spec.dtype, - ) - chunk_array.fill(shard_spec.fill_value) - else: - chunk_array = ( - await self.codecs.decode(chunk_bytes, chunk_spec) - ).copy() # make a writable copy - chunk_array[chunk_selection] = shard_array[out_selection] - - if not chunk_array.all_equal(shard_spec.fill_value): - return ( - chunk_coords, - await self.codecs.encode(chunk_array, chunk_spec), - ) - else: - return (chunk_coords, None) - - encoded_chunks: List[Tuple[ChunkCoords, Optional[Buffer]]] = await concurrent_map( + await self.codecs.write( [ ( - chunk_coords, + _ShardingByteSetter(shard_dict, chunk_coords), + chunk_spec, chunk_selection, out_selection, ) for chunk_coords, chunk_selection, out_selection in indexer ], - _write_chunk, - config.get("async.concurrency"), - ) - - for chunk_coords, chunk_bytes in encoded_chunks: - if chunk_bytes is not None: - new_shard_builder.append(chunk_coords, chunk_bytes) - else: - tombstones.add(chunk_coords) - - shard_builder = _ShardBuilder.merge_with_morton_order( - chunks_per_shard, - tombstones, - new_shard_builder, - old_shard_dict, + shard_array, ) - if shard_builder.index.is_all_empty(): - await store_path.delete() + if shard_dict.is_empty(): + await byte_setter.delete() else: - await store_path.set( - await shard_builder.finalize( + await byte_setter.set( + await shard_dict.finalize( self.index_location, self._encode_shard_index, ) @@ -588,19 +570,28 @@ def _is_total_shard( async def _decode_shard_index( self, index_bytes: Buffer, chunks_per_shard: ChunkCoords ) -> _ShardIndex: - return _ShardIndex( - ( + index_array = next( + iter( await self.index_codecs.decode( - index_bytes, - self._get_index_chunk_spec(chunks_per_shard), + [(index_bytes, self._get_index_chunk_spec(chunks_per_shard))], ) - ).as_numpy_array() + ) ) + assert index_array is not None + return _ShardIndex(index_array.as_numpy_array()) async def _encode_shard_index(self, index: _ShardIndex) -> Buffer: - index_bytes = await self.index_codecs.encode( - NDBuffer.from_numpy_array(index.offsets_and_lengths), - self._get_index_chunk_spec(index.chunks_per_shard), + index_bytes = next( + iter( + await self.index_codecs.encode( + [ + ( + NDBuffer.from_numpy_array(index.offsets_and_lengths), + self._get_index_chunk_spec(index.chunks_per_shard), + ) + ], + ) + ) ) assert index_bytes is not None assert isinstance(index_bytes, Buffer) @@ -611,7 +602,6 @@ def _shard_index_size(self, chunks_per_shard: ChunkCoords) -> int: 16 * product(chunks_per_shard), self._get_index_chunk_spec(chunks_per_shard) ) - @lru_cache def _get_index_chunk_spec(self, chunks_per_shard: ChunkCoords) -> ArraySpec: return ArraySpec( shape=chunks_per_shard + (2,), @@ 
-620,7 +610,6 @@ def _get_index_chunk_spec(self, chunks_per_shard: ChunkCoords) -> ArraySpec: order="C", # Note: this is hard-coded for simplicity -- it is not surfaced into user code ) - @lru_cache def _get_chunk_spec(self, shard_spec: ArraySpec) -> ArraySpec: return ArraySpec( shape=self.chunk_shape, @@ -629,7 +618,6 @@ def _get_chunk_spec(self, shard_spec: ArraySpec) -> ArraySpec: order=shard_spec.order, ) - @lru_cache def _get_chunks_per_shard(self, shard_spec: ArraySpec) -> ChunkCoords: return tuple( s // c @@ -640,31 +628,31 @@ def _get_chunks_per_shard(self, shard_spec: ArraySpec) -> ChunkCoords: ) async def _load_shard_index_maybe( - self, store_path: StorePath, chunks_per_shard: ChunkCoords + self, byte_getter: ByteGetter, chunks_per_shard: ChunkCoords ) -> Optional[_ShardIndex]: shard_index_size = self._shard_index_size(chunks_per_shard) if self.index_location == ShardingCodecIndexLocation.start: - index_bytes = await store_path.get((0, shard_index_size)) + index_bytes = await byte_getter.get((0, shard_index_size)) else: - index_bytes = await store_path.get((-shard_index_size, None)) + index_bytes = await byte_getter.get((-shard_index_size, None)) if index_bytes is not None: return await self._decode_shard_index(index_bytes, chunks_per_shard) return None async def _load_shard_index( - self, store_path: StorePath, chunks_per_shard: ChunkCoords + self, byte_getter: ByteGetter, chunks_per_shard: ChunkCoords ) -> _ShardIndex: return ( - await self._load_shard_index_maybe(store_path, chunks_per_shard) + await self._load_shard_index_maybe(byte_getter, chunks_per_shard) ) or _ShardIndex.create_empty(chunks_per_shard) async def _load_full_shard_maybe( - self, store_path: StorePath, chunks_per_shard: ChunkCoords - ) -> Optional[_ShardProxy]: - shard_bytes = await store_path.get() + self, byte_getter: ByteGetter, chunks_per_shard: ChunkCoords + ) -> Optional[_ShardReader]: + shard_bytes = await byte_getter.get() return ( - await _ShardProxy.from_bytes(shard_bytes, self, chunks_per_shard) + await _ShardReader.from_bytes(shard_bytes, self, chunks_per_shard) if shard_bytes else None ) diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 70ae30f908..5d4d2a7b84 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -3,18 +3,16 @@ from dataclasses import dataclass, replace +from zarr.codecs.mixins import ArrayArrayCodecBatchMixin from zarr.buffer import NDBuffer from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration +from zarr.codecs.registry import register_codec if TYPE_CHECKING: from typing import TYPE_CHECKING, Optional, Tuple from typing_extensions import Self -from zarr.abc.codec import ArrayArrayCodec -from zarr.codecs.registry import register_codec - - def parse_transpose_order(data: Union[JSON, Iterable[int]]) -> Tuple[int, ...]: if not isinstance(data, Iterable): raise TypeError(f"Expected an iterable. Got {data} instead.") @@ -24,7 +22,7 @@ def parse_transpose_order(data: Union[JSON, Iterable[int]]) -> Tuple[int, ...]: @dataclass(frozen=True) -class TransposeCodec(ArrayArrayCodec): +class TransposeCodec(ArrayArrayCodecBatchMixin): is_fixed_size = True order: Tuple[int, ...] 
@@ -73,7 +71,7 @@ def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: order=chunk_spec.order, ) - async def decode( + async def decode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, @@ -84,7 +82,7 @@ async def decode( chunk_array = chunk_array.transpose(inverse_order) return chunk_array - async def encode( + async def encode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index 0cc99a0368..4422188d25 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -5,7 +5,7 @@ from zstandard import ZstdCompressor, ZstdDecompressor -from zarr.abc.codec import BytesBytesCodec +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread @@ -31,7 +31,7 @@ def parse_checksum(data: JSON) -> bool: @dataclass(frozen=True) -class ZstdCodec(BytesBytesCodec): +class ZstdCodec(BytesBytesCodecBatchMixin): is_fixed_size = True level: int = 0 @@ -60,14 +60,14 @@ def _decompress(self, data: bytes) -> bytes: ctx = ZstdDecompressor() return ctx.decompress(data) - async def decode( + async def decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, ) -> Buffer: return await to_thread(as_numpy_array_wrapper, self._decompress, chunk_bytes) - async def encode( + async def encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/common.py b/src/zarr/common.py index 95cb8f4a3e..3ef847a1f3 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -1,6 +1,7 @@ from __future__ import annotations from typing import ( TYPE_CHECKING, + ParamSpec, Literal, Union, Tuple, @@ -58,7 +59,11 @@ async def run(item: Tuple[Any]) -> V: return await asyncio.gather(*[asyncio.ensure_future(run(item)) for item in items]) -async def to_thread(func: Callable[..., V], /, *args: Any, **kwargs: Any) -> V: +P = ParamSpec("P") +U = TypeVar("U") + + +async def to_thread(func: Callable[P, U], /, *args: P.args, **kwargs: P.kwargs) -> U: loop = asyncio.get_running_loop() ctx = contextvars.copy_context() func_call = functools.partial(ctx.run, func, *args, **kwargs) diff --git a/src/zarr/config.py b/src/zarr/config.py index e546cb1c23..5b1640bd56 100644 --- a/src/zarr/config.py +++ b/src/zarr/config.py @@ -6,7 +6,13 @@ config = Config( "zarr", - defaults=[{"array": {"order": "C"}, "async": {"concurrency": None, "timeout": None}}], + defaults=[ + { + "array": {"order": "C"}, + "async": {"concurrency": None, "timeout": None}, + "codec_pipeline": {"batch_size": 1}, + } + ], ) diff --git a/src/zarr/group.py b/src/zarr/group.py index d344b3db00..6cd6ab6aad 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -7,24 +7,34 @@ import logging import numpy.typing as npt -from zarr.buffer import Buffer - -if TYPE_CHECKING: - from typing import Any, AsyncGenerator, Literal, Iterable +from zarr.abc.store import set_or_delete from zarr.abc.codec import Codec from zarr.abc.metadata import Metadata +from zarr.buffer import Buffer from zarr.array import AsyncArray, Array from zarr.attributes import Attributes -from zarr.common import ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, ZGROUP_JSON, ChunkCoords +from zarr.chunk_key_encodings import ChunkKeyEncoding +from zarr.common import ( + JSON, + ZARR_JSON, + ZARRAY_JSON, + ZATTRS_JSON, + ZGROUP_JSON, + ChunkCoords, + ZarrFormat, +) from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import 
SyncMixin, sync
 
 from typing import overload
 
+if TYPE_CHECKING:
+    from typing import Any, AsyncGenerator, Literal, Iterable
+
 logger = logging.getLogger("zarr.group")
 
 
-def parse_zarr_format(data: Any) -> Literal[2, 3]:
+def parse_zarr_format(data: Any) -> ZarrFormat:
     if data in (2, 3):
         return data
     msg = f"Invalid zarr_format. Expected one of 2 or 3. Got {data}."
@@ -64,20 +74,21 @@ def _parse_async_node(node: AsyncArray | AsyncGroup) -> Array | Group:
 @dataclass(frozen=True)
 class GroupMetadata(Metadata):
     attributes: dict[str, Any] = field(default_factory=dict)
-    zarr_format: Literal[2, 3] = 3
+    zarr_format: ZarrFormat = 3
     node_type: Literal["group"] = field(default="group", init=False)
 
-    # todo: rename this, since it doesn't return bytes
-    def to_bytes(self) -> dict[str, bytes]:
+    def to_buffer_dict(self) -> dict[str, Buffer]:
         if self.zarr_format == 3:
-            return {ZARR_JSON: json.dumps(self.to_dict()).encode()}
+            return {ZARR_JSON: Buffer.from_bytes(json.dumps(self.to_dict()).encode())}
         else:
             return {
-                ZGROUP_JSON: json.dumps({"zarr_format": self.zarr_format}).encode(),
-                ZATTRS_JSON: json.dumps(self.attributes).encode(),
+                ZGROUP_JSON: Buffer.from_bytes(
+                    json.dumps({"zarr_format": self.zarr_format}).encode()
+                ),
+                ZATTRS_JSON: Buffer.from_bytes(json.dumps(self.attributes).encode()),
             }
 
-    def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: Literal[2, 3] = 3):
+    def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3):
         attributes_parsed = parse_attributes(attributes)
         zarr_format_parsed = parse_zarr_format(zarr_format)
 
@@ -105,7 +116,7 @@ async def create(
         *,
         attributes: dict[str, Any] = {},
         exists_ok: bool = False,
-        zarr_format: Literal[2, 3] = 3,
+        zarr_format: ZarrFormat = 3,
     ) -> AsyncGroup:
         store_path = make_store_path(store)
         if not exists_ok:
@@ -247,16 +258,15 @@ async def delitem(self, key: str) -> None:
         elif self.metadata.zarr_format == 2:
             await asyncio.gather(
                 (store_path / ZGROUP_JSON).delete(),  # TODO: missing_ok=False
+                (store_path / ZARRAY_JSON).delete(),  # TODO: missing_ok=False
                 (store_path / ZATTRS_JSON).delete(),  # TODO: missing_ok=True
             )
         else:
             raise ValueError(f"unexpected zarr_format: {self.metadata.zarr_format}")
 
     async def _save_metadata(self) -> None:
-        to_save = self.metadata.to_bytes()
-        awaitables = [
-            (self.store_path / key).set(Buffer.from_bytes(value)) for key, value in to_save.items()
-        ]
+        to_save = self.metadata.to_buffer_dict()
+        awaitables = [set_or_delete(self.store_path / key, value) for key, value in to_save.items()]
         await asyncio.gather(*awaitables)
 
     @property
@@ -282,13 +292,25 @@ async def create_array(
         path: str,
         shape: ChunkCoords,
         dtype: npt.DTypeLike,
-        chunk_shape: ChunkCoords,
         fill_value: Any | None = None,
-        chunk_key_encoding: tuple[Literal["default"], Literal[".", "/"]]
-        | tuple[Literal["v2"], Literal[".", "/"]] = ("default", "/"),
-        codecs: Iterable[Codec | dict[str, Any]] | None = None,
+        attributes: dict[str, JSON] | None = None,
+        # v3 only
+        chunk_shape: ChunkCoords | None = None,
+        chunk_key_encoding: (
+            ChunkKeyEncoding
+            | tuple[Literal["default"], Literal[".", "/"]]
+            | tuple[Literal["v2"], Literal[".", "/"]]
+            | None
+        ) = None,
+        codecs: Iterable[Codec | dict[str, JSON]] | None = None,
         dimension_names: Iterable[str] | None = None,
-        attributes: dict[str, Any] | None = None,
+        # v2 only
+        chunks: ChunkCoords | None = None,
+        dimension_separator: Literal[".", "/"] | None = None,
+        order: Literal["C", "F"] | None = None,
+        filters: list[dict[str, JSON]] | None = None,
+        compressor: dict[str, JSON] | None = None,
+        # runtime
         exists_ok: bool = False,
     ) -> AsyncArray:
         return await AsyncArray.create(
@@ -301,6 +323,11 @@ async def create_array(
             codecs=codecs,
             dimension_names=dimension_names,
             attributes=attributes,
+            chunks=chunks,
+            dimension_separator=dimension_separator,
+            order=order,
+            filters=filters,
+            compressor=compressor,
             exists_ok=exists_ok,
             zarr_format=self.metadata.zarr_format,
         )
@@ -311,15 +338,7 @@ async def update_attributes(self, new_attributes: dict[str, Any]) -> "AsyncGroup
         self.metadata.attributes.update(new_attributes)
 
         # Write new metadata
-        to_save = self.metadata.to_bytes()
-        if self.metadata.zarr_format == 2:
-            # only save the .zattrs object
-            await (self.store_path / ZATTRS_JSON).set(Buffer.from_bytes(to_save[ZATTRS_JSON]))
-        else:
-            await (self.store_path / ZARR_JSON).set(Buffer.from_bytes(to_save[ZARR_JSON]))
-
-        self.metadata.attributes.clear()
-        self.metadata.attributes.update(new_attributes)
+        await self._save_metadata()
 
         return self
 
@@ -483,10 +502,8 @@ async def update_attributes_async(self, new_attributes: dict[str, Any]) -> Group
         new_metadata = replace(self.metadata, attributes=new_attributes)
 
         # Write new metadata
-        to_save = new_metadata.to_bytes()
-        awaitables = [
-            (self.store_path / key).set(Buffer.from_bytes(value)) for key, value in to_save.items()
-        ]
+        to_save = new_metadata.to_buffer_dict()
+        awaitables = [set_or_delete(self.store_path / key, value) for key, value in to_save.items()]
         await asyncio.gather(*awaitables)
 
         async_group = replace(self._async_group, metadata=new_metadata)
diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py
index 9f324eb5ea..8e7cd95430 100644
--- a/src/zarr/indexing.py
+++ b/src/zarr/indexing.py
@@ -2,10 +2,13 @@
 import itertools
 import math
-from typing import Iterator, List, NamedTuple, Optional, Tuple
+from typing import TYPE_CHECKING, Iterator, List, NamedTuple, Optional, Tuple
 
 from zarr.common import ChunkCoords, Selection, SliceSelection, product
 
+if TYPE_CHECKING:
+    from zarr.chunk_grids import ChunkGrid
+
 
 def _ensure_tuple(v: Selection) -> SliceSelection:
     if not isinstance(v, tuple):
@@ -131,13 +134,18 @@ def __init__(
         self,
         selection: Selection,
         shape: Tuple[int, ...],
-        chunk_shape: Tuple[int, ...],
+        chunk_grid: ChunkGrid,
     ):
+        from zarr.chunk_grids import RegularChunkGrid
+
+        assert isinstance(
+            chunk_grid, RegularChunkGrid
+        ), "Only regular chunk grids are supported currently."
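+        # Sketch of the new call pattern (values are hypothetical): an indexer
+        # for a (10, 10) array stored in (5, 5) chunks is now constructed as
+        #     BasicIndexer(selection, shape=(10, 10),
+        #                  chunk_grid=RegularChunkGrid(chunk_shape=(5, 5)))
+        # where callers previously passed chunk_shape=(5, 5) directly.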
# setup per-dimension indexers self.dim_indexers = [ _SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) for dim_sel, dim_len, dim_chunk_len in zip( - _ensure_selection(selection, shape), shape, chunk_shape + _ensure_selection(selection, shape), shape, chunk_grid.chunk_shape ) ] self.shape = tuple(s.nitems for s in self.dim_indexers) @@ -202,7 +210,3 @@ def is_total_slice(item: Selection, shape: ChunkCoords) -> bool: ) else: raise TypeError("expected slice or tuple of slices, found %r" % item) - - -def all_chunk_coords(shape: ChunkCoords, chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: - return itertools.product(*(range(0, _ceildiv(s, c)) for s, c in zip(shape, chunk_shape))) diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 098ab34b86..695d83da55 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -1,26 +1,30 @@ from __future__ import annotations +from abc import ABC, abstractmethod from enum import Enum -from typing import TYPE_CHECKING, cast, Dict, Iterable, Any -from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, cast, Iterable +from dataclasses import dataclass, field, replace import json import numpy as np import numpy.typing as npt +from zarr.abc.codec import Codec, CodecPipeline +from zarr.abc.metadata import Metadata from zarr.buffer import Buffer from zarr.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator +from zarr.codecs._v2 import V2Compressor, V2Filters if TYPE_CHECKING: - from typing import Literal, Union, List, Optional, Tuple - from zarr.codecs.pipeline import CodecPipeline - + from typing import Literal + from typing_extensions import Self -from zarr.abc.codec import Codec -from zarr.abc.metadata import Metadata from zarr.common import ( JSON, + ZARR_JSON, + ZARRAY_JSON, + ZATTRS_JSON, ArraySpec, ChunkCoords, parse_dtype, @@ -104,16 +108,58 @@ def from_dtype(cls, dtype: np.dtype[Any]) -> DataType: return DataType[dtype_to_data_type[dtype.str]] -@dataclass(frozen=True) -class ArrayMetadata(Metadata): +@dataclass(frozen=True, kw_only=True) +class ArrayMetadata(Metadata, ABC): + shape: ChunkCoords + chunk_grid: ChunkGrid + attributes: dict[str, JSON] + + @property + @abstractmethod + def dtype(self) -> np.dtype[Any]: + pass + + @property + @abstractmethod + def ndim(self) -> int: + pass + + @property + @abstractmethod + def codec_pipeline(self) -> CodecPipeline: + pass + + @abstractmethod + def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: + pass + + @abstractmethod + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + pass + + @abstractmethod + def to_buffer_dict(self) -> dict[str, Buffer]: + pass + + @abstractmethod + def update_shape(self, shape: ChunkCoords) -> Self: + pass + + @abstractmethod + def update_attributes(self, attributes: dict[str, JSON]) -> Self: + pass + + +@dataclass(frozen=True, kw_only=True) +class ArrayV3Metadata(ArrayMetadata): shape: ChunkCoords data_type: np.dtype[Any] chunk_grid: ChunkGrid chunk_key_encoding: ChunkKeyEncoding fill_value: Any codecs: CodecPipeline - attributes: Dict[str, Any] = field(default_factory=dict) - dimension_names: Optional[Tuple[str, ...]] = None + attributes: dict[str, Any] = field(default_factory=dict) + dimension_names: tuple[str, ...] 
| None = None zarr_format: Literal[3] = field(default=3, init=False) node_type: Literal["array"] = field(default="array", init=False) @@ -182,6 +228,10 @@ def dtype(self) -> np.dtype[Any]: def ndim(self) -> int: return len(self.shape) + @property + def codec_pipeline(self) -> CodecPipeline: + return self.codecs + def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: assert isinstance( self.chunk_grid, RegularChunkGrid @@ -193,7 +243,10 @@ def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) - order=order, ) - def to_bytes(self) -> bytes: + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + return self.chunk_key_encoding.encode_chunk_key(chunk_coords) + + def to_buffer_dict(self) -> dict[str, Buffer]: def _json_convert(o): if isinstance(o, np.dtype): return str(o) @@ -205,13 +258,12 @@ def _json_convert(o): return o.get_config() raise TypeError - return json.dumps( - self.to_dict(), - default=_json_convert, - ).encode() + return { + ZARR_JSON: Buffer.from_bytes(json.dumps(self.to_dict(), default=_json_convert).encode()) + } @classmethod - def from_dict(cls, data: Dict[str, Any]) -> ArrayMetadata: + def from_dict(cls, data: dict[str, JSON]) -> ArrayV3Metadata: # check that the zarr_format attribute is correct _ = parse_zarr_format_v3(data.pop("zarr_format")) # check that the node_type attribute is correct @@ -221,7 +273,7 @@ def from_dict(cls, data: Dict[str, Any]) -> ArrayMetadata: return cls(**data, dimension_names=dimension_names) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: out_dict = super().to_dict() if not isinstance(out_dict, dict): @@ -233,18 +285,24 @@ def to_dict(self) -> Dict[str, Any]: out_dict.pop("dimension_names") return out_dict + def update_shape(self, shape: ChunkCoords) -> Self: + return replace(self, shape=shape) -@dataclass(frozen=True) -class ArrayV2Metadata(Metadata): + def update_attributes(self, attributes: dict[str, JSON]) -> Self: + return replace(self, attributes=attributes) + + +@dataclass(frozen=True, kw_only=True) +class ArrayV2Metadata(ArrayMetadata): shape: ChunkCoords - chunks: ChunkCoords - dtype: np.dtype[Any] - fill_value: Union[None, int, float] = 0 + chunk_grid: RegularChunkGrid + data_type: np.dtype[Any] + fill_value: None | int | float = 0 order: Literal["C", "F"] = "C" - filters: Optional[List[Dict[str, Any]]] = None + filters: list[dict[str, JSON]] | None = None dimension_separator: Literal[".", "/"] = "." - compressor: Optional[Dict[str, Any]] = None - attributes: Optional[Dict[str, Any]] = cast(Dict[str, Any], field(default_factory=dict)) + compressor: dict[str, JSON] | None = None + attributes: dict[str, JSON] = cast(dict[str, JSON], field(default_factory=dict)) zarr_format: Literal[2] = field(init=False, default=2) def __init__( @@ -256,9 +314,9 @@ def __init__( fill_value: Any, order: Literal["C", "F"], dimension_separator: Literal[".", "/"] = ".", - compressor: Optional[Dict[str, Any]] = None, - filters: Optional[List[Dict[str, Any]]] = None, - attributes: Optional[Dict[str, JSON]] = None, + compressor: dict[str, JSON] | None = None, + filters: list[dict[str, JSON]] | None = None, + attributes: dict[str, JSON] | None = None, ): """ Metadata for a Zarr version 2 array. 
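
The metadata classes in these hunks combine frozen, keyword-only dataclasses with an abstract base, and `ArrayV2Metadata` keeps a hand-written `__init__` so it can parse its inputs before assignment. A minimal sketch of that combination, with hypothetical class names rather than the real ones:

    from abc import ABC, abstractmethod
    from dataclasses import dataclass

    @dataclass(frozen=True, kw_only=True)
    class Meta(ABC):
        shape: tuple[int, ...]

        @property
        @abstractmethod
        def ndim(self) -> int: ...

    @dataclass(frozen=True, kw_only=True)
    class MetaV2(Meta):
        chunks: tuple[int, ...]

        def __init__(self, *, shape, chunks):
            # A frozen dataclass rejects normal attribute assignment, so a
            # hand-written __init__ must assign through object.__setattr__,
            # as ArrayV2Metadata.__init__ does above.
            object.__setattr__(self, "shape", tuple(shape))
            object.__setattr__(self, "chunks", tuple(chunks))

        @property
        def ndim(self) -> int:
            return len(self.shape)

    print(MetaV2(shape=(4, 4), chunks=(2, 2)).ndim)  # 2

Note that a user-defined `__init__` survives the `@dataclass` decorator: the generated one is only added when the class does not already define it.
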
@@ -268,14 +326,14 @@ def __init__( chunks_parsed = parse_shapelike(chunks) compressor_parsed = parse_compressor(compressor) order_parsed = parse_indexing_order(order) - dimension_separator_parsed = parse_separator(order) + dimension_separator_parsed = parse_separator(dimension_separator) filters_parsed = parse_filters(filters) fill_value_parsed = parse_fill_value(fill_value) attributes_parsed = parse_attributes(attributes) object.__setattr__(self, "shape", shape_parsed) object.__setattr__(self, "data_type", data_type_parsed) - object.__setattr__(self, "chunks", chunks_parsed) + object.__setattr__(self, "chunk_grid", RegularChunkGrid(chunk_shape=chunks_parsed)) object.__setattr__(self, "compressor", compressor_parsed) object.__setattr__(self, "order", order_parsed) object.__setattr__(self, "dimension_separator", dimension_separator_parsed) @@ -290,7 +348,23 @@ def __init__( def ndim(self) -> int: return len(self.shape) - def to_bytes(self) -> Buffer: + @property + def dtype(self) -> np.dtype[Any]: + return self.data_type + + @property + def chunks(self) -> ChunkCoords: + return self.chunk_grid.chunk_shape + + @property + def codec_pipeline(self) -> CodecPipeline: + from zarr.codecs import BatchedCodecPipeline + + return BatchedCodecPipeline.from_list( + [V2Filters(self.filters or []), V2Compressor(self.compressor)] + ) + + def to_buffer_dict(self) -> dict[str, Buffer]: def _json_convert(o): if isinstance(o, np.dtype): if o.fields is None: @@ -299,16 +373,54 @@ def _json_convert(o): return o.descr raise TypeError - return Buffer.from_bytes(json.dumps(self.to_dict(), default=_json_convert).encode()) + zarray_dict = self.to_dict() + assert isinstance(zarray_dict, dict) + zattrs_dict = zarray_dict.pop("attributes", {}) + assert isinstance(zattrs_dict, dict) + return { + ZARRAY_JSON: Buffer.from_bytes(json.dumps(zarray_dict, default=_json_convert).encode()), + ZATTRS_JSON: Buffer.from_bytes(json.dumps(zattrs_dict).encode()), + } @classmethod - def from_dict(cls, data: Dict[str, Any]) -> ArrayV2Metadata: + def from_dict(cls, data: dict[str, Any]) -> ArrayV2Metadata: # check that the zarr_format attribute is correct _ = parse_zarr_format_v2(data.pop("zarr_format")) return cls(**data) + def to_dict(self) -> JSON: + zarray_dict = super().to_dict() + + assert isinstance(zarray_dict, dict) + + _ = zarray_dict.pop("chunk_grid") + zarray_dict["chunks"] = self.chunk_grid.chunk_shape + + _ = zarray_dict.pop("data_type") + zarray_dict["dtype"] = self.data_type + + return zarray_dict + + def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: + return ArraySpec( + shape=self.chunk_grid.chunk_shape, + dtype=self.dtype, + fill_value=self.fill_value, + order=order, + ) + + def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: + chunk_identifier = self.dimension_separator.join(map(str, chunk_coords)) + return "0" if chunk_identifier == "" else chunk_identifier + + def update_shape(self, shape: ChunkCoords) -> Self: + return replace(self, shape=shape) + + def update_attributes(self, attributes: dict[str, JSON]) -> Self: + return replace(self, attributes=attributes) + -def parse_dimension_names(data: Any) -> Tuple[str, ...] | None: +def parse_dimension_names(data: Any) -> tuple[str, ...] | None: if data is None: return data if isinstance(data, Iterable) and all([isinstance(x, str) for x in data]): @@ -318,11 +430,11 @@ def parse_dimension_names(data: Any) -> Tuple[str, ...] 
| None: # todo: real validation -def parse_attributes(data: Any) -> Dict[str, JSON]: +def parse_attributes(data: Any) -> dict[str, JSON]: if data is None: return {} - data_json = cast(Dict[str, JSON], data) + data_json = cast(dict[str, JSON], data) return data_json @@ -349,12 +461,12 @@ def parse_node_type_array(data: Any) -> Literal["array"]: # todo: real validation -def parse_filters(data: Any) -> List[Codec]: +def parse_filters(data: Any) -> list[dict[str, JSON]]: return data # todo: real validation -def parse_compressor(data: Any) -> Codec: +def parse_compressor(data: Any) -> dict[str, JSON] | None: return data @@ -368,9 +480,9 @@ def parse_v2_metadata(data: ArrayV2Metadata) -> ArrayV2Metadata: return data -def parse_codecs(data: Iterable[Union[Codec, JSON]]) -> CodecPipeline: - from zarr.codecs.pipeline import CodecPipeline +def parse_codecs(data: Iterable[Codec | JSON]) -> CodecPipeline: + from zarr.codecs import BatchedCodecPipeline if not isinstance(data, Iterable): raise TypeError(f"Expected iterable, got {type(data)}") - return CodecPipeline.from_dict(data) + return BatchedCodecPipeline.from_dict(data) diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index 665e3124c0..73553b5565 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -615,9 +615,9 @@ async def test_delete_empty_chunks(store: Store): assert await (store / "delete_empty_chunks/c0/0").get() is None -async def test_delete_empty_sharded_chunks(store: Store): +async def test_delete_empty_shards(store: Store): a = await AsyncArray.create( - store / "delete_empty_sharded_chunks", + store / "delete_empty_shards", shape=(16, 16), chunk_shape=(8, 16), dtype="uint16", @@ -635,8 +635,8 @@ async def test_delete_empty_sharded_chunks(store: Store): data = np.ones((16, 16), dtype="uint16") data[:8, :8] = 0 assert np.array_equal(data, await _AsyncArrayProxy(a)[:, :].get()) - assert await (store / "delete_empty_sharded_chunks/c/1/0").get() is None - chunk_bytes = await (store / "delete_empty_sharded_chunks/c/0/0").get() + assert await (store / "delete_empty_shards/c/1/0").get() is None + chunk_bytes = await (store / "delete_empty_shards/c/0/0").get() assert chunk_bytes is not None and len(chunk_bytes) == 16 * 2 + 8 * 8 * 2 + 4 diff --git a/tests/v3/test_config.py b/tests/v3/test_config.py index 43acdec5fa..aed9775d17 100644 --- a/tests/v3/test_config.py +++ b/tests/v3/test_config.py @@ -4,7 +4,11 @@ def test_config_defaults_set(): # regression test for available defaults assert config.defaults == [ - {"array": {"order": "C"}, "async": {"concurrency": None, "timeout": None}} + { + "array": {"order": "C"}, + "async": {"concurrency": None, "timeout": None}, + "codec_pipeline": {"batch_size": 1}, + } ] assert config.get("array.order") == "C" diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 16e4ceeecf..5a6751c11a 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -234,10 +234,7 @@ def test_asyncgroup_from_dict(store: MemoryStore | LocalStore, data: dict[str, A @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) -@pytest.mark.parametrize( - "zarr_format", - (pytest.param(2, marks=pytest.mark.xfail(reason="V2 arrays cannot be created yet.")), 3), -) +@pytest.mark.parametrize("zarr_format", (2, 3)) async def test_asyncgroup_getitem(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: """ Create an `AsyncGroup`, then create members of that group, and ensure that we can access those @@ -264,10 +261,7 @@ async def test_asyncgroup_getitem(store: 
LocalStore | MemoryStore, zarr_format: @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) -@pytest.mark.parametrize( - "zarr_format", - (2, 3), -) +@pytest.mark.parametrize("zarr_format", (2, 3)) async def test_asyncgroup_delitem(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: agroup = await AsyncGroup.create(store=store, zarr_format=zarr_format) sub_array_path = "sub_array" @@ -316,10 +310,7 @@ async def test_asyncgroup_create_group( @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) -@pytest.mark.parametrize( - "zarr_format", - (pytest.param(2, marks=pytest.mark.xfail(reason="V2 arrays cannot be created yet")), 3), -) +@pytest.mark.parametrize("zarr_format", (2, 3)) async def test_asyncgroup_create_array( store: LocalStore | MemoryStore, zarr_format: ZarrFormat, diff --git a/tests/v3/test_v2.py b/tests/v3/test_v2.py new file mode 100644 index 0000000000..5b831b1bb0 --- /dev/null +++ b/tests/v3/test_v2.py @@ -0,0 +1,28 @@ +from typing import Iterator +import numpy as np +import pytest + +from zarr.abc.store import Store +from zarr.array import Array +from zarr.store import StorePath, MemoryStore + + +@pytest.fixture +def store() -> Iterator[Store]: + yield StorePath(MemoryStore()) + + +def test_simple(store: Store): + data = np.arange(0, 256, dtype="uint16").reshape((16, 16)) + + a = Array.create( + store / "simple_v2", + zarr_format=2, + shape=data.shape, + chunks=(16, 16), + dtype=data.dtype, + fill_value=0, + ) + + a[:, :] = data + assert np.array_equal(data, a[:, :]) From 69ad5e7c99559e3f282a3ec27d7ed27778b721b5 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Fri, 17 May 2024 11:41:15 +0200 Subject: [PATCH 0535/1078] Configure Ruff to apply flake8-bugbear/isort/pyupgrade (#1890) * config change * auto changes * manual fixes * ci --- bench/compress_normal.py | 2 +- pyproject.toml | 9 ++- src/zarr/__init__.py | 2 +- src/zarr/abc/codec.py | 5 +- src/zarr/abc/metadata.py | 9 ++- src/zarr/abc/store.py | 24 +++--- src/zarr/array.py | 20 ++--- src/zarr/attributes.py | 6 +- src/zarr/buffer.py | 14 ++-- src/zarr/chunk_grids.py | 14 ++-- src/zarr/chunk_key_encodings.py | 9 ++- src/zarr/codecs/__init__.py | 4 +- src/zarr/codecs/_v2.py | 6 +- src/zarr/codecs/blosc.py | 20 ++--- src/zarr/codecs/bytes.py | 21 +++--- src/zarr/codecs/crc32c_.py | 13 ++-- src/zarr/codecs/gzip.py | 13 ++-- src/zarr/codecs/mixins.py | 7 +- src/zarr/codecs/pipeline.py | 74 ++++++++++++------- src/zarr/codecs/registry.py | 15 ++-- src/zarr/codecs/sharding.py | 52 +++++++------ src/zarr/codecs/transpose.py | 20 ++--- src/zarr/codecs/zstd.py | 14 ++-- src/zarr/common.py | 40 ++++------ src/zarr/fixture/.zgroup | 3 + src/zarr/fixture/flat/.zarray | 23 ++++++ src/zarr/fixture/flat/0.0 | Bin 0 -> 48 bytes src/zarr/fixture/flat_legacy/.zarray | 22 ++++++ src/zarr/fixture/flat_legacy/0.0 | Bin 0 -> 48 bytes src/zarr/fixture/meta/.zarray | 23 ++++++ src/zarr/fixture/meta/0.0 | Bin 0 -> 48 bytes src/zarr/fixture/nested/.zarray | 23 ++++++ src/zarr/fixture/nested/0/0 | Bin 0 -> 48 bytes src/zarr/fixture/nested_legacy/.zarray | 23 ++++++ src/zarr/fixture/nested_legacy/0/0 | Bin 0 -> 48 bytes src/zarr/group.py | 36 +++++---- src/zarr/indexing.py | 19 +++-- src/zarr/metadata.py | 12 +-- src/zarr/store/core.py | 10 +-- src/zarr/store/local.py | 2 +- src/zarr/store/memory.py | 23 +++--- src/zarr/store/remote.py | 19 ++--- src/zarr/sync.py | 6 +- src/zarr/testing/__init__.py | 2 +- tests/v3/conftest.py | 7 +- tests/v3/package_with_entrypoint/__init__.py | 1 + 
tests/v3/test_buffer.py | 5 +- tests/v3/test_codec_entrypoints.py | 1 - tests/v3/test_codecs.py | 26 +++---- tests/v3/test_common.py | 20 ++--- tests/v3/test_group.py | 15 ++-- tests/v3/test_metadata.py | 7 +- tests/v3/test_store.py | 6 +- tests/v3/test_sync.py | 8 +- tests/v3/test_v2.py | 5 +- 55 files changed, 455 insertions(+), 305 deletions(-) create mode 100644 src/zarr/fixture/.zgroup create mode 100644 src/zarr/fixture/flat/.zarray create mode 100644 src/zarr/fixture/flat/0.0 create mode 100644 src/zarr/fixture/flat_legacy/.zarray create mode 100644 src/zarr/fixture/flat_legacy/0.0 create mode 100644 src/zarr/fixture/meta/.zarray create mode 100644 src/zarr/fixture/meta/0.0 create mode 100644 src/zarr/fixture/nested/.zarray create mode 100644 src/zarr/fixture/nested/0/0 create mode 100644 src/zarr/fixture/nested_legacy/.zarray create mode 100644 src/zarr/fixture/nested_legacy/0/0 diff --git a/bench/compress_normal.py b/bench/compress_normal.py index 803d54b76b..608cfe8dce 100644 --- a/bench/compress_normal.py +++ b/bench/compress_normal.py @@ -1,9 +1,9 @@ import sys import timeit +import line_profiler import numpy as np -import line_profiler import zarr from zarr import blosc diff --git a/pyproject.toml b/pyproject.toml index 3014f98031..1aafb64a2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -160,12 +160,17 @@ extend-exclude = [ "build", "dist", "venv", - "docs" + "docs", + "src/zarr/v2/", + "tests/v2/", ] [tool.ruff.lint] extend-select = [ - "RUF" + "B", # flake8-bugbear + "I", # isort + "UP", # pyupgrade + "RUF", ] ignore = [ "RUF003", diff --git a/src/zarr/__init__.py b/src/zarr/__init__.py index 00c01560f4..fdab564c64 100644 --- a/src/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -1,6 +1,7 @@ from __future__ import annotations import zarr.codecs # noqa: F401 +from zarr._version import version as __version__ from zarr.array import Array, AsyncArray from zarr.config import config # noqa: F401 from zarr.group import AsyncGroup, Group @@ -9,7 +10,6 @@ make_store_path, ) from zarr.sync import sync as _sync -from zarr._version import version as __version__ # in case setuptools scm screw up and find version to be 0.0.0 assert not __version__.startswith("0.0.0") diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 1c665590bf..d8d7edf547 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -1,15 +1,16 @@ from __future__ import annotations from abc import abstractmethod -from typing import TYPE_CHECKING, Generic, Iterable, TypeVar +from collections.abc import Iterable +from typing import TYPE_CHECKING, Generic, TypeVar from zarr.abc.metadata import Metadata from zarr.abc.store import ByteGetter, ByteSetter from zarr.buffer import Buffer, NDBuffer - if TYPE_CHECKING: from typing_extensions import Self + from zarr.common import ArraySpec, SliceSelection from zarr.metadata import ArrayMetadata diff --git a/src/zarr/abc/metadata.py b/src/zarr/abc/metadata.py index f27b37cba4..36edf69534 100644 --- a/src/zarr/abc/metadata.py +++ b/src/zarr/abc/metadata.py @@ -1,11 +1,12 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Sequence + +from collections.abc import Sequence +from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Dict from typing_extensions import Self -from dataclasses import fields, dataclass +from dataclasses import dataclass, fields from zarr.common import JSON @@ -36,7 +37,7 @@ def to_dict(self) -> JSON: return out_dict @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, 
data: dict[str, JSON]) -> Self: """ Create an instance of the model from a dictionary """ diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index a3a112e58e..fee5422e9e 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -1,16 +1,16 @@ -from abc import abstractmethod, ABC +from abc import ABC, abstractmethod from collections.abc import AsyncGenerator -from typing import List, Protocol, Tuple, Optional, runtime_checkable +from typing import Protocol, runtime_checkable -from zarr.common import BytesLike from zarr.buffer import Buffer +from zarr.common import BytesLike class Store(ABC): @abstractmethod async def get( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[Buffer]: + self, key: str, byte_range: tuple[int, int | None] | None = None + ) -> Buffer | None: """Retrieve the value associated with a given key. Parameters @@ -26,8 +26,8 @@ async def get( @abstractmethod async def get_partial_values( - self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[Optional[Buffer]]: + self, key_ranges: list[tuple[str, tuple[int, int]]] + ) -> list[Buffer | None]: """Retrieve possibly partial values from given key_ranges. Parameters @@ -150,18 +150,14 @@ def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: @runtime_checkable class ByteGetter(Protocol): - async def get( - self, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[Buffer]: ... + async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: ... @runtime_checkable class ByteSetter(Protocol): - async def get( - self, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[Buffer]: ... + async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: ... - async def set(self, value: Buffer, byte_range: Optional[Tuple[int, int]] = None) -> None: ... + async def set(self, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: ... async def delete(self) -> None: ... diff --git a/src/zarr/array.py b/src/zarr/array.py index 61f91ab966..039f39e98e 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -1,29 +1,28 @@ from __future__ import annotations +import json + # Notes on what I've changed here: # 1. Split Array into AsyncArray and Array # 3. Added .size and .attrs methods # 4. Temporarily disabled the creation of ArrayV2 # 5. Added from_dict to AsyncArray - # Questions to consider: # 1. Was splitting the array into two classes really necessary? 
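
The notes above concern the AsyncArray/Array split. A minimal sketch of the underlying pattern, a synchronous facade that submits its async twin's coroutines to an event loop; zarr's `sync()` helper reuses a dedicated background loop, so `asyncio.run` here is a simplification:

    import asyncio

    class AsyncThing:
        async def get(self) -> str:
            await asyncio.sleep(0)  # stand-in for real store I/O
            return "value"

    class Thing:
        # The sync class wraps its async twin and drives the coroutines
        # from blocking methods.
        def __init__(self, async_thing: AsyncThing):
            self._async = async_thing

        def get(self) -> str:
            return asyncio.run(self._async.get())

    print(Thing(AsyncThing()).get())  # value
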
- - from asyncio import gather +from collections.abc import Iterable from dataclasses import dataclass, replace - -import json -from typing import Any, Iterable, Literal +from typing import Any, Literal import numpy as np import numpy.typing as npt + from zarr.abc.codec import Codec from zarr.abc.store import set_or_delete - - from zarr.attributes import Attributes from zarr.buffer import Factory, NDArrayLike, NDBuffer +from zarr.chunk_grids import RegularChunkGrid +from zarr.chunk_key_encodings import ChunkKeyEncoding, DefaultChunkKeyEncoding, V2ChunkKeyEncoding from zarr.codecs import BytesCodec from zarr.common import ( JSON, @@ -36,11 +35,8 @@ concurrent_map, ) from zarr.config import config - from zarr.indexing import BasicIndexer -from zarr.chunk_grids import RegularChunkGrid -from zarr.chunk_key_encodings import ChunkKeyEncoding, DefaultChunkKeyEncoding, V2ChunkKeyEncoding -from zarr.metadata import ArrayMetadata, ArrayV3Metadata, ArrayV2Metadata, parse_indexing_order +from zarr.metadata import ArrayMetadata, ArrayV2Metadata, ArrayV3Metadata, parse_indexing_order from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import sync diff --git a/src/zarr/attributes.py b/src/zarr/attributes.py index e6b26309f2..079ae38a33 100644 --- a/src/zarr/attributes.py +++ b/src/zarr/attributes.py @@ -1,13 +1,13 @@ from __future__ import annotations -from collections.abc import MutableMapping -from typing import TYPE_CHECKING, Iterator +from collections.abc import Iterator, MutableMapping +from typing import TYPE_CHECKING from zarr.common import JSON if TYPE_CHECKING: - from zarr.group import Group from zarr.array import Array + from zarr.group import Group class Attributes(MutableMapping[str, JSON]): diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index a633cc09ec..84bf6b0bb0 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -1,15 +1,12 @@ from __future__ import annotations import sys +from collections.abc import Callable, Iterable from typing import ( TYPE_CHECKING, Any, - Callable, - Iterable, Literal, - Optional, Protocol, - Tuple, TypeAlias, ) @@ -17,6 +14,7 @@ if TYPE_CHECKING: from typing_extensions import Self + from zarr.codecs.bytes import Endian from zarr.common import BytesLike @@ -44,7 +42,7 @@ def __call__( shape: Iterable[int], dtype: np.DTypeLike, order: Literal["C", "F"], - fill_value: Optional[Any], + fill_value: Any | None, ) -> NDBuffer: """Factory function to create a new NDBuffer (or subclass) @@ -227,7 +225,7 @@ def __add__(self, other: Buffer) -> Self: return self.__class__(np.concatenate((self._data, other_array))) def __eq__(self, other: Any) -> bool: - if isinstance(other, (bytes, bytearray)): + if isinstance(other, bytes | bytearray): # Many of the tests compares `Buffer` with `bytes` so we # convert the bytes to a Buffer and try again return self == self.from_bytes(other) @@ -275,7 +273,7 @@ def create( shape: Iterable[int], dtype: np.DTypeLike, order: Literal["C", "F"] = "C", - fill_value: Optional[Any] = None, + fill_value: Any | None = None, ) -> Self: """Create a new buffer and its underlying ndarray-like object @@ -380,7 +378,7 @@ def dtype(self) -> np.dtype[Any]: return self._data.dtype @property - def shape(self) -> Tuple[int, ...]: + def shape(self) -> tuple[int, ...]: return self._data.shape @property diff --git a/src/zarr/chunk_grids.py b/src/zarr/chunk_grids.py index 16c0df9174..45f77cc99c 100644 --- a/src/zarr/chunk_grids.py +++ b/src/zarr/chunk_grids.py @@ -1,9 +1,11 @@ from __future__ import annotations + import itertools 
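
The pyupgrade-driven rewrite of `Buffer.__eq__` above relies on Python 3.10+, where `isinstance()` accepts PEP 604 unions directly. A quick standalone check of the equivalence:

    data = bytearray(b"abc")

    # Since Python 3.10, isinstance() accepts X | Y unions directly,
    # so the two spellings below are equivalent.
    assert isinstance(data, bytes | bytearray)
    assert isinstance(data, (bytes, bytearray))
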
-from typing import TYPE_CHECKING, Any, Dict, Iterator +from collections.abc import Iterator from dataclasses import dataclass -from zarr.abc.metadata import Metadata +from typing import TYPE_CHECKING, Any +from zarr.abc.metadata import Metadata from zarr.common import ( JSON, ChunkCoords, @@ -20,7 +22,7 @@ @dataclass(frozen=True) class ChunkGrid(Metadata): @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> ChunkGrid: + def from_dict(cls, data: dict[str, JSON]) -> ChunkGrid: if isinstance(data, ChunkGrid): return data @@ -43,15 +45,15 @@ def __init__(self, *, chunk_shape: ChunkCoordsLike) -> None: object.__setattr__(self, "chunk_shape", chunk_shape_parsed) @classmethod - def from_dict(cls, data: Dict[str, Any]) -> Self: + def from_dict(cls, data: dict[str, Any]) -> Self: _, configuration_parsed = parse_named_configuration(data, "regular") return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return {"name": "regular", "configuration": {"chunk_shape": list(self.chunk_shape)}} def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]: return itertools.product( - *(range(0, _ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape)) + *(range(0, _ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape, strict=False)) ) diff --git a/src/zarr/chunk_key_encodings.py b/src/zarr/chunk_key_encodings.py index ebc7654dde..5ecb98ef61 100644 --- a/src/zarr/chunk_key_encodings.py +++ b/src/zarr/chunk_key_encodings.py @@ -1,9 +1,10 @@ from __future__ import annotations + from abc import abstractmethod -from typing import TYPE_CHECKING, Dict, Literal, cast from dataclasses import dataclass -from zarr.abc.metadata import Metadata +from typing import TYPE_CHECKING, Literal, cast +from zarr.abc.metadata import Metadata from zarr.common import ( JSON, ChunkCoords, @@ -33,7 +34,7 @@ def __init__(self, *, separator: SeparatorLiteral) -> None: object.__setattr__(self, "separator", separator_parsed) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> ChunkKeyEncoding: + def from_dict(cls, data: dict[str, JSON]) -> ChunkKeyEncoding: if isinstance(data, ChunkKeyEncoding): return data @@ -44,7 +45,7 @@ def from_dict(cls, data: Dict[str, JSON]) -> ChunkKeyEncoding: return V2ChunkKeyEncoding(**configuration_parsed) # type: ignore[arg-type] raise ValueError(f"Unknown chunk key encoding. 
Got {name_parsed}.") - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return {"name": self.name, "configuration": {"separator": self.separator}} @abstractmethod diff --git a/src/zarr/codecs/__init__.py b/src/zarr/codecs/__init__.py index 959a85af57..0f0ff55df5 100644 --- a/src/zarr/codecs/__init__.py +++ b/src/zarr/codecs/__init__.py @@ -1,10 +1,10 @@ from __future__ import annotations -from zarr.codecs.blosc import BloscCodec, BloscCname, BloscShuffle # noqa: F401 +from zarr.codecs.blosc import BloscCname, BloscCodec, BloscShuffle # noqa: F401 from zarr.codecs.bytes import BytesCodec, Endian # noqa: F401 from zarr.codecs.crc32c_ import Crc32cCodec # noqa: F401 from zarr.codecs.gzip import GzipCodec # noqa: F401 +from zarr.codecs.pipeline import BatchedCodecPipeline # noqa: F401 from zarr.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation # noqa: F401 from zarr.codecs.transpose import TransposeCodec # noqa: F401 from zarr.codecs.zstd import ZstdCodec # noqa: F401 -from zarr.codecs.pipeline import BatchedCodecPipeline # noqa: F401 diff --git a/src/zarr/codecs/_v2.py b/src/zarr/codecs/_v2.py index fb7122600f..06bd866c0f 100644 --- a/src/zarr/codecs/_v2.py +++ b/src/zarr/codecs/_v2.py @@ -2,13 +2,13 @@ from dataclasses import dataclass +import numcodecs +from numcodecs.compat import ensure_bytes, ensure_ndarray + from zarr.buffer import Buffer, NDBuffer from zarr.codecs.mixins import ArrayArrayCodecBatchMixin, ArrayBytesCodecBatchMixin from zarr.common import JSON, ArraySpec, to_thread -import numcodecs -from numcodecs.compat import ensure_bytes, ensure_ndarray - @dataclass(frozen=True) class V2Compressor(ArrayBytesCodecBatchMixin): diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index ab3ffab479..24fac962db 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -1,21 +1,21 @@ from __future__ import annotations + from dataclasses import dataclass, replace from enum import Enum from functools import cached_property - -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING import numcodecs from numcodecs.blosc import Blosc -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer, as_numpy_array_wrapper +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration, to_thread if TYPE_CHECKING: - from typing import Dict, Optional from typing_extensions import Self + from zarr.common import JSON, ArraySpec @@ -86,10 +86,10 @@ class BloscCodec(BytesBytesCodecBatchMixin): def __init__( self, *, - typesize: Optional[int] = None, - cname: Union[BloscCname, str] = BloscCname.zstd, + typesize: int | None = None, + cname: BloscCname | str = BloscCname.zstd, clevel: int = 5, - shuffle: Union[BloscShuffle, str, None] = None, + shuffle: BloscShuffle | str | None = None, blocksize: int = 0, ) -> None: typesize_parsed = parse_typesize(typesize) if typesize is not None else None @@ -105,11 +105,11 @@ def __init__( object.__setattr__(self, "blocksize", blocksize_parsed) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "blosc") return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: if self.typesize is None: raise ValueError("`typesize` needs to be set for serialization.") 
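
`BloscCodec` defers the actual compression to numcodecs, and its `to_dict` refuses to serialize until `typesize` is resolved (normally from the array dtype), because blosc's shuffle filter operates on elements of that byte width. A small, self-contained demonstration using the underlying numcodecs codec, not the new `BloscCodec` class:

    from numcodecs.blosc import Blosc

    # Byte shuffling regroups bytes by element width before compression,
    # which is why the element size must be known up front.
    codec = Blosc(cname="zstd", clevel=5, shuffle=Blosc.SHUFFLE)
    compressed = codec.encode(bytes(1024))
    print(len(compressed) < 1024)  # True for this highly compressible input
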
if self.shuffle is None: @@ -169,7 +169,7 @@ async def encode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, - ) -> Optional[Buffer]: + ) -> Buffer | None: # Since blosc only takes bytes, we convert the input and output of the encoding # between bytes and Buffer return await to_thread( diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index 6df78a08b8..a6045852e6 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -1,21 +1,22 @@ from __future__ import annotations + +import sys from dataclasses import dataclass, replace from enum import Enum -import sys - -from typing import TYPE_CHECKING, Dict, Optional, Union +from typing import TYPE_CHECKING import numpy as np -from zarr.codecs.mixins import ArrayBytesCodecBatchMixin from zarr.buffer import Buffer, NDBuffer +from zarr.codecs.mixins import ArrayBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration if TYPE_CHECKING: - from zarr.common import JSON, ArraySpec from typing_extensions import Self + from zarr.common import JSON, ArraySpec + class Endian(Enum): big = "big" @@ -29,22 +30,22 @@ class Endian(Enum): class BytesCodec(ArrayBytesCodecBatchMixin): is_fixed_size = True - endian: Optional[Endian] + endian: Endian | None - def __init__(self, *, endian: Union[Endian, str, None] = default_system_endian) -> None: + def __init__(self, *, endian: Endian | str | None = default_system_endian) -> None: endian_parsed = None if endian is None else parse_enum(endian, Endian) object.__setattr__(self, "endian", endian_parsed) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration( data, "bytes", require_configuration=False ) configuration_parsed = configuration_parsed or {} return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: if self.endian is None: return {"name": "bytes"} else: @@ -87,7 +88,7 @@ async def encode_single( self, chunk_array: NDBuffer, _chunk_spec: ArraySpec, - ) -> Optional[Buffer]: + ) -> Buffer | None: assert isinstance(chunk_array, NDBuffer) if chunk_array.dtype.itemsize > 1: if self.endian is not None and self.endian != chunk_array.byteorder: diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index ab4bad65fe..0b9c8c9a96 100644 --- a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -1,20 +1,19 @@ from __future__ import annotations -from dataclasses import dataclass +from dataclasses import dataclass from typing import TYPE_CHECKING import numpy as np - from crc32c import crc32c -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration if TYPE_CHECKING: - from typing import Dict, Optional from typing_extensions import Self + from zarr.common import JSON, ArraySpec @@ -23,11 +22,11 @@ class Crc32cCodec(BytesBytesCodecBatchMixin): is_fixed_size = True @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: parse_named_configuration(data, "crc32c", require_configuration=False) return cls() - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return {"name": "crc32c"} async def decode_single( @@ -52,7 +51,7 @@ async 
def encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> Optional[Buffer]: + ) -> Buffer | None: data = chunk_bytes.as_numpy_array() # Calculate the checksum and "cast" it to a numpy array checksum = np.array([crc32c(data)], dtype=np.uint32) diff --git a/src/zarr/codecs/gzip.py b/src/zarr/codecs/gzip.py index 6a8e30db13..58c1fc6fec 100644 --- a/src/zarr/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -1,17 +1,18 @@ from __future__ import annotations -from dataclasses import dataclass +from dataclasses import dataclass from typing import TYPE_CHECKING from numcodecs.gzip import GZip -from zarr.codecs.mixins import BytesBytesCodecBatchMixin + from zarr.buffer import Buffer, as_numpy_array_wrapper +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread if TYPE_CHECKING: - from typing import Optional, Dict from typing_extensions import Self + from zarr.common import JSON, ArraySpec @@ -37,11 +38,11 @@ def __init__(self, *, level: int = 5) -> None: object.__setattr__(self, "level", level_parsed) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "gzip") return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return {"name": "gzip", "configuration": {"level": self.level}} async def decode_single( @@ -55,7 +56,7 @@ async def encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> Optional[Buffer]: + ) -> Buffer | None: return await to_thread(as_numpy_array_wrapper, GZip(self.level).encode, chunk_bytes) def compute_encoded_size( diff --git a/src/zarr/codecs/mixins.py b/src/zarr/codecs/mixins.py index 8b0a684509..b571fd35ee 100644 --- a/src/zarr/codecs/mixins.py +++ b/src/zarr/codecs/mixins.py @@ -1,8 +1,8 @@ from __future__ import annotations from abc import abstractmethod -from typing import Awaitable, Callable, Generic, Iterable, TypeVar - +from collections.abc import Awaitable, Callable, Iterable +from typing import Generic, TypeVar from zarr.abc.codec import ( ArrayArrayCodec, @@ -10,14 +10,13 @@ ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, ByteGetter, - ByteSetter, BytesBytesCodec, + ByteSetter, ) from zarr.buffer import Buffer, NDBuffer from zarr.common import ArraySpec, SliceSelection, concurrent_map from zarr.config import config - CodecInput = TypeVar("CodecInput", bound=NDBuffer | Buffer) CodecOutput = TypeVar("CodecOutput", bound=NDBuffer | Buffer) diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index 8396a0c2ce..57b4fa4668 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -1,30 +1,32 @@ from __future__ import annotations +from collections.abc import Iterable, Iterator +from dataclasses import dataclass from itertools import islice -from typing import TYPE_CHECKING, Iterator, TypeVar, Iterable +from typing import TYPE_CHECKING, TypeVar from warnings import warn -from dataclasses import dataclass -from zarr.config import config from zarr.abc.codec import ( - ByteGetter, - ByteSetter, - Codec, - CodecPipeline, ArrayArrayCodec, ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, + ByteGetter, BytesBytesCodec, + ByteSetter, + Codec, + CodecPipeline, ) from zarr.buffer import Buffer, NDBuffer from zarr.codecs.registry import 
get_codec_class from zarr.common import JSON, concurrent_map, parse_named_configuration +from zarr.config import config from zarr.indexing import is_total_slice from zarr.metadata import ArrayMetadata if TYPE_CHECKING: from typing_extensions import Self + from zarr.common import ArraySpec, SliceSelection T = TypeVar("T") @@ -180,13 +182,9 @@ def supports_partial_encode(self) -> bool: ) def __iter__(self) -> Iterator[Codec]: - for aa_codec in self.array_array_codecs: - yield aa_codec - + yield from self.array_array_codecs yield self.array_bytes_codec - - for bb_codec in self.bytes_bytes_codecs: - yield bb_codec + yield from self.bytes_bytes_codecs def validate(self, array_metadata: ArrayMetadata) -> None: for codec in self: @@ -237,13 +235,19 @@ async def decode_batch( ) = self._codecs_with_resolved_metadata_batched(chunk_specs) for bb_codec, chunk_spec_batch in bb_codecs_with_spec[::-1]: - chunk_bytes_batch = await bb_codec.decode(zip(chunk_bytes_batch, chunk_spec_batch)) + chunk_bytes_batch = await bb_codec.decode( + zip(chunk_bytes_batch, chunk_spec_batch, strict=False) + ) ab_codec, chunk_spec_batch = ab_codec_with_spec - chunk_array_batch = await ab_codec.decode(zip(chunk_bytes_batch, chunk_spec_batch)) + chunk_array_batch = await ab_codec.decode( + zip(chunk_bytes_batch, chunk_spec_batch, strict=False) + ) for aa_codec, chunk_spec_batch in aa_codecs_with_spec[::-1]: - chunk_array_batch = await aa_codec.decode(zip(chunk_array_batch, chunk_spec_batch)) + chunk_array_batch = await aa_codec.decode( + zip(chunk_array_batch, chunk_spec_batch, strict=False) + ) return chunk_array_batch @@ -264,14 +268,20 @@ async def encode_batch( chunk_array_batch, chunk_specs = _unzip2(chunk_arrays_and_specs) for aa_codec in self.array_array_codecs: - chunk_array_batch = await aa_codec.encode(zip(chunk_array_batch, chunk_specs)) + chunk_array_batch = await aa_codec.encode( + zip(chunk_array_batch, chunk_specs, strict=False) + ) chunk_specs = resolve_batched(aa_codec, chunk_specs) - chunk_bytes_batch = await self.array_bytes_codec.encode(zip(chunk_array_batch, chunk_specs)) + chunk_bytes_batch = await self.array_bytes_codec.encode( + zip(chunk_array_batch, chunk_specs, strict=False) + ) chunk_specs = resolve_batched(self.array_bytes_codec, chunk_specs) for bb_codec in self.bytes_bytes_codecs: - chunk_bytes_batch = await bb_codec.encode(zip(chunk_bytes_batch, chunk_specs)) + chunk_bytes_batch = await bb_codec.encode( + zip(chunk_bytes_batch, chunk_specs, strict=False) + ) chunk_specs = resolve_batched(bb_codec, chunk_specs) return chunk_bytes_batch @@ -297,7 +307,7 @@ async def read_batch( ] ) for chunk_array, (_, chunk_spec, _, out_selection) in zip( - chunk_array_batch, batch_info + chunk_array_batch, batch_info, strict=False ): if chunk_array is not None: out[out_selection] = chunk_array @@ -312,11 +322,13 @@ async def read_batch( chunk_array_batch = await self.decode_batch( [ (chunk_bytes, chunk_spec) - for chunk_bytes, (_, chunk_spec, _, _) in zip(chunk_bytes_batch, batch_info) + for chunk_bytes, (_, chunk_spec, _, _) in zip( + chunk_bytes_batch, batch_info, strict=False + ) ], ) for chunk_array, (_, chunk_spec, chunk_selection, out_selection) in zip( - chunk_array_batch, batch_info + chunk_array_batch, batch_info, strict=False ): if chunk_array is not None: tmp = chunk_array[chunk_selection] @@ -356,7 +368,9 @@ async def _read_key(byte_setter: ByteSetter | None) -> Buffer | None: chunk_array_batch = await self.decode_batch( [ (chunk_bytes, chunk_spec) - for chunk_bytes, (_, chunk_spec, _, _) in 
zip(chunk_bytes_batch, batch_info) + for chunk_bytes, (_, chunk_spec, _, _) in zip( + chunk_bytes_batch, batch_info, strict=False + ) ], ) @@ -383,7 +397,7 @@ def _merge_chunk_array( chunk_array_batch = [ _merge_chunk_array(chunk_array, value[out_selection], chunk_spec, chunk_selection) for chunk_array, (_, chunk_spec, chunk_selection, out_selection) in zip( - chunk_array_batch, batch_info + chunk_array_batch, batch_info, strict=False ) ] @@ -391,13 +405,17 @@ def _merge_chunk_array( None if chunk_array is None or chunk_array.all_equal(chunk_spec.fill_value) else chunk_array - for chunk_array, (_, chunk_spec, _, _) in zip(chunk_array_batch, batch_info) + for chunk_array, (_, chunk_spec, _, _) in zip( + chunk_array_batch, batch_info, strict=False + ) ] chunk_bytes_batch = await self.encode_batch( [ (chunk_array, chunk_spec) - for chunk_array, (_, chunk_spec, _, _) in zip(chunk_array_batch, batch_info) + for chunk_array, (_, chunk_spec, _, _) in zip( + chunk_array_batch, batch_info, strict=False + ) ], ) @@ -410,7 +428,9 @@ async def _write_key(byte_setter: ByteSetter, chunk_bytes: Buffer | None) -> Non await concurrent_map( [ (byte_setter, chunk_bytes) - for chunk_bytes, (byte_setter, _, _, _) in zip(chunk_bytes_batch, batch_info) + for chunk_bytes, (byte_setter, _, _, _) in zip( + chunk_bytes_batch, batch_info, strict=False + ) ], _write_key, config.get("async.concurrency"), diff --git a/src/zarr/codecs/registry.py b/src/zarr/codecs/registry.py index b981f1f36c..2f2b09499f 100644 --- a/src/zarr/codecs/registry.py +++ b/src/zarr/codecs/registry.py @@ -1,28 +1,29 @@ from __future__ import annotations + from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Dict, Type from zarr.abc.codec import Codec -from importlib.metadata import EntryPoint, entry_points as get_entry_points +from importlib.metadata import EntryPoint +from importlib.metadata import entry_points as get_entry_points -__codec_registry: Dict[str, Type[Codec]] = {} -__lazy_load_codecs: Dict[str, EntryPoint] = {} +__codec_registry: dict[str, type[Codec]] = {} +__lazy_load_codecs: dict[str, EntryPoint] = {} -def _collect_entrypoints() -> Dict[str, EntryPoint]: +def _collect_entrypoints() -> dict[str, EntryPoint]: entry_points = get_entry_points() for e in entry_points.select(group="zarr.codecs"): __lazy_load_codecs[e.name] = e return __lazy_load_codecs -def register_codec(key: str, codec_cls: Type[Codec]) -> None: +def register_codec(key: str, codec_cls: type[Codec]) -> None: __codec_registry[key] = codec_cls -def get_codec_class(key: str) -> Type[Codec]: +def get_codec_class(key: str) -> type[Codec]: item = __codec_registry.get(key) if item is None: if key in __lazy_load_codecs: diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index dd7cdcd0b4..a6c5bac6a7 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -1,12 +1,16 @@ from __future__ import annotations -from enum import Enum -from typing import TYPE_CHECKING, Iterable, Mapping, MutableMapping, NamedTuple, Tuple, Union + +from collections.abc import Iterable, Mapping, MutableMapping from dataclasses import dataclass, field, replace +from enum import Enum from functools import lru_cache - +from typing import TYPE_CHECKING, NamedTuple import numpy as np + from zarr.abc.codec import ByteGetter, ByteSetter, Codec, CodecPipeline +from zarr.buffer import Buffer, NDBuffer +from zarr.chunk_grids import RegularChunkGrid from zarr.codecs.bytes import BytesCodec from zarr.codecs.crc32c_ import Crc32cCodec from 
zarr.codecs.mixins import ( @@ -25,18 +29,18 @@ parse_shapelike, product, ) -from zarr.chunk_grids import RegularChunkGrid from zarr.indexing import ( BasicIndexer, c_order_iter, morton_order_iter, ) from zarr.metadata import ArrayMetadata, parse_codecs -from zarr.buffer import Buffer, NDBuffer if TYPE_CHECKING: - from typing import Awaitable, Callable, Dict, Iterator, Optional, Set + from collections.abc import Awaitable, Callable, Iterator + from typing_extensions import Self + from zarr.common import JSON, SliceSelection MAX_UINT_64 = 2**64 - 1 @@ -58,7 +62,7 @@ class _ShardingByteGetter(ByteGetter): shard_dict: ShardMapping chunk_coords: ChunkCoords - async def get(self, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> Optional[Buffer]: + async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: assert byte_range is None, "byte_range is not supported within shards" return self.shard_dict.get(self.chunk_coords) @@ -67,7 +71,7 @@ async def get(self, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> O class _ShardingByteSetter(_ShardingByteGetter, ByteSetter): shard_dict: ShardMutableMapping - async def set(self, value: Buffer, byte_range: Optional[Tuple[int, int]] = None) -> None: + async def set(self, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: assert byte_range is None, "byte_range is not supported within shards" self.shard_dict[self.chunk_coords] = value @@ -86,7 +90,7 @@ def chunks_per_shard(self) -> ChunkCoords: def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords: return tuple( chunk_i % shard_i - for chunk_i, shard_i in zip(chunk_coords, self.offsets_and_lengths.shape) + for chunk_i, shard_i in zip(chunk_coords, self.offsets_and_lengths.shape, strict=False) ) def is_all_empty(self) -> bool: @@ -95,7 +99,7 @@ def is_all_empty(self) -> bool: def get_full_chunk_map(self) -> np.ndarray: return self.offsets_and_lengths[..., 0] != MAX_UINT_64 - def get_chunk_slice(self, chunk_coords: ChunkCoords) -> Optional[Tuple[int, int]]: + def get_chunk_slice(self, chunk_coords: ChunkCoords) -> tuple[int, int] | None: localized_chunk = self._localize_chunk(chunk_coords) chunk_start, chunk_len = self.offsets_and_lengths[localized_chunk] if (chunk_start, chunk_len) == (MAX_UINT_64, MAX_UINT_64): @@ -103,7 +107,7 @@ def get_chunk_slice(self, chunk_coords: ChunkCoords) -> Optional[Tuple[int, int] else: return (int(chunk_start), int(chunk_start) + int(chunk_len)) - def set_chunk_slice(self, chunk_coords: ChunkCoords, chunk_slice: Optional[slice]) -> None: + def set_chunk_slice(self, chunk_coords: ChunkCoords, chunk_slice: slice | None) -> None: localized_chunk = self._localize_chunk(chunk_coords) if chunk_slice is None: self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64) @@ -192,7 +196,7 @@ class _ShardBuilder(_ShardReader, ShardMutableMapping): def merge_with_morton_order( cls, chunks_per_shard: ChunkCoords, - tombstones: Set[ChunkCoords], + tombstones: set[ChunkCoords], *shard_dicts: ShardMapping, ) -> _ShardBuilder: obj = cls.create_empty(chunks_per_shard) @@ -241,7 +245,7 @@ async def finalize( class _MergingShardBuilder(ShardMutableMapping): old_dict: _ShardReader new_dict: _ShardBuilder - tombstones: Set[ChunkCoords] = field(default_factory=set) + tombstones: set[ChunkCoords] = field(default_factory=set) def __getitem__(self, chunk_coords: ChunkCoords) -> Buffer: chunk_bytes_maybe = self.new_dict.get(chunk_coords) @@ -299,9 +303,9 @@ def __init__( self, *, chunk_shape: ChunkCoordsLike, - 
codecs: Optional[Iterable[Union[Codec, JSON]]] = None, - index_codecs: Optional[Iterable[Union[Codec, JSON]]] = None, - index_location: Optional[ShardingCodecIndexLocation] = ShardingCodecIndexLocation.end, + codecs: Iterable[Codec | JSON] | None = None, + index_codecs: Iterable[Codec | JSON] | None = None, + index_location: ShardingCodecIndexLocation | None = ShardingCodecIndexLocation.end, ) -> None: chunk_shape_parsed = parse_shapelike(chunk_shape) codecs_parsed = ( @@ -331,11 +335,11 @@ def __init__( object.__setattr__(self, "_get_chunks_per_shard", lru_cache()(self._get_chunks_per_shard)) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "sharding_indexed") return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return { "name": "sharding_indexed", "configuration": { @@ -366,6 +370,7 @@ def validate(self, array_metadata: ArrayMetadata) -> None: for s, c in zip( array_metadata.chunk_grid.chunk_shape, self.chunk_shape, + strict=False, ) ): raise ValueError( @@ -420,7 +425,7 @@ async def decode_partial_single( byte_getter: ByteGetter, selection: SliceSelection, shard_spec: ArraySpec, - ) -> Optional[NDBuffer]: + ) -> NDBuffer | None: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) @@ -480,7 +485,7 @@ async def encode_single( self, shard_array: NDBuffer, shard_spec: ArraySpec, - ) -> Optional[Buffer]: + ) -> Buffer | None: shard_shape = shard_spec.shape chunk_shape = self.chunk_shape chunks_per_shard = self._get_chunks_per_shard(shard_spec) @@ -561,7 +566,7 @@ async def encode_partial_single( ) def _is_total_shard( - self, all_chunk_coords: Set[ChunkCoords], chunks_per_shard: ChunkCoords + self, all_chunk_coords: set[ChunkCoords], chunks_per_shard: ChunkCoords ) -> bool: return len(all_chunk_coords) == product(chunks_per_shard) and all( chunk_coords in all_chunk_coords for chunk_coords in c_order_iter(chunks_per_shard) @@ -624,12 +629,13 @@ def _get_chunks_per_shard(self, shard_spec: ArraySpec) -> ChunkCoords: for s, c in zip( shard_spec.shape, self.chunk_shape, + strict=False, ) ) async def _load_shard_index_maybe( self, byte_getter: ByteGetter, chunks_per_shard: ChunkCoords - ) -> Optional[_ShardIndex]: + ) -> _ShardIndex | None: shard_index_size = self._shard_index_size(chunks_per_shard) if self.index_location == ShardingCodecIndexLocation.start: index_bytes = await byte_getter.get((0, shard_index_size)) @@ -648,7 +654,7 @@ async def _load_shard_index( async def _load_full_shard_maybe( self, byte_getter: ByteGetter, chunks_per_shard: ChunkCoords - ) -> Optional[_ShardReader]: + ) -> _ShardReader | None: shard_bytes = await byte_getter.get() return ( diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 5d4d2a7b84..774393464c 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -1,19 +1,21 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Dict, Iterable, Union, cast +from collections.abc import Iterable from dataclasses import dataclass, replace +from typing import TYPE_CHECKING, cast -from zarr.codecs.mixins import ArrayArrayCodecBatchMixin from zarr.buffer import NDBuffer -from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration +from zarr.codecs.mixins import ArrayArrayCodecBatchMixin from 
zarr.codecs.registry import register_codec +from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration if TYPE_CHECKING: - from typing import TYPE_CHECKING, Optional, Tuple + from typing import TYPE_CHECKING + from typing_extensions import Self -def parse_transpose_order(data: Union[JSON, Iterable[int]]) -> Tuple[int, ...]: +def parse_transpose_order(data: JSON | Iterable[int]) -> tuple[int, ...]: if not isinstance(data, Iterable): raise TypeError(f"Expected an iterable. Got {data} instead.") if not all(isinstance(a, int) for a in data): @@ -25,7 +27,7 @@ def parse_transpose_order(data: Union[JSON, Iterable[int]]) -> Tuple[int, ...]: class TransposeCodec(ArrayArrayCodecBatchMixin): is_fixed_size = True - order: Tuple[int, ...] + order: tuple[int, ...] def __init__(self, *, order: ChunkCoordsLike) -> None: order_parsed = parse_transpose_order(order) @@ -33,11 +35,11 @@ def __init__(self, *, order: ChunkCoordsLike) -> None: object.__setattr__(self, "order", order_parsed) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "transpose") return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return {"name": "transpose", "configuration": {"order": list(self.order)}} def evolve(self, array_spec: ArraySpec) -> Self: @@ -86,7 +88,7 @@ async def encode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, - ) -> Optional[NDBuffer]: + ) -> NDBuffer | None: chunk_array = chunk_array.transpose(self.order) return chunk_array diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index 4422188d25..3c6aac4ce3 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -1,18 +1,18 @@ from __future__ import annotations -from typing import TYPE_CHECKING -from dataclasses import dataclass +from dataclasses import dataclass +from typing import TYPE_CHECKING from zstandard import ZstdCompressor, ZstdDecompressor -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.buffer import Buffer, as_numpy_array_wrapper +from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread if TYPE_CHECKING: - from typing import Dict, Optional from typing_extensions import Self + from zarr.common import JSON, ArraySpec @@ -45,11 +45,11 @@ def __init__(self, *, level: int = 0, checksum: bool = False) -> None: object.__setattr__(self, "checksum", checksum_parsed) @classmethod - def from_dict(cls, data: Dict[str, JSON]) -> Self: + def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "zstd") return cls(**configuration_parsed) # type: ignore[arg-type] - def to_dict(self) -> Dict[str, JSON]: + def to_dict(self) -> dict[str, JSON]: return {"name": "zstd", "configuration": {"level": self.level, "checksum": self.checksum}} def _compress(self, data: bytes) -> bytes: @@ -71,7 +71,7 @@ async def encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, - ) -> Optional[Buffer]: + ) -> Buffer | None: return await to_thread(as_numpy_array_wrapper, self._compress, chunk_bytes) def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: diff --git a/src/zarr/common.py b/src/zarr/common.py index 3ef847a1f3..5781cc423b 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -1,23 +1,15 
@@ from __future__ import annotations -from typing import ( - TYPE_CHECKING, - ParamSpec, - Literal, - Union, - Tuple, - Iterable, - TypeVar, - overload, - Any, -) + import asyncio import contextvars +import functools +from collections.abc import Iterable from dataclasses import dataclass from enum import Enum -import functools +from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypeVar, overload if TYPE_CHECKING: - from typing import Awaitable, Callable, Iterator, Optional, Type + from collections.abc import Awaitable, Callable, Iterator import numpy as np @@ -32,7 +24,7 @@ SliceSelection = tuple[slice, ...] Selection = slice | SliceSelection ZarrFormat = Literal[2, 3] -JSON = Union[str, None, int, float, Enum, dict[str, "JSON"], list["JSON"], tuple["JSON", ...]] +JSON = None | str | int | float | Enum | dict[str, "JSON"] | list["JSON"] | tuple["JSON", ...] def product(tup: ChunkCoords) -> int: @@ -44,7 +36,7 @@ def product(tup: ChunkCoords) -> int: async def concurrent_map( - items: list[T], func: Callable[..., Awaitable[V]], limit: Optional[int] = None + items: list[T], func: Callable[..., Awaitable[V]], limit: int | None = None ) -> list[V]: if limit is None: return await asyncio.gather(*[func(*item) for item in items]) @@ -52,7 +44,7 @@ async def concurrent_map( else: sem = asyncio.Semaphore(limit) - async def run(item: Tuple[Any]) -> V: + async def run(item: tuple[Any]) -> V: async with sem: return await func(*item) @@ -73,12 +65,12 @@ async def to_thread(func: Callable[P, U], /, *args: P.args, **kwargs: P.kwargs) E = TypeVar("E", bound=Enum) -def enum_names(enum: Type[E]) -> Iterator[str]: +def enum_names(enum: type[E]) -> Iterator[str]: for item in enum: yield item.name -def parse_enum(data: JSON, cls: Type[E]) -> E: +def parse_enum(data: JSON, cls: type[E]) -> E: if isinstance(data, cls): return data if not isinstance(data, str): @@ -113,7 +105,7 @@ def ndim(self) -> int: return len(self.shape) -def parse_name(data: JSON, expected: Optional[str] = None) -> str: +def parse_name(data: JSON, expected: str | None = None) -> str: if isinstance(data, str): if expected is None or data == expected: return data @@ -130,19 +122,19 @@ def parse_configuration(data: JSON) -> JSON: @overload def parse_named_configuration( - data: JSON, expected_name: Optional[str] = None + data: JSON, expected_name: str | None = None ) -> tuple[str, dict[str, JSON]]: ... @overload def parse_named_configuration( - data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True -) -> tuple[str, Optional[dict[str, JSON]]]: ... + data: JSON, expected_name: str | None = None, *, require_configuration: bool = True +) -> tuple[str, dict[str, JSON] | None]: ... 
def parse_named_configuration( - data: JSON, expected_name: Optional[str] = None, *, require_configuration: bool = True -) -> tuple[str, Optional[JSON]]: + data: JSON, expected_name: str | None = None, *, require_configuration: bool = True +) -> tuple[str, JSON | None]: if not isinstance(data, dict): raise TypeError(f"Expected dict, got {type(data)}") if "name" not in data: diff --git a/src/zarr/fixture/.zgroup b/src/zarr/fixture/.zgroup new file mode 100644 index 0000000000..3b7daf227c --- /dev/null +++ b/src/zarr/fixture/.zgroup @@ -0,0 +1,3 @@ +{ + "zarr_format": 2 +} \ No newline at end of file diff --git a/src/zarr/fixture/flat/.zarray b/src/zarr/fixture/flat/.zarray new file mode 100644 index 0000000000..d1acce7665 --- /dev/null +++ b/src/zarr/fixture/flat/.zarray @@ -0,0 +1,23 @@ +{ + "chunks": [ + 2, + 2 + ], + "compressor": { + "blocksize": 0, + "clevel": 5, + "cname": "lz4", + "id": "blosc", + "shuffle": 1 + }, + "dimension_separator": ".", + "dtype": " Array | Group: elif isinstance(node, AsyncGroup): return Group(node) else: - assert False + raise TypeError(f"Unknown node type, got {type(node)}") @dataclass(frozen=True) @@ -114,7 +115,7 @@ async def create( cls, store: StoreLike, *, - attributes: dict[str, Any] = {}, + attributes: dict[str, Any] = {}, # noqa: B006, FIXME exists_ok: bool = False, zarr_format: ZarrFormat = 3, ) -> AsyncGroup: @@ -278,7 +279,10 @@ def info(self): return self.metadata.info async def create_group( - self, path: str, exists_ok: bool = False, attributes: dict[str, Any] = {} + self, + path: str, + exists_ok: bool = False, + attributes: dict[str, Any] = {}, # noqa: B006, FIXME ) -> AsyncGroup: return await type(self).create( self.store_path / path, @@ -332,7 +336,7 @@ async def create_array( zarr_format=self.metadata.zarr_format, ) - async def update_attributes(self, new_attributes: dict[str, Any]) -> "AsyncGroup": + async def update_attributes(self, new_attributes: dict[str, Any]) -> AsyncGroup: # metadata.attributes is "frozen" so we simply clear and update the dict self.metadata.attributes.clear() self.metadata.attributes.update(new_attributes) @@ -401,7 +405,7 @@ async def group_keys(self) -> AsyncGenerator[str, None]: # todo: decide if this method should be separate from `group_keys` async def groups(self) -> AsyncGenerator[AsyncGroup, None]: - async for key, value in self.members(): + async for _, value in self.members(): if isinstance(value, AsyncGroup): yield value @@ -413,7 +417,7 @@ async def array_keys(self) -> AsyncGenerator[str, None]: # todo: decide if this method should be separate from `array_keys` async def arrays(self) -> AsyncGenerator[AsyncArray, None]: - async for key, value in self.members(): + async for _, value in self.members(): if isinstance(value, AsyncArray): yield value @@ -457,7 +461,7 @@ def create( cls, store: StoreLike, *, - attributes: dict[str, Any] = {}, + attributes: dict[str, Any] = {}, # noqa: B006, FIXME exists_ok: bool = False, ) -> Group: obj = sync( @@ -525,7 +529,7 @@ def attrs(self) -> Attributes: def info(self): return self._async_group.info - def update_attributes(self, new_attributes: dict[str, Any]) -> "Group": + def update_attributes(self, new_attributes: dict[str, Any]) -> Group: self._sync(self._async_group.update_attributes(new_attributes)) return self diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 8e7cd95430..45413bc5b2 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -2,7 +2,8 @@ import itertools import math -from typing import TYPE_CHECKING, Iterator, List, 
NamedTuple, Optional, Tuple +from collections.abc import Iterator +from typing import TYPE_CHECKING, NamedTuple from zarr.common import ChunkCoords, Selection, SliceSelection, product @@ -17,9 +18,7 @@ def _ensure_tuple(v: Selection) -> SliceSelection: def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords) -> None: - raise IndexError( - "too many indices for array; expected {}, got {}".format(len(shape), len(selection)) - ) + raise IndexError(f"too many indices for array; expected {len(shape)}, got {len(selection)}") def _err_negative_step() -> None: @@ -50,7 +49,7 @@ def _ensure_selection( class _ChunkDimProjection(NamedTuple): dim_chunk_ix: int dim_chunk_sel: slice - dim_out_sel: Optional[slice] + dim_out_sel: slice | None def _ceildiv(a: float, b: float) -> int: @@ -127,13 +126,13 @@ class _ChunkProjection(NamedTuple): class BasicIndexer: - dim_indexers: List[_SliceDimIndexer] + dim_indexers: list[_SliceDimIndexer] shape: ChunkCoords def __init__( self, selection: Selection, - shape: Tuple[int, ...], + shape: tuple[int, ...], chunk_grid: ChunkGrid, ): from zarr.chunk_grids import RegularChunkGrid @@ -145,7 +144,7 @@ def __init__( self.dim_indexers = [ _SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) for dim_sel, dim_len, dim_chunk_len in zip( - _ensure_selection(selection, shape), shape, chunk_grid.chunk_shape + _ensure_selection(selection, shape), shape, chunk_grid.chunk_shape, strict=False ) ] self.shape = tuple(s.nitems for s in self.dim_indexers) @@ -206,7 +205,7 @@ def is_total_slice(item: Selection, shape: ChunkCoords) -> bool: or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) ) ) - for dim_sel, dim_len in zip(item, shape) + for dim_sel, dim_len in zip(item, shape, strict=False) ) else: - raise TypeError("expected slice or tuple of slices, found %r" % item) + raise TypeError(f"expected slice or tuple of slices, found {item!r}") diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 695d83da55..8db8c8033e 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -1,9 +1,12 @@ from __future__ import annotations + +import json from abc import ABC, abstractmethod -from enum import Enum -from typing import TYPE_CHECKING, Any, cast, Iterable +from collections.abc import Iterable from dataclasses import dataclass, field, replace -import json +from enum import Enum +from typing import TYPE_CHECKING, Any, cast + import numpy as np import numpy.typing as npt @@ -14,9 +17,9 @@ from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator from zarr.codecs._v2 import V2Compressor, V2Filters - if TYPE_CHECKING: from typing import Literal + from typing_extensions import Self @@ -33,7 +36,6 @@ ) from zarr.config import parse_indexing_order - # For type checking _bool = bool diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index c6ffbc6c05..31cce65095 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Optional, Tuple, Union +from typing import Any from zarr.abc.store import Store from zarr.buffer import Buffer @@ -21,14 +21,14 @@ class StorePath: store: Store path: str - def __init__(self, store: Store, path: Optional[str] = None): + def __init__(self, store: Store, path: str | None = None): self.store = store self.path = path or "" - async def get(self, byte_range: Optional[Tuple[int, Optional[int]]] = None) -> Optional[Buffer]: + async def get(self, byte_range: tuple[int, int | None] | None = None) -> 
Buffer | None: return await self.store.get(self.path, byte_range) - async def set(self, value: Buffer, byte_range: Optional[Tuple[int, int]] = None) -> None: + async def set(self, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: if byte_range is not None: raise NotImplementedError("Store.set does not have partial writes yet") await self.store.set(self.path, value) @@ -57,7 +57,7 @@ def __eq__(self, other: Any) -> bool: return False -StoreLike = Union[Store, StorePath, Path, str] +StoreLike = Store | StorePath | Path | str def make_store_path(store_like: StoreLike) -> StorePath: diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index f27b832a39..60d0022f94 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -122,7 +122,7 @@ async def get_partial_values( async def set(self, key: str, value: Buffer) -> None: assert isinstance(key, str) - if isinstance(value, (bytes, bytearray)): + if isinstance(value, bytes | bytearray): # TODO: to support the v2 tests, we convert bytes to Buffer here value = Buffer.from_bytes(value) if not isinstance(value, Buffer): diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index c053f941ef..c6e838417e 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -1,11 +1,10 @@ from __future__ import annotations -from collections.abc import AsyncGenerator -from typing import Optional, MutableMapping, List, Tuple +from collections.abc import AsyncGenerator, MutableMapping -from zarr.common import concurrent_map from zarr.abc.store import Store from zarr.buffer import Buffer +from zarr.common import concurrent_map # TODO: this store could easily be extended to wrap any MutableMapping store from v2 @@ -17,7 +16,7 @@ class MemoryStore(Store): _store_dict: MutableMapping[str, Buffer] - def __init__(self, store_dict: Optional[MutableMapping[str, Buffer]] = None): + def __init__(self, store_dict: MutableMapping[str, Buffer] | None = None): self._store_dict = store_dict or {} def __str__(self) -> str: @@ -27,8 +26,8 @@ def __repr__(self) -> str: return f"MemoryStore({str(self)!r})" async def get( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[Buffer]: + self, key: str, byte_range: tuple[int, int | None] | None = None + ) -> Buffer | None: assert isinstance(key, str) try: value = self._store_dict[key] @@ -39,19 +38,17 @@ async def get( return None async def get_partial_values( - self, key_ranges: List[Tuple[str, Tuple[int, int]]] - ) -> List[Optional[Buffer]]: + self, key_ranges: list[tuple[str, tuple[int, int]]] + ) -> list[Buffer | None]: vals = await concurrent_map(key_ranges, self.get, limit=None) return vals async def exists(self, key: str) -> bool: return key in self._store_dict - async def set( - self, key: str, value: Buffer, byte_range: Optional[Tuple[int, int]] = None - ) -> None: + async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: assert isinstance(key, str) - if isinstance(value, (bytes, bytearray)): + if isinstance(value, bytes | bytearray): # TODO: to support the v2 tests, we convert bytes to Buffer here value = Buffer.from_bytes(value) if not isinstance(value, Buffer): @@ -70,7 +67,7 @@ async def delete(self, key: str) -> None: except KeyError: pass # Q(JH): why not raise? 
- async def set_partial_values(self, key_start_values: List[Tuple[str, int, bytes]]) -> None: + async def set_partial_values(self, key_start_values: list[tuple[str, int, bytes]]) -> None: raise NotImplementedError async def list(self) -> AsyncGenerator[str, None]: diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 35fd2d60b6..2986133fbd 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -1,15 +1,14 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any from zarr.abc.store import Store from zarr.buffer import Buffer from zarr.store.core import _dereference_path - if TYPE_CHECKING: - from upath import UPath from fsspec.asyn import AsyncFileSystem + from upath import UPath class RemoteStore(Store): @@ -19,9 +18,9 @@ class RemoteStore(Store): root: UPath - def __init__(self, url: Union[UPath, str], **storage_options: Dict[str, Any]): - from upath import UPath + def __init__(self, url: UPath | str, **storage_options: dict[str, Any]): import fsspec + from upath import UPath if isinstance(url, str): self.root = UPath(url, **storage_options) @@ -41,7 +40,7 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"RemoteStore({str(self)!r})" - def _make_fs(self) -> Tuple[AsyncFileSystem, str]: + def _make_fs(self) -> tuple[AsyncFileSystem, str]: import fsspec storage_options = self.root._kwargs.copy() @@ -51,8 +50,8 @@ def _make_fs(self) -> Tuple[AsyncFileSystem, str]: return fs, root async def get( - self, key: str, byte_range: Optional[Tuple[int, Optional[int]]] = None - ) -> Optional[Buffer]: + self, key: str, byte_range: tuple[int, int | None] | None = None + ) -> Buffer | None: assert isinstance(key, str) fs, root = self._make_fs() path = _dereference_path(root, key) @@ -68,9 +67,7 @@ async def get( return value - async def set( - self, key: str, value: Buffer, byte_range: Optional[Tuple[int, int]] = None - ) -> None: + async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: assert isinstance(key, str) fs, root = self._make_fs() path = _dereference_path(root, key) diff --git a/src/zarr/sync.py b/src/zarr/sync.py index ea765077ce..8af14f602e 100644 --- a/src/zarr/sync.py +++ b/src/zarr/sync.py @@ -1,12 +1,14 @@ from __future__ import annotations + from typing import TYPE_CHECKING, TypeVar if TYPE_CHECKING: - from typing import Any, AsyncIterator, Coroutine + from collections.abc import AsyncIterator, Coroutine + from typing import Any import asyncio -from concurrent.futures import wait import threading +from concurrent.futures import wait from typing_extensions import ParamSpec diff --git a/src/zarr/testing/__init__.py b/src/zarr/testing/__init__.py index 9b622b43cd..35b91f9167 100644 --- a/src/zarr/testing/__init__.py +++ b/src/zarr/testing/__init__.py @@ -4,6 +4,6 @@ if importlib.util.find_spec("pytest") is not None: from zarr.testing.store import StoreTests else: - warnings.warn("pytest not installed, skipping test suite") + warnings.warn("pytest not installed, skipping test suite", stacklevel=2) __all__ = ["StoreTests"] diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py index 3588048906..b6a121520d 100644 --- a/tests/v3/conftest.py +++ b/tests/v3/conftest.py @@ -1,4 +1,5 @@ from __future__ import annotations + from typing import TYPE_CHECKING from zarr.common import ZarrFormat @@ -6,12 +7,12 @@ if TYPE_CHECKING: from typing import Any, Literal -from dataclasses import dataclass, field import pathlib +from 
dataclasses import dataclass, field import pytest -from zarr.store import LocalStore, StorePath, MemoryStore +from zarr.store import LocalStore, MemoryStore, StorePath from zarr.store.remote import RemoteStore @@ -24,7 +25,7 @@ def parse_store( return MemoryStore() if store == "remote": return RemoteStore() - assert False + raise AssertionError() @pytest.fixture(params=[str, pathlib.Path]) diff --git a/tests/v3/package_with_entrypoint/__init__.py b/tests/v3/package_with_entrypoint/__init__.py index 8b31733069..b8bf903c01 100644 --- a/tests/v3/package_with_entrypoint/__init__.py +++ b/tests/v3/package_with_entrypoint/__init__.py @@ -1,4 +1,5 @@ from numpy import ndarray + from zarr.abc.codec import ArrayBytesCodec from zarr.common import ArraySpec, BytesLike diff --git a/tests/v3/test_buffer.py b/tests/v3/test_buffer.py index a56c768782..4ab92768b4 100644 --- a/tests/v3/test_buffer.py +++ b/tests/v3/test_buffer.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Iterable, Literal, Optional +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Literal import numpy as np import numpy.typing as npt @@ -31,7 +32,7 @@ def create( shape: Iterable[int], dtype: npt.DTypeLike, order: Literal["C", "F"] = "C", - fill_value: Optional[Any] = None, + fill_value: Any | None = None, ) -> Self: """Overwrite `NDBuffer.create` to create an MyNDArrayLike instance""" ret = cls(MyNDArrayLike(shape=shape, dtype=dtype, order=order)) diff --git a/tests/v3/test_codec_entrypoints.py b/tests/v3/test_codec_entrypoints.py index 8fbf76b83d..6b5c221f4d 100644 --- a/tests/v3/test_codec_entrypoints.py +++ b/tests/v3/test_codec_entrypoints.py @@ -5,7 +5,6 @@ import zarr.codecs.registry - here = os.path.abspath(os.path.dirname(__file__)) diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index 73553b5565..5f94114ede 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -1,29 +1,29 @@ from __future__ import annotations -from dataclasses import dataclass import json -from typing import Iterator, List, Literal, Optional, Tuple - +from collections.abc import Iterator +from dataclasses import dataclass +from typing import Literal import numpy as np import pytest + import zarr.v2 from zarr.abc.codec import Codec +from zarr.abc.store import Store from zarr.array import Array, AsyncArray -from zarr.common import Selection -from zarr.indexing import morton_order_iter from zarr.codecs import ( - ShardingCodec, - ShardingCodecIndexLocation, BloscCodec, BytesCodec, GzipCodec, + ShardingCodec, + ShardingCodecIndexLocation, TransposeCodec, ZstdCodec, ) - -from zarr.abc.store import Store +from zarr.common import Selection from zarr.config import config +from zarr.indexing import morton_order_iter from zarr.store import MemoryStore, StorePath @@ -57,7 +57,7 @@ def sample_data() -> np.ndarray: return np.arange(0, 128 * 128 * 128, dtype="uint16").reshape((128, 128, 128), order="F") -def order_from_dim(order: Literal["F", "C"], ndim: int) -> Tuple[int, ...]: +def order_from_dim(order: Literal["F", "C"], ndim: int) -> tuple[int, ...]: if order == "F": return tuple(ndim - x - 1 for x in range(ndim)) else: @@ -243,7 +243,7 @@ async def test_order( ): data = np.arange(0, 256, dtype="uint16").reshape((32, 8), order=input_order) - codecs_: List[Codec] = ( + codecs_: list[Codec] = ( [ ShardingCodec( chunk_shape=(16, 8), @@ -310,7 +310,7 @@ def test_order_implicit( ): data = np.arange(0, 256, dtype="uint16").reshape((16, 16), order=input_order) - codecs_: 
Optional[List[Codec]] = [ShardingCodec(chunk_shape=(8, 8))] if with_sharding else None + codecs_: list[Codec] | None = [ShardingCodec(chunk_shape=(8, 8))] if with_sharding else None with config.set({"array.order": runtime_write_order}): a = Array.create( @@ -352,7 +352,7 @@ async def test_transpose( ): data = np.arange(0, 256, dtype="uint16").reshape((1, 32, 8), order=input_order) - codecs_: List[Codec] = ( + codecs_: list[Codec] = ( [ ShardingCodec( chunk_shape=(1, 16, 8), diff --git a/tests/v3/test_common.py b/tests/v3/test_common.py index b5690d0d7e..3bdbd2bffe 100644 --- a/tests/v3/test_common.py +++ b/tests/v3/test_common.py @@ -1,18 +1,20 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterable + +from collections.abc import Iterable +from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Literal, Any, Tuple + from typing import Any, Literal import numpy as np -from zarr.config import parse_indexing_order -from zarr.common import parse_shapelike -from zarr.common import parse_name, product import pytest +from zarr.common import parse_name, parse_shapelike, product +from zarr.config import parse_indexing_order + @pytest.mark.parametrize("data", [(0, 0, 0, 0), (1, 3, 4, 5, 6), (2, 4)]) -def test_product(data: Tuple[int, ...]): +def test_product(data: tuple[int, ...]): assert product(data) == np.prod(data) @@ -33,7 +35,7 @@ def test_parse_enum(): ... @pytest.mark.parametrize("data", [("foo", "bar"), (10, 11)]) -def test_parse_name_invalid(data: Tuple[Any, Any]): +def test_parse_name_invalid(data: tuple[Any, Any]): observed, expected = data if isinstance(observed, str): with pytest.raises(ValueError, match=f"Expected '{expected}'. Got {observed} instead."): @@ -46,7 +48,7 @@ def test_parse_name_invalid(data: Tuple[Any, Any]): @pytest.mark.parametrize("data", [("foo", "foo"), ("10", "10")]) -def test_parse_name_valid(data: Tuple[Any, Any]): +def test_parse_name_valid(data: tuple[Any, Any]): observed, expected = data assert parse_name(observed, expected) == observed @@ -83,7 +85,7 @@ def test_parse_shapelike_valid(data: Iterable[Any]): # todo: more dtypes @pytest.mark.parametrize("data", [("uint8", np.uint8), ("float64", np.float64)]) -def parse_dtype(data: Tuple[str, np.dtype]): +def parse_dtype(data: tuple[str, np.dtype]): unparsed, parsed = data assert parse_dtype(unparsed) == parsed diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 5a6751c11a..771baddc0b 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -1,17 +1,18 @@ from __future__ import annotations + from typing import TYPE_CHECKING, Any -from zarr.buffer import Buffer -from zarr.sync import sync from zarr.array import AsyncArray +from zarr.buffer import Buffer from zarr.store.core import make_store_path +from zarr.sync import sync if TYPE_CHECKING: - from zarr.store import MemoryStore, LocalStore from zarr.common import ZarrFormat + from zarr.store import LocalStore, MemoryStore -import pytest import numpy as np +import pytest from zarr.group import AsyncGroup, Group, GroupMetadata from zarr.store import StorePath @@ -202,7 +203,7 @@ async def test_asyncgroup_open_wrong_format( elif zarr_format == 2: zarr_format_wrong = 3 else: - assert False + raise AssertionError() with pytest.raises(FileNotFoundError): await AsyncGroup.open(store=store, zarr_format=zarr_format_wrong) @@ -277,7 +278,7 @@ async def test_asyncgroup_delitem(store: LocalStore | MemoryStore, zarr_format: elif zarr_format == 3: assert not await agroup.store_path.store.exists(sub_array_path + 
"/" + "zarr.json") else: - assert False + raise AssertionError() sub_group_path = "sub_group" _ = await agroup.create_group(sub_group_path, attributes={"foo": 100}) @@ -288,7 +289,7 @@ async def test_asyncgroup_delitem(store: LocalStore | MemoryStore, zarr_format: elif zarr_format == 3: assert not await agroup.store_path.store.exists(sub_array_path + "/" + "zarr.json") else: - assert False + raise AssertionError() @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) diff --git a/tests/v3/test_metadata.py b/tests/v3/test_metadata.py index 33df4a6438..65297c52d8 100644 --- a/tests/v3/test_metadata.py +++ b/tests/v3/test_metadata.py @@ -1,9 +1,12 @@ from __future__ import annotations -import pytest + from typing import TYPE_CHECKING +import pytest + if TYPE_CHECKING: - from typing import Sequence, Any + from collections.abc import Sequence + from typing import Any from zarr.metadata import parse_dimension_names, parse_zarr_format_v2, parse_zarr_format_v3 diff --git a/tests/v3/test_store.py b/tests/v3/test_store.py index e514d505ce..f7ba46aa33 100644 --- a/tests/v3/test_store.py +++ b/tests/v3/test_store.py @@ -1,10 +1,12 @@ from __future__ import annotations -from zarr.store.local import LocalStore + from pathlib import Path + import pytest -from zarr.testing.store import StoreTests +from zarr.store.local import LocalStore from zarr.store.memory import MemoryStore +from zarr.testing.store import StoreTests @pytest.mark.parametrize("auto_mkdir", (True, False)) diff --git a/tests/v3/test_sync.py b/tests/v3/test_sync.py index ba262f521d..5b953573d8 100644 --- a/tests/v3/test_sync.py +++ b/tests/v3/test_sync.py @@ -1,12 +1,12 @@ -from collections.abc import AsyncGenerator import asyncio import time -from unittest.mock import patch, AsyncMock - -from zarr.sync import sync, _get_loop, _get_lock, SyncError, SyncMixin +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch import pytest +from zarr.sync import SyncError, SyncMixin, _get_lock, _get_loop, sync + @pytest.fixture(params=[True, False]) def sync_loop(request) -> asyncio.AbstractEventLoop | None: diff --git a/tests/v3/test_v2.py b/tests/v3/test_v2.py index 5b831b1bb0..2a38dc8fdc 100644 --- a/tests/v3/test_v2.py +++ b/tests/v3/test_v2.py @@ -1,10 +1,11 @@ -from typing import Iterator +from collections.abc import Iterator + import numpy as np import pytest from zarr.abc.store import Store from zarr.array import Array -from zarr.store import StorePath, MemoryStore +from zarr.store import MemoryStore, StorePath @pytest.fixture From bf895332cdb8f028284f958c58cda773085d5e68 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Fri, 17 May 2024 05:52:31 -0700 Subject: [PATCH 0536/1078] reset release notes (#1886) --- docs/release.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 5ca60b8166..59051bbf97 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,6 +18,23 @@ Release notes See `GH1777 `_ for more details on the upcoming 3.0 release. +.. _unreleased: + +Unreleased +---------- + +Enhancements +~~~~~~~~~~~~ + +Docs +~~~~ + +Maintenance +~~~~~~~~~~~ + +Deprecations +~~~~~~~~~~~~ + .. 
_release_2.18.1: 2.18.1 From ea2da93191c55e97ee517dac4317d7cc1674a957 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Fri, 17 May 2024 07:52:19 -0700 Subject: [PATCH 0537/1078] chore(ci): remove mypy from test action in favor of pre-commit settings (#1887) --- .github/workflows/test.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d063186b27..dffcf3a8ee 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -43,7 +43,3 @@ jobs: - name: Run Tests run: | hatch env run --env test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} run - - name: Run mypy - continue-on-error: true - run: | - hatch run test:run-mypy From 333f37faffeeb911a9ab8846f69fe31a2e495404 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 17 May 2024 22:21:54 +0200 Subject: [PATCH 0538/1078] Enable ruff/flake8-raise rules (RSE) and fix issues (#1872) * Enable ruff/flake8-raise rules (RSE) * Apply ruff/flake8-raise rule RSE102 RSE102 Unnecessary parentheses on raised exception --- pyproject.toml | 1 + src/zarr/v2/core.py | 12 ++++++------ src/zarr/v2/hierarchy.py | 2 +- src/zarr/v2/indexing.py | 4 ++-- src/zarr/v2/storage.py | 20 ++++++++++---------- tests/v2/test_util.py | 2 +- tests/v3/conftest.py | 2 +- tests/v3/test_group.py | 6 +++--- 8 files changed, 25 insertions(+), 24 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1aafb64a2b..4fb90c7496 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -170,6 +170,7 @@ extend-select = [ "B", # flake8-bugbear "I", # isort "UP", # pyupgrade + "RSE", "RUF", ] ignore = [ diff --git a/src/zarr/v2/core.py b/src/zarr/v2/core.py index 273d2857e8..c1223daced 100644 --- a/src/zarr/v2/core.py +++ b/src/zarr/v2/core.py @@ -1465,7 +1465,7 @@ def set_basic_selection(self, selection, value, fields=None): # guard conditions if self._read_only: - raise ReadOnlyError() + raise ReadOnlyError # refresh metadata if not self._cache_metadata: @@ -1557,7 +1557,7 @@ def set_orthogonal_selection(self, selection, value, fields=None): # guard conditions if self._read_only: - raise ReadOnlyError() + raise ReadOnlyError # refresh metadata if not self._cache_metadata: @@ -1630,7 +1630,7 @@ def set_coordinate_selection(self, selection, value, fields=None): # guard conditions if self._read_only: - raise ReadOnlyError() + raise ReadOnlyError # refresh metadata if not self._cache_metadata: @@ -1723,7 +1723,7 @@ def set_block_selection(self, selection, value, fields=None): """ # guard conditions if self._read_only: - raise ReadOnlyError() + raise ReadOnlyError # refresh metadata if not self._cache_metadata: @@ -1799,7 +1799,7 @@ def set_mask_selection(self, selection, value, fields=None): # guard conditions if self._read_only: - raise ReadOnlyError() + raise ReadOnlyError # refresh metadata if not self._cache_metadata: @@ -2489,7 +2489,7 @@ def _synchronized_op(self, f, *args, **kwargs): def _write_op(self, f, *args, **kwargs): # guard condition if self._read_only: - raise ReadOnlyError() + raise ReadOnlyError return self._synchronized_op(f, *args, **kwargs) diff --git a/src/zarr/v2/hierarchy.py b/src/zarr/v2/hierarchy.py index acd65750e3..b0660d181d 100644 --- a/src/zarr/v2/hierarchy.py +++ b/src/zarr/v2/hierarchy.py @@ -797,7 +797,7 @@ def tree(self, expand=False, level=None): def _write_op(self, f, *args, **kwargs): # guard condition if self._read_only: - raise ReadOnlyError() + raise 
ReadOnlyError if self._synchronizer is None: # no synchronization diff --git a/src/zarr/v2/indexing.py b/src/zarr/v2/indexing.py index 1c11409d05..7b9b74d227 100644 --- a/src/zarr/v2/indexing.py +++ b/src/zarr/v2/indexing.py @@ -170,7 +170,7 @@ def __init__(self, dim_sel, dim_len, dim_chunk_len): # normalize self.start, self.stop, self.step = dim_sel.indices(dim_len) if self.step < 1: - raise NegativeStepError() + raise NegativeStepError # store attributes self.dim_len = dim_len @@ -978,7 +978,7 @@ def make_slice_selection(selection): if len(dim_selection) == 1: ls.append(slice(int(dim_selection[0]), int(dim_selection[0]) + 1, 1)) else: - raise ArrayIndexError() + raise ArrayIndexError else: ls.append(dim_selection) return ls diff --git a/src/zarr/v2/storage.py b/src/zarr/v2/storage.py index de45201fc0..67240e520d 100644 --- a/src/zarr/v2/storage.py +++ b/src/zarr/v2/storage.py @@ -1275,7 +1275,7 @@ def __getitem__(self, key): def setitems(self, values): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError # Normalize keys and make sure the values are bytes values = { @@ -1286,7 +1286,7 @@ def setitems(self, values): def __setitem__(self, key, value): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError key = self._normalize_key(key) value = ensure_contiguous_ndarray_or_bytes(value) path = self.dir_path(key) @@ -1300,7 +1300,7 @@ def __setitem__(self, key, value): def __delitem__(self, key): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError key = self._normalize_key(key) path = self.dir_path(key) if self.fs.isdir(path): @@ -1310,7 +1310,7 @@ def __delitem__(self, key): def delitems(self, keys): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError # only remove the keys that exist in the store nkeys = [self._normalize_key(key) for key in keys if key in self] # rm errors if you pass an empty collection @@ -1369,7 +1369,7 @@ def listdir(self, path=None): def rmdir(self, path=None): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError store_path = self.dir_path(path) if self.fs.isdir(store_path): self.fs.rm(store_path, recursive=True) @@ -1380,7 +1380,7 @@ def getsize(self, path=None): def clear(self): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError self.map.clear() @classmethod @@ -1670,7 +1670,7 @@ def __getitem__(self, key): def __setitem__(self, key, value): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError value = ensure_contiguous_ndarray_like(value).view("u1") with self.mutex: # writestr(key, value) writes with default permissions from @@ -1752,7 +1752,7 @@ def getsize(self, path=None): def clear(self): if self.mode == "r": - raise ReadOnlyError() + raise ReadOnlyError with self.mutex: self.close() os.remove(self.path) @@ -2810,10 +2810,10 @@ def __len__(self): return len(self.meta_store) def __delitem__(self, key): - raise ReadOnlyError() + raise ReadOnlyError def __setitem__(self, key, value): - raise ReadOnlyError() + raise ReadOnlyError def getsize(self, path): return getsize(self.meta_store, path) diff --git a/tests/v2/test_util.py b/tests/v2/test_util.py index 35c355693a..0111165df9 100644 --- a/tests/v2/test_util.py +++ b/tests/v2/test_util.py @@ -210,7 +210,7 @@ def __init__(self, pass_on=1): def __call__(self): self.c += 1 if self.c != self.pass_on: - raise PermissionError() + raise PermissionError for x in range(1, 11): # Any number of failures less than 10 will be accepted. 
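The RSE102 fixes in this patch all have the same shape: when an exception class is raised with no arguments, the call parentheses are redundant, because raising a bare exception class makes Python instantiate it implicitly. A minimal self-contained sketch of the before/after (the `ReadOnlyError` defined here is a stand-in for illustration, not zarr's own class):

    class ReadOnlyError(PermissionError):
        # Stand-in exception so this sketch runs on its own.
        pass

    def write_value(read_only: bool) -> None:
        if read_only:
            # Before: raise ReadOnlyError()  <- flagged by RSE102
            # After: raise the bare class; Python calls it with no args.
            raise ReadOnlyError
        print("value written")

    write_value(read_only=False)
    try:
        write_value(read_only=True)
    except ReadOnlyError:
        print("rejected: store is read-only")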
diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py index b6a121520d..b9f56014bc 100644 --- a/tests/v3/conftest.py +++ b/tests/v3/conftest.py @@ -25,7 +25,7 @@ def parse_store( return MemoryStore() if store == "remote": return RemoteStore() - raise AssertionError() + raise AssertionError @pytest.fixture(params=[str, pathlib.Path]) diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 771baddc0b..36b82f413c 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -203,7 +203,7 @@ async def test_asyncgroup_open_wrong_format( elif zarr_format == 2: zarr_format_wrong = 3 else: - raise AssertionError() + raise AssertionError with pytest.raises(FileNotFoundError): await AsyncGroup.open(store=store, zarr_format=zarr_format_wrong) @@ -278,7 +278,7 @@ async def test_asyncgroup_delitem(store: LocalStore | MemoryStore, zarr_format: elif zarr_format == 3: assert not await agroup.store_path.store.exists(sub_array_path + "/" + "zarr.json") else: - raise AssertionError() + raise AssertionError sub_group_path = "sub_group" _ = await agroup.create_group(sub_group_path, attributes={"foo": 100}) @@ -289,7 +289,7 @@ async def test_asyncgroup_delitem(store: LocalStore | MemoryStore, zarr_format: elif zarr_format == 3: assert not await agroup.store_path.store.exists(sub_array_path + "/" + "zarr.json") else: - raise AssertionError() + raise AssertionError @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) From 88946d2c1b55eed9fffdd98b7170a44361cbd62c Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 17 May 2024 22:26:43 +0200 Subject: [PATCH 0539/1078] Apply assorted ruff/refurb rules (FURB) (#1873) * Apply ruff/refurb rule FURB118 FURB118 Use `operator....` instead of defining a lambda * Apply ruff/refurb rule FURB171 FURB171 Membership test against single-item container * Apply ruff/refurb rule FURB113 FURB113 Use `....extend(...)` instead of repeatedly calling `....append()` --------- Co-authored-by: Joe Hamman --- src/zarr/codecs/sharding.py | 3 ++- src/zarr/common.py | 3 ++- src/zarr/v2/meta.py | 10 +++++----- tests/v2/test_core.py | 4 +--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index a6c5bac6a7..1fdc7d34b4 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -4,6 +4,7 @@ from dataclasses import dataclass, field, replace from enum import Enum from functools import lru_cache +from operator import itemgetter from typing import TYPE_CHECKING, NamedTuple import numpy as np @@ -124,7 +125,7 @@ def is_dense(self, chunk_byte_length: int) -> bool: for offset, length in self.offsets_and_lengths if offset != MAX_UINT_64 ], - key=lambda entry: entry[0], + key=itemgetter(0), ) # Are all non-empty offsets unique? 
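The refurb fixes recorded in this patch replace small hand-written lambdas and repeated calls with their standard-library equivalents, as in the `itemgetter` change to sharding.py above and the `operator.mul` change to common.py that follows. A standalone sketch of both substitutions, using invented sample data:

    import functools
    import operator

    offsets_and_lengths = [(32, 4), (0, 8), (16, 4)]

    # FURB118: operator.itemgetter(0) replaces key=lambda entry: entry[0]
    by_offset = sorted(offsets_and_lengths, key=operator.itemgetter(0))
    assert by_offset == [(0, 8), (16, 4), (32, 4)]

    # operator.mul replaces lambda x, y: x * y in the reduce-based product
    assert functools.reduce(operator.mul, (2, 3, 4), 1) == 24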
diff --git a/src/zarr/common.py b/src/zarr/common.py index 5781cc423b..32a6c2fd0c 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -3,6 +3,7 @@ import asyncio import contextvars import functools +import operator from collections.abc import Iterable from dataclasses import dataclass from enum import Enum @@ -28,7 +29,7 @@ def product(tup: ChunkCoords) -> int: - return functools.reduce(lambda x, y: x * y, tup, 1) + return functools.reduce(operator.mul, tup, 1) T = TypeVar("T", bound=tuple[Any, ...]) diff --git a/src/zarr/v2/meta.py b/src/zarr/v2/meta.py index 70c424c8b5..418c0727cf 100644 --- a/src/zarr/v2/meta.py +++ b/src/zarr/v2/meta.py @@ -222,7 +222,7 @@ def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> return -np.inf else: return np.array(v, dtype=dtype)[()] - elif dtype.kind in "c": + elif dtype.kind == "c": v = ( cls.decode_fill_value(v[0], dtype.type().real.dtype), cls.decode_fill_value(v[1], dtype.type().imag.dtype), @@ -269,23 +269,23 @@ def encode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) -> return "-Infinity" else: return float(v) - elif dtype.kind in "ui": + elif dtype.kind in ("u", "i"): return int(v) elif dtype.kind == "b": return bool(v) - elif dtype.kind in "c": + elif dtype.kind == "c": c = cast(np.complex128, np.dtype(complex).type()) v = ( cls.encode_fill_value(v.real, c.real.dtype, object_codec), cls.encode_fill_value(v.imag, c.imag.dtype, object_codec), ) return v - elif dtype.kind in "SV": + elif dtype.kind in ("S", "V"): v = str(base64.standard_b64encode(v), "ascii") return v elif dtype.kind == "U": return v - elif dtype.kind in "mM": + elif dtype.kind in ("m", "M"): return int(v.view("i8")) else: return v diff --git a/tests/v2/test_core.py b/tests/v2/test_core.py index 197461d129..f053725b95 100644 --- a/tests/v2/test_core.py +++ b/tests/v2/test_core.py @@ -1947,9 +1947,7 @@ def test_attrs_n5_keywords(self): def test_compressors(self): compressors = [None, BZ2(), Zlib(), GZip(), MsgPack()] if LZMA: - compressors.append(LZMA()) - compressors.append(LZMA(preset=1)) - compressors.append(LZMA(preset=6)) + compressors.extend((LZMA(), LZMA(preset=1), LZMA(preset=6))) for compressor in compressors: a1 = self.create_array(shape=1000, chunks=100, compressor=compressor) a1[0:100] = 1 From 50b3fb1a16e9247ba910aa410dd04ccf3f37dc5e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 17 May 2024 22:43:24 +0200 Subject: [PATCH 0540/1078] Enable ruff/flake8-implicit-str-concat rules (ISC) and fix issues (#1868) * Enable ruff/flake8-implicit-str-concat rules (ISC) * Apply ruff/flake8-implicit-str-concat rule ISC001 ISC001 Implicitly concatenated string literals on one line * A round of formatting after linting * Apply ruff/flake8-implicit-str-concat rule ISC003 ISC003 Explicitly concatenated string should be implicitly concatenated --------- Co-authored-by: Joe Hamman --- pyproject.toml | 1 + src/zarr/codecs/crc32c_.py | 3 +-- src/zarr/codecs/pipeline.py | 15 +++++---------- src/zarr/codecs/sharding.py | 6 ++---- src/zarr/codecs/transpose.py | 6 ++---- src/zarr/store/remote.py | 7 +++---- src/zarr/v2/convenience.py | 6 +++--- src/zarr/v2/indexing.py | 14 ++++---------- src/zarr/v2/util.py | 4 ++-- 9 files changed, 23 insertions(+), 39 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4fb90c7496..05022261fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -169,6 +169,7 @@ extend-select = [ "B", #
flake8-bugbear "I", # isort + "ISC", "UP", # pyupgrade "RSE", "RUF", diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index 0b9c8c9a96..d4fd80064f 100644 --- a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -42,8 +42,7 @@ async def decode_single( stored_checksum = bytes(crc32_bytes) if computed_checksum != stored_checksum: raise ValueError( - "Stored and computed checksum do not match. " - + f"Stored: {stored_checksum!r}. Computed: {computed_checksum!r}." + f"Stored and computed checksum do not match. Stored: {stored_checksum!r}. Computed: {computed_checksum!r}." ) return Buffer.from_array_like(inner_bytes) diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index 57b4fa4668..56e73a4b29 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -104,31 +104,26 @@ def codecs_from_list( if prev_codec is not None: if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, ArrayBytesCodec): raise ValueError( - f"ArrayBytesCodec '{type(codec)}' cannot follow after " - + f"ArrayBytesCodec '{type(prev_codec)}' because exactly " - + "1 ArrayBytesCodec is allowed." + f"ArrayBytesCodec '{type(codec)}' cannot follow after ArrayBytesCodec '{type(prev_codec)}' because exactly 1 ArrayBytesCodec is allowed." ) if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, BytesBytesCodec): raise ValueError( - f"ArrayBytesCodec '{type(codec)}' cannot follow after " - + f"BytesBytesCodec '{type(prev_codec)}'." + f"ArrayBytesCodec '{type(codec)}' cannot follow after BytesBytesCodec '{type(prev_codec)}'." ) if isinstance(codec, ArrayArrayCodec) and isinstance(prev_codec, ArrayBytesCodec): raise ValueError( - f"ArrayArrayCodec '{type(codec)}' cannot follow after " - + f"ArrayBytesCodec '{type(prev_codec)}'." + f"ArrayArrayCodec '{type(codec)}' cannot follow after ArrayBytesCodec '{type(prev_codec)}'." ) if isinstance(codec, ArrayArrayCodec) and isinstance(prev_codec, BytesBytesCodec): raise ValueError( - f"ArrayArrayCodec '{type(codec)}' cannot follow after " - + f"BytesBytesCodec '{type(prev_codec)}'." + f"ArrayArrayCodec '{type(codec)}' cannot follow after BytesBytesCodec '{type(prev_codec)}'." ) prev_codec = codec if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: warn( "Combining a `sharding_indexed` codec disables partial reads and " - + "writes, which may lead to inefficient performance.", + "writes, which may lead to inefficient performance.", stacklevel=3, ) diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index 1fdc7d34b4..563f216148 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -361,8 +361,7 @@ def evolve(self, array_spec: ArraySpec) -> Self: def validate(self, array_metadata: ArrayMetadata) -> None: if len(self.chunk_shape) != array_metadata.ndim: raise ValueError( - "The shard's `chunk_shape` and array's `shape` need to have the " - + "same number of dimensions." + "The shard's `chunk_shape` and array's `shape` need to have the same number of dimensions." ) if not isinstance(array_metadata.chunk_grid, RegularChunkGrid): raise ValueError("Sharding is only compatible with regular chunk grids.") @@ -375,8 +374,7 @@ def validate(self, array_metadata: ArrayMetadata) -> None: ) ): raise ValueError( - "The array's `chunk_shape` needs to be divisible by the " - + "shard's inner `chunk_shape`." + "The array's `chunk_shape` needs to be divisible by the shard's inner `chunk_shape`." 
) async def decode_single( diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 774393464c..b20a36fe98 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -45,8 +45,7 @@ def to_dict(self) -> dict[str, JSON]: def evolve(self, array_spec: ArraySpec) -> Self: if len(self.order) != array_spec.ndim: raise ValueError( - "The `order` tuple needs have as many entries as " - + f"there are dimensions in the array. Got {self.order}." + f"The `order` tuple needs have as many entries as there are dimensions in the array. Got {self.order}." ) if len(self.order) != len(set(self.order)): raise ValueError( @@ -54,8 +53,7 @@ def evolve(self, array_spec: ArraySpec) -> Self: ) if not all(0 <= x < array_spec.ndim for x in self.order): raise ValueError( - "All entries in the `order` tuple must be between 0 and " - + f"the number of dimensions in the array. Got {self.order}." + f"All entries in the `order` tuple must be between 0 and the number of dimensions in the array. Got {self.order}." ) order = tuple(self.order) diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 2986133fbd..8058c61035 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -25,10 +25,9 @@ def __init__(self, url: UPath | str, **storage_options: dict[str, Any]): if isinstance(url, str): self.root = UPath(url, **storage_options) else: - assert len(storage_options) == 0, ( - "If constructed with a UPath object, no additional " - + "storage_options are allowed." - ) + assert ( + len(storage_options) == 0 + ), "If constructed with a UPath object, no additional storage_options are allowed." self.root = url.rstrip("/") # test instantiate file system fs, _ = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) diff --git a/src/zarr/v2/convenience.py b/src/zarr/v2/convenience.py index 6355a11af9..aa322bfb98 100644 --- a/src/zarr/v2/convenience.py +++ b/src/zarr/v2/convenience.py @@ -470,7 +470,7 @@ def __init__(self, log): self.log_file = log else: raise TypeError( - "log must be a callable function, file path or " "file-like object, found %r" % log + "log must be a callable function, file path or file-like object, found %r" % log ) def __enter__(self): @@ -898,7 +898,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ if exists: if if_exists == "raise": raise CopyError( - "an object {!r} already exists in destination " "{!r}".format(name, dest.name) + "an object {!r} already exists in destination {!r}".format(name, dest.name) ) elif if_exists == "skip": do_copy = False @@ -990,7 +990,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_ if exists_array: if if_exists == "raise": raise CopyError( - "an array {!r} already exists in destination " "{!r}".format(name, dest.name) + "an array {!r} already exists in destination {!r}".format(name, dest.name) ) elif if_exists == "skip": do_copy = False diff --git a/src/zarr/v2/indexing.py b/src/zarr/v2/indexing.py index 7b9b74d227..0e266ad908 100644 --- a/src/zarr/v2/indexing.py +++ b/src/zarr/v2/indexing.py @@ -363,14 +363,12 @@ class BoolArrayDimIndexer: def __init__(self, dim_sel, dim_len, dim_chunk_len): # check number of dimensions if not is_bool_array(dim_sel, 1): - raise IndexError( - "Boolean arrays in an orthogonal selection must " "be 1-dimensional only" - ) + raise IndexError("Boolean arrays in an orthogonal selection must be 1-dimensional only") # check shape if dim_sel.shape[0] != dim_len: raise IndexError( - "Boolean 
array has the wrong length for dimension; " "expected {}, got {}".format( + "Boolean array has the wrong length for dimension; expected {}, got {}".format( dim_len, dim_sel.shape[0] ) ) @@ -464,9 +462,7 @@ def __init__( # ensure 1d array dim_sel = np.asanyarray(dim_sel) if not is_integer_array(dim_sel, 1): - raise IndexError( - "integer arrays in an orthogonal selection must be " "1-dimensional only" - ) + raise IndexError("integer arrays in an orthogonal selection must be 1-dimensional only") # handle wraparound if wraparound: @@ -920,9 +916,7 @@ def check_fields(fields, dtype): # check type if not isinstance(fields, (str, list, tuple)): raise IndexError( - "'fields' argument must be a string or list of strings; found " "{!r}".format( - type(fields) - ) + "'fields' argument must be a string or list of strings; found {!r}".format(type(fields)) ) if fields: if dtype.names is None: diff --git a/src/zarr/v2/util.py b/src/zarr/v2/util.py index 8751b39cdc..48d7d30d88 100644 --- a/src/zarr/v2/util.py +++ b/src/zarr/v2/util.py @@ -307,7 +307,7 @@ def normalize_fill_value(fill_value, dtype: np.dtype[Any]): if not isinstance(fill_value, str): raise ValueError( - "fill_value {!r} is not valid for dtype {}; must be a " "unicode string".format( + "fill_value {!r} is not valid for dtype {}; must be a unicode string".format( fill_value, dtype ) ) @@ -323,7 +323,7 @@ def normalize_fill_value(fill_value, dtype: np.dtype[Any]): except Exception as e: # re-raise with our own error message to be helpful raise ValueError( - "fill_value {!r} is not valid for dtype {}; nested " "exception: {}".format( + "fill_value {!r} is not valid for dtype {}; nested exception: {}".format( fill_value, dtype, e ) ) From 55f49132d63868d94b511feb22365e89feee6f54 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 21 May 2024 06:18:04 +0100 Subject: [PATCH 0541/1078] Remove some unused mypy overrides (#1894) --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 05022261fa..c093b09527 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -213,7 +213,6 @@ check_untyped_defs = false [[tool.mypy.overrides]] module = [ "zarr.v2.*", - "zarr.abc.codec", "zarr.array_v2", ] disallow_any_generics = false @@ -239,7 +238,6 @@ module = [ "zarr.v2.*", "zarr.array_v2", "zarr.array", - "zarr.common", "zarr.group", "zarr.metadata" ] From 549cf2801d4b48a26d5e5d71a9c0120203005152 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 21 May 2024 06:20:19 +0100 Subject: [PATCH 0542/1078] Finish typing zarr.metadata (#1880) --- pyproject.toml | 1 - src/zarr/array.py | 2 ++ src/zarr/chunk_grids.py | 8 +++---- src/zarr/chunk_key_encodings.py | 2 +- src/zarr/common.py | 5 ++-- src/zarr/metadata.py | 41 ++++++++++++++++++--------------- 6 files changed, 32 insertions(+), 27 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c093b09527..62a81144ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -239,7 +239,6 @@ module = [ "zarr.array_v2", "zarr.array", "zarr.group", - "zarr.metadata" ] disallow_untyped_defs = false diff --git a/src/zarr/array.py b/src/zarr/array.py index 039f39e98e..86ff262940 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -199,6 +199,8 @@ async def _create_v3( if chunk_key_encoding is None: chunk_key_encoding = ("default", "/") + assert chunk_key_encoding is not None + if isinstance(chunk_key_encoding, tuple): chunk_key_encoding = ( V2ChunkKeyEncoding(separator=chunk_key_encoding[1]) diff --git a/src/zarr/chunk_grids.py b/src/zarr/chunk_grids.py index 
45f77cc99c..f6366b8038 100644 --- a/src/zarr/chunk_grids.py +++ b/src/zarr/chunk_grids.py @@ -3,7 +3,7 @@ import itertools from collections.abc import Iterator from dataclasses import dataclass -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING from zarr.abc.metadata import Metadata from zarr.common import ( @@ -22,13 +22,13 @@ @dataclass(frozen=True) class ChunkGrid(Metadata): @classmethod - def from_dict(cls, data: dict[str, JSON]) -> ChunkGrid: + def from_dict(cls, data: dict[str, JSON] | ChunkGrid) -> ChunkGrid: if isinstance(data, ChunkGrid): return data name_parsed, _ = parse_named_configuration(data) if name_parsed == "regular": - return RegularChunkGrid.from_dict(data) + return RegularChunkGrid._from_dict(data) raise ValueError(f"Unknown chunk grid. Got {name_parsed}.") def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]: @@ -45,7 +45,7 @@ def __init__(self, *, chunk_shape: ChunkCoordsLike) -> None: object.__setattr__(self, "chunk_shape", chunk_shape_parsed) @classmethod - def from_dict(cls, data: dict[str, Any]) -> Self: + def _from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "regular") return cls(**configuration_parsed) # type: ignore[arg-type] diff --git a/src/zarr/chunk_key_encodings.py b/src/zarr/chunk_key_encodings.py index 5ecb98ef61..ed6c181764 100644 --- a/src/zarr/chunk_key_encodings.py +++ b/src/zarr/chunk_key_encodings.py @@ -34,7 +34,7 @@ def __init__(self, *, separator: SeparatorLiteral) -> None: object.__setattr__(self, "separator", separator_parsed) @classmethod - def from_dict(cls, data: dict[str, JSON]) -> ChunkKeyEncoding: + def from_dict(cls, data: dict[str, JSON] | ChunkKeyEncoding) -> ChunkKeyEncoding: if isinstance(data, ChunkKeyEncoding): return data diff --git a/src/zarr/common.py b/src/zarr/common.py index 32a6c2fd0c..9d8315abc8 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -13,6 +13,7 @@ from collections.abc import Awaitable, Callable, Iterator import numpy as np +import numpy.typing as npt ZARR_JSON = "zarr.json" ZARRAY_JSON = ".zarray" @@ -150,7 +151,7 @@ def parse_named_configuration( return name_parsed, configuration_parsed -def parse_shapelike(data: Any) -> tuple[int, ...]: +def parse_shapelike(data: Iterable[int]) -> tuple[int, ...]: if not isinstance(data, Iterable): raise TypeError(f"Expected an iterable. 
Got {data} instead.") data_tuple = tuple(data) @@ -164,7 +165,7 @@ def parse_shapelike(data: Any) -> tuple[int, ...]: return data_tuple -def parse_dtype(data: Any) -> np.dtype[Any]: +def parse_dtype(data: npt.DTypeLike) -> np.dtype[Any]: # todo: real validation return np.dtype(data) diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 8db8c8033e..e2b7b987c0 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -22,6 +22,7 @@ from typing_extensions import Self +import numcodecs.abc from zarr.common import ( JSON, @@ -168,15 +169,15 @@ class ArrayV3Metadata(ArrayMetadata): def __init__( self, *, - shape, - data_type, - chunk_grid, - chunk_key_encoding, - fill_value, - codecs, - attributes, - dimension_names, - ): + shape: Iterable[int], + data_type: npt.DTypeLike, + chunk_grid: dict[str, JSON] | ChunkGrid, + chunk_key_encoding: dict[str, JSON] | ChunkKeyEncoding, + fill_value: Any, + codecs: Iterable[Codec | JSON], + attributes: None | dict[str, JSON], + dimension_names: None | Iterable[str], + ) -> None: """ Because the class is a frozen dataclass, we set attributes using object.__setattr__ """ @@ -249,14 +250,14 @@ def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: return self.chunk_key_encoding.encode_chunk_key(chunk_coords) def to_buffer_dict(self) -> dict[str, Buffer]: - def _json_convert(o): + def _json_convert(o: np.dtype[Any] | Enum | Codec) -> str | dict[str, Any]: if isinstance(o, np.dtype): return str(o) if isinstance(o, Enum): return o.name # this serializes numcodecs compressors # todo: implement to_dict for codecs - elif hasattr(o, "get_config"): + elif isinstance(o, numcodecs.abc.Codec): return o.get_config() raise TypeError @@ -271,9 +272,10 @@ def from_dict(cls, data: dict[str, JSON]) -> ArrayV3Metadata: # check that the node_type attribute is correct _ = parse_node_type_array(data.pop("node_type")) - dimension_names = data.pop("dimension_names", None) + data["dimension_names"] = data.pop("dimension_names", None) - return cls(**data, dimension_names=dimension_names) + # TODO: Remove the ignores and use a TypedDict to type `data` + return cls(**data) # type: ignore[arg-type] def to_dict(self) -> dict[str, Any]: out_dict = super().to_dict() @@ -367,7 +369,9 @@ def codec_pipeline(self) -> CodecPipeline: ) def to_buffer_dict(self) -> dict[str, Buffer]: - def _json_convert(o): + def _json_convert( + o: np.dtype[Any], + ) -> str | list[tuple[str, str] | tuple[str, str, tuple[int, ...]]]: if isinstance(o, np.dtype): if o.fields is None: return o.str @@ -399,7 +403,7 @@ def to_dict(self) -> JSON: zarray_dict["chunks"] = self.chunk_grid.chunk_shape _ = zarray_dict.pop("data_type") - zarray_dict["dtype"] = self.data_type + zarray_dict["dtype"] = self.data_type.str return zarray_dict @@ -422,7 +426,7 @@ def update_attributes(self, attributes: dict[str, JSON]) -> Self: return replace(self, attributes=attributes) -def parse_dimension_names(data: Any) -> tuple[str, ...] | None: +def parse_dimension_names(data: None | Iterable[str]) -> tuple[str, ...] | None: if data is None: return data if isinstance(data, Iterable) and all([isinstance(x, str) for x in data]): @@ -432,12 +436,11 @@ def parse_dimension_names(data: Any) -> tuple[str, ...] 
| None: # todo: real validation -def parse_attributes(data: Any) -> dict[str, JSON]: +def parse_attributes(data: None | dict[str, JSON]) -> dict[str, JSON]: if data is None: return {} - data_json = cast(dict[str, JSON], data) - return data_json + return data # todo: move to its own module and drop _v3 suffix From 0c513fc7818965d0945e1e4c2d89fc6b4d6e511a Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Tue, 21 May 2024 10:56:29 +0200 Subject: [PATCH 0543/1078] Followup on codecs (#1889) --- src/zarr/abc/codec.py | 78 ++++++++++++++++++--- src/zarr/codecs/_v2.py | 30 ++++---- src/zarr/codecs/blosc.py | 10 +-- src/zarr/codecs/bytes.py | 10 +-- src/zarr/codecs/crc32c_.py | 8 +-- src/zarr/codecs/gzip.py | 8 +-- src/zarr/codecs/mixins.py | 130 ----------------------------------- src/zarr/codecs/pipeline.py | 4 +- src/zarr/codecs/sharding.py | 31 +++++---- src/zarr/codecs/transpose.py | 10 +-- src/zarr/codecs/zstd.py | 8 +-- src/zarr/metadata.py | 2 +- 12 files changed, 128 insertions(+), 201 deletions(-) delete mode 100644 src/zarr/codecs/mixins.py diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index d8d7edf547..028d1757ce 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -1,12 +1,14 @@ from __future__ import annotations from abc import abstractmethod -from collections.abc import Iterable +from collections.abc import Awaitable, Callable, Iterable from typing import TYPE_CHECKING, Generic, TypeVar from zarr.abc.metadata import Metadata from zarr.abc.store import ByteGetter, ByteSetter from zarr.buffer import Buffer, NDBuffer +from zarr.common import concurrent_map +from zarr.config import config if TYPE_CHECKING: from typing_extensions import Self @@ -59,7 +61,7 @@ def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: """ return chunk_spec - def evolve(self, array_spec: ArraySpec) -> Self: + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: """Fills in codec configuration parameters that can be automatically inferred from the array metadata. @@ -83,7 +85,9 @@ def validate(self, array_metadata: ArrayMetadata) -> None: """ ... - @abstractmethod + async def _decode_single(self, chunk_data: CodecOutput, chunk_spec: ArraySpec) -> CodecInput: + raise NotImplementedError + async def decode( self, chunks_and_specs: Iterable[tuple[CodecOutput | None, ArraySpec]], @@ -100,9 +104,13 @@ async def decode( ------- Iterable[CodecInput | None] """ - ... + return await batching_helper(self._decode_single, chunks_and_specs) + + async def _encode_single( + self, chunk_data: CodecInput, chunk_spec: ArraySpec + ) -> CodecOutput | None: + raise NotImplementedError - @abstractmethod async def encode( self, chunks_and_specs: Iterable[tuple[CodecInput | None, ArraySpec]], @@ -119,7 +127,7 @@ async def encode( ------- Iterable[CodecOutput | None] """ - ... + return await batching_helper(self._encode_single, chunks_and_specs) class ArrayArrayCodec(_Codec[NDBuffer, NDBuffer]): @@ -146,7 +154,11 @@ class BytesBytesCodec(_Codec[Buffer, Buffer]): class ArrayBytesCodecPartialDecodeMixin: """Mixin for array-to-bytes codecs that implement partial decoding.""" - @abstractmethod + async def _decode_partial_single( + self, byte_getter: ByteGetter, selection: SliceSelection, chunk_spec: ArraySpec + ) -> NDBuffer | None: + raise NotImplementedError + async def decode_partial( self, batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]], @@ -167,13 +179,28 @@ async def decode_partial( ------- Iterable[NDBuffer | None] """ - ... 
+ return await concurrent_map( + [ + (byte_getter, selection, chunk_spec) + for byte_getter, selection, chunk_spec in batch_info + ], + self._decode_partial_single, + config.get("async.concurrency"), + ) class ArrayBytesCodecPartialEncodeMixin: """Mixin for array-to-bytes codecs that implement partial encoding.""" - @abstractmethod + async def _encode_partial_single( + self, + byte_setter: ByteSetter, + chunk_array: NDBuffer, + selection: SliceSelection, + chunk_spec: ArraySpec, + ) -> None: + raise NotImplementedError + async def encode_partial( self, batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]], @@ -192,7 +219,14 @@ async def encode_partial( The ByteSetter is used to write the necessary bytes and fetch bytes for existing chunk data. The chunk spec contains information about the chunk. """ - ... + await concurrent_map( + [ + (byte_setter, chunk_array, selection, chunk_spec) + for byte_setter, chunk_array, selection, chunk_spec in batch_info + ], + self._encode_partial_single, + config.get("async.concurrency"), + ) class CodecPipeline(Metadata): @@ -203,7 +237,7 @@ class CodecPipeline(Metadata): and writes them to a store (via ByteSetter).""" @abstractmethod - def evolve(self, array_spec: ArraySpec) -> Self: + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: """Fills in codec configuration parameters that can be automatically inferred from the array metadata. @@ -347,3 +381,25 @@ async def write( value : NDBuffer """ ... + + +async def batching_helper( + func: Callable[[CodecInput, ArraySpec], Awaitable[CodecOutput | None]], + batch_info: Iterable[tuple[CodecInput | None, ArraySpec]], +) -> list[CodecOutput | None]: + return await concurrent_map( + [(chunk_array, chunk_spec) for chunk_array, chunk_spec in batch_info], + noop_for_none(func), + config.get("async.concurrency"), + ) + + +def noop_for_none( + func: Callable[[CodecInput, ArraySpec], Awaitable[CodecOutput | None]], +) -> Callable[[CodecInput | None, ArraySpec], Awaitable[CodecOutput | None]]: + async def wrap(chunk: CodecInput | None, chunk_spec: ArraySpec) -> CodecOutput | None: + if chunk is None: + return None + return await func(chunk, chunk_spec) + + return wrap diff --git a/src/zarr/codecs/_v2.py b/src/zarr/codecs/_v2.py index 06bd866c0f..ceb3de0a06 100644 --- a/src/zarr/codecs/_v2.py +++ b/src/zarr/codecs/_v2.py @@ -5,18 +5,18 @@ import numcodecs from numcodecs.compat import ensure_bytes, ensure_ndarray +from zarr.abc.codec import ArrayArrayCodec, ArrayBytesCodec from zarr.buffer import Buffer, NDBuffer -from zarr.codecs.mixins import ArrayArrayCodecBatchMixin, ArrayBytesCodecBatchMixin from zarr.common import JSON, ArraySpec, to_thread @dataclass(frozen=True) -class V2Compressor(ArrayBytesCodecBatchMixin): +class V2Compressor(ArrayBytesCodec): compressor: dict[str, JSON] | None is_fixed_size = False - async def decode_single( + async def _decode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, @@ -38,7 +38,7 @@ async def decode_single( return NDBuffer.from_numpy_array(chunk_numpy_array) - async def encode_single( + async def _encode_single( self, chunk_array: NDBuffer, _chunk_spec: ArraySpec, @@ -64,44 +64,44 @@ def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) @dataclass(frozen=True) -class V2Filters(ArrayArrayCodecBatchMixin): +class V2Filters(ArrayArrayCodec): filters: list[dict[str, JSON]] is_fixed_size = False - async def decode_single( + async def _decode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, ) -> NDBuffer: - 
chunk_numpy_array = chunk_array.as_numpy_array() + chunk_ndarray = chunk_array.as_ndarray_like() # apply filters in reverse order if self.filters is not None: for filter_metadata in self.filters[::-1]: filter = numcodecs.get_codec(filter_metadata) - chunk_numpy_array = await to_thread(filter.decode, chunk_numpy_array) + chunk_ndarray = await to_thread(filter.decode, chunk_ndarray) # ensure correct chunk shape - if chunk_numpy_array.shape != chunk_spec.shape: - chunk_numpy_array = chunk_numpy_array.reshape( + if chunk_ndarray.shape != chunk_spec.shape: + chunk_ndarray = chunk_ndarray.reshape( chunk_spec.shape, order=chunk_spec.order, ) - return NDBuffer.from_numpy_array(chunk_numpy_array) + return NDBuffer.from_ndarray_like(chunk_ndarray) - async def encode_single( + async def _encode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, ) -> NDBuffer | None: - chunk_numpy_array = chunk_array.as_numpy_array().ravel(order=chunk_spec.order) + chunk_ndarray = chunk_array.as_ndarray_like().ravel(order=chunk_spec.order) for filter_metadata in self.filters: filter = numcodecs.get_codec(filter_metadata) - chunk_numpy_array = await to_thread(filter.encode, chunk_numpy_array) + chunk_ndarray = await to_thread(filter.encode, chunk_ndarray) - return NDBuffer.from_numpy_array(chunk_numpy_array) + return NDBuffer.from_ndarray_like(chunk_ndarray) def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index 24fac962db..e8921b8beb 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -8,8 +8,8 @@ import numcodecs from numcodecs.blosc import Blosc +from zarr.abc.codec import BytesBytesCodec from zarr.buffer import Buffer, as_numpy_array_wrapper -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration, to_thread @@ -74,7 +74,7 @@ def parse_blocksize(data: JSON) -> int: @dataclass(frozen=True) -class BloscCodec(BytesBytesCodecBatchMixin): +class BloscCodec(BytesBytesCodec): is_fixed_size = False typesize: int @@ -125,7 +125,7 @@ def to_dict(self) -> dict[str, JSON]: }, } - def evolve(self, array_spec: ArraySpec) -> Self: + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: new_codec = self if new_codec.typesize is None: new_codec = replace(new_codec, typesize=array_spec.dtype.itemsize) @@ -158,14 +158,14 @@ def _blosc_codec(self) -> Blosc: } return Blosc.from_config(config_dict) - async def decode_single( + async def _decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, ) -> Buffer: return await to_thread(as_numpy_array_wrapper, self._blosc_codec.decode, chunk_bytes) - async def encode_single( + async def _encode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index a6045852e6..2581157690 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -7,8 +7,8 @@ import numpy as np +from zarr.abc.codec import ArrayBytesCodec from zarr.buffer import Buffer, NDBuffer -from zarr.codecs.mixins import ArrayBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration @@ -27,7 +27,7 @@ class Endian(Enum): @dataclass(frozen=True) -class BytesCodec(ArrayBytesCodecBatchMixin): +class BytesCodec(ArrayBytesCodec): is_fixed_size = True endian: Endian | None @@ -51,7 +51,7 @@ def 
to_dict(self) -> dict[str, JSON]: else: return {"name": "bytes", "configuration": {"endian": self.endian}} - def evolve(self, array_spec: ArraySpec) -> Self: + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: if array_spec.dtype.itemsize == 0: if self.endian is not None: return replace(self, endian=None) @@ -61,7 +61,7 @@ def evolve(self, array_spec: ArraySpec) -> Self: ) return self - async def decode_single( + async def _decode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, @@ -84,7 +84,7 @@ async def decode_single( ) return chunk_array - async def encode_single( + async def _encode_single( self, chunk_array: NDBuffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index d4fd80064f..724b785d67 100644 --- a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -6,8 +6,8 @@ import numpy as np from crc32c import crc32c +from zarr.abc.codec import BytesBytesCodec from zarr.buffer import Buffer -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration @@ -18,7 +18,7 @@ @dataclass(frozen=True) -class Crc32cCodec(BytesBytesCodecBatchMixin): +class Crc32cCodec(BytesBytesCodec): is_fixed_size = True @classmethod @@ -29,7 +29,7 @@ def from_dict(cls, data: dict[str, JSON]) -> Self: def to_dict(self) -> dict[str, JSON]: return {"name": "crc32c"} - async def decode_single( + async def _decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, @@ -46,7 +46,7 @@ async def decode_single( ) return Buffer.from_array_like(inner_bytes) - async def encode_single( + async def _encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/gzip.py b/src/zarr/codecs/gzip.py index 58c1fc6fec..6a8aaf08bb 100644 --- a/src/zarr/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -5,8 +5,8 @@ from numcodecs.gzip import GZip +from zarr.abc.codec import BytesBytesCodec from zarr.buffer import Buffer, as_numpy_array_wrapper -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread @@ -27,7 +27,7 @@ def parse_gzip_level(data: JSON) -> int: @dataclass(frozen=True) -class GzipCodec(BytesBytesCodecBatchMixin): +class GzipCodec(BytesBytesCodec): is_fixed_size = False level: int = 5 @@ -45,14 +45,14 @@ def from_dict(cls, data: dict[str, JSON]) -> Self: def to_dict(self) -> dict[str, JSON]: return {"name": "gzip", "configuration": {"level": self.level}} - async def decode_single( + async def _decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, ) -> Buffer: return await to_thread(as_numpy_array_wrapper, GZip(self.level).decode, chunk_bytes) - async def encode_single( + async def _encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/mixins.py b/src/zarr/codecs/mixins.py deleted file mode 100644 index b571fd35ee..0000000000 --- a/src/zarr/codecs/mixins.py +++ /dev/null @@ -1,130 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from collections.abc import Awaitable, Callable, Iterable -from typing import Generic, TypeVar - -from zarr.abc.codec import ( - ArrayArrayCodec, - ArrayBytesCodec, - ArrayBytesCodecPartialDecodeMixin, - ArrayBytesCodecPartialEncodeMixin, - ByteGetter, - BytesBytesCodec, - ByteSetter, -) -from zarr.buffer import Buffer, NDBuffer -from zarr.common import ArraySpec, SliceSelection, concurrent_map -from 
zarr.config import config - -CodecInput = TypeVar("CodecInput", bound=NDBuffer | Buffer) -CodecOutput = TypeVar("CodecOutput", bound=NDBuffer | Buffer) - - -async def batching_helper( - func: Callable[[CodecInput, ArraySpec], Awaitable[CodecOutput | None]], - batch_info: Iterable[tuple[CodecInput | None, ArraySpec]], -) -> list[CodecOutput | None]: - return await concurrent_map( - [(chunk_array, chunk_spec) for chunk_array, chunk_spec in batch_info], - noop_for_none(func), - config.get("async.concurrency"), - ) - - -def noop_for_none( - func: Callable[[CodecInput, ArraySpec], Awaitable[CodecOutput | None]], -) -> Callable[[CodecInput | None, ArraySpec], Awaitable[CodecOutput | None]]: - async def wrap(chunk: CodecInput | None, chunk_spec: ArraySpec) -> CodecOutput | None: - if chunk is None: - return None - return await func(chunk, chunk_spec) - - return wrap - - -class CodecBatchMixin(Generic[CodecInput, CodecOutput]): - """The default interface from the Codec class expects batches of codecs. - However, many codec implementation operate on single codecs. - This mixin provides abstract methods for decode_single and encode_single and - implements batching through concurrent processing. - - Use ArrayArrayCodecBatchMixin, ArrayBytesCodecBatchMixin and BytesBytesCodecBatchMixin - for subclassing. - """ - - @abstractmethod - async def decode_single(self, chunk_data: CodecOutput, chunk_spec: ArraySpec) -> CodecInput: - pass - - async def decode( - self, chunk_data_and_specs: Iterable[tuple[CodecOutput | None, ArraySpec]] - ) -> Iterable[CodecInput | None]: - return await batching_helper(self.decode_single, chunk_data_and_specs) - - @abstractmethod - async def encode_single( - self, chunk_data: CodecInput, chunk_spec: ArraySpec - ) -> CodecOutput | None: - pass - - async def encode( - self, chunk_data_and_specs: Iterable[tuple[CodecInput | None, ArraySpec]] - ) -> Iterable[CodecOutput | None]: - return await batching_helper(self.encode_single, chunk_data_and_specs) - - -class ArrayArrayCodecBatchMixin(CodecBatchMixin[NDBuffer, NDBuffer], ArrayArrayCodec): - pass - - -class ArrayBytesCodecBatchMixin(CodecBatchMixin[NDBuffer, Buffer], ArrayBytesCodec): - pass - - -class BytesBytesCodecBatchMixin(CodecBatchMixin[Buffer, Buffer], BytesBytesCodec): - pass - - -class ArrayBytesCodecPartialDecodeBatchMixin(ArrayBytesCodecPartialDecodeMixin): - @abstractmethod - async def decode_partial_single( - self, byte_getter: ByteGetter, selection: SliceSelection, chunk_spec: ArraySpec - ) -> NDBuffer | None: - pass - - async def decode_partial( - self, batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]] - ) -> Iterable[NDBuffer | None]: - return await concurrent_map( - [ - (byte_getter, selection, chunk_spec) - for byte_getter, selection, chunk_spec in batch_info - ], - self.decode_partial_single, - config.get("async.concurrency"), - ) - - -class ArrayBytesCodecPartialEncodeBatchMixin(ArrayBytesCodecPartialEncodeMixin): - @abstractmethod - async def encode_partial_single( - self, - byte_setter: ByteSetter, - chunk_array: NDBuffer, - selection: SliceSelection, - chunk_spec: ArraySpec, - ) -> None: - pass - - async def encode_partial( - self, batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]] - ) -> None: - await concurrent_map( - [ - (byte_setter, chunk_array, selection, chunk_spec) - for byte_setter, chunk_array, selection, chunk_spec in batch_info - ], - self.encode_partial_single, - config.get("async.concurrency"), - ) diff --git a/src/zarr/codecs/pipeline.py 
b/src/zarr/codecs/pipeline.py index 56e73a4b29..893cbc8b4b 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -87,8 +87,8 @@ def from_dict(cls, data: Iterable[JSON | Codec], *, batch_size: int | None = Non def to_dict(self) -> JSON: return [c.to_dict() for c in self] - def evolve(self, array_spec: ArraySpec) -> Self: - return type(self).from_list([c.evolve(array_spec) for c in self]) + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: + return type(self).from_list([c.evolve_from_array_spec(array_spec) for c in self]) @staticmethod def codecs_from_list( diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index 563f216148..11035f1f22 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -9,16 +9,19 @@ import numpy as np -from zarr.abc.codec import ByteGetter, ByteSetter, Codec, CodecPipeline +from zarr.abc.codec import ( + ArrayBytesCodec, + ArrayBytesCodecPartialDecodeMixin, + ArrayBytesCodecPartialEncodeMixin, + ByteGetter, + ByteSetter, + Codec, + CodecPipeline, +) from zarr.buffer import Buffer, NDBuffer from zarr.chunk_grids import RegularChunkGrid from zarr.codecs.bytes import BytesCodec from zarr.codecs.crc32c_ import Crc32cCodec -from zarr.codecs.mixins import ( - ArrayBytesCodecBatchMixin, - ArrayBytesCodecPartialDecodeBatchMixin, - ArrayBytesCodecPartialEncodeBatchMixin, -) from zarr.codecs.pipeline import BatchedCodecPipeline from zarr.codecs.registry import register_codec from zarr.common import ( @@ -291,9 +294,7 @@ async def finalize( @dataclass(frozen=True) class ShardingCodec( - ArrayBytesCodecBatchMixin, - ArrayBytesCodecPartialDecodeBatchMixin, - ArrayBytesCodecPartialEncodeBatchMixin, + ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin ): chunk_shape: ChunkCoords codecs: CodecPipeline @@ -351,9 +352,9 @@ def to_dict(self) -> dict[str, JSON]: }, } - def evolve(self, array_spec: ArraySpec) -> Self: + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: shard_spec = self._get_chunk_spec(array_spec) - evolved_codecs = self.codecs.evolve(shard_spec) + evolved_codecs = self.codecs.evolve_from_array_spec(shard_spec) if evolved_codecs != self.codecs: return replace(self, codecs=evolved_codecs) return self @@ -377,7 +378,7 @@ def validate(self, array_metadata: ArrayMetadata) -> None: "The array's `chunk_shape` needs to be divisible by the shard's inner `chunk_shape`." 
) - async def decode_single( + async def _decode_single( self, shard_bytes: Buffer, shard_spec: ArraySpec, @@ -419,7 +420,7 @@ async def decode_single( return out - async def decode_partial_single( + async def _decode_partial_single( self, byte_getter: ByteGetter, selection: SliceSelection, @@ -480,7 +481,7 @@ async def decode_partial_single( ) return out - async def encode_single( + async def _encode_single( self, shard_array: NDBuffer, shard_spec: ArraySpec, @@ -515,7 +516,7 @@ async def encode_single( return await shard_builder.finalize(self.index_location, self._encode_shard_index) - async def encode_partial_single( + async def _encode_partial_single( self, byte_setter: ByteSetter, shard_array: NDBuffer, diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index b20a36fe98..373a27cab9 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -4,8 +4,8 @@ from dataclasses import dataclass, replace from typing import TYPE_CHECKING, cast +from zarr.abc.codec import ArrayArrayCodec from zarr.buffer import NDBuffer -from zarr.codecs.mixins import ArrayArrayCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration @@ -24,7 +24,7 @@ def parse_transpose_order(data: JSON | Iterable[int]) -> tuple[int, ...]: @dataclass(frozen=True) -class TransposeCodec(ArrayArrayCodecBatchMixin): +class TransposeCodec(ArrayArrayCodec): is_fixed_size = True order: tuple[int, ...] @@ -42,7 +42,7 @@ def from_dict(cls, data: dict[str, JSON]) -> Self: def to_dict(self) -> dict[str, JSON]: return {"name": "transpose", "configuration": {"order": list(self.order)}} - def evolve(self, array_spec: ArraySpec) -> Self: + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: if len(self.order) != array_spec.ndim: raise ValueError( f"The `order` tuple needs to have as many entries as there are dimensions in the array. Got {self.order}."
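For illustration of the interface after this refactor (a hedged sketch, not part of the patch; NegateCodec is an invented example): a codec now subclasses ArrayArrayCodec directly instead of mixing in ArrayArrayCodecBatchMixin, implements only the single-chunk hooks, and inherits the batched decode/encode plumbing from the base class.

    from dataclasses import dataclass

    from zarr.abc.codec import ArrayArrayCodec
    from zarr.buffer import NDBuffer
    from zarr.common import ArraySpec


    @dataclass(frozen=True)
    class NegateCodec(ArrayArrayCodec):
        # hypothetical example codec: negates every element of a chunk
        is_fixed_size = True

        async def _decode_single(self, chunk_array: NDBuffer, chunk_spec: ArraySpec) -> NDBuffer:
            return NDBuffer.from_ndarray_like(-chunk_array.as_ndarray_like())

        async def _encode_single(self, chunk_array: NDBuffer, chunk_spec: ArraySpec) -> NDBuffer:
            return NDBuffer.from_ndarray_like(-chunk_array.as_ndarray_like())

        def compute_encoded_size(self, input_byte_length: int, chunk_spec: ArraySpec) -> int:
            # element-wise negation leaves the chunk byte length unchanged
            return input_byte_length

(The to_dict/from_dict and register_codec wiring are omitted for brevity.)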
@@ -71,7 +71,7 @@ def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: order=chunk_spec.order, ) - async def decode_single( + async def _decode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, @@ -82,7 +82,7 @@ async def decode_single( chunk_array = chunk_array.transpose(inverse_order) return chunk_array - async def encode_single( + async def _encode_single( self, chunk_array: NDBuffer, chunk_spec: ArraySpec, diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index 3c6aac4ce3..d53199d0a8 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -5,8 +5,8 @@ from zstandard import ZstdCompressor, ZstdDecompressor +from zarr.abc.codec import BytesBytesCodec from zarr.buffer import Buffer, as_numpy_array_wrapper -from zarr.codecs.mixins import BytesBytesCodecBatchMixin from zarr.codecs.registry import register_codec from zarr.common import parse_named_configuration, to_thread @@ -31,7 +31,7 @@ def parse_checksum(data: JSON) -> bool: @dataclass(frozen=True) -class ZstdCodec(BytesBytesCodecBatchMixin): +class ZstdCodec(BytesBytesCodec): is_fixed_size = True level: int = 0 @@ -60,14 +60,14 @@ def _decompress(self, data: bytes) -> bytes: ctx = ZstdDecompressor() return ctx.decompress(data) - async def decode_single( + async def _decode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, ) -> Buffer: return await to_thread(as_numpy_array_wrapper, self._decompress, chunk_bytes) - async def encode_single( + async def _encode_single( self, chunk_bytes: Buffer, _chunk_spec: ArraySpec, diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index e2b7b987c0..58cc276c29 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -195,7 +195,7 @@ def __init__( fill_value=fill_value_parsed, order="C", # TODO: order is not needed here. ) - codecs_parsed = parse_codecs(codecs).evolve(array_spec) + codecs_parsed = parse_codecs(codecs).evolve_from_array_spec(array_spec) object.__setattr__(self, "shape", shape_parsed) object.__setattr__(self, "data_type", data_type_parsed) From c2e03c3bd66bf4038c15e5c22d8a0c52abe16cc2 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Wed, 22 May 2024 13:59:21 +0100 Subject: [PATCH 0544/1078] Add numpy to mypy pre-commit check env (#1893) * Add numpy to mypy pre-commit check env * fixes for zstd * Ignore errors in zarr.buffer --------- Co-authored-by: Norman Rzepka --- .pre-commit-config.yaml | 2 +- pyproject.toml | 1 + src/zarr/array.py | 4 ++-- src/zarr/buffer.py | 27 ++++++++++++++------------- src/zarr/codecs/bytes.py | 4 +++- src/zarr/codecs/sharding.py | 5 +++-- src/zarr/codecs/zstd.py | 7 ++++--- src/zarr/store/local.py | 4 ++-- 8 files changed, 30 insertions(+), 24 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 70812439ca..ffa3c94efa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,8 +26,8 @@ repos: hooks: - id: mypy files: src - args: [] additional_dependencies: - types-redis - types-setuptools - pytest + - numpy diff --git a/pyproject.toml b/pyproject.toml index 62a81144ba..947bec9369 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -230,6 +230,7 @@ module = [ "zarr.v2.*", "zarr.array_v2", "zarr.array", + "zarr.buffer" ] disallow_untyped_calls = false diff --git a/src/zarr/array.py b/src/zarr/array.py index 86ff262940..2828e25119 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -419,7 +419,7 @@ async def setitem( # We accept any ndarray like object from the user and convert it # to a NDBuffer (or subclass). 
From this point onwards, we only pass # Buffer and NDBuffer between components. - value = factory(value) + value_buffer = factory(value) # merging with existing data and encoding chunks await self.metadata.codec_pipeline.write( @@ -432,7 +432,7 @@ async def setitem( ) for chunk_coords, chunk_selection, out_selection in indexer ], - value, + value_buffer, ) async def resize( diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 84bf6b0bb0..e9aa1120f8 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -11,6 +11,7 @@ ) import numpy as np +import numpy.typing as npt if TYPE_CHECKING: from typing_extensions import Self @@ -20,8 +21,8 @@ # TODO: create a protocol for the attributes we need, for now we alias Numpy's ndarray # both for the array-like and ndarray-like -ArrayLike: TypeAlias = np.ndarray -NDArrayLike: TypeAlias = np.ndarray +ArrayLike: TypeAlias = npt.NDArray[Any] +NDArrayLike: TypeAlias = npt.NDArray[Any] def check_item_key_is_1d_contiguous(key: Any) -> None: @@ -40,7 +41,7 @@ def __call__( self, *, shape: Iterable[int], - dtype: np.DTypeLike, + dtype: npt.DTypeLike, order: Literal["C", "F"], fill_value: Any | None, ) -> NDBuffer: @@ -163,7 +164,7 @@ def as_array_like(self) -> NDArrayLike: """ return self._data - def as_nd_buffer(self, *, dtype: np.DTypeLike) -> NDBuffer: + def as_nd_buffer(self, *, dtype: npt.DTypeLike) -> NDBuffer: """Create a new NDBuffer from this one. This will never copy data. @@ -179,7 +180,7 @@ def as_nd_buffer(self, *, dtype: np.DTypeLike) -> NDBuffer: """ return NDBuffer.from_ndarray_like(self._data.view(dtype=dtype)) - def as_numpy_array(self) -> np.ndarray: + def as_numpy_array(self) -> npt.NDArray[Any]: """Return the buffer as a NumPy array (host memory). Warning @@ -271,7 +272,7 @@ def create( cls, *, shape: Iterable[int], - dtype: np.DTypeLike, + dtype: npt.DTypeLike, order: Literal["C", "F"] = "C", fill_value: Any | None = None, ) -> Self: @@ -298,7 +299,7 @@ def create( A subclass can overwrite this method to create a ndarray-like object other then the default Numpy array. """ - ret = cls(np.empty(shape=shape, dtype=dtype, order=order)) + ret = cls(np.empty(shape=tuple(shape), dtype=dtype, order=order)) if fill_value is not None: ret.fill(fill_value) return ret @@ -319,7 +320,7 @@ def from_ndarray_like(cls, ndarray_like: NDArrayLike) -> Self: return cls(ndarray_like) @classmethod - def from_numpy_array(cls, array_like: np.ArrayLike) -> Self: + def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self: """Create a new buffer of Numpy array-like object Parameters @@ -360,7 +361,7 @@ def as_buffer(self) -> Buffer: data = np.ascontiguousarray(self._data) return Buffer(data.reshape(-1).view(dtype="b")) # Flatten the array without copy - def as_numpy_array(self) -> np.ndarray: + def as_numpy_array(self) -> npt.NDArray[Any]: """Return the buffer as a NumPy array (host memory). 
Warning @@ -393,9 +394,9 @@ def byteorder(self) -> Endian: return Endian(sys.byteorder) def reshape(self, newshape: Iterable[int]) -> Self: - return self.__class__(self._data.reshape(newshape)) + return self.__class__(self._data.reshape(tuple(newshape))) - def astype(self, dtype: np.DTypeLike, order: Literal["K", "A", "C", "F"] = "K") -> Self: + def astype(self, dtype: npt.DTypeLike, order: Literal["K", "A", "C", "F"] = "K") -> Self: return self.__class__(self._data.astype(dtype=dtype, order=order)) def __getitem__(self, key: Any) -> Self: @@ -418,11 +419,11 @@ def fill(self, value: Any) -> None: def copy(self) -> Self: return self.__class__(self._data.copy()) - def transpose(self, *axes: np.SupportsIndex) -> Self: + def transpose(self, *axes: np.SupportsIndex) -> Self: # type: ignore[name-defined] return self.__class__(self._data.transpose(*axes)) -def as_numpy_array_wrapper(func: Callable[[np.ndarray], bytes], buf: Buffer) -> Buffer: +def as_numpy_array_wrapper(func: Callable[[npt.NDArray[Any]], bytes], buf: Buffer) -> Buffer: """Converts the input of `func` to a numpy array and the output back to `Buffer`. This function is useful when calling a `func` that only support host memory such diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index 2581157690..aebaf94e76 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -92,7 +92,9 @@ async def _encode_single( assert isinstance(chunk_array, NDBuffer) if chunk_array.dtype.itemsize > 1: if self.endian is not None and self.endian != chunk_array.byteorder: - new_dtype = chunk_array.dtype.newbyteorder(self.endian.name) + # type-ignore is a numpy bug + # see https://github.com/numpy/numpy/issues/26473 + new_dtype = chunk_array.dtype.newbyteorder(self.endian.name) # type: ignore[arg-type] chunk_array = chunk_array.astype(new_dtype) return chunk_array.as_buffer() diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index 11035f1f22..a68577be68 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -8,6 +8,7 @@ from typing import TYPE_CHECKING, NamedTuple import numpy as np +import numpy.typing as npt from zarr.abc.codec import ( ArrayBytesCodec, @@ -85,7 +86,7 @@ async def delete(self) -> None: class _ShardIndex(NamedTuple): # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) - offsets_and_lengths: np.ndarray + offsets_and_lengths: npt.NDArray[np.uint64] @property def chunks_per_shard(self) -> ChunkCoords: @@ -100,7 +101,7 @@ def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords: def is_all_empty(self) -> bool: return bool(np.array_equiv(self.offsets_and_lengths, MAX_UINT_64)) - def get_full_chunk_map(self) -> np.ndarray: + def get_full_chunk_map(self) -> npt.NDArray[np.bool_]: return self.offsets_and_lengths[..., 0] != MAX_UINT_64 def get_chunk_slice(self, chunk_coords: ChunkCoords) -> tuple[int, int] | None: diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index d53199d0a8..76e625ad6a 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -1,8 +1,9 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any +import numpy.typing as npt from zstandard import ZstdCompressor, ZstdDecompressor from zarr.abc.codec import BytesBytesCodec @@ -52,11 +53,11 @@ def from_dict(cls, data: dict[str, JSON]) -> Self: def to_dict(self) -> dict[str, JSON]: return {"name": "zstd", "configuration": {"level": self.level, "checksum": self.checksum}} - 
def _compress(self, data: bytes) -> bytes: + def _compress(self, data: npt.NDArray[Any]) -> bytes: ctx = ZstdCompressor(level=self.level, write_checksum=self.checksum) return ctx.compress(data) - def _decompress(self, data: bytes) -> bytes: + def _decompress(self, data: npt.NDArray[Any]) -> bytes: ctx = ZstdDecompressor() return ctx.decompress(data) diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 60d0022f94..64eb8632b9 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -58,10 +58,10 @@ def _put( if start is not None: with path.open("r+b") as f: f.seek(start) - f.write(value.as_numpy_array()) + f.write(value.as_numpy_array().tobytes()) return None else: - return path.write_bytes(value.as_numpy_array()) + return path.write_bytes(value.as_numpy_array().tobytes()) class LocalStore(Store): From 4da9505ce657513b7a092bae25407ebf0e476775 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Wed, 22 May 2024 16:17:45 +0200 Subject: [PATCH 0545/1078] remove fixture files from src (#1897) --- src/zarr/fixture/.zgroup | 3 --- src/zarr/fixture/flat/.zarray | 23 ----------------------- src/zarr/fixture/flat/0.0 | Bin 48 -> 0 bytes src/zarr/fixture/flat_legacy/.zarray | 22 ---------------------- src/zarr/fixture/flat_legacy/0.0 | Bin 48 -> 0 bytes src/zarr/fixture/meta/.zarray | 23 ----------------------- src/zarr/fixture/meta/0.0 | Bin 48 -> 0 bytes src/zarr/fixture/nested/.zarray | 23 ----------------------- src/zarr/fixture/nested/0/0 | Bin 48 -> 0 bytes src/zarr/fixture/nested_legacy/.zarray | 23 ----------------------- src/zarr/fixture/nested_legacy/0/0 | Bin 48 -> 0 bytes 11 files changed, 117 deletions(-) delete mode 100644 src/zarr/fixture/.zgroup delete mode 100644 src/zarr/fixture/flat/.zarray delete mode 100644 src/zarr/fixture/flat/0.0 delete mode 100644 src/zarr/fixture/flat_legacy/.zarray delete mode 100644 src/zarr/fixture/flat_legacy/0.0 delete mode 100644 src/zarr/fixture/meta/.zarray delete mode 100644 src/zarr/fixture/meta/0.0 delete mode 100644 src/zarr/fixture/nested/.zarray delete mode 100644 src/zarr/fixture/nested/0/0 delete mode 100644 src/zarr/fixture/nested_legacy/.zarray delete mode 100644 src/zarr/fixture/nested_legacy/0/0 diff --git a/src/zarr/fixture/.zgroup b/src/zarr/fixture/.zgroup deleted file mode 100644 index 3b7daf227c..0000000000 --- a/src/zarr/fixture/.zgroup +++ /dev/null @@ -1,3 +0,0 @@ -{ - "zarr_format": 2 -} \ No newline at end of file diff --git a/src/zarr/fixture/flat/.zarray b/src/zarr/fixture/flat/.zarray deleted file mode 100644 index d1acce7665..0000000000 --- a/src/zarr/fixture/flat/.zarray +++ /dev/null @@ -1,23 +0,0 @@ -{ - "chunks": [ - 2, - 2 - ], - "compressor": { - "blocksize": 0, - "clevel": 5, - "cname": "lz4", - "id": "blosc", - "shuffle": 1 - }, - "dimension_separator": ".", - "dtype": " Date: Wed, 22 May 2024 17:09:18 +0200 Subject: [PATCH 0546/1078] Protocols for `Buffer` and `NDBuffer` (#1899) --- src/zarr/array.py | 4 +- src/zarr/buffer.py | 135 ++++++++++++++++++++++---------------- src/zarr/codecs/bytes.py | 16 ++++- src/zarr/testing/store.py | 8 ++- src/zarr/testing/utils.py | 18 +++++ tests/v3/conftest.py | 9 +++ tests/v3/test_buffer.py | 15 +++-- tests/v3/test_codecs.py | 23 +++---- tests/v3/test_store.py | 5 +- 9 files changed, 151 insertions(+), 82 deletions(-) create mode 100644 src/zarr/testing/utils.py diff --git a/src/zarr/array.py b/src/zarr/array.py index 2828e25119..7da39c285e 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -582,12 +582,12 @@ def store_path(self) -> StorePath: 
def order(self) -> Literal["C", "F"]: return self._async_array.order - def __getitem__(self, selection: Selection) -> npt.NDArray[Any]: + def __getitem__(self, selection: Selection) -> NDArrayLike: return sync( self._async_array.getitem(selection), ) - def __setitem__(self, selection: Selection, value: npt.NDArray[Any]) -> None: + def __setitem__(self, selection: Selection, value: NDArrayLike) -> None: sync( self._async_array.setitem(selection, value), ) diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index e9aa1120f8..0f055093c1 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -1,28 +1,94 @@ from __future__ import annotations import sys -from collections.abc import Callable, Iterable +from collections.abc import Callable, Iterable, Sequence from typing import ( TYPE_CHECKING, Any, Literal, Protocol, - TypeAlias, + SupportsIndex, + runtime_checkable, ) import numpy as np import numpy.typing as npt +from zarr.common import ChunkCoords + if TYPE_CHECKING: from typing_extensions import Self from zarr.codecs.bytes import Endian from zarr.common import BytesLike -# TODO: create a protocol for the attributes we need, for now we alias Numpy's ndarray -# both for the array-like and ndarray-like -ArrayLike: TypeAlias = npt.NDArray[Any] -NDArrayLike: TypeAlias = npt.NDArray[Any] + +@runtime_checkable +class ArrayLike(Protocol): + """Protocol for the array-like type that underlies Buffer""" + + @property + def dtype(self) -> np.dtype[Any]: ... + + @property + def ndim(self) -> int: ... + + @property + def size(self) -> int: ... + + def __getitem__(self, key: slice) -> Self: ... + + def __setitem__(self, key: slice, value: Any) -> None: ... + + +@runtime_checkable +class NDArrayLike(Protocol): + """Protocol for the nd-array-like type that underlies NDBuffer""" + + @property + def dtype(self) -> np.dtype[Any]: ... + + @property + def ndim(self) -> int: ... + + @property + def size(self) -> int: ... + + @property + def shape(self) -> ChunkCoords: ... + + def __len__(self) -> int: ... + + def __getitem__(self, key: slice) -> Self: ... + + def __setitem__(self, key: slice, value: Any) -> None: ... + + def reshape(self, shape: ChunkCoords, *, order: Literal["A", "C", "F"] = ...) -> Self: ... + + def view(self, dtype: npt.DTypeLike) -> Self: ... + + def astype(self, dtype: npt.DTypeLike, order: Literal["K", "A", "C", "F"] = ...) -> Self: ... + + def fill(self, value: Any) -> None: ... + + def copy(self) -> Self: ... + + def transpose(self, axes: SupportsIndex | Sequence[SupportsIndex] | None) -> Self: ... + + def ravel(self, order: Literal["K", "A", "C", "F"] = "C") -> Self: ... + + def all(self) -> bool: ... + + def __eq__(self, other: Any) -> Self: # type: ignore + """Element-wise equal + + Notice + ------ + Type checkers such as mypy complain because the return type isn't a bool like + its supertype "object", which violates the Liskov substitution principle. + This is true, but since NumPy's ndarray is defined as an element-wise equal, + our hands are tied.
+ """ def check_item_key_is_1d_contiguous(key: Any) -> None: @@ -124,7 +190,7 @@ def create_zero_length(cls) -> Self: return cls(np.array([], dtype="b")) @classmethod - def from_array_like(cls, array_like: NDArrayLike) -> Self: + def from_array_like(cls, array_like: ArrayLike) -> Self: """Create a new buffer of an array-like object Parameters @@ -153,7 +219,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self: """ return cls.from_array_like(np.frombuffer(bytes_like, dtype="b")) - def as_array_like(self) -> NDArrayLike: + def as_array_like(self) -> ArrayLike: """Return the underlying array (host or device memory) of this buffer This will never copy data. @@ -164,22 +230,6 @@ def as_array_like(self) -> ArrayLike: """ return self._data - def as_nd_buffer(self, *, dtype: npt.DTypeLike) -> NDBuffer: - """Create a new NDBuffer from this one. - - This will never copy data. - - Parameters - ---------- - dtype - The datatype of the returned buffer (reinterpretation of the bytes) - - Return - ------ - New NDbuffer representing `self.as_array_like()` - """ - return NDBuffer.from_ndarray_like(self._data.view(dtype=dtype)) - def as_numpy_array(self) -> npt.NDArray[Any]: """Return the buffer as a NumPy array (host memory). @@ -223,17 +273,8 @@ def __add__(self, other: Buffer) -> Self: other_array = other.as_array_like() assert other_array.dtype == np.dtype("b") - return self.__class__(np.concatenate((self._data, other_array))) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, bytes | bytearray): - # Many of the tests compares `Buffer` with `bytes` so we - # convert the bytes to a Buffer and try again - return self == self.from_bytes(other) - if isinstance(other, Buffer): - return (self._data == other.as_array_like()).all() - raise ValueError( - f"equal operator not supported between {self.__class__} and {other.__class__}" + return self.__class__( + np.concatenate((np.asanyarray(self._data), np.asanyarray(other_array))) )
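A brief usage sketch of the new protocols (relying only on what this patch introduces, and mirroring the test_nd_array_like test below): because ArrayLike and NDArrayLike are runtime-checkable, a duck-typed array satisfies isinstance checks without subclassing, and Buffer can wrap it directly.

    import numpy as np

    from zarr.buffer import ArrayLike, Buffer, NDArrayLike

    ary = np.arange(4, dtype="b")
    assert isinstance(ary, ArrayLike)  # protocol check, no inheritance needed
    assert isinstance(ary, NDArrayLike)

    buf = Buffer.from_array_like(ary)  # wrap the array as a Buffer
    assert buf.to_bytes() == b"\x00\x01\x02\x03"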
@@ -393,8 +418,8 @@ def byteorder(self) -> Endian: else: return Endian(sys.byteorder) - def reshape(self, newshape: Iterable[int]) -> Self: - return self.__class__(self._data.reshape(tuple(newshape))) + def reshape(self, newshape: ChunkCoords) -> Self: + return self.__class__(self._data.reshape(newshape)) def astype(self, dtype: npt.DTypeLike, order: Literal["K", "A", "C", "F"] = "K") -> Self: return self.__class__(self._data.astype(dtype=dtype, order=order)) @@ -419,8 +444,8 @@ def fill(self, value: Any) -> None: def copy(self) -> Self: return self.__class__(self._data.copy()) - def transpose(self, *axes: np.SupportsIndex) -> Self: # type: ignore[name-defined] - return self.__class__(self._data.transpose(*axes)) + def transpose(self, axes: SupportsIndex | Sequence[SupportsIndex] | None) -> Self: + return self.__class__(self._data.transpose(axes)) def as_numpy_array_wrapper(func: Callable[[npt.NDArray[Any]], bytes], buf: Buffer) -> Buffer: diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index aebaf94e76..f275ae37d1 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -8,7 +8,7 @@ import numpy as np from zarr.abc.codec import ArrayBytesCodec -from zarr.buffer import Buffer, NDBuffer +from zarr.buffer import Buffer, NDArrayLike, NDBuffer from zarr.codecs.registry import register_codec from zarr.common import parse_enum, parse_named_configuration @@ -75,7 +75,13 @@ async def _decode_single( dtype = np.dtype(f"{prefix}{chunk_spec.dtype.str[1:]}") else: dtype = np.dtype(f"|{chunk_spec.dtype.str[1:]}") - chunk_array = chunk_bytes.as_nd_buffer(dtype=dtype) + + as_array_like = chunk_bytes.as_array_like() + if isinstance(as_array_like, NDArrayLike): + as_nd_array_like = as_array_like + else: + as_nd_array_like = np.asanyarray(as_array_like) + chunk_array = NDBuffer.from_ndarray_like(as_nd_array_like.view(dtype=dtype)) # ensure correct chunk shape if chunk_array.shape != chunk_spec.shape: @@ -96,7 +102,11 @@ async def _encode_single( # see https://github.com/numpy/numpy/issues/26473 new_dtype = chunk_array.dtype.newbyteorder(self.endian.name) # type: ignore[arg-type] chunk_array = chunk_array.astype(new_dtype) - return chunk_array.as_buffer() + + as_nd_array_like = chunk_array.as_ndarray_like() + # Flatten the nd-array (only copy if needed) + as_nd_array_like = as_nd_array_like.ravel().view(dtype="b") + return Buffer.from_array_like(as_nd_array_like) def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index 99f8021594..1e6fe09a9f 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -2,6 +2,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer +from zarr.testing.utils import assert_bytes_equal class StoreTests: @@ -27,7 +28,7 @@ def test_store_capabilities(self, store: Store) -> None: @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) async def test_set_get_bytes_roundtrip(self, store: Store, key: str, data: bytes) -> None: await store.set(key, Buffer.from_bytes(data)) - assert await store.get(key) == data + assert_bytes_equal(await store.get(key), data) @pytest.mark.parametrize("key", ["foo/c/0"]) @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) @@ -36,11 +37,12 @@ async def test_get_partial_values(self, store: Store, key: str, data: bytes) -> await store.set(key, Buffer.from_bytes(data)) # read back just part of it vals = await store.get_partial_values([(key, (0, 2))]) - assert 
vals == [data[0:2]] + assert_bytes_equal(vals[0], data[0:2]) # read back multiple parts of it at once vals = await store.get_partial_values([(key, (0, 2)), (key, (2, 4))]) - assert vals == [data[0:2], data[2:4]] + assert_bytes_equal(vals[0], data[0:2]) + assert_bytes_equal(vals[1], data[2:4]) async def test_exists(self, store: Store) -> None: assert not await store.exists("foo") diff --git a/src/zarr/testing/utils.py b/src/zarr/testing/utils.py new file mode 100644 index 0000000000..04b05d1b1c --- /dev/null +++ b/src/zarr/testing/utils.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from zarr.buffer import Buffer +from zarr.common import BytesLike + + +def assert_bytes_equal(b1: Buffer | BytesLike | None, b2: Buffer | BytesLike | None) -> None: + """Helper function to assert if two bytes-like or Buffers are equal + + Warning + ------- + Always copies data, only use for testing and debugging + """ + if isinstance(b1, Buffer): + b1 = b1.to_bytes() + if isinstance(b2, Buffer): + b2 = b2.to_bytes() + assert b1 == b2 diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py index b9f56014bc..21dc58197e 100644 --- a/tests/v3/conftest.py +++ b/tests/v3/conftest.py @@ -1,5 +1,7 @@ from __future__ import annotations +from collections.abc import Iterator +from types import ModuleType from typing import TYPE_CHECKING from zarr.common import ZarrFormat @@ -81,3 +83,10 @@ async def async_group(request: pytest.FixtureRequest, tmpdir) -> AsyncGroup: exists_ok=False, ) return agroup + + +@pytest.fixture(params=["numpy", "cupy"]) +def xp(request: pytest.FixtureRequest) -> Iterator[ModuleType]: + """Fixture to parametrize over numpy-like libraries""" + + yield pytest.importorskip(request.param) diff --git a/tests/v3/test_buffer.py b/tests/v3/test_buffer.py index 4ab92768b4..2f58d116fe 100644 --- a/tests/v3/test_buffer.py +++ b/tests/v3/test_buffer.py @@ -8,9 +8,7 @@ import pytest from zarr.array import AsyncArray -from zarr.buffer import NDBuffer -from zarr.store.core import StorePath -from zarr.store.memory import MemoryStore +from zarr.buffer import ArrayLike, NDArrayLike, NDBuffer if TYPE_CHECKING: from typing_extensions import Self @@ -41,12 +39,17 @@ def create( return ret +def test_nd_array_like(xp): + ary = xp.arange(10) + assert isinstance(ary, ArrayLike) + assert isinstance(ary, NDArrayLike) + + @pytest.mark.asyncio -async def test_async_array_factory(): - store = StorePath(MemoryStore()) +async def test_async_array_factory(store_path): expect = np.zeros((9, 9), dtype="uint16", order="F") a = await AsyncArray.create( - store / "test_async_array", + store_path, shape=expect.shape, chunk_shape=(5, 5), dtype=expect.dtype, diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index 5f94114ede..a595b12494 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -25,6 +25,7 @@ from zarr.config import config from zarr.indexing import morton_order_iter from zarr.store import MemoryStore, StorePath +from zarr.testing.utils import assert_bytes_equal @dataclass(frozen=True) @@ -294,7 +295,7 @@ async def test_order( fill_value=1, ) z[:, :] = data - assert (await (store / "order/0.0").get()) == z._store["0.0"] + assert_bytes_equal(await (store / "order/0.0").get(), z._store["0.0"]) @pytest.mark.parametrize("input_order", ["F", "C"]) @@ -665,10 +666,10 @@ async def test_zarr_compat(store: Store): assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get()) assert np.array_equal(data, z2[:16, :18]) - assert z2._store["0.0"] == await (store / "zarr_compat3/0.0").get() -
assert z2._store["0.1"] == await (store / "zarr_compat3/0.1").get() - assert z2._store["1.0"] == await (store / "zarr_compat3/1.0").get() - assert z2._store["1.1"] == await (store / "zarr_compat3/1.1").get() + assert_bytes_equal(z2._store["0.0"], await (store / "zarr_compat3/0.0").get()) + assert_bytes_equal(z2._store["0.1"], await (store / "zarr_compat3/0.1").get()) + assert_bytes_equal(z2._store["1.0"], await (store / "zarr_compat3/1.0").get()) + assert_bytes_equal(z2._store["1.1"], await (store / "zarr_compat3/1.1").get()) async def test_zarr_compat_F(store: Store): @@ -698,10 +699,10 @@ async def test_zarr_compat_F(store: Store): assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get()) assert np.array_equal(data, z2[:16, :18]) - assert z2._store["0.0"] == await (store / "zarr_compatF3/0.0").get() - assert z2._store["0.1"] == await (store / "zarr_compatF3/0.1").get() - assert z2._store["1.0"] == await (store / "zarr_compatF3/1.0").get() - assert z2._store["1.1"] == await (store / "zarr_compatF3/1.1").get() + assert_bytes_equal(z2._store["0.0"], await (store / "zarr_compatF3/0.0").get()) + assert_bytes_equal(z2._store["0.1"], await (store / "zarr_compatF3/0.1").get()) + assert_bytes_equal(z2._store["1.0"], await (store / "zarr_compatF3/1.0").get()) + assert_bytes_equal(z2._store["1.1"], await (store / "zarr_compatF3/1.1").get()) async def test_dimension_names(store: Store): @@ -795,7 +796,7 @@ async def test_endian(store: Store, endian: Literal["big", "little"]): fill_value=1, ) z[:, :] = data - assert await (store / "endian/0.0").get() == z._store["0.0"] + assert_bytes_equal(await (store / "endian/0.0").get(), z._store["0.0"]) @pytest.mark.parametrize("dtype_input_endian", [">u2", " Date: Sun, 26 May 2024 14:54:33 -0400 Subject: [PATCH 0547/1078] Add zstd to old V3 supported codecs (#1914) * add zstd to old V3 supported codecs * get to full test coverage and add release note * fix pre-commit --- docs/release.rst | 3 +++ zarr/codecs.py | 2 +- zarr/meta.py | 4 ++++ zarr/tests/test_meta.py | 23 +++++++++++++++++------ 4 files changed, 25 insertions(+), 7 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 59051bbf97..48a6c9ca2d 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -32,6 +32,9 @@ Docs Maintenance ~~~~~~~~~~~ +* Add Zstd codec to old V3 code path. 
+ By :user:`Ryan Abernathey ` + Deprecations ~~~~~~~~~~~~ diff --git a/zarr/codecs.py b/zarr/codecs.py index 4ad68b8627..6fd5e20401 100644 --- a/zarr/codecs.py +++ b/zarr/codecs.py @@ -1,4 +1,4 @@ # flake8: noqa from numcodecs import * -from numcodecs import get_codec, Blosc, Pickle, Zlib, Delta, AsType, BZ2 +from numcodecs import get_codec, Blosc, Pickle, Zlib, Zstd, Delta, AsType, BZ2 from numcodecs.registry import codec_registry diff --git a/zarr/meta.py b/zarr/meta.py index 5430ab305d..44a2b7ebec 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -414,6 +414,8 @@ def _encode_codec_metadata(cls, codec: Codec) -> Optional[Mapping]: uri = uri + "lz4/1.0" elif isinstance(codec, numcodecs.LZMA): uri = uri + "lzma/1.0" + elif isinstance(codec, numcodecs.Zstd): + uri = uri + "zstd/1.0" meta = { "codec": uri, "configuration": config, @@ -439,6 +441,8 @@ def _decode_codec_metadata(cls, meta: Optional[Mapping]) -> Optional[Codec]: conf["id"] = "lz4" elif meta["codec"].startswith(uri + "lzma/"): conf["id"] = "lzma" + elif meta["codec"].startswith(uri + "zstd/"): + conf["id"] = "zstd" else: raise NotImplementedError diff --git a/zarr/tests/test_meta.py b/zarr/tests/test_meta.py index f9010d6788..7b7d526476 100644 --- a/zarr/tests/test_meta.py +++ b/zarr/tests/test_meta.py @@ -5,7 +5,7 @@ import numpy as np import pytest -from zarr.codecs import Blosc, Delta, Pickle, Zlib +from zarr.codecs import Blosc, Delta, Pickle, Zlib, Zstd from zarr.errors import MetadataError from zarr.meta import ( ZARR_FORMAT, @@ -268,17 +268,23 @@ def test_encode_decode_array_dtype_shape(): assert meta_dec["filters"] is None -def test_encode_decode_array_dtype_shape_v3(): +@pytest.mark.parametrize("cname", ["zlib", "zstd"]) +def test_encode_decode_array_dtype_shape_v3(cname): + if cname == "zlib": + compressor = Zlib(1) + elif cname == "zstd": + compressor = Zstd(1) meta = dict( shape=(100,), chunk_grid=dict(type="regular", chunk_shape=(10,), separator=("/")), data_type=np.dtype("(10, 10) Date: Sun, 26 May 2024 12:20:32 -0700 Subject: [PATCH 0548/1078] doc: update release notes for 2.18.2 (#1915) --- docs/release.rst | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 48a6c9ca2d..6c7ba5139b 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,26 +18,14 @@ Release notes See `GH1777 `_ for more details on the upcoming 3.0 release. -.. _unreleased: - -Unreleased ----------- +.. _release_2.18.2: Enhancements ~~~~~~~~~~~~ -Docs -~~~~ - -Maintenance -~~~~~~~~~~~ - * Add Zstd codec to old V3 code path. By :user:`Ryan Abernathey ` -Deprecations -~~~~~~~~~~~~ - .. _release_2.18.1: 2.18.1 From fc7fa4f30dc376a228cce08a7078378a1fe946b5 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Tue, 28 May 2024 23:52:30 +0200 Subject: [PATCH 0549/1078] [V3] Expand store tests (#1900) * Fill in some test methods with NotImplementedError to force implementations to implement them; make StoreTests generic w.r.t. 
the store class being tested; update store.get abc to match actual type signature * remove auto_mkdir from LocalStore; add set and get methods to StoreTests class * fix: use from_bytes method on buffer * fix: use Buffer instead of bytes for store tests * docstrings, add some Nones to test_get_partial_values; normalize function signatures --- src/zarr/abc/store.py | 4 +- src/zarr/store/core.py | 24 ++ src/zarr/store/local.py | 16 +- src/zarr/store/memory.py | 10 +- src/zarr/store/remote.py | 2 +- src/zarr/testing/store.py | 125 ++++-- tests/v3/test_store.py | 816 ++------------------------------------ 7 files changed, 179 insertions(+), 818 deletions(-) diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index fee5422e9e..7087706b33 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -9,7 +9,7 @@ class Store(ABC): @abstractmethod async def get( - self, key: str, byte_range: tuple[int, int | None] | None = None + self, key: str, byte_range: tuple[int | None, int | None] | None = None ) -> Buffer | None: """Retrieve the value associated with a given key. @@ -26,7 +26,7 @@ async def get( @abstractmethod async def get_partial_values( - self, key_ranges: list[tuple[str, tuple[int, int]]] + self, key_ranges: list[tuple[str, tuple[int | None, int | None]]] ) -> list[Buffer | None]: """Retrieve possibly partial values from given key_ranges. diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index 31cce65095..4e7a7fcca1 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -68,3 +68,27 @@ def make_store_path(store_like: StoreLike) -> StorePath: elif isinstance(store_like, str): return StorePath(LocalStore(Path(store_like))) raise TypeError + + +def _normalize_interval_index( + data: Buffer, interval: None | tuple[int | None, int | None] +) -> tuple[int, int]: + """ + Convert an implicit interval into an explicit start and length + """ + if interval is None: + start = 0 + length = len(data) + else: + maybe_start, maybe_len = interval + if maybe_start is None: + start = 0 + else: + start = maybe_start + + if maybe_len is None: + length = len(data) - start + else: + length = maybe_len + + return (start, length) diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 64eb8632b9..50fe9701fc 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -10,7 +10,7 @@ from zarr.common import concurrent_map, to_thread -def _get(path: Path, byte_range: tuple[int, int | None] | None) -> Buffer: +def _get(path: Path, byte_range: tuple[int | None, int | None] | None) -> Buffer: """ Fetch a contiguous region of bytes from a file. 
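A worked example of the interval normalization added in store/core.py above (hypothetical values; _normalize_interval_index is a private helper, so this is for illustration only):

    from zarr.buffer import Buffer
    from zarr.store.core import _normalize_interval_index

    data = Buffer.from_bytes(b"\x00\x01\x02\x03")

    assert _normalize_interval_index(data, None) == (0, 4)       # whole value
    assert _normalize_interval_index(data, (1, None)) == (1, 3)  # from offset 1 to the end
    assert _normalize_interval_index(data, (None, 2)) == (0, 2)  # first two bytes
    assert _normalize_interval_index(data, (1, 2)) == (1, 2)     # explicit start and length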
@@ -51,10 +51,8 @@ def _put( path: Path, value: Buffer, start: int | None = None, - auto_mkdir: bool = True, ) -> int | None: - if auto_mkdir: - path.parent.mkdir(parents=True, exist_ok=True) + path.parent.mkdir(parents=True, exist_ok=True) if start is not None: with path.open("r+b") as f: f.seek(start) @@ -70,15 +68,13 @@ class LocalStore(Store): supports_listing: bool = True root: Path - auto_mkdir: bool - def __init__(self, root: Path | str, auto_mkdir: bool = True): + def __init__(self, root: Path | str): if isinstance(root, str): root = Path(root) assert isinstance(root, Path) self.root = root - self.auto_mkdir = auto_mkdir def __str__(self) -> str: return f"file://{self.root}" @@ -90,7 +86,7 @@ def __eq__(self, other: object) -> bool: return isinstance(other, type(self)) and self.root == other.root async def get( - self, key: str, byte_range: tuple[int, int | None] | None = None + self, key: str, byte_range: tuple[int | None, int | None] | None = None ) -> Buffer | None: assert isinstance(key, str) path = self.root / key @@ -101,7 +97,7 @@ async def get( return None async def get_partial_values( - self, key_ranges: list[tuple[str, tuple[int, int]]] + self, key_ranges: list[tuple[str, tuple[int | None, int | None]]] ) -> list[Buffer | None]: """ Read byte ranges from multiple keys. @@ -128,7 +124,7 @@ async def set(self, key: str, value: Buffer) -> None: if not isinstance(value, Buffer): raise TypeError("LocalStore.set(): `value` must a Buffer instance") path = self.root / key - await to_thread(_put, path, value, auto_mkdir=self.auto_mkdir) + await to_thread(_put, path, value) async def set_partial_values(self, key_start_values: list[tuple[str, int, bytes]]) -> None: args = [] diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index c6e838417e..5e438919cf 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -5,6 +5,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer from zarr.common import concurrent_map +from zarr.store.core import _normalize_interval_index # TODO: this store could easily be extended to wrap any MutableMapping store from v2 @@ -26,19 +27,18 @@ def __repr__(self) -> str: return f"MemoryStore({str(self)!r})" async def get( - self, key: str, byte_range: tuple[int, int | None] | None = None + self, key: str, byte_range: tuple[int | None, int | None] | None = None ) -> Buffer | None: assert isinstance(key, str) try: value = self._store_dict[key] - if byte_range is not None: - value = value[byte_range[0] : byte_range[1]] - return value + start, length = _normalize_interval_index(value, byte_range) + return value[start : start + length] except KeyError: return None async def get_partial_values( - self, key_ranges: list[tuple[str, tuple[int, int]]] + self, key_ranges: list[tuple[str, tuple[int | None, int | None]]] ) -> list[Buffer | None]: vals = await concurrent_map(key_ranges, self.get, limit=None) return vals diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 8058c61035..a3395459fd 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -49,7 +49,7 @@ def _make_fs(self) -> tuple[AsyncFileSystem, str]: return fs, root async def get( - self, key: str, byte_range: tuple[int, int | None] | None = None + self, key: str, byte_range: tuple[int | None, int | None] | None = None ) -> Buffer | None: assert isinstance(key, str) fs, root = self._make_fs() diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index 1e6fe09a9f..1c0ed93734 100644 --- a/src/zarr/testing/store.py +++ 
b/src/zarr/testing/store.py @@ -1,61 +1,130 @@ +from typing import Generic, TypeVar + import pytest from zarr.abc.store import Store from zarr.buffer import Buffer +from zarr.store.core import _normalize_interval_index from zarr.testing.utils import assert_bytes_equal +S = TypeVar("S", bound=Store) + + +class StoreTests(Generic[S]): + store_cls: type[S] -class StoreTests: - store_cls: type[Store] + def set(self, store: S, key: str, value: Buffer) -> None: + """ + Insert a value into a storage backend, with a specific key. + This should not use any store methods. Bypassing the store methods allows them to be + tested. + """ + raise NotImplementedError + + def get(self, store: S, key: str) -> Buffer: + """ + Retrieve a value from a storage backend, by key. + This should not use any store methods. Bypassing the store methods allows them to be + tested. + """ + + raise NotImplementedError @pytest.fixture(scope="function") def store(self) -> Store: return self.store_cls() - def test_store_type(self, store: Store) -> None: + def test_store_type(self, store: S) -> None: assert isinstance(store, Store) assert isinstance(store, self.store_cls) - def test_store_repr(self, store: Store) -> None: - assert repr(store) + def test_store_repr(self, store: S) -> None: + raise NotImplementedError + + def test_store_supports_writes(self, store: S) -> None: + raise NotImplementedError - def test_store_capabilities(self, store: Store) -> None: - assert store.supports_writes - assert store.supports_partial_writes - assert store.supports_listing + def test_store_supports_partial_writes(self, store: S) -> None: + raise NotImplementedError + + def test_store_supports_listing(self, store: S) -> None: + raise NotImplementedError @pytest.mark.parametrize("key", ["c/0", "foo/c/0.0", "foo/0/0"]) @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) - async def test_set_get_bytes_roundtrip(self, store: Store, key: str, data: bytes) -> None: - await store.set(key, Buffer.from_bytes(data)) - assert_bytes_equal(await store.get(key), data) - - @pytest.mark.parametrize("key", ["foo/c/0"]) + @pytest.mark.parametrize("byte_range", (None, (0, None), (1, None), (1, 2), (None, 1))) + async def test_get( + self, store: S, key: str, data: bytes, byte_range: None | tuple[int | None, int | None] + ) -> None: + """ + Ensure that data can be read from the store using the store.get method. + """ + data_buf = Buffer.from_bytes(data) + self.set(store, key, data_buf) + observed = await store.get(key, byte_range=byte_range) + start, length = _normalize_interval_index(data_buf, interval=byte_range) + expected = data_buf[start : start + length] + assert_bytes_equal(observed, expected) + + @pytest.mark.parametrize("key", ["zarr.json", "c/0", "foo/c/0.0", "foo/0/0"]) @pytest.mark.parametrize("data", [b"\x01\x02\x03\x04", b""]) - async def test_get_partial_values(self, store: Store, key: str, data: bytes) -> None: + async def test_set(self, store: S, key: str, data: bytes) -> None: + """ + Ensure that data can be written to the store using the store.set method.
+ """ + data_buf = Buffer.from_bytes(data) + await store.set(key, data_buf) + observed = self.get(store, key) + assert_bytes_equal(observed, data_buf) + + @pytest.mark.parametrize( + "key_ranges", + ( + [], + [("zarr.json", (0, 1))], + [("c/0", (0, 1)), ("zarr.json", (0, None))], + [("c/0/0", (0, 1)), ("c/0/1", (None, 2)), ("c/0/2", (0, 3))], + ), + ) + async def test_get_partial_values( + self, store: S, key_ranges: list[tuple[str, tuple[int | None, int | None]]] + ) -> None: # put all of the data - await store.set(key, Buffer.from_bytes(data)) + for key, _ in key_ranges: + self.set(store, key, Buffer.from_bytes(bytes(key, encoding="utf-8"))) + # read back just part of it - vals = await store.get_partial_values([(key, (0, 2))]) - assert_bytes_equal(vals[0], data[0:2]) + observed_maybe = await store.get_partial_values(key_ranges=key_ranges) + + observed: list[Buffer] = [] + expected: list[Buffer] = [] + + for obs in observed_maybe: + assert obs is not None + observed.append(obs) + + for idx in range(len(observed)): + key, byte_range = key_ranges[idx] + result = await store.get(key, byte_range=byte_range) + assert result is not None + expected.append(result) - # read back multiple parts of it at once - vals = await store.get_partial_values([(key, (0, 2)), (key, (2, 4))]) - assert_bytes_equal(vals[0], data[0:2]) - assert_bytes_equal(vals[1], data[2:4]) + assert all( + obs.to_bytes() == exp.to_bytes() for obs, exp in zip(observed, expected, strict=True) + ) - async def test_exists(self, store: Store) -> None: + async def test_exists(self, store: S) -> None: assert not await store.exists("foo") await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) assert await store.exists("foo/zarr.json") - async def test_delete(self, store: Store) -> None: + async def test_delete(self, store: S) -> None: await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) assert await store.exists("foo/zarr.json") await store.delete("foo/zarr.json") assert not await store.exists("foo/zarr.json") - async def test_list(self, store: Store) -> None: + async def test_list(self, store: S) -> None: assert [k async for k in store.list()] == [] await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) keys = [k async for k in store.list()] @@ -69,11 +138,11 @@ async def test_list(self, store: Store) -> None: f"foo/c/{i}", Buffer.from_bytes(i.to_bytes(length=3, byteorder="little")) ) - async def test_list_prefix(self, store: Store) -> None: + async def test_list_prefix(self, store: S) -> None: # TODO: we currently don't use list_prefix anywhere - pass + raise NotImplementedError - async def test_list_dir(self, store: Store) -> None: + async def test_list_dir(self, store: S) -> None: assert [k async for k in store.list_dir("")] == [] assert [k async for k in store.list_dir("foo")] == [] await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) diff --git a/tests/v3/test_store.py b/tests/v3/test_store.py index 9bdf2f5a8f..75438f8612 100644 --- a/tests/v3/test_store.py +++ b/tests/v3/test_store.py @@ -1,800 +1,72 @@ from __future__ import annotations -from pathlib import Path +from collections.abc import MutableMapping import pytest +from zarr.buffer import Buffer from zarr.store.local import LocalStore from zarr.store.memory import MemoryStore from zarr.testing.store import StoreTests -from zarr.testing.utils import assert_bytes_equal -@pytest.mark.parametrize("auto_mkdir", (True, False)) -def test_local_store_init(tmpdir, auto_mkdir: bool) -> None: - tmpdir_str = str(tmpdir) - tmpdir_path = Path(tmpdir_str) - store = 
LocalStore(root=tmpdir_str, auto_mkdir=auto_mkdir) - - assert store.root == tmpdir_path - assert store.auto_mkdir == auto_mkdir - - # ensure that str and pathlib.Path get normalized to the same output - assert store == LocalStore(root=tmpdir_path, auto_mkdir=auto_mkdir) - - store_str = f"file://{tmpdir_str}" - assert str(store) == store_str - assert repr(store) == f"LocalStore({store_str!r})" - - -@pytest.mark.parametrize("byte_range", (None, (0, None), (1, None), (1, 2), (None, 1))) -async def test_local_store_get( - local_store, byte_range: None | tuple[int | None, int | None] -) -> None: - payload = b"\x01\x02\x03\x04" - object_name = "foo" - (local_store.root / object_name).write_bytes(payload) - observed = await local_store.get(object_name, byte_range=byte_range) - - if byte_range is None: - start = 0 - length = len(payload) - else: - maybe_start, maybe_len = byte_range - if maybe_start is None: - start = 0 - else: - start = maybe_start - - if maybe_len is None: - length = len(payload) - start - else: - length = maybe_len - - expected = payload[start : start + length] - assert_bytes_equal(observed, expected) - - # test that getting from a file that doesn't exist returns None - assert await local_store.get(object_name + "_absent", byte_range=byte_range) is None - - -@pytest.mark.parametrize( - "key_ranges", - ( - [], - [("key_0", (0, 1))], - [("dir/key_0", (0, 1)), ("key_1", (0, 2))], - [("key_0", (0, 1)), ("key_1", (0, 2)), ("key_1", (0, 2))], - ), -) -async def test_local_store_get_partial( - tmpdir, key_ranges: tuple[list[tuple[str, tuple[int, int]]]] -) -> None: - store = LocalStore(str(tmpdir), auto_mkdir=True) - # use the utf-8 encoding of the key as the bytes - for key, _ in key_ranges: - payload = bytes(key, encoding="utf-8") - target_path: Path = store.root / key - # create the parent directories - target_path.parent.mkdir(parents=True, exist_ok=True) - # write bytes - target_path.write_bytes(payload) - - results = await store.get_partial_values(key_ranges) - for idx, observed in enumerate(results): - key, byte_range = key_ranges[idx] - expected = await store.get(key, byte_range=byte_range) - assert_bytes_equal(observed, expected) - - -@pytest.mark.parametrize("path", ("foo", "foo/bar")) -@pytest.mark.parametrize("auto_mkdir", (True, False)) -async def test_local_store_set(tmpdir, path: str, auto_mkdir: bool) -> None: - store = LocalStore(str(tmpdir), auto_mkdir=auto_mkdir) - payload = b"\x01\x02\x03\x04" - - if "/" in path and not auto_mkdir: - with pytest.raises(FileNotFoundError): - await store.set(path, payload) - else: - x = await store.set(path, payload) - - # this method should not return anything - assert x is None - - assert (store.root / path).read_bytes() == payload - - -# import zarr -# from zarr._storage.store import _get_hierarchy_metadata, v3_api_available, StorageTransformer -# from zarr._storage.v3_storage_transformers import ( -# DummyStorageTransfomer, -# ShardingStorageTransformer, -# v3_sharding_available, -# ) -# from zarr.core import Array -# from zarr.meta import _default_entry_point_metadata_v3 -# from zarr.storage import ( -# atexit_rmglob, -# atexit_rmtree, -# data_root, -# default_compressor, -# getsize, -# init_array, -# meta_root, -# normalize_store_arg, -# ) -# from zarr._storage.v3 import ( -# ABSStoreV3, -# ConsolidatedMetadataStoreV3, -# DBMStoreV3, -# DirectoryStoreV3, -# FSStoreV3, -# KVStore, -# KVStoreV3, -# LMDBStoreV3, -# LRUStoreCacheV3, -# MemoryStoreV3, -# MongoDBStoreV3, -# RedisStoreV3, -# SQLiteStoreV3, -# StoreV3, -# ZipStoreV3, 
-# ) -# from .util import CountingDictV3, have_fsspec, skip_test_env_var, mktemp - -# # pytest will fail to run if the following fixtures aren't imported here -# from .test_storage import StoreTests as _StoreTests -# from .test_storage import TestABSStore as _TestABSStore -# from .test_storage import TestConsolidatedMetadataStore as _TestConsolidatedMetadataStore -# from .test_storage import TestDBMStore as _TestDBMStore -# from .test_storage import TestDBMStoreBerkeleyDB as _TestDBMStoreBerkeleyDB -# from .test_storage import TestDBMStoreDumb as _TestDBMStoreDumb -# from .test_storage import TestDBMStoreGnu as _TestDBMStoreGnu -# from .test_storage import TestDBMStoreNDBM as _TestDBMStoreNDBM -# from .test_storage import TestDirectoryStore as _TestDirectoryStore -# from .test_storage import TestFSStore as _TestFSStore -# from .test_storage import TestLMDBStore as _TestLMDBStore -# from .test_storage import TestLRUStoreCache as _TestLRUStoreCache -# from .test_storage import TestMemoryStore as _TestMemoryStore -# from .test_storage import TestSQLiteStore as _TestSQLiteStore -# from .test_storage import TestSQLiteStoreInMemory as _TestSQLiteStoreInMemory -# from .test_storage import TestZipStore as _TestZipStore -# from .test_storage import dimension_separator_fixture, s3, skip_if_nested_chunks - - -# pytestmark = pytest.mark.skipif(not v3_api_available, reason="v3 api is not available") - - -# @pytest.fixture( -# params=[ -# (None, "/"), -# (".", "."), -# ("/", "/"), -# ] -# ) -# def dimension_separator_fixture_v3(request): -# return request.param - - -# class DummyStore: -# # contains all methods expected of Mutable Mapping - -# def keys(self): -# """keys""" - -# def values(self): -# """values""" - -# def get(self, value, default=None): -# """get""" - -# def __setitem__(self, key, value): -# """__setitem__""" - -# def __getitem__(self, key): -# """__getitem__""" - -# def __delitem__(self, key): -# """__delitem__""" - -# def __contains__(self, key): -# """__contains__""" - - -# class InvalidDummyStore: -# # does not contain expected methods of a MutableMapping - -# def keys(self): -# """keys""" - - -# def test_ensure_store_v3(): -# class InvalidStore: -# pass - -# with pytest.raises(ValueError): -# StoreV3._ensure_store(InvalidStore()) - -# # cannot initialize with a store from a different Zarr version -# with pytest.raises(ValueError): -# StoreV3._ensure_store(KVStore(dict())) - -# assert StoreV3._ensure_store(None) is None - -# # class with all methods of a MutableMapping will become a KVStoreV3 -# assert isinstance(StoreV3._ensure_store(DummyStore), KVStoreV3) - -# with pytest.raises(ValueError): -# # does not have the methods expected of a MutableMapping -# StoreV3._ensure_store(InvalidDummyStore) - - -# def test_valid_key(): -# store = KVStoreV3(dict) - -# # only ascii keys are valid -# assert not store._valid_key(5) -# assert not store._valid_key(2.8) - -# for key in store._valid_key_characters: -# assert store._valid_key(key) - -# # other characters not in store._valid_key_characters are not allowed -# assert not store._valid_key("*") -# assert not store._valid_key("~") -# assert not store._valid_key("^") - - -# def test_validate_key(): -# store = KVStoreV3(dict) - -# # zarr.json is a valid key -# store._validate_key("zarr.json") -# # but other keys not starting with meta/ or data/ are not -# with pytest.raises(ValueError): -# store._validate_key("zar.json") - -# # valid ascii keys -# for valid in [ -# meta_root + "arr1.array.json", -# data_root + "arr1.array.json", -# meta_root + 
"subfolder/item_1-0.group.json", -# ]: -# store._validate_key(valid) -# # but otherwise valid keys cannot end in / -# with pytest.raises(ValueError): -# assert store._validate_key(valid + "/") - -# for invalid in [0, "*", "~", "^", "&"]: -# with pytest.raises(ValueError): -# store._validate_key(invalid) - - -# class StoreV3Tests(_StoreTests): - -# version = 3 -# root = meta_root - -# def test_getsize(self): -# # TODO: determine proper getsize() behavior for v3 -# # Currently returns the combined size of entries under -# # meta/root/path and data/root/path. -# # Any path not under meta/root/ or data/root/ (including zarr.json) -# # returns size 0. - -# store = self.create_store() -# if isinstance(store, dict) or hasattr(store, "getsize"): -# assert 0 == getsize(store, "zarr.json") -# store[meta_root + "foo/a"] = b"x" -# assert 1 == getsize(store) -# assert 1 == getsize(store, "foo") -# store[meta_root + "foo/b"] = b"x" -# assert 2 == getsize(store, "foo") -# assert 1 == getsize(store, "foo/b") -# store[meta_root + "bar/a"] = b"yy" -# assert 2 == getsize(store, "bar") -# store[data_root + "bar/a"] = b"zzz" -# assert 5 == getsize(store, "bar") -# store[data_root + "baz/a"] = b"zzz" -# assert 3 == getsize(store, "baz") -# assert 10 == getsize(store) -# store[data_root + "quux"] = array.array("B", b"zzzz") -# assert 14 == getsize(store) -# assert 4 == getsize(store, "quux") -# store[data_root + "spong"] = np.frombuffer(b"zzzzz", dtype="u1") -# assert 19 == getsize(store) -# assert 5 == getsize(store, "spong") -# store.close() - -# def test_init_array(self, dimension_separator_fixture_v3): - -# pass_dim_sep, want_dim_sep = dimension_separator_fixture_v3 - -# store = self.create_store() -# path = "arr1" -# transformer = DummyStorageTransfomer( -# "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT -# ) -# init_array( -# store, -# path=path, -# shape=1000, -# chunks=100, -# dimension_separator=pass_dim_sep, -# storage_transformers=[transformer], -# ) - -# # check metadata -# mkey = meta_root + path + ".array.json" -# assert mkey in store -# meta = store._metadata_class.decode_array_metadata(store[mkey]) -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype(None) == meta["data_type"] -# assert default_compressor == meta["compressor"] -# assert meta["fill_value"] is None -# # Missing MUST be assumed to be "/" -# assert meta["chunk_grid"]["separator"] is want_dim_sep -# assert len(meta["storage_transformers"]) == 1 -# assert isinstance(meta["storage_transformers"][0], DummyStorageTransfomer) -# assert meta["storage_transformers"][0].test_value == DummyStorageTransfomer.TEST_CONSTANT -# store.close() - -# def test_list_prefix(self): - -# store = self.create_store() -# path = "arr1" -# init_array(store, path=path, shape=1000, chunks=100) - -# expected = [meta_root + "arr1.array.json", "zarr.json"] -# assert sorted(store.list_prefix("")) == expected - -# expected = [meta_root + "arr1.array.json"] -# assert sorted(store.list_prefix(meta_root.rstrip("/"))) == expected - -# # cannot start prefix with '/' -# with pytest.raises(ValueError): -# store.list_prefix(prefix="/" + meta_root.rstrip("/")) - -# def test_equal(self): -# store = self.create_store() -# assert store == store - -# def test_rename_nonexisting(self): -# store = self.create_store() -# if store.is_erasable(): -# with pytest.raises(ValueError): -# store.rename("a", "b") -# else: -# with pytest.raises(NotImplementedError): -# store.rename("a", "b") - -# def 
test_get_partial_values(self): -# store = self.create_store() -# store.supports_efficient_get_partial_values in [True, False] -# store[data_root + "foo"] = b"abcdefg" -# store[data_root + "baz"] = b"z" -# assert [b"a"] == store.get_partial_values([(data_root + "foo", (0, 1))]) -# assert [ -# b"d", -# b"b", -# b"z", -# b"abc", -# b"defg", -# b"defg", -# b"g", -# b"ef", -# ] == store.get_partial_values( -# [ -# (data_root + "foo", (3, 1)), -# (data_root + "foo", (1, 1)), -# (data_root + "baz", (0, 1)), -# (data_root + "foo", (0, 3)), -# (data_root + "foo", (3, 4)), -# (data_root + "foo", (3, None)), -# (data_root + "foo", (-1, None)), -# (data_root + "foo", (-3, 2)), -# ] -# ) - -# def test_set_partial_values(self): -# store = self.create_store() -# store.supports_efficient_set_partial_values() -# store[data_root + "foo"] = b"abcdefg" -# store.set_partial_values([(data_root + "foo", 0, b"hey")]) -# assert store[data_root + "foo"] == b"heydefg" - -# store.set_partial_values([(data_root + "baz", 0, b"z")]) -# assert store[data_root + "baz"] == b"z" -# store.set_partial_values( -# [ -# (data_root + "foo", 1, b"oo"), -# (data_root + "baz", 1, b"zzz"), -# (data_root + "baz", 4, b"aaaa"), -# (data_root + "foo", 6, b"done"), -# ] -# ) -# assert store[data_root + "foo"] == b"hoodefdone" -# assert store[data_root + "baz"] == b"zzzzaaaa" -# store.set_partial_values( -# [ -# (data_root + "foo", -2, b"NE"), -# (data_root + "baz", -5, b"q"), -# ] -# ) -# assert store[data_root + "foo"] == b"hoodefdoNE" -# assert store[data_root + "baz"] == b"zzzq" - - -# class TestMappingStoreV3(StoreV3Tests): -# def create_store(self, **kwargs): -# return KVStoreV3(dict()) - -# def test_set_invalid_content(self): -# # Generic mappings support non-buffer types -# pass - - -# class TestMemoryStoreV3(_TestMemoryStore, StoreV3Tests): -# def create_store(self, **kwargs): -# skip_if_nested_chunks(**kwargs) -# return MemoryStoreV3(**kwargs) - - -# class TestDirectoryStoreV3(_TestDirectoryStore, StoreV3Tests): -# def create_store(self, normalize_keys=False, **kwargs): -# # For v3, don't have to skip if nested. 
-# # skip_if_nested_chunks(**kwargs) - -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# store = DirectoryStoreV3(path, normalize_keys=normalize_keys, **kwargs) -# return store - -# def test_rename_nonexisting(self): -# store = self.create_store() -# with pytest.raises(FileNotFoundError): -# store.rename(meta_root + "a", meta_root + "b") - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestFSStoreV3(_TestFSStore, StoreV3Tests): -# def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs): - -# if path is None: -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) - -# store = FSStoreV3( -# path, normalize_keys=normalize_keys, dimension_separator=dimension_separator, **kwargs -# ) -# return store - -# def test_init_array(self): -# store = self.create_store() -# path = "arr1" -# init_array(store, path=path, shape=1000, chunks=100) - -# # check metadata -# mkey = meta_root + path + ".array.json" -# assert mkey in store -# meta = store._metadata_class.decode_array_metadata(store[mkey]) -# assert (1000,) == meta["shape"] -# assert (100,) == meta["chunk_grid"]["chunk_shape"] -# assert np.dtype(None) == meta["data_type"] -# assert meta["chunk_grid"]["separator"] == "/" - - -# @pytest.mark.skipif(have_fsspec is False, reason="needs fsspec") -# class TestFSStoreV3WithKeySeparator(StoreV3Tests): -# def create_store(self, normalize_keys=False, key_separator=".", **kwargs): - -# # Since the user is passing key_separator, that will take priority. -# skip_if_nested_chunks(**kwargs) - -# path = tempfile.mkdtemp() -# atexit.register(atexit_rmtree, path) -# return FSStoreV3(path, normalize_keys=normalize_keys, key_separator=key_separator) - - -# # TODO: enable once N5StoreV3 has been implemented -# # @pytest.mark.skipif(True, reason="N5StoreV3 not yet fully implemented") -# # class TestN5StoreV3(_TestN5Store, TestDirectoryStoreV3, StoreV3Tests): - - -# class TestZipStoreV3(_TestZipStore, StoreV3Tests): - -# ZipStoreClass = ZipStoreV3 - -# def create_store(self, **kwargs): -# path = mktemp(suffix=".zip") -# atexit.register(os.remove, path) -# store = ZipStoreV3(path, mode="w", **kwargs) -# return store - - -# class TestDBMStoreV3(_TestDBMStore, StoreV3Tests): -# def create_store(self, dimension_separator=None): -# path = mktemp(suffix=".anydbm") -# atexit.register(atexit_rmglob, path + "*") -# # create store using default dbm implementation -# store = DBMStoreV3(path, flag="n", dimension_separator=dimension_separator) -# return store - - -# class TestDBMStoreV3Dumb(_TestDBMStoreDumb, StoreV3Tests): -# def create_store(self, **kwargs): -# path = mktemp(suffix=".dumbdbm") -# atexit.register(atexit_rmglob, path + "*") - -# import dbm.dumb as dumbdbm - -# store = DBMStoreV3(path, flag="n", open=dumbdbm.open, **kwargs) -# return store - - -# class TestDBMStoreV3Gnu(_TestDBMStoreGnu, StoreV3Tests): -# def create_store(self, **kwargs): -# gdbm = pytest.importorskip("dbm.gnu") -# path = mktemp(suffix=".gdbm") # pragma: no cover -# atexit.register(os.remove, path) # pragma: no cover -# store = DBMStoreV3( -# path, flag="n", open=gdbm.open, write_lock=False, **kwargs -# ) # pragma: no cover -# return store # pragma: no cover - - -# class TestDBMStoreV3NDBM(_TestDBMStoreNDBM, StoreV3Tests): -# def create_store(self, **kwargs): -# ndbm = pytest.importorskip("dbm.ndbm") -# path = mktemp(suffix=".ndbm") # pragma: no cover -# atexit.register(atexit_rmglob, path + "*") # pragma: no cover -# store = DBMStoreV3(path, 
flag="n", open=ndbm.open, **kwargs) # pragma: no cover -# return store # pragma: no cover - - -# class TestDBMStoreV3BerkeleyDB(_TestDBMStoreBerkeleyDB, StoreV3Tests): -# def create_store(self, **kwargs): -# bsddb3 = pytest.importorskip("bsddb3") -# path = mktemp(suffix=".dbm") -# atexit.register(os.remove, path) -# store = DBMStoreV3(path, flag="n", open=bsddb3.btopen, write_lock=False, **kwargs) -# return store - - -# class TestLMDBStoreV3(_TestLMDBStore, StoreV3Tests): -# def create_store(self, **kwargs): -# pytest.importorskip("lmdb") -# path = mktemp(suffix=".lmdb") -# atexit.register(atexit_rmtree, path) -# buffers = True -# store = LMDBStoreV3(path, buffers=buffers, **kwargs) -# return store - - -# class TestSQLiteStoreV3(_TestSQLiteStore, StoreV3Tests): -# def create_store(self, **kwargs): -# pytest.importorskip("sqlite3") -# path = mktemp(suffix=".db") -# atexit.register(atexit_rmtree, path) -# store = SQLiteStoreV3(path, **kwargs) -# return store - - -# class TestSQLiteStoreV3InMemory(_TestSQLiteStoreInMemory, StoreV3Tests): -# def create_store(self, **kwargs): -# pytest.importorskip("sqlite3") -# store = SQLiteStoreV3(":memory:", **kwargs) -# return store - - -# @skip_test_env_var("ZARR_TEST_MONGO") -# class TestMongoDBStoreV3(StoreV3Tests): -# def create_store(self, **kwargs): -# pytest.importorskip("pymongo") -# store = MongoDBStoreV3( -# host="127.0.0.1", database="zarr_tests", collection="zarr_tests", **kwargs -# ) -# # start with an empty store -# store.clear() -# return store - - -# @skip_test_env_var("ZARR_TEST_REDIS") -# class TestRedisStoreV3(StoreV3Tests): -# def create_store(self, **kwargs): -# # TODO: this is the default host for Redis on Travis, -# # we probably want to generalize this though -# pytest.importorskip("redis") -# store = RedisStoreV3(host="localhost", port=6379, **kwargs) -# # start with an empty store -# store.clear() -# return store - - -# @pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled") -# class TestStorageTransformerV3(TestMappingStoreV3): -# def create_store(self, **kwargs): -# inner_store = super().create_store(**kwargs) -# dummy_transformer = DummyStorageTransfomer( -# "dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT -# ) -# sharding_transformer = ShardingStorageTransformer( -# "indexed", -# chunks_per_shard=2, -# ) -# path = "bla" -# init_array( -# inner_store, -# path=path, -# shape=1000, -# chunks=100, -# dimension_separator=".", -# storage_transformers=[dummy_transformer, sharding_transformer], -# ) -# store = Array(store=inner_store, path=path).chunk_store -# store.erase_prefix("data/root/bla/") -# store.clear() -# return store - -# def test_method_forwarding(self): -# store = self.create_store() -# inner_store = store.inner_store.inner_store -# assert store.list() == inner_store.list() -# assert store.list_dir(data_root) == inner_store.list_dir(data_root) - -# assert store.is_readable() -# assert store.is_writeable() -# assert store.is_listable() -# inner_store._readable = False -# inner_store._writeable = False -# inner_store._listable = False -# assert not store.is_readable() -# assert not store.is_writeable() -# assert not store.is_listable() - - -# class TestLRUStoreCacheV3(_TestLRUStoreCache, StoreV3Tests): - -# CountingClass = CountingDictV3 -# LRUStoreClass = LRUStoreCacheV3 - - -# @skip_test_env_var("ZARR_TEST_ABS") -# class TestABSStoreV3(_TestABSStore, StoreV3Tests): - -# ABSStoreClass = ABSStoreV3 - - -# def test_normalize_store_arg_v3(tmpdir): - -# fn = tmpdir.join("store.zip") -# store 
= normalize_store_arg(str(fn), zarr_version=3, mode="w") -# assert isinstance(store, ZipStoreV3) -# assert "zarr.json" in store - -# # can't pass storage_options to non-fsspec store -# with pytest.raises(ValueError): -# normalize_store_arg(str(fn), zarr_version=3, mode="w", storage_options={"some": "kwargs"}) - -# if have_fsspec: -# import fsspec - -# path = tempfile.mkdtemp() -# store = normalize_store_arg("file://" + path, zarr_version=3, mode="w") -# assert isinstance(store, FSStoreV3) -# assert "zarr.json" in store - -# store = normalize_store_arg(fsspec.get_mapper("file://" + path), zarr_version=3) -# assert isinstance(store, FSStoreV3) - -# # regression for https://github.com/zarr-developers/zarr-python/issues/1382 -# # contents of zarr.json are not important for this test -# out = {"version": 1, "refs": {"zarr.json": "{...}"}} -# store = normalize_store_arg( -# "reference://", -# storage_options={"fo": out, "remote_protocol": "memory"}, zarr_version=3 -# ) -# assert isinstance(store, FSStoreV3) - -# fn = tmpdir.join("store.n5") -# with pytest.raises(NotImplementedError): -# normalize_store_arg(str(fn), zarr_version=3, mode="w") - -# # error on zarr_version=3 with a v2 store -# with pytest.raises(ValueError): -# normalize_store_arg(KVStore(dict()), zarr_version=3, mode="w") - -# # error on zarr_version=2 with a v3 store -# with pytest.raises(ValueError): -# normalize_store_arg(KVStoreV3(dict()), zarr_version=2, mode="w") - - -# class TestConsolidatedMetadataStoreV3(_TestConsolidatedMetadataStore): - -# version = 3 -# ConsolidatedMetadataClass = ConsolidatedMetadataStoreV3 - -# @property -# def metadata_key(self): -# return meta_root + "consolidated/.zmetadata" +@pytest.mark.parametrize("store_dict", (None, {})) +class TestMemoryStore(StoreTests[MemoryStore]): + store_cls = MemoryStore -# def test_bad_store_version(self): -# with pytest.raises(ValueError): -# self.ConsolidatedMetadataClass(KVStore(dict())) + def set(self, store: MemoryStore, key: str, value: Buffer) -> None: + store._store_dict[key] = value + def get(self, store: MemoryStore, key: str) -> Buffer: + return store._store_dict[key] -# def test_get_hierarchy_metadata(): -# store = KVStoreV3({}) + @pytest.fixture(scope="function") + def store(self, store_dict: MutableMapping[str, Buffer] | None): + return MemoryStore(store_dict=store_dict) -# # error raised if 'jarr.json' is not in the store -# with pytest.raises(ValueError): -# _get_hierarchy_metadata(store) + def test_store_repr(self, store: MemoryStore) -> None: + assert str(store) == f"memory://{id(store._store_dict)}" -# store["zarr.json"] = _default_entry_point_metadata_v3 -# assert _get_hierarchy_metadata(store) == _default_entry_point_metadata_v3 + def test_store_supports_writes(self, store: MemoryStore) -> None: + assert True -# # ValueError if only a subset of keys are present -# store["zarr.json"] = {"zarr_format": "https://purl.org/zarr/spec/protocol/core/3.0"} -# with pytest.raises(ValueError): -# _get_hierarchy_metadata(store) + def test_store_supports_listing(self, store: MemoryStore) -> None: + assert True -# # ValueError if any unexpected keys are present -# extra_metadata = copy.copy(_default_entry_point_metadata_v3) -# extra_metadata["extra_key"] = "value" -# store["zarr.json"] = extra_metadata -# with pytest.raises(ValueError): -# _get_hierarchy_metadata(store) + def test_store_supports_partial_writes(self, store: MemoryStore) -> None: + assert True + def test_list_prefix(self, store: MemoryStore) -> None: + assert True -# def test_top_level_imports(): 
-# for store_name in [ -# "ABSStoreV3", -# "DBMStoreV3", -# "KVStoreV3", -# "DirectoryStoreV3", -# "LMDBStoreV3", -# "LRUStoreCacheV3", -# "MemoryStoreV3", -# "MongoDBStoreV3", -# "RedisStoreV3", -# "SQLiteStoreV3", -# "ZipStoreV3", -# ]: -# if v3_api_available: -# assert hasattr(zarr, store_name) # pragma: no cover -# else: -# assert not hasattr(zarr, store_name) # pragma: no cover +class TestLocalStore(StoreTests[LocalStore]): + store_cls = LocalStore -# def _get_public_and_dunder_methods(some_class): -# return set( -# name -# for name, _ in inspect.getmembers(some_class, predicate=inspect.isfunction) -# if not name.startswith("_") or name.startswith("__") -# ) + def get(self, store: LocalStore, key: str) -> Buffer: + return Buffer.from_bytes((store.root / key).read_bytes()) + def set(self, store: LocalStore, key: str, value: Buffer) -> None: + parent = (store.root / key).parent + if not parent.exists(): + parent.mkdir(parents=True) + (store.root / key).write_bytes(value.to_bytes()) -# def test_storage_transformer_interface(): -# store_v3_methods = _get_public_and_dunder_methods(StoreV3) -# store_v3_methods.discard("__init__") -# # Note, getitems() isn't mandatory when get_partial_values() is available -# store_v3_methods.discard("getitems") -# storage_transformer_methods = _get_public_and_dunder_methods(StorageTransformer) -# storage_transformer_methods.discard("__init__") -# storage_transformer_methods.discard("get_config") -# assert storage_transformer_methods == store_v3_methods + @pytest.fixture(scope="function") + def store(self, tmpdir) -> LocalStore: + return self.store_cls(str(tmpdir)) + def test_store_repr(self, store: LocalStore) -> None: + assert str(store) == f"file://{store.root!s}" -class TestMemoryStore(StoreTests): - store_cls = MemoryStore + def test_store_supports_writes(self, store: LocalStore) -> None: + assert True + def test_store_supports_partial_writes(self, store: LocalStore) -> None: + assert True -class TestLocalStore(StoreTests): - store_cls = LocalStore + def test_store_supports_listing(self, store: LocalStore) -> None: + assert True - @pytest.fixture(scope="function") - @pytest.mark.parametrize("auto_mkdir", (True, False)) - def store(self, tmpdir) -> LocalStore: - return self.store_cls(str(tmpdir)) + def test_list_prefix(self, store: LocalStore) -> None: + assert True From ef15e20192c294cc82b9194ca29190ac9806d6fa Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 29 May 2024 08:00:52 -0700 Subject: [PATCH 0550/1078] [v3] Feature: Store open mode (#1911) * wip * feature(store): set open mode on store initialization --- src/zarr/abc/store.py | 27 ++++++++++++++++++++++++++- src/zarr/common.py | 1 + src/zarr/store/core.py | 10 ++++++++-- src/zarr/store/local.py | 8 ++++++-- src/zarr/store/memory.py | 9 +++++++-- src/zarr/store/remote.py | 10 +++++++++- src/zarr/testing/store.py | 37 ++++++++++++++++++++++++++++++++++--- tests/v3/conftest.py | 14 +++++++------- tests/v3/test_codecs.py | 2 +- tests/v3/test_store.py | 29 ++++++++++++++++------------- tests/v3/test_v2.py | 2 +- 11 files changed, 116 insertions(+), 33 deletions(-) diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index 7087706b33..e86fe5d07a 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -3,10 +3,31 @@ from typing import Protocol, runtime_checkable from zarr.buffer import Buffer -from zarr.common import BytesLike +from zarr.common import BytesLike, OpenMode class Store(ABC): + _mode: OpenMode + + def __init__(self, mode: OpenMode = "r"): + if mode not in ("r", "r+", 
"w", "w-", "a"): + raise ValueError("mode must be one of 'r', 'r+', 'w', 'w-', 'a'") + self._mode = mode + + @property + def mode(self) -> OpenMode: + """Access mode of the store.""" + return self._mode + + @property + def writeable(self) -> bool: + """Is the store writeable?""" + return self.mode in ("a", "w", "w-") + + def _check_writable(self) -> None: + if not self.writeable: + raise ValueError("store mode does not support writing") + @abstractmethod async def get( self, key: str, byte_range: tuple[int | None, int | None] | None = None @@ -147,6 +168,10 @@ def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: """ ... + def close(self) -> None: # noqa: B027 + """Close the store.""" + pass + @runtime_checkable class ByteGetter(Protocol): diff --git a/src/zarr/common.py b/src/zarr/common.py index 9d8315abc8..9527efbbce 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -27,6 +27,7 @@ Selection = slice | SliceSelection ZarrFormat = Literal[2, 3] JSON = None | str | int | float | Enum | dict[str, "JSON"] | list["JSON"] | tuple["JSON", ...] +OpenMode = Literal["r", "r+", "a", "w", "w-"] def product(tup: ChunkCoords) -> int: diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index 4e7a7fcca1..abb08291df 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -5,6 +5,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer +from zarr.common import OpenMode from zarr.store.local import LocalStore @@ -60,13 +61,18 @@ def __eq__(self, other: Any) -> bool: StoreLike = Store | StorePath | Path | str -def make_store_path(store_like: StoreLike) -> StorePath: +def make_store_path(store_like: StoreLike, *, mode: OpenMode | None = None) -> StorePath: if isinstance(store_like, StorePath): + if mode is not None: + assert mode == store_like.store.mode return store_like elif isinstance(store_like, Store): + if mode is not None: + assert mode == store_like.mode return StorePath(store_like) elif isinstance(store_like, str): - return StorePath(LocalStore(Path(store_like))) + assert mode is not None + return StorePath(LocalStore(Path(store_like), mode=mode)) raise TypeError diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 50fe9701fc..40abe12932 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -7,7 +7,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer -from zarr.common import concurrent_map, to_thread +from zarr.common import OpenMode, concurrent_map, to_thread def _get(path: Path, byte_range: tuple[int | None, int | None] | None) -> Buffer: @@ -69,7 +69,8 @@ class LocalStore(Store): root: Path - def __init__(self, root: Path | str): + def __init__(self, root: Path | str, *, mode: OpenMode = "r"): + super().__init__(mode=mode) if isinstance(root, str): root = Path(root) assert isinstance(root, Path) @@ -117,6 +118,7 @@ async def get_partial_values( return await concurrent_map(args, to_thread, limit=None) # TODO: fix limit async def set(self, key: str, value: Buffer) -> None: + self._check_writable() assert isinstance(key, str) if isinstance(value, bytes | bytearray): # TODO: to support the v2 tests, we convert bytes to Buffer here @@ -127,6 +129,7 @@ async def set(self, key: str, value: Buffer) -> None: await to_thread(_put, path, value) async def set_partial_values(self, key_start_values: list[tuple[str, int, bytes]]) -> None: + self._check_writable() args = [] for key, start, value in key_start_values: assert isinstance(key, str) @@ -138,6 +141,7 @@ async def set_partial_values(self, key_start_values: 
list[tuple[str, int, bytes] await concurrent_map(args, to_thread, limit=None) # TODO: fix limit async def delete(self, key: str) -> None: + self._check_writable() path = self.root / key if path.is_dir(): # TODO: support deleting directories? shutil.rmtree? shutil.rmtree(path) diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index 5e438919cf..74bb5454fe 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -4,7 +4,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer -from zarr.common import concurrent_map +from zarr.common import OpenMode, concurrent_map from zarr.store.core import _normalize_interval_index @@ -17,7 +17,10 @@ class MemoryStore(Store): _store_dict: MutableMapping[str, Buffer] - def __init__(self, store_dict: MutableMapping[str, Buffer] | None = None): + def __init__( + self, store_dict: MutableMapping[str, Buffer] | None = None, *, mode: OpenMode = "r" + ): + super().__init__(mode=mode) self._store_dict = store_dict or {} def __str__(self) -> str: @@ -47,6 +50,7 @@ async def exists(self, key: str) -> bool: return key in self._store_dict async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: + self._check_writable() assert isinstance(key, str) if isinstance(value, bytes | bytearray): # TODO: to support the v2 tests, we convert bytes to Buffer here @@ -62,6 +66,7 @@ async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None self._store_dict[key] = value async def delete(self, key: str) -> None: + self._check_writable() try: del self._store_dict[key] except KeyError: diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index a3395459fd..3b086f0a03 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -4,6 +4,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer +from zarr.common import OpenMode from zarr.store.core import _dereference_path if TYPE_CHECKING: @@ -18,10 +19,14 @@ class RemoteStore(Store): root: UPath - def __init__(self, url: UPath | str, **storage_options: dict[str, Any]): + def __init__( + self, url: UPath | str, *, mode: OpenMode = "r", **storage_options: dict[str, Any] + ): import fsspec from upath import UPath + super().__init__(mode=mode) + if isinstance(url, str): self.root = UPath(url, **storage_options) else: @@ -29,6 +34,7 @@ def __init__(self, url: UPath | str, **storage_options: dict[str, Any]): len(storage_options) == 0 ), "If constructed with a UPath object, no additional storage_options are allowed." self.root = url.rstrip("/") + # test instantiate file system fs, _ = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs) assert fs.__class__.async_impl, "FileSystem needs to support async operations." 
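As a usage sketch (an editorial aside, not part of this patch; it assumes only what the hunks above show: the keyword-only mode argument, the writeable property, Buffer.from_bytes, and the ValueError raised by _check_writable), the new open mode gates every mutating call:

    # Minimal sketch of the open-mode behaviour introduced by this commit.
    import asyncio

    from zarr.buffer import Buffer
    from zarr.store.memory import MemoryStore

    async def demo() -> None:
        writeable = MemoryStore(mode="w")  # "a", "w" and "w-" permit writes
        await writeable.set("zarr.json", Buffer.from_bytes(b"{}"))

        read_only = MemoryStore(mode="r")  # "r" is the default
        assert not read_only.writeable
        try:
            await read_only.set("zarr.json", Buffer.from_bytes(b"{}"))
        except ValueError:
            print("rejected: store mode does not support writing")

    asyncio.run(demo())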
@@ -67,6 +73,7 @@ async def get( return value async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: + self._check_writable() assert isinstance(key, str) fs, root = self._make_fs() path = _dereference_path(root, key) @@ -80,6 +87,7 @@ async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None await fs._pipe_file(path, value) async def delete(self, key: str) -> None: + self._check_writable() fs, root = self._make_fs() path = _dereference_path(root, key) if await fs._exists(path): diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index 1c0ed93734..b317f383f6 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -1,4 +1,4 @@ -from typing import Generic, TypeVar +from typing import Any, Generic, TypeVar import pytest @@ -31,13 +31,43 @@ def get(self, store: S, key: str) -> Buffer: raise NotImplementedError @pytest.fixture(scope="function") - def store(self) -> Store: - return self.store_cls() + def store_kwargs(self) -> dict[str, Any]: + return {"mode": "w"} + + @pytest.fixture(scope="function") + def store(self, store_kwargs: dict[str, Any]) -> Store: + return self.store_cls(**store_kwargs) def test_store_type(self, store: S) -> None: assert isinstance(store, Store) assert isinstance(store, self.store_cls) + def test_store_mode(self, store: S, store_kwargs: dict[str, Any]) -> None: + assert store.mode == "w", store.mode + assert store.writeable + + with pytest.raises(AttributeError): + store.mode = "w" # type: ignore + + # read-only + kwargs = {**store_kwargs, "mode": "r"} + read_store = self.store_cls(**kwargs) + assert read_store.mode == "r", read_store.mode + assert not read_store.writeable + + async def test_not_writable_store_raises(self, store_kwargs: dict[str, Any]) -> None: + kwargs = {**store_kwargs, "mode": "r"} + store = self.store_cls(**kwargs) + assert not store.writeable + + # set + with pytest.raises(ValueError): + await store.set("foo", Buffer.from_bytes(b"bar")) + + # delete + with pytest.raises(ValueError): + await store.delete("foo") + def test_store_repr(self, store: S) -> None: raise NotImplementedError @@ -72,6 +102,7 @@ async def test_set(self, store: S, key: str, data: bytes) -> None: """ Ensure that data can be written to the store using the store.set method. 
""" + assert store.writeable data_buf = Buffer.from_bytes(data) await store.set(key, data_buf) observed = self.get(store, key) diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py index 21dc58197e..6b58cce412 100644 --- a/tests/v3/conftest.py +++ b/tests/v3/conftest.py @@ -22,11 +22,11 @@ def parse_store( store: Literal["local", "memory", "remote"], path: str ) -> LocalStore | MemoryStore | RemoteStore: if store == "local": - return LocalStore(path) + return LocalStore(path, mode="w") if store == "memory": - return MemoryStore() + return MemoryStore(mode="w") if store == "remote": - return RemoteStore() + return RemoteStore(mode="w") raise AssertionError @@ -38,24 +38,24 @@ def path_type(request): # todo: harmonize this with local_store fixture @pytest.fixture def store_path(tmpdir): - store = LocalStore(str(tmpdir)) + store = LocalStore(str(tmpdir), mode="w") p = StorePath(store) return p @pytest.fixture(scope="function") def local_store(tmpdir): - return LocalStore(str(tmpdir)) + return LocalStore(str(tmpdir), mode="w") @pytest.fixture(scope="function") def remote_store(): - return RemoteStore() + return RemoteStore(mode="w") @pytest.fixture(scope="function") def memory_store(): - return MemoryStore() + return MemoryStore(mode="w") @pytest.fixture(scope="function") diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index a595b12494..251570f767 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -50,7 +50,7 @@ async def set(self, value: np.ndarray): @pytest.fixture def store() -> Iterator[Store]: - yield StorePath(MemoryStore()) + yield StorePath(MemoryStore(mode="w")) @pytest.fixture diff --git a/tests/v3/test_store.py b/tests/v3/test_store.py index 75438f8612..52882ea78c 100644 --- a/tests/v3/test_store.py +++ b/tests/v3/test_store.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import MutableMapping +from typing import Any import pytest @@ -10,7 +10,6 @@ from zarr.testing.store import StoreTests -@pytest.mark.parametrize("store_dict", (None, {})) class TestMemoryStore(StoreTests[MemoryStore]): store_cls = MemoryStore @@ -20,21 +19,25 @@ def set(self, store: MemoryStore, key: str, value: Buffer) -> None: def get(self, store: MemoryStore, key: str) -> Buffer: return store._store_dict[key] + @pytest.fixture(scope="function", params=[None, {}]) + def store_kwargs(self, request) -> dict[str, Any]: + return {"store_dict": request.param, "mode": "w"} + @pytest.fixture(scope="function") - def store(self, store_dict: MutableMapping[str, Buffer] | None): - return MemoryStore(store_dict=store_dict) + def store(self, store_kwargs: dict[str, Any]) -> MemoryStore: + return self.store_cls(**store_kwargs) def test_store_repr(self, store: MemoryStore) -> None: assert str(store) == f"memory://{id(store._store_dict)}" def test_store_supports_writes(self, store: MemoryStore) -> None: - assert True + assert store.supports_writes def test_store_supports_listing(self, store: MemoryStore) -> None: - assert True + assert store.supports_listing def test_store_supports_partial_writes(self, store: MemoryStore) -> None: - assert True + assert store.supports_partial_writes def test_list_prefix(self, store: MemoryStore) -> None: assert True @@ -52,21 +55,21 @@ def set(self, store: LocalStore, key: str, value: Buffer) -> None: parent.mkdir(parents=True) (store.root / key).write_bytes(value.to_bytes()) - @pytest.fixture(scope="function") - def store(self, tmpdir) -> LocalStore: - return self.store_cls(str(tmpdir)) + @pytest.fixture + def store_kwargs(self, 
tmpdir) -> dict[str, str]: + return {"root": str(tmpdir), "mode": "w"} def test_store_repr(self, store: LocalStore) -> None: assert str(store) == f"file://{store.root!s}" def test_store_supports_writes(self, store: LocalStore) -> None: - assert True + assert store.supports_writes def test_store_supports_partial_writes(self, store: LocalStore) -> None: - assert True + assert store.supports_partial_writes def test_store_supports_listing(self, store: LocalStore) -> None: - assert True + assert store.supports_listing def test_list_prefix(self, store: LocalStore) -> None: assert True diff --git a/tests/v3/test_v2.py index 2a38dc8fdc..41555bbd26 100644 --- a/tests/v3/test_v2.py +++ b/tests/v3/test_v2.py @@ -10,7 +10,7 @@ @pytest.fixture def store() -> Iterator[Store]: - yield StorePath(MemoryStore()) + yield StorePath(MemoryStore(mode="w")) def test_simple(store: Store): From 14e2ed38bd5ce4219d15eee9a093262a947baa8e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 31 May 2024 14:00:03 -0700 Subject: [PATCH 0551/1078] chore: update pre-commit hooks (#1920) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit - [github.com/astral-sh/ruff-pre-commit: v0.4.3 → v0.4.5](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.3...v0.4.5) - [github.com/codespell-project/codespell: v2.2.6 → v2.3.0](https://github.com/codespell-project/codespell/compare/v2.2.6...v2.3.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ffa3c94efa..58cbac3c45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,14 +6,14 @@ default_stages: [commit, push] default_language_version: python: python3 repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: 'v0.4.3' + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: 'v0.4.5' hooks: - id: ruff args: ["--fix", "--show-fixes"] - id: ruff-format - repo: https://github.com/codespell-project/codespell - rev: v2.2.6 + rev: v2.3.0 hooks: - id: codespell args: ["-L", "ba,ihs,kake,nd,noe,nwo,te,fo,zar", "-S", "fixture"] From 9ad01f1f8f80e9c96fb58df38435f1adad0d6def Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 31 May 2024 22:01:25 +0100 Subject: [PATCH 0552/1078] Disallow implicit re-exports (#1908) * No implicit re-exports * Clean up v2 mypy ignores --- pyproject.toml | 19 +++++++------------ src/zarr/array.py | 4 ++-- src/zarr/codecs/__init__.py | 31 +++++++++++++++++++++++-------- src/zarr/codecs/pipeline.py | 3 +-- src/zarr/codecs/sharding.py | 3 +-- src/zarr/metadata.py | 3 +++ src/zarr/store/__init__.py | 2 ++ 7 files changed, 39 insertions(+), 26 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 947bec9369..4bcbfd0a0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -201,34 +201,30 @@ disallow_incomplete_defs = true disallow_untyped_calls = true disallow_untyped_defs = true +no_implicit_reexport = true + [[tool.mypy.overrides]] module = [ "zarr.v2.*", - "zarr.group", - "tests.*", ] -check_untyped_defs = false +ignore_errors = true [[tool.mypy.overrides]] module = [ + "zarr.group", + "tests.*", ] -disallow_any_generics = false +check_untyped_defs = false
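An editorial sketch (not part of the patch) of what no_implicit_reexport enforces: under this flag, mypy treats a name imported into a module as private unless it is re-exported explicitly, which is what the __all__ lists added below are for. Roughly:

    # In a package __init__.py, a bare import is no longer a public re-export:
    from zarr.codecs.gzip import GzipCodec

    # Either list the name in __all__ ...
    __all__ = ["GzipCodec"]
    # ... or import it with a redundant alias:
    # from zarr.codecs.gzip import GzipCodec as GzipCodec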
[[tool.mypy.overrides]] module = [ - "zarr.v2.*", - "zarr.array_v2", "zarr.group" ] disallow_incomplete_defs = false [[tool.mypy.overrides]] module = [ - "zarr.v2.*", - "zarr.array_v2", "zarr.array", "zarr.buffer" ] @@ -236,13 +232,12 @@ disallow_untyped_calls = false [[tool.mypy.overrides]] module = [ - "zarr.v2.*", - "zarr.array_v2", "zarr.array", "zarr.group", ] disallow_untyped_defs = false + [tool.pytest.ini_options] minversion = "7" testpaths = ["tests"] diff --git a/src/zarr/array.py b/src/zarr/array.py index 7da39c285e..cb780bb8d7 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -34,9 +34,9 @@ ZarrFormat, concurrent_map, ) -from zarr.config import config +from zarr.config import config, parse_indexing_order from zarr.indexing import BasicIndexer -from zarr.metadata import ArrayMetadata, ArrayV2Metadata, ArrayV3Metadata, parse_indexing_order +from zarr.metadata import ArrayMetadata, ArrayV2Metadata, ArrayV3Metadata from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import sync diff --git a/src/zarr/codecs/__init__.py b/src/zarr/codecs/__init__.py index 0f0ff55df5..3ef3a87db7 100644 --- a/src/zarr/codecs/__init__.py +++ b/src/zarr/codecs/__init__.py @@ -1,10 +1,25 @@ from __future__ import annotations -from zarr.codecs.blosc import BloscCname, BloscCodec, BloscShuffle # noqa: F401 -from zarr.codecs.bytes import BytesCodec, Endian # noqa: F401 -from zarr.codecs.crc32c_ import Crc32cCodec # noqa: F401 -from zarr.codecs.gzip import GzipCodec # noqa: F401 -from zarr.codecs.pipeline import BatchedCodecPipeline # noqa: F401 -from zarr.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation # noqa: F401 -from zarr.codecs.transpose import TransposeCodec # noqa: F401 -from zarr.codecs.zstd import ZstdCodec # noqa: F401 +from zarr.codecs.blosc import BloscCname, BloscCodec, BloscShuffle +from zarr.codecs.bytes import BytesCodec, Endian +from zarr.codecs.crc32c_ import Crc32cCodec +from zarr.codecs.gzip import GzipCodec +from zarr.codecs.pipeline import BatchedCodecPipeline +from zarr.codecs.sharding import ShardingCodec, ShardingCodecIndexLocation +from zarr.codecs.transpose import TransposeCodec +from zarr.codecs.zstd import ZstdCodec + +__all__ = [ + "BatchedCodecPipeline", + "BloscCodec", + "BloscCname", + "BloscShuffle", + "BytesCodec", + "Endian", + "Crc32cCodec", + "GzipCodec", + "ShardingCodec", + "ShardingCodecIndexLocation", + "TransposeCodec", + "ZstdCodec", +] diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index 893cbc8b4b..6f493c9e81 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -11,12 +11,11 @@ ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, - ByteGetter, BytesBytesCodec, - ByteSetter, Codec, CodecPipeline, ) +from zarr.abc.store import ByteGetter, ByteSetter from zarr.buffer import Buffer, NDBuffer from zarr.codecs.registry import get_codec_class from zarr.common import JSON, concurrent_map, parse_named_configuration diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index a68577be68..ec5306ee80 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -14,11 +14,10 @@ ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, - ByteGetter, - ByteSetter, Codec, CodecPipeline, ) +from zarr.abc.store import ByteGetter, ByteSetter from zarr.buffer import Buffer, NDBuffer from zarr.chunk_grids import RegularChunkGrid from zarr.codecs.bytes import BytesCodec diff --git 
a/src/zarr/metadata.py b/src/zarr/metadata.py index 58cc276c29..2d8a455152 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -41,6 +41,9 @@ _bool = bool +__all__ = ["ArrayMetadata"] + + class DataType(Enum): bool = "bool" int8 = "int8" diff --git a/src/zarr/store/__init__.py b/src/zarr/store/__init__.py index b1c3a5f720..b7cd6cc0fd 100644 --- a/src/zarr/store/__init__.py +++ b/src/zarr/store/__init__.py @@ -3,3 +3,5 @@ from zarr.store.remote import RemoteStore from zarr.store.local import LocalStore from zarr.store.memory import MemoryStore + +__all__ = ["StorePath", "StoreLike", "make_store_path", "RemoteStore", "LocalStore", "MemoryStore"] From 016964b90a0a30c10e5b6d08b4cc9d7eb15b6aa0 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 1 Jun 2024 03:39:16 +0100 Subject: [PATCH 0553/1078] Make typing strict (#1879) * Disallow subclassing any * Disallow returning any * Enable strict * Fix literal import * pre-commit fixes * Remove old group imports --- .pre-commit-config.yaml | 2 ++ pyproject.toml | 23 ++++++++--------------- src/zarr/codecs/sharding.py | 2 +- src/zarr/codecs/zstd.py | 4 ++-- src/zarr/common.py | 12 ++++++++++-- src/zarr/config.py | 4 ++-- src/zarr/group.py | 4 ++-- src/zarr/v2/n5.py | 2 +- src/zarr/v2/util.py | 2 +- 9 files changed, 29 insertions(+), 26 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58cbac3c45..578a66fd07 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,3 +31,5 @@ repos: - types-setuptools - pytest - numpy + - numcodecs + - zstandard diff --git a/pyproject.toml b/pyproject.toml index 4bcbfd0a0a..616c388f99 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -187,21 +187,7 @@ python_version = "3.10" ignore_missing_imports = true namespace_packages = false -warn_unused_configs = true -warn_redundant_casts = true -warn_unused_ignores = true -strict_equality = true -strict_concatenate = true - -check_untyped_defs = true -disallow_untyped_decorators = true -disallow_any_generics = true - -disallow_incomplete_defs = true -disallow_untyped_calls = true - -disallow_untyped_defs = true -no_implicit_reexport = true +strict = true [[tool.mypy.overrides]] @@ -238,6 +224,13 @@ module = [ disallow_untyped_defs = false +[[tool.mypy.overrides]] +module = [ + "zarr.metadata", + "zarr.store.remote" +] +warn_return_any = false + [tool.pytest.ini_options] minversion = "7" testpaths = ["tests"] diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index ec5306ee80..cea788840d 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -101,7 +101,7 @@ def is_all_empty(self) -> bool: return bool(np.array_equiv(self.offsets_and_lengths, MAX_UINT_64)) def get_full_chunk_map(self) -> npt.NDArray[np.bool_]: - return self.offsets_and_lengths[..., 0] != MAX_UINT_64 + return np.not_equal(self.offsets_and_lengths[..., 0], MAX_UINT_64) def get_chunk_slice(self, chunk_coords: ChunkCoords) -> tuple[int, int] | None: localized_chunk = self._localize_chunk(chunk_coords) diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index 76e625ad6a..451fae8b37 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -55,11 +55,11 @@ def to_dict(self) -> dict[str, JSON]: def _compress(self, data: npt.NDArray[Any]) -> bytes: ctx = ZstdCompressor(level=self.level, write_checksum=self.checksum) - return ctx.compress(data) + return ctx.compress(data.tobytes()) def _decompress(self, data: npt.NDArray[Any]) -> bytes: ctx = ZstdDecompressor() - return ctx.decompress(data) + 
return ctx.decompress(data.tobytes()) async def _decode_single( self, diff --git a/src/zarr/common.py b/src/zarr/common.py index 9527efbbce..ec5d870f92 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -7,7 +7,15 @@ from collections.abc import Iterable from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypeVar, overload +from typing import ( + TYPE_CHECKING, + Any, + Literal, + ParamSpec, + TypeVar, + cast, + overload, +) if TYPE_CHECKING: from collections.abc import Awaitable, Callable, Iterator @@ -178,5 +186,5 @@ def parse_fill_value(data: Any) -> Any: def parse_order(data: Any) -> Literal["C", "F"]: if data in ("C", "F"): - return data + return cast(Literal["C", "F"], data) raise ValueError(f"Expected one of ('C', 'F'), got {data} instead.") diff --git a/src/zarr/config.py b/src/zarr/config.py index 5b1640bd56..7c5b48a16c 100644 --- a/src/zarr/config.py +++ b/src/zarr/config.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Literal +from typing import Any, Literal, cast from donfig import Config @@ -18,6 +18,6 @@ def parse_indexing_order(data: Any) -> Literal["C", "F"]: if data in ("C", "F"): - return data + return cast(Literal["C", "F"], data) msg = f"Expected one of ('C', 'F'), got {data} instead." raise ValueError(msg) diff --git a/src/zarr/group.py b/src/zarr/group.py index 4ff2176fd9..2401934b84 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -5,7 +5,7 @@ import logging from collections.abc import Iterator from dataclasses import asdict, dataclass, field, replace -from typing import TYPE_CHECKING, overload +from typing import TYPE_CHECKING, Literal, cast, overload import numpy.typing as npt @@ -37,7 +37,7 @@ def parse_zarr_format(data: Any) -> ZarrFormat: if data in (2, 3): - return data + return cast(Literal[2, 3], data) msg = msg = f"Invalid zarr_format. Expected one 2 or 3. Got {data}." 
raise ValueError(msg) diff --git a/src/zarr/v2/n5.py b/src/zarr/v2/n5.py index 4ea5e45721..a6fd39f5b8 100644 --- a/src/zarr/v2/n5.py +++ b/src/zarr/v2/n5.py @@ -780,7 +780,7 @@ def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dic return zarr_config -class N5ChunkWrapper(Codec): +class N5ChunkWrapper(Codec): # type: ignore[misc] codec_id = "n5_wrapper" def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None): diff --git a/src/zarr/v2/util.py b/src/zarr/v2/util.py index 48d7d30d88..6926bb2d14 100644 --- a/src/zarr/v2/util.py +++ b/src/zarr/v2/util.py @@ -444,7 +444,7 @@ def get_type(self): return type(self.obj).__name__ -class TreeTraversal(Traversal): +class TreeTraversal(Traversal): # type: ignore[misc] def get_children(self, node): return node.get_children() From b4cdf94c1ff80629ae902432567943a1cdcc443e Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 1 Jun 2024 03:46:08 +0100 Subject: [PATCH 0554/1078] Enable extra mypy error codes (#1909) * Clean up v2 mypy ignores * Enable extra mypy error codes * Specific error code --------- Co-authored-by: Joe Hamman --- pyproject.toml | 2 ++ src/zarr/buffer.py | 2 +- src/zarr/codecs/sharding.py | 2 +- src/zarr/indexing.py | 7 ++----- src/zarr/metadata.py | 7 ++++--- src/zarr/testing/store.py | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 616c388f99..9f50c33db0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -190,6 +190,8 @@ namespace_packages = false strict = true +enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] + [[tool.mypy.overrides]] module = [ "zarr.v2.*", diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 0f055093c1..59994e70d6 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -79,7 +79,7 @@ def ravel(self, order: Literal["K", "A", "C", "F"] = "C") -> Self: ... def all(self) -> bool: ... - def __eq__(self, other: Any) -> Self: # type: ignore + def __eq__(self, other: Any) -> Self: # type: ignore[explicit-override, override] """Element-wise equal Notice diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index cea788840d..a7b6edc3b4 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -205,7 +205,7 @@ def merge_with_morton_order( ) -> _ShardBuilder: obj = cls.create_empty(chunks_per_shard) for chunk_coords in morton_order_iter(chunks_per_shard): - if tombstones is not None and chunk_coords in tombstones: + if chunk_coords in tombstones: continue for shard_dict in shard_dicts: maybe_value = shard_dict.get(chunk_coords, None) diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 45413bc5b2..6bc83d5062 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -199,11 +199,8 @@ def is_total_slice(item: Selection, shape: ChunkCoords) -> bool: if isinstance(item, tuple): return all( ( - isinstance(dim_sel, slice) - and ( - (dim_sel == slice(None)) - or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) - ) + (dim_sel == slice(None)) + or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) ) for dim_sel, dim_len in zip(item, shape, strict=False) ) diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 2d8a455152..29d0d19a06 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -432,10 +432,11 @@ def update_attributes(self, attributes: dict[str, JSON]) -> Self: def parse_dimension_names(data: None | Iterable[str]) -> tuple[str, ...] 
| None: if data is None: return data - if isinstance(data, Iterable) and all([isinstance(x, str) for x in data]): + elif all([isinstance(x, str) for x in data]): return tuple(data) - msg = f"Expected either None or a iterable of str, got {type(data)}" - raise TypeError(msg) + else: + msg = f"Expected either None or a iterable of str, got {type(data)}" + raise TypeError(msg) # todo: real validation diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index b317f383f6..cb4dc9f7b5 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -47,7 +47,7 @@ def test_store_mode(self, store: S, store_kwargs: dict[str, Any]) -> None: assert store.writeable with pytest.raises(AttributeError): - store.mode = "w" # type: ignore + store.mode = "w" # type: ignore[misc] # read-only kwargs = {**store_kwargs, "mode": "r"} From 2060a53b2d9dcbf023b7d8c8481e470516fafd8e Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Sat, 1 Jun 2024 02:25:34 -0700 Subject: [PATCH 0555/1078] fix(types): Group.info -> NotImplementedError (#1936) (until we implement it) --- pyproject.toml | 8 -------- src/zarr/group.py | 8 ++++---- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9f50c33db0..66f06dfe5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -200,17 +200,10 @@ ignore_errors = true [[tool.mypy.overrides]] module = [ - "zarr.group", "tests.*", ] check_untyped_defs = false -[[tool.mypy.overrides]] -module = [ - "zarr.group" -] -disallow_incomplete_defs = false - [[tool.mypy.overrides]] module = [ "zarr.array", @@ -221,7 +214,6 @@ disallow_untyped_calls = false [[tool.mypy.overrides]] module = [ "zarr.array", - "zarr.group", ] disallow_untyped_defs = false diff --git a/src/zarr/group.py b/src/zarr/group.py index 2401934b84..88e7fd0922 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -275,8 +275,8 @@ def attrs(self) -> dict[str, Any]: return self.metadata.attributes @property - def info(self): - return self.metadata.info + def info(self) -> None: + raise NotImplementedError async def create_group( self, @@ -526,8 +526,8 @@ def attrs(self) -> Attributes: return Attributes(self) @property - def info(self): - return self._async_group.info + def info(self) -> None: + raise NotImplementedError def update_attributes(self, new_attributes: dict[str, Any]) -> Group: self._sync(self._async_group.update_attributes(new_attributes)) From 54a0958fa1e8a3cb94b32a66520dccd6a408b764 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 1 Jun 2024 16:40:56 +0100 Subject: [PATCH 0556/1078] Enable warn_unreachable for mypy (#1937) --- pyproject.toml | 2 +- src/zarr/codecs/_v2.py | 3 --- src/zarr/codecs/blosc.py | 4 ++-- src/zarr/store/local.py | 9 +++------ src/zarr/store/memory.py | 4 ++-- 5 files changed, 8 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 66f06dfe5d..baacf2fc80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -188,7 +188,7 @@ ignore_missing_imports = true namespace_packages = false strict = true - +warn_unreachable = true enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] diff --git a/src/zarr/codecs/_v2.py b/src/zarr/codecs/_v2.py index ceb3de0a06..c4e4756094 100644 --- a/src/zarr/codecs/_v2.py +++ b/src/zarr/codecs/_v2.py @@ -21,9 +21,6 @@ async def _decode_single( chunk_bytes: Buffer, chunk_spec: ArraySpec, ) -> NDBuffer: - if chunk_bytes is None: - return None - if self.compressor is not None: compressor = numcodecs.get_codec(self.compressor) chunk_numpy_array = 
ensure_ndarray( diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index e8921b8beb..acba698d94 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -77,10 +77,10 @@ def parse_blocksize(data: JSON) -> int: class BloscCodec(BytesBytesCodec): is_fixed_size = False - typesize: int + typesize: int | None cname: BloscCname = BloscCname.zstd clevel: int = 5 - shuffle: BloscShuffle = BloscShuffle.noshuffle + shuffle: BloscShuffle | None = BloscShuffle.noshuffle blocksize: int = 0 def __init__( diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 40abe12932..945c6160ad 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -120,9 +120,9 @@ async def get_partial_values( async def set(self, key: str, value: Buffer) -> None: self._check_writable() assert isinstance(key, str) - if isinstance(value, bytes | bytearray): + if isinstance(value, bytes | bytearray): # type:ignore[unreachable] # TODO: to support the v2 tests, we convert bytes to Buffer here - value = Buffer.from_bytes(value) + value = Buffer.from_bytes(value) # type:ignore[unreachable] if not isinstance(value, Buffer): raise TypeError("LocalStore.set(): `value` must a Buffer instance") path = self.root / key @@ -134,10 +134,7 @@ async def set_partial_values(self, key_start_values: list[tuple[str, int, bytes] for key, start, value in key_start_values: assert isinstance(key, str) path = self.root / key - if start is not None: - args.append((_put, path, value, start)) - else: - args.append((_put, path, value)) + args.append((_put, path, value, start)) await concurrent_map(args, to_thread, limit=None) # TODO: fix limit async def delete(self, key: str) -> None: diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index 74bb5454fe..fd6fadd3ee 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -52,9 +52,9 @@ async def exists(self, key: str) -> bool: async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: self._check_writable() assert isinstance(key, str) - if isinstance(value, bytes | bytearray): + if isinstance(value, bytes | bytearray): # type:ignore[unreachable] # TODO: to support the v2 tests, we convert bytes to Buffer here - value = Buffer.from_bytes(value) + value = Buffer.from_bytes(value) # type:ignore[unreachable] if not isinstance(value, Buffer): raise TypeError(f"Expected Buffer. Got {type(value)}.") From 67d521e90529eb4e898af9c7fde6aa639da341d0 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 1 Jun 2024 16:56:59 +0100 Subject: [PATCH 0557/1078] Run sphinx directly on readthedocs (#1919) * Run sphinx directly on readthedocs * Update doc build script --- .readthedocs.yaml | 12 ++++++++---- pyproject.toml | 3 +-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 8c791a292e..cae58c064a 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,15 +1,19 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: python: "3.10" - commands: - - pip install hatch - - hatch run docs:rtd sphinx: configuration: docs/conf.py fail_on_warning: true formats: all + +python: + install: + - method: pip + path: . 
+ extra_requirements: + - docs diff --git a/pyproject.toml b/pyproject.toml index baacf2fc80..d149470e3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -136,8 +136,7 @@ list-env = "pip list" features = ['docs'] [tool.hatch.envs.docs.scripts] -build = "sphinx-build docs/ docs/_build/" -rtd = "sphinx-build docs/ _readthedocs/html/" +build = "cd docs && make html" serve = "sphinx-autobuild docs docs/_build --ignore 'docs/_autoapi/**/*' --host 0.0.0.0" [tool.ruff] From 2bac07494a37ec638e5bf2bf172cb72a30526886 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 1 Jun 2024 17:02:41 +0100 Subject: [PATCH 0558/1078] Fix list of packages in mypy pre-commit environment (#1907) --- .pre-commit-config.yaml | 15 +++++++++++---- pyproject.toml | 1 + 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 578a66fd07..77b4a45ff5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,9 +27,16 @@ repos: - id: mypy files: src additional_dependencies: - - types-redis - - types-setuptools - - pytest - - numpy + # Package dependencies + - asciitree + - crc32c + - donfig + - fasteners - numcodecs + - numpy + - typing_extensions - zstandard + # Tests + - pytest + # Zarr v2 + - types-redis diff --git a/pyproject.toml b/pyproject.toml index d149470e3a..1f3ec86507 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ maintainers = [ { name = "Ryan Abernathey" } ] requires-python = ">=3.10" +# If you add a new dependency here, please also add it to .pre-commit-config.yml dependencies = [ 'asciitree', 'numpy>=1.24', From bd6cf32197cd66e25a73cad2ac8620e537e68743 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jun 2024 09:06:38 -0700 Subject: [PATCH 0559/1078] Bump the actions group with 6 updates (#1904) Bumps the actions group with 6 updates: | Package | From | To | | --- | --- | --- | | [actions/checkout](https://github.com/actions/checkout) | `3` | `4` | | [github/codeql-action](https://github.com/github/codeql-action) | `2` | `3` | | [actions/setup-python](https://github.com/actions/setup-python) | `4` | `5` | | [actions/upload-artifact](https://github.com/actions/upload-artifact) | `3` | `4` | | [actions/download-artifact](https://github.com/actions/download-artifact) | `3` | `4` | | [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) | `1.8.10` | `1.8.14` | Updates `actions/checkout` from 3 to 4 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) Updates `github/codeql-action` from 2 to 3 - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) Updates `actions/setup-python` from 4 to 5 - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) Updates `actions/upload-artifact` from 3 to 4 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) Updates `actions/download-artifact` from 3 to 4 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4) Updates 
`pypa/gh-action-pypi-publish` from 1.8.10 to 1.8.14 - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.10...v1.8.14) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: actions - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch dependency-group: actions ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joe Hamman --- .github/workflows/codeql-analysis.yml | 6 +++--- .github/workflows/releases.yml | 10 +++++----- .github/workflows/test.yml | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 7013f1784f..bb3d433629 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -42,7 +42,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -56,7 +56,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. 
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun @@ -69,4 +69,4 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 4d9565a6ab..51fcf08591 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -16,7 +16,7 @@ jobs: submodules: true fetch-depth: 0 - - uses: actions/setup-python@v4.7.1 + - uses: actions/setup-python@v5 name: Install Python with: python-version: '3.11' @@ -27,7 +27,7 @@ jobs: pip install hatch - name: Build wheel and sdist run: hatch build - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: releases path: dist @@ -36,7 +36,7 @@ jobs: needs: [build_artifacts] runs-on: ubuntu-latest steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: releases path: dist @@ -51,11 +51,11 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.10 + - uses: pypa/gh-action-pypi-publish@v1.8.14 with: user: __token__ password: ${{ secrets.pypi_password }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index dffcf3a8ee..a0e67ad79a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -26,9 +26,9 @@ jobs: dependency-set: ["minimal", "optional"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' From 19365e20522e5d67b0b14698a7c6c953c450c5c6 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Sat, 1 Jun 2024 21:22:18 +0100 Subject: [PATCH 0560/1078] Fix final typing errors (#1939) --- pyproject.toml | 27 --------------------------- src/zarr/array.py | 6 +++--- src/zarr/metadata.py | 19 ++++++++++--------- src/zarr/store/remote.py | 5 +++-- 4 files changed, 16 insertions(+), 41 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1f3ec86507..e9722711b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,33 +198,6 @@ module = [ ] ignore_errors = true -[[tool.mypy.overrides]] -module = [ - "tests.*", -] -check_untyped_defs = false - -[[tool.mypy.overrides]] -module = [ - "zarr.array", - "zarr.buffer" -] -disallow_untyped_calls = false - -[[tool.mypy.overrides]] -module = [ - "zarr.array", -] -disallow_untyped_defs = false - - -[[tool.mypy.overrides]] -module = [ - "zarr.metadata", - "zarr.store.remote" -] -warn_return_any = false - [tool.pytest.ini_options] minversion = "7" testpaths = ["tests"] diff --git a/src/zarr/array.py b/src/zarr/array.py index cb780bb8d7..278e58d2d6 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -473,8 +473,8 @@ async def update_attributes(self, new_attributes: dict[str, JSON]) -> AsyncArray def __repr__(self) -> str: return f"" - async def info(self): - return NotImplemented + async def info(self) -> None: + raise NotImplementedError @dataclass(frozen=True) @@ -609,7 +609,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array: def __repr__(self) -> str: return f"" - def info(self): + def info(self) -> None: return sync( self._async_array.info(), ) diff 
--git a/src/zarr/metadata.py b/src/zarr/metadata.py index 29d0d19a06..39a1d53199 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -261,7 +261,8 @@ def _json_convert(o: np.dtype[Any] | Enum | Codec) -> str | dict[str, Any]: # this serializes numcodecs compressors # todo: implement to_dict for codecs elif isinstance(o, numcodecs.abc.Codec): - return o.get_config() + config: dict[str, Any] = o.get_config() + return config raise TypeError return { @@ -270,14 +271,14 @@ def _json_convert(o: np.dtype[Any] | Enum | Codec) -> str | dict[str, Any]: @classmethod def from_dict(cls, data: dict[str, JSON]) -> ArrayV3Metadata: + # TODO: Remove the type: ignores[] comments below and use a TypedDict to type `data` # check that the zarr_format attribute is correct - _ = parse_zarr_format_v3(data.pop("zarr_format")) + _ = parse_zarr_format_v3(data.pop("zarr_format")) # type: ignore[arg-type] # check that the node_type attribute is correct - _ = parse_node_type_array(data.pop("node_type")) + _ = parse_node_type_array(data.pop("node_type")) # type: ignore[arg-type] data["dimension_names"] = data.pop("dimension_names", None) - # TODO: Remove the ignores and use a TypedDict to type `data` return cls(**data) # type: ignore[arg-type] def to_dict(self) -> dict[str, Any]: @@ -450,32 +451,32 @@ def parse_attributes(data: None | dict[str, JSON]) -> dict[str, JSON]: # todo: move to its own module and drop _v3 suffix # todo: consider folding all the literal parsing into a single function # that takes 2 arguments -def parse_zarr_format_v3(data: Any) -> Literal[3]: +def parse_zarr_format_v3(data: Literal[3]) -> Literal[3]: if data == 3: return data raise ValueError(f"Invalid value. Expected 3. Got {data}.") # todo: move to its own module and drop _v2 suffix -def parse_zarr_format_v2(data: Any) -> Literal[2]: +def parse_zarr_format_v2(data: Literal[2]) -> Literal[2]: if data == 2: return data raise ValueError(f"Invalid value. Expected 2. Got {data}.") -def parse_node_type_array(data: Any) -> Literal["array"]: +def parse_node_type_array(data: Literal["array"]) -> Literal["array"]: if data == "array": return data raise ValueError(f"Invalid value. Expected 'array'. 
Got {data}.") # todo: real validation -def parse_filters(data: Any) -> list[dict[str, JSON]]: +def parse_filters(data: list[dict[str, JSON]] | None) -> list[dict[str, JSON]] | None: return data # todo: real validation -def parse_compressor(data: Any) -> dict[str, JSON] | None: +def parse_compressor(data: dict[str, JSON] | None) -> dict[str, JSON] | None: return data diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 3b086f0a03..60217fb72c 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -62,7 +62,7 @@ async def get( path = _dereference_path(root, key) try: - value = await ( + value: Buffer | None = await ( fs._cat_file(path, start=byte_range[0], end=byte_range[1]) if byte_range else fs._cat_file(path) @@ -96,4 +96,5 @@ async def delete(self, key: str) -> None: async def exists(self, key: str) -> bool: fs, root = self._make_fs() path = _dereference_path(root, key) - return await fs._exists(path) + exists: bool = await fs._exists(path) + return exists From c2a1d2e3dde9e82ecfd30c5bb4a8cff62d968b02 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Sun, 2 Jun 2024 06:14:54 -0700 Subject: [PATCH 0561/1078] feature(typing): add py.typed file to package root (#1935) --- src/zarr/py.typed | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 src/zarr/py.typed diff --git a/src/zarr/py.typed b/src/zarr/py.typed new file mode 100644 index 0000000000..e69de29bb2 From bd20e4624f7875eef6bf00c38ef35b1787592e2b Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 2 Jun 2024 23:33:34 +0200 Subject: [PATCH 0562/1078] Apply preview ruff rules (#1942) * Apply ruff rule RUF022 RUF022 `__all__` is not sorted * Apply ruff rule RUF029 RUF029 Function is declared `async`, but doesn't `await` or use `async` features. 
--- src/zarr/codecs/__init__.py | 4 ++-- tests/v3/test_group.py | 2 +- tests/v3/test_sync.py | 3 +-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/zarr/codecs/__init__.py b/src/zarr/codecs/__init__.py index 3ef3a87db7..9394284319 100644 --- a/src/zarr/codecs/__init__.py +++ b/src/zarr/codecs/__init__.py @@ -11,12 +11,12 @@ __all__ = [ "BatchedCodecPipeline", - "BloscCodec", "BloscCname", + "BloscCodec", "BloscShuffle", "BytesCodec", - "Endian", "Crc32cCodec", + "Endian", "GzipCodec", "ShardingCodec", "ShardingCodecIndexLocation", diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 36b82f413c..4d55d72282 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -368,7 +368,7 @@ async def test_asyncgroup_update_attributes( @pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) @pytest.mark.parametrize("zarr_format", (2, 3)) -async def test_group_init(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: +def test_group_init(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: agroup = sync(AsyncGroup.create(store=store, zarr_format=zarr_format)) group = Group(agroup) assert group._async_group == agroup diff --git a/tests/v3/test_sync.py b/tests/v3/test_sync.py index 5b953573d8..7e3b8dd111 100644 --- a/tests/v3/test_sync.py +++ b/tests/v3/test_sync.py @@ -1,5 +1,4 @@ import asyncio -import time from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch @@ -48,7 +47,7 @@ def test_sync_timeout() -> None: duration = 0.002 async def foo() -> None: - time.sleep(duration) + await asyncio.sleep(duration) with pytest.raises(asyncio.TimeoutError): sync(foo(), timeout=duration / 2) From 56d36e3bc02ff818e879ad29c6d35dcb4bf52f41 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 2 Jun 2024 23:37:07 +0200 Subject: [PATCH 0563/1078] Enable and apply ruff rule RUF009 (#1941) RUF009 Do not perform function call `cast` in dataclass defaults --- pyproject.toml | 1 - src/zarr/metadata.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e9722711b1..80e03322a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -177,7 +177,6 @@ extend-select = [ ignore = [ "RUF003", "RUF005", - "RUF009", "RUF012", "RUF015", ] diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 39a1d53199..b461449991 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -5,7 +5,7 @@ from collections.abc import Iterable from dataclasses import dataclass, field, replace from enum import Enum -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any import numpy as np import numpy.typing as npt @@ -310,7 +310,7 @@ class ArrayV2Metadata(ArrayMetadata): filters: list[dict[str, JSON]] | None = None dimension_separator: Literal[".", "/"] = "." 
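# An aside, not part of the patch: RUF009 forbids arbitrary function calls as
# dataclass field defaults. The flagged call here was cast() wrapped around
# field(); dataclasses.field itself is exempt from the rule, so dropping the
# redundant cast just below satisfies RUF009 without changing behaviour.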
compressor: dict[str, JSON] | None = None - attributes: dict[str, JSON] = cast(dict[str, JSON], field(default_factory=dict)) + attributes: dict[str, JSON] = field(default_factory=dict) zarr_format: Literal[2] = field(init=False, default=2) def __init__( From 24e855c765ac82919ba70bbea03108bdcd8078e6 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Mon, 3 Jun 2024 07:49:59 +0200 Subject: [PATCH 0564/1078] Support all indexing variants (#1917) --- src/zarr/abc/codec.py | 25 +- src/zarr/array.py | 263 +++++- src/zarr/buffer.py | 15 +- src/zarr/chunk_grids.py | 21 +- src/zarr/codecs/pipeline.py | 88 +- src/zarr/codecs/sharding.py | 20 +- src/zarr/indexing.py | 1207 ++++++++++++++++++++++-- src/zarr/metadata.py | 1 + src/zarr/v2/indexing.py | 8 +- tests/v3/test_indexing.py | 1721 +++++++++++++++++++++++++++++++++++ 10 files changed, 3200 insertions(+), 169 deletions(-) create mode 100644 tests/v3/test_indexing.py diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 028d1757ce..0836d878ae 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -13,7 +13,8 @@ if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import ArraySpec, SliceSelection + from zarr.common import ArraySpec + from zarr.indexing import SelectorTuple from zarr.metadata import ArrayMetadata @@ -155,13 +156,13 @@ class ArrayBytesCodecPartialDecodeMixin: """Mixin for array-to-bytes codecs that implement partial decoding.""" async def _decode_partial_single( - self, byte_getter: ByteGetter, selection: SliceSelection, chunk_spec: ArraySpec + self, byte_getter: ByteGetter, selection: SelectorTuple, chunk_spec: ArraySpec ) -> NDBuffer | None: raise NotImplementedError async def decode_partial( self, - batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]], + batch_info: Iterable[tuple[ByteGetter, SelectorTuple, ArraySpec]], ) -> Iterable[NDBuffer | None]: """Partially decodes a batch of chunks. This method determines parts of a chunk from the slice selection, @@ -169,7 +170,7 @@ async def decode_partial( Parameters ---------- - batch_info : Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]] + batch_info : Iterable[tuple[ByteGetter, SelectorTuple, ArraySpec]] Ordered set of information about slices of encoded chunks. The slice selection determines which parts of the chunk will be fetched. The ByteGetter is used to fetch the necessary bytes. @@ -196,14 +197,14 @@ async def _encode_partial_single( self, byte_setter: ByteSetter, chunk_array: NDBuffer, - selection: SliceSelection, + selection: SelectorTuple, chunk_spec: ArraySpec, ) -> None: raise NotImplementedError async def encode_partial( self, - batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]], + batch_info: Iterable[tuple[ByteSetter, NDBuffer, SelectorTuple, ArraySpec]], ) -> None: """Partially encodes a batch of chunks. This method determines parts of a chunk from the slice selection, encodes them and @@ -213,7 +214,7 @@ async def encode_partial( Parameters ---------- - batch_info : Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]] + batch_info : Iterable[tuple[ByteSetter, NDBuffer, SelectorTuple, ArraySpec]] Ordered set of information about slices of to-be-encoded chunks. The slice selection determines which parts of the chunk will be encoded. The ByteSetter is used to write the necessary bytes and fetch bytes for existing chunk data. 
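An aside, not part of the patch: the hunks above and below widen the partial-codec interface from SliceSelection, which could only express tuples of slices, to SelectorTuple, which also admits integers and integer arrays; the aliases themselves are defined in src/zarr/indexing.py further down. A hedged illustration with made-up values:

import numpy as np

# a SliceSelection could only describe rectangular regions:
rectangular = (slice(0, 4), slice(2, 6))

# a SelectorTuple may additionally carry integer and integer-array selectors,
# which coordinate and orthogonal indexing produce per dimension:
fancy = (2, slice(0, 4), np.array([1, 3, 5]))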
@@ -342,15 +343,16 @@ async def encode( @abstractmethod async def read( self, - batch_info: Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]], + batch_info: Iterable[tuple[ByteGetter, ArraySpec, SelectorTuple, SelectorTuple]], out: NDBuffer, + drop_axes: tuple[int, ...] = (), ) -> None: """Reads chunk data from the store, decodes it and writes it into an output array. Partial decoding may be utilized if the codecs and stores support it. Parameters ---------- - batch_info : Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]] + batch_info : Iterable[tuple[ByteGetter, ArraySpec, SelectorTuple, SelectorTuple]] Ordered set of information about the chunks. The first slice selection determines which parts of the chunk will be fetched. The second slice selection determines where in the output array the chunk data will be written. @@ -363,8 +365,9 @@ async def read( @abstractmethod async def write( self, - batch_info: Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]], + batch_info: Iterable[tuple[ByteSetter, ArraySpec, SelectorTuple, SelectorTuple]], value: NDBuffer, + drop_axes: tuple[int, ...] = (), ) -> None: """Encodes chunk data and writes it to the store. Merges with existing chunk data by reading first, if necessary. @@ -372,7 +375,7 @@ async def write( Parameters ---------- - batch_info : Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]] + batch_info : Iterable[tuple[ByteSetter, ArraySpec, SelectorTuple, SelectorTuple]] Ordered set of information about the chunks. The first slice selection determines which parts of the chunk will be encoded. The second slice selection determines where in the value array the chunk data is located. diff --git a/src/zarr/array.py b/src/zarr/array.py index 278e58d2d6..a45e7c6ba2 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -12,7 +12,7 @@ from asyncio import gather from collections.abc import Iterable from dataclasses import dataclass, replace -from typing import Any, Literal +from typing import Any, Literal, cast import numpy as np import numpy.typing as npt @@ -33,9 +33,32 @@ Selection, ZarrFormat, concurrent_map, + product, ) from zarr.config import config, parse_indexing_order -from zarr.indexing import BasicIndexer +from zarr.indexing import ( + BasicIndexer, + BasicSelection, + BlockIndex, + BlockIndexer, + BlockSelection, + CoordinateIndexer, + CoordinateSelection, + Fields, + Indexer, + MaskIndexer, + MaskSelection, + OIndex, + OrthogonalIndexer, + OrthogonalSelection, + VIndex, + check_fields, + check_no_multi_fields, + is_pure_fancy_indexing, + is_pure_orthogonal_indexing, + is_scalar, + pop_fields, +) from zarr.metadata import ArrayMetadata, ArrayV2Metadata, ArrayV3Metadata from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import sync @@ -355,6 +378,51 @@ def dtype(self) -> np.dtype[Any]: def attrs(self) -> dict[str, JSON]: return self.metadata.attributes + async def _get_selection( + self, + indexer: Indexer, + *, + out: NDBuffer | None = None, + factory: Factory.Create = NDBuffer.create, + fields: Fields | None = None, + ) -> NDArrayLike: + # check fields are sensible + out_dtype = check_fields(fields, self.dtype) + + # setup output buffer + if out is not None: + if isinstance(out, NDBuffer): + out_buffer = out + else: + raise TypeError(f"out argument needs to be an NDBuffer. Got {type(out)!r}") + if out_buffer.shape != indexer.shape: + raise ValueError( + f"shape of out argument doesn't match. 
Expected {indexer.shape}, got {out.shape}" + ) + else: + out_buffer = factory( + shape=indexer.shape, + dtype=out_dtype, + order=self.order, + fill_value=self.metadata.fill_value, + ) + if product(indexer.shape) > 0: + # reading chunks and decoding them + await self.metadata.codec_pipeline.read( + [ + ( + self.store_path / self.metadata.encode_chunk_key(chunk_coords), + self.metadata.get_chunk_spec(chunk_coords, self.order), + chunk_selection, + out_selection, + ) + for chunk_coords, chunk_selection, out_selection in indexer + ], + out_buffer, + drop_axes=indexer.drop_axes, + ) + return out_buffer.as_ndarray_like() + async def getitem( self, selection: Selection, *, factory: Factory.Create = NDBuffer.create ) -> NDArrayLike: @@ -363,48 +431,24 @@ async def getitem( shape=self.metadata.shape, chunk_grid=self.metadata.chunk_grid, ) - - # setup output array - out = factory( - shape=indexer.shape, - dtype=self.metadata.dtype, - order=self.order, - fill_value=0, # TODO use fill_value - ) - - # reading chunks and decoding them - await self.metadata.codec_pipeline.read( - [ - ( - self.store_path / self.metadata.encode_chunk_key(chunk_coords), - self.metadata.get_chunk_spec(chunk_coords, self.order), - chunk_selection, - out_selection, - ) - for chunk_coords, chunk_selection, out_selection in indexer - ], - out, - ) - return out.as_ndarray_like() + return await self._get_selection(indexer, factory=factory) async def _save_metadata(self, metadata: ArrayMetadata) -> None: to_save = metadata.to_buffer_dict() awaitables = [set_or_delete(self.store_path / key, value) for key, value in to_save.items()] await gather(*awaitables) - async def setitem( + async def _set_selection( self, - selection: Selection, + indexer: Indexer, value: NDArrayLike, + *, factory: Factory.NDArrayLike = NDBuffer.from_ndarray_like, + fields: Fields | None = None, ) -> None: - indexer = BasicIndexer( - selection, - shape=self.metadata.shape, - chunk_grid=self.metadata.chunk_grid, - ) - - sel_shape = indexer.shape + # check fields are sensible + check_fields(fields, self.dtype) + fields = check_no_multi_fields(fields) # check value shape if np.isscalar(value): @@ -412,7 +456,9 @@ async def setitem( else: if not hasattr(value, "shape"): value = np.asarray(value, self.metadata.dtype) - assert value.shape == sel_shape + # assert ( + # value.shape == indexer.shape + # ), f"shape of value doesn't match indexer shape. 
Expected {indexer.shape}, got {value.shape}" if value.dtype.name != self.metadata.dtype.name: value = value.astype(self.metadata.dtype, order="A") @@ -433,7 +479,21 @@ async def setitem( for chunk_coords, chunk_selection, out_selection in indexer ], value_buffer, + drop_axes=indexer.drop_axes, + ) + + async def setitem( + self, + selection: Selection, + value: NDArrayLike, + factory: Factory.NDArrayLike = NDBuffer.from_ndarray_like, + ) -> None: + indexer = BasicIndexer( + selection, + shape=self.metadata.shape, + chunk_grid=self.metadata.chunk_grid, ) + return await self._set_selection(indexer, value, factory=factory) async def resize( self, new_shape: ChunkCoords, delete_outside_chunks: bool = True @@ -583,14 +643,135 @@ def order(self) -> Literal["C", "F"]: return self._async_array.order def __getitem__(self, selection: Selection) -> NDArrayLike: - return sync( - self._async_array.getitem(selection), - ) + fields, pure_selection = pop_fields(selection) + if is_pure_fancy_indexing(pure_selection, self.ndim): + return self.vindex[cast(CoordinateSelection | MaskSelection, selection)] + elif is_pure_orthogonal_indexing(pure_selection, self.ndim): + return self.get_orthogonal_selection(pure_selection, fields=fields) + else: + return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields) def __setitem__(self, selection: Selection, value: NDArrayLike) -> None: - sync( - self._async_array.setitem(selection, value), - ) + fields, pure_selection = pop_fields(selection) + if is_pure_fancy_indexing(pure_selection, self.ndim): + self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value + elif is_pure_orthogonal_indexing(pure_selection, self.ndim): + self.set_orthogonal_selection(pure_selection, value, fields=fields) + else: + self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields) + + def get_basic_selection( + self, + selection: BasicSelection = Ellipsis, + out: NDBuffer | None = None, + fields: Fields | None = None, + ) -> NDArrayLike: + if self.shape == (): + raise NotImplementedError + else: + return sync( + self._async_array._get_selection( + BasicIndexer(selection, self.shape, self.metadata.chunk_grid), + out=out, + fields=fields, + ) + ) + + def set_basic_selection( + self, selection: BasicSelection, value: NDArrayLike, fields: Fields | None = None + ) -> None: + indexer = BasicIndexer(selection, self.shape, self.metadata.chunk_grid) + sync(self._async_array._set_selection(indexer, value, fields=fields)) + + def get_orthogonal_selection( + self, + selection: OrthogonalSelection, + out: NDBuffer | None = None, + fields: Fields | None = None, + ) -> NDArrayLike: + indexer = OrthogonalIndexer(selection, self.shape, self.metadata.chunk_grid) + return sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + + def set_orthogonal_selection( + self, selection: OrthogonalSelection, value: NDArrayLike, fields: Fields | None = None + ) -> None: + indexer = OrthogonalIndexer(selection, self.shape, self.metadata.chunk_grid) + return sync(self._async_array._set_selection(indexer, value, fields=fields)) + + def get_mask_selection( + self, mask: MaskSelection, out: NDBuffer | None = None, fields: Fields | None = None + ) -> NDArrayLike: + indexer = MaskIndexer(mask, self.shape, self.metadata.chunk_grid) + return sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + + def set_mask_selection( + self, mask: MaskSelection, value: NDArrayLike, fields: Fields | None = None + ) -> None: + indexer 
= MaskIndexer(mask, self.shape, self.metadata.chunk_grid) + sync(self._async_array._set_selection(indexer, value, fields=fields)) + + def get_coordinate_selection( + self, + selection: CoordinateSelection, + out: NDBuffer | None = None, + fields: Fields | None = None, + ) -> NDArrayLike: + indexer = CoordinateIndexer(selection, self.shape, self.metadata.chunk_grid) + out_array = sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + + # restore shape + out_array = out_array.reshape(indexer.sel_shape) + return out_array + + def set_coordinate_selection( + self, selection: CoordinateSelection, value: NDArrayLike, fields: Fields | None = None + ) -> None: + # setup indexer + indexer = CoordinateIndexer(selection, self.shape, self.metadata.chunk_grid) + + # handle value - need ndarray-like flatten value + if not is_scalar(value, self.dtype): + try: + from numcodecs.compat import ensure_ndarray_like + + value = ensure_ndarray_like(value) # TODO replace with agnostic + except TypeError: + # Handle types like `list` or `tuple` + value = np.array(value) # TODO replace with agnostic + if hasattr(value, "shape") and len(value.shape) > 1: + value = value.reshape(-1) + + sync(self._async_array._set_selection(indexer, value, fields=fields)) + + def get_block_selection( + self, + selection: BlockSelection, + out: NDBuffer | None = None, + fields: Fields | None = None, + ) -> NDArrayLike: + indexer = BlockIndexer(selection, self.shape, self.metadata.chunk_grid) + return sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + + def set_block_selection( + self, + selection: BlockSelection, + value: NDArrayLike, + fields: Fields | None = None, + ) -> None: + indexer = BlockIndexer(selection, self.shape, self.metadata.chunk_grid) + sync(self._async_array._set_selection(indexer, value, fields=fields)) + + @property + def vindex(self) -> VIndex: + return VIndex(self) + + @property + def oindex(self) -> OIndex: + return OIndex(self) + + @property + def blocks(self) -> BlockIndex: + return BlockIndex(self) def resize(self, new_shape: ChunkCoords) -> Array: return type(self)( diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 59994e70d6..138c7f66d2 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -63,7 +63,9 @@ def __getitem__(self, key: slice) -> Self: ... def __setitem__(self, key: slice, value: Any) -> None: ... - def reshape(self, shape: ChunkCoords, *, order: Literal["A", "C", "F"] = ...) -> Self: ... + def reshape( + self, shape: ChunkCoords | Literal[-1], *, order: Literal["A", "C", "F"] = ... + ) -> Self: ... def view(self, dtype: npt.DTypeLike) -> Self: ... 
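An aside, not part of the patch: with the vindex, oindex, and blocks accessors added to Array above, the v3 class now exposes essentially the same selection surface as zarr v2. A hedged usage sketch, assuming `arr` is an existing two-dimensional zarr.Array with several chunks per axis:

import numpy as np

# `arr` is assumed to have been created elsewhere (see lead-in)
rows = arr.oindex[np.array([0, 2]), :]  # orthogonal (outer) selection
points = arr.vindex[np.array([0, 2]), np.array([1, 3])]  # pointwise coordinate selection
one_chunk = arr.blocks[0, 1]  # a whole chunk, addressed by its position on the chunk grid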
@@ -304,7 +306,7 @@ class NDBuffer: """ def __init__(self, array: NDArrayLike): - assert array.ndim > 0 + # assert array.ndim > 0 assert array.dtype != object self._data = array @@ -418,7 +420,11 @@ def byteorder(self) -> Endian: else: return Endian(sys.byteorder) - def reshape(self, newshape: ChunkCoords) -> Self: + def reshape(self, newshape: ChunkCoords | Literal[-1]) -> Self: + return self.__class__(self._data.reshape(newshape)) + + def squeeze(self, axis: tuple[int, ...]) -> Self: + newshape = tuple(a for i, a in enumerate(self.shape) if i not in axis) return self.__class__(self._data.reshape(newshape)) def astype(self, dtype: npt.DTypeLike, order: Literal["K", "A", "C", "F"] = "K") -> Self: @@ -435,6 +441,9 @@ def __setitem__(self, key: Any, value: Any) -> None: def __len__(self) -> int: return self._data.__len__() + def __repr__(self) -> str: + return f"" + def all_equal(self, other: Any) -> bool: return bool((self._data == other).all()) diff --git a/src/zarr/chunk_grids.py b/src/zarr/chunk_grids.py index f6366b8038..941f799849 100644 --- a/src/zarr/chunk_grids.py +++ b/src/zarr/chunk_grids.py @@ -1,8 +1,11 @@ from __future__ import annotations import itertools +import operator +from abc import abstractmethod from collections.abc import Iterator from dataclasses import dataclass +from functools import reduce from typing import TYPE_CHECKING from zarr.abc.metadata import Metadata @@ -13,7 +16,7 @@ parse_named_configuration, parse_shapelike, ) -from zarr.indexing import _ceildiv +from zarr.indexing import ceildiv if TYPE_CHECKING: from typing_extensions import Self @@ -31,8 +34,13 @@ def from_dict(cls, data: dict[str, JSON] | ChunkGrid) -> ChunkGrid: return RegularChunkGrid._from_dict(data) raise ValueError(f"Unknown chunk grid. Got {name_parsed}.") + @abstractmethod def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]: - raise NotImplementedError + pass + + @abstractmethod + def get_nchunks(self, array_shape: ChunkCoords) -> int: + pass @dataclass(frozen=True) @@ -55,5 +63,12 @@ def to_dict(self) -> dict[str, JSON]: def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]: return itertools.product( - *(range(0, _ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape, strict=False)) + *(range(0, ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape, strict=False)) + ) + + def get_nchunks(self, array_shape: ChunkCoords) -> int: + return reduce( + operator.mul, + (ceildiv(s, c) for s, c in zip(array_shape, self.chunk_shape, strict=True)), + 1, ) diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index 6f493c9e81..ada4ae23f9 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -20,13 +20,13 @@ from zarr.codecs.registry import get_codec_class from zarr.common import JSON, concurrent_map, parse_named_configuration from zarr.config import config -from zarr.indexing import is_total_slice +from zarr.indexing import SelectorTuple, is_scalar, is_total_slice from zarr.metadata import ArrayMetadata if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import ArraySpec, SliceSelection + from zarr.common import ArraySpec T = TypeVar("T") U = TypeVar("U") @@ -247,7 +247,7 @@ async def decode_batch( async def decode_partial_batch( self, - batch_info: Iterable[tuple[ByteGetter, SliceSelection, ArraySpec]], + batch_info: Iterable[tuple[ByteGetter, SelectorTuple, ArraySpec]], ) -> Iterable[NDBuffer | None]: assert self.supports_partial_decode assert isinstance(self.array_bytes_codec, 
ArrayBytesCodecPartialDecodeMixin) @@ -282,7 +282,7 @@ async def encode_batch( async def encode_partial_batch( self, - batch_info: Iterable[tuple[ByteSetter, NDBuffer, SliceSelection, ArraySpec]], + batch_info: Iterable[tuple[ByteSetter, NDBuffer, SelectorTuple, ArraySpec]], ) -> None: assert self.supports_partial_encode assert isinstance(self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin) @@ -290,8 +290,9 @@ async def encode_partial_batch( async def read_batch( self, - batch_info: Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]], + batch_info: Iterable[tuple[ByteGetter, ArraySpec, SelectorTuple, SelectorTuple]], out: NDBuffer, + drop_axes: tuple[int, ...] = (), ) -> None: if self.supports_partial_decode: chunk_array_batch = await self.decode_partial_batch( @@ -326,14 +327,55 @@ async def read_batch( ): if chunk_array is not None: tmp = chunk_array[chunk_selection] + if drop_axes != (): + tmp = tmp.squeeze(axis=drop_axes) out[out_selection] = tmp else: out[out_selection] = chunk_spec.fill_value + def _merge_chunk_array( + self, + existing_chunk_array: NDBuffer | None, + value: NDBuffer, + out_selection: SelectorTuple, + chunk_spec: ArraySpec, + chunk_selection: SelectorTuple, + drop_axes: tuple[int, ...], + ) -> NDBuffer: + if is_total_slice(chunk_selection, chunk_spec.shape) and value.shape == chunk_spec.shape: + return value + if existing_chunk_array is None: + chunk_array = NDBuffer.create( + shape=chunk_spec.shape, + dtype=chunk_spec.dtype, + order=chunk_spec.order, + fill_value=chunk_spec.fill_value, + ) + else: + chunk_array = existing_chunk_array.copy() # make a writable copy + if chunk_selection == (): + chunk_value = value + elif is_scalar(value.as_ndarray_like(), chunk_spec.dtype): + chunk_value = value + else: + chunk_value = value[out_selection] + # handle missing singleton dimensions + if drop_axes != (): + item = tuple( + None # equivalent to np.newaxis + if idx in drop_axes + else slice(None) + for idx in range(chunk_spec.ndim) + ) + chunk_value = chunk_value[item] + chunk_array[chunk_selection] = chunk_value + return chunk_array + async def write_batch( self, - batch_info: Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]], + batch_info: Iterable[tuple[ByteSetter, ArraySpec, SelectorTuple, SelectorTuple]], value: NDBuffer, + drop_axes: tuple[int, ...] 
= (), ) -> None: if self.supports_partial_encode: await self.encode_partial_batch( @@ -368,28 +410,10 @@ async def _read_key(byte_setter: ByteSetter | None) -> Buffer | None: ], ) - def _merge_chunk_array( - existing_chunk_array: NDBuffer | None, - new_chunk_array_slice: NDBuffer, - chunk_spec: ArraySpec, - chunk_selection: SliceSelection, - ) -> NDBuffer: - if is_total_slice(chunk_selection, chunk_spec.shape): - return new_chunk_array_slice - if existing_chunk_array is None: - chunk_array = NDBuffer.create( - shape=chunk_spec.shape, - dtype=chunk_spec.dtype, - order=chunk_spec.order, - fill_value=chunk_spec.fill_value, - ) - else: - chunk_array = existing_chunk_array.copy() # make a writable copy - chunk_array[chunk_selection] = new_chunk_array_slice - return chunk_array - chunk_array_batch = [ - _merge_chunk_array(chunk_array, value[out_selection], chunk_spec, chunk_selection) + self._merge_chunk_array( + chunk_array, value, out_selection, chunk_spec, chunk_selection, drop_axes + ) for chunk_array, (_, chunk_spec, chunk_selection, out_selection) in zip( chunk_array_batch, batch_info, strict=False ) @@ -450,12 +474,13 @@ async def encode( async def read( self, - batch_info: Iterable[tuple[ByteGetter, ArraySpec, SliceSelection, SliceSelection]], + batch_info: Iterable[tuple[ByteGetter, ArraySpec, SelectorTuple, SelectorTuple]], out: NDBuffer, + drop_axes: tuple[int, ...] = (), ) -> None: await concurrent_map( [ - (single_batch_info, out) + (single_batch_info, out, drop_axes) for single_batch_info in batched(batch_info, self.batch_size) ], self.read_batch, @@ -464,12 +489,13 @@ async def read( async def write( self, - batch_info: Iterable[tuple[ByteSetter, ArraySpec, SliceSelection, SliceSelection]], + batch_info: Iterable[tuple[ByteSetter, ArraySpec, SelectorTuple, SelectorTuple]], value: NDBuffer, + drop_axes: tuple[int, ...] 
= (), ) -> None: await concurrent_map( [ - (single_batch_info, value) + (single_batch_info, value, drop_axes) for single_batch_info in batched(batch_info, self.batch_size) ], self.write_batch, diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index a7b6edc3b4..dab2810f35 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -33,11 +33,7 @@ parse_shapelike, product, ) -from zarr.indexing import ( - BasicIndexer, - c_order_iter, - morton_order_iter, -) +from zarr.indexing import BasicIndexer, SelectorTuple, c_order_iter, get_indexer, morton_order_iter from zarr.metadata import ArrayMetadata, parse_codecs if TYPE_CHECKING: @@ -45,7 +41,7 @@ from typing_extensions import Self - from zarr.common import JSON, SliceSelection + from zarr.common import JSON MAX_UINT_64 = 2**64 - 1 ShardMapping = Mapping[ChunkCoords, Buffer] @@ -423,7 +419,7 @@ async def _decode_single( async def _decode_partial_single( self, byte_getter: ByteGetter, - selection: SliceSelection, + selection: SelectorTuple, shard_spec: ArraySpec, ) -> NDBuffer | None: shard_shape = shard_spec.shape @@ -431,7 +427,7 @@ async def _decode_partial_single( chunks_per_shard = self._get_chunks_per_shard(shard_spec) chunk_spec = self._get_chunk_spec(shard_spec) - indexer = BasicIndexer( + indexer = get_indexer( selection, shape=shard_shape, chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), @@ -520,7 +516,7 @@ async def _encode_partial_single( self, byte_setter: ByteSetter, shard_array: NDBuffer, - selection: SliceSelection, + selection: SelectorTuple, shard_spec: ArraySpec, ) -> None: shard_shape = shard_spec.shape @@ -535,10 +531,8 @@ async def _encode_partial_single( ) indexer = list( - BasicIndexer( - selection, - shape=shard_shape, - chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape), + get_indexer( + selection, shape=shard_shape, chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape) ) ) diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 6bc83d5062..98130fe0cd 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -2,84 +2,294 @@ import itertools import math -from collections.abc import Iterator -from typing import TYPE_CHECKING, NamedTuple +import numbers +import operator +from collections.abc import Iterator, Sequence +from dataclasses import dataclass +from enum import Enum +from functools import reduce +from types import EllipsisType +from typing import ( + TYPE_CHECKING, + Any, + NamedTuple, + Protocol, + TypeGuard, + TypeVar, + cast, + runtime_checkable, +) -from zarr.common import ChunkCoords, Selection, SliceSelection, product +import numpy as np +import numpy.typing as npt + +from zarr.common import ChunkCoords, product if TYPE_CHECKING: + from zarr.array import Array + from zarr.buffer import NDArrayLike from zarr.chunk_grids import ChunkGrid +BasicSelector = int | slice | EllipsisType +BasicSelectorTuple = tuple[BasicSelector, ...] +BasicSelection = BasicSelector | BasicSelectorTuple +BasicSelectionNormalized = tuple[int | slice, ...] +CoordinateSelector = list[int] | npt.NDArray[np.intp] +CoordinateSelection = CoordinateSelector | tuple[CoordinateSelector, ...] +CoordinateSelectionNormalized = tuple[npt.NDArray[np.intp], ...] +BlockSelector = int | slice +BlockSelection = BlockSelector | tuple[BlockSelector, ...] +BlockSelectionNormalized = tuple[BlockSelector, ...] 
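# An aside, not part of the patch: each selection flavour in this alias block
# comes in a raw and a *Normalized form; normalization (ensure_tuple and
# replace_ellipsis, defined further down) rewrites scalars, lists, and
# ellipsis into the tuple-of-selectors shape the indexer classes consume.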
+MaskSelection = npt.NDArray[np.bool_] +OrthogonalSelector = int | slice | npt.NDArray[np.intp] | npt.NDArray[np.bool_] +OrthogonalSelection = OrthogonalSelector | tuple[OrthogonalSelector, ...] +OrthogonalSelectionNormalized = tuple[OrthogonalSelector, ...] -def _ensure_tuple(v: Selection) -> SliceSelection: - if not isinstance(v, tuple): - v = (v,) - return v +Selection = ( + BasicSelection | CoordinateSelection | BlockSelection | MaskSelection | OrthogonalSelection +) +SelectionNormalized = ( + BasicSelectionNormalized + | CoordinateSelectionNormalized + | BlockSelectionNormalized + | MaskSelection + | OrthogonalSelectionNormalized +) +Selector = int | slice | npt.NDArray[np.intp] | npt.NDArray[np.bool_] +SelectionWithFields = Selection | str | Sequence[str] +SelectorTuple = tuple[Selector, ...] | npt.NDArray[np.intp] | slice +Fields = str | list[str] | tuple[str, ...] + + +class ArrayIndexError(IndexError): + pass + + +class BoundsCheckError(IndexError): + _msg = "" + + def __init__(self, dim_len: int): + self._msg = f"index out of bounds for dimension with length {dim_len}" + + +class NegativeStepError(IndexError): + _msg = "only slices with step >= 1 are supported" -def _err_too_many_indices(selection: SliceSelection, shape: ChunkCoords) -> None: +class VindexInvalidSelectionError(IndexError): + _msg = ( + "unsupported selection type for vectorized indexing; only " + "coordinate selection (tuple of integer arrays) and mask selection " + "(single Boolean array) are supported; got {0!r}" + ) + + +def err_too_many_indices(selection: Any, shape: ChunkCoords) -> None: raise IndexError(f"too many indices for array; expected {len(shape)}, got {len(selection)}") -def _err_negative_step() -> None: - raise IndexError("only slices with step >= 1 are supported") +@runtime_checkable +class Indexer(Protocol): + shape: ChunkCoords + drop_axes: ChunkCoords + def __iter__(self) -> Iterator[ChunkProjection]: ... -def _check_selection_length(selection: SliceSelection, shape: ChunkCoords) -> None: - if len(selection) > len(shape): - _err_too_many_indices(selection, shape) +def ceildiv(a: float, b: float) -> int: + return math.ceil(a / b) -def _ensure_selection( - selection: Selection, - shape: ChunkCoords, -) -> SliceSelection: - selection = _ensure_tuple(selection) - # fill out selection if not completely specified - if len(selection) < len(shape): - selection += (slice(None),) * (len(shape) - len(selection)) +def is_integer(x: Any) -> TypeGuard[int]: + """True if x is an integer (both pure Python or NumPy). + + Note that Python's bool is considered an integer too. + """ + return isinstance(x, numbers.Integral) + + +def is_integer_list(x: Any) -> TypeGuard[list[int]]: + """True if x is a list of integers. + + This function assumes ie *does not check* that all elements of the list + have the same type. Mixed type lists will result in other errors that will + bubble up anyway. 
+ """ + return isinstance(x, list) and len(x) > 0 and is_integer(x[0]) + + +def is_integer_array(x: Any, ndim: int | None = None) -> TypeGuard[npt.NDArray[np.intp]]: + t = not np.isscalar(x) and hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype.kind in "ui" + if ndim is not None: + t = t and hasattr(x, "shape") and len(x.shape) == ndim + return t + + +def is_bool_array(x: Any, ndim: int | None = None) -> TypeGuard[npt.NDArray[np.bool_]]: + t = hasattr(x, "shape") and hasattr(x, "dtype") and x.dtype == bool + if ndim is not None: + t = t and hasattr(x, "shape") and len(x.shape) == ndim + return t + + +def is_scalar(value: Any, dtype: np.dtype[Any]) -> bool: + if np.isscalar(value): + return True + if hasattr(value, "shape") and value.shape == (): + return True + if isinstance(value, tuple) and dtype.names and len(value) == len(dtype.names): + return True + return False + + +def is_pure_fancy_indexing(selection: Any, ndim: int) -> bool: + """Check whether a selection contains only scalars or integer array-likes. + + Parameters + ---------- + selection : tuple, slice, or scalar + A valid selection value for indexing into arrays. + + Returns + ------- + is_pure : bool + True if the selection is a pure fancy indexing expression (ie not mixed + with boolean or slices). + """ + if ndim == 1: + if is_integer_list(selection) or is_integer_array(selection): + return True + # if not, we go through the normal path below, because a 1-tuple + # of integers is also allowed. + no_slicing = ( + isinstance(selection, tuple) + and len(selection) == ndim + and not (any(isinstance(elem, slice) or elem is Ellipsis for elem in selection)) + ) + return ( + no_slicing + and all( + is_integer(elem) or is_integer_list(elem) or is_integer_array(elem) + for elem in selection + ) + and any(is_integer_list(elem) or is_integer_array(elem) for elem in selection) + ) - # check selection not too long - _check_selection_length(selection, shape) - return selection +def is_pure_orthogonal_indexing(selection: Selection, ndim: int) -> TypeGuard[OrthogonalSelection]: + if not ndim: + return False + # Case 1: Selection is a single iterable of integers + if is_integer_list(selection) or is_integer_array(selection, ndim=1): + return True + + # Case two: selection contains either zero or one integer iterables. + # All other selection elements are slices or integers + return ( + isinstance(selection, tuple) + and len(selection) == ndim + and sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 + and all( + is_integer_list(elem) or is_integer_array(elem) or isinstance(elem, int | slice) + for elem in selection + ) + ) + + +def get_chunk_shape(chunk_grid: ChunkGrid) -> ChunkCoords: + from zarr.chunk_grids import RegularChunkGrid + + assert isinstance( + chunk_grid, RegularChunkGrid + ), "Only regular chunk grid is supported, currently." + return chunk_grid.chunk_shape + + +def normalize_integer_selection(dim_sel: int, dim_len: int) -> int: + # normalize type to int + dim_sel = int(dim_sel) + + # handle wraparound + if dim_sel < 0: + dim_sel = dim_len + dim_sel + + # handle out of bounds + if dim_sel >= dim_len or dim_sel < 0: + raise BoundsCheckError(dim_len) + + return dim_sel + + +class ChunkDimProjection(NamedTuple): + """A mapping from chunk to output array for a single dimension. + + Parameters + ---------- + dim_chunk_ix + Index of chunk. + dim_chunk_sel + Selection of items from chunk array. + dim_out_sel + Selection of items in target (output) array. 
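# An aside, not part of the patch: for a 2-d array the predicate above treats
# ([0, 2], [1, 3]) as pure fancy indexing, which Array.__getitem__ routes to
# vindex, while ([0, 2], slice(None)) is not, because any slice or ellipsis
# sends the selection down the orthogonal or basic code path instead.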
+ + """ -class _ChunkDimProjection(NamedTuple): dim_chunk_ix: int - dim_chunk_sel: slice - dim_out_sel: slice | None + dim_chunk_sel: Selector + dim_out_sel: Selector | None -def _ceildiv(a: float, b: float) -> int: - return math.ceil(a / b) +@dataclass(frozen=True) +class IntDimIndexer: + dim_sel: int + dim_len: int + dim_chunk_len: int + nitems: int = 1 + + def __init__(self, dim_sel: int, dim_len: int, dim_chunk_len: int): + object.__setattr__(self, "dim_sel", normalize_integer_selection(dim_sel, dim_len)) + object.__setattr__(self, "dim_len", dim_len) + object.__setattr__(self, "dim_chunk_len", dim_chunk_len) + def __iter__(self) -> Iterator[ChunkDimProjection]: + dim_chunk_ix = self.dim_sel // self.dim_chunk_len + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_chunk_sel = self.dim_sel - dim_offset + dim_out_sel = None + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) -class _SliceDimIndexer: - dim_sel: slice + +@dataclass(frozen=True) +class SliceDimIndexer: dim_len: int dim_chunk_len: int nitems: int + nchunks: int start: int stop: int step: int def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int): - self.start, self.stop, self.step = dim_sel.indices(dim_len) - if self.step < 1: - _err_negative_step() + # normalize + start, stop, step = dim_sel.indices(dim_len) + if step < 1: + raise NegativeStepError + + object.__setattr__(self, "start", start) + object.__setattr__(self, "stop", stop) + object.__setattr__(self, "step", step) - self.dim_len = dim_len - self.dim_chunk_len = dim_chunk_len - self.nitems = max(0, _ceildiv((self.stop - self.start), self.step)) - self.nchunks = _ceildiv(self.dim_len, self.dim_chunk_len) + object.__setattr__(self, "dim_len", dim_len) + object.__setattr__(self, "dim_chunk_len", dim_chunk_len) + object.__setattr__(self, "nitems", max(0, ceildiv((stop - start), step))) + object.__setattr__(self, "nchunks", ceildiv(dim_len, dim_chunk_len)) - def __iter__(self) -> Iterator[_ChunkDimProjection]: + def __iter__(self) -> Iterator[ChunkDimProjection]: # figure out the range of chunks we need to visit dim_chunk_ix_from = self.start // self.dim_chunk_len - dim_chunk_ix_to = _ceildiv(self.stop, self.dim_chunk_len) + dim_chunk_ix_to = ceildiv(self.stop, self.dim_chunk_len) # iterate over chunks in range for dim_chunk_ix in range(dim_chunk_ix_from, dim_chunk_ix_to): @@ -97,7 +307,7 @@ def __iter__(self) -> Iterator[_ChunkDimProjection]: if remainder: dim_chunk_sel_start += self.step - remainder # compute number of previous items, provides offset into output array - dim_out_offset = _ceildiv((dim_offset - self.start), self.step) + dim_out_offset = ceildiv((dim_offset - self.start), self.step) else: # selection starts within current chunk @@ -113,43 +323,609 @@ def __iter__(self) -> Iterator[_ChunkDimProjection]: dim_chunk_sel_stop = self.stop - dim_offset dim_chunk_sel = slice(dim_chunk_sel_start, dim_chunk_sel_stop, self.step) - dim_chunk_nitems = _ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) + dim_chunk_nitems = ceildiv((dim_chunk_sel_stop - dim_chunk_sel_start), self.step) + + # If there are no elements on the selection within this chunk, then skip + if dim_chunk_nitems == 0: + continue + dim_out_sel = slice(dim_out_offset, dim_out_offset + dim_chunk_nitems) - yield _ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +def check_selection_length(selection: SelectionNormalized, shape: ChunkCoords) -> None: + if len(selection) > 
len(shape): + err_too_many_indices(selection, shape) + + +def replace_ellipsis(selection: Any, shape: ChunkCoords) -> SelectionNormalized: + selection = ensure_tuple(selection) + + # count number of ellipsis present + n_ellipsis = sum(1 for i in selection if i is Ellipsis) + + if n_ellipsis > 1: + # more than 1 is an error + raise IndexError("an index can only have a single ellipsis ('...')") + + elif n_ellipsis == 1: + # locate the ellipsis, count how many items to left and right + n_items_l = selection.index(Ellipsis) # items to left of ellipsis + n_items_r = len(selection) - (n_items_l + 1) # items to right of ellipsis + n_items = len(selection) - 1 # all non-ellipsis items + + if n_items >= len(shape): + # ellipsis does nothing, just remove it + selection = tuple(i for i in selection if i != Ellipsis) + + else: + # replace ellipsis with as many slices are needed for number of dims + new_item = selection[:n_items_l] + ((slice(None),) * (len(shape) - n_items)) + if n_items_r: + new_item += selection[-n_items_r:] + selection = new_item + + # fill out selection if not completely specified + if len(selection) < len(shape): + selection += (slice(None),) * (len(shape) - len(selection)) + + # check selection not too long + check_selection_length(selection, shape) + + return cast(SelectionNormalized, selection) + + +def replace_lists(selection: SelectionNormalized) -> SelectionNormalized: + return tuple( + np.asarray(dim_sel) if isinstance(dim_sel, list) else dim_sel for dim_sel in selection + ) -class _ChunkProjection(NamedTuple): +T = TypeVar("T") + + +def ensure_tuple(v: Any) -> SelectionNormalized: + if not isinstance(v, tuple): + v = (v,) + return cast(SelectionNormalized, v) + + +class ChunkProjection(NamedTuple): + """A mapping of items from chunk to output array. Can be used to extract items from the + chunk array for loading into an output array. Can also be used to extract items from a + value array for setting/updating in a chunk array. + + Parameters + ---------- + chunk_coords + Indices of chunk. + chunk_selection + Selection of items from chunk array. + out_selection + Selection of items in target (output) array. + + """ + chunk_coords: ChunkCoords - chunk_selection: SliceSelection - out_selection: SliceSelection + chunk_selection: tuple[Selector, ...] | npt.NDArray[np.intp] + out_selection: tuple[Selector, ...] 
| npt.NDArray[np.intp] | slice + + +def is_slice(s: Any) -> TypeGuard[slice]: + return isinstance(s, slice) + + +def is_contiguous_slice(s: Any) -> TypeGuard[slice]: + return is_slice(s) and (s.step is None or s.step == 1) + +def is_positive_slice(s: Any) -> TypeGuard[slice]: + return is_slice(s) and (s.step is None or s.step >= 1) -class BasicIndexer: - dim_indexers: list[_SliceDimIndexer] + +def is_contiguous_selection(selection: Any) -> TypeGuard[slice]: + selection = ensure_tuple(selection) + return all((is_integer_array(s) or is_contiguous_slice(s) or s == Ellipsis) for s in selection) + + +def is_basic_selection(selection: Any) -> TypeGuard[BasicSelection]: + selection = ensure_tuple(selection) + return all(is_integer(s) or is_positive_slice(s) for s in selection) + + +@dataclass(frozen=True) +class BasicIndexer(Indexer): + dim_indexers: list[IntDimIndexer | SliceDimIndexer] shape: ChunkCoords + drop_axes: ChunkCoords def __init__( self, - selection: Selection, - shape: tuple[int, ...], + selection: BasicSelection, + shape: ChunkCoords, chunk_grid: ChunkGrid, ): - from zarr.chunk_grids import RegularChunkGrid + chunk_shape = get_chunk_shape(chunk_grid) + # handle ellipsis + selection_normalized = replace_ellipsis(selection, shape) - assert isinstance( - chunk_grid, RegularChunkGrid - ), "Only regular chunk grid is supported, currently." # setup per-dimension indexers - self.dim_indexers = [ - _SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) - for dim_sel, dim_len, dim_chunk_len in zip( - _ensure_selection(selection, shape), shape, chunk_grid.chunk_shape, strict=False + dim_indexers: list[IntDimIndexer | SliceDimIndexer] = [] + for dim_sel, dim_len, dim_chunk_len in zip( + selection_normalized, shape, chunk_shape, strict=True + ): + dim_indexer: IntDimIndexer | SliceDimIndexer + if is_integer(dim_sel): + dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif is_slice(dim_sel): + dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) + + else: + raise IndexError( + "unsupported selection item for basic indexing; " + f"expected integer or slice, got {type(dim_sel)!r}" + ) + + dim_indexers.append(dim_indexer) + + object.__setattr__(self, "dim_indexers", dim_indexers) + object.__setattr__( + self, + "shape", + tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)), + ) + object.__setattr__(self, "drop_axes", ()) + + def __iter__(self) -> Iterator[ChunkProjection]: + for dim_projections in itertools.product(*self.dim_indexers): + chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) + chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) + out_selection = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None ) - ] - self.shape = tuple(s.nitems for s in self.dim_indexers) - def __iter__(self) -> Iterator[_ChunkProjection]: + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +@dataclass(frozen=True) +class BoolArrayDimIndexer: + dim_sel: npt.NDArray[np.bool_] + dim_len: int + dim_chunk_len: int + nchunks: int + + chunk_nitems: npt.NDArray[Any] + chunk_nitems_cumsum: npt.NDArray[Any] + nitems: int + dim_chunk_ixs: npt.NDArray[np.intp] + + def __init__(self, dim_sel: npt.NDArray[np.bool_], dim_len: int, dim_chunk_len: int): + # check number of dimensions + if not is_bool_array(dim_sel, 1): + raise IndexError("Boolean arrays in an orthogonal selection must be 1-dimensional only") + + # check shape + if dim_sel.shape[0] != dim_len: + raise IndexError( + f"Boolean array has the wrong 
length for dimension; expected {dim_len}, got {dim_sel.shape[0]}" + ) + + # precompute number of selected items for each chunk + nchunks = ceildiv(dim_len, dim_chunk_len) + chunk_nitems = np.zeros(nchunks, dtype="i8") + for dim_chunk_ix in range(nchunks): + dim_offset = dim_chunk_ix * dim_chunk_len + chunk_nitems[dim_chunk_ix] = np.count_nonzero( + dim_sel[dim_offset : dim_offset + dim_chunk_len] + ) + chunk_nitems_cumsum = np.cumsum(chunk_nitems) + nitems = chunk_nitems_cumsum[-1] + dim_chunk_ixs = np.nonzero(chunk_nitems)[0] + + # store attributes + object.__setattr__(self, "dim_sel", dim_sel) + object.__setattr__(self, "dim_len", dim_len) + object.__setattr__(self, "dim_chunk_len", dim_chunk_len) + object.__setattr__(self, "nchunks", nchunks) + object.__setattr__(self, "chunk_nitems", chunk_nitems) + object.__setattr__(self, "chunk_nitems_cumsum", chunk_nitems_cumsum) + object.__setattr__(self, "nitems", nitems) + object.__setattr__(self, "dim_chunk_ixs", dim_chunk_ixs) + + def __iter__(self) -> Iterator[ChunkDimProjection]: + # iterate over chunks with at least one item + for dim_chunk_ix in self.dim_chunk_ixs: + # find region in chunk + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_chunk_sel = self.dim_sel[dim_offset : dim_offset + self.dim_chunk_len] + + # pad out if final chunk + if dim_chunk_sel.shape[0] < self.dim_chunk_len: + tmp = np.zeros(self.dim_chunk_len, dtype=bool) + tmp[: dim_chunk_sel.shape[0]] = dim_chunk_sel + dim_chunk_sel = tmp + + # find region in output + if dim_chunk_ix == 0: + start = 0 + else: + start = self.chunk_nitems_cumsum[dim_chunk_ix - 1] + stop = self.chunk_nitems_cumsum[dim_chunk_ix] + dim_out_sel = slice(start, stop) + + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +class Order(Enum): + UNKNOWN = 0 + INCREASING = 1 + DECREASING = 2 + UNORDERED = 3 + + @staticmethod + def check(a: npt.NDArray[Any]) -> Order: + diff = np.diff(a) + diff_positive = diff >= 0 + n_diff_positive = np.count_nonzero(diff_positive) + all_increasing = n_diff_positive == len(diff_positive) + any_increasing = n_diff_positive > 0 + if all_increasing: + order = Order.INCREASING + elif any_increasing: + order = Order.UNORDERED + else: + order = Order.DECREASING + return order + + +def wraparound_indices(x: npt.NDArray[Any], dim_len: int) -> None: + loc_neg = x < 0 + if np.any(loc_neg): + x[loc_neg] = x[loc_neg] + dim_len + + +def boundscheck_indices(x: npt.NDArray[Any], dim_len: int) -> None: + if np.any(x < 0) or np.any(x >= dim_len): + raise BoundsCheckError(dim_len) + + +@dataclass(frozen=True) +class IntArrayDimIndexer: + """Integer array selection against a single dimension.""" + + dim_len: int + dim_chunk_len: int + nchunks: int + nitems: int + order: Order + dim_sel: npt.NDArray[np.intp] + dim_out_sel: npt.NDArray[np.intp] + chunk_nitems: int + dim_chunk_ixs: npt.NDArray[np.intp] + chunk_nitems_cumsum: npt.NDArray[np.intp] + + def __init__( + self, + dim_sel: npt.NDArray[np.intp], + dim_len: int, + dim_chunk_len: int, + wraparound: bool = True, + boundscheck: bool = True, + order: Order = Order.UNKNOWN, + ): + # ensure 1d array + dim_sel = np.asanyarray(dim_sel) + if not is_integer_array(dim_sel, 1): + raise IndexError("integer arrays in an orthogonal selection must be 1-dimensional only") + + nitems = len(dim_sel) + nchunks = ceildiv(dim_len, dim_chunk_len) + + # handle wraparound + if wraparound: + wraparound_indices(dim_sel, dim_len) + + # handle out of bounds + if boundscheck: + boundscheck_indices(dim_sel, dim_len) + + # determine which 
chunk is needed for each selection item + # note: for dense integer selections, the division operation here is the + # bottleneck + dim_sel_chunk = dim_sel // dim_chunk_len + + # determine order of indices + if order == Order.UNKNOWN: + order = Order.check(dim_sel) + order = Order(order) + + if order == Order.INCREASING: + dim_sel = dim_sel + dim_out_sel = None + elif order == Order.DECREASING: + dim_sel = dim_sel[::-1] + # TODO should be possible to do this without creating an arange + dim_out_sel = np.arange(nitems - 1, -1, -1) + else: + # sort indices to group by chunk + dim_out_sel = np.argsort(dim_sel_chunk) + dim_sel = np.take(dim_sel, dim_out_sel) + + # precompute number of selected items for each chunk + chunk_nitems = np.bincount(dim_sel_chunk, minlength=nchunks) + + # find chunks that we need to visit + dim_chunk_ixs = np.nonzero(chunk_nitems)[0] + + # compute offsets into the output array + chunk_nitems_cumsum = np.cumsum(chunk_nitems) + + # store attributes + object.__setattr__(self, "dim_len", dim_len) + object.__setattr__(self, "dim_chunk_len", dim_chunk_len) + object.__setattr__(self, "nchunks", nchunks) + object.__setattr__(self, "nitems", nitems) + object.__setattr__(self, "order", order) + object.__setattr__(self, "dim_sel", dim_sel) + object.__setattr__(self, "dim_out_sel", dim_out_sel) + object.__setattr__(self, "chunk_nitems", chunk_nitems) + object.__setattr__(self, "dim_chunk_ixs", dim_chunk_ixs) + object.__setattr__(self, "chunk_nitems_cumsum", chunk_nitems_cumsum) + + def __iter__(self) -> Iterator[ChunkDimProjection]: + for dim_chunk_ix in self.dim_chunk_ixs: + dim_out_sel: slice | npt.NDArray[np.intp] + # find region in output + if dim_chunk_ix == 0: + start = 0 + else: + start = self.chunk_nitems_cumsum[dim_chunk_ix - 1] + stop = self.chunk_nitems_cumsum[dim_chunk_ix] + if self.order == Order.INCREASING: + dim_out_sel = slice(start, stop) + else: + dim_out_sel = self.dim_out_sel[start:stop] + + # find region in chunk + dim_offset = dim_chunk_ix * self.dim_chunk_len + dim_chunk_sel = self.dim_sel[start:stop] - dim_offset + + yield ChunkDimProjection(dim_chunk_ix, dim_chunk_sel, dim_out_sel) + + +def slice_to_range(s: slice, length: int) -> range: + return range(*s.indices(length)) + + +def ix_(selection: Any, shape: ChunkCoords) -> npt.NDArray[np.intp]: + """Convert an orthogonal selection to a numpy advanced (fancy) selection, like numpy.ix_ + but with support for slices and single ints.""" + + # normalisation + selection = replace_ellipsis(selection, shape) + + # replace slice and int as these are not supported by numpy.ix_ + selection = [ + slice_to_range(dim_sel, dim_len) + if isinstance(dim_sel, slice) + else [dim_sel] + if is_integer(dim_sel) + else dim_sel + for dim_sel, dim_len in zip(selection, shape, strict=True) + ] + + # now get numpy to convert to a coordinate selection + selection = np.ix_(*selection) + + return cast(npt.NDArray[np.intp], selection) + + +def oindex(a: npt.NDArray[Any], selection: Selection) -> npt.NDArray[Any]: + """Implementation of orthogonal indexing with slices and ints.""" + selection = replace_ellipsis(selection, a.shape) + drop_axes = tuple(i for i, s in enumerate(selection) if is_integer(s)) + selection = ix_(selection, a.shape) + result = a[selection] + if drop_axes: + result = result.squeeze(axis=drop_axes) + return result + + +def oindex_set(a: npt.NDArray[Any], selection: Selection, value: Any) -> None: + selection = replace_ellipsis(selection, a.shape) + drop_axes = tuple(i for i, s in enumerate(selection) if 
is_integer(s)) + selection = ix_(selection, a.shape) + if not np.isscalar(value) and drop_axes: + value = np.asanyarray(value) + value_selection: list[Selector | None] = [slice(None)] * len(a.shape) + for i in drop_axes: + value_selection[i] = np.newaxis + value = value[tuple(value_selection)] + a[selection] = value + + +@dataclass(frozen=True) +class OrthogonalIndexer(Indexer): + dim_indexers: list[IntDimIndexer | SliceDimIndexer | IntArrayDimIndexer | BoolArrayDimIndexer] + shape: ChunkCoords + chunk_shape: ChunkCoords + is_advanced: bool + drop_axes: tuple[int, ...] + + def __init__(self, selection: Selection, shape: ChunkCoords, chunk_grid: ChunkGrid): + chunk_shape = get_chunk_shape(chunk_grid) + + # handle ellipsis + selection = replace_ellipsis(selection, shape) + + # normalize list to array + selection = replace_lists(selection) + + # setup per-dimension indexers + dim_indexers: list[ + IntDimIndexer | SliceDimIndexer | IntArrayDimIndexer | BoolArrayDimIndexer + ] = [] + for dim_sel, dim_len, dim_chunk_len in zip(selection, shape, chunk_shape, strict=True): + dim_indexer: IntDimIndexer | SliceDimIndexer | IntArrayDimIndexer | BoolArrayDimIndexer + if is_integer(dim_sel): + dim_indexer = IntDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif isinstance(dim_sel, slice): + dim_indexer = SliceDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif is_integer_array(dim_sel): + dim_indexer = IntArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) + + elif is_bool_array(dim_sel): + dim_indexer = BoolArrayDimIndexer(dim_sel, dim_len, dim_chunk_len) + + else: + raise IndexError( + "unsupported selection item for orthogonal indexing; " + "expected integer, slice, integer array or Boolean " + f"array, got {type(dim_sel)!r}" + ) + + dim_indexers.append(dim_indexer) + + dim_indexers = dim_indexers + shape = tuple(s.nitems for s in dim_indexers if not isinstance(s, IntDimIndexer)) + chunk_shape = chunk_shape + is_advanced = not is_basic_selection(selection) + if is_advanced: + drop_axes = tuple( + i + for i, dim_indexer in enumerate(dim_indexers) + if isinstance(dim_indexer, IntDimIndexer) + ) + else: + drop_axes = () + + object.__setattr__(self, "dim_indexers", dim_indexers) + object.__setattr__(self, "shape", shape) + object.__setattr__(self, "chunk_shape", chunk_shape) + object.__setattr__(self, "is_advanced", is_advanced) + object.__setattr__(self, "drop_axes", drop_axes) + + def __iter__(self) -> Iterator[ChunkProjection]: + for dim_projections in itertools.product(*self.dim_indexers): + chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) + chunk_selection: tuple[Selector, ...] | npt.NDArray[Any] = tuple( + p.dim_chunk_sel for p in dim_projections + ) + out_selection: tuple[Selector, ...] | npt.NDArray[Any] = tuple( + p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None + ) + + # handle advanced indexing arrays orthogonally + if self.is_advanced: + # N.B., numpy doesn't support orthogonal indexing directly as yet, + # so need to work around via np.ix_. Also np.ix_ does not support a + # mixture of arrays and slices or integers, so need to convert slices + # and integers into ranges. 
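+                # For example: with chunk_shape == (4, 4),
+                # ix_((slice(0, 2), np.array([0, 3])), (4, 4)) returns the
+                # broadcastable pair (array([[0], [1]]), array([[0, 3]])),
+                # i.e. the cartesian product of rows 0-1 with columns 0 and 3.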
+ chunk_selection = ix_(chunk_selection, self.chunk_shape) + + # special case for non-monotonic indices + if not is_basic_selection(out_selection): + out_selection = ix_(out_selection, self.shape) + + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +@dataclass(frozen=True) +class OIndex: + array: Array + + def __getitem__(self, selection: OrthogonalSelection) -> NDArrayLike: + fields, new_selection = pop_fields(selection) + new_selection = ensure_tuple(new_selection) + new_selection = replace_lists(new_selection) + return self.array.get_orthogonal_selection( + cast(OrthogonalSelection, new_selection), fields=fields + ) + + def __setitem__(self, selection: OrthogonalSelection, value: NDArrayLike) -> None: + fields, new_selection = pop_fields(selection) + new_selection = ensure_tuple(new_selection) + new_selection = replace_lists(new_selection) + return self.array.set_orthogonal_selection( + cast(OrthogonalSelection, new_selection), value, fields=fields + ) + + +@dataclass(frozen=True) +class BlockIndexer(Indexer): + dim_indexers: list[SliceDimIndexer] + shape: ChunkCoords + drop_axes: ChunkCoords + + def __init__(self, selection: BlockSelection, shape: ChunkCoords, chunk_grid: ChunkGrid): + chunk_shape = get_chunk_shape(chunk_grid) + + # handle ellipsis + selection_normalized = replace_ellipsis(selection, shape) + + # normalize list to array + selection_normalized = replace_lists(selection_normalized) + + # setup per-dimension indexers + dim_indexers = [] + for dim_sel, dim_len, dim_chunk_size in zip( + selection_normalized, shape, chunk_shape, strict=True + ): + dim_numchunks = int(np.ceil(dim_len / dim_chunk_size)) + + if is_integer(dim_sel): + if dim_sel < 0: + dim_sel = dim_numchunks + dim_sel + + start = dim_sel * dim_chunk_size + stop = start + dim_chunk_size + slice_ = slice(start, stop) + + elif is_slice(dim_sel): + start = dim_sel.start if dim_sel.start is not None else 0 + stop = dim_sel.stop if dim_sel.stop is not None else dim_numchunks + + if dim_sel.step not in {1, None}: + raise IndexError( + "unsupported selection item for block indexing; " + f"expected integer or slice with step=1, got {type(dim_sel)!r}" + ) + + # Can't reuse wraparound_indices because it expects a numpy array + # We have integers here. 
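+                # For example: with dim_numchunks == 10, slice(-3, -1) wraps
+                # to start == 7, stop == 9, i.e. blocks 7 and 8, before both
+                # bounds are scaled up by dim_chunk_size below.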
+ if start < 0: + start = dim_numchunks + start + if stop < 0: + stop = dim_numchunks + stop + + start = start * dim_chunk_size + stop = stop * dim_chunk_size + slice_ = slice(start, stop) + + else: + raise IndexError( + "unsupported selection item for block indexing; " + f"expected integer or slice, got {type(dim_sel)!r}" + ) + + dim_indexer = SliceDimIndexer(slice_, dim_len, dim_chunk_size) + dim_indexers.append(dim_indexer) + + if start >= dim_len or start < 0: + raise BoundsCheckError(dim_len) + + dim_indexers = dim_indexers + shape = tuple(s.nitems for s in dim_indexers) + + object.__setattr__(self, "dim_indexers", dim_indexers) + object.__setattr__(self, "shape", shape) + object.__setattr__(self, "drop_axes", ()) + + def __iter__(self) -> Iterator[ChunkProjection]: for dim_projections in itertools.product(*self.dim_indexers): chunk_coords = tuple(p.dim_chunk_ix for p in dim_projections) chunk_selection = tuple(p.dim_chunk_sel for p in dim_projections) @@ -157,7 +933,292 @@ def __iter__(self) -> Iterator[_ChunkProjection]: p.dim_out_sel for p in dim_projections if p.dim_out_sel is not None ) - yield _ChunkProjection(chunk_coords, chunk_selection, out_selection) + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +@dataclass(frozen=True) +class BlockIndex: + array: Array + + def __getitem__(self, selection: BlockSelection) -> NDArrayLike: + fields, new_selection = pop_fields(selection) + new_selection = ensure_tuple(new_selection) + new_selection = replace_lists(new_selection) + return self.array.get_block_selection(cast(BlockSelection, new_selection), fields=fields) + + def __setitem__(self, selection: BlockSelection, value: NDArrayLike) -> None: + fields, new_selection = pop_fields(selection) + new_selection = ensure_tuple(new_selection) + new_selection = replace_lists(new_selection) + return self.array.set_block_selection( + cast(BlockSelection, new_selection), value, fields=fields + ) + + +def is_coordinate_selection( + selection: SelectionNormalized, shape: ChunkCoords +) -> TypeGuard[CoordinateSelectionNormalized]: + return ( + isinstance(selection, tuple) + and len(selection) == len(shape) + and all(is_integer(dim_sel) or is_integer_array(dim_sel) for dim_sel in selection) + ) + + +def is_mask_selection(selection: Selection, shape: ChunkCoords) -> TypeGuard[MaskSelection]: + return ( + isinstance(selection, tuple) + and len(selection) == 1 + and is_bool_array(selection[0]) + and selection[0].shape == shape + ) + + +@dataclass(frozen=True) +class CoordinateIndexer(Indexer): + sel_shape: ChunkCoords + selection: CoordinateSelectionNormalized + sel_sort: npt.NDArray[np.intp] | None + chunk_nitems_cumsum: npt.NDArray[np.intp] + chunk_rixs: npt.NDArray[np.intp] + chunk_mixs: tuple[npt.NDArray[np.intp], ...] 
+ shape: ChunkCoords + chunk_shape: ChunkCoords + drop_axes: ChunkCoords + + def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_grid: ChunkGrid): + chunk_shape = get_chunk_shape(chunk_grid) + + cdata_shape: ChunkCoords + if shape == (): + cdata_shape = (1,) + else: + cdata_shape = tuple(math.ceil(s / c) for s, c in zip(shape, chunk_shape, strict=True)) + nchunks = reduce(operator.mul, cdata_shape, 1) + + # some initial normalization + selection_normalized = cast(CoordinateSelectionNormalized, ensure_tuple(selection)) + selection_normalized = tuple( + np.asarray([i]) if is_integer(i) else i for i in selection_normalized + ) + selection_normalized = cast( + CoordinateSelectionNormalized, replace_lists(selection_normalized) + ) + + # validation + if not is_coordinate_selection(selection_normalized, shape): + raise IndexError( + "invalid coordinate selection; expected one integer " + "(coordinate) array per dimension of the target array, " + f"got {selection!r}" + ) + + # handle wraparound, boundscheck + for dim_sel, dim_len in zip(selection_normalized, shape, strict=True): + # handle wraparound + wraparound_indices(dim_sel, dim_len) + + # handle out of bounds + boundscheck_indices(dim_sel, dim_len) + + # compute chunk index for each point in the selection + chunks_multi_index = tuple( + dim_sel // dim_chunk_len + for (dim_sel, dim_chunk_len) in zip(selection_normalized, chunk_shape, strict=True) + ) + + # broadcast selection - this will raise error if array dimensions don't match + selection_broadcast = tuple(np.broadcast_arrays(*selection_normalized)) + chunks_multi_index_broadcast = np.broadcast_arrays(*chunks_multi_index) + + # remember shape of selection, because we will flatten indices for processing + sel_shape = selection_broadcast[0].shape if selection_broadcast[0].shape else (1,) + + # flatten selection + selection_broadcast = tuple(dim_sel.reshape(-1) for dim_sel in selection_broadcast) + chunks_multi_index_broadcast = [ + dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast + ] + + # ravel chunk indices + chunks_raveled_indices = np.ravel_multi_index( + chunks_multi_index_broadcast, dims=cdata_shape + ) + + # group points by chunk + if np.any(np.diff(chunks_raveled_indices) < 0): + # optimisation, only sort if needed + sel_sort = np.argsort(chunks_raveled_indices) + selection_broadcast = tuple(dim_sel[sel_sort] for dim_sel in selection_broadcast) + else: + sel_sort = None + + shape = selection_broadcast[0].shape if selection_broadcast[0].shape else (1,) + + # precompute number of selected items for each chunk + chunk_nitems = np.bincount(chunks_raveled_indices, minlength=nchunks) + chunk_nitems_cumsum = np.cumsum(chunk_nitems) + # locate the chunks we need to process + chunk_rixs = np.nonzero(chunk_nitems)[0] + + # unravel chunk indices + chunk_mixs = np.unravel_index(chunk_rixs, cdata_shape) + + object.__setattr__(self, "sel_shape", sel_shape) + object.__setattr__(self, "selection", selection_broadcast) + object.__setattr__(self, "sel_sort", sel_sort) + object.__setattr__(self, "chunk_nitems_cumsum", chunk_nitems_cumsum) + object.__setattr__(self, "chunk_rixs", chunk_rixs) + object.__setattr__(self, "chunk_mixs", chunk_mixs) + object.__setattr__(self, "chunk_shape", chunk_shape) + object.__setattr__(self, "shape", shape) + object.__setattr__(self, "drop_axes", ()) + + def __iter__(self) -> Iterator[ChunkProjection]: + # iterate over chunks + for i, chunk_rix in enumerate(self.chunk_rixs): + chunk_coords = tuple(m[i] for m in 
self.chunk_mixs) + if chunk_rix == 0: + start = 0 + else: + start = self.chunk_nitems_cumsum[chunk_rix - 1] + stop = self.chunk_nitems_cumsum[chunk_rix] + out_selection: slice | npt.NDArray[np.intp] + if self.sel_sort is None: + out_selection = slice(start, stop) + else: + out_selection = self.sel_sort[start:stop] + + chunk_offsets = tuple( + dim_chunk_ix * dim_chunk_len + for dim_chunk_ix, dim_chunk_len in zip(chunk_coords, self.chunk_shape, strict=True) + ) + chunk_selection = tuple( + dim_sel[start:stop] - dim_chunk_offset + for (dim_sel, dim_chunk_offset) in zip(self.selection, chunk_offsets, strict=True) + ) + + yield ChunkProjection(chunk_coords, chunk_selection, out_selection) + + +@dataclass(frozen=True) +class MaskIndexer(CoordinateIndexer): + def __init__(self, selection: MaskSelection, shape: ChunkCoords, chunk_grid: ChunkGrid): + # some initial normalization + selection_normalized = cast(tuple[MaskSelection], ensure_tuple(selection)) + selection_normalized = cast(tuple[MaskSelection], replace_lists(selection_normalized)) + + # validation + if not is_mask_selection(selection_normalized, shape): + raise IndexError( + "invalid mask selection; expected one Boolean (mask)" + f"array with the same shape as the target array, got {selection_normalized!r}" + ) + + # convert to indices + selection_indices = np.nonzero(selection_normalized[0]) + + # delegate the rest to superclass + super().__init__(selection_indices, shape, chunk_grid) + + +@dataclass(frozen=True) +class VIndex: + array: Array + + def __getitem__(self, selection: CoordinateSelection | MaskSelection) -> NDArrayLike: + fields, new_selection = pop_fields(selection) + new_selection = ensure_tuple(new_selection) + new_selection = replace_lists(new_selection) + if is_coordinate_selection(new_selection, self.array.shape): + return self.array.get_coordinate_selection(new_selection, fields=fields) + elif is_mask_selection(new_selection, self.array.shape): + return self.array.get_mask_selection(new_selection, fields=fields) + else: + raise VindexInvalidSelectionError(new_selection) + + def __setitem__( + self, selection: CoordinateSelection | MaskSelection, value: NDArrayLike + ) -> None: + fields, new_selection = pop_fields(selection) + new_selection = ensure_tuple(new_selection) + new_selection = replace_lists(new_selection) + if is_coordinate_selection(new_selection, self.array.shape): + self.array.set_coordinate_selection(new_selection, value, fields=fields) + elif is_mask_selection(new_selection, self.array.shape): + self.array.set_mask_selection(new_selection, value, fields=fields) + else: + raise VindexInvalidSelectionError(new_selection) + + +def check_fields(fields: Fields | None, dtype: np.dtype[Any]) -> np.dtype[Any]: + # early out + if fields is None: + return dtype + # check type + if not isinstance(fields, str | list | tuple): + raise IndexError( + f"'fields' argument must be a string or list of strings; found {type(fields)!r}" + ) + if fields: + if dtype.names is None: + raise IndexError("invalid 'fields' argument, array does not have any fields") + try: + if isinstance(fields, str): + # single field selection + out_dtype = dtype[fields] + else: + # multiple field selection + out_dtype = np.dtype([(f, dtype[f]) for f in fields]) + except KeyError as e: + raise IndexError(f"invalid 'fields' argument, field not found: {e!r}") from e + else: + return out_dtype + else: + return dtype + + +def check_no_multi_fields(fields: Fields | None) -> Fields | None: + if isinstance(fields, list): + if len(fields) == 1: + return 
fields[0] + elif len(fields) > 1: + raise IndexError("multiple fields are not supported for this operation") + return fields + + +def pop_fields(selection: SelectionWithFields) -> tuple[Fields | None, Selection]: + if isinstance(selection, str): + # single field selection + return selection, () + elif not isinstance(selection, tuple): + # single selection item, no fields + # leave selection as-is + return None, cast(Selection, selection) + else: + # multiple items, split fields from selection items + fields: Fields = [f for f in selection if isinstance(f, str)] + fields = fields[0] if len(fields) == 1 else fields + selection_tuple = tuple(s for s in selection if not isinstance(s, str)) + selection = cast( + Selection, selection_tuple[0] if len(selection_tuple) == 1 else selection_tuple + ) + return fields, selection + + +def make_slice_selection(selection: Any) -> list[int | slice]: + ls: list[int | slice] = [] + for dim_selection in selection: + if is_integer(dim_selection): + ls.append(slice(int(dim_selection), int(dim_selection) + 1, 1)) + elif isinstance(dim_selection, np.ndarray): + if len(dim_selection) == 1: + ls.append(slice(int(dim_selection[0]), int(dim_selection[0]) + 1, 1)) + else: + raise ArrayIndexError + else: + ls.append(dim_selection) + return ls def morton_order_iter(chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: @@ -198,7 +1259,8 @@ def is_total_slice(item: Selection, shape: ChunkCoords) -> bool: item = (item,) if isinstance(item, tuple): return all( - ( + isinstance(dim_sel, slice) + and ( (dim_sel == slice(None)) or ((dim_sel.stop - dim_sel.start == dim_len) and (dim_sel.step in [1, None])) ) @@ -206,3 +1268,22 @@ def is_total_slice(item: Selection, shape: ChunkCoords) -> bool: ) else: raise TypeError(f"expected slice or tuple of slices, found {item!r}") + + +def get_indexer( + selection: SelectionWithFields, shape: ChunkCoords, chunk_grid: ChunkGrid +) -> Indexer: + _, pure_selection = pop_fields(selection) + if is_pure_fancy_indexing(pure_selection, len(shape)): + new_selection = ensure_tuple(selection) + new_selection = replace_lists(new_selection) + if is_coordinate_selection(new_selection, shape): + return CoordinateIndexer(cast(CoordinateSelection, selection), shape, chunk_grid) + elif is_mask_selection(new_selection, shape): + return MaskIndexer(cast(MaskSelection, selection), shape, chunk_grid) + else: + raise VindexInvalidSelectionError(new_selection) + elif is_pure_orthogonal_indexing(pure_selection, len(shape)): + return OrthogonalIndexer(cast(OrthogonalSelection, selection), shape, chunk_grid) + else: + return BasicIndexer(cast(BasicSelection, selection), shape, chunk_grid) diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index b461449991..ca8cf1cdd2 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -118,6 +118,7 @@ def from_dtype(cls, dtype: np.dtype[Any]) -> DataType: class ArrayMetadata(Metadata, ABC): shape: ChunkCoords chunk_grid: ChunkGrid + fill_value: Any attributes: dict[str, JSON] @property diff --git a/src/zarr/v2/indexing.py b/src/zarr/v2/indexing.py index 0e266ad908..242e9ae849 100644 --- a/src/zarr/v2/indexing.py +++ b/src/zarr/v2/indexing.py @@ -346,7 +346,7 @@ def __init__(self, selection, array): self.dim_indexers = dim_indexers self.shape = tuple(s.nitems for s in self.dim_indexers if not isinstance(s, IntDimIndexer)) - self.drop_axes = None + self.drop_axes = () def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): @@ -625,7 +625,7 @@ def __init__(self, selection, array): if 
isinstance(dim_indexer, IntDimIndexer) ) else: - self.drop_axes = None + self.drop_axes = () def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): @@ -724,7 +724,7 @@ def __init__(self, selection, array): self.dim_indexers = dim_indexers self.shape = tuple(s.nitems for s in self.dim_indexers) - self.drop_axes = None + self.drop_axes = () def __iter__(self): for dim_projections in itertools.product(*self.dim_indexers): @@ -823,7 +823,7 @@ def __init__(self, selection, array): self.selection = selection self.sel_sort = sel_sort self.shape = selection[0].shape if selection[0].shape else (1,) - self.drop_axes = None + self.drop_axes = () self.array = array # precompute number of selected items for each chunk diff --git a/tests/v3/test_indexing.py b/tests/v3/test_indexing.py new file mode 100644 index 0000000000..9ce485945b --- /dev/null +++ b/tests/v3/test_indexing.py @@ -0,0 +1,1721 @@ +from __future__ import annotations + +from collections import Counter +from collections.abc import Iterator +from typing import Any +from uuid import uuid4 + +import numpy as np +import numpy.typing as npt +import pytest +from numpy.testing import assert_array_equal + +import zarr +from zarr.abc.store import Store +from zarr.buffer import NDBuffer +from zarr.common import ChunkCoords +from zarr.indexing import ( + make_slice_selection, + normalize_integer_selection, + oindex, + oindex_set, + replace_ellipsis, +) +from zarr.store.core import StorePath +from zarr.store.memory import MemoryStore + + +@pytest.fixture +def store() -> Iterator[Store]: + yield StorePath(MemoryStore(mode="w")) + + +def zarr_array_from_numpy_array( + store: StorePath, + a: npt.NDArray[Any], + chunk_shape: ChunkCoords | None = None, +) -> zarr.Array: + z = zarr.Array.create( + store=store / str(uuid4()), + shape=a.shape, + dtype=a.dtype, + chunk_shape=chunk_shape or a.shape, + chunk_key_encoding=("v2", "."), + ) + z[:] = a + return z + + +class CountingDict(MemoryStore): + def __init__(self): + super().__init__(mode="w") + self.counter = Counter() + + async def get(self, key, byte_range=None): + key_suffix = "/".join(key.split("/")[1:]) + self.counter["__getitem__", key_suffix] += 1 + return await super().get(key, byte_range) + + async def set(self, key, value, byte_range=None): + key_suffix = "/".join(key.split("/")[1:]) + self.counter["__setitem__", key_suffix] += 1 + return await super().set(key, value, byte_range) + + +def test_normalize_integer_selection(): + assert 1 == normalize_integer_selection(1, 100) + assert 99 == normalize_integer_selection(-1, 100) + with pytest.raises(IndexError): + normalize_integer_selection(100, 100) + with pytest.raises(IndexError): + normalize_integer_selection(1000, 100) + with pytest.raises(IndexError): + normalize_integer_selection(-1000, 100) + + +def test_replace_ellipsis(): + # 1D, single item + assert (0,) == replace_ellipsis(0, (100,)) + + # 1D + assert (slice(None),) == replace_ellipsis(Ellipsis, (100,)) + assert (slice(None),) == replace_ellipsis(slice(None), (100,)) + assert (slice(None, 100),) == replace_ellipsis(slice(None, 100), (100,)) + assert (slice(0, None),) == replace_ellipsis(slice(0, None), (100,)) + assert (slice(None),) == replace_ellipsis((slice(None), Ellipsis), (100,)) + assert (slice(None),) == replace_ellipsis((Ellipsis, slice(None)), (100,)) + + # 2D, single item + assert (0, 0) == replace_ellipsis((0, 0), (100, 100)) + assert (-1, 1) == replace_ellipsis((-1, 1), (100, 100)) + + # 2D, single col/row + assert (0, slice(None)) == 
replace_ellipsis((0, slice(None)), (100, 100)) + assert (0, slice(None)) == replace_ellipsis((0,), (100, 100)) + assert (slice(None), 0) == replace_ellipsis((slice(None), 0), (100, 100)) + + # 2D slice + assert (slice(None), slice(None)) == replace_ellipsis(Ellipsis, (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis(slice(None), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis((slice(None), slice(None)), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis((Ellipsis, slice(None)), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis((slice(None), Ellipsis), (100, 100)) + assert (slice(None), slice(None)) == replace_ellipsis( + (slice(None), Ellipsis, slice(None)), (100, 100) + ) + assert (slice(None), slice(None)) == replace_ellipsis( + (Ellipsis, slice(None), slice(None)), (100, 100) + ) + assert (slice(None), slice(None)) == replace_ellipsis( + (slice(None), slice(None), Ellipsis), (100, 100) + ) + + +@pytest.mark.xfail(reason="zero-dimension arrays are not supported in v3") +def test_get_basic_selection_0d(store: StorePath): + # setup + a = np.array(42) + z = zarr_array_from_numpy_array(store, a) + + assert_array_equal(a, z.get_basic_selection(Ellipsis)) + assert_array_equal(a, z[...]) + assert 42 == z.get_basic_selection(()) + assert 42 == z[()] + + # test out param + b = NDBuffer.from_numpy_array(np.zeros_like(a)) + z.get_basic_selection(Ellipsis, out=b) + assert_array_equal(a, b) + + # test structured array + value = (b"aaa", 1, 4.2) + a = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) + z = zarr_array_from_numpy_array(store, a) + z[()] = value + assert_array_equal(a, z.get_basic_selection(Ellipsis)) + assert_array_equal(a, z[...]) + assert a[()] == z.get_basic_selection(()) + assert a[()] == z[()] + assert b"aaa" == z.get_basic_selection((), fields="foo") + assert b"aaa" == z["foo"] + assert a[["foo", "bar"]] == z.get_basic_selection((), fields=["foo", "bar"]) + assert a[["foo", "bar"]] == z["foo", "bar"] + # test out param + b = NDBuffer.from_numpy_array(np.zeros_like(a)) + z.get_basic_selection(Ellipsis, out=b) + assert_array_equal(a, b) + c = NDBuffer.from_numpy_array(np.zeros_like(a[["foo", "bar"]])) + z.get_basic_selection(Ellipsis, out=c, fields=["foo", "bar"]) + assert_array_equal(a[["foo", "bar"]], c) + + +basic_selections_1d = [ + # single value + 42, + -1, + # slices + slice(0, 1050), + slice(50, 150), + slice(0, 2000), + slice(-150, -50), + slice(-2000, 2000), + slice(0, 0), # empty result + slice(-1, 0), # empty result + # total selections + slice(None), + Ellipsis, + (), + (Ellipsis, slice(None)), + # slice with step + slice(None), + slice(None, None), + slice(None, None, 1), + slice(None, None, 10), + slice(None, None, 100), + slice(None, None, 1000), + slice(None, None, 10000), + slice(0, 1050), + slice(0, 1050, 1), + slice(0, 1050, 10), + slice(0, 1050, 100), + slice(0, 1050, 1000), + slice(0, 1050, 10000), + slice(1, 31, 3), + slice(1, 31, 30), + slice(1, 31, 300), + slice(81, 121, 3), + slice(81, 121, 30), + slice(81, 121, 300), + slice(50, 150), + slice(50, 150, 1), + slice(50, 150, 10), +] + + +basic_selections_1d_bad = [ + # only positive step supported + slice(None, None, -1), + slice(None, None, -10), + slice(None, None, -100), + slice(None, None, -1000), + slice(None, None, -10000), + slice(1050, -1, -1), + slice(1050, -1, -10), + slice(1050, -1, -100), + slice(1050, -1, -1000), + slice(1050, -1, -10000), + slice(1050, 0, -1), + slice(1050, 0, -10), + slice(1050, 0, 
-100), + slice(1050, 0, -1000), + slice(1050, 0, -10000), + slice(150, 50, -1), + slice(150, 50, -10), + slice(31, 1, -3), + slice(121, 81, -3), + slice(-1, 0, -1), + # bad stuff + 2.3, + "foo", + b"xxx", + None, + (0, 0), + (slice(None), slice(None)), +] + + +def _test_get_basic_selection(a, z, selection): + print(a, z, selection) + expect = a[selection] + actual = z.get_basic_selection(selection) + assert_array_equal(expect, actual) + actual = z[selection] + assert_array_equal(expect, actual) + + # test out param + b = NDBuffer.from_numpy_array(np.empty(shape=expect.shape, dtype=expect.dtype)) + z.get_basic_selection(selection, out=b) + assert_array_equal(expect, b.as_numpy_array()) + + +# noinspection PyStatementEffect +def test_get_basic_selection_1d(store: StorePath): + # setup + a = np.arange(1050, dtype=int) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + for selection in basic_selections_1d: + _test_get_basic_selection(a, z, selection) + + for selection in basic_selections_1d_bad: + with pytest.raises(IndexError): + z.get_basic_selection(selection) + with pytest.raises(IndexError): + z[selection] + + with pytest.raises(IndexError): + z.get_basic_selection([1, 0]) + + +basic_selections_2d = [ + # single row + 42, + -1, + (42, slice(None)), + (-1, slice(None)), + # single col + (slice(None), 4), + (slice(None), -1), + # row slices + slice(None), + slice(0, 1000), + slice(250, 350), + slice(0, 2000), + slice(-350, -250), + slice(0, 0), # empty result + slice(-1, 0), # empty result + slice(-2000, 0), + slice(-2000, 2000), + # 2D slices + (slice(None), slice(1, 5)), + (slice(250, 350), slice(None)), + (slice(250, 350), slice(1, 5)), + (slice(250, 350), slice(-5, -1)), + (slice(250, 350), slice(-50, 50)), + (slice(250, 350, 10), slice(1, 5)), + (slice(250, 350), slice(1, 5, 2)), + (slice(250, 350, 33), slice(1, 5, 3)), + # total selections + (slice(None), slice(None)), + Ellipsis, + (), + (Ellipsis, slice(None)), + (Ellipsis, slice(None), slice(None)), +] + + +basic_selections_2d_bad = [ + # bad stuff + 2.3, + "foo", + b"xxx", + None, + (2.3, slice(None)), + # only positive step supported + slice(None, None, -1), + (slice(None, None, -1), slice(None)), + (0, 0, 0), + (slice(None), slice(None), slice(None)), +] + + +# noinspection PyStatementEffect +def test_get_basic_selection_2d(store: StorePath): + # setup + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + for selection in basic_selections_2d: + _test_get_basic_selection(a, z, selection) + + bad_selections = basic_selections_2d_bad + [ + # integer arrays + [0, 1], + (slice(None), [0, 1]), + ] + for selection in bad_selections: + with pytest.raises(IndexError): + z.get_basic_selection(selection) + # check fallback on fancy indexing + fancy_selection = ([0, 1], [0, 1]) + np.testing.assert_array_equal(z[fancy_selection], [0, 11]) + + +def test_fancy_indexing_fallback_on_get_setitem(store: StorePath): + z = zarr_array_from_numpy_array(store, np.zeros((20, 20))) + z[[1, 2, 3], [1, 2, 3]] = 1 + np.testing.assert_array_equal( + z[:4, :4], + [ + [0, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + ], + ) + np.testing.assert_array_equal(z[[1, 2, 3], [1, 2, 3]], 1) + # test broadcasting + np.testing.assert_array_equal(z[1, [1, 2, 3]], [1, 0, 0]) + # test 1D fancy indexing + z2 = zarr_array_from_numpy_array(store, np.zeros(5)) + z2[[1, 2, 3]] = 1 + np.testing.assert_array_equal(z2[:], [0, 1, 1, 1, 0]) + + +@pytest.mark.parametrize( + 
"index,expected_result", + [ + # Single iterable of integers + ([0, 1], [[0, 1, 2], [3, 4, 5]]), + # List first, then slice + (([0, 1], slice(None)), [[0, 1, 2], [3, 4, 5]]), + # List first, then slice + (([0, 1], slice(1, None)), [[1, 2], [4, 5]]), + # Slice first, then list + ((slice(0, 2), [0, 2]), [[0, 2], [3, 5]]), + # Slices only + ((slice(0, 2), slice(0, 2)), [[0, 1], [3, 4]]), + # List with repeated index + (([1, 0, 1], slice(1, None)), [[4, 5], [1, 2], [4, 5]]), + # 1D indexing + (([1, 0, 1]), [[3, 4, 5], [0, 1, 2], [3, 4, 5]]), + ], +) +def test_orthogonal_indexing_fallback_on_getitem_2d(store: StorePath, index, expected_result): + """ + Tests the orthogonal indexing fallback on __getitem__ for a 2D matrix. + + In addition to checking expected behavior, all indexing + is also checked against numpy. + """ + # [0, 1, 2], + # [3, 4, 5], + # [6, 7, 8] + a = np.arange(9).reshape(3, 3) + z = zarr_array_from_numpy_array(store, a) + + np.testing.assert_array_equal(z[index], a[index], err_msg="Indexing disagrees with numpy") + np.testing.assert_array_equal(z[index], expected_result) + + +@pytest.mark.parametrize( + "index,expected_result", + [ + # Single iterable of integers + ([0, 1], [[[0, 1, 2], [3, 4, 5], [6, 7, 8]], [[9, 10, 11], [12, 13, 14], [15, 16, 17]]]), + # One slice, two integers + ((slice(0, 2), 1, 1), [4, 13]), + # One integer, two slices + ((slice(0, 2), 1, slice(0, 2)), [[3, 4], [12, 13]]), + # Two slices and a list + ((slice(0, 2), [1, 2], slice(0, 2)), [[[3, 4], [6, 7]], [[12, 13], [15, 16]]]), + ], +) +def test_orthogonal_indexing_fallback_on_getitem_3d(store: StorePath, index, expected_result): + """ + Tests the orthogonal indexing fallback on __getitem__ for a 3D matrix. + + In addition to checking expected behavior, all indexing + is also checked against numpy. + """ + # [[[ 0, 1, 2], + # [ 3, 4, 5], + # [ 6, 7, 8]], + + # [[ 9, 10, 11], + # [12, 13, 14], + # [15, 16, 17]], + + # [[18, 19, 20], + # [21, 22, 23], + # [24, 25, 26]]] + a = np.arange(27).reshape(3, 3, 3) + z = zarr_array_from_numpy_array(store, a) + + np.testing.assert_array_equal(z[index], a[index], err_msg="Indexing disagrees with numpy") + np.testing.assert_array_equal(z[index], expected_result) + + +@pytest.mark.parametrize( + "index,expected_result", + [ + # Single iterable of integers + ([0, 1], [[1, 1, 1], [1, 1, 1], [0, 0, 0]]), + # List and slice combined + (([0, 1], slice(1, 3)), [[0, 1, 1], [0, 1, 1], [0, 0, 0]]), + # Index repetition is ignored on setitem + (([0, 1, 1, 1, 1, 1, 1], slice(1, 3)), [[0, 1, 1], [0, 1, 1], [0, 0, 0]]), + # Slice with step + (([0, 2], slice(None, None, 2)), [[1, 0, 1], [0, 0, 0], [1, 0, 1]]), + ], +) +def test_orthogonal_indexing_fallback_on_setitem_2d(store: StorePath, index, expected_result): + """ + Tests the orthogonal indexing fallback on __setitem__ for a 3D matrix. + + In addition to checking expected behavior, all indexing + is also checked against numpy. 
+ """ + # Slice + fancy index + a = np.zeros((3, 3)) + z = zarr_array_from_numpy_array(store, a) + z[index] = 1 + a[index] = 1 + np.testing.assert_array_equal(z[:], expected_result) + np.testing.assert_array_equal(z[:], a, err_msg="Indexing disagrees with numpy") + + +def test_fancy_indexing_doesnt_mix_with_implicit_slicing(store: StorePath): + z2 = zarr_array_from_numpy_array(store, np.zeros((5, 5, 5))) + with pytest.raises(IndexError): + z2[[1, 2, 3], [1, 2, 3]] = 2 + with pytest.raises(IndexError): + np.testing.assert_array_equal(z2[[1, 2, 3], [1, 2, 3]], 0) + with pytest.raises(IndexError): + z2[..., [1, 2, 3]] = 2 + with pytest.raises(IndexError): + np.testing.assert_array_equal(z2[..., [1, 2, 3]], 0) + + +@pytest.mark.xfail(reason="zero-dimension arrays are not supported in v3") +def test_set_basic_selection_0d(store: StorePath): + # setup + v = np.array(42) + a = np.zeros_like(v) + z = zarr_array_from_numpy_array(store, v) + assert_array_equal(a, z[:]) + + # tests + z.set_basic_selection(Ellipsis, v) + assert_array_equal(v, z[:]) + z[...] = 0 + assert_array_equal(a, z[:]) + z[...] = v + assert_array_equal(v, z[:]) + + # test structured array + value = (b"aaa", 1, 4.2) + v = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) + a = np.zeros_like(v) + z = zarr_array_from_numpy_array(store, a) + + # tests + z.set_basic_selection(Ellipsis, v) + assert_array_equal(v, z[:]) + z.set_basic_selection(Ellipsis, a) + assert_array_equal(a, z[:]) + z[...] = v + assert_array_equal(v, z[:]) + z[...] = a + assert_array_equal(a, z[:]) + # with fields + z.set_basic_selection(Ellipsis, v["foo"], fields="foo") + assert v["foo"] == z["foo"] + assert a["bar"] == z["bar"] + assert a["baz"] == z["baz"] + z["bar"] = v["bar"] + assert v["foo"] == z["foo"] + assert v["bar"] == z["bar"] + assert a["baz"] == z["baz"] + # multiple field assignment not supported + with pytest.raises(IndexError): + z.set_basic_selection(Ellipsis, v[["foo", "bar"]], fields=["foo", "bar"]) + with pytest.raises(IndexError): + z[..., "foo", "bar"] = v[["foo", "bar"]] + + +def _test_get_orthogonal_selection(a, z, selection): + expect = oindex(a, selection) + actual = z.get_orthogonal_selection(selection) + assert_array_equal(expect, actual) + actual = z.oindex[selection] + assert_array_equal(expect, actual) + + +# noinspection PyStatementEffect +def test_get_orthogonal_selection_1d_bool(store: StorePath): + # setup + a = np.arange(1050, dtype=int) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + ix = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + _test_get_orthogonal_selection(a, z, ix) + + # test errors + with pytest.raises(IndexError): + z.oindex[np.zeros(50, dtype=bool)] # too short + with pytest.raises(IndexError): + z.oindex[np.zeros(2000, dtype=bool)] # too long + with pytest.raises(IndexError): + z.oindex[[[True, False], [False, True]]] # too many dimensions + + +# noinspection PyStatementEffect +def test_get_orthogonal_selection_1d_int(store: StorePath): + # setup + a = np.arange(1050, dtype=int) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 2, 0.5, 0.1, 0.01: + # unordered + ix = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) + _test_get_orthogonal_selection(a, z, ix) + # increasing + ix.sort() + _test_get_orthogonal_selection(a, z, ix) + # decreasing + ix = ix[::-1] + 
_test_get_orthogonal_selection(a, z, ix) + + selections = basic_selections_1d + [ + # test wraparound + [0, 3, 10, -23, -12, -1], + # explicit test not sorted + [3, 105, 23, 127], + ] + for selection in selections: + _test_get_orthogonal_selection(a, z, selection) + + bad_selections = basic_selections_1d_bad + [ + [a.shape[0] + 1], # out of bounds + [-(a.shape[0] + 1)], # out of bounds + [[2, 4], [6, 8]], # too many dimensions + ] + for selection in bad_selections: + with pytest.raises(IndexError): + z.get_orthogonal_selection(selection) + with pytest.raises(IndexError): + z.oindex[selection] + + +def _test_get_orthogonal_selection_2d(a, z, ix0, ix1): + selections = [ + # index both axes with array + (ix0, ix1), + # mixed indexing with array / slice + (ix0, slice(1, 5)), + (ix0, slice(1, 5, 2)), + (slice(250, 350), ix1), + (slice(250, 350, 10), ix1), + # mixed indexing with array / int + (ix0, 4), + (42, ix1), + ] + for selection in selections: + _test_get_orthogonal_selection(a, z, selection) + + +# noinspection PyStatementEffect +def test_get_orthogonal_selection_2d(store: StorePath): + # setup + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + # boolean arrays + ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + _test_get_orthogonal_selection_2d(a, z, ix0, ix1) + + # mixed int array / bool array + selections = ( + (ix0, np.nonzero(ix1)[0]), + (np.nonzero(ix0)[0], ix1), + ) + for selection in selections: + _test_get_orthogonal_selection(a, z, selection) + + # integer arrays + ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) + _test_get_orthogonal_selection_2d(a, z, ix0, ix1) + ix0.sort() + ix1.sort() + _test_get_orthogonal_selection_2d(a, z, ix0, ix1) + ix0 = ix0[::-1] + ix1 = ix1[::-1] + _test_get_orthogonal_selection_2d(a, z, ix0, ix1) + + for selection in basic_selections_2d: + _test_get_orthogonal_selection(a, z, selection) + + for selection in basic_selections_2d_bad: + with pytest.raises(IndexError): + z.get_orthogonal_selection(selection) + with pytest.raises(IndexError): + z.oindex[selection] + + +def _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2): + selections = [ + # single value + (84, 42, 4), + (-1, -1, -1), + # index all axes with array + (ix0, ix1, ix2), + # mixed indexing with single array / slices + (ix0, slice(15, 25), slice(1, 5)), + (slice(50, 70), ix1, slice(1, 5)), + (slice(50, 70), slice(15, 25), ix2), + (ix0, slice(15, 25, 5), slice(1, 5, 2)), + (slice(50, 70, 3), ix1, slice(1, 5, 2)), + (slice(50, 70, 3), slice(15, 25, 5), ix2), + # mixed indexing with single array / ints + (ix0, 42, 4), + (84, ix1, 4), + (84, 42, ix2), + # mixed indexing with single array / slice / int + (ix0, slice(15, 25), 4), + (42, ix1, slice(1, 5)), + (slice(50, 70), 42, ix2), + # mixed indexing with two array / slice + (ix0, ix1, slice(1, 5)), + (slice(50, 70), ix1, ix2), + (ix0, slice(15, 25), ix2), + # mixed indexing with two array / integer + (ix0, ix1, 4), + (42, ix1, ix2), + (ix0, 42, ix2), + ] + for selection in selections: + _test_get_orthogonal_selection(a, z, selection) + + +def test_get_orthogonal_selection_3d(store: StorePath): + # setup + a = np.arange(100000, dtype=int).reshape(200, 50, 10) + z = 
zarr_array_from_numpy_array(store, a, chunk_shape=(60, 20, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + # boolean arrays + ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + ix2 = np.random.binomial(1, 0.5, size=a.shape[2]).astype(bool) + _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2) + + # integer arrays + ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) + ix2 = np.random.choice(a.shape[2], size=int(a.shape[2] * 0.5), replace=True) + _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2) + ix0.sort() + ix1.sort() + ix2.sort() + _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2) + ix0 = ix0[::-1] + ix1 = ix1[::-1] + ix2 = ix2[::-1] + _test_get_orthogonal_selection_3d(a, z, ix0, ix1, ix2) + + +def test_orthogonal_indexing_edge_cases(store: StorePath): + a = np.arange(6).reshape(1, 2, 3) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(1, 2, 3)) + + expect = oindex(a, (0, slice(None), [0, 1, 2])) + actual = z.oindex[0, :, [0, 1, 2]] + assert_array_equal(expect, actual) + + expect = oindex(a, (0, slice(None), [True, True, True])) + actual = z.oindex[0, :, [True, True, True]] + assert_array_equal(expect, actual) + + +def _test_set_orthogonal_selection(v, a, z, selection): + for value in 42, oindex(v, selection), oindex(v, selection).tolist(): + if isinstance(value, list) and value == []: + # skip these cases as cannot preserve all dimensions + continue + # setup expectation + a[:] = 0 + oindex_set(a, selection, value) + # long-form API + z[:] = 0 + z.set_orthogonal_selection(selection, value) + assert_array_equal(a, z[:]) + # short-form API + z[:] = 0 + z.oindex[selection] = value + assert_array_equal(a, z[:]) + + +def test_set_orthogonal_selection_1d(store: StorePath): + # setup + v = np.arange(1050, dtype=int) + a = np.empty(v.shape, dtype=int) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + # test with different degrees of sparseness + np.random.seed(42) + for p in 0.5, 0.1, 0.01: + # boolean arrays + ix = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + _test_set_orthogonal_selection(v, a, z, ix) + + # integer arrays + ix = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) + _test_set_orthogonal_selection(v, a, z, ix) + ix.sort() + _test_set_orthogonal_selection(v, a, z, ix) + ix = ix[::-1] + _test_set_orthogonal_selection(v, a, z, ix) + + # basic selections + for selection in basic_selections_1d: + _test_set_orthogonal_selection(v, a, z, selection) + + +def _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1): + selections = [ + # index both axes with array + (ix0, ix1), + # mixed indexing with array / slice or int + (ix0, slice(1, 5)), + (slice(250, 350), ix1), + (ix0, 4), + (42, ix1), + ] + for selection in selections: + _test_set_orthogonal_selection(v, a, z, selection) + + +def test_set_orthogonal_selection_2d(store: StorePath): + # setup + v = np.arange(10000, dtype=int).reshape(1000, 10) + a = np.empty_like(v) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + # boolean arrays + ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1) + + # integer 
arrays + ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) + _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1) + ix0.sort() + ix1.sort() + _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1) + ix0 = ix0[::-1] + ix1 = ix1[::-1] + _test_set_orthogonal_selection_2d(v, a, z, ix0, ix1) + + for selection in basic_selections_2d: + _test_set_orthogonal_selection(v, a, z, selection) + + +def _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2): + selections = ( + # single value + (84, 42, 4), + (-1, -1, -1), + # index all axes with bool array + (ix0, ix1, ix2), + # mixed indexing with single bool array / slice or int + (ix0, slice(15, 25), slice(1, 5)), + (slice(50, 70), ix1, slice(1, 5)), + (slice(50, 70), slice(15, 25), ix2), + (ix0, 42, 4), + (84, ix1, 4), + (84, 42, ix2), + (ix0, slice(15, 25), 4), + (slice(50, 70), ix1, 4), + (slice(50, 70), 42, ix2), + # indexing with two arrays / slice + (ix0, ix1, slice(1, 5)), + # indexing with two arrays / integer + (ix0, ix1, 4), + ) + for selection in selections: + _test_set_orthogonal_selection(v, a, z, selection) + + +def test_set_orthogonal_selection_3d(store: StorePath): + # setup + v = np.arange(100000, dtype=int).reshape(200, 50, 10) + a = np.empty_like(v) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(60, 20, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + # boolean arrays + ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + ix2 = np.random.binomial(1, 0.5, size=a.shape[2]).astype(bool) + _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) + + # integer arrays + ix0 = np.random.choice(a.shape[0], size=int(a.shape[0] * p), replace=True) + ix1 = np.random.choice(a.shape[1], size=int(a.shape[1] * 0.5), replace=True) + ix2 = np.random.choice(a.shape[2], size=int(a.shape[2] * 0.5), replace=True) + _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) + + # sorted increasing + ix0.sort() + ix1.sort() + ix2.sort() + _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) + + # sorted decreasing + ix0 = ix0[::-1] + ix1 = ix1[::-1] + ix2 = ix2[::-1] + _test_set_orthogonal_selection_3d(v, a, z, ix0, ix1, ix2) + + +def test_orthogonal_indexing_fallback_on_get_setitem(store: StorePath): + z = zarr_array_from_numpy_array(store, np.zeros((20, 20))) + z[[1, 2, 3], [1, 2, 3]] = 1 + np.testing.assert_array_equal( + z[:4, :4], + [ + [0, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + ], + ) + np.testing.assert_array_equal(z[[1, 2, 3], [1, 2, 3]], 1) + # test broadcasting + np.testing.assert_array_equal(z[1, [1, 2, 3]], [1, 0, 0]) + # test 1D fancy indexing + z2 = zarr_array_from_numpy_array(store, np.zeros(5)) + z2[[1, 2, 3]] = 1 + np.testing.assert_array_equal(z2[:], [0, 1, 1, 1, 0]) + + +def _test_get_coordinate_selection(a, z, selection): + expect = a[selection] + actual = z.get_coordinate_selection(selection) + assert_array_equal(expect, actual) + actual = z.vindex[selection] + assert_array_equal(expect, actual) + + +coordinate_selections_1d_bad = [ + # slice not supported + slice(5, 15), + slice(None), + Ellipsis, + # bad stuff + 2.3, + "foo", + b"xxx", + None, + (0, 0), + (slice(None), slice(None)), +] + + +# noinspection PyStatementEffect +def test_get_coordinate_selection_1d(store: StorePath): + # setup + a = np.arange(1050, dtype=int) + z = zarr_array_from_numpy_array(store, a, 
chunk_shape=(100,)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 2, 0.5, 0.1, 0.01: + n = int(a.size * p) + ix = np.random.choice(a.shape[0], size=n, replace=True) + _test_get_coordinate_selection(a, z, ix) + ix.sort() + _test_get_coordinate_selection(a, z, ix) + ix = ix[::-1] + _test_get_coordinate_selection(a, z, ix) + + selections = [ + # test single item + 42, + -1, + # test wraparound + [0, 3, 10, -23, -12, -1], + # test out of order + [3, 105, 23, 127], # not monotonically increasing + # test multi-dimensional selection + np.array([[2, 4], [6, 8]]), + ] + for selection in selections: + _test_get_coordinate_selection(a, z, selection) + + # test errors + bad_selections = coordinate_selections_1d_bad + [ + [a.shape[0] + 1], # out of bounds + [-(a.shape[0] + 1)], # out of bounds + ] + for selection in bad_selections: + with pytest.raises(IndexError): + z.get_coordinate_selection(selection) + with pytest.raises(IndexError): + z.vindex[selection] + + +def test_get_coordinate_selection_2d(store: StorePath): + # setup + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 2, 0.5, 0.1, 0.01: + n = int(a.size * p) + ix0 = np.random.choice(a.shape[0], size=n, replace=True) + ix1 = np.random.choice(a.shape[1], size=n, replace=True) + selections = [ + # single value + (42, 4), + (-1, -1), + # index both axes with array + (ix0, ix1), + # mixed indexing with array / int + (ix0, 4), + (42, ix1), + (42, 4), + ] + for selection in selections: + _test_get_coordinate_selection(a, z, selection) + + # not monotonically increasing (first dim) + ix0 = [3, 3, 4, 2, 5] + ix1 = [1, 3, 5, 7, 9] + _test_get_coordinate_selection(a, z, (ix0, ix1)) + + # not monotonically increasing (second dim) + ix0 = [1, 1, 2, 2, 5] + ix1 = [1, 3, 2, 1, 0] + _test_get_coordinate_selection(a, z, (ix0, ix1)) + + # multi-dimensional selection + ix0 = np.array([[1, 1, 2], [2, 2, 5]]) + ix1 = np.array([[1, 3, 2], [1, 0, 0]]) + _test_get_coordinate_selection(a, z, (ix0, ix1)) + + with pytest.raises(IndexError): + selection = slice(5, 15), [1, 2, 3] + z.get_coordinate_selection(selection) + with pytest.raises(IndexError): + selection = [1, 2, 3], slice(5, 15) + z.get_coordinate_selection(selection) + with pytest.raises(IndexError): + selection = Ellipsis, [1, 2, 3] + z.get_coordinate_selection(selection) + with pytest.raises(IndexError): + selection = Ellipsis + z.get_coordinate_selection(selection) + + +def _test_set_coordinate_selection(v, a, z, selection): + for value in 42, v[selection], v[selection].tolist(): + # setup expectation + a[:] = 0 + a[selection] = value + # test long-form API + z[:] = 0 + z.set_coordinate_selection(selection, value) + assert_array_equal(a, z[:]) + # test short-form API + z[:] = 0 + z.vindex[selection] = value + assert_array_equal(a, z[:]) + + +def test_set_coordinate_selection_1d(store: StorePath): + # setup + v = np.arange(1050, dtype=int) + a = np.empty(v.shape, dtype=v.dtype) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 2, 0.5, 0.1, 0.01: + n = int(a.size * p) + ix = np.random.choice(a.shape[0], size=n, replace=True) + _test_set_coordinate_selection(v, a, z, ix) + + # multi-dimensional selection + ix = np.array([[2, 4], [6, 8]]) + _test_set_coordinate_selection(v, a, z, ix) + + for selection in coordinate_selections_1d_bad: + 
with pytest.raises(IndexError): + z.set_coordinate_selection(selection, 42) + with pytest.raises(IndexError): + z.vindex[selection] = 42 + + +def test_set_coordinate_selection_2d(store: StorePath): + # setup + v = np.arange(10000, dtype=int).reshape(1000, 10) + a = np.empty_like(v) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 2, 0.5, 0.1, 0.01: + n = int(a.size * p) + ix0 = np.random.choice(a.shape[0], size=n, replace=True) + ix1 = np.random.choice(a.shape[1], size=n, replace=True) + + selections = ( + (42, 4), + (-1, -1), + # index both axes with array + (ix0, ix1), + # mixed indexing with array / int + (ix0, 4), + (42, ix1), + ) + for selection in selections: + _test_set_coordinate_selection(v, a, z, selection) + + # multi-dimensional selection + ix0 = np.array([[1, 2, 3], [4, 5, 6]]) + ix1 = np.array([[1, 3, 2], [2, 0, 5]]) + _test_set_coordinate_selection(v, a, z, (ix0, ix1)) + + +def _test_get_block_selection(a, z, selection, expected_idx): + expect = a[expected_idx] + actual = z.get_block_selection(selection) + assert_array_equal(expect, actual) + actual = z.blocks[selection] + assert_array_equal(expect, actual) + + +block_selections_1d = [ + # test single item + 0, + 5, + # test wraparound + -1, + -4, + # test slice + slice(5), + slice(None, 3), + slice(5, 6), + slice(-3, -1), + slice(None), # Full slice +] + +block_selections_1d_array_projection = [ + # test single item + slice(100), + slice(500, 600), + # test wraparound + slice(1000, None), + slice(700, 800), + # test slice + slice(500), + slice(None, 300), + slice(500, 600), + slice(800, 1000), + slice(None), +] + +block_selections_1d_bad = [ + # slice not supported + slice(3, 8, 2), + # bad stuff + 2.3, + # "foo", # TODO + b"xxx", + None, + (0, 0), + (slice(None), slice(None)), + [0, 5, 3], +] + + +def test_get_block_selection_1d(store: StorePath): + # setup + a = np.arange(1050, dtype=int) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + for selection, expected_idx in zip( + block_selections_1d, block_selections_1d_array_projection, strict=True + ): + _test_get_block_selection(a, z, selection, expected_idx) + + bad_selections = block_selections_1d_bad + [ + z.metadata.chunk_grid.get_nchunks(z.shape) + 1, # out of bounds + -(z.metadata.chunk_grid.get_nchunks(z.shape) + 1), # out of bounds + ] + + for selection in bad_selections: + with pytest.raises(IndexError): + z.get_block_selection(selection) + with pytest.raises(IndexError): + z.blocks[selection] + + +block_selections_2d = [ + # test single item + (0, 0), + (1, 2), + # test wraparound + (-1, -1), + (-3, -2), + # test slice + (slice(1), slice(2)), + (slice(None, 2), slice(-2, -1)), + (slice(2, 3), slice(-2, None)), + (slice(-3, -1), slice(-3, -2)), + (slice(None), slice(None)), # Full slice +] + +block_selections_2d_array_projection = [ + # test single item + (slice(300), slice(3)), + (slice(300, 600), slice(6, 9)), + # test wraparound + (slice(900, None), slice(9, None)), + (slice(300, 600), slice(6, 9)), + # test slice + (slice(300), slice(6)), + (slice(None, 600), slice(6, 9)), + (slice(600, 900), slice(6, None)), + (slice(300, 900), slice(3, 6)), + (slice(None), slice(None)), # Full slice +] + + +def test_get_block_selection_2d(store: StorePath): + # setup + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + for selection, expected_idx in zip( + block_selections_2d, 
block_selections_2d_array_projection, strict=True + ): + _test_get_block_selection(a, z, selection, expected_idx) + + with pytest.raises(IndexError): + selection = slice(5, 15), [1, 2, 3] + z.get_block_selection(selection) + with pytest.raises(IndexError): + selection = Ellipsis, [1, 2, 3] + z.get_block_selection(selection) + with pytest.raises(IndexError): # out of bounds + selection = slice(15, 20), slice(None) + z.get_block_selection(selection) + + +def _test_set_block_selection(v: np.ndarray, a: np.ndarray, z: zarr.Array, selection, expected_idx): + for value in 42, v[expected_idx], v[expected_idx].tolist(): + # setup expectation + a[:] = 0 + a[expected_idx] = value + # test long-form API + z[:] = 0 + z.set_block_selection(selection, value) + assert_array_equal(a, z[:]) + # test short-form API + z[:] = 0 + z.blocks[selection] = value + assert_array_equal(a, z[:]) + + +def test_set_block_selection_1d(store: StorePath): + # setup + v = np.arange(1050, dtype=int) + a = np.empty(v.shape, dtype=v.dtype) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + for selection, expected_idx in zip( + block_selections_1d, block_selections_1d_array_projection, strict=True + ): + _test_set_block_selection(v, a, z, selection, expected_idx) + + for selection in block_selections_1d_bad: + with pytest.raises(IndexError): + z.set_block_selection(selection, 42) + with pytest.raises(IndexError): + z.blocks[selection] = 42 + + +def test_set_block_selection_2d(store: StorePath): + # setup + v = np.arange(10000, dtype=int).reshape(1000, 10) + a = np.empty(v.shape, dtype=v.dtype) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + for selection, expected_idx in zip( + block_selections_2d, block_selections_2d_array_projection, strict=True + ): + _test_set_block_selection(v, a, z, selection, expected_idx) + + with pytest.raises(IndexError): + selection = slice(5, 15), [1, 2, 3] + z.set_block_selection(selection, 42) + with pytest.raises(IndexError): + selection = Ellipsis, [1, 2, 3] + z.set_block_selection(selection, 42) + with pytest.raises(IndexError): # out of bounds + selection = slice(15, 20), slice(None) + z.set_block_selection(selection, 42) + + +def _test_get_mask_selection(a, z, selection): + expect = a[selection] + actual = z.get_mask_selection(selection) + assert_array_equal(expect, actual) + actual = z.vindex[selection] + assert_array_equal(expect, actual) + + +mask_selections_1d_bad = [ + # slice not supported + slice(5, 15), + slice(None), + Ellipsis, + # bad stuff + 2.3, + "foo", + b"xxx", + None, + (0, 0), + (slice(None), slice(None)), +] + + +# noinspection PyStatementEffect +def test_get_mask_selection_1d(store: StorePath): + # setup + a = np.arange(1050, dtype=int) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + ix = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + _test_get_mask_selection(a, z, ix) + + # test errors + bad_selections = mask_selections_1d_bad + [ + np.zeros(50, dtype=bool), # too short + np.zeros(2000, dtype=bool), # too long + [[True, False], [False, True]], # too many dimensions + ] + for selection in bad_selections: + with pytest.raises(IndexError): + z.get_mask_selection(selection) + with pytest.raises(IndexError): + z.vindex[selection] + + +# noinspection PyStatementEffect +def test_get_mask_selection_2d(store: StorePath): + # setup + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = 
zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + ix = np.random.binomial(1, p, size=a.size).astype(bool).reshape(a.shape) + _test_get_mask_selection(a, z, ix) + + # test errors + with pytest.raises(IndexError): + z.vindex[np.zeros((1000, 5), dtype=bool)] # too short + with pytest.raises(IndexError): + z.vindex[np.zeros((2000, 10), dtype=bool)] # too long + with pytest.raises(IndexError): + z.vindex[[True, False]] # wrong no. dimensions + + +def _test_set_mask_selection(v, a, z, selection): + a[:] = 0 + z[:] = 0 + a[selection] = v[selection] + z.set_mask_selection(selection, v[selection]) + assert_array_equal(a, z[:]) + z[:] = 0 + z.vindex[selection] = v[selection] + assert_array_equal(a, z[:]) + + +def test_set_mask_selection_1d(store: StorePath): + # setup + v = np.arange(1050, dtype=int) + a = np.empty_like(v) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + ix = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + _test_set_mask_selection(v, a, z, ix) + + for selection in mask_selections_1d_bad: + with pytest.raises(IndexError): + z.set_mask_selection(selection, 42) + with pytest.raises(IndexError): + z.vindex[selection] = 42 + + +def test_set_mask_selection_2d(store: StorePath): + # setup + v = np.arange(10000, dtype=int).reshape(1000, 10) + a = np.empty_like(v) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + ix = np.random.binomial(1, p, size=a.size).astype(bool).reshape(a.shape) + _test_set_mask_selection(v, a, z, ix) + + +def test_get_selection_out(store: StorePath): + # basic selections + a = np.arange(1050) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + + selections = [ + slice(50, 150), + slice(0, 1050), + slice(1, 2), + ] + for selection in selections: + expect = a[selection] + out = NDBuffer.from_numpy_array(np.empty(expect.shape)) + z.get_basic_selection(selection, out=out) + assert_array_equal(expect, out.as_numpy_array()[:]) + + with pytest.raises(TypeError): + z.get_basic_selection(Ellipsis, out=[]) + + # orthogonal selections + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + ix0 = np.random.binomial(1, p, size=a.shape[0]).astype(bool) + ix1 = np.random.binomial(1, 0.5, size=a.shape[1]).astype(bool) + selections = [ + # index both axes with array + (ix0, ix1), + # mixed indexing with array / slice + (ix0, slice(1, 5)), + (slice(250, 350), ix1), + # mixed indexing with array / int + (ix0, 4), + (42, ix1), + # mixed int array / bool array + (ix0, np.nonzero(ix1)[0]), + (np.nonzero(ix0)[0], ix1), + ] + for selection in selections: + expect = oindex(a, selection) + out = NDBuffer.from_numpy_array(np.zeros(expect.shape, dtype=expect.dtype)) + z.get_orthogonal_selection(selection, out=out) + assert_array_equal(expect, out.as_numpy_array()[:]) + + # coordinate selections + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + np.random.seed(42) + # test with different degrees of sparseness + for p in 0.5, 0.1, 0.01: + n = int(a.size * p) + ix0 = np.random.choice(a.shape[0], size=n, replace=True) + ix1 = 
np.random.choice(a.shape[1], size=n, replace=True) + selections = [ + # index both axes with array + (ix0, ix1), + # mixed indexing with array / int + (ix0, 4), + (42, ix1), + ] + for selection in selections: + expect = a[selection] + out = NDBuffer.from_numpy_array(np.zeros(expect.shape, dtype=expect.dtype)) + z.get_coordinate_selection(selection, out=out) + assert_array_equal(expect, out.as_numpy_array()[:]) + + +@pytest.mark.xfail(reason="fields are not supported in v3") +def test_get_selections_with_fields(store: StorePath): + a = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] + a = np.array(a, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(2,)) + + fields_fixture = [ + "foo", + ["foo"], + ["foo", "bar"], + ["foo", "baz"], + ["bar", "baz"], + ["foo", "bar", "baz"], + ["bar", "foo"], + ["baz", "bar", "foo"], + ] + + for fields in fields_fixture: + # total selection + expect = a[fields] + actual = z.get_basic_selection(Ellipsis, fields=fields) + assert_array_equal(expect, actual) + # alternative API + if isinstance(fields, str): + actual = z[fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z[fields[0], fields[1]] + assert_array_equal(expect, actual) + if isinstance(fields, str): + actual = z[..., fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z[..., fields[0], fields[1]] + assert_array_equal(expect, actual) + + # basic selection with slice + expect = a[fields][0:2] + actual = z.get_basic_selection(slice(0, 2), fields=fields) + assert_array_equal(expect, actual) + # alternative API + if isinstance(fields, str): + actual = z[0:2, fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z[0:2, fields[0], fields[1]] + assert_array_equal(expect, actual) + + # basic selection with single item + expect = a[fields][1] + actual = z.get_basic_selection(1, fields=fields) + assert_array_equal(expect, actual) + # alternative API + if isinstance(fields, str): + actual = z[1, fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z[1, fields[0], fields[1]] + assert_array_equal(expect, actual) + + # orthogonal selection + ix = [0, 2] + expect = a[fields][ix] + actual = z.get_orthogonal_selection(ix, fields=fields) + assert_array_equal(expect, actual) + # alternative API + if isinstance(fields, str): + actual = z.oindex[ix, fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z.oindex[ix, fields[0], fields[1]] + assert_array_equal(expect, actual) + + # coordinate selection + ix = [0, 2] + expect = a[fields][ix] + actual = z.get_coordinate_selection(ix, fields=fields) + assert_array_equal(expect, actual) + # alternative API + if isinstance(fields, str): + actual = z.vindex[ix, fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z.vindex[ix, fields[0], fields[1]] + assert_array_equal(expect, actual) + + # mask selection + ix = [True, False, True] + expect = a[fields][ix] + actual = z.get_mask_selection(ix, fields=fields) + assert_array_equal(expect, actual) + # alternative API + if isinstance(fields, str): + actual = z.vindex[ix, fields] + assert_array_equal(expect, actual) + elif len(fields) == 2: + actual = z.vindex[ix, fields[0], fields[1]] + assert_array_equal(expect, actual) + + # missing/bad fields + with pytest.raises(IndexError): + z.get_basic_selection(Ellipsis, fields=["notafield"]) + with pytest.raises(IndexError): + z.get_basic_selection(Ellipsis, 
fields=slice(None)) + + +@pytest.mark.xfail(reason="fields are not supported in v3") +def test_set_selections_with_fields(store: StorePath): + v = [("aaa", 1, 4.2), ("bbb", 2, 8.4), ("ccc", 3, 12.6)] + v = np.array(v, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) + a = np.empty_like(v) + z = zarr_array_from_numpy_array(store, v, chunk_shape=(2,)) + + fields_fixture = [ + "foo", + [], + ["foo"], + ["foo", "bar"], + ["foo", "baz"], + ["bar", "baz"], + ["foo", "bar", "baz"], + ["bar", "foo"], + ["baz", "bar", "foo"], + ] + + for fields in fields_fixture: + # currently multi-field assignment is not supported in numpy, so we won't support + # it either + if isinstance(fields, list) and len(fields) > 1: + with pytest.raises(IndexError): + z.set_basic_selection(Ellipsis, v, fields=fields) + with pytest.raises(IndexError): + z.set_orthogonal_selection([0, 2], v, fields=fields) + with pytest.raises(IndexError): + z.set_coordinate_selection([0, 2], v, fields=fields) + with pytest.raises(IndexError): + z.set_mask_selection([True, False, True], v, fields=fields) + + else: + if isinstance(fields, list) and len(fields) == 1: + # work around the fact that numpy does not support multi-field assignment + # even if there is only one field + key = fields[0] + elif isinstance(fields, list) and len(fields) == 0: + # work around numpy's ambiguity about what counts as a field selection + key = Ellipsis + else: + key = fields + + # setup expectation + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) + assert_array_equal(a, z[:]) + a[key] = v[key] + # total selection + z.set_basic_selection(Ellipsis, v[key], fields=fields) + assert_array_equal(a, z[:]) + + # basic selection with slice + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) + a[key][0:2] = v[key][0:2] + z.set_basic_selection(slice(0, 2), v[key][0:2], fields=fields) + assert_array_equal(a, z[:]) + + # orthogonal selection + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) + ix = [0, 2] + a[key][ix] = v[key][ix] + z.set_orthogonal_selection(ix, v[key][ix], fields=fields) + assert_array_equal(a, z[:]) + + # coordinate selection + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) + ix = [0, 2] + a[key][ix] = v[key][ix] + z.set_coordinate_selection(ix, v[key][ix], fields=fields) + assert_array_equal(a, z[:]) + + # mask selection + a[:] = ("", 0, 0) + z[:] = ("", 0, 0) + ix = [True, False, True] + a[key][ix] = v[key][ix] + z.set_mask_selection(ix, v[key][ix], fields=fields) + assert_array_equal(a, z[:]) + + +def test_slice_selection_uints(): + arr = np.arange(24).reshape((4, 6)) + idx = np.uint64(3) + slice_sel = make_slice_selection((idx,)) + assert arr[tuple(slice_sel)].shape == (1, 6) + + +def test_numpy_int_indexing(store: StorePath): + a = np.arange(1050) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(100,)) + assert a[42] == z[42] + assert a[np.int64(42)] == z[np.int64(42)] + + +@pytest.mark.parametrize( + "shape, chunks, ops", + [ + # 1D test cases + ((1070,), (50,), [("__getitem__", (slice(200, 400),))]), + ((1070,), (50,), [("__getitem__", (slice(200, 400, 100),))]), + ( + (1070,), + (50,), + [ + ("__getitem__", (slice(200, 400),)), + ("__setitem__", (slice(200, 400, 100),)), + ], + ), + # 2D test cases + ( + (40, 50), + (5, 8), + [ + ("__getitem__", (slice(6, 37, 13), (slice(4, 10)))), + ("__setitem__", (slice(None), (slice(None)))), + ], + ), + ], +) +def test_accessed_chunks(shape, chunks, ops): + # Test that only the required chunks are accessed during basic selection operations + # shape: array shape + # chunks: chunk size + # ops: list of tuples with (optype, tuple of slices) + # optype =
"__getitem__" or "__setitem__", tuple length must match number of dims + import itertools + + # Use a counting dict as the backing store so we can track the items access + store = CountingDict() + z = zarr_array_from_numpy_array(StorePath(store), np.zeros(shape), chunk_shape=chunks) + + for ii, (optype, slices) in enumerate(ops): + # Resolve the slices into the accessed chunks for each dimension + chunks_per_dim = [] + for N, C, sl in zip(shape, chunks, slices, strict=True): + chunk_ind = np.arange(N, dtype=int)[sl] // C + chunks_per_dim.append(np.unique(chunk_ind)) + + # Combine and generate the cartesian product to determine the chunks keys that + # will be accessed + chunks_accessed = [] + for comb in itertools.product(*chunks_per_dim): + chunks_accessed.append(".".join([str(ci) for ci in comb])) + + counts_before = store.counter.copy() + + # Perform the operation + if optype == "__getitem__": + z[slices] + else: + z[slices] = ii + + # Get the change in counts + delta_counts = store.counter - counts_before + + # Check that the access counts for the operation have increased by one for all + # the chunks we expect to be included + for ci in chunks_accessed: + assert delta_counts.pop((optype, ci)) == 1 + + # If the chunk was partially written to it will also have been read once. We + # don't determine if the chunk was actually partial here, just that the + # counts are consistent that this might have happened + if optype == "__setitem__": + assert ("__getitem__", ci) not in delta_counts or delta_counts.pop( + ("__getitem__", ci) + ) == 1 + # Check that no other chunks were accessed + assert len(delta_counts) == 0 From 72005d71fe6e78c03870b3e3b28b3bea3f367043 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 3 Jun 2024 07:35:36 -0700 Subject: [PATCH 0565/1078] Feature: group and array name properties (#1940) * feature: group and array path/name/basename properties * tests --- src/zarr/array.py | 38 ++++++++++++++++++++++++++++++++++++++ src/zarr/group.py | 38 ++++++++++++++++++++++++++++++++++++++ tests/v3/test_array.py | 36 ++++++++++++++++++++++++++++++++++++ tests/v3/test_group.py | 19 +++++++++++++++++++ 4 files changed, 131 insertions(+) create mode 100644 tests/v3/test_array.py diff --git a/src/zarr/array.py b/src/zarr/array.py index a45e7c6ba2..3e6cf5531c 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -378,6 +378,29 @@ def dtype(self) -> np.dtype[Any]: def attrs(self) -> dict[str, JSON]: return self.metadata.attributes + @property + def path(self) -> str: + """Storage path.""" + return self.store_path.path + + @property + def name(self) -> str | None: + """Array name following h5py convention.""" + if self.path: + # follow h5py convention: add leading slash + name = self.path + if name[0] != "/": + name = "/" + name + return name + return None + + @property + def basename(self) -> str | None: + """Final component of name.""" + if self.name is not None: + return self.name.split("/")[-1] + return None + async def _get_selection( self, indexer: Indexer, @@ -630,6 +653,21 @@ def dtype(self) -> np.dtype[Any]: def attrs(self) -> Attributes: return Attributes(self) + @property + def path(self) -> str: + """Storage path.""" + return self._async_array.path + + @property + def name(self) -> str | None: + """Array name following h5py convention.""" + return self._async_array.name + + @property + def basename(self) -> str | None: + """Final component of name.""" + return self._async_array.basename + @property def metadata(self) -> ArrayMetadata: return self._async_array.metadata 
diff --git a/src/zarr/group.py b/src/zarr/group.py index 88e7fd0922..ccad0e5661 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -270,6 +270,27 @@ async def _save_metadata(self) -> None: awaitables = [set_or_delete(self.store_path / key, value) for key, value in to_save.items()] await asyncio.gather(*awaitables) + @property + def path(self) -> str: + """Storage path.""" + return self.store_path.path + + @property + def name(self) -> str: + """Group name following h5py convention.""" + if self.path: + # follow h5py convention: add leading slash + name = self.path + if name[0] != "/": + name = "/" + name + return name + return "/" + + @property + def basename(self) -> str: + """Final component of name.""" + return self.name.split("/")[-1] + @property def attrs(self) -> dict[str, Any]: return self.metadata.attributes @@ -462,6 +483,7 @@ def create( store: StoreLike, *, attributes: dict[str, Any] = {}, # noqa: B006, FIXME + zarr_format: ZarrFormat = 3, exists_ok: bool = False, ) -> Group: obj = sync( @@ -469,6 +491,7 @@ def create( store, attributes=attributes, exists_ok=exists_ok, + zarr_format=zarr_format, ), ) @@ -521,6 +544,21 @@ def store_path(self) -> StorePath: def metadata(self) -> GroupMetadata: return self._async_group.metadata + @property + def path(self) -> str: + """Storage path.""" + return self._async_group.path + + @property + def name(self) -> str: + """Group name following h5py convention.""" + return self._async_group.name + + @property + def basename(self) -> str: + """Final component of name.""" + return self._async_group.basename + @property def attrs(self) -> Attributes: return Attributes(self) diff --git a/tests/v3/test_array.py b/tests/v3/test_array.py new file mode 100644 index 0000000000..203cfbf860 --- /dev/null +++ b/tests/v3/test_array.py @@ -0,0 +1,36 @@ +import pytest + +from zarr.array import Array +from zarr.common import ZarrFormat +from zarr.group import Group +from zarr.store import LocalStore, MemoryStore + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("zarr_format", (2, 3)) +def test_array_name_properties_no_group( + store: LocalStore | MemoryStore, zarr_format: ZarrFormat +) -> None: + arr = Array.create(store=store, shape=(100,), chunks=(10,), zarr_format=zarr_format, dtype="i4") + assert arr.path == "" + assert arr.name is None + assert arr.basename is None + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("zarr_format", (2, 3)) +def test_array_name_properties_with_group( + store: LocalStore | MemoryStore, zarr_format: ZarrFormat +) -> None: + root = Group.create(store=store, zarr_format=zarr_format) + foo = root.create_array("foo", shape=(100,), chunks=(10,), dtype="i4") + assert foo.path == "foo" + assert foo.name == "/foo" + assert foo.basename == "foo" + + bar = root.create_group("bar") + spam = bar.create_array("spam", shape=(100,), chunks=(10,), dtype="i4") + + assert spam.path == "bar/spam" + assert spam.name == "/bar/spam" + assert spam.basename == "spam" diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 4d55d72282..9ce9b07a20 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -372,3 +372,22 @@ def test_group_init(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> agroup = sync(AsyncGroup.create(store=store, zarr_format=zarr_format)) group = Group(agroup) assert group._async_group == agroup + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) 
+@pytest.mark.parametrize("zarr_format", (2, 3)) +def test_group_name_properties(store: LocalStore | MemoryStore, zarr_format: ZarrFormat) -> None: + root = Group.create(store=store, zarr_format=zarr_format) + assert root.path == "" + assert root.name == "/" + assert root.basename == "" + + foo = root.create_group("foo") + assert foo.path == "foo" + assert foo.name == "/foo" + assert foo.basename == "foo" + + bar = root.create_group("foo/bar") + assert bar.path == "foo/bar" + assert bar.name == "/foo/bar" + assert bar.basename == "bar" From da9885cdf4da16a4b52695b5522bc42982bb2f41 Mon Sep 17 00:00:00 2001 From: Ryan Abernathey Date: Mon, 3 Jun 2024 14:07:44 -0400 Subject: [PATCH 0566/1078] implement .chunks on v3 arrays (#1929) * implement .chunks on v3 arrays * remove noqa: B009 * make mypy happy * only return chunks for regular chunk grids --------- Co-authored-by: Davis Bennett Co-authored-by: Joseph Hamman --- src/zarr/array.py | 13 +++++++++++++ tests/v3/test_group.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/src/zarr/array.py b/src/zarr/array.py index 3e6cf5531c..698894ba0c 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -366,6 +366,15 @@ def ndim(self) -> int: def shape(self) -> ChunkCoords: return self.metadata.shape + @property + def chunks(self) -> ChunkCoords: + if isinstance(self.metadata.chunk_grid, RegularChunkGrid): + return self.metadata.chunk_grid.chunk_shape + else: + raise ValueError( + f"chunk attribute is only available for RegularChunkGrid, this array has a {self.metadata.chunk_grid}" + ) + @property def size(self) -> int: return np.prod(self.metadata.shape).item() @@ -641,6 +650,10 @@ def ndim(self) -> int: def shape(self) -> ChunkCoords: return self._async_array.shape + @property + def chunks(self) -> ChunkCoords: + return self._async_array.chunks + @property def size(self) -> int: return self._async_array.size diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py index 9ce9b07a20..c529e2491f 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -79,7 +79,7 @@ def test_group(store: MemoryStore | LocalStore) -> None: assert arr.dtype == data.dtype # TODO: update this once the array api settles down - # assert arr.chunk_shape == (2, 2) + assert arr.chunks == (2, 2) bar2 = foo["bar"] assert dict(bar2.attrs) == {"baz": "qux"} From 859994455e4b0e451d72db1974da4f2f2362d79d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 13:29:05 -0700 Subject: [PATCH 0567/1078] chore: update pre-commit hooks (#1948) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.5 → v0.4.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.5...v0.4.7) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 77b4a45ff5..4bb39f0e06 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 'v0.4.5' + rev: 'v0.4.7' hooks: - id: ruff args: ["--fix", "--show-fixes"] From 5c4dc2339d68f6ca48ed6949b0e0b08371e46245 Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Tue, 4 Jun 2024 12:57:55 +0200 Subject: [PATCH 0568/1078] fixes bug in transpose (#1949) --- 
src/zarr/codecs/transpose.py | 6 +++--- tests/v3/test_codecs.py | 17 +++++++++++++++++ 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 373a27cab9..9fcee4e66b 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -4,6 +4,8 @@ from dataclasses import dataclass, replace from typing import TYPE_CHECKING, cast +import numpy as np + from zarr.abc.codec import ArrayArrayCodec from zarr.buffer import NDBuffer from zarr.codecs.registry import register_codec @@ -76,9 +78,7 @@ async def _decode_single( chunk_array: NDBuffer, chunk_spec: ArraySpec, ) -> NDBuffer: - inverse_order = [0] * chunk_spec.ndim - for x, i in enumerate(self.order): - inverse_order[x] = i + inverse_order = np.argsort(self.order) chunk_array = chunk_array.transpose(inverse_order) return chunk_array diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index 251570f767..514294c4b0 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -406,6 +406,23 @@ async def test_transpose( assert await (store / "transpose/0.0").get() == await (store / "transpose_zarr/0.0").get() +@pytest.mark.parametrize("order", [[1, 2, 0], [1, 2, 3, 0], [3, 2, 4, 0, 1]]) +def test_transpose_non_self_inverse(store: Store, order): + shape = [i + 3 for i in range(len(order))] + data = np.arange(0, np.prod(shape), dtype="uint16").reshape(shape) + a = Array.create( + store / "transpose_non_self_inverse", + shape=data.shape, + chunk_shape=data.shape, + dtype=data.dtype, + fill_value=0, + codecs=[TransposeCodec(order=order), BytesCodec()], + ) + a[:, :] = data + read_data = a[:, :] + assert np.array_equal(data, read_data) + + def test_transpose_invalid( store: Store, ): From b431cf75d5a1495f6a69b3d961bbd527658bd919 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 4 Jun 2024 10:04:29 -0700 Subject: [PATCH 0569/1078] Create issue-metrics.yml --- .github/workflows/issue-metrics.yml | 42 +++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .github/workflows/issue-metrics.yml diff --git a/.github/workflows/issue-metrics.yml b/.github/workflows/issue-metrics.yml new file mode 100644 index 0000000000..34bda59ff6 --- /dev/null +++ b/.github/workflows/issue-metrics.yml @@ -0,0 +1,42 @@ +name: Monthly issue metrics +on: + workflow_dispatch: + schedule: + - cron: '3 2 1 * *' + +permissions: + contents: read + +jobs: + build: + name: issue metrics + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: read + steps: + - name: Get dates for last month + shell: bash + run: | + # Calculate the first day of the previous month + first_day=$(date -d "last month" +%Y-%m-01) + + # Calculate the last day of the previous month + last_day=$(date -d "$first_day +1 month -1 day" +%Y-%m-%d) + + #Set an environment variable with the date range + echo "$first_day..$last_day" + echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV" + + - name: Run issue-metrics tool + uses: github/issue-metrics@v3 + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SEARCH_QUERY: 'repo:zarr-developers/zarr-python is:issue created:${{ env.last_month }} -reason:"not planned"' + + - name: Create issue + uses: peter-evans/create-issue-from-file@v5 + with: + title: Monthly issue metrics report + token: ${{ secrets.GITHUB_TOKEN }} + content-filepath: ./issue_metrics.md From 661acb37f08a77cc1a86c0da55288e89f2388801 Mon Sep 17 00:00:00 2001 From: "Mads R. B. 
Kristensen" Date: Tue, 4 Jun 2024 22:43:56 +0200 Subject: [PATCH 0570/1078] Buffer Prototype Argument (#1910) --- src/zarr/abc/codec.py | 3 +- src/zarr/abc/store.py | 19 ++- src/zarr/array.py | 104 ++++++++++---- src/zarr/array_spec.py | 41 ++++++ src/zarr/buffer.py | 142 ++++++++----------- src/zarr/codecs/_v2.py | 3 +- src/zarr/codecs/blosc.py | 19 +-- src/zarr/codecs/bytes.py | 19 +-- src/zarr/codecs/crc32c_.py | 13 +- src/zarr/codecs/gzip.py | 17 ++- src/zarr/codecs/pipeline.py | 24 ++-- src/zarr/codecs/sharding.py | 45 ++++-- src/zarr/codecs/transpose.py | 8 +- src/zarr/codecs/zstd.py | 17 ++- src/zarr/common.py | 26 ---- src/zarr/metadata.py | 23 +-- src/zarr/store/core.py | 10 +- src/zarr/store/local.py | 28 ++-- src/zarr/store/memory.py | 20 ++- src/zarr/store/remote.py | 7 +- src/zarr/testing/store.py | 10 +- tests/v3/package_with_entrypoint/__init__.py | 3 +- tests/v3/test_buffer.py | 80 ++++++++++- tests/v3/test_group.py | 2 +- tests/v3/test_indexing.py | 7 +- 25 files changed, 438 insertions(+), 252 deletions(-) create mode 100644 src/zarr/array_spec.py diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 0836d878ae..1f452159ed 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -13,11 +13,10 @@ if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import ArraySpec + from zarr.array_spec import ArraySpec from zarr.indexing import SelectorTuple from zarr.metadata import ArrayMetadata - CodecInput = TypeVar("CodecInput", bound=NDBuffer | Buffer) CodecOutput = TypeVar("CodecOutput", bound=NDBuffer | Buffer) diff --git a/src/zarr/abc/store.py b/src/zarr/abc/store.py index e86fe5d07a..14566dfed2 100644 --- a/src/zarr/abc/store.py +++ b/src/zarr/abc/store.py @@ -2,7 +2,7 @@ from collections.abc import AsyncGenerator from typing import Protocol, runtime_checkable -from zarr.buffer import Buffer +from zarr.buffer import Buffer, BufferPrototype from zarr.common import BytesLike, OpenMode @@ -30,7 +30,10 @@ def _check_writable(self) -> None: @abstractmethod async def get( - self, key: str, byte_range: tuple[int | None, int | None] | None = None + self, + key: str, + prototype: BufferPrototype, + byte_range: tuple[int | None, int | None] | None = None, ) -> Buffer | None: """Retrieve the value associated with a given key. @@ -47,7 +50,9 @@ async def get( @abstractmethod async def get_partial_values( - self, key_ranges: list[tuple[str, tuple[int | None, int | None]]] + self, + prototype: BufferPrototype, + key_ranges: list[tuple[str, tuple[int | None, int | None]]], ) -> list[Buffer | None]: """Retrieve possibly partial values from given key_ranges. @@ -175,12 +180,16 @@ def close(self) -> None: # noqa: B027 @runtime_checkable class ByteGetter(Protocol): - async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: ... + async def get( + self, prototype: BufferPrototype, byte_range: tuple[int, int | None] | None = None + ) -> Buffer | None: ... @runtime_checkable class ByteSetter(Protocol): - async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: ... + async def get( + self, prototype: BufferPrototype, byte_range: tuple[int, int | None] | None = None + ) -> Buffer | None: ... async def set(self, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: ... 
diff --git a/src/zarr/array.py b/src/zarr/array.py index 698894ba0c..28b19f44f0 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -20,7 +20,7 @@ from zarr.abc.codec import Codec from zarr.abc.store import set_or_delete from zarr.attributes import Attributes -from zarr.buffer import Factory, NDArrayLike, NDBuffer +from zarr.buffer import BufferPrototype, NDArrayLike, NDBuffer, default_buffer_prototype from zarr.chunk_grids import RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, DefaultChunkKeyEncoding, V2ChunkKeyEncoding from zarr.codecs import BytesCodec @@ -414,8 +414,8 @@ async def _get_selection( self, indexer: Indexer, *, + prototype: BufferPrototype, out: NDBuffer | None = None, - factory: Factory.Create = NDBuffer.create, fields: Fields | None = None, ) -> NDArrayLike: # check fields are sensible @@ -432,7 +432,7 @@ async def _get_selection( f"shape of out argument doesn't match. Expected {indexer.shape}, got {out.shape}" ) else: - out_buffer = factory( + out_buffer = prototype.nd_buffer.create( shape=indexer.shape, dtype=out_dtype, order=self.order, @@ -444,7 +444,7 @@ async def _get_selection( [ ( self.store_path / self.metadata.encode_chunk_key(chunk_coords), - self.metadata.get_chunk_spec(chunk_coords, self.order), + self.metadata.get_chunk_spec(chunk_coords, self.order, prototype=prototype), chunk_selection, out_selection, ) @@ -456,14 +456,14 @@ async def _get_selection( return out_buffer.as_ndarray_like() async def getitem( - self, selection: Selection, *, factory: Factory.Create = NDBuffer.create + self, selection: Selection, *, prototype: BufferPrototype = default_buffer_prototype ) -> NDArrayLike: indexer = BasicIndexer( selection, shape=self.metadata.shape, chunk_grid=self.metadata.chunk_grid, ) - return await self._get_selection(indexer, factory=factory) + return await self._get_selection(indexer, prototype=prototype) async def _save_metadata(self, metadata: ArrayMetadata) -> None: to_save = metadata.to_buffer_dict() @@ -475,7 +475,7 @@ async def _set_selection( indexer: Indexer, value: NDArrayLike, *, - factory: Factory.NDArrayLike = NDBuffer.from_ndarray_like, + prototype: BufferPrototype, fields: Fields | None = None, ) -> None: # check fields are sensible @@ -497,14 +497,14 @@ async def _set_selection( # We accept any ndarray like object from the user and convert it # to a NDBuffer (or subclass). From this point onwards, we only pass # Buffer and NDBuffer between components. 
- value_buffer = factory(value) + value_buffer = prototype.nd_buffer.from_ndarray_like(value) # merging with existing data and encoding chunks await self.metadata.codec_pipeline.write( [ ( self.store_path / self.metadata.encode_chunk_key(chunk_coords), - self.metadata.get_chunk_spec(chunk_coords, self.order), + self.metadata.get_chunk_spec(chunk_coords, self.order, prototype), chunk_selection, out_selection, ) @@ -518,14 +518,14 @@ async def setitem( self, selection: Selection, value: NDArrayLike, - factory: Factory.NDArrayLike = NDBuffer.from_ndarray_like, + prototype: BufferPrototype = default_buffer_prototype, ) -> None: indexer = BasicIndexer( selection, shape=self.metadata.shape, chunk_grid=self.metadata.chunk_grid, ) - return await self._set_selection(indexer, value, factory=factory) + return await self._set_selection(indexer, value, prototype=prototype) async def resize( self, new_shape: ChunkCoords, delete_outside_chunks: bool = True @@ -714,7 +714,9 @@ def __setitem__(self, selection: Selection, value: NDArrayLike) -> None: def get_basic_selection( self, selection: BasicSelection = Ellipsis, + *, out: NDBuffer | None = None, + prototype: BufferPrototype = default_buffer_prototype, fields: Fields | None = None, ) -> NDArrayLike: if self.shape == (): @@ -725,57 +727,101 @@ def get_basic_selection( BasicIndexer(selection, self.shape, self.metadata.chunk_grid), out=out, fields=fields, + prototype=prototype, ) ) def set_basic_selection( - self, selection: BasicSelection, value: NDArrayLike, fields: Fields | None = None + self, + selection: BasicSelection, + value: NDArrayLike, + *, + fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> None: indexer = BasicIndexer(selection, self.shape, self.metadata.chunk_grid) - sync(self._async_array._set_selection(indexer, value, fields=fields)) + sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype)) def get_orthogonal_selection( self, selection: OrthogonalSelection, + *, out: NDBuffer | None = None, fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> NDArrayLike: indexer = OrthogonalIndexer(selection, self.shape, self.metadata.chunk_grid) - return sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + return sync( + self._async_array._get_selection( + indexer=indexer, out=out, fields=fields, prototype=prototype + ) + ) def set_orthogonal_selection( - self, selection: OrthogonalSelection, value: NDArrayLike, fields: Fields | None = None + self, + selection: OrthogonalSelection, + value: NDArrayLike, + *, + fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> None: indexer = OrthogonalIndexer(selection, self.shape, self.metadata.chunk_grid) - return sync(self._async_array._set_selection(indexer, value, fields=fields)) + return sync( + self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype) + ) def get_mask_selection( - self, mask: MaskSelection, out: NDBuffer | None = None, fields: Fields | None = None + self, + mask: MaskSelection, + *, + out: NDBuffer | None = None, + fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> NDArrayLike: indexer = MaskIndexer(mask, self.shape, self.metadata.chunk_grid) - return sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + return sync( + self._async_array._get_selection( + indexer=indexer, out=out, fields=fields, prototype=prototype + ) + 
) def set_mask_selection( - self, mask: MaskSelection, value: NDArrayLike, fields: Fields | None = None + self, + mask: MaskSelection, + value: NDArrayLike, + *, + fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> None: indexer = MaskIndexer(mask, self.shape, self.metadata.chunk_grid) - sync(self._async_array._set_selection(indexer, value, fields=fields)) + sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype)) def get_coordinate_selection( self, selection: CoordinateSelection, + *, out: NDBuffer | None = None, fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> NDArrayLike: indexer = CoordinateIndexer(selection, self.shape, self.metadata.chunk_grid) - out_array = sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + out_array = sync( + self._async_array._get_selection( + indexer=indexer, out=out, fields=fields, prototype=prototype + ) + ) # restore shape out_array = out_array.reshape(indexer.sel_shape) return out_array def set_coordinate_selection( - self, selection: CoordinateSelection, value: NDArrayLike, fields: Fields | None = None + self, + selection: CoordinateSelection, + value: NDArrayLike, + *, + fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> None: # setup indexer indexer = CoordinateIndexer(selection, self.shape, self.metadata.chunk_grid) @@ -792,25 +838,33 @@ def set_coordinate_selection( if hasattr(value, "shape") and len(value.shape) > 1: value = value.reshape(-1) - sync(self._async_array._set_selection(indexer, value, fields=fields)) + sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype)) def get_block_selection( self, selection: BlockSelection, + *, out: NDBuffer | None = None, fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> NDArrayLike: indexer = BlockIndexer(selection, self.shape, self.metadata.chunk_grid) - return sync(self._async_array._get_selection(indexer=indexer, out=out, fields=fields)) + return sync( + self._async_array._get_selection( + indexer=indexer, out=out, fields=fields, prototype=prototype + ) + ) def set_block_selection( self, selection: BlockSelection, value: NDArrayLike, + *, fields: Fields | None = None, + prototype: BufferPrototype = default_buffer_prototype, ) -> None: indexer = BlockIndexer(selection, self.shape, self.metadata.chunk_grid) - sync(self._async_array._set_selection(indexer, value, fields=fields)) + sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype)) @property def vindex(self) -> VIndex: diff --git a/src/zarr/array_spec.py b/src/zarr/array_spec.py new file mode 100644 index 0000000000..d5717944b4 --- /dev/null +++ b/src/zarr/array_spec.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, Literal + +import numpy as np + +from zarr.buffer import BufferPrototype +from zarr.common import ChunkCoords, parse_dtype, parse_fill_value, parse_order, parse_shapelike + + +@dataclass(frozen=True) +class ArraySpec: + shape: ChunkCoords + dtype: np.dtype[Any] + fill_value: Any + order: Literal["C", "F"] + prototype: BufferPrototype + + def __init__( + self, + shape: ChunkCoords, + dtype: np.dtype[Any], + fill_value: Any, + order: Literal["C", "F"], + prototype: BufferPrototype, + ) -> None: + shape_parsed = parse_shapelike(shape) + dtype_parsed = parse_dtype(dtype) + 
fill_value_parsed = parse_fill_value(fill_value) + order_parsed = parse_order(order) + + object.__setattr__(self, "shape", shape_parsed) + object.__setattr__(self, "dtype", dtype_parsed) + object.__setattr__(self, "fill_value", fill_value_parsed) + object.__setattr__(self, "order", order_parsed) + object.__setattr__(self, "prototype", prototype) + + @property + def ndim(self) -> int: + return len(self.shape) diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 138c7f66d2..1a34d9f290 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -6,6 +6,7 @@ TYPE_CHECKING, Any, Literal, + NamedTuple, Protocol, SupportsIndex, runtime_checkable, @@ -77,7 +78,7 @@ def copy(self) -> Self: ... def transpose(self, axes: SupportsIndex | Sequence[SupportsIndex] | None) -> Self: ... - def ravel(self, order: Literal["K", "A", "C", "F"] = "C") -> Self: ... + def ravel(self, order: Literal["K", "A", "C", "F"] = ...) -> Self: ... def all(self) -> bool: ... @@ -103,56 +104,6 @@ def check_item_key_is_1d_contiguous(key: Any) -> None: raise ValueError("slice must be contiguous") -class Factory: - class Create(Protocol): - def __call__( - self, - *, - shape: Iterable[int], - dtype: npt.DTypeLike, - order: Literal["C", "F"], - fill_value: Any | None, - ) -> NDBuffer: - """Factory function to create a new NDBuffer (or subclass) - - Callables implementing the `Factory.Create` protocol must create a new - instance of NDBuffer (or subclass) given the following parameters. - - Parameters - ---------- - shape - The shape of the new buffer - dtype - The datatype of each element in the new buffer - order - Whether to store multi-dimensional data in row-major (C-style) or - column-major (Fortran-style) order in memory. - fill_value - If not None, fill the new buffer with a scalar value. - - Return - ------ - A new NDBuffer or subclass instance - """ - - class NDArrayLike(Protocol): - def __call__(self, ndarray_like: NDArrayLike) -> NDBuffer: - """Factory function to coerce an array into a NDBuffer (or subclass) - - Callables implementing the `Factory.NDArrayLike` protocol must return - an instance of NDBuffer (or subclass) given an ndarray-like object. - - Parameters - ---------- - ndarray_like - ndarray-like object - - Return - ------ - A NDBuffer or subclass instance that represents `ndarray_like` - """ - - class Buffer: """A flat contiguous memory block @@ -185,8 +136,8 @@ def __init__(self, array_like: ArrayLike): def create_zero_length(cls) -> Self: """Create an empty buffer with length zero - Return - ------ + Returns + ------- New empty 0-length buffer """ return cls(np.array([], dtype="b")) @@ -200,8 +151,8 @@ def from_array_like(cls, array_like: ArrayLike) -> Self: array_like array-like object that must be 1-dim, contiguous, and byte dtype. - Return - ------ + Returns + ------- New buffer representing `array_like` """ return cls(array_like) @@ -215,46 +166,46 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self: bytes_like bytes-like object - Return - ------ + Returns + ------- New buffer representing `bytes_like` """ return cls.from_array_like(np.frombuffer(bytes_like, dtype="b")) def as_array_like(self) -> ArrayLike: - """Return the underlying array (host or device memory) of this buffer + """Returns the underlying array (host or device memory) of this buffer This will never copy data. - Return - ------ + Returns + ------- The underlying 1d array such as a NumPy or CuPy array. """ return self._data def as_numpy_array(self) -> npt.NDArray[Any]: - """Return the buffer as a NumPy array (host memory). 
+ """Returns the buffer as a NumPy array (host memory). Warning ------- Might have to copy data, consider using `.as_array_like()` instead. - Return - ------ + Returns + ------- NumPy array of this buffer (might be a data copy) """ return np.asanyarray(self._data) def to_bytes(self) -> bytes: - """Return the buffer as `bytes` (host memory). + """Returns the buffer as `bytes` (host memory). Warning ------- Will always copy data, only use this method for small buffers such as metadata buffers. If possible, use `.as_numpy_array()` or `.as_array_like()` instead. - Return - ------ + Returns + ------- `bytes` of this buffer (data copy) """ return bytes(self.as_numpy_array()) @@ -333,8 +284,8 @@ def create( fill_value If not None, fill the new buffer with a scalar value. - Return - ------ + Returns + ------- New buffer representing a new ndarray_like object Developer Notes @@ -356,8 +307,8 @@ def from_ndarray_like(cls, ndarray_like: NDArrayLike) -> Self: ndarray_like ndarray-like object - Return - ------ + Returns + ------- New buffer representing `ndarray_like` """ return cls(ndarray_like) @@ -371,32 +322,32 @@ def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self: array_like Object that can be coerced into a Numpy array - Return - ------ + Returns + ------- New buffer representing `array_like` """ return cls.from_ndarray_like(np.asanyarray(array_like)) def as_ndarray_like(self) -> NDArrayLike: - """Return the underlying array (host or device memory) of this buffer + """Returns the underlying array (host or device memory) of this buffer This will never copy data. - Return - ------ + Returns + ------- The underlying array such as a NumPy or CuPy array. """ return self._data def as_numpy_array(self) -> npt.NDArray[Any]: - """Return the buffer as a NumPy array (host memory). + """Returns the buffer as a NumPy array (host memory). Warning ------- Might have to copy data, consider using `.as_ndarray_like()` instead. - Return - ------ + Returns + ------- NumPy array of this buffer (might be a data copy) """ return np.asanyarray(self._data) @@ -457,7 +408,9 @@ def transpose(self, axes: SupportsIndex | Sequence[SupportsIndex] | None) -> Sel return self.__class__(self._data.transpose(axes)) -def as_numpy_array_wrapper(func: Callable[[npt.NDArray[Any]], bytes], buf: Buffer) -> Buffer: +def as_numpy_array_wrapper( + func: Callable[[npt.NDArray[Any]], bytes], buf: Buffer, prototype: BufferPrototype +) -> Buffer: """Converts the input of `func` to a numpy array and the output back to `Buffer`. This function is useful when calling a `func` that only support host memory such @@ -473,9 +426,32 @@ def as_numpy_array_wrapper(func: Callable[[npt.NDArray[Any]], bytes], buf: Buffe buf The buffer that will be converted to a Numpy array before given as input to `func`. + prototype + The prototype of the output buffer. + + Returns + ------- + The result of `func` converted to a `prototype.buffer` + """ + return prototype.buffer.from_bytes(func(buf.as_numpy_array())) + + +class BufferPrototype(NamedTuple): + """Prototype of the Buffer and NDBuffer class + + The protocol must be pickable. - Return - ------ - The result of `func` converted to a `Buffer` + Attributes + ---------- + buffer + The Buffer class to use when Zarr needs to create new Buffer. + nd_buffer + The NDBuffer class to use when Zarr needs to create new NDBuffer. """ - return Buffer.from_bytes(func(buf.as_numpy_array())) + + buffer: type[Buffer] + nd_buffer: type[NDBuffer] + + +# The default buffer prototype used throughout the Zarr codebase. 
+default_buffer_prototype = BufferPrototype(buffer=Buffer, nd_buffer=NDBuffer) diff --git a/src/zarr/codecs/_v2.py index c4e4756094..c43a087a94 100644 --- a/src/zarr/codecs/_v2.py +++ b/src/zarr/codecs/_v2.py @@ -6,8 +6,9 @@ from numcodecs.compat import ensure_bytes, ensure_ndarray from zarr.abc.codec import ArrayArrayCodec, ArrayBytesCodec +from zarr.array_spec import ArraySpec from zarr.buffer import Buffer, NDBuffer -from zarr.common import JSON, ArraySpec, to_thread +from zarr.common import JSON, to_thread @dataclass(frozen=True) diff --git a/src/zarr/codecs/blosc.py index acba698d94..e577d18fb2 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -9,15 +9,14 @@ from numcodecs.blosc import Blosc from zarr.abc.codec import BytesBytesCodec +from zarr.array_spec import ArraySpec from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec -from zarr.common import parse_enum, parse_named_configuration, to_thread +from zarr.common import JSON, parse_enum, parse_named_configuration, to_thread if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import JSON, ArraySpec - class BloscShuffle(Enum): noshuffle = "noshuffle" @@ -161,19 +160,23 @@ def _blosc_codec(self) -> Blosc: async def _decode_single( self, chunk_bytes: Buffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer: - return await to_thread(as_numpy_array_wrapper, self._blosc_codec.decode, chunk_bytes) + return await to_thread( + as_numpy_array_wrapper, self._blosc_codec.decode, chunk_bytes, chunk_spec.prototype + ) async def _encode_single( self, chunk_bytes: Buffer, chunk_spec: ArraySpec, ) -> Buffer | None: - # Since blosc only takes bytes, we convert the input and output of the encoding - # between bytes and Buffer + # Since blosc only supports host memory, we convert the input and output of the encoding + # between numpy arrays and buffers return await to_thread( - lambda chunk: Buffer.from_bytes(self._blosc_codec.encode(chunk.as_array_like())), + lambda chunk: chunk_spec.prototype.buffer.from_bytes( + self._blosc_codec.encode(chunk.as_numpy_array()) + ), chunk_bytes, ) diff --git a/src/zarr/codecs/bytes.py index f275ae37d1..0b9a5c089e 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -8,15 +8,14 @@ import numpy as np from zarr.abc.codec import ArrayBytesCodec +from zarr.array_spec import ArraySpec from zarr.buffer import Buffer, NDArrayLike, NDBuffer from zarr.codecs.registry import register_codec -from zarr.common import parse_enum, parse_named_configuration +from zarr.common import JSON, parse_enum, parse_named_configuration if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import JSON, ArraySpec - class Endian(Enum): big = "big" @@ -81,7 +80,9 @@ async def _decode_single( as_nd_array_like = as_array_like else: as_nd_array_like = np.asanyarray(as_array_like) - chunk_array = NDBuffer.from_ndarray_like(as_nd_array_like.view(dtype=dtype)) + chunk_array = chunk_spec.prototype.nd_buffer.from_ndarray_like( + as_nd_array_like.view(dtype=dtype) + ) # ensure correct chunk shape if chunk_array.shape != chunk_spec.shape: @@ -93,7 +94,7 @@ async def _decode_single( async def _encode_single( self, chunk_array: NDBuffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer | None: assert isinstance(chunk_array, NDBuffer) if chunk_array.dtype.itemsize > 1: @@ -103,10 +104,10 @@ async def _encode_single( new_dtype =
chunk_array.dtype.newbyteorder(self.endian.name) # type: ignore[arg-type] chunk_array = chunk_array.astype(new_dtype) - as_nd_array_like = chunk_array.as_ndarray_like() - # Flatten the nd-array (only copy if needed) - as_nd_array_like = as_nd_array_like.ravel().view(dtype="b") - return Buffer.from_array_like(as_nd_array_like) + nd_array = chunk_array.as_ndarray_like() + # Flatten the nd-array (only copy if needed) and reinterpret as bytes + nd_array = nd_array.ravel().view(dtype="b") + return chunk_spec.prototype.buffer.from_array_like(nd_array) def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py index 724b785d67..b670b25429 100644 --- a/src/zarr/codecs/crc32c_.py +++ b/src/zarr/codecs/crc32c_.py @@ -7,15 +7,14 @@ from crc32c import crc32c from zarr.abc.codec import BytesBytesCodec +from zarr.array_spec import ArraySpec from zarr.buffer import Buffer from zarr.codecs.registry import register_codec -from zarr.common import parse_named_configuration +from zarr.common import JSON, parse_named_configuration if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import JSON, ArraySpec - @dataclass(frozen=True) class Crc32cCodec(BytesBytesCodec): @@ -32,7 +31,7 @@ def to_dict(self) -> dict[str, JSON]: async def _decode_single( self, chunk_bytes: Buffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer: data = chunk_bytes.as_numpy_array() crc32_bytes = data[-4:] @@ -44,18 +43,18 @@ async def _decode_single( raise ValueError( f"Stored and computed checksum do not match. Stored: {stored_checksum!r}. Computed: {computed_checksum!r}." ) - return Buffer.from_array_like(inner_bytes) + return chunk_spec.prototype.buffer.from_array_like(inner_bytes) async def _encode_single( self, chunk_bytes: Buffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer | None: data = chunk_bytes.as_numpy_array() # Calculate the checksum and "cast" it to a numpy array checksum = np.array([crc32c(data)], dtype=np.uint32) # Append the checksum (as bytes) to the data - return Buffer.from_array_like(np.append(data, checksum.view("b"))) + return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("b"))) def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int: return input_byte_length + 4 diff --git a/src/zarr/codecs/gzip.py b/src/zarr/codecs/gzip.py index 6a8aaf08bb..0ad97c1207 100644 --- a/src/zarr/codecs/gzip.py +++ b/src/zarr/codecs/gzip.py @@ -6,15 +6,14 @@ from numcodecs.gzip import GZip from zarr.abc.codec import BytesBytesCodec +from zarr.array_spec import ArraySpec from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec -from zarr.common import parse_named_configuration, to_thread +from zarr.common import JSON, parse_named_configuration, to_thread if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import JSON, ArraySpec - def parse_gzip_level(data: JSON) -> int: if not isinstance(data, (int)): @@ -48,16 +47,20 @@ def to_dict(self) -> dict[str, JSON]: async def _decode_single( self, chunk_bytes: Buffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer: - return await to_thread(as_numpy_array_wrapper, GZip(self.level).decode, chunk_bytes) + return await to_thread( + as_numpy_array_wrapper, GZip(self.level).decode, chunk_bytes, chunk_spec.prototype + ) async def _encode_single( self, chunk_bytes: Buffer, - _chunk_spec: 
ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer | None: - return await to_thread(as_numpy_array_wrapper, GZip(self.level).encode, chunk_bytes) + return await to_thread( + as_numpy_array_wrapper, GZip(self.level).encode, chunk_bytes, chunk_spec.prototype + ) def compute_encoded_size( self, diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index ada4ae23f9..acef311a8c 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -16,7 +16,7 @@ CodecPipeline, ) from zarr.abc.store import ByteGetter, ByteSetter -from zarr.buffer import Buffer, NDBuffer +from zarr.buffer import Buffer, BufferPrototype, NDBuffer from zarr.codecs.registry import get_codec_class from zarr.common import JSON, concurrent_map, parse_named_configuration from zarr.config import config @@ -26,7 +26,7 @@ if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import ArraySpec + from zarr.array_spec import ArraySpec T = TypeVar("T") U = TypeVar("U") @@ -310,8 +310,11 @@ async def read_batch( out[out_selection] = chunk_spec.fill_value else: chunk_bytes_batch = await concurrent_map( - [(byte_getter,) for byte_getter, _, _, _ in batch_info], - lambda byte_getter: byte_getter.get(), + [ + (byte_getter, array_spec.prototype) + for byte_getter, array_spec, _, _ in batch_info + ], + lambda byte_getter, prototype: byte_getter.get(prototype), config.get("async.concurrency"), ) chunk_array_batch = await self.decode_batch( @@ -345,7 +348,7 @@ def _merge_chunk_array( if is_total_slice(chunk_selection, chunk_spec.shape) and value.shape == chunk_spec.shape: return value if existing_chunk_array is None: - chunk_array = NDBuffer.create( + chunk_array = chunk_spec.prototype.nd_buffer.create( shape=chunk_spec.shape, dtype=chunk_spec.dtype, order=chunk_spec.order, @@ -387,15 +390,20 @@ async def write_batch( else: # Read existing bytes if not total slice - async def _read_key(byte_setter: ByteSetter | None) -> Buffer | None: + async def _read_key( + byte_setter: ByteSetter | None, prototype: BufferPrototype + ) -> Buffer | None: if byte_setter is None: return None - return await byte_setter.get() + return await byte_setter.get(prototype=prototype) chunk_bytes_batch: Iterable[Buffer | None] chunk_bytes_batch = await concurrent_map( [ - (None if is_total_slice(chunk_selection, chunk_spec.shape) else byte_setter,) + ( + None if is_total_slice(chunk_selection, chunk_spec.shape) else byte_setter, + chunk_spec.prototype, + ) for byte_setter, chunk_spec, chunk_selection, _ in batch_info ], _read_key, diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index dab2810f35..74ad5ac44f 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -18,14 +18,14 @@ CodecPipeline, ) from zarr.abc.store import ByteGetter, ByteSetter -from zarr.buffer import Buffer, NDBuffer +from zarr.array_spec import ArraySpec +from zarr.buffer import Buffer, BufferPrototype, NDBuffer, default_buffer_prototype from zarr.chunk_grids import RegularChunkGrid from zarr.codecs.bytes import BytesCodec from zarr.codecs.crc32c_ import Crc32cCodec from zarr.codecs.pipeline import BatchedCodecPipeline from zarr.codecs.registry import register_codec from zarr.common import ( - ArraySpec, ChunkCoords, ChunkCoordsLike, parse_enum, @@ -62,8 +62,13 @@ class _ShardingByteGetter(ByteGetter): shard_dict: ShardMapping chunk_coords: ChunkCoords - async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: + async def get( + self, prototype: BufferPrototype, byte_range: 
tuple[int, int | None] | None = None
+    ) -> Buffer | None:
         assert byte_range is None, "byte_range is not supported within shards"
+        assert (
+            prototype is default_buffer_prototype
+        ), "prototype is not supported within shards currently"
         return self.shard_dict.get(self.chunk_coords)
 
 
@@ -391,7 +396,7 @@ async def _decode_single(
         )
 
         # setup output array
-        out = NDBuffer.create(
+        out = chunk_spec.prototype.nd_buffer.create(
             shape=shard_shape, dtype=shard_spec.dtype, order=shard_spec.order, fill_value=0
         )
         shard_dict = await _ShardReader.from_bytes(shard_bytes, self, chunks_per_shard)
@@ -434,7 +439,7 @@ async def _decode_partial_single(
         )
 
         # setup output array
-        out = NDBuffer.create(
+        out = shard_spec.prototype.nd_buffer.create(
             shape=indexer.shape, dtype=shard_spec.dtype, order=shard_spec.order, fill_value=0
         )
 
@@ -445,7 +450,11 @@
         shard_dict: ShardMapping = {}
         if self._is_total_shard(all_chunk_coords, chunks_per_shard):
             # read entire shard
-            shard_dict_maybe = await self._load_full_shard_maybe(byte_getter, chunks_per_shard)
+            shard_dict_maybe = await self._load_full_shard_maybe(
+                byte_getter=byte_getter,
+                prototype=chunk_spec.prototype,
+                chunks_per_shard=chunks_per_shard,
+            )
             if shard_dict_maybe is None:
                 return None
             shard_dict = shard_dict_maybe
@@ -458,7 +467,9 @@
             for chunk_coords in all_chunk_coords:
                 chunk_byte_slice = shard_index.get_chunk_slice(chunk_coords)
                 if chunk_byte_slice:
-                    chunk_bytes = await byte_getter.get(chunk_byte_slice)
+                    chunk_bytes = await byte_getter.get(
+                        prototype=chunk_spec.prototype, byte_range=chunk_byte_slice
+                    )
                     if chunk_bytes:
                         shard_dict[chunk_coords] = chunk_bytes
@@ -525,7 +536,11 @@ async def _encode_partial_single(
         chunk_spec = self._get_chunk_spec(shard_spec)
 
         shard_dict = _MergingShardBuilder(
-            await self._load_full_shard_maybe(byte_setter, chunks_per_shard)
+            await self._load_full_shard_maybe(
+                byte_getter=byte_setter,
+                prototype=chunk_spec.prototype,
+                chunks_per_shard=chunks_per_shard,
+            )
             or _ShardReader.create_empty(chunks_per_shard),
             _ShardBuilder.create_empty(chunks_per_shard),
         )
@@ -607,6 +622,7 @@ def _get_index_chunk_spec(self, chunks_per_shard: ChunkCoords) -> ArraySpec:
             dtype=np.dtype("<u8"),
             fill_value=MAX_UINT_64,
             order="C",
+            prototype=default_buffer_prototype,
         )
 
     def _get_chunk_spec(self, shard_spec: ArraySpec) -> ArraySpec:
@@ -615,6 +631,7 @@ def _get_chunk_spec(self, shard_spec: ArraySpec) -> ArraySpec:
             dtype=shard_spec.dtype,
             fill_value=shard_spec.fill_value,
             order=shard_spec.order,
+            prototype=shard_spec.prototype,
         )
 
     def _get_chunks_per_shard(self, shard_spec: ArraySpec) -> ChunkCoords:
@@ -632,9 +649,13 @@ async def _load_shard_index_maybe(
     ) -> _ShardIndex | None:
         shard_index_size = self._shard_index_size(chunks_per_shard)
         if self.index_location == ShardingCodecIndexLocation.start:
-            index_bytes = await byte_getter.get((0, shard_index_size))
+            index_bytes = await byte_getter.get(
+                prototype=default_buffer_prototype, byte_range=(0, shard_index_size)
+            )
         else:
-            index_bytes = await byte_getter.get((-shard_index_size, None))
+            index_bytes = await byte_getter.get(
+                prototype=default_buffer_prototype, byte_range=(-shard_index_size, None)
+            )
         if index_bytes is not None:
             return await self._decode_shard_index(index_bytes, chunks_per_shard)
         return None
@@ -647,9 +668,9 @@ async def _load_shard_index(
         ) or _ShardIndex.create_empty(chunks_per_shard)
 
     async def _load_full_shard_maybe(
-        self, byte_getter: ByteGetter, chunks_per_shard: ChunkCoords
+        self, byte_getter: ByteGetter, prototype: BufferPrototype, chunks_per_shard: ChunkCoords
     ) -> _ShardReader | None:
-        shard_bytes = await byte_getter.get()
+        
shard_bytes = await byte_getter.get(prototype=prototype) return ( await _ShardReader.from_bytes(shard_bytes, self, chunks_per_shard) diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 9fcee4e66b..33dab21fb6 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -7,9 +7,10 @@ import numpy as np from zarr.abc.codec import ArrayArrayCodec +from zarr.array_spec import ArraySpec from zarr.buffer import NDBuffer from zarr.codecs.registry import register_codec -from zarr.common import JSON, ArraySpec, ChunkCoordsLike, parse_named_configuration +from zarr.common import JSON, ChunkCoordsLike, parse_named_configuration if TYPE_CHECKING: from typing import TYPE_CHECKING @@ -64,13 +65,12 @@ def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: return self def resolve_metadata(self, chunk_spec: ArraySpec) -> ArraySpec: - from zarr.common import ArraySpec - return ArraySpec( shape=tuple(chunk_spec.shape[self.order[i]] for i in range(chunk_spec.ndim)), dtype=chunk_spec.dtype, fill_value=chunk_spec.fill_value, order=chunk_spec.order, + prototype=chunk_spec.prototype, ) async def _decode_single( @@ -85,7 +85,7 @@ async def _decode_single( async def _encode_single( self, chunk_array: NDBuffer, - chunk_spec: ArraySpec, + _chunk_spec: ArraySpec, ) -> NDBuffer | None: chunk_array = chunk_array.transpose(self.order) return chunk_array diff --git a/src/zarr/codecs/zstd.py b/src/zarr/codecs/zstd.py index 451fae8b37..4c5afba00b 100644 --- a/src/zarr/codecs/zstd.py +++ b/src/zarr/codecs/zstd.py @@ -7,15 +7,14 @@ from zstandard import ZstdCompressor, ZstdDecompressor from zarr.abc.codec import BytesBytesCodec +from zarr.array_spec import ArraySpec from zarr.buffer import Buffer, as_numpy_array_wrapper from zarr.codecs.registry import register_codec -from zarr.common import parse_named_configuration, to_thread +from zarr.common import JSON, parse_named_configuration, to_thread if TYPE_CHECKING: from typing_extensions import Self - from zarr.common import JSON, ArraySpec - def parse_zstd_level(data: JSON) -> int: if isinstance(data, int): @@ -64,16 +63,20 @@ def _decompress(self, data: npt.NDArray[Any]) -> bytes: async def _decode_single( self, chunk_bytes: Buffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer: - return await to_thread(as_numpy_array_wrapper, self._decompress, chunk_bytes) + return await to_thread( + as_numpy_array_wrapper, self._decompress, chunk_bytes, chunk_spec.prototype + ) async def _encode_single( self, chunk_bytes: Buffer, - _chunk_spec: ArraySpec, + chunk_spec: ArraySpec, ) -> Buffer | None: - return await to_thread(as_numpy_array_wrapper, self._compress, chunk_bytes) + return await to_thread( + as_numpy_array_wrapper, self._compress, chunk_bytes, chunk_spec.prototype + ) def compute_encoded_size(self, _input_byte_length: int, _chunk_spec: ArraySpec) -> int: raise NotImplementedError diff --git a/src/zarr/common.py b/src/zarr/common.py index ec5d870f92..bca9f171af 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -5,7 +5,6 @@ import functools import operator from collections.abc import Iterable -from dataclasses import dataclass from enum import Enum from typing import ( TYPE_CHECKING, @@ -91,31 +90,6 @@ def parse_enum(data: JSON, cls: type[E]) -> E: raise ValueError(f"Value must be one of {list(enum_names(cls))!r}. 
Got {data} instead.") -@dataclass(frozen=True) -class ArraySpec: - shape: ChunkCoords - dtype: np.dtype[Any] - fill_value: Any - order: Literal["C", "F"] - - def __init__( - self, shape: ChunkCoords, dtype: np.dtype[Any], fill_value: Any, order: Literal["C", "F"] - ) -> None: - shape_parsed = parse_shapelike(shape) - dtype_parsed = parse_dtype(dtype) - fill_value_parsed = parse_fill_value(fill_value) - order_parsed = parse_order(order) - - object.__setattr__(self, "shape", shape_parsed) - object.__setattr__(self, "dtype", dtype_parsed) - object.__setattr__(self, "fill_value", fill_value_parsed) - object.__setattr__(self, "order", order_parsed) - - @property - def ndim(self) -> int: - return len(self.shape) - - def parse_name(data: JSON, expected: str | None = None) -> str: if isinstance(data, str): if expected is None or data == expected: diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index ca8cf1cdd2..bcb70bd4b2 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -5,31 +5,29 @@ from collections.abc import Iterable from dataclasses import dataclass, field, replace from enum import Enum -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Literal import numpy as np import numpy.typing as npt from zarr.abc.codec import Codec, CodecPipeline from zarr.abc.metadata import Metadata -from zarr.buffer import Buffer +from zarr.buffer import Buffer, BufferPrototype, default_buffer_prototype from zarr.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator from zarr.codecs._v2 import V2Compressor, V2Filters if TYPE_CHECKING: - from typing import Literal - from typing_extensions import Self import numcodecs.abc +from zarr.array_spec import ArraySpec from zarr.common import ( JSON, ZARR_JSON, ZARRAY_JSON, ZATTRS_JSON, - ArraySpec, ChunkCoords, parse_dtype, parse_fill_value, @@ -137,7 +135,9 @@ def codec_pipeline(self) -> CodecPipeline: pass @abstractmethod - def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: + def get_chunk_spec( + self, _chunk_coords: ChunkCoords, order: Literal["C", "F"], prototype: BufferPrototype + ) -> ArraySpec: pass @abstractmethod @@ -198,6 +198,7 @@ def __init__( dtype=data_type_parsed, fill_value=fill_value_parsed, order="C", # TODO: order is not needed here. + prototype=default_buffer_prototype, # TODO: prototype is not needed here. 
) codecs_parsed = parse_codecs(codecs).evolve_from_array_spec(array_spec) @@ -239,7 +240,9 @@ def ndim(self) -> int: def codec_pipeline(self) -> CodecPipeline: return self.codecs - def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: + def get_chunk_spec( + self, _chunk_coords: ChunkCoords, order: Literal["C", "F"], prototype: BufferPrototype + ) -> ArraySpec: assert isinstance( self.chunk_grid, RegularChunkGrid ), "Currently, only regular chunk grid is supported" @@ -248,6 +251,7 @@ def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) - dtype=self.dtype, fill_value=self.fill_value, order=order, + prototype=prototype, ) def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: @@ -412,12 +416,15 @@ def to_dict(self) -> JSON: return zarray_dict - def get_chunk_spec(self, _chunk_coords: ChunkCoords, order: Literal["C", "F"]) -> ArraySpec: + def get_chunk_spec( + self, _chunk_coords: ChunkCoords, order: Literal["C", "F"], prototype: BufferPrototype + ) -> ArraySpec: return ArraySpec( shape=self.chunk_grid.chunk_shape, dtype=self.dtype, fill_value=self.fill_value, order=order, + prototype=prototype, ) def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index abb08291df..70c39db1b7 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -4,7 +4,7 @@ from typing import Any from zarr.abc.store import Store -from zarr.buffer import Buffer +from zarr.buffer import Buffer, BufferPrototype, default_buffer_prototype from zarr.common import OpenMode from zarr.store.local import LocalStore @@ -26,8 +26,12 @@ def __init__(self, store: Store, path: str | None = None): self.store = store self.path = path or "" - async def get(self, byte_range: tuple[int, int | None] | None = None) -> Buffer | None: - return await self.store.get(self.path, byte_range) + async def get( + self, + prototype: BufferPrototype = default_buffer_prototype, + byte_range: tuple[int, int | None] | None = None, + ) -> Buffer | None: + return await self.store.get(self.path, prototype=prototype, byte_range=byte_range) async def set(self, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: if byte_range is not None: diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 945c6160ad..9238700445 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -6,11 +6,13 @@ from pathlib import Path from zarr.abc.store import Store -from zarr.buffer import Buffer +from zarr.buffer import Buffer, BufferPrototype from zarr.common import OpenMode, concurrent_map, to_thread -def _get(path: Path, byte_range: tuple[int | None, int | None] | None) -> Buffer: +def _get( + path: Path, prototype: BufferPrototype, byte_range: tuple[int | None, int | None] | None +) -> Buffer: """ Fetch a contiguous region of bytes from a file. 
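
Every store's read path now takes the prototype explicitly; the `MemoryStore` and `RemoteStore` hunks below repeat the same signature change. A sketch of what a minimal third-party store's `get` could look like under the new contract (`TinyStore` and its dict backing are hypothetical; the `(start, length)` byte-range handling mirrors `_get` above, with negative offsets omitted for brevity):

```python
from zarr.buffer import Buffer, BufferPrototype


class TinyStore:
    """Hypothetical in-memory store; only the allocation pattern matters."""

    def __init__(self) -> None:
        self._blobs: dict[str, bytes] = {}

    async def get(
        self,
        key: str,
        prototype: BufferPrototype,
        byte_range: tuple[int | None, int | None] | None = None,
    ) -> Buffer | None:
        data = self._blobs.get(key)
        if data is None:
            return None
        if byte_range is not None:
            start = byte_range[0] or 0
            length = byte_range[1]
            end = None if length is None else start + length
            data = data[start:end]
        # Allocate through the caller's prototype rather than the concrete
        # Buffer class, so custom buffer types round-trip unchanged.
        return prototype.buffer.from_bytes(data)
```

The write path keeps its old shape; the `LocalStore` and `MemoryStore` hunks below only drop the legacy `bytes`-to-`Buffer` coercion from `set`.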
@@ -32,7 +34,7 @@ def _get(path: Path, byte_range: tuple[int | None, int | None] | None) -> Buffer end = (start + byte_range[1]) if byte_range[1] is not None else None else: - return Buffer.from_bytes(path.read_bytes()) + return prototype.buffer.from_bytes(path.read_bytes()) with path.open("rb") as f: size = f.seek(0, io.SEEK_END) if start is not None: @@ -43,8 +45,8 @@ def _get(path: Path, byte_range: tuple[int | None, int | None] | None) -> Buffer if end is not None: if end < 0: end = size + end - return Buffer.from_bytes(f.read(end - f.tell())) - return Buffer.from_bytes(f.read()) + return prototype.buffer.from_bytes(f.read(end - f.tell())) + return prototype.buffer.from_bytes(f.read()) def _put( @@ -87,18 +89,23 @@ def __eq__(self, other: object) -> bool: return isinstance(other, type(self)) and self.root == other.root async def get( - self, key: str, byte_range: tuple[int | None, int | None] | None = None + self, + key: str, + prototype: BufferPrototype, + byte_range: tuple[int | None, int | None] | None = None, ) -> Buffer | None: assert isinstance(key, str) path = self.root / key try: - return await to_thread(_get, path, byte_range) + return await to_thread(_get, path, prototype, byte_range) except (FileNotFoundError, IsADirectoryError, NotADirectoryError): return None async def get_partial_values( - self, key_ranges: list[tuple[str, tuple[int | None, int | None]]] + self, + prototype: BufferPrototype, + key_ranges: list[tuple[str, tuple[int | None, int | None]]], ) -> list[Buffer | None]: """ Read byte ranges from multiple keys. @@ -114,15 +121,12 @@ async def get_partial_values( for key, byte_range in key_ranges: assert isinstance(key, str) path = self.root / key - args.append((_get, path, byte_range)) + args.append((_get, path, prototype, byte_range)) return await concurrent_map(args, to_thread, limit=None) # TODO: fix limit async def set(self, key: str, value: Buffer) -> None: self._check_writable() assert isinstance(key, str) - if isinstance(value, bytes | bytearray): # type:ignore[unreachable] - # TODO: to support the v2 tests, we convert bytes to Buffer here - value = Buffer.from_bytes(value) # type:ignore[unreachable] if not isinstance(value, Buffer): raise TypeError("LocalStore.set(): `value` must a Buffer instance") path = self.root / key diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index fd6fadd3ee..d75e8c348c 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -3,7 +3,7 @@ from collections.abc import AsyncGenerator, MutableMapping from zarr.abc.store import Store -from zarr.buffer import Buffer +from zarr.buffer import Buffer, BufferPrototype from zarr.common import OpenMode, concurrent_map from zarr.store.core import _normalize_interval_index @@ -30,7 +30,10 @@ def __repr__(self) -> str: return f"MemoryStore({str(self)!r})" async def get( - self, key: str, byte_range: tuple[int | None, int | None] | None = None + self, + key: str, + prototype: BufferPrototype, + byte_range: tuple[int | None, int | None] | None = None, ) -> Buffer | None: assert isinstance(key, str) try: @@ -41,9 +44,15 @@ async def get( return None async def get_partial_values( - self, key_ranges: list[tuple[str, tuple[int | None, int | None]]] + self, + prototype: BufferPrototype, + key_ranges: list[tuple[str, tuple[int | None, int | None]]], ) -> list[Buffer | None]: - vals = await concurrent_map(key_ranges, self.get, limit=None) + # All the key-ranges arguments goes with the same prototype + async def _get(key: str, byte_range: tuple[int, int | None]) -> Buffer 
| None: + return await self.get(key, prototype=prototype, byte_range=byte_range) + + vals = await concurrent_map(key_ranges, _get, limit=None) return vals async def exists(self, key: str) -> bool: @@ -52,9 +61,6 @@ async def exists(self, key: str) -> bool: async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None: self._check_writable() assert isinstance(key, str) - if isinstance(value, bytes | bytearray): # type:ignore[unreachable] - # TODO: to support the v2 tests, we convert bytes to Buffer here - value = Buffer.from_bytes(value) # type:ignore[unreachable] if not isinstance(value, Buffer): raise TypeError(f"Expected Buffer. Got {type(value)}.") diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 60217fb72c..3eb057f9b8 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Any from zarr.abc.store import Store -from zarr.buffer import Buffer +from zarr.buffer import Buffer, BufferPrototype from zarr.common import OpenMode from zarr.store.core import _dereference_path @@ -55,7 +55,10 @@ def _make_fs(self) -> tuple[AsyncFileSystem, str]: return fs, root async def get( - self, key: str, byte_range: tuple[int | None, int | None] | None = None + self, + key: str, + prototype: BufferPrototype, + byte_range: tuple[int | None, int | None] | None = None, ) -> Buffer | None: assert isinstance(key, str) fs, root = self._make_fs() diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index cb4dc9f7b5..5929f47049 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -3,7 +3,7 @@ import pytest from zarr.abc.store import Store -from zarr.buffer import Buffer +from zarr.buffer import Buffer, default_buffer_prototype from zarr.store.core import _normalize_interval_index from zarr.testing.utils import assert_bytes_equal @@ -91,7 +91,7 @@ async def test_get( """ data_buf = Buffer.from_bytes(data) self.set(store, key, data_buf) - observed = await store.get(key, byte_range=byte_range) + observed = await store.get(key, prototype=default_buffer_prototype, byte_range=byte_range) start, length = _normalize_interval_index(data_buf, interval=byte_range) expected = data_buf[start : start + length] assert_bytes_equal(observed, expected) @@ -125,7 +125,9 @@ async def test_get_partial_values( self.set(store, key, Buffer.from_bytes(bytes(key, encoding="utf-8"))) # read back just part of it - observed_maybe = await store.get_partial_values(key_ranges=key_ranges) + observed_maybe = await store.get_partial_values( + prototype=default_buffer_prototype, key_ranges=key_ranges + ) observed: list[Buffer] = [] expected: list[Buffer] = [] @@ -136,7 +138,7 @@ async def test_get_partial_values( for idx in range(len(observed)): key, byte_range = key_ranges[idx] - result = await store.get(key, byte_range=byte_range) + result = await store.get(key, prototype=default_buffer_prototype, byte_range=byte_range) assert result is not None expected.append(result) diff --git a/tests/v3/package_with_entrypoint/__init__.py b/tests/v3/package_with_entrypoint/__init__.py index b8bf903c01..6368e5b236 100644 --- a/tests/v3/package_with_entrypoint/__init__.py +++ b/tests/v3/package_with_entrypoint/__init__.py @@ -1,7 +1,8 @@ from numpy import ndarray from zarr.abc.codec import ArrayBytesCodec -from zarr.common import ArraySpec, BytesLike +from zarr.array_spec import ArraySpec +from zarr.common import BytesLike class TestCodec(ArrayBytesCodec): diff --git a/tests/v3/test_buffer.py 
b/tests/v3/test_buffer.py
index 2f58d116fe..e814afef15 100644
--- a/tests/v3/test_buffer.py
+++ b/tests/v3/test_buffer.py
@@ -8,7 +8,15 @@
 import pytest
 
 from zarr.array import AsyncArray
-from zarr.buffer import ArrayLike, NDArrayLike, NDBuffer
+from zarr.buffer import ArrayLike, Buffer, BufferPrototype, NDArrayLike, NDBuffer
+from zarr.codecs.blosc import BloscCodec
+from zarr.codecs.bytes import BytesCodec
+from zarr.codecs.crc32c_ import Crc32cCodec
+from zarr.codecs.gzip import GzipCodec
+from zarr.codecs.transpose import TransposeCodec
+from zarr.codecs.zstd import ZstdCodec
+from zarr.store.core import StorePath
+from zarr.store.memory import MemoryStore
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -17,7 +25,9 @@ class MyNDArrayLike(np.ndarray):
     """An example of a ndarray-like class"""
 
-    pass
+
+class MyBuffer(Buffer):
+    """Example of a custom Buffer that handles ArrayLike"""
 
 
 class MyNDBuffer(NDBuffer):
@@ -39,6 +49,28 @@ def create(
         return ret
 
 
+class MyStore(MemoryStore):
+    """Example of a custom Store that expects MyBuffer for all its non-metadata values
+
+    We assume that keys containing "json" are metadata
+    """
+
+    async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None:
+        if "json" not in key:
+            assert isinstance(value, MyBuffer)
+        await super().set(key, value, byte_range)
+
+    async def get(
+        self,
+        key: str,
+        prototype: BufferPrototype,
+        byte_range: tuple[int, int | None] | None = None,
+    ) -> Buffer | None:
+        if "json" not in key:
+            assert prototype.buffer is MyBuffer
+        return await super().get(key, prototype=prototype, byte_range=byte_range)
+
+
 def test_nd_array_like(xp):
     ary = xp.arange(10)
     assert isinstance(ary, ArrayLike)
@@ -46,10 +78,12 @@
 
 @pytest.mark.asyncio
-async def test_async_array_factory(store_path):
+async def test_async_array_prototype():
+    """Test the use of a custom buffer prototype"""
+
     expect = np.zeros((9, 9), dtype="uint16", order="F")
     a = await AsyncArray.create(
-        store_path,
+        StorePath(MyStore(mode="w")) / "test_async_array_prototype",
         shape=expect.shape,
         chunk_shape=(5, 5),
         dtype=expect.dtype,
@@ -57,11 +91,45 @@
     )
     expect[1:4, 3:6] = np.ones((3, 3))
 
+    my_prototype = BufferPrototype(buffer=MyBuffer, nd_buffer=MyNDBuffer)
+
     await a.setitem(
         selection=(slice(1, 4), slice(3, 6)),
         value=np.ones((3, 3)),
-        factory=MyNDBuffer.from_ndarray_like,
+        prototype=my_prototype,
+    )
+    got = await a.getitem(selection=(slice(0, 9), slice(0, 9)), prototype=my_prototype)
+    assert isinstance(got, MyNDArrayLike)
+    assert np.array_equal(expect, got)
+
+
+@pytest.mark.asyncio
+async def test_codecs_use_of_prototype():
+    expect = np.zeros((10, 10), dtype="uint16", order="F")
+    a = await AsyncArray.create(
+        StorePath(MyStore(mode="w")) / "test_codecs_use_of_prototype",
+        shape=expect.shape,
+        chunk_shape=(5, 5),
+        dtype=expect.dtype,
+        fill_value=0,
+        codecs=[
+            TransposeCodec(order=(1, 0)),
+            BytesCodec(),
+            BloscCodec(),
+            Crc32cCodec(),
+            GzipCodec(),
+            ZstdCodec(),
+        ],
+    )
+    expect[:] = np.arange(100).reshape(10, 10)
+
+    my_prototype = BufferPrototype(buffer=MyBuffer, nd_buffer=MyNDBuffer)
+
+    await a.setitem(
+        selection=(slice(0, 10), slice(0, 10)),
+        value=expect[:],
+        prototype=my_prototype,
     )
-    got = await a.getitem(selection=(slice(0, 9), slice(0, 9)), factory=MyNDBuffer.create)
+    got = await a.getitem(selection=(slice(0, 10), slice(0, 10)), prototype=my_prototype)
     assert isinstance(got, MyNDArrayLike)
     assert np.array_equal(expect, got)
diff --git a/tests/v3/test_group.py 
b/tests/v3/test_group.py index c529e2491f..e11af748b3 100644 --- a/tests/v3/test_group.py +++ b/tests/v3/test_group.py @@ -49,7 +49,7 @@ def test_group_children(store: MemoryStore | LocalStore) -> None: # add an extra object under a directory-like prefix in the domain of the group. # this creates a directory with a random key in it # this should not show up as a member - sync(store.set(f"{path}/extra_directory/extra_object-2", b"000000")) + sync(store.set(f"{path}/extra_directory/extra_object-2", Buffer.from_bytes(b"000000"))) members_observed = group.members # members are not guaranteed to be ordered, so sort before comparing assert sorted(dict(members_observed)) == sorted(members_expected) diff --git a/tests/v3/test_indexing.py b/tests/v3/test_indexing.py index 9ce485945b..00ea947b49 100644 --- a/tests/v3/test_indexing.py +++ b/tests/v3/test_indexing.py @@ -12,7 +12,7 @@ import zarr from zarr.abc.store import Store -from zarr.buffer import NDBuffer +from zarr.buffer import BufferPrototype, NDBuffer from zarr.common import ChunkCoords from zarr.indexing import ( make_slice_selection, @@ -51,10 +51,10 @@ def __init__(self): super().__init__(mode="w") self.counter = Counter() - async def get(self, key, byte_range=None): + async def get(self, key, prototype: BufferPrototype, byte_range=None): key_suffix = "/".join(key.split("/")[1:]) self.counter["__getitem__", key_suffix] += 1 - return await super().get(key, byte_range) + return await super().get(key, prototype, byte_range) async def set(self, key, value, byte_range=None): key_suffix = "/".join(key.split("/")[1:]) @@ -225,7 +225,6 @@ def test_get_basic_selection_0d(store: StorePath): def _test_get_basic_selection(a, z, selection): - print(a, z, selection) expect = a[selection] actual = z.get_basic_selection(selection) assert_array_equal(expect, actual) From ee30347020d7ad6335a6d1cdf55c61c6160bbfa7 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Fri, 7 Jun 2024 08:50:47 -0700 Subject: [PATCH 0571/1078] Feature: Top level V3 API (#1884) * feature(api): add top level synchronous and asynchronous api * add group/open_group * minor doc improvement * sync with v3 branch * fix mypy errors * progress integrating store mode * basic tests are passing * docs and missing store utils file * fix parse shapelike test * fix bad merge * respond to reviews --- src/zarr/__init__.py | 88 +++- src/zarr/api/__init__.py | 0 src/zarr/api/asynchronous.py | 929 +++++++++++++++++++++++++++++++++++ src/zarr/api/synchronous.py | 273 ++++++++++ src/zarr/array.py | 8 + src/zarr/common.py | 5 +- src/zarr/convenience.py | 35 ++ src/zarr/creation.py | 37 ++ src/zarr/group.py | 4 +- src/zarr/metadata.py | 4 +- src/zarr/store/__init__.py | 5 +- src/zarr/store/core.py | 34 +- src/zarr/store/memory.py | 2 +- src/zarr/store/utils.py | 25 + src/zarr/testing/store.py | 2 +- tests/v3/test_api.py | 778 +++++++++++++++++++++++++++++ tests/v3/test_common.py | 2 +- 17 files changed, 2167 insertions(+), 64 deletions(-) create mode 100644 src/zarr/api/__init__.py create mode 100644 src/zarr/api/asynchronous.py create mode 100644 src/zarr/api/synchronous.py create mode 100644 src/zarr/convenience.py create mode 100644 src/zarr/creation.py create mode 100644 src/zarr/store/utils.py create mode 100644 tests/v3/test_api.py diff --git a/src/zarr/__init__.py b/src/zarr/__init__.py index fdab564c64..227b0cf63e 100644 --- a/src/zarr/__init__.py +++ b/src/zarr/__init__.py @@ -1,34 +1,68 @@ -from __future__ import annotations - -import zarr.codecs # noqa: F401 from zarr._version import version as 
__version__ +from zarr.api.synchronous import ( + array, + consolidate_metadata, + copy, + copy_all, + copy_store, + create, + empty, + empty_like, + full, + full_like, + group, + load, + ones, + ones_like, + open, + open_array, + open_consolidated, + open_group, + open_like, + save, + save_array, + save_group, + tree, + zeros, + zeros_like, +) from zarr.array import Array, AsyncArray -from zarr.config import config # noqa: F401 +from zarr.config import config from zarr.group import AsyncGroup, Group -from zarr.store import ( - StoreLike, - make_store_path, -) -from zarr.sync import sync as _sync # in case setuptools scm screw up and find version to be 0.0.0 assert not __version__.startswith("0.0.0") - -async def open_auto_async(store: StoreLike) -> AsyncArray | AsyncGroup: - store_path = make_store_path(store) - try: - return await AsyncArray.open(store_path) - except KeyError: - return await AsyncGroup.open(store_path) - - -def open_auto(store: StoreLike) -> Array | Group: - object = _sync( - open_auto_async(store), - ) - if isinstance(object, AsyncArray): - return Array(object) - if isinstance(object, AsyncGroup): - return Group(object) - raise TypeError(f"Unexpected object type. Got {type(object)}.") +__all__ = [ + "__version__", + "config", + "Array", + "AsyncArray", + "Group", + "AsyncGroup", + "tree", + "array", + "consolidate_metadata", + "copy", + "copy_all", + "copy_store", + "create", + "empty", + "empty_like", + "full", + "full_like", + "group", + "load", + "ones", + "ones_like", + "open", + "open_array", + "open_consolidated", + "open_group", + "open_like", + "save", + "save_array", + "save_group", + "zeros", + "zeros_like", +] diff --git a/src/zarr/api/__init__.py b/src/zarr/api/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/zarr/api/asynchronous.py b/src/zarr/api/asynchronous.py new file mode 100644 index 0000000000..52d07fb6fe --- /dev/null +++ b/src/zarr/api/asynchronous.py @@ -0,0 +1,929 @@ +from __future__ import annotations + +import asyncio +import warnings +from collections.abc import Iterable +from typing import Any, Literal, Union, cast + +import numpy as np +import numpy.typing as npt + +from zarr.abc.codec import Codec +from zarr.array import Array, AsyncArray +from zarr.buffer import NDArrayLike +from zarr.chunk_key_encodings import ChunkKeyEncoding +from zarr.common import JSON, ChunkCoords, MemoryOrder, OpenMode, ZarrFormat +from zarr.group import AsyncGroup +from zarr.metadata import ArrayV2Metadata, ArrayV3Metadata +from zarr.store import ( + StoreLike, + make_store_path, +) + +# TODO: this type could use some more thought, noqa to avoid "Variable "asynchronous.ArrayLike" is not valid as a type" +ArrayLike = Union[AsyncArray | Array | npt.NDArray[Any]] # noqa +PathLike = str + + +def _get_shape_chunks(a: ArrayLike | Any) -> tuple[ChunkCoords | None, ChunkCoords | None]: + """helper function to get the shape and chunks from an array-like object""" + shape = None + chunks = None + + if hasattr(a, "shape") and isinstance(a.shape, tuple): + shape = a.shape + + if hasattr(a, "chunks") and isinstance(a.chunks, tuple) and (len(a.chunks) == len(a.shape)): + chunks = a.chunks + + elif hasattr(a, "chunklen"): + # bcolz carray + chunks = (a.chunklen,) + a.shape[1:] + + return shape, chunks + + +def _like_args(a: ArrayLike, kwargs: dict[str, Any]) -> dict[str, Any]: + """set default values for shape and chunks if they are not present in the array-like object""" + + new = kwargs.copy() + + shape, chunks = _get_shape_chunks(a) + if shape is 
not None: + new["shape"] = shape + if chunks is not None: + new["chunks"] = chunks + + if hasattr(a, "dtype"): + new["dtype"] = a.dtype + + if isinstance(a, AsyncArray): + new["order"] = a.order + if isinstance(a.metadata, ArrayV2Metadata): + new["compressor"] = a.metadata.compressor + new["filters"] = a.metadata.filters + + if isinstance(a.metadata, ArrayV3Metadata): + new["codecs"] = a.metadata.codecs + else: + raise ValueError(f"Unsupported zarr format: {a.metadata.zarr_format}") + else: + # TODO: set default values compressor/codecs + # to do this, we may need to evaluate if this is a v2 or v3 array + # new["compressor"] = "default" + pass + + return new + + +def _handle_zarr_version_or_format( + *, zarr_version: ZarrFormat | None, zarr_format: ZarrFormat | None +) -> ZarrFormat | None: + """handle the deprecated zarr_version kwarg and return zarr_format""" + if zarr_format is not None and zarr_version is not None and zarr_format != zarr_version: + raise ValueError( + f"zarr_format {zarr_format} does not match zarr_version {zarr_version}, please only set one" + ) + if zarr_version is not None: + warnings.warn( + "zarr_version is deprecated, use zarr_format", DeprecationWarning, stacklevel=2 + ) + return zarr_version + return zarr_format + + +def _default_zarr_version() -> ZarrFormat: + """return the default zarr_version""" + # TODO: set default value from config + return 3 + + +async def consolidate_metadata(*args: Any, **kwargs: Any) -> AsyncGroup: + raise NotImplementedError + + +async def copy(*args: Any, **kwargs: Any) -> tuple[int, int, int]: + raise NotImplementedError + + +async def copy_all(*args: Any, **kwargs: Any) -> tuple[int, int, int]: + raise NotImplementedError + + +async def copy_store(*args: Any, **kwargs: Any) -> tuple[int, int, int]: + raise NotImplementedError + + +async def load( + *, + store: StoreLike, + path: str | None = None, + zarr_format: ZarrFormat | None = None, + zarr_version: ZarrFormat | None = None, +) -> NDArrayLike | dict[str, NDArrayLike]: + """Load data from an array or group into memory. + + Parameters + ---------- + store : Store or string + Store or path to directory in file system or name of zip file. + path : str or None, optional + The path within the store from which to load. + + Returns + ------- + out + If the path contains an array, out will be a numpy array. If the path contains + a group, out will be a dict-like object where keys are array names and values + are numpy arrays. + + See Also + -------- + save, savez + + Notes + ----- + If loading data from a group of arrays, data will not be immediately loaded into + memory. Rather, arrays will be loaded into memory as they are requested. + """ + zarr_format = _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + + obj = await open(store=store, path=path, zarr_format=zarr_format) + if isinstance(obj, AsyncArray): + return await obj.getitem(slice(None)) + else: + raise NotImplementedError("loading groups not yet supported") + + +async def open( + *, + store: StoreLike | None = None, + mode: OpenMode | None = None, # type and value changed + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + path: str | None = None, + **kwargs: Any, # TODO: type kwargs as valid args to open_array +) -> AsyncArray | AsyncGroup: + """Convenience function to open a group or array using file-mode-like semantics. 
+ + Parameters + ---------- + store : Store or string, optional + Store or path to directory in file system or name of zip file. + mode : {'r', 'r+', 'a', 'w', 'w-'}, optional + Persistence mode: 'r' means read only (must exist); 'r+' means + read/write (must exist); 'a' means read/write (create if doesn't + exist); 'w' means create (overwrite if exists); 'w-' means create + (fail if exists). + zarr_format : {2, 3, None}, optional + The zarr format to use when saving. + path : str or None, optional + The path within the store to open. + **kwargs + Additional parameters are passed through to :func:`zarr.creation.open_array` or + :func:`zarr.hierarchy.open_group`. + + Returns + ------- + z : array or group + Return type depends on what exists in the given store. + """ + zarr_format = _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + store_path = make_store_path(store, mode=mode) + + if path is not None: + store_path = store_path / path + + try: + return await open_array(store=store_path, zarr_format=zarr_format, **kwargs) + except KeyError: + return await open_group(store=store_path, zarr_format=zarr_format, **kwargs) + + +async def open_consolidated(*args: Any, **kwargs: Any) -> AsyncGroup: + raise NotImplementedError + + +async def save( + store: StoreLike, + *args: NDArrayLike, + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + path: str | None = None, + **kwargs: Any, # TODO: type kwargs as valid args to save +) -> None: + """Convenience function to save an array or group of arrays to the local file system. + + Parameters + ---------- + store : Store or string + Store or path to directory in file system or name of zip file. + args : ndarray + NumPy arrays with data to save. + zarr_format : {2, 3, None}, optional + The zarr format to use when saving. + path : str or None, optional + The path within the group where the arrays will be saved. + kwargs + NumPy arrays with data to save. + """ + zarr_format = _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + + if len(args) == 0 and len(kwargs) == 0: + raise ValueError("at least one array must be provided") + if len(args) == 1 and len(kwargs) == 0: + await save_array(store, args[0], zarr_format=zarr_format, path=path) + else: + await save_group(store, *args, zarr_format=zarr_format, path=path, **kwargs) + + +async def save_array( + store: StoreLike, + arr: NDArrayLike, + *, + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + path: str | None = None, + **kwargs: Any, # TODO: type kwargs as valid args to create +) -> None: + """Convenience function to save a NumPy array to the local file system, following a + similar API to the NumPy save() function. + + Parameters + ---------- + store : Store or string + Store or path to directory in file system or name of zip file. + arr : ndarray + NumPy array with data to save. + zarr_format : {2, 3, None}, optional + The zarr format to use when saving. + path : str or None, optional + The path within the store where the array will be saved. + kwargs + Passed through to :func:`create`, e.g., compressor. 
+ """ + zarr_format = ( + _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + or _default_zarr_version() + ) + + store_path = make_store_path(store, mode="w") + if path is not None: + store_path = store_path / path + new = await AsyncArray.create( + store_path, + zarr_format=zarr_format, + shape=arr.shape, + dtype=arr.dtype, + chunks=arr.shape, + **kwargs, + ) + await new.setitem(slice(None), arr) + + +async def save_group( + store: StoreLike, + *args: NDArrayLike, + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + path: str | None = None, + **kwargs: NDArrayLike, +) -> None: + """Convenience function to save several NumPy arrays to the local file system, following a + similar API to the NumPy savez()/savez_compressed() functions. + + Parameters + ---------- + store : Store or string + Store or path to directory in file system or name of zip file. + args : ndarray + NumPy arrays with data to save. + zarr_format : {2, 3, None}, optional + The zarr format to use when saving. + path : str or None, optional + Path within the store where the group will be saved. + kwargs + NumPy arrays with data to save. + """ + zarr_format = _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + + if len(args) == 0 and len(kwargs) == 0: + raise ValueError("at least one array must be provided") + aws = [] + for i, arr in enumerate(args): + aws.append(save_array(store, arr, zarr_format=zarr_format, path=f"{path}/arr_{i}")) + for k, arr in kwargs.items(): + _path = f"{path}/{k}" if path is not None else k + aws.append(save_array(store, arr, zarr_format=zarr_format, path=_path)) + await asyncio.gather(*aws) + + +async def tree(*args: Any, **kwargs: Any) -> None: + raise NotImplementedError + + +async def array(data: NDArrayLike, **kwargs: Any) -> AsyncArray: + """Create an array filled with `data`. + + Parameters + ---------- + data : array_like + The data to fill the array with. + kwargs + Passed through to :func:`create`. + + Returns + ------- + array : array + The new array. + """ + + # ensure data is array-like + if not hasattr(data, "shape") or not hasattr(data, "dtype"): + data = np.asanyarray(data) + + # setup dtype + kw_dtype = kwargs.get("dtype") + if kw_dtype is None: + kwargs["dtype"] = data.dtype + else: + kwargs["dtype"] = kw_dtype + + # setup shape and chunks + data_shape, data_chunks = _get_shape_chunks(data) + kwargs["shape"] = data_shape + kw_chunks = kwargs.get("chunks") + if kw_chunks is None: + kwargs["chunks"] = data_chunks + else: + kwargs["chunks"] = kw_chunks + + read_only = kwargs.pop("read_only", False) + if read_only: + raise ValueError("read_only=True is no longer supported when creating new arrays") + + # instantiate array + z = await create(**kwargs) + + # fill with data + await z.setitem(slice(None), data) + + return z + + +async def group( + *, # Note: this is a change from v2 + store: StoreLike | None = None, + overwrite: bool = False, + chunk_store: StoreLike | None = None, # not used + cache_attrs: bool | None = None, # not used, default changed + synchronizer: Any | None = None, # not used + path: str | None = None, + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + meta_array: Any | None = None, # not used + attributes: dict[str, JSON] | None = None, +) -> AsyncGroup: + """Create a group. + + Parameters + ---------- + store : Store or string, optional + Store or path to directory in file system. 
+ overwrite : bool, optional + If True, delete any pre-existing data in `store` at `path` before + creating the group. + chunk_store : Store, optional + Separate storage for chunks. If not provided, `store` will be used + for storage of both chunks and metadata. + cache_attrs : bool, optional + If True (default), user attributes will be cached for attribute read + operations. If False, user attributes are reloaded from the store prior + to all attribute read operations. + synchronizer : object, optional + Array synchronizer. + path : string, optional + Group path within store. + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + zarr_format : {2, 3, None}, optional + The zarr format to use when saving. + + Returns + ------- + g : group + The new group. + """ + + zarr_format = ( + _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + or _default_zarr_version() + ) + + store_path = make_store_path(store) + if path is not None: + store_path = store_path / path + + if chunk_store is not None: + warnings.warn("chunk_store is not yet implemented", RuntimeWarning, stacklevel=2) + if cache_attrs is not None: + warnings.warn("cache_attrs is not yet implemented", RuntimeWarning, stacklevel=2) + if synchronizer is not None: + warnings.warn("synchronizer is not yet implemented", RuntimeWarning, stacklevel=2) + if meta_array is not None: + warnings.warn("meta_array is not yet implemented", RuntimeWarning, stacklevel=2) + + if attributes is None: + attributes = {} + + try: + return await AsyncGroup.open(store=store_path, zarr_format=zarr_format) + except (KeyError, FileNotFoundError): + return await AsyncGroup.create( + store=store_path, + zarr_format=zarr_format, + exists_ok=overwrite, + attributes=attributes, + ) + + +async def open_group( + *, # Note: this is a change from v2 + store: StoreLike | None = None, + mode: OpenMode | None = None, # not used + cache_attrs: bool | None = None, # not used, default changed + synchronizer: Any = None, # not used + path: str | None = None, + chunk_store: StoreLike | None = None, # not used + storage_options: dict[str, Any] | None = None, # not used + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + meta_array: Any | None = None, # not used + attributes: dict[str, JSON] | None = None, +) -> AsyncGroup: + """Open a group using file-mode-like semantics. + + Parameters + ---------- + store : Store or string, optional + Store or path to directory in file system or name of zip file. + mode : {'r', 'r+', 'a', 'w', 'w-'}, optional + Persistence mode: 'r' means read only (must exist); 'r+' means + read/write (must exist); 'a' means read/write (create if doesn't + exist); 'w' means create (overwrite if exists); 'w-' means create + (fail if exists). + cache_attrs : bool, optional + If True (default), user attributes will be cached for attribute read + operations. If False, user attributes are reloaded from the store prior + to all attribute read operations. + synchronizer : object, optional + Array synchronizer. + path : string, optional + Group path within store. + chunk_store : Store or string, optional + Store or path to directory in file system or name of zip file. + storage_options : dict + If using an fsspec URL to create the store, these will be passed to + the backend implementation. Ignored otherwise. 
+ meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + Returns + ------- + g : group + The new group. + """ + + zarr_format = ( + _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + or _default_zarr_version() + ) + + if cache_attrs is not None: + warnings.warn("cache_attrs is not yet implemented", RuntimeWarning, stacklevel=2) + if synchronizer is not None: + warnings.warn("synchronizer is not yet implemented", RuntimeWarning, stacklevel=2) + if meta_array is not None: + warnings.warn("meta_array is not yet implemented", RuntimeWarning, stacklevel=2) + if chunk_store is not None: + warnings.warn("chunk_store is not yet implemented", RuntimeWarning, stacklevel=2) + if storage_options is not None: + warnings.warn("storage_options is not yet implemented", RuntimeWarning, stacklevel=2) + + store_path = make_store_path(store, mode=mode) + if path is not None: + store_path = store_path / path + + if attributes is None: + attributes = {} + + try: + return await AsyncGroup.open(store_path, zarr_format=zarr_format) + except (KeyError, FileNotFoundError): + return await AsyncGroup.create( + store_path, zarr_format=zarr_format, exists_ok=True, attributes=attributes + ) + + +async def create( + shape: ChunkCoords, + *, # Note: this is a change from v2 + chunks: ChunkCoords | None = None, # TODO: v2 allowed chunks=True + dtype: npt.DTypeLike | None = None, + compressor: dict[str, JSON] | None = None, # TODO: default and type change + fill_value: Any = 0, # TODO: need type + order: MemoryOrder | None = None, # TODO: default change + store: str | StoreLike | None = None, + synchronizer: Any | None = None, + overwrite: bool = False, + path: PathLike | None = None, + chunk_store: StoreLike | None = None, + filters: list[dict[str, JSON]] | None = None, # TODO: type has changed + cache_metadata: bool | None = None, + cache_attrs: bool | None = None, + read_only: bool | None = None, + object_codec: Codec | None = None, # TODO: type has changed + dimension_separator: Literal[".", "/"] | None = None, + write_empty_chunks: bool = False, # TODO: default has changed + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + meta_array: Any | None = None, # TODO: need type + attributes: dict[str, JSON] | None = None, + # v3 only + chunk_shape: ChunkCoords | None = None, + chunk_key_encoding: ( + ChunkKeyEncoding + | tuple[Literal["default"], Literal[".", "/"]] + | tuple[Literal["v2"], Literal[".", "/"]] + | None + ) = None, + codecs: Iterable[Codec | dict[str, JSON]] | None = None, + dimension_names: Iterable[str] | None = None, + **kwargs: Any, +) -> AsyncArray: + """Create an array. + + Parameters + ---------- + shape : int or tuple of ints + Array shape. + chunks : int or tuple of ints, optional + Chunk shape. If True, will be guessed from `shape` and `dtype`. If + False, will be set to `shape`, i.e., single chunk for the whole array. + If an int, the chunk size in each dimension will be given by the value + of `chunks`. Default is True. + dtype : string or dtype, optional + NumPy dtype. + compressor : Codec, optional + Primary compressor. + fill_value : object + Default value to use for uninitialized portions of the array. + order : {'C', 'F'}, optional + Memory layout to be used within each chunk. + store : Store or string + Store or path to directory in file system or name of zip file. 
+ synchronizer : object, optional + Array synchronizer. + overwrite : bool, optional + If True, delete all pre-existing data in `store` at `path` before + creating the array. + path : string, optional + Path under which array is stored. + chunk_store : MutableMapping, optional + Separate storage for chunks. If not provided, `store` will be used + for storage of both chunks and metadata. + filters : sequence of Codecs, optional + Sequence of filters to use to encode chunk data prior to compression. + cache_metadata : bool, optional + If True, array configuration metadata will be cached for the + lifetime of the object. If False, array metadata will be reloaded + prior to all data access and modification operations (may incur + overhead depending on storage and data access pattern). + cache_attrs : bool, optional + If True (default), user attributes will be cached for attribute read + operations. If False, user attributes are reloaded from the store prior + to all attribute read operations. + read_only : bool, optional + True if array should be protected against modification. + object_codec : Codec, optional + A codec to encode object arrays, only needed if dtype=object. + dimension_separator : {'.', '/'}, optional + Separator placed between the dimensions of a chunk. + + .. versionadded:: 2.8 + + write_empty_chunks : bool, optional + If True (default), all chunks will be stored regardless of their + contents. If False, each chunk is compared to the array's fill value + prior to storing. If a chunk is uniformly equal to the fill value, then + that chunk is not be stored, and the store entry for that chunk's key + is deleted. This setting enables sparser storage, as only chunks with + non-fill-value data are stored, at the expense of overhead associated + with checking the data of each chunk. + + .. versionadded:: 2.11 + + zarr_format : {2, 3, None}, optional + The zarr format to use when saving. + meta_array : array-like, optional + An array instance to use for determining arrays to create and return + to users. Use `numpy.empty(())` by default. + + .. versionadded:: 2.13 + + Returns + ------- + z : array + The array. 
+ """ + zarr_format = ( + _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format) + or _default_zarr_version() + ) + + if zarr_format == 2 and chunks is None: + chunks = shape + if zarr_format == 3 and chunk_shape is None: + chunk_shape = shape + + if order is not None: + warnings.warn( + "order is deprecated, use config `array.order` instead", + DeprecationWarning, + stacklevel=2, + ) + if synchronizer is not None: + warnings.warn("synchronizer is not yet implemented", RuntimeWarning, stacklevel=2) + if chunk_store is not None: + warnings.warn("chunk_store is not yet implemented", RuntimeWarning, stacklevel=2) + if cache_metadata is not None: + warnings.warn("cache_metadata is not yet implemented", RuntimeWarning, stacklevel=2) + if cache_attrs is not None: + warnings.warn("cache_attrs is not yet implemented", RuntimeWarning, stacklevel=2) + if object_codec is not None: + warnings.warn("object_codec is not yet implemented", RuntimeWarning, stacklevel=2) + if dimension_separator is not None: + if zarr_format == 3: + raise ValueError( + "dimension_separator is not supported for zarr format 3, use chunk_key_encoding instead" + ) + else: + warnings.warn( + "dimension_separator is not yet implemented", RuntimeWarning, stacklevel=2 + ) + if write_empty_chunks: + warnings.warn("write_empty_chunks is not yet implemented", RuntimeWarning, stacklevel=2) + if meta_array is not None: + warnings.warn("meta_array is not yet implemented", RuntimeWarning, stacklevel=2) + + mode = cast(OpenMode, "r" if read_only else "w") + store_path = make_store_path(store, mode=mode) + if path is not None: + store_path = store_path / path + + return await AsyncArray.create( + store_path, + shape=shape, + chunks=chunks, + dtype=dtype, + compressor=compressor, + fill_value=fill_value, + exists_ok=overwrite, # TODO: name change + filters=filters, + dimension_separator=dimension_separator, + zarr_format=zarr_format, + chunk_shape=chunk_shape, + chunk_key_encoding=chunk_key_encoding, + codecs=codecs, + dimension_names=dimension_names, + attributes=attributes, + **kwargs, + ) + + +async def empty(shape: ChunkCoords, **kwargs: Any) -> AsyncArray: + """Create an empty array. + + Parameters + ---------- + shape : int or tuple of int + Shape of the empty array. + **kwargs + Keyword arguments passed to :func:`zarr.api.asynchronous.create`. + + Notes + ----- + The contents of an empty Zarr array are not defined. On attempting to + retrieve data from an empty Zarr array, any values may be returned, + and these are not guaranteed to be stable from one access to the next. + """ + return await create(shape=shape, fill_value=None, **kwargs) + + +async def empty_like(a: ArrayLike, **kwargs: Any) -> AsyncArray: + """Create an empty array like `a`. + + Parameters + ---------- + a : array-like + The array to create an empty array like. + **kwargs + Keyword arguments passed to :func:`zarr.api.asynchronous.create`. + + Returns + ------- + Array + The new array. + """ + like_kwargs = _like_args(a, kwargs) + return await empty(**like_kwargs) + + +# TODO: add type annotations for fill_value and kwargs +async def full(shape: ChunkCoords, fill_value: Any, **kwargs: Any) -> AsyncArray: + """Create an array, with `fill_value` being used as the default value for + uninitialized portions of the array. + + Parameters + ---------- + shape : int or tuple of int + Shape of the empty array. + fill_value : scalar + Fill value. + **kwargs + Keyword arguments passed to :func:`zarr.api.asynchronous.create`. 
+
+    Returns
+    -------
+    Array
+        The new array.
+    """
+    return await create(shape=shape, fill_value=fill_value, **kwargs)
+
+
+# TODO: add type annotations for kwargs
+async def full_like(a: ArrayLike, **kwargs: Any) -> AsyncArray:
+    """Create a filled array like `a`.
+
+    Parameters
+    ----------
+    a : array-like
+        The array to create the new filled array like.
+    **kwargs
+        Keyword arguments passed to :func:`zarr.api.asynchronous.create`.
+
+    Returns
+    -------
+    Array
+        The new array.
+    """
+    like_kwargs = _like_args(a, kwargs)
+    if isinstance(a, AsyncArray):
+        like_kwargs.setdefault("fill_value", a.metadata.fill_value)
+    return await full(**like_kwargs)
+
+
+async def ones(shape: ChunkCoords, **kwargs: Any) -> AsyncArray:
+    """Create an array, with one being used as the default value for
+    uninitialized portions of the array.
+
+    Parameters
+    ----------
+    shape : int or tuple of int
+        Shape of the array.
+    **kwargs
+        Keyword arguments passed to :func:`zarr.api.asynchronous.create`.
+
+    Returns
+    -------
+    Array
+        The new array.
+    """
+    return await create(shape=shape, fill_value=1, **kwargs)
+
+
+async def ones_like(a: ArrayLike, **kwargs: Any) -> AsyncArray:
+    """Create an array of ones like `a`.
+
+    Parameters
+    ----------
+    a : array-like
+        The array to create the new array of ones like.
+    **kwargs
+        Keyword arguments passed to :func:`zarr.api.asynchronous.create`.
+
+    Returns
+    -------
+    Array
+        The new array.
+    """
+    like_kwargs = _like_args(a, kwargs)
+    return await ones(**like_kwargs)
+
+
+async def open_array(
+    *,  # note: this is a change from v2
+    store: StoreLike | None = None,
+    zarr_version: ZarrFormat | None = None,  # deprecated
+    zarr_format: ZarrFormat | None = None,
+    path: PathLike | None = None,
+    **kwargs: Any,  # TODO: type kwargs as valid args to save
+) -> AsyncArray:
+    """Open an array using file-mode-like semantics.
+
+    Parameters
+    ----------
+    store : Store or string
+        Store or path to directory in file system or name of zip file.
+    zarr_format : {2, 3, None}, optional
+        The zarr format to use when saving.
+    path : string, optional
+        Path in store to array.
+    **kwargs
+        Any keyword arguments to pass to the array constructor.
+
+    Returns
+    -------
+    AsyncArray
+        The opened array.
+    """
+
+    store_path = make_store_path(store)
+    if path is not None:
+        store_path = store_path / path
+
+    zarr_format = _handle_zarr_version_or_format(zarr_version=zarr_version, zarr_format=zarr_format)
+
+    try:
+        return await AsyncArray.open(store_path, zarr_format=zarr_format)
+    except KeyError:
+        if not store_path.store.writeable:
+            raise
+
+    # if array was not found, create it
+    return await create(store=store, path=path, zarr_format=zarr_format, **kwargs)
+
+
+async def open_like(a: ArrayLike, path: str, **kwargs: Any) -> AsyncArray:
+    """Open a persistent array like `a`.
+
+    Parameters
+    ----------
+    a : Array
+        The shape and data-type of `a` define these same attributes of the returned array.
+    path : str
+        The path to the new array.
+    **kwargs
+        Any keyword arguments to pass to the array constructor.
+
+    Returns
+    -------
+    AsyncArray
+        The opened array.
+    """
+    like_kwargs = _like_args(a, kwargs)
+    if isinstance(a, (AsyncArray | Array)):
+        like_kwargs.setdefault("fill_value", a.metadata.fill_value)
+    return await open_array(path=path, **like_kwargs)
+
+
+async def zeros(shape: ChunkCoords, **kwargs: Any) -> AsyncArray:
+    """Create an array, with zero being used as the default value for
+    uninitialized portions of the array.
+
+    Parameters
+    ----------
+    shape : int or tuple of int
+        Shape of the array.
+    **kwargs
+        Keyword arguments passed to :func:`zarr.api.asynchronous.create`.
+
+    Returns
+    -------
+    Array
+        The new array.
+    """
+    return await create(shape=shape, fill_value=0, **kwargs)
+
+
+async def zeros_like(a: ArrayLike, **kwargs: Any) -> AsyncArray:
+    """Create an array of zeros like `a`.
+
+    Parameters
+    ----------
+    a : array-like
+        The array to create the new array of zeros like.
+    **kwargs
+        Keyword arguments passed to :func:`zarr.api.asynchronous.create`.
+
+    Returns
+    -------
+    Array
+        The new array.
+    """
+    like_kwargs = _like_args(a, kwargs)
+    return await zeros(**like_kwargs)
diff --git a/src/zarr/api/synchronous.py b/src/zarr/api/synchronous.py
new file mode 100644
index 0000000000..57b9d5630f
--- /dev/null
+++ b/src/zarr/api/synchronous.py
@@ -0,0 +1,273 @@
+from __future__ import annotations
+
+from typing import Any
+
+import zarr.api.asynchronous as async_api
+from zarr.array import Array, AsyncArray
+from zarr.buffer import NDArrayLike
+from zarr.common import JSON, ChunkCoords, OpenMode, ZarrFormat
+from zarr.group import Group
+from zarr.store import StoreLike
+from zarr.sync import sync
+
+
+def consolidate_metadata(*args: Any, **kwargs: Any) -> Group:
+    return Group(sync(async_api.consolidate_metadata(*args, **kwargs)))
+
+
+def copy(*args: Any, **kwargs: Any) -> tuple[int, int, int]:
+    return sync(async_api.copy(*args, **kwargs))
+
+
+def copy_all(*args: Any, **kwargs: Any) -> tuple[int, int, int]:
+    return sync(async_api.copy_all(*args, **kwargs))
+
+
+def copy_store(*args: Any, **kwargs: Any) -> tuple[int, int, int]:
+    return sync(async_api.copy_store(*args, **kwargs))
+
+
+def load(
+    store: StoreLike, zarr_version: ZarrFormat | None = None, path: str | None = None
+) -> NDArrayLike | dict[str, NDArrayLike]:
+    return sync(async_api.load(store=store, zarr_version=zarr_version, path=path))
+
+
+def open(
+    *,
+    store: StoreLike | None = None,
+    mode: OpenMode | None = None,  # type and value changed
+    zarr_version: ZarrFormat | None = None,  # deprecated
+    zarr_format: ZarrFormat | None = None,
+    path: str | None = None,
+    **kwargs: Any,  # TODO: type kwargs as valid args to async_api.open
+) -> Array | Group:
+    obj = sync(
+        async_api.open(
+            store=store,
+            mode=mode,
+            zarr_version=zarr_version,
+            zarr_format=zarr_format,
+            path=path,
+            **kwargs,
+        )
+    )
+    if isinstance(obj, AsyncArray):
+        return Array(obj)
+    else:
+        return Group(obj)
+
+
+def open_consolidated(*args: Any, **kwargs: Any) -> Group:
+    return Group(sync(async_api.open_consolidated(*args, **kwargs)))
+
+
+def save(
+    store: StoreLike,
+    *args: NDArrayLike,
+    zarr_version: ZarrFormat | None = None,  # deprecated
+    zarr_format: ZarrFormat | None = None,
+    path: str | None = None,
+    **kwargs: Any,  # TODO: type kwargs as valid args to async_api.save
+) -> None:
+    return sync(
+        async_api.save(
+            store, *args, zarr_version=zarr_version, zarr_format=zarr_format, path=path, **kwargs
+        )
+    )
+
+
+def save_array(
+    store: StoreLike,
+    arr: NDArrayLike,
+    *,
+    zarr_version: ZarrFormat | None = None,  # deprecated
+    zarr_format: ZarrFormat | None = None,
+    path: str | None = None,
+    **kwargs: Any,  # TODO: type kwargs as valid args to async_api.save_array
+) -> None:
+    return sync(
+        async_api.save_array(
+            store=store,
+            arr=arr,
+            zarr_version=zarr_version,
+            zarr_format=zarr_format,
+            path=path,
+            **kwargs,
+        )
+    )
+
+
+def save_group(
+    store: StoreLike,
+    *args: NDArrayLike,
+    zarr_version: ZarrFormat | None = None,  # deprecated
+ zarr_format: ZarrFormat | None = None, + path: str | None = None, + **kwargs: NDArrayLike, +) -> None: + return sync( + async_api.save_group( + store, + *args, + zarr_version=zarr_version, + zarr_format=zarr_format, + path=path, + **kwargs, + ) + ) + + +def tree(*args: Any, **kwargs: Any) -> None: + return sync(async_api.tree(*args, **kwargs)) + + +# TODO: add type annotations for kwargs +def array(data: NDArrayLike, **kwargs: Any) -> Array: + return Array(sync(async_api.array(data=data, **kwargs))) + + +def group( + *, # Note: this is a change from v2 + store: StoreLike | None = None, + overwrite: bool = False, + chunk_store: StoreLike | None = None, # not used in async_api + cache_attrs: bool | None = None, # default changed, not used in async_api + synchronizer: Any | None = None, # not used in async_api + path: str | None = None, + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + meta_array: Any | None = None, # not used in async_api + attributes: dict[str, JSON] | None = None, +) -> Group: + return Group( + sync( + async_api.group( + store=store, + overwrite=overwrite, + chunk_store=chunk_store, + cache_attrs=cache_attrs, + synchronizer=synchronizer, + path=path, + zarr_version=zarr_version, + zarr_format=zarr_format, + meta_array=meta_array, + attributes=attributes, + ) + ) + ) + + +def open_group( + *, # Note: this is a change from v2 + store: StoreLike | None = None, + mode: OpenMode | None = None, # not used in async api + cache_attrs: bool | None = None, # default changed, not used in async api + synchronizer: Any = None, # not used in async api + path: str | None = None, + chunk_store: StoreLike | None = None, # not used in async api + storage_options: dict[str, Any] | None = None, # not used in async api + zarr_version: ZarrFormat | None = None, # deprecated + zarr_format: ZarrFormat | None = None, + meta_array: Any | None = None, # not used in async api +) -> Group: + return Group( + sync( + async_api.open_group( + store=store, + mode=mode, + cache_attrs=cache_attrs, + synchronizer=synchronizer, + path=path, + chunk_store=chunk_store, + storage_options=storage_options, + zarr_version=zarr_version, + zarr_format=zarr_format, + meta_array=meta_array, + ) + ) + ) + + +# TODO: add type annotations for kwargs +def create(*args: Any, **kwargs: Any) -> Array: + return Array(sync(async_api.create(*args, **kwargs))) + + +# TODO: add type annotations for kwargs +def empty(shape: ChunkCoords, **kwargs: Any) -> Array: + return Array(sync(async_api.empty(shape, **kwargs))) + + +# TODO: move ArrayLike to common module +# TODO: add type annotations for kwargs +def empty_like(a: async_api.ArrayLike, **kwargs: Any) -> Array: + return Array(sync(async_api.empty_like(a, **kwargs))) + + +# TODO: add type annotations for kwargs and fill_value +def full(shape: ChunkCoords, fill_value: Any, **kwargs: Any) -> Array: + return Array(sync(async_api.full(shape=shape, fill_value=fill_value, **kwargs))) + + +# TODO: move ArrayLike to common module +# TODO: add type annotations for kwargs +def full_like(a: async_api.ArrayLike, **kwargs: Any) -> Array: + return Array(sync(async_api.full_like(a, **kwargs))) + + +# TODO: add type annotations for kwargs +def ones(shape: ChunkCoords, **kwargs: Any) -> Array: + return Array(sync(async_api.ones(shape, **kwargs))) + + +# TODO: add type annotations for kwargs +def ones_like(a: async_api.ArrayLike, **kwargs: Any) -> Array: + return Array(sync(async_api.ones_like(a, **kwargs))) + + +# TODO: update this once 
async_api.open_array is fully implemented
+def open_array(*args: Any, **kwargs: Any) -> Array:
+    return Array(sync(async_api.open_array(*args, **kwargs)))
+
+
+# TODO: add type annotations for kwargs
+def open_like(a: async_api.ArrayLike, **kwargs: Any) -> Array:
+    return Array(sync(async_api.open_like(a, **kwargs)))
+
+
+# TODO: add type annotations for kwargs
+def zeros(*args: Any, **kwargs: Any) -> Array:
+    return Array(sync(async_api.zeros(*args, **kwargs)))
+
+
+# TODO: add type annotations for kwargs
+def zeros_like(a: async_api.ArrayLike, **kwargs: Any) -> Array:
+    return Array(sync(async_api.zeros_like(a, **kwargs)))
+
+
+consolidate_metadata.__doc__ = async_api.consolidate_metadata.__doc__
+copy.__doc__ = async_api.copy.__doc__
+copy_all.__doc__ = async_api.copy_all.__doc__
+copy_store.__doc__ = async_api.copy_store.__doc__
+load.__doc__ = async_api.load.__doc__
+open.__doc__ = async_api.open.__doc__
+open_consolidated.__doc__ = async_api.open_consolidated.__doc__
+save.__doc__ = async_api.save.__doc__
+save_array.__doc__ = async_api.save_array.__doc__
+save_group.__doc__ = async_api.save_group.__doc__
+tree.__doc__ = async_api.tree.__doc__
+array.__doc__ = async_api.array.__doc__
+group.__doc__ = async_api.group.__doc__
+open_group.__doc__ = async_api.open_group.__doc__
+create.__doc__ = async_api.create.__doc__
+empty.__doc__ = async_api.empty.__doc__
+empty_like.__doc__ = async_api.empty_like.__doc__
+full.__doc__ = async_api.full.__doc__
+full_like.__doc__ = async_api.full_like.__doc__
+ones.__doc__ = async_api.ones.__doc__
+ones_like.__doc__ = async_api.ones_like.__doc__
+open_array.__doc__ = async_api.open_array.__doc__
+open_like.__doc__ = async_api.open_like.__doc__
+zeros.__doc__ = async_api.zeros.__doc__
+zeros_like.__doc__ = async_api.zeros_like.__doc__
diff --git a/src/zarr/array.py b/src/zarr/array.py
index 28b19f44f0..9ac1ce41ec 100644
--- a/src/zarr/array.py
+++ b/src/zarr/array.py
@@ -387,6 +387,10 @@ def dtype(self) -> np.dtype[Any]:
     def attrs(self) -> dict[str, JSON]:
         return self.metadata.attributes
 
+    @property
+    def read_only(self) -> bool:
+        return not self.store_path.store.writeable
+
     @property
     def path(self) -> str:
         """Storage path."""
@@ -693,6 +697,10 @@ def store_path(self) -> StorePath:
     def order(self) -> Literal["C", "F"]:
         return self._async_array.order
 
+    @property
+    def read_only(self) -> bool:
+        return self._async_array.read_only
+
     def __getitem__(self, selection: Selection) -> NDArrayLike:
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
diff --git a/src/zarr/common.py b/src/zarr/common.py
index bca9f171af..9349f9f018 100644
--- a/src/zarr/common.py
+++ b/src/zarr/common.py
@@ -34,6 +34,7 @@
 Selection = slice | SliceSelection
 ZarrFormat = Literal[2, 3]
 JSON = None | str | int | float | Enum | dict[str, "JSON"] | list["JSON"] | tuple["JSON", ...]
+MemoryOrder = Literal["C", "F"]
 OpenMode = Literal["r", "r+", "a", "w", "w-"]
 
 
@@ -134,7 +135,9 @@ def parse_named_configuration(
     return name_parsed, configuration_parsed
 
 
-def parse_shapelike(data: Iterable[int]) -> tuple[int, ...]:
+def parse_shapelike(data: int | Iterable[int]) -> tuple[int, ...]:
+    if isinstance(data, int):
+        return (data,)
     if not isinstance(data, Iterable):
         raise TypeError(f"Expected an iterable.
Got {data} instead.") data_tuple = tuple(data) diff --git a/src/zarr/convenience.py b/src/zarr/convenience.py new file mode 100644 index 0000000000..be0a6b2813 --- /dev/null +++ b/src/zarr/convenience.py @@ -0,0 +1,35 @@ +import warnings + +from zarr.api.synchronous import ( + consolidate_metadata, + copy, + copy_all, + copy_store, + load, + open, + open_consolidated, + save, + save_array, + save_group, + tree, +) + +warnings.warn( + "zarr.convenience is deprecated, use zarr.api.synchronous", + DeprecationWarning, + stacklevel=2, +) + +__all__ = [ + "open", + "save_array", + "save_group", + "save", + "load", + "tree", + "copy_store", + "copy", + "copy_all", + "consolidate_metadata", + "open_consolidated", +] diff --git a/src/zarr/creation.py b/src/zarr/creation.py new file mode 100644 index 0000000000..df3f764610 --- /dev/null +++ b/src/zarr/creation.py @@ -0,0 +1,37 @@ +import warnings + +from zarr.api.synchronous import ( + array, + create, + empty, + empty_like, + full, + full_like, + ones, + ones_like, + open_array, + open_like, + zeros, + zeros_like, +) + +warnings.warn( + "zarr.creation is deprecated, use zarr.api.synchronous", + DeprecationWarning, + stacklevel=2, +) + +__all__ = [ + "create", + "empty", + "zeros", + "ones", + "full", + "array", + "open_array", + "empty_like", + "zeros_like", + "ones_like", + "full_like", + "open_like", +] diff --git a/src/zarr/group.py b/src/zarr/group.py index ccad0e5661..4bb4b6b4dd 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -200,7 +200,7 @@ async def getitem( key: str, ) -> AsyncArray | AsyncGroup: store_path = self.store_path / key - logger.warning("key=%s, store_path=%s", key, store_path) + logger.debug("key=%s, store_path=%s", key, store_path) # Note: # in zarr-python v2, we first check if `key` references an Array, else if `key` references @@ -316,7 +316,7 @@ async def create_array( self, path: str, shape: ChunkCoords, - dtype: npt.DTypeLike, + dtype: npt.DTypeLike = "float64", fill_value: Any | None = None, attributes: dict[str, JSON] | None = None, # v3 only diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index bcb70bd4b2..8329bd9200 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -29,6 +29,7 @@ ZARRAY_JSON, ZATTRS_JSON, ChunkCoords, + ZarrFormat, parse_dtype, parse_fill_value, parse_shapelike, @@ -115,9 +116,10 @@ def from_dtype(cls, dtype: np.dtype[Any]) -> DataType: @dataclass(frozen=True, kw_only=True) class ArrayMetadata(Metadata, ABC): shape: ChunkCoords - chunk_grid: ChunkGrid fill_value: Any + chunk_grid: ChunkGrid attributes: dict[str, JSON] + zarr_format: ZarrFormat @property @abstractmethod diff --git a/src/zarr/store/__init__.py b/src/zarr/store/__init__.py index b7cd6cc0fd..fbdcdb9255 100644 --- a/src/zarr/store/__init__.py +++ b/src/zarr/store/__init__.py @@ -1,7 +1,6 @@ -# flake8: noqa -from zarr.store.core import StorePath, StoreLike, make_store_path -from zarr.store.remote import RemoteStore +from zarr.store.core import StoreLike, StorePath, make_store_path from zarr.store.local import LocalStore from zarr.store.memory import MemoryStore +from zarr.store.remote import RemoteStore __all__ = ["StorePath", "StoreLike", "make_store_path", "RemoteStore", "LocalStore", "MemoryStore"] diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index 70c39db1b7..5fbde208f4 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -7,6 +7,7 @@ from zarr.buffer import Buffer, BufferPrototype, default_buffer_prototype from zarr.common import OpenMode from zarr.store.local import 
LocalStore +from zarr.store.memory import MemoryStore def _dereference_path(root: str, path: str) -> str: @@ -65,7 +66,7 @@ def __eq__(self, other: Any) -> bool: StoreLike = Store | StorePath | Path | str -def make_store_path(store_like: StoreLike, *, mode: OpenMode | None = None) -> StorePath: +def make_store_path(store_like: StoreLike | None, *, mode: OpenMode | None = None) -> StorePath: if isinstance(store_like, StorePath): if mode is not None: assert mode == store_like.store.mode @@ -74,31 +75,10 @@ def make_store_path(store_like: StoreLike, *, mode: OpenMode | None = None) -> S if mode is not None: assert mode == store_like.mode return StorePath(store_like) + elif store_like is None: + if mode is None: + mode = "w" # exception to the default mode = 'r' + return StorePath(MemoryStore(mode=mode)) elif isinstance(store_like, str): - assert mode is not None - return StorePath(LocalStore(Path(store_like), mode=mode)) + return StorePath(LocalStore(Path(store_like), mode=mode or "r")) raise TypeError - - -def _normalize_interval_index( - data: Buffer, interval: None | tuple[int | None, int | None] -) -> tuple[int, int]: - """ - Convert an implicit interval into an explicit start and length - """ - if interval is None: - start = 0 - length = len(data) - else: - maybe_start, maybe_len = interval - if maybe_start is None: - start = 0 - else: - start = maybe_start - - if maybe_len is None: - length = len(data) - start - else: - length = maybe_len - - return (start, length) diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index d75e8c348c..43d65ce836 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -5,7 +5,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer, BufferPrototype from zarr.common import OpenMode, concurrent_map -from zarr.store.core import _normalize_interval_index +from zarr.store.utils import _normalize_interval_index # TODO: this store could easily be extended to wrap any MutableMapping store from v2 diff --git a/src/zarr/store/utils.py b/src/zarr/store/utils.py new file mode 100644 index 0000000000..17c9234221 --- /dev/null +++ b/src/zarr/store/utils.py @@ -0,0 +1,25 @@ +from zarr.buffer import Buffer + + +def _normalize_interval_index( + data: Buffer, interval: None | tuple[int | None, int | None] +) -> tuple[int, int]: + """ + Convert an implicit interval into an explicit start and length + """ + if interval is None: + start = 0 + length = len(data) + else: + maybe_start, maybe_len = interval + if maybe_start is None: + start = 0 + else: + start = maybe_start + + if maybe_len is None: + length = len(data) - start + else: + length = maybe_len + + return (start, length) diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index 5929f47049..3c7f082099 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -4,7 +4,7 @@ from zarr.abc.store import Store from zarr.buffer import Buffer, default_buffer_prototype -from zarr.store.core import _normalize_interval_index +from zarr.store.utils import _normalize_interval_index from zarr.testing.utils import assert_bytes_equal S = TypeVar("S", bound=Store) diff --git a/tests/v3/test_api.py b/tests/v3/test_api.py new file mode 100644 index 0000000000..31e6fbfcd9 --- /dev/null +++ b/tests/v3/test_api.py @@ -0,0 +1,778 @@ +import numpy as np +import pytest +from numpy.testing import assert_array_equal + +import zarr +from zarr import Array, Group +from zarr.abc.store import Store +from zarr.api.synchronous import load, open, open_group, save, save_array, 
save_group + + +def test_open_array(memory_store: Store) -> None: + store = memory_store + + # open array, create if doesn't exist + z = open(store=store, shape=100) + assert isinstance(z, Array) + assert z.shape == (100,) + + # open array, overwrite + store._store_dict = {} + z = open(store=store, shape=200, mode="w") # mode="w" + assert isinstance(z, Array) + assert z.shape == (200,) + + # open array, read-only + ro_store = type(store)(store_dict=store._store_dict, mode="r") + z = open(store=ro_store) + assert isinstance(z, Array) + assert z.shape == (200,) + assert z.read_only + + # path not found + with pytest.raises(ValueError): + open(store="doesnotexist", mode="r") + + +def test_open_group(memory_store: Store) -> None: + store = memory_store + + # open group, create if doesn't exist + g = open_group(store=store) + g.create_group("foo") + assert isinstance(g, Group) + assert "foo" in g + + # open group, overwrite + # g = open_group(store=store) + # assert isinstance(g, Group) + # assert "foo" not in g + + # open group, read-only + ro_store = type(store)(store_dict=store._store_dict, mode="r") + g = open_group(store=ro_store) + assert isinstance(g, Group) + # assert g.read_only + + +def test_save_errors() -> None: + with pytest.raises(ValueError): + # no arrays provided + save_group("data/group.zarr") + with pytest.raises(TypeError): + # no array provided + save_array("data/group.zarr") + with pytest.raises(ValueError): + # no arrays provided + save("data/group.zarr") + + +# def test_lazy_loader(): +# foo = np.arange(100) +# bar = np.arange(100, 0, -1) +# store = "data/group.zarr" +# save(store, foo=foo, bar=bar) +# loader = load(store) +# assert "foo" in loader +# assert "bar" in loader +# assert "baz" not in loader +# assert len(loader) == 2 +# assert sorted(loader) == ["bar", "foo"] +# assert_array_equal(foo, loader["foo"]) +# assert_array_equal(bar, loader["bar"]) +# assert "LazyLoader: " in repr(loader) + + +def test_load_array(memory_store: Store) -> None: + store = memory_store + foo = np.arange(100) + bar = np.arange(100, 0, -1) + save(store, foo=foo, bar=bar) + + # can also load arrays directly into a numpy array + for array_name in ["foo", "bar"]: + array = load(store, path=array_name) + assert isinstance(array, np.ndarray) + if array_name == "foo": + assert_array_equal(foo, array) + else: + assert_array_equal(bar, array) + + +def test_tree() -> None: + g1 = zarr.group() + g1.create_group("foo") + g3 = g1.create_group("bar") + g3.create_group("baz") + g5 = g3.create_group("qux") + g5.create_array("baz", shape=100, chunks=10) + # TODO: complete after tree has been reimplemented + # assert repr(zarr.tree(g1)) == repr(g1.tree()) + # assert str(zarr.tree(g1)) == str(g1.tree()) + + +# @pytest.mark.parametrize("stores_from_path", [False, True]) +# @pytest.mark.parametrize( +# "with_chunk_store,listable", +# [(False, True), (True, True), (False, False)], +# ids=["default-listable", "with_chunk_store-listable", "default-unlistable"], +# ) +# def test_consolidate_metadata(with_chunk_store, listable, monkeypatch, stores_from_path): +# # setup initial data +# if stores_from_path: +# store = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, store) +# if with_chunk_store: +# chunk_store = tempfile.mkdtemp() +# atexit.register(atexit_rmtree, chunk_store) +# else: +# chunk_store = None +# else: +# store = MemoryStore() +# chunk_store = MemoryStore() if with_chunk_store else None +# path = None +# z = group(store, chunk_store=chunk_store, path=path) + +# # Reload the actual store 
implementation in case str +# store_to_copy = z.store + +# z.create_group("g1") +# g2 = z.create_group("g2") +# g2.attrs["hello"] = "world" +# arr = g2.create_array("arr", shape=(20, 20), chunks=(5, 5), dtype="f8") +# assert 16 == arr.nchunks +# assert 0 == arr.nchunks_initialized +# arr.attrs["data"] = 1 +# arr[:] = 1.0 +# assert 16 == arr.nchunks_initialized + +# if stores_from_path: +# # get the actual store class for use with consolidate_metadata +# store_class = z._store +# else: +# store_class = store + +# # perform consolidation +# out = consolidate_metadata(store_class, path=path) +# assert isinstance(out, Group) +# assert ["g1", "g2"] == list(out) +# if not stores_from_path: +# assert isinstance(out._store, ConsolidatedMetadataStore) +# assert ".zmetadata" in store +# meta_keys = [ +# ".zgroup", +# "g1/.zgroup", +# "g2/.zgroup", +# "g2/.zattrs", +# "g2/arr/.zarray", +# "g2/arr/.zattrs", +# ] + +# for key in meta_keys: +# del store[key] + +# # https://github.com/zarr-developers/zarr-python/issues/993 +# # Make sure we can still open consolidated on an unlistable store: +# if not listable: +# fs_memory = pytest.importorskip("fsspec.implementations.memory") +# monkeypatch.setattr(fs_memory.MemoryFileSystem, "isdir", lambda x, y: False) +# monkeypatch.delattr(fs_memory.MemoryFileSystem, "ls") +# fs = fs_memory.MemoryFileSystem() +# store_to_open = FSStore("", fs=fs) +# # copy original store to new unlistable store +# store_to_open.update(store_to_copy) + +# else: +# store_to_open = store + +# # open consolidated +# z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path) +# assert ["g1", "g2"] == list(z2) +# assert "world" == z2.g2.attrs["hello"] +# assert 1 == z2.g2.arr.attrs["data"] +# assert (z2.g2.arr[:] == 1.0).all() +# assert 16 == z2.g2.arr.nchunks +# if listable: +# assert 16 == z2.g2.arr.nchunks_initialized +# else: +# with pytest.raises(NotImplementedError): +# _ = z2.g2.arr.nchunks_initialized + +# if stores_from_path: +# # path string is note a BaseStore subclass so cannot be used to +# # initialize a ConsolidatedMetadataStore. 
+ +# with pytest.raises(ValueError): +# cmd = ConsolidatedMetadataStore(store) +# else: +# # tests del/write on the store + +# cmd = ConsolidatedMetadataStore(store) +# with pytest.raises(PermissionError): +# del cmd[".zgroup"] +# with pytest.raises(PermissionError): +# cmd[".zgroup"] = None + +# # test getsize on the store +# assert isinstance(getsize(cmd), Integral) + +# # test new metadata are not writeable +# with pytest.raises(PermissionError): +# z2.create_group("g3") +# with pytest.raises(PermissionError): +# z2.create_dataset("spam", shape=42, chunks=7, dtype="i4") +# with pytest.raises(PermissionError): +# del z2["g2"] + +# # test consolidated metadata are not writeable +# with pytest.raises(PermissionError): +# z2.g2.attrs["hello"] = "universe" +# with pytest.raises(PermissionError): +# z2.g2.arr.attrs["foo"] = "bar" + +# # test the data are writeable +# z2.g2.arr[:] = 2 +# assert (z2.g2.arr[:] == 2).all() + +# # test invalid modes +# with pytest.raises(ValueError): +# open_consolidated(store, chunk_store=chunk_store, mode="a", path=path) +# with pytest.raises(ValueError): +# open_consolidated(store, chunk_store=chunk_store, mode="w", path=path) +# with pytest.raises(ValueError): +# open_consolidated(store, chunk_store=chunk_store, mode="w-", path=path) + +# # make sure keyword arguments are passed through without error +# open_consolidated( +# store, +# chunk_store=chunk_store, +# path=path, +# cache_attrs=True, +# synchronizer=None, +# ) + + +# @pytest.mark.parametrize( +# "options", +# ( +# {"dimension_separator": "/"}, +# {"dimension_separator": "."}, +# {"dimension_separator": None}, +# ), +# ) +# def test_save_array_separator(tmpdir, options): +# data = np.arange(6).reshape((3, 2)) +# url = tmpdir.join("test.zarr") +# save_array(url, data, **options) + + +# class TestCopyStore(unittest.TestCase): +# _version = 2 + +# def setUp(self): +# source = dict() +# source["foo"] = b"xxx" +# source["bar/baz"] = b"yyy" +# source["bar/qux"] = b"zzz" +# self.source = source + +# def _get_dest_store(self): +# return dict() + +# def test_no_paths(self): +# source = self.source +# dest = self._get_dest_store() +# copy_store(source, dest) +# assert len(source) == len(dest) +# for key in source: +# assert source[key] == dest[key] + +# def test_source_path(self): +# source = self.source +# # paths should be normalized +# for source_path in "bar", "bar/", "/bar", "/bar/": +# dest = self._get_dest_store() +# copy_store(source, dest, source_path=source_path) +# assert 2 == len(dest) +# for key in source: +# if key.startswith("bar/"): +# dest_key = key.split("bar/")[1] +# assert source[key] == dest[dest_key] +# else: +# assert key not in dest + +# def test_dest_path(self): +# source = self.source +# # paths should be normalized +# for dest_path in "new", "new/", "/new", "/new/": +# dest = self._get_dest_store() +# copy_store(source, dest, dest_path=dest_path) +# assert len(source) == len(dest) +# for key in source: +# if self._version == 3: +# dest_key = key[:10] + "new/" + key[10:] +# else: +# dest_key = "new/" + key +# assert source[key] == dest[dest_key] + +# def test_source_dest_path(self): +# source = self.source +# # paths should be normalized +# for source_path in "bar", "bar/", "/bar", "/bar/": +# for dest_path in "new", "new/", "/new", "/new/": +# dest = self._get_dest_store() +# copy_store(source, dest, source_path=source_path, dest_path=dest_path) +# assert 2 == len(dest) +# for key in source: +# if key.startswith("bar/"): +# dest_key = "new/" + key.split("bar/")[1] +# assert 
source[key] == dest[dest_key] +# else: +# assert key not in dest +# assert ("new/" + key) not in dest + +# def test_excludes_includes(self): +# source = self.source + +# # single excludes +# dest = self._get_dest_store() +# excludes = "f.*" +# copy_store(source, dest, excludes=excludes) +# assert len(dest) == 2 + +# root = "" +# assert root + "foo" not in dest + +# # multiple excludes +# dest = self._get_dest_store() +# excludes = "b.z", ".*x" +# copy_store(source, dest, excludes=excludes) +# assert len(dest) == 1 +# assert root + "foo" in dest +# assert root + "bar/baz" not in dest +# assert root + "bar/qux" not in dest + +# # excludes and includes +# dest = self._get_dest_store() +# excludes = "b.*" +# includes = ".*x" +# copy_store(source, dest, excludes=excludes, includes=includes) +# assert len(dest) == 2 +# assert root + "foo" in dest +# assert root + "bar/baz" not in dest +# assert root + "bar/qux" in dest + +# def test_dry_run(self): +# source = self.source +# dest = self._get_dest_store() +# copy_store(source, dest, dry_run=True) +# assert 0 == len(dest) + +# def test_if_exists(self): +# source = self.source +# dest = self._get_dest_store() +# root = "" +# dest[root + "bar/baz"] = b"mmm" + +# # default ('raise') +# with pytest.raises(CopyError): +# copy_store(source, dest) + +# # explicit 'raise' +# with pytest.raises(CopyError): +# copy_store(source, dest, if_exists="raise") + +# # skip +# copy_store(source, dest, if_exists="skip") +# assert 3 == len(dest) +# assert dest[root + "foo"] == b"xxx" +# assert dest[root + "bar/baz"] == b"mmm" +# assert dest[root + "bar/qux"] == b"zzz" + +# # replace +# copy_store(source, dest, if_exists="replace") +# assert 3 == len(dest) +# assert dest[root + "foo"] == b"xxx" +# assert dest[root + "bar/baz"] == b"yyy" +# assert dest[root + "bar/qux"] == b"zzz" + +# # invalid option +# with pytest.raises(ValueError): +# copy_store(source, dest, if_exists="foobar") + + +# def check_copied_array(original, copied, without_attrs=False, expect_props=None): +# # setup +# source_h5py = original.__module__.startswith("h5py.") +# dest_h5py = copied.__module__.startswith("h5py.") +# zarr_to_zarr = not (source_h5py or dest_h5py) +# h5py_to_h5py = source_h5py and dest_h5py +# zarr_to_h5py = not source_h5py and dest_h5py +# h5py_to_zarr = source_h5py and not dest_h5py +# if expect_props is None: +# expect_props = dict() +# else: +# expect_props = expect_props.copy() + +# # common properties in zarr and h5py +# for p in "dtype", "shape", "chunks": +# expect_props.setdefault(p, getattr(original, p)) + +# # zarr-specific properties +# if zarr_to_zarr: +# for p in "compressor", "filters", "order", "fill_value": +# expect_props.setdefault(p, getattr(original, p)) + +# # h5py-specific properties +# if h5py_to_h5py: +# for p in ( +# "maxshape", +# "compression", +# "compression_opts", +# "shuffle", +# "scaleoffset", +# "fletcher32", +# "fillvalue", +# ): +# expect_props.setdefault(p, getattr(original, p)) + +# # common properties with some name differences +# if h5py_to_zarr: +# expect_props.setdefault("fill_value", original.fillvalue) +# if zarr_to_h5py: +# expect_props.setdefault("fillvalue", original.fill_value) + +# # compare properties +# for k, v in expect_props.items(): +# assert v == getattr(copied, k) + +# # compare data +# assert_array_equal(original[:], copied[:]) + +# # compare attrs +# if without_attrs: +# for k in original.attrs.keys(): +# assert k not in copied.attrs +# else: +# if dest_h5py and "filters" in original.attrs: +# # special case in v3 (storing 
filters metadata under attributes) +# # we explicitly do not copy this info over to HDF5 +# original_attrs = original.attrs.asdict().copy() +# original_attrs.pop("filters") +# else: +# original_attrs = original.attrs +# assert sorted(original_attrs.items()) == sorted(copied.attrs.items()) + + +# def check_copied_group(original, copied, without_attrs=False, expect_props=None, shallow=False): +# # setup +# if expect_props is None: +# expect_props = dict() +# else: +# expect_props = expect_props.copy() + +# # compare children +# for k, v in original.items(): +# if hasattr(v, "shape"): +# assert k in copied +# check_copied_array(v, copied[k], without_attrs=without_attrs, expect_props=expect_props) +# elif shallow: +# assert k not in copied +# else: +# assert k in copied +# check_copied_group( +# v, +# copied[k], +# without_attrs=without_attrs, +# shallow=shallow, +# expect_props=expect_props, +# ) + +# # compare attrs +# if without_attrs: +# for k in original.attrs.keys(): +# assert k not in copied.attrs +# else: +# assert sorted(original.attrs.items()) == sorted(copied.attrs.items()) + + +# def test_copy_all(): +# """ +# https://github.com/zarr-developers/zarr-python/issues/269 + +# copy_all used to not copy attributes as `.keys()` does not return hidden `.zattrs`. + +# """ +# original_group = zarr.group(store=MemoryStore(), overwrite=True) +# original_group.attrs["info"] = "group attrs" +# original_subgroup = original_group.create_group("subgroup") +# original_subgroup.attrs["info"] = "sub attrs" + +# destination_group = zarr.group(store=MemoryStore(), overwrite=True) + +# # copy from memory to directory store +# copy_all( +# original_group, +# destination_group, +# dry_run=False, +# ) + +# assert "subgroup" in destination_group +# assert destination_group.attrs["info"] == "group attrs" +# assert destination_group.subgroup.attrs["info"] == "sub attrs" + + +# class TestCopy: +# @pytest.fixture(params=[False, True], ids=["zarr", "hdf5"]) +# def source(self, request, tmpdir): +# def prep_source(source): +# foo = source.create_group("foo") +# foo.attrs["experiment"] = "weird science" +# baz = foo.create_dataset("bar/baz", data=np.arange(100), chunks=(50,)) +# baz.attrs["units"] = "metres" +# if request.param: +# extra_kws = dict( +# compression="gzip", +# compression_opts=3, +# fillvalue=84, +# shuffle=True, +# fletcher32=True, +# ) +# else: +# extra_kws = dict(compressor=Zlib(3), order="F", fill_value=42, filters=[Adler32()]) +# source.create_dataset( +# "spam", +# data=np.arange(100, 200).reshape(20, 5), +# chunks=(10, 2), +# dtype="i2", +# **extra_kws, +# ) +# return source + +# if request.param: +# h5py = pytest.importorskip("h5py") +# fn = tmpdir.join("source.h5") +# with h5py.File(str(fn), mode="w") as h5f: +# yield prep_source(h5f) +# else: +# yield prep_source(group()) + +# @pytest.fixture(params=[False, True], ids=["zarr", "hdf5"]) +# def dest(self, request, tmpdir): +# if request.param: +# h5py = pytest.importorskip("h5py") +# fn = tmpdir.join("dest.h5") +# with h5py.File(str(fn), mode="w") as h5f: +# yield h5f +# else: +# yield group() + +# def test_copy_array(self, source, dest): +# # copy array with default options +# copy(source["foo/bar/baz"], dest) +# check_copied_array(source["foo/bar/baz"], dest["baz"]) +# copy(source["spam"], dest) +# check_copied_array(source["spam"], dest["spam"]) + +# def test_copy_bad_dest(self, source, dest): +# # try to copy to an array, dest must be a group +# dest = dest.create_dataset("eggs", shape=(100,)) +# with pytest.raises(ValueError): +# 
copy(source["foo/bar/baz"], dest) + +# def test_copy_array_name(self, source, dest): +# # copy array with name +# copy(source["foo/bar/baz"], dest, name="qux") +# assert "baz" not in dest +# check_copied_array(source["foo/bar/baz"], dest["qux"]) + +# def test_copy_array_create_options(self, source, dest): +# dest_h5py = dest.__module__.startswith("h5py.") + +# # copy array, provide creation options +# compressor = Zlib(9) +# create_kws = dict(chunks=(10,)) +# if dest_h5py: +# create_kws.update( +# compression="gzip", compression_opts=9, shuffle=True, fletcher32=True, fillvalue=42 +# ) +# else: +# create_kws.update(compressor=compressor, fill_value=42, order="F", filters=[Adler32()]) +# copy(source["foo/bar/baz"], dest, without_attrs=True, **create_kws) +# check_copied_array( +# source["foo/bar/baz"], dest["baz"], without_attrs=True, expect_props=create_kws +# ) + +# def test_copy_array_exists_array(self, source, dest): +# # copy array, dest array in the way +# dest.create_dataset("baz", shape=(10,)) + +# # raise +# with pytest.raises(CopyError): +# # should raise by default +# copy(source["foo/bar/baz"], dest) +# assert (10,) == dest["baz"].shape +# with pytest.raises(CopyError): +# copy(source["foo/bar/baz"], dest, if_exists="raise") +# assert (10,) == dest["baz"].shape + +# # skip +# copy(source["foo/bar/baz"], dest, if_exists="skip") +# assert (10,) == dest["baz"].shape + +# # replace +# copy(source["foo/bar/baz"], dest, if_exists="replace") +# check_copied_array(source["foo/bar/baz"], dest["baz"]) + +# # invalid option +# with pytest.raises(ValueError): +# copy(source["foo/bar/baz"], dest, if_exists="foobar") + +# def test_copy_array_exists_group(self, source, dest): +# # copy array, dest group in the way +# dest.create_group("baz") + +# # raise +# with pytest.raises(CopyError): +# copy(source["foo/bar/baz"], dest) +# assert not hasattr(dest["baz"], "shape") +# with pytest.raises(CopyError): +# copy(source["foo/bar/baz"], dest, if_exists="raise") +# assert not hasattr(dest["baz"], "shape") + +# # skip +# copy(source["foo/bar/baz"], dest, if_exists="skip") +# assert not hasattr(dest["baz"], "shape") + +# # replace +# copy(source["foo/bar/baz"], dest, if_exists="replace") +# check_copied_array(source["foo/bar/baz"], dest["baz"]) + +# def test_copy_array_skip_initialized(self, source, dest): +# dest_h5py = dest.__module__.startswith("h5py.") + +# dest.create_dataset("baz", shape=(100,), chunks=(10,), dtype="i8") +# assert not np.all(source["foo/bar/baz"][:] == dest["baz"][:]) + +# if dest_h5py: +# with pytest.raises(ValueError): +# # not available with copy to h5py +# copy(source["foo/bar/baz"], dest, if_exists="skip_initialized") + +# else: +# # copy array, dest array exists but not yet initialized +# copy(source["foo/bar/baz"], dest, if_exists="skip_initialized") +# check_copied_array(source["foo/bar/baz"], dest["baz"]) + +# # copy array, dest array exists and initialized, will be skipped +# dest["baz"][:] = np.arange(100, 200) +# copy(source["foo/bar/baz"], dest, if_exists="skip_initialized") +# assert_array_equal(np.arange(100, 200), dest["baz"][:]) +# assert not np.all(source["foo/bar/baz"][:] == dest["baz"][:]) + +# def test_copy_group(self, source, dest): +# # copy group, default options +# copy(source["foo"], dest) +# check_copied_group(source["foo"], dest["foo"]) + +# def test_copy_group_no_name(self, source, dest): +# with pytest.raises(TypeError): +# # need a name if copy root +# copy(source, dest) + +# copy(source, dest, name="root") +# check_copied_group(source, dest["root"]) 
+ +# def test_copy_group_options(self, source, dest): +# # copy group, non-default options +# copy(source["foo"], dest, name="qux", without_attrs=True) +# assert "foo" not in dest +# check_copied_group(source["foo"], dest["qux"], without_attrs=True) + +# def test_copy_group_shallow(self, source, dest): +# # copy group, shallow +# copy(source, dest, name="eggs", shallow=True) +# check_copied_group(source, dest["eggs"], shallow=True) + +# def test_copy_group_exists_group(self, source, dest): +# # copy group, dest groups exist +# dest.create_group("foo/bar") +# copy(source["foo"], dest) +# check_copied_group(source["foo"], dest["foo"]) + +# def test_copy_group_exists_array(self, source, dest): +# # copy group, dest array in the way +# dest.create_dataset("foo/bar", shape=(10,)) + +# # raise +# with pytest.raises(CopyError): +# copy(source["foo"], dest) +# assert dest["foo/bar"].shape == (10,) +# with pytest.raises(CopyError): +# copy(source["foo"], dest, if_exists="raise") +# assert dest["foo/bar"].shape == (10,) + +# # skip +# copy(source["foo"], dest, if_exists="skip") +# assert dest["foo/bar"].shape == (10,) + +# # replace +# copy(source["foo"], dest, if_exists="replace") +# check_copied_group(source["foo"], dest["foo"]) + +# def test_copy_group_dry_run(self, source, dest): +# # dry run, empty destination +# n_copied, n_skipped, n_bytes_copied = copy( +# source["foo"], dest, dry_run=True, return_stats=True +# ) +# assert 0 == len(dest) +# assert 3 == n_copied +# assert 0 == n_skipped +# assert 0 == n_bytes_copied + +# # dry run, array exists in destination +# baz = np.arange(100, 200) +# dest.create_dataset("foo/bar/baz", data=baz) +# assert not np.all(source["foo/bar/baz"][:] == dest["foo/bar/baz"][:]) +# assert 1 == len(dest) + +# # raise +# with pytest.raises(CopyError): +# copy(source["foo"], dest, dry_run=True) +# assert 1 == len(dest) + +# # skip +# n_copied, n_skipped, n_bytes_copied = copy( +# source["foo"], dest, dry_run=True, if_exists="skip", return_stats=True +# ) +# assert 1 == len(dest) +# assert 2 == n_copied +# assert 1 == n_skipped +# assert 0 == n_bytes_copied +# assert_array_equal(baz, dest["foo/bar/baz"]) + +# # replace +# n_copied, n_skipped, n_bytes_copied = copy( +# source["foo"], dest, dry_run=True, if_exists="replace", return_stats=True +# ) +# assert 1 == len(dest) +# assert 3 == n_copied +# assert 0 == n_skipped +# assert 0 == n_bytes_copied +# assert_array_equal(baz, dest["foo/bar/baz"]) + +# def test_logging(self, source, dest, tmpdir): +# # callable log +# copy(source["foo"], dest, dry_run=True, log=print) + +# # file name +# fn = str(tmpdir.join("log_name")) +# copy(source["foo"], dest, dry_run=True, log=fn) + +# # file +# with tmpdir.join("log_file").open(mode="w") as f: +# copy(source["foo"], dest, dry_run=True, log=f) + +# # bad option +# with pytest.raises(TypeError): +# copy(source["foo"], dest, dry_run=True, log=True) diff --git a/tests/v3/test_common.py b/tests/v3/test_common.py index 3bdbd2bffe..cc33aa75cf 100644 --- a/tests/v3/test_common.py +++ b/tests/v3/test_common.py @@ -64,7 +64,7 @@ def parse_indexing_order_valid(data: Literal["C", "F"]): assert parse_indexing_order(data) == data -@pytest.mark.parametrize("data", [10, ("0", 1, 2, 3), {"0": "0"}, []]) +@pytest.mark.parametrize("data", [("0", 1, 2, 3), {"0": "0"}, []]) def test_parse_shapelike_invalid(data: Any): if isinstance(data, Iterable): if len(data) == 0: From 7ded5d6ea0109c28e06fc6204449a76fdcd2e220 Mon Sep 17 00:00:00 2001 From: Martin Durant Date: Tue, 11 Jun 2024 14:45:12 -0400 
Subject: [PATCH 0572/1078] Basic working FsspecStore (#1785) * Basic working FsspecStore * upath to be optional * fill out methods * add fsspec to deps (I believe we want this) * fixes * importable * exceptions * Add simple test * Add to test env * fix typing * Update src/zarr/store/remote.py Co-authored-by: Norman Rzepka * BufferPrototype * set up testing infrastructure for remote store * broken tests but get and set are implemented correctly for TestRemoteStoreS3 * remove implementation of test_get, and make s3 fixture autoused, to reveal multiple event loop error * Update tests/v3/test_store/test_remote.py Co-authored-by: Martin Durant * don't use fsmap, and don't use os.path.join * scope s3 fixture to session, mark test_store_supports_partial_writes as xfail * Update src/zarr/store/remote.py Co-authored-by: Davis Bennett * Fix most * fixed more * fix rest * Massage old v2 tests * just skip them.. * Attribute rename to allowed_exceptions --------- Co-authored-by: Davis Bennett Co-authored-by: Joe Hamman Co-authored-by: Norman Rzepka --- pyproject.toml | 9 +- src/zarr/store/core.py | 2 +- src/zarr/store/remote.py | 191 +++++++++++++----- src/zarr/testing/store.py | 4 +- tests/v2/test_storage.py | 12 +- .../test_local.py} | 36 ---- tests/v3/test_store/test_memory.py | 40 ++++ tests/v3/test_store/test_remote.py | 118 +++++++++++ 8 files changed, 317 insertions(+), 95 deletions(-) rename tests/v3/{test_store.py => test_store/test_local.py} (50%) create mode 100644 tests/v3/test_store/test_memory.py create mode 100644 tests/v3/test_store/test_remote.py diff --git a/pyproject.toml b/pyproject.toml index 80e03322a6..96a884b737 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,10 +27,12 @@ dependencies = [ 'numpy>=1.24', 'fasteners', 'numcodecs>=0.10.0', + 'fsspec>2024', 'crc32c', 'zstandard', 'typing_extensions', - 'donfig' + 'donfig', + 'pytest' ] dynamic = [ "version", @@ -111,7 +113,12 @@ extra-dependencies = [ "pytest-cov", "msgpack", "lmdb", + "s3fs", "pytest-asyncio", + "moto[s3]", + "flask-cors", + "flask", + "requests", "mypy" ] features = ["extra"] diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index 5fbde208f4..512c8383eb 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -14,7 +14,7 @@ def _dereference_path(root: str, path: str) -> str: assert isinstance(root, str) assert isinstance(path, str) root = root.rstrip("/") - path = f"{root}/{path}" if root != "" else path + path = f"{root}/{path}" if root else path path = path.rstrip("/") return path diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 3eb057f9b8..db826f456d 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -1,9 +1,12 @@ from __future__ import annotations +from collections.abc import AsyncGenerator from typing import TYPE_CHECKING, Any +import fsspec + from zarr.abc.store import Store -from zarr.buffer import Buffer, BufferPrototype +from zarr.buffer import Buffer, BufferPrototype, default_buffer_prototype from zarr.common import OpenMode from zarr.store.core import _dereference_path @@ -11,93 +14,177 @@ from fsspec.asyn import AsyncFileSystem from upath import UPath + from zarr.buffer import Buffer + from zarr.common import BytesLike + class RemoteStore(Store): + # based on FSSpec supports_writes: bool = True supports_partial_writes: bool = False supports_listing: bool = True - root: UPath + _fs: AsyncFileSystem + path: str + allowed_exceptions: tuple[type[Exception], ...] 
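+
+    # Illustrative usage, mirroring the tests added later in this patch: a
+    # RemoteStore wraps an fsspec filesystem resolved from a URL, and any
+    # extra keyword arguments are forwarded to fsspec, e.g.
+    #
+    #   store = RemoteStore("s3://bucket/prefix", mode="w", anon=False)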
     def __init__(
-        self, url: UPath | str, *, mode: OpenMode = "r", **storage_options: dict[str, Any]
+        self,
+        url: UPath | str,
+        mode: OpenMode = "r",
+        allowed_exceptions: tuple[type[Exception], ...] = (
+            FileNotFoundError,
+            IsADirectoryError,
+            NotADirectoryError,
+        ),
+        **storage_options: Any,
     ):
-        import fsspec
-        from upath import UPath
+        """
+        Parameters
+        ----------
+        url: root of the datastore. In fsspec notation, this is usually like "protocol://path/to".
+            Can also be a upath.UPath instance.
+        allowed_exceptions: when fetching data, these cases will be deemed to correspond to missing
+            keys, rather than some other IO failure
+        storage_options: passed on to fsspec to make the filesystem instance. If url is a UPath,
+            this must not be used.
+        """
         super().__init__(mode=mode)
         if isinstance(url, str):
-            self.root = UPath(url, **storage_options)
+            self._fs, self.path = fsspec.url_to_fs(url, **storage_options)
+        elif hasattr(url, "protocol") and hasattr(url, "fs"):
+            # is UPath-like - but without importing
+            if storage_options:
+                raise ValueError(
+                    "If constructed with a UPath object, no additional "
+                    "storage_options are allowed"
+                )
+            self.path = url.path
+            self._fs = url._fs
         else:
-            assert (
-                len(storage_options) == 0
-            ), "If constructed with a UPath object, no additional storage_options are allowed."
-            self.root = url.rstrip("/")
-
+            raise ValueError(f"URL not understood: {url}")
+        self.allowed_exceptions = allowed_exceptions
         # test instantiate file system
-        fs, _ = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs)
-        assert fs.__class__.async_impl, "FileSystem needs to support async operations."
+        if not self._fs.async_impl:
+            raise TypeError("FileSystem needs to support async operations")
 
     def __str__(self) -> str:
-        return str(self.root)
+        return f"Remote fsspec store: {type(self._fs).__name__}, {self.path}"
 
     def __repr__(self) -> str:
-        return f"RemoteStore({str(self)!r})"
-
-    def _make_fs(self) -> tuple[AsyncFileSystem, str]:
-        import fsspec
-
-        storage_options = self.root._kwargs.copy()
-        storage_options.pop("_url", None)
-        fs, root = fsspec.core.url_to_fs(str(self.root), asynchronous=True, **self.root._kwargs)
-        assert fs.__class__.async_impl, "FileSystem needs to support async operations."
-        return fs, root
+        return f"<RemoteStore({type(self._fs).__name__}, {self.path})>"
 
     async def get(
         self,
         key: str,
-        prototype: BufferPrototype,
+        prototype: BufferPrototype = default_buffer_prototype,
         byte_range: tuple[int | None, int | None] | None = None,
     ) -> Buffer | None:
-        assert isinstance(key, str)
-        fs, root = self._make_fs()
-        path = _dereference_path(root, key)
+        path = _dereference_path(self.path, key)
 
         try:
-            value: Buffer | None = await (
-                fs._cat_file(path, start=byte_range[0], end=byte_range[1])
-                if byte_range
-                else fs._cat_file(path)
+            if byte_range:
+                # fsspec uses start/end, not start/length
+                start, length = byte_range
+                if start is not None and length is not None:
+                    end = start + length
+                elif length is not None:
+                    end = length
+                else:
+                    end = None
+            value: Buffer = prototype.buffer.from_bytes(
+                await (
+                    self._fs._cat_file(path, start=byte_range[0], end=end)
+                    if byte_range
+                    else self._fs._cat_file(path)
+                )
             )
-        except (FileNotFoundError, IsADirectoryError, NotADirectoryError):
-            return None
+            return value
 
-        return value
+        except self.allowed_exceptions:
+            return None
+        except OSError as e:
+            if "not satisfiable" in str(e):
+                # this is an s3-specific condition we probably don't want to leak
+                return prototype.buffer.from_bytes(b"")
+            raise
 
-    async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None:
+    async def set(
+        self,
+        key: str,
+        value: Buffer,
+        byte_range: tuple[int, int] | None = None,
+    ) -> None:
         self._check_writable()
-        assert isinstance(key, str)
-        fs, root = self._make_fs()
-        path = _dereference_path(root, key)
-
+        path = _dereference_path(self.path, key)
         # write data
         if byte_range:
-            with fs._open(path, "r+b") as f:
-                f.seek(byte_range[0])
-                f.write(value)
-        else:
-            await fs._pipe_file(path, value)
+            raise NotImplementedError
+        await self._fs._pipe_file(path, value.to_bytes())
 
     async def delete(self, key: str) -> None:
         self._check_writable()
-        fs, root = self._make_fs()
-        path = _dereference_path(root, key)
-        if await fs._exists(path):
-            await fs._rm(path)
+        path = _dereference_path(self.path, key)
+        try:
+            await self._fs._rm(path)
+        except FileNotFoundError:
+            pass
+        except self.allowed_exceptions:
+            pass
 
     async def exists(self, key: str) -> bool:
-        fs, root = self._make_fs()
-        path = _dereference_path(root, key)
-        exists: bool = await fs._exists(path)
+        path = _dereference_path(self.path, key)
+        exists: bool = await self._fs._exists(path)
         return exists
+
+    async def get_partial_values(
+        self,
+        prototype: BufferPrototype,
+        key_ranges: list[tuple[str, tuple[int | None, int | None]]],
+    ) -> list[Buffer | None]:
+        if key_ranges:
+            paths, starts, stops = zip(
+                *(
+                    (
+                        _dereference_path(self.path, k[0]),
+                        k[1][0],
+                        ((k[1][0] or 0) + k[1][1]) if k[1][1] is not None else None,
+                    )
+                    for k in key_ranges
+                ),
+                strict=False,
+            )
+        else:
+            return []
+        # TODO: expectations for exceptions or missing keys?
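+        # Note: the zip() above converts zarr's (start, length) byte ranges
+        # into the absolute (start, stop) offsets that fsspec's _cat_ranges
+        # expects.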
+ res = await self._fs._cat_ranges(list(paths), starts, stops, on_error="return") + # the following is an s3-specific condition we probably don't want to leak + res = [b"" if (isinstance(r, OSError) and "not satisfiable" in str(r)) else r for r in res] + for r in res: + if isinstance(r, Exception) and not isinstance(r, self.allowed_exceptions): + raise r + + return [None if isinstance(r, Exception) else prototype.buffer.from_bytes(r) for r in res] + + async def set_partial_values(self, key_start_values: list[tuple[str, int, BytesLike]]) -> None: + raise NotImplementedError + + async def list(self) -> AsyncGenerator[str, None]: + allfiles = await self._fs._find(self.path, detail=False, withdirs=False) + for onefile in (a.replace(self.path + "/", "") for a in allfiles): + yield onefile + + async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]: + prefix = f"{self.path}/{prefix.rstrip('/')}" + try: + allfiles = await self._fs._ls(prefix, detail=False) + except FileNotFoundError: + return + for onefile in (a.replace(prefix + "/", "") for a in allfiles): + yield onefile + + async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]: + for onefile in await self._fs._ls(prefix, detail=False): + yield onefile diff --git a/src/zarr/testing/store.py b/src/zarr/testing/store.py index 3c7f082099..9c37ce0434 100644 --- a/src/zarr/testing/store.py +++ b/src/zarr/testing/store.py @@ -171,12 +171,14 @@ async def test_list(self, store: S) -> None: f"foo/c/{i}", Buffer.from_bytes(i.to_bytes(length=3, byteorder="little")) ) + @pytest.mark.xfail async def test_list_prefix(self, store: S) -> None: # TODO: we currently don't use list_prefix anywhere raise NotImplementedError async def test_list_dir(self, store: S) -> None: - assert [k async for k in store.list_dir("")] == [] + out = [k async for k in store.list_dir("")] + assert out == [] assert [k async for k in store.list_dir("foo")] == [] await store.set("foo/zarr.json", Buffer.from_bytes(b"bar")) await store.set("foo/c/1", Buffer.from_bytes(b"\x01")) diff --git a/tests/v2/test_storage.py b/tests/v2/test_storage.py index 17b80e6a5c..88e99e91a1 100644 --- a/tests/v2/test_storage.py +++ b/tests/v2/test_storage.py @@ -399,7 +399,9 @@ def test_hierarchy(self): assert [] == store.listdir(self.root + "c/x/y") assert [] == store.listdir(self.root + "c/d/y") assert [] == store.listdir(self.root + "c/d/y/z") - assert [] == store.listdir(self.root + "c/e/f") + # the following is listdir(filepath), for which fsspec gives [filepath] + # as posix would, but an empty list was previously assumed + # assert [] == store.listdir(self.root + "c/e/f") # test rename (optional) if store.is_erasable(): @@ -1064,9 +1066,8 @@ def test_complex(self): store[self.root + "foo"] = b"hello" assert "foo" in os.listdir(str(path1) + "/" + self.root) assert self.root + "foo" in store - assert not os.listdir(str(path2)) - assert store[self.root + "foo"] == b"hello" assert "foo" in os.listdir(str(path2)) + assert store[self.root + "foo"] == b"hello" def test_deep_ndim(self): import zarr.v2 @@ -1285,6 +1286,8 @@ def create_store(self, normalize_keys=False, dimension_separator=".", path=None, @pytest.fixture() def s3(request): # writable local S3 system + pytest.skip("old v3 tests are disabled", allow_module_level=True) + import shlex import subprocess import time @@ -1299,7 +1302,7 @@ def s3(request): s3fs = pytest.importorskip("s3fs") pytest.importorskip("moto") - port = 5555 + port = 5556 endpoint_uri = "http://127.0.0.1:%d/" % port proc = subprocess.Popen( 
shlex.split("moto_server s3 -p %d" % port), @@ -1318,6 +1321,7 @@ def s3(request): timeout -= 0.1 # pragma: no cover time.sleep(0.1) # pragma: no cover s3so = dict(client_kwargs={"endpoint_url": endpoint_uri}, use_listings_cache=False) + s3fs.S3FileSystem.clear_instance_cache() s3 = s3fs.S3FileSystem(anon=False, **s3so) s3.mkdir("test") request.cls.s3so = s3so diff --git a/tests/v3/test_store.py b/tests/v3/test_store/test_local.py similarity index 50% rename from tests/v3/test_store.py rename to tests/v3/test_store/test_local.py index 52882ea78c..191a137d46 100644 --- a/tests/v3/test_store.py +++ b/tests/v3/test_store/test_local.py @@ -1,48 +1,12 @@ from __future__ import annotations -from typing import Any - import pytest from zarr.buffer import Buffer from zarr.store.local import LocalStore -from zarr.store.memory import MemoryStore from zarr.testing.store import StoreTests -class TestMemoryStore(StoreTests[MemoryStore]): - store_cls = MemoryStore - - def set(self, store: MemoryStore, key: str, value: Buffer) -> None: - store._store_dict[key] = value - - def get(self, store: MemoryStore, key: str) -> Buffer: - return store._store_dict[key] - - @pytest.fixture(scope="function", params=[None, {}]) - def store_kwargs(self, request) -> dict[str, Any]: - return {"store_dict": request.param, "mode": "w"} - - @pytest.fixture(scope="function") - def store(self, store_kwargs: dict[str, Any]) -> MemoryStore: - return self.store_cls(**store_kwargs) - - def test_store_repr(self, store: MemoryStore) -> None: - assert str(store) == f"memory://{id(store._store_dict)}" - - def test_store_supports_writes(self, store: MemoryStore) -> None: - assert store.supports_writes - - def test_store_supports_listing(self, store: MemoryStore) -> None: - assert store.supports_listing - - def test_store_supports_partial_writes(self, store: MemoryStore) -> None: - assert store.supports_partial_writes - - def test_list_prefix(self, store: MemoryStore) -> None: - assert True - - class TestLocalStore(StoreTests[LocalStore]): store_cls = LocalStore diff --git a/tests/v3/test_store/test_memory.py b/tests/v3/test_store/test_memory.py new file mode 100644 index 0000000000..96b8b19e2c --- /dev/null +++ b/tests/v3/test_store/test_memory.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +import pytest + +from zarr.buffer import Buffer +from zarr.store.memory import MemoryStore +from zarr.testing.store import StoreTests + + +class TestMemoryStore(StoreTests[MemoryStore]): + store_cls = MemoryStore + + def set(self, store: MemoryStore, key: str, value: Buffer) -> None: + store._store_dict[key] = value + + def get(self, store: MemoryStore, key: str) -> Buffer: + return store._store_dict[key] + + @pytest.fixture(scope="function", params=[None, {}]) + def store_kwargs(self, request) -> dict[str, str | None | dict[str, Buffer]]: + return {"store_dict": request.param, "mode": "w"} + + @pytest.fixture(scope="function") + def store(self, store_kwargs: str | None | dict[str, Buffer]) -> MemoryStore: + return self.store_cls(**store_kwargs) + + def test_store_repr(self, store: MemoryStore) -> None: + assert str(store) == f"memory://{id(store._store_dict)}" + + def test_store_supports_writes(self, store: MemoryStore) -> None: + assert store.supports_writes + + def test_store_supports_listing(self, store: MemoryStore) -> None: + assert store.supports_listing + + def test_store_supports_partial_writes(self, store: MemoryStore) -> None: + assert store.supports_partial_writes + + def test_list_prefix(self, store: MemoryStore) -> None: + 
assert True diff --git a/tests/v3/test_store/test_remote.py b/tests/v3/test_store/test_remote.py new file mode 100644 index 0000000000..936cf206d9 --- /dev/null +++ b/tests/v3/test_store/test_remote.py @@ -0,0 +1,118 @@ +import os + +import fsspec +import pytest + +from zarr.buffer import Buffer, default_buffer_prototype +from zarr.store import RemoteStore +from zarr.testing.store import StoreTests + +s3fs = pytest.importorskip("s3fs") +requests = pytest.importorskip("requests") +moto_server = pytest.importorskip("moto.moto_server.threaded_moto_server") +moto = pytest.importorskip("moto") + +# ### amended from s3fs ### # +test_bucket_name = "test" +secure_bucket_name = "test-secure" +port = 5555 +endpoint_uri = f"http://127.0.0.1:{port}/" + + +@pytest.fixture(scope="module") +def s3_base(): + # writable local S3 system + + # This fixture is module-scoped, meaning that we can reuse the MotoServer across all tests + server = moto_server.ThreadedMotoServer(ip_address="127.0.0.1", port=port) + server.start() + if "AWS_SECRET_ACCESS_KEY" not in os.environ: + os.environ["AWS_SECRET_ACCESS_KEY"] = "foo" + if "AWS_ACCESS_KEY_ID" not in os.environ: + os.environ["AWS_ACCESS_KEY_ID"] = "foo" + + yield + server.stop() + + +def get_boto3_client(): + from botocore.session import Session + + # NB: we use the sync botocore client for setup + session = Session() + return session.create_client("s3", endpoint_url=endpoint_uri) + + +@pytest.fixture(autouse=True, scope="function") +def s3(s3_base): + client = get_boto3_client() + client.create_bucket(Bucket=test_bucket_name, ACL="public-read") + s3fs.S3FileSystem.clear_instance_cache() + s3 = s3fs.S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri}) + s3.invalidate_cache() + yield s3 + requests.post(f"{endpoint_uri}/moto-api/reset") + + +# ### end from s3fs ### # + + +async def alist(it): + out = [] + async for a in it: + out.append(a) + return out + + +async def test_basic(): + store = RemoteStore(f"s3://{test_bucket_name}", mode="w", endpoint_url=endpoint_uri, anon=False) + assert not await alist(store.list()) + assert not await store.exists("foo") + data = b"hello" + await store.set("foo", Buffer.from_bytes(data)) + assert await store.exists("foo") + assert (await store.get("foo")).to_bytes() == data + out = await store.get_partial_values( + prototype=default_buffer_prototype, key_ranges=[("foo", (1, None))] + ) + assert out[0].to_bytes() == data[1:] + + +class TestRemoteStoreS3(StoreTests[RemoteStore]): + store_cls = RemoteStore + + @pytest.fixture(scope="function") + def store_kwargs(self) -> dict[str, str | bool]: + return { + "mode": "w", + "endpoint_url": endpoint_uri, + "anon": False, + "url": f"s3://{test_bucket_name}", + } + + @pytest.fixture(scope="function") + def store(self, store_kwargs: dict[str, str | bool]) -> RemoteStore: + self._fs, _ = fsspec.url_to_fs(asynchronous=False, **store_kwargs) + out = self.store_cls(asynchronous=True, **store_kwargs) + return out + + def get(self, store: RemoteStore, key: str) -> Buffer: + return Buffer.from_bytes(self._fs.cat(f"{store.path}/{key}")) + + def set(self, store: RemoteStore, key: str, value: Buffer) -> None: + self._fs.write_bytes(f"{store.path}/{key}", value.to_bytes()) + + def test_store_repr(self, store: RemoteStore) -> None: + rep = str(store) + assert "fsspec" in rep + assert store.path in rep + + def test_store_supports_writes(self, store: RemoteStore) -> None: + assert True + + @pytest.mark.xfail + def test_store_supports_partial_writes(self, store: RemoteStore) -> None: + 
raise AssertionError
+
+    def test_store_supports_listing(self, store: RemoteStore) -> None:
+        assert True

From 5ccd83cfae0ba805c38c0b960c0299d1c0e2ceff Mon Sep 17 00:00:00 2001
From: Joe Hamman 
Date: Tue, 11 Jun 2024 17:24:41 -0700
Subject: [PATCH 0573/1078] doc: update release notes for 3.0.0.alpha (#1959)

also port in changes from 2.18 releases
---
 docs/release.rst | 239 +++++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 222 insertions(+), 17 deletions(-)

diff --git a/docs/release.rst b/docs/release.rst
index b78e709c0e..3fb79774a4 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -13,10 +13,24 @@ Release notes
     # to document your changes. On releases it will be
     # re-indented so that it does not show up in the notes.
 
-.. _unreleased(v3):
+.. note::
+   Zarr-Python 2.18.* is expected to be the final release in the 2.* series. Work on Zarr-Python 3.0 is underway.
+   See `GH1777 `_ for more details on the upcoming
+   3.0 release.
 
-Unreleased (v3)
----------------
+.. release_3.0.0.alpha:
+
+3.0.0.alpha
+-----------
+
+.. warning::
+   Zarr-Python 3.0.0.alpha is a pre-release of the upcoming 3.0 release. This release is not feature complete or
+   expected to be ready for production applications.
+
+.. note::
+   The complete release notes for 3.0 have not been added to this document yet. See the
+   `3.0.0.alpha `_ release on GitHub
+   for a record of changes included in this release.
 
 Enhancements
 ~~~~~~~~~~~~
@@ -31,10 +45,160 @@ Maintenance
    The dependency relationship is now reversed: the test suite imports this class from ``zarr-python``.
    By :user:`Davis Bennett ` :issue:`1601`.
 
-.. _unreleased:
 
-Unreleased (v2)
----------------
+.. _release_2.18.2:
+
+2.18.2
+------
+
+Enhancements
+~~~~~~~~~~~~
+
+* Add Zstd codec to old V3 code path.
+  By :user:`Ryan Abernathey `
+
+.. _release_2.18.1:
+
+2.18.1
+------
+
+Maintenance
+~~~~~~~~~~~
+* Fix a regression when getting or setting a single value from arrays with size-1 chunks.
+  By :user:`Deepak Cherian ` :issue:`1874`
+
+.. _release_2.18.0:
+
+2.18.0
+------
+
+Enhancements
+~~~~~~~~~~~~
+* Performance improvement for reading and writing chunks if any of the dimensions is size 1.
+  By :user:`Deepak Cherian ` :issue:`1730`.
+
+Maintenance
+~~~~~~~~~~~
+* Enable ruff/bugbear rules (B) and fix issues.
+  By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1702`.
+
+* Minor updates to use `np.inf` instead of `np.PINF` / `np.NINF` in preparation for NumPy 2.0.0 release.
+  By :user:`Joe Hamman ` :issue:`1842`.
+
+Deprecations
+~~~~~~~~~~~~
+
+* Deprecate experimental v3 support by issuing a `FutureWarning`.
+  Also updated docs to warn about using the experimental v3 version.
+  By :user:`Joe Hamman ` :issue:`1802` and :issue:`1807`.
+
+* Deprecate the following stores: :class:`zarr.storage.DBMStore`, :class:`zarr.storage.LMDBStore`,
+  :class:`zarr.storage.SQLiteStore`, :class:`zarr.storage.MongoDBStore`, :class:`zarr.storage.RedisStore`,
+  and :class:`zarr.storage.ABSStore`. These stores are slated to be removed from Zarr-Python in version 3.0.
+  By :user:`Joe Hamman ` :issue:`1801`.
+
+.. _release_2.17.2:
+
+2.17.2
+------
+
+Enhancements
+~~~~~~~~~~~~
+
+* [v3] Dramatically reduce number of ``__contains__`` requests in favor of optimistically calling `__getitem__`
+  and handling any error that may arise.
+  By :user:`Deepak Cherian ` :issue:`1741`.
+
+* [v3] Reuse the downloaded array metadata when creating an ``Array``.
+  By :user:`Deepak Cherian ` :issue:`1734`.
+
+* Optimize ``Array.info`` so that it calls `getsize` only once.
+ By :user:`Deepak Cherian ` :issue:`1733`. + +* Override IPython ``_repr_*_`` methods to avoid expensive lookups against object stores. + By :user:`Deepak Cherian ` :issue:`1716`. + +* FSStore now raises rather than return bad data. + By :user:`Martin Durant ` and :user:`Ian Carroll ` :issue:`1604`. + +* Avoid redundant ``__contains__``. + By :user:`Deepak Cherian ` :issue:`1739`. + +Docs +~~~~ + +* Fix link to GCSMap in ``tutorial.rst``. + By :user:`Daniel Jahn ` :issue:`1689`. + +* Endorse `SPEC0000 `_ and state version support policy in ``installation.rst``. + By :user:`Sanket Verma ` :issue:`1665`. + +* Migrate v1 and v2 specification to `Zarr-Specs `_. + By :user:`Sanket Verma ` :issue:`1582`. + +Maintenance +~~~~~~~~~~~ + +* Add CI test environment for Python 3.12 + By :user:`Joe Hamman ` :issue:`1719`. + +* Bump minimum supported NumPy version to 1.23 (per spec 0000) + By :user:`Joe Hamman ` :issue:`1719`. + +* Minor fixes: Using ``is`` instead of ``type`` and removing unnecessary ``None``. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1737`. + +* Fix tests failure related to Pytest 8. + By :user:`David Stansby ` :issue:`1714`. + +.. _release_2.17.1: + +2.17.1 +------ + +Enhancements +~~~~~~~~~~~~ + +* Change occurrences of % and format() to f-strings. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1423`. + +* Proper argument for numpy.reshape. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1425`. + +* Add typing to dimension separator arguments. + By :user:`David Stansby ` :issue:`1620`. + +Docs +~~~~ + +* ZIP related tweaks. + By :user:`Davis Bennett ` :issue:`1641`. + +Maintenance +~~~~~~~~~~~ + +* Update config.yml with Zulip. + By :user:`Josh Moore `. + +* Replace Gitter with the new Zulip Chat link. + By :user:`Sanket Verma ` :issue:`1685`. + +* Fix RTD build. + By :user:`Sanket Verma ` :issue:`1694`. + +.. _release_2.17.0: + +2.17.0 +------ + +Enhancements +~~~~~~~~~~~~ + +* Added type hints to ``zarr.creation.create()``. + By :user:`David Stansby ` :issue:`1536`. + +* Pyodide support: Don't require fasteners on Emscripten. + By :user:`Hood Chatham ` :issue:`1663`. Docs ~~~~ @@ -57,10 +221,30 @@ Docs * Minor tweak to advanced indexing tutorial examples. By :user:`Ross Barnowski ` :issue:`1550`. +* Automatically document array members using sphinx-automodapi. + By :user:`David Stansby ` :issue:`1547`. + +* Add a markdown file documenting the current and former core-developer team. + By :user:`Joe Hamman ` :issue:`1628`. + +* Add Norman Rzepka to core-dev team. + By :user:`Joe Hamman ` :issue:`1630`. + +* Added section about accessing ZIP archives on s3. + By :user:`Jeff Peck ` :issue:`1613`, :issue:`1615`, and :user:`Davis Bennett ` :issue:`1641`. + +* Add V3 roadmap and design document. + By :user:`Joe Hamman ` :issue:`1583`. Maintenance ~~~~~~~~~~~ +* Drop Python 3.8 and NumPy 1.20 + By :user:`Josh Moore `; :issue:`1557`. + +* Cache result of ``FSStore._fsspec_installed()``. + By :user:`Janick Martinez Esturo ` :issue:`1581`. + * Extend copyright notice to 2023. By :user:`Jack Kelly ` :issue:`1528`. @@ -79,6 +263,27 @@ Maintenance * Remove ``sphinx-rtd-theme`` dependency from ``pyproject.toml``. By :user:`Sanket Verma ` :issue:`1563`. +* Remove ``CODE_OF_CONDUCT.md`` file from the Zarr-Python repository. + By :user:`Sanket Verma ` :issue:`1572`. + +* Bump version of black in pre-commit. + By :user:`David Stansby ` :issue:`1559`. + +* Use list comprehension where applicable. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1555`. 
+ +* Use format specification mini-language to format string. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1558`. + +* Single startswith() call instead of multiple ones. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1556`. + +* Move codespell options around. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1196`. + +* Remove unused mypy ignore comments. + By :user:`David Stansby ` :issue:`1602`. + .. _release_2.16.1: 2.16.1 @@ -120,10 +325,10 @@ Maintenance By :user:`Davis Bennett ` :issue:`1462`. * Style the codebase with ``ruff`` and ``black``. - By :user:`Davis Bennett` :issue:`1459` + By :user:`Davis Bennett ` :issue:`1459` * Ensure that chunks is tuple of ints upon array creation. - By :user:`Philipp Hanslovsky` :issue:`1461` + By :user:`Philipp Hanslovsky ` :issue:`1461` .. _release_2.15.0: @@ -511,7 +716,7 @@ Maintenance By :user:`Saransh Chopra ` :issue:`1079`. * Remove option to return None from _ensure_store. - By :user:`Greggory Lee ` :issue:`1068`. + By :user:`Gregory Lee ` :issue:`1068`. * Fix a typo of "integers". By :user:`Richard Scott ` :issue:`1056`. @@ -529,7 +734,7 @@ Enhancements Since the format is not yet finalized, the classes and functions are not automatically imported into the regular `zarr` name space. Setting the `ZARR_V3_EXPERIMENTAL_API` environment variable will activate them. - By :user:`Greggory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007` + By :user:`Gregory Lee `; :issue:`898`, :issue:`1006`, and :issue:`1007` as well as by :user:`Josh Moore ` :issue:`1032`. * **Create FSStore from an existing fsspec filesystem**. If you have created @@ -651,7 +856,7 @@ Enhancements higher-level array creation and convenience functions still accept plain Python dicts or other mutable mappings for the ``store`` argument, but will internally convert these to a ``KVStore``. - By :user:`Greggory Lee `; :issue:`839`, :issue:`789`, and :issue:`950`. + By :user:`Gregory Lee `; :issue:`839`, :issue:`789`, and :issue:`950`. * Allow to assign array ``fill_values`` and update metadata accordingly. By :user:`Ryan Abernathey `, :issue:`662`. @@ -798,7 +1003,7 @@ Bug fixes ~~~~~~~~~ * Fix FSStore.listdir behavior for nested directories. - By :user:`Greggory Lee `; :issue:`802`. + By :user:`Gregory Lee `; :issue:`802`. .. _release_2.9.4: @@ -882,7 +1087,7 @@ Bug fixes By :user:`Josh Moore `; :issue:`781`. * avoid NumPy 1.21.0 due to https://github.com/numpy/numpy/issues/19325 - By :user:`Greggory Lee `; :issue:`791`. + By :user:`Gregory Lee `; :issue:`791`. Maintenance ~~~~~~~~~~~ @@ -894,7 +1099,7 @@ Maintenance By :user:`Elliott Sales de Andrade `; :issue:`799`. * TST: add missing assert in test_hexdigest. - By :user:`Greggory Lee `; :issue:`801`. + By :user:`Gregory Lee `; :issue:`801`. .. _release_2.8.3: @@ -1538,11 +1743,11 @@ Bug fixes Documentation ~~~~~~~~~~~~~ -* Some changes have been made to the :ref:`spec_v2` document to clarify +* Some changes have been made to the Zarr Specification v2 document to clarify ambiguities and add some missing information. These changes do not break compatibility with any of the material as previously implemented, and so the changes have been made in-place in the document without incrementing the document version number. See the - section on :ref:`spec_v2_changes` in the specification document for more information. + section on changes in the specification document for more information. * A new :ref:`tutorial_indexing` section has been added to the tutorial. 
* A new :ref:`tutorial_strings` section has been added to the tutorial (:issue:`135`, :issue:`175`). @@ -1812,4 +2017,4 @@ See `v0.4.0 release notes on GitHub See `v0.3.0 release notes on GitHub `_. -.. _Numcodecs: https://numcodecs.readthedocs.io/ +.. _Numcodecs: https://numcodecs.readthedocs.io/ \ No newline at end of file From 194862802e2d31191a49a291b294807755e5b8d6 Mon Sep 17 00:00:00 2001 From: Sanket Verma Date: Tue, 11 Jun 2024 20:28:17 -0400 Subject: [PATCH 0574/1078] Update release.rst (#1960) --- docs/release.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/release.rst b/docs/release.rst index 3fb79774a4..7e54035915 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -18,18 +18,18 @@ Release notes See `GH1777 `_ for more details on the upcoming 3.0 release. -.. release_3.0.0.alpha: +.. release_3.0.0-alpha: -3.0.0.alpha +3.0.0-alpha ----------- .. warning:: - Zarr-Python 3.0.0.alpha is a pre-release of the upcoming 3.0 release. This release is not feature complete or + Zarr-Python 3.0.0-alpha is a pre-release of the upcoming 3.0 release. This release is not feature complete or expected to be ready for production applications. .. note:: The complete release notes for 3.0 have not been added to this document yet. See the - `3.0.0.alpha `_ release on GitHub + `3.0.0-alpha `_ release on GitHub for a record of changes included in this release. Enhancements @@ -2017,4 +2017,4 @@ See `v0.4.0 release notes on GitHub See `v0.3.0 release notes on GitHub `_. -.. _Numcodecs: https://numcodecs.readthedocs.io/ \ No newline at end of file +.. _Numcodecs: https://numcodecs.readthedocs.io/ From f648b619e53cde809cd516f308ff40f206a742bd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 12 Jun 2024 15:59:35 -0400 Subject: [PATCH 0575/1078] chore: update pre-commit hooks (#1957) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.7 → v0.4.8](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.7...v0.4.8) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4bb39f0e06..de1adb8840 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 'v0.4.7' + rev: 'v0.4.8' hooks: - id: ruff args: ["--fix", "--show-fixes"] From ef1817c1115b6c0237f0228f7777fa58162fa3c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jun 2024 13:04:34 -0700 Subject: [PATCH 0576/1078] Bump pypa/gh-action-pypi-publish in the actions group (#1969) Bumps the actions group with 1 update: [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish). Updates `pypa/gh-action-pypi-publish` from 1.8.14 to 1.9.0 - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.14...v1.9.0) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor dependency-group: actions ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/releases.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 51fcf08591..b54cbe48b3 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -55,7 +55,7 @@ jobs: with: name: releases path: dist - - uses: pypa/gh-action-pypi-publish@v1.8.14 + - uses: pypa/gh-action-pypi-publish@v1.9.0 with: user: __token__ password: ${{ secrets.pypi_password }} From 143faeaf9007657cd688ea8afa2cc62e1135ae2b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Jun 2024 13:38:21 -0700 Subject: [PATCH 0577/1078] chore: update pre-commit hooks (#1973) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.8 → v0.4.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.8...v0.4.9) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index de1adb8840..1ef226cd28 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 'v0.4.8' + rev: 'v0.4.9' hooks: - id: ruff args: ["--fix", "--show-fixes"] From ba6b5c004f3d901f6bffb455c420eb701e58360b Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Wed, 19 Jun 2024 01:01:00 +0200 Subject: [PATCH 0578/1078] add json indentation to config (#1952) --- src/zarr/config.py | 1 + src/zarr/group.py | 14 +++++++++++--- src/zarr/metadata.py | 13 ++++++++++--- tests/v3/test_config.py | 23 ++++++++++++++++++----- 4 files changed, 40 insertions(+), 11 deletions(-) diff --git a/src/zarr/config.py b/src/zarr/config.py index 7c5b48a16c..e711a98cb5 100644 --- a/src/zarr/config.py +++ b/src/zarr/config.py @@ -11,6 +11,7 @@ "array": {"order": "C"}, "async": {"concurrency": None, "timeout": None}, "codec_pipeline": {"batch_size": 1}, + "json_indent": 2, } ], ) diff --git a/src/zarr/group.py b/src/zarr/group.py index 4bb4b6b4dd..e6e2ac183f 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -25,6 +25,7 @@ ChunkCoords, ZarrFormat, ) +from zarr.config import config from zarr.store import StoreLike, StorePath, make_store_path from zarr.sync import SyncMixin, sync @@ -79,14 +80,21 @@ class GroupMetadata(Metadata): node_type: Literal["group"] = field(default="group", init=False) def to_buffer_dict(self) -> dict[str, Buffer]: + json_indent = config.get("json_indent") if self.zarr_format == 3: - return {ZARR_JSON: Buffer.from_bytes(json.dumps(self.to_dict()).encode())} + return { + ZARR_JSON: Buffer.from_bytes( + json.dumps(self.to_dict(), indent=json_indent).encode() + ) + } else: return { ZGROUP_JSON: Buffer.from_bytes( - json.dumps({"zarr_format": self.zarr_format}).encode() + json.dumps({"zarr_format": self.zarr_format}, indent=json_indent).encode() + ), + ZATTRS_JSON: Buffer.from_bytes( + json.dumps(self.attributes, indent=json_indent).encode() ), - ZATTRS_JSON: Buffer.from_bytes(json.dumps(self.attributes).encode()), } def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3): diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 
8329bd9200..c6a71c00b8 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -16,6 +16,7 @@ from zarr.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator from zarr.codecs._v2 import V2Compressor, V2Filters +from zarr.config import config if TYPE_CHECKING: from typing_extensions import Self @@ -272,8 +273,11 @@ def _json_convert(o: np.dtype[Any] | Enum | Codec) -> str | dict[str, Any]: return config raise TypeError + json_indent = config.get("json_indent") return { - ZARR_JSON: Buffer.from_bytes(json.dumps(self.to_dict(), default=_json_convert).encode()) + ZARR_JSON: Buffer.from_bytes( + json.dumps(self.to_dict(), default=_json_convert, indent=json_indent).encode() + ) } @classmethod @@ -394,9 +398,12 @@ def _json_convert( assert isinstance(zarray_dict, dict) zattrs_dict = zarray_dict.pop("attributes", {}) assert isinstance(zattrs_dict, dict) + json_indent = config.get("json_indent") return { - ZARRAY_JSON: Buffer.from_bytes(json.dumps(zarray_dict, default=_json_convert).encode()), - ZATTRS_JSON: Buffer.from_bytes(json.dumps(zattrs_dict).encode()), + ZARRAY_JSON: Buffer.from_bytes( + json.dumps(zarray_dict, default=_json_convert, indent=json_indent).encode() + ), + ZATTRS_JSON: Buffer.from_bytes(json.dumps(zattrs_dict, indent=json_indent).encode()), } @classmethod diff --git a/tests/v3/test_config.py b/tests/v3/test_config.py index aed9775d17..684ab0dfce 100644 --- a/tests/v3/test_config.py +++ b/tests/v3/test_config.py @@ -1,19 +1,32 @@ +from typing import Any + +import pytest + from zarr.config import config -def test_config_defaults_set(): +def test_config_defaults_set() -> None: # regression test for available defaults assert config.defaults == [ { "array": {"order": "C"}, "async": {"concurrency": None, "timeout": None}, "codec_pipeline": {"batch_size": 1}, + "json_indent": 2, } ] assert config.get("array.order") == "C" + assert config.get("async.concurrency") is None + assert config.get("async.timeout") is None + assert config.get("codec_pipeline.batch_size") == 1 + assert config.get("json_indent") == 2 -def test_config_defaults_can_be_overridden(): - assert config.get("array.order") == "C" - with config.set({"array.order": "F"}): - assert config.get("array.order") == "F" +@pytest.mark.parametrize( + "key, old_val, new_val", + [("array.order", "C", "F"), ("async.concurrency", None, 10), ("json_indent", 2, 0)], +) +def test_config_defaults_can_be_overridden(key: str, old_val: Any, new_val: Any) -> None: + assert config.get(key) == old_val + with config.set({key: new_val}): + assert config.get(key) == new_val From d4c25b2b08ecfaf43b5141808bbab99d63a6c050 Mon Sep 17 00:00:00 2001 From: Hannes Spitz <44113112+brokkoli71@users.noreply.github.com> Date: Wed, 19 Jun 2024 09:33:38 +0200 Subject: [PATCH 0579/1078] Clean up typing and docs for indexing (#1961) * fix typing * add docstring for get_block_selection * add docstring for get_basic_selection and get_coordinate_selection * add note for get_basic_selection with structured dtype * remove common.Selection and replace by indexing.Selection * add docstring for set_block_selection * add docstring for __getitem__ and __setitem__ * add docstring for set_basic_selection * add docstring for set and get_orthogonal_selection * add docstring for set and get_mask_selection * add docstring for set_coordinate_selection * add docstring for oindex and vindex * ruff formatting * setting input interfaces values to npt.ArrayLike * improve typing * improve docstring examples * add 
docstring for Array.resize * ruff format --- src/zarr/api/asynchronous.py | 2 +- src/zarr/array.py | 1119 +++++++++++++++++++++++++++++++++- src/zarr/buffer.py | 4 +- src/zarr/common.py | 2 - src/zarr/indexing.py | 54 +- tests/v3/test_codecs.py | 3 +- 6 files changed, 1124 insertions(+), 60 deletions(-) diff --git a/src/zarr/api/asynchronous.py b/src/zarr/api/asynchronous.py index 52d07fb6fe..6cf7378bfa 100644 --- a/src/zarr/api/asynchronous.py +++ b/src/zarr/api/asynchronous.py @@ -322,7 +322,7 @@ async def tree(*args: Any, **kwargs: Any) -> None: raise NotImplementedError -async def array(data: NDArrayLike, **kwargs: Any) -> AsyncArray: +async def array(data: npt.ArrayLike, **kwargs: Any) -> AsyncArray: """Create an array filled with `data`. Parameters diff --git a/src/zarr/array.py b/src/zarr/array.py index 9ac1ce41ec..3b5ecce8ee 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -30,7 +30,6 @@ ZARRAY_JSON, ZATTRS_JSON, ChunkCoords, - Selection, ZarrFormat, concurrent_map, product, @@ -41,7 +40,6 @@ BasicSelection, BlockIndex, BlockIndexer, - BlockSelection, CoordinateIndexer, CoordinateSelection, Fields, @@ -51,6 +49,7 @@ OIndex, OrthogonalIndexer, OrthogonalSelection, + Selection, VIndex, check_fields, check_no_multi_fields, @@ -460,7 +459,7 @@ async def _get_selection( return out_buffer.as_ndarray_like() async def getitem( - self, selection: Selection, *, prototype: BufferPrototype = default_buffer_prototype + self, selection: BasicSelection, *, prototype: BufferPrototype = default_buffer_prototype ) -> NDArrayLike: indexer = BasicIndexer( selection, @@ -477,7 +476,7 @@ async def _save_metadata(self, metadata: ArrayMetadata) -> None: async def _set_selection( self, indexer: Indexer, - value: NDArrayLike, + value: npt.ArrayLike, *, prototype: BufferPrototype, fields: Fields | None = None, @@ -495,9 +494,9 @@ async def _set_selection( # assert ( # value.shape == indexer.shape # ), f"shape of value doesn't match indexer shape. Expected {indexer.shape}, got {value.shape}" - if value.dtype.name != self.metadata.dtype.name: - value = value.astype(self.metadata.dtype, order="A") - + if not hasattr(value, "dtype") or value.dtype.name != self.metadata.dtype.name: + value = np.array(value, dtype=self.metadata.dtype, order="A") + value = cast(NDArrayLike, value) # We accept any ndarray like object from the user and convert it # to a NDBuffer (or subclass). From this point onwards, we only pass # Buffer and NDBuffer between components. @@ -520,8 +519,8 @@ async def _set_selection( async def setitem( self, - selection: Selection, - value: NDArrayLike, + selection: BasicSelection, + value: npt.ArrayLike, prototype: BufferPrototype = default_buffer_prototype, ) -> None: indexer = BasicIndexer( @@ -702,6 +701,148 @@ def read_only(self) -> bool: return self._async_array.read_only def __getitem__(self, selection: Selection) -> NDArrayLike: + """Retrieve data for an item or region of the array. + + Parameters + ---------- + selection : tuple + An integer index or slice or tuple of int/slice objects specifying the + requested item or region for each dimension of the array. + + Returns + ------- + NDArrayLike + An array-like containing the data for the requested region. 
+
+        Examples
+        --------
+        Setup a 1-dimensional array::
+
+            >>> import zarr
+            >>> import numpy as np
+            >>> data = np.arange(100, dtype="uint16")
+            >>> z = Array.create(
+            >>>        StorePath(MemoryStore(mode="w")),
+            >>>        shape=data.shape,
+            >>>        chunk_shape=(10,),
+            >>>        dtype=data.dtype,
+            >>>        )
+            >>> z[:] = data
+
+        Retrieve a single item::
+
+            >>> z[5]
+            5
+
+        Retrieve a region via slicing::
+
+            >>> z[:5]
+            array([0, 1, 2, 3, 4])
+            >>> z[-5:]
+            array([95, 96, 97, 98, 99])
+            >>> z[5:10]
+            array([5, 6, 7, 8, 9])
+            >>> z[5:10:2]
+            array([5, 7, 9])
+            >>> z[::2]
+            array([ 0, 2, 4, ..., 94, 96, 98])
+
+        Load the entire array into memory::
+
+            >>> z[...]
+            array([ 0, 1, 2, ..., 97, 98, 99])
+
+        Setup a 2-dimensional array::
+
+            >>> data = np.arange(100, dtype="uint16").reshape(10, 10)
+            >>> z = Array.create(
+            >>>        StorePath(MemoryStore(mode="w")),
+            >>>        shape=data.shape,
+            >>>        chunk_shape=(10, 10),
+            >>>        dtype=data.dtype,
+            >>>        )
+            >>> z[:] = data
+
+        Retrieve an item::
+
+            >>> z[2, 2]
+            22
+
+        Retrieve a region via slicing::
+
+            >>> z[1:3, 1:3]
+            array([[11, 12],
+                   [21, 22]])
+            >>> z[1:3, :]
+            array([[10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
+                   [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]])
+            >>> z[:, 1:3]
+            array([[ 1, 2],
+                   [11, 12],
+                   [21, 22],
+                   [31, 32],
+                   [41, 42],
+                   [51, 52],
+                   [61, 62],
+                   [71, 72],
+                   [81, 82],
+                   [91, 92]])
+            >>> z[0:5:2, 0:5:2]
+            array([[ 0, 2, 4],
+                   [20, 22, 24],
+                   [40, 42, 44]])
+            >>> z[::2, ::2]
+            array([[ 0, 2, 4, 6, 8],
+                   [20, 22, 24, 26, 28],
+                   [40, 42, 44, 46, 48],
+                   [60, 62, 64, 66, 68],
+                   [80, 82, 84, 86, 88]])
+
+        Load the entire array into memory::
+
+            >>> z[...]
+            array([[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+                   [10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
+                   [20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
+                   [30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
+                   [40, 41, 42, 43, 44, 45, 46, 47, 48, 49],
+                   [50, 51, 52, 53, 54, 55, 56, 57, 58, 59],
+                   [60, 61, 62, 63, 64, 65, 66, 67, 68, 69],
+                   [70, 71, 72, 73, 74, 75, 76, 77, 78, 79],
+                   [80, 81, 82, 83, 84, 85, 86, 87, 88, 89],
+                   [90, 91, 92, 93, 94, 95, 96, 97, 98, 99]])
+
+        Notes
+        -----
+        Slices with step > 1 are supported, but slices with negative step are not.
+
+        For arrays with a structured dtype, see zarr v2 for examples of how to use
+        fields
+
+        Currently the implementation for __getitem__ is provided by
+        :func:`vindex` if the indexing is pure fancy indexing (ie a
+        broadcast-compatible tuple of integer array indices), or by
+        :func:`get_basic_selection` otherwise.
+
+        Effectively, this means that the following indexing modes are supported:
+
+        - integer indexing
+        - slice indexing
+        - mixed slice and integer indexing
+        - boolean indexing
+        - fancy indexing (vectorized list of integers)
+
+        For specific indexing options including outer indexing, see the
+        methods listed under See Also.
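+
+        As a quick sketch of that dispatch (reusing the 2-dimensional ``z`` from
+        the examples above), the bracket notation and the explicit methods
+        should return the same data::
+
+            >>> np.array_equal(z[1:3, 1:3], z.get_basic_selection((slice(1, 3), slice(1, 3))))
+            True
+            >>> np.array_equal(z[[1, 4], [1, 4]], z.vindex[[1, 4], [1, 4]])
+            True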
+ + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, + set_orthogonal_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __setitem__ + + """ fields, pure_selection = pop_fields(selection) if is_pure_fancy_indexing(pure_selection, self.ndim): return self.vindex[cast(CoordinateSelection | MaskSelection, selection)] @@ -710,7 +851,97 @@ def __getitem__(self, selection: Selection) -> NDArrayLike: else: return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields) - def __setitem__(self, selection: Selection, value: NDArrayLike) -> None: + def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None: + """Modify data for an item or region of the array. + + Parameters + ---------- + selection : tuple + An integer index or slice or tuple of int/slice specifying the requested + region for each dimension of the array. + value : npt.ArrayLike + An array-like containing the data to be stored in the selection. + + Examples + -------- + Setup a 1-dimensional array:: + + >>> import zarr + >>> z = zarr.zeros( + >>> shape=(100,), + >>> store=StorePath(MemoryStore(mode="w")), + >>> chunk_shape=(5,), + >>> dtype="i4", + >>> ) + + Set all array elements to the same scalar value:: + + >>> z[...] = 42 + >>> z[...] + array([42, 42, 42, ..., 42, 42, 42]) + + Set a portion of the array:: + + >>> z[:10] = np.arange(10) + >>> z[-10:] = np.arange(10)[::-1] + >>> z[...] + array([ 0, 1, 2, ..., 2, 1, 0]) + + Setup a 2-dimensional array:: + + >>> z = zarr.zeros( + >>> shape=(5, 5), + >>> store=StorePath(MemoryStore(mode="w")), + >>> chunk_shape=(5, 5), + >>> dtype="i4", + >>> ) + + Set all array elements to the same scalar value:: + + >>> z[...] = 42 + + Set a portion of the array:: + + >>> z[0, :] = np.arange(z.shape[1]) + >>> z[:, 0] = np.arange(z.shape[0]) + >>> z[...] + array([[ 0, 1, 2, 3, 4], + [ 1, 42, 42, 42, 42], + [ 2, 42, 42, 42, 42], + [ 3, 42, 42, 42, 42], + [ 4, 42, 42, 42, 42]]) + + Notes + ----- + Slices with step > 1 are supported, but slices with negative step are not. + + For arrays with a structured dtype, see zarr v2 for examples of how to use + fields + + Currently the implementation for __setitem__ is provided by + :func:`vindex` if the indexing is pure fancy indexing (ie a + broadcast-compatible tuple of integer array indices), or by + :func:`set_basic_selection` otherwise. + + Effectively, this means that the following indexing modes are supported: + + - integer indexing + - slice indexing + - mixed slice and integer indexing + - boolean indexing + - fancy indexing (vectorized list of integers) + + For specific indexing options including outer indexing, see the + methods listed under See Also. + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, + set_orthogonal_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__ + + """ fields, pure_selection = pop_fields(selection) if is_pure_fancy_indexing(pure_selection, self.ndim): self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value @@ -727,6 +958,110 @@ def get_basic_selection( prototype: BufferPrototype = default_buffer_prototype, fields: Fields | None = None, ) -> NDArrayLike: + """Retrieve data for an item or region of the array. 
+
+        Parameters
+        ----------
+        selection : tuple
+            A tuple specifying the requested item or region for each dimension of the
+            array. May be any combination of int and/or slice or ellipsis for multidimensional arrays.
+        out : NDBuffer, optional
+            If given, load the selected data directly into this buffer.
+        fields : str or sequence of str, optional
+            For arrays with a structured dtype, one or more fields can be specified to
+            extract data for.
+        prototype : BufferPrototype, optional
+            The prototype of the buffer to use for the output data. If not provided, the default buffer prototype is used.
+
+        Returns
+        -------
+        NDArrayLike
+            An array-like containing the data for the requested region.
+
+        Examples
+        --------
+        Setup a 1-dimensional array::
+
+            >>> import zarr
+            >>> import numpy as np
+            >>> data = np.arange(100, dtype="uint16")
+            >>> z = Array.create(
+            >>>        StorePath(MemoryStore(mode="w")),
+            >>>        shape=data.shape,
+            >>>        chunk_shape=(3,),
+            >>>        dtype=data.dtype,
+            >>>        )
+            >>> z[:] = data
+
+        Retrieve a single item::
+
+            >>> z.get_basic_selection(5)
+            5
+
+        Retrieve a region via slicing::
+
+            >>> z.get_basic_selection(slice(5))
+            array([0, 1, 2, 3, 4])
+            >>> z.get_basic_selection(slice(-5, None))
+            array([95, 96, 97, 98, 99])
+            >>> z.get_basic_selection(slice(5, 10))
+            array([5, 6, 7, 8, 9])
+            >>> z.get_basic_selection(slice(5, 10, 2))
+            array([5, 7, 9])
+            >>> z.get_basic_selection(slice(None, None, 2))
+            array([ 0, 2, 4, ..., 94, 96, 98])
+
+        Setup a 3-dimensional array::
+
+            >>> data = np.arange(1000).reshape(10, 10, 10)
+            >>> z = Array.create(
+            >>>        StorePath(MemoryStore(mode="w")),
+            >>>        shape=data.shape,
+            >>>        chunk_shape=(5, 5, 5),
+            >>>        dtype=data.dtype,
+            >>>        )
+            >>> z[:] = data
+
+        Retrieve an item::
+
+            >>> z.get_basic_selection((1, 2, 3))
+            123
+
+        Retrieve a region via slicing and Ellipsis::
+
+            >>> z.get_basic_selection((slice(1, 3), slice(1, 3), 0))
+            array([[110, 120],
+                   [210, 220]])
+            >>> z.get_basic_selection((0, slice(1, 3), slice(None)))
+            array([[10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
+                   [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]])
+            >>> z.get_basic_selection((..., 5))
+            array([[  5,  15,  25,  35,  45,  55,  65,  75,  85,  95],
+                   [105, 115, 125, 135, 145, 155, 165, 175, 185, 195],
+                   ...,
+                   [805, 815, 825, 835, 845, 855, 865, 875, 885, 895],
+                   [905, 915, 925, 935, 945, 955, 965, 975, 985, 995]])
+
+        Notes
+        -----
+        Slices with step > 1 are supported, but slices with negative step are not.
+
+        For arrays with a structured dtype, see zarr v2 for examples of how to use
+        the `fields` parameter.
+
+        This method provides the implementation for accessing data via the
+        square bracket notation (__getitem__). See :func:`__getitem__` for examples
+        using the alternative notation.
+
+        See Also
+        --------
+        set_basic_selection, get_mask_selection, set_mask_selection,
+        get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection,
+        set_orthogonal_selection, get_block_selection, set_block_selection,
+        vindex, oindex, blocks, __getitem__, __setitem__
+
+        """
+
         if self.shape == ():
             raise NotImplementedError
         else:
@@ -742,11 +1077,93 @@ def get_basic_selection(
     def set_basic_selection(
         self,
         selection: BasicSelection,
-        value: NDArrayLike,
+        value: npt.ArrayLike,
         *,
         fields: Fields | None = None,
         prototype: BufferPrototype = default_buffer_prototype,
     ) -> None:
+        """Modify data for an item or region of the array.
+
+        Parameters
+        ----------
+        selection : tuple
+            A tuple specifying the requested item or region for each dimension of the
+            array. May be any combination of int and/or slice or ellipsis for multidimensional arrays.
+        value : npt.ArrayLike
+            An array-like containing values to be stored into the array.
+        fields : str or sequence of str, optional
+            For arrays with a structured dtype, one or more fields can be specified to set
+            data for.
+        prototype : BufferPrototype, optional
+            The prototype of the buffer used for setting the data. If not provided, the
+            default buffer prototype is used.
+
+        Examples
+        --------
+        Setup a 1-dimensional array::
+
+            >>> import zarr
+            >>> z = zarr.zeros(
+            >>>        shape=(100,),
+            >>>        store=StorePath(MemoryStore(mode="w")),
+            >>>        chunk_shape=(100,),
+            >>>        dtype="i4",
+            >>>        )
+
+        Set all array elements to the same scalar value::
+
+            >>> z.set_basic_selection(..., 42)
+            >>> z[...]
+            array([42, 42, 42, ..., 42, 42, 42])
+
+        Set a portion of the array::
+
+            >>> z.set_basic_selection(slice(10), np.arange(10))
+            >>> z.set_basic_selection(slice(-10, None), np.arange(10)[::-1])
+            >>> z[...]
+            array([ 0, 1, 2, ..., 2, 1, 0])
+
+        Setup a 2-dimensional array::
+
+            >>> z = zarr.zeros(
+            >>>        shape=(5, 5),
+            >>>        store=StorePath(MemoryStore(mode="w")),
+            >>>        chunk_shape=(5, 5),
+            >>>        dtype="i4",
+            >>>        )
+
+        Set all array elements to the same scalar value::
+
+            >>> z.set_basic_selection(..., 42)
+
+        Set a portion of the array::
+
+            >>> z.set_basic_selection((0, slice(None)), np.arange(z.shape[1]))
+            >>> z.set_basic_selection((slice(None), 0), np.arange(z.shape[0]))
+            >>> z[...]
+            array([[ 0, 1, 2, 3, 4],
+                   [ 1, 42, 42, 42, 42],
+                   [ 2, 42, 42, 42, 42],
+                   [ 3, 42, 42, 42, 42],
+                   [ 4, 42, 42, 42, 42]])
+
+        Notes
+        -----
+        For arrays with a structured dtype, see zarr v2 for examples of how to use
+        the `fields` parameter.
+
+        This method provides the underlying implementation for modifying data via square
+        bracket notation, see :func:`__setitem__` for equivalent examples using the
+        alternative notation.
+
+        See Also
+        --------
+        get_basic_selection, get_mask_selection, set_mask_selection,
+        get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection,
+        set_orthogonal_selection, get_block_selection, set_block_selection,
+        vindex, oindex, blocks, __getitem__, __setitem__
+
+        """
         indexer = BasicIndexer(selection, self.shape, self.metadata.chunk_grid)
         sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype))
 
@@ -758,6 +1175,113 @@ def get_orthogonal_selection(
         fields: Fields | None = None,
         prototype: BufferPrototype = default_buffer_prototype,
     ) -> NDArrayLike:
+        """Retrieve data by making a selection for each dimension of the array. For
+        example, if an array has 2 dimensions, allows selecting specific rows and/or
+        columns. The selection for each dimension can be either an integer (indexing a
+        single item), a slice, an array of integers, or a Boolean array where True
+        values indicate a selection.
+
+        Parameters
+        ----------
+        selection : tuple
+            A selection for each dimension of the array. May be any combination of int,
+            slice, integer array or Boolean array.
+        out : NDBuffer, optional
+            If given, load the selected data directly into this buffer.
+        fields : str or sequence of str, optional
+            For arrays with a structured dtype, one or more fields can be specified to
+            extract data for.
+        prototype : BufferPrototype, optional
+            The prototype of the buffer to use for the output data. If not provided, the default buffer prototype is used.
+
+        Returns
+        -------
+        NDArrayLike
+            An array-like containing the data for the requested selection.
+ + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> import numpy as np + >>> data = np.arange(100).reshape(10, 10) + >>> z = Array.create( + >>> StorePath(MemoryStore(mode="w")), + >>> shape=data.shape, + >>> chunk_shape=data.shape, + >>> dtype=data.dtype, + >>> ) + >>> z[:] = data + + Retrieve rows and columns via any combination of int, slice, integer array and/or + Boolean array:: + + >>> z.get_orthogonal_selection(([1, 4], slice(None))) + array([[10, 11, 12, 13, 14, 15, 16, 17, 18, 19], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49]]) + >>> z.get_orthogonal_selection((slice(None), [1, 4])) + array([[ 1, 4], + [11, 14], + [21, 24], + [31, 34], + [41, 44], + [51, 54], + [61, 64], + [71, 74], + [81, 84], + [91, 94]]) + >>> z.get_orthogonal_selection(([1, 4], [1, 4])) + array([[11, 14], + [41, 44]]) + >>> sel = np.zeros(z.shape[0], dtype=bool) + >>> sel[1] = True + >>> sel[4] = True + >>> z.get_orthogonal_selection((sel, sel)) + array([[11, 14], + [41, 44]]) + + For convenience, the orthogonal selection functionality is also available via the + `oindex` property, e.g.:: + + >>> z.oindex[[1, 4], :] + array([[10, 11, 12, 13, 14, 15, 16, 17, 18, 19], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49]]) + >>> z.oindex[:, [1, 4]] + array([[ 1, 4], + [11, 14], + [21, 24], + [31, 34], + [41, 44], + [51, 54], + [61, 64], + [71, 74], + [81, 84], + [91, 94]]) + >>> z.oindex[[1, 4], [1, 4]] + array([[11, 14], + [41, 44]]) + >>> sel = np.zeros(z.shape[0], dtype=bool) + >>> sel[1] = True + >>> sel[4] = True + >>> z.oindex[sel, sel] + array([[11, 14], + [41, 44]]) + + Notes + ----- + Orthogonal indexing is also known as outer indexing. + + Slices with step > 1 are supported, but slices with negative step are not. + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_coordinate_selection, set_coordinate_selection, set_orthogonal_selection, + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ indexer = OrthogonalIndexer(selection, self.shape, self.metadata.chunk_grid) return sync( self._async_array._get_selection( @@ -768,11 +1292,106 @@ def get_orthogonal_selection( def set_orthogonal_selection( self, selection: OrthogonalSelection, - value: NDArrayLike, + value: npt.ArrayLike, *, fields: Fields | None = None, prototype: BufferPrototype = default_buffer_prototype, ) -> None: + """Modify data via a selection for each dimension of the array. + + Parameters + ---------- + selection : tuple + A selection for each dimension of the array. May be any combination of int, + slice, integer array or Boolean array. + value : npt.ArrayLike + An array-like array containing the data to be stored in the array. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to set + data for. + prototype : BufferPrototype, optional + The prototype of the buffer used for setting the data. If not provided, the + default buffer prototype is used. + + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> z = zarr.zeros( + >>> shape=(5, 5), + >>> store=StorePath(MemoryStore(mode="w")), + >>> chunk_shape=(5, 5), + >>> dtype="i4", + >>> ) + + + Set data for a selection of rows:: + + >>> z.set_orthogonal_selection(([1, 4], slice(None)), 1) + >>> z[...] 
+ array([[0, 0, 0, 0, 0], + [1, 1, 1, 1, 1], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [1, 1, 1, 1, 1]]) + + Set data for a selection of columns:: + + >>> z.set_orthogonal_selection((slice(None), [1, 4]), 2) + >>> z[...] + array([[0, 2, 0, 0, 2], + [1, 2, 1, 1, 2], + [0, 2, 0, 0, 2], + [0, 2, 0, 0, 2], + [1, 2, 1, 1, 2]]) + + Set data for a selection of rows and columns:: + + >>> z.set_orthogonal_selection(([1, 4], [1, 4]), 3) + >>> z[...] + array([[0, 2, 0, 0, 2], + [1, 3, 1, 1, 3], + [0, 2, 0, 0, 2], + [0, 2, 0, 0, 2], + [1, 3, 1, 1, 3]]) + + Set data from a 2D array:: + + >>> values = np.arange(10).reshape(2, 5) + >>> z.set_orthogonal_selection(([0, 3], ...), values) + >>> z[...] + array([[0, 1, 2, 3, 4], + [1, 3, 1, 1, 3], + [0, 2, 0, 0, 2], + [5, 6, 7, 8, 9], + [1, 3, 1, 1, 3]]) + + For convenience, this functionality is also available via the `oindex` property. + E.g.:: + + >>> z.oindex[[1, 4], [1, 4]] = 4 + >>> z[...] + array([[0, 1, 2, 3, 4], + [1, 4, 1, 1, 4], + [0, 2, 0, 0, 2], + [5, 6, 7, 8, 9], + [1, 4, 1, 1, 4]]) + + Notes + ----- + Orthogonal indexing is also known as outer indexing. + + Slices with step > 1 are supported, but slices with negative step are not. + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection, + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ indexer = OrthogonalIndexer(selection, self.shape, self.metadata.chunk_grid) return sync( self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype) @@ -786,6 +1405,71 @@ def get_mask_selection( fields: Fields | None = None, prototype: BufferPrototype = default_buffer_prototype, ) -> NDArrayLike: + """Retrieve a selection of individual items, by providing a Boolean array of the + same shape as the array against which the selection is being made, where True + values indicate a selected item. + + Parameters + ---------- + selection : ndarray, bool + A Boolean array of the same shape as the array against which the selection is + being made. + out : NDBuffer, optional + If given, load the selected data directly into this buffer. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to + extract data for. + prototype : BufferPrototype, optional + The prototype of the buffer to use for the output data. If not provided, the default buffer prototype is used. + + Returns + ------- + NDArrayLike + An array-like containing the data for the requested selection. + + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> import numpy as np + >>> data = np.arange(100).reshape(10, 10) + >>> z = Array.create( + >>> StorePath(MemoryStore(mode="w")), + >>> shape=data.shape, + >>> chunk_shape=data.shape, + >>> dtype=data.dtype, + >>> ) + >>> z[:] = data + + Retrieve items by specifying a mask:: + + >>> sel = np.zeros_like(z, dtype=bool) + >>> sel[1, 1] = True + >>> sel[4, 4] = True + >>> z.get_mask_selection(sel) + array([11, 44]) + + For convenience, the mask selection functionality is also available via the + `vindex` property, e.g.:: + + >>> z.vindex[sel] + array([11, 44]) + + Notes + ----- + Mask indexing is a form of vectorized or inner indexing, and is equivalent to + coordinate indexing. Internally the mask array is converted to coordinate + arrays by calling `np.nonzero`. 
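+
+        A small sketch of that equivalence (reusing ``z`` and ``sel`` from the
+        example above)::
+
+            >>> coords = np.nonzero(sel)
+            >>> np.array_equal(z.get_mask_selection(sel), z.get_coordinate_selection(coords))
+            True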
+ + See Also + -------- + get_basic_selection, set_basic_selection, set_mask_selection, + get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, + set_coordinate_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + """ + indexer = MaskIndexer(mask, self.shape, self.metadata.chunk_grid) return sync( self._async_array._get_selection( @@ -796,11 +1480,76 @@ def get_mask_selection( def set_mask_selection( self, mask: MaskSelection, - value: NDArrayLike, + value: npt.ArrayLike, *, fields: Fields | None = None, prototype: BufferPrototype = default_buffer_prototype, ) -> None: + """Modify a selection of individual items, by providing a Boolean array of the + same shape as the array against which the selection is being made, where True + values indicate a selected item. + + Parameters + ---------- + selection : ndarray, bool + A Boolean array of the same shape as the array against which the selection is + being made. + value : npt.ArrayLike + An array-like containing values to be stored into the array. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to set + data for. + + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> z = zarr.zeros( + >>> shape=(5, 5), + >>> store=StorePath(MemoryStore(mode="w")), + >>> chunk_shape=(5, 5), + >>> dtype="i4", + >>> ) + + Set data for a selection of items:: + + >>> sel = np.zeros_like(z, dtype=bool) + >>> sel[1, 1] = True + >>> sel[4, 4] = True + >>> z.set_mask_selection(sel, 1) + >>> z[...] + array([[0, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1]]) + + For convenience, this functionality is also available via the `vindex` property. + E.g.:: + + >>> z.vindex[sel] = 2 + >>> z[...] + array([[0, 0, 0, 0, 0], + [0, 2, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 2]]) + + Notes + ----- + Mask indexing is a form of vectorized or inner indexing, and is equivalent to + coordinate indexing. Internally the mask array is converted to coordinate + arrays by calling `np.nonzero`. + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, + get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, + set_coordinate_selection, get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ indexer = MaskIndexer(mask, self.shape, self.metadata.chunk_grid) sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype)) @@ -812,6 +1561,73 @@ def get_coordinate_selection( fields: Fields | None = None, prototype: BufferPrototype = default_buffer_prototype, ) -> NDArrayLike: + """Retrieve a selection of individual items, by providing the indices + (coordinates) for each selected item. + + Parameters + ---------- + selection : tuple + An integer (coordinate) array for each dimension of the array. + out : NDBuffer, optional + If given, load the selected data directly into this buffer. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to + extract data for. + prototype : BufferPrototype, optional + The prototype of the buffer to use for the output data. If not provided, the default buffer prototype is used. + + Returns + ------- + NDArrayLike + An array-like containing the data for the requested coordinate selection. 
+ + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> import numpy as np + >>> data = np.arange(0, 100, dtype="uint16").reshape((10, 10)) + >>> z = Array.create( + >>> StorePath(MemoryStore(mode="w")), + >>> shape=data.shape, + >>> chunk_shape=(3, 3), + >>> dtype=data.dtype, + >>> ) + >>> z[:] = data + + Retrieve items by specifying their coordinates:: + + >>> z.get_coordinate_selection(([1, 4], [1, 4])) + array([11, 44]) + + For convenience, the coordinate selection functionality is also available via the + `vindex` property, e.g.:: + + >>> z.vindex[[1, 4], [1, 4]] + array([11, 44]) + + Notes + ----- + Coordinate indexing is also known as point selection, and is a form of vectorized + or inner indexing. + + Slices are not supported. Coordinate arrays must be provided for all dimensions + of the array. + + Coordinate arrays may be multidimensional, in which case the output array will + also be multidimensional. Coordinate arrays are broadcast against each other + before being applied. The shape of the output will be the same as the shape of + each coordinate array after broadcasting. + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_orthogonal_selection, set_orthogonal_selection, set_coordinate_selection, + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ indexer = CoordinateIndexer(selection, self.shape, self.metadata.chunk_grid) out_array = sync( self._async_array._get_selection( @@ -819,18 +1635,81 @@ def get_coordinate_selection( ) ) - # restore shape - out_array = out_array.reshape(indexer.sel_shape) + if hasattr(out_array, "shape"): + # restore shape + out_array = np.array(out_array).reshape(indexer.sel_shape) return out_array def set_coordinate_selection( self, selection: CoordinateSelection, - value: NDArrayLike, + value: npt.ArrayLike, *, fields: Fields | None = None, prototype: BufferPrototype = default_buffer_prototype, ) -> None: + """Modify a selection of individual items, by providing the indices (coordinates) + for each item to be modified. + + Parameters + ---------- + selection : tuple + An integer (coordinate) array for each dimension of the array. + value : npt.ArrayLike + An array-like containing values to be stored into the array. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to set + data for. + + Examples + -------- + Setup a 2-dimensional array:: + + >>> import zarr + >>> z = zarr.zeros( + >>> shape=(5, 5), + >>> store=StorePath(MemoryStore(mode="w")), + >>> chunk_shape=(5, 5), + >>> dtype="i4", + >>> ) + + Set data for a selection of items:: + + >>> z.set_coordinate_selection(([1, 4], [1, 4]), 1) + >>> z[...] + array([[0, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1]]) + + For convenience, this functionality is also available via the `vindex` property. + E.g.:: + + >>> z.vindex[[1, 4], [1, 4]] = 2 + >>> z[...] + array([[0, 0, 0, 0, 0], + [0, 2, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 2]]) + + Notes + ----- + Coordinate indexing is also known as point selection, and is a form of vectorized + or inner indexing. + + Slices are not supported. Coordinate arrays must be provided for all dimensions + of the array. 
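+
+        For example, a sketch of setting one value per coordinate in a single
+        call (reusing ``z`` from the example above)::
+
+            >>> z.set_coordinate_selection(([1, 4], [1, 4]), [10, 40])
+            >>> z.get_coordinate_selection(([1, 4], [1, 4]))
+            array([10, 40])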
+
+        See Also
+        --------
+        get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection,
+        get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection,
+        get_block_selection, set_block_selection,
+        vindex, oindex, blocks, __getitem__, __setitem__
+
+        """
         # setup indexer
         indexer = CoordinateIndexer(selection, self.shape, self.metadata.chunk_grid)
 
@@ -844,18 +1723,99 @@ def set_coordinate_selection(
         # Handle types like `list` or `tuple`
         value = np.array(value)  # TODO replace with agnostic
         if hasattr(value, "shape") and len(value.shape) > 1:
-            value = value.reshape(-1)
+            value = np.array(value).reshape(-1)
 
         sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype))
 
     def get_block_selection(
         self,
-        selection: BlockSelection,
+        selection: BasicSelection,
         *,
         out: NDBuffer | None = None,
         fields: Fields | None = None,
         prototype: BufferPrototype = default_buffer_prototype,
     ) -> NDArrayLike:
+        """Retrieve a selection of individual blocks, by providing the chunk indices
+        (coordinates) for each selected block.
+
+        Parameters
+        ----------
+        selection : int or slice or tuple of int or slice
+            An integer (coordinate) or slice for each dimension of the array.
+        out : NDBuffer, optional
+            If given, load the selected data directly into this buffer.
+        fields : str or sequence of str, optional
+            For arrays with a structured dtype, one or more fields can be specified to
+            extract data for.
+        prototype : BufferPrototype, optional
+            The prototype of the buffer to use for the output data. If not provided, the default buffer prototype is used.
+
+        Returns
+        -------
+        NDArrayLike
+            An array-like containing the data for the requested block selection.
+
+        Examples
+        --------
+        Setup a 2-dimensional array::
+
+            >>> import zarr
+            >>> import numpy as np
+            >>> data = np.arange(0, 100, dtype="uint16").reshape((10, 10))
+            >>> z = Array.create(
+            >>>        StorePath(MemoryStore(mode="w")),
+            >>>        shape=data.shape,
+            >>>        chunk_shape=(3, 3),
+            >>>        dtype=data.dtype,
+            >>>        )
+            >>> z[:] = data
+
+        Retrieve items by specifying their block coordinates::
+
+            >>> z.get_block_selection((1, slice(None)))
+            array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
+                   [40, 41, 42, 43, 44, 45, 46, 47, 48, 49],
+                   [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]])
+
+        Which is equivalent to::
+
+            >>> z[3:6, :]
+            array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
+                   [40, 41, 42, 43, 44, 45, 46, 47, 48, 49],
+                   [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]])
+
+        For convenience, the block selection functionality is also available via the
+        `blocks` property, e.g.::
+
+            >>> z.blocks[1]
+            array([[30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
+                   [40, 41, 42, 43, 44, 45, 46, 47, 48, 49],
+                   [50, 51, 52, 53, 54, 55, 56, 57, 58, 59]])
+
+        Notes
+        -----
+        Block indexing is a convenience indexing method to work on individual chunks
+        with chunk index slicing. It has the same concept as Dask's `Array.blocks`
+        indexing.
+
+        Slices are supported. However, only with a step size of one.
+
+        Block index arrays may be multidimensional to index multidimensional arrays.
+ For example:: + + >>> z.blocks[0, 1:3] + array([[ 3, 4, 5, 6, 7, 8], + [13, 14, 15, 16, 17, 18], + [23, 24, 25, 26, 27, 28]]) + + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, + set_coordinate_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ indexer = BlockIndexer(selection, self.shape, self.metadata.chunk_grid) return sync( self._async_array._get_selection( @@ -865,28 +1825,147 @@ def get_block_selection( def set_block_selection( self, - selection: BlockSelection, - value: NDArrayLike, + selection: BasicSelection, + value: npt.ArrayLike, *, fields: Fields | None = None, prototype: BufferPrototype = default_buffer_prototype, ) -> None: + """Modify a selection of individual blocks, by providing the chunk indices + (coordinates) for each block to be modified. + + Parameters + ---------- + selection : tuple + An integer (coordinate) or slice for each dimension of the array. + value : npt.ArrayLike + An array-like containing the data to be stored in the block selection. + fields : str or sequence of str, optional + For arrays with a structured dtype, one or more fields can be specified to set + data for. + prototype : BufferPrototype, optional + The prototype of the buffer used for setting the data. If not provided, the + default buffer prototype is used. + + Examples + -------- + Set up a 2-dimensional array:: + + >>> import zarr + >>> z = zarr.zeros( + >>> shape=(6, 6), + >>> store=StorePath(MemoryStore(mode="w")), + >>> chunk_shape=(2, 2), + >>> dtype="i4", + >>> ) + + Set data for a selection of items:: + + >>> z.set_block_selection((1, 0), 1) + >>> z[...] + array([[0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0]]) + + For convenience, this functionality is also available via the `blocks` property. + E.g.:: + + >>> z.blocks[2, 1] = 4 + >>> z[...] + array([[0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0], + [0, 0, 4, 4, 0, 0], + [0, 0, 4, 4, 0, 0]]) + + >>> z.blocks[:, 2] = 7 + >>> z[...] + array([[0, 0, 0, 0, 7, 7], + [0, 0, 0, 0, 7, 7], + [1, 1, 0, 0, 7, 7], + [1, 1, 0, 0, 7, 7], + [0, 0, 4, 4, 7, 7], + [0, 0, 4, 4, 7, 7]]) + + Notes + ----- + Block indexing is a convenience indexing method to work on individual chunks + with chunk index slicing. It has the same concept as Dask's `Array.blocks` + indexing. + + Slices are supported. However, only with a step size of one. 
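Similarly, the value passed to `set_block_selection` may be an array whose shape matches the selected blocks rather than a scalar (a sketch, assuming the 6x6 array `z` with (2, 2) chunks from the Examples above)::

    >>> import numpy as np
    >>> z.set_block_selection((0, 0), np.arange(4).reshape(2, 2))
    >>> z[:2, :2]
    array([[0, 1],
           [2, 3]])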
+ + See Also + -------- + get_basic_selection, set_basic_selection, get_mask_selection, set_mask_selection, + get_orthogonal_selection, set_orthogonal_selection, get_coordinate_selection, + get_block_selection, set_block_selection, + vindex, oindex, blocks, __getitem__, __setitem__ + + """ indexer = BlockIndexer(selection, self.shape, self.metadata.chunk_grid) sync(self._async_array._set_selection(indexer, value, fields=fields, prototype=prototype)) @property def vindex(self) -> VIndex: + """Shortcut for vectorized (inner) indexing, see :func:`get_coordinate_selection`, + :func:`set_coordinate_selection`, :func:`get_mask_selection` and + :func:`set_mask_selection` for documentation and examples.""" return VIndex(self) @property def oindex(self) -> OIndex: + """Shortcut for orthogonal (outer) indexing, see :func:`get_orthogonal_selection` and + :func:`set_orthogonal_selection` for documentation and examples.""" return OIndex(self) @property def blocks(self) -> BlockIndex: + """Shortcut for blocked chunked indexing, see :func:`get_block_selection` and + :func:`set_block_selection` for documentation and examples.""" return BlockIndex(self) def resize(self, new_shape: ChunkCoords) -> Array: + """ + Change the shape of the array by growing or shrinking one or more + dimensions. + + This method does not modify the original Array object. Instead, it returns a new Array + with the specified shape. + + Examples + -------- + >>> import zarr + >>> z = zarr.zeros(shape=(10000, 10000), + >>> chunk_shape=(1000, 1000), + >>> store=StorePath(MemoryStore(mode="w")), + >>> dtype="i4",) + >>> z.shape + (10000, 10000) + >>> z = z.resize(20000, 1000) + >>> z.shape + (20000, 1000) + >>> z2 = z.resize(50, 50) + >>> z.shape + (20000, 1000) + >>> z2.shape + (50, 50) + + Notes + ----- + When resizing an array, the data are not rearranged in any way. + + If one or more dimensions are shrunk, any chunks falling outside the + new array shape will be deleted from the underlying store. + However, it is noteworthy that the chunks partially falling inside the new array + (i.e. boundary chunks) will remain intact, and therefore, + the data falling outside the new array but inside the boundary chunks + would be restored by a subsequent resize operation that grows the array size. + """ return type(self)( sync( self._async_array.resize(new_shape), diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 1a34d9f290..44691ea352 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -64,6 +64,8 @@ def __getitem__(self, key: slice) -> Self: ... def __setitem__(self, key: slice, value: Any) -> None: ... + def __array__(self) -> npt.NDArray[Any]: ... + def reshape( self, shape: ChunkCoords | Literal[-1], *, order: Literal["A", "C", "F"] = ... ) -> Self: ... @@ -232,7 +234,7 @@ def __add__(self, other: Buffer) -> Self: class NDBuffer: - """A n-dimensional memory block + """An n-dimensional memory block We use NDBuffer throughout Zarr to represent a n-dimensional memory block. diff --git a/src/zarr/common.py b/src/zarr/common.py index 9349f9f018..6bff189e86 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -30,8 +30,6 @@ BytesLike = bytes | bytearray | memoryview ChunkCoords = tuple[int, ...] ChunkCoordsLike = Iterable[int] -SliceSelection = tuple[slice, ...] -Selection = slice | SliceSelection ZarrFormat = Literal[2, 3] JSON = None | str | int | float | Enum | dict[str, "JSON"] | list["JSON"] | tuple["JSON", ...] 
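Returning to the `resize` notes above, a minimal sketch of the boundary-chunk behaviour (assuming the in-memory store used throughout these examples)::

    import zarr
    from zarr.store import MemoryStore, StorePath

    z = zarr.zeros(shape=(4, 4), chunk_shape=(2, 2),
                   store=StorePath(MemoryStore(mode="w")), dtype="i4")
    z[:] = 1
    z_small = z.resize((3, 3))     # no chunk falls fully outside (3, 3), so none are deleted
    z_back = z_small.resize((4, 4))
    # the ones written to the boundary chunks are visible again in z_back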
MemoryOrder = Literal["C", "F"] diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 98130fe0cd..74cbbe8c6b 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -23,39 +23,25 @@ import numpy as np import numpy.typing as npt +from zarr.buffer import NDArrayLike from zarr.common import ChunkCoords, product if TYPE_CHECKING: from zarr.array import Array - from zarr.buffer import NDArrayLike from zarr.chunk_grids import ChunkGrid +IntSequence = list[int] | npt.NDArray[np.intp] +ArrayOfIntOrBool = npt.NDArray[np.intp] | npt.NDArray[np.bool_] BasicSelector = int | slice | EllipsisType -BasicSelectorTuple = tuple[BasicSelector, ...] -BasicSelection = BasicSelector | BasicSelectorTuple -BasicSelectionNormalized = tuple[int | slice, ...] -CoordinateSelector = list[int] | npt.NDArray[np.intp] -CoordinateSelection = CoordinateSelector | tuple[CoordinateSelector, ...] -CoordinateSelectionNormalized = tuple[npt.NDArray[np.intp], ...] -BlockSelector = int | slice -BlockSelection = BlockSelector | tuple[BlockSelector, ...] -BlockSelectionNormalized = tuple[BlockSelector, ...] -MaskSelection = npt.NDArray[np.bool_] -OrthogonalSelector = int | slice | npt.NDArray[np.intp] | npt.NDArray[np.bool_] -OrthogonalSelection = OrthogonalSelector | tuple[OrthogonalSelector, ...] -OrthogonalSelectionNormalized = tuple[OrthogonalSelector, ...] +Selector = BasicSelector | ArrayOfIntOrBool -Selection = ( - BasicSelection | CoordinateSelection | BlockSelection | MaskSelection | OrthogonalSelection -) -SelectionNormalized = ( - BasicSelectionNormalized - | CoordinateSelectionNormalized - | BlockSelectionNormalized - | MaskSelection - | OrthogonalSelectionNormalized -) -Selector = int | slice | npt.NDArray[np.intp] | npt.NDArray[np.bool_] +BasicSelection = BasicSelector | tuple[BasicSelector, ...] # also used for BlockIndex +CoordinateSelection = IntSequence | tuple[IntSequence, ...] +MaskSelection = npt.NDArray[np.bool_] +OrthogonalSelection = Selector | tuple[Selector, ...] +Selection = BasicSelection | CoordinateSelection | MaskSelection | OrthogonalSelection +CoordinateSelectionNormalized = tuple[npt.NDArray[np.intp], ...] +SelectionNormalized = tuple[Selector, ...] | ArrayOfIntOrBool SelectionWithFields = Selection | str | Sequence[str] SelectorTuple = tuple[Selector, ...] | npt.NDArray[np.intp] | slice Fields = str | list[str] | tuple[str, ...] 
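As a sketch of what the consolidated aliases above accept, each of the following is a valid `OrthogonalSelection` for a 2-dimensional array (assuming an array `z` as in the earlier docstring examples)::

    import numpy as np

    z.oindex[0, :]                        # BasicSelector: int and slice
    z.oindex[np.array([0, 2]), :]         # integer array per dimension
    z.oindex[np.arange(10) % 2 == 0, :]   # boolean array per dimension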
@@ -846,7 +832,7 @@ def __getitem__(self, selection: OrthogonalSelection) -> NDArrayLike: cast(OrthogonalSelection, new_selection), fields=fields ) - def __setitem__(self, selection: OrthogonalSelection, value: NDArrayLike) -> None: + def __setitem__(self, selection: OrthogonalSelection, value: npt.ArrayLike) -> None: fields, new_selection = pop_fields(selection) new_selection = ensure_tuple(new_selection) new_selection = replace_lists(new_selection) @@ -861,7 +847,7 @@ class BlockIndexer(Indexer): shape: ChunkCoords drop_axes: ChunkCoords - def __init__(self, selection: BlockSelection, shape: ChunkCoords, chunk_grid: ChunkGrid): + def __init__(self, selection: BasicSelection, shape: ChunkCoords, chunk_grid: ChunkGrid): chunk_shape = get_chunk_shape(chunk_grid) # handle ellipsis @@ -940,18 +926,18 @@ def __iter__(self) -> Iterator[ChunkProjection]: class BlockIndex: array: Array - def __getitem__(self, selection: BlockSelection) -> NDArrayLike: + def __getitem__(self, selection: BasicSelection) -> NDArrayLike: fields, new_selection = pop_fields(selection) new_selection = ensure_tuple(new_selection) new_selection = replace_lists(new_selection) - return self.array.get_block_selection(cast(BlockSelection, new_selection), fields=fields) + return self.array.get_block_selection(cast(BasicSelection, new_selection), fields=fields) - def __setitem__(self, selection: BlockSelection, value: NDArrayLike) -> None: + def __setitem__(self, selection: BasicSelection, value: npt.ArrayLike) -> None: fields, new_selection = pop_fields(selection) new_selection = ensure_tuple(new_selection) new_selection = replace_lists(new_selection) return self.array.set_block_selection( - cast(BlockSelection, new_selection), value, fields=fields + cast(BasicSelection, new_selection), value, fields=fields ) @@ -1138,7 +1124,7 @@ def __getitem__(self, selection: CoordinateSelection | MaskSelection) -> NDArray raise VindexInvalidSelectionError(new_selection) def __setitem__( - self, selection: CoordinateSelection | MaskSelection, value: NDArrayLike + self, selection: CoordinateSelection | MaskSelection, value: npt.ArrayLike ) -> None: fields, new_selection = pop_fields(selection) new_selection = ensure_tuple(new_selection) @@ -1206,8 +1192,8 @@ def pop_fields(selection: SelectionWithFields) -> tuple[Fields | None, Selection return fields, selection -def make_slice_selection(selection: Any) -> list[int | slice]: - ls: list[int | slice] = [] +def make_slice_selection(selection: Any) -> list[slice]: + ls: list[slice] = [] for dim_selection in selection: if is_integer(dim_selection): ls.append(slice(int(dim_selection), int(dim_selection) + 1, 1)) diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py index 514294c4b0..7cb0d0f804 100644 --- a/tests/v3/test_codecs.py +++ b/tests/v3/test_codecs.py @@ -21,9 +21,8 @@ TransposeCodec, ZstdCodec, ) -from zarr.common import Selection from zarr.config import config -from zarr.indexing import morton_order_iter +from zarr.indexing import Selection, morton_order_iter from zarr.store import MemoryStore, StorePath from zarr.testing.utils import assert_bytes_equal From 6fc05b70e7b7eee202f73beaf2db13ac81cbf475 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Fri, 21 Jun 2024 17:33:26 +0200 Subject: [PATCH 0580/1078] `parse_shapelike` allows 0 (#1979) * fill in shapelike tests, handle negative values correctly, allow 0 * Update tests/v3/test_common.py Co-authored-by: Joe Hamman * move float test case to the appropriate test --------- Co-authored-by: Joe Hamman --- src/zarr/common.py | 20 
++++++----- tests/v3/test_common.py | 74 +++++++++++++++++++++++++++-------------- 2 files changed, 61 insertions(+), 33 deletions(-) diff --git a/src/zarr/common.py b/src/zarr/common.py index 6bff189e86..342db1412d 100644 --- a/src/zarr/common.py +++ b/src/zarr/common.py @@ -135,17 +135,21 @@ def parse_named_configuration( def parse_shapelike(data: int | Iterable[int]) -> tuple[int, ...]: if isinstance(data, int): + if data < 0: + raise ValueError(f"Expected a non-negative integer. Got {data} instead") return (data,) - if not isinstance(data, Iterable): - raise TypeError(f"Expected an iterable. Got {data} instead.") - data_tuple = tuple(data) - if len(data_tuple) == 0: - raise ValueError("Expected at least one element. Got 0.") + try: + data_tuple = tuple(data) + except TypeError as e: + msg = f"Expected an integer or an iterable of integers. Got {data} instead." + raise TypeError(msg) from e + if not all(isinstance(v, int) for v in data_tuple): - msg = f"Expected an iterable of integers. Got {type(data)} instead." + msg = f"Expected an iterable of integers. Got {data} instead." raise TypeError(msg) - if not all(lambda v: v > 0 for v in data_tuple): - raise ValueError(f"All values must be greater than 0. Got {data}.") + if not all(v > -1 for v in data_tuple): + msg = f"Expected all values to be non-negative. Got {data} instead." + raise ValueError(msg) return data_tuple diff --git a/tests/v3/test_common.py b/tests/v3/test_common.py index cc33aa75cf..bb59789843 100644 --- a/tests/v3/test_common.py +++ b/tests/v3/test_common.py @@ -14,28 +14,28 @@ @pytest.mark.parametrize("data", [(0, 0, 0, 0), (1, 3, 4, 5, 6), (2, 4)]) -def test_product(data: tuple[int, ...]): +def test_product(data: tuple[int, ...]) -> None: assert product(data) == np.prod(data) # todo: test -def test_concurrent_map(): ... +def test_concurrent_map() -> None: ... # todo: test -def test_to_thread(): ... +def test_to_thread() -> None: ... # todo: test -def test_enum_names(): ... +def test_enum_names() -> None: ... # todo: test -def test_parse_enum(): ... +def test_parse_enum() -> None: ... @pytest.mark.parametrize("data", [("foo", "bar"), (10, 11)]) -def test_parse_name_invalid(data: tuple[Any, Any]): +def test_parse_name_invalid(data: tuple[Any, Any]) -> None: observed, expected = data if isinstance(observed, str): with pytest.raises(ValueError, match=f"Expected '{expected}'. 
Got {observed} instead."): @@ -48,47 +48,71 @@ def test_parse_name_invalid(data: tuple[Any, Any]): @pytest.mark.parametrize("data", [("foo", "foo"), ("10", "10")]) -def test_parse_name_valid(data: tuple[Any, Any]): +def test_parse_name_valid(data: tuple[Any, Any]) -> None: observed, expected = data assert parse_name(observed, expected) == observed @pytest.mark.parametrize("data", [0, 1, "hello", "f"]) -def test_parse_indexing_order_invalid(data): +def test_parse_indexing_order_invalid(data: Any) -> None: with pytest.raises(ValueError, match="Expected one of"): parse_indexing_order(data) @pytest.mark.parametrize("data", ["C", "F"]) -def parse_indexing_order_valid(data: Literal["C", "F"]): +def parse_indexing_order_valid(data: Literal["C", "F"]) -> None: assert parse_indexing_order(data) == data -@pytest.mark.parametrize("data", [("0", 1, 2, 3), {"0": "0"}, []]) -def test_parse_shapelike_invalid(data: Any): - if isinstance(data, Iterable): - if len(data) == 0: - with pytest.raises(ValueError, match="Expected at least one element."): - parse_shapelike(data) - else: - with pytest.raises(TypeError, match="Expected an iterable of integers"): - parse_shapelike(data) - else: - with pytest.raises(TypeError, match="Expected an iterable."): - parse_shapelike(data) +@pytest.mark.parametrize("data", [lambda v: v, slice(None)]) +def test_parse_shapelike_invalid_single_type(data: Any) -> None: + """ + Test that we get the expected error message when passing in a value that is not an integer + or an iterable of integers. + """ + with pytest.raises(TypeError, match="Expected an integer or an iterable of integers."): + parse_shapelike(data) + + +def test_parse_shapelike_invalid_single_value() -> None: + """ + Test that we get the expected error message when passing in a negative integer. + """ + with pytest.raises(ValueError, match="Expected a non-negative integer."): + parse_shapelike(-1) + + +@pytest.mark.parametrize("data", ["shape", ("0", 1, 2, 3), {"0": "0"}, ((1, 2), (2, 2)), (4.0, 2)]) +def test_parse_shapelike_invalid_iterable_types(data: Any) -> None: + """ + Test that we get the expected error message when passing in an iterable containing + non-integer elements + """ + with pytest.raises(TypeError, match="Expected an iterable of integers"): + parse_shapelike(data) + + +@pytest.mark.parametrize("data", [(1, 2, 3, -1), (-10,)]) +def test_parse_shapelike_invalid_iterable_values(data: Any) -> None: + """ + Test that we get the expected error message when passing in an iterable containing negative + integers + """ + with pytest.raises(ValueError, match="Expected all values to be non-negative."): + parse_shapelike(data) -@pytest.mark.parametrize("data", [range(10), [0, 1, 2, 3], (3, 4, 5)]) -def test_parse_shapelike_valid(data: Iterable[Any]): +@pytest.mark.parametrize("data", [range(10), [0, 1, 2, 3], (3, 4, 5), ()]) +def test_parse_shapelike_valid(data: Iterable[int]) -> None: assert parse_shapelike(data) == tuple(data) # todo: more dtypes @pytest.mark.parametrize("data", [("uint8", np.uint8), ("float64", np.float64)]) -def parse_dtype(data: tuple[str, np.dtype]): +def parse_dtype(data: tuple[str, np.dtype]) -> None: unparsed, parsed = data assert parse_dtype(unparsed) == parsed # todo: figure out what it means to test this -def test_parse_fill_value(): ... +def test_parse_fill_value() -> None: ... 
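In short, the revised `parse_shapelike` accepts zero-valued and empty shapes while rejecting negatives and non-integers (an illustrative sketch)::

    from zarr.common import parse_shapelike

    parse_shapelike(0)          # (0,) -- zero is now allowed
    parse_shapelike(())         # ()  -- the empty shape is now allowed
    parse_shapelike(range(3))   # (0, 1, 2)
    parse_shapelike(-1)         # ValueError: Expected a non-negative integer
    parse_shapelike((4.0, 2))   # TypeError: Expected an iterable of integers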
From 65dc4ccc41475621c245d82d7cc0c968297569dd Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Sat, 22 Jun 2024 14:29:05 +0200 Subject: [PATCH 0581/1078] 0 dim arrays: indexing (#1980) * fill in shapelike tests, handle negative values correctly, allow 0 * support indexing arrays with empty shape * fix behavior of AsyncArray._set_selection when converting a scalar input into an array. ensures that the dtype of the array is used. * remove array interface dict --- src/zarr/array.py | 37 ++++++--- tests/v3/test_indexing.py | 162 ++++++++++++++++++++------------------ 2 files changed, 111 insertions(+), 88 deletions(-) diff --git a/src/zarr/array.py b/src/zarr/array.py index 3b5ecce8ee..8be901b0fc 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -487,7 +487,7 @@ async def _set_selection( # check value shape if np.isscalar(value): - value = np.asanyarray(value) + value = np.asanyarray(value, dtype=self.metadata.dtype) else: if not hasattr(value, "shape"): value = np.asarray(value, self.metadata.dtype) @@ -700,6 +700,24 @@ def order(self) -> Literal["C", "F"]: def read_only(self) -> bool: return self._async_array.read_only + def __array__( + self, dtype: npt.DTypeLike | None = None, copy: bool | None = None + ) -> NDArrayLike: + """ + This method is used by numpy when converting zarr.Array into a numpy array. + For more information, see https://numpy.org/devdocs/user/basics.interoperability.html#the-array-method + """ + if copy is False: + msg = "`copy=False` is not supported. This method always creates a copy." + raise ValueError(msg) + + arr_np = self[...] + + if dtype is not None: + arr_np = arr_np.astype(dtype) + + return arr_np + def __getitem__(self, selection: Selection) -> NDArrayLike: """Retrieve data for an item or region of the array. 
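The new `__array__` method means numpy conversion functions work directly on `Array` (a sketch, following the method body above)::

    import numpy as np

    arr = np.asarray(z)                 # delegates to z.__array__, which loads z[...]
    arr_f8 = np.asarray(z, dtype="f8")  # dtype conversion happens via astype after loading
    # z.__array__(copy=False) raises ValueError, since a copy is always made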
@@ -1062,17 +1080,14 @@ def get_basic_selection( """ - if self.shape == (): - raise NotImplementedError - else: - return sync( - self._async_array._get_selection( - BasicIndexer(selection, self.shape, self.metadata.chunk_grid), - out=out, - fields=fields, - prototype=prototype, - ) + return sync( + self._async_array._get_selection( + BasicIndexer(selection, self.shape, self.metadata.chunk_grid), + out=out, + fields=fields, + prototype=prototype, ) + ) def set_basic_selection( self, diff --git a/tests/v3/test_indexing.py b/tests/v3/test_indexing.py index 00ea947b49..13a7d953e1 100644 --- a/tests/v3/test_indexing.py +++ b/tests/v3/test_indexing.py @@ -42,7 +42,7 @@ def zarr_array_from_numpy_array( chunk_shape=chunk_shape or a.shape, chunk_key_encoding=("v2", "."), ) - z[:] = a + z[()] = a return z @@ -111,42 +111,55 @@ def test_replace_ellipsis(): ) -@pytest.mark.xfail(reason="zero-dimension arrays are not supported in v3") -def test_get_basic_selection_0d(store: StorePath): +@pytest.mark.parametrize( + "value, dtype", + [ + (42, "uint8"), + pytest.param( + (b"aaa", 1, 4.2), [("foo", "S3"), ("bar", "i4"), ("baz", "f8")], marks=pytest.mark.xfail + ), + ], +) +@pytest.mark.parametrize("use_out", (True, False)) +def test_get_basic_selection_0d(store: StorePath, use_out: bool, value: Any, dtype: Any) -> None: # setup - a = np.array(42) - z = zarr_array_from_numpy_array(store, a) + arr_np = np.array(value, dtype=dtype) + arr_z = zarr_array_from_numpy_array(store, arr_np) - assert_array_equal(a, z.get_basic_selection(Ellipsis)) - assert_array_equal(a, z[...]) - assert 42 == z.get_basic_selection(()) - assert 42 == z[()] + assert_array_equal(arr_np, arr_z.get_basic_selection(Ellipsis)) + assert_array_equal(arr_np, arr_z[...]) + assert value == arr_z.get_basic_selection(()) + assert value == arr_z[()] - # test out param - b = NDBuffer.from_numpy_array(np.zeros_like(a)) - z.get_basic_selection(Ellipsis, out=b) - assert_array_equal(a, b) + if use_out: + # test out param + b = NDBuffer.from_numpy_array(np.zeros_like(arr_np)) + arr_z.get_basic_selection(Ellipsis, out=b) + assert_array_equal(arr_np, b.as_ndarray_like()) + + # todo: uncomment the structured array tests when we can make them pass, + # or delete them if we formally decide not to support structured dtypes. 
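    # A plain-language sketch of the 0-d semantics exercised above, using the
    # zarr_array_from_numpy_array helper defined earlier in this module:
    #
    #   z = zarr_array_from_numpy_array(store, np.array(42, dtype="uint8"))
    #   z[()]       # compares equal to 42
    #   z[...]      # a 0-d array: array(42, dtype=uint8)
    #   z[()] = 7   # scalars are coerced to the array's dtype before writing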
# test structured array - value = (b"aaa", 1, 4.2) - a = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) - z = zarr_array_from_numpy_array(store, a) - z[()] = value - assert_array_equal(a, z.get_basic_selection(Ellipsis)) - assert_array_equal(a, z[...]) - assert a[()] == z.get_basic_selection(()) - assert a[()] == z[()] - assert b"aaa" == z.get_basic_selection((), fields="foo") - assert b"aaa" == z["foo"] - assert a[["foo", "bar"]] == z.get_basic_selection((), fields=["foo", "bar"]) - assert a[["foo", "bar"]] == z["foo", "bar"] - # test out param - b = NDBuffer.from_numpy_array(np.zeros_like(a)) - z.get_basic_selection(Ellipsis, out=b) - assert_array_equal(a, b) - c = NDBuffer.from_numpy_array(np.zeros_like(a[["foo", "bar"]])) - z.get_basic_selection(Ellipsis, out=c, fields=["foo", "bar"]) - assert_array_equal(a[["foo", "bar"]], c) + # value = (b"aaa", 1, 4.2) + # a = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) + # z = zarr_array_from_numpy_array(store, a) + # z[()] = value + # assert_array_equal(a, z.get_basic_selection(Ellipsis)) + # assert_array_equal(a, z[...]) + # assert a[()] == z.get_basic_selection(()) + # assert a[()] == z[()] + # assert b"aaa" == z.get_basic_selection((), fields="foo") + # assert b"aaa" == z["foo"] + # assert a[["foo", "bar"]] == z.get_basic_selection((), fields=["foo", "bar"]) + # assert a[["foo", "bar"]] == z["foo", "bar"] + # # test out param + # b = NDBuffer.from_numpy_array(np.zeros_like(a)) + # z.get_basic_selection(Ellipsis, out=b) + # assert_array_equal(a, b) + # c = NDBuffer.from_numpy_array(np.zeros_like(a[["foo", "bar"]])) + # z.get_basic_selection(Ellipsis, out=c, fields=["foo", "bar"]) + # assert_array_equal(a[["foo", "bar"]], c) basic_selections_1d = [ @@ -466,51 +479,46 @@ def test_fancy_indexing_doesnt_mix_with_implicit_slicing(store: StorePath): np.testing.assert_array_equal(z2[..., [1, 2, 3]], 0) -@pytest.mark.xfail(reason="zero-dimension arrays are not supported in v3") -def test_set_basic_selection_0d(store: StorePath): - # setup - v = np.array(42) - a = np.zeros_like(v) - z = zarr_array_from_numpy_array(store, v) - assert_array_equal(a, z[:]) - - # tests - z.set_basic_selection(Ellipsis, v) - assert_array_equal(v, z[:]) - z[...] = 0 - assert_array_equal(a, z[:]) - z[...] = v - assert_array_equal(v, z[:]) - - # test structured array - value = (b"aaa", 1, 4.2) - v = np.array(value, dtype=[("foo", "S3"), ("bar", "i4"), ("baz", "f8")]) - a = np.zeros_like(v) - z = zarr_array_from_numpy_array(store, a) - - # tests - z.set_basic_selection(Ellipsis, v) - assert_array_equal(v, z[:]) - z.set_basic_selection(Ellipsis, a) - assert_array_equal(a, z[:]) - z[...] = v - assert_array_equal(v, z[:]) - z[...] 
= a - assert_array_equal(a, z[:]) - # with fields - z.set_basic_selection(Ellipsis, v["foo"], fields="foo") - assert v["foo"] == z["foo"] - assert a["bar"] == z["bar"] - assert a["baz"] == z["baz"] - z["bar"] = v["bar"] - assert v["foo"] == z["foo"] - assert v["bar"] == z["bar"] - assert a["baz"] == z["baz"] - # multiple field assignment not supported - with pytest.raises(IndexError): - z.set_basic_selection(Ellipsis, v[["foo", "bar"]], fields=["foo", "bar"]) - with pytest.raises(IndexError): - z[..., "foo", "bar"] = v[["foo", "bar"]] +@pytest.mark.parametrize( + "value, dtype", + [ + (42, "uint8"), + pytest.param( + (b"aaa", 1, 4.2), [("foo", "S3"), ("bar", "i4"), ("baz", "f8")], marks=pytest.mark.xfail + ), + ], +) +def test_set_basic_selection_0d( + store: StorePath, value: Any, dtype: str | list[tuple[str, str]] +) -> None: + arr_np = np.array(value, dtype=dtype) + arr_np_zeros = np.zeros_like(arr_np, dtype=dtype) + arr_z = zarr_array_from_numpy_array(store, arr_np_zeros) + assert_array_equal(arr_np_zeros, arr_z) + + arr_z.set_basic_selection(Ellipsis, value) + assert_array_equal(value, arr_z) + arr_z[...] = 0 + assert_array_equal(arr_np_zeros, arr_z) + arr_z[...] = value + assert_array_equal(value, arr_z) + + # todo: uncomment the structured array tests when we can make them pass, + # or delete them if we formally decide not to support structured dtypes. + + # arr_z.set_basic_selection(Ellipsis, v["foo"], fields="foo") + # assert v["foo"] == arr_z["foo"] + # assert arr_np_zeros["bar"] == arr_z["bar"] + # assert arr_np_zeros["baz"] == arr_z["baz"] + # arr_z["bar"] = v["bar"] + # assert v["foo"] == arr_z["foo"] + # assert v["bar"] == arr_z["bar"] + # assert arr_np_zeros["baz"] == arr_z["baz"] + # # multiple field assignment not supported + # with pytest.raises(IndexError): + # arr_z.set_basic_selection(Ellipsis, v[["foo", "bar"]], fields=["foo", "bar"]) + # with pytest.raises(IndexError): + # arr_z[..., "foo", "bar"] = v[["foo", "bar"]] def _test_get_orthogonal_selection(a, z, selection): From 8aadd15326ed2cc3fb43d6b82aa67e02ed21cb0a Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Sun, 23 Jun 2024 17:58:54 +0200 Subject: [PATCH 0582/1078] [v3] Elevate codec pipeline (#1932) * initial work toward pushing codecpipeline higher in the stacK * remove codecpipeline from metadata, add it to AsyncArray and or create it dynamically * revert changes to blosc.py * revert changes to test_codecs.py * consistent expanded function signature for evolve_from_array_spec * restore wider function signature for codec.validate to avoid self-referential function call * remove commented code block * make codec_pipeline a cached property of sharding codec * cached_property -> vanilla property --- src/zarr/abc/codec.py | 26 ++++++-- src/zarr/array.py | 27 ++++++-- src/zarr/codecs/blosc.py | 9 +-- src/zarr/codecs/pipeline.py | 124 ++++++++++++++++++++--------------- src/zarr/codecs/sharding.py | 67 ++++++++----------- src/zarr/codecs/transpose.py | 22 ++++++- src/zarr/metadata.py | 52 ++++++++------- 7 files changed, 191 insertions(+), 136 deletions(-) diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 1f452159ed..1d7106e25a 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -2,12 +2,15 @@ from abc import abstractmethod from collections.abc import Awaitable, Callable, Iterable -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Generic, TypeVar + +import numpy as np from zarr.abc.metadata import Metadata from zarr.abc.store import ByteGetter, 
ByteSetter from zarr.buffer import Buffer, NDBuffer -from zarr.common import concurrent_map +from zarr.chunk_grids import ChunkGrid +from zarr.common import ChunkCoords, concurrent_map from zarr.config import config if TYPE_CHECKING: @@ -15,7 +18,6 @@ from zarr.array_spec import ArraySpec from zarr.indexing import SelectorTuple - from zarr.metadata import ArrayMetadata CodecInput = TypeVar("CodecInput", bound=NDBuffer | Buffer) CodecOutput = TypeVar("CodecOutput", bound=NDBuffer | Buffer) @@ -75,13 +77,18 @@ def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: """ return self - def validate(self, array_metadata: ArrayMetadata) -> None: + def validate(self, *, shape: ChunkCoords, dtype: np.dtype[Any], chunk_grid: ChunkGrid) -> None: """Validates that the codec configuration is compatible with the array metadata. Raises errors when the codec configuration is not compatible. Parameters ---------- - array_metadata : ArrayMetadata + shape: ChunkCoords + The array shape + dtype: np.dtype[Any] + The array data type + chunk_grid: ChunkGrid + The array chunk grid """ ... @@ -275,13 +282,18 @@ def supports_partial_decode(self) -> bool: ... def supports_partial_encode(self) -> bool: ... @abstractmethod - def validate(self, array_metadata: ArrayMetadata) -> None: + def validate(self, *, shape: ChunkCoords, dtype: np.dtype[Any], chunk_grid: ChunkGrid) -> None: """Validates that all codec configurations are compatible with the array metadata. Raises errors when a codec configuration is not compatible. Parameters ---------- - array_metadata : ArrayMetadata + shape: ChunkCoords + The array shape + dtype: np.dtype[Any] + The array data type + chunk_grid: ChunkGrid + The array chunk grid """ ... diff --git a/src/zarr/array.py b/src/zarr/array.py index 8be901b0fc..4318050dd5 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -11,19 +11,21 @@ # 1. Was splitting the array into two classes really necessary? 
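With the widened signature, any codec instance `codec` can now be validated straight from array properties instead of a full `ArrayMetadata` object (a sketch; the `RegularChunkGrid` constructor arguments are an assumption)::

    import numpy as np
    from zarr.chunk_grids import RegularChunkGrid

    codec.validate(
        shape=(10, 10),
        dtype=np.dtype("uint16"),
        chunk_grid=RegularChunkGrid(chunk_shape=(5, 5)),
    )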
from asyncio import gather from collections.abc import Iterable -from dataclasses import dataclass, replace +from dataclasses import dataclass, field, replace from typing import Any, Literal, cast import numpy as np import numpy.typing as npt -from zarr.abc.codec import Codec +from zarr.abc.codec import Codec, CodecPipeline from zarr.abc.store import set_or_delete from zarr.attributes import Attributes from zarr.buffer import BufferPrototype, NDArrayLike, NDBuffer, default_buffer_prototype from zarr.chunk_grids import RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, DefaultChunkKeyEncoding, V2ChunkKeyEncoding from zarr.codecs import BytesCodec +from zarr.codecs._v2 import V2Compressor, V2Filters +from zarr.codecs.pipeline import BatchedCodecPipeline from zarr.common import ( JSON, ZARR_JSON, @@ -63,8 +65,8 @@ from zarr.sync import sync -def parse_array_metadata(data: Any) -> ArrayMetadata: - if isinstance(data, ArrayMetadata): +def parse_array_metadata(data: Any) -> ArrayV2Metadata | ArrayV3Metadata: + if isinstance(data, ArrayV2Metadata | ArrayV3Metadata): return data elif isinstance(data, dict): if data["zarr_format"] == 3: @@ -74,10 +76,22 @@ def parse_array_metadata(data: Any) -> ArrayMetadata: raise TypeError +def create_codec_pipeline(metadata: ArrayV2Metadata | ArrayV3Metadata) -> BatchedCodecPipeline: + if isinstance(metadata, ArrayV3Metadata): + return BatchedCodecPipeline.from_list(metadata.codecs) + elif isinstance(metadata, ArrayV2Metadata): + return BatchedCodecPipeline.from_list( + [V2Filters(metadata.filters or []), V2Compressor(metadata.compressor)] + ) + else: + raise AssertionError + + @dataclass(frozen=True) class AsyncArray: metadata: ArrayMetadata store_path: StorePath + codec_pipeline: CodecPipeline = field(init=False) order: Literal["C", "F"] def __init__( @@ -92,6 +106,7 @@ def __init__( object.__setattr__(self, "metadata", metadata_parsed) object.__setattr__(self, "store_path", store_path) object.__setattr__(self, "order", order_parsed) + object.__setattr__(self, "codec_pipeline", create_codec_pipeline(metadata=metadata_parsed)) @classmethod async def create( @@ -443,7 +458,7 @@ async def _get_selection( ) if product(indexer.shape) > 0: # reading chunks and decoding them - await self.metadata.codec_pipeline.read( + await self.codec_pipeline.read( [ ( self.store_path / self.metadata.encode_chunk_key(chunk_coords), @@ -503,7 +518,7 @@ async def _set_selection( value_buffer = prototype.nd_buffer.from_ndarray_like(value) # merging with existing data and encoding chunks - await self.metadata.codec_pipeline.write( + await self.codec_pipeline.write( [ ( self.store_path / self.metadata.encode_chunk_key(chunk_coords), diff --git a/src/zarr/codecs/blosc.py b/src/zarr/codecs/blosc.py index e577d18fb2..df1976d4c1 100644 --- a/src/zarr/codecs/blosc.py +++ b/src/zarr/codecs/blosc.py @@ -125,17 +125,14 @@ def to_dict(self) -> dict[str, JSON]: } def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: + dtype = array_spec.dtype new_codec = self if new_codec.typesize is None: - new_codec = replace(new_codec, typesize=array_spec.dtype.itemsize) + new_codec = replace(new_codec, typesize=dtype.itemsize) if new_codec.shuffle is None: new_codec = replace( new_codec, - shuffle=( - BloscShuffle.bitshuffle - if array_spec.dtype.itemsize == 1 - else BloscShuffle.shuffle - ), + shuffle=(BloscShuffle.bitshuffle if dtype.itemsize == 1 else BloscShuffle.shuffle), ) return new_codec diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index 
acef311a8c..a7f47661b8 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -2,10 +2,12 @@ from collections.abc import Iterable, Iterator from dataclasses import dataclass -from itertools import islice -from typing import TYPE_CHECKING, TypeVar +from itertools import islice, pairwise +from typing import TYPE_CHECKING, Any, TypeVar from warnings import warn +import numpy as np + from zarr.abc.codec import ( ArrayArrayCodec, ArrayBytesCodec, @@ -17,11 +19,11 @@ ) from zarr.abc.store import ByteGetter, ByteSetter from zarr.buffer import Buffer, BufferPrototype, NDBuffer +from zarr.chunk_grids import ChunkGrid from zarr.codecs.registry import get_codec_class -from zarr.common import JSON, concurrent_map, parse_named_configuration +from zarr.common import JSON, ChunkCoords, concurrent_map, parse_named_configuration from zarr.config import config from zarr.indexing import SelectorTuple, is_scalar, is_total_slice -from zarr.metadata import ArrayMetadata if TYPE_CHECKING: from typing_extensions import Self @@ -87,54 +89,11 @@ def to_dict(self) -> JSON: return [c.to_dict() for c in self] def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: - return type(self).from_list([c.evolve_from_array_spec(array_spec) for c in self]) - - @staticmethod - def codecs_from_list( - codecs: list[Codec], - ) -> tuple[tuple[ArrayArrayCodec, ...], ArrayBytesCodec, tuple[BytesBytesCodec, ...]]: - from zarr.codecs.sharding import ShardingCodec - - if not any(isinstance(codec, ArrayBytesCodec) for codec in codecs): - raise ValueError("Exactly one array-to-bytes codec is required.") - - prev_codec: Codec | None = None - for codec in codecs: - if prev_codec is not None: - if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, ArrayBytesCodec): - raise ValueError( - f"ArrayBytesCodec '{type(codec)}' cannot follow after ArrayBytesCodec '{type(prev_codec)}' because exactly 1 ArrayBytesCodec is allowed." - ) - if isinstance(codec, ArrayBytesCodec) and isinstance(prev_codec, BytesBytesCodec): - raise ValueError( - f"ArrayBytesCodec '{type(codec)}' cannot follow after BytesBytesCodec '{type(prev_codec)}'." - ) - if isinstance(codec, ArrayArrayCodec) and isinstance(prev_codec, ArrayBytesCodec): - raise ValueError( - f"ArrayArrayCodec '{type(codec)}' cannot follow after ArrayBytesCodec '{type(prev_codec)}'." - ) - if isinstance(codec, ArrayArrayCodec) and isinstance(prev_codec, BytesBytesCodec): - raise ValueError( - f"ArrayArrayCodec '{type(codec)}' cannot follow after BytesBytesCodec '{type(prev_codec)}'." 
- ) - prev_codec = codec - - if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(codecs) > 1: - warn( - "Combining a `sharding_indexed` codec disables partial reads and " - "writes, which may lead to inefficient performance.", - stacklevel=3, - ) - - return ( - tuple(codec for codec in codecs if isinstance(codec, ArrayArrayCodec)), - next(codec for codec in codecs if isinstance(codec, ArrayBytesCodec)), - tuple(codec for codec in codecs if isinstance(codec, BytesBytesCodec)), - ) + return type(self).from_list([c.evolve_from_array_spec(array_spec=array_spec) for c in self]) @classmethod - def from_list(cls, codecs: list[Codec], *, batch_size: int | None = None) -> Self: - array_array_codecs, array_bytes_codec, bytes_bytes_codecs = cls.codecs_from_list(codecs) + def from_list(cls, codecs: Iterable[Codec], *, batch_size: int | None = None) -> Self: + array_array_codecs, array_bytes_codec, bytes_bytes_codecs = codecs_from_list(codecs) return cls( array_array_codecs=array_array_codecs, @@ -180,9 +139,9 @@ def __iter__(self) -> Iterator[Codec]: yield self.array_bytes_codec yield from self.bytes_bytes_codecs - def validate(self, array_metadata: ArrayMetadata) -> None: + def validate(self, *, shape: ChunkCoords, dtype: np.dtype[Any], chunk_grid: ChunkGrid) -> None: for codec in self: - codec.validate(array_metadata) + codec.validate(shape=shape, dtype=dtype, chunk_grid=chunk_grid) def compute_encoded_size(self, byte_length: int, array_spec: ArraySpec) -> int: for codec in self: @@ -509,3 +468,64 @@ async def write( self.write_batch, config.get("async.concurrency"), ) + + +def codecs_from_list( + codecs: Iterable[Codec], +) -> tuple[tuple[ArrayArrayCodec, ...], ArrayBytesCodec, tuple[BytesBytesCodec, ...]]: + from zarr.codecs.sharding import ShardingCodec + + array_array: tuple[ArrayArrayCodec, ...] = () + array_bytes_maybe: ArrayBytesCodec | None = None + bytes_bytes: tuple[BytesBytesCodec, ...] = () + + if any(isinstance(codec, ShardingCodec) for codec in codecs) and len(tuple(codecs)) > 1: + warn( + "Combining a `sharding_indexed` codec disables partial reads and " + "writes, which may lead to inefficient performance.", + stacklevel=3, + ) + + for prev_codec, cur_codec in pairwise((None, *codecs)): + if isinstance(cur_codec, ArrayArrayCodec): + if isinstance(prev_codec, ArrayBytesCodec | BytesBytesCodec): + msg = ( + f"Invalid codec order. ArrayArrayCodec {cur_codec}" + "must be preceded by another ArrayArrayCodec. " + f"Got {type(prev_codec)} instead." + ) + raise ValueError(msg) + array_array += (cur_codec,) + + elif isinstance(cur_codec, ArrayBytesCodec): + if isinstance(prev_codec, BytesBytesCodec): + msg = ( + f"Invalid codec order. ArrayBytes codec {cur_codec}" + f" must be preceded by an ArrayArrayCodec. Got {type(prev_codec)} instead." + ) + raise ValueError(msg) + + if array_bytes_maybe is not None: + msg = ( + f"Got two instances of ArrayBytesCodec: {array_bytes_maybe} and {cur_codec}. " + "Only one array-to-bytes codec is allowed." + ) + raise ValueError(msg) + + array_bytes_maybe = cur_codec + + elif isinstance(cur_codec, BytesBytesCodec): + if isinstance(prev_codec, ArrayArrayCodec): + msg = ( + f"Invalid codec order. BytesBytesCodec {cur_codec}" + "must be preceded by either another BytesBytesCodec, or an ArrayBytesCodec. " + f"Got {type(prev_codec)} instead." 
+ ) + bytes_bytes += (cur_codec,) + else: + raise AssertionError + + if array_bytes_maybe is None: + raise ValueError("Required ArrayBytesCodec was not found.") + else: + return array_array, array_bytes_maybe, bytes_bytes diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index 74ad5ac44f..def95b206d 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -5,7 +5,7 @@ from enum import Enum from functools import lru_cache from operator import itemgetter -from typing import TYPE_CHECKING, NamedTuple +from typing import TYPE_CHECKING, Any, NamedTuple import numpy as np import numpy.typing as npt @@ -15,12 +15,11 @@ ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin, Codec, - CodecPipeline, ) from zarr.abc.store import ByteGetter, ByteSetter from zarr.array_spec import ArraySpec from zarr.buffer import Buffer, BufferPrototype, NDBuffer, default_buffer_prototype -from zarr.chunk_grids import RegularChunkGrid +from zarr.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.codecs.bytes import BytesCodec from zarr.codecs.crc32c_ import Crc32cCodec from zarr.codecs.pipeline import BatchedCodecPipeline @@ -34,7 +33,7 @@ product, ) from zarr.indexing import BasicIndexer, SelectorTuple, c_order_iter, get_indexer, morton_order_iter -from zarr.metadata import ArrayMetadata, parse_codecs +from zarr.metadata import parse_codecs if TYPE_CHECKING: from collections.abc import Awaitable, Callable, Iterator @@ -298,34 +297,22 @@ class ShardingCodec( ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin ): chunk_shape: ChunkCoords - codecs: CodecPipeline - index_codecs: CodecPipeline + codecs: tuple[Codec, ...] + index_codecs: tuple[Codec, ...] index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end def __init__( self, *, chunk_shape: ChunkCoordsLike, - codecs: Iterable[Codec | JSON] | None = None, - index_codecs: Iterable[Codec | JSON] | None = None, - index_location: ShardingCodecIndexLocation | None = ShardingCodecIndexLocation.end, + codecs: Iterable[Codec | dict[str, JSON]] = (BytesCodec(),), + index_codecs: Iterable[Codec | dict[str, JSON]] = (BytesCodec(), Crc32cCodec()), + index_location: ShardingCodecIndexLocation = ShardingCodecIndexLocation.end, ) -> None: chunk_shape_parsed = parse_shapelike(chunk_shape) - codecs_parsed = ( - parse_codecs(codecs) - if codecs is not None - else BatchedCodecPipeline.from_list([BytesCodec()]) - ) - index_codecs_parsed = ( - parse_codecs(index_codecs) - if index_codecs is not None - else BatchedCodecPipeline.from_list([BytesCodec(), Crc32cCodec()]) - ) - index_location_parsed = ( - parse_index_location(index_location) - if index_location is not None - else ShardingCodecIndexLocation.end - ) + codecs_parsed = parse_codecs(codecs) + index_codecs_parsed = parse_codecs(index_codecs) + index_location_parsed = parse_index_location(index_location) object.__setattr__(self, "chunk_shape", chunk_shape_parsed) object.__setattr__(self, "codecs", codecs_parsed) @@ -342,35 +329,39 @@ def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "sharding_indexed") return cls(**configuration_parsed) # type: ignore[arg-type] + @property + def codec_pipeline(self) -> BatchedCodecPipeline: + return BatchedCodecPipeline.from_list(self.codecs) + def to_dict(self) -> dict[str, JSON]: return { "name": "sharding_indexed", "configuration": { "chunk_shape": list(self.chunk_shape), - "codecs": self.codecs.to_dict(), - "index_codecs": 
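To make the ordering rule enforced by `codecs_from_list` concrete: array-to-array codecs must come first, followed by exactly one array-to-bytes codec, then any bytes-to-bytes codecs (a sketch; the codec constructor arguments are assumptions)::

    from zarr.codecs import BytesCodec, TransposeCodec, ZstdCodec
    from zarr.codecs.pipeline import BatchedCodecPipeline

    # valid: ArrayArrayCodec -> ArrayBytesCodec -> BytesBytesCodec
    pipeline = BatchedCodecPipeline.from_list(
        [TransposeCodec(order=(1, 0)), BytesCodec(), ZstdCodec()]
    )

    # invalid orders raise ValueError, e.g. a BytesBytesCodec before the
    # ArrayBytesCodec, or two ArrayBytesCodecs in one list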
self.index_codecs.to_dict(), + "codecs": [s.to_dict() for s in self.codecs], + "index_codecs": [s.to_dict() for s in self.index_codecs], "index_location": self.index_location, }, } def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: shard_spec = self._get_chunk_spec(array_spec) - evolved_codecs = self.codecs.evolve_from_array_spec(shard_spec) + evolved_codecs = tuple(c.evolve_from_array_spec(array_spec=shard_spec) for c in self.codecs) if evolved_codecs != self.codecs: return replace(self, codecs=evolved_codecs) return self - def validate(self, array_metadata: ArrayMetadata) -> None: - if len(self.chunk_shape) != array_metadata.ndim: + def validate(self, *, shape: ChunkCoords, dtype: np.dtype[Any], chunk_grid: ChunkGrid) -> None: + if len(self.chunk_shape) != len(shape): raise ValueError( "The shard's `chunk_shape` and array's `shape` need to have the same number of dimensions." ) - if not isinstance(array_metadata.chunk_grid, RegularChunkGrid): + if not isinstance(chunk_grid, RegularChunkGrid): raise ValueError("Sharding is only compatible with regular chunk grids.") if not all( s % c == 0 for s, c in zip( - array_metadata.chunk_grid.chunk_shape, + chunk_grid.chunk_shape, self.chunk_shape, strict=False, ) @@ -406,7 +397,7 @@ async def _decode_single( return out # decoding chunks and writing them into the output buffer - await self.codecs.read( + await self.codec_pipeline.read( [ ( _ShardingByteGetter(shard_dict, chunk_coords), @@ -474,7 +465,7 @@ async def _decode_partial_single( shard_dict[chunk_coords] = chunk_bytes # decoding chunks and writing them into the output buffer - await self.codecs.read( + await self.codec_pipeline.read( [ ( _ShardingByteGetter(shard_dict, chunk_coords), @@ -508,7 +499,7 @@ async def _encode_single( shard_builder = _ShardBuilder.create_empty(chunks_per_shard) - await self.codecs.write( + await self.codec_pipeline.write( [ ( _ShardingByteSetter(shard_builder, chunk_coords), @@ -551,7 +542,7 @@ async def _encode_partial_single( ) ) - await self.codecs.write( + await self.codec_pipeline.write( [ ( _ShardingByteSetter(shard_dict, chunk_coords), @@ -586,7 +577,7 @@ async def _decode_shard_index( ) -> _ShardIndex: index_array = next( iter( - await self.index_codecs.decode( + await BatchedCodecPipeline.from_list(self.index_codecs).decode( [(index_bytes, self._get_index_chunk_spec(chunks_per_shard))], ) ) @@ -597,7 +588,7 @@ async def _decode_shard_index( async def _encode_shard_index(self, index: _ShardIndex) -> Buffer: index_bytes = next( iter( - await self.index_codecs.encode( + await BatchedCodecPipeline.from_list(self.index_codecs).encode( [ ( NDBuffer.from_numpy_array(index.offsets_and_lengths), @@ -612,7 +603,7 @@ async def _encode_shard_index(self, index: _ShardIndex) -> Buffer: return index_bytes def _shard_index_size(self, chunks_per_shard: ChunkCoords) -> int: - return self.index_codecs.compute_encoded_size( + return BatchedCodecPipeline.from_list(self.index_codecs).compute_encoded_size( 16 * product(chunks_per_shard), self._get_index_chunk_spec(chunks_per_shard) ) diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 33dab21fb6..0c55a6ec4a 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -2,13 +2,14 @@ from collections.abc import Iterable from dataclasses import dataclass, replace -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING, Any, cast import numpy as np from zarr.abc.codec import ArrayArrayCodec from zarr.array_spec import ArraySpec from zarr.buffer import 
NDBuffer +from zarr.chunk_grids import ChunkGrid from zarr.codecs.registry import register_codec from zarr.common import JSON, ChunkCoordsLike, parse_named_configuration @@ -45,8 +46,23 @@ def from_dict(cls, data: dict[str, JSON]) -> Self: def to_dict(self) -> dict[str, JSON]: return {"name": "transpose", "configuration": {"order": list(self.order)}} + def validate(self, shape: tuple[int, ...], dtype: np.dtype[Any], chunk_grid: ChunkGrid) -> None: + if len(self.order) != len(shape): + raise ValueError( + f"The `order` tuple needs have as many entries as there are dimensions in the array. Got {self.order}." + ) + if len(self.order) != len(set(self.order)): + raise ValueError( + f"There must not be duplicates in the `order` tuple. Got {self.order}." + ) + if not all(0 <= x < len(shape) for x in self.order): + raise ValueError( + f"All entries in the `order` tuple must be between 0 and the number of dimensions in the array. Got {self.order}." + ) + def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: - if len(self.order) != array_spec.ndim: + ndim = array_spec.ndim + if len(self.order) != ndim: raise ValueError( f"The `order` tuple needs have as many entries as there are dimensions in the array. Got {self.order}." ) @@ -54,7 +70,7 @@ def evolve_from_array_spec(self, array_spec: ArraySpec) -> Self: raise ValueError( f"There must not be duplicates in the `order` tuple. Got {self.order}." ) - if not all(0 <= x < array_spec.ndim for x in self.order): + if not all(0 <= x < ndim for x in self.order): raise ValueError( f"All entries in the `order` tuple must be between 0 and the number of dimensions in the array. Got {self.order}." ) diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index c6a71c00b8..729c7ba13c 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -10,12 +10,12 @@ import numpy as np import numpy.typing as npt -from zarr.abc.codec import Codec, CodecPipeline +from zarr.abc.codec import ArrayArrayCodec, ArrayBytesCodec, BytesBytesCodec, Codec, CodecPipeline from zarr.abc.metadata import Metadata from zarr.buffer import Buffer, BufferPrototype, default_buffer_prototype from zarr.chunk_grids import ChunkGrid, RegularChunkGrid from zarr.chunk_key_encodings import ChunkKeyEncoding, parse_separator -from zarr.codecs._v2 import V2Compressor, V2Filters +from zarr.codecs.registry import get_codec_class from zarr.config import config if TYPE_CHECKING: @@ -33,6 +33,7 @@ ZarrFormat, parse_dtype, parse_fill_value, + parse_named_configuration, parse_shapelike, ) from zarr.config import parse_indexing_order @@ -132,11 +133,6 @@ def dtype(self) -> np.dtype[Any]: def ndim(self) -> int: pass - @property - @abstractmethod - def codec_pipeline(self) -> CodecPipeline: - pass - @abstractmethod def get_chunk_spec( self, _chunk_coords: ChunkCoords, order: Literal["C", "F"], prototype: BufferPrototype @@ -167,7 +163,7 @@ class ArrayV3Metadata(ArrayMetadata): chunk_grid: ChunkGrid chunk_key_encoding: ChunkKeyEncoding fill_value: Any - codecs: CodecPipeline + codecs: tuple[Codec, ...] attributes: dict[str, Any] = field(default_factory=dict) dimension_names: tuple[str, ...] 
| None = None zarr_format: Literal[3] = field(default=3, init=False) @@ -181,7 +177,7 @@ def __init__( chunk_grid: dict[str, JSON] | ChunkGrid, chunk_key_encoding: dict[str, JSON] | ChunkKeyEncoding, fill_value: Any, - codecs: Iterable[Codec | JSON], + codecs: Iterable[Codec | dict[str, JSON]], attributes: None | dict[str, JSON], dimension_names: None | Iterable[str], ) -> None: @@ -195,6 +191,7 @@ def __init__( dimension_names_parsed = parse_dimension_names(dimension_names) fill_value_parsed = parse_fill_value(fill_value) attributes_parsed = parse_attributes(attributes) + codecs_parsed_partial = parse_codecs(codecs) array_spec = ArraySpec( shape=shape_parsed, @@ -203,7 +200,7 @@ def __init__( order="C", # TODO: order is not needed here. prototype=default_buffer_prototype, # TODO: prototype is not needed here. ) - codecs_parsed = parse_codecs(codecs).evolve_from_array_spec(array_spec) + codecs_parsed = [c.evolve_from_array_spec(array_spec) for c in codecs_parsed_partial] object.__setattr__(self, "shape", shape_parsed) object.__setattr__(self, "data_type", data_type_parsed) @@ -229,7 +226,8 @@ def _validate_metadata(self) -> None: ) if self.fill_value is None: raise ValueError("`fill_value` is required.") - self.codecs.validate(self) + for codec in self.codecs: + codec.validate(shape=self.shape, dtype=self.data_type, chunk_grid=self.chunk_grid) @property def dtype(self) -> np.dtype[Any]: @@ -239,10 +237,6 @@ def dtype(self) -> np.dtype[Any]: def ndim(self) -> int: return len(self.shape) - @property - def codec_pipeline(self) -> CodecPipeline: - return self.codecs - def get_chunk_spec( self, _chunk_coords: ChunkCoords, order: Literal["C", "F"], prototype: BufferPrototype ) -> ArraySpec: @@ -375,14 +369,6 @@ def dtype(self) -> np.dtype[Any]: def chunks(self) -> ChunkCoords: return self.chunk_grid.chunk_shape - @property - def codec_pipeline(self) -> CodecPipeline: - from zarr.codecs import BatchedCodecPipeline - - return BatchedCodecPipeline.from_list( - [V2Filters(self.filters or []), V2Compressor(self.compressor)] - ) - def to_buffer_dict(self) -> dict[str, Buffer]: def _json_convert( o: np.dtype[Any], @@ -507,9 +493,27 @@ def parse_v2_metadata(data: ArrayV2Metadata) -> ArrayV2Metadata: return data -def parse_codecs(data: Iterable[Codec | JSON]) -> CodecPipeline: +def create_pipeline(data: Iterable[Codec | JSON]) -> CodecPipeline: from zarr.codecs import BatchedCodecPipeline if not isinstance(data, Iterable): raise TypeError(f"Expected iterable, got {type(data)}") return BatchedCodecPipeline.from_dict(data) + + +def parse_codecs(data: Iterable[Codec | dict[str, JSON]]) -> tuple[Codec, ...]: + out: tuple[Codec, ...] 
= () + + if not isinstance(data, Iterable): + raise TypeError(f"Expected iterable, got {type(data)}") + + for c in data: + if isinstance( + c, ArrayArrayCodec | ArrayBytesCodec | BytesBytesCodec + ): # Can't use Codec here because of mypy limitation + out += (c,) + else: + name_parsed, _ = parse_named_configuration(c, require_configuration=False) + out += (get_codec_class(name_parsed).from_dict(c),) + + return out From ea6b441b351d008ffe099f7b193ccea5027dee36 Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Sun, 23 Jun 2024 23:52:38 +0200 Subject: [PATCH 0583/1078] Update `RemoteStore.__str__` and add UPath tests (#1964) * normalize remotestore __str__ method * add UPath tests (failing) * remove trailing path delimiter from ._url property of remotestore * remove trailing path delimiter from path attribute in remotestore * add upath to test dependencies * more aggressive cleanup in s3 fixture * remove redundant elif --- pyproject.toml | 1 + src/zarr/store/remote.py | 17 ++++--- tests/v3/test_store/test_remote.py | 77 ++++++++++++++++++++++-------- 3 files changed, 69 insertions(+), 26 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 96a884b737..7f8c23f2b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,6 +106,7 @@ build.hooks.vcs.version-file = "src/zarr/_version.py" [tool.hatch.envs.test] dependencies = [ "numpy~={matrix:numpy}", + "universal_pathlib" ] extra-dependencies = [ "coverage", diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index db826f456d..15051334e9 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -25,6 +25,7 @@ class RemoteStore(Store): supports_listing: bool = True _fs: AsyncFileSystem + _url: str path: str allowed_exceptions: tuple[type[Exception], ...] @@ -51,9 +52,10 @@ def __init__( """ super().__init__(mode=mode) - if isinstance(url, str): - self._fs, self.path = fsspec.url_to_fs(url, **storage_options) + self._url = url.rstrip("/") + self._fs, _path = fsspec.url_to_fs(url, **storage_options) + self.path = _path.rstrip("/") elif hasattr(url, "protocol") and hasattr(url, "fs"): # is UPath-like - but without importing if storage_options: @@ -61,8 +63,11 @@ def __init__( "If constructed with a UPath object, no additional " "storage_options are allowed" ) - self.path = url.path - self._fs = url._fs + # n.b. 
UPath returns the url and path attributes with a trailing /, at least for s3 + # that trailing / must be removed to compose with the store interface + self._url = str(url).rstrip("/") + self.path = url.path.rstrip("/") + self._fs = url.fs else: raise ValueError("URL not understood, %s", url) self.allowed_exceptions = allowed_exceptions @@ -71,10 +76,10 @@ def __init__( raise TypeError("FileSystem needs to support async operations") def __str__(self) -> str: - return f"Remote fsspec store: {type(self._fs).__name__} , {self.path}" + return f"{self._url}" def __repr__(self) -> str: - return f"" + return f"" async def get( self, diff --git a/tests/v3/test_store/test_remote.py b/tests/v3/test_store/test_remote.py index 936cf206d9..98206d427f 100644 --- a/tests/v3/test_store/test_remote.py +++ b/tests/v3/test_store/test_remote.py @@ -2,9 +2,11 @@ import fsspec import pytest +from upath import UPath from zarr.buffer import Buffer, default_buffer_prototype from zarr.store import RemoteStore +from zarr.sync import sync from zarr.testing.store import StoreTests s3fs = pytest.importorskip("s3fs") @@ -16,7 +18,7 @@ test_bucket_name = "test" secure_bucket_name = "test-secure" port = 5555 -endpoint_uri = f"http://127.0.0.1:{port}/" +endpoint_url = f"http://127.0.0.1:{port}/" @pytest.fixture(scope="module") @@ -40,18 +42,33 @@ def get_boto3_client(): # NB: we use the sync botocore client for setup session = Session() - return session.create_client("s3", endpoint_url=endpoint_uri) + return session.create_client("s3", endpoint_url=endpoint_url) @pytest.fixture(autouse=True, scope="function") def s3(s3_base): + """ + Quoting Martin Durant: + pytest-asyncio creates a new event loop for each async test. + When an async-mode s3fs instance is made from async, it will be assigned to the loop from + which it is made. That means that if you use s3fs again from a subsequent test, + you will have the same identical instance, but be running on a different loop - which fails. + + For the rest: it's very convenient to clean up the state of the store between tests, + make sure we start off blank each time. 
+ + https://github.com/zarr-developers/zarr-python/pull/1785#discussion_r1634856207 + """ client = get_boto3_client() client.create_bucket(Bucket=test_bucket_name, ACL="public-read") s3fs.S3FileSystem.clear_instance_cache() - s3 = s3fs.S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri}) + s3 = s3fs.S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_url}) + session = sync(s3.set_session()) s3.invalidate_cache() yield s3 - requests.post(f"{endpoint_uri}/moto-api/reset") + requests.post(f"{endpoint_url}/moto-api/reset") + client.close() + sync(session.close()) # ### end from s3fs ### # @@ -65,7 +82,7 @@ async def alist(it): async def test_basic(): - store = RemoteStore(f"s3://{test_bucket_name}", mode="w", endpoint_url=endpoint_uri, anon=False) + store = RemoteStore(f"s3://{test_bucket_name}", mode="w", endpoint_url=endpoint_url, anon=False) assert not await alist(store.list()) assert not await store.exists("foo") data = b"hello" @@ -81,31 +98,51 @@ async def test_basic(): class TestRemoteStoreS3(StoreTests[RemoteStore]): store_cls = RemoteStore - @pytest.fixture(scope="function") - def store_kwargs(self) -> dict[str, str | bool]: - return { - "mode": "w", - "endpoint_url": endpoint_uri, - "anon": False, - "url": f"s3://{test_bucket_name}", - } + @pytest.fixture(scope="function", params=("use_upath", "use_str")) + def store_kwargs(self, request) -> dict[str, str | bool]: + url = f"s3://{test_bucket_name}" + anon = False + mode = "w" + if request.param == "use_upath": + return {"mode": mode, "url": UPath(url, endpoint_url=endpoint_url, anon=anon)} + elif request.param == "use_str": + return {"url": url, "mode": mode, "anon": anon, "endpoint_url": endpoint_url} + + raise AssertionError @pytest.fixture(scope="function") def store(self, store_kwargs: dict[str, str | bool]) -> RemoteStore: - self._fs, _ = fsspec.url_to_fs(asynchronous=False, **store_kwargs) - out = self.store_cls(asynchronous=True, **store_kwargs) + url = store_kwargs["url"] + mode = store_kwargs["mode"] + if isinstance(url, UPath): + out = self.store_cls(url=url, mode=mode) + else: + endpoint_url = store_kwargs["endpoint_url"] + out = self.store_cls(url=url, asynchronous=True, mode=mode, endpoint_url=endpoint_url) return out def get(self, store: RemoteStore, key: str) -> Buffer: - return Buffer.from_bytes(self._fs.cat(f"{store.path}/{key}")) + # make a new, synchronous instance of the filesystem because this test is run in sync code + fs, _ = fsspec.url_to_fs( + url=store._url, + asynchronous=False, + anon=store._fs.anon, + endpoint_url=store._fs.endpoint_url, + ) + return Buffer.from_bytes(fs.cat(f"{store.path}/{key}")) def set(self, store: RemoteStore, key: str, value: Buffer) -> None: - self._fs.write_bytes(f"{store.path}/{key}", value.to_bytes()) + # make a new, synchronous instance of the filesystem because this test is run in sync code + fs, _ = fsspec.url_to_fs( + url=store._url, + asynchronous=False, + anon=store._fs.anon, + endpoint_url=store._fs.endpoint_url, + ) + fs.write_bytes(f"{store.path}/{key}", value.to_bytes()) def test_store_repr(self, store: RemoteStore) -> None: - rep = str(store) - assert "fsspec" in rep - assert store.path in rep + assert str(store) == f"s3://{test_bucket_name}" def test_store_supports_writes(self, store: RemoteStore) -> None: assert True From 11a959aac2413b35a63d489af2090c0d5bcb0259 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 24 Jun 2024 15:27:48 +0100 Subject: [PATCH 0584/1078] Automatically generate API reference docs (#1918) * Automatically 
generate API reference * Remove old automodapi refs * Fix some doc warnings * Skip private modules * Skip all private objects * Add API ref page for zarr namespace * Fix import errors with zarr.v2 * Remove old numpydoc validation * Use groupwise member order --------- Co-authored-by: Davis Bennett --- .gitignore | 1 - docs/api.rst | 22 ----------------- docs/api/attrs.rst | 16 ------------- docs/api/codecs.rst | 23 ------------------ docs/api/convenience.rst | 14 ----------- docs/api/core.rst | 5 ---- docs/api/creation.rst | 15 ------------ docs/api/hierarchy.rst | 41 -------------------------------- docs/api/index.rst | 7 ++++++ docs/api/n5.rst | 5 ---- docs/api/storage.rst | 50 --------------------------------------- docs/api/sync.rst | 6 ----- docs/api/zarr.rst | 5 ++++ docs/conf.py | 19 +++++++++------ docs/index.rst | 4 ++-- pyproject.toml | 4 ++-- src/zarr/buffer.py | 30 +++++++++++------------ src/zarr/store/local.py | 1 + src/zarr/testing/utils.py | 4 ++-- src/zarr/v2/core.py | 5 ++-- 20 files changed, 48 insertions(+), 229 deletions(-) delete mode 100644 docs/api.rst delete mode 100644 docs/api/attrs.rst delete mode 100644 docs/api/codecs.rst delete mode 100644 docs/api/convenience.rst delete mode 100644 docs/api/core.rst delete mode 100644 docs/api/creation.rst delete mode 100644 docs/api/hierarchy.rst create mode 100644 docs/api/index.rst delete mode 100644 docs/api/n5.rst delete mode 100644 docs/api/storage.rst delete mode 100644 docs/api/sync.rst create mode 100644 docs/api/zarr.rst diff --git a/.gitignore b/.gitignore index 7d32026e13..35957f2c99 100644 --- a/.gitignore +++ b/.gitignore @@ -51,7 +51,6 @@ coverage.xml # Sphinx documentation docs/_build/ -docs/_autoapi/ # PyBuilder target/ diff --git a/docs/api.rst b/docs/api.rst deleted file mode 100644 index 2b6e7ea516..0000000000 --- a/docs/api.rst +++ /dev/null @@ -1,22 +0,0 @@ -API reference -============= - -.. toctree:: - :maxdepth: 3 - - api/creation - api/core - api/hierarchy - api/storage - api/n5 - api/convenience - api/codecs - api/attrs - api/sync - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/api/attrs.rst b/docs/api/attrs.rst deleted file mode 100644 index 067b45fac6..0000000000 --- a/docs/api/attrs.rst +++ /dev/null @@ -1,16 +0,0 @@ -The Attributes class (``zarr.v2.attrs``) -======================================== -.. module:: zarr.v2.attrs - -.. autoclass:: Attributes - - .. automethod:: __getitem__ - .. automethod:: __setitem__ - .. automethod:: __delitem__ - .. automethod:: __iter__ - .. automethod:: __len__ - .. automethod:: keys - .. automethod:: asdict - .. automethod:: put - .. automethod:: update - .. automethod:: refresh diff --git a/docs/api/codecs.rst b/docs/api/codecs.rst deleted file mode 100644 index 454c5ccd20..0000000000 --- a/docs/api/codecs.rst +++ /dev/null @@ -1,23 +0,0 @@ -Compressors and filters (``zarr.v2.codecs``) -============================================ -.. module:: zarr.codecs - -This module contains compressor and filter classes for use with Zarr. Please note that this module -is provided for backwards compatibility with previous versions of Zarr. From Zarr version 2.2 -onwards, all codec classes have been moved to a separate package called Numcodecs_. The two -packages (Zarr and Numcodecs_) are designed to be used together. 
For example, a Numcodecs_ codec -class can be used as a compressor for a Zarr array:: - - >>> import zarr.v2 - >>> from numcodecs import Blosc - >>> z = zarr.v2.zeros(1000000, compressor=Blosc(cname='zstd', clevel=1, shuffle=Blosc.SHUFFLE)) - -Codec classes can also be used as filters. See the tutorial section on :ref:`tutorial_filters` -for more information. - -Please note that it is also relatively straightforward to define and register custom codec -classes. See the Numcodecs `codec API `_ and -`codec registry `_ documentation for more -information. - -.. _Numcodecs: https://numcodecs.readthedocs.io/ diff --git a/docs/api/convenience.rst b/docs/api/convenience.rst deleted file mode 100644 index 1ff26452fa..0000000000 --- a/docs/api/convenience.rst +++ /dev/null @@ -1,14 +0,0 @@ -Convenience functions (``zarr.v2.convenience``) -=============================================== -.. automodule:: zarr.v2.convenience -.. autofunction:: open -.. autofunction:: save -.. autofunction:: load -.. autofunction:: save_array -.. autofunction:: save_group -.. autofunction:: copy -.. autofunction:: copy_all -.. autofunction:: copy_store -.. autofunction:: tree -.. autofunction:: consolidate_metadata -.. autofunction:: open_consolidated diff --git a/docs/api/core.rst b/docs/api/core.rst deleted file mode 100644 index aacd03e2a5..0000000000 --- a/docs/api/core.rst +++ /dev/null @@ -1,5 +0,0 @@ -The Array class (``zarr.v2.core``) -================================== - -.. automodapi:: zarr.v2.core - :no-heading: diff --git a/docs/api/creation.rst b/docs/api/creation.rst deleted file mode 100644 index ad0a2ead49..0000000000 --- a/docs/api/creation.rst +++ /dev/null @@ -1,15 +0,0 @@ -Array creation (``zarr.v2.creation``) -===================================== -.. module:: zarr.v2.creation -.. autofunction:: create -.. autofunction:: empty -.. autofunction:: zeros -.. autofunction:: ones -.. autofunction:: full -.. autofunction:: array -.. autofunction:: open_array -.. autofunction:: empty_like -.. autofunction:: zeros_like -.. autofunction:: ones_like -.. autofunction:: full_like -.. autofunction:: open_like diff --git a/docs/api/hierarchy.rst b/docs/api/hierarchy.rst deleted file mode 100644 index 5d9280af1e..0000000000 --- a/docs/api/hierarchy.rst +++ /dev/null @@ -1,41 +0,0 @@ -Groups (``zarr.v2.hierarchy``) -============================== -.. module:: zarr.v2.hierarchy - -.. autofunction:: group -.. autofunction:: open_group - -.. autoclass:: Group - - .. automethod:: __len__ - .. automethod:: __iter__ - .. automethod:: __contains__ - .. automethod:: __getitem__ - .. automethod:: __enter__ - .. automethod:: __exit__ - .. automethod:: group_keys - .. automethod:: groups - .. automethod:: array_keys - .. automethod:: arrays - .. automethod:: visit - .. automethod:: visitkeys - .. automethod:: visitvalues - .. automethod:: visititems - .. automethod:: tree - .. automethod:: create_group - .. automethod:: require_group - .. automethod:: create_groups - .. automethod:: require_groups - .. automethod:: create_dataset - .. automethod:: require_dataset - .. automethod:: create - .. automethod:: empty - .. automethod:: zeros - .. automethod:: ones - .. automethod:: full - .. automethod:: array - .. automethod:: empty_like - .. automethod:: zeros_like - .. automethod:: ones_like - .. automethod:: full_like - .. 
automethod:: move diff --git a/docs/api/index.rst b/docs/api/index.rst new file mode 100644 index 0000000000..58ea531905 --- /dev/null +++ b/docs/api/index.rst @@ -0,0 +1,7 @@ +API Reference +============= + +.. toctree:: + :maxdepth: 1 + + zarr diff --git a/docs/api/n5.rst b/docs/api/n5.rst deleted file mode 100644 index 22e490bad4..0000000000 --- a/docs/api/n5.rst +++ /dev/null @@ -1,5 +0,0 @@ -N5 (``zarr.v2.n5``) -=================== -.. automodule:: zarr.v2.n5 - -.. autoclass:: N5Store diff --git a/docs/api/storage.rst b/docs/api/storage.rst deleted file mode 100644 index d0ebd8a429..0000000000 --- a/docs/api/storage.rst +++ /dev/null @@ -1,50 +0,0 @@ -Storage (``zarr.v2.storage``) -============================= -.. automodule:: zarr.v2.storage - -.. autoclass:: MemoryStore -.. autoclass:: DirectoryStore -.. autoclass:: TempStore -.. autoclass:: NestedDirectoryStore -.. autoclass:: ZipStore - - .. automethod:: close - .. automethod:: flush - -.. autoclass:: DBMStore - - .. automethod:: close - .. automethod:: flush - -.. autoclass:: LMDBStore - - .. automethod:: close - .. automethod:: flush - -.. autoclass:: SQLiteStore - - .. automethod:: close - -.. autoclass:: MongoDBStore -.. autoclass:: RedisStore -.. autoclass:: LRUStoreCache - - .. automethod:: invalidate - .. automethod:: invalidate_values - .. automethod:: invalidate_keys - -.. autoclass:: ABSStore - -.. autoclass:: FSStore - -.. autoclass:: ConsolidatedMetadataStore - -.. autofunction:: init_array -.. autofunction:: init_group -.. autofunction:: contains_array -.. autofunction:: contains_group -.. autofunction:: listdir -.. autofunction:: rmdir -.. autofunction:: getsize -.. autofunction:: rename -.. autofunction:: migrate_1to2 diff --git a/docs/api/sync.rst b/docs/api/sync.rst deleted file mode 100644 index ff961543af..0000000000 --- a/docs/api/sync.rst +++ /dev/null @@ -1,6 +0,0 @@ -Synchronization (``zarr.v2.sync``) -================================== -.. module:: zarr.v2.sync - -.. autoclass:: ThreadSynchronizer -.. autoclass:: ProcessSynchronizer diff --git a/docs/api/zarr.rst b/docs/api/zarr.rst new file mode 100644 index 0000000000..8a9216f19e --- /dev/null +++ b/docs/api/zarr.rst @@ -0,0 +1,5 @@ +zarr +==== + +.. autoapimodule:: zarr + :members: diff --git a/docs/conf.py b/docs/conf.py index 318843a9fb..35afa60577 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,6 +15,9 @@ import os import sys +from typing import Any + +import sphinx.application # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -22,6 +25,7 @@ # # The short X.Y version. import zarr +import sphinx # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -42,19 +46,21 @@ "sphinx.ext.autosummary", "sphinx.ext.viewcode", "sphinx.ext.intersphinx", - "sphinx_automodapi.automodapi", + 'autoapi.extension', "numpydoc", "sphinx_issues", "sphinx_copybutton", "sphinx_design", ] -numpydoc_show_class_members = False -numpydoc_class_members_toctree = False issues_github_path = "zarr-developers/zarr-python" -automodapi_inheritance_diagram = False -automodapi_toctreedirnm = "_autoapi" +autoapi_dirs = ['../src/zarr'] +autoapi_add_toctree_entry = False +autoapi_generate_api_docs = False +autoapi_member_order = "groupwise" +autoapi_root = "api" + # Add any paths that contain templates here, relative to this directory. 
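# (autoapi note: with `autoapi_add_toctree_entry` and `autoapi_generate_api_docs`
# both disabled above, sphinx-autoapi only parses the sources; pages such as
# docs/api/zarr.rst then pull members in explicitly via the
# `.. autoapimodule:: zarr` directive.)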
templates_path = ["_templates"] @@ -166,8 +172,7 @@ html_logo = "_static/logo1.png" -# Add custom css -def setup(app): +def setup(app: sphinx.application.Sphinx) -> None: app.add_css_file("custom.css") diff --git a/docs/index.rst b/docs/index.rst index 06f79b7e7c..92f9a3df18 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,7 +10,7 @@ Zarr-Python getting_started tutorial - api + api/index spec release license @@ -80,7 +80,7 @@ Zarr is a file storage format for chunked, compressed, N-dimensional arrays base +++ - .. button-ref:: api + .. button-ref:: api/index :expand: :color: dark :click-parent: diff --git a/pyproject.toml b/pyproject.toml index 7f8c23f2b6..d61eadea84 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ jupyter = [ docs = [ 'sphinx', 'sphinx-autobuild>=2021.3.14', - 'sphinx-automodapi', + 'sphinx-autoapi', 'sphinx_design', 'sphinx-issues', 'sphinx-copybutton', @@ -146,7 +146,7 @@ features = ['docs'] [tool.hatch.envs.docs.scripts] build = "cd docs && make html" -serve = "sphinx-autobuild docs docs/_build --ignore 'docs/_autoapi/**/*' --host 0.0.0.0" +serve = "sphinx-autobuild docs docs/_build --host 0.0.0.0" [tool.ruff] line-length = 100 diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 44691ea352..1298711d4e 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -87,8 +87,8 @@ def all(self) -> bool: ... def __eq__(self, other: Any) -> Self: # type: ignore[explicit-override, override] """Element-wise equal - Notice - ------ + Notes + ----- Type checkers such as mypy complains because the return type isn't a bool like its supertype "object", which violates the Liskov substitution principle. This is true, but since NumPy's ndarray is defined as an element-wise equal, @@ -117,8 +117,8 @@ class Buffer: array-like instance can be copied/converted to a regular Numpy array (host memory). - Note - ---- + Notes + ----- This buffer is untyped, so all indexing and sizes are in bytes. Parameters @@ -188,8 +188,8 @@ def as_array_like(self) -> ArrayLike: def as_numpy_array(self) -> npt.NDArray[Any]: """Returns the buffer as a NumPy array (host memory). - Warning - ------- + Notes + ----- Might have to copy data, consider using `.as_array_like()` instead. Returns @@ -201,8 +201,8 @@ def as_numpy_array(self) -> npt.NDArray[Any]: def to_bytes(self) -> bytes: """Returns the buffer as `bytes` (host memory). - Warning - ------- + Warnings + -------- Will always copy data, only use this method for small buffers such as metadata buffers. If possible, use `.as_numpy_array()` or `.as_array_like()` instead. @@ -244,8 +244,8 @@ class NDBuffer: ndarray-like instance can be copied/converted to a regular Numpy array (host memory). - Note - ---- + Notes + ----- The two buffer classes Buffer and NDBuffer are very similar. In fact, Buffer is a special case of NDBuffer where dim=1, stride=1, and dtype="b". However, in order to use Python's type system to differentiate between the contiguous @@ -290,8 +290,8 @@ def create( ------- New buffer representing a new ndarray_like object - Developer Notes - --------------- + Notes + ----- A subclass can overwrite this method to create a ndarray-like object other then the default Numpy array. """ @@ -344,8 +344,8 @@ def as_ndarray_like(self) -> NDArrayLike: def as_numpy_array(self) -> npt.NDArray[Any]: """Returns the buffer as a NumPy array (host memory). - Warning - ------- + Warnings + -------- Might have to copy data, consider using `.as_ndarray_like()` instead. 
Returns @@ -433,7 +433,7 @@ def as_numpy_array_wrapper( Returns ------- - The result of `func` converted to a `prototype.buffer` + The result of `func` converted to a `Buffer` """ return prototype.buffer.from_bytes(func(buf.as_numpy_array())) diff --git a/src/zarr/store/local.py b/src/zarr/store/local.py index 9238700445..5915559900 100644 --- a/src/zarr/store/local.py +++ b/src/zarr/store/local.py @@ -109,6 +109,7 @@ async def get_partial_values( ) -> list[Buffer | None]: """ Read byte ranges from multiple keys. + Parameters ---------- key_ranges: List[Tuple[str, Tuple[int, int]]] diff --git a/src/zarr/testing/utils.py b/src/zarr/testing/utils.py index 04b05d1b1c..67c6c72de7 100644 --- a/src/zarr/testing/utils.py +++ b/src/zarr/testing/utils.py @@ -7,8 +7,8 @@ def assert_bytes_equal(b1: Buffer | BytesLike | None, b2: Buffer | BytesLike | None) -> None: """Help function to assert if two bytes-like or Buffers are equal - Warning - ------- + Warnings + -------- Always copies data, only use for testing and debugging """ if isinstance(b1, Buffer): diff --git a/src/zarr/v2/core.py b/src/zarr/v2/core.py index c1223daced..9eeb467d68 100644 --- a/src/zarr/v2/core.py +++ b/src/zarr/v2/core.py @@ -8,11 +8,11 @@ from typing import Any import numpy as np -from numcodecs.compat import ensure_bytes +from numcodecs import AsType, get_codec +from numcodecs.compat import ensure_bytes, ensure_ndarray_like from zarr.v2._storage.store import _prefix_to_attrs_key from zarr.v2.attrs import Attributes -from zarr.v2.codecs import AsType, get_codec from zarr.v2.context import Context from zarr.v2.errors import ArrayNotFoundError, ReadOnlyError, ArrayIndexError from zarr.v2.indexing import ( @@ -56,7 +56,6 @@ normalize_shape, normalize_storage_path, PartialReadBuffer, - ensure_ndarray_like, ) __all__ = ["Array"] From cb9605ef09d80c3fffcba452abfd7655a46c3113 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 24 Jun 2024 20:17:02 +0100 Subject: [PATCH 0585/1078] Fix doc build warnings (#1985) * Fix doc build warnings * Fix mypy error --- src/zarr/indexing.py | 6 +++--- src/zarr/v2/context.py | 4 ++-- src/zarr/v2/indexing.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 74cbbe8c6b..bb5ed660cf 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -1022,9 +1022,9 @@ def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_gri # flatten selection selection_broadcast = tuple(dim_sel.reshape(-1) for dim_sel in selection_broadcast) - chunks_multi_index_broadcast = [ - dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast - ] + chunks_multi_index_broadcast = tuple( + [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast] + ) # ravel chunk indices chunks_raveled_indices = np.ravel_multi_index( diff --git a/src/zarr/v2/context.py b/src/zarr/v2/context.py index 3dd7dda4ac..4eb1db7491 100644 --- a/src/zarr/v2/context.py +++ b/src/zarr/v2/context.py @@ -9,8 +9,8 @@ class Context(TypedDict, total=False): All keys are optional. Any component reading the context must provide a default implementation in the case a key cannot be found. - Items - ----- + Attributes + ---------- meta_array : array-like, optional An array-like instance to use for determining the preferred output array type. 
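For context, a minimal sketch of the contract the ``Context`` docstring above
spells out: every key is optional, so a consumer must always fall back to a
default (the zero-length array here is purely illustrative)::

    import numpy as np

    from zarr.v2.context import Context

    ctx = Context(meta_array=np.empty(0))          # caller states a preference
    out_like = ctx.get("meta_array", np.empty(0))  # reader supplies a default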
diff --git a/src/zarr/v2/indexing.py b/src/zarr/v2/indexing.py index 242e9ae849..bb2d9f1adb 100644 --- a/src/zarr/v2/indexing.py +++ b/src/zarr/v2/indexing.py @@ -989,7 +989,7 @@ class PartialChunkIterator: arr_shape : shape of chunk to select data from Attributes - ----------- + ---------- arr_shape selection From e3ee09eaa8a5e17a92d00ae4979aa069f755eaba Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 24 Jun 2024 20:51:34 +0100 Subject: [PATCH 0586/1078] Fix doc build (#1987) --- docs/_static/donotdelete | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 docs/_static/donotdelete diff --git a/docs/_static/donotdelete b/docs/_static/donotdelete deleted file mode 100644 index e69de29bb2..0000000000 From c677da4b0bc3e600b23cad0cea1253b7923810b0 Mon Sep 17 00:00:00 2001 From: "Mads R. B. Kristensen" Date: Tue, 25 Jun 2024 13:43:57 +0200 Subject: [PATCH 0587/1078] [v3] `Buffer` ensure correct subclass based on the `BufferPrototype` argument (#1974) * impl. and use Buffer.from_buffer() * Update src/zarr/buffer.py Co-authored-by: Davis Bennett * Apply suggestions from code review Co-authored-by: Davis Bennett --------- Co-authored-by: Davis Bennett --- src/zarr/buffer.py | 25 ++++++++++++++++++++++++- src/zarr/store/memory.py | 2 +- src/zarr/store/remote.py | 6 +++--- tests/v3/test_buffer.py | 5 ++++- tests/v3/test_store/test_remote.py | 2 +- 5 files changed, 33 insertions(+), 7 deletions(-) diff --git a/src/zarr/buffer.py b/src/zarr/buffer.py index 1298711d4e..86f9b53477 100644 --- a/src/zarr/buffer.py +++ b/src/zarr/buffer.py @@ -146,7 +146,7 @@ def create_zero_length(cls) -> Self: @classmethod def from_array_like(cls, array_like: ArrayLike) -> Self: - """Create a new buffer of a array-like object + """Create a new buffer of an array-like object Parameters ---------- @@ -159,6 +159,29 @@ def from_array_like(cls, array_like: ArrayLike) -> Self: """ return cls(array_like) + @classmethod + def from_buffer(cls, buffer: Buffer) -> Self: + """Create a new buffer of an existing Buffer + + This is useful if you want to ensure that an existing buffer is + of the correct subclass of Buffer. E.g., MemoryStore uses this + to return a buffer instance of the subclass specified by its + BufferPrototype argument. + + Typically, this only copies data if the data has to be moved between + memory types, such as from host to device memory. + + Parameters + ---------- + buffer + buffer object. 
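+            For example, a store can coerce whatever ``Buffer`` subclass it
+            happens to hold into the one the caller requested (a sketch;
+            ``stored_value`` is illustrative)::
+
+                buffer = prototype.buffer.from_buffer(stored_value)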
+ + Returns + ------- + A new buffer representing the content of the input buffer + """ + return cls.from_array_like(buffer.as_array_like()) + @classmethod def from_bytes(cls, bytes_like: BytesLike) -> Self: """Create a new buffer of a bytes-like object (host memory) diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py index 43d65ce836..7b73330b6c 100644 --- a/src/zarr/store/memory.py +++ b/src/zarr/store/memory.py @@ -39,7 +39,7 @@ async def get( try: value = self._store_dict[key] start, length = _normalize_interval_index(value, byte_range) - return value[start : start + length] + return prototype.buffer.from_buffer(value[start : start + length]) except KeyError: return None diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 15051334e9..50a02dcbcd 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -6,7 +6,7 @@ import fsspec from zarr.abc.store import Store -from zarr.buffer import Buffer, BufferPrototype, default_buffer_prototype +from zarr.buffer import Buffer, BufferPrototype from zarr.common import OpenMode from zarr.store.core import _dereference_path @@ -84,7 +84,7 @@ def __repr__(self) -> str: async def get( self, key: str, - prototype: BufferPrototype = default_buffer_prototype, + prototype: BufferPrototype, byte_range: tuple[int | None, int | None] | None = None, ) -> Buffer | None: path = _dereference_path(self.path, key) @@ -99,7 +99,7 @@ async def get( end = length else: end = None - value: Buffer = prototype.buffer.from_bytes( + value = prototype.buffer.from_bytes( await ( self._fs._cat_file(path, start=byte_range[0], end=end) if byte_range diff --git a/tests/v3/test_buffer.py b/tests/v3/test_buffer.py index e814afef15..77e1b6b688 100644 --- a/tests/v3/test_buffer.py +++ b/tests/v3/test_buffer.py @@ -68,7 +68,10 @@ async def get( ) -> Buffer | None: if "json" not in key: assert prototype.buffer is MyBuffer - return await super().get(key, byte_range) + ret = await super().get(key=key, prototype=prototype, byte_range=byte_range) + if ret is not None: + assert isinstance(ret, prototype.buffer) + return ret def test_nd_array_like(xp): diff --git a/tests/v3/test_store/test_remote.py b/tests/v3/test_store/test_remote.py index 98206d427f..0dc399be42 100644 --- a/tests/v3/test_store/test_remote.py +++ b/tests/v3/test_store/test_remote.py @@ -88,7 +88,7 @@ async def test_basic(): data = b"hello" await store.set("foo", Buffer.from_bytes(data)) assert await store.exists("foo") - assert (await store.get("foo")).to_bytes() == data + assert (await store.get("foo", prototype=default_buffer_prototype)).to_bytes() == data out = await store.get_partial_values( prototype=default_buffer_prototype, key_ranges=[("foo", (1, None))] ) From 5a4a50fe9636cdf36bd9ca9fb45f5845b0f98763 Mon Sep 17 00:00:00 2001 From: Tom White Date: Tue, 25 Jun 2024 13:30:45 +0100 Subject: [PATCH 0588/1078] Allow 'chunks' as an alias for 'chunk_shape' in array creation. 
(#1991) --- src/zarr/api/asynchronous.py | 6 +++++- tests/v3/test_api.py | 23 ++++++++++++++++++++++- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/src/zarr/api/asynchronous.py b/src/zarr/api/asynchronous.py index 6cf7378bfa..fa63ab46a8 100644 --- a/src/zarr/api/asynchronous.py +++ b/src/zarr/api/asynchronous.py @@ -646,7 +646,11 @@ async def create( if zarr_format == 2 and chunks is None: chunks = shape if zarr_format == 3 and chunk_shape is None: - chunk_shape = shape + if chunks is not None: + chunk_shape = chunks + chunks = None + else: + chunk_shape = shape if order is not None: warnings.warn( diff --git a/tests/v3/test_api.py b/tests/v3/test_api.py index 31e6fbfcd9..67e2904a83 100644 --- a/tests/v3/test_api.py +++ b/tests/v3/test_api.py @@ -5,7 +5,28 @@ import zarr from zarr import Array, Group from zarr.abc.store import Store -from zarr.api.synchronous import load, open, open_group, save, save_array, save_group +from zarr.api.synchronous import create, load, open, open_group, save, save_array, save_group + + +def test_create_array(memory_store: Store) -> None: + store = memory_store + + # create array + z = create(shape=100, store=store) + assert isinstance(z, Array) + assert z.shape == (100,) + + # create array, overwrite, specify chunk shape + z = create(shape=200, chunk_shape=20, store=store, overwrite=True) + assert isinstance(z, Array) + assert z.shape == (200,) + assert z.chunks == (20,) + + # create array, overwrite, specify chunk shape via chunks param + z = create(shape=400, chunks=40, store=store, overwrite=True) + assert isinstance(z, Array) + assert z.shape == (400,) + assert z.chunks == (40,) def test_open_array(memory_store: Store) -> None: From bc30f2eb65a0928d7138048a8073d5e7469fb065 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 26 Jun 2024 14:56:03 +0200 Subject: [PATCH 0589/1078] Use f-strings instead of legacy interpolation (#1995) --- src/zarr/v2/hierarchy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/zarr/v2/hierarchy.py b/src/zarr/v2/hierarchy.py index b0660d181d..25e47311b6 100644 --- a/src/zarr/v2/hierarchy.py +++ b/src/zarr/v2/hierarchy.py @@ -1195,11 +1195,11 @@ def move(self, source, dest): contains_array(self._store, source) or contains_group(self._store, source, explicit_only=False) ): - raise ValueError('The source, "%s", does not exist.' % source) + raise ValueError(f'The source, "{source}", does not exist.') if contains_array(self._store, dest) or contains_group( self._store, dest, explicit_only=False ): - raise ValueError('The dest, "%s", already exists.' % dest) + raise ValueError(f'The dest, "{dest}", already exists.') # Ensure groups needed for `dest` exist. 
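        # e.g. a dest of "foo/bar/baz" implies that the parent groups "foo"
        # and "foo/bar" must already exist (or be created) before the move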
if "/" in dest: From b1bb15ad6e70d90207759edffbcf18e72ec41c0f Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 26 Jun 2024 14:56:18 +0200 Subject: [PATCH 0590/1078] Unnecessary use of a comprehension (#1994) --- src/zarr/v2/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zarr/v2/util.py b/src/zarr/v2/util.py index 6926bb2d14..6233687086 100644 --- a/src/zarr/v2/util.py +++ b/src/zarr/v2/util.py @@ -786,4 +786,4 @@ def __contains__(self, key: object) -> bool: return key in self._keys def __repr__(self) -> str: - return repr({k: v for k, v in self.items()}) + return repr(dict(self.items())) From 50235c07cdf93bffffe79bd365fdcbcb6bdc41a7 Mon Sep 17 00:00:00 2001 From: Tom White Date: Wed, 26 Jun 2024 14:29:33 +0100 Subject: [PATCH 0591/1078] Handle Path in `make_store_path` (#1992) * Handle Path in `make_store_path` * Add test for `make_store_path` --------- Co-authored-by: Davis Bennett --- src/zarr/store/core.py | 2 ++ tests/v3/test_store/test_core.py | 36 ++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 tests/v3/test_store/test_core.py diff --git a/src/zarr/store/core.py b/src/zarr/store/core.py index 512c8383eb..caa30d6997 100644 --- a/src/zarr/store/core.py +++ b/src/zarr/store/core.py @@ -79,6 +79,8 @@ def make_store_path(store_like: StoreLike | None, *, mode: OpenMode | None = Non if mode is None: mode = "w" # exception to the default mode = 'r' return StorePath(MemoryStore(mode=mode)) + elif isinstance(store_like, Path): + return StorePath(LocalStore(store_like, mode=mode or "r")) elif isinstance(store_like, str): return StorePath(LocalStore(Path(store_like), mode=mode or "r")) raise TypeError diff --git a/tests/v3/test_store/test_core.py b/tests/v3/test_store/test_core.py new file mode 100644 index 0000000000..b573b0fef5 --- /dev/null +++ b/tests/v3/test_store/test_core.py @@ -0,0 +1,36 @@ +from pathlib import Path + +import pytest + +from zarr.store.core import make_store_path +from zarr.store.local import LocalStore +from zarr.store.memory import MemoryStore + + +def test_make_store_path(tmpdir) -> None: + # None + store_path = make_store_path(None) + assert isinstance(store_path.store, MemoryStore) + + # str + store_path = make_store_path(str(tmpdir)) + assert isinstance(store_path.store, LocalStore) + assert Path(store_path.store.root) == Path(tmpdir) + + # Path + store_path = make_store_path(Path(tmpdir)) + assert isinstance(store_path.store, LocalStore) + assert Path(store_path.store.root) == Path(tmpdir) + + # Store + store_path = make_store_path(store_path.store) + assert isinstance(store_path.store, LocalStore) + assert Path(store_path.store.root) == Path(tmpdir) + + # StorePath + store_path = make_store_path(store_path) + assert isinstance(store_path.store, LocalStore) + assert Path(store_path.store.root) == Path(tmpdir) + + with pytest.raises(TypeError): + make_store_path(1) From 61b940484972f82fe0e9c9acf5b66ceb4cf9b2ba Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 27 Jun 2024 08:32:08 +0200 Subject: [PATCH 0592/1078] Unnecessary comprehension (#1996) * Unnecessary comprehension `all` can take a generator. * Unnecessary comprehension `tuple` can take a generator. 
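For illustration, the pattern this cleanup leans on: ``all`` and ``tuple``
accept any iterable, so the intermediate list is never needed::

    >>> all(isinstance(x, str) for x in ("a", "b"))  # no temporary list is built
    True
    >>> tuple(i * i for i in range(3))               # a generator suffices
    (0, 1, 4)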
--- src/zarr/indexing.py | 2 +- src/zarr/metadata.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index bb5ed660cf..29b6dd790a 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -1023,7 +1023,7 @@ def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_gri # flatten selection selection_broadcast = tuple(dim_sel.reshape(-1) for dim_sel in selection_broadcast) chunks_multi_index_broadcast = tuple( - [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast] + dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast ) # ravel chunk indices diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index 729c7ba13c..ef7edbd560 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -436,7 +436,7 @@ def update_attributes(self, attributes: dict[str, JSON]) -> Self: def parse_dimension_names(data: None | Iterable[str]) -> tuple[str, ...] | None: if data is None: return data - elif all([isinstance(x, str) for x in data]): + elif all(isinstance(x, str) for x in data): return tuple(data) else: msg = f"Expected either None or a iterable of str, got {type(data)}" From b3010fcddb5f14aaed51bf456888e636df8c2697 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 27 Jun 2024 10:10:03 +0200 Subject: [PATCH 0593/1078] Merge collapsible if statements (#1999) --- src/zarr/codecs/bytes.py | 15 +++++++++------ src/zarr/codecs/pipeline.py | 4 +--- src/zarr/codecs/registry.py | 11 +++++------ src/zarr/v2/convenience.py | 11 +++++------ src/zarr/v2/util.py | 7 +++---- 5 files changed, 23 insertions(+), 25 deletions(-) diff --git a/src/zarr/codecs/bytes.py b/src/zarr/codecs/bytes.py index 0b9a5c089e..80b596a157 100644 --- a/src/zarr/codecs/bytes.py +++ b/src/zarr/codecs/bytes.py @@ -97,12 +97,15 @@ async def _encode_single( chunk_spec: ArraySpec, ) -> Buffer | None: assert isinstance(chunk_array, NDBuffer) - if chunk_array.dtype.itemsize > 1: - if self.endian is not None and self.endian != chunk_array.byteorder: - # type-ignore is a numpy bug - # see https://github.com/numpy/numpy/issues/26473 - new_dtype = chunk_array.dtype.newbyteorder(self.endian.name) # type: ignore[arg-type] - chunk_array = chunk_array.astype(new_dtype) + if ( + chunk_array.dtype.itemsize > 1 + and self.endian is not None + and self.endian != chunk_array.byteorder + ): + # type-ignore is a numpy bug + # see https://github.com/numpy/numpy/issues/26473 + new_dtype = chunk_array.dtype.newbyteorder(self.endian.name) # type: ignore[arg-type] + chunk_array = chunk_array.astype(new_dtype) nd_array = chunk_array.as_ndarray_like() # Flatten the nd-array (only copy if needed) and reinterpret as bytes diff --git a/src/zarr/codecs/pipeline.py b/src/zarr/codecs/pipeline.py index a7f47661b8..f75491d29f 100644 --- a/src/zarr/codecs/pipeline.py +++ b/src/zarr/codecs/pipeline.py @@ -315,9 +315,7 @@ def _merge_chunk_array( ) else: chunk_array = existing_chunk_array.copy() # make a writable copy - if chunk_selection == (): - chunk_value = value - elif is_scalar(value.as_ndarray_like(), chunk_spec.dtype): + if chunk_selection == () or is_scalar(value.as_ndarray_like(), chunk_spec.dtype): chunk_value = value else: chunk_value = value[out_selection] diff --git a/src/zarr/codecs/registry.py b/src/zarr/codecs/registry.py index 2f2b09499f..0956ce75d0 100644 --- a/src/zarr/codecs/registry.py +++ b/src/zarr/codecs/registry.py @@ -25,12 +25,11 @@ def 
register_codec(key: str, codec_cls: type[Codec]) -> None: def get_codec_class(key: str) -> type[Codec]: item = __codec_registry.get(key) - if item is None: - if key in __lazy_load_codecs: - # logger.debug("Auto loading codec '%s' from entrypoint", codec_id) - cls = __lazy_load_codecs[key].load() - register_codec(key, cls) - item = __codec_registry.get(key) + if item is None and key in __lazy_load_codecs: + # logger.debug("Auto loading codec '%s' from entrypoint", codec_id) + cls = __lazy_load_codecs[key].load() + register_codec(key, cls) + item = __codec_registry.get(key) if item: return item raise KeyError(key) diff --git a/src/zarr/v2/convenience.py b/src/zarr/v2/convenience.py index aa322bfb98..c066ee59e0 100644 --- a/src/zarr/v2/convenience.py +++ b/src/zarr/v2/convenience.py @@ -680,12 +680,11 @@ def copy_store( # decide what to do do_copy = True - if if_exists != "replace": - if dest_key in dest: - if if_exists == "raise": - raise CopyError("key {!r} exists in destination".format(dest_key)) - elif if_exists == "skip": - do_copy = False + if if_exists != "replace" and dest_key in dest: + if if_exists == "raise": + raise CopyError("key {!r} exists in destination".format(dest_key)) + elif if_exists == "skip": + do_copy = False # take action if do_copy: diff --git a/src/zarr/v2/util.py b/src/zarr/v2/util.py index 6233687086..7e3bd788ec 100644 --- a/src/zarr/v2/util.py +++ b/src/zarr/v2/util.py @@ -428,10 +428,9 @@ def __init__(self, obj, depth=0, level=None): self.level = level def get_children(self): - if hasattr(self.obj, "values"): - if self.level is None or self.depth < self.level: - depth = self.depth + 1 - return [TreeNode(o, depth=depth, level=self.level) for o in self.obj.values()] + if hasattr(self.obj, "values") and (self.level is None or self.depth < self.level): + depth = self.depth + 1 + return [TreeNode(o, depth=depth, level=self.level) for o in self.obj.values()] return [] def get_text(self): From d6201acb24d713415a83a5cbacfbb72ce3a45798 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 27 Jun 2024 20:06:56 +0200 Subject: [PATCH 0594/1078] Stop ignoring these ruff rules (#2001) All issues seem to have been fixed. --- pyproject.toml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d61eadea84..5b97feb80a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -183,10 +183,7 @@ extend-select = [ "RUF", ] ignore = [ - "RUF003", "RUF005", - "RUF012", - "RUF015", ] [tool.mypy] From f0dd375e2fec74004cba7adf713437f74ca120ce Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 27 Jun 2024 23:23:10 +0200 Subject: [PATCH 0595/1078] Unnecessary comprehension (#1997) Use `list(...)` instead. --- src/zarr/abc/codec.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/src/zarr/abc/codec.py b/src/zarr/abc/codec.py index 1d7106e25a..8ce9cc0043 100644 --- a/src/zarr/abc/codec.py +++ b/src/zarr/abc/codec.py @@ -187,10 +187,7 @@ async def decode_partial( Iterable[NDBuffer | None] """ return await concurrent_map( - [ - (byte_getter, selection, chunk_spec) - for byte_getter, selection, chunk_spec in batch_info - ], + list(batch_info), self._decode_partial_single, config.get("async.concurrency"), ) @@ -227,10 +224,7 @@ async def encode_partial( The chunk spec contains information about the chunk. 
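        Each element of `batch_info` is handed to `concurrent_map`, which
        fans the argument tuples out to `self._encode_partial_single` under
        the concurrency limit taken from the `async.concurrency` config key.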
""" await concurrent_map( - [ - (byte_setter, chunk_array, selection, chunk_spec) - for byte_setter, chunk_array, selection, chunk_spec in batch_info - ], + list(batch_info), self._encode_partial_single, config.get("async.concurrency"), ) @@ -402,7 +396,7 @@ async def batching_helper( batch_info: Iterable[tuple[CodecInput | None, ArraySpec]], ) -> list[CodecOutput | None]: return await concurrent_map( - [(chunk_array, chunk_spec) for chunk_array, chunk_spec in batch_info], + list(batch_info), noop_for_none(func), config.get("async.concurrency"), ) From 08b576360153b74012f629e1a5d50e793d058876 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 27 Jun 2024 23:23:37 +0200 Subject: [PATCH 0596/1078] Fix string interpolation (#1998) Fix typo in legacy string interpolation (`,` instead of `%`) by replacing with f-string. --- src/zarr/store/remote.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py index 50a02dcbcd..e0b69cac50 100644 --- a/src/zarr/store/remote.py +++ b/src/zarr/store/remote.py @@ -69,7 +69,7 @@ def __init__( self.path = url.path.rstrip("/") self._fs = url.fs else: - raise ValueError("URL not understood, %s", url) + raise ValueError(f"URL not understood, {url}") self.allowed_exceptions = allowed_exceptions # test instantiate file system if not self._fs.async_impl: From e42a6be86d0153c9cf275c188eec91c9c3ed99b6 Mon Sep 17 00:00:00 2001 From: Hannes Spitz <44113112+brokkoli71@users.noreply.github.com> Date: Fri, 28 Jun 2024 13:10:23 +0200 Subject: [PATCH 0597/1078] Fix indexing with bools (#1968) * test z[selection] for orthogonal selection * include boolean indexing in is_pure_orthogonal_indexing * Revert "test z[selection] for orthogonal selection" This reverts commit 38578dd7172d533403a755795412bae9c8364955. * add test_indexing_equals_numpy * extend _test_get_mask_selection for square bracket notation * fix is_pure_fancy_indexing for mask selection * add test_orthogonal_bool_indexing_like_numpy_ix * fix for mypy * ruff format * fix is_pure_orthogonal_indexing * fix is_pure_orthogonal_indexing * replace deprecated ~ by not * restrict is_integer to not bool * correct typing Co-authored-by: Joe Hamman * correct typing * check if bool list has only bools * check if bool list has only bools * fix list unpacking in test for python3.10 * Apply spelling suggestions from code review Co-authored-by: Davis Bennett * fix mypy --------- Co-authored-by: Joe Hamman Co-authored-by: Davis Bennett --- src/zarr/array.py | 2 +- src/zarr/indexing.py | 55 ++++++++++++++++++++++++--------------- tests/v3/test_indexing.py | 55 +++++++++++++++++++++++++++++++++++++-- 3 files changed, 88 insertions(+), 24 deletions(-) diff --git a/src/zarr/array.py b/src/zarr/array.py index 4318050dd5..26a19e64ab 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -403,7 +403,7 @@ def attrs(self) -> dict[str, JSON]: @property def read_only(self) -> bool: - return bool(~self.store_path.store.writeable) + return bool(not self.store_path.store.writeable) @property def path(self) -> str: diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 29b6dd790a..ae4aa0681b 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -87,21 +87,23 @@ def ceildiv(a: float, b: float) -> int: def is_integer(x: Any) -> TypeGuard[int]: - """True if x is an integer (both pure Python or NumPy). 
+ """True if x is an integer (both pure Python or NumPy).""" + return isinstance(x, numbers.Integral) and not is_bool(x) - Note that Python's bool is considered an integer too. - """ - return isinstance(x, numbers.Integral) + +def is_bool(x: Any) -> TypeGuard[bool | np.bool_]: + """True if x is a boolean (both pure Python or NumPy).""" + return type(x) in [bool, np.bool_] def is_integer_list(x: Any) -> TypeGuard[list[int]]: - """True if x is a list of integers. + """True if x is a list of integers.""" + return isinstance(x, list) and len(x) > 0 and all(is_integer(i) for i in x) - This function assumes ie *does not check* that all elements of the list - have the same type. Mixed type lists will result in other errors that will - bubble up anyway. - """ - return isinstance(x, list) and len(x) > 0 and is_integer(x[0]) + +def is_bool_list(x: Any) -> TypeGuard[list[bool | np.bool_]]: + """True if x is a list of boolean.""" + return isinstance(x, list) and len(x) > 0 and all(is_bool(i) for i in x) def is_integer_array(x: Any, ndim: int | None = None) -> TypeGuard[npt.NDArray[np.intp]]: @@ -118,6 +120,10 @@ def is_bool_array(x: Any, ndim: int | None = None) -> TypeGuard[npt.NDArray[np.b return t +def is_int_or_bool_iterable(x: Any) -> bool: + return is_integer_list(x) or is_integer_array(x) or is_bool_array(x) or is_bool_list(x) + + def is_scalar(value: Any, dtype: np.dtype[Any]) -> bool: if np.isscalar(value): return True @@ -129,7 +135,7 @@ def is_scalar(value: Any, dtype: np.dtype[Any]) -> bool: def is_pure_fancy_indexing(selection: Any, ndim: int) -> bool: - """Check whether a selection contains only scalars or integer array-likes. + """Check whether a selection contains only scalars or integer/bool array-likes. Parameters ---------- @@ -142,9 +148,14 @@ def is_pure_fancy_indexing(selection: Any, ndim: int) -> bool: True if the selection is a pure fancy indexing expression (ie not mixed with boolean or slices). """ + if is_bool_array(selection): + # is mask selection + return True + if ndim == 1: - if is_integer_list(selection) or is_integer_array(selection): + if is_integer_list(selection) or is_integer_array(selection) or is_bool_list(selection): return True + # if not, we go through the normal path below, because a 1-tuple # of integers is also allowed. no_slicing = ( @@ -166,19 +177,21 @@ def is_pure_orthogonal_indexing(selection: Selection, ndim: int) -> TypeGuard[Or if not ndim: return False - # Case 1: Selection is a single iterable of integers - if is_integer_list(selection) or is_integer_array(selection, ndim=1): + selection_normalized = (selection,) if not isinstance(selection, tuple) else selection + + # Case 1: Selection contains of iterable of integers or boolean + if len(selection_normalized) == ndim and all( + is_int_or_bool_iterable(s) for s in selection_normalized + ): return True - # Case two: selection contains either zero or one integer iterables. + # Case 2: selection contains either zero or one integer iterables. 
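+    # e.g. (slice(None), [1, 3]) or (0, [2, 5]): at most one integer/bool
+    # iterable, combined only with plain integers and slices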
# All other selection elements are slices or integers return ( - isinstance(selection, tuple) - and len(selection) == ndim - and sum(is_integer_list(elem) or is_integer_array(elem) for elem in selection) <= 1 + len(selection_normalized) <= ndim + and sum(is_int_or_bool_iterable(s) for s in selection_normalized) <= 1 and all( - is_integer_list(elem) or is_integer_array(elem) or isinstance(elem, int | slice) - for elem in selection + is_int_or_bool_iterable(s) or isinstance(s, int | slice) for s in selection_normalized ) ) @@ -1023,7 +1036,7 @@ def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_gri # flatten selection selection_broadcast = tuple(dim_sel.reshape(-1) for dim_sel in selection_broadcast) chunks_multi_index_broadcast = tuple( - dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast + [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast] ) # ravel chunk indices diff --git a/tests/v3/test_indexing.py b/tests/v3/test_indexing.py index 13a7d953e1..c84c091089 100644 --- a/tests/v3/test_indexing.py +++ b/tests/v3/test_indexing.py @@ -204,7 +204,6 @@ def test_get_basic_selection_0d(store: StorePath, use_out: bool, value: Any, dty slice(50, 150, 10), ] - basic_selections_1d_bad = [ # only positive step supported slice(None, None, -1), @@ -305,7 +304,6 @@ def test_get_basic_selection_1d(store: StorePath): (Ellipsis, slice(None), slice(None)), ] - basic_selections_2d_bad = [ # bad stuff 2.3, @@ -1272,6 +1270,8 @@ def _test_get_mask_selection(a, z, selection): assert_array_equal(expect, actual) actual = z.vindex[selection] assert_array_equal(expect, actual) + actual = z[selection] + assert_array_equal(expect, actual) mask_selections_1d_bad = [ @@ -1344,6 +1344,9 @@ def _test_set_mask_selection(v, a, z, selection): z[:] = 0 z.vindex[selection] = v[selection] assert_array_equal(a, z[:]) + z[:] = 0 + z[selection] = v[selection] + assert_array_equal(a, z[:]) def test_set_mask_selection_1d(store: StorePath): @@ -1726,3 +1729,51 @@ def test_accessed_chunks(shape, chunks, ops): ) == 1 # Check that no other chunks were accessed assert len(delta_counts) == 0 + + +@pytest.mark.parametrize( + "selection", + [ + # basic selection + [...], + [1, ...], + [slice(None)], + [1, 3], + [[1, 2, 3], 9], + [np.arange(1000)], + [slice(5, 15)], + [slice(2, 4), 4], + [[1, 3]], + # mask selection + [np.tile([True, False], (1000, 5))], + [np.full((1000, 10), False)], + # coordinate selection + [[1, 2, 3, 4], [5, 6, 7, 8]], + [[100, 200, 300], [4, 5, 6]], + ], +) +def test_indexing_equals_numpy(store, selection): + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + # note: in python 3.10 a[*selection] is not valid unpacking syntax + expected = a[(*selection,)] + actual = z[(*selection,)] + assert_array_equal(expected, actual, err_msg=f"selection: {selection}") + + +@pytest.mark.parametrize( + "selection", + [ + [np.tile([True, False], 500), np.tile([True, False], 5)], + [np.full(1000, False), np.tile([True, False], 5)], + [np.full(1000, True), np.full(10, True)], + [np.full(1000, True), [True, False] * 5], + ], +) +def test_orthogonal_bool_indexing_like_numpy_ix(store, selection): + a = np.arange(10000, dtype=int).reshape(1000, 10) + z = zarr_array_from_numpy_array(store, a, chunk_shape=(300, 3)) + expected = a[np.ix_(*selection)] + # note: in python 3.10 z[*selection] is not valid unpacking syntax + actual = z[(*selection,)] + assert_array_equal(expected, actual, err_msg=f"{selection=}") From 
0c5b0e6d17af44bda32b77b3986898fb3d85310e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 14:33:44 -0700 Subject: [PATCH 0598/1078] chore: update pre-commit hooks (#1989) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.9 → v0.5.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.9...v0.5.0) - [github.com/pre-commit/mirrors-mypy: v1.10.0 → v1.10.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.10.0...v1.10.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1ef226cd28..9db1b75f75 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 'v0.4.9' + rev: 'v0.5.0' hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -22,7 +22,7 @@ repos: hooks: - id: check-yaml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.10.0 + rev: v1.10.1 hooks: - id: mypy files: src From 6c2ca632b5591e7fba5df2d258ef0c834db48393 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 1 Jul 2024 15:15:01 -0700 Subject: [PATCH 0599/1078] Bump NumPy to 2.0 (#1983) * Bump NumPy to 2.0 * update pyproject.toml and add python 3.12 * update pyproject.toml and add python 3.12 * revert 3.12 --- .github/workflows/test.yml | 2 +- pyproject.toml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a0e67ad79a..4a4249e8eb 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -22,7 +22,7 @@ jobs: strategy: matrix: python-version: ['3.10', '3.11'] - numpy-version: ['1.24', '1.26', '2.0.0rc1'] + numpy-version: ['1.24', '1.26', '2.0'] dependency-set: ["minimal", "optional"] steps: diff --git a/pyproject.toml b/pyproject.toml index 5b97feb80a..f1be6725b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -125,13 +125,13 @@ extra-dependencies = [ features = ["extra"] [[tool.hatch.envs.test.matrix]] -python = ["3.10", "3.11"] -numpy = ["1.24", "1.26", "2.0.0rc1"] +python = ["3.10", "3.11", "3.12"] +numpy = ["1.24", "1.26", "2.0"] version = ["minimal"] [[tool.hatch.envs.test.matrix]] -python = ["3.10", "3.11"] -numpy = ["1.24", "1.26", "2.0.0rc1"] +python = ["3.10", "3.11", "3.12"] +numpy = ["1.24", "1.26", "2.0"] features = ["optional"] [tool.hatch.envs.test.scripts] From ace96f569490882e89c181387d27f20b0babd6a1 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 1 Jul 2024 20:29:06 -0700 Subject: [PATCH 0600/1078] build(ci): enable python 3.12 in github actions (#2005) --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 4a4249e8eb..48e579711b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.10', '3.11'] + python-version: ['3.10', '3.11', '3.12'] numpy-version: ['1.24', '1.26', '2.0'] dependency-set: ["minimal", "optional"] From 22e3fc5070f21c8131f3293723dc1cda85db8665 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 3 Jul 2024 07:05:55 -0700 Subject: [PATCH 0601/1078] doc: copy 3.0.0.alpha 
changelog into release.rst (#2007) * doc: copy 3.0.0.alpha changelog into release.rst * Fix formatting for * Fix pre-commit * Fix pre-commit * Fix pre-commit * Fix RTD build --------- Co-authored-by: Sanket Verma --- docs/release.rst | 257 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 257 insertions(+) diff --git a/docs/release.rst b/docs/release.rst index 7e54035915..dbf390a800 100644 --- a/docs/release.rst +++ b/docs/release.rst @@ -38,6 +38,159 @@ Enhancements * Implement listing of the sub-arrays and sub-groups for a V3 ``Group``. By :user:`Davis Bennett ` :issue:`1726`. +* Bootstrap v3 branch with zarrita. + By :user:`Joe Hamman ` :issue:`1584`. + +* Extensible codecs for V3. + By :user:`Norman Rzepka ` :issue:`1588`. + +* Don't import from tests. + By :user:`Davis Bennett ` :issue:`1601`. + +* Listable V3 Stores. + By :user:`Joe Hamman ` :issue:`1634`. + +* Codecs without array metadata. + By :user:`Norman Rzepka ` :issue:`1632`. + +* fix sync group class methods. + By :user:`Joe Hamman ` :issue:`1652`. + +* implement eq for LocalStore. + By :user:`Charoula Kyriakides ` :issue:`1792`. + +* V3 reorg. + By :user:`Joe Hamman ` :issue:`1809`. + +* [v3] Sync with futures. + By :user:`Davis Bennett ` :issue:`1804`. + +* implement group.members. + By :user:`Davis Bennett ` :issue:`1726`. + +* Remove implicit groups. + By :user:`Joe Hamman ` :issue:`1827`. + +* feature(store): ``list_*`` -> AsyncGenerators. + By :user:`Joe Hamman ` :issue:`1844`. + +* Test codec entrypoints. + By :user:`Norman Rzepka ` :issue:`1835`. + +* Remove extra v3 sync module. + By :user:`Max Jones ` :issue:`1856`. + +* Use donfig for V3 configuration. + By :user:`Max Jones ` :issue:`1655`. + +* groundwork for V3 group tests. + By :user:`Davis Bennett ` :issue:`1743`. + +* [v3] First step to generalizes ndarray and bytes. + By :user:`Mads R. B. Kristensen ` :issue:`1826`. + +* Reworked codec pipelines. + By :user:`Norman Rzepka ` :issue:`1670`. + +* Followup on codecs. + By :user:`Norman Rzepka ` :issue:`1889`. + +* Protocols for Buffer and NDBuffer. + By :user:`Mads R. B. Kristensen ` :issue:`1899`. + +* [V3] Expand store tests. + By :user:`Davis Bennett ` :issue:`1900`. + +* [v3] Feature: Store open mode. + By :user:`Joe Hamman ` :issue:`1911`. + +* fix(types): Group.info -> NotImplementedError. + By :user:`Joe Hamman ` :issue:`1936`. + +* feature(typing): add py.typed file to package root. + By :user:`Joe Hamman ` :issue:`1935`. + +* Support all indexing variants. + By :user:`Norman Rzepka ` :issue:`1917`. + +* Feature: group and array name properties. + By :user:`Joe Hamman ` :issue:`1940`. + +* implement .chunks on v3 arrays. + By :user:`Ryan Abernathey ` :issue:`1929`. + +* Fixes bug in transpose. + By :user:`Norman Rzepka ` :issue:`1949`. + +* Buffer Prototype Argument. + By :user:`Mads R. B. Kristensen ` :issue:`1910`. + +* Feature: Top level V3 API. + By :user:`Joe Hamman ` :issue:`1884`. + +* Basic working FsspecStore. + By :user:`Martin Durant `; :issue:`1785`. + +Typing +~~~~~~ + +* Resolve Mypy errors in v3 branch. + By :user:`Daniel Jahn ` :issue:`1692`. + +* Allow dmypy to be run on v3 branch. + By :user:`David Stansby ` :issue:`1780`. + +* Remove unused typing ignore comments. + By :user:`David Stansby ` :issue:`1781`. + +* Check untyped defs on v3. + By :user:`David Stansby ` :issue:`1784`. + +* [v3] Enable some more strict mypy options. + By :user:`David Stansby ` :issue:`1793`. + +* [v3] Disallow generic Any typing. + By :user:`David Stansby ` :issue:`1794`. 
+ +* Disallow incomplete type definitions. + By :user:`David Stansby ` :issue:`1814`. + +* Disallow untyped calls. + By :user:`David Stansby ` :issue:`1811`. + +* Fix some untyped calls. + By :user:`David Stansby ` :issue:`1865`. + +* Disallow untyped defs. + By :user:`David Stansby ` :issue:`1834`. + +* Add more typing to zarr.group. + By :user:`David Stansby ` :issue:`1870`. + +* Fix any generics in zarr.array. + By :user:`David Stansby ` :issue:`1861`. + +* Remove some unused mypy overrides. + By :user:`David Stansby ` :issue:`1894`. + +* Finish typing zarr.metadata. + By :user:`David Stansby ` :issue:`1880`. + +* Disallow implicit re-exports. + By :user:`David Stansby ` :issue:`1908`. + +* Make typing strict. + By :user:`David Stansby ` :issue:`1879`. + +* Enable extra mypy error codes. + By :user:`David Stansby ` :issue:`1909`. + +* Enable warn_unreachable for mypy. + By :user:`David Stansby ` :issue:`1937`. + +* Fix final typing errors. + By :user:`David Stansby ` :issue:`1939`. + Maintenance ~~~~~~~~~~~ @@ -45,6 +198,110 @@ Maintenance The dependency relationship is now reversed: the test suite imports this class from ``zarr-python``. By :user:`Davis Bennett ` :issue:`1601`. +* [V3] Update minimum supported Python and Numpy versions. + By :user:`Joe Hamman ` :issue:`1638` + +* use src layout and use hatch for packaging. + By :user:`Davis Bennett ` :issue:`1592`. + +* temporarily disable mypy in v3 directory. + By :user:`Joe Hamman ` :issue:`1649`. + +* create hatch test env. + By :user:`Ryan Abernathey ` :issue:`1650`. + +* removed unused environments and workflows. + By :user:`Ryan Abernathey ` :issue:`1651`. + +* Add env variables to sprint setup instructions. + By :user:`Max Jones ` :issue:`1654`. + +* Add test matrix for V3. + By :user:`Max Jones ` :issue:`1656`. + +* Remove attrs. + By :user:`Davis Bennett ` :issue:`1660`. + +* Specify hatch envs using GitHub actions matrix for v3 tests. + By :user:`Max Jones ` :issue:`1728`. + +* black -> ruff format + cleanup. + By :user:`Saransh Chopra ` :issue:`1639`. + +* Remove old v3. + By :user:`Davis Bennett ` :issue:`1742`. + +* V3 update pre commit. + By :user:`Joe Hamman ` :issue:`1808`. + +* remove windows testing on v3 branch. + By :user:`Joe Hamman ` :issue:`1817`. + +* fix: add mypy to test dependencies. + By :user:`Davis Bennett ` :issue:`1789`. + +* chore(ci): add numpy 2 release candidate to test matrix. + By :user:`Joe Hamman ` :issue:`1828`. + +* fix dependencies. + By :user:`Norman Rzepka ` :issue:`1840`. + +* Add pytest to mypy dependencies. + By :user:`David Stansby ` :issue:`1846`. + +* chore(pre-commit): update pre-commit versions and remove attrs dep mypy section. + By :user:`Joe Hamman ` :issue:`1848`. + +* Enable some ruff rules (RUF) and fix issues. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1869`. + +* Configure Ruff to apply flake8-bugbear/isort/pyupgrade. + By :user:`Norman Rzepka ` :issue:`1890`. + +* chore(ci): remove mypy from test action in favor of pre-commit action. + By :user:`Joe Hamman ` :issue:`1887`. + +* Enable ruff/flake8-raise rules (RSE) and fix issues. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1872`. + +* Apply assorted ruff/refurb rules (FURB). + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1873`. + +* Enable ruff/flake8-implicit-str-concat rules (ISC) and fix issues. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1868`. + +* Add numpy to mypy pre-commit check env. + By :user:`David Stansby ` :issue:`1893`. + +* remove fixture files from src. 
+ By :user:`Davis Bennett ` :issue:`1897`. + +* Fix list of packages in mypy pre-commit environment. + By :user:`David Stansby ` :issue:`1907`. + +* Run sphinx directly on readthedocs. + By :user:`David Stansby ` :issue:`1919`. + +* Apply preview ruff rules. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1942`. + +* Enable and apply ruff rule RUF009. + By :user:`Dimitri Papadopoulos Orfanos ` :issue:`1941`. + +Documentation +~~~~~~~~~~~~~ + +* Specify docs hatch env for v3 branch. + By :user:`Max Jones ` :issue:`1655`. + +* Development installation/contributing docs updates. + By :user:`Alden Keefe Sampson ` :issue:`1643`. + +* chore: update project settings per scientific python repo-review. + By :user:`Joe Hamman ` :issue:`1863`. + +* doc: update release notes for 3.0.0.alpha. + By :user:`Joe Hamman ` :issue:`1959`. .. _release_2.18.2: From e84057a7e741b77e80ec4604e6d7bf1d4dd96a9a Mon Sep 17 00:00:00 2001 From: Davis Bennett Date: Fri, 5 Jul 2024 10:38:12 +0200 Subject: [PATCH 0602/1078] make shardingcodec pickleable (#2011) * use tmpdir for test * type annotations * refactor morton decode and remove destructuring in call to max * parametrize sharding codec test by data shape * refactor codec tests * add test for pickling sharding codec, and make it pass * Revert "use tmpdir for test" This reverts commit 6ad2ca61b78fd1ecfc10d7fc80ae5055ed1a9d8b. * move fixtures into conftest.py * Update tests/v3/test_codecs/test_endian.py --- src/zarr/codecs/sharding.py | 16 + src/zarr/indexing.py | 35 +- tests/v3/conftest.py | 41 +- tests/v3/test_codecs.py | 1049 ------------------------ tests/v3/test_codecs/__init__.py | 0 tests/v3/test_codecs/test_blosc.py | 57 ++ tests/v3/test_codecs/test_codecs.py | 486 +++++++++++ tests/v3/test_codecs/test_endian.py | 87 ++ tests/v3/test_codecs/test_gzip.py | 24 + tests/v3/test_codecs/test_sharding.py | 324 ++++++++ tests/v3/test_codecs/test_transpose.py | 121 +++ tests/v3/test_codecs/test_zstd.py | 25 + 12 files changed, 1189 insertions(+), 1076 deletions(-) delete mode 100644 tests/v3/test_codecs.py create mode 100644 tests/v3/test_codecs/__init__.py create mode 100644 tests/v3/test_codecs/test_blosc.py create mode 100644 tests/v3/test_codecs/test_codecs.py create mode 100644 tests/v3/test_codecs/test_endian.py create mode 100644 tests/v3/test_codecs/test_gzip.py create mode 100644 tests/v3/test_codecs/test_sharding.py create mode 100644 tests/v3/test_codecs/test_transpose.py create mode 100644 tests/v3/test_codecs/test_zstd.py diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py index def95b206d..e3ef664b94 100644 --- a/src/zarr/codecs/sharding.py +++ b/src/zarr/codecs/sharding.py @@ -324,6 +324,22 @@ def __init__( object.__setattr__(self, "_get_index_chunk_spec", lru_cache()(self._get_index_chunk_spec)) object.__setattr__(self, "_get_chunks_per_shard", lru_cache()(self._get_chunks_per_shard)) + # todo: typedict return type + def __getstate__(self) -> dict[str, Any]: + return self.to_dict() + + def __setstate__(self, state: dict[str, Any]) -> None: + config = state["configuration"] + object.__setattr__(self, "chunk_shape", parse_shapelike(config["chunk_shape"])) + object.__setattr__(self, "codecs", parse_codecs(config["codecs"])) + object.__setattr__(self, "index_codecs", parse_codecs(config["index_codecs"])) + object.__setattr__(self, "index_location", parse_index_location(config["index_location"])) + + # Use instance-local lru_cache to avoid memory leaks + object.__setattr__(self, "_get_chunk_spec", lru_cache()(self._get_chunk_spec)) + 
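+        # (the lru_cache wrappers built in __init__ are never pickled, since
+        # __getstate__ round-trips through to_dict(), so every per-instance
+        # cache is recreated from scratch during unpickling)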
object.__setattr__(self, "_get_index_chunk_spec", lru_cache()(self._get_index_chunk_spec)) + object.__setattr__(self, "_get_chunks_per_shard", lru_cache()(self._get_chunks_per_shard)) + @classmethod def from_dict(cls, data: dict[str, JSON]) -> Self: _, configuration_parsed = parse_named_configuration(data, "sharding_indexed") diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index ae4aa0681b..1f483e1c15 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -1220,24 +1220,25 @@ def make_slice_selection(selection: Any) -> list[slice]: return ls -def morton_order_iter(chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: - def decode_morton(z: int, chunk_shape: ChunkCoords) -> ChunkCoords: - # Inspired by compressed morton code as implemented in Neuroglancer - # https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#compressed-morton-code - bits = tuple(math.ceil(math.log2(c)) for c in chunk_shape) - max_coords_bits = max(*bits) - input_bit = 0 - input_value = z - out = [0 for _ in range(len(chunk_shape))] - - for coord_bit in range(max_coords_bits): - for dim in range(len(chunk_shape)): - if coord_bit < bits[dim]: - bit = (input_value >> input_bit) & 1 - out[dim] |= bit << coord_bit - input_bit += 1 - return tuple(out) +def decode_morton(z: int, chunk_shape: ChunkCoords) -> ChunkCoords: + # Inspired by compressed morton code as implemented in Neuroglancer + # https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#compressed-morton-code + bits = tuple(math.ceil(math.log2(c)) for c in chunk_shape) + max_coords_bits = max(bits) + input_bit = 0 + input_value = z + out = [0] * len(chunk_shape) + + for coord_bit in range(max_coords_bits): + for dim in range(len(chunk_shape)): + if coord_bit < bits[dim]: + bit = (input_value >> input_bit) & 1 + out[dim] |= bit << coord_bit + input_bit += 1 + return tuple(out) + +def morton_order_iter(chunk_shape: ChunkCoords) -> Iterator[ChunkCoords]: for i in range(product(chunk_shape)): yield decode_morton(i, chunk_shape) diff --git a/tests/v3/conftest.py b/tests/v3/conftest.py index 6b58cce412..8b75d9f2f8 100644 --- a/tests/v3/conftest.py +++ b/tests/v3/conftest.py @@ -4,7 +4,10 @@ from types import ModuleType from typing import TYPE_CHECKING -from zarr.common import ZarrFormat +from _pytest.compat import LEGACY_PATH + +from zarr.abc.store import Store +from zarr.common import ChunkCoords, MemoryOrder, ZarrFormat from zarr.group import AsyncGroup if TYPE_CHECKING: @@ -12,6 +15,7 @@ import pathlib from dataclasses import dataclass, field +import numpy as np import pytest from zarr.store import LocalStore, MemoryStore, StorePath @@ -26,40 +30,40 @@ def parse_store( if store == "memory": return MemoryStore(mode="w") if store == "remote": - return RemoteStore(mode="w") + return RemoteStore(url=path, mode="w") raise AssertionError @pytest.fixture(params=[str, pathlib.Path]) -def path_type(request): +def path_type(request: pytest.FixtureRequest) -> Any: return request.param # todo: harmonize this with local_store fixture @pytest.fixture -def store_path(tmpdir): +def store_path(tmpdir: LEGACY_PATH) -> StorePath: store = LocalStore(str(tmpdir), mode="w") p = StorePath(store) return p @pytest.fixture(scope="function") -def local_store(tmpdir): +def local_store(tmpdir: LEGACY_PATH) -> LocalStore: return LocalStore(str(tmpdir), mode="w") @pytest.fixture(scope="function") -def remote_store(): - return RemoteStore(mode="w") +def remote_store(url: str) -> RemoteStore: + 
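With `decode_morton` hoisted out of `morton_order_iter` to module scope, the compressed Morton decoding can be exercised directly. A small worked example, consistent with the `test_morton` expectations elsewhere in this patch:

    from zarr.indexing import decode_morton, morton_order_iter

    # bit-interleaved index 3 over a (2, 2) grid decodes to coordinate (1, 1)
    assert decode_morton(3, (2, 2)) == (1, 1)
    assert list(morton_order_iter((2, 2))) == [(0, 0), (1, 0), (0, 1), (1, 1)]
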
return RemoteStore(url, mode="w") @pytest.fixture(scope="function") -def memory_store(): +def memory_store() -> MemoryStore: return MemoryStore(mode="w") @pytest.fixture(scope="function") -def store(request: str, tmpdir): +def store(request: pytest.FixtureRequest, tmpdir: LEGACY_PATH) -> Store: param = request.param return parse_store(param, str(tmpdir)) @@ -72,7 +76,7 @@ class AsyncGroupRequest: @pytest.fixture(scope="function") -async def async_group(request: pytest.FixtureRequest, tmpdir) -> AsyncGroup: +async def async_group(request: pytest.FixtureRequest, tmpdir: LEGACY_PATH) -> AsyncGroup: param: AsyncGroupRequest = request.param store = parse_store(param.store, str(tmpdir)) @@ -90,3 +94,20 @@ def xp(request: pytest.FixtureRequest) -> Iterator[ModuleType]: """Fixture to parametrize over numpy-like libraries""" yield pytest.importorskip(request.param) + + +@dataclass +class ArrayRequest: + shape: ChunkCoords + dtype: str + order: MemoryOrder + + +@pytest.fixture +def array_fixture(request: pytest.FixtureRequest) -> np.ndarray: + array_request: ArrayRequest = request.param + return ( + np.arange(np.prod(array_request.shape)) + .reshape(array_request.shape, order=array_request.order) + .astype(array_request.dtype) + ) diff --git a/tests/v3/test_codecs.py b/tests/v3/test_codecs.py deleted file mode 100644 index 7cb0d0f804..0000000000 --- a/tests/v3/test_codecs.py +++ /dev/null @@ -1,1049 +0,0 @@ -from __future__ import annotations - -import json -from collections.abc import Iterator -from dataclasses import dataclass -from typing import Literal - -import numpy as np -import pytest - -import zarr.v2 -from zarr.abc.codec import Codec -from zarr.abc.store import Store -from zarr.array import Array, AsyncArray -from zarr.codecs import ( - BloscCodec, - BytesCodec, - GzipCodec, - ShardingCodec, - ShardingCodecIndexLocation, - TransposeCodec, - ZstdCodec, -) -from zarr.config import config -from zarr.indexing import Selection, morton_order_iter -from zarr.store import MemoryStore, StorePath -from zarr.testing.utils import assert_bytes_equal - - -@dataclass(frozen=True) -class _AsyncArrayProxy: - array: AsyncArray - - def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy: - return _AsyncArraySelectionProxy(self.array, selection) - - -@dataclass(frozen=True) -class _AsyncArraySelectionProxy: - array: AsyncArray - selection: Selection - - async def get(self) -> np.ndarray: - return await self.array.getitem(self.selection) - - async def set(self, value: np.ndarray): - return await self.array.setitem(self.selection, value) - - -@pytest.fixture -def store() -> Iterator[Store]: - yield StorePath(MemoryStore(mode="w")) - - -@pytest.fixture -def sample_data() -> np.ndarray: - return np.arange(0, 128 * 128 * 128, dtype="uint16").reshape((128, 128, 128), order="F") - - -def order_from_dim(order: Literal["F", "C"], ndim: int) -> tuple[int, ...]: - if order == "F": - return tuple(ndim - x - 1 for x in range(ndim)) - else: - return tuple(range(ndim)) - - -@pytest.mark.parametrize("index_location", ["start", "end"]) -def test_sharding( - store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation -): - a = Array.create( - store / "sample", - shape=sample_data.shape, - chunk_shape=(64, 64, 64), - dtype=sample_data.dtype, - fill_value=0, - codecs=[ - ShardingCodec( - chunk_shape=(32, 32, 32), - codecs=[ - TransposeCodec(order=order_from_dim("F", sample_data.ndim)), - BytesCodec(), - BloscCodec(cname="lz4"), - ], - index_location=index_location, - ) - ], - ) - - a[:, :, 
:] = sample_data - - read_data = a[0 : sample_data.shape[0], 0 : sample_data.shape[1], 0 : sample_data.shape[2]] - assert sample_data.shape == read_data.shape - assert np.array_equal(sample_data, read_data) - - -@pytest.mark.parametrize("index_location", ["start", "end"]) -def test_sharding_partial( - store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation -): - a = Array.create( - store / "sample", - shape=tuple(a + 10 for a in sample_data.shape), - chunk_shape=(64, 64, 64), - dtype=sample_data.dtype, - fill_value=0, - codecs=[ - ShardingCodec( - chunk_shape=(32, 32, 32), - codecs=[ - TransposeCodec(order=order_from_dim("F", sample_data.ndim)), - BytesCodec(), - BloscCodec(cname="lz4"), - ], - index_location=index_location, - ) - ], - ) - - a[10:, 10:, 10:] = sample_data - - read_data = a[0:10, 0:10, 0:10] - assert np.all(read_data == 0) - - read_data = a[10:, 10:, 10:] - assert sample_data.shape == read_data.shape - assert np.array_equal(sample_data, read_data) - - -@pytest.mark.parametrize("index_location", ["start", "end"]) -def test_sharding_partial_read( - store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation -): - a = Array.create( - store / "sample", - shape=tuple(a + 10 for a in sample_data.shape), - chunk_shape=(64, 64, 64), - dtype=sample_data.dtype, - fill_value=1, - codecs=[ - ShardingCodec( - chunk_shape=(32, 32, 32), - codecs=[ - TransposeCodec(order=order_from_dim("F", sample_data.ndim)), - BytesCodec(), - BloscCodec(cname="lz4"), - ], - index_location=index_location, - ) - ], - ) - - read_data = a[0:10, 0:10, 0:10] - assert np.all(read_data == 1) - - -@pytest.mark.parametrize("index_location", ["start", "end"]) -def test_sharding_partial_overwrite( - store: Store, sample_data: np.ndarray, index_location: ShardingCodecIndexLocation -): - data = sample_data[:10, :10, :10] - - a = Array.create( - store / "sample", - shape=tuple(a + 10 for a in data.shape), - chunk_shape=(64, 64, 64), - dtype=data.dtype, - fill_value=1, - codecs=[ - ShardingCodec( - chunk_shape=(32, 32, 32), - codecs=[ - TransposeCodec(order=order_from_dim("F", data.ndim)), - BytesCodec(), - BloscCodec(cname="lz4"), - ], - index_location=index_location, - ) - ], - ) - - a[:10, :10, :10] = data - - read_data = a[0:10, 0:10, 0:10] - assert np.array_equal(data, read_data) - - data = data + 10 - a[:10, :10, :10] = data - read_data = a[0:10, 0:10, 0:10] - assert np.array_equal(data, read_data) - - -@pytest.mark.parametrize( - "outer_index_location", - ["start", "end"], -) -@pytest.mark.parametrize( - "inner_index_location", - ["start", "end"], -) -def test_nested_sharding( - store: Store, - sample_data: np.ndarray, - outer_index_location: ShardingCodecIndexLocation, - inner_index_location: ShardingCodecIndexLocation, -): - a = Array.create( - store / "l4_sample" / "color" / "1", - shape=sample_data.shape, - chunk_shape=(64, 64, 64), - dtype=sample_data.dtype, - fill_value=0, - codecs=[ - ShardingCodec( - chunk_shape=(32, 32, 32), - codecs=[ - ShardingCodec(chunk_shape=(16, 16, 16), index_location=inner_index_location) - ], - index_location=outer_index_location, - ) - ], - ) - - a[:, :, :] = sample_data - - read_data = a[0 : sample_data.shape[0], 0 : sample_data.shape[1], 0 : sample_data.shape[2]] - assert sample_data.shape == read_data.shape - assert np.array_equal(sample_data, read_data) - - -@pytest.mark.parametrize("input_order", ["F", "C"]) -@pytest.mark.parametrize("store_order", ["F", "C"]) -@pytest.mark.parametrize("runtime_write_order", ["F", "C"]) 
-@pytest.mark.parametrize("runtime_read_order", ["F", "C"])
-@pytest.mark.parametrize("with_sharding", [True, False])
-async def test_order(
-    store: Store,
-    input_order: Literal["F", "C"],
-    store_order: Literal["F", "C"],
-    runtime_write_order: Literal["F", "C"],
-    runtime_read_order: Literal["F", "C"],
-    with_sharding: bool,
-):
-    data = np.arange(0, 256, dtype="uint16").reshape((32, 8), order=input_order)
-
-    codecs_: list[Codec] = (
-        [
-            ShardingCodec(
-                chunk_shape=(16, 8),
-                codecs=[TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()],
-            )
-        ]
-        if with_sharding
-        else [TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()]
-    )
-
-    with config.set({"array.order": runtime_write_order}):
-        a = await AsyncArray.create(
-            store / "order",
-            shape=data.shape,
-            chunk_shape=(32, 8),
-            dtype=data.dtype,
-            fill_value=0,
-            chunk_key_encoding=("v2", "."),
-            codecs=codecs_,
-        )
-
-        await _AsyncArrayProxy(a)[:, :].set(data)
-        read_data = await _AsyncArrayProxy(a)[:, :].get()
-        assert np.array_equal(data, read_data)
-
-    with config.set({"array.order": runtime_read_order}):
-        a = await AsyncArray.open(
-            store / "order",
-        )
-        read_data = await _AsyncArrayProxy(a)[:, :].get()
-        assert np.array_equal(data, read_data)
-
-    if runtime_read_order == "F":
-        assert read_data.flags["F_CONTIGUOUS"]
-        assert not read_data.flags["C_CONTIGUOUS"]
-    else:
-        assert not read_data.flags["F_CONTIGUOUS"]
-        assert read_data.flags["C_CONTIGUOUS"]
-
-    if not with_sharding:
-        # Compare with zarr-python
-        z = zarr.v2.create(
-            shape=data.shape,
-            chunks=(32, 8),
-            dtype="<u2",
-            ...
[... remaining deleted lines of tests/v3/test_codecs.py not recoverable in this copy ...]
diff --git a/tests/v3/test_codecs/test_blosc.py b/tests/v3/test_codecs/test_blosc.py
new file mode 100644
--- /dev/null
+++ b/tests/v3/test_codecs/test_blosc.py
@@ -0,0 +1,57 @@
+import json
+
+import numpy as np
+import pytest
+
+from zarr.abc.store import Store
+from zarr.array import AsyncArray
+from zarr.buffer import default_buffer_prototype
+from zarr.codecs import BloscCodec, BytesCodec, ShardingCodec
+from zarr.store.core import StorePath
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("dtype", ["uint8", "uint16"])
+async def test_blosc_evolve(store: Store, dtype: str) -> None:
+    typesize = np.dtype(dtype).itemsize
+    path = "blosc_evolve"
+    spath = StorePath(store, path)
+    await AsyncArray.create(
+        spath,
+        shape=(16, 16),
+        chunk_shape=(16, 16),
+        dtype=dtype,
+        fill_value=0,
+        codecs=[BytesCodec(), BloscCodec()],
+    )
+
+    zarr_json = json.loads(
+        (await store.get(f"{path}/zarr.json", prototype=default_buffer_prototype)).to_bytes()
+    )
+    blosc_configuration_json = zarr_json["codecs"][1]["configuration"]
+    assert blosc_configuration_json["typesize"] == typesize
+    if typesize == 1:
+        assert blosc_configuration_json["shuffle"] == "bitshuffle"
+    else:
+        assert blosc_configuration_json["shuffle"] == "shuffle"
+
+    path2 = "blosc_evolve_sharding"
+    spath2 = StorePath(store, path2)
+    await AsyncArray.create(
+        spath2,
+        shape=(16, 16),
+        chunk_shape=(16, 16),
+        dtype=dtype,
+        fill_value=0,
+        codecs=[ShardingCodec(chunk_shape=(16, 16), codecs=[BytesCodec(), BloscCodec()])],
+    )
+
+    zarr_json = json.loads(
+        (await store.get(f"{path2}/zarr.json", prototype=default_buffer_prototype)).to_bytes()
+    )
+    blosc_configuration_json = zarr_json["codecs"][0]["configuration"]["codecs"][1]["configuration"]
+    assert blosc_configuration_json["typesize"] == typesize
+    if typesize == 1:
+        assert blosc_configuration_json["shuffle"] == "bitshuffle"
+    else:
+        assert blosc_configuration_json["shuffle"] == "shuffle"
diff --git a/tests/v3/test_codecs/test_codecs.py b/tests/v3/test_codecs/test_codecs.py
new file mode 100644
index 0000000000..1104805d4b
--- /dev/null
+++ b/tests/v3/test_codecs/test_codecs.py
@@ -0,0 +1,486 @@
+from __future__ import annotations
+
+import json
+from dataclasses import dataclass
+
+import numpy as np
+import pytest
+
+import zarr.v2
+from zarr.abc.codec import Codec
+from zarr.abc.store import Store
+from zarr.array import Array, AsyncArray
+from zarr.buffer import default_buffer_prototype
+from zarr.codecs import (
+    BytesCodec,
+    GzipCodec,
+    ShardingCodec,
+    TransposeCodec,
+)
+from zarr.common import MemoryOrder
+from zarr.config import config
+from zarr.indexing import Selection, morton_order_iter
+from zarr.store import StorePath
+from zarr.testing.utils import assert_bytes_equal
+
+
+@dataclass(frozen=True)
+class _AsyncArrayProxy:
+    array: AsyncArray
+
+    def __getitem__(self, selection: Selection) -> _AsyncArraySelectionProxy:
+        return _AsyncArraySelectionProxy(self.array, selection)
+
+
+@dataclass(frozen=True)
+class _AsyncArraySelectionProxy:
+    array: AsyncArray
+    selection: Selection
+
+    async def get(self) -> np.ndarray:
+        return await self.array.getitem(self.selection)
+
+    async def set(self, value: np.ndarray) -> None:
+        return await self.array.setitem(self.selection, value)
+
+
+def order_from_dim(order: MemoryOrder, ndim: int) -> tuple[int, ...]:
+    if order == "F":
+        return tuple(ndim - x - 1 for x in range(ndim))
+    else:
+        return tuple(range(ndim))
+
+
+def test_sharding_pickle() -> None:
+    """
+    Test that sharding codecs can be pickled
+    """
+    pass
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("input_order", ["F", "C"])
+@pytest.mark.parametrize("store_order", ["F", "C"])
+@pytest.mark.parametrize("runtime_write_order", ["F", "C"])
+@pytest.mark.parametrize("runtime_read_order", ["F", "C"])
+@pytest.mark.parametrize("with_sharding", [True, False])
+async def test_order(
+    store: Store,
+    input_order: MemoryOrder,
+    store_order: MemoryOrder,
+    runtime_write_order: MemoryOrder,
+    runtime_read_order: MemoryOrder,
+    with_sharding: bool,
+) -> None:
+    data = np.arange(0, 256, dtype="uint16").reshape((32, 8), order=input_order)
+    path = "order"
+    spath = StorePath(store, path=path)
+    codecs_: list[Codec] = (
+        [
+            ShardingCodec(
+                chunk_shape=(16, 8),
+                codecs=[TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()],
+            )
+        ]
+        if with_sharding
+        else [TransposeCodec(order=order_from_dim(store_order, data.ndim)), BytesCodec()]
+    )
+
+    with config.set({"array.order": runtime_write_order}):
+        a = await AsyncArray.create(
+            spath,
+            shape=data.shape,
+            chunk_shape=(32, 8),
+            dtype=data.dtype,
+            fill_value=0,
+            chunk_key_encoding=("v2", "."),
+            codecs=codecs_,
+        )
+
+        await _AsyncArrayProxy(a)[:, :].set(data)
+        read_data = await _AsyncArrayProxy(a)[:, :].get()
+        assert np.array_equal(data, read_data)
+
+    with config.set({"array.order": runtime_read_order}):
+        a = await AsyncArray.open(
+            spath,
+        )
+        read_data = await _AsyncArrayProxy(a)[:, :].get()
+        assert np.array_equal(data, read_data)
+
+    if runtime_read_order == "F":
+        assert read_data.flags["F_CONTIGUOUS"]
+        assert not read_data.flags["C_CONTIGUOUS"]
+    else:
+        assert not read_data.flags["F_CONTIGUOUS"]
+        assert read_data.flags["C_CONTIGUOUS"]
+
+    if not with_sharding:
+        # Compare with zarr-python
+        z = zarr.v2.create(
+            shape=data.shape,
+            chunks=(32, 8),
+            dtype="<u2",
+            ...
+        )
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("input_order", ["F", "C"])
+@pytest.mark.parametrize("runtime_write_order", ["F", "C"])
+@pytest.mark.parametrize("runtime_read_order", ["F", "C"])
+@pytest.mark.parametrize("with_sharding", [True, False])
+def test_order_implicit(
+    store: Store,
+    input_order: MemoryOrder,
+    runtime_write_order: MemoryOrder,
+    runtime_read_order: MemoryOrder,
+    with_sharding: bool,
+) -> None:
+    data = np.arange(0, 256, dtype="uint16").reshape((16, 16), order=input_order)
+    path = "order_implicit"
+    spath = StorePath(store, path)
+    codecs_: list[Codec] | None = [ShardingCodec(chunk_shape=(8, 8))] if with_sharding else None
+
+    with config.set({"array.order": runtime_write_order}):
+        a = Array.create(
+            spath,
+            shape=data.shape,
+            chunk_shape=(16, 16),
+            dtype=data.dtype,
+            fill_value=0,
+            codecs=codecs_,
+        )
+
+        a[:, :] = data
+
+    with config.set({"array.order": runtime_read_order}):
+        a = Array.open(spath)
+        read_data = a[:, :]
+        assert np.array_equal(data, read_data)
+
+    if runtime_read_order == "F":
+        assert
read_data.flags["F_CONTIGUOUS"] + assert not read_data.flags["C_CONTIGUOUS"] + else: + assert not read_data.flags["F_CONTIGUOUS"] + assert read_data.flags["C_CONTIGUOUS"] + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_open(store: Store) -> None: + spath = StorePath(store) + a = Array.create( + spath, + shape=(16, 16), + chunk_shape=(16, 16), + dtype="int32", + fill_value=0, + ) + b = Array.open(spath) + assert a.metadata == b.metadata + + +def test_morton() -> None: + assert list(morton_order_iter((2, 2))) == [(0, 0), (1, 0), (0, 1), (1, 1)] + assert list(morton_order_iter((2, 2, 2))) == [ + (0, 0, 0), + (1, 0, 0), + (0, 1, 0), + (1, 1, 0), + (0, 0, 1), + (1, 0, 1), + (0, 1, 1), + (1, 1, 1), + ] + assert list(morton_order_iter((2, 2, 2, 2))) == [ + (0, 0, 0, 0), + (1, 0, 0, 0), + (0, 1, 0, 0), + (1, 1, 0, 0), + (0, 0, 1, 0), + (1, 0, 1, 0), + (0, 1, 1, 0), + (1, 1, 1, 0), + (0, 0, 0, 1), + (1, 0, 0, 1), + (0, 1, 0, 1), + (1, 1, 0, 1), + (0, 0, 1, 1), + (1, 0, 1, 1), + (0, 1, 1, 1), + (1, 1, 1, 1), + ] + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_write_partial_chunks(store: Store) -> None: + data = np.arange(0, 256, dtype="uint16").reshape((16, 16)) + spath = StorePath(store) + a = Array.create( + spath, + shape=data.shape, + chunk_shape=(20, 20), + dtype=data.dtype, + fill_value=1, + ) + a[0:16, 0:16] = data + assert np.array_equal(a[0:16, 0:16], data) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +async def test_delete_empty_chunks(store: Store) -> None: + data = np.ones((16, 16)) + path = "delete_empty_chunks" + spath = StorePath(store, path) + a = await AsyncArray.create( + spath, + shape=data.shape, + chunk_shape=(32, 32), + dtype=data.dtype, + fill_value=1, + ) + await _AsyncArrayProxy(a)[:16, :16].set(np.zeros((16, 16))) + await _AsyncArrayProxy(a)[:16, :16].set(data) + assert np.array_equal(await _AsyncArrayProxy(a)[:16, :16].get(), data) + assert await store.get(f"{path}/c0/0", prototype=default_buffer_prototype) is None + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +async def test_zarr_compat(store: Store) -> None: + data = np.zeros((16, 18), dtype="uint16") + path = "zarr_compat3" + spath = StorePath(store, path) + a = await AsyncArray.create( + spath, + shape=data.shape, + chunk_shape=(10, 10), + dtype=data.dtype, + chunk_key_encoding=("v2", "."), + fill_value=1, + ) + + z2 = zarr.v2.create( + shape=data.shape, + chunks=(10, 10), + dtype=data.dtype, + compressor=None, + fill_value=1, + ) + + await _AsyncArrayProxy(a)[:16, :18].set(data) + z2[:16, :18] = data + assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get()) + assert np.array_equal(data, z2[:16, :18]) + + assert_bytes_equal( + z2._store["0.0"], await store.get(f"{path}/0.0", prototype=default_buffer_prototype) + ) + assert_bytes_equal( + z2._store["0.1"], await store.get(f"{path}/0.1", prototype=default_buffer_prototype) + ) + assert_bytes_equal( + z2._store["1.0"], await store.get(f"{path}/1.0", prototype=default_buffer_prototype) + ) + assert_bytes_equal( + z2._store["1.1"], await store.get(f"{path}/1.1", prototype=default_buffer_prototype) + ) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +async def test_zarr_compat_F(store: Store) -> None: + data = np.zeros((16, 18), dtype="uint16", order="F") + path = "zarr_compatF3" + spath = StorePath(store, path) + a = await AsyncArray.create( + spath, + shape=data.shape, + 
chunk_shape=(10, 10), + dtype=data.dtype, + chunk_key_encoding=("v2", "."), + fill_value=1, + codecs=[TransposeCodec(order=order_from_dim("F", data.ndim)), BytesCodec()], + ) + + z2 = zarr.v2.create( + shape=data.shape, + chunks=(10, 10), + dtype=data.dtype, + compressor=None, + order="F", + fill_value=1, + ) + + await _AsyncArrayProxy(a)[:16, :18].set(data) + z2[:16, :18] = data + assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get()) + assert np.array_equal(data, z2[:16, :18]) + + assert_bytes_equal( + z2._store["0.0"], await store.get(f"{path}/0.0", prototype=default_buffer_prototype) + ) + assert_bytes_equal( + z2._store["0.1"], await store.get(f"{path}/0.1", prototype=default_buffer_prototype) + ) + assert_bytes_equal( + z2._store["1.0"], await store.get(f"{path}/1.0", prototype=default_buffer_prototype) + ) + assert_bytes_equal( + z2._store["1.1"], await store.get(f"{path}/1.1", prototype=default_buffer_prototype) + ) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +async def test_dimension_names(store: Store) -> None: + data = np.arange(0, 256, dtype="uint16").reshape((16, 16)) + path = "dimension_names" + spath = StorePath(store, path) + await AsyncArray.create( + spath, + shape=data.shape, + chunk_shape=(16, 16), + dtype=data.dtype, + fill_value=0, + dimension_names=("x", "y"), + ) + + assert (await AsyncArray.open(spath)).metadata.dimension_names == ( + "x", + "y", + ) + path2 = "dimension_names2" + spath2 = StorePath(store, path2) + await AsyncArray.create( + spath2, + shape=data.shape, + chunk_shape=(16, 16), + dtype=data.dtype, + fill_value=0, + ) + + assert (await AsyncArray.open(spath2)).metadata.dimension_names is None + zarr_json_buffer = await store.get(f"{path2}/zarr.json", prototype=default_buffer_prototype) + assert zarr_json_buffer is not None + assert "dimension_names" not in json.loads(zarr_json_buffer.to_bytes()) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_invalid_metadata(store: Store) -> None: + spath = StorePath(store, "invalid_metadata") + with pytest.raises(ValueError): + Array.create( + spath, + shape=(16, 16, 16), + chunk_shape=(16, 16), + dtype=np.dtype("uint8"), + fill_value=0, + ) + spath2 = StorePath(store, "invalid_endian") + with pytest.raises(ValueError): + Array.create( + spath2, + shape=(16, 16), + chunk_shape=(16, 16), + dtype=np.dtype("uint8"), + fill_value=0, + codecs=[ + BytesCodec(endian="big"), + TransposeCodec(order=order_from_dim("F", 2)), + ], + ) + spath3 = StorePath(store, "invalid_order") + with pytest.raises(TypeError): + Array.create( + spath3, + shape=(16, 16), + chunk_shape=(16, 16), + dtype=np.dtype("uint8"), + fill_value=0, + codecs=[ + BytesCodec(), + TransposeCodec(order="F"), + ], + ) + spath4 = StorePath(store, "invalid_missing_bytes_codec") + with pytest.raises(ValueError): + Array.create( + spath4, + shape=(16, 16), + chunk_shape=(16, 16), + dtype=np.dtype("uint8"), + fill_value=0, + codecs=[ + TransposeCodec(order=order_from_dim("F", 2)), + ], + ) + spath5 = StorePath(store, "invalid_inner_chunk_shape") + with pytest.raises(ValueError): + Array.create( + spath5, + shape=(16, 16), + chunk_shape=(16, 16), + dtype=np.dtype("uint8"), + fill_value=0, + codecs=[ + ShardingCodec(chunk_shape=(8,)), + ], + ) + spath6 = StorePath(store, "invalid_inner_chunk_shape") + with pytest.raises(ValueError): + Array.create( + spath6, + shape=(16, 16), + chunk_shape=(16, 16), + dtype=np.dtype("uint8"), + fill_value=0, + codecs=[ + 
ShardingCodec(chunk_shape=(8, 7)),
+            ],
+        )
+    spath7 = StorePath(store, "warning_inefficient_codecs")
+    with pytest.warns(UserWarning):
+        Array.create(
+            spath7,
+            shape=(16, 16),
+            chunk_shape=(16, 16),
+            dtype=np.dtype("uint8"),
+            fill_value=0,
+            codecs=[
+                ShardingCodec(chunk_shape=(8, 8)),
+                GzipCodec(),
+            ],
+        )
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+async def test_resize(store: Store) -> None:
+    data = np.zeros((16, 18), dtype="uint16")
+    path = "resize"
+    spath = StorePath(store, path)
+    a = await AsyncArray.create(
+        spath,
+        shape=data.shape,
+        chunk_shape=(10, 10),
+        dtype=data.dtype,
+        chunk_key_encoding=("v2", "."),
+        fill_value=1,
+    )
+
+    await _AsyncArrayProxy(a)[:16, :18].set(data)
+    assert await store.get(f"{path}/1.1", prototype=default_buffer_prototype) is not None
+    assert await store.get(f"{path}/0.0", prototype=default_buffer_prototype) is not None
+    assert await store.get(f"{path}/0.1", prototype=default_buffer_prototype) is not None
+    assert await store.get(f"{path}/1.0", prototype=default_buffer_prototype) is not None
+
+    a = await a.resize((10, 12))
+    assert a.metadata.shape == (10, 12)
+    assert await store.get(f"{path}/0.0", prototype=default_buffer_prototype) is not None
+    assert await store.get(f"{path}/0.1", prototype=default_buffer_prototype) is not None
+    assert await store.get(f"{path}/1.0", prototype=default_buffer_prototype) is None
+    assert await store.get(f"{path}/1.1", prototype=default_buffer_prototype) is None
diff --git a/tests/v3/test_codecs/test_endian.py b/tests/v3/test_codecs/test_endian.py
new file mode 100644
index 0000000000..8301a424b9
--- /dev/null
+++ b/tests/v3/test_codecs/test_endian.py
@@ -0,0 +1,87 @@
+from typing import Literal
+
+import numpy as np
+import pytest
+
+import zarr.v2
+from zarr.abc.store import Store
+from zarr.array import AsyncArray
+from zarr.buffer import default_buffer_prototype
+from zarr.codecs import BytesCodec
+from zarr.store.core import StorePath
+from zarr.testing.utils import assert_bytes_equal
+
+from .test_codecs import _AsyncArrayProxy
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("endian", ["big", "little"])
+async def test_endian(store: Store, endian: Literal["big", "little"]) -> None:
+    data = np.arange(0, 256, dtype="uint16").reshape((16, 16))
+    path = "endian"
+    spath = StorePath(store, path)
+    a = await AsyncArray.create(
+        spath,
+        shape=data.shape,
+        chunk_shape=(16, 16),
+        dtype=data.dtype,
+        fill_value=0,
+        chunk_key_encoding=("v2", "."),
+        codecs=[BytesCodec(endian=endian)],
+    )
+
+    await _AsyncArrayProxy(a)[:, :].set(data)
+    readback_data = await _AsyncArrayProxy(a)[:, :].get()
+    assert np.array_equal(data, readback_data)
+
+    # Compare with v2
+    z = zarr.v2.create(
+        shape=data.shape,
+        chunks=(16, 16),
+        dtype=">u2" if endian == "big" else "<u2",
+        ...
+    )
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("dtype_input_endian", [">u2", "<u2"])
+@pytest.mark.parametrize("dtype_store_endian", ["big", "little"])
+async def test_endian_write(
+    store: Store,
+    dtype_input_endian: Literal[">u2", "<u2"],
+    dtype_store_endian: Literal["big", "little"],
+) -> None:
+    data = np.arange(0, 256, dtype=dtype_input_endian).reshape((16, 16))
+    path = "endian"
+    spath = StorePath(store, path)
+    a = await AsyncArray.create(
+        spath,
+        shape=data.shape,
+        chunk_shape=(16, 16),
+        dtype="uint16",
+        fill_value=0,
+        chunk_key_encoding=("v2", "."),
+        codecs=[BytesCodec(endian=dtype_store_endian)],
+    )
+
+    await _AsyncArrayProxy(a)[:, :].set(data)
+    readback_data = await _AsyncArrayProxy(a)[:, :].get()
+    assert np.array_equal(data, readback_data)
+
+    # Compare with zarr-python
+    z = zarr.v2.create(
+        shape=data.shape,
+        chunks=(16, 16),
+        dtype=">u2" if dtype_store_endian == "big" else "<u2",
+        ...
+    )
diff --git a/tests/v3/test_codecs/test_gzip.py b/tests/v3/test_codecs/test_gzip.py
new file mode 100644
--- /dev/null
+++ b/tests/v3/test_codecs/test_gzip.py
@@ -0,0 +1,24 @@
+import numpy as np
+import pytest
+
+from zarr.abc.store import Store
+from zarr.array import Array
+from zarr.codecs import BytesCodec, GzipCodec
+from zarr.store.core import StorePath
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+def test_gzip(store: Store) -> None:
+    data =
np.arange(0, 256, dtype="uint16").reshape((16, 16)) + + a = Array.create( + StorePath(store), + shape=data.shape, + chunk_shape=(16, 16), + dtype=data.dtype, + fill_value=0, + codecs=[BytesCodec(), GzipCodec()], + ) + + a[:, :] = data + assert np.array_equal(data, a[:, :]) diff --git a/tests/v3/test_codecs/test_sharding.py b/tests/v3/test_codecs/test_sharding.py new file mode 100644 index 0000000000..f0031349cb --- /dev/null +++ b/tests/v3/test_codecs/test_sharding.py @@ -0,0 +1,324 @@ +import pickle + +import numpy as np +import pytest + +from zarr.abc.store import Store +from zarr.array import Array, AsyncArray +from zarr.buffer import default_buffer_prototype +from zarr.codecs import ( + BloscCodec, + BytesCodec, + ShardingCodec, + ShardingCodecIndexLocation, + TransposeCodec, +) +from zarr.store.core import StorePath + +from ..conftest import ArrayRequest +from .test_codecs import _AsyncArrayProxy, order_from_dim + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize("index_location", ["start", "end"]) +@pytest.mark.parametrize( + "array_fixture", + [ + ArrayRequest(shape=(128,) * 1, dtype="uint8", order="C"), + ArrayRequest(shape=(128,) * 2, dtype="uint8", order="C"), + ArrayRequest(shape=(128,) * 3, dtype="uint16", order="F"), + ], + indirect=["array_fixture"], +) +@pytest.mark.parametrize("offset", [0, 10]) +def test_sharding( + store: Store, array_fixture: np.ndarray, index_location: ShardingCodecIndexLocation, offset: int +) -> None: + """ + Test that we can create an array with a sharding codec, write data to that array, and get + the same data out via indexing. + """ + data = array_fixture + spath = StorePath(store) + arr = Array.create( + spath, + shape=tuple(s + offset for s in data.shape), + chunk_shape=(64,) * data.ndim, + dtype=data.dtype, + fill_value=6, + codecs=[ + ShardingCodec( + chunk_shape=(32,) * data.ndim, + codecs=[ + TransposeCodec(order=order_from_dim("F", data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + write_region = tuple(slice(offset, None) for dim in range(data.ndim)) + arr[write_region] = data + + if offset > 0: + empty_region = tuple(slice(0, offset) for dim in range(data.ndim)) + assert np.all(arr[empty_region] == arr.metadata.fill_value) + + read_data = arr[write_region] + assert data.shape == read_data.shape + assert np.array_equal(data, read_data) + + +@pytest.mark.parametrize("index_location", ["start", "end"]) +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +@pytest.mark.parametrize( + "array_fixture", + [ + ArrayRequest(shape=(128,) * 3, dtype="uint16", order="F"), + ], + indirect=["array_fixture"], +) +def test_sharding_partial( + store: Store, array_fixture: np.ndarray, index_location: ShardingCodecIndexLocation +) -> None: + data = array_fixture + spath = StorePath(store) + a = Array.create( + spath, + shape=tuple(a + 10 for a in data.shape), + chunk_shape=(64, 64, 64), + dtype=data.dtype, + fill_value=0, + codecs=[ + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + TransposeCodec(order=order_from_dim("F", data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + a[10:, 10:, 10:] = data + + read_data = a[0:10, 0:10, 0:10] + assert np.all(read_data == 0) + + read_data = a[10:, 10:, 10:] + assert data.shape == read_data.shape + assert np.array_equal(data, read_data) + + +@pytest.mark.parametrize( + "array_fixture", + [ + ArrayRequest(shape=(128,) * 3, 
dtype="uint16", order="F"), + ], + indirect=["array_fixture"], +) +@pytest.mark.parametrize("index_location", ["start", "end"]) +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_sharding_partial_read( + store: Store, array_fixture: np.ndarray, index_location: ShardingCodecIndexLocation +) -> None: + data = array_fixture + spath = StorePath(store) + a = Array.create( + spath, + shape=tuple(a + 10 for a in data.shape), + chunk_shape=(64, 64, 64), + dtype=data.dtype, + fill_value=1, + codecs=[ + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + TransposeCodec(order=order_from_dim("F", data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + read_data = a[0:10, 0:10, 0:10] + assert np.all(read_data == 1) + + +@pytest.mark.parametrize( + "array_fixture", + [ + ArrayRequest(shape=(128,) * 3, dtype="uint16", order="F"), + ], + indirect=["array_fixture"], +) +@pytest.mark.parametrize("index_location", ["start", "end"]) +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_sharding_partial_overwrite( + store: Store, array_fixture: np.ndarray, index_location: ShardingCodecIndexLocation +) -> None: + data = array_fixture[:10, :10, :10] + spath = StorePath(store) + a = Array.create( + spath, + shape=tuple(a + 10 for a in data.shape), + chunk_shape=(64, 64, 64), + dtype=data.dtype, + fill_value=1, + codecs=[ + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + TransposeCodec(order=order_from_dim("F", data.ndim)), + BytesCodec(), + BloscCodec(cname="lz4"), + ], + index_location=index_location, + ) + ], + ) + + a[:10, :10, :10] = data + + read_data = a[0:10, 0:10, 0:10] + assert np.array_equal(data, read_data) + + data = data + 10 + a[:10, :10, :10] = data + read_data = a[0:10, 0:10, 0:10] + assert np.array_equal(data, read_data) + + +@pytest.mark.parametrize( + "array_fixture", + [ + ArrayRequest(shape=(128,) * 3, dtype="uint16", order="F"), + ], + indirect=["array_fixture"], +) +@pytest.mark.parametrize( + "outer_index_location", + ["start", "end"], +) +@pytest.mark.parametrize( + "inner_index_location", + ["start", "end"], +) +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_nested_sharding( + store: Store, + array_fixture: np.ndarray, + outer_index_location: ShardingCodecIndexLocation, + inner_index_location: ShardingCodecIndexLocation, +) -> None: + data = array_fixture + spath = StorePath(store) + a = Array.create( + spath, + shape=data.shape, + chunk_shape=(64, 64, 64), + dtype=data.dtype, + fill_value=0, + codecs=[ + ShardingCodec( + chunk_shape=(32, 32, 32), + codecs=[ + ShardingCodec(chunk_shape=(16, 16, 16), index_location=inner_index_location) + ], + index_location=outer_index_location, + ) + ], + ) + + a[:, :, :] = data + + read_data = a[0 : data.shape[0], 0 : data.shape[1], 0 : data.shape[2]] + assert data.shape == read_data.shape + assert np.array_equal(data, read_data) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +def test_open_sharding(store: Store) -> None: + path = "open_sharding" + spath = StorePath(store, path) + a = Array.create( + spath, + shape=(16, 16), + chunk_shape=(16, 16), + dtype="int32", + fill_value=0, + codecs=[ + ShardingCodec( + chunk_shape=(8, 8), + codecs=[ + TransposeCodec(order=order_from_dim("F", 2)), + BytesCodec(), + BloscCodec(), + ], + ) + ], + ) + b = Array.open(spath) + assert a.metadata == b.metadata + + +@pytest.mark.parametrize("store", ("local", 
"memory"), indirect=["store"]) +def test_write_partial_sharded_chunks(store: Store) -> None: + data = np.arange(0, 16 * 16, dtype="uint16").reshape((16, 16)) + spath = StorePath(store) + a = Array.create( + spath, + shape=(40, 40), + chunk_shape=(20, 20), + dtype=data.dtype, + fill_value=1, + codecs=[ + ShardingCodec( + chunk_shape=(10, 10), + codecs=[ + BytesCodec(), + BloscCodec(), + ], + ) + ], + ) + a[0:16, 0:16] = data + assert np.array_equal(a[0:16, 0:16], data) + + +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +async def test_delete_empty_shards(store: Store) -> None: + path = "delete_empty_shards" + spath = StorePath(store, path) + a = await AsyncArray.create( + spath, + shape=(16, 16), + chunk_shape=(8, 16), + dtype="uint16", + fill_value=1, + codecs=[ShardingCodec(chunk_shape=(8, 8))], + ) + await _AsyncArrayProxy(a)[:, :].set(np.zeros((16, 16))) + await _AsyncArrayProxy(a)[8:, :].set(np.ones((8, 16))) + await _AsyncArrayProxy(a)[:, 8:].set(np.ones((16, 8))) + # chunk (0, 0) is full + # chunks (0, 1), (1, 0), (1, 1) are empty + # shard (0, 0) is half-full + # shard (1, 0) is empty + + data = np.ones((16, 16), dtype="uint16") + data[:8, :8] = 0 + assert np.array_equal(data, await _AsyncArrayProxy(a)[:, :].get()) + assert await store.get(f"{path}/c/1/0", prototype=default_buffer_prototype) is None + chunk_bytes = await store.get(f"{path}/c/0/0", prototype=default_buffer_prototype) + assert chunk_bytes is not None and len(chunk_bytes) == 16 * 2 + 8 * 8 * 2 + 4 + + +def test_pickle() -> None: + codec = ShardingCodec(chunk_shape=(8, 8)) + assert pickle.loads(pickle.dumps(codec)) == codec diff --git a/tests/v3/test_codecs/test_transpose.py b/tests/v3/test_codecs/test_transpose.py new file mode 100644 index 0000000000..3fd4350299 --- /dev/null +++ b/tests/v3/test_codecs/test_transpose.py @@ -0,0 +1,121 @@ +import numpy as np +import pytest + +import zarr.v2 +from zarr.abc.codec import Codec +from zarr.abc.store import Store +from zarr.array import Array, AsyncArray +from zarr.buffer import default_buffer_prototype +from zarr.codecs import BytesCodec, ShardingCodec, TransposeCodec +from zarr.common import MemoryOrder +from zarr.config import config +from zarr.store.core import StorePath + +from .test_codecs import _AsyncArrayProxy + + +@pytest.mark.parametrize("input_order", ["F", "C"]) +@pytest.mark.parametrize("runtime_write_order", ["F", "C"]) +@pytest.mark.parametrize("runtime_read_order", ["F", "C"]) +@pytest.mark.parametrize("with_sharding", [True, False]) +@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"]) +async def test_transpose( + store: Store, + input_order: MemoryOrder, + runtime_write_order: MemoryOrder, + runtime_read_order: MemoryOrder, + with_sharding: bool, +) -> None: + data = np.arange(0, 256, dtype="uint16").reshape((1, 32, 8), order=input_order) + spath = StorePath(store, path="transpose") + codecs_: list[Codec] = ( + [ + ShardingCodec( + chunk_shape=(1, 16, 8), + codecs=[TransposeCodec(order=(2, 1, 0)), BytesCodec()], + ) + ] + if with_sharding + else [TransposeCodec(order=(2, 1, 0)), BytesCodec()] + ) + with config.set({"array.order": runtime_write_order}): + a = await AsyncArray.create( + spath, + shape=data.shape, + chunk_shape=(1, 32, 8), + dtype=data.dtype, + fill_value=0, + chunk_key_encoding=("v2", "."), + codecs=codecs_, + ) + + await _AsyncArrayProxy(a)[:, :].set(data) + read_data = await _AsyncArrayProxy(a)[:, :].get() + assert np.array_equal(data, read_data) + + with config.set({"array.order": 
runtime_read_order}):
+        a = await AsyncArray.open(
+            spath,
+        )
+        read_data = await _AsyncArrayProxy(a)[:, :].get()
+        assert np.array_equal(data, read_data)
+
+    if runtime_read_order == "F":
+        assert read_data.flags["F_CONTIGUOUS"]
+        assert not read_data.flags["C_CONTIGUOUS"]
+    else:
+        assert not read_data.flags["F_CONTIGUOUS"]
+        assert read_data.flags["C_CONTIGUOUS"]
+
+    if not with_sharding:
+        # Compare with zarr-python
+        z = zarr.v2.create(
+            shape=data.shape,
+            chunks=(1, 32, 8),
+            dtype="<u2",
+            ...
+        )
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("order", [...])
+def test_transpose_non_self_inverse(store: Store, order: list[int]) -> None:
+    shape = [i + 3 for i in range(len(order))]
+    data = np.arange(0, np.prod(shape), dtype="uint16").reshape(shape)
+    spath = StorePath(store, "transpose_non_self_inverse")
+    a = Array.create(
+        spath,
+        shape=data.shape,
+        chunk_shape=data.shape,
+        dtype=data.dtype,
+        fill_value=0,
+        codecs=[TransposeCodec(order=order), BytesCodec()],
+    )
+    a[:, :] = data
+    read_data = a[:, :]
+    assert np.array_equal(data, read_data)
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+def test_transpose_invalid(
+    store: Store,
+) -> None:
+    data = np.arange(0, 256, dtype="uint16").reshape((1, 32, 8))
+    spath = StorePath(store, "transpose_invalid")
+    for order in [(1, 0), (3, 2, 1), (3, 3, 1)]:
+        with pytest.raises(ValueError):
+            Array.create(
+                spath,
+                shape=data.shape,
+                chunk_shape=(1, 32, 8),
+                dtype=data.dtype,
+                fill_value=0,
+                chunk_key_encoding=("v2", "."),
+                codecs=[TransposeCodec(order=order), BytesCodec()],
+            )
diff --git a/tests/v3/test_codecs/test_zstd.py b/tests/v3/test_codecs/test_zstd.py
new file mode 100644
index 0000000000..1e1b1e02c9
--- /dev/null
+++ b/tests/v3/test_codecs/test_zstd.py
@@ -0,0 +1,25 @@
+import numpy as np
+import pytest
+
+from zarr.abc.store import Store
+from zarr.array import Array
+from zarr.codecs import BytesCodec, ZstdCodec
+from zarr.store.core import StorePath
+
+
+@pytest.mark.parametrize("store", ("local", "memory"), indirect=["store"])
+@pytest.mark.parametrize("checksum", [True, False])
+def test_zstd(store: Store, checksum: bool) -> None:
+    data = np.arange(0, 256, dtype="uint16").reshape((16, 16))
+
+    a = Array.create(
+        StorePath(store, path="zstd"),
+        shape=data.shape,
+        chunk_shape=(16, 16),
+        dtype=data.dtype,
+        fill_value=0,
+        codecs=[BytesCodec(), ZstdCodec(level=0, checksum=checksum)],
+    )
+
+    a[:, :] = data
+    assert np.array_equal(data, a[:, :])

From 33b158974a55f1818f27dcc9a3bd2135c51450ff Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 8 Jul 2024 13:10:49 -0700
Subject: [PATCH 0603/1078] chore: update pre-commit hooks (#2017)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.5.0 → v0.5.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.5.0...v0.5.1)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 9db1b75f75..93bd47ee2f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,7 +7,7 @@ default_language_version:
   python: python3
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: 'v0.5.0'
+    rev: 'v0.5.1'
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]

From b8baa6868c8fa95e6b2948c9fd9c725331ca23ec Mon Sep 17 00:00:00 2001
From: Davis Bennett
Date: Wed, 10 Jul 2024 17:39:20 +0200
Subject: [PATCH 0604/1078] Cast fill value to array's dtype
(#2020) * add fill value parsing routines and tests * add fill_value attribute to array, and test that it works as expected for v3 arrays * Update tests/v3/test_metadata/test_v3.py * clean up docstrings --- src/zarr/array.py | 4 + src/zarr/metadata.py | 130 ++++++++++++++++++++++-- tests/v3/test_array.py | 49 +++++++++ tests/v3/test_metadata.py | 60 ----------- tests/v3/test_metadata/test_v2.py | 20 ++++ tests/v3/test_metadata/test_v3.py | 159 ++++++++++++++++++++++++++++++ 6 files changed, 354 insertions(+), 68 deletions(-) create mode 100644 tests/v3/test_metadata/test_v2.py create mode 100644 tests/v3/test_metadata/test_v3.py diff --git a/src/zarr/array.py b/src/zarr/array.py index 26a19e64ab..1cc4c8ccff 100644 --- a/src/zarr/array.py +++ b/src/zarr/array.py @@ -715,6 +715,10 @@ def order(self) -> Literal["C", "F"]: def read_only(self) -> bool: return self._async_array.read_only + @property + def fill_value(self) -> Any: + return self.metadata.fill_value + def __array__( self, dtype: npt.DTypeLike | None = None, copy: bool | None = None ) -> NDArrayLike: diff --git a/src/zarr/metadata.py b/src/zarr/metadata.py index ef7edbd560..71462b6583 100644 --- a/src/zarr/metadata.py +++ b/src/zarr/metadata.py @@ -2,10 +2,10 @@ import json from abc import ABC, abstractmethod -from collections.abc import Iterable +from collections.abc import Iterable, Sequence from dataclasses import dataclass, field, replace from enum import Enum -from typing import TYPE_CHECKING, Any, Literal +from typing import TYPE_CHECKING, Any, Literal, cast, overload import numpy as np import numpy.typing as npt @@ -32,7 +32,6 @@ ChunkCoords, ZarrFormat, parse_dtype, - parse_fill_value, parse_named_configuration, parse_shapelike, ) @@ -189,7 +188,7 @@ def __init__( chunk_grid_parsed = ChunkGrid.from_dict(chunk_grid) chunk_key_encoding_parsed = ChunkKeyEncoding.from_dict(chunk_key_encoding) dimension_names_parsed = parse_dimension_names(dimension_names) - fill_value_parsed = parse_fill_value(fill_value) + fill_value_parsed = parse_fill_value_v3(fill_value, dtype=data_type_parsed) attributes_parsed = parse_attributes(attributes) codecs_parsed_partial = parse_codecs(codecs) @@ -255,9 +254,18 @@ def encode_chunk_key(self, chunk_coords: ChunkCoords) -> str: return self.chunk_key_encoding.encode_chunk_key(chunk_coords) def to_buffer_dict(self) -> dict[str, Buffer]: - def _json_convert(o: np.dtype[Any] | Enum | Codec) -> str | dict[str, Any]: + def _json_convert(o: Any) -> Any: if isinstance(o, np.dtype): return str(o) + if np.isscalar(o): + # convert numpy scalar to python type, and pass + # python types through + out = getattr(o, "item", lambda: o)() + if isinstance(out, complex): + # python complex types are not JSON serializable, so we use the + # serialization defined in the zarr v3 spec + return [out.real, out.imag] + return out if isinstance(o, Enum): return o.name # this serializes numcodecs compressors @@ -341,7 +349,7 @@ def __init__( order_parsed = parse_indexing_order(order) dimension_separator_parsed = parse_separator(dimension_separator) filters_parsed = parse_filters(filters) - fill_value_parsed = parse_fill_value(fill_value) + fill_value_parsed = parse_fill_value_v2(fill_value, dtype=data_type_parsed) attributes_parsed = parse_attributes(attributes) object.__setattr__(self, "shape", shape_parsed) @@ -371,13 +379,17 @@ def chunks(self) -> ChunkCoords: def to_buffer_dict(self) -> dict[str, Buffer]: def _json_convert( - o: np.dtype[Any], - ) -> str | list[tuple[str, str] | tuple[str, str, tuple[int, ...]]]: + o: Any, + 
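The v3 `_json_convert` branch above follows the zarr v3 spec, which serializes a complex fill value as a two-element `[real, imag]` array and other numpy scalars as plain Python numbers. A rough sketch of the conversion for scalars, with illustrative values:

    import numpy as np

    def convert_scalar(o):
        # same shape of logic as the _json_convert scalar branch above
        out = getattr(o, "item", lambda: o)()
        if isinstance(out, complex):
            # python complex is not JSON serializable; use the v3 spec encoding
            return [out.real, out.imag]
        return out

    assert convert_scalar(np.uint16(7)) == 7
    assert convert_scalar(np.complex64(1 + 2j)) == [1.0, 2.0]
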
) -> Any: if isinstance(o, np.dtype): if o.fields is None: return o.str else: return o.descr + if np.isscalar(o): + # convert numpy scalar to python type, and pass + # python types through + return getattr(o, "item", lambda: o)() raise TypeError zarray_dict = self.to_dict() @@ -517,3 +529,105 @@ def parse_codecs(data: Iterable[Codec | dict[str, JSON]]) -> tuple[Codec, ...]: out += (get_codec_class(name_parsed).from_dict(c),) return out + + +def parse_fill_value_v2(fill_value: Any, dtype: np.dtype[Any]) -> Any: + """ + Parse a potential fill value into a value that is compatible with the provided dtype. + + This is a light wrapper around zarr.v2.util.normalize_fill_value. + + Parameters + ---------- + fill_value: Any + A potential fill value. + dtype: np.dtype[Any] + A numpy dtype. + + Returns + An instance of `dtype`, or `None`, or any python object (in the case of an object dtype) + """ + from zarr.v2.util import normalize_fill_value + + return normalize_fill_value(fill_value=fill_value, dtype=dtype) + + +BOOL = np.bool_ +BOOL_DTYPE = np.dtypes.BoolDType + +INTEGER_DTYPE = ( + np.dtypes.Int8DType + | np.dtypes.Int16DType + | np.dtypes.Int32DType + | np.dtypes.Int64DType + | np.dtypes.UByteDType + | np.dtypes.UInt16DType + | np.dtypes.UInt32DType + | np.dtypes.UInt64DType +) + +INTEGER = np.int8 | np.int16 | np.int32 | np.int64 | np.uint8 | np.uint16 | np.uint32 | np.uint64 +FLOAT_DTYPE = np.dtypes.Float16DType | np.dtypes.Float32DType | np.dtypes.Float64DType +FLOAT = np.float16 | np.float32 | np.float64 +COMPLEX_DTYPE = np.dtypes.Complex64DType | np.dtypes.Complex128DType +COMPLEX = np.complex64 | np.complex128 +# todo: r* dtypes + + +@overload +def parse_fill_value_v3(fill_value: Any, dtype: BOOL_DTYPE) -> BOOL: ... + + +@overload +def parse_fill_value_v3(fill_value: Any, dtype: INTEGER_DTYPE) -> INTEGER: ... + + +@overload +def parse_fill_value_v3(fill_value: Any, dtype: FLOAT_DTYPE) -> FLOAT: ... + + +@overload +def parse_fill_value_v3(fill_value: Any, dtype: COMPLEX_DTYPE) -> COMPLEX: ... + + +def parse_fill_value_v3( + fill_value: Any, dtype: BOOL_DTYPE | INTEGER_DTYPE | FLOAT_DTYPE | COMPLEX_DTYPE +) -> BOOL | INTEGER | FLOAT | COMPLEX: + """ + Parse `fill_value`, a potential fill value, into an instance of `dtype`, a data type. + If `fill_value` is `None`, then this function will return the result of casting the value 0 + to the provided data type. Otherwise, `fill_value` will be cast to the provided data type. + + Note that some numpy dtypes use very permissive casting rules. For example, + `np.bool_({'not remotely a bool'})` returns `True`. Thus this function should not be used for + validating that the provided fill value is a valid instance of the data type. + + Parameters + ---------- + fill_value: Any + A potential fill value. + dtype: BOOL_DTYPE | INTEGER_DTYPE | FLOAT_DTYPE | COMPLEX_DTYPE + A numpy data type that models a data type defined in the Zarr V3 specification. + + Returns + ------- + A scalar instance of `dtype` + """ + if fill_value is None: + return dtype.type(0) + if isinstance(fill_value, Sequence) and not isinstance(fill_value, str): + if dtype in (np.complex64, np.complex128): + dtype = cast(COMPLEX_DTYPE, dtype) + if len(fill_value) == 2: + # complex datatypes serialize to JSON arrays with two elements + return dtype.type(complex(*fill_value)) + else: + msg = ( + f"Got an invalid fill value for complex data type {dtype}." + f"Expected a sequence with 2 elements, but {fill_value} has " + f"length {len(fill_value)}." 
+ ) + raise ValueError(msg) + msg = f"Cannot parse non-string sequence {fill_value} as a scalar with type {dtype}." + raise TypeError(msg) + return dtype.type(fill_value) diff --git a/tests/v3/test_array.py b/tests/v3/test_array.py index 203cfbf860..08678f5989 100644 --- a/tests/v3/test_array.py +++ b/tests/v3/test_array.py @@ -1,3 +1,4 @@ +import numpy as np import pytest from zarr.array import Array @@ -34,3 +35,51 @@ def test_array_name_properties_with_group( assert spam.path == "bar/spam" assert spam.name == "/bar/spam" assert spam.basename == "spam" + + +@pytest.mark.parametrize("store", ["memory"], indirect=True) +@pytest.mark.parametrize("specifiy_fill_value", [True, False]) +@pytest.mark.parametrize("dtype_str", ["bool", "uint8", "complex64"]) +def test_array_v3_fill_value_default( + store: MemoryStore, specifiy_fill_value: bool, dtype_str: str +) -> None: + """ + Test that creating an array with the fill_value parameter set to None, or unspecified, + results in the expected fill_value attribute of the array, i.e. 0 cast to the array's dtype. + """ + shape = (10,) + default_fill_value = 0 + if specifiy_fill_value: + arr = Array.create( + store=store, + shape=shape, + dtype=dtype_str, + zarr_format=3, + chunk_shape=shape, + fill_value=None, + ) + else: + arr = Array.create( + store=store, shape=shape, dtype=dtype_str, zarr_format=3, chunk_shape=shape + ) + + assert arr.fill_value == np.dtype(dtype_str).type(default_fill_value) + assert arr.fill_value.dtype == arr.dtype + + +@pytest.mark.parametrize("store", ["memory"], indirect=True) +@pytest.mark.parametrize("fill_value", [False, 0.0, 1, 2.3]) +@pytest.mark.parametrize("dtype_str", ["bool", "uint8", "float32", "complex64"]) +def test_array_v3_fill_value(store: MemoryStore, fill_value: int, dtype_str: str) -> None: + shape = (10,) + arr = Array.create( + store=store, + shape=shape, + dtype=dtype_str, + zarr_format=3, + chunk_shape=shape, + fill_value=fill_value, + ) + + assert arr.fill_value == np.dtype(dtype_str).type(fill_value) + assert arr.fill_value.dtype == arr.dtype diff --git a/tests/v3/test_metadata.py b/tests/v3/test_metadata.py index 65297c52d8..e69de29bb2 100644 --- a/tests/v3/test_metadata.py +++ b/tests/v3/test_metadata.py @@ -1,60 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -import pytest - -if TYPE_CHECKING: - from collections.abc import Sequence - from typing import Any - -from zarr.metadata import parse_dimension_names, parse_zarr_format_v2, parse_zarr_format_v3 - - -# todo: test -def test_datatype_enum(): ... - - -# todo: test -# this will almost certainly be a collection of tests -def test_array_metadata_v3(): ... - - -# todo: test -# this will almost certainly be a collection of tests -def test_array_metadata_v2(): ... - - -@pytest.mark.parametrize("data", [None, ("a", "b", "c"), ["a", "a", "a"]]) -def parse_dimension_names_valid(data: Sequence[str] | None) -> None: - assert parse_dimension_names(data) == data - - -@pytest.mark.parametrize("data", [(), [1, 2, "a"], {"foo": 10}]) -def parse_dimension_names_invalid(data: Any) -> None: - with pytest.raises(TypeError, match="Expected either None or iterable of str,"): - parse_dimension_names(data) - - -# todo: test -def test_parse_attributes() -> None: ... - - -def test_parse_zarr_format_v3_valid() -> None: - assert parse_zarr_format_v3(3) == 3 - - -@pytest.mark.parametrize("data", [None, 1, 2, 4, 5, "3"]) -def test_parse_zarr_foramt_v3_invalid(data: Any) -> None: - with pytest.raises(ValueError, match=f"Invalid value. 
Expected 3. Got {data}"): - parse_zarr_format_v3(data) - - -def test_parse_zarr_format_v2_valid() -> None: - assert parse_zarr_format_v2(2) == 2 - - -@pytest.mark.parametrize("data", [None, 1, 3, 4, 5, "3"]) -def test_parse_zarr_foramt_v2_invalid(data: Any) -> None: - with pytest.raises(ValueError, match=f"Invalid value. Expected 2. Got {data}"): - parse_zarr_format_v2(data) diff --git a/tests/v3/test_metadata/test_v2.py b/tests/v3/test_metadata/test_v2.py new file mode 100644 index 0000000000..8a82b29620 --- /dev/null +++ b/tests/v3/test_metadata/test_v2.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + +import pytest + +from zarr.metadata import parse_zarr_format_v2 + + +def test_parse_zarr_format_valid() -> None: + assert parse_zarr_format_v2(2) == 2 + + +@pytest.mark.parametrize("data", [None, 1, 3, 4, 5, "3"]) +def test_parse_zarr_format_invalid(data: Any) -> None: + with pytest.raises(ValueError, match=f"Invalid value. Expected 2. Got {data}"): + parse_zarr_format_v2(data) diff --git a/tests/v3/test_metadata/test_v3.py b/tests/v3/test_metadata/test_v3.py new file mode 100644 index 0000000000..04456473d7 --- /dev/null +++ b/tests/v3/test_metadata/test_v3.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + +from collections.abc import Sequence + +import numpy as np +import pytest + +from zarr.metadata import parse_dimension_names +from zarr.metadata import parse_fill_value_v3 as parse_fill_value +from zarr.metadata import parse_zarr_format_v3 as parse_zarr_format + +bool_dtypes = ("bool",) + +int_dtypes = ( + "int8", + "int16", + "int32", + "int64", + "uint8", + "uint16", + "uint32", + "uint64", +) + +float_dtypes = ( + "float16", + "float32", + "float64", +) + +complex_dtypes = ("complex64", "complex128") + +dtypes = (*bool_dtypes, *int_dtypes, *float_dtypes, *complex_dtypes) + + +@pytest.mark.parametrize("data", [None, 1, 2, 4, 5, "3"]) +def test_parse_zarr_format_invalid(data: Any) -> None: + with pytest.raises(ValueError, match=f"Invalid value. Expected 3. Got {data}"): + parse_zarr_format(data) + + +def test_parse_zarr_format_valid() -> None: + assert parse_zarr_format(3) == 3 + + +@pytest.mark.parametrize("data", [(), [1, 2, "a"], {"foo": 10}]) +def parse_dimension_names_invalid(data: Any) -> None: + with pytest.raises(TypeError, match="Expected either None or iterable of str,"): + parse_dimension_names(data) + + +@pytest.mark.parametrize("data", [None, ("a", "b", "c"), ["a", "a", "a"]]) +def parse_dimension_names_valid(data: Sequence[str] | None) -> None: + assert parse_dimension_names(data) == data + + +@pytest.mark.parametrize("dtype_str", dtypes) +def test_parse_auto_fill_value(dtype_str: str) -> None: + """ + Test that parse_fill_value(None, dtype) results in the 0 value for the given dtype. + """ + dtype = np.dtype(dtype_str) + fill_value = None + assert parse_fill_value(fill_value, dtype) == dtype.type(0) + + +@pytest.mark.parametrize("fill_value", [0, 1.11, False, True]) +@pytest.mark.parametrize("dtype_str", dtypes) +def test_parse_fill_value_valid(fill_value: Any, dtype_str: str) -> None: + """ + Test that parse_fill_value(fill_value, dtype) casts fill_value to the given dtype. 
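A few concrete calls make the casting rules tangible; a sketch using the `parse_fill_value_v3` routine added above (the values here are illustrative):

    import numpy as np

    from zarr.metadata import parse_fill_value_v3

    assert parse_fill_value_v3(None, np.dtype("uint8")) == np.uint8(0)  # None -> 0 of the dtype
    assert parse_fill_value_v3(1.11, np.dtype("float32")) == np.float32(1.11)
    # complex dtypes accept a length-2 sequence as [real, imag]
    assert parse_fill_value_v3([1.0, 2.0], np.dtype("complex64")) == np.complex64(1 + 2j)
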
+ """ + dtype = np.dtype(dtype_str) + assert parse_fill_value(fill_value, dtype) == dtype.type(fill_value) + + +@pytest.mark.parametrize("fill_value", ["not a valid value"]) +@pytest.mark.parametrize("dtype_str", [*int_dtypes, *float_dtypes, *complex_dtypes]) +def test_parse_fill_value_invalid_value(fill_value: Any, dtype_str: str) -> None: + """ + Test that parse_fill_value(fill_value, dtype) raises ValueError for invalid values. + This test excludes bool because the bool constructor takes anything. + """ + dtype = np.dtype(dtype_str) + with pytest.raises(ValueError): + parse_fill_value(fill_value, dtype) + + +@pytest.mark.parametrize("fill_value", [[1.0, 0.0], [0, 1], complex(1, 1), np.complex64(0)]) +@pytest.mark.parametrize("dtype_str", [*complex_dtypes]) +def test_parse_fill_value_complex(fill_value: Any, dtype_str: str) -> None: + """ + Test that parse_fill_value(fill_value, dtype) correctly handles complex values represented + as length-2 sequences + """ + dtype = np.dtype(dtype_str) + if isinstance(fill_value, list): + expected = dtype.type(complex(*fill_value)) + else: + expected = dtype.type(fill_value) + assert expected == parse_fill_value(fill_value, dtype) + + +@pytest.mark.parametrize("fill_value", [[1.0, 0.0, 3.0], [0, 1, 3], [1]]) +@pytest.mark.parametrize("dtype_str", [*complex_dtypes]) +def test_parse_fill_value_complex_invalid(fill_value: Any, dtype_str: str) -> None: + """ + Test that parse_fill_value(fill_value, dtype) correctly rejects sequences with length not + equal to 2 + """ + dtype = np.dtype(dtype_str) + match = ( + f"Got an invalid fill value for complex data type {dtype}." + f"Expected a sequence with 2 elements, but {fill_value} has " + f"length {len(fill_value)}." + ) + with pytest.raises(ValueError, match=re.escape(match)): + parse_fill_value(fill_value=fill_value, dtype=dtype) + + +@pytest.mark.parametrize("fill_value", [{"foo": 10}]) +@pytest.mark.parametrize("dtype_str", [*int_dtypes, *float_dtypes, *complex_dtypes]) +def test_parse_fill_value_invalid_type(fill_value: Any, dtype_str: str) -> None: + """ + Test that parse_fill_value(fill_value, dtype) raises TypeError for invalid non-sequential types. + This test excludes bool because the bool constructor takes anything. + """ + dtype = np.dtype(dtype_str) + match = "must be" + with pytest.raises(TypeError, match=match): + parse_fill_value(fill_value, dtype) + + +@pytest.mark.parametrize( + "fill_value", + [ + [ + 1, + ], + (1, 23, 4), + ], +) +@pytest.mark.parametrize("dtype_str", [*int_dtypes, *float_dtypes]) +def test_parse_fill_value_invalid_type_sequence(fill_value: Any, dtype_str: str) -> None: + """ + Test that parse_fill_value(fill_value, dtype) raises TypeError for invalid sequential types. + This test excludes bool because the bool constructor takes anything, and complex because + complex values can be created from length-2 sequences. 
+ """ + dtype = np.dtype(dtype_str) + match = f"Cannot parse non-string sequence {fill_value} as a scalar with type {dtype}" + with pytest.raises(TypeError, match=re.escape(match)): + parse_fill_value(fill_value, dtype) From 37a8441c20dae3b284803bb1b0d2e6c8f040fb3e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 15 Jul 2024 20:15:38 -0700 Subject: [PATCH 0605/1078] chore: update pre-commit hooks (#2039) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.5.1 → v0.5.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.5.1...v0.5.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 93bd47ee2f..9d432dc1ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 'v0.5.1' + rev: 'v0.5.2' hooks: - id: ruff args: ["--fix", "--show-fixes"] From eec0f41deac33641df3909ef2bcea160056b4825 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 21 Jul 2024 18:37:08 +0200 Subject: [PATCH 0606/1078] Redundant list comprehension (#2048) --- src/zarr/indexing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zarr/indexing.py b/src/zarr/indexing.py index 1f483e1c15..6987f69c11 100644 --- a/src/zarr/indexing.py +++ b/src/zarr/indexing.py @@ -1036,7 +1036,7 @@ def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_gri # flatten selection selection_broadcast = tuple(dim_sel.reshape(-1) for dim_sel in selection_broadcast) chunks_multi_index_broadcast = tuple( - [dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast] + dim_chunks.reshape(-1) for dim_chunks in chunks_multi_index_broadcast ) # ravel chunk indices From 4b7be5bb222f308ec0a806c201a24fbe8caf668b Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 21 Jul 2024 18:38:23 +0200 Subject: [PATCH 0607/1078] Multiple imports for an import name (#2047) Co-authored-by: Davis Bennett --- src/zarr/codecs/transpose.py | 4 ++-- src/zarr/group.py | 2 +- src/zarr/store/remote.py | 3 +-- tests/v2/test_indexing.py | 3 +-- tests/v2/test_storage.py | 2 -- tests/v2/test_sync.py | 6 ++---- 6 files changed, 7 insertions(+), 13 deletions(-) diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py index 0c55a6ec4a..9dad89002e 100644 --- a/src/zarr/codecs/transpose.py +++ b/src/zarr/codecs/transpose.py @@ -2,7 +2,7 @@ from collections.abc import Iterable from dataclasses import dataclass, replace -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, cast import numpy as np @@ -14,7 +14,7 @@ from zarr.common import JSON, ChunkCoordsLike, parse_named_configuration if TYPE_CHECKING: - from typing import TYPE_CHECKING + from typing import Any from typing_extensions import Self diff --git a/src/zarr/group.py b/src/zarr/group.py index e6e2ac183f..5cf0e48db9 100644 --- a/src/zarr/group.py +++ b/src/zarr/group.py @@ -31,7 +31,7 @@ if TYPE_CHECKING: from collections.abc import AsyncGenerator, Iterable - from typing import Any, Literal + from typing import Any 
 logger = logging.getLogger("zarr.group")

diff --git a/src/zarr/store/remote.py b/src/zarr/store/remote.py
index e0b69cac50..18ad3fa0bf 100644
--- a/src/zarr/store/remote.py
+++ b/src/zarr/store/remote.py
@@ -6,7 +6,6 @@
 import fsspec

 from zarr.abc.store import Store
-from zarr.buffer import Buffer, BufferPrototype
 from zarr.common import OpenMode
 from zarr.store.core import _dereference_path
@@ -14,7 +13,7 @@
     from fsspec.asyn import AsyncFileSystem
     from upath import UPath

-    from zarr.buffer import Buffer
+    from zarr.buffer import Buffer, BufferPrototype
     from zarr.common import BytesLike

diff --git a/tests/v2/test_indexing.py b/tests/v2/test_indexing.py
index 13fbc878e0..c1fd87572d 100644
--- a/tests/v2/test_indexing.py
+++ b/tests/v2/test_indexing.py
@@ -1,4 +1,3 @@
-import numpy
 import numpy as np
 import pytest
 from numpy.testing import assert_array_equal
@@ -1679,7 +1678,7 @@ def test_numpy_int_indexing():
     z = zarr.v2.create(shape=1050, chunks=100, dtype=a.dtype)
     z[:] = a
     assert a[42] == z[42]
-    assert a[numpy.int64(42)] == z[numpy.int64(42)]
+    assert a[np.int64(42)] == z[np.int64(42)]

diff --git a/tests/v2/test_storage.py b/tests/v2/test_storage.py
index 88e99e91a1..bcf5fa200b 100644
--- a/tests/v2/test_storage.py
+++ b/tests/v2/test_storage.py
@@ -1070,8 +1070,6 @@ def test_complex(self):
         assert store[self.root + "foo"] == b"hello"

     def test_deep_ndim(self):
-        import zarr.v2
-
         store = self.create_store()
         path = None if self.version == 2 else "group1"
         foo = zarr.v2.open_group(store=store, path=path)

diff --git a/tests/v2/test_sync.py b/tests/v2/test_sync.py
index ea6fd0523d..76d037c2db 100644
--- a/tests/v2/test_sync.py
+++ b/tests/v2/test_sync.py
@@ -43,9 +43,8 @@ def init_attributes(self, store, read_only=False, cache=True):

 def _append(arg):
     z, i = arg
-    import numpy
-
-    x = numpy.empty(1000, dtype="i4")
+    x = np.empty(1000, dtype="i4")
     x[:] = i
     shape = z.append(x)
     return shape
@@ -53,9 +52,8 @@ def _append(arg):

 def _set_arange(arg):
     z, i = arg
-    import numpy
-
-    x = numpy.arange(i * 1000, (i * 1000) + 1000, 1)
+    x = np.arange(i * 1000, (i * 1000) + 1000, 1)
     z[i * 1000 : (i * 1000) + 1000] = x
     return i

From 48e24754ac88e02c6951b09dd711d76f6034d9a1 Mon Sep 17 00:00:00 2001
From: Davis Bennett
Date: Sun, 21 Jul 2024 18:56:11 +0200
Subject: [PATCH 0608/1078] Move fixtures to `tests` (#1813)

* fix: move test fixtures into tests/fixture

* use tmpdir instead of data fixture

* refactor fixture-dependent tests

* restore old tests, to try and make a clean commit, again

* checkout updated test routines

---------

Co-authored-by: Joe Hamman
---
 fixture/.zattrs | 1 - fixture/0/.zattrs | 1 - fixture/0/0/.zarray | 14 ---- fixture/0/0/.zattrs | 1 - fixture/0/0/0 | Bin 100 -> 0 bytes fixture/0/0/1 | 1 - fixture/0/0/10 | Bin 100 -> 0 bytes fixture/0/0/11 | Bin 100 -> 0 bytes fixture/0/0/2 | Bin 100 -> 0 bytes fixture/0/0/3 | 1 - fixture/0/0/4 | 1 - fixture/0/0/5 | Bin 100 -> 0 bytes fixture/0/0/6 | 1 - fixture/0/0/7 | Bin 100 -> 0 bytes fixture/0/0/8 | 1 - fixture/0/0/9 | 1 - fixture/0/1/.zarray | 17 ---- fixture/0/1/.zattrs | 1 - fixture/0/1/0 | Bin 108 -> 0 bytes fixture/0/1/1 | Bin 111 -> 0 bytes fixture/0/1/10 | Bin 111 -> 0 bytes fixture/0/1/11 | Bin 22 -> 0 bytes fixture/0/1/2 | Bin 111 -> 0 bytes fixture/0/1/3 | Bin 108 -> 0 bytes fixture/0/1/4 | Bin 111 -> 0 bytes fixture/0/1/5 | Bin 109 -> 0 bytes fixture/0/1/6 | Bin 111 -> 0 bytes fixture/0/1/7 | Bin 111 -> 0 bytes fixture/0/1/8 | Bin 108
-> 0 bytes fixture/0/1/9 | Bin 111 -> 0 bytes fixture/0/2/.zarray | 17 ---- fixture/0/2/.zattrs | 1 - fixture/0/2/0 | Bin 176 -> 0 bytes fixture/0/2/1 | Bin 176 -> 0 bytes fixture/0/2/10 | Bin 176 -> 0 bytes fixture/0/2/11 | Bin 52 -> 0 bytes fixture/0/2/2 | Bin 176 -> 0 bytes fixture/0/2/3 | Bin 176 -> 0 bytes fixture/0/2/4 | Bin 176 -> 0 bytes fixture/0/2/5 | Bin 176 -> 0 bytes fixture/0/2/6 | Bin 176 -> 0 bytes fixture/0/2/7 | Bin 176 -> 0 bytes fixture/0/2/8 | Bin 176 -> 0 bytes fixture/0/2/9 | Bin 176 -> 0 bytes fixture/0/3/.zarray | 19 ----- fixture/0/3/.zattrs | 1 - fixture/0/3/0 | Bin 116 -> 0 bytes fixture/0/3/1 | Bin 116 -> 0 bytes fixture/0/3/10 | Bin 116 -> 0 bytes fixture/0/3/11 | Bin 116 -> 0 bytes fixture/0/3/2 | Bin 116 -> 0 bytes fixture/0/3/3 | Bin 116 -> 0 bytes fixture/0/3/4 | Bin 116 -> 0 bytes fixture/0/3/5 | Bin 116 -> 0 bytes fixture/0/3/6 | Bin 116 -> 0 bytes fixture/0/3/7 | Bin 116 -> 0 bytes fixture/0/3/8 | Bin 116 -> 0 bytes fixture/0/3/9 | Bin 116 -> 0 bytes fixture/0/4/.zarray | 19 ----- fixture/0/4/.zattrs | 1 - fixture/0/4/0 | Bin 116 -> 0 bytes fixture/0/4/1 | Bin 116 -> 0 bytes fixture/0/4/10 | Bin 116 -> 0 bytes fixture/0/4/11 | Bin 116 -> 0 bytes fixture/0/4/2 | Bin 116 -> 0 bytes fixture/0/4/3 | Bin 116 -> 0 bytes fixture/0/4/4 | Bin 116 -> 0 bytes fixture/0/4/5 | Bin 116 -> 0 bytes fixture/0/4/6 | Bin 116 -> 0 bytes fixture/0/4/7 | Bin 116 -> 0 bytes fixture/0/4/8 | Bin 116 -> 0 bytes fixture/0/4/9 | Bin 116 -> 0 bytes fixture/0/5/.zarray | 19 ----- fixture/0/5/.zattrs | 1 - fixture/0/5/0 | Bin 116 -> 0 bytes fixture/0/5/1 | Bin 116 -> 0 bytes fixture/0/5/10 | Bin 116 -> 0 bytes fixture/0/5/11 | Bin 116 -> 0 bytes fixture/0/5/2 | Bin 116 -> 0 bytes fixture/0/5/3 | Bin 116 -> 0 bytes fixture/0/5/4 | Bin 116 -> 0 bytes fixture/0/5/5 | Bin 116 -> 0 bytes fixture/0/5/6 | Bin 116 -> 0 bytes fixture/0/5/7 | Bin 116 -> 0 bytes fixture/0/5/8 | Bin 116 -> 0 bytes fixture/0/5/9 | Bin 116 -> 0 bytes fixture/0/6/.zarray | 19 ----- fixture/0/6/.zattrs | 1 - fixture/0/6/0 | Bin 116 -> 0 bytes fixture/0/6/1 | Bin 116 -> 0 bytes fixture/0/6/10 | Bin 116 -> 0 bytes fixture/0/6/11 | Bin 116 -> 0 bytes fixture/0/6/2 | Bin 116 -> 0 bytes fixture/0/6/3 | Bin 116 -> 0 bytes fixture/0/6/4 | Bin 116 -> 0 bytes fixture/0/6/5 | Bin 116 -> 0 bytes fixture/0/6/6 | Bin 116 -> 0 bytes fixture/0/6/7 | Bin 116 -> 0 bytes fixture/0/6/8 | Bin 116 -> 0 bytes fixture/0/6/9 | Bin 116 -> 0 bytes fixture/1/.zattrs | 1 - fixture/1/0/.zarray | 14 ---- fixture/1/0/.zattrs | 1 - fixture/1/0/0 | Bin 200 -> 0 bytes fixture/1/0/1 | Bin 200 -> 0 bytes fixture/1/0/10 | Bin 200 -> 0 bytes fixture/1/0/11 | Bin 200 -> 0 bytes fixture/1/0/2 | Bin 200 -> 0 bytes fixture/1/0/3 | 1 - fixture/1/0/4 | 1 - fixture/1/0/5 | Bin 200 -> 0 bytes fixture/1/0/6 | 1 - fixture/1/0/7 | Bin 200 -> 0 bytes fixture/1/0/8 | 1 - fixture/1/0/9 | 1 - fixture/1/1/.zarray | 17 ---- fixture/1/1/.zattrs | 1 - fixture/1/1/0 | Bin 145 -> 0 bytes fixture/1/1/1 | Bin 145 -> 0 bytes fixture/1/1/10 | Bin 150 -> 0 bytes fixture/1/1/11 | Bin 33 -> 0 bytes fixture/1/1/2 | Bin 158 -> 0 bytes fixture/1/1/3 | Bin 146 -> 0 bytes fixture/1/1/4 | Bin 147 -> 0 bytes fixture/1/1/5 | Bin 149 -> 0 bytes fixture/1/1/6 | Bin 147 -> 0 bytes fixture/1/1/7 | Bin 152 -> 0 bytes fixture/1/1/8 | Bin 147 -> 0 bytes fixture/1/1/9 | Bin 147 -> 0 bytes fixture/1/2/.zarray | 17 ---- fixture/1/2/.zattrs | 1 - fixture/1/2/0 | Bin 181 -> 0 bytes fixture/1/2/1 | Bin 181 -> 0 bytes fixture/1/2/10 | Bin 182 -> 0 bytes fixture/1/2/11 | Bin 58 -> 0 bytes fixture/1/2/2 
| Bin 174 -> 0 bytes fixture/1/2/3 | Bin 181 -> 0 bytes fixture/1/2/4 | Bin 181 -> 0 bytes fixture/1/2/5 | Bin 181 -> 0 bytes fixture/1/2/6 | Bin 181 -> 0 bytes fixture/1/2/7 | Bin 170 -> 0 bytes fixture/1/2/8 | Bin 181 -> 0 bytes fixture/1/2/9 | Bin 181 -> 0 bytes fixture/1/3/.zarray | 19 ----- fixture/1/3/.zattrs | 1 - fixture/1/3/0 | Bin 165 -> 0 bytes fixture/1/3/1 | Bin 190 -> 0 bytes fixture/1/3/10 | Bin 203 -> 0 bytes fixture/1/3/11 | Bin 63 -> 0 bytes fixture/1/3/2 | Bin 206 -> 0 bytes fixture/1/3/3 | Bin 181 -> 0 bytes fixture/1/3/4 | Bin 195 -> 0 bytes fixture/1/3/5 | Bin 198 -> 0 bytes fixture/1/3/6 | Bin 188 -> 0 bytes fixture/1/3/7 | Bin 207 -> 0 bytes fixture/1/3/8 | Bin 178 -> 0 bytes fixture/1/3/9 | Bin 194 -> 0 bytes fixture/1/4/.zarray | 19 ----- fixture/1/4/.zattrs | 1 - fixture/1/4/0 | Bin 142 -> 0 bytes fixture/1/4/1 | Bin 142 -> 0 bytes fixture/1/4/10 | Bin 145 -> 0 bytes fixture/1/4/11 | Bin 57 -> 0 bytes fixture/1/4/2 | Bin 145 -> 0 bytes fixture/1/4/3 | Bin 142 -> 0 bytes fixture/1/4/4 | Bin 142 -> 0 bytes fixture/1/4/5 | Bin 145 -> 0 bytes fixture/1/4/6 | Bin 142 -> 0 bytes fixture/1/4/7 | Bin 145 -> 0 bytes fixture/1/4/8 | Bin 142 -> 0 bytes fixture/1/4/9 | Bin 142 -> 0 bytes fixture/1/5/.zarray | 19 ----- fixture/1/5/.zattrs | 1 - fixture/1/5/0 | Bin 165 -> 0 bytes fixture/1/5/1 | Bin 190 -> 0 bytes fixture/1/5/10 | Bin 203 -> 0 bytes fixture/1/5/11 | Bin 63 -> 0 bytes fixture/1/5/2 | Bin 206 -> 0 bytes fixture/1/5/3 | Bin 181 -> 0 bytes fixture/1/5/4 | Bin 195 -> 0 bytes fixture/1/5/5 | Bin 198 -> 0 bytes fixture/1/5/6 | Bin 188 -> 0 bytes fixture/1/5/7 | Bin 207 -> 0 bytes fixture/1/5/8 | Bin 178 -> 0 bytes fixture/1/5/9 | Bin 194 -> 0 bytes fixture/1/6/.zarray | 19 ----- fixture/1/6/.zattrs | 1 - fixture/1/6/0 | Bin 216 -> 0 bytes fixture/1/6/1 | Bin 216 -> 0 bytes fixture/1/6/10 | Bin 216 -> 0 bytes fixture/1/6/11 | Bin 66 -> 0 bytes fixture/1/6/2 | Bin 216 -> 0 bytes fixture/1/6/3 | Bin 216 -> 0 bytes fixture/1/6/4 | Bin 216 -> 0 bytes fixture/1/6/5 | Bin 216 -> 0 bytes fixture/1/6/6 | Bin 216 -> 0 bytes fixture/1/6/7 | Bin 216 -> 0 bytes fixture/1/6/8 | Bin 216 -> 0 bytes fixture/1/6/9 | Bin 216 -> 0 bytes fixture/10/.zattrs | 1 - fixture/10/0/.zarray | 14 ---- fixture/10/0/.zattrs | 1 - fixture/10/0/0 | Bin 800 -> 0 bytes fixture/10/0/1 | Bin 800 -> 0 bytes fixture/10/0/10 | Bin 800 -> 0 bytes fixture/10/0/11 | Bin 800 -> 0 bytes fixture/10/0/12 | Bin 800 -> 0 bytes fixture/10/0/13 | Bin 800 -> 0 bytes fixture/10/0/14 | Bin 800 -> 0 bytes fixture/10/0/15 | Bin 800 -> 0 bytes fixture/10/0/16 | Bin 800 -> 0 bytes fixture/10/0/17 | 1 - fixture/10/0/18 | Bin 800 -> 0 bytes fixture/10/0/19 | Bin 800 -> 0 bytes fixture/10/0/2 | Bin 800 -> 0 bytes fixture/10/0/20 | Bin 800 -> 0 bytes fixture/10/0/21 | Bin 800 -> 0 bytes fixture/10/0/22 | Bin 800 -> 0 bytes fixture/10/0/23 | Bin 800 -> 0 bytes fixture/10/0/24 | Bin 800 -> 0 bytes fixture/10/0/25 | 2 - fixture/10/0/26 | Bin 800 -> 0 bytes fixture/10/0/27 | Bin 800 -> 0 bytes fixture/10/0/28 | Bin 800 -> 0 bytes fixture/10/0/29 | Bin 800 -> 0 bytes fixture/10/0/3 | Bin 800 -> 0 bytes fixture/10/0/30 | Bin 800 -> 0 bytes fixture/10/0/31 | Bin 800 -> 0 bytes fixture/10/0/32 | Bin 800 -> 0 bytes fixture/10/0/33 | Bin 800 -> 0 bytes fixture/10/0/4 | Bin 800 -> 0 bytes fixture/10/0/5 | Bin 800 -> 0 bytes fixture/10/0/6 | Bin 800 -> 0 bytes fixture/10/0/7 | Bin 800 -> 0 bytes fixture/10/0/8 | Bin 800 -> 0 bytes fixture/10/0/9 | Bin 800 -> 0 bytes fixture/10/1/.zarray | 17 ---- fixture/10/1/.zattrs | 1 - fixture/10/1/0 | 
1 - fixture/10/1/1 | Bin 735 -> 0 bytes fixture/10/1/10 | Bin 709 -> 0 bytes fixture/10/1/11 | Bin 703 -> 0 bytes fixture/10/1/12 | Bin 700 -> 0 bytes fixture/10/1/13 | Bin 700 -> 0 bytes fixture/10/1/14 | 3 - fixture/10/1/15 | 1 - fixture/10/1/16 | Bin 709 -> 0 bytes fixture/10/1/17 | Bin 705 -> 0 bytes fixture/10/1/18 | Bin 701 -> 0 bytes fixture/10/1/19 | Bin 706 -> 0 bytes fixture/10/1/2 | Bin 736 -> 0 bytes fixture/10/1/20 | Bin 706 -> 0 bytes fixture/10/1/21 | Bin 706 -> 0 bytes fixture/10/1/22 | Bin 701 -> 0 bytes fixture/10/1/23 | Bin 701 -> 0 bytes fixture/10/1/24 | Bin 695 -> 0 bytes fixture/10/1/25 | Bin 699 -> 0 bytes fixture/10/1/26 | Bin 694 -> 0 bytes fixture/10/1/27 | Bin 696 -> 0 bytes fixture/10/1/28 | Bin 699 -> 0 bytes fixture/10/1/29 | Bin 701 -> 0 bytes fixture/10/1/3 | Bin 725 -> 0 bytes fixture/10/1/30 | Bin 706 -> 0 bytes fixture/10/1/31 | Bin 705 -> 0 bytes fixture/10/1/32 | Bin 704 -> 0 bytes fixture/10/1/33 | Bin 283 -> 0 bytes fixture/10/1/4 | Bin 722 -> 0 bytes fixture/10/1/5 | Bin 725 -> 0 bytes fixture/10/1/6 | Bin 712 -> 0 bytes fixture/10/1/7 | Bin 720 -> 0 bytes fixture/10/1/8 | Bin 714 -> 0 bytes fixture/10/1/9 | Bin 709 -> 0 bytes fixture/10/2/.zarray | 17 ---- fixture/10/2/.zattrs | 1 - fixture/10/2/0 | Bin 755 -> 0 bytes fixture/10/2/1 | Bin 963 -> 0 bytes fixture/10/2/10 | Bin 914 -> 0 bytes fixture/10/2/11 | Bin 946 -> 0 bytes fixture/10/2/12 | Bin 903 -> 0 bytes fixture/10/2/13 | Bin 903 -> 0 bytes fixture/10/2/14 | Bin 943 -> 0 bytes fixture/10/2/15 | Bin 942 -> 0 bytes fixture/10/2/16 | Bin 949 -> 0 bytes fixture/10/2/17 | Bin 899 -> 0 bytes fixture/10/2/18 | Bin 901 -> 0 bytes fixture/10/2/19 | Bin 953 -> 0 bytes fixture/10/2/2 | Bin 953 -> 0 bytes fixture/10/2/20 | Bin 868 -> 0 bytes fixture/10/2/21 | Bin 875 -> 0 bytes fixture/10/2/22 | Bin 869 -> 0 bytes fixture/10/2/23 | Bin 891 -> 0 bytes fixture/10/2/24 | Bin 857 -> 0 bytes fixture/10/2/25 | Bin 823 -> 0 bytes fixture/10/2/26 | Bin 857 -> 0 bytes fixture/10/2/27 | Bin 900 -> 0 bytes fixture/10/2/28 | Bin 936 -> 0 bytes fixture/10/2/29 | Bin 852 -> 0 bytes fixture/10/2/3 | Bin 961 -> 0 bytes fixture/10/2/30 | Bin 943 -> 0 bytes fixture/10/2/31 | Bin 916 -> 0 bytes fixture/10/2/32 | Bin 874 -> 0 bytes fixture/10/2/33 | Bin 374 -> 0 bytes fixture/10/2/4 | Bin 915 -> 0 bytes fixture/10/2/5 | Bin 947 -> 0 bytes fixture/10/2/6 | Bin 932 -> 0 bytes fixture/10/2/7 | Bin 954 -> 0 bytes fixture/10/2/8 | Bin 926 -> 0 bytes fixture/10/2/9 | Bin 952 -> 0 bytes fixture/10/3/.zarray | 19 ----- fixture/10/3/.zattrs | 1 - fixture/10/3/0 | Bin 621 -> 0 bytes fixture/10/3/1 | Bin 816 -> 0 bytes fixture/10/3/10 | Bin 808 -> 0 bytes fixture/10/3/11 | Bin 816 -> 0 bytes fixture/10/3/12 | Bin 816 -> 0 bytes fixture/10/3/13 | Bin 806 -> 0 bytes fixture/10/3/14 | Bin 808 -> 0 bytes fixture/10/3/15 | Bin 805 -> 0 bytes fixture/10/3/16 | Bin 816 -> 0 bytes fixture/10/3/17 | Bin 802 -> 0 bytes fixture/10/3/18 | Bin 797 -> 0 bytes fixture/10/3/19 | Bin 798 -> 0 bytes fixture/10/3/2 | Bin 816 -> 0 bytes fixture/10/3/20 | Bin 795 -> 0 bytes fixture/10/3/21 | Bin 794 -> 0 bytes fixture/10/3/22 | Bin 793 -> 0 bytes fixture/10/3/23 | Bin 789 -> 0 bytes fixture/10/3/24 | Bin 786 -> 0 bytes fixture/10/3/25 | Bin 787 -> 0 bytes fixture/10/3/26 | Bin 787 -> 0 bytes fixture/10/3/27 | Bin 792 -> 0 bytes fixture/10/3/28 | Bin 792 -> 0 bytes fixture/10/3/29 | Bin 795 -> 0 bytes fixture/10/3/3 | Bin 816 -> 0 bytes fixture/10/3/30 | Bin 797 -> 0 bytes fixture/10/3/31 | Bin 802 -> 0 bytes fixture/10/3/32 | Bin 805 -> 0 bytes 
fixture/10/3/33 | Bin 305 -> 0 bytes fixture/10/3/4 | Bin 816 -> 0 bytes fixture/10/3/5 | Bin 816 -> 0 bytes fixture/10/3/6 | Bin 816 -> 0 bytes fixture/10/3/7 | Bin 816 -> 0 bytes fixture/10/3/8 | Bin 816 -> 0 bytes fixture/10/3/9 | Bin 804 -> 0 bytes fixture/10/4/.zarray | 19 ----- fixture/10/4/.zattrs | 1 - fixture/10/4/0 | Bin 744 -> 0 bytes fixture/10/4/1 | Bin 682 -> 0 bytes fixture/10/4/10 | Bin 653 -> 0 bytes fixture/10/4/11 | Bin 653 -> 0 bytes fixture/10/4/12 | Bin 630 -> 0 bytes fixture/10/4/13 | Bin 651 -> 0 bytes fixture/10/4/14 | Bin 632 -> 0 bytes fixture/10/4/15 | Bin 653 -> 0 bytes fixture/10/4/16 | Bin 648 -> 0 bytes fixture/10/4/17 | Bin 651 -> 0 bytes fixture/10/4/18 | Bin 651 -> 0 bytes fixture/10/4/19 | Bin 651 -> 0 bytes fixture/10/4/2 | Bin 664 -> 0 bytes fixture/10/4/20 | Bin 651 -> 0 bytes fixture/10/4/21 | Bin 651 -> 0 bytes fixture/10/4/22 | Bin 651 -> 0 bytes fixture/10/4/23 | Bin 651 -> 0 bytes fixture/10/4/24 | Bin 650 -> 0 bytes fixture/10/4/25 | Bin 649 -> 0 bytes fixture/10/4/26 | Bin 652 -> 0 bytes fixture/10/4/27 | Bin 651 -> 0 bytes fixture/10/4/28 | Bin 651 -> 0 bytes fixture/10/4/29 | Bin 651 -> 0 bytes fixture/10/4/3 | Bin 664 -> 0 bytes fixture/10/4/30 | Bin 651 -> 0 bytes fixture/10/4/31 | Bin 651 -> 0 bytes fixture/10/4/32 | Bin 651 -> 0 bytes fixture/10/4/33 | Bin 262 -> 0 bytes fixture/10/4/4 | Bin 660 -> 0 bytes fixture/10/4/5 | Bin 656 -> 0 bytes fixture/10/4/6 | Bin 655 -> 0 bytes fixture/10/4/7 | Bin 658 -> 0 bytes fixture/10/4/8 | Bin 655 -> 0 bytes fixture/10/4/9 | Bin 631 -> 0 bytes fixture/10/5/.zarray | 19 ----- fixture/10/5/.zattrs | 1 - fixture/10/5/0 | Bin 621 -> 0 bytes fixture/10/5/1 | Bin 816 -> 0 bytes fixture/10/5/10 | Bin 808 -> 0 bytes fixture/10/5/11 | Bin 816 -> 0 bytes fixture/10/5/12 | Bin 816 -> 0 bytes fixture/10/5/13 | Bin 806 -> 0 bytes fixture/10/5/14 | Bin 808 -> 0 bytes fixture/10/5/15 | Bin 805 -> 0 bytes fixture/10/5/16 | Bin 816 -> 0 bytes fixture/10/5/17 | Bin 802 -> 0 bytes fixture/10/5/18 | Bin 797 -> 0 bytes fixture/10/5/19 | Bin 798 -> 0 bytes fixture/10/5/2 | Bin 816 -> 0 bytes fixture/10/5/20 | Bin 795 -> 0 bytes fixture/10/5/21 | Bin 794 -> 0 bytes fixture/10/5/22 | Bin 793 -> 0 bytes fixture/10/5/23 | Bin 789 -> 0 bytes fixture/10/5/24 | Bin 786 -> 0 bytes fixture/10/5/25 | Bin 787 -> 0 bytes fixture/10/5/26 | Bin 787 -> 0 bytes fixture/10/5/27 | Bin 792 -> 0 bytes fixture/10/5/28 | Bin 792 -> 0 bytes fixture/10/5/29 | Bin 795 -> 0 bytes fixture/10/5/3 | Bin 816 -> 0 bytes fixture/10/5/30 | Bin 797 -> 0 bytes fixture/10/5/31 | Bin 802 -> 0 bytes fixture/10/5/32 | Bin 805 -> 0 bytes fixture/10/5/33 | Bin 305 -> 0 bytes fixture/10/5/4 | Bin 816 -> 0 bytes fixture/10/5/5 | Bin 816 -> 0 bytes fixture/10/5/6 | Bin 816 -> 0 bytes fixture/10/5/7 | Bin 816 -> 0 bytes fixture/10/5/8 | Bin 816 -> 0 bytes fixture/10/5/9 | Bin 804 -> 0 bytes fixture/10/6/.zarray | 19 ----- fixture/10/6/.zattrs | 1 - fixture/10/6/0 | Bin 800 -> 0 bytes fixture/10/6/1 | Bin 816 -> 0 bytes fixture/10/6/10 | Bin 816 -> 0 bytes fixture/10/6/11 | Bin 816 -> 0 bytes fixture/10/6/12 | Bin 816 -> 0 bytes fixture/10/6/13 | Bin 816 -> 0 bytes fixture/10/6/14 | Bin 816 -> 0 bytes fixture/10/6/15 | Bin 816 -> 0 bytes fixture/10/6/16 | Bin 816 -> 0 bytes fixture/10/6/17 | Bin 816 -> 0 bytes fixture/10/6/18 | Bin 816 -> 0 bytes fixture/10/6/19 | Bin 816 -> 0 bytes fixture/10/6/2 | Bin 816 -> 0 bytes fixture/10/6/20 | Bin 816 -> 0 bytes fixture/10/6/21 | Bin 816 -> 0 bytes fixture/10/6/22 | Bin 816 -> 0 bytes fixture/10/6/23 | Bin 816 -> 0 bytes 
fixture/10/6/24 | Bin 816 -> 0 bytes fixture/10/6/25 | Bin 816 -> 0 bytes fixture/10/6/26 | Bin 816 -> 0 bytes fixture/10/6/27 | Bin 816 -> 0 bytes fixture/10/6/28 | Bin 816 -> 0 bytes fixture/10/6/29 | Bin 816 -> 0 bytes fixture/10/6/3 | Bin 816 -> 0 bytes fixture/10/6/30 | Bin 816 -> 0 bytes fixture/10/6/31 | Bin 816 -> 0 bytes fixture/10/6/32 | Bin 816 -> 0 bytes fixture/10/6/33 | Bin 310 -> 0 bytes fixture/10/6/4 | Bin 816 -> 0 bytes fixture/10/6/5 | Bin 816 -> 0 bytes fixture/10/6/6 | Bin 816 -> 0 bytes fixture/10/6/7 | Bin 816 -> 0 bytes fixture/10/6/8 | Bin 816 -> 0 bytes fixture/10/6/9 | Bin 816 -> 0 bytes fixture/11/.zattrs | 1 - fixture/11/0/.zarray | 14 ---- fixture/11/0/.zattrs | 1 - fixture/11/0/0 | 1 - fixture/11/0/1 | 3 - fixture/11/0/10 | Bin 200 -> 0 bytes fixture/11/0/11 | 2 - fixture/11/0/12 | 1 - fixture/11/0/13 | 2 - fixture/11/0/14 | 1 - fixture/11/0/15 | Bin 200 -> 0 bytes fixture/11/0/16 | Bin 200 -> 0 bytes fixture/11/0/17 | 2 - fixture/11/0/18 | Bin 200 -> 0 bytes fixture/11/0/19 | 2 - fixture/11/0/2 | Bin 200 -> 0 bytes fixture/11/0/20 | 1 - fixture/11/0/21 | 1 - fixture/11/0/22 | 1 - fixture/11/0/23 | Bin 200 -> 0 bytes fixture/11/0/24 | 1 - fixture/11/0/25 | Bin 200 -> 0 bytes fixture/11/0/26 | 1 - fixture/11/0/27 | Bin 200 -> 0 bytes fixture/11/0/28 | Bin 200 -> 0 bytes fixture/11/0/29 | 1 - fixture/11/0/3 | 1 - fixture/11/0/30 | 1 - fixture/11/0/31 | Bin 200 -> 0 bytes fixture/11/0/32 | 3 - fixture/11/0/33 | Bin 200 -> 0 bytes fixture/11/0/34 | Bin 200 -> 0 bytes fixture/11/0/35 | 2 - fixture/11/0/36 | 1 - fixture/11/0/37 | 1 - fixture/11/0/38 | 1 - fixture/11/0/39 | 1 - fixture/11/0/4 | 1 - fixture/11/0/40 | 1 - fixture/11/0/41 | Bin 200 -> 0 bytes fixture/11/0/42 | 1 - fixture/11/0/43 | Bin 200 -> 0 bytes fixture/11/0/44 | Bin 200 -> 0 bytes fixture/11/0/5 | Bin 200 -> 0 bytes fixture/11/0/6 | Bin 200 -> 0 bytes fixture/11/0/7 | 2 - fixture/11/0/8 | 1 - fixture/11/0/9 | 1 - fixture/11/1/.zarray | 17 ---- fixture/11/1/.zattrs | 1 - fixture/11/1/0 | Bin 211 -> 0 bytes fixture/11/1/1 | Bin 211 -> 0 bytes fixture/11/1/10 | Bin 211 -> 0 bytes fixture/11/1/11 | Bin 211 -> 0 bytes fixture/11/1/12 | Bin 211 -> 0 bytes fixture/11/1/13 | Bin 211 -> 0 bytes fixture/11/1/14 | Bin 211 -> 0 bytes fixture/11/1/15 | Bin 211 -> 0 bytes fixture/11/1/16 | Bin 211 -> 0 bytes fixture/11/1/17 | Bin 211 -> 0 bytes fixture/11/1/18 | Bin 211 -> 0 bytes fixture/11/1/19 | Bin 211 -> 0 bytes fixture/11/1/2 | Bin 211 -> 0 bytes fixture/11/1/20 | Bin 211 -> 0 bytes fixture/11/1/21 | Bin 211 -> 0 bytes fixture/11/1/22 | Bin 211 -> 0 bytes fixture/11/1/23 | Bin 211 -> 0 bytes fixture/11/1/24 | Bin 211 -> 0 bytes fixture/11/1/25 | Bin 211 -> 0 bytes fixture/11/1/26 | Bin 211 -> 0 bytes fixture/11/1/27 | Bin 211 -> 0 bytes fixture/11/1/28 | Bin 211 -> 0 bytes fixture/11/1/29 | Bin 211 -> 0 bytes fixture/11/1/3 | Bin 211 -> 0 bytes fixture/11/1/30 | Bin 211 -> 0 bytes fixture/11/1/31 | Bin 211 -> 0 bytes fixture/11/1/32 | Bin 211 -> 0 bytes fixture/11/1/33 | Bin 211 -> 0 bytes fixture/11/1/34 | Bin 211 -> 0 bytes fixture/11/1/35 | Bin 211 -> 0 bytes fixture/11/1/36 | Bin 211 -> 0 bytes fixture/11/1/37 | Bin 211 -> 0 bytes fixture/11/1/38 | Bin 211 -> 0 bytes fixture/11/1/39 | Bin 211 -> 0 bytes fixture/11/1/4 | Bin 211 -> 0 bytes fixture/11/1/40 | Bin 211 -> 0 bytes fixture/11/1/41 | Bin 211 -> 0 bytes fixture/11/1/42 | Bin 211 -> 0 bytes fixture/11/1/43 | Bin 211 -> 0 bytes fixture/11/1/44 | Bin 105 -> 0 bytes fixture/11/1/5 | Bin 211 -> 0 bytes fixture/11/1/6 | Bin 211 -> 0 bytes 
fixture/11/1/7 | Bin 211 -> 0 bytes fixture/11/1/8 | Bin 211 -> 0 bytes fixture/11/1/9 | Bin 211 -> 0 bytes fixture/11/2/.zarray | 17 ---- fixture/11/2/.zattrs | 1 - fixture/11/2/0 | Bin 287 -> 0 bytes fixture/11/2/1 | Bin 295 -> 0 bytes fixture/11/2/10 | Bin 290 -> 0 bytes fixture/11/2/11 | Bin 288 -> 0 bytes fixture/11/2/12 | Bin 297 -> 0 bytes fixture/11/2/13 | Bin 290 -> 0 bytes fixture/11/2/14 | Bin 287 -> 0 bytes fixture/11/2/15 | Bin 294 -> 0 bytes fixture/11/2/16 | Bin 288 -> 0 bytes fixture/11/2/17 | Bin 289 -> 0 bytes fixture/11/2/18 | Bin 301 -> 0 bytes fixture/11/2/19 | Bin 294 -> 0 bytes fixture/11/2/2 | Bin 288 -> 0 bytes fixture/11/2/20 | Bin 295 -> 0 bytes fixture/11/2/21 | Bin 288 -> 0 bytes fixture/11/2/22 | Bin 299 -> 0 bytes fixture/11/2/23 | Bin 290 -> 0 bytes fixture/11/2/24 | Bin 302 -> 0 bytes fixture/11/2/25 | Bin 296 -> 0 bytes fixture/11/2/26 | Bin 289 -> 0 bytes fixture/11/2/27 | Bin 290 -> 0 bytes fixture/11/2/28 | Bin 291 -> 0 bytes fixture/11/2/29 | Bin 290 -> 0 bytes fixture/11/2/3 | Bin 292 -> 0 bytes fixture/11/2/30 | Bin 289 -> 0 bytes fixture/11/2/31 | Bin 295 -> 0 bytes fixture/11/2/32 | Bin 288 -> 0 bytes fixture/11/2/33 | Bin 290 -> 0 bytes fixture/11/2/34 | Bin 291 -> 0 bytes fixture/11/2/35 | Bin 292 -> 0 bytes fixture/11/2/36 | Bin 291 -> 0 bytes fixture/11/2/37 | Bin 291 -> 0 bytes fixture/11/2/38 | Bin 289 -> 0 bytes fixture/11/2/39 | Bin 289 -> 0 bytes fixture/11/2/4 | Bin 301 -> 0 bytes fixture/11/2/40 | Bin 289 -> 0 bytes fixture/11/2/41 | Bin 291 -> 0 bytes fixture/11/2/42 | Bin 288 -> 0 bytes fixture/11/2/43 | Bin 290 -> 0 bytes fixture/11/2/44 | Bin 156 -> 0 bytes fixture/11/2/5 | Bin 305 -> 0 bytes fixture/11/2/6 | Bin 287 -> 0 bytes fixture/11/2/7 | Bin 290 -> 0 bytes fixture/11/2/8 | Bin 288 -> 0 bytes fixture/11/2/9 | Bin 291 -> 0 bytes fixture/11/3/.zarray | 19 ----- fixture/11/3/.zattrs | 1 - fixture/11/3/0 | Bin 216 -> 0 bytes fixture/11/3/1 | Bin 216 -> 0 bytes fixture/11/3/10 | Bin 216 -> 0 bytes fixture/11/3/11 | Bin 216 -> 0 bytes fixture/11/3/12 | Bin 216 -> 0 bytes fixture/11/3/13 | Bin 216 -> 0 bytes fixture/11/3/14 | Bin 216 -> 0 bytes fixture/11/3/15 | Bin 216 -> 0 bytes fixture/11/3/16 | Bin 216 -> 0 bytes fixture/11/3/17 | Bin 216 -> 0 bytes fixture/11/3/18 | Bin 216 -> 0 bytes fixture/11/3/19 | Bin 216 -> 0 bytes fixture/11/3/2 | Bin 216 -> 0 bytes fixture/11/3/20 | Bin 216 -> 0 bytes fixture/11/3/21 | Bin 216 -> 0 bytes fixture/11/3/22 | Bin 216 -> 0 bytes fixture/11/3/23 | Bin 216 -> 0 bytes fixture/11/3/24 | Bin 216 -> 0 bytes fixture/11/3/25 | Bin 216 -> 0 bytes fixture/11/3/26 | Bin 216 -> 0 bytes fixture/11/3/27 | Bin 216 -> 0 bytes fixture/11/3/28 | Bin 216 -> 0 bytes fixture/11/3/29 | Bin 216 -> 0 bytes fixture/11/3/3 | Bin 216 -> 0 bytes fixture/11/3/30 | Bin 216 -> 0 bytes fixture/11/3/31 | Bin 216 -> 0 bytes fixture/11/3/32 | Bin 216 -> 0 bytes fixture/11/3/33 | Bin 216 -> 0 bytes fixture/11/3/34 | Bin 216 -> 0 bytes fixture/11/3/35 | Bin 216 -> 0 bytes fixture/11/3/36 | Bin 216 -> 0 bytes fixture/11/3/37 | Bin 216 -> 0 bytes fixture/11/3/38 | Bin 216 -> 0 bytes fixture/11/3/39 | Bin 216 -> 0 bytes fixture/11/3/4 | Bin 216 -> 0 bytes fixture/11/3/40 | Bin 216 -> 0 bytes fixture/11/3/41 | Bin 216 -> 0 bytes fixture/11/3/42 | Bin 216 -> 0 bytes fixture/11/3/43 | Bin 216 -> 0 bytes fixture/11/3/44 | Bin 130 -> 0 bytes fixture/11/3/5 | Bin 216 -> 0 bytes fixture/11/3/6 | Bin 216 -> 0 bytes fixture/11/3/7 | Bin 216 -> 0 bytes fixture/11/3/8 | Bin 216 -> 0 bytes fixture/11/3/9 | Bin 216 -> 0 bytes 
fixture/11/4/.zarray | 19 ----- fixture/11/4/.zattrs | 1 - fixture/11/4/0 | Bin 216 -> 0 bytes fixture/11/4/1 | Bin 216 -> 0 bytes fixture/11/4/10 | Bin 216 -> 0 bytes fixture/11/4/11 | Bin 216 -> 0 bytes fixture/11/4/12 | Bin 216 -> 0 bytes fixture/11/4/13 | Bin 216 -> 0 bytes fixture/11/4/14 | Bin 216 -> 0 bytes fixture/11/4/15 | Bin 216 -> 0 bytes fixture/11/4/16 | Bin 216 -> 0 bytes fixture/11/4/17 | Bin 216 -> 0 bytes fixture/11/4/18 | Bin 216 -> 0 bytes fixture/11/4/19 | Bin 216 -> 0 bytes fixture/11/4/2 | Bin 216 -> 0 bytes fixture/11/4/20 | Bin 216 -> 0 bytes fixture/11/4/21 | Bin 216 -> 0 bytes fixture/11/4/22 | Bin 216 -> 0 bytes fixture/11/4/23 | Bin 216 -> 0 bytes fixture/11/4/24 | Bin 216 -> 0 bytes fixture/11/4/25 | Bin 216 -> 0 bytes fixture/11/4/26 | Bin 216 -> 0 bytes fixture/11/4/27 | Bin 216 -> 0 bytes fixture/11/4/28 | Bin 216 -> 0 bytes fixture/11/4/29 | Bin 216 -> 0 bytes fixture/11/4/3 | Bin 216 -> 0 bytes fixture/11/4/30 | Bin 216 -> 0 bytes fixture/11/4/31 | Bin 216 -> 0 bytes fixture/11/4/32 | Bin 216 -> 0 bytes fixture/11/4/33 | Bin 216 -> 0 bytes fixture/11/4/34 | Bin 216 -> 0 bytes fixture/11/4/35 | Bin 216 -> 0 bytes fixture/11/4/36 | Bin 216 -> 0 bytes fixture/11/4/37 | Bin 216 -> 0 bytes fixture/11/4/38 | Bin 216 -> 0 bytes fixture/11/4/39 | Bin 216 -> 0 bytes fixture/11/4/4 | Bin 216 -> 0 bytes fixture/11/4/40 | Bin 216 -> 0 bytes fixture/11/4/41 | Bin 216 -> 0 bytes fixture/11/4/42 | Bin 216 -> 0 bytes fixture/11/4/43 | Bin 216 -> 0 bytes fixture/11/4/44 | Bin 133 -> 0 bytes fixture/11/4/5 | Bin 216 -> 0 bytes fixture/11/4/6 | Bin 216 -> 0 bytes fixture/11/4/7 | Bin 216 -> 0 bytes fixture/11/4/8 | Bin 216 -> 0 bytes fixture/11/4/9 | Bin 216 -> 0 bytes fixture/11/5/.zarray | 19 ----- fixture/11/5/.zattrs | 1 - fixture/11/5/0 | Bin 216 -> 0 bytes fixture/11/5/1 | Bin 216 -> 0 bytes fixture/11/5/10 | Bin 216 -> 0 bytes fixture/11/5/11 | Bin 216 -> 0 bytes fixture/11/5/12 | Bin 216 -> 0 bytes fixture/11/5/13 | Bin 216 -> 0 bytes fixture/11/5/14 | Bin 216 -> 0 bytes fixture/11/5/15 | Bin 216 -> 0 bytes fixture/11/5/16 | Bin 216 -> 0 bytes fixture/11/5/17 | Bin 216 -> 0 bytes fixture/11/5/18 | Bin 216 -> 0 bytes fixture/11/5/19 | Bin 216 -> 0 bytes fixture/11/5/2 | Bin 216 -> 0 bytes fixture/11/5/20 | Bin 216 -> 0 bytes fixture/11/5/21 | Bin 216 -> 0 bytes fixture/11/5/22 | Bin 216 -> 0 bytes fixture/11/5/23 | Bin 216 -> 0 bytes fixture/11/5/24 | Bin 216 -> 0 bytes fixture/11/5/25 | Bin 216 -> 0 bytes fixture/11/5/26 | Bin 216 -> 0 bytes fixture/11/5/27 | Bin 216 -> 0 bytes fixture/11/5/28 | Bin 216 -> 0 bytes fixture/11/5/29 | Bin 216 -> 0 bytes fixture/11/5/3 | Bin 216 -> 0 bytes fixture/11/5/30 | Bin 216 -> 0 bytes fixture/11/5/31 | Bin 216 -> 0 bytes fixture/11/5/32 | Bin 216 -> 0 bytes fixture/11/5/33 | Bin 216 -> 0 bytes fixture/11/5/34 | Bin 216 -> 0 bytes fixture/11/5/35 | Bin 216 -> 0 bytes fixture/11/5/36 | Bin 216 -> 0 bytes fixture/11/5/37 | Bin 216 -> 0 bytes fixture/11/5/38 | Bin 216 -> 0 bytes fixture/11/5/39 | Bin 216 -> 0 bytes fixture/11/5/4 | Bin 216 -> 0 bytes fixture/11/5/40 | Bin 216 -> 0 bytes fixture/11/5/41 | Bin 216 -> 0 bytes fixture/11/5/42 | Bin 216 -> 0 bytes fixture/11/5/43 | Bin 216 -> 0 bytes fixture/11/5/44 | Bin 130 -> 0 bytes fixture/11/5/5 | Bin 216 -> 0 bytes fixture/11/5/6 | Bin 216 -> 0 bytes fixture/11/5/7 | Bin 216 -> 0 bytes fixture/11/5/8 | Bin 216 -> 0 bytes fixture/11/5/9 | Bin 216 -> 0 bytes fixture/11/6/.zarray | 19 ----- fixture/11/6/.zattrs | 1 - fixture/11/6/0 | Bin 216 -> 0 bytes fixture/11/6/1 | Bin 216 -> 0 
bytes fixture/11/6/10 | Bin 216 -> 0 bytes fixture/11/6/11 | Bin 216 -> 0 bytes fixture/11/6/12 | Bin 216 -> 0 bytes fixture/11/6/13 | Bin 216 -> 0 bytes fixture/11/6/14 | Bin 216 -> 0 bytes fixture/11/6/15 | Bin 216 -> 0 bytes fixture/11/6/16 | Bin 216 -> 0 bytes fixture/11/6/17 | Bin 216 -> 0 bytes fixture/11/6/18 | Bin 216 -> 0 bytes fixture/11/6/19 | Bin 216 -> 0 bytes fixture/11/6/2 | Bin 216 -> 0 bytes fixture/11/6/20 | Bin 216 -> 0 bytes fixture/11/6/21 | Bin 216 -> 0 bytes fixture/11/6/22 | Bin 216 -> 0 bytes fixture/11/6/23 | Bin 216 -> 0 bytes fixture/11/6/24 | Bin 216 -> 0 bytes fixture/11/6/25 | Bin 216 -> 0 bytes fixture/11/6/26 | Bin 216 -> 0 bytes fixture/11/6/27 | Bin 216 -> 0 bytes fixture/11/6/28 | Bin 216 -> 0 bytes fixture/11/6/29 | Bin 216 -> 0 bytes fixture/11/6/3 | Bin 216 -> 0 bytes fixture/11/6/30 | Bin 216 -> 0 bytes fixture/11/6/31 | Bin 216 -> 0 bytes fixture/11/6/32 | Bin 216 -> 0 bytes fixture/11/6/33 | Bin 216 -> 0 bytes fixture/11/6/34 | Bin 216 -> 0 bytes fixture/11/6/35 | Bin 216 -> 0 bytes fixture/11/6/36 | Bin 216 -> 0 bytes fixture/11/6/37 | Bin 216 -> 0 bytes fixture/11/6/38 | Bin 216 -> 0 bytes fixture/11/6/39 | Bin 216 -> 0 bytes fixture/11/6/4 | Bin 216 -> 0 bytes fixture/11/6/40 | Bin 216 -> 0 bytes fixture/11/6/41 | Bin 216 -> 0 bytes fixture/11/6/42 | Bin 216 -> 0 bytes fixture/11/6/43 | Bin 216 -> 0 bytes fixture/11/6/44 | Bin 132 -> 0 bytes fixture/11/6/5 | Bin 216 -> 0 bytes fixture/11/6/6 | Bin 216 -> 0 bytes fixture/11/6/7 | Bin 216 -> 0 bytes fixture/11/6/8 | Bin 216 -> 0 bytes fixture/11/6/9 | Bin 216 -> 0 bytes fixture/12/.zattrs | 1 - fixture/12/0/.zarray | 14 ---- fixture/12/0/.zattrs | 1 - fixture/12/0/0 | 2 - fixture/12/0/1 | Bin 400 -> 0 bytes fixture/12/0/10 | Bin 400 -> 0 bytes fixture/12/0/11 | Bin 400 -> 0 bytes fixture/12/0/12 | Bin 400 -> 0 bytes fixture/12/0/13 | Bin 400 -> 0 bytes fixture/12/0/14 | Bin 400 -> 0 bytes fixture/12/0/15 | Bin 400 -> 0 bytes fixture/12/0/16 | 3 - fixture/12/0/17 | 1 - fixture/12/0/18 | 3 - fixture/12/0/19 | 2 - fixture/12/0/2 | Bin 400 -> 0 bytes fixture/12/0/20 | Bin 400 -> 0 bytes fixture/12/0/21 | Bin 400 -> 0 bytes fixture/12/0/22 | 2 - fixture/12/0/23 | Bin 400 -> 0 bytes fixture/12/0/24 | Bin 400 -> 0 bytes fixture/12/0/25 | Bin 400 -> 0 bytes fixture/12/0/26 | Bin 400 -> 0 bytes fixture/12/0/27 | 2 - fixture/12/0/28 | Bin 400 -> 0 bytes fixture/12/0/29 | 3 - fixture/12/0/3 | Bin 400 -> 0 bytes fixture/12/0/30 | Bin 400 -> 0 bytes fixture/12/0/31 | Bin 400 -> 0 bytes fixture/12/0/32 | Bin 400 -> 0 bytes fixture/12/0/33 | 3 - fixture/12/0/34 | Bin 400 -> 0 bytes fixture/12/0/35 | 3 - fixture/12/0/36 | Bin 400 -> 0 bytes fixture/12/0/37 | Bin 400 -> 0 bytes fixture/12/0/38 | Bin 400 -> 0 bytes fixture/12/0/39 | 3 - fixture/12/0/4 | 3 - fixture/12/0/40 | Bin 400 -> 0 bytes fixture/12/0/41 | 3 - fixture/12/0/42 | Bin 400 -> 0 bytes fixture/12/0/43 | Bin 400 -> 0 bytes fixture/12/0/44 | Bin 400 -> 0 bytes fixture/12/0/5 | Bin 400 -> 0 bytes fixture/12/0/6 | Bin 400 -> 0 bytes fixture/12/0/7 | Bin 400 -> 0 bytes fixture/12/0/8 | Bin 400 -> 0 bytes fixture/12/0/9 | Bin 400 -> 0 bytes fixture/12/1/.zarray | 17 ---- fixture/12/1/.zattrs | 1 - fixture/12/1/0 | 2 - fixture/12/1/1 | Bin 411 -> 0 bytes fixture/12/1/10 | Bin 411 -> 0 bytes fixture/12/1/11 | Bin 411 -> 0 bytes fixture/12/1/12 | Bin 411 -> 0 bytes fixture/12/1/13 | Bin 411 -> 0 bytes fixture/12/1/14 | Bin 411 -> 0 bytes fixture/12/1/15 | Bin 411 -> 0 bytes fixture/12/1/16 | 3 - fixture/12/1/17 | 1 - fixture/12/1/18 | 3 - fixture/12/1/19 | 2 
- fixture/12/1/2 | Bin 411 -> 0 bytes fixture/12/1/20 | Bin 411 -> 0 bytes fixture/12/1/21 | Bin 411 -> 0 bytes fixture/12/1/22 | 2 - fixture/12/1/23 | Bin 411 -> 0 bytes fixture/12/1/24 | Bin 411 -> 0 bytes fixture/12/1/25 | Bin 411 -> 0 bytes fixture/12/1/26 | Bin 411 -> 0 bytes fixture/12/1/27 | Bin 411 -> 0 bytes fixture/12/1/28 | Bin 411 -> 0 bytes fixture/12/1/29 | 3 - fixture/12/1/3 | Bin 411 -> 0 bytes fixture/12/1/30 | Bin 411 -> 0 bytes fixture/12/1/31 | Bin 411 -> 0 bytes fixture/12/1/32 | Bin 411 -> 0 bytes fixture/12/1/33 | 3 - fixture/12/1/34 | Bin 411 -> 0 bytes fixture/12/1/35 | 3 - fixture/12/1/36 | Bin 411 -> 0 bytes fixture/12/1/37 | Bin 411 -> 0 bytes fixture/12/1/38 | Bin 411 -> 0 bytes fixture/12/1/39 | 3 - fixture/12/1/4 | 3 - fixture/12/1/40 | Bin 411 -> 0 bytes fixture/12/1/41 | 3 - fixture/12/1/42 | Bin 411 -> 0 bytes fixture/12/1/43 | Bin 411 -> 0 bytes fixture/12/1/44 | Bin 198 -> 0 bytes fixture/12/1/5 | Bin 411 -> 0 bytes fixture/12/1/6 | Bin 411 -> 0 bytes fixture/12/1/7 | Bin 411 -> 0 bytes fixture/12/1/8 | Bin 411 -> 0 bytes fixture/12/1/9 | Bin 411 -> 0 bytes fixture/12/2/.zarray | 17 ---- fixture/12/2/.zattrs | 1 - fixture/12/2/0 | Bin 557 -> 0 bytes fixture/12/2/1 | Bin 548 -> 0 bytes fixture/12/2/10 | Bin 550 -> 0 bytes fixture/12/2/11 | Bin 548 -> 0 bytes fixture/12/2/12 | Bin 546 -> 0 bytes fixture/12/2/13 | Bin 551 -> 0 bytes fixture/12/2/14 | Bin 552 -> 0 bytes fixture/12/2/15 | Bin 554 -> 0 bytes fixture/12/2/16 | Bin 547 -> 0 bytes fixture/12/2/17 | Bin 552 -> 0 bytes fixture/12/2/18 | Bin 547 -> 0 bytes fixture/12/2/19 | Bin 552 -> 0 bytes fixture/12/2/2 | Bin 555 -> 0 bytes fixture/12/2/20 | Bin 546 -> 0 bytes fixture/12/2/21 | Bin 556 -> 0 bytes fixture/12/2/22 | Bin 541 -> 0 bytes fixture/12/2/23 | Bin 550 -> 0 bytes fixture/12/2/24 | Bin 540 -> 0 bytes fixture/12/2/25 | Bin 556 -> 0 bytes fixture/12/2/26 | Bin 544 -> 0 bytes fixture/12/2/27 | Bin 552 -> 0 bytes fixture/12/2/28 | Bin 550 -> 0 bytes fixture/12/2/29 | Bin 543 -> 0 bytes fixture/12/2/3 | Bin 552 -> 0 bytes fixture/12/2/30 | Bin 547 -> 0 bytes fixture/12/2/31 | Bin 545 -> 0 bytes fixture/12/2/32 | Bin 558 -> 0 bytes fixture/12/2/33 | Bin 540 -> 0 bytes fixture/12/2/34 | Bin 552 -> 0 bytes fixture/12/2/35 | Bin 554 -> 0 bytes fixture/12/2/36 | Bin 551 -> 0 bytes fixture/12/2/37 | Bin 551 -> 0 bytes fixture/12/2/38 | Bin 552 -> 0 bytes fixture/12/2/39 | Bin 549 -> 0 bytes fixture/12/2/4 | Bin 551 -> 0 bytes fixture/12/2/40 | Bin 551 -> 0 bytes fixture/12/2/41 | Bin 550 -> 0 bytes fixture/12/2/42 | Bin 549 -> 0 bytes fixture/12/2/43 | Bin 548 -> 0 bytes fixture/12/2/44 | Bin 253 -> 0 bytes fixture/12/2/5 | Bin 552 -> 0 bytes fixture/12/2/6 | Bin 547 -> 0 bytes fixture/12/2/7 | Bin 550 -> 0 bytes fixture/12/2/8 | Bin 554 -> 0 bytes fixture/12/2/9 | Bin 552 -> 0 bytes fixture/12/3/.zarray | 19 ----- fixture/12/3/.zattrs | 1 - fixture/12/3/0 | Bin 416 -> 0 bytes fixture/12/3/1 | Bin 416 -> 0 bytes fixture/12/3/10 | Bin 416 -> 0 bytes fixture/12/3/11 | Bin 416 -> 0 bytes fixture/12/3/12 | Bin 416 -> 0 bytes fixture/12/3/13 | Bin 416 -> 0 bytes fixture/12/3/14 | Bin 416 -> 0 bytes fixture/12/3/15 | Bin 416 -> 0 bytes fixture/12/3/16 | Bin 416 -> 0 bytes fixture/12/3/17 | Bin 416 -> 0 bytes fixture/12/3/18 | Bin 416 -> 0 bytes fixture/12/3/19 | Bin 416 -> 0 bytes fixture/12/3/2 | Bin 416 -> 0 bytes fixture/12/3/20 | Bin 416 -> 0 bytes fixture/12/3/21 | Bin 416 -> 0 bytes fixture/12/3/22 | Bin 416 -> 0 bytes fixture/12/3/23 | Bin 416 -> 0 bytes fixture/12/3/24 | Bin 416 -> 0 bytes 
fixture/12/3/25 | Bin 416 -> 0 bytes fixture/12/3/26 | Bin 416 -> 0 bytes fixture/12/3/27 | Bin 416 -> 0 bytes fixture/12/3/28 | Bin 416 -> 0 bytes fixture/12/3/29 | Bin 416 -> 0 bytes fixture/12/3/3 | Bin 416 -> 0 bytes fixture/12/3/30 | Bin 416 -> 0 bytes fixture/12/3/31 | Bin 416 -> 0 bytes fixture/12/3/32 | Bin 416 -> 0 bytes fixture/12/3/33 | Bin 416 -> 0 bytes fixture/12/3/34 | Bin 416 -> 0 bytes fixture/12/3/35 | Bin 416 -> 0 bytes fixture/12/3/36 | Bin 416 -> 0 bytes fixture/12/3/37 | Bin 416 -> 0 bytes fixture/12/3/38 | Bin 416 -> 0 bytes fixture/12/3/39 | Bin 416 -> 0 bytes fixture/12/3/4 | Bin 416 -> 0 bytes fixture/12/3/40 | Bin 416 -> 0 bytes fixture/12/3/41 | Bin 416 -> 0 bytes fixture/12/3/42 | Bin 416 -> 0 bytes fixture/12/3/43 | Bin 416 -> 0 bytes fixture/12/3/44 | Bin 219 -> 0 bytes fixture/12/3/5 | Bin 416 -> 0 bytes fixture/12/3/6 | Bin 416 -> 0 bytes fixture/12/3/7 | Bin 416 -> 0 bytes fixture/12/3/8 | Bin 416 -> 0 bytes fixture/12/3/9 | Bin 416 -> 0 bytes fixture/12/4/.zarray | 19 ----- fixture/12/4/.zattrs | 1 - fixture/12/4/0 | Bin 416 -> 0 bytes fixture/12/4/1 | Bin 416 -> 0 bytes fixture/12/4/10 | Bin 416 -> 0 bytes fixture/12/4/11 | Bin 416 -> 0 bytes fixture/12/4/12 | Bin 416 -> 0 bytes fixture/12/4/13 | Bin 416 -> 0 bytes fixture/12/4/14 | Bin 416 -> 0 bytes fixture/12/4/15 | Bin 416 -> 0 bytes fixture/12/4/16 | Bin 416 -> 0 bytes fixture/12/4/17 | Bin 416 -> 0 bytes fixture/12/4/18 | Bin 416 -> 0 bytes fixture/12/4/19 | Bin 416 -> 0 bytes fixture/12/4/2 | Bin 416 -> 0 bytes fixture/12/4/20 | Bin 416 -> 0 bytes fixture/12/4/21 | Bin 416 -> 0 bytes fixture/12/4/22 | Bin 416 -> 0 bytes fixture/12/4/23 | Bin 416 -> 0 bytes fixture/12/4/24 | Bin 416 -> 0 bytes fixture/12/4/25 | Bin 416 -> 0 bytes fixture/12/4/26 | Bin 416 -> 0 bytes fixture/12/4/27 | Bin 416 -> 0 bytes fixture/12/4/28 | Bin 416 -> 0 bytes fixture/12/4/29 | Bin 416 -> 0 bytes fixture/12/4/3 | Bin 416 -> 0 bytes fixture/12/4/30 | Bin 416 -> 0 bytes fixture/12/4/31 | Bin 416 -> 0 bytes fixture/12/4/32 | Bin 416 -> 0 bytes fixture/12/4/33 | Bin 416 -> 0 bytes fixture/12/4/34 | Bin 416 -> 0 bytes fixture/12/4/35 | Bin 416 -> 0 bytes fixture/12/4/36 | Bin 416 -> 0 bytes fixture/12/4/37 | Bin 416 -> 0 bytes fixture/12/4/38 | Bin 416 -> 0 bytes fixture/12/4/39 | Bin 416 -> 0 bytes fixture/12/4/4 | Bin 416 -> 0 bytes fixture/12/4/40 | Bin 416 -> 0 bytes fixture/12/4/41 | Bin 416 -> 0 bytes fixture/12/4/42 | Bin 416 -> 0 bytes fixture/12/4/43 | Bin 416 -> 0 bytes fixture/12/4/44 | Bin 225 -> 0 bytes fixture/12/4/5 | Bin 416 -> 0 bytes fixture/12/4/6 | Bin 416 -> 0 bytes fixture/12/4/7 | Bin 416 -> 0 bytes fixture/12/4/8 | Bin 416 -> 0 bytes fixture/12/4/9 | Bin 416 -> 0 bytes fixture/12/5/.zarray | 19 ----- fixture/12/5/.zattrs | 1 - fixture/12/5/0 | Bin 416 -> 0 bytes fixture/12/5/1 | Bin 416 -> 0 bytes fixture/12/5/10 | Bin 416 -> 0 bytes fixture/12/5/11 | Bin 416 -> 0 bytes fixture/12/5/12 | Bin 416 -> 0 bytes fixture/12/5/13 | Bin 416 -> 0 bytes fixture/12/5/14 | Bin 416 -> 0 bytes fixture/12/5/15 | Bin 416 -> 0 bytes fixture/12/5/16 | Bin 416 -> 0 bytes fixture/12/5/17 | Bin 416 -> 0 bytes fixture/12/5/18 | Bin 416 -> 0 bytes fixture/12/5/19 | Bin 416 -> 0 bytes fixture/12/5/2 | Bin 416 -> 0 bytes fixture/12/5/20 | Bin 416 -> 0 bytes fixture/12/5/21 | Bin 416 -> 0 bytes fixture/12/5/22 | Bin 416 -> 0 bytes fixture/12/5/23 | Bin 416 -> 0 bytes fixture/12/5/24 | Bin 416 -> 0 bytes fixture/12/5/25 | Bin 416 -> 0 bytes fixture/12/5/26 | Bin 416 -> 0 bytes fixture/12/5/27 | Bin 416 -> 0 bytes fixture/12/5/28 
| Bin 416 -> 0 bytes fixture/12/5/29 | Bin 416 -> 0 bytes fixture/12/5/3 | Bin 416 -> 0 bytes fixture/12/5/30 | Bin 416 -> 0 bytes fixture/12/5/31 | Bin 416 -> 0 bytes fixture/12/5/32 | Bin 416 -> 0 bytes fixture/12/5/33 | Bin 416 -> 0 bytes fixture/12/5/34 | Bin 416 -> 0 bytes fixture/12/5/35 | Bin 416 -> 0 bytes fixture/12/5/36 | Bin 416 -> 0 bytes fixture/12/5/37 | Bin 416 -> 0 bytes fixture/12/5/38 | Bin 416 -> 0 bytes fixture/12/5/39 | Bin 416 -> 0 bytes fixture/12/5/4 | Bin 416 -> 0 bytes fixture/12/5/40 | Bin 416 -> 0 bytes fixture/12/5/41 | Bin 416 -> 0 bytes fixture/12/5/42 | Bin 416 -> 0 bytes fixture/12/5/43 | Bin 416 -> 0 bytes fixture/12/5/44 | Bin 219 -> 0 bytes fixture/12/5/5 | Bin 416 -> 0 bytes fixture/12/5/6 | Bin 416 -> 0 bytes fixture/12/5/7 | Bin 416 -> 0 bytes fixture/12/5/8 | Bin 416 -> 0 bytes fixture/12/5/9 | Bin 416 -> 0 bytes fixture/12/6/.zarray | 19 ----- fixture/12/6/.zattrs | 1 - fixture/12/6/0 | Bin 416 -> 0 bytes fixture/12/6/1 | Bin 416 -> 0 bytes fixture/12/6/10 | Bin 416 -> 0 bytes fixture/12/6/11 | Bin 416 -> 0 bytes fixture/12/6/12 | Bin 416 -> 0 bytes fixture/12/6/13 | Bin 416 -> 0 bytes fixture/12/6/14 | Bin 416 -> 0 bytes fixture/12/6/15 | Bin 416 -> 0 bytes fixture/12/6/16 | Bin 416 -> 0 bytes fixture/12/6/17 | Bin 416 -> 0 bytes fixture/12/6/18 | Bin 416 -> 0 bytes fixture/12/6/19 | Bin 416 -> 0 bytes fixture/12/6/2 | Bin 416 -> 0 bytes fixture/12/6/20 | Bin 416 -> 0 bytes fixture/12/6/21 | Bin 416 -> 0 bytes fixture/12/6/22 | Bin 416 -> 0 bytes fixture/12/6/23 | Bin 416 -> 0 bytes fixture/12/6/24 | Bin 416 -> 0 bytes fixture/12/6/25 | Bin 416 -> 0 bytes fixture/12/6/26 | Bin 416 -> 0 bytes fixture/12/6/27 | Bin 416 -> 0 bytes fixture/12/6/28 | Bin 416 -> 0 bytes fixture/12/6/29 | Bin 416 -> 0 bytes fixture/12/6/3 | Bin 416 -> 0 bytes fixture/12/6/30 | Bin 416 -> 0 bytes fixture/12/6/31 | Bin 416 -> 0 bytes fixture/12/6/32 | Bin 416 -> 0 bytes fixture/12/6/33 | Bin 416 -> 0 bytes fixture/12/6/34 | Bin 416 -> 0 bytes fixture/12/6/35 | Bin 416 -> 0 bytes fixture/12/6/36 | Bin 416 -> 0 bytes fixture/12/6/37 | Bin 416 -> 0 bytes fixture/12/6/38 | Bin 416 -> 0 bytes fixture/12/6/39 | Bin 416 -> 0 bytes fixture/12/6/4 | Bin 416 -> 0 bytes fixture/12/6/40 | Bin 416 -> 0 bytes fixture/12/6/41 | Bin 416 -> 0 bytes fixture/12/6/42 | Bin 416 -> 0 bytes fixture/12/6/43 | Bin 416 -> 0 bytes fixture/12/6/44 | Bin 220 -> 0 bytes fixture/12/6/5 | Bin 416 -> 0 bytes fixture/12/6/6 | Bin 416 -> 0 bytes fixture/12/6/7 | Bin 416 -> 0 bytes fixture/12/6/8 | Bin 416 -> 0 bytes fixture/12/6/9 | Bin 416 -> 0 bytes fixture/13/.zattrs | 1 - fixture/13/0/.zarray | 14 ---- fixture/13/0/.zattrs | 1 - fixture/13/0/0 | Bin 800 -> 0 bytes fixture/13/0/1 | Bin 800 -> 0 bytes fixture/13/0/10 | Bin 800 -> 0 bytes fixture/13/0/11 | Bin 800 -> 0 bytes fixture/13/0/12 | Bin 800 -> 0 bytes fixture/13/0/13 | Bin 800 -> 0 bytes fixture/13/0/14 | Bin 800 -> 0 bytes fixture/13/0/15 | Bin 800 -> 0 bytes fixture/13/0/16 | Bin 800 -> 0 bytes fixture/13/0/17 | Bin 800 -> 0 bytes fixture/13/0/18 | Bin 800 -> 0 bytes fixture/13/0/19 | Bin 800 -> 0 bytes fixture/13/0/2 | Bin 800 -> 0 bytes fixture/13/0/20 | Bin 800 -> 0 bytes fixture/13/0/21 | Bin 800 -> 0 bytes fixture/13/0/22 | Bin 800 -> 0 bytes fixture/13/0/23 | Bin 800 -> 0 bytes fixture/13/0/24 | Bin 800 -> 0 bytes fixture/13/0/25 | Bin 800 -> 0 bytes fixture/13/0/26 | Bin 800 -> 0 bytes fixture/13/0/27 | Bin 800 -> 0 bytes fixture/13/0/28 | Bin 800 -> 0 bytes fixture/13/0/29 | Bin 800 -> 0 bytes fixture/13/0/3 | Bin 800 -> 0 bytes 
fixture/13/0/30 | Bin 800 -> 0 bytes fixture/13/0/31 | Bin 800 -> 0 bytes fixture/13/0/32 | Bin 800 -> 0 bytes fixture/13/0/33 | Bin 800 -> 0 bytes fixture/13/0/34 | Bin 800 -> 0 bytes fixture/13/0/35 | Bin 800 -> 0 bytes fixture/13/0/36 | Bin 800 -> 0 bytes fixture/13/0/37 | Bin 800 -> 0 bytes fixture/13/0/38 | Bin 800 -> 0 bytes fixture/13/0/39 | Bin 800 -> 0 bytes fixture/13/0/4 | Bin 800 -> 0 bytes fixture/13/0/40 | Bin 800 -> 0 bytes fixture/13/0/41 | Bin 800 -> 0 bytes fixture/13/0/42 | Bin 800 -> 0 bytes fixture/13/0/43 | Bin 800 -> 0 bytes fixture/13/0/44 | Bin 800 -> 0 bytes fixture/13/0/5 | Bin 800 -> 0 bytes fixture/13/0/6 | Bin 800 -> 0 bytes fixture/13/0/7 | Bin 800 -> 0 bytes fixture/13/0/8 | Bin 800 -> 0 bytes fixture/13/0/9 | Bin 800 -> 0 bytes fixture/13/1/.zarray | 17 ---- fixture/13/1/.zattrs | 1 - fixture/13/1/0 | Bin 811 -> 0 bytes fixture/13/1/1 | Bin 811 -> 0 bytes fixture/13/1/10 | Bin 811 -> 0 bytes fixture/13/1/11 | Bin 811 -> 0 bytes fixture/13/1/12 | Bin 811 -> 0 bytes fixture/13/1/13 | Bin 811 -> 0 bytes fixture/13/1/14 | Bin 811 -> 0 bytes fixture/13/1/15 | Bin 811 -> 0 bytes fixture/13/1/16 | Bin 811 -> 0 bytes fixture/13/1/17 | Bin 811 -> 0 bytes fixture/13/1/18 | Bin 811 -> 0 bytes fixture/13/1/19 | Bin 811 -> 0 bytes fixture/13/1/2 | Bin 811 -> 0 bytes fixture/13/1/20 | Bin 811 -> 0 bytes fixture/13/1/21 | Bin 811 -> 0 bytes fixture/13/1/22 | Bin 811 -> 0 bytes fixture/13/1/23 | Bin 811 -> 0 bytes fixture/13/1/24 | Bin 811 -> 0 bytes fixture/13/1/25 | Bin 811 -> 0 bytes fixture/13/1/26 | Bin 811 -> 0 bytes fixture/13/1/27 | Bin 811 -> 0 bytes fixture/13/1/28 | Bin 811 -> 0 bytes fixture/13/1/29 | Bin 811 -> 0 bytes fixture/13/1/3 | Bin 811 -> 0 bytes fixture/13/1/30 | Bin 811 -> 0 bytes fixture/13/1/31 | Bin 811 -> 0 bytes fixture/13/1/32 | Bin 811 -> 0 bytes fixture/13/1/33 | Bin 811 -> 0 bytes fixture/13/1/34 | Bin 811 -> 0 bytes fixture/13/1/35 | Bin 811 -> 0 bytes fixture/13/1/36 | Bin 811 -> 0 bytes fixture/13/1/37 | Bin 811 -> 0 bytes fixture/13/1/38 | Bin 811 -> 0 bytes fixture/13/1/39 | Bin 811 -> 0 bytes fixture/13/1/4 | Bin 811 -> 0 bytes fixture/13/1/40 | Bin 811 -> 0 bytes fixture/13/1/41 | Bin 811 -> 0 bytes fixture/13/1/42 | Bin 811 -> 0 bytes fixture/13/1/43 | Bin 811 -> 0 bytes fixture/13/1/44 | Bin 387 -> 0 bytes fixture/13/1/5 | Bin 811 -> 0 bytes fixture/13/1/6 | Bin 811 -> 0 bytes fixture/13/1/7 | Bin 811 -> 0 bytes fixture/13/1/8 | Bin 811 -> 0 bytes fixture/13/1/9 | Bin 811 -> 0 bytes fixture/13/2/.zarray | 17 ---- fixture/13/2/.zattrs | 1 - fixture/13/2/0 | Bin 1065 -> 0 bytes fixture/13/2/1 | Bin 1035 -> 0 bytes fixture/13/2/10 | Bin 1053 -> 0 bytes fixture/13/2/11 | Bin 1009 -> 0 bytes fixture/13/2/12 | Bin 1029 -> 0 bytes fixture/13/2/13 | Bin 1022 -> 0 bytes fixture/13/2/14 | Bin 1041 -> 0 bytes fixture/13/2/15 | Bin 1052 -> 0 bytes fixture/13/2/16 | Bin 1045 -> 0 bytes fixture/13/2/17 | Bin 1064 -> 0 bytes fixture/13/2/18 | Bin 1024 -> 0 bytes fixture/13/2/19 | Bin 1050 -> 0 bytes fixture/13/2/2 | Bin 1021 -> 0 bytes fixture/13/2/20 | Bin 1035 -> 0 bytes fixture/13/2/21 | Bin 1035 -> 0 bytes fixture/13/2/22 | Bin 1041 -> 0 bytes fixture/13/2/23 | Bin 1031 -> 0 bytes fixture/13/2/24 | Bin 1016 -> 0 bytes fixture/13/2/25 | Bin 1013 -> 0 bytes fixture/13/2/26 | Bin 1022 -> 0 bytes fixture/13/2/27 | Bin 1043 -> 0 bytes fixture/13/2/28 | Bin 1067 -> 0 bytes fixture/13/2/29 | Bin 1023 -> 0 bytes fixture/13/2/3 | Bin 1032 -> 0 bytes fixture/13/2/30 | Bin 1011 -> 0 bytes fixture/13/2/31 | Bin 1063 -> 0 bytes fixture/13/2/32 | Bin 1039 
-> 0 bytes fixture/13/2/33 | Bin 1072 -> 0 bytes fixture/13/2/34 | Bin 1029 -> 0 bytes fixture/13/2/35 | Bin 1053 -> 0 bytes fixture/13/2/36 | Bin 1038 -> 0 bytes fixture/13/2/37 | Bin 1047 -> 0 bytes fixture/13/2/38 | Bin 1019 -> 0 bytes fixture/13/2/39 | Bin 1012 -> 0 bytes fixture/13/2/4 | Bin 1040 -> 0 bytes fixture/13/2/40 | Bin 1014 -> 0 bytes fixture/13/2/41 | Bin 1041 -> 0 bytes fixture/13/2/42 | Bin 1005 -> 0 bytes fixture/13/2/43 | Bin 1043 -> 0 bytes fixture/13/2/44 | Bin 518 -> 0 bytes fixture/13/2/5 | Bin 1031 -> 0 bytes fixture/13/2/6 | Bin 1009 -> 0 bytes fixture/13/2/7 | Bin 1039 -> 0 bytes fixture/13/2/8 | Bin 1023 -> 0 bytes fixture/13/2/9 | Bin 1031 -> 0 bytes fixture/13/3/.zarray | 19 ----- fixture/13/3/.zattrs | 1 - fixture/13/3/0 | Bin 816 -> 0 bytes fixture/13/3/1 | Bin 816 -> 0 bytes fixture/13/3/10 | Bin 816 -> 0 bytes fixture/13/3/11 | Bin 816 -> 0 bytes fixture/13/3/12 | Bin 816 -> 0 bytes fixture/13/3/13 | Bin 816 -> 0 bytes fixture/13/3/14 | Bin 816 -> 0 bytes fixture/13/3/15 | Bin 816 -> 0 bytes fixture/13/3/16 | Bin 816 -> 0 bytes fixture/13/3/17 | Bin 816 -> 0 bytes fixture/13/3/18 | Bin 816 -> 0 bytes fixture/13/3/19 | Bin 816 -> 0 bytes fixture/13/3/2 | Bin 816 -> 0 bytes fixture/13/3/20 | Bin 816 -> 0 bytes fixture/13/3/21 | Bin 816 -> 0 bytes fixture/13/3/22 | Bin 816 -> 0 bytes fixture/13/3/23 | Bin 816 -> 0 bytes fixture/13/3/24 | Bin 816 -> 0 bytes fixture/13/3/25 | Bin 816 -> 0 bytes fixture/13/3/26 | Bin 816 -> 0 bytes fixture/13/3/27 | Bin 816 -> 0 bytes fixture/13/3/28 | Bin 816 -> 0 bytes fixture/13/3/29 | Bin 816 -> 0 bytes fixture/13/3/3 | Bin 816 -> 0 bytes fixture/13/3/30 | Bin 816 -> 0 bytes fixture/13/3/31 | Bin 816 -> 0 bytes fixture/13/3/32 | Bin 816 -> 0 bytes fixture/13/3/33 | Bin 816 -> 0 bytes fixture/13/3/34 | Bin 816 -> 0 bytes fixture/13/3/35 | Bin 816 -> 0 bytes fixture/13/3/36 | Bin 816 -> 0 bytes fixture/13/3/37 | Bin 816 -> 0 bytes fixture/13/3/38 | Bin 816 -> 0 bytes fixture/13/3/39 | Bin 816 -> 0 bytes fixture/13/3/4 | Bin 816 -> 0 bytes fixture/13/3/40 | Bin 816 -> 0 bytes fixture/13/3/41 | Bin 816 -> 0 bytes fixture/13/3/42 | Bin 816 -> 0 bytes fixture/13/3/43 | Bin 816 -> 0 bytes fixture/13/3/44 | Bin 396 -> 0 bytes fixture/13/3/5 | Bin 816 -> 0 bytes fixture/13/3/6 | Bin 816 -> 0 bytes fixture/13/3/7 | Bin 816 -> 0 bytes fixture/13/3/8 | Bin 816 -> 0 bytes fixture/13/3/9 | Bin 816 -> 0 bytes fixture/13/4/.zarray | 19 ----- fixture/13/4/.zattrs | 1 - fixture/13/4/0 | Bin 816 -> 0 bytes fixture/13/4/1 | Bin 816 -> 0 bytes fixture/13/4/10 | Bin 806 -> 0 bytes fixture/13/4/11 | Bin 816 -> 0 bytes fixture/13/4/12 | Bin 816 -> 0 bytes fixture/13/4/13 | Bin 806 -> 0 bytes fixture/13/4/14 | Bin 816 -> 0 bytes fixture/13/4/15 | Bin 816 -> 0 bytes fixture/13/4/16 | Bin 816 -> 0 bytes fixture/13/4/17 | Bin 798 -> 0 bytes fixture/13/4/18 | Bin 799 -> 0 bytes fixture/13/4/19 | Bin 816 -> 0 bytes fixture/13/4/2 | Bin 816 -> 0 bytes fixture/13/4/20 | Bin 805 -> 0 bytes fixture/13/4/21 | Bin 816 -> 0 bytes fixture/13/4/22 | Bin 816 -> 0 bytes fixture/13/4/23 | Bin 816 -> 0 bytes fixture/13/4/24 | Bin 816 -> 0 bytes fixture/13/4/25 | Bin 805 -> 0 bytes fixture/13/4/26 | Bin 806 -> 0 bytes fixture/13/4/27 | Bin 816 -> 0 bytes fixture/13/4/28 | Bin 807 -> 0 bytes fixture/13/4/29 | Bin 816 -> 0 bytes fixture/13/4/3 | Bin 816 -> 0 bytes fixture/13/4/30 | Bin 816 -> 0 bytes fixture/13/4/31 | Bin 816 -> 0 bytes fixture/13/4/32 | Bin 816 -> 0 bytes fixture/13/4/33 | Bin 816 -> 0 bytes fixture/13/4/34 | Bin 816 -> 0 bytes fixture/13/4/35 | Bin 
803 -> 0 bytes
 fixture/13/4/36 | Bin 816 -> 0 bytes
 [diffstat condensed: the remaining fixture arrays are deleted wholesale — fixture/13/4 through fixture/13/6 (chunk keys 0–44), fixture/14 through fixture/17 (arrays 0–6, chunk keys 0–55), and the dot-separated fixture/18 arrays (chunk keys 0.0–19.3); each array loses its .zarray (14–21 lines), its .zattrs (1 line), and every chunk file (Bin 38–1600 -> 0 bytes, or 1–4 deleted lines for the text chunks)]
 fixture/18/3/1.0 | Bin 712 -> 0 bytes
 fixture/18/3/1.1 | Bin 713 -> 0 bytes
 fixture/18/3/1.2 | Bin 712 -> 0 bytes
fixture/18/3/1.3 | Bin 165 -> 0 bytes fixture/18/3/10.0 | Bin 726 -> 0 bytes fixture/18/3/10.1 | Bin 710 -> 0 bytes fixture/18/3/10.2 | Bin 710 -> 0 bytes fixture/18/3/10.3 | Bin 163 -> 0 bytes fixture/18/3/11.0 | Bin 712 -> 0 bytes fixture/18/3/11.1 | Bin 710 -> 0 bytes fixture/18/3/11.2 | Bin 710 -> 0 bytes fixture/18/3/11.3 | Bin 163 -> 0 bytes fixture/18/3/12.0 | Bin 714 -> 0 bytes fixture/18/3/12.1 | Bin 712 -> 0 bytes fixture/18/3/12.2 | Bin 714 -> 0 bytes fixture/18/3/12.3 | Bin 165 -> 0 bytes fixture/18/3/13.0 | Bin 716 -> 0 bytes fixture/18/3/13.1 | Bin 717 -> 0 bytes fixture/18/3/13.2 | Bin 715 -> 0 bytes fixture/18/3/13.3 | Bin 165 -> 0 bytes fixture/18/3/14.0 | Bin 717 -> 0 bytes fixture/18/3/14.1 | Bin 719 -> 0 bytes fixture/18/3/14.2 | Bin 717 -> 0 bytes fixture/18/3/14.3 | Bin 165 -> 0 bytes fixture/18/3/15.0 | Bin 725 -> 0 bytes fixture/18/3/15.1 | Bin 719 -> 0 bytes fixture/18/3/15.2 | Bin 720 -> 0 bytes fixture/18/3/15.3 | Bin 165 -> 0 bytes fixture/18/3/16.0 | Bin 719 -> 0 bytes fixture/18/3/16.1 | Bin 729 -> 0 bytes fixture/18/3/16.2 | Bin 721 -> 0 bytes fixture/18/3/16.3 | Bin 165 -> 0 bytes fixture/18/3/17.0 | Bin 717 -> 0 bytes fixture/18/3/17.1 | Bin 718 -> 0 bytes fixture/18/3/17.2 | Bin 718 -> 0 bytes fixture/18/3/17.3 | Bin 165 -> 0 bytes fixture/18/3/18.0 | Bin 716 -> 0 bytes fixture/18/3/18.1 | Bin 725 -> 0 bytes fixture/18/3/18.2 | Bin 714 -> 0 bytes fixture/18/3/18.3 | Bin 165 -> 0 bytes fixture/18/3/19.0 | Bin 713 -> 0 bytes fixture/18/3/19.1 | Bin 724 -> 0 bytes fixture/18/3/19.2 | Bin 713 -> 0 bytes fixture/18/3/19.3 | Bin 165 -> 0 bytes fixture/18/3/2.0 | Bin 715 -> 0 bytes fixture/18/3/2.1 | Bin 716 -> 0 bytes fixture/18/3/2.2 | Bin 713 -> 0 bytes fixture/18/3/2.3 | Bin 165 -> 0 bytes fixture/18/3/3.0 | Bin 718 -> 0 bytes fixture/18/3/3.1 | Bin 717 -> 0 bytes fixture/18/3/3.2 | Bin 717 -> 0 bytes fixture/18/3/3.3 | Bin 165 -> 0 bytes fixture/18/3/4.0 | Bin 719 -> 0 bytes fixture/18/3/4.1 | Bin 719 -> 0 bytes fixture/18/3/4.2 | Bin 718 -> 0 bytes fixture/18/3/4.3 | Bin 165 -> 0 bytes fixture/18/3/5.0 | Bin 724 -> 0 bytes fixture/18/3/5.1 | Bin 721 -> 0 bytes fixture/18/3/5.2 | Bin 730 -> 0 bytes fixture/18/3/5.3 | Bin 165 -> 0 bytes fixture/18/3/6.0 | Bin 718 -> 0 bytes fixture/18/3/6.1 | Bin 720 -> 0 bytes fixture/18/3/6.2 | Bin 717 -> 0 bytes fixture/18/3/6.3 | Bin 165 -> 0 bytes fixture/18/3/7.0 | Bin 716 -> 0 bytes fixture/18/3/7.1 | Bin 716 -> 0 bytes fixture/18/3/7.2 | Bin 716 -> 0 bytes fixture/18/3/7.3 | Bin 165 -> 0 bytes fixture/18/3/8.0 | Bin 713 -> 0 bytes fixture/18/3/8.1 | Bin 715 -> 0 bytes fixture/18/3/8.2 | Bin 714 -> 0 bytes fixture/18/3/8.3 | Bin 165 -> 0 bytes fixture/18/3/9.0 | Bin 711 -> 0 bytes fixture/18/3/9.1 | Bin 713 -> 0 bytes fixture/18/3/9.2 | Bin 710 -> 0 bytes fixture/18/3/9.3 | Bin 165 -> 0 bytes fixture/18/4/.zarray | 21 ----- fixture/18/4/.zattrs | 1 - fixture/18/4/0.0 | Bin 356 -> 0 bytes fixture/18/4/0.1 | Bin 356 -> 0 bytes fixture/18/4/0.2 | Bin 356 -> 0 bytes fixture/18/4/0.3 | Bin 236 -> 0 bytes fixture/18/4/1.0 | Bin 358 -> 0 bytes fixture/18/4/1.1 | Bin 358 -> 0 bytes fixture/18/4/1.2 | Bin 358 -> 0 bytes fixture/18/4/1.3 | Bin 240 -> 0 bytes fixture/18/4/10.0 | Bin 356 -> 0 bytes fixture/18/4/10.1 | Bin 356 -> 0 bytes fixture/18/4/10.2 | Bin 356 -> 0 bytes fixture/18/4/10.3 | Bin 238 -> 0 bytes fixture/18/4/11.0 | Bin 359 -> 0 bytes fixture/18/4/11.1 | Bin 359 -> 0 bytes fixture/18/4/11.2 | Bin 358 -> 0 bytes fixture/18/4/11.3 | Bin 238 -> 0 bytes fixture/18/4/12.0 | Bin 358 -> 0 bytes fixture/18/4/12.1 | Bin 
358 -> 0 bytes fixture/18/4/12.2 | Bin 358 -> 0 bytes fixture/18/4/12.3 | Bin 242 -> 0 bytes fixture/18/4/13.0 | Bin 358 -> 0 bytes fixture/18/4/13.1 | Bin 358 -> 0 bytes fixture/18/4/13.2 | Bin 358 -> 0 bytes fixture/18/4/13.3 | Bin 242 -> 0 bytes fixture/18/4/14.0 | Bin 358 -> 0 bytes fixture/18/4/14.1 | Bin 358 -> 0 bytes fixture/18/4/14.2 | Bin 358 -> 0 bytes fixture/18/4/14.3 | Bin 242 -> 0 bytes fixture/18/4/15.0 | Bin 357 -> 0 bytes fixture/18/4/15.1 | Bin 357 -> 0 bytes fixture/18/4/15.2 | Bin 357 -> 0 bytes fixture/18/4/15.3 | Bin 241 -> 0 bytes fixture/18/4/16.0 | Bin 357 -> 0 bytes fixture/18/4/16.1 | Bin 357 -> 0 bytes fixture/18/4/16.2 | Bin 357 -> 0 bytes fixture/18/4/16.3 | Bin 243 -> 0 bytes fixture/18/4/17.0 | Bin 358 -> 0 bytes fixture/18/4/17.1 | Bin 358 -> 0 bytes fixture/18/4/17.2 | Bin 358 -> 0 bytes fixture/18/4/17.3 | Bin 243 -> 0 bytes fixture/18/4/18.0 | Bin 358 -> 0 bytes fixture/18/4/18.1 | Bin 358 -> 0 bytes fixture/18/4/18.2 | Bin 358 -> 0 bytes fixture/18/4/18.3 | Bin 241 -> 0 bytes fixture/18/4/19.0 | Bin 358 -> 0 bytes fixture/18/4/19.1 | Bin 358 -> 0 bytes fixture/18/4/19.2 | Bin 358 -> 0 bytes fixture/18/4/19.3 | Bin 243 -> 0 bytes fixture/18/4/2.0 | Bin 358 -> 0 bytes fixture/18/4/2.1 | Bin 358 -> 0 bytes fixture/18/4/2.2 | Bin 358 -> 0 bytes fixture/18/4/2.3 | Bin 242 -> 0 bytes fixture/18/4/3.0 | Bin 358 -> 0 bytes fixture/18/4/3.1 | Bin 358 -> 0 bytes fixture/18/4/3.2 | Bin 358 -> 0 bytes fixture/18/4/3.3 | Bin 242 -> 0 bytes fixture/18/4/4.0 | Bin 357 -> 0 bytes fixture/18/4/4.1 | Bin 357 -> 0 bytes fixture/18/4/4.2 | Bin 357 -> 0 bytes fixture/18/4/4.3 | Bin 241 -> 0 bytes fixture/18/4/5.0 | Bin 357 -> 0 bytes fixture/18/4/5.1 | Bin 357 -> 0 bytes fixture/18/4/5.2 | Bin 357 -> 0 bytes fixture/18/4/5.3 | Bin 241 -> 0 bytes fixture/18/4/6.0 | Bin 358 -> 0 bytes fixture/18/4/6.1 | Bin 357 -> 0 bytes fixture/18/4/6.2 | Bin 357 -> 0 bytes fixture/18/4/6.3 | Bin 242 -> 0 bytes fixture/18/4/7.0 | Bin 358 -> 0 bytes fixture/18/4/7.1 | Bin 358 -> 0 bytes fixture/18/4/7.2 | Bin 358 -> 0 bytes fixture/18/4/7.3 | Bin 242 -> 0 bytes fixture/18/4/8.0 | Bin 358 -> 0 bytes fixture/18/4/8.1 | Bin 358 -> 0 bytes fixture/18/4/8.2 | Bin 358 -> 0 bytes fixture/18/4/8.3 | Bin 242 -> 0 bytes fixture/18/4/9.0 | Bin 358 -> 0 bytes fixture/18/4/9.1 | Bin 357 -> 0 bytes fixture/18/4/9.2 | Bin 357 -> 0 bytes fixture/18/4/9.3 | Bin 241 -> 0 bytes fixture/18/5/.zarray | 21 ----- fixture/18/5/.zattrs | 1 - fixture/18/5/0.0 | Bin 675 -> 0 bytes fixture/18/5/0.1 | Bin 678 -> 0 bytes fixture/18/5/0.2 | Bin 677 -> 0 bytes fixture/18/5/0.3 | Bin 203 -> 0 bytes fixture/18/5/1.0 | Bin 712 -> 0 bytes fixture/18/5/1.1 | Bin 713 -> 0 bytes fixture/18/5/1.2 | Bin 712 -> 0 bytes fixture/18/5/1.3 | Bin 165 -> 0 bytes fixture/18/5/10.0 | Bin 726 -> 0 bytes fixture/18/5/10.1 | Bin 710 -> 0 bytes fixture/18/5/10.2 | Bin 710 -> 0 bytes fixture/18/5/10.3 | Bin 163 -> 0 bytes fixture/18/5/11.0 | Bin 712 -> 0 bytes fixture/18/5/11.1 | Bin 710 -> 0 bytes fixture/18/5/11.2 | Bin 710 -> 0 bytes fixture/18/5/11.3 | Bin 163 -> 0 bytes fixture/18/5/12.0 | Bin 714 -> 0 bytes fixture/18/5/12.1 | Bin 712 -> 0 bytes fixture/18/5/12.2 | Bin 714 -> 0 bytes fixture/18/5/12.3 | Bin 165 -> 0 bytes fixture/18/5/13.0 | Bin 716 -> 0 bytes fixture/18/5/13.1 | Bin 717 -> 0 bytes fixture/18/5/13.2 | Bin 715 -> 0 bytes fixture/18/5/13.3 | Bin 165 -> 0 bytes fixture/18/5/14.0 | Bin 717 -> 0 bytes fixture/18/5/14.1 | Bin 719 -> 0 bytes fixture/18/5/14.2 | Bin 717 -> 0 bytes fixture/18/5/14.3 | Bin 165 -> 0 bytes 
fixture/18/5/15.0 | Bin 725 -> 0 bytes fixture/18/5/15.1 | Bin 719 -> 0 bytes fixture/18/5/15.2 | Bin 720 -> 0 bytes fixture/18/5/15.3 | Bin 165 -> 0 bytes fixture/18/5/16.0 | Bin 719 -> 0 bytes fixture/18/5/16.1 | Bin 729 -> 0 bytes fixture/18/5/16.2 | Bin 721 -> 0 bytes fixture/18/5/16.3 | Bin 165 -> 0 bytes fixture/18/5/17.0 | Bin 717 -> 0 bytes fixture/18/5/17.1 | Bin 718 -> 0 bytes fixture/18/5/17.2 | Bin 718 -> 0 bytes fixture/18/5/17.3 | Bin 165 -> 0 bytes fixture/18/5/18.0 | Bin 716 -> 0 bytes fixture/18/5/18.1 | Bin 725 -> 0 bytes fixture/18/5/18.2 | Bin 714 -> 0 bytes fixture/18/5/18.3 | Bin 165 -> 0 bytes fixture/18/5/19.0 | Bin 713 -> 0 bytes fixture/18/5/19.1 | Bin 724 -> 0 bytes fixture/18/5/19.2 | Bin 713 -> 0 bytes fixture/18/5/19.3 | Bin 165 -> 0 bytes fixture/18/5/2.0 | Bin 715 -> 0 bytes fixture/18/5/2.1 | Bin 716 -> 0 bytes fixture/18/5/2.2 | Bin 713 -> 0 bytes fixture/18/5/2.3 | Bin 165 -> 0 bytes fixture/18/5/3.0 | Bin 718 -> 0 bytes fixture/18/5/3.1 | Bin 717 -> 0 bytes fixture/18/5/3.2 | Bin 717 -> 0 bytes fixture/18/5/3.3 | Bin 165 -> 0 bytes fixture/18/5/4.0 | Bin 719 -> 0 bytes fixture/18/5/4.1 | Bin 719 -> 0 bytes fixture/18/5/4.2 | Bin 718 -> 0 bytes fixture/18/5/4.3 | Bin 165 -> 0 bytes fixture/18/5/5.0 | Bin 724 -> 0 bytes fixture/18/5/5.1 | Bin 721 -> 0 bytes fixture/18/5/5.2 | Bin 730 -> 0 bytes fixture/18/5/5.3 | Bin 165 -> 0 bytes fixture/18/5/6.0 | Bin 718 -> 0 bytes fixture/18/5/6.1 | Bin 720 -> 0 bytes fixture/18/5/6.2 | Bin 717 -> 0 bytes fixture/18/5/6.3 | Bin 165 -> 0 bytes fixture/18/5/7.0 | Bin 716 -> 0 bytes fixture/18/5/7.1 | Bin 716 -> 0 bytes fixture/18/5/7.2 | Bin 716 -> 0 bytes fixture/18/5/7.3 | Bin 165 -> 0 bytes fixture/18/5/8.0 | Bin 713 -> 0 bytes fixture/18/5/8.1 | Bin 715 -> 0 bytes fixture/18/5/8.2 | Bin 714 -> 0 bytes fixture/18/5/8.3 | Bin 165 -> 0 bytes fixture/18/5/9.0 | Bin 711 -> 0 bytes fixture/18/5/9.1 | Bin 713 -> 0 bytes fixture/18/5/9.2 | Bin 710 -> 0 bytes fixture/18/5/9.3 | Bin 165 -> 0 bytes fixture/18/6/.zarray | 21 ----- fixture/18/6/.zattrs | 1 - fixture/18/6/0.0 | Bin 1216 -> 0 bytes fixture/18/6/0.1 | Bin 1216 -> 0 bytes fixture/18/6/0.2 | Bin 1216 -> 0 bytes fixture/18/6/0.3 | Bin 438 -> 0 bytes fixture/18/6/1.0 | Bin 1216 -> 0 bytes fixture/18/6/1.1 | Bin 1216 -> 0 bytes fixture/18/6/1.2 | Bin 1216 -> 0 bytes fixture/18/6/1.3 | Bin 496 -> 0 bytes fixture/18/6/10.0 | Bin 1216 -> 0 bytes fixture/18/6/10.1 | Bin 1216 -> 0 bytes fixture/18/6/10.2 | Bin 1216 -> 0 bytes fixture/18/6/10.3 | Bin 492 -> 0 bytes fixture/18/6/11.0 | Bin 1216 -> 0 bytes fixture/18/6/11.1 | Bin 1216 -> 0 bytes fixture/18/6/11.2 | Bin 1216 -> 0 bytes fixture/18/6/11.3 | Bin 492 -> 0 bytes fixture/18/6/12.0 | Bin 1216 -> 0 bytes fixture/18/6/12.1 | Bin 1216 -> 0 bytes fixture/18/6/12.2 | Bin 1216 -> 0 bytes fixture/18/6/12.3 | Bin 496 -> 0 bytes fixture/18/6/13.0 | Bin 1216 -> 0 bytes fixture/18/6/13.1 | Bin 1216 -> 0 bytes fixture/18/6/13.2 | Bin 1216 -> 0 bytes fixture/18/6/13.3 | Bin 536 -> 0 bytes fixture/18/6/14.0 | Bin 1216 -> 0 bytes fixture/18/6/14.1 | Bin 1216 -> 0 bytes fixture/18/6/14.2 | Bin 1216 -> 0 bytes fixture/18/6/14.3 | Bin 496 -> 0 bytes fixture/18/6/15.0 | Bin 1216 -> 0 bytes fixture/18/6/15.1 | Bin 1216 -> 0 bytes fixture/18/6/15.2 | Bin 1216 -> 0 bytes fixture/18/6/15.3 | Bin 496 -> 0 bytes fixture/18/6/16.0 | Bin 1216 -> 0 bytes fixture/18/6/16.1 | Bin 1216 -> 0 bytes fixture/18/6/16.2 | Bin 1216 -> 0 bytes fixture/18/6/16.3 | Bin 496 -> 0 bytes fixture/18/6/17.0 | Bin 1216 -> 0 bytes fixture/18/6/17.1 | Bin 1216 -> 0 
bytes fixture/18/6/17.2 | Bin 1216 -> 0 bytes fixture/18/6/17.3 | Bin 496 -> 0 bytes fixture/18/6/18.0 | Bin 1216 -> 0 bytes fixture/18/6/18.1 | Bin 1216 -> 0 bytes fixture/18/6/18.2 | Bin 1216 -> 0 bytes fixture/18/6/18.3 | Bin 496 -> 0 bytes fixture/18/6/19.0 | Bin 1216 -> 0 bytes fixture/18/6/19.1 | Bin 1216 -> 0 bytes fixture/18/6/19.2 | Bin 1216 -> 0 bytes fixture/18/6/19.3 | Bin 496 -> 0 bytes fixture/18/6/2.0 | Bin 1216 -> 0 bytes fixture/18/6/2.1 | Bin 1216 -> 0 bytes fixture/18/6/2.2 | Bin 1216 -> 0 bytes fixture/18/6/2.3 | Bin 496 -> 0 bytes fixture/18/6/3.0 | Bin 1216 -> 0 bytes fixture/18/6/3.1 | Bin 1216 -> 0 bytes fixture/18/6/3.2 | Bin 1216 -> 0 bytes fixture/18/6/3.3 | Bin 496 -> 0 bytes fixture/18/6/4.0 | Bin 1216 -> 0 bytes fixture/18/6/4.1 | Bin 1216 -> 0 bytes fixture/18/6/4.2 | Bin 1216 -> 0 bytes fixture/18/6/4.3 | Bin 496 -> 0 bytes fixture/18/6/5.0 | Bin 1216 -> 0 bytes fixture/18/6/5.1 | Bin 1216 -> 0 bytes fixture/18/6/5.2 | Bin 1216 -> 0 bytes fixture/18/6/5.3 | Bin 496 -> 0 bytes fixture/18/6/6.0 | Bin 1216 -> 0 bytes fixture/18/6/6.1 | Bin 1216 -> 0 bytes fixture/18/6/6.2 | Bin 1216 -> 0 bytes fixture/18/6/6.3 | Bin 496 -> 0 bytes fixture/18/6/7.0 | Bin 1216 -> 0 bytes fixture/18/6/7.1 | Bin 1216 -> 0 bytes fixture/18/6/7.2 | Bin 1216 -> 0 bytes fixture/18/6/7.3 | Bin 496 -> 0 bytes fixture/18/6/8.0 | Bin 1216 -> 0 bytes fixture/18/6/8.1 | Bin 1216 -> 0 bytes fixture/18/6/8.2 | Bin 1216 -> 0 bytes fixture/18/6/8.3 | Bin 496 -> 0 bytes fixture/18/6/9.0 | Bin 1216 -> 0 bytes fixture/18/6/9.1 | Bin 1216 -> 0 bytes fixture/18/6/9.2 | Bin 1216 -> 0 bytes fixture/18/6/9.3 | Bin 496 -> 0 bytes fixture/19/.zattrs | 1 - fixture/19/0/.zarray | 16 ---- fixture/19/0/.zattrs | 1 - fixture/19/0/0.0 | Bin 12000 -> 0 bytes fixture/19/0/0.1 | Bin 12000 -> 0 bytes fixture/19/0/0.2 | Bin 12000 -> 0 bytes fixture/19/0/0.3 | Bin 12000 -> 0 bytes fixture/19/0/1.0 | Bin 12000 -> 0 bytes fixture/19/0/1.1 | Bin 12000 -> 0 bytes fixture/19/0/1.2 | Bin 12000 -> 0 bytes fixture/19/0/1.3 | Bin 12000 -> 0 bytes fixture/19/1/.zarray | 19 ----- fixture/19/1/.zattrs | 1 - fixture/19/1/0.0 | Bin 4208 -> 0 bytes fixture/19/1/0.1 | Bin 4200 -> 0 bytes fixture/19/1/0.2 | Bin 4198 -> 0 bytes fixture/19/1/0.3 | Bin 1511 -> 0 bytes fixture/19/1/1.0 | Bin 4200 -> 0 bytes fixture/19/1/1.1 | Bin 4200 -> 0 bytes fixture/19/1/1.2 | Bin 4200 -> 0 bytes fixture/19/1/1.3 | Bin 1515 -> 0 bytes fixture/19/2/.zarray | 19 ----- fixture/19/2/.zattrs | 1 - fixture/19/2/0.0 | Bin 2168 -> 0 bytes fixture/19/2/0.1 | Bin 2349 -> 0 bytes fixture/19/2/0.2 | Bin 2210 -> 0 bytes fixture/19/2/0.3 | Bin 1095 -> 0 bytes fixture/19/2/1.0 | Bin 2342 -> 0 bytes fixture/19/2/1.1 | Bin 2173 -> 0 bytes fixture/19/2/1.2 | Bin 2291 -> 0 bytes fixture/19/2/1.3 | Bin 1150 -> 0 bytes fixture/19/3/.zarray | 21 ----- fixture/19/3/.zattrs | 1 - fixture/19/3/0.0 | Bin 6719 -> 0 bytes fixture/19/3/0.1 | Bin 6864 -> 0 bytes fixture/19/3/0.2 | Bin 6860 -> 0 bytes fixture/19/3/0.3 | Bin 2178 -> 0 bytes fixture/19/3/1.0 | Bin 6777 -> 0 bytes fixture/19/3/1.1 | Bin 6865 -> 0 bytes fixture/19/3/1.2 | Bin 6853 -> 0 bytes fixture/19/3/1.3 | Bin 2182 -> 0 bytes fixture/19/4/.zarray | 21 ----- fixture/19/4/.zattrs | 1 - fixture/19/4/0.0 | Bin 426 -> 0 bytes fixture/19/4/0.1 | Bin 426 -> 0 bytes fixture/19/4/0.2 | Bin 426 -> 0 bytes fixture/19/4/0.3 | Bin 374 -> 0 bytes fixture/19/4/1.0 | Bin 426 -> 0 bytes fixture/19/4/1.1 | Bin 426 -> 0 bytes fixture/19/4/1.2 | Bin 426 -> 0 bytes fixture/19/4/1.3 | Bin 377 -> 0 bytes fixture/19/5/.zarray | 21 ----- 
fixture/19/5/.zattrs | 1 - fixture/19/5/0.0 | Bin 381 -> 0 bytes fixture/19/5/0.1 | Bin 360 -> 0 bytes fixture/19/5/0.2 | Bin 400 -> 0 bytes fixture/19/5/0.3 | Bin 298 -> 0 bytes fixture/19/5/1.0 | Bin 377 -> 0 bytes fixture/19/5/1.1 | Bin 389 -> 0 bytes fixture/19/5/1.2 | Bin 396 -> 0 bytes fixture/19/5/1.3 | Bin 282 -> 0 bytes fixture/19/6/.zarray | 21 ----- fixture/19/6/.zattrs | 1 - fixture/19/6/0.0 | Bin 12016 -> 0 bytes fixture/19/6/0.1 | Bin 12016 -> 0 bytes fixture/19/6/0.2 | Bin 12016 -> 0 bytes fixture/19/6/0.3 | Bin 4093 -> 0 bytes fixture/19/6/1.0 | Bin 12016 -> 0 bytes fixture/19/6/1.1 | Bin 12016 -> 0 bytes fixture/19/6/1.2 | Bin 12016 -> 0 bytes fixture/19/6/1.3 | Bin 4093 -> 0 bytes fixture/2/.zattrs | 1 - fixture/2/0/.zarray | 14 ---- fixture/2/0/.zattrs | 1 - fixture/2/0/0 | Bin 400 -> 0 bytes fixture/2/0/1 | Bin 400 -> 0 bytes fixture/2/0/10 | Bin 400 -> 0 bytes fixture/2/0/11 | Bin 400 -> 0 bytes fixture/2/0/2 | Bin 400 -> 0 bytes fixture/2/0/3 | Bin 400 -> 0 bytes fixture/2/0/4 | Bin 400 -> 0 bytes fixture/2/0/5 | Bin 400 -> 0 bytes fixture/2/0/6 | Bin 400 -> 0 bytes fixture/2/0/7 | Bin 400 -> 0 bytes fixture/2/0/8 | Bin 400 -> 0 bytes fixture/2/0/9 | Bin 400 -> 0 bytes fixture/2/1/.zarray | 17 ---- fixture/2/1/.zattrs | 1 - fixture/2/1/0 | Bin 163 -> 0 bytes fixture/2/1/1 | Bin 163 -> 0 bytes fixture/2/1/10 | Bin 164 -> 0 bytes fixture/2/1/11 | Bin 41 -> 0 bytes fixture/2/1/2 | Bin 163 -> 0 bytes fixture/2/1/3 | Bin 163 -> 0 bytes fixture/2/1/4 | Bin 164 -> 0 bytes fixture/2/1/5 | Bin 164 -> 0 bytes fixture/2/1/6 | Bin 164 -> 0 bytes fixture/2/1/7 | Bin 164 -> 0 bytes fixture/2/1/8 | Bin 164 -> 0 bytes fixture/2/1/9 | Bin 164 -> 0 bytes fixture/2/2/.zarray | 17 ---- fixture/2/2/.zattrs | 1 - fixture/2/2/0 | Bin 181 -> 0 bytes fixture/2/2/1 | Bin 179 -> 0 bytes fixture/2/2/10 | Bin 187 -> 0 bytes fixture/2/2/11 | Bin 63 -> 0 bytes fixture/2/2/2 | Bin 185 -> 0 bytes fixture/2/2/3 | Bin 181 -> 0 bytes fixture/2/2/4 | Bin 181 -> 0 bytes fixture/2/2/5 | Bin 187 -> 0 bytes fixture/2/2/6 | Bin 181 -> 0 bytes fixture/2/2/7 | Bin 180 -> 0 bytes fixture/2/2/8 | Bin 181 -> 0 bytes fixture/2/2/9 | Bin 181 -> 0 bytes fixture/2/3/.zarray | 19 ----- fixture/2/3/.zattrs | 1 - fixture/2/3/0 | Bin 198 -> 0 bytes fixture/2/3/1 | Bin 224 -> 0 bytes fixture/2/3/10 | Bin 262 -> 0 bytes fixture/2/3/11 | Bin 85 -> 0 bytes fixture/2/3/2 | Bin 236 -> 0 bytes fixture/2/3/3 | Bin 240 -> 0 bytes fixture/2/3/4 | Bin 255 -> 0 bytes fixture/2/3/5 | Bin 258 -> 0 bytes fixture/2/3/6 | Bin 248 -> 0 bytes fixture/2/3/7 | Bin 265 -> 0 bytes fixture/2/3/8 | Bin 237 -> 0 bytes fixture/2/3/9 | Bin 254 -> 0 bytes fixture/2/4/.zarray | 19 ----- fixture/2/4/.zattrs | 1 - fixture/2/4/0 | Bin 143 -> 0 bytes fixture/2/4/1 | Bin 143 -> 0 bytes fixture/2/4/10 | Bin 149 -> 0 bytes fixture/2/4/11 | Bin 58 -> 0 bytes fixture/2/4/2 | Bin 149 -> 0 bytes fixture/2/4/3 | Bin 147 -> 0 bytes fixture/2/4/4 | Bin 147 -> 0 bytes fixture/2/4/5 | Bin 149 -> 0 bytes fixture/2/4/6 | Bin 147 -> 0 bytes fixture/2/4/7 | Bin 149 -> 0 bytes fixture/2/4/8 | Bin 147 -> 0 bytes fixture/2/4/9 | Bin 147 -> 0 bytes fixture/2/5/.zarray | 19 ----- fixture/2/5/.zattrs | 1 - fixture/2/5/0 | Bin 198 -> 0 bytes fixture/2/5/1 | Bin 224 -> 0 bytes fixture/2/5/10 | Bin 262 -> 0 bytes fixture/2/5/11 | Bin 85 -> 0 bytes fixture/2/5/2 | Bin 236 -> 0 bytes fixture/2/5/3 | Bin 240 -> 0 bytes fixture/2/5/4 | Bin 255 -> 0 bytes fixture/2/5/5 | Bin 258 -> 0 bytes fixture/2/5/6 | Bin 248 -> 0 bytes fixture/2/5/7 | Bin 265 -> 0 bytes fixture/2/5/8 | Bin 237 -> 
0 bytes fixture/2/5/9 | Bin 254 -> 0 bytes fixture/2/6/.zarray | 19 ----- fixture/2/6/.zattrs | 1 - fixture/2/6/0 | Bin 416 -> 0 bytes fixture/2/6/1 | Bin 416 -> 0 bytes fixture/2/6/10 | Bin 416 -> 0 bytes fixture/2/6/11 | Bin 87 -> 0 bytes fixture/2/6/2 | Bin 416 -> 0 bytes fixture/2/6/3 | Bin 416 -> 0 bytes fixture/2/6/4 | Bin 416 -> 0 bytes fixture/2/6/5 | Bin 416 -> 0 bytes fixture/2/6/6 | Bin 416 -> 0 bytes fixture/2/6/7 | Bin 416 -> 0 bytes fixture/2/6/8 | Bin 416 -> 0 bytes fixture/2/6/9 | Bin 416 -> 0 bytes fixture/20/.zattrs | 1 - fixture/20/0/.zarray | 18 ---- fixture/20/0/.zattrs | 1 - fixture/20/0/0.0.0 | Bin 3600 -> 0 bytes fixture/20/0/0.0.1 | Bin 3600 -> 0 bytes fixture/20/0/0.0.2 | Bin 3600 -> 0 bytes fixture/20/0/0.0.3 | Bin 3600 -> 0 bytes fixture/20/0/0.1.0 | Bin 3600 -> 0 bytes fixture/20/0/0.1.1 | Bin 3600 -> 0 bytes fixture/20/0/0.1.2 | Bin 3600 -> 0 bytes fixture/20/0/0.1.3 | Bin 3600 -> 0 bytes fixture/20/0/0.2.0 | Bin 3600 -> 0 bytes fixture/20/0/0.2.1 | Bin 3600 -> 0 bytes fixture/20/0/0.2.2 | Bin 3600 -> 0 bytes fixture/20/0/0.2.3 | Bin 3600 -> 0 bytes fixture/20/0/0.3.0 | Bin 3600 -> 0 bytes fixture/20/0/0.3.1 | Bin 3600 -> 0 bytes fixture/20/0/0.3.2 | Bin 3600 -> 0 bytes fixture/20/0/0.3.3 | Bin 3600 -> 0 bytes fixture/20/0/1.0.0 | Bin 3600 -> 0 bytes fixture/20/0/1.0.1 | Bin 3600 -> 0 bytes fixture/20/0/1.0.2 | Bin 3600 -> 0 bytes fixture/20/0/1.0.3 | Bin 3600 -> 0 bytes fixture/20/0/1.1.0 | Bin 3600 -> 0 bytes fixture/20/0/1.1.1 | Bin 3600 -> 0 bytes fixture/20/0/1.1.2 | Bin 3600 -> 0 bytes fixture/20/0/1.1.3 | Bin 3600 -> 0 bytes fixture/20/0/1.2.0 | Bin 3600 -> 0 bytes fixture/20/0/1.2.1 | Bin 3600 -> 0 bytes fixture/20/0/1.2.2 | Bin 3600 -> 0 bytes fixture/20/0/1.2.3 | Bin 3600 -> 0 bytes fixture/20/0/1.3.0 | Bin 3600 -> 0 bytes fixture/20/0/1.3.1 | Bin 3600 -> 0 bytes fixture/20/0/1.3.2 | Bin 3600 -> 0 bytes fixture/20/0/1.3.3 | Bin 3600 -> 0 bytes fixture/20/1/.zarray | 21 ----- fixture/20/1/.zattrs | 1 - fixture/20/1/0.0.0 | Bin 1368 -> 0 bytes fixture/20/1/0.0.1 | 9 -- fixture/20/1/0.0.2 | Bin 1370 -> 0 bytes fixture/20/1/0.0.3 | Bin 664 -> 0 bytes fixture/20/1/0.1.0 | 9 -- fixture/20/1/0.1.1 | Bin 1367 -> 0 bytes fixture/20/1/0.1.2 | Bin 1359 -> 0 bytes fixture/20/1/0.1.3 | Bin 659 -> 0 bytes fixture/20/1/0.2.0 | Bin 1363 -> 0 bytes fixture/20/1/0.2.1 | Bin 1365 -> 0 bytes fixture/20/1/0.2.2 | 6 -- fixture/20/1/0.2.3 | Bin 661 -> 0 bytes fixture/20/1/0.3.0 | Bin 744 -> 0 bytes fixture/20/1/0.3.1 | 1 - fixture/20/1/0.3.2 | Bin 737 -> 0 bytes fixture/20/1/0.3.3 | Bin 344 -> 0 bytes fixture/20/1/1.0.0 | 5 -- fixture/20/1/1.0.1 | Bin 1365 -> 0 bytes fixture/20/1/1.0.2 | Bin 1365 -> 0 bytes fixture/20/1/1.0.3 | Bin 660 -> 0 bytes fixture/20/1/1.1.0 | Bin 1366 -> 0 bytes fixture/20/1/1.1.1 | Bin 1364 -> 0 bytes fixture/20/1/1.1.2 | Bin 1362 -> 0 bytes fixture/20/1/1.1.3 | Bin 659 -> 0 bytes fixture/20/1/1.2.0 | Bin 1366 -> 0 bytes fixture/20/1/1.2.1 | 5 -- fixture/20/1/1.2.2 | Bin 1365 -> 0 bytes fixture/20/1/1.2.3 | Bin 662 -> 0 bytes fixture/20/1/1.3.0 | Bin 739 -> 0 bytes fixture/20/1/1.3.1 | Bin 734 -> 0 bytes fixture/20/1/1.3.2 | Bin 733 -> 0 bytes fixture/20/1/1.3.3 | Bin 346 -> 0 bytes fixture/20/2/.zarray | 21 ----- fixture/20/2/.zattrs | 1 - fixture/20/2/0.0.0 | Bin 1508 -> 0 bytes fixture/20/2/0.0.1 | Bin 1493 -> 0 bytes fixture/20/2/0.0.2 | Bin 1552 -> 0 bytes fixture/20/2/0.0.3 | Bin 652 -> 0 bytes fixture/20/2/0.1.0 | Bin 1524 -> 0 bytes fixture/20/2/0.1.1 | Bin 1504 -> 0 bytes fixture/20/2/0.1.2 | Bin 1493 -> 0 bytes fixture/20/2/0.1.3 | Bin 
660 -> 0 bytes fixture/20/2/0.2.0 | Bin 1496 -> 0 bytes fixture/20/2/0.2.1 | Bin 1492 -> 0 bytes fixture/20/2/0.2.2 | Bin 1542 -> 0 bytes fixture/20/2/0.2.3 | Bin 647 -> 0 bytes fixture/20/2/0.3.0 | Bin 728 -> 0 bytes fixture/20/2/0.3.1 | Bin 724 -> 0 bytes fixture/20/2/0.3.2 | Bin 736 -> 0 bytes fixture/20/2/0.3.3 | Bin 288 -> 0 bytes fixture/20/2/1.0.0 | Bin 1512 -> 0 bytes fixture/20/2/1.0.1 | Bin 1496 -> 0 bytes fixture/20/2/1.0.2 | Bin 1486 -> 0 bytes fixture/20/2/1.0.3 | Bin 649 -> 0 bytes fixture/20/2/1.1.0 | Bin 1478 -> 0 bytes fixture/20/2/1.1.1 | Bin 1491 -> 0 bytes fixture/20/2/1.1.2 | Bin 1494 -> 0 bytes fixture/20/2/1.1.3 | Bin 660 -> 0 bytes fixture/20/2/1.2.0 | Bin 1489 -> 0 bytes fixture/20/2/1.2.1 | Bin 1492 -> 0 bytes fixture/20/2/1.2.2 | Bin 1570 -> 0 bytes fixture/20/2/1.2.3 | Bin 651 -> 0 bytes fixture/20/2/1.3.0 | Bin 726 -> 0 bytes fixture/20/2/1.3.1 | Bin 727 -> 0 bytes fixture/20/2/1.3.2 | Bin 723 -> 0 bytes fixture/20/2/1.3.3 | Bin 285 -> 0 bytes fixture/20/3/.zarray | 23 ------ fixture/20/3/.zattrs | 1 - fixture/20/3/0.0.0 | Bin 2166 -> 0 bytes fixture/20/3/0.0.1 | Bin 2167 -> 0 bytes fixture/20/3/0.0.2 | Bin 2164 -> 0 bytes fixture/20/3/0.0.3 | Bin 450 -> 0 bytes fixture/20/3/0.1.0 | Bin 2164 -> 0 bytes fixture/20/3/0.1.1 | Bin 2165 -> 0 bytes fixture/20/3/0.1.2 | Bin 2163 -> 0 bytes fixture/20/3/0.1.3 | Bin 448 -> 0 bytes fixture/20/3/0.2.0 | Bin 2168 -> 0 bytes fixture/20/3/0.2.1 | Bin 2173 -> 0 bytes fixture/20/3/0.2.2 | Bin 2171 -> 0 bytes fixture/20/3/0.2.3 | Bin 446 -> 0 bytes fixture/20/3/0.3.0 | Bin 751 -> 0 bytes fixture/20/3/0.3.1 | Bin 752 -> 0 bytes fixture/20/3/0.3.2 | Bin 751 -> 0 bytes fixture/20/3/0.3.3 | Bin 230 -> 0 bytes fixture/20/3/1.0.0 | Bin 2185 -> 0 bytes fixture/20/3/1.0.1 | Bin 2188 -> 0 bytes fixture/20/3/1.0.2 | Bin 2186 -> 0 bytes fixture/20/3/1.0.3 | Bin 436 -> 0 bytes fixture/20/3/1.1.0 | Bin 2184 -> 0 bytes fixture/20/3/1.1.1 | Bin 2188 -> 0 bytes fixture/20/3/1.1.2 | Bin 2188 -> 0 bytes fixture/20/3/1.1.3 | Bin 436 -> 0 bytes fixture/20/3/1.2.0 | Bin 2188 -> 0 bytes fixture/20/3/1.2.1 | Bin 2188 -> 0 bytes fixture/20/3/1.2.2 | Bin 2187 -> 0 bytes fixture/20/3/1.2.3 | Bin 438 -> 0 bytes fixture/20/3/1.3.0 | Bin 757 -> 0 bytes fixture/20/3/1.3.1 | Bin 757 -> 0 bytes fixture/20/3/1.3.2 | Bin 755 -> 0 bytes fixture/20/3/1.3.3 | Bin 230 -> 0 bytes fixture/20/4/.zarray | 23 ------ fixture/20/4/.zattrs | 1 - fixture/20/4/0.0.0 | Bin 738 -> 0 bytes fixture/20/4/0.0.1 | Bin 738 -> 0 bytes fixture/20/4/0.0.2 | Bin 738 -> 0 bytes fixture/20/4/0.0.3 | Bin 476 -> 0 bytes fixture/20/4/0.1.0 | Bin 738 -> 0 bytes fixture/20/4/0.1.1 | Bin 738 -> 0 bytes fixture/20/4/0.1.2 | Bin 738 -> 0 bytes fixture/20/4/0.1.3 | Bin 477 -> 0 bytes fixture/20/4/0.2.0 | Bin 738 -> 0 bytes fixture/20/4/0.2.1 | Bin 740 -> 0 bytes fixture/20/4/0.2.2 | Bin 741 -> 0 bytes fixture/20/4/0.2.3 | Bin 481 -> 0 bytes fixture/20/4/0.3.0 | Bin 482 -> 0 bytes fixture/20/4/0.3.1 | Bin 475 -> 0 bytes fixture/20/4/0.3.2 | Bin 475 -> 0 bytes fixture/20/4/0.3.3 | Bin 253 -> 0 bytes fixture/20/4/1.0.0 | Bin 738 -> 0 bytes fixture/20/4/1.0.1 | Bin 738 -> 0 bytes fixture/20/4/1.0.2 | Bin 738 -> 0 bytes fixture/20/4/1.0.3 | Bin 479 -> 0 bytes fixture/20/4/1.1.0 | Bin 738 -> 0 bytes fixture/20/4/1.1.1 | Bin 738 -> 0 bytes fixture/20/4/1.1.2 | Bin 738 -> 0 bytes fixture/20/4/1.1.3 | Bin 478 -> 0 bytes fixture/20/4/1.2.0 | Bin 741 -> 0 bytes fixture/20/4/1.2.1 | Bin 741 -> 0 bytes fixture/20/4/1.2.2 | Bin 741 -> 0 bytes fixture/20/4/1.2.3 | Bin 481 -> 0 bytes fixture/20/4/1.3.0 | Bin 484 
-> 0 bytes fixture/20/4/1.3.1 | Bin 479 -> 0 bytes fixture/20/4/1.3.2 | Bin 478 -> 0 bytes fixture/20/4/1.3.3 | Bin 316 -> 0 bytes fixture/20/5/.zarray | 23 ------ fixture/20/5/.zattrs | 1 - fixture/20/5/0.0.0 | Bin 2166 -> 0 bytes fixture/20/5/0.0.1 | Bin 2167 -> 0 bytes fixture/20/5/0.0.2 | Bin 2164 -> 0 bytes fixture/20/5/0.0.3 | Bin 450 -> 0 bytes fixture/20/5/0.1.0 | Bin 2164 -> 0 bytes fixture/20/5/0.1.1 | Bin 2165 -> 0 bytes fixture/20/5/0.1.2 | Bin 2163 -> 0 bytes fixture/20/5/0.1.3 | Bin 448 -> 0 bytes fixture/20/5/0.2.0 | Bin 2168 -> 0 bytes fixture/20/5/0.2.1 | Bin 2173 -> 0 bytes fixture/20/5/0.2.2 | Bin 2171 -> 0 bytes fixture/20/5/0.2.3 | Bin 446 -> 0 bytes fixture/20/5/0.3.0 | Bin 751 -> 0 bytes fixture/20/5/0.3.1 | Bin 752 -> 0 bytes fixture/20/5/0.3.2 | Bin 751 -> 0 bytes fixture/20/5/0.3.3 | Bin 230 -> 0 bytes fixture/20/5/1.0.0 | Bin 2185 -> 0 bytes fixture/20/5/1.0.1 | Bin 2188 -> 0 bytes fixture/20/5/1.0.2 | Bin 2186 -> 0 bytes fixture/20/5/1.0.3 | Bin 436 -> 0 bytes fixture/20/5/1.1.0 | Bin 2184 -> 0 bytes fixture/20/5/1.1.1 | Bin 2188 -> 0 bytes fixture/20/5/1.1.2 | Bin 2188 -> 0 bytes fixture/20/5/1.1.3 | Bin 436 -> 0 bytes fixture/20/5/1.2.0 | Bin 2188 -> 0 bytes fixture/20/5/1.2.1 | Bin 2188 -> 0 bytes fixture/20/5/1.2.2 | Bin 2187 -> 0 bytes fixture/20/5/1.2.3 | Bin 438 -> 0 bytes fixture/20/5/1.3.0 | Bin 757 -> 0 bytes fixture/20/5/1.3.1 | Bin 757 -> 0 bytes fixture/20/5/1.3.2 | Bin 755 -> 0 bytes fixture/20/5/1.3.3 | Bin 230 -> 0 bytes fixture/20/6/.zarray | 23 ------ fixture/20/6/.zattrs | 1 - fixture/20/6/0.0.0 | Bin 3616 -> 0 bytes fixture/20/6/0.0.1 | Bin 3616 -> 0 bytes fixture/20/6/0.0.2 | Bin 3616 -> 0 bytes fixture/20/6/0.0.3 | Bin 1270 -> 0 bytes fixture/20/6/0.1.0 | Bin 3616 -> 0 bytes fixture/20/6/0.1.1 | Bin 3616 -> 0 bytes fixture/20/6/0.1.2 | Bin 3616 -> 0 bytes fixture/20/6/0.1.3 | Bin 1273 -> 0 bytes fixture/20/6/0.2.0 | Bin 3616 -> 0 bytes fixture/20/6/0.2.1 | Bin 3616 -> 0 bytes fixture/20/6/0.2.2 | Bin 3616 -> 0 bytes fixture/20/6/0.2.3 | Bin 1273 -> 0 bytes fixture/20/6/0.3.0 | Bin 1636 -> 0 bytes fixture/20/6/0.3.1 | Bin 1636 -> 0 bytes fixture/20/6/0.3.2 | Bin 1637 -> 0 bytes fixture/20/6/0.3.3 | Bin 693 -> 0 bytes fixture/20/6/1.0.0 | Bin 3616 -> 0 bytes fixture/20/6/1.0.1 | Bin 3616 -> 0 bytes fixture/20/6/1.0.2 | Bin 3616 -> 0 bytes fixture/20/6/1.0.3 | Bin 1475 -> 0 bytes fixture/20/6/1.1.0 | Bin 3616 -> 0 bytes fixture/20/6/1.1.1 | Bin 3616 -> 0 bytes fixture/20/6/1.1.2 | Bin 3616 -> 0 bytes fixture/20/6/1.1.3 | Bin 1432 -> 0 bytes fixture/20/6/1.2.0 | Bin 3616 -> 0 bytes fixture/20/6/1.2.1 | Bin 3616 -> 0 bytes fixture/20/6/1.2.2 | Bin 3616 -> 0 bytes fixture/20/6/1.2.3 | Bin 1436 -> 0 bytes fixture/20/6/1.3.0 | Bin 1638 -> 0 bytes fixture/20/6/1.3.1 | Bin 1638 -> 0 bytes fixture/20/6/1.3.2 | Bin 1635 -> 0 bytes fixture/20/6/1.3.3 | Bin 697 -> 0 bytes fixture/21/.zattrs | 1 - fixture/21/0/.zarray | 18 ---- fixture/21/0/.zattrs | 1 - fixture/21/0/0.0.0 | Bin 3600 -> 0 bytes fixture/21/0/0.0.1 | Bin 3600 -> 0 bytes fixture/21/0/0.0.2 | Bin 3600 -> 0 bytes fixture/21/0/0.0.3 | Bin 3600 -> 0 bytes fixture/21/0/0.1.0 | Bin 3600 -> 0 bytes fixture/21/0/0.1.1 | Bin 3600 -> 0 bytes fixture/21/0/0.1.2 | Bin 3600 -> 0 bytes fixture/21/0/0.1.3 | Bin 3600 -> 0 bytes fixture/21/0/0.2.0 | Bin 3600 -> 0 bytes fixture/21/0/0.2.1 | Bin 3600 -> 0 bytes fixture/21/0/0.2.2 | Bin 3600 -> 0 bytes fixture/21/0/0.2.3 | Bin 3600 -> 0 bytes fixture/21/0/0.3.0 | Bin 3600 -> 0 bytes fixture/21/0/0.3.1 | Bin 3600 -> 0 bytes fixture/21/0/0.3.2 | Bin 3600 -> 0 
bytes fixture/21/0/0.3.3 | Bin 3600 -> 0 bytes fixture/21/0/1.0.0 | Bin 3600 -> 0 bytes fixture/21/0/1.0.1 | Bin 3600 -> 0 bytes fixture/21/0/1.0.2 | Bin 3600 -> 0 bytes fixture/21/0/1.0.3 | Bin 3600 -> 0 bytes fixture/21/0/1.1.0 | Bin 3600 -> 0 bytes fixture/21/0/1.1.1 | Bin 3600 -> 0 bytes fixture/21/0/1.1.2 | Bin 3600 -> 0 bytes fixture/21/0/1.1.3 | Bin 3600 -> 0 bytes fixture/21/0/1.2.0 | Bin 3600 -> 0 bytes fixture/21/0/1.2.1 | Bin 3600 -> 0 bytes fixture/21/0/1.2.2 | Bin 3600 -> 0 bytes fixture/21/0/1.2.3 | Bin 3600 -> 0 bytes fixture/21/0/1.3.0 | Bin 3600 -> 0 bytes fixture/21/0/1.3.1 | Bin 3600 -> 0 bytes fixture/21/0/1.3.2 | Bin 3600 -> 0 bytes fixture/21/0/1.3.3 | Bin 3600 -> 0 bytes fixture/21/1/.zarray | 21 ----- fixture/21/1/.zattrs | 1 - fixture/21/1/0.0.0 | Bin 1324 -> 0 bytes fixture/21/1/0.0.1 | 7 -- fixture/21/1/0.0.2 | Bin 1323 -> 0 bytes fixture/21/1/0.0.3 | Bin 498 -> 0 bytes fixture/21/1/0.1.0 | Bin 1325 -> 0 bytes fixture/21/1/0.1.1 | Bin 1327 -> 0 bytes fixture/21/1/0.1.2 | Bin 1323 -> 0 bytes fixture/21/1/0.1.3 | Bin 498 -> 0 bytes fixture/21/1/0.2.0 | Bin 1322 -> 0 bytes fixture/21/1/0.2.1 | Bin 1326 -> 0 bytes fixture/21/1/0.2.2 | Bin 1324 -> 0 bytes fixture/21/1/0.2.3 | Bin 494 -> 0 bytes fixture/21/1/0.3.0 | Bin 504 -> 0 bytes fixture/21/1/0.3.1 | Bin 503 -> 0 bytes fixture/21/1/0.3.2 | Bin 502 -> 0 bytes fixture/21/1/0.3.3 | 1 - fixture/21/1/1.0.0 | Bin 1321 -> 0 bytes fixture/21/1/1.0.1 | Bin 1323 -> 0 bytes fixture/21/1/1.0.2 | Bin 1323 -> 0 bytes fixture/21/1/1.0.3 | Bin 497 -> 0 bytes fixture/21/1/1.1.0 | Bin 1321 -> 0 bytes fixture/21/1/1.1.1 | Bin 1324 -> 0 bytes fixture/21/1/1.1.2 | Bin 1325 -> 0 bytes fixture/21/1/1.1.3 | Bin 496 -> 0 bytes fixture/21/1/1.2.0 | Bin 1323 -> 0 bytes fixture/21/1/1.2.1 | Bin 1325 -> 0 bytes fixture/21/1/1.2.2 | Bin 1324 -> 0 bytes fixture/21/1/1.2.3 | Bin 497 -> 0 bytes fixture/21/1/1.3.0 | Bin 502 -> 0 bytes fixture/21/1/1.3.1 | Bin 504 -> 0 bytes fixture/21/1/1.3.2 | Bin 505 -> 0 bytes fixture/21/1/1.3.3 | Bin 200 -> 0 bytes fixture/21/2/.zarray | 21 ----- fixture/21/2/.zattrs | 1 - fixture/21/2/0.0.0 | Bin 1349 -> 0 bytes fixture/21/2/0.0.1 | Bin 1308 -> 0 bytes fixture/21/2/0.0.2 | Bin 1341 -> 0 bytes fixture/21/2/0.0.3 | Bin 510 -> 0 bytes fixture/21/2/0.1.0 | Bin 1318 -> 0 bytes fixture/21/2/0.1.1 | Bin 1292 -> 0 bytes fixture/21/2/0.1.2 | Bin 1254 -> 0 bytes fixture/21/2/0.1.3 | Bin 517 -> 0 bytes fixture/21/2/0.2.0 | Bin 1321 -> 0 bytes fixture/21/2/0.2.1 | Bin 1319 -> 0 bytes fixture/21/2/0.2.2 | Bin 1270 -> 0 bytes fixture/21/2/0.2.3 | Bin 510 -> 0 bytes fixture/21/2/0.3.0 | Bin 579 -> 0 bytes fixture/21/2/0.3.1 | Bin 564 -> 0 bytes fixture/21/2/0.3.2 | Bin 604 -> 0 bytes fixture/21/2/0.3.3 | Bin 207 -> 0 bytes fixture/21/2/1.0.0 | Bin 1352 -> 0 bytes fixture/21/2/1.0.1 | Bin 1270 -> 0 bytes fixture/21/2/1.0.2 | Bin 1265 -> 0 bytes fixture/21/2/1.0.3 | Bin 523 -> 0 bytes fixture/21/2/1.1.0 | Bin 1305 -> 0 bytes fixture/21/2/1.1.1 | Bin 1298 -> 0 bytes fixture/21/2/1.1.2 | Bin 1266 -> 0 bytes fixture/21/2/1.1.3 | Bin 529 -> 0 bytes fixture/21/2/1.2.0 | Bin 1350 -> 0 bytes fixture/21/2/1.2.1 | Bin 1274 -> 0 bytes fixture/21/2/1.2.2 | Bin 1246 -> 0 bytes fixture/21/2/1.2.3 | Bin 517 -> 0 bytes fixture/21/2/1.3.0 | Bin 547 -> 0 bytes fixture/21/2/1.3.1 | Bin 556 -> 0 bytes fixture/21/2/1.3.2 | Bin 563 -> 0 bytes fixture/21/2/1.3.3 | Bin 198 -> 0 bytes fixture/21/3/.zarray | 23 ------ fixture/21/3/.zattrs | 1 - fixture/21/3/0.0.0 | Bin 1900 -> 0 bytes fixture/21/3/0.0.1 | Bin 1979 -> 0 bytes fixture/21/3/0.0.2 | Bin 
1992 -> 0 bytes fixture/21/3/0.0.3 | Bin 685 -> 0 bytes fixture/21/3/0.1.0 | Bin 1983 -> 0 bytes fixture/21/3/0.1.1 | Bin 1987 -> 0 bytes fixture/21/3/0.1.2 | Bin 1978 -> 0 bytes fixture/21/3/0.1.3 | Bin 694 -> 0 bytes fixture/21/3/0.2.0 | Bin 1982 -> 0 bytes fixture/21/3/0.2.1 | Bin 1983 -> 0 bytes fixture/21/3/0.2.2 | Bin 1982 -> 0 bytes fixture/21/3/0.2.3 | Bin 691 -> 0 bytes fixture/21/3/0.3.0 | Bin 715 -> 0 bytes fixture/21/3/0.3.1 | Bin 714 -> 0 bytes fixture/21/3/0.3.2 | Bin 724 -> 0 bytes fixture/21/3/0.3.3 | Bin 265 -> 0 bytes fixture/21/3/1.0.0 | Bin 1908 -> 0 bytes fixture/21/3/1.0.1 | Bin 1980 -> 0 bytes fixture/21/3/1.0.2 | Bin 1989 -> 0 bytes fixture/21/3/1.0.3 | Bin 687 -> 0 bytes fixture/21/3/1.1.0 | Bin 1981 -> 0 bytes fixture/21/3/1.1.1 | Bin 1984 -> 0 bytes fixture/21/3/1.1.2 | Bin 1984 -> 0 bytes fixture/21/3/1.1.3 | Bin 691 -> 0 bytes fixture/21/3/1.2.0 | Bin 1986 -> 0 bytes fixture/21/3/1.2.1 | Bin 1985 -> 0 bytes fixture/21/3/1.2.2 | Bin 1987 -> 0 bytes fixture/21/3/1.2.3 | Bin 693 -> 0 bytes fixture/21/3/1.3.0 | Bin 721 -> 0 bytes fixture/21/3/1.3.1 | Bin 718 -> 0 bytes fixture/21/3/1.3.2 | Bin 722 -> 0 bytes fixture/21/3/1.3.3 | Bin 269 -> 0 bytes fixture/21/4/.zarray | 23 ------ fixture/21/4/.zattrs | 1 - fixture/21/4/0.0.0 | Bin 551 -> 0 bytes fixture/21/4/0.0.1 | Bin 549 -> 0 bytes fixture/21/4/0.0.2 | Bin 552 -> 0 bytes fixture/21/4/0.0.3 | Bin 289 -> 0 bytes fixture/21/4/0.1.0 | Bin 552 -> 0 bytes fixture/21/4/0.1.1 | Bin 552 -> 0 bytes fixture/21/4/0.1.2 | Bin 552 -> 0 bytes fixture/21/4/0.1.3 | Bin 289 -> 0 bytes fixture/21/4/0.2.0 | Bin 549 -> 0 bytes fixture/21/4/0.2.1 | Bin 554 -> 0 bytes fixture/21/4/0.2.2 | Bin 549 -> 0 bytes fixture/21/4/0.2.3 | Bin 289 -> 0 bytes fixture/21/4/0.3.0 | Bin 350 -> 0 bytes fixture/21/4/0.3.1 | Bin 350 -> 0 bytes fixture/21/4/0.3.2 | Bin 353 -> 0 bytes fixture/21/4/0.3.3 | Bin 153 -> 0 bytes fixture/21/4/1.0.0 | Bin 552 -> 0 bytes fixture/21/4/1.0.1 | Bin 549 -> 0 bytes fixture/21/4/1.0.2 | Bin 554 -> 0 bytes fixture/21/4/1.0.3 | Bin 289 -> 0 bytes fixture/21/4/1.1.0 | Bin 549 -> 0 bytes fixture/21/4/1.1.1 | Bin 552 -> 0 bytes fixture/21/4/1.1.2 | Bin 552 -> 0 bytes fixture/21/4/1.1.3 | Bin 289 -> 0 bytes fixture/21/4/1.2.0 | Bin 552 -> 0 bytes fixture/21/4/1.2.1 | Bin 554 -> 0 bytes fixture/21/4/1.2.2 | Bin 549 -> 0 bytes fixture/21/4/1.2.3 | Bin 289 -> 0 bytes fixture/21/4/1.3.0 | Bin 350 -> 0 bytes fixture/21/4/1.3.1 | Bin 350 -> 0 bytes fixture/21/4/1.3.2 | Bin 353 -> 0 bytes fixture/21/4/1.3.3 | Bin 153 -> 0 bytes fixture/21/5/.zarray | 23 ------ fixture/21/5/.zattrs | 1 - fixture/21/5/0.0.0 | Bin 1900 -> 0 bytes fixture/21/5/0.0.1 | Bin 1979 -> 0 bytes fixture/21/5/0.0.2 | Bin 1992 -> 0 bytes fixture/21/5/0.0.3 | Bin 685 -> 0 bytes fixture/21/5/0.1.0 | Bin 1983 -> 0 bytes fixture/21/5/0.1.1 | Bin 1987 -> 0 bytes fixture/21/5/0.1.2 | Bin 1978 -> 0 bytes fixture/21/5/0.1.3 | Bin 694 -> 0 bytes fixture/21/5/0.2.0 | Bin 1982 -> 0 bytes fixture/21/5/0.2.1 | Bin 1983 -> 0 bytes fixture/21/5/0.2.2 | Bin 1982 -> 0 bytes fixture/21/5/0.2.3 | Bin 691 -> 0 bytes fixture/21/5/0.3.0 | Bin 715 -> 0 bytes fixture/21/5/0.3.1 | Bin 714 -> 0 bytes fixture/21/5/0.3.2 | Bin 724 -> 0 bytes fixture/21/5/0.3.3 | Bin 265 -> 0 bytes fixture/21/5/1.0.0 | Bin 1908 -> 0 bytes fixture/21/5/1.0.1 | Bin 1980 -> 0 bytes fixture/21/5/1.0.2 | Bin 1989 -> 0 bytes fixture/21/5/1.0.3 | Bin 687 -> 0 bytes fixture/21/5/1.1.0 | Bin 1981 -> 0 bytes fixture/21/5/1.1.1 | Bin 1984 -> 0 bytes fixture/21/5/1.1.2 | Bin 1984 -> 0 bytes fixture/21/5/1.1.3 | Bin 691 
-> 0 bytes fixture/21/5/1.2.0 | Bin 1986 -> 0 bytes fixture/21/5/1.2.1 | Bin 1985 -> 0 bytes fixture/21/5/1.2.2 | Bin 1987 -> 0 bytes fixture/21/5/1.2.3 | Bin 693 -> 0 bytes fixture/21/5/1.3.0 | Bin 721 -> 0 bytes fixture/21/5/1.3.1 | Bin 718 -> 0 bytes fixture/21/5/1.3.2 | Bin 722 -> 0 bytes fixture/21/5/1.3.3 | Bin 269 -> 0 bytes fixture/21/6/.zarray | 23 ------ fixture/21/6/.zattrs | 1 - fixture/21/6/0.0.0 | Bin 3616 -> 0 bytes fixture/21/6/0.0.1 | Bin 3616 -> 0 bytes fixture/21/6/0.0.2 | Bin 3616 -> 0 bytes fixture/21/6/0.0.3 | Bin 1256 -> 0 bytes fixture/21/6/0.1.0 | Bin 3616 -> 0 bytes fixture/21/6/0.1.1 | Bin 3616 -> 0 bytes fixture/21/6/0.1.2 | Bin 3616 -> 0 bytes fixture/21/6/0.1.3 | Bin 1256 -> 0 bytes fixture/21/6/0.2.0 | Bin 3616 -> 0 bytes fixture/21/6/0.2.1 | Bin 3616 -> 0 bytes fixture/21/6/0.2.2 | Bin 3616 -> 0 bytes fixture/21/6/0.2.3 | Bin 1256 -> 0 bytes fixture/21/6/0.3.0 | Bin 1266 -> 0 bytes fixture/21/6/0.3.1 | Bin 1266 -> 0 bytes fixture/21/6/0.3.2 | Bin 1266 -> 0 bytes fixture/21/6/0.3.3 | Bin 455 -> 0 bytes fixture/21/6/1.0.0 | Bin 3616 -> 0 bytes fixture/21/6/1.0.1 | Bin 3616 -> 0 bytes fixture/21/6/1.0.2 | Bin 3616 -> 0 bytes fixture/21/6/1.0.3 | Bin 1256 -> 0 bytes fixture/21/6/1.1.0 | Bin 3616 -> 0 bytes fixture/21/6/1.1.1 | Bin 3616 -> 0 bytes fixture/21/6/1.1.2 | Bin 3616 -> 0 bytes fixture/21/6/1.1.3 | Bin 1256 -> 0 bytes fixture/21/6/1.2.0 | Bin 3616 -> 0 bytes fixture/21/6/1.2.1 | Bin 3616 -> 0 bytes fixture/21/6/1.2.2 | Bin 3616 -> 0 bytes fixture/21/6/1.2.3 | Bin 1256 -> 0 bytes fixture/21/6/1.3.0 | Bin 1266 -> 0 bytes fixture/21/6/1.3.1 | Bin 1266 -> 0 bytes fixture/21/6/1.3.2 | Bin 1273 -> 0 bytes fixture/21/6/1.3.3 | Bin 455 -> 0 bytes fixture/22/.zattrs | 1 - fixture/22/0/.zarray | 20 ----- fixture/22/0/.zattrs | 1 - fixture/22/0/0.0.0.0 | Bin 1080 -> 0 bytes fixture/22/0/0.0.0.1 | Bin 1080 -> 0 bytes fixture/22/0/0.0.0.2 | Bin 1080 -> 0 bytes fixture/22/0/0.0.0.3 | Bin 1080 -> 0 bytes fixture/22/0/0.0.1.0 | Bin 1080 -> 0 bytes fixture/22/0/0.0.1.1 | Bin 1080 -> 0 bytes fixture/22/0/0.0.1.2 | Bin 1080 -> 0 bytes fixture/22/0/0.0.1.3 | Bin 1080 -> 0 bytes fixture/22/0/0.0.2.0 | Bin 1080 -> 0 bytes fixture/22/0/0.0.2.1 | Bin 1080 -> 0 bytes fixture/22/0/0.0.2.2 | Bin 1080 -> 0 bytes fixture/22/0/0.0.2.3 | Bin 1080 -> 0 bytes fixture/22/0/0.0.3.0 | Bin 1080 -> 0 bytes fixture/22/0/0.0.3.1 | Bin 1080 -> 0 bytes fixture/22/0/0.0.3.2 | Bin 1080 -> 0 bytes fixture/22/0/0.0.3.3 | Bin 1080 -> 0 bytes fixture/22/0/0.1.0.0 | Bin 1080 -> 0 bytes fixture/22/0/0.1.0.1 | Bin 1080 -> 0 bytes fixture/22/0/0.1.0.2 | Bin 1080 -> 0 bytes fixture/22/0/0.1.0.3 | Bin 1080 -> 0 bytes fixture/22/0/0.1.1.0 | Bin 1080 -> 0 bytes fixture/22/0/0.1.1.1 | Bin 1080 -> 0 bytes fixture/22/0/0.1.1.2 | Bin 1080 -> 0 bytes fixture/22/0/0.1.1.3 | Bin 1080 -> 0 bytes fixture/22/0/0.1.2.0 | Bin 1080 -> 0 bytes fixture/22/0/0.1.2.1 | Bin 1080 -> 0 bytes fixture/22/0/0.1.2.2 | Bin 1080 -> 0 bytes fixture/22/0/0.1.2.3 | Bin 1080 -> 0 bytes fixture/22/0/0.1.3.0 | Bin 1080 -> 0 bytes fixture/22/0/0.1.3.1 | Bin 1080 -> 0 bytes fixture/22/0/0.1.3.2 | Bin 1080 -> 0 bytes fixture/22/0/0.1.3.3 | Bin 1080 -> 0 bytes fixture/22/0/0.2.0.0 | Bin 1080 -> 0 bytes fixture/22/0/0.2.0.1 | Bin 1080 -> 0 bytes fixture/22/0/0.2.0.2 | Bin 1080 -> 0 bytes fixture/22/0/0.2.0.3 | Bin 1080 -> 0 bytes fixture/22/0/0.2.1.0 | Bin 1080 -> 0 bytes fixture/22/0/0.2.1.1 | Bin 1080 -> 0 bytes fixture/22/0/0.2.1.2 | Bin 1080 -> 0 bytes fixture/22/0/0.2.1.3 | Bin 1080 -> 0 bytes fixture/22/0/0.2.2.0 | Bin 1080 -> 0 bytes 
fixture/22/0/0.2.2.1 | Bin 1080 -> 0 bytes fixture/22/0/0.2.2.2 | Bin 1080 -> 0 bytes fixture/22/0/0.2.2.3 | Bin 1080 -> 0 bytes fixture/22/0/0.2.3.0 | Bin 1080 -> 0 bytes fixture/22/0/0.2.3.1 | Bin 1080 -> 0 bytes fixture/22/0/0.2.3.2 | Bin 1080 -> 0 bytes fixture/22/0/0.2.3.3 | Bin 1080 -> 0 bytes fixture/22/0/0.3.0.0 | Bin 1080 -> 0 bytes fixture/22/0/0.3.0.1 | Bin 1080 -> 0 bytes fixture/22/0/0.3.0.2 | Bin 1080 -> 0 bytes fixture/22/0/0.3.0.3 | Bin 1080 -> 0 bytes fixture/22/0/0.3.1.0 | Bin 1080 -> 0 bytes fixture/22/0/0.3.1.1 | Bin 1080 -> 0 bytes fixture/22/0/0.3.1.2 | Bin 1080 -> 0 bytes fixture/22/0/0.3.1.3 | Bin 1080 -> 0 bytes fixture/22/0/0.3.2.0 | Bin 1080 -> 0 bytes fixture/22/0/0.3.2.1 | Bin 1080 -> 0 bytes fixture/22/0/0.3.2.2 | Bin 1080 -> 0 bytes fixture/22/0/0.3.2.3 | Bin 1080 -> 0 bytes fixture/22/0/0.3.3.0 | Bin 1080 -> 0 bytes fixture/22/0/0.3.3.1 | Bin 1080 -> 0 bytes fixture/22/0/0.3.3.2 | Bin 1080 -> 0 bytes fixture/22/0/0.3.3.3 | Bin 1080 -> 0 bytes fixture/22/0/1.0.0.0 | Bin 1080 -> 0 bytes fixture/22/0/1.0.0.1 | Bin 1080 -> 0 bytes fixture/22/0/1.0.0.2 | Bin 1080 -> 0 bytes fixture/22/0/1.0.0.3 | Bin 1080 -> 0 bytes fixture/22/0/1.0.1.0 | Bin 1080 -> 0 bytes fixture/22/0/1.0.1.1 | Bin 1080 -> 0 bytes fixture/22/0/1.0.1.2 | Bin 1080 -> 0 bytes fixture/22/0/1.0.1.3 | Bin 1080 -> 0 bytes fixture/22/0/1.0.2.0 | Bin 1080 -> 0 bytes fixture/22/0/1.0.2.1 | Bin 1080 -> 0 bytes fixture/22/0/1.0.2.2 | Bin 1080 -> 0 bytes fixture/22/0/1.0.2.3 | Bin 1080 -> 0 bytes fixture/22/0/1.0.3.0 | Bin 1080 -> 0 bytes fixture/22/0/1.0.3.1 | Bin 1080 -> 0 bytes fixture/22/0/1.0.3.2 | Bin 1080 -> 0 bytes fixture/22/0/1.0.3.3 | Bin 1080 -> 0 bytes fixture/22/0/1.1.0.0 | Bin 1080 -> 0 bytes fixture/22/0/1.1.0.1 | Bin 1080 -> 0 bytes fixture/22/0/1.1.0.2 | Bin 1080 -> 0 bytes fixture/22/0/1.1.0.3 | Bin 1080 -> 0 bytes fixture/22/0/1.1.1.0 | Bin 1080 -> 0 bytes fixture/22/0/1.1.1.1 | Bin 1080 -> 0 bytes fixture/22/0/1.1.1.2 | Bin 1080 -> 0 bytes fixture/22/0/1.1.1.3 | Bin 1080 -> 0 bytes fixture/22/0/1.1.2.0 | Bin 1080 -> 0 bytes fixture/22/0/1.1.2.1 | Bin 1080 -> 0 bytes fixture/22/0/1.1.2.2 | Bin 1080 -> 0 bytes fixture/22/0/1.1.2.3 | Bin 1080 -> 0 bytes fixture/22/0/1.1.3.0 | Bin 1080 -> 0 bytes fixture/22/0/1.1.3.1 | Bin 1080 -> 0 bytes fixture/22/0/1.1.3.2 | Bin 1080 -> 0 bytes fixture/22/0/1.1.3.3 | Bin 1080 -> 0 bytes fixture/22/0/1.2.0.0 | Bin 1080 -> 0 bytes fixture/22/0/1.2.0.1 | Bin 1080 -> 0 bytes fixture/22/0/1.2.0.2 | Bin 1080 -> 0 bytes fixture/22/0/1.2.0.3 | Bin 1080 -> 0 bytes fixture/22/0/1.2.1.0 | Bin 1080 -> 0 bytes fixture/22/0/1.2.1.1 | Bin 1080 -> 0 bytes fixture/22/0/1.2.1.2 | Bin 1080 -> 0 bytes fixture/22/0/1.2.1.3 | Bin 1080 -> 0 bytes fixture/22/0/1.2.2.0 | Bin 1080 -> 0 bytes fixture/22/0/1.2.2.1 | Bin 1080 -> 0 bytes fixture/22/0/1.2.2.2 | Bin 1080 -> 0 bytes fixture/22/0/1.2.2.3 | Bin 1080 -> 0 bytes fixture/22/0/1.2.3.0 | Bin 1080 -> 0 bytes fixture/22/0/1.2.3.1 | Bin 1080 -> 0 bytes fixture/22/0/1.2.3.2 | Bin 1080 -> 0 bytes fixture/22/0/1.2.3.3 | Bin 1080 -> 0 bytes fixture/22/0/1.3.0.0 | Bin 1080 -> 0 bytes fixture/22/0/1.3.0.1 | Bin 1080 -> 0 bytes fixture/22/0/1.3.0.2 | Bin 1080 -> 0 bytes fixture/22/0/1.3.0.3 | Bin 1080 -> 0 bytes fixture/22/0/1.3.1.0 | Bin 1080 -> 0 bytes fixture/22/0/1.3.1.1 | Bin 1080 -> 0 bytes fixture/22/0/1.3.1.2 | Bin 1080 -> 0 bytes fixture/22/0/1.3.1.3 | Bin 1080 -> 0 bytes fixture/22/0/1.3.2.0 | Bin 1080 -> 0 bytes fixture/22/0/1.3.2.1 | Bin 1080 -> 0 bytes fixture/22/0/1.3.2.2 | Bin 1080 -> 0 bytes fixture/22/0/1.3.2.3 | Bin 
1080 -> 0 bytes fixture/22/0/1.3.3.0 | Bin 1080 -> 0 bytes fixture/22/0/1.3.3.1 | Bin 1080 -> 0 bytes fixture/22/0/1.3.3.2 | Bin 1080 -> 0 bytes fixture/22/0/1.3.3.3 | Bin 1080 -> 0 bytes fixture/22/1/.zarray | 23 ------ fixture/22/1/.zattrs | 1 - fixture/22/1/0.0.0.0 | Bin 473 -> 0 bytes fixture/22/1/0.0.0.1 | Bin 473 -> 0 bytes fixture/22/1/0.0.0.2 | Bin 472 -> 0 bytes fixture/22/1/0.0.0.3 | 1 - fixture/22/1/0.0.1.0 | Bin 469 -> 0 bytes fixture/22/1/0.0.1.1 | Bin 467 -> 0 bytes fixture/22/1/0.0.1.2 | Bin 464 -> 0 bytes fixture/22/1/0.0.1.3 | 4 - fixture/22/1/0.0.2.0 | Bin 469 -> 0 bytes fixture/22/1/0.0.2.1 | Bin 469 -> 0 bytes fixture/22/1/0.0.2.2 | Bin 471 -> 0 bytes fixture/22/1/0.0.2.3 | Bin 255 -> 0 bytes fixture/22/1/0.0.3.0 | Bin 252 -> 0 bytes fixture/22/1/0.0.3.1 | Bin 250 -> 0 bytes fixture/22/1/0.0.3.2 | Bin 250 -> 0 bytes fixture/22/1/0.0.3.3 | Bin 127 -> 0 bytes fixture/22/1/0.1.0.0 | Bin 468 -> 0 bytes fixture/22/1/0.1.0.1 | Bin 469 -> 0 bytes fixture/22/1/0.1.0.2 | Bin 467 -> 0 bytes fixture/22/1/0.1.0.3 | 1 - fixture/22/1/0.1.1.0 | Bin 470 -> 0 bytes fixture/22/1/0.1.1.1 | Bin 468 -> 0 bytes fixture/22/1/0.1.1.2 | Bin 468 -> 0 bytes fixture/22/1/0.1.1.3 | Bin 257 -> 0 bytes fixture/22/1/0.1.2.0 | Bin 468 -> 0 bytes fixture/22/1/0.1.2.1 | Bin 471 -> 0 bytes fixture/22/1/0.1.2.2 | Bin 468 -> 0 bytes fixture/22/1/0.1.2.3 | Bin 257 -> 0 bytes fixture/22/1/0.1.3.0 | Bin 248 -> 0 bytes fixture/22/1/0.1.3.1 | Bin 248 -> 0 bytes fixture/22/1/0.1.3.2 | Bin 247 -> 0 bytes fixture/22/1/0.1.3.3 | Bin 129 -> 0 bytes fixture/22/1/0.2.0.0 | Bin 471 -> 0 bytes fixture/22/1/0.2.0.1 | Bin 469 -> 0 bytes fixture/22/1/0.2.0.2 | Bin 467 -> 0 bytes fixture/22/1/0.2.0.3 | 2 - fixture/22/1/0.2.1.0 | Bin 469 -> 0 bytes fixture/22/1/0.2.1.1 | Bin 469 -> 0 bytes fixture/22/1/0.2.1.2 | Bin 469 -> 0 bytes fixture/22/1/0.2.1.3 | 2 - fixture/22/1/0.2.2.0 | Bin 468 -> 0 bytes fixture/22/1/0.2.2.1 | Bin 467 -> 0 bytes fixture/22/1/0.2.2.2 | Bin 471 -> 0 bytes fixture/22/1/0.2.2.3 | 1 - fixture/22/1/0.2.3.0 | Bin 249 -> 0 bytes fixture/22/1/0.2.3.1 | Bin 247 -> 0 bytes fixture/22/1/0.2.3.2 | Bin 248 -> 0 bytes fixture/22/1/0.2.3.3 | Bin 129 -> 0 bytes fixture/22/1/0.3.0.0 | Bin 200 -> 0 bytes fixture/22/1/0.3.0.1 | Bin 200 -> 0 bytes fixture/22/1/0.3.0.2 | Bin 201 -> 0 bytes fixture/22/1/0.3.0.3 | 1 - fixture/22/1/0.3.1.0 | Bin 201 -> 0 bytes fixture/22/1/0.3.1.1 | Bin 201 -> 0 bytes fixture/22/1/0.3.1.2 | Bin 198 -> 0 bytes fixture/22/1/0.3.1.3 | 1 - fixture/22/1/0.3.2.0 | Bin 198 -> 0 bytes fixture/22/1/0.3.2.1 | Bin 199 -> 0 bytes fixture/22/1/0.3.2.2 | Bin 201 -> 0 bytes fixture/22/1/0.3.2.3 | 1 - fixture/22/1/0.3.3.0 | Bin 104 -> 0 bytes fixture/22/1/0.3.3.1 | Bin 104 -> 0 bytes fixture/22/1/0.3.3.2 | Bin 104 -> 0 bytes fixture/22/1/0.3.3.3 | Bin 56 -> 0 bytes fixture/22/1/1.0.0.0 | Bin 469 -> 0 bytes fixture/22/1/1.0.0.1 | Bin 470 -> 0 bytes fixture/22/1/1.0.0.2 | Bin 472 -> 0 bytes fixture/22/1/1.0.0.3 | Bin 257 -> 0 bytes fixture/22/1/1.0.1.0 | Bin 469 -> 0 bytes fixture/22/1/1.0.1.1 | Bin 469 -> 0 bytes fixture/22/1/1.0.1.2 | Bin 468 -> 0 bytes fixture/22/1/1.0.1.3 | 1 - fixture/22/1/1.0.2.0 | Bin 469 -> 0 bytes fixture/22/1/1.0.2.1 | Bin 468 -> 0 bytes fixture/22/1/1.0.2.2 | Bin 468 -> 0 bytes fixture/22/1/1.0.2.3 | Bin 258 -> 0 bytes fixture/22/1/1.0.3.0 | Bin 248 -> 0 bytes fixture/22/1/1.0.3.1 | Bin 248 -> 0 bytes fixture/22/1/1.0.3.2 | Bin 248 -> 0 bytes fixture/22/1/1.0.3.3 | Bin 129 -> 0 bytes fixture/22/1/1.1.0.0 | Bin 470 -> 0 bytes fixture/22/1/1.1.0.1 | Bin 469 -> 0 bytes 
fixture/22/1/1.1.0.2 | Bin 469 -> 0 bytes fixture/22/1/1.1.0.3 | Bin 257 -> 0 bytes fixture/22/1/1.1.1.0 | Bin 470 -> 0 bytes fixture/22/1/1.1.1.1 | Bin 468 -> 0 bytes fixture/22/1/1.1.1.2 | Bin 468 -> 0 bytes fixture/22/1/1.1.1.3 | Bin 258 -> 0 bytes fixture/22/1/1.1.2.0 | Bin 467 -> 0 bytes fixture/22/1/1.1.2.1 | Bin 468 -> 0 bytes fixture/22/1/1.1.2.2 | Bin 467 -> 0 bytes fixture/22/1/1.1.2.3 | Bin 259 -> 0 bytes fixture/22/1/1.1.3.0 | Bin 252 -> 0 bytes fixture/22/1/1.1.3.1 | Bin 247 -> 0 bytes fixture/22/1/1.1.3.2 | Bin 246 -> 0 bytes fixture/22/1/1.1.3.3 | Bin 129 -> 0 bytes fixture/22/1/1.2.0.0 | Bin 469 -> 0 bytes fixture/22/1/1.2.0.1 | Bin 469 -> 0 bytes fixture/22/1/1.2.0.2 | Bin 469 -> 0 bytes fixture/22/1/1.2.0.3 | Bin 257 -> 0 bytes fixture/22/1/1.2.1.0 | Bin 469 -> 0 bytes fixture/22/1/1.2.1.1 | Bin 469 -> 0 bytes fixture/22/1/1.2.1.2 | Bin 470 -> 0 bytes fixture/22/1/1.2.1.3 | Bin 258 -> 0 bytes fixture/22/1/1.2.2.0 | Bin 471 -> 0 bytes fixture/22/1/1.2.2.1 | Bin 470 -> 0 bytes fixture/22/1/1.2.2.2 | Bin 468 -> 0 bytes fixture/22/1/1.2.2.3 | Bin 257 -> 0 bytes fixture/22/1/1.2.3.0 | Bin 249 -> 0 bytes fixture/22/1/1.2.3.1 | Bin 248 -> 0 bytes fixture/22/1/1.2.3.2 | Bin 247 -> 0 bytes fixture/22/1/1.2.3.3 | Bin 129 -> 0 bytes fixture/22/1/1.3.0.0 | Bin 199 -> 0 bytes fixture/22/1/1.3.0.1 | Bin 201 -> 0 bytes fixture/22/1/1.3.0.2 | Bin 200 -> 0 bytes fixture/22/1/1.3.0.3 | 1 - fixture/22/1/1.3.1.0 | Bin 200 -> 0 bytes fixture/22/1/1.3.1.1 | Bin 201 -> 0 bytes fixture/22/1/1.3.1.2 | Bin 200 -> 0 bytes fixture/22/1/1.3.1.3 | 1 - fixture/22/1/1.3.2.0 | Bin 200 -> 0 bytes fixture/22/1/1.3.2.1 | Bin 201 -> 0 bytes fixture/22/1/1.3.2.2 | Bin 199 -> 0 bytes fixture/22/1/1.3.2.3 | 1 - fixture/22/1/1.3.3.0 | Bin 104 -> 0 bytes fixture/22/1/1.3.3.1 | Bin 105 -> 0 bytes fixture/22/1/1.3.3.2 | Bin 103 -> 0 bytes fixture/22/1/1.3.3.3 | Bin 56 -> 0 bytes fixture/22/2/.zarray | 23 ------ fixture/22/2/.zattrs | 1 - fixture/22/2/0.0.0.0 | Bin 594 -> 0 bytes fixture/22/2/0.0.0.1 | Bin 603 -> 0 bytes fixture/22/2/0.0.0.2 | Bin 601 -> 0 bytes fixture/22/2/0.0.0.3 | Bin 282 -> 0 bytes fixture/22/2/0.0.1.0 | Bin 588 -> 0 bytes fixture/22/2/0.0.1.1 | Bin 589 -> 0 bytes fixture/22/2/0.0.1.2 | Bin 601 -> 0 bytes fixture/22/2/0.0.1.3 | Bin 277 -> 0 bytes fixture/22/2/0.0.2.0 | Bin 598 -> 0 bytes fixture/22/2/0.0.2.1 | Bin 596 -> 0 bytes fixture/22/2/0.0.2.2 | Bin 599 -> 0 bytes fixture/22/2/0.0.2.3 | Bin 281 -> 0 bytes fixture/22/2/0.0.3.0 | Bin 303 -> 0 bytes fixture/22/2/0.0.3.1 | Bin 308 -> 0 bytes fixture/22/2/0.0.3.2 | Bin 318 -> 0 bytes fixture/22/2/0.0.3.3 | Bin 139 -> 0 bytes fixture/22/2/0.1.0.0 | Bin 588 -> 0 bytes fixture/22/2/0.1.0.1 | Bin 582 -> 0 bytes fixture/22/2/0.1.0.2 | Bin 592 -> 0 bytes fixture/22/2/0.1.0.3 | Bin 267 -> 0 bytes fixture/22/2/0.1.1.0 | Bin 582 -> 0 bytes fixture/22/2/0.1.1.1 | Bin 576 -> 0 bytes fixture/22/2/0.1.1.2 | Bin 588 -> 0 bytes fixture/22/2/0.1.1.3 | Bin 268 -> 0 bytes fixture/22/2/0.1.2.0 | Bin 584 -> 0 bytes fixture/22/2/0.1.2.1 | Bin 586 -> 0 bytes fixture/22/2/0.1.2.2 | Bin 584 -> 0 bytes fixture/22/2/0.1.2.3 | Bin 260 -> 0 bytes fixture/22/2/0.1.3.0 | Bin 314 -> 0 bytes fixture/22/2/0.1.3.1 | Bin 317 -> 0 bytes fixture/22/2/0.1.3.2 | Bin 315 -> 0 bytes fixture/22/2/0.1.3.3 | Bin 138 -> 0 bytes fixture/22/2/0.2.0.0 | Bin 583 -> 0 bytes fixture/22/2/0.2.0.1 | Bin 577 -> 0 bytes fixture/22/2/0.2.0.2 | Bin 583 -> 0 bytes fixture/22/2/0.2.0.3 | Bin 268 -> 0 bytes fixture/22/2/0.2.1.0 | Bin 588 -> 0 bytes fixture/22/2/0.2.1.1 | Bin 576 -> 0 bytes 
 fixture/22/2/0.2.1.2 | Bin 581 -> 0 bytes
 fixture/22/2/0.2.1.3 | Bin 272 -> 0 bytes
 fixture/22/2/0.2.2.0 | Bin 590 -> 0 bytes
 [diffstat continues, one entry per file: the remaining binary chunk files (keys 0.0.0.0 through 1.3.3.3) and the .zarray/.zattrs metadata are deleted under fixture/22/2 through fixture/22/6, fixture/23/0 through fixture/23/6, fixture/3/, fixture/4/0 through fixture/4/6, and fixture/5/0 through fixture/5/5, each reported as "Bin N -> 0 bytes" or "N -" for text files]
 fixture/5/5/.zarray | 19 -----
 fixture/5/5/.zattrs | 1 -
 fixture/5/5/0 | Bin 216 -> 0 bytes
 fixture/5/5/1 | Bin 216 -> 0 bytes
fixture/5/5/10 | Bin 216 -> 0 bytes fixture/5/5/11 | Bin 216 -> 0 bytes fixture/5/5/12 | Bin 216 -> 0 bytes fixture/5/5/13 | Bin 216 -> 0 bytes fixture/5/5/14 | Bin 216 -> 0 bytes fixture/5/5/15 | Bin 216 -> 0 bytes fixture/5/5/16 | Bin 216 -> 0 bytes fixture/5/5/17 | Bin 216 -> 0 bytes fixture/5/5/18 | Bin 216 -> 0 bytes fixture/5/5/19 | Bin 216 -> 0 bytes fixture/5/5/2 | Bin 216 -> 0 bytes fixture/5/5/20 | Bin 216 -> 0 bytes fixture/5/5/21 | Bin 216 -> 0 bytes fixture/5/5/22 | Bin 86 -> 0 bytes fixture/5/5/3 | Bin 216 -> 0 bytes fixture/5/5/4 | Bin 216 -> 0 bytes fixture/5/5/5 | Bin 216 -> 0 bytes fixture/5/5/6 | Bin 216 -> 0 bytes fixture/5/5/7 | Bin 216 -> 0 bytes fixture/5/5/8 | Bin 216 -> 0 bytes fixture/5/5/9 | Bin 216 -> 0 bytes fixture/5/6/.zarray | 19 ----- fixture/5/6/.zattrs | 1 - fixture/5/6/0 | Bin 216 -> 0 bytes fixture/5/6/1 | Bin 216 -> 0 bytes fixture/5/6/10 | Bin 216 -> 0 bytes fixture/5/6/11 | Bin 216 -> 0 bytes fixture/5/6/12 | Bin 216 -> 0 bytes fixture/5/6/13 | Bin 216 -> 0 bytes fixture/5/6/14 | Bin 216 -> 0 bytes fixture/5/6/15 | Bin 216 -> 0 bytes fixture/5/6/16 | Bin 216 -> 0 bytes fixture/5/6/17 | Bin 216 -> 0 bytes fixture/5/6/18 | Bin 216 -> 0 bytes fixture/5/6/19 | Bin 216 -> 0 bytes fixture/5/6/2 | Bin 216 -> 0 bytes fixture/5/6/20 | Bin 216 -> 0 bytes fixture/5/6/21 | Bin 216 -> 0 bytes fixture/5/6/22 | Bin 88 -> 0 bytes fixture/5/6/3 | Bin 216 -> 0 bytes fixture/5/6/4 | Bin 216 -> 0 bytes fixture/5/6/5 | Bin 216 -> 0 bytes fixture/5/6/6 | Bin 216 -> 0 bytes fixture/5/6/7 | Bin 216 -> 0 bytes fixture/5/6/8 | Bin 216 -> 0 bytes fixture/5/6/9 | Bin 216 -> 0 bytes fixture/6/.zattrs | 1 - fixture/6/0/.zarray | 14 ---- fixture/6/0/.zattrs | 1 - fixture/6/0/0 | Bin 400 -> 0 bytes fixture/6/0/1 | Bin 400 -> 0 bytes fixture/6/0/10 | Bin 400 -> 0 bytes fixture/6/0/11 | Bin 400 -> 0 bytes fixture/6/0/12 | Bin 400 -> 0 bytes fixture/6/0/13 | Bin 400 -> 0 bytes fixture/6/0/14 | Bin 400 -> 0 bytes fixture/6/0/15 | Bin 400 -> 0 bytes fixture/6/0/16 | Bin 400 -> 0 bytes fixture/6/0/17 | Bin 400 -> 0 bytes fixture/6/0/18 | Bin 400 -> 0 bytes fixture/6/0/19 | Bin 400 -> 0 bytes fixture/6/0/2 | Bin 400 -> 0 bytes fixture/6/0/20 | Bin 400 -> 0 bytes fixture/6/0/21 | Bin 400 -> 0 bytes fixture/6/0/22 | Bin 400 -> 0 bytes fixture/6/0/3 | Bin 400 -> 0 bytes fixture/6/0/4 | Bin 400 -> 0 bytes fixture/6/0/5 | Bin 400 -> 0 bytes fixture/6/0/6 | Bin 400 -> 0 bytes fixture/6/0/7 | Bin 400 -> 0 bytes fixture/6/0/8 | Bin 400 -> 0 bytes fixture/6/0/9 | Bin 400 -> 0 bytes fixture/6/1/.zarray | 17 ---- fixture/6/1/.zattrs | 1 - fixture/6/1/0 | 1 - fixture/6/1/1 | 1 - fixture/6/1/10 | Bin 265 -> 0 bytes fixture/6/1/11 | 1 - fixture/6/1/12 | Bin 265 -> 0 bytes fixture/6/1/13 | 2 - fixture/6/1/14 | 2 - fixture/6/1/15 | Bin 265 -> 0 bytes fixture/6/1/16 | 2 - fixture/6/1/17 | 2 - fixture/6/1/18 | 1 - fixture/6/1/19 | 1 - fixture/6/1/2 | 2 - fixture/6/1/20 | 3 - fixture/6/1/21 | 3 - fixture/6/1/22 | Bin 83 -> 0 bytes fixture/6/1/3 | 1 - fixture/6/1/4 | 1 - fixture/6/1/5 | Bin 266 -> 0 bytes fixture/6/1/6 | 1 - fixture/6/1/7 | Bin 264 -> 0 bytes fixture/6/1/8 | Bin 269 -> 0 bytes fixture/6/1/9 | 2 - fixture/6/2/.zarray | 17 ---- fixture/6/2/.zattrs | 1 - fixture/6/2/0 | Bin 271 -> 0 bytes fixture/6/2/1 | Bin 268 -> 0 bytes fixture/6/2/10 | Bin 270 -> 0 bytes fixture/6/2/11 | Bin 273 -> 0 bytes fixture/6/2/12 | Bin 275 -> 0 bytes fixture/6/2/13 | Bin 271 -> 0 bytes fixture/6/2/14 | Bin 273 -> 0 bytes fixture/6/2/15 | Bin 275 -> 0 bytes fixture/6/2/16 | Bin 275 -> 0 bytes fixture/6/2/17 | Bin 268 
-> 0 bytes fixture/6/2/18 | Bin 273 -> 0 bytes fixture/6/2/19 | Bin 274 -> 0 bytes fixture/6/2/2 | Bin 269 -> 0 bytes fixture/6/2/20 | Bin 265 -> 0 bytes fixture/6/2/21 | Bin 274 -> 0 bytes fixture/6/2/22 | Bin 110 -> 0 bytes fixture/6/2/3 | Bin 267 -> 0 bytes fixture/6/2/4 | Bin 263 -> 0 bytes fixture/6/2/5 | Bin 275 -> 0 bytes fixture/6/2/6 | Bin 269 -> 0 bytes fixture/6/2/7 | Bin 270 -> 0 bytes fixture/6/2/8 | Bin 277 -> 0 bytes fixture/6/2/9 | Bin 271 -> 0 bytes fixture/6/3/.zarray | 19 ----- fixture/6/3/.zattrs | 1 - fixture/6/3/0 | Bin 285 -> 0 bytes fixture/6/3/1 | Bin 288 -> 0 bytes fixture/6/3/10 | Bin 282 -> 0 bytes fixture/6/3/11 | Bin 280 -> 0 bytes fixture/6/3/12 | Bin 287 -> 0 bytes fixture/6/3/13 | Bin 281 -> 0 bytes fixture/6/3/14 | Bin 287 -> 0 bytes fixture/6/3/15 | Bin 279 -> 0 bytes fixture/6/3/16 | Bin 280 -> 0 bytes fixture/6/3/17 | Bin 284 -> 0 bytes fixture/6/3/18 | Bin 284 -> 0 bytes fixture/6/3/19 | Bin 284 -> 0 bytes fixture/6/3/2 | Bin 281 -> 0 bytes fixture/6/3/20 | Bin 283 -> 0 bytes fixture/6/3/21 | Bin 277 -> 0 bytes fixture/6/3/22 | Bin 107 -> 0 bytes fixture/6/3/3 | Bin 280 -> 0 bytes fixture/6/3/4 | Bin 286 -> 0 bytes fixture/6/3/5 | Bin 287 -> 0 bytes fixture/6/3/6 | Bin 280 -> 0 bytes fixture/6/3/7 | Bin 285 -> 0 bytes fixture/6/3/8 | Bin 277 -> 0 bytes fixture/6/3/9 | Bin 283 -> 0 bytes fixture/6/4/.zarray | 19 ----- fixture/6/4/.zattrs | 1 - fixture/6/4/0 | Bin 236 -> 0 bytes fixture/6/4/1 | Bin 236 -> 0 bytes fixture/6/4/10 | Bin 234 -> 0 bytes fixture/6/4/11 | Bin 231 -> 0 bytes fixture/6/4/12 | Bin 235 -> 0 bytes fixture/6/4/13 | Bin 230 -> 0 bytes fixture/6/4/14 | Bin 234 -> 0 bytes fixture/6/4/15 | Bin 232 -> 0 bytes fixture/6/4/16 | Bin 234 -> 0 bytes fixture/6/4/17 | Bin 233 -> 0 bytes fixture/6/4/18 | Bin 234 -> 0 bytes fixture/6/4/19 | Bin 234 -> 0 bytes fixture/6/4/2 | Bin 232 -> 0 bytes fixture/6/4/20 | Bin 232 -> 0 bytes fixture/6/4/21 | Bin 232 -> 0 bytes fixture/6/4/22 | Bin 89 -> 0 bytes fixture/6/4/3 | Bin 230 -> 0 bytes fixture/6/4/4 | Bin 232 -> 0 bytes fixture/6/4/5 | Bin 234 -> 0 bytes fixture/6/4/6 | Bin 235 -> 0 bytes fixture/6/4/7 | Bin 232 -> 0 bytes fixture/6/4/8 | Bin 233 -> 0 bytes fixture/6/4/9 | Bin 231 -> 0 bytes fixture/6/5/.zarray | 19 ----- fixture/6/5/.zattrs | 1 - fixture/6/5/0 | Bin 285 -> 0 bytes fixture/6/5/1 | Bin 288 -> 0 bytes fixture/6/5/10 | Bin 282 -> 0 bytes fixture/6/5/11 | Bin 280 -> 0 bytes fixture/6/5/12 | Bin 287 -> 0 bytes fixture/6/5/13 | Bin 281 -> 0 bytes fixture/6/5/14 | Bin 287 -> 0 bytes fixture/6/5/15 | Bin 279 -> 0 bytes fixture/6/5/16 | Bin 280 -> 0 bytes fixture/6/5/17 | Bin 284 -> 0 bytes fixture/6/5/18 | Bin 284 -> 0 bytes fixture/6/5/19 | Bin 284 -> 0 bytes fixture/6/5/2 | Bin 281 -> 0 bytes fixture/6/5/20 | Bin 283 -> 0 bytes fixture/6/5/21 | Bin 277 -> 0 bytes fixture/6/5/22 | Bin 107 -> 0 bytes fixture/6/5/3 | Bin 280 -> 0 bytes fixture/6/5/4 | Bin 286 -> 0 bytes fixture/6/5/5 | Bin 287 -> 0 bytes fixture/6/5/6 | Bin 280 -> 0 bytes fixture/6/5/7 | Bin 285 -> 0 bytes fixture/6/5/8 | Bin 277 -> 0 bytes fixture/6/5/9 | Bin 283 -> 0 bytes fixture/6/6/.zarray | 19 ----- fixture/6/6/.zattrs | 1 - fixture/6/6/0 | Bin 416 -> 0 bytes fixture/6/6/1 | Bin 416 -> 0 bytes fixture/6/6/10 | Bin 416 -> 0 bytes fixture/6/6/11 | Bin 416 -> 0 bytes fixture/6/6/12 | Bin 416 -> 0 bytes fixture/6/6/13 | Bin 416 -> 0 bytes fixture/6/6/14 | Bin 416 -> 0 bytes fixture/6/6/15 | Bin 416 -> 0 bytes fixture/6/6/16 | Bin 416 -> 0 bytes fixture/6/6/17 | Bin 416 -> 0 bytes fixture/6/6/18 | Bin 416 -> 0 bytes 
fixture/6/6/19 | Bin 416 -> 0 bytes fixture/6/6/2 | Bin 416 -> 0 bytes fixture/6/6/20 | Bin 416 -> 0 bytes fixture/6/6/21 | Bin 416 -> 0 bytes fixture/6/6/22 | Bin 131 -> 0 bytes fixture/6/6/3 | Bin 416 -> 0 bytes fixture/6/6/4 | Bin 416 -> 0 bytes fixture/6/6/5 | Bin 416 -> 0 bytes fixture/6/6/6 | Bin 416 -> 0 bytes fixture/6/6/7 | Bin 416 -> 0 bytes fixture/6/6/8 | Bin 416 -> 0 bytes fixture/6/6/9 | Bin 416 -> 0 bytes fixture/7/.zattrs | 1 - fixture/7/0/.zarray | 14 ---- fixture/7/0/.zattrs | 1 - fixture/7/0/0 | Bin 800 -> 0 bytes fixture/7/0/1 | Bin 800 -> 0 bytes fixture/7/0/10 | Bin 800 -> 0 bytes fixture/7/0/11 | Bin 800 -> 0 bytes fixture/7/0/12 | Bin 800 -> 0 bytes fixture/7/0/13 | Bin 800 -> 0 bytes fixture/7/0/14 | Bin 800 -> 0 bytes fixture/7/0/15 | Bin 800 -> 0 bytes fixture/7/0/16 | Bin 800 -> 0 bytes fixture/7/0/17 | Bin 800 -> 0 bytes fixture/7/0/18 | Bin 800 -> 0 bytes fixture/7/0/19 | Bin 800 -> 0 bytes fixture/7/0/2 | Bin 800 -> 0 bytes fixture/7/0/20 | Bin 800 -> 0 bytes fixture/7/0/21 | Bin 800 -> 0 bytes fixture/7/0/22 | Bin 800 -> 0 bytes fixture/7/0/3 | Bin 800 -> 0 bytes fixture/7/0/4 | Bin 800 -> 0 bytes fixture/7/0/5 | Bin 800 -> 0 bytes fixture/7/0/6 | Bin 800 -> 0 bytes fixture/7/0/7 | Bin 800 -> 0 bytes fixture/7/0/8 | Bin 800 -> 0 bytes fixture/7/0/9 | Bin 800 -> 0 bytes fixture/7/1/.zarray | 17 ---- fixture/7/1/.zattrs | 1 - fixture/7/1/0 | Bin 289 -> 0 bytes fixture/7/1/1 | 1 - fixture/7/1/10 | Bin 287 -> 0 bytes fixture/7/1/11 | 1 - fixture/7/1/12 | 2 - fixture/7/1/13 | 1 - fixture/7/1/14 | Bin 286 -> 0 bytes fixture/7/1/15 | 1 - fixture/7/1/16 | 2 - fixture/7/1/17 | 1 - fixture/7/1/18 | 3 - fixture/7/1/19 | 1 - fixture/7/1/2 | 1 - fixture/7/1/20 | 2 - fixture/7/1/21 | 1 - fixture/7/1/22 | Bin 85 -> 0 bytes fixture/7/1/3 | 1 - fixture/7/1/4 | 2 - fixture/7/1/5 | 6 -- fixture/7/1/6 | 2 - fixture/7/1/7 | 1 - fixture/7/1/8 | 1 - fixture/7/1/9 | 2 - fixture/7/2/.zarray | 17 ---- fixture/7/2/.zattrs | 1 - fixture/7/2/0 | Bin 283 -> 0 bytes fixture/7/2/1 | Bin 283 -> 0 bytes fixture/7/2/10 | Bin 283 -> 0 bytes fixture/7/2/11 | Bin 276 -> 0 bytes fixture/7/2/12 | Bin 276 -> 0 bytes fixture/7/2/13 | Bin 283 -> 0 bytes fixture/7/2/14 | Bin 284 -> 0 bytes fixture/7/2/15 | Bin 270 -> 0 bytes fixture/7/2/16 | Bin 280 -> 0 bytes fixture/7/2/17 | Bin 283 -> 0 bytes fixture/7/2/18 | Bin 283 -> 0 bytes fixture/7/2/19 | Bin 281 -> 0 bytes fixture/7/2/2 | Bin 280 -> 0 bytes fixture/7/2/20 | Bin 280 -> 0 bytes fixture/7/2/21 | Bin 275 -> 0 bytes fixture/7/2/22 | Bin 111 -> 0 bytes fixture/7/2/3 | Bin 277 -> 0 bytes fixture/7/2/4 | Bin 278 -> 0 bytes fixture/7/2/5 | Bin 284 -> 0 bytes fixture/7/2/6 | Bin 282 -> 0 bytes fixture/7/2/7 | Bin 284 -> 0 bytes fixture/7/2/8 | Bin 277 -> 0 bytes fixture/7/2/9 | Bin 281 -> 0 bytes fixture/7/3/.zarray | 19 ----- fixture/7/3/.zattrs | 1 - fixture/7/3/0 | Bin 292 -> 0 bytes fixture/7/3/1 | Bin 265 -> 0 bytes fixture/7/3/10 | Bin 260 -> 0 bytes fixture/7/3/11 | Bin 270 -> 0 bytes fixture/7/3/12 | Bin 255 -> 0 bytes fixture/7/3/13 | Bin 288 -> 0 bytes fixture/7/3/14 | Bin 289 -> 0 bytes fixture/7/3/15 | Bin 262 -> 0 bytes fixture/7/3/16 | Bin 264 -> 0 bytes fixture/7/3/17 | Bin 272 -> 0 bytes fixture/7/3/18 | Bin 265 -> 0 bytes fixture/7/3/19 | Bin 263 -> 0 bytes fixture/7/3/2 | Bin 260 -> 0 bytes fixture/7/3/20 | Bin 267 -> 0 bytes fixture/7/3/21 | Bin 262 -> 0 bytes fixture/7/3/22 | Bin 131 -> 0 bytes fixture/7/3/3 | Bin 266 -> 0 bytes fixture/7/3/4 | Bin 264 -> 0 bytes fixture/7/3/5 | Bin 264 -> 0 bytes fixture/7/3/6 | Bin 290 -> 0 bytes 
fixture/7/3/7 | Bin 263 -> 0 bytes fixture/7/3/8 | Bin 261 -> 0 bytes fixture/7/3/9 | Bin 263 -> 0 bytes fixture/7/4/.zarray | 19 ----- fixture/7/4/.zattrs | 1 - fixture/7/4/0 | Bin 234 -> 0 bytes fixture/7/4/1 | Bin 237 -> 0 bytes fixture/7/4/10 | Bin 235 -> 0 bytes fixture/7/4/11 | Bin 235 -> 0 bytes fixture/7/4/12 | Bin 235 -> 0 bytes fixture/7/4/13 | Bin 234 -> 0 bytes fixture/7/4/14 | Bin 235 -> 0 bytes fixture/7/4/15 | Bin 236 -> 0 bytes fixture/7/4/16 | Bin 235 -> 0 bytes fixture/7/4/17 | Bin 242 -> 0 bytes fixture/7/4/18 | Bin 233 -> 0 bytes fixture/7/4/19 | Bin 235 -> 0 bytes fixture/7/4/2 | Bin 234 -> 0 bytes fixture/7/4/20 | Bin 235 -> 0 bytes fixture/7/4/21 | Bin 233 -> 0 bytes fixture/7/4/22 | Bin 90 -> 0 bytes fixture/7/4/3 | Bin 234 -> 0 bytes fixture/7/4/4 | Bin 234 -> 0 bytes fixture/7/4/5 | Bin 234 -> 0 bytes fixture/7/4/6 | Bin 235 -> 0 bytes fixture/7/4/7 | Bin 235 -> 0 bytes fixture/7/4/8 | Bin 236 -> 0 bytes fixture/7/4/9 | Bin 233 -> 0 bytes fixture/7/5/.zarray | 19 ----- fixture/7/5/.zattrs | 1 - fixture/7/5/0 | Bin 292 -> 0 bytes fixture/7/5/1 | Bin 265 -> 0 bytes fixture/7/5/10 | Bin 260 -> 0 bytes fixture/7/5/11 | Bin 270 -> 0 bytes fixture/7/5/12 | Bin 255 -> 0 bytes fixture/7/5/13 | Bin 288 -> 0 bytes fixture/7/5/14 | Bin 289 -> 0 bytes fixture/7/5/15 | Bin 262 -> 0 bytes fixture/7/5/16 | Bin 264 -> 0 bytes fixture/7/5/17 | Bin 272 -> 0 bytes fixture/7/5/18 | Bin 265 -> 0 bytes fixture/7/5/19 | Bin 263 -> 0 bytes fixture/7/5/2 | Bin 260 -> 0 bytes fixture/7/5/20 | Bin 267 -> 0 bytes fixture/7/5/21 | Bin 262 -> 0 bytes fixture/7/5/22 | Bin 131 -> 0 bytes fixture/7/5/3 | Bin 266 -> 0 bytes fixture/7/5/4 | Bin 264 -> 0 bytes fixture/7/5/5 | Bin 264 -> 0 bytes fixture/7/5/6 | Bin 290 -> 0 bytes fixture/7/5/7 | Bin 263 -> 0 bytes fixture/7/5/8 | Bin 261 -> 0 bytes fixture/7/5/9 | Bin 263 -> 0 bytes fixture/7/6/.zarray | 19 ----- fixture/7/6/.zattrs | 1 - fixture/7/6/0 | Bin 445 -> 0 bytes fixture/7/6/1 | Bin 445 -> 0 bytes fixture/7/6/10 | Bin 453 -> 0 bytes fixture/7/6/11 | Bin 441 -> 0 bytes fixture/7/6/12 | Bin 439 -> 0 bytes fixture/7/6/13 | Bin 443 -> 0 bytes fixture/7/6/14 | Bin 444 -> 0 bytes fixture/7/6/15 | Bin 443 -> 0 bytes fixture/7/6/16 | Bin 450 -> 0 bytes fixture/7/6/17 | Bin 447 -> 0 bytes fixture/7/6/18 | Bin 440 -> 0 bytes fixture/7/6/19 | Bin 446 -> 0 bytes fixture/7/6/2 | Bin 449 -> 0 bytes fixture/7/6/20 | Bin 449 -> 0 bytes fixture/7/6/21 | Bin 445 -> 0 bytes fixture/7/6/22 | Bin 140 -> 0 bytes fixture/7/6/3 | Bin 450 -> 0 bytes fixture/7/6/4 | Bin 452 -> 0 bytes fixture/7/6/5 | Bin 452 -> 0 bytes fixture/7/6/6 | Bin 453 -> 0 bytes fixture/7/6/7 | Bin 451 -> 0 bytes fixture/7/6/8 | Bin 446 -> 0 bytes fixture/7/6/9 | Bin 446 -> 0 bytes fixture/8/.zattrs | 1 - fixture/8/0/.zarray | 14 ---- fixture/8/0/.zattrs | 1 - fixture/8/0/0 | Bin 200 -> 0 bytes fixture/8/0/1 | 1 - fixture/8/0/10 | Bin 200 -> 0 bytes fixture/8/0/11 | 1 - fixture/8/0/12 | 2 - fixture/8/0/13 | 1 - fixture/8/0/14 | Bin 200 -> 0 bytes fixture/8/0/15 | 1 - fixture/8/0/16 | Bin 200 -> 0 bytes fixture/8/0/17 | 1 - fixture/8/0/18 | 1 - fixture/8/0/19 | 1 - fixture/8/0/2 | 1 - fixture/8/0/20 | Bin 200 -> 0 bytes fixture/8/0/21 | 1 - fixture/8/0/22 | 1 - fixture/8/0/23 | 1 - fixture/8/0/24 | Bin 200 -> 0 bytes fixture/8/0/25 | 3 - fixture/8/0/26 | 1 - fixture/8/0/27 | 1 - fixture/8/0/28 | 1 - fixture/8/0/29 | Bin 200 -> 0 bytes fixture/8/0/3 | 1 - fixture/8/0/30 | 1 - fixture/8/0/31 | 1 - fixture/8/0/32 | 1 - fixture/8/0/33 | Bin 200 -> 0 bytes fixture/8/0/4 | 1 - fixture/8/0/5 | 1 - 
fixture/8/0/6 | 2 - fixture/8/0/7 | Bin 200 -> 0 bytes fixture/8/0/8 | Bin 200 -> 0 bytes fixture/8/0/9 | 1 - fixture/8/1/.zarray | 17 ---- fixture/8/1/.zattrs | 1 - fixture/8/1/0 | 1 - fixture/8/1/1 | Bin 170 -> 0 bytes fixture/8/1/10 | Bin 169 -> 0 bytes fixture/8/1/11 | Bin 158 -> 0 bytes fixture/8/1/12 | Bin 169 -> 0 bytes fixture/8/1/13 | Bin 159 -> 0 bytes fixture/8/1/14 | Bin 170 -> 0 bytes fixture/8/1/15 | Bin 158 -> 0 bytes fixture/8/1/16 | Bin 151 -> 0 bytes fixture/8/1/17 | Bin 103 -> 0 bytes fixture/8/1/18 | Bin 102 -> 0 bytes fixture/8/1/19 | Bin 103 -> 0 bytes fixture/8/1/2 | Bin 187 -> 0 bytes fixture/8/1/20 | Bin 111 -> 0 bytes fixture/8/1/21 | Bin 103 -> 0 bytes fixture/8/1/22 | Bin 102 -> 0 bytes fixture/8/1/23 | Bin 103 -> 0 bytes fixture/8/1/24 | Bin 101 -> 0 bytes fixture/8/1/25 | Bin 102 -> 0 bytes fixture/8/1/26 | Bin 102 -> 0 bytes fixture/8/1/27 | Bin 102 -> 0 bytes fixture/8/1/28 | Bin 102 -> 0 bytes fixture/8/1/29 | Bin 111 -> 0 bytes fixture/8/1/3 | 1 - fixture/8/1/30 | Bin 102 -> 0 bytes fixture/8/1/31 | Bin 102 -> 0 bytes fixture/8/1/32 | Bin 103 -> 0 bytes fixture/8/1/33 | Bin 52 -> 0 bytes fixture/8/1/4 | Bin 185 -> 0 bytes fixture/8/1/5 | Bin 182 -> 0 bytes fixture/8/1/6 | Bin 183 -> 0 bytes fixture/8/1/7 | Bin 185 -> 0 bytes fixture/8/1/8 | Bin 177 -> 0 bytes fixture/8/1/9 | Bin 159 -> 0 bytes fixture/8/2/.zarray | 17 ---- fixture/8/2/.zattrs | 1 - fixture/8/2/0 | Bin 222 -> 0 bytes fixture/8/2/1 | Bin 198 -> 0 bytes fixture/8/2/10 | Bin 184 -> 0 bytes fixture/8/2/11 | Bin 185 -> 0 bytes fixture/8/2/12 | Bin 184 -> 0 bytes fixture/8/2/13 | Bin 183 -> 0 bytes fixture/8/2/14 | Bin 182 -> 0 bytes fixture/8/2/15 | Bin 183 -> 0 bytes fixture/8/2/16 | Bin 170 -> 0 bytes fixture/8/2/17 | Bin 142 -> 0 bytes fixture/8/2/18 | Bin 142 -> 0 bytes fixture/8/2/19 | Bin 127 -> 0 bytes fixture/8/2/2 | Bin 225 -> 0 bytes fixture/8/2/20 | Bin 137 -> 0 bytes fixture/8/2/21 | Bin 147 -> 0 bytes fixture/8/2/22 | Bin 129 -> 0 bytes fixture/8/2/23 | Bin 129 -> 0 bytes fixture/8/2/24 | Bin 147 -> 0 bytes fixture/8/2/25 | Bin 144 -> 0 bytes fixture/8/2/26 | Bin 127 -> 0 bytes fixture/8/2/27 | Bin 142 -> 0 bytes fixture/8/2/28 | Bin 129 -> 0 bytes fixture/8/2/29 | Bin 143 -> 0 bytes fixture/8/2/3 | Bin 218 -> 0 bytes fixture/8/2/30 | Bin 145 -> 0 bytes fixture/8/2/31 | Bin 142 -> 0 bytes fixture/8/2/32 | Bin 129 -> 0 bytes fixture/8/2/33 | Bin 80 -> 0 bytes fixture/8/2/4 | Bin 207 -> 0 bytes fixture/8/2/5 | Bin 198 -> 0 bytes fixture/8/2/6 | Bin 196 -> 0 bytes fixture/8/2/7 | Bin 195 -> 0 bytes fixture/8/2/8 | Bin 190 -> 0 bytes fixture/8/2/9 | Bin 183 -> 0 bytes fixture/8/3/.zarray | 19 ----- fixture/8/3/.zattrs | 1 - fixture/8/3/0 | Bin 200 -> 0 bytes fixture/8/3/1 | Bin 199 -> 0 bytes fixture/8/3/10 | Bin 207 -> 0 bytes fixture/8/3/11 | Bin 189 -> 0 bytes fixture/8/3/12 | Bin 207 -> 0 bytes fixture/8/3/13 | Bin 188 -> 0 bytes fixture/8/3/14 | Bin 209 -> 0 bytes fixture/8/3/15 | Bin 186 -> 0 bytes fixture/8/3/16 | Bin 202 -> 0 bytes fixture/8/3/17 | Bin 163 -> 0 bytes fixture/8/3/18 | Bin 174 -> 0 bytes fixture/8/3/19 | Bin 179 -> 0 bytes fixture/8/3/2 | Bin 204 -> 0 bytes fixture/8/3/20 | Bin 191 -> 0 bytes fixture/8/3/21 | Bin 164 -> 0 bytes fixture/8/3/22 | Bin 173 -> 0 bytes fixture/8/3/23 | Bin 178 -> 0 bytes fixture/8/3/24 | Bin 183 -> 0 bytes fixture/8/3/25 | Bin 150 -> 0 bytes fixture/8/3/26 | Bin 171 -> 0 bytes fixture/8/3/27 | Bin 177 -> 0 bytes fixture/8/3/28 | Bin 181 -> 0 bytes fixture/8/3/29 | Bin 190 -> 0 bytes fixture/8/3/3 | Bin 202 -> 0 bytes fixture/8/3/30 | 
Bin 172 -> 0 bytes fixture/8/3/31 | Bin 177 -> 0 bytes fixture/8/3/32 | Bin 181 -> 0 bytes fixture/8/3/33 | Bin 93 -> 0 bytes fixture/8/3/4 | Bin 202 -> 0 bytes fixture/8/3/5 | Bin 204 -> 0 bytes fixture/8/3/6 | Bin 205 -> 0 bytes fixture/8/3/7 | Bin 206 -> 0 bytes fixture/8/3/8 | Bin 208 -> 0 bytes fixture/8/3/9 | Bin 190 -> 0 bytes fixture/8/4/.zarray | 19 ----- fixture/8/4/.zattrs | 1 - fixture/8/4/0 | Bin 176 -> 0 bytes fixture/8/4/1 | Bin 151 -> 0 bytes fixture/8/4/10 | Bin 145 -> 0 bytes fixture/8/4/11 | Bin 142 -> 0 bytes fixture/8/4/12 | Bin 145 -> 0 bytes fixture/8/4/13 | Bin 142 -> 0 bytes fixture/8/4/14 | Bin 145 -> 0 bytes fixture/8/4/15 | Bin 142 -> 0 bytes fixture/8/4/16 | Bin 145 -> 0 bytes fixture/8/4/17 | Bin 142 -> 0 bytes fixture/8/4/18 | Bin 142 -> 0 bytes fixture/8/4/19 | Bin 142 -> 0 bytes fixture/8/4/2 | Bin 130 -> 0 bytes fixture/8/4/20 | Bin 145 -> 0 bytes fixture/8/4/21 | Bin 142 -> 0 bytes fixture/8/4/22 | Bin 142 -> 0 bytes fixture/8/4/23 | Bin 142 -> 0 bytes fixture/8/4/24 | Bin 143 -> 0 bytes fixture/8/4/25 | Bin 135 -> 0 bytes fixture/8/4/26 | Bin 142 -> 0 bytes fixture/8/4/27 | Bin 142 -> 0 bytes fixture/8/4/28 | Bin 142 -> 0 bytes fixture/8/4/29 | Bin 145 -> 0 bytes fixture/8/4/3 | Bin 127 -> 0 bytes fixture/8/4/30 | Bin 142 -> 0 bytes fixture/8/4/31 | Bin 142 -> 0 bytes fixture/8/4/32 | Bin 142 -> 0 bytes fixture/8/4/33 | Bin 80 -> 0 bytes fixture/8/4/4 | Bin 145 -> 0 bytes fixture/8/4/5 | Bin 145 -> 0 bytes fixture/8/4/6 | Bin 145 -> 0 bytes fixture/8/4/7 | Bin 145 -> 0 bytes fixture/8/4/8 | Bin 145 -> 0 bytes fixture/8/4/9 | Bin 142 -> 0 bytes fixture/8/5/.zarray | 19 ----- fixture/8/5/.zattrs | 1 - fixture/8/5/0 | Bin 200 -> 0 bytes fixture/8/5/1 | Bin 199 -> 0 bytes fixture/8/5/10 | Bin 207 -> 0 bytes fixture/8/5/11 | Bin 189 -> 0 bytes fixture/8/5/12 | Bin 207 -> 0 bytes fixture/8/5/13 | Bin 188 -> 0 bytes fixture/8/5/14 | Bin 209 -> 0 bytes fixture/8/5/15 | Bin 186 -> 0 bytes fixture/8/5/16 | Bin 202 -> 0 bytes fixture/8/5/17 | Bin 163 -> 0 bytes fixture/8/5/18 | Bin 174 -> 0 bytes fixture/8/5/19 | Bin 179 -> 0 bytes fixture/8/5/2 | Bin 204 -> 0 bytes fixture/8/5/20 | Bin 191 -> 0 bytes fixture/8/5/21 | Bin 164 -> 0 bytes fixture/8/5/22 | Bin 173 -> 0 bytes fixture/8/5/23 | Bin 178 -> 0 bytes fixture/8/5/24 | Bin 183 -> 0 bytes fixture/8/5/25 | Bin 150 -> 0 bytes fixture/8/5/26 | Bin 171 -> 0 bytes fixture/8/5/27 | Bin 177 -> 0 bytes fixture/8/5/28 | Bin 181 -> 0 bytes fixture/8/5/29 | Bin 190 -> 0 bytes fixture/8/5/3 | Bin 202 -> 0 bytes fixture/8/5/30 | Bin 172 -> 0 bytes fixture/8/5/31 | Bin 177 -> 0 bytes fixture/8/5/32 | Bin 181 -> 0 bytes fixture/8/5/33 | Bin 93 -> 0 bytes fixture/8/5/4 | Bin 202 -> 0 bytes fixture/8/5/5 | Bin 204 -> 0 bytes fixture/8/5/6 | Bin 205 -> 0 bytes fixture/8/5/7 | Bin 206 -> 0 bytes fixture/8/5/8 | Bin 208 -> 0 bytes fixture/8/5/9 | Bin 190 -> 0 bytes fixture/8/6/.zarray | 19 ----- fixture/8/6/.zattrs | 1 - fixture/8/6/0 | Bin 216 -> 0 bytes fixture/8/6/1 | Bin 216 -> 0 bytes fixture/8/6/10 | Bin 216 -> 0 bytes fixture/8/6/11 | Bin 216 -> 0 bytes fixture/8/6/12 | Bin 216 -> 0 bytes fixture/8/6/13 | Bin 216 -> 0 bytes fixture/8/6/14 | Bin 216 -> 0 bytes fixture/8/6/15 | Bin 216 -> 0 bytes fixture/8/6/16 | Bin 216 -> 0 bytes fixture/8/6/17 | Bin 216 -> 0 bytes fixture/8/6/18 | Bin 216 -> 0 bytes fixture/8/6/19 | Bin 216 -> 0 bytes fixture/8/6/2 | Bin 216 -> 0 bytes fixture/8/6/20 | Bin 216 -> 0 bytes fixture/8/6/21 | Bin 216 -> 0 bytes fixture/8/6/22 | Bin 216 -> 0 bytes fixture/8/6/23 | Bin 216 -> 0 bytes 
fixture/8/6/24 | Bin 216 -> 0 bytes fixture/8/6/25 | Bin 216 -> 0 bytes fixture/8/6/26 | Bin 216 -> 0 bytes fixture/8/6/27 | Bin 216 -> 0 bytes fixture/8/6/28 | Bin 216 -> 0 bytes fixture/8/6/29 | Bin 216 -> 0 bytes fixture/8/6/3 | Bin 216 -> 0 bytes fixture/8/6/30 | Bin 216 -> 0 bytes fixture/8/6/31 | Bin 216 -> 0 bytes fixture/8/6/32 | Bin 216 -> 0 bytes fixture/8/6/33 | Bin 110 -> 0 bytes fixture/8/6/4 | Bin 216 -> 0 bytes fixture/8/6/5 | Bin 216 -> 0 bytes fixture/8/6/6 | Bin 216 -> 0 bytes fixture/8/6/7 | Bin 216 -> 0 bytes fixture/8/6/8 | Bin 216 -> 0 bytes fixture/8/6/9 | Bin 216 -> 0 bytes fixture/9/.zattrs | 1 - fixture/9/0/.zarray | 14 ---- fixture/9/0/.zattrs | 1 - fixture/9/0/0 | Bin 400 -> 0 bytes fixture/9/0/1 | 3 - fixture/9/0/10 | 3 - fixture/9/0/11 | 1 - fixture/9/0/12 | Bin 400 -> 0 bytes fixture/9/0/13 | 2 - fixture/9/0/14 | 3 - fixture/9/0/15 | Bin 400 -> 0 bytes fixture/9/0/16 | Bin 400 -> 0 bytes fixture/9/0/17 | 4 - fixture/9/0/18 | Bin 400 -> 0 bytes fixture/9/0/19 | 1 - fixture/9/0/2 | Bin 400 -> 0 bytes fixture/9/0/20 | 5 -- fixture/9/0/21 | 2 - fixture/9/0/22 | 1 - fixture/9/0/23 | Bin 400 -> 0 bytes fixture/9/0/24 | Bin 400 -> 0 bytes fixture/9/0/25 | 1 - fixture/9/0/26 | Bin 400 -> 0 bytes fixture/9/0/27 | 1 - fixture/9/0/28 | 4 - fixture/9/0/29 | 2 - fixture/9/0/3 | 2 - fixture/9/0/30 | 2 - fixture/9/0/31 | Bin 400 -> 0 bytes fixture/9/0/32 | Bin 400 -> 0 bytes fixture/9/0/33 | Bin 400 -> 0 bytes fixture/9/0/4 | Bin 400 -> 0 bytes fixture/9/0/5 | 2 - fixture/9/0/6 | Bin 400 -> 0 bytes fixture/9/0/7 | 3 - fixture/9/0/8 | Bin 400 -> 0 bytes fixture/9/0/9 | Bin 400 -> 0 bytes fixture/9/1/.zarray | 17 ---- fixture/9/1/.zattrs | 1 - fixture/9/1/0 | Bin 370 -> 0 bytes fixture/9/1/1 | Bin 406 -> 0 bytes fixture/9/1/10 | Bin 375 -> 0 bytes fixture/9/1/11 | Bin 370 -> 0 bytes fixture/9/1/12 | Bin 372 -> 0 bytes fixture/9/1/13 | Bin 375 -> 0 bytes fixture/9/1/14 | Bin 373 -> 0 bytes fixture/9/1/15 | Bin 377 -> 0 bytes fixture/9/1/16 | 1 - fixture/9/1/17 | Bin 360 -> 0 bytes fixture/9/1/18 | 3 - fixture/9/1/19 | Bin 359 -> 0 bytes fixture/9/1/2 | Bin 402 -> 0 bytes fixture/9/1/20 | 3 - fixture/9/1/21 | Bin 358 -> 0 bytes fixture/9/1/22 | 1 - fixture/9/1/23 | Bin 358 -> 0 bytes fixture/9/1/24 | 1 - fixture/9/1/25 | Bin 360 -> 0 bytes fixture/9/1/26 | 1 - fixture/9/1/27 | Bin 360 -> 0 bytes fixture/9/1/28 | 1 - fixture/9/1/29 | Bin 361 -> 0 bytes fixture/9/1/3 | Bin 402 -> 0 bytes fixture/9/1/30 | 3 - fixture/9/1/31 | Bin 355 -> 0 bytes fixture/9/1/32 | 3 - fixture/9/1/33 | 2 - fixture/9/1/4 | 4 - fixture/9/1/5 | Bin 396 -> 0 bytes fixture/9/1/6 | Bin 394 -> 0 bytes fixture/9/1/7 | Bin 398 -> 0 bytes fixture/9/1/8 | Bin 382 -> 0 bytes fixture/9/1/9 | Bin 374 -> 0 bytes fixture/9/2/.zarray | 17 ---- fixture/9/2/.zattrs | 1 - fixture/9/2/0 | Bin 458 -> 0 bytes fixture/9/2/1 | Bin 523 -> 0 bytes fixture/9/2/10 | Bin 422 -> 0 bytes fixture/9/2/11 | Bin 433 -> 0 bytes fixture/9/2/12 | Bin 416 -> 0 bytes fixture/9/2/13 | Bin 418 -> 0 bytes fixture/9/2/14 | Bin 434 -> 0 bytes fixture/9/2/15 | Bin 428 -> 0 bytes fixture/9/2/16 | Bin 419 -> 0 bytes fixture/9/2/17 | Bin 383 -> 0 bytes fixture/9/2/18 | Bin 385 -> 0 bytes fixture/9/2/19 | Bin 382 -> 0 bytes fixture/9/2/2 | Bin 521 -> 0 bytes fixture/9/2/20 | Bin 387 -> 0 bytes fixture/9/2/21 | Bin 396 -> 0 bytes fixture/9/2/22 | Bin 387 -> 0 bytes fixture/9/2/23 | Bin 382 -> 0 bytes fixture/9/2/24 | Bin 390 -> 0 bytes fixture/9/2/25 | Bin 381 -> 0 bytes fixture/9/2/26 | Bin 394 -> 0 bytes fixture/9/2/27 | Bin 386 -> 0 bytes 
fixture/9/2/28 | Bin 384 -> 0 bytes fixture/9/2/29 | Bin 382 -> 0 bytes fixture/9/2/3 | Bin 503 -> 0 bytes fixture/9/2/30 | Bin 381 -> 0 bytes fixture/9/2/31 | Bin 387 -> 0 bytes fixture/9/2/32 | Bin 370 -> 0 bytes fixture/9/2/33 | Bin 170 -> 0 bytes fixture/9/2/4 | Bin 499 -> 0 bytes fixture/9/2/5 | Bin 485 -> 0 bytes fixture/9/2/6 | Bin 468 -> 0 bytes fixture/9/2/7 | Bin 467 -> 0 bytes fixture/9/2/8 | Bin 433 -> 0 bytes fixture/9/2/9 | Bin 418 -> 0 bytes fixture/9/3/.zarray | 19 ----- fixture/9/3/.zattrs | 1 - fixture/9/3/0 | Bin 416 -> 0 bytes fixture/9/3/1 | Bin 416 -> 0 bytes fixture/9/3/10 | Bin 416 -> 0 bytes fixture/9/3/11 | Bin 416 -> 0 bytes fixture/9/3/12 | Bin 416 -> 0 bytes fixture/9/3/13 | Bin 408 -> 0 bytes fixture/9/3/14 | Bin 409 -> 0 bytes fixture/9/3/15 | Bin 416 -> 0 bytes fixture/9/3/16 | Bin 416 -> 0 bytes fixture/9/3/17 | Bin 395 -> 0 bytes fixture/9/3/18 | Bin 385 -> 0 bytes fixture/9/3/19 | Bin 395 -> 0 bytes fixture/9/3/2 | Bin 416 -> 0 bytes fixture/9/3/20 | Bin 388 -> 0 bytes fixture/9/3/21 | Bin 392 -> 0 bytes fixture/9/3/22 | Bin 381 -> 0 bytes fixture/9/3/23 | Bin 388 -> 0 bytes fixture/9/3/24 | Bin 379 -> 0 bytes fixture/9/3/25 | Bin 393 -> 0 bytes fixture/9/3/26 | Bin 384 -> 0 bytes fixture/9/3/27 | Bin 393 -> 0 bytes fixture/9/3/28 | Bin 384 -> 0 bytes fixture/9/3/29 | Bin 395 -> 0 bytes fixture/9/3/3 | Bin 416 -> 0 bytes fixture/9/3/30 | Bin 387 -> 0 bytes fixture/9/3/31 | Bin 388 -> 0 bytes fixture/9/3/32 | Bin 384 -> 0 bytes fixture/9/3/33 | Bin 162 -> 0 bytes fixture/9/3/4 | Bin 416 -> 0 bytes fixture/9/3/5 | Bin 416 -> 0 bytes fixture/9/3/6 | Bin 416 -> 0 bytes fixture/9/3/7 | Bin 416 -> 0 bytes fixture/9/3/8 | Bin 416 -> 0 bytes fixture/9/3/9 | Bin 416 -> 0 bytes fixture/9/4/.zarray | 19 ----- fixture/9/4/.zattrs | 1 - fixture/9/4/0 | Bin 350 -> 0 bytes fixture/9/4/1 | Bin 346 -> 0 bytes fixture/9/4/10 | Bin 278 -> 0 bytes fixture/9/4/11 | Bin 277 -> 0 bytes fixture/9/4/12 | Bin 281 -> 0 bytes fixture/9/4/13 | Bin 277 -> 0 bytes fixture/9/4/14 | Bin 278 -> 0 bytes fixture/9/4/15 | Bin 279 -> 0 bytes fixture/9/4/16 | Bin 274 -> 0 bytes fixture/9/4/17 | Bin 265 -> 0 bytes fixture/9/4/18 | Bin 255 -> 0 bytes fixture/9/4/19 | Bin 263 -> 0 bytes fixture/9/4/2 | Bin 343 -> 0 bytes fixture/9/4/20 | Bin 266 -> 0 bytes fixture/9/4/21 | Bin 254 -> 0 bytes fixture/9/4/22 | Bin 262 -> 0 bytes fixture/9/4/23 | Bin 259 -> 0 bytes fixture/9/4/24 | Bin 263 -> 0 bytes fixture/9/4/25 | Bin 262 -> 0 bytes fixture/9/4/26 | Bin 258 -> 0 bytes fixture/9/4/27 | Bin 263 -> 0 bytes fixture/9/4/28 | Bin 255 -> 0 bytes fixture/9/4/29 | Bin 265 -> 0 bytes fixture/9/4/3 | Bin 343 -> 0 bytes fixture/9/4/30 | Bin 264 -> 0 bytes fixture/9/4/31 | Bin 254 -> 0 bytes fixture/9/4/32 | Bin 264 -> 0 bytes fixture/9/4/33 | Bin 126 -> 0 bytes fixture/9/4/4 | Bin 346 -> 0 bytes fixture/9/4/5 | Bin 343 -> 0 bytes fixture/9/4/6 | Bin 343 -> 0 bytes fixture/9/4/7 | Bin 343 -> 0 bytes fixture/9/4/8 | Bin 299 -> 0 bytes fixture/9/4/9 | Bin 279 -> 0 bytes fixture/9/5/.zarray | 19 ----- fixture/9/5/.zattrs | 1 - fixture/9/5/0 | Bin 416 -> 0 bytes fixture/9/5/1 | Bin 416 -> 0 bytes fixture/9/5/10 | Bin 416 -> 0 bytes fixture/9/5/11 | Bin 416 -> 0 bytes fixture/9/5/12 | Bin 416 -> 0 bytes fixture/9/5/13 | Bin 408 -> 0 bytes fixture/9/5/14 | Bin 409 -> 0 bytes fixture/9/5/15 | Bin 416 -> 0 bytes fixture/9/5/16 | Bin 416 -> 0 bytes fixture/9/5/17 | Bin 395 -> 0 bytes fixture/9/5/18 | Bin 385 -> 0 bytes fixture/9/5/19 | Bin 395 -> 0 bytes fixture/9/5/2 | Bin 416 -> 0 bytes fixture/9/5/20 | Bin 388 -> 0 
bytes fixture/9/5/21 | Bin 392 -> 0 bytes fixture/9/5/22 | Bin 381 -> 0 bytes fixture/9/5/23 | Bin 388 -> 0 bytes fixture/9/5/24 | Bin 379 -> 0 bytes fixture/9/5/25 | Bin 393 -> 0 bytes fixture/9/5/26 | Bin 384 -> 0 bytes fixture/9/5/27 | Bin 393 -> 0 bytes fixture/9/5/28 | Bin 384 -> 0 bytes fixture/9/5/29 | Bin 395 -> 0 bytes fixture/9/5/3 | Bin 416 -> 0 bytes fixture/9/5/30 | Bin 387 -> 0 bytes fixture/9/5/31 | Bin 388 -> 0 bytes fixture/9/5/32 | Bin 384 -> 0 bytes fixture/9/5/33 | Bin 162 -> 0 bytes fixture/9/5/4 | Bin 416 -> 0 bytes fixture/9/5/5 | Bin 416 -> 0 bytes fixture/9/5/6 | Bin 416 -> 0 bytes fixture/9/5/7 | Bin 416 -> 0 bytes fixture/9/5/8 | Bin 416 -> 0 bytes fixture/9/5/9 | Bin 416 -> 0 bytes fixture/9/6/.zarray | 19 ----- fixture/9/6/.zattrs | 1 - fixture/9/6/0 | Bin 416 -> 0 bytes fixture/9/6/1 | Bin 416 -> 0 bytes fixture/9/6/10 | Bin 416 -> 0 bytes fixture/9/6/11 | Bin 416 -> 0 bytes fixture/9/6/12 | Bin 416 -> 0 bytes fixture/9/6/13 | Bin 416 -> 0 bytes fixture/9/6/14 | Bin 416 -> 0 bytes fixture/9/6/15 | Bin 416 -> 0 bytes fixture/9/6/16 | Bin 416 -> 0 bytes fixture/9/6/17 | Bin 416 -> 0 bytes fixture/9/6/18 | Bin 416 -> 0 bytes fixture/9/6/19 | Bin 416 -> 0 bytes fixture/9/6/2 | Bin 416 -> 0 bytes fixture/9/6/20 | Bin 416 -> 0 bytes fixture/9/6/21 | Bin 416 -> 0 bytes fixture/9/6/22 | Bin 416 -> 0 bytes fixture/9/6/23 | Bin 416 -> 0 bytes fixture/9/6/24 | Bin 416 -> 0 bytes fixture/9/6/25 | Bin 416 -> 0 bytes fixture/9/6/26 | Bin 416 -> 0 bytes fixture/9/6/27 | Bin 416 -> 0 bytes fixture/9/6/28 | Bin 416 -> 0 bytes fixture/9/6/29 | Bin 416 -> 0 bytes fixture/9/6/3 | Bin 416 -> 0 bytes fixture/9/6/30 | Bin 416 -> 0 bytes fixture/9/6/31 | Bin 416 -> 0 bytes fixture/9/6/32 | Bin 416 -> 0 bytes fixture/9/6/33 | Bin 176 -> 0 bytes fixture/9/6/4 | Bin 416 -> 0 bytes fixture/9/6/5 | Bin 416 -> 0 bytes fixture/9/6/6 | Bin 416 -> 0 bytes fixture/9/6/7 | Bin 416 -> 0 bytes fixture/9/6/8 | Bin 416 -> 0 bytes fixture/9/6/9 | Bin 416 -> 0 bytes fixture/meta/.zarray | 23 ------ fixture/meta/0.0 | Bin 48 -> 0 bytes fixture/utf8attrs/.zattrs | 1 - tests/v2/conftest.py | 5 -- tests/v2/data/store.zip | Bin 343 -> 0 bytes tests/v2/data/store/foo | 1 - {fixture => tests/v2/fixture}/.zgroup | 0 .../fixture/dimension_separator}/flat/.zarray | 0 .../v2/fixture/dimension_separator}/flat/0.0 | Bin .../dimension_separator}/flat_legacy/.zarray | 0 .../dimension_separator}/flat_legacy/0.0 | Bin .../dimension_separator}/nested/.zarray | 0 .../fixture/dimension_separator}/nested/0/0 | Bin .../nested_legacy/.zarray | 0 .../dimension_separator}/nested_legacy/0/0 | Bin .../test_format_compatibility}/.zgroup | 0 .../array_0}/.zgroup | 0 .../array_0/compressor_0/.zarray | 14 ++++ .../array_0/compressor_0/0 | Bin 0 -> 600 bytes .../array_0/compressor_0/1 | Bin 0 -> 600 bytes .../array_0/compressor_1/.zarray | 17 ++++ .../array_0/compressor_1/0 | Bin 0 -> 284 bytes .../array_0/compressor_1/1 | Bin 0 -> 285 bytes .../array_0/compressor_2/.zarray | 17 ++++ .../array_0/compressor_2/0 | Bin 0 -> 525 bytes .../array_0/compressor_2/1 | Bin 0 -> 500 bytes .../array_0/compressor_3/.zarray | 20 +++++ .../array_0/compressor_3/0 | Bin 0 -> 300 bytes .../array_0/compressor_3/1 | Bin 0 -> 303 bytes .../array_0/compressor_4/.zarray | 20 +++++ .../array_0/compressor_4/0 | Bin 0 -> 300 bytes .../array_0/compressor_4/1 | Bin 0 -> 303 bytes .../array_0/compressor_5/.zarray | 20 +++++ .../array_0/compressor_5/0 | Bin 0 -> 121 bytes .../array_0/compressor_5/1 | Bin 0 -> 135 bytes .../array_0/compressor_6/.zarray | 20 
+++++ .../array_0/compressor_6/0 | Bin 0 -> 616 bytes .../array_0/compressor_6/1 | Bin 0 -> 556 bytes .../array_1}/.zgroup | 0 .../array_1/compressor_0/.zarray | 14 ++++ .../array_1/compressor_0/0 | Bin 0 -> 1200 bytes .../array_1/compressor_0/1 | Bin 0 -> 1200 bytes .../array_1/compressor_1/.zarray | 17 ++++ .../array_1/compressor_1/0 | Bin 0 -> 916 bytes .../array_1/compressor_1/1 | Bin 0 -> 770 bytes .../array_1/compressor_2/.zarray | 17 ++++ .../array_1/compressor_2/0 | Bin 0 -> 714 bytes .../array_1/compressor_2/1 | Bin 0 -> 667 bytes .../array_1/compressor_3/.zarray | 20 +++++ .../array_1/compressor_3/0 | Bin 0 -> 945 bytes .../array_1/compressor_3/1 | Bin 0 -> 798 bytes .../array_1/compressor_4/.zarray | 20 +++++ .../array_1/compressor_4/0 | Bin 0 -> 311 bytes .../array_1/compressor_4/1 | Bin 0 -> 318 bytes .../array_1/compressor_5/.zarray | 20 +++++ .../array_1/compressor_5/0 | Bin 0 -> 138 bytes .../array_1/compressor_5/1 | Bin 0 -> 150 bytes .../array_1/compressor_6/.zarray | 20 +++++ .../array_1/compressor_6/0 | Bin 0 -> 1216 bytes .../array_1/compressor_6/1 | Bin 0 -> 1071 bytes .../array_10}/.zgroup | 0 .../array_10/compressor_0/.zarray | 14 ++++ .../array_10/compressor_0/0 | Bin 0 -> 16000 bytes .../array_10/compressor_0/1 | Bin 0 -> 16000 bytes .../array_10/compressor_1/.zarray | 17 ++++ .../array_10/compressor_1/0 | Bin 0 -> 8287 bytes .../array_10/compressor_1/1 | Bin 0 -> 6176 bytes .../array_10/compressor_2/.zarray | 17 ++++ .../array_10/compressor_2/0 | Bin 0 -> 7521 bytes .../array_10/compressor_2/1 | Bin 0 -> 4670 bytes .../array_10/compressor_3/.zarray | 20 +++++ .../array_10/compressor_3/0 | Bin 0 -> 8172 bytes .../array_10/compressor_3/1 | Bin 0 -> 5436 bytes .../array_10/compressor_4/.zarray | 20 +++++ .../array_10/compressor_4/0 | Bin 0 -> 13861 bytes .../array_10/compressor_4/1 | Bin 0 -> 4740 bytes .../array_10/compressor_5/.zarray | 20 +++++ .../array_10/compressor_5/0 | Bin 0 -> 4127 bytes .../array_10/compressor_5/1 | Bin 0 -> 2492 bytes .../array_10/compressor_6/.zarray | 20 +++++ .../array_10/compressor_6/0 | Bin 0 -> 15987 bytes .../array_10/compressor_6/1 | Bin 0 -> 10775 bytes .../array_11}/.zgroup | 0 .../array_11/compressor_0/.zarray | 14 ++++ .../array_11/compressor_0/0 | Bin 0 -> 6000 bytes .../array_11/compressor_0/1 | Bin 0 -> 6000 bytes .../array_11/compressor_1/.zarray | 17 ++++ .../array_11/compressor_1/0 | Bin 0 -> 5584 bytes .../array_11/compressor_1/1 | Bin 0 -> 2758 bytes .../array_11/compressor_2/.zarray | 17 ++++ .../array_11/compressor_2/0 | Bin 0 -> 5888 bytes .../array_11/compressor_2/1 | Bin 0 -> 3083 bytes .../array_11/compressor_3/.zarray | 20 +++++ .../array_11/compressor_3/0 | Bin 0 -> 5609 bytes .../array_11/compressor_3/1 | Bin 0 -> 2768 bytes .../array_11/compressor_4/.zarray | 20 +++++ .../array_11/compressor_4/0 | Bin 0 -> 5609 bytes .../array_11/compressor_4/1 | Bin 0 -> 2778 bytes .../array_11/compressor_5/.zarray | 20 +++++ .../array_11/compressor_5/0 | Bin 0 -> 5726 bytes .../array_11/compressor_5/1 | Bin 0 -> 2934 bytes .../array_11/compressor_6/.zarray | 20 +++++ .../array_11/compressor_6/0 | Bin 0 -> 6016 bytes .../array_11/compressor_6/1 | Bin 0 -> 2973 bytes .../array_12}/.zgroup | 0 .../array_12/compressor_0/.zarray | 14 ++++ .../array_12/compressor_0/0 | Bin 0 -> 12000 bytes .../array_12/compressor_0/1 | Bin 0 -> 12000 bytes .../array_12/compressor_1/.zarray | 17 ++++ .../array_12/compressor_1/0 | Bin 0 -> 11144 bytes .../array_12/compressor_1/1 | Bin 0 -> 5453 bytes .../array_12/compressor_2/.zarray | 17 ++++ 
.../array_12/compressor_2/0 | Bin 0 -> 12115 bytes .../array_12/compressor_2/1 | Bin 0 -> 6120 bytes .../array_12/compressor_3/.zarray | 20 +++++ .../array_12/compressor_3/0 | Bin 0 -> 11146 bytes .../array_12/compressor_3/1 | Bin 0 -> 5435 bytes .../array_12/compressor_4/.zarray | 20 +++++ .../array_12/compressor_4/0 | Bin 0 -> 11138 bytes .../array_12/compressor_4/1 | Bin 0 -> 5380 bytes .../array_12/compressor_5/.zarray | 20 +++++ .../array_12/compressor_5/0 | Bin 0 -> 10803 bytes .../array_12/compressor_5/1 | Bin 0 -> 5295 bytes .../array_12/compressor_6/.zarray | 20 +++++ .../array_12/compressor_6/0 | Bin 0 -> 12016 bytes .../array_12/compressor_6/1 | Bin 0 -> 5890 bytes .../array_13}/.zgroup | 0 .../array_13/compressor_0/.zarray | 14 ++++ .../array_13/compressor_0/0 | Bin 0 -> 24000 bytes .../array_13/compressor_0/1 | Bin 0 -> 24000 bytes .../array_13/compressor_1/.zarray | 17 ++++ .../array_13/compressor_1/0 | Bin 0 -> 23139 bytes .../array_13/compressor_1/1 | Bin 0 -> 11246 bytes .../array_13/compressor_2/.zarray | 17 ++++ .../array_13/compressor_2/0 | Bin 0 -> 23896 bytes .../array_13/compressor_2/1 | Bin 0 -> 11779 bytes .../array_13/compressor_3/.zarray | 20 +++++ .../array_13/compressor_3/0 | Bin 0 -> 23050 bytes .../array_13/compressor_3/1 | Bin 0 -> 11156 bytes .../array_13/compressor_4/.zarray | 20 +++++ .../array_13/compressor_4/0 | Bin 0 -> 22820 bytes .../array_13/compressor_4/1 | Bin 0 -> 10725 bytes .../array_13/compressor_5/.zarray | 20 +++++ .../array_13/compressor_5/0 | Bin 0 -> 21824 bytes .../array_13/compressor_5/1 | Bin 0 -> 10647 bytes .../array_13/compressor_6/.zarray | 20 +++++ .../array_13/compressor_6/0 | Bin 0 -> 24016 bytes .../array_13/compressor_6/1 | Bin 0 -> 11725 bytes .../array_14}/.zgroup | 0 .../array_14/compressor_0/.zarray | 14 ++++ .../array_14/compressor_0/0 | 1 + .../array_14/compressor_0/1 | Bin 0 -> 3000 bytes .../array_14/compressor_1/.zarray | 17 ++++ .../array_14/compressor_1/0 | Bin 0 -> 1058 bytes .../array_14/compressor_1/1 | Bin 0 -> 918 bytes .../array_14/compressor_2/.zarray | 17 ++++ .../array_14/compressor_2/0 | Bin 0 -> 888 bytes .../array_14/compressor_2/1 | Bin 0 -> 797 bytes .../array_14/compressor_3/.zarray | 20 +++++ .../array_14/compressor_3/0 | Bin 0 -> 1016 bytes .../array_14/compressor_3/1 | Bin 0 -> 895 bytes .../array_14/compressor_4/.zarray | 20 +++++ .../array_14/compressor_4/0 | Bin 0 -> 1016 bytes .../array_14/compressor_4/1 | Bin 0 -> 895 bytes .../array_14/compressor_5/.zarray | 20 +++++ .../array_14/compressor_5/0 | Bin 0 -> 1559 bytes .../array_14/compressor_5/1 | Bin 0 -> 1349 bytes .../array_14/compressor_6/.zarray | 20 +++++ .../array_14/compressor_6/0 | Bin 0 -> 2311 bytes .../array_14/compressor_6/1 | Bin 0 -> 2037 bytes .../array_15}/.zgroup | 0 .../array_15/compressor_0/.zarray | 14 ++++ .../array_15/compressor_0/0 | Bin 0 -> 48000 bytes .../array_15/compressor_0/1 | Bin 0 -> 48000 bytes .../array_15/compressor_1/.zarray | 17 ++++ .../array_15/compressor_1/0 | 1 + .../array_15/compressor_1/1 | Bin 0 -> 3139 bytes .../array_15/compressor_2/.zarray | 17 ++++ .../array_15/compressor_2/0 | Bin 0 -> 1028 bytes .../array_15/compressor_2/1 | Bin 0 -> 906 bytes .../array_15/compressor_3/.zarray | 20 +++++ .../array_15/compressor_3/0 | Bin 0 -> 3116 bytes .../array_15/compressor_3/1 | Bin 0 -> 2684 bytes .../array_15/compressor_4/.zarray | 20 +++++ .../array_15/compressor_4/0 | Bin 0 -> 3623 bytes .../array_15/compressor_4/1 | Bin 0 -> 3130 bytes .../array_15/compressor_5/.zarray | 20 +++++ 
.../array_15/compressor_5/0 | Bin 0 -> 2464 bytes .../array_15/compressor_5/1 | Bin 0 -> 2152 bytes .../array_15/compressor_6/.zarray | 20 +++++ .../array_15/compressor_6/0 | Bin 0 -> 10106 bytes .../array_15/compressor_6/1 | Bin 0 -> 8644 bytes .../array_16}/.zgroup | 0 .../array_16/compressor_0/.zarray | 14 ++++ .../array_16/compressor_0/0 | Bin 0 -> 24000 bytes .../array_16/compressor_0/1 | Bin 0 -> 24000 bytes .../array_16/compressor_1/.zarray | 17 ++++ .../array_16/compressor_1/0 | 1 + .../array_16/compressor_1/1 | Bin 0 -> 2498 bytes .../array_16/compressor_2/.zarray | 17 ++++ .../array_16/compressor_2/0 | Bin 0 -> 1252 bytes .../array_16/compressor_2/1 | Bin 0 -> 1108 bytes .../array_16/compressor_3/.zarray | 20 +++++ .../array_16/compressor_3/0 | Bin 0 -> 3029 bytes .../array_16/compressor_3/1 | Bin 0 -> 2548 bytes .../array_16/compressor_4/.zarray | 20 +++++ .../array_16/compressor_4/0 | Bin 0 -> 5423 bytes .../array_16/compressor_4/1 | Bin 0 -> 4698 bytes .../array_16/compressor_5/.zarray | 20 +++++ .../array_16/compressor_5/0 | Bin 0 -> 3624 bytes .../array_16/compressor_5/1 | Bin 0 -> 3139 bytes .../array_16/compressor_6/.zarray | 20 +++++ .../array_16/compressor_6/0 | Bin 0 -> 8229 bytes .../array_16/compressor_6/1 | Bin 0 -> 7158 bytes .../array_17}/.zgroup | 0 .../array_17/compressor_0/.zarray | 14 ++++ .../array_17/compressor_0/0 | Bin 0 -> 3000 bytes .../array_17/compressor_0/1 | Bin 0 -> 3000 bytes .../array_17/compressor_1/.zarray | 17 ++++ .../array_17/compressor_1/0 | Bin 0 -> 709 bytes .../array_17/compressor_1/1 | Bin 0 -> 615 bytes .../array_17/compressor_2/.zarray | 17 ++++ .../array_17/compressor_2/0 | Bin 0 -> 537 bytes .../array_17/compressor_2/1 | Bin 0 -> 473 bytes .../array_17/compressor_3/.zarray | 20 +++++ .../array_17/compressor_3/0 | Bin 0 -> 703 bytes .../array_17/compressor_3/1 | Bin 0 -> 608 bytes .../array_17/compressor_4/.zarray | 20 +++++ .../array_17/compressor_4/0 | Bin 0 -> 703 bytes .../array_17/compressor_4/1 | Bin 0 -> 608 bytes .../array_17/compressor_5/.zarray | 20 +++++ .../array_17/compressor_5/0 | Bin 0 -> 419 bytes .../array_17/compressor_5/1 | Bin 0 -> 366 bytes .../array_17/compressor_6/.zarray | 20 +++++ .../array_17/compressor_6/0 | Bin 0 -> 1870 bytes .../array_17/compressor_6/1 | Bin 0 -> 1613 bytes .../array_18}/.zgroup | 0 .../array_18/compressor_0/.zarray | 16 ++++ .../array_18/compressor_0/0.0 | Bin 0 -> 40000 bytes .../array_18/compressor_0/0.1 | Bin 0 -> 40000 bytes .../array_18/compressor_1/.zarray | 19 +++++ .../array_18/compressor_1/0.0 | Bin 0 -> 13951 bytes .../array_18/compressor_1/0.1 | Bin 0 -> 13936 bytes .../array_18/compressor_2/.zarray | 19 +++++ .../array_18/compressor_2/0.0 | Bin 0 -> 9114 bytes .../array_18/compressor_2/0.1 | Bin 0 -> 8672 bytes .../array_18/compressor_3/.zarray | 22 +++++ .../array_18/compressor_3/0.0 | Bin 0 -> 23597 bytes .../array_18/compressor_3/0.1 | Bin 0 -> 23613 bytes .../array_18/compressor_4/.zarray | 22 +++++ .../array_18/compressor_4/0.0 | Bin 0 -> 1607 bytes .../array_18/compressor_4/0.1 | Bin 0 -> 1607 bytes .../array_18/compressor_5/.zarray | 22 +++++ .../array_18/compressor_5/0.0 | Bin 0 -> 682 bytes .../array_18/compressor_5/0.1 | Bin 0 -> 700 bytes .../array_18/compressor_6/.zarray | 22 +++++ .../array_18/compressor_6/0.0 | Bin 0 -> 40016 bytes .../array_18/compressor_6/0.1 | Bin 0 -> 40016 bytes .../array_19}/.zgroup | 0 .../array_19/compressor_0/.zarray | 16 ++++ .../array_19/compressor_0/0.0 | Bin 0 -> 40000 bytes .../array_19/compressor_0/0.1 | Bin 0 -> 40000 bytes 
.../array_19/compressor_1/.zarray | 19 +++++ .../array_19/compressor_1/0.0 | Bin 0 -> 13895 bytes .../array_19/compressor_1/0.1 | Bin 0 -> 13864 bytes .../array_19/compressor_2/.zarray | 19 +++++ .../array_19/compressor_2/0.0 | Bin 0 -> 3330 bytes .../array_19/compressor_2/0.1 | Bin 0 -> 3372 bytes .../array_19/compressor_3/.zarray | 22 +++++ .../array_19/compressor_3/0.0 | Bin 0 -> 22581 bytes .../array_19/compressor_3/0.1 | Bin 0 -> 22801 bytes .../array_19/compressor_4/.zarray | 22 +++++ .../array_19/compressor_4/0.0 | Bin 0 -> 743 bytes .../array_19/compressor_4/0.1 | Bin 0 -> 747 bytes .../array_19/compressor_5/.zarray | 22 +++++ .../array_19/compressor_5/0.0 | Bin 0 -> 507 bytes .../array_19/compressor_5/0.1 | Bin 0 -> 508 bytes .../array_19/compressor_6/.zarray | 22 +++++ .../array_19/compressor_6/0.0 | Bin 0 -> 40016 bytes .../array_19/compressor_6/0.1 | Bin 0 -> 40016 bytes .../array_2}/.zgroup | 0 .../array_2/compressor_0/.zarray | 14 ++++ .../array_2/compressor_0/0 | Bin 0 -> 2400 bytes .../array_2/compressor_0/1 | Bin 0 -> 2400 bytes .../array_2/compressor_1/.zarray | 17 ++++ .../array_2/compressor_1/0 | Bin 0 -> 874 bytes .../array_2/compressor_1/1 | Bin 0 -> 762 bytes .../array_2/compressor_2/.zarray | 17 ++++ .../array_2/compressor_2/0 | Bin 0 -> 760 bytes .../array_2/compressor_2/1 | Bin 0 -> 686 bytes .../array_2/compressor_3/.zarray | 20 +++++ .../array_2/compressor_3/0 | Bin 0 -> 1133 bytes .../array_2/compressor_3/1 | Bin 0 -> 1055 bytes .../array_2/compressor_4/.zarray | 20 +++++ .../array_2/compressor_4/0 | Bin 0 -> 315 bytes .../array_2/compressor_4/1 | Bin 0 -> 318 bytes .../array_2/compressor_5/.zarray | 20 +++++ .../array_2/compressor_5/0 | Bin 0 -> 138 bytes .../array_2/compressor_5/1 | Bin 0 -> 151 bytes .../array_2/compressor_6/.zarray | 20 +++++ .../array_2/compressor_6/0 | Bin 0 -> 2416 bytes .../array_2/compressor_6/1 | Bin 0 -> 2099 bytes .../array_20}/.zgroup | 0 .../array_20/compressor_0/.zarray | 18 ++++ .../array_20/compressor_0/0.0.0 | Bin 0 -> 40000 bytes .../array_20/compressor_0/0.0.1 | Bin 0 -> 40000 bytes .../array_20/compressor_1/.zarray | 21 +++++ .../array_20/compressor_1/0.0.0 | Bin 0 -> 13951 bytes .../array_20/compressor_1/0.0.1 | Bin 0 -> 13936 bytes .../array_20/compressor_2/.zarray | 21 +++++ .../array_20/compressor_2/0.0.0 | Bin 0 -> 9114 bytes .../array_20/compressor_2/0.0.1 | Bin 0 -> 8672 bytes .../array_20/compressor_3/.zarray | 24 ++++++ .../array_20/compressor_3/0.0.0 | Bin 0 -> 23597 bytes .../array_20/compressor_3/0.0.1 | Bin 0 -> 23613 bytes .../array_20/compressor_4/.zarray | 24 ++++++ .../array_20/compressor_4/0.0.0 | Bin 0 -> 1607 bytes .../array_20/compressor_4/0.0.1 | Bin 0 -> 1607 bytes .../array_20/compressor_5/.zarray | 24 ++++++ .../array_20/compressor_5/0.0.0 | Bin 0 -> 682 bytes .../array_20/compressor_5/0.0.1 | Bin 0 -> 700 bytes .../array_20/compressor_6/.zarray | 24 ++++++ .../array_20/compressor_6/0.0.0 | Bin 0 -> 40016 bytes .../array_20/compressor_6/0.0.1 | Bin 0 -> 40016 bytes .../array_21}/.zgroup | 0 .../array_21/compressor_0/.zarray | 18 ++++ .../array_21/compressor_0/0.0.0 | Bin 0 -> 80000 bytes .../array_21/compressor_1/.zarray | 21 +++++ .../array_21/compressor_1/0.0.0 | Bin 0 -> 27715 bytes .../array_21/compressor_2/.zarray | 21 +++++ .../array_21/compressor_2/0.0.0 | Bin 0 -> 6038 bytes .../array_21/compressor_3/.zarray | 24 ++++++ .../array_21/compressor_3/0.0.0 | Bin 0 -> 45710 bytes .../array_21/compressor_4/.zarray | 24 ++++++ .../array_21/compressor_4/0.0.0 | Bin 0 -> 1180 bytes 
.../array_21/compressor_5/.zarray | 24 ++++++ .../array_21/compressor_5/0.0.0 | Bin 0 -> 834 bytes .../array_21/compressor_6/.zarray | 24 ++++++ .../array_21/compressor_6/0.0.0 | Bin 0 -> 80016 bytes .../array_22}/.zgroup | 0 .../array_22/compressor_0/.zarray | 20 +++++ .../array_22/compressor_0/0.0.0.0 | Bin 0 -> 40000 bytes .../array_22/compressor_0/0.0.0.1 | Bin 0 -> 40000 bytes .../array_22/compressor_1/.zarray | 23 ++++++ .../array_22/compressor_1/0.0.0.0 | Bin 0 -> 13951 bytes .../array_22/compressor_1/0.0.0.1 | Bin 0 -> 13936 bytes .../array_22/compressor_2/.zarray | 23 ++++++ .../array_22/compressor_2/0.0.0.0 | Bin 0 -> 9114 bytes .../array_22/compressor_2/0.0.0.1 | Bin 0 -> 8672 bytes .../array_22/compressor_3/.zarray | 26 ++++++ .../array_22/compressor_3/0.0.0.0 | Bin 0 -> 23597 bytes .../array_22/compressor_3/0.0.0.1 | Bin 0 -> 23613 bytes .../array_22/compressor_4/.zarray | 26 ++++++ .../array_22/compressor_4/0.0.0.0 | Bin 0 -> 1607 bytes .../array_22/compressor_4/0.0.0.1 | Bin 0 -> 1607 bytes .../array_22/compressor_5/.zarray | 26 ++++++ .../array_22/compressor_5/0.0.0.0 | Bin 0 -> 682 bytes .../array_22/compressor_5/0.0.0.1 | Bin 0 -> 700 bytes .../array_22/compressor_6/.zarray | 26 ++++++ .../array_22/compressor_6/0.0.0.0 | Bin 0 -> 40016 bytes .../array_22/compressor_6/0.0.0.1 | Bin 0 -> 40016 bytes .../array_23}/.zgroup | 0 .../array_23/compressor_0/.zarray | 20 +++++ .../array_23/compressor_0/0.0.0.0 | Bin 0 -> 40000 bytes .../array_23/compressor_0/0.0.0.1 | Bin 0 -> 40000 bytes .../array_23/compressor_1/.zarray | 23 ++++++ .../array_23/compressor_1/0.0.0.0 | Bin 0 -> 13895 bytes .../array_23/compressor_1/0.0.0.1 | Bin 0 -> 13864 bytes .../array_23/compressor_2/.zarray | 23 ++++++ .../array_23/compressor_2/0.0.0.0 | Bin 0 -> 3330 bytes .../array_23/compressor_2/0.0.0.1 | Bin 0 -> 3372 bytes .../array_23/compressor_3/.zarray | 26 ++++++ .../array_23/compressor_3/0.0.0.0 | Bin 0 -> 22581 bytes .../array_23/compressor_3/0.0.0.1 | Bin 0 -> 22801 bytes .../array_23/compressor_4/.zarray | 26 ++++++ .../array_23/compressor_4/0.0.0.0 | Bin 0 -> 743 bytes .../array_23/compressor_4/0.0.0.1 | Bin 0 -> 747 bytes .../array_23/compressor_5/.zarray | 26 ++++++ .../array_23/compressor_5/0.0.0.0 | Bin 0 -> 507 bytes .../array_23/compressor_5/0.0.0.1 | Bin 0 -> 508 bytes .../array_23/compressor_6/.zarray | 26 ++++++ .../array_23/compressor_6/0.0.0.0 | Bin 0 -> 40016 bytes .../array_23/compressor_6/0.0.0.1 | Bin 0 -> 40016 bytes .../array_3}/.zgroup | 0 .../array_3/compressor_0}/.zarray | 0 .../array_3/compressor_0}/0 | Bin .../array_3/compressor_0}/1 | Bin .../array_3/compressor_1}/.zarray | 0 .../array_3/compressor_1}/0 | Bin .../array_3/compressor_1}/1 | Bin .../array_3/compressor_2}/.zarray | 0 .../array_3/compressor_2}/0 | Bin .../array_3/compressor_2}/1 | Bin .../array_3/compressor_3/.zarray | 20 +++++ .../array_3/compressor_3/0 | Bin 0 -> 1441 bytes .../array_3/compressor_3/1 | Bin 0 -> 178 bytes .../array_3/compressor_4/.zarray | 20 +++++ .../array_3/compressor_4}/0 | Bin .../array_3/compressor_4}/1 | Bin .../array_3/compressor_5/.zarray | 20 +++++ .../array_3/compressor_5/0 | Bin 0 -> 151 bytes .../array_3/compressor_5/1 | Bin 0 -> 117 bytes .../array_3/compressor_6/.zarray | 20 +++++ .../array_3/compressor_6/0 | Bin 0 -> 4131 bytes .../array_3/compressor_6/1 | Bin 0 -> 612 bytes .../array_4}/.zgroup | 0 .../array_4/compressor_0/.zarray | 14 ++++ .../array_4/compressor_0/0 | Bin 0 -> 1200 bytes .../array_4/compressor_0/1 | Bin 0 -> 1200 bytes .../array_4/compressor_1/.zarray | 17 
++++
[diffstat condensed: the listing continues with new fixture files under .../array_4 through .../array_9 (the leading path components are truncated by git). Each array directory gains a .zgroup and, for compressor_0 through compressor_6, a .zarray of 14-20 lines plus two chunk files "0" and "1", almost all binary (Bin 0 -> roughly 400 to 9600 bytes each).]
 tests/v2/test_attrs.py         | 25 +++---
 tests/v2/test_dim_separator.py | 43 +++++-----
 tests/v2/test_storage.py       | 77 ++++++++++--------
 7883 files changed, 3277 insertions(+), 3978 deletions(-)
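For orientation, the new fixtures follow one pattern per array: a directory holding a .zgroup, with one subdirectory per compressor containing .zarray metadata and two chunk files. A minimal sketch of how such a tree could be generated with the zarr v2 convenience API follows; the destination directory, payload, chunking, and codec choices are illustrative assumptions, not the fixture generator this patch actually used (which is not shown in the diff).

```python
import numpy as np
import zarr
from numcodecs import Blosc, Zlib

# Hypothetical payload and codec set; the real fixture shapes and codecs
# are not visible in this patch.
data = np.arange(1200, dtype="i4")
compressors = {
    "compressor_0": None,            # compressor=None -> raw, uncompressed chunks
    "compressor_1": Zlib(level=1),
    "compressor_2": Blosc(cname="lz4"),
}

# Creating the parent group writes the .zgroup file seen in the listing.
# "some_fixture_dir" is a stand-in for the truncated path in the diffstat.
group = zarr.open_group("some_fixture_dir/array_5", mode="w")

for name, compressor in compressors.items():
    # Two chunks per array, stored as files "0" and "1" next to .zarray.
    group.create_dataset(
        name,
        data=data,
        chunks=(data.size // 2,),
        compressor=compressor,
    )
```

Under these assumed parameters the uncompressed case writes two chunk files of exactly 600 * 4 = 2400 bytes, which is consistent with the `Bin 0 -> 2400 bytes` entries listed above for array_5/compressor_0.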
[file-mode listing condensed: delete mode 100644 entries follow for every file under the old top-level fixture/ tree, beginning with fixture/.zattrs and running through the numbered group directories (fixture/0/, fixture/1/, fixture/10/, and onward), each holding .zarray/.zattrs metadata and numbered chunk files; the raw listing resumes here] delete mode
100644 fixture/15/0/15 delete mode 100644 fixture/15/0/16 delete mode 100644 fixture/15/0/17 delete mode 100644 fixture/15/0/18 delete mode 100644 fixture/15/0/19 delete mode 100644 fixture/15/0/2 delete mode 100644 fixture/15/0/20 delete mode 100644 fixture/15/0/21 delete mode 100644 fixture/15/0/22 delete mode 100644 fixture/15/0/23 delete mode 100644 fixture/15/0/24 delete mode 100644 fixture/15/0/25 delete mode 100644 fixture/15/0/26 delete mode 100644 fixture/15/0/27 delete mode 100644 fixture/15/0/28 delete mode 100644 fixture/15/0/29 delete mode 100644 fixture/15/0/3 delete mode 100644 fixture/15/0/30 delete mode 100644 fixture/15/0/31 delete mode 100644 fixture/15/0/32 delete mode 100644 fixture/15/0/33 delete mode 100644 fixture/15/0/34 delete mode 100644 fixture/15/0/35 delete mode 100644 fixture/15/0/36 delete mode 100644 fixture/15/0/37 delete mode 100644 fixture/15/0/38 delete mode 100644 fixture/15/0/39 delete mode 100644 fixture/15/0/4 delete mode 100644 fixture/15/0/40 delete mode 100644 fixture/15/0/41 delete mode 100644 fixture/15/0/42 delete mode 100644 fixture/15/0/43 delete mode 100644 fixture/15/0/44 delete mode 100644 fixture/15/0/45 delete mode 100644 fixture/15/0/46 delete mode 100644 fixture/15/0/47 delete mode 100644 fixture/15/0/48 delete mode 100644 fixture/15/0/49 delete mode 100644 fixture/15/0/5 delete mode 100644 fixture/15/0/50 delete mode 100644 fixture/15/0/51 delete mode 100644 fixture/15/0/52 delete mode 100644 fixture/15/0/53 delete mode 100644 fixture/15/0/54 delete mode 100644 fixture/15/0/55 delete mode 100644 fixture/15/0/6 delete mode 100644 fixture/15/0/7 delete mode 100644 fixture/15/0/8 delete mode 100644 fixture/15/0/9 delete mode 100644 fixture/15/1/.zarray delete mode 100644 fixture/15/1/.zattrs delete mode 100644 fixture/15/1/0 delete mode 100644 fixture/15/1/1 delete mode 100644 fixture/15/1/10 delete mode 100644 fixture/15/1/11 delete mode 100644 fixture/15/1/12 delete mode 100644 fixture/15/1/13 delete mode 100644 fixture/15/1/14 delete mode 100644 fixture/15/1/15 delete mode 100644 fixture/15/1/16 delete mode 100644 fixture/15/1/17 delete mode 100644 fixture/15/1/18 delete mode 100644 fixture/15/1/19 delete mode 100644 fixture/15/1/2 delete mode 100644 fixture/15/1/20 delete mode 100644 fixture/15/1/21 delete mode 100644 fixture/15/1/22 delete mode 100644 fixture/15/1/23 delete mode 100644 fixture/15/1/24 delete mode 100644 fixture/15/1/25 delete mode 100644 fixture/15/1/26 delete mode 100644 fixture/15/1/27 delete mode 100644 fixture/15/1/28 delete mode 100644 fixture/15/1/29 delete mode 100644 fixture/15/1/3 delete mode 100644 fixture/15/1/30 delete mode 100644 fixture/15/1/31 delete mode 100644 fixture/15/1/32 delete mode 100644 fixture/15/1/33 delete mode 100644 fixture/15/1/34 delete mode 100644 fixture/15/1/35 delete mode 100644 fixture/15/1/36 delete mode 100644 fixture/15/1/37 delete mode 100644 fixture/15/1/38 delete mode 100644 fixture/15/1/39 delete mode 100644 fixture/15/1/4 delete mode 100644 fixture/15/1/40 delete mode 100644 fixture/15/1/41 delete mode 100644 fixture/15/1/42 delete mode 100644 fixture/15/1/43 delete mode 100644 fixture/15/1/44 delete mode 100644 fixture/15/1/45 delete mode 100644 fixture/15/1/46 delete mode 100644 fixture/15/1/47 delete mode 100644 fixture/15/1/48 delete mode 100644 fixture/15/1/49 delete mode 100644 fixture/15/1/5 delete mode 100644 fixture/15/1/50 delete mode 100644 fixture/15/1/51 delete mode 100644 fixture/15/1/52 delete mode 100644 fixture/15/1/53 delete mode 100644 fixture/15/1/54 
delete mode 100644 fixture/15/1/55 delete mode 100644 fixture/15/1/6 delete mode 100644 fixture/15/1/7 delete mode 100644 fixture/15/1/8 delete mode 100644 fixture/15/1/9 delete mode 100644 fixture/15/2/.zarray delete mode 100644 fixture/15/2/.zattrs delete mode 100644 fixture/15/2/0 delete mode 100644 fixture/15/2/1 delete mode 100644 fixture/15/2/10 delete mode 100644 fixture/15/2/11 delete mode 100644 fixture/15/2/12 delete mode 100644 fixture/15/2/13 delete mode 100644 fixture/15/2/14 delete mode 100644 fixture/15/2/15 delete mode 100644 fixture/15/2/16 delete mode 100644 fixture/15/2/17 delete mode 100644 fixture/15/2/18 delete mode 100644 fixture/15/2/19 delete mode 100644 fixture/15/2/2 delete mode 100644 fixture/15/2/20 delete mode 100644 fixture/15/2/21 delete mode 100644 fixture/15/2/22 delete mode 100644 fixture/15/2/23 delete mode 100644 fixture/15/2/24 delete mode 100644 fixture/15/2/25 delete mode 100644 fixture/15/2/26 delete mode 100644 fixture/15/2/27 delete mode 100644 fixture/15/2/28 delete mode 100644 fixture/15/2/29 delete mode 100644 fixture/15/2/3 delete mode 100644 fixture/15/2/30 delete mode 100644 fixture/15/2/31 delete mode 100644 fixture/15/2/32 delete mode 100644 fixture/15/2/33 delete mode 100644 fixture/15/2/34 delete mode 100644 fixture/15/2/35 delete mode 100644 fixture/15/2/36 delete mode 100644 fixture/15/2/37 delete mode 100644 fixture/15/2/38 delete mode 100644 fixture/15/2/39 delete mode 100644 fixture/15/2/4 delete mode 100644 fixture/15/2/40 delete mode 100644 fixture/15/2/41 delete mode 100644 fixture/15/2/42 delete mode 100644 fixture/15/2/43 delete mode 100644 fixture/15/2/44 delete mode 100644 fixture/15/2/45 delete mode 100644 fixture/15/2/46 delete mode 100644 fixture/15/2/47 delete mode 100644 fixture/15/2/48 delete mode 100644 fixture/15/2/49 delete mode 100644 fixture/15/2/5 delete mode 100644 fixture/15/2/50 delete mode 100644 fixture/15/2/51 delete mode 100644 fixture/15/2/52 delete mode 100644 fixture/15/2/53 delete mode 100644 fixture/15/2/54 delete mode 100644 fixture/15/2/55 delete mode 100644 fixture/15/2/6 delete mode 100644 fixture/15/2/7 delete mode 100644 fixture/15/2/8 delete mode 100644 fixture/15/2/9 delete mode 100644 fixture/15/3/.zarray delete mode 100644 fixture/15/3/.zattrs delete mode 100644 fixture/15/3/0 delete mode 100644 fixture/15/3/1 delete mode 100644 fixture/15/3/10 delete mode 100644 fixture/15/3/11 delete mode 100644 fixture/15/3/12 delete mode 100644 fixture/15/3/13 delete mode 100644 fixture/15/3/14 delete mode 100644 fixture/15/3/15 delete mode 100644 fixture/15/3/16 delete mode 100644 fixture/15/3/17 delete mode 100644 fixture/15/3/18 delete mode 100644 fixture/15/3/19 delete mode 100644 fixture/15/3/2 delete mode 100644 fixture/15/3/20 delete mode 100644 fixture/15/3/21 delete mode 100644 fixture/15/3/22 delete mode 100644 fixture/15/3/23 delete mode 100644 fixture/15/3/24 delete mode 100644 fixture/15/3/25 delete mode 100644 fixture/15/3/26 delete mode 100644 fixture/15/3/27 delete mode 100644 fixture/15/3/28 delete mode 100644 fixture/15/3/29 delete mode 100644 fixture/15/3/3 delete mode 100644 fixture/15/3/30 delete mode 100644 fixture/15/3/31 delete mode 100644 fixture/15/3/32 delete mode 100644 fixture/15/3/33 delete mode 100644 fixture/15/3/34 delete mode 100644 fixture/15/3/35 delete mode 100644 fixture/15/3/36 delete mode 100644 fixture/15/3/37 delete mode 100644 fixture/15/3/38 delete mode 100644 fixture/15/3/39 delete mode 100644 fixture/15/3/4 delete mode 100644 fixture/15/3/40 delete mode 100644 
fixture/15/3/41 delete mode 100644 fixture/15/3/42 delete mode 100644 fixture/15/3/43 delete mode 100644 fixture/15/3/44 delete mode 100644 fixture/15/3/45 delete mode 100644 fixture/15/3/46 delete mode 100644 fixture/15/3/47 delete mode 100644 fixture/15/3/48 delete mode 100644 fixture/15/3/49 delete mode 100644 fixture/15/3/5 delete mode 100644 fixture/15/3/50 delete mode 100644 fixture/15/3/51 delete mode 100644 fixture/15/3/52 delete mode 100644 fixture/15/3/53 delete mode 100644 fixture/15/3/54 delete mode 100644 fixture/15/3/55 delete mode 100644 fixture/15/3/6 delete mode 100644 fixture/15/3/7 delete mode 100644 fixture/15/3/8 delete mode 100644 fixture/15/3/9 delete mode 100644 fixture/15/4/.zarray delete mode 100644 fixture/15/4/.zattrs delete mode 100644 fixture/15/4/0 delete mode 100644 fixture/15/4/1 delete mode 100644 fixture/15/4/10 delete mode 100644 fixture/15/4/11 delete mode 100644 fixture/15/4/12 delete mode 100644 fixture/15/4/13 delete mode 100644 fixture/15/4/14 delete mode 100644 fixture/15/4/15 delete mode 100644 fixture/15/4/16 delete mode 100644 fixture/15/4/17 delete mode 100644 fixture/15/4/18 delete mode 100644 fixture/15/4/19 delete mode 100644 fixture/15/4/2 delete mode 100644 fixture/15/4/20 delete mode 100644 fixture/15/4/21 delete mode 100644 fixture/15/4/22 delete mode 100644 fixture/15/4/23 delete mode 100644 fixture/15/4/24 delete mode 100644 fixture/15/4/25 delete mode 100644 fixture/15/4/26 delete mode 100644 fixture/15/4/27 delete mode 100644 fixture/15/4/28 delete mode 100644 fixture/15/4/29 delete mode 100644 fixture/15/4/3 delete mode 100644 fixture/15/4/30 delete mode 100644 fixture/15/4/31 delete mode 100644 fixture/15/4/32 delete mode 100644 fixture/15/4/33 delete mode 100644 fixture/15/4/34 delete mode 100644 fixture/15/4/35 delete mode 100644 fixture/15/4/36 delete mode 100644 fixture/15/4/37 delete mode 100644 fixture/15/4/38 delete mode 100644 fixture/15/4/39 delete mode 100644 fixture/15/4/4 delete mode 100644 fixture/15/4/40 delete mode 100644 fixture/15/4/41 delete mode 100644 fixture/15/4/42 delete mode 100644 fixture/15/4/43 delete mode 100644 fixture/15/4/44 delete mode 100644 fixture/15/4/45 delete mode 100644 fixture/15/4/46 delete mode 100644 fixture/15/4/47 delete mode 100644 fixture/15/4/48 delete mode 100644 fixture/15/4/49 delete mode 100644 fixture/15/4/5 delete mode 100644 fixture/15/4/50 delete mode 100644 fixture/15/4/51 delete mode 100644 fixture/15/4/52 delete mode 100644 fixture/15/4/53 delete mode 100644 fixture/15/4/54 delete mode 100644 fixture/15/4/55 delete mode 100644 fixture/15/4/6 delete mode 100644 fixture/15/4/7 delete mode 100644 fixture/15/4/8 delete mode 100644 fixture/15/4/9 delete mode 100644 fixture/15/5/.zarray delete mode 100644 fixture/15/5/.zattrs delete mode 100644 fixture/15/5/0 delete mode 100644 fixture/15/5/1 delete mode 100644 fixture/15/5/10 delete mode 100644 fixture/15/5/11 delete mode 100644 fixture/15/5/12 delete mode 100644 fixture/15/5/13 delete mode 100644 fixture/15/5/14 delete mode 100644 fixture/15/5/15 delete mode 100644 fixture/15/5/16 delete mode 100644 fixture/15/5/17 delete mode 100644 fixture/15/5/18 delete mode 100644 fixture/15/5/19 delete mode 100644 fixture/15/5/2 delete mode 100644 fixture/15/5/20 delete mode 100644 fixture/15/5/21 delete mode 100644 fixture/15/5/22 delete mode 100644 fixture/15/5/23 delete mode 100644 fixture/15/5/24 delete mode 100644 fixture/15/5/25 delete mode 100644 fixture/15/5/26 delete mode 100644 fixture/15/5/27 delete mode 100644 fixture/15/5/28 
delete mode 100644 fixture/15/5/29 delete mode 100644 fixture/15/5/3 delete mode 100644 fixture/15/5/30 delete mode 100644 fixture/15/5/31 delete mode 100644 fixture/15/5/32 delete mode 100644 fixture/15/5/33 delete mode 100644 fixture/15/5/34 delete mode 100644 fixture/15/5/35 delete mode 100644 fixture/15/5/36 delete mode 100644 fixture/15/5/37 delete mode 100644 fixture/15/5/38 delete mode 100644 fixture/15/5/39 delete mode 100644 fixture/15/5/4 delete mode 100644 fixture/15/5/40 delete mode 100644 fixture/15/5/41 delete mode 100644 fixture/15/5/42 delete mode 100644 fixture/15/5/43 delete mode 100644 fixture/15/5/44 delete mode 100644 fixture/15/5/45 delete mode 100644 fixture/15/5/46 delete mode 100644 fixture/15/5/47 delete mode 100644 fixture/15/5/48 delete mode 100644 fixture/15/5/49 delete mode 100644 fixture/15/5/5 delete mode 100644 fixture/15/5/50 delete mode 100644 fixture/15/5/51 delete mode 100644 fixture/15/5/52 delete mode 100644 fixture/15/5/53 delete mode 100644 fixture/15/5/54 delete mode 100644 fixture/15/5/55 delete mode 100644 fixture/15/5/6 delete mode 100644 fixture/15/5/7 delete mode 100644 fixture/15/5/8 delete mode 100644 fixture/15/5/9 delete mode 100644 fixture/15/6/.zarray delete mode 100644 fixture/15/6/.zattrs delete mode 100644 fixture/15/6/0 delete mode 100644 fixture/15/6/1 delete mode 100644 fixture/15/6/10 delete mode 100644 fixture/15/6/11 delete mode 100644 fixture/15/6/12 delete mode 100644 fixture/15/6/13 delete mode 100644 fixture/15/6/14 delete mode 100644 fixture/15/6/15 delete mode 100644 fixture/15/6/16 delete mode 100644 fixture/15/6/17 delete mode 100644 fixture/15/6/18 delete mode 100644 fixture/15/6/19 delete mode 100644 fixture/15/6/2 delete mode 100644 fixture/15/6/20 delete mode 100644 fixture/15/6/21 delete mode 100644 fixture/15/6/22 delete mode 100644 fixture/15/6/23 delete mode 100644 fixture/15/6/24 delete mode 100644 fixture/15/6/25 delete mode 100644 fixture/15/6/26 delete mode 100644 fixture/15/6/27 delete mode 100644 fixture/15/6/28 delete mode 100644 fixture/15/6/29 delete mode 100644 fixture/15/6/3 delete mode 100644 fixture/15/6/30 delete mode 100644 fixture/15/6/31 delete mode 100644 fixture/15/6/32 delete mode 100644 fixture/15/6/33 delete mode 100644 fixture/15/6/34 delete mode 100644 fixture/15/6/35 delete mode 100644 fixture/15/6/36 delete mode 100644 fixture/15/6/37 delete mode 100644 fixture/15/6/38 delete mode 100644 fixture/15/6/39 delete mode 100644 fixture/15/6/4 delete mode 100644 fixture/15/6/40 delete mode 100644 fixture/15/6/41 delete mode 100644 fixture/15/6/42 delete mode 100644 fixture/15/6/43 delete mode 100644 fixture/15/6/44 delete mode 100644 fixture/15/6/45 delete mode 100644 fixture/15/6/46 delete mode 100644 fixture/15/6/47 delete mode 100644 fixture/15/6/48 delete mode 100644 fixture/15/6/49 delete mode 100644 fixture/15/6/5 delete mode 100644 fixture/15/6/50 delete mode 100644 fixture/15/6/51 delete mode 100644 fixture/15/6/52 delete mode 100644 fixture/15/6/53 delete mode 100644 fixture/15/6/54 delete mode 100644 fixture/15/6/55 delete mode 100644 fixture/15/6/6 delete mode 100644 fixture/15/6/7 delete mode 100644 fixture/15/6/8 delete mode 100644 fixture/15/6/9 delete mode 100644 fixture/16/.zattrs delete mode 100644 fixture/16/0/.zarray delete mode 100644 fixture/16/0/.zattrs delete mode 100644 fixture/16/0/0 delete mode 100644 fixture/16/0/1 delete mode 100644 fixture/16/0/10 delete mode 100644 fixture/16/0/11 delete mode 100644 fixture/16/0/12 delete mode 100644 fixture/16/0/13 delete mode 
100644 fixture/16/0/14 delete mode 100644 fixture/16/0/15 delete mode 100644 fixture/16/0/16 delete mode 100644 fixture/16/0/17 delete mode 100644 fixture/16/0/18 delete mode 100644 fixture/16/0/19 delete mode 100644 fixture/16/0/2 delete mode 100644 fixture/16/0/20 delete mode 100644 fixture/16/0/21 delete mode 100644 fixture/16/0/22 delete mode 100644 fixture/16/0/23 delete mode 100644 fixture/16/0/24 delete mode 100644 fixture/16/0/25 delete mode 100644 fixture/16/0/26 delete mode 100644 fixture/16/0/27 delete mode 100644 fixture/16/0/28 delete mode 100644 fixture/16/0/29 delete mode 100644 fixture/16/0/3 delete mode 100644 fixture/16/0/30 delete mode 100644 fixture/16/0/31 delete mode 100644 fixture/16/0/32 delete mode 100644 fixture/16/0/33 delete mode 100644 fixture/16/0/34 delete mode 100644 fixture/16/0/35 delete mode 100644 fixture/16/0/36 delete mode 100644 fixture/16/0/37 delete mode 100644 fixture/16/0/38 delete mode 100644 fixture/16/0/39 delete mode 100644 fixture/16/0/4 delete mode 100644 fixture/16/0/40 delete mode 100644 fixture/16/0/41 delete mode 100644 fixture/16/0/42 delete mode 100644 fixture/16/0/43 delete mode 100644 fixture/16/0/44 delete mode 100644 fixture/16/0/45 delete mode 100644 fixture/16/0/46 delete mode 100644 fixture/16/0/47 delete mode 100644 fixture/16/0/48 delete mode 100644 fixture/16/0/49 delete mode 100644 fixture/16/0/5 delete mode 100644 fixture/16/0/50 delete mode 100644 fixture/16/0/51 delete mode 100644 fixture/16/0/52 delete mode 100644 fixture/16/0/53 delete mode 100644 fixture/16/0/54 delete mode 100644 fixture/16/0/55 delete mode 100644 fixture/16/0/6 delete mode 100644 fixture/16/0/7 delete mode 100644 fixture/16/0/8 delete mode 100644 fixture/16/0/9 delete mode 100644 fixture/16/1/.zarray delete mode 100644 fixture/16/1/.zattrs delete mode 100644 fixture/16/1/0 delete mode 100644 fixture/16/1/1 delete mode 100644 fixture/16/1/10 delete mode 100644 fixture/16/1/11 delete mode 100644 fixture/16/1/12 delete mode 100644 fixture/16/1/13 delete mode 100644 fixture/16/1/14 delete mode 100644 fixture/16/1/15 delete mode 100644 fixture/16/1/16 delete mode 100644 fixture/16/1/17 delete mode 100644 fixture/16/1/18 delete mode 100644 fixture/16/1/19 delete mode 100644 fixture/16/1/2 delete mode 100644 fixture/16/1/20 delete mode 100644 fixture/16/1/21 delete mode 100644 fixture/16/1/22 delete mode 100644 fixture/16/1/23 delete mode 100644 fixture/16/1/24 delete mode 100644 fixture/16/1/25 delete mode 100644 fixture/16/1/26 delete mode 100644 fixture/16/1/27 delete mode 100644 fixture/16/1/28 delete mode 100644 fixture/16/1/29 delete mode 100644 fixture/16/1/3 delete mode 100644 fixture/16/1/30 delete mode 100644 fixture/16/1/31 delete mode 100644 fixture/16/1/32 delete mode 100644 fixture/16/1/33 delete mode 100644 fixture/16/1/34 delete mode 100644 fixture/16/1/35 delete mode 100644 fixture/16/1/36 delete mode 100644 fixture/16/1/37 delete mode 100644 fixture/16/1/38 delete mode 100644 fixture/16/1/39 delete mode 100644 fixture/16/1/4 delete mode 100644 fixture/16/1/40 delete mode 100644 fixture/16/1/41 delete mode 100644 fixture/16/1/42 delete mode 100644 fixture/16/1/43 delete mode 100644 fixture/16/1/44 delete mode 100644 fixture/16/1/45 delete mode 100644 fixture/16/1/46 delete mode 100644 fixture/16/1/47 delete mode 100644 fixture/16/1/48 delete mode 100644 fixture/16/1/49 delete mode 100644 fixture/16/1/5 delete mode 100644 fixture/16/1/50 delete mode 100644 fixture/16/1/51 delete mode 100644 fixture/16/1/52 delete mode 100644 fixture/16/1/53 
delete mode 100644 fixture/16/1/54 delete mode 100644 fixture/16/1/55 delete mode 100644 fixture/16/1/6 delete mode 100644 fixture/16/1/7 delete mode 100644 fixture/16/1/8 delete mode 100644 fixture/16/1/9 delete mode 100644 fixture/16/2/.zarray delete mode 100644 fixture/16/2/.zattrs delete mode 100644 fixture/16/2/0 delete mode 100644 fixture/16/2/1 delete mode 100644 fixture/16/2/10 delete mode 100644 fixture/16/2/11 delete mode 100644 fixture/16/2/12 delete mode 100644 fixture/16/2/13 delete mode 100644 fixture/16/2/14 delete mode 100644 fixture/16/2/15 delete mode 100644 fixture/16/2/16 delete mode 100644 fixture/16/2/17 delete mode 100644 fixture/16/2/18 delete mode 100644 fixture/16/2/19 delete mode 100644 fixture/16/2/2 delete mode 100644 fixture/16/2/20 delete mode 100644 fixture/16/2/21 delete mode 100644 fixture/16/2/22 delete mode 100644 fixture/16/2/23 delete mode 100644 fixture/16/2/24 delete mode 100644 fixture/16/2/25 delete mode 100644 fixture/16/2/26 delete mode 100644 fixture/16/2/27 delete mode 100644 fixture/16/2/28 delete mode 100644 fixture/16/2/29 delete mode 100644 fixture/16/2/3 delete mode 100644 fixture/16/2/30 delete mode 100644 fixture/16/2/31 delete mode 100644 fixture/16/2/32 delete mode 100644 fixture/16/2/33 delete mode 100644 fixture/16/2/34 delete mode 100644 fixture/16/2/35 delete mode 100644 fixture/16/2/36 delete mode 100644 fixture/16/2/37 delete mode 100644 fixture/16/2/38 delete mode 100644 fixture/16/2/39 delete mode 100644 fixture/16/2/4 delete mode 100644 fixture/16/2/40 delete mode 100644 fixture/16/2/41 delete mode 100644 fixture/16/2/42 delete mode 100644 fixture/16/2/43 delete mode 100644 fixture/16/2/44 delete mode 100644 fixture/16/2/45 delete mode 100644 fixture/16/2/46 delete mode 100644 fixture/16/2/47 delete mode 100644 fixture/16/2/48 delete mode 100644 fixture/16/2/49 delete mode 100644 fixture/16/2/5 delete mode 100644 fixture/16/2/50 delete mode 100644 fixture/16/2/51 delete mode 100644 fixture/16/2/52 delete mode 100644 fixture/16/2/53 delete mode 100644 fixture/16/2/54 delete mode 100644 fixture/16/2/55 delete mode 100644 fixture/16/2/6 delete mode 100644 fixture/16/2/7 delete mode 100644 fixture/16/2/8 delete mode 100644 fixture/16/2/9 delete mode 100644 fixture/16/3/.zarray delete mode 100644 fixture/16/3/.zattrs delete mode 100644 fixture/16/3/0 delete mode 100644 fixture/16/3/1 delete mode 100644 fixture/16/3/10 delete mode 100644 fixture/16/3/11 delete mode 100644 fixture/16/3/12 delete mode 100644 fixture/16/3/13 delete mode 100644 fixture/16/3/14 delete mode 100644 fixture/16/3/15 delete mode 100644 fixture/16/3/16 delete mode 100644 fixture/16/3/17 delete mode 100644 fixture/16/3/18 delete mode 100644 fixture/16/3/19 delete mode 100644 fixture/16/3/2 delete mode 100644 fixture/16/3/20 delete mode 100644 fixture/16/3/21 delete mode 100644 fixture/16/3/22 delete mode 100644 fixture/16/3/23 delete mode 100644 fixture/16/3/24 delete mode 100644 fixture/16/3/25 delete mode 100644 fixture/16/3/26 delete mode 100644 fixture/16/3/27 delete mode 100644 fixture/16/3/28 delete mode 100644 fixture/16/3/29 delete mode 100644 fixture/16/3/3 delete mode 100644 fixture/16/3/30 delete mode 100644 fixture/16/3/31 delete mode 100644 fixture/16/3/32 delete mode 100644 fixture/16/3/33 delete mode 100644 fixture/16/3/34 delete mode 100644 fixture/16/3/35 delete mode 100644 fixture/16/3/36 delete mode 100644 fixture/16/3/37 delete mode 100644 fixture/16/3/38 delete mode 100644 fixture/16/3/39 delete mode 100644 fixture/16/3/4 delete mode 100644 
fixture/16/3/40 delete mode 100644 fixture/16/3/41 delete mode 100644 fixture/16/3/42 delete mode 100644 fixture/16/3/43 delete mode 100644 fixture/16/3/44 delete mode 100644 fixture/16/3/45 delete mode 100644 fixture/16/3/46 delete mode 100644 fixture/16/3/47 delete mode 100644 fixture/16/3/48 delete mode 100644 fixture/16/3/49 delete mode 100644 fixture/16/3/5 delete mode 100644 fixture/16/3/50 delete mode 100644 fixture/16/3/51 delete mode 100644 fixture/16/3/52 delete mode 100644 fixture/16/3/53 delete mode 100644 fixture/16/3/54 delete mode 100644 fixture/16/3/55 delete mode 100644 fixture/16/3/6 delete mode 100644 fixture/16/3/7 delete mode 100644 fixture/16/3/8 delete mode 100644 fixture/16/3/9 delete mode 100644 fixture/16/4/.zarray delete mode 100644 fixture/16/4/.zattrs delete mode 100644 fixture/16/4/0 delete mode 100644 fixture/16/4/1 delete mode 100644 fixture/16/4/10 delete mode 100644 fixture/16/4/11 delete mode 100644 fixture/16/4/12 delete mode 100644 fixture/16/4/13 delete mode 100644 fixture/16/4/14 delete mode 100644 fixture/16/4/15 delete mode 100644 fixture/16/4/16 delete mode 100644 fixture/16/4/17 delete mode 100644 fixture/16/4/18 delete mode 100644 fixture/16/4/19 delete mode 100644 fixture/16/4/2 delete mode 100644 fixture/16/4/20 delete mode 100644 fixture/16/4/21 delete mode 100644 fixture/16/4/22 delete mode 100644 fixture/16/4/23 delete mode 100644 fixture/16/4/24 delete mode 100644 fixture/16/4/25 delete mode 100644 fixture/16/4/26 delete mode 100644 fixture/16/4/27 delete mode 100644 fixture/16/4/28 delete mode 100644 fixture/16/4/29 delete mode 100644 fixture/16/4/3 delete mode 100644 fixture/16/4/30 delete mode 100644 fixture/16/4/31 delete mode 100644 fixture/16/4/32 delete mode 100644 fixture/16/4/33 delete mode 100644 fixture/16/4/34 delete mode 100644 fixture/16/4/35 delete mode 100644 fixture/16/4/36 delete mode 100644 fixture/16/4/37 delete mode 100644 fixture/16/4/38 delete mode 100644 fixture/16/4/39 delete mode 100644 fixture/16/4/4 delete mode 100644 fixture/16/4/40 delete mode 100644 fixture/16/4/41 delete mode 100644 fixture/16/4/42 delete mode 100644 fixture/16/4/43 delete mode 100644 fixture/16/4/44 delete mode 100644 fixture/16/4/45 delete mode 100644 fixture/16/4/46 delete mode 100644 fixture/16/4/47 delete mode 100644 fixture/16/4/48 delete mode 100644 fixture/16/4/49 delete mode 100644 fixture/16/4/5 delete mode 100644 fixture/16/4/50 delete mode 100644 fixture/16/4/51 delete mode 100644 fixture/16/4/52 delete mode 100644 fixture/16/4/53 delete mode 100644 fixture/16/4/54 delete mode 100644 fixture/16/4/55 delete mode 100644 fixture/16/4/6 delete mode 100644 fixture/16/4/7 delete mode 100644 fixture/16/4/8 delete mode 100644 fixture/16/4/9 delete mode 100644 fixture/16/5/.zarray delete mode 100644 fixture/16/5/.zattrs delete mode 100644 fixture/16/5/0 delete mode 100644 fixture/16/5/1 delete mode 100644 fixture/16/5/10 delete mode 100644 fixture/16/5/11 delete mode 100644 fixture/16/5/12 delete mode 100644 fixture/16/5/13 delete mode 100644 fixture/16/5/14 delete mode 100644 fixture/16/5/15 delete mode 100644 fixture/16/5/16 delete mode 100644 fixture/16/5/17 delete mode 100644 fixture/16/5/18 delete mode 100644 fixture/16/5/19 delete mode 100644 fixture/16/5/2 delete mode 100644 fixture/16/5/20 delete mode 100644 fixture/16/5/21 delete mode 100644 fixture/16/5/22 delete mode 100644 fixture/16/5/23 delete mode 100644 fixture/16/5/24 delete mode 100644 fixture/16/5/25 delete mode 100644 fixture/16/5/26 delete mode 100644 fixture/16/5/27 
delete mode 100644 fixture/16/5/28 delete mode 100644 fixture/16/5/29 delete mode 100644 fixture/16/5/3 delete mode 100644 fixture/16/5/30 delete mode 100644 fixture/16/5/31 delete mode 100644 fixture/16/5/32 delete mode 100644 fixture/16/5/33 delete mode 100644 fixture/16/5/34 delete mode 100644 fixture/16/5/35 delete mode 100644 fixture/16/5/36 delete mode 100644 fixture/16/5/37 delete mode 100644 fixture/16/5/38 delete mode 100644 fixture/16/5/39 delete mode 100644 fixture/16/5/4 delete mode 100644 fixture/16/5/40 delete mode 100644 fixture/16/5/41 delete mode 100644 fixture/16/5/42 delete mode 100644 fixture/16/5/43 delete mode 100644 fixture/16/5/44 delete mode 100644 fixture/16/5/45 delete mode 100644 fixture/16/5/46 delete mode 100644 fixture/16/5/47 delete mode 100644 fixture/16/5/48 delete mode 100644 fixture/16/5/49 delete mode 100644 fixture/16/5/5 delete mode 100644 fixture/16/5/50 delete mode 100644 fixture/16/5/51 delete mode 100644 fixture/16/5/52 delete mode 100644 fixture/16/5/53 delete mode 100644 fixture/16/5/54 delete mode 100644 fixture/16/5/55 delete mode 100644 fixture/16/5/6 delete mode 100644 fixture/16/5/7 delete mode 100644 fixture/16/5/8 delete mode 100644 fixture/16/5/9 delete mode 100644 fixture/16/6/.zarray delete mode 100644 fixture/16/6/.zattrs delete mode 100644 fixture/16/6/0 delete mode 100644 fixture/16/6/1 delete mode 100644 fixture/16/6/10 delete mode 100644 fixture/16/6/11 delete mode 100644 fixture/16/6/12 delete mode 100644 fixture/16/6/13 delete mode 100644 fixture/16/6/14 delete mode 100644 fixture/16/6/15 delete mode 100644 fixture/16/6/16 delete mode 100644 fixture/16/6/17 delete mode 100644 fixture/16/6/18 delete mode 100644 fixture/16/6/19 delete mode 100644 fixture/16/6/2 delete mode 100644 fixture/16/6/20 delete mode 100644 fixture/16/6/21 delete mode 100644 fixture/16/6/22 delete mode 100644 fixture/16/6/23 delete mode 100644 fixture/16/6/24 delete mode 100644 fixture/16/6/25 delete mode 100644 fixture/16/6/26 delete mode 100644 fixture/16/6/27 delete mode 100644 fixture/16/6/28 delete mode 100644 fixture/16/6/29 delete mode 100644 fixture/16/6/3 delete mode 100644 fixture/16/6/30 delete mode 100644 fixture/16/6/31 delete mode 100644 fixture/16/6/32 delete mode 100644 fixture/16/6/33 delete mode 100644 fixture/16/6/34 delete mode 100644 fixture/16/6/35 delete mode 100644 fixture/16/6/36 delete mode 100644 fixture/16/6/37 delete mode 100644 fixture/16/6/38 delete mode 100644 fixture/16/6/39 delete mode 100644 fixture/16/6/4 delete mode 100644 fixture/16/6/40 delete mode 100644 fixture/16/6/41 delete mode 100644 fixture/16/6/42 delete mode 100644 fixture/16/6/43 delete mode 100644 fixture/16/6/44 delete mode 100644 fixture/16/6/45 delete mode 100644 fixture/16/6/46 delete mode 100644 fixture/16/6/47 delete mode 100644 fixture/16/6/48 delete mode 100644 fixture/16/6/49 delete mode 100644 fixture/16/6/5 delete mode 100644 fixture/16/6/50 delete mode 100644 fixture/16/6/51 delete mode 100644 fixture/16/6/52 delete mode 100644 fixture/16/6/53 delete mode 100644 fixture/16/6/54 delete mode 100644 fixture/16/6/55 delete mode 100644 fixture/16/6/6 delete mode 100644 fixture/16/6/7 delete mode 100644 fixture/16/6/8 delete mode 100644 fixture/16/6/9 delete mode 100644 fixture/17/.zattrs delete mode 100644 fixture/17/0/.zarray delete mode 100644 fixture/17/0/.zattrs delete mode 100644 fixture/17/0/0 delete mode 100644 fixture/17/0/1 delete mode 100644 fixture/17/0/10 delete mode 100644 fixture/17/0/11 delete mode 100644 fixture/17/0/12 delete mode 
100644 fixture/17/0/13 delete mode 100644 fixture/17/0/14 delete mode 100644 fixture/17/0/15 delete mode 100644 fixture/17/0/16 delete mode 100644 fixture/17/0/17 delete mode 100644 fixture/17/0/18 delete mode 100644 fixture/17/0/19 delete mode 100644 fixture/17/0/2 delete mode 100644 fixture/17/0/20 delete mode 100644 fixture/17/0/21 delete mode 100644 fixture/17/0/22 delete mode 100644 fixture/17/0/23 delete mode 100644 fixture/17/0/24 delete mode 100644 fixture/17/0/25 delete mode 100644 fixture/17/0/26 delete mode 100644 fixture/17/0/27 delete mode 100644 fixture/17/0/28 delete mode 100644 fixture/17/0/29 delete mode 100644 fixture/17/0/3 delete mode 100644 fixture/17/0/30 delete mode 100644 fixture/17/0/31 delete mode 100644 fixture/17/0/32 delete mode 100644 fixture/17/0/33 delete mode 100644 fixture/17/0/34 delete mode 100644 fixture/17/0/35 delete mode 100644 fixture/17/0/36 delete mode 100644 fixture/17/0/37 delete mode 100644 fixture/17/0/38 delete mode 100644 fixture/17/0/39 delete mode 100644 fixture/17/0/4 delete mode 100644 fixture/17/0/40 delete mode 100644 fixture/17/0/41 delete mode 100644 fixture/17/0/42 delete mode 100644 fixture/17/0/43 delete mode 100644 fixture/17/0/44 delete mode 100644 fixture/17/0/45 delete mode 100644 fixture/17/0/46 delete mode 100644 fixture/17/0/47 delete mode 100644 fixture/17/0/48 delete mode 100644 fixture/17/0/49 delete mode 100644 fixture/17/0/5 delete mode 100644 fixture/17/0/50 delete mode 100644 fixture/17/0/51 delete mode 100644 fixture/17/0/52 delete mode 100644 fixture/17/0/53 delete mode 100644 fixture/17/0/54 delete mode 100644 fixture/17/0/55 delete mode 100644 fixture/17/0/6 delete mode 100644 fixture/17/0/7 delete mode 100644 fixture/17/0/8 delete mode 100644 fixture/17/0/9 delete mode 100644 fixture/17/1/.zarray delete mode 100644 fixture/17/1/.zattrs delete mode 100644 fixture/17/1/0 delete mode 100644 fixture/17/1/1 delete mode 100644 fixture/17/1/10 delete mode 100644 fixture/17/1/11 delete mode 100644 fixture/17/1/12 delete mode 100644 fixture/17/1/13 delete mode 100644 fixture/17/1/14 delete mode 100644 fixture/17/1/15 delete mode 100644 fixture/17/1/16 delete mode 100644 fixture/17/1/17 delete mode 100644 fixture/17/1/18 delete mode 100644 fixture/17/1/19 delete mode 100644 fixture/17/1/2 delete mode 100644 fixture/17/1/20 delete mode 100644 fixture/17/1/21 delete mode 100644 fixture/17/1/22 delete mode 100644 fixture/17/1/23 delete mode 100644 fixture/17/1/24 delete mode 100644 fixture/17/1/25 delete mode 100644 fixture/17/1/26 delete mode 100644 fixture/17/1/27 delete mode 100644 fixture/17/1/28 delete mode 100644 fixture/17/1/29 delete mode 100644 fixture/17/1/3 delete mode 100644 fixture/17/1/30 delete mode 100644 fixture/17/1/31 delete mode 100644 fixture/17/1/32 delete mode 100644 fixture/17/1/33 delete mode 100644 fixture/17/1/34 delete mode 100644 fixture/17/1/35 delete mode 100644 fixture/17/1/36 delete mode 100644 fixture/17/1/37 delete mode 100644 fixture/17/1/38 delete mode 100644 fixture/17/1/39 delete mode 100644 fixture/17/1/4 delete mode 100644 fixture/17/1/40 delete mode 100644 fixture/17/1/41 delete mode 100644 fixture/17/1/42 delete mode 100644 fixture/17/1/43 delete mode 100644 fixture/17/1/44 delete mode 100644 fixture/17/1/45 delete mode 100644 fixture/17/1/46 delete mode 100644 fixture/17/1/47 delete mode 100644 fixture/17/1/48 delete mode 100644 fixture/17/1/49 delete mode 100644 fixture/17/1/5 delete mode 100644 fixture/17/1/50 delete mode 100644 fixture/17/1/51 delete mode 100644 fixture/17/1/52 
delete mode 100644 fixture/17/1/53 delete mode 100644 fixture/17/1/54 delete mode 100644 fixture/17/1/55 delete mode 100644 fixture/17/1/6 delete mode 100644 fixture/17/1/7 delete mode 100644 fixture/17/1/8 delete mode 100644 fixture/17/1/9 delete mode 100644 fixture/17/2/.zarray delete mode 100644 fixture/17/2/.zattrs delete mode 100644 fixture/17/2/0 delete mode 100644 fixture/17/2/1 delete mode 100644 fixture/17/2/10 delete mode 100644 fixture/17/2/11 delete mode 100644 fixture/17/2/12 delete mode 100644 fixture/17/2/13 delete mode 100644 fixture/17/2/14 delete mode 100644 fixture/17/2/15 delete mode 100644 fixture/17/2/16 delete mode 100644 fixture/17/2/17 delete mode 100644 fixture/17/2/18 delete mode 100644 fixture/17/2/19 delete mode 100644 fixture/17/2/2 delete mode 100644 fixture/17/2/20 delete mode 100644 fixture/17/2/21 delete mode 100644 fixture/17/2/22 delete mode 100644 fixture/17/2/23 delete mode 100644 fixture/17/2/24 delete mode 100644 fixture/17/2/25 delete mode 100644 fixture/17/2/26 delete mode 100644 fixture/17/2/27 delete mode 100644 fixture/17/2/28 delete mode 100644 fixture/17/2/29 delete mode 100644 fixture/17/2/3 delete mode 100644 fixture/17/2/30 delete mode 100644 fixture/17/2/31 delete mode 100644 fixture/17/2/32 delete mode 100644 fixture/17/2/33 delete mode 100644 fixture/17/2/34 delete mode 100644 fixture/17/2/35 delete mode 100644 fixture/17/2/36 delete mode 100644 fixture/17/2/37 delete mode 100644 fixture/17/2/38 delete mode 100644 fixture/17/2/39 delete mode 100644 fixture/17/2/4 delete mode 100644 fixture/17/2/40 delete mode 100644 fixture/17/2/41 delete mode 100644 fixture/17/2/42 delete mode 100644 fixture/17/2/43 delete mode 100644 fixture/17/2/44 delete mode 100644 fixture/17/2/45 delete mode 100644 fixture/17/2/46 delete mode 100644 fixture/17/2/47 delete mode 100644 fixture/17/2/48 delete mode 100644 fixture/17/2/49 delete mode 100644 fixture/17/2/5 delete mode 100644 fixture/17/2/50 delete mode 100644 fixture/17/2/51 delete mode 100644 fixture/17/2/52 delete mode 100644 fixture/17/2/53 delete mode 100644 fixture/17/2/54 delete mode 100644 fixture/17/2/55 delete mode 100644 fixture/17/2/6 delete mode 100644 fixture/17/2/7 delete mode 100644 fixture/17/2/8 delete mode 100644 fixture/17/2/9 delete mode 100644 fixture/17/3/.zarray delete mode 100644 fixture/17/3/.zattrs delete mode 100644 fixture/17/3/0 delete mode 100644 fixture/17/3/1 delete mode 100644 fixture/17/3/10 delete mode 100644 fixture/17/3/11 delete mode 100644 fixture/17/3/12 delete mode 100644 fixture/17/3/13 delete mode 100644 fixture/17/3/14 delete mode 100644 fixture/17/3/15 delete mode 100644 fixture/17/3/16 delete mode 100644 fixture/17/3/17 delete mode 100644 fixture/17/3/18 delete mode 100644 fixture/17/3/19 delete mode 100644 fixture/17/3/2 delete mode 100644 fixture/17/3/20 delete mode 100644 fixture/17/3/21 delete mode 100644 fixture/17/3/22 delete mode 100644 fixture/17/3/23 delete mode 100644 fixture/17/3/24 delete mode 100644 fixture/17/3/25 delete mode 100644 fixture/17/3/26 delete mode 100644 fixture/17/3/27 delete mode 100644 fixture/17/3/28 delete mode 100644 fixture/17/3/29 delete mode 100644 fixture/17/3/3 delete mode 100644 fixture/17/3/30 delete mode 100644 fixture/17/3/31 delete mode 100644 fixture/17/3/32 delete mode 100644 fixture/17/3/33 delete mode 100644 fixture/17/3/34 delete mode 100644 fixture/17/3/35 delete mode 100644 fixture/17/3/36 delete mode 100644 fixture/17/3/37 delete mode 100644 fixture/17/3/38 delete mode 100644 fixture/17/3/39 delete mode 
100644 fixture/17/3/4 delete mode 100644 fixture/17/3/40 delete mode 100644 fixture/17/3/41 delete mode 100644 fixture/17/3/42 delete mode 100644 fixture/17/3/43 delete mode 100644 fixture/17/3/44 delete mode 100644 fixture/17/3/45 delete mode 100644 fixture/17/3/46 delete mode 100644 fixture/17/3/47 delete mode 100644 fixture/17/3/48 delete mode 100644 fixture/17/3/49 delete mode 100644 fixture/17/3/5 delete mode 100644 fixture/17/3/50 delete mode 100644 fixture/17/3/51 delete mode 100644 fixture/17/3/52 delete mode 100644 fixture/17/3/53 delete mode 100644 fixture/17/3/54 delete mode 100644 fixture/17/3/55 delete mode 100644 fixture/17/3/6 delete mode 100644 fixture/17/3/7 delete mode 100644 fixture/17/3/8 delete mode 100644 fixture/17/3/9 delete mode 100644 fixture/17/4/.zarray delete mode 100644 fixture/17/4/.zattrs delete mode 100644 fixture/17/4/0 delete mode 100644 fixture/17/4/1 delete mode 100644 fixture/17/4/10 delete mode 100644 fixture/17/4/11 delete mode 100644 fixture/17/4/12 delete mode 100644 fixture/17/4/13 delete mode 100644 fixture/17/4/14 delete mode 100644 fixture/17/4/15 delete mode 100644 fixture/17/4/16 delete mode 100644 fixture/17/4/17 delete mode 100644 fixture/17/4/18 delete mode 100644 fixture/17/4/19 delete mode 100644 fixture/17/4/2 delete mode 100644 fixture/17/4/20 delete mode 100644 fixture/17/4/21 delete mode 100644 fixture/17/4/22 delete mode 100644 fixture/17/4/23 delete mode 100644 fixture/17/4/24 delete mode 100644 fixture/17/4/25 delete mode 100644 fixture/17/4/26 delete mode 100644 fixture/17/4/27 delete mode 100644 fixture/17/4/28 delete mode 100644 fixture/17/4/29 delete mode 100644 fixture/17/4/3 delete mode 100644 fixture/17/4/30 delete mode 100644 fixture/17/4/31 delete mode 100644 fixture/17/4/32 delete mode 100644 fixture/17/4/33 delete mode 100644 fixture/17/4/34 delete mode 100644 fixture/17/4/35 delete mode 100644 fixture/17/4/36 delete mode 100644 fixture/17/4/37 delete mode 100644 fixture/17/4/38 delete mode 100644 fixture/17/4/39 delete mode 100644 fixture/17/4/4 delete mode 100644 fixture/17/4/40 delete mode 100644 fixture/17/4/41 delete mode 100644 fixture/17/4/42 delete mode 100644 fixture/17/4/43 delete mode 100644 fixture/17/4/44 delete mode 100644 fixture/17/4/45 delete mode 100644 fixture/17/4/46 delete mode 100644 fixture/17/4/47 delete mode 100644 fixture/17/4/48 delete mode 100644 fixture/17/4/49 delete mode 100644 fixture/17/4/5 delete mode 100644 fixture/17/4/50 delete mode 100644 fixture/17/4/51 delete mode 100644 fixture/17/4/52 delete mode 100644 fixture/17/4/53 delete mode 100644 fixture/17/4/54 delete mode 100644 fixture/17/4/55 delete mode 100644 fixture/17/4/6 delete mode 100644 fixture/17/4/7 delete mode 100644 fixture/17/4/8 delete mode 100644 fixture/17/4/9 delete mode 100644 fixture/17/5/.zarray delete mode 100644 fixture/17/5/.zattrs delete mode 100644 fixture/17/5/0 delete mode 100644 fixture/17/5/1 delete mode 100644 fixture/17/5/10 delete mode 100644 fixture/17/5/11 delete mode 100644 fixture/17/5/12 delete mode 100644 fixture/17/5/13 delete mode 100644 fixture/17/5/14 delete mode 100644 fixture/17/5/15 delete mode 100644 fixture/17/5/16 delete mode 100644 fixture/17/5/17 delete mode 100644 fixture/17/5/18 delete mode 100644 fixture/17/5/19 delete mode 100644 fixture/17/5/2 delete mode 100644 fixture/17/5/20 delete mode 100644 fixture/17/5/21 delete mode 100644 fixture/17/5/22 delete mode 100644 fixture/17/5/23 delete mode 100644 fixture/17/5/24 delete mode 100644 fixture/17/5/25 delete mode 100644 
fixture/17/5/26 delete mode 100644 fixture/17/5/27 delete mode 100644 fixture/17/5/28 delete mode 100644 fixture/17/5/29 delete mode 100644 fixture/17/5/3 delete mode 100644 fixture/17/5/30 delete mode 100644 fixture/17/5/31 delete mode 100644 fixture/17/5/32 delete mode 100644 fixture/17/5/33 delete mode 100644 fixture/17/5/34 delete mode 100644 fixture/17/5/35 delete mode 100644 fixture/17/5/36 delete mode 100644 fixture/17/5/37 delete mode 100644 fixture/17/5/38 delete mode 100644 fixture/17/5/39 delete mode 100644 fixture/17/5/4 delete mode 100644 fixture/17/5/40 delete mode 100644 fixture/17/5/41 delete mode 100644 fixture/17/5/42 delete mode 100644 fixture/17/5/43 delete mode 100644 fixture/17/5/44 delete mode 100644 fixture/17/5/45 delete mode 100644 fixture/17/5/46 delete mode 100644 fixture/17/5/47 delete mode 100644 fixture/17/5/48 delete mode 100644 fixture/17/5/49 delete mode 100644 fixture/17/5/5 delete mode 100644 fixture/17/5/50 delete mode 100644 fixture/17/5/51 delete mode 100644 fixture/17/5/52 delete mode 100644 fixture/17/5/53 delete mode 100644 fixture/17/5/54 delete mode 100644 fixture/17/5/55 delete mode 100644 fixture/17/5/6 delete mode 100644 fixture/17/5/7 delete mode 100644 fixture/17/5/8 delete mode 100644 fixture/17/5/9 delete mode 100644 fixture/17/6/.zarray delete mode 100644 fixture/17/6/.zattrs delete mode 100644 fixture/17/6/0 delete mode 100644 fixture/17/6/1 delete mode 100644 fixture/17/6/10 delete mode 100644 fixture/17/6/11 delete mode 100644 fixture/17/6/12 delete mode 100644 fixture/17/6/13 delete mode 100644 fixture/17/6/14 delete mode 100644 fixture/17/6/15 delete mode 100644 fixture/17/6/16 delete mode 100644 fixture/17/6/17 delete mode 100644 fixture/17/6/18 delete mode 100644 fixture/17/6/19 delete mode 100644 fixture/17/6/2 delete mode 100644 fixture/17/6/20 delete mode 100644 fixture/17/6/21 delete mode 100644 fixture/17/6/22 delete mode 100644 fixture/17/6/23 delete mode 100644 fixture/17/6/24 delete mode 100644 fixture/17/6/25 delete mode 100644 fixture/17/6/26 delete mode 100644 fixture/17/6/27 delete mode 100644 fixture/17/6/28 delete mode 100644 fixture/17/6/29 delete mode 100644 fixture/17/6/3 delete mode 100644 fixture/17/6/30 delete mode 100644 fixture/17/6/31 delete mode 100644 fixture/17/6/32 delete mode 100644 fixture/17/6/33 delete mode 100644 fixture/17/6/34 delete mode 100644 fixture/17/6/35 delete mode 100644 fixture/17/6/36 delete mode 100644 fixture/17/6/37 delete mode 100644 fixture/17/6/38 delete mode 100644 fixture/17/6/39 delete mode 100644 fixture/17/6/4 delete mode 100644 fixture/17/6/40 delete mode 100644 fixture/17/6/41 delete mode 100644 fixture/17/6/42 delete mode 100644 fixture/17/6/43 delete mode 100644 fixture/17/6/44 delete mode 100644 fixture/17/6/45 delete mode 100644 fixture/17/6/46 delete mode 100644 fixture/17/6/47 delete mode 100644 fixture/17/6/48 delete mode 100644 fixture/17/6/49 delete mode 100644 fixture/17/6/5 delete mode 100644 fixture/17/6/50 delete mode 100644 fixture/17/6/51 delete mode 100644 fixture/17/6/52 delete mode 100644 fixture/17/6/53 delete mode 100644 fixture/17/6/54 delete mode 100644 fixture/17/6/55 delete mode 100644 fixture/17/6/6 delete mode 100644 fixture/17/6/7 delete mode 100644 fixture/17/6/8 delete mode 100644 fixture/17/6/9 delete mode 100644 fixture/18/.zattrs delete mode 100644 fixture/18/0/.zarray delete mode 100644 fixture/18/0/.zattrs delete mode 100644 fixture/18/0/0.0 delete mode 100644 fixture/18/0/0.1 delete mode 100644 fixture/18/0/0.2 delete mode 100644 
fixture/18/0/0.3 delete mode 100644 fixture/18/0/1.0 delete mode 100644 fixture/18/0/1.1 delete mode 100644 fixture/18/0/1.2 delete mode 100644 fixture/18/0/1.3 delete mode 100644 fixture/18/0/10.0 delete mode 100644 fixture/18/0/10.1 delete mode 100644 fixture/18/0/10.2 delete mode 100644 fixture/18/0/10.3 delete mode 100644 fixture/18/0/11.0 delete mode 100644 fixture/18/0/11.1 delete mode 100644 fixture/18/0/11.2 delete mode 100644 fixture/18/0/11.3 delete mode 100644 fixture/18/0/12.0 delete mode 100644 fixture/18/0/12.1 delete mode 100644 fixture/18/0/12.2 delete mode 100644 fixture/18/0/12.3 delete mode 100644 fixture/18/0/13.0 delete mode 100644 fixture/18/0/13.1 delete mode 100644 fixture/18/0/13.2 delete mode 100644 fixture/18/0/13.3 delete mode 100644 fixture/18/0/14.0 delete mode 100644 fixture/18/0/14.1 delete mode 100644 fixture/18/0/14.2 delete mode 100644 fixture/18/0/14.3 delete mode 100644 fixture/18/0/15.0 delete mode 100644 fixture/18/0/15.1 delete mode 100644 fixture/18/0/15.2 delete mode 100644 fixture/18/0/15.3 delete mode 100644 fixture/18/0/16.0 delete mode 100644 fixture/18/0/16.1 delete mode 100644 fixture/18/0/16.2 delete mode 100644 fixture/18/0/16.3 delete mode 100644 fixture/18/0/17.0 delete mode 100644 fixture/18/0/17.1 delete mode 100644 fixture/18/0/17.2 delete mode 100644 fixture/18/0/17.3 delete mode 100644 fixture/18/0/18.0 delete mode 100644 fixture/18/0/18.1 delete mode 100644 fixture/18/0/18.2 delete mode 100644 fixture/18/0/18.3 delete mode 100644 fixture/18/0/19.0 delete mode 100644 fixture/18/0/19.1 delete mode 100644 fixture/18/0/19.2 delete mode 100644 fixture/18/0/19.3 delete mode 100644 fixture/18/0/2.0 delete mode 100644 fixture/18/0/2.1 delete mode 100644 fixture/18/0/2.2 delete mode 100644 fixture/18/0/2.3 delete mode 100644 fixture/18/0/3.0 delete mode 100644 fixture/18/0/3.1 delete mode 100644 fixture/18/0/3.2 delete mode 100644 fixture/18/0/3.3 delete mode 100644 fixture/18/0/4.0 delete mode 100644 fixture/18/0/4.1 delete mode 100644 fixture/18/0/4.2 delete mode 100644 fixture/18/0/4.3 delete mode 100644 fixture/18/0/5.0 delete mode 100644 fixture/18/0/5.1 delete mode 100644 fixture/18/0/5.2 delete mode 100644 fixture/18/0/5.3 delete mode 100644 fixture/18/0/6.0 delete mode 100644 fixture/18/0/6.1 delete mode 100644 fixture/18/0/6.2 delete mode 100644 fixture/18/0/6.3 delete mode 100644 fixture/18/0/7.0 delete mode 100644 fixture/18/0/7.1 delete mode 100644 fixture/18/0/7.2 delete mode 100644 fixture/18/0/7.3 delete mode 100644 fixture/18/0/8.0 delete mode 100644 fixture/18/0/8.1 delete mode 100644 fixture/18/0/8.2 delete mode 100644 fixture/18/0/8.3 delete mode 100644 fixture/18/0/9.0 delete mode 100644 fixture/18/0/9.1 delete mode 100644 fixture/18/0/9.2 delete mode 100644 fixture/18/0/9.3 delete mode 100644 fixture/18/1/.zarray delete mode 100644 fixture/18/1/.zattrs delete mode 100644 fixture/18/1/0.0 delete mode 100644 fixture/18/1/0.1 delete mode 100644 fixture/18/1/0.2 delete mode 100644 fixture/18/1/0.3 delete mode 100644 fixture/18/1/1.0 delete mode 100644 fixture/18/1/1.1 delete mode 100644 fixture/18/1/1.2 delete mode 100644 fixture/18/1/1.3 delete mode 100644 fixture/18/1/10.0 delete mode 100644 fixture/18/1/10.1 delete mode 100644 fixture/18/1/10.2 delete mode 100644 fixture/18/1/10.3 delete mode 100644 fixture/18/1/11.0 delete mode 100644 fixture/18/1/11.1 delete mode 100644 fixture/18/1/11.2 delete mode 100644 fixture/18/1/11.3 delete mode 100644 fixture/18/1/12.0 delete mode 100644 fixture/18/1/12.1 delete mode 100644 
fixture/18/1/12.2 delete mode 100644 fixture/18/1/12.3 delete mode 100644 fixture/18/1/13.0 delete mode 100644 fixture/18/1/13.1 delete mode 100644 fixture/18/1/13.2 delete mode 100644 fixture/18/1/13.3 delete mode 100644 fixture/18/1/14.0 delete mode 100644 fixture/18/1/14.1 delete mode 100644 fixture/18/1/14.2 delete mode 100644 fixture/18/1/14.3 delete mode 100644 fixture/18/1/15.0 delete mode 100644 fixture/18/1/15.1 delete mode 100644 fixture/18/1/15.2 delete mode 100644 fixture/18/1/15.3 delete mode 100644 fixture/18/1/16.0 delete mode 100644 fixture/18/1/16.1 delete mode 100644 fixture/18/1/16.2 delete mode 100644 fixture/18/1/16.3 delete mode 100644 fixture/18/1/17.0 delete mode 100644 fixture/18/1/17.1 delete mode 100644 fixture/18/1/17.2 delete mode 100644 fixture/18/1/17.3 delete mode 100644 fixture/18/1/18.0 delete mode 100644 fixture/18/1/18.1 delete mode 100644 fixture/18/1/18.2 delete mode 100644 fixture/18/1/18.3 delete mode 100644 fixture/18/1/19.0 delete mode 100644 fixture/18/1/19.1 delete mode 100644 fixture/18/1/19.2 delete mode 100644 fixture/18/1/19.3 delete mode 100644 fixture/18/1/2.0 delete mode 100644 fixture/18/1/2.1 delete mode 100644 fixture/18/1/2.2 delete mode 100644 fixture/18/1/2.3 delete mode 100644 fixture/18/1/3.0 delete mode 100644 fixture/18/1/3.1 delete mode 100644 fixture/18/1/3.2 delete mode 100644 fixture/18/1/3.3 delete mode 100644 fixture/18/1/4.0 delete mode 100644 fixture/18/1/4.1 delete mode 100644 fixture/18/1/4.2 delete mode 100644 fixture/18/1/4.3 delete mode 100644 fixture/18/1/5.0 delete mode 100644 fixture/18/1/5.1 delete mode 100644 fixture/18/1/5.2 delete mode 100644 fixture/18/1/5.3 delete mode 100644 fixture/18/1/6.0 delete mode 100644 fixture/18/1/6.1 delete mode 100644 fixture/18/1/6.2 delete mode 100644 fixture/18/1/6.3 delete mode 100644 fixture/18/1/7.0 delete mode 100644 fixture/18/1/7.1 delete mode 100644 fixture/18/1/7.2 delete mode 100644 fixture/18/1/7.3 delete mode 100644 fixture/18/1/8.0 delete mode 100644 fixture/18/1/8.1 delete mode 100644 fixture/18/1/8.2 delete mode 100644 fixture/18/1/8.3 delete mode 100644 fixture/18/1/9.0 delete mode 100644 fixture/18/1/9.1 delete mode 100644 fixture/18/1/9.2 delete mode 100644 fixture/18/1/9.3 delete mode 100644 fixture/18/2/.zarray delete mode 100644 fixture/18/2/.zattrs delete mode 100644 fixture/18/2/0.0 delete mode 100644 fixture/18/2/0.1 delete mode 100644 fixture/18/2/0.2 delete mode 100644 fixture/18/2/0.3 delete mode 100644 fixture/18/2/1.0 delete mode 100644 fixture/18/2/1.1 delete mode 100644 fixture/18/2/1.2 delete mode 100644 fixture/18/2/1.3 delete mode 100644 fixture/18/2/10.0 delete mode 100644 fixture/18/2/10.1 delete mode 100644 fixture/18/2/10.2 delete mode 100644 fixture/18/2/10.3 delete mode 100644 fixture/18/2/11.0 delete mode 100644 fixture/18/2/11.1 delete mode 100644 fixture/18/2/11.2 delete mode 100644 fixture/18/2/11.3 delete mode 100644 fixture/18/2/12.0 delete mode 100644 fixture/18/2/12.1 delete mode 100644 fixture/18/2/12.2 delete mode 100644 fixture/18/2/12.3 delete mode 100644 fixture/18/2/13.0 delete mode 100644 fixture/18/2/13.1 delete mode 100644 fixture/18/2/13.2 delete mode 100644 fixture/18/2/13.3 delete mode 100644 fixture/18/2/14.0 delete mode 100644 fixture/18/2/14.1 delete mode 100644 fixture/18/2/14.2 delete mode 100644 fixture/18/2/14.3 delete mode 100644 fixture/18/2/15.0 delete mode 100644 fixture/18/2/15.1 delete mode 100644 fixture/18/2/15.2 delete mode 100644 fixture/18/2/15.3 delete mode 100644 fixture/18/2/16.0 delete mode 100644 
fixture/18/2/16.1 delete mode 100644 fixture/18/2/16.2 delete mode 100644 fixture/18/2/16.3 delete mode 100644 fixture/18/2/17.0 delete mode 100644 fixture/18/2/17.1 delete mode 100644 fixture/18/2/17.2 delete mode 100644 fixture/18/2/17.3 delete mode 100644 fixture/18/2/18.0 delete mode 100644 fixture/18/2/18.1 delete mode 100644 fixture/18/2/18.2 delete mode 100644 fixture/18/2/18.3 delete mode 100644 fixture/18/2/19.0 delete mode 100644 fixture/18/2/19.1 delete mode 100644 fixture/18/2/19.2 delete mode 100644 fixture/18/2/19.3 delete mode 100644 fixture/18/2/2.0 delete mode 100644 fixture/18/2/2.1 delete mode 100644 fixture/18/2/2.2 delete mode 100644 fixture/18/2/2.3 delete mode 100644 fixture/18/2/3.0 delete mode 100644 fixture/18/2/3.1 delete mode 100644 fixture/18/2/3.2 delete mode 100644 fixture/18/2/3.3 delete mode 100644 fixture/18/2/4.0 delete mode 100644 fixture/18/2/4.1 delete mode 100644 fixture/18/2/4.2 delete mode 100644 fixture/18/2/4.3 delete mode 100644 fixture/18/2/5.0 delete mode 100644 fixture/18/2/5.1 delete mode 100644 fixture/18/2/5.2 delete mode 100644 fixture/18/2/5.3 delete mode 100644 fixture/18/2/6.0 delete mode 100644 fixture/18/2/6.1 delete mode 100644 fixture/18/2/6.2 delete mode 100644 fixture/18/2/6.3 delete mode 100644 fixture/18/2/7.0 delete mode 100644 fixture/18/2/7.1 delete mode 100644 fixture/18/2/7.2 delete mode 100644 fixture/18/2/7.3 delete mode 100644 fixture/18/2/8.0 delete mode 100644 fixture/18/2/8.1 delete mode 100644 fixture/18/2/8.2 delete mode 100644 fixture/18/2/8.3 delete mode 100644 fixture/18/2/9.0 delete mode 100644 fixture/18/2/9.1 delete mode 100644 fixture/18/2/9.2 delete mode 100644 fixture/18/2/9.3 delete mode 100644 fixture/18/3/.zarray delete mode 100644 fixture/18/3/.zattrs delete mode 100644 fixture/18/3/0.0 delete mode 100644 fixture/18/3/0.1 delete mode 100644 fixture/18/3/0.2 delete mode 100644 fixture/18/3/0.3 delete mode 100644 fixture/18/3/1.0 delete mode 100644 fixture/18/3/1.1 delete mode 100644 fixture/18/3/1.2 delete mode 100644 fixture/18/3/1.3 delete mode 100644 fixture/18/3/10.0 delete mode 100644 fixture/18/3/10.1 delete mode 100644 fixture/18/3/10.2 delete mode 100644 fixture/18/3/10.3 delete mode 100644 fixture/18/3/11.0 delete mode 100644 fixture/18/3/11.1 delete mode 100644 fixture/18/3/11.2 delete mode 100644 fixture/18/3/11.3 delete mode 100644 fixture/18/3/12.0 delete mode 100644 fixture/18/3/12.1 delete mode 100644 fixture/18/3/12.2 delete mode 100644 fixture/18/3/12.3 delete mode 100644 fixture/18/3/13.0 delete mode 100644 fixture/18/3/13.1 delete mode 100644 fixture/18/3/13.2 delete mode 100644 fixture/18/3/13.3 delete mode 100644 fixture/18/3/14.0 delete mode 100644 fixture/18/3/14.1 delete mode 100644 fixture/18/3/14.2 delete mode 100644 fixture/18/3/14.3 delete mode 100644 fixture/18/3/15.0 delete mode 100644 fixture/18/3/15.1 delete mode 100644 fixture/18/3/15.2 delete mode 100644 fixture/18/3/15.3 delete mode 100644 fixture/18/3/16.0 delete mode 100644 fixture/18/3/16.1 delete mode 100644 fixture/18/3/16.2 delete mode 100644 fixture/18/3/16.3 delete mode 100644 fixture/18/3/17.0 delete mode 100644 fixture/18/3/17.1 delete mode 100644 fixture/18/3/17.2 delete mode 100644 fixture/18/3/17.3 delete mode 100644 fixture/18/3/18.0 delete mode 100644 fixture/18/3/18.1 delete mode 100644 fixture/18/3/18.2 delete mode 100644 fixture/18/3/18.3 delete mode 100644 fixture/18/3/19.0 delete mode 100644 fixture/18/3/19.1 delete mode 100644 fixture/18/3/19.2 delete mode 100644 fixture/18/3/19.3 delete mode 100644 
fixture/18/3/2.0 delete mode 100644 fixture/18/3/2.1 delete mode 100644 fixture/18/3/2.2 delete mode 100644 fixture/18/3/2.3 delete mode 100644 fixture/18/3/3.0 delete mode 100644 fixture/18/3/3.1 delete mode 100644 fixture/18/3/3.2 delete mode 100644 fixture/18/3/3.3 delete mode 100644 fixture/18/3/4.0 delete mode 100644 fixture/18/3/4.1 delete mode 100644 fixture/18/3/4.2 delete mode 100644 fixture/18/3/4.3 delete mode 100644 fixture/18/3/5.0 delete mode 100644 fixture/18/3/5.1 delete mode 100644 fixture/18/3/5.2 delete mode 100644 fixture/18/3/5.3 delete mode 100644 fixture/18/3/6.0 delete mode 100644 fixture/18/3/6.1 delete mode 100644 fixture/18/3/6.2 delete mode 100644 fixture/18/3/6.3 delete mode 100644 fixture/18/3/7.0 delete mode 100644 fixture/18/3/7.1 delete mode 100644 fixture/18/3/7.2 delete mode 100644 fixture/18/3/7.3 delete mode 100644 fixture/18/3/8.0 delete mode 100644 fixture/18/3/8.1 delete mode 100644 fixture/18/3/8.2 delete mode 100644 fixture/18/3/8.3 delete mode 100644 fixture/18/3/9.0 delete mode 100644 fixture/18/3/9.1 delete mode 100644 fixture/18/3/9.2 delete mode 100644 fixture/18/3/9.3 delete mode 100644 fixture/18/4/.zarray delete mode 100644 fixture/18/4/.zattrs delete mode 100644 fixture/18/4/0.0 delete mode 100644 fixture/18/4/0.1 delete mode 100644 fixture/18/4/0.2 delete mode 100644 fixture/18/4/0.3 delete mode 100644 fixture/18/4/1.0 delete mode 100644 fixture/18/4/1.1 delete mode 100644 fixture/18/4/1.2 delete mode 100644 fixture/18/4/1.3 delete mode 100644 fixture/18/4/10.0 delete mode 100644 fixture/18/4/10.1 delete mode 100644 fixture/18/4/10.2 delete mode 100644 fixture/18/4/10.3 delete mode 100644 fixture/18/4/11.0 delete mode 100644 fixture/18/4/11.1 delete mode 100644 fixture/18/4/11.2 delete mode 100644 fixture/18/4/11.3 delete mode 100644 fixture/18/4/12.0 delete mode 100644 fixture/18/4/12.1 delete mode 100644 fixture/18/4/12.2 delete mode 100644 fixture/18/4/12.3 delete mode 100644 fixture/18/4/13.0 delete mode 100644 fixture/18/4/13.1 delete mode 100644 fixture/18/4/13.2 delete mode 100644 fixture/18/4/13.3 delete mode 100644 fixture/18/4/14.0 delete mode 100644 fixture/18/4/14.1 delete mode 100644 fixture/18/4/14.2 delete mode 100644 fixture/18/4/14.3 delete mode 100644 fixture/18/4/15.0 delete mode 100644 fixture/18/4/15.1 delete mode 100644 fixture/18/4/15.2 delete mode 100644 fixture/18/4/15.3 delete mode 100644 fixture/18/4/16.0 delete mode 100644 fixture/18/4/16.1 delete mode 100644 fixture/18/4/16.2 delete mode 100644 fixture/18/4/16.3 delete mode 100644 fixture/18/4/17.0 delete mode 100644 fixture/18/4/17.1 delete mode 100644 fixture/18/4/17.2 delete mode 100644 fixture/18/4/17.3 delete mode 100644 fixture/18/4/18.0 delete mode 100644 fixture/18/4/18.1 delete mode 100644 fixture/18/4/18.2 delete mode 100644 fixture/18/4/18.3 delete mode 100644 fixture/18/4/19.0 delete mode 100644 fixture/18/4/19.1 delete mode 100644 fixture/18/4/19.2 delete mode 100644 fixture/18/4/19.3 delete mode 100644 fixture/18/4/2.0 delete mode 100644 fixture/18/4/2.1 delete mode 100644 fixture/18/4/2.2 delete mode 100644 fixture/18/4/2.3 delete mode 100644 fixture/18/4/3.0 delete mode 100644 fixture/18/4/3.1 delete mode 100644 fixture/18/4/3.2 delete mode 100644 fixture/18/4/3.3 delete mode 100644 fixture/18/4/4.0 delete mode 100644 fixture/18/4/4.1 delete mode 100644 fixture/18/4/4.2 delete mode 100644 fixture/18/4/4.3 delete mode 100644 fixture/18/4/5.0 delete mode 100644 fixture/18/4/5.1 delete mode 100644 fixture/18/4/5.2 delete mode 100644 
fixture/18/4/5.3 delete mode 100644 fixture/18/4/6.0 delete mode 100644 fixture/18/4/6.1 delete mode 100644 fixture/18/4/6.2 delete mode 100644 fixture/18/4/6.3 delete mode 100644 fixture/18/4/7.0 delete mode 100644 fixture/18/4/7.1 delete mode 100644 fixture/18/4/7.2 delete mode 100644 fixture/18/4/7.3 delete mode 100644 fixture/18/4/8.0 delete mode 100644 fixture/18/4/8.1 delete mode 100644 fixture/18/4/8.2 delete mode 100644 fixture/18/4/8.3 delete mode 100644 fixture/18/4/9.0 delete mode 100644 fixture/18/4/9.1 delete mode 100644 fixture/18/4/9.2 delete mode 100644 fixture/18/4/9.3 delete mode 100644 fixture/18/5/.zarray delete mode 100644 fixture/18/5/.zattrs delete mode 100644 fixture/18/5/0.0 delete mode 100644 fixture/18/5/0.1 delete mode 100644 fixture/18/5/0.2 delete mode 100644 fixture/18/5/0.3 delete mode 100644 fixture/18/5/1.0 delete mode 100644 fixture/18/5/1.1 delete mode 100644 fixture/18/5/1.2 delete mode 100644 fixture/18/5/1.3 delete mode 100644 fixture/18/5/10.0 delete mode 100644 fixture/18/5/10.1 delete mode 100644 fixture/18/5/10.2 delete mode 100644 fixture/18/5/10.3 delete mode 100644 fixture/18/5/11.0 delete mode 100644 fixture/18/5/11.1 delete mode 100644 fixture/18/5/11.2 delete mode 100644 fixture/18/5/11.3 delete mode 100644 fixture/18/5/12.0 delete mode 100644 fixture/18/5/12.1 delete mode 100644 fixture/18/5/12.2 delete mode 100644 fixture/18/5/12.3 delete mode 100644 fixture/18/5/13.0 delete mode 100644 fixture/18/5/13.1 delete mode 100644 fixture/18/5/13.2 delete mode 100644 fixture/18/5/13.3 delete mode 100644 fixture/18/5/14.0 delete mode 100644 fixture/18/5/14.1 delete mode 100644 fixture/18/5/14.2 delete mode 100644 fixture/18/5/14.3 delete mode 100644 fixture/18/5/15.0 delete mode 100644 fixture/18/5/15.1 delete mode 100644 fixture/18/5/15.2 delete mode 100644 fixture/18/5/15.3 delete mode 100644 fixture/18/5/16.0 delete mode 100644 fixture/18/5/16.1 delete mode 100644 fixture/18/5/16.2 delete mode 100644 fixture/18/5/16.3 delete mode 100644 fixture/18/5/17.0 delete mode 100644 fixture/18/5/17.1 delete mode 100644 fixture/18/5/17.2 delete mode 100644 fixture/18/5/17.3 delete mode 100644 fixture/18/5/18.0 delete mode 100644 fixture/18/5/18.1 delete mode 100644 fixture/18/5/18.2 delete mode 100644 fixture/18/5/18.3 delete mode 100644 fixture/18/5/19.0 delete mode 100644 fixture/18/5/19.1 delete mode 100644 fixture/18/5/19.2 delete mode 100644 fixture/18/5/19.3 delete mode 100644 fixture/18/5/2.0 delete mode 100644 fixture/18/5/2.1 delete mode 100644 fixture/18/5/2.2 delete mode 100644 fixture/18/5/2.3 delete mode 100644 fixture/18/5/3.0 delete mode 100644 fixture/18/5/3.1 delete mode 100644 fixture/18/5/3.2 delete mode 100644 fixture/18/5/3.3 delete mode 100644 fixture/18/5/4.0 delete mode 100644 fixture/18/5/4.1 delete mode 100644 fixture/18/5/4.2 delete mode 100644 fixture/18/5/4.3 delete mode 100644 fixture/18/5/5.0 delete mode 100644 fixture/18/5/5.1 delete mode 100644 fixture/18/5/5.2 delete mode 100644 fixture/18/5/5.3 delete mode 100644 fixture/18/5/6.0 delete mode 100644 fixture/18/5/6.1 delete mode 100644 fixture/18/5/6.2 delete mode 100644 fixture/18/5/6.3 delete mode 100644 fixture/18/5/7.0 delete mode 100644 fixture/18/5/7.1 delete mode 100644 fixture/18/5/7.2 delete mode 100644 fixture/18/5/7.3 delete mode 100644 fixture/18/5/8.0 delete mode 100644 fixture/18/5/8.1 delete mode 100644 fixture/18/5/8.2 delete mode 100644 fixture/18/5/8.3 delete mode 100644 fixture/18/5/9.0 delete mode 100644 fixture/18/5/9.1 delete mode 100644 
fixture/18/5/9.2 delete mode 100644 fixture/18/5/9.3 delete mode 100644 fixture/18/6/.zarray delete mode 100644 fixture/18/6/.zattrs delete mode 100644 fixture/18/6/0.0 delete mode 100644 fixture/18/6/0.1 delete mode 100644 fixture/18/6/0.2 delete mode 100644 fixture/18/6/0.3 delete mode 100644 fixture/18/6/1.0 delete mode 100644 fixture/18/6/1.1 delete mode 100644 fixture/18/6/1.2 delete mode 100644 fixture/18/6/1.3 delete mode 100644 fixture/18/6/10.0 delete mode 100644 fixture/18/6/10.1 delete mode 100644 fixture/18/6/10.2 delete mode 100644 fixture/18/6/10.3 delete mode 100644 fixture/18/6/11.0 delete mode 100644 fixture/18/6/11.1 delete mode 100644 fixture/18/6/11.2 delete mode 100644 fixture/18/6/11.3 delete mode 100644 fixture/18/6/12.0 delete mode 100644 fixture/18/6/12.1 delete mode 100644 fixture/18/6/12.2 delete mode 100644 fixture/18/6/12.3 delete mode 100644 fixture/18/6/13.0 delete mode 100644 fixture/18/6/13.1 delete mode 100644 fixture/18/6/13.2 delete mode 100644 fixture/18/6/13.3 delete mode 100644 fixture/18/6/14.0 delete mode 100644 fixture/18/6/14.1 delete mode 100644 fixture/18/6/14.2 delete mode 100644 fixture/18/6/14.3 delete mode 100644 fixture/18/6/15.0 delete mode 100644 fixture/18/6/15.1 delete mode 100644 fixture/18/6/15.2 delete mode 100644 fixture/18/6/15.3 delete mode 100644 fixture/18/6/16.0 delete mode 100644 fixture/18/6/16.1 delete mode 100644 fixture/18/6/16.2 delete mode 100644 fixture/18/6/16.3 delete mode 100644 fixture/18/6/17.0 delete mode 100644 fixture/18/6/17.1 delete mode 100644 fixture/18/6/17.2 delete mode 100644 fixture/18/6/17.3 delete mode 100644 fixture/18/6/18.0 delete mode 100644 fixture/18/6/18.1 delete mode 100644 fixture/18/6/18.2 delete mode 100644 fixture/18/6/18.3 delete mode 100644 fixture/18/6/19.0 delete mode 100644 fixture/18/6/19.1 delete mode 100644 fixture/18/6/19.2 delete mode 100644 fixture/18/6/19.3 delete mode 100644 fixture/18/6/2.0 delete mode 100644 fixture/18/6/2.1 delete mode 100644 fixture/18/6/2.2 delete mode 100644 fixture/18/6/2.3 delete mode 100644 fixture/18/6/3.0 delete mode 100644 fixture/18/6/3.1 delete mode 100644 fixture/18/6/3.2 delete mode 100644 fixture/18/6/3.3 delete mode 100644 fixture/18/6/4.0 delete mode 100644 fixture/18/6/4.1 delete mode 100644 fixture/18/6/4.2 delete mode 100644 fixture/18/6/4.3 delete mode 100644 fixture/18/6/5.0 delete mode 100644 fixture/18/6/5.1 delete mode 100644 fixture/18/6/5.2 delete mode 100644 fixture/18/6/5.3 delete mode 100644 fixture/18/6/6.0 delete mode 100644 fixture/18/6/6.1 delete mode 100644 fixture/18/6/6.2 delete mode 100644 fixture/18/6/6.3 delete mode 100644 fixture/18/6/7.0 delete mode 100644 fixture/18/6/7.1 delete mode 100644 fixture/18/6/7.2 delete mode 100644 fixture/18/6/7.3 delete mode 100644 fixture/18/6/8.0 delete mode 100644 fixture/18/6/8.1 delete mode 100644 fixture/18/6/8.2 delete mode 100644 fixture/18/6/8.3 delete mode 100644 fixture/18/6/9.0 delete mode 100644 fixture/18/6/9.1 delete mode 100644 fixture/18/6/9.2 delete mode 100644 fixture/18/6/9.3 delete mode 100644 fixture/19/.zattrs delete mode 100644 fixture/19/0/.zarray delete mode 100644 fixture/19/0/.zattrs delete mode 100644 fixture/19/0/0.0 delete mode 100644 fixture/19/0/0.1 delete mode 100644 fixture/19/0/0.2 delete mode 100644 fixture/19/0/0.3 delete mode 100644 fixture/19/0/1.0 delete mode 100644 fixture/19/0/1.1 delete mode 100644 fixture/19/0/1.2 delete mode 100644 fixture/19/0/1.3 delete mode 100644 fixture/19/1/.zarray delete mode 100644 fixture/19/1/.zattrs delete mode 
100644 fixture/19/1/0.0 delete mode 100644 fixture/19/1/0.1 delete mode 100644 fixture/19/1/0.2 delete mode 100644 fixture/19/1/0.3 delete mode 100644 fixture/19/1/1.0 delete mode 100644 fixture/19/1/1.1 delete mode 100644 fixture/19/1/1.2 delete mode 100644 fixture/19/1/1.3 delete mode 100644 fixture/19/2/.zarray delete mode 100644 fixture/19/2/.zattrs delete mode 100644 fixture/19/2/0.0 delete mode 100644 fixture/19/2/0.1 delete mode 100644 fixture/19/2/0.2 delete mode 100644 fixture/19/2/0.3 delete mode 100644 fixture/19/2/1.0 delete mode 100644 fixture/19/2/1.1 delete mode 100644 fixture/19/2/1.2 delete mode 100644 fixture/19/2/1.3 delete mode 100644 fixture/19/3/.zarray delete mode 100644 fixture/19/3/.zattrs delete mode 100644 fixture/19/3/0.0 delete mode 100644 fixture/19/3/0.1 delete mode 100644 fixture/19/3/0.2 delete mode 100644 fixture/19/3/0.3 delete mode 100644 fixture/19/3/1.0 delete mode 100644 fixture/19/3/1.1 delete mode 100644 fixture/19/3/1.2 delete mode 100644 fixture/19/3/1.3 delete mode 100644 fixture/19/4/.zarray delete mode 100644 fixture/19/4/.zattrs delete mode 100644 fixture/19/4/0.0 delete mode 100644 fixture/19/4/0.1 delete mode 100644 fixture/19/4/0.2 delete mode 100644 fixture/19/4/0.3 delete mode 100644 fixture/19/4/1.0 delete mode 100644 fixture/19/4/1.1 delete mode 100644 fixture/19/4/1.2 delete mode 100644 fixture/19/4/1.3 delete mode 100644 fixture/19/5/.zarray delete mode 100644 fixture/19/5/.zattrs delete mode 100644 fixture/19/5/0.0 delete mode 100644 fixture/19/5/0.1 delete mode 100644 fixture/19/5/0.2 delete mode 100644 fixture/19/5/0.3 delete mode 100644 fixture/19/5/1.0 delete mode 100644 fixture/19/5/1.1 delete mode 100644 fixture/19/5/1.2 delete mode 100644 fixture/19/5/1.3 delete mode 100644 fixture/19/6/.zarray delete mode 100644 fixture/19/6/.zattrs delete mode 100644 fixture/19/6/0.0 delete mode 100644 fixture/19/6/0.1 delete mode 100644 fixture/19/6/0.2 delete mode 100644 fixture/19/6/0.3 delete mode 100644 fixture/19/6/1.0 delete mode 100644 fixture/19/6/1.1 delete mode 100644 fixture/19/6/1.2 delete mode 100644 fixture/19/6/1.3 delete mode 100644 fixture/2/.zattrs delete mode 100644 fixture/2/0/.zarray delete mode 100644 fixture/2/0/.zattrs delete mode 100644 fixture/2/0/0 delete mode 100644 fixture/2/0/1 delete mode 100644 fixture/2/0/10 delete mode 100644 fixture/2/0/11 delete mode 100644 fixture/2/0/2 delete mode 100644 fixture/2/0/3 delete mode 100644 fixture/2/0/4 delete mode 100644 fixture/2/0/5 delete mode 100644 fixture/2/0/6 delete mode 100644 fixture/2/0/7 delete mode 100644 fixture/2/0/8 delete mode 100644 fixture/2/0/9 delete mode 100644 fixture/2/1/.zarray delete mode 100644 fixture/2/1/.zattrs delete mode 100644 fixture/2/1/0 delete mode 100644 fixture/2/1/1 delete mode 100644 fixture/2/1/10 delete mode 100644 fixture/2/1/11 delete mode 100644 fixture/2/1/2 delete mode 100644 fixture/2/1/3 delete mode 100644 fixture/2/1/4 delete mode 100644 fixture/2/1/5 delete mode 100644 fixture/2/1/6 delete mode 100644 fixture/2/1/7 delete mode 100644 fixture/2/1/8 delete mode 100644 fixture/2/1/9 delete mode 100644 fixture/2/2/.zarray delete mode 100644 fixture/2/2/.zattrs delete mode 100644 fixture/2/2/0 delete mode 100644 fixture/2/2/1 delete mode 100644 fixture/2/2/10 delete mode 100644 fixture/2/2/11 delete mode 100644 fixture/2/2/2 delete mode 100644 fixture/2/2/3 delete mode 100644 fixture/2/2/4 delete mode 100644 fixture/2/2/5 delete mode 100644 fixture/2/2/6 delete mode 100644 fixture/2/2/7 delete mode 100644 fixture/2/2/8 delete 
mode 100644 fixture/2/2/9 delete mode 100644 fixture/2/3/.zarray delete mode 100644 fixture/2/3/.zattrs delete mode 100644 fixture/2/3/0 delete mode 100644 fixture/2/3/1 delete mode 100644 fixture/2/3/10 delete mode 100644 fixture/2/3/11 delete mode 100644 fixture/2/3/2 delete mode 100644 fixture/2/3/3 delete mode 100644 fixture/2/3/4 delete mode 100644 fixture/2/3/5 delete mode 100644 fixture/2/3/6 delete mode 100644 fixture/2/3/7 delete mode 100644 fixture/2/3/8 delete mode 100644 fixture/2/3/9 delete mode 100644 fixture/2/4/.zarray delete mode 100644 fixture/2/4/.zattrs delete mode 100644 fixture/2/4/0 delete mode 100644 fixture/2/4/1 delete mode 100644 fixture/2/4/10 delete mode 100644 fixture/2/4/11 delete mode 100644 fixture/2/4/2 delete mode 100644 fixture/2/4/3 delete mode 100644 fixture/2/4/4 delete mode 100644 fixture/2/4/5 delete mode 100644 fixture/2/4/6 delete mode 100644 fixture/2/4/7 delete mode 100644 fixture/2/4/8 delete mode 100644 fixture/2/4/9 delete mode 100644 fixture/2/5/.zarray delete mode 100644 fixture/2/5/.zattrs delete mode 100644 fixture/2/5/0 delete mode 100644 fixture/2/5/1 delete mode 100644 fixture/2/5/10 delete mode 100644 fixture/2/5/11 delete mode 100644 fixture/2/5/2 delete mode 100644 fixture/2/5/3 delete mode 100644 fixture/2/5/4 delete mode 100644 fixture/2/5/5 delete mode 100644 fixture/2/5/6 delete mode 100644 fixture/2/5/7 delete mode 100644 fixture/2/5/8 delete mode 100644 fixture/2/5/9 delete mode 100644 fixture/2/6/.zarray delete mode 100644 fixture/2/6/.zattrs delete mode 100644 fixture/2/6/0 delete mode 100644 fixture/2/6/1 delete mode 100644 fixture/2/6/10 delete mode 100644 fixture/2/6/11 delete mode 100644 fixture/2/6/2 delete mode 100644 fixture/2/6/3 delete mode 100644 fixture/2/6/4 delete mode 100644 fixture/2/6/5 delete mode 100644 fixture/2/6/6 delete mode 100644 fixture/2/6/7 delete mode 100644 fixture/2/6/8 delete mode 100644 fixture/2/6/9 delete mode 100644 fixture/20/.zattrs delete mode 100644 fixture/20/0/.zarray delete mode 100644 fixture/20/0/.zattrs delete mode 100644 fixture/20/0/0.0.0 delete mode 100644 fixture/20/0/0.0.1 delete mode 100644 fixture/20/0/0.0.2 delete mode 100644 fixture/20/0/0.0.3 delete mode 100644 fixture/20/0/0.1.0 delete mode 100644 fixture/20/0/0.1.1 delete mode 100644 fixture/20/0/0.1.2 delete mode 100644 fixture/20/0/0.1.3 delete mode 100644 fixture/20/0/0.2.0 delete mode 100644 fixture/20/0/0.2.1 delete mode 100644 fixture/20/0/0.2.2 delete mode 100644 fixture/20/0/0.2.3 delete mode 100644 fixture/20/0/0.3.0 delete mode 100644 fixture/20/0/0.3.1 delete mode 100644 fixture/20/0/0.3.2 delete mode 100644 fixture/20/0/0.3.3 delete mode 100644 fixture/20/0/1.0.0 delete mode 100644 fixture/20/0/1.0.1 delete mode 100644 fixture/20/0/1.0.2 delete mode 100644 fixture/20/0/1.0.3 delete mode 100644 fixture/20/0/1.1.0 delete mode 100644 fixture/20/0/1.1.1 delete mode 100644 fixture/20/0/1.1.2 delete mode 100644 fixture/20/0/1.1.3 delete mode 100644 fixture/20/0/1.2.0 delete mode 100644 fixture/20/0/1.2.1 delete mode 100644 fixture/20/0/1.2.2 delete mode 100644 fixture/20/0/1.2.3 delete mode 100644 fixture/20/0/1.3.0 delete mode 100644 fixture/20/0/1.3.1 delete mode 100644 fixture/20/0/1.3.2 delete mode 100644 fixture/20/0/1.3.3 delete mode 100644 fixture/20/1/.zarray delete mode 100644 fixture/20/1/.zattrs delete mode 100644 fixture/20/1/0.0.0 delete mode 100644 fixture/20/1/0.0.1 delete mode 100644 fixture/20/1/0.0.2 delete mode 100644 fixture/20/1/0.0.3 delete mode 100644 fixture/20/1/0.1.0 delete mode 100644 
fixture/20/1/0.1.1 delete mode 100644 fixture/20/1/0.1.2 delete mode 100644 fixture/20/1/0.1.3 delete mode 100644 fixture/20/1/0.2.0 delete mode 100644 fixture/20/1/0.2.1 delete mode 100644 fixture/20/1/0.2.2 delete mode 100644 fixture/20/1/0.2.3 delete mode 100644 fixture/20/1/0.3.0 delete mode 100644 fixture/20/1/0.3.1 delete mode 100644 fixture/20/1/0.3.2 delete mode 100644 fixture/20/1/0.3.3 delete mode 100644 fixture/20/1/1.0.0 delete mode 100644 fixture/20/1/1.0.1 delete mode 100644 fixture/20/1/1.0.2 delete mode 100644 fixture/20/1/1.0.3 delete mode 100644 fixture/20/1/1.1.0 delete mode 100644 fixture/20/1/1.1.1 delete mode 100644 fixture/20/1/1.1.2 delete mode 100644 fixture/20/1/1.1.3 delete mode 100644 fixture/20/1/1.2.0 delete mode 100644 fixture/20/1/1.2.1 delete mode 100644 fixture/20/1/1.2.2 delete mode 100644 fixture/20/1/1.2.3 delete mode 100644 fixture/20/1/1.3.0 delete mode 100644 fixture/20/1/1.3.1 delete mode 100644 fixture/20/1/1.3.2 delete mode 100644 fixture/20/1/1.3.3 delete mode 100644 fixture/20/2/.zarray delete mode 100644 fixture/20/2/.zattrs delete mode 100644 fixture/20/2/0.0.0 delete mode 100644 fixture/20/2/0.0.1 delete mode 100644 fixture/20/2/0.0.2 delete mode 100644 fixture/20/2/0.0.3 delete mode 100644 fixture/20/2/0.1.0 delete mode 100644 fixture/20/2/0.1.1 delete mode 100644 fixture/20/2/0.1.2 delete mode 100644 fixture/20/2/0.1.3 delete mode 100644 fixture/20/2/0.2.0 delete mode 100644 fixture/20/2/0.2.1 delete mode 100644 fixture/20/2/0.2.2 delete mode 100644 fixture/20/2/0.2.3 delete mode 100644 fixture/20/2/0.3.0 delete mode 100644 fixture/20/2/0.3.1 delete mode 100644 fixture/20/2/0.3.2 delete mode 100644 fixture/20/2/0.3.3 delete mode 100644 fixture/20/2/1.0.0 delete mode 100644 fixture/20/2/1.0.1 delete mode 100644 fixture/20/2/1.0.2 delete mode 100644 fixture/20/2/1.0.3 delete mode 100644 fixture/20/2/1.1.0 delete mode 100644 fixture/20/2/1.1.1 delete mode 100644 fixture/20/2/1.1.2 delete mode 100644 fixture/20/2/1.1.3 delete mode 100644 fixture/20/2/1.2.0 delete mode 100644 fixture/20/2/1.2.1 delete mode 100644 fixture/20/2/1.2.2 delete mode 100644 fixture/20/2/1.2.3 delete mode 100644 fixture/20/2/1.3.0 delete mode 100644 fixture/20/2/1.3.1 delete mode 100644 fixture/20/2/1.3.2 delete mode 100644 fixture/20/2/1.3.3 delete mode 100644 fixture/20/3/.zarray delete mode 100644 fixture/20/3/.zattrs delete mode 100644 fixture/20/3/0.0.0 delete mode 100644 fixture/20/3/0.0.1 delete mode 100644 fixture/20/3/0.0.2 delete mode 100644 fixture/20/3/0.0.3 delete mode 100644 fixture/20/3/0.1.0 delete mode 100644 fixture/20/3/0.1.1 delete mode 100644 fixture/20/3/0.1.2 delete mode 100644 fixture/20/3/0.1.3 delete mode 100644 fixture/20/3/0.2.0 delete mode 100644 fixture/20/3/0.2.1 delete mode 100644 fixture/20/3/0.2.2 delete mode 100644 fixture/20/3/0.2.3 delete mode 100644 fixture/20/3/0.3.0 delete mode 100644 fixture/20/3/0.3.1 delete mode 100644 fixture/20/3/0.3.2 delete mode 100644 fixture/20/3/0.3.3 delete mode 100644 fixture/20/3/1.0.0 delete mode 100644 fixture/20/3/1.0.1 delete mode 100644 fixture/20/3/1.0.2 delete mode 100644 fixture/20/3/1.0.3 delete mode 100644 fixture/20/3/1.1.0 delete mode 100644 fixture/20/3/1.1.1 delete mode 100644 fixture/20/3/1.1.2 delete mode 100644 fixture/20/3/1.1.3 delete mode 100644 fixture/20/3/1.2.0 delete mode 100644 fixture/20/3/1.2.1 delete mode 100644 fixture/20/3/1.2.2 delete mode 100644 fixture/20/3/1.2.3 delete mode 100644 fixture/20/3/1.3.0 delete mode 100644 fixture/20/3/1.3.1 delete mode 100644 
fixture/20/3/1.3.2 delete mode 100644 fixture/20/3/1.3.3 delete mode 100644 fixture/20/4/.zarray delete mode 100644 fixture/20/4/.zattrs delete mode 100644 fixture/20/4/0.0.0 delete mode 100644 fixture/20/4/0.0.1 delete mode 100644 fixture/20/4/0.0.2 delete mode 100644 fixture/20/4/0.0.3 delete mode 100644 fixture/20/4/0.1.0 delete mode 100644 fixture/20/4/0.1.1 delete mode 100644 fixture/20/4/0.1.2 delete mode 100644 fixture/20/4/0.1.3 delete mode 100644 fixture/20/4/0.2.0 delete mode 100644 fixture/20/4/0.2.1 delete mode 100644 fixture/20/4/0.2.2 delete mode 100644 fixture/20/4/0.2.3 delete mode 100644 fixture/20/4/0.3.0 delete mode 100644 fixture/20/4/0.3.1 delete mode 100644 fixture/20/4/0.3.2 delete mode 100644 fixture/20/4/0.3.3 delete mode 100644 fixture/20/4/1.0.0 delete mode 100644 fixture/20/4/1.0.1 delete mode 100644 fixture/20/4/1.0.2 delete mode 100644 fixture/20/4/1.0.3 delete mode 100644 fixture/20/4/1.1.0 delete mode 100644 fixture/20/4/1.1.1 delete mode 100644 fixture/20/4/1.1.2 delete mode 100644 fixture/20/4/1.1.3 delete mode 100644 fixture/20/4/1.2.0 delete mode 100644 fixture/20/4/1.2.1 delete mode 100644 fixture/20/4/1.2.2 delete mode 100644 fixture/20/4/1.2.3 delete mode 100644 fixture/20/4/1.3.0 delete mode 100644 fixture/20/4/1.3.1 delete mode 100644 fixture/20/4/1.3.2 delete mode 100644 fixture/20/4/1.3.3 delete mode 100644 fixture/20/5/.zarray delete mode 100644 fixture/20/5/.zattrs delete mode 100644 fixture/20/5/0.0.0 delete mode 100644 fixture/20/5/0.0.1 delete mode 100644 fixture/20/5/0.0.2 delete mode 100644 fixture/20/5/0.0.3 delete mode 100644 fixture/20/5/0.1.0 delete mode 100644 fixture/20/5/0.1.1 delete mode 100644 fixture/20/5/0.1.2 delete mode 100644 fixture/20/5/0.1.3 delete mode 100644 fixture/20/5/0.2.0 delete mode 100644 fixture/20/5/0.2.1 delete mode 100644 fixture/20/5/0.2.2 delete mode 100644 fixture/20/5/0.2.3 delete mode 100644 fixture/20/5/0.3.0 delete mode 100644 fixture/20/5/0.3.1 delete mode 100644 fixture/20/5/0.3.2 delete mode 100644 fixture/20/5/0.3.3 delete mode 100644 fixture/20/5/1.0.0 delete mode 100644 fixture/20/5/1.0.1 delete mode 100644 fixture/20/5/1.0.2 delete mode 100644 fixture/20/5/1.0.3 delete mode 100644 fixture/20/5/1.1.0 delete mode 100644 fixture/20/5/1.1.1 delete mode 100644 fixture/20/5/1.1.2 delete mode 100644 fixture/20/5/1.1.3 delete mode 100644 fixture/20/5/1.2.0 delete mode 100644 fixture/20/5/1.2.1 delete mode 100644 fixture/20/5/1.2.2 delete mode 100644 fixture/20/5/1.2.3 delete mode 100644 fixture/20/5/1.3.0 delete mode 100644 fixture/20/5/1.3.1 delete mode 100644 fixture/20/5/1.3.2 delete mode 100644 fixture/20/5/1.3.3 delete mode 100644 fixture/20/6/.zarray delete mode 100644 fixture/20/6/.zattrs delete mode 100644 fixture/20/6/0.0.0 delete mode 100644 fixture/20/6/0.0.1 delete mode 100644 fixture/20/6/0.0.2 delete mode 100644 fixture/20/6/0.0.3 delete mode 100644 fixture/20/6/0.1.0 delete mode 100644 fixture/20/6/0.1.1 delete mode 100644 fixture/20/6/0.1.2 delete mode 100644 fixture/20/6/0.1.3 delete mode 100644 fixture/20/6/0.2.0 delete mode 100644 fixture/20/6/0.2.1 delete mode 100644 fixture/20/6/0.2.2 delete mode 100644 fixture/20/6/0.2.3 delete mode 100644 fixture/20/6/0.3.0 delete mode 100644 fixture/20/6/0.3.1 delete mode 100644 fixture/20/6/0.3.2 delete mode 100644 fixture/20/6/0.3.3 delete mode 100644 fixture/20/6/1.0.0 delete mode 100644 fixture/20/6/1.0.1 delete mode 100644 fixture/20/6/1.0.2 delete mode 100644 fixture/20/6/1.0.3 delete mode 100644 fixture/20/6/1.1.0 delete mode 100644 
fixture/20/6/1.1.1 delete mode 100644 fixture/20/6/1.1.2 delete mode 100644 fixture/20/6/1.1.3 delete mode 100644 fixture/20/6/1.2.0 delete mode 100644 fixture/20/6/1.2.1 delete mode 100644 fixture/20/6/1.2.2 delete mode 100644 fixture/20/6/1.2.3 delete mode 100644 fixture/20/6/1.3.0 delete mode 100644 fixture/20/6/1.3.1 delete mode 100644 fixture/20/6/1.3.2 delete mode 100644 fixture/20/6/1.3.3 delete mode 100644 fixture/21/.zattrs delete mode 100644 fixture/21/0/.zarray delete mode 100644 fixture/21/0/.zattrs delete mode 100644 fixture/21/0/0.0.0 delete mode 100644 fixture/21/0/0.0.1 delete mode 100644 fixture/21/0/0.0.2 delete mode 100644 fixture/21/0/0.0.3 delete mode 100644 fixture/21/0/0.1.0 delete mode 100644 fixture/21/0/0.1.1 delete mode 100644 fixture/21/0/0.1.2 delete mode 100644 fixture/21/0/0.1.3 delete mode 100644 fixture/21/0/0.2.0 delete mode 100644 fixture/21/0/0.2.1 delete mode 100644 fixture/21/0/0.2.2 delete mode 100644 fixture/21/0/0.2.3 delete mode 100644 fixture/21/0/0.3.0 delete mode 100644 fixture/21/0/0.3.1 delete mode 100644 fixture/21/0/0.3.2 delete mode 100644 fixture/21/0/0.3.3 delete mode 100644 fixture/21/0/1.0.0 delete mode 100644 fixture/21/0/1.0.1 delete mode 100644 fixture/21/0/1.0.2 delete mode 100644 fixture/21/0/1.0.3 delete mode 100644 fixture/21/0/1.1.0 delete mode 100644 fixture/21/0/1.1.1 delete mode 100644 fixture/21/0/1.1.2 delete mode 100644 fixture/21/0/1.1.3 delete mode 100644 fixture/21/0/1.2.0 delete mode 100644 fixture/21/0/1.2.1 delete mode 100644 fixture/21/0/1.2.2 delete mode 100644 fixture/21/0/1.2.3 delete mode 100644 fixture/21/0/1.3.0 delete mode 100644 fixture/21/0/1.3.1 delete mode 100644 fixture/21/0/1.3.2 delete mode 100644 fixture/21/0/1.3.3 delete mode 100644 fixture/21/1/.zarray delete mode 100644 fixture/21/1/.zattrs delete mode 100644 fixture/21/1/0.0.0 delete mode 100644 fixture/21/1/0.0.1 delete mode 100644 fixture/21/1/0.0.2 delete mode 100644 fixture/21/1/0.0.3 delete mode 100644 fixture/21/1/0.1.0 delete mode 100644 fixture/21/1/0.1.1 delete mode 100644 fixture/21/1/0.1.2 delete mode 100644 fixture/21/1/0.1.3 delete mode 100644 fixture/21/1/0.2.0 delete mode 100644 fixture/21/1/0.2.1 delete mode 100644 fixture/21/1/0.2.2 delete mode 100644 fixture/21/1/0.2.3 delete mode 100644 fixture/21/1/0.3.0 delete mode 100644 fixture/21/1/0.3.1 delete mode 100644 fixture/21/1/0.3.2 delete mode 100644 fixture/21/1/0.3.3 delete mode 100644 fixture/21/1/1.0.0 delete mode 100644 fixture/21/1/1.0.1 delete mode 100644 fixture/21/1/1.0.2 delete mode 100644 fixture/21/1/1.0.3 delete mode 100644 fixture/21/1/1.1.0 delete mode 100644 fixture/21/1/1.1.1 delete mode 100644 fixture/21/1/1.1.2 delete mode 100644 fixture/21/1/1.1.3 delete mode 100644 fixture/21/1/1.2.0 delete mode 100644 fixture/21/1/1.2.1 delete mode 100644 fixture/21/1/1.2.2 delete mode 100644 fixture/21/1/1.2.3 delete mode 100644 fixture/21/1/1.3.0 delete mode 100644 fixture/21/1/1.3.1 delete mode 100644 fixture/21/1/1.3.2 delete mode 100644 fixture/21/1/1.3.3 delete mode 100644 fixture/21/2/.zarray delete mode 100644 fixture/21/2/.zattrs delete mode 100644 fixture/21/2/0.0.0 delete mode 100644 fixture/21/2/0.0.1 delete mode 100644 fixture/21/2/0.0.2 delete mode 100644 fixture/21/2/0.0.3 delete mode 100644 fixture/21/2/0.1.0 delete mode 100644 fixture/21/2/0.1.1 delete mode 100644 fixture/21/2/0.1.2 delete mode 100644 fixture/21/2/0.1.3 delete mode 100644 fixture/21/2/0.2.0 delete mode 100644 fixture/21/2/0.2.1 delete mode 100644 fixture/21/2/0.2.2 delete mode 100644 
fixture/21/2/0.2.3 delete mode 100644 fixture/21/2/0.3.0 delete mode 100644 fixture/21/2/0.3.1 delete mode 100644 fixture/21/2/0.3.2 delete mode 100644 fixture/21/2/0.3.3 delete mode 100644 fixture/21/2/1.0.0 delete mode 100644 fixture/21/2/1.0.1 delete mode 100644 fixture/21/2/1.0.2 delete mode 100644 fixture/21/2/1.0.3 delete mode 100644 fixture/21/2/1.1.0 delete mode 100644 fixture/21/2/1.1.1 delete mode 100644 fixture/21/2/1.1.2 delete mode 100644 fixture/21/2/1.1.3 delete mode 100644 fixture/21/2/1.2.0 delete mode 100644 fixture/21/2/1.2.1 delete mode 100644 fixture/21/2/1.2.2 delete mode 100644 fixture/21/2/1.2.3 delete mode 100644 fixture/21/2/1.3.0 delete mode 100644 fixture/21/2/1.3.1 delete mode 100644 fixture/21/2/1.3.2 delete mode 100644 fixture/21/2/1.3.3 delete mode 100644 fixture/21/3/.zarray delete mode 100644 fixture/21/3/.zattrs delete mode 100644 fixture/21/3/0.0.0 delete mode 100644 fixture/21/3/0.0.1 delete mode 100644 fixture/21/3/0.0.2 delete mode 100644 fixture/21/3/0.0.3 delete mode 100644 fixture/21/3/0.1.0 delete mode 100644 fixture/21/3/0.1.1 delete mode 100644 fixture/21/3/0.1.2 delete mode 100644 fixture/21/3/0.1.3 delete mode 100644 fixture/21/3/0.2.0 delete mode 100644 fixture/21/3/0.2.1 delete mode 100644 fixture/21/3/0.2.2 delete mode 100644 fixture/21/3/0.2.3 delete mode 100644 fixture/21/3/0.3.0 delete mode 100644 fixture/21/3/0.3.1 delete mode 100644 fixture/21/3/0.3.2 delete mode 100644 fixture/21/3/0.3.3 delete mode 100644 fixture/21/3/1.0.0 delete mode 100644 fixture/21/3/1.0.1 delete mode 100644 fixture/21/3/1.0.2 delete mode 100644 fixture/21/3/1.0.3 delete mode 100644 fixture/21/3/1.1.0 delete mode 100644 fixture/21/3/1.1.1 delete mode 100644 fixture/21/3/1.1.2 delete mode 100644 fixture/21/3/1.1.3 delete mode 100644 fixture/21/3/1.2.0 delete mode 100644 fixture/21/3/1.2.1 delete mode 100644 fixture/21/3/1.2.2 delete mode 100644 fixture/21/3/1.2.3 delete mode 100644 fixture/21/3/1.3.0 delete mode 100644 fixture/21/3/1.3.1 delete mode 100644 fixture/21/3/1.3.2 delete mode 100644 fixture/21/3/1.3.3 delete mode 100644 fixture/21/4/.zarray delete mode 100644 fixture/21/4/.zattrs delete mode 100644 fixture/21/4/0.0.0 delete mode 100644 fixture/21/4/0.0.1 delete mode 100644 fixture/21/4/0.0.2 delete mode 100644 fixture/21/4/0.0.3 delete mode 100644 fixture/21/4/0.1.0 delete mode 100644 fixture/21/4/0.1.1 delete mode 100644 fixture/21/4/0.1.2 delete mode 100644 fixture/21/4/0.1.3 delete mode 100644 fixture/21/4/0.2.0 delete mode 100644 fixture/21/4/0.2.1 delete mode 100644 fixture/21/4/0.2.2 delete mode 100644 fixture/21/4/0.2.3 delete mode 100644 fixture/21/4/0.3.0 delete mode 100644 fixture/21/4/0.3.1 delete mode 100644 fixture/21/4/0.3.2 delete mode 100644 fixture/21/4/0.3.3 delete mode 100644 fixture/21/4/1.0.0 delete mode 100644 fixture/21/4/1.0.1 delete mode 100644 fixture/21/4/1.0.2 delete mode 100644 fixture/21/4/1.0.3 delete mode 100644 fixture/21/4/1.1.0 delete mode 100644 fixture/21/4/1.1.1 delete mode 100644 fixture/21/4/1.1.2 delete mode 100644 fixture/21/4/1.1.3 delete mode 100644 fixture/21/4/1.2.0 delete mode 100644 fixture/21/4/1.2.1 delete mode 100644 fixture/21/4/1.2.2 delete mode 100644 fixture/21/4/1.2.3 delete mode 100644 fixture/21/4/1.3.0 delete mode 100644 fixture/21/4/1.3.1 delete mode 100644 fixture/21/4/1.3.2 delete mode 100644 fixture/21/4/1.3.3 delete mode 100644 fixture/21/5/.zarray delete mode 100644 fixture/21/5/.zattrs delete mode 100644 fixture/21/5/0.0.0 delete mode 100644 fixture/21/5/0.0.1 delete mode 100644 
fixture/21/5/0.0.2 delete mode 100644 fixture/21/5/0.0.3 delete mode 100644 fixture/21/5/0.1.0 delete mode 100644 fixture/21/5/0.1.1 delete mode 100644 fixture/21/5/0.1.2 delete mode 100644 fixture/21/5/0.1.3 delete mode 100644 fixture/21/5/0.2.0 delete mode 100644 fixture/21/5/0.2.1 delete mode 100644 fixture/21/5/0.2.2 delete mode 100644 fixture/21/5/0.2.3 delete mode 100644 fixture/21/5/0.3.0 delete mode 100644 fixture/21/5/0.3.1 delete mode 100644 fixture/21/5/0.3.2 delete mode 100644 fixture/21/5/0.3.3 delete mode 100644 fixture/21/5/1.0.0 delete mode 100644 fixture/21/5/1.0.1 delete mode 100644 fixture/21/5/1.0.2 delete mode 100644 fixture/21/5/1.0.3 delete mode 100644 fixture/21/5/1.1.0 delete mode 100644 fixture/21/5/1.1.1 delete mode 100644 fixture/21/5/1.1.2 delete mode 100644 fixture/21/5/1.1.3 delete mode 100644 fixture/21/5/1.2.0 delete mode 100644 fixture/21/5/1.2.1 delete mode 100644 fixture/21/5/1.2.2 delete mode 100644 fixture/21/5/1.2.3 delete mode 100644 fixture/21/5/1.3.0 delete mode 100644 fixture/21/5/1.3.1 delete mode 100644 fixture/21/5/1.3.2 delete mode 100644 fixture/21/5/1.3.3 delete mode 100644 fixture/21/6/.zarray delete mode 100644 fixture/21/6/.zattrs delete mode 100644 fixture/21/6/0.0.0 delete mode 100644 fixture/21/6/0.0.1 delete mode 100644 fixture/21/6/0.0.2 delete mode 100644 fixture/21/6/0.0.3 delete mode 100644 fixture/21/6/0.1.0 delete mode 100644 fixture/21/6/0.1.1 delete mode 100644 fixture/21/6/0.1.2 delete mode 100644 fixture/21/6/0.1.3 delete mode 100644 fixture/21/6/0.2.0 delete mode 100644 fixture/21/6/0.2.1 delete mode 100644 fixture/21/6/0.2.2 delete mode 100644 fixture/21/6/0.2.3 delete mode 100644 fixture/21/6/0.3.0 delete mode 100644 fixture/21/6/0.3.1 delete mode 100644 fixture/21/6/0.3.2 delete mode 100644 fixture/21/6/0.3.3 delete mode 100644 fixture/21/6/1.0.0 delete mode 100644 fixture/21/6/1.0.1 delete mode 100644 fixture/21/6/1.0.2 delete mode 100644 fixture/21/6/1.0.3 delete mode 100644 fixture/21/6/1.1.0 delete mode 100644 fixture/21/6/1.1.1 delete mode 100644 fixture/21/6/1.1.2 delete mode 100644 fixture/21/6/1.1.3 delete mode 100644 fixture/21/6/1.2.0 delete mode 100644 fixture/21/6/1.2.1 delete mode 100644 fixture/21/6/1.2.2 delete mode 100644 fixture/21/6/1.2.3 delete mode 100644 fixture/21/6/1.3.0 delete mode 100644 fixture/21/6/1.3.1 delete mode 100644 fixture/21/6/1.3.2 delete mode 100644 fixture/21/6/1.3.3 delete mode 100644 fixture/22/.zattrs delete mode 100644 fixture/22/0/.zarray delete mode 100644 fixture/22/0/.zattrs delete mode 100644 fixture/22/0/0.0.0.0 delete mode 100644 fixture/22/0/0.0.0.1 delete mode 100644 fixture/22/0/0.0.0.2 delete mode 100644 fixture/22/0/0.0.0.3 delete mode 100644 fixture/22/0/0.0.1.0 delete mode 100644 fixture/22/0/0.0.1.1 delete mode 100644 fixture/22/0/0.0.1.2 delete mode 100644 fixture/22/0/0.0.1.3 delete mode 100644 fixture/22/0/0.0.2.0 delete mode 100644 fixture/22/0/0.0.2.1 delete mode 100644 fixture/22/0/0.0.2.2 delete mode 100644 fixture/22/0/0.0.2.3 delete mode 100644 fixture/22/0/0.0.3.0 delete mode 100644 fixture/22/0/0.0.3.1 delete mode 100644 fixture/22/0/0.0.3.2 delete mode 100644 fixture/22/0/0.0.3.3 delete mode 100644 fixture/22/0/0.1.0.0 delete mode 100644 fixture/22/0/0.1.0.1 delete mode 100644 fixture/22/0/0.1.0.2 delete mode 100644 fixture/22/0/0.1.0.3 delete mode 100644 fixture/22/0/0.1.1.0 delete mode 100644 fixture/22/0/0.1.1.1 delete mode 100644 fixture/22/0/0.1.1.2 delete mode 100644 fixture/22/0/0.1.1.3 delete mode 100644 fixture/22/0/0.1.2.0 delete mode 100644 
fixture/22/0/0.1.2.1 delete mode 100644 fixture/22/0/0.1.2.2 delete mode 100644 fixture/22/0/0.1.2.3 delete mode 100644 fixture/22/0/0.1.3.0 delete mode 100644 fixture/22/0/0.1.3.1 delete mode 100644 fixture/22/0/0.1.3.2 delete mode 100644 fixture/22/0/0.1.3.3 delete mode 100644 fixture/22/0/0.2.0.0 delete mode 100644 fixture/22/0/0.2.0.1 delete mode 100644 fixture/22/0/0.2.0.2 delete mode 100644 fixture/22/0/0.2.0.3 delete mode 100644 fixture/22/0/0.2.1.0 delete mode 100644 fixture/22/0/0.2.1.1 delete mode 100644 fixture/22/0/0.2.1.2 delete mode 100644 fixture/22/0/0.2.1.3 delete mode 100644 fixture/22/0/0.2.2.0 delete mode 100644 fixture/22/0/0.2.2.1 delete mode 100644 fixture/22/0/0.2.2.2 delete mode 100644 fixture/22/0/0.2.2.3 delete mode 100644 fixture/22/0/0.2.3.0 delete mode 100644 fixture/22/0/0.2.3.1 delete mode 100644 fixture/22/0/0.2.3.2 delete mode 100644 fixture/22/0/0.2.3.3 delete mode 100644 fixture/22/0/0.3.0.0 delete mode 100644 fixture/22/0/0.3.0.1 delete mode 100644 fixture/22/0/0.3.0.2 delete mode 100644 fixture/22/0/0.3.0.3 delete mode 100644 fixture/22/0/0.3.1.0 delete mode 100644 fixture/22/0/0.3.1.1 delete mode 100644 fixture/22/0/0.3.1.2 delete mode 100644 fixture/22/0/0.3.1.3 delete mode 100644 fixture/22/0/0.3.2.0 delete mode 100644 fixture/22/0/0.3.2.1 delete mode 100644 fixture/22/0/0.3.2.2 delete mode 100644 fixture/22/0/0.3.2.3 delete mode 100644 fixture/22/0/0.3.3.0 delete mode 100644 fixture/22/0/0.3.3.1 delete mode 100644 fixture/22/0/0.3.3.2 delete mode 100644 fixture/22/0/0.3.3.3 delete mode 100644 fixture/22/0/1.0.0.0 delete mode 100644 fixture/22/0/1.0.0.1 delete mode 100644 fixture/22/0/1.0.0.2 delete mode 100644 fixture/22/0/1.0.0.3 delete mode 100644 fixture/22/0/1.0.1.0 delete mode 100644 fixture/22/0/1.0.1.1 delete mode 100644 fixture/22/0/1.0.1.2 delete mode 100644 fixture/22/0/1.0.1.3 delete mode 100644 fixture/22/0/1.0.2.0 delete mode 100644 fixture/22/0/1.0.2.1 delete mode 100644 fixture/22/0/1.0.2.2 delete mode 100644 fixture/22/0/1.0.2.3 delete mode 100644 fixture/22/0/1.0.3.0 delete mode 100644 fixture/22/0/1.0.3.1 delete mode 100644 fixture/22/0/1.0.3.2 delete mode 100644 fixture/22/0/1.0.3.3 delete mode 100644 fixture/22/0/1.1.0.0 delete mode 100644 fixture/22/0/1.1.0.1 delete mode 100644 fixture/22/0/1.1.0.2 delete mode 100644 fixture/22/0/1.1.0.3 delete mode 100644 fixture/22/0/1.1.1.0 delete mode 100644 fixture/22/0/1.1.1.1 delete mode 100644 fixture/22/0/1.1.1.2 delete mode 100644 fixture/22/0/1.1.1.3 delete mode 100644 fixture/22/0/1.1.2.0 delete mode 100644 fixture/22/0/1.1.2.1 delete mode 100644 fixture/22/0/1.1.2.2 delete mode 100644 fixture/22/0/1.1.2.3 delete mode 100644 fixture/22/0/1.1.3.0 delete mode 100644 fixture/22/0/1.1.3.1 delete mode 100644 fixture/22/0/1.1.3.2 delete mode 100644 fixture/22/0/1.1.3.3 delete mode 100644 fixture/22/0/1.2.0.0 delete mode 100644 fixture/22/0/1.2.0.1 delete mode 100644 fixture/22/0/1.2.0.2 delete mode 100644 fixture/22/0/1.2.0.3 delete mode 100644 fixture/22/0/1.2.1.0 delete mode 100644 fixture/22/0/1.2.1.1 delete mode 100644 fixture/22/0/1.2.1.2 delete mode 100644 fixture/22/0/1.2.1.3 delete mode 100644 fixture/22/0/1.2.2.0 delete mode 100644 fixture/22/0/1.2.2.1 delete mode 100644 fixture/22/0/1.2.2.2 delete mode 100644 fixture/22/0/1.2.2.3 delete mode 100644 fixture/22/0/1.2.3.0 delete mode 100644 fixture/22/0/1.2.3.1 delete mode 100644 fixture/22/0/1.2.3.2 delete mode 100644 fixture/22/0/1.2.3.3 delete mode 100644 fixture/22/0/1.3.0.0 delete mode 100644 fixture/22/0/1.3.0.1 delete mode 
100644 fixture/22/0/1.3.0.2 delete mode 100644 fixture/22/0/1.3.0.3 delete mode 100644 fixture/22/0/1.3.1.0 delete mode 100644 fixture/22/0/1.3.1.1 delete mode 100644 fixture/22/0/1.3.1.2 delete mode 100644 fixture/22/0/1.3.1.3 delete mode 100644 fixture/22/0/1.3.2.0 delete mode 100644 fixture/22/0/1.3.2.1 delete mode 100644 fixture/22/0/1.3.2.2 delete mode 100644 fixture/22/0/1.3.2.3 delete mode 100644 fixture/22/0/1.3.3.0 delete mode 100644 fixture/22/0/1.3.3.1 delete mode 100644 fixture/22/0/1.3.3.2 delete mode 100644 fixture/22/0/1.3.3.3 delete mode 100644 fixture/22/1/.zarray delete mode 100644 fixture/22/1/.zattrs delete mode 100644 fixture/22/1/0.0.0.0 delete mode 100644 fixture/22/1/0.0.0.1 delete mode 100644 fixture/22/1/0.0.0.2 delete mode 100644 fixture/22/1/0.0.0.3 delete mode 100644 fixture/22/1/0.0.1.0 delete mode 100644 fixture/22/1/0.0.1.1 delete mode 100644 fixture/22/1/0.0.1.2 delete mode 100644 fixture/22/1/0.0.1.3 delete mode 100644 fixture/22/1/0.0.2.0 delete mode 100644 fixture/22/1/0.0.2.1 delete mode 100644 fixture/22/1/0.0.2.2 delete mode 100644 fixture/22/1/0.0.2.3 delete mode 100644 fixture/22/1/0.0.3.0 delete mode 100644 fixture/22/1/0.0.3.1 delete mode 100644 fixture/22/1/0.0.3.2 delete mode 100644 fixture/22/1/0.0.3.3 delete mode 100644 fixture/22/1/0.1.0.0 delete mode 100644 fixture/22/1/0.1.0.1 delete mode 100644 fixture/22/1/0.1.0.2 delete mode 100644 fixture/22/1/0.1.0.3 delete mode 100644 fixture/22/1/0.1.1.0 delete mode 100644 fixture/22/1/0.1.1.1 delete mode 100644 fixture/22/1/0.1.1.2 delete mode 100644 fixture/22/1/0.1.1.3 delete mode 100644 fixture/22/1/0.1.2.0 delete mode 100644 fixture/22/1/0.1.2.1 delete mode 100644 fixture/22/1/0.1.2.2 delete mode 100644 fixture/22/1/0.1.2.3 delete mode 100644 fixture/22/1/0.1.3.0 delete mode 100644 fixture/22/1/0.1.3.1 delete mode 100644 fixture/22/1/0.1.3.2 delete mode 100644 fixture/22/1/0.1.3.3 delete mode 100644 fixture/22/1/0.2.0.0 delete mode 100644 fixture/22/1/0.2.0.1 delete mode 100644 fixture/22/1/0.2.0.2 delete mode 100644 fixture/22/1/0.2.0.3 delete mode 100644 fixture/22/1/0.2.1.0 delete mode 100644 fixture/22/1/0.2.1.1 delete mode 100644 fixture/22/1/0.2.1.2 delete mode 100644 fixture/22/1/0.2.1.3 delete mode 100644 fixture/22/1/0.2.2.0 delete mode 100644 fixture/22/1/0.2.2.1 delete mode 100644 fixture/22/1/0.2.2.2 delete mode 100644 fixture/22/1/0.2.2.3 delete mode 100644 fixture/22/1/0.2.3.0 delete mode 100644 fixture/22/1/0.2.3.1 delete mode 100644 fixture/22/1/0.2.3.2 delete mode 100644 fixture/22/1/0.2.3.3 delete mode 100644 fixture/22/1/0.3.0.0 delete mode 100644 fixture/22/1/0.3.0.1 delete mode 100644 fixture/22/1/0.3.0.2 delete mode 100644 fixture/22/1/0.3.0.3 delete mode 100644 fixture/22/1/0.3.1.0 delete mode 100644 fixture/22/1/0.3.1.1 delete mode 100644 fixture/22/1/0.3.1.2 delete mode 100644 fixture/22/1/0.3.1.3 delete mode 100644 fixture/22/1/0.3.2.0 delete mode 100644 fixture/22/1/0.3.2.1 delete mode 100644 fixture/22/1/0.3.2.2 delete mode 100644 fixture/22/1/0.3.2.3 delete mode 100644 fixture/22/1/0.3.3.0 delete mode 100644 fixture/22/1/0.3.3.1 delete mode 100644 fixture/22/1/0.3.3.2 delete mode 100644 fixture/22/1/0.3.3.3 delete mode 100644 fixture/22/1/1.0.0.0 delete mode 100644 fixture/22/1/1.0.0.1 delete mode 100644 fixture/22/1/1.0.0.2 delete mode 100644 fixture/22/1/1.0.0.3 delete mode 100644 fixture/22/1/1.0.1.0 delete mode 100644 fixture/22/1/1.0.1.1 delete mode 100644 fixture/22/1/1.0.1.2 delete mode 100644 fixture/22/1/1.0.1.3 delete mode 100644 fixture/22/1/1.0.2.0 delete 
mode 100644 fixture/22/1/1.0.2.1 delete mode 100644 fixture/22/1/1.0.2.2 delete mode 100644 fixture/22/1/1.0.2.3 delete mode 100644 fixture/22/1/1.0.3.0 delete mode 100644 fixture/22/1/1.0.3.1 delete mode 100644 fixture/22/1/1.0.3.2 delete mode 100644 fixture/22/1/1.0.3.3 delete mode 100644 fixture/22/1/1.1.0.0 delete mode 100644 fixture/22/1/1.1.0.1 delete mode 100644 fixture/22/1/1.1.0.2 delete mode 100644 fixture/22/1/1.1.0.3 delete mode 100644 fixture/22/1/1.1.1.0 delete mode 100644 fixture/22/1/1.1.1.1 delete mode 100644 fixture/22/1/1.1.1.2 delete mode 100644 fixture/22/1/1.1.1.3 delete mode 100644 fixture/22/1/1.1.2.0 delete mode 100644 fixture/22/1/1.1.2.1 delete mode 100644 fixture/22/1/1.1.2.2 delete mode 100644 fixture/22/1/1.1.2.3 delete mode 100644 fixture/22/1/1.1.3.0 delete mode 100644 fixture/22/1/1.1.3.1 delete mode 100644 fixture/22/1/1.1.3.2 delete mode 100644 fixture/22/1/1.1.3.3 delete mode 100644 fixture/22/1/1.2.0.0 delete mode 100644 fixture/22/1/1.2.0.1 delete mode 100644 fixture/22/1/1.2.0.2 delete mode 100644 fixture/22/1/1.2.0.3 delete mode 100644 fixture/22/1/1.2.1.0 delete mode 100644 fixture/22/1/1.2.1.1 delete mode 100644 fixture/22/1/1.2.1.2 delete mode 100644 fixture/22/1/1.2.1.3 delete mode 100644 fixture/22/1/1.2.2.0 delete mode 100644 fixture/22/1/1.2.2.1 delete mode 100644 fixture/22/1/1.2.2.2 delete mode 100644 fixture/22/1/1.2.2.3 delete mode 100644 fixture/22/1/1.2.3.0 delete mode 100644 fixture/22/1/1.2.3.1 delete mode 100644 fixture/22/1/1.2.3.2 delete mode 100644 fixture/22/1/1.2.3.3 delete mode 100644 fixture/22/1/1.3.0.0 delete mode 100644 fixture/22/1/1.3.0.1 delete mode 100644 fixture/22/1/1.3.0.2 delete mode 100644 fixture/22/1/1.3.0.3 delete mode 100644 fixture/22/1/1.3.1.0 delete mode 100644 fixture/22/1/1.3.1.1 delete mode 100644 fixture/22/1/1.3.1.2 delete mode 100644 fixture/22/1/1.3.1.3 delete mode 100644 fixture/22/1/1.3.2.0 delete mode 100644 fixture/22/1/1.3.2.1 delete mode 100644 fixture/22/1/1.3.2.2 delete mode 100644 fixture/22/1/1.3.2.3 delete mode 100644 fixture/22/1/1.3.3.0 delete mode 100644 fixture/22/1/1.3.3.1 delete mode 100644 fixture/22/1/1.3.3.2 delete mode 100644 fixture/22/1/1.3.3.3 delete mode 100644 fixture/22/2/.zarray delete mode 100644 fixture/22/2/.zattrs delete mode 100644 fixture/22/2/0.0.0.0 delete mode 100644 fixture/22/2/0.0.0.1 delete mode 100644 fixture/22/2/0.0.0.2 delete mode 100644 fixture/22/2/0.0.0.3 delete mode 100644 fixture/22/2/0.0.1.0 delete mode 100644 fixture/22/2/0.0.1.1 delete mode 100644 fixture/22/2/0.0.1.2 delete mode 100644 fixture/22/2/0.0.1.3 delete mode 100644 fixture/22/2/0.0.2.0 delete mode 100644 fixture/22/2/0.0.2.1 delete mode 100644 fixture/22/2/0.0.2.2 delete mode 100644 fixture/22/2/0.0.2.3 delete mode 100644 fixture/22/2/0.0.3.0 delete mode 100644 fixture/22/2/0.0.3.1 delete mode 100644 fixture/22/2/0.0.3.2 delete mode 100644 fixture/22/2/0.0.3.3 delete mode 100644 fixture/22/2/0.1.0.0 delete mode 100644 fixture/22/2/0.1.0.1 delete mode 100644 fixture/22/2/0.1.0.2 delete mode 100644 fixture/22/2/0.1.0.3 delete mode 100644 fixture/22/2/0.1.1.0 delete mode 100644 fixture/22/2/0.1.1.1 delete mode 100644 fixture/22/2/0.1.1.2 delete mode 100644 fixture/22/2/0.1.1.3 delete mode 100644 fixture/22/2/0.1.2.0 delete mode 100644 fixture/22/2/0.1.2.1 delete mode 100644 fixture/22/2/0.1.2.2 delete mode 100644 fixture/22/2/0.1.2.3 delete mode 100644 fixture/22/2/0.1.3.0 delete mode 100644 fixture/22/2/0.1.3.1 delete mode 100644 fixture/22/2/0.1.3.2 delete mode 100644 fixture/22/2/0.1.3.3 
delete mode 100644 fixture/22/2/0.2.0.0 delete mode 100644 fixture/22/2/0.2.0.1 delete mode 100644 fixture/22/2/0.2.0.2 delete mode 100644 fixture/22/2/0.2.0.3 delete mode 100644 fixture/22/2/0.2.1.0 delete mode 100644 fixture/22/2/0.2.1.1 delete mode 100644 fixture/22/2/0.2.1.2 delete mode 100644 fixture/22/2/0.2.1.3 delete mode 100644 fixture/22/2/0.2.2.0 delete mode 100644 fixture/22/2/0.2.2.1 delete mode 100644 fixture/22/2/0.2.2.2 delete mode 100644 fixture/22/2/0.2.2.3 delete mode 100644 fixture/22/2/0.2.3.0 delete mode 100644 fixture/22/2/0.2.3.1 delete mode 100644 fixture/22/2/0.2.3.2 delete mode 100644 fixture/22/2/0.2.3.3 delete mode 100644 fixture/22/2/0.3.0.0 delete mode 100644 fixture/22/2/0.3.0.1 delete mode 100644 fixture/22/2/0.3.0.2 delete mode 100644 fixture/22/2/0.3.0.3 delete mode 100644 fixture/22/2/0.3.1.0 delete mode 100644 fixture/22/2/0.3.1.1 delete mode 100644 fixture/22/2/0.3.1.2 delete mode 100644 fixture/22/2/0.3.1.3 delete mode 100644 fixture/22/2/0.3.2.0 delete mode 100644 fixture/22/2/0.3.2.1 delete mode 100644 fixture/22/2/0.3.2.2 delete mode 100644 fixture/22/2/0.3.2.3 delete mode 100644 fixture/22/2/0.3.3.0 delete mode 100644 fixture/22/2/0.3.3.1 delete mode 100644 fixture/22/2/0.3.3.2 delete mode 100644 fixture/22/2/0.3.3.3 delete mode 100644 fixture/22/2/1.0.0.0 delete mode 100644 fixture/22/2/1.0.0.1 delete mode 100644 fixture/22/2/1.0.0.2 delete mode 100644 fixture/22/2/1.0.0.3 delete mode 100644 fixture/22/2/1.0.1.0 delete mode 100644 fixture/22/2/1.0.1.1 delete mode 100644 fixture/22/2/1.0.1.2 delete mode 100644 fixture/22/2/1.0.1.3 delete mode 100644 fixture/22/2/1.0.2.0 delete mode 100644 fixture/22/2/1.0.2.1 delete mode 100644 fixture/22/2/1.0.2.2 delete mode 100644 fixture/22/2/1.0.2.3 delete mode 100644 fixture/22/2/1.0.3.0 delete mode 100644 fixture/22/2/1.0.3.1 delete mode 100644 fixture/22/2/1.0.3.2 delete mode 100644 fixture/22/2/1.0.3.3 delete mode 100644 fixture/22/2/1.1.0.0 delete mode 100644 fixture/22/2/1.1.0.1 delete mode 100644 fixture/22/2/1.1.0.2 delete mode 100644 fixture/22/2/1.1.0.3 delete mode 100644 fixture/22/2/1.1.1.0 delete mode 100644 fixture/22/2/1.1.1.1 delete mode 100644 fixture/22/2/1.1.1.2 delete mode 100644 fixture/22/2/1.1.1.3 delete mode 100644 fixture/22/2/1.1.2.0 delete mode 100644 fixture/22/2/1.1.2.1 delete mode 100644 fixture/22/2/1.1.2.2 delete mode 100644 fixture/22/2/1.1.2.3 delete mode 100644 fixture/22/2/1.1.3.0 delete mode 100644 fixture/22/2/1.1.3.1 delete mode 100644 fixture/22/2/1.1.3.2 delete mode 100644 fixture/22/2/1.1.3.3 delete mode 100644 fixture/22/2/1.2.0.0 delete mode 100644 fixture/22/2/1.2.0.1 delete mode 100644 fixture/22/2/1.2.0.2 delete mode 100644 fixture/22/2/1.2.0.3 delete mode 100644 fixture/22/2/1.2.1.0 delete mode 100644 fixture/22/2/1.2.1.1 delete mode 100644 fixture/22/2/1.2.1.2 delete mode 100644 fixture/22/2/1.2.1.3 delete mode 100644 fixture/22/2/1.2.2.0 delete mode 100644 fixture/22/2/1.2.2.1 delete mode 100644 fixture/22/2/1.2.2.2 delete mode 100644 fixture/22/2/1.2.2.3 delete mode 100644 fixture/22/2/1.2.3.0 delete mode 100644 fixture/22/2/1.2.3.1 delete mode 100644 fixture/22/2/1.2.3.2 delete mode 100644 fixture/22/2/1.2.3.3 delete mode 100644 fixture/22/2/1.3.0.0 delete mode 100644 fixture/22/2/1.3.0.1 delete mode 100644 fixture/22/2/1.3.0.2 delete mode 100644 fixture/22/2/1.3.0.3 delete mode 100644 fixture/22/2/1.3.1.0 delete mode 100644 fixture/22/2/1.3.1.1 delete mode 100644 fixture/22/2/1.3.1.2 delete mode 100644 fixture/22/2/1.3.1.3 delete mode 100644 
fixture/22/2/1.3.2.0 delete mode 100644 fixture/22/2/1.3.2.1 delete mode 100644 fixture/22/2/1.3.2.2 delete mode 100644 fixture/22/2/1.3.2.3 delete mode 100644 fixture/22/2/1.3.3.0 delete mode 100644 fixture/22/2/1.3.3.1 delete mode 100644 fixture/22/2/1.3.3.2 delete mode 100644 fixture/22/2/1.3.3.3 delete mode 100644 fixture/22/3/.zarray delete mode 100644 fixture/22/3/.zattrs delete mode 100644 fixture/22/3/0.0.0.0 delete mode 100644 fixture/22/3/0.0.0.1 delete mode 100644 fixture/22/3/0.0.0.2 delete mode 100644 fixture/22/3/0.0.0.3 delete mode 100644 fixture/22/3/0.0.1.0 delete mode 100644 fixture/22/3/0.0.1.1 delete mode 100644 fixture/22/3/0.0.1.2 delete mode 100644 fixture/22/3/0.0.1.3 delete mode 100644 fixture/22/3/0.0.2.0 delete mode 100644 fixture/22/3/0.0.2.1 delete mode 100644 fixture/22/3/0.0.2.2 delete mode 100644 fixture/22/3/0.0.2.3 delete mode 100644 fixture/22/3/0.0.3.0 delete mode 100644 fixture/22/3/0.0.3.1 delete mode 100644 fixture/22/3/0.0.3.2 delete mode 100644 fixture/22/3/0.0.3.3 delete mode 100644 fixture/22/3/0.1.0.0 delete mode 100644 fixture/22/3/0.1.0.1 delete mode 100644 fixture/22/3/0.1.0.2 delete mode 100644 fixture/22/3/0.1.0.3 delete mode 100644 fixture/22/3/0.1.1.0 delete mode 100644 fixture/22/3/0.1.1.1 delete mode 100644 fixture/22/3/0.1.1.2 delete mode 100644 fixture/22/3/0.1.1.3 delete mode 100644 fixture/22/3/0.1.2.0 delete mode 100644 fixture/22/3/0.1.2.1 delete mode 100644 fixture/22/3/0.1.2.2 delete mode 100644 fixture/22/3/0.1.2.3 delete mode 100644 fixture/22/3/0.1.3.0 delete mode 100644 fixture/22/3/0.1.3.1 delete mode 100644 fixture/22/3/0.1.3.2 delete mode 100644 fixture/22/3/0.1.3.3 delete mode 100644 fixture/22/3/0.2.0.0 delete mode 100644 fixture/22/3/0.2.0.1 delete mode 100644 fixture/22/3/0.2.0.2 delete mode 100644 fixture/22/3/0.2.0.3 delete mode 100644 fixture/22/3/0.2.1.0 delete mode 100644 fixture/22/3/0.2.1.1 delete mode 100644 fixture/22/3/0.2.1.2 delete mode 100644 fixture/22/3/0.2.1.3 delete mode 100644 fixture/22/3/0.2.2.0 delete mode 100644 fixture/22/3/0.2.2.1 delete mode 100644 fixture/22/3/0.2.2.2 delete mode 100644 fixture/22/3/0.2.2.3 delete mode 100644 fixture/22/3/0.2.3.0 delete mode 100644 fixture/22/3/0.2.3.1 delete mode 100644 fixture/22/3/0.2.3.2 delete mode 100644 fixture/22/3/0.2.3.3 delete mode 100644 fixture/22/3/0.3.0.0 delete mode 100644 fixture/22/3/0.3.0.1 delete mode 100644 fixture/22/3/0.3.0.2 delete mode 100644 fixture/22/3/0.3.0.3 delete mode 100644 fixture/22/3/0.3.1.0 delete mode 100644 fixture/22/3/0.3.1.1 delete mode 100644 fixture/22/3/0.3.1.2 delete mode 100644 fixture/22/3/0.3.1.3 delete mode 100644 fixture/22/3/0.3.2.0 delete mode 100644 fixture/22/3/0.3.2.1 delete mode 100644 fixture/22/3/0.3.2.2 delete mode 100644 fixture/22/3/0.3.2.3 delete mode 100644 fixture/22/3/0.3.3.0 delete mode 100644 fixture/22/3/0.3.3.1 delete mode 100644 fixture/22/3/0.3.3.2 delete mode 100644 fixture/22/3/0.3.3.3 delete mode 100644 fixture/22/3/1.0.0.0 delete mode 100644 fixture/22/3/1.0.0.1 delete mode 100644 fixture/22/3/1.0.0.2 delete mode 100644 fixture/22/3/1.0.0.3 delete mode 100644 fixture/22/3/1.0.1.0 delete mode 100644 fixture/22/3/1.0.1.1 delete mode 100644 fixture/22/3/1.0.1.2 delete mode 100644 fixture/22/3/1.0.1.3 delete mode 100644 fixture/22/3/1.0.2.0 delete mode 100644 fixture/22/3/1.0.2.1 delete mode 100644 fixture/22/3/1.0.2.2 delete mode 100644 fixture/22/3/1.0.2.3 delete mode 100644 fixture/22/3/1.0.3.0 delete mode 100644 fixture/22/3/1.0.3.1 delete mode 100644 fixture/22/3/1.0.3.2 delete mode 
100644 fixture/22/3/1.0.3.3
 [diffstat summary, one "delete mode 100644" entry per file: remaining chunk files 1.1.0.0 through 1.3.3.3 under fixture/22/3; .zarray, .zattrs, and chunk files 0.0.0.0 through 1.3.3.3 under each of fixture/22/4 through fixture/22/6; fixture/23/.zattrs plus .zarray, .zattrs, and chunk files 0.0.0.0 through 1.3.3.3 under each of fixture/23/0 through fixture/23/6; fixture/3/.zattrs plus the .zarray, .zattrs, and chunk files 0 and 1 under fixture/3/0 through fixture/3/6; fixture/4/.zattrs, fixture/5/.zattrs, fixture/6/.zattrs, and fixture/7/.zattrs plus .zarray, .zattrs, and chunk files 0 through 22 under each of fixture/4/0 through fixture/4/6, fixture/5/0 through fixture/5/6, fixture/6/0 through fixture/6/6, and fixture/7/0 through fixture/7/1; .zarray, .zattrs, and chunk files 0 and 1 under fixture/7/2]
delete mode 100644
fixture/7/2/10 delete mode 100644 fixture/7/2/11 delete mode 100644 fixture/7/2/12 delete mode 100644 fixture/7/2/13 delete mode 100644 fixture/7/2/14 delete mode 100644 fixture/7/2/15 delete mode 100644 fixture/7/2/16 delete mode 100644 fixture/7/2/17 delete mode 100644 fixture/7/2/18 delete mode 100644 fixture/7/2/19 delete mode 100644 fixture/7/2/2 delete mode 100644 fixture/7/2/20 delete mode 100644 fixture/7/2/21 delete mode 100644 fixture/7/2/22 delete mode 100644 fixture/7/2/3 delete mode 100644 fixture/7/2/4 delete mode 100644 fixture/7/2/5 delete mode 100644 fixture/7/2/6 delete mode 100644 fixture/7/2/7 delete mode 100644 fixture/7/2/8 delete mode 100644 fixture/7/2/9 delete mode 100644 fixture/7/3/.zarray delete mode 100644 fixture/7/3/.zattrs delete mode 100644 fixture/7/3/0 delete mode 100644 fixture/7/3/1 delete mode 100644 fixture/7/3/10 delete mode 100644 fixture/7/3/11 delete mode 100644 fixture/7/3/12 delete mode 100644 fixture/7/3/13 delete mode 100644 fixture/7/3/14 delete mode 100644 fixture/7/3/15 delete mode 100644 fixture/7/3/16 delete mode 100644 fixture/7/3/17 delete mode 100644 fixture/7/3/18 delete mode 100644 fixture/7/3/19 delete mode 100644 fixture/7/3/2 delete mode 100644 fixture/7/3/20 delete mode 100644 fixture/7/3/21 delete mode 100644 fixture/7/3/22 delete mode 100644 fixture/7/3/3 delete mode 100644 fixture/7/3/4 delete mode 100644 fixture/7/3/5 delete mode 100644 fixture/7/3/6 delete mode 100644 fixture/7/3/7 delete mode 100644 fixture/7/3/8 delete mode 100644 fixture/7/3/9 delete mode 100644 fixture/7/4/.zarray delete mode 100644 fixture/7/4/.zattrs delete mode 100644 fixture/7/4/0 delete mode 100644 fixture/7/4/1 delete mode 100644 fixture/7/4/10 delete mode 100644 fixture/7/4/11 delete mode 100644 fixture/7/4/12 delete mode 100644 fixture/7/4/13 delete mode 100644 fixture/7/4/14 delete mode 100644 fixture/7/4/15 delete mode 100644 fixture/7/4/16 delete mode 100644 fixture/7/4/17 delete mode 100644 fixture/7/4/18 delete mode 100644 fixture/7/4/19 delete mode 100644 fixture/7/4/2 delete mode 100644 fixture/7/4/20 delete mode 100644 fixture/7/4/21 delete mode 100644 fixture/7/4/22 delete mode 100644 fixture/7/4/3 delete mode 100644 fixture/7/4/4 delete mode 100644 fixture/7/4/5 delete mode 100644 fixture/7/4/6 delete mode 100644 fixture/7/4/7 delete mode 100644 fixture/7/4/8 delete mode 100644 fixture/7/4/9 delete mode 100644 fixture/7/5/.zarray delete mode 100644 fixture/7/5/.zattrs delete mode 100644 fixture/7/5/0 delete mode 100644 fixture/7/5/1 delete mode 100644 fixture/7/5/10 delete mode 100644 fixture/7/5/11 delete mode 100644 fixture/7/5/12 delete mode 100644 fixture/7/5/13 delete mode 100644 fixture/7/5/14 delete mode 100644 fixture/7/5/15 delete mode 100644 fixture/7/5/16 delete mode 100644 fixture/7/5/17 delete mode 100644 fixture/7/5/18 delete mode 100644 fixture/7/5/19 delete mode 100644 fixture/7/5/2 delete mode 100644 fixture/7/5/20 delete mode 100644 fixture/7/5/21 delete mode 100644 fixture/7/5/22 delete mode 100644 fixture/7/5/3 delete mode 100644 fixture/7/5/4 delete mode 100644 fixture/7/5/5 delete mode 100644 fixture/7/5/6 delete mode 100644 fixture/7/5/7 delete mode 100644 fixture/7/5/8 delete mode 100644 fixture/7/5/9 delete mode 100644 fixture/7/6/.zarray delete mode 100644 fixture/7/6/.zattrs delete mode 100644 fixture/7/6/0 delete mode 100644 fixture/7/6/1 delete mode 100644 fixture/7/6/10 delete mode 100644 fixture/7/6/11 delete mode 100644 fixture/7/6/12 delete mode 100644 fixture/7/6/13 delete mode 100644 fixture/7/6/14 
delete mode 100644 fixture/7/6/15 delete mode 100644 fixture/7/6/16 delete mode 100644 fixture/7/6/17 delete mode 100644 fixture/7/6/18 delete mode 100644 fixture/7/6/19 delete mode 100644 fixture/7/6/2 delete mode 100644 fixture/7/6/20 delete mode 100644 fixture/7/6/21 delete mode 100644 fixture/7/6/22 delete mode 100644 fixture/7/6/3 delete mode 100644 fixture/7/6/4 delete mode 100644 fixture/7/6/5 delete mode 100644 fixture/7/6/6 delete mode 100644 fixture/7/6/7 delete mode 100644 fixture/7/6/8 delete mode 100644 fixture/7/6/9 delete mode 100644 fixture/8/.zattrs delete mode 100644 fixture/8/0/.zarray delete mode 100644 fixture/8/0/.zattrs delete mode 100644 fixture/8/0/0 delete mode 100644 fixture/8/0/1 delete mode 100644 fixture/8/0/10 delete mode 100644 fixture/8/0/11 delete mode 100644 fixture/8/0/12 delete mode 100644 fixture/8/0/13 delete mode 100644 fixture/8/0/14 delete mode 100644 fixture/8/0/15 delete mode 100644 fixture/8/0/16 delete mode 100644 fixture/8/0/17 delete mode 100644 fixture/8/0/18 delete mode 100644 fixture/8/0/19 delete mode 100644 fixture/8/0/2 delete mode 100644 fixture/8/0/20 delete mode 100644 fixture/8/0/21 delete mode 100644 fixture/8/0/22 delete mode 100644 fixture/8/0/23 delete mode 100644 fixture/8/0/24 delete mode 100644 fixture/8/0/25 delete mode 100644 fixture/8/0/26 delete mode 100644 fixture/8/0/27 delete mode 100644 fixture/8/0/28 delete mode 100644 fixture/8/0/29 delete mode 100644 fixture/8/0/3 delete mode 100644 fixture/8/0/30 delete mode 100644 fixture/8/0/31 delete mode 100644 fixture/8/0/32 delete mode 100644 fixture/8/0/33 delete mode 100644 fixture/8/0/4 delete mode 100644 fixture/8/0/5 delete mode 100644 fixture/8/0/6 delete mode 100644 fixture/8/0/7 delete mode 100644 fixture/8/0/8 delete mode 100644 fixture/8/0/9 delete mode 100644 fixture/8/1/.zarray delete mode 100644 fixture/8/1/.zattrs delete mode 100644 fixture/8/1/0 delete mode 100644 fixture/8/1/1 delete mode 100644 fixture/8/1/10 delete mode 100644 fixture/8/1/11 delete mode 100644 fixture/8/1/12 delete mode 100644 fixture/8/1/13 delete mode 100644 fixture/8/1/14 delete mode 100644 fixture/8/1/15 delete mode 100644 fixture/8/1/16 delete mode 100644 fixture/8/1/17 delete mode 100644 fixture/8/1/18 delete mode 100644 fixture/8/1/19 delete mode 100644 fixture/8/1/2 delete mode 100644 fixture/8/1/20 delete mode 100644 fixture/8/1/21 delete mode 100644 fixture/8/1/22 delete mode 100644 fixture/8/1/23 delete mode 100644 fixture/8/1/24 delete mode 100644 fixture/8/1/25 delete mode 100644 fixture/8/1/26 delete mode 100644 fixture/8/1/27 delete mode 100644 fixture/8/1/28 delete mode 100644 fixture/8/1/29 delete mode 100644 fixture/8/1/3 delete mode 100644 fixture/8/1/30 delete mode 100644 fixture/8/1/31 delete mode 100644 fixture/8/1/32 delete mode 100644 fixture/8/1/33 delete mode 100644 fixture/8/1/4 delete mode 100644 fixture/8/1/5 delete mode 100644 fixture/8/1/6 delete mode 100644 fixture/8/1/7 delete mode 100644 fixture/8/1/8 delete mode 100644 fixture/8/1/9 delete mode 100644 fixture/8/2/.zarray delete mode 100644 fixture/8/2/.zattrs delete mode 100644 fixture/8/2/0 delete mode 100644 fixture/8/2/1 delete mode 100644 fixture/8/2/10 delete mode 100644 fixture/8/2/11 delete mode 100644 fixture/8/2/12 delete mode 100644 fixture/8/2/13 delete mode 100644 fixture/8/2/14 delete mode 100644 fixture/8/2/15 delete mode 100644 fixture/8/2/16 delete mode 100644 fixture/8/2/17 delete mode 100644 fixture/8/2/18 delete mode 100644 fixture/8/2/19 delete mode 100644 fixture/8/2/2 delete mode 
100644 fixture/8/2/20 delete mode 100644 fixture/8/2/21 delete mode 100644 fixture/8/2/22 delete mode 100644 fixture/8/2/23 delete mode 100644 fixture/8/2/24 delete mode 100644 fixture/8/2/25 delete mode 100644 fixture/8/2/26 delete mode 100644 fixture/8/2/27 delete mode 100644 fixture/8/2/28 delete mode 100644 fixture/8/2/29 delete mode 100644 fixture/8/2/3 delete mode 100644 fixture/8/2/30 delete mode 100644 fixture/8/2/31 delete mode 100644 fixture/8/2/32 delete mode 100644 fixture/8/2/33 delete mode 100644 fixture/8/2/4 delete mode 100644 fixture/8/2/5 delete mode 100644 fixture/8/2/6 delete mode 100644 fixture/8/2/7 delete mode 100644 fixture/8/2/8 delete mode 100644 fixture/8/2/9 delete mode 100644 fixture/8/3/.zarray delete mode 100644 fixture/8/3/.zattrs delete mode 100644 fixture/8/3/0 delete mode 100644 fixture/8/3/1 delete mode 100644 fixture/8/3/10 delete mode 100644 fixture/8/3/11 delete mode 100644 fixture/8/3/12 delete mode 100644 fixture/8/3/13 delete mode 100644 fixture/8/3/14 delete mode 100644 fixture/8/3/15 delete mode 100644 fixture/8/3/16 delete mode 100644 fixture/8/3/17 delete mode 100644 fixture/8/3/18 delete mode 100644 fixture/8/3/19 delete mode 100644 fixture/8/3/2 delete mode 100644 fixture/8/3/20 delete mode 100644 fixture/8/3/21 delete mode 100644 fixture/8/3/22 delete mode 100644 fixture/8/3/23 delete mode 100644 fixture/8/3/24 delete mode 100644 fixture/8/3/25 delete mode 100644 fixture/8/3/26 delete mode 100644 fixture/8/3/27 delete mode 100644 fixture/8/3/28 delete mode 100644 fixture/8/3/29 delete mode 100644 fixture/8/3/3 delete mode 100644 fixture/8/3/30 delete mode 100644 fixture/8/3/31 delete mode 100644 fixture/8/3/32 delete mode 100644 fixture/8/3/33 delete mode 100644 fixture/8/3/4 delete mode 100644 fixture/8/3/5 delete mode 100644 fixture/8/3/6 delete mode 100644 fixture/8/3/7 delete mode 100644 fixture/8/3/8 delete mode 100644 fixture/8/3/9 delete mode 100644 fixture/8/4/.zarray delete mode 100644 fixture/8/4/.zattrs delete mode 100644 fixture/8/4/0 delete mode 100644 fixture/8/4/1 delete mode 100644 fixture/8/4/10 delete mode 100644 fixture/8/4/11 delete mode 100644 fixture/8/4/12 delete mode 100644 fixture/8/4/13 delete mode 100644 fixture/8/4/14 delete mode 100644 fixture/8/4/15 delete mode 100644 fixture/8/4/16 delete mode 100644 fixture/8/4/17 delete mode 100644 fixture/8/4/18 delete mode 100644 fixture/8/4/19 delete mode 100644 fixture/8/4/2 delete mode 100644 fixture/8/4/20 delete mode 100644 fixture/8/4/21 delete mode 100644 fixture/8/4/22 delete mode 100644 fixture/8/4/23 delete mode 100644 fixture/8/4/24 delete mode 100644 fixture/8/4/25 delete mode 100644 fixture/8/4/26 delete mode 100644 fixture/8/4/27 delete mode 100644 fixture/8/4/28 delete mode 100644 fixture/8/4/29 delete mode 100644 fixture/8/4/3 delete mode 100644 fixture/8/4/30 delete mode 100644 fixture/8/4/31 delete mode 100644 fixture/8/4/32 delete mode 100644 fixture/8/4/33 delete mode 100644 fixture/8/4/4 delete mode 100644 fixture/8/4/5 delete mode 100644 fixture/8/4/6 delete mode 100644 fixture/8/4/7 delete mode 100644 fixture/8/4/8 delete mode 100644 fixture/8/4/9 delete mode 100644 fixture/8/5/.zarray delete mode 100644 fixture/8/5/.zattrs delete mode 100644 fixture/8/5/0 delete mode 100644 fixture/8/5/1 delete mode 100644 fixture/8/5/10 delete mode 100644 fixture/8/5/11 delete mode 100644 fixture/8/5/12 delete mode 100644 fixture/8/5/13 delete mode 100644 fixture/8/5/14 delete mode 100644 fixture/8/5/15 delete mode 100644 fixture/8/5/16 delete mode 100644 
fixture/8/5/17 delete mode 100644 fixture/8/5/18 delete mode 100644 fixture/8/5/19 delete mode 100644 fixture/8/5/2 delete mode 100644 fixture/8/5/20 delete mode 100644 fixture/8/5/21 delete mode 100644 fixture/8/5/22 delete mode 100644 fixture/8/5/23 delete mode 100644 fixture/8/5/24 delete mode 100644 fixture/8/5/25 delete mode 100644 fixture/8/5/26 delete mode 100644 fixture/8/5/27 delete mode 100644 fixture/8/5/28 delete mode 100644 fixture/8/5/29 delete mode 100644 fixture/8/5/3 delete mode 100644 fixture/8/5/30 delete mode 100644 fixture/8/5/31 delete mode 100644 fixture/8/5/32 delete mode 100644 fixture/8/5/33 delete mode 100644 fixture/8/5/4 delete mode 100644 fixture/8/5/5 delete mode 100644 fixture/8/5/6 delete mode 100644 fixture/8/5/7 delete mode 100644 fixture/8/5/8 delete mode 100644 fixture/8/5/9 delete mode 100644 fixture/8/6/.zarray delete mode 100644 fixture/8/6/.zattrs delete mode 100644 fixture/8/6/0 delete mode 100644 fixture/8/6/1 delete mode 100644 fixture/8/6/10 delete mode 100644 fixture/8/6/11 delete mode 100644 fixture/8/6/12 delete mode 100644 fixture/8/6/13 delete mode 100644 fixture/8/6/14 delete mode 100644 fixture/8/6/15 delete mode 100644 fixture/8/6/16 delete mode 100644 fixture/8/6/17 delete mode 100644 fixture/8/6/18 delete mode 100644 fixture/8/6/19 delete mode 100644 fixture/8/6/2 delete mode 100644 fixture/8/6/20 delete mode 100644 fixture/8/6/21 delete mode 100644 fixture/8/6/22 delete mode 100644 fixture/8/6/23 delete mode 100644 fixture/8/6/24 delete mode 100644 fixture/8/6/25 delete mode 100644 fixture/8/6/26 delete mode 100644 fixture/8/6/27 delete mode 100644 fixture/8/6/28 delete mode 100644 fixture/8/6/29 delete mode 100644 fixture/8/6/3 delete mode 100644 fixture/8/6/30 delete mode 100644 fixture/8/6/31 delete mode 100644 fixture/8/6/32 delete mode 100644 fixture/8/6/33 delete mode 100644 fixture/8/6/4 delete mode 100644 fixture/8/6/5 delete mode 100644 fixture/8/6/6 delete mode 100644 fixture/8/6/7 delete mode 100644 fixture/8/6/8 delete mode 100644 fixture/8/6/9 delete mode 100644 fixture/9/.zattrs delete mode 100644 fixture/9/0/.zarray delete mode 100644 fixture/9/0/.zattrs delete mode 100644 fixture/9/0/0 delete mode 100644 fixture/9/0/1 delete mode 100644 fixture/9/0/10 delete mode 100644 fixture/9/0/11 delete mode 100644 fixture/9/0/12 delete mode 100644 fixture/9/0/13 delete mode 100644 fixture/9/0/14 delete mode 100644 fixture/9/0/15 delete mode 100644 fixture/9/0/16 delete mode 100644 fixture/9/0/17 delete mode 100644 fixture/9/0/18 delete mode 100644 fixture/9/0/19 delete mode 100644 fixture/9/0/2 delete mode 100644 fixture/9/0/20 delete mode 100644 fixture/9/0/21 delete mode 100644 fixture/9/0/22 delete mode 100644 fixture/9/0/23 delete mode 100644 fixture/9/0/24 delete mode 100644 fixture/9/0/25 delete mode 100644 fixture/9/0/26 delete mode 100644 fixture/9/0/27 delete mode 100644 fixture/9/0/28 delete mode 100644 fixture/9/0/29 delete mode 100644 fixture/9/0/3 delete mode 100644 fixture/9/0/30 delete mode 100644 fixture/9/0/31 delete mode 100644 fixture/9/0/32 delete mode 100644 fixture/9/0/33 delete mode 100644 fixture/9/0/4 delete mode 100644 fixture/9/0/5 delete mode 100644 fixture/9/0/6 delete mode 100644 fixture/9/0/7 delete mode 100644 fixture/9/0/8 delete mode 100644 fixture/9/0/9 delete mode 100644 fixture/9/1/.zarray delete mode 100644 fixture/9/1/.zattrs delete mode 100644 fixture/9/1/0 delete mode 100644 fixture/9/1/1 delete mode 100644 fixture/9/1/10 delete mode 100644 fixture/9/1/11 delete mode 100644 fixture/9/1/12 
delete mode 100644 fixture/9/1/13 delete mode 100644 fixture/9/1/14 delete mode 100644 fixture/9/1/15 delete mode 100644 fixture/9/1/16 delete mode 100644 fixture/9/1/17 delete mode 100644 fixture/9/1/18 delete mode 100644 fixture/9/1/19 delete mode 100644 fixture/9/1/2 delete mode 100644 fixture/9/1/20 delete mode 100644 fixture/9/1/21 delete mode 100644 fixture/9/1/22 delete mode 100644 fixture/9/1/23 delete mode 100644 fixture/9/1/24 delete mode 100644 fixture/9/1/25 delete mode 100644 fixture/9/1/26 delete mode 100644 fixture/9/1/27 delete mode 100644 fixture/9/1/28 delete mode 100644 fixture/9/1/29 delete mode 100644 fixture/9/1/3 delete mode 100644 fixture/9/1/30 delete mode 100644 fixture/9/1/31 delete mode 100644 fixture/9/1/32 delete mode 100644 fixture/9/1/33 delete mode 100644 fixture/9/1/4 delete mode 100644 fixture/9/1/5 delete mode 100644 fixture/9/1/6 delete mode 100644 fixture/9/1/7 delete mode 100644 fixture/9/1/8 delete mode 100644 fixture/9/1/9 delete mode 100644 fixture/9/2/.zarray delete mode 100644 fixture/9/2/.zattrs delete mode 100644 fixture/9/2/0 delete mode 100644 fixture/9/2/1 delete mode 100644 fixture/9/2/10 delete mode 100644 fixture/9/2/11 delete mode 100644 fixture/9/2/12 delete mode 100644 fixture/9/2/13 delete mode 100644 fixture/9/2/14 delete mode 100644 fixture/9/2/15 delete mode 100644 fixture/9/2/16 delete mode 100644 fixture/9/2/17 delete mode 100644 fixture/9/2/18 delete mode 100644 fixture/9/2/19 delete mode 100644 fixture/9/2/2 delete mode 100644 fixture/9/2/20 delete mode 100644 fixture/9/2/21 delete mode 100644 fixture/9/2/22 delete mode 100644 fixture/9/2/23 delete mode 100644 fixture/9/2/24 delete mode 100644 fixture/9/2/25 delete mode 100644 fixture/9/2/26 delete mode 100644 fixture/9/2/27 delete mode 100644 fixture/9/2/28 delete mode 100644 fixture/9/2/29 delete mode 100644 fixture/9/2/3 delete mode 100644 fixture/9/2/30 delete mode 100644 fixture/9/2/31 delete mode 100644 fixture/9/2/32 delete mode 100644 fixture/9/2/33 delete mode 100644 fixture/9/2/4 delete mode 100644 fixture/9/2/5 delete mode 100644 fixture/9/2/6 delete mode 100644 fixture/9/2/7 delete mode 100644 fixture/9/2/8 delete mode 100644 fixture/9/2/9 delete mode 100644 fixture/9/3/.zarray delete mode 100644 fixture/9/3/.zattrs delete mode 100644 fixture/9/3/0 delete mode 100644 fixture/9/3/1 delete mode 100644 fixture/9/3/10 delete mode 100644 fixture/9/3/11 delete mode 100644 fixture/9/3/12 delete mode 100644 fixture/9/3/13 delete mode 100644 fixture/9/3/14 delete mode 100644 fixture/9/3/15 delete mode 100644 fixture/9/3/16 delete mode 100644 fixture/9/3/17 delete mode 100644 fixture/9/3/18 delete mode 100644 fixture/9/3/19 delete mode 100644 fixture/9/3/2 delete mode 100644 fixture/9/3/20 delete mode 100644 fixture/9/3/21 delete mode 100644 fixture/9/3/22 delete mode 100644 fixture/9/3/23 delete mode 100644 fixture/9/3/24 delete mode 100644 fixture/9/3/25 delete mode 100644 fixture/9/3/26 delete mode 100644 fixture/9/3/27 delete mode 100644 fixture/9/3/28 delete mode 100644 fixture/9/3/29 delete mode 100644 fixture/9/3/3 delete mode 100644 fixture/9/3/30 delete mode 100644 fixture/9/3/31 delete mode 100644 fixture/9/3/32 delete mode 100644 fixture/9/3/33 delete mode 100644 fixture/9/3/4 delete mode 100644 fixture/9/3/5 delete mode 100644 fixture/9/3/6 delete mode 100644 fixture/9/3/7 delete mode 100644 fixture/9/3/8 delete mode 100644 fixture/9/3/9 delete mode 100644 fixture/9/4/.zarray delete mode 100644 fixture/9/4/.zattrs delete mode 100644 fixture/9/4/0 delete mode 
100644 fixture/9/4/1 delete mode 100644 fixture/9/4/10 delete mode 100644 fixture/9/4/11 delete mode 100644 fixture/9/4/12 delete mode 100644 fixture/9/4/13 delete mode 100644 fixture/9/4/14 delete mode 100644 fixture/9/4/15 delete mode 100644 fixture/9/4/16 delete mode 100644 fixture/9/4/17 delete mode 100644 fixture/9/4/18 delete mode 100644 fixture/9/4/19 delete mode 100644 fixture/9/4/2 delete mode 100644 fixture/9/4/20 delete mode 100644 fixture/9/4/21 delete mode 100644 fixture/9/4/22 delete mode 100644 fixture/9/4/23 delete mode 100644 fixture/9/4/24 delete mode 100644 fixture/9/4/25 delete mode 100644 fixture/9/4/26 delete mode 100644 fixture/9/4/27 delete mode 100644 fixture/9/4/28 delete mode 100644 fixture/9/4/29 delete mode 100644 fixture/9/4/3 delete mode 100644 fixture/9/4/30 delete mode 100644 fixture/9/4/31 delete mode 100644 fixture/9/4/32 delete mode 100644 fixture/9/4/33 delete mode 100644 fixture/9/4/4 delete mode 100644 fixture/9/4/5 delete mode 100644 fixture/9/4/6 delete mode 100644 fixture/9/4/7 delete mode 100644 fixture/9/4/8 delete mode 100644 fixture/9/4/9 delete mode 100644 fixture/9/5/.zarray delete mode 100644 fixture/9/5/.zattrs delete mode 100644 fixture/9/5/0 delete mode 100644 fixture/9/5/1 delete mode 100644 fixture/9/5/10 delete mode 100644 fixture/9/5/11 delete mode 100644 fixture/9/5/12 delete mode 100644 fixture/9/5/13 delete mode 100644 fixture/9/5/14 delete mode 100644 fixture/9/5/15 delete mode 100644 fixture/9/5/16 delete mode 100644 fixture/9/5/17 delete mode 100644 fixture/9/5/18 delete mode 100644 fixture/9/5/19 delete mode 100644 fixture/9/5/2 delete mode 100644 fixture/9/5/20 delete mode 100644 fixture/9/5/21 delete mode 100644 fixture/9/5/22 delete mode 100644 fixture/9/5/23 delete mode 100644 fixture/9/5/24 delete mode 100644 fixture/9/5/25 delete mode 100644 fixture/9/5/26 delete mode 100644 fixture/9/5/27 delete mode 100644 fixture/9/5/28 delete mode 100644 fixture/9/5/29 delete mode 100644 fixture/9/5/3 delete mode 100644 fixture/9/5/30 delete mode 100644 fixture/9/5/31 delete mode 100644 fixture/9/5/32 delete mode 100644 fixture/9/5/33 delete mode 100644 fixture/9/5/4 delete mode 100644 fixture/9/5/5 delete mode 100644 fixture/9/5/6 delete mode 100644 fixture/9/5/7 delete mode 100644 fixture/9/5/8 delete mode 100644 fixture/9/5/9 delete mode 100644 fixture/9/6/.zarray delete mode 100644 fixture/9/6/.zattrs delete mode 100644 fixture/9/6/0 delete mode 100644 fixture/9/6/1 delete mode 100644 fixture/9/6/10 delete mode 100644 fixture/9/6/11 delete mode 100644 fixture/9/6/12 delete mode 100644 fixture/9/6/13 delete mode 100644 fixture/9/6/14 delete mode 100644 fixture/9/6/15 delete mode 100644 fixture/9/6/16 delete mode 100644 fixture/9/6/17 delete mode 100644 fixture/9/6/18 delete mode 100644 fixture/9/6/19 delete mode 100644 fixture/9/6/2 delete mode 100644 fixture/9/6/20 delete mode 100644 fixture/9/6/21 delete mode 100644 fixture/9/6/22 delete mode 100644 fixture/9/6/23 delete mode 100644 fixture/9/6/24 delete mode 100644 fixture/9/6/25 delete mode 100644 fixture/9/6/26 delete mode 100644 fixture/9/6/27 delete mode 100644 fixture/9/6/28 delete mode 100644 fixture/9/6/29 delete mode 100644 fixture/9/6/3 delete mode 100644 fixture/9/6/30 delete mode 100644 fixture/9/6/31 delete mode 100644 fixture/9/6/32 delete mode 100644 fixture/9/6/33 delete mode 100644 fixture/9/6/4 delete mode 100644 fixture/9/6/5 delete mode 100644 fixture/9/6/6 delete mode 100644 fixture/9/6/7 delete mode 100644 fixture/9/6/8 delete mode 100644 fixture/9/6/9 
delete mode 100644 fixture/meta/.zarray
delete mode 100644 fixture/meta/0.0
delete mode 100644 fixture/utf8attrs/.zattrs
delete mode 100644 tests/v2/data/store.zip
delete mode 100644 tests/v2/data/store/foo
rename {fixture => tests/v2/fixture}/.zgroup (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/flat/.zarray (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/flat/0.0 (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/flat_legacy/.zarray (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/flat_legacy/0.0 (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/nested/.zarray (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/nested/0/0 (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/nested_legacy/.zarray (100%)
rename {fixture => tests/v2/fixture/dimension_separator}/nested_legacy/0/0 (100%)
rename {fixture/0 => tests/v2/fixture/test_format_compatibility}/.zgroup (100%)
rename {fixture/1 => tests/v2/fixture/test_format_compatibility/array_0}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_0/compressor_6/1
rename {fixture/10 => tests/v2/fixture/test_format_compatibility/array_1}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_1/1
create mode 100644
tests/v2/fixture/test_format_compatibility/array_1/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_1/compressor_6/1 rename {fixture/11 => tests/v2/fixture/test_format_compatibility/array_10}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_10/compressor_6/1 rename {fixture/12 => tests/v2/fixture/test_format_compatibility/array_11}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_0/0 create mode 100644 
tests/v2/fixture/test_format_compatibility/array_11/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_11/compressor_6/1 rename {fixture/13 => tests/v2/fixture/test_format_compatibility/array_12}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_12/compressor_6/0 create mode 100644 
tests/v2/fixture/test_format_compatibility/array_12/compressor_6/1 rename {fixture/14 => tests/v2/fixture/test_format_compatibility/array_13}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_13/compressor_6/1 rename {fixture/15 => tests/v2/fixture/test_format_compatibility/array_14}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_5/.zarray create mode 100644 
tests/v2/fixture/test_format_compatibility/array_14/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_14/compressor_6/1 rename {fixture/16 => tests/v2/fixture/test_format_compatibility/array_15}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_15/compressor_6/1 rename {fixture/17 => tests/v2/fixture/test_format_compatibility/array_16}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_3/1 create mode 100644 
tests/v2/fixture/test_format_compatibility/array_16/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_16/compressor_6/1 rename {fixture/18 => tests/v2/fixture/test_format_compatibility/array_17}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_1/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_1/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_2/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_2/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_3/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_3/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_4/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_4/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_5/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_5/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_6/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_17/compressor_6/1 rename {fixture/19 => tests/v2/fixture/test_format_compatibility/array_18}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_0/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_0/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_1/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_1/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_2/0.0 create mode 100644 
tests/v2/fixture/test_format_compatibility/array_18/compressor_2/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_3/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_3/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_4/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_4/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_5/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_5/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_6/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_18/compressor_6/0.1 rename {fixture/2 => tests/v2/fixture/test_format_compatibility/array_19}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_0/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_0/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_1/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_1/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_1/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_2/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_2/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_2/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_3/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_3/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_3/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_4/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_4/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_4/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_5/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_5/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_5/0.1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_6/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_6/0.0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_19/compressor_6/0.1 rename {fixture/20 => tests/v2/fixture/test_format_compatibility/array_2}/.zgroup (100%) create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_0/.zarray create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_0/0 create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_0/1 create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_1/.zarray create 
mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_2/compressor_6/1
rename {fixture/21 => tests/v2/fixture/test_format_compatibility/array_20}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_0/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_0/0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_1/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_1/0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_2/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_2/0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_3/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_3/0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_4/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_4/0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_5/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_5/0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_6/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_20/compressor_6/0.0.1
rename {fixture/22 => tests/v2/fixture/test_format_compatibility/array_21}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_0/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_1/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_2/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_3/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_4/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_5/0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_21/compressor_6/0.0.0
rename {fixture/23 => tests/v2/fixture/test_format_compatibility/array_22}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_0/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_0/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_1/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_1/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_2/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_2/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_3/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_3/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_4/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_4/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_5/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_5/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_6/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_22/compressor_6/0.0.0.1
rename {fixture/3 => tests/v2/fixture/test_format_compatibility/array_23}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_0/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_0/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_1/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_1/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_2/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_2/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_3/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_3/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_4/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_4/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_5/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_5/0.0.0.1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_6/0.0.0.0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_23/compressor_6/0.0.0.1
rename {fixture/4 => tests/v2/fixture/test_format_compatibility/array_3}/.zgroup (100%)
rename {fixture/3/0 => tests/v2/fixture/test_format_compatibility/array_3/compressor_0}/.zarray (100%)
rename {fixture/3/0 => tests/v2/fixture/test_format_compatibility/array_3/compressor_0}/0 (100%)
rename {fixture/3/0 => tests/v2/fixture/test_format_compatibility/array_3/compressor_0}/1 (100%)
rename {fixture/3/1 => tests/v2/fixture/test_format_compatibility/array_3/compressor_1}/.zarray (100%)
rename {fixture/3/1 => tests/v2/fixture/test_format_compatibility/array_3/compressor_1}/0 (100%)
rename {fixture/3/1 => tests/v2/fixture/test_format_compatibility/array_3/compressor_1}/1 (100%)
rename {fixture/3/2 => tests/v2/fixture/test_format_compatibility/array_3/compressor_2}/.zarray (100%)
rename {fixture/3/2 => tests/v2/fixture/test_format_compatibility/array_3/compressor_2}/0 (100%)
rename {fixture/3/2 => tests/v2/fixture/test_format_compatibility/array_3/compressor_2}/1 (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_4/.zarray
rename {fixture/3/4 => tests/v2/fixture/test_format_compatibility/array_3/compressor_4}/0 (100%)
rename {fixture/3/4 => tests/v2/fixture/test_format_compatibility/array_3/compressor_4}/1 (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_3/compressor_6/1
rename {fixture/5 => tests/v2/fixture/test_format_compatibility/array_4}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_4/compressor_6/1
rename {fixture/6 => tests/v2/fixture/test_format_compatibility/array_5}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_5/compressor_6/1
rename {fixture/7 => tests/v2/fixture/test_format_compatibility/array_6}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_6/compressor_6/1
rename {fixture/8 => tests/v2/fixture/test_format_compatibility/array_7}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_7/compressor_6/1
rename {fixture/9 => tests/v2/fixture/test_format_compatibility/array_8}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_8/compressor_6/1
rename {fixture/utf8attrs => tests/v2/fixture/test_format_compatibility/array_9}/.zgroup (100%)
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_0/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_0/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_0/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_1/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_1/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_1/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_2/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_2/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_2/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_3/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_3/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_3/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_4/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_4/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_4/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_5/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_5/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_5/1
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_6/.zarray
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_6/0
create mode 100644 tests/v2/fixture/test_format_compatibility/array_9/compressor_6/1

diff --git a/fixture/.zattrs b/fixture/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/fixture/0/.zattrs b/fixture/0/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/0/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/fixture/0/0/.zarray b/fixture/0/0/.zarray
deleted file mode 100644
index a5ceafaf51..0000000000
--- a/fixture/0/0/.zarray
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": null,
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
diff --git a/fixture/0/0/.zattrs b/fixture/0/0/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/0/0/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
[deletions of the twelve chunk files fixture/0/0/0 .. fixture/0/0/11 omitted: a mix of plain-text byte ramps and GIT binary patch data whose line structure was lost in extraction]
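The .zarray shown above fully determines this fixture: a 1-D array of 1111 signed bytes in Fortran order, stored as twelve 100-element chunks with no compressor. A minimal sketch of regenerating an equivalent array with zarr follows; the output path and the byte-ramp contents (suggested by the printable chunk files in the diff) are assumptions, not the repository's actual fixture generator.

    import numpy as np
    import zarr

    # Recreate an array equivalent to the deleted fixture/0/0: shape (1111,),
    # chunks (100,), int8, Fortran order, uncompressed.  "fixture/0/0" is a
    # hypothetical path and the ramp values are inferred, not authoritative.
    arr = zarr.open_array(
        "fixture/0/0",
        mode="w",
        shape=(1111,),
        chunks=(100,),
        dtype="|i1",
        compressor=None,   # corresponds to "compressor": null in .zarray
        fill_value=0,
        order="F",
    )
    arr[:] = np.arange(1111).astype("i1")  # writes chunk files 0 .. 11 beside .zarray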
diff --git a/fixture/0/1/.zarray b/fixture/0/1/.zarray
deleted file mode 100644
index 5bb56828ec..0000000000
--- a/fixture/0/1/.zarray
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "id": "zlib",
-        "level": 1
-    },
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
[deletions of fixture/0/1/.zattrs and chunk files fixture/0/1/0 .. fixture/0/1/11 (GIT binary patch data) omitted]
diff --git a/fixture/0/2/.zarray b/fixture/0/2/.zarray
deleted file mode 100644
index 29781e09b4..0000000000
--- a/fixture/0/2/.zarray
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "id": "bz2",
-        "level": 1
-    },
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
[deletions of fixture/0/2/.zattrs and chunk files fixture/0/2/0 .. fixture/0/2/11 (GIT binary patch data) omitted]
diff --git a/fixture/0/3/.zarray b/fixture/0/3/.zarray
deleted file mode 100644
index 72f8a611b7..0000000000
--- a/fixture/0/3/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
[deletions of fixture/0/3/.zattrs and chunk files fixture/0/3/0 .. fixture/0/3/11 (GIT binary patch data) omitted]
diff --git a/fixture/0/4/.zarray b/fixture/0/4/.zarray
deleted file mode 100644
index 44f2b6b28d..0000000000
--- a/fixture/0/4/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 1
-    },
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
[deletions of fixture/0/4/.zattrs and chunk files fixture/0/4/0 .. fixture/0/4/11 (GIT binary patch data) omitted]
diff --git a/fixture/0/5/.zarray b/fixture/0/5/.zarray
deleted file mode 100644
index beafdea229..0000000000
--- a/fixture/0/5/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 2
-    },
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
[deletions of fixture/0/5/.zattrs and chunk files fixture/0/5/0 .. fixture/0/5/11 (GIT binary patch data) omitted]
diff --git a/fixture/0/6/.zarray b/fixture/0/6/.zarray
deleted file mode 100644
index 5cdc9c43ce..0000000000
--- a/fixture/0/6/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "lz4",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "|i1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        1111
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
[deletions of fixture/0/6/.zattrs and chunk files fixture/0/6/0 .. fixture/0/6/11 (GIT binary patch data) omitted]
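Taken together, the seven compressor variants recorded in the .zarray files above are: no compression, zlib level 1, bz2 level 1, and Blosc with zstd at shuffle 0/1/2 plus lz4 at shuffle 0. A sketch of rebuilding codec objects from those JSON configs via numcodecs' registry lookup; the config list is transcribed from the diffs, and the pairing of directories to configs is an assumption read off the renames:

    from numcodecs import get_codec

    # Compressor configs as they appear in the deleted fixture/0/*/.zarray files.
    configs = [
        None,                                                          # fixture/0/0: uncompressed
        {"id": "zlib", "level": 1},                                    # fixture/0/1
        {"id": "bz2", "level": 1},                                     # fixture/0/2
        {"id": "blosc", "cname": "zstd", "clevel": 1, "shuffle": 0},   # fixture/0/3
        {"id": "blosc", "cname": "zstd", "clevel": 1, "shuffle": 1},   # fixture/0/4
        {"id": "blosc", "cname": "zstd", "clevel": 1, "shuffle": 2},   # fixture/0/5
        {"id": "blosc", "cname": "lz4", "clevel": 1, "shuffle": 0},    # fixture/0/6
    ]
    # get_codec() instantiates the registered codec class from a config dict.
    codecs = [c if c is None else get_codec(c) for c in configs]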
z;^yJy;};MV5*85^6PJ*bl9rK`lUGnwQdUt_Q`gYc($>+{(>E|QGBzu$;>FVq3?e6dJ@$&QZ_4fDp`TG0({r>*|0RjUA1qKHQ2?`4g4Gs?w5fT#=6&4p5 W85$cL9UdPbAtECrB_<~*DJmu$;>FVq3?e6dJ@$&OR08I%1 diff --git a/fixture/0/6/5 b/fixture/0/6/5 deleted file mode 100644 index d83297db06f71f5272139bc5edae2d0f854f6461..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmZQ#G-6C)U|;~@5+MHa_1pI!KY#uH^Y`C>21X`k7FITP4o)s^9$r3v0YM>Q5m7O5 z2}vnw8Cf}b1w|!g6;(BL4NWa=9bG+r14AQY6H_yD3rj0&8(TYj2S+Do7gslT4^J;| QA74NJfWV;OkkGJj0IDz?82|tP diff --git a/fixture/0/6/6 b/fixture/0/6/6 deleted file mode 100644 index 3658bddb506e7321db7d6b6f137076d48797ea46..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmZQ#G-6C)U|;~@5+IIRUzF>}`JIdkXDU$Ah|;w4L$Enl&6)#^2C U*R9{Mant54TeofBv2)jM0PzJkDF6Tf diff --git a/fixture/0/6/7 b/fixture/0/6/7 deleted file mode 100644 index deb2e30b6468a5f52c9a9e96d79d872c43d7407b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmV-)0E_u$;>FVq3?e6dJ@$&QZ_4fDp`TG0({r>*|0RjUA1qKHQ2?`4g W4Gs?w5fT#=6&4p585$cL9UdQ+2R=Cf diff --git a/fixture/0/6/8 b/fixture/0/6/8 deleted file mode 100644 index aad890f334a48dc3a3a55186b5d929687d076b80..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmZQ#G-6C)U|;~@5+GJkR8m$^Ra4i{)Y8_`)zddHG%_|ZH8Z!cw6eCbwX=6{baHla zb#wRd^z!!c_45x13u#AIY?Om diff --git a/fixture/1/.zattrs b/fixture/1/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/1/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/1/0/.zarray b/fixture/1/0/.zarray deleted file mode 100644 index 30d16d3590..0000000000 --- a/fixture/1/0/.zarray +++ /dev/null @@ -1,14 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": null, - "dtype": "^pGi$g!dmr%KM8 oyHHkf=}OhL8#TA?+ zV$_&%6DCcW7Byp5%$&G{q?ELI3l=R|ma!r$XVsc@8#Zm(mRC@;W7nR22M!%MR#JB2 oRK=Nd7cO17cH`Eadk-E}J*jzC*YM)io2Hg`A3lBg_T$%|e}OnUX8-^I diff --git a/fixture/1/0/10 b/fixture/1/0/10 deleted file mode 100644 index cee8dfab6c3a255dbca45ca00b9b0e1fb0e348a4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV~$g(4UL00ltvPIoii-EBIi{{J83G}G?Ra-NG^<|^0umz&(?F86uJW1jMym;BFb z-twN0eC8|PDez07ph&S2rOK46P^n6_8nx=wYtX1kvlgw|{MN2Rr!L)k^y<@Zz@Q<+ tMvNLWZo;G~(`L+?GjGA7CCgT?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ \ No newline at end of file diff --git a/fixture/1/0/4 b/fixture/1/0/4 deleted file mode 100644 index 75c13b9686..0000000000 --- a/fixture/1/0/4 +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/fixture/1/0/5 b/fixture/1/0/5 deleted file mode 100644 index e7e7c3894fba375bb98d5312c97ddeb0b30900cf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV~$hav(16aYZ)Y|2PBr4X|B-g~d`|NmfQguB;xi}(15a(u>De8*2z;y3=HN}yVe zT6O9*Xw;-xLQ;!XZQ7-DNbA(4TaR9S`VAPAku@YIZ`g=Y1!KldD4H~7+KgFq<}FyX sWZ8;UYu0Vpv}N0lU3>N&ICSLLiBo6JUAT1R+KpRx?mc)^^5ofze_v1=V*mgE diff --git a/fixture/1/0/6 b/fixture/1/0/6 deleted file mode 100644 index 6efa2226c7..0000000000 --- a/fixture/1/0/6 +++ /dev/null @@ -1 +0,0 @@ -XYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ \ No newline at end of file diff --git a/fixture/1/0/7 b/fixture/1/0/7 deleted file mode 100644 index 40b6edb0e84464b838184cd5cd413c4d234d54dc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV~$gF+Yp00qEyFWXr9|9{A~YZ=R0uI27}^y<@Zz@Q<+MvNLWZo;G~(`L+?GjGA7 zCCgT2Fb 
v3i2zG6s9P}DM@L{Ql5%brYhB`Np0#?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ \ No newline at end of file diff --git a/fixture/1/0/9 b/fixture/1/0/9 deleted file mode 100644 index c5223e6cb7..0000000000 --- a/fixture/1/0/9 +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/fixture/1/1/.zarray b/fixture/1/1/.zarray deleted file mode 100644 index a97385dba8..0000000000 --- a/fixture/1/1/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "zlib", - "level": 1 - }, - "dtype": "&Y0I`9y9)N~J8DD8tSD%ou zegg(Y3>h|J)R=J-CQX?ZHDgxHoVbLfl(cyZ7A;wpu_7yH)tYr1Hf`CKS5UNL*PeX` z4jnmGQg-50#hG&#E?v2HeFw)pdrIXj2bg;!lWtFX3Uy1Z^5D^%T}yf^T)ako3?D*v1`x11AiSla_q#ZABd_W EZ&u(^_5c6? diff --git a/fixture/1/1/11 b/fixture/1/1/11 deleted file mode 100644 index 3e39a87e0ae7534d40b45b4f5ea5bfe428ff03f8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 33 pcmb^JCugN6(namc8{#vCzj!coT@cfv`hoHl97v@>SRI_sSC=3H>m MCG!^i2R4v3qq@CHTmS$7 diff --git a/fixture/1/1/3 b/fixture/1/1/3 deleted file mode 100644 index 699525c8755322f6b937082f2777e414f2411ab6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 146 zcmV;D0B!$x0R_Q_HUazwE9}yAA=Jx^(Li)T>WO zSib>-B8ChbF>1`X36rKwi<&VjW=>o}QcBvq1&fv}%UF?>vue${4V$)X%PT0_v1`x1 z1BZ?rD=9m1s^ZMK3zx23yK(Ezy$6q~p42?6Yk2YMO;gLe51+n#`|<0~KL&6l>mCqC A2mk;8 diff --git a/fixture/1/1/4 b/fixture/1/1/4 deleted file mode 100644 index 11557a82e9deb699e16b67db5cd04718583febb5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 147 zcmV;E0Brww0R_S5IsyOy0Ko6w{|_>vkUb(TD`i)sY{aNB<0ed+GA(Mxte81*2}vnw^A;>xvMggoR?ez5>o#oKvMsNmXveNS z`wkpBa;&86#Hori=Pq2ja_z>gJNF(us(MoMtghk3t2a$8?>>C`^6kg3KmTx{OZ%4O BM%DlT diff --git a/fixture/1/1/5 b/fixture/1/1/5 deleted file mode 100644 index 13380180ec0bc08c32539fe2506c093f80c0afb5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 149 zcmV;G0BZku0R_Q_A_4#u06_0-%1Ab)5VH5)d#~{S|6pW(r%Nk6wNH4H%S>H6$l**oaXDW5!J=nlxqF zj9GK$Em*W<*@{(b)@|6dW!sKjd-feTbmZ8HQ)kXyxOC;(jazr_J$O{|=9{MDZ8>2*)u7W?dROJY1bj3QeYw_wqdWf?27a#pQbw_($kZFvPnJ9h2a zci_;GVLKpx51;BPM+gSSlf5^6L8OvI(eFw)pdrIXj2bg;!lWtF zX3Uy1Z^5D^%T}yf^T)ako3?D*v1`x11AiSla_q#ZGv_W`x^nHG8@KM~S)l2X#JBTV6rYj$M29 z9XNF4SV`H5Qx#{mL BM_m8_ diff --git a/fixture/1/1/9 b/fixture/1/1/9 deleted file mode 100644 index ccfa2d9fd45792c84a34e89d4a19cdcbdcf6b52d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 147 zcmV;E0Brww0R_R=A_4#a0KkX)b!0>#dqi4R%C2lh_Dl+8`we$(+I0x%)TLXGpk94K z!ukyu6ftDjh*4w4O_(%gTGWhLF>~S)l2X#JBTV6rYj$M29 z9XNF4SV`H5Qx#{Y$}lu^j8qGb%>Tdk7Xt&se}?-12=IeJfgz>9os9(cI#3M=B7?2r;sS;YqnnG%`5a2PH_T)E`uPWwV=GU@#AIGMmx(8) z>dc&+?!VA&qU+_AVOy)kawYSgUe1p{*e+(gNq)K!$McKJ!}iWD&wqG)y4~FBa!*sy f51ZapetrM^{{4L|3JlDDVo!=ZbaLGPZ~F@XogY?| diff --git a/fixture/1/2/1 b/fixture/1/2/1 deleted file mode 100644 index 4c5b29f8f003d1df3a7c2b568aef73457ae2adca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 181 zcmZ>Y$}lu^j8qGb3{^1oV_-0H{NupD{vQc6C@^FcE}ZOZl);eDU?OS4z%z@1fz78O zBc#_Pv%^Fxa{&W`B;x{;3nq+<9e8S=TcD_?oY4*+SW&b!oK3Q%zx4Qh} a^XpsdYX59{-N4AeSTA1WY7sC2Y$}lu^j8qGboO;+rj)5VfhVefP{9vg6|9=5PsSK~lWyggJ7fxIt$&iu3Ym&mi za43n{NtHp0fw6;uA>-u21q=)fjBSgKE-;NQ&wqG)y4~FB@{iB2@1I|QTX)y^0>Q~ hd|hjFcFWZQ_48XWa4Y$}lu^j8qGb40YV^%)r2YOuz+5{%2rdRA5kIO3Bh N=4g0mEx5n10RYSk5&{4K diff --git a/fixture/1/2/2 b/fixture/1/2/2 deleted file mode 100644 index 
f524d52ce778264c9b0a2fc0e15d5584d4bacfcc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 174 zcmZ>Y$}lu^j8qGboX#T?$-v-I!2TZuelXO7>HiBDQVJNrfY*pk*k^_S8-v5zzy+5b zGE8_HGJq0n4M&bJE?|FlW8;(4v+a7Vwl;1M*!J%J;f^FObH$$%{GGZDF89B_y}#dX z?;l5z$$_EM6X)-5U|kShp8xQOA*=L+KPS`|y3WWv{`KwsY$}lu^j8qGbln8x$mw~}3{(u4l1N(m@uz(?>aN%TMqYQ?G1`|mW2A)|A3~W9P z86mwUnH?rlnF|;gBpDZ&Trgo=?7+*@!*F$Tfm4C>L|HE5jrVM8{;@~|JaX-ER2569 z6iW4;X;!%TMc|UcFBevnR*U6E@2I_f{X^oBHj|>%v-6#zO0#cnFZ;*&@yT+#xz*(# bpI_fvSNmtv>jp*!#(ME0SBroNAZG#q9#~MX diff --git a/fixture/1/2/4 b/fixture/1/2/4 deleted file mode 100644 index 7455e536d9b0279ab3fe3cad714a8080e904fcb4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 181 zcmZ>Y$}lu^j8qGb^e(QQ%)nseV84K&{yzfzU{GMlC|o$%*C>M_p}|Dbgn?%k0|T2+ zLqY$}lu^j8qGb{LrWs%)k&=$M7Ek8rc6YU`Q#PxIl`5;j-H-21y3CUdDwJCm3*C zSR1(DvO|UmPeTR+0|Q&bkt2)?nA03xRj!1l-uV4Y$}lu^j8qGb)HwcSA_Iew^A73Qke@F7$g}Nm|QSnTaa0vc zsT4}}o@rLN`9zAlva!7M(?P-ef>k?kv5Z})U)%QqDr%GZZG@C`SHneySdfn cAD>^}T37pL)9VIC2F7~vB3Fxm2_RuCIA2c diff --git a/fixture/1/2/7 b/fixture/1/2/7 deleted file mode 100644 index a79c3b1af293e1e3c4a87242f2c790905ed4cd1f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 170 zcmZ>Y$}lu^j8qGb-2CbvBLjm|0{?#?XkZ7CAh3WTrGNnnBp4VN8Co3}7#A=wD?1!v zb~wS-*1*7U_xQ7`t8Z-1x_f&2`Ui)*<$QK-esOtq+}YjRKRiA?-*4~kAD>?^IPo~T z$uxQC9QLzi3G#Ifi*2j@^ZN&bBjWY$}lu^j8qGboU)VcBm;v{`~ePz`u_;3Qke@F7$g}Nm|QSnTaa0vc zsT4}}o@rLN`9zAlva!7M(?P-ef>k?kv5Z})U)%QqDr%GZZG@C`SHneySdfn cAD>^}T37pL)9VIC2F7~vB3Fxm2_RY$}lu^j8qGb+#IIyl7Yd!8foB#21Dj7n zMo6zoW`~JX<^l!=NyY^x7fcuzJMi-KFkIbS;8Y+zQI^Yi<2~D&e=HIKk6e2kRmD;& zg;Kp|niXz-5xAuA%Y_xC)nd8PJ8ExV|B!g3&7>&x?0l!F((Iet%l>hGe6rkbZgu&` a=hwH^)&AM^x`B~_v0l8$)goX5$e94mOi%p) diff --git a/fixture/1/3/.zarray b/fixture/1/3/.zarray deleted file mode 100644 index fc765ba783..0000000000 --- a/fixture/1/3/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 0 - }, - "dtype": ";f-W0RaNcGfYHSb9>nno%o`sJ*P?yN9W zKGrR!9W+P7Q0YiJpR`fT3IoMM&1~F4G8zU72jcI?-HW{&b0_+C)UC*y5jVoGhg}Q3 O8geE0a?quVj0^yg-ab44 diff --git a/fixture/1/3/1 b/fixture/1/3/1 deleted file mode 100644 index c3229706d8ce11f691c1b659220d64b7a6459370..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 190 zcmZQ#oWOK~fq?;p_W`j85HAB_jjj5B6;8ZmVQBJE(-OETd%f$>)sQ>lmWbyKIb1d?2t??{gc4me_{H~mnMN3aj(eR(8 z<5{%i!~_NVsy8PJ7a!}9(2rWKT-~GHNeq6}z=?0Sbu7SlB8S z1tAoDJ%M26AJ+Dd{}U>iG`huhvUPwZPUua%*4K3-TL+Pu2s8MPTsYm odDrsdUCWYpEe+na#Cg|Z<6VoCcP$j&wSalo{Lee*J?3Np02<<5cmMzZ diff --git a/fixture/1/3/11 b/fixture/1/3/11 deleted file mode 100644 index dace4ad02e46d4b062b5d6a8f1d1a41489cbbeb6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 63 zcmZQ#oWOK~fq?;p?SWVXh}D5uW2^pOg%e*H7l0dyP*?DEH{^x}nsqy=sP1m-?&-rB3B8 i38fDCsvC;!(pfVU+r*c=@?G7Wuc-*H%}SmlhZ1 a=VoW7rzR)H$3{nnhXx1w`+9paG5`RH=uE-@ diff --git a/fixture/1/3/4 b/fixture/1/3/4 deleted file mode 100644 index 8112c7ee285eae0edb24b8214373fe423ff3fa1f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 195 zcmZQ#oWOK~fq?;p4+F6X5U&Pejjj5B6;6nl<8^T^+^<6h#6llT^WoKp>#P7;ES+w-j6b=7bI-W&KPE1g+ zuX=N$aPhGo3H_)w9)*jJbO^{><(w#3c&LSgUrNWLV8MX~hWgs-%JS0U!u;Iq%=Fac T#Q50g$nenMK!0CvPeuj+<#R-r diff --git a/fixture/1/3/5 b/fixture/1/3/5 deleted file mode 100644 index 
5c511bcd1031a4ce368115a5051d2a8e459e2e79..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 198 zcmZQ#oWOK~fq?;pj{&g=5U&Gbjjj5B6;9}~GBo*U=oyr|GHNf78Z(y@v+g7 b;rmz3wp!V3wW8W;dA8NEXse~(R!bNF@r_C$ diff --git a/fixture/1/3/6 b/fixture/1/3/6 deleted file mode 100644 index be9ac1b752925219498d8c0dd357aa6f446acd66..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 188 zcmZQ#oWOK~fq?;p_W-d75HA5@jjj5B6;9k`VQ4y|s_|i#HBblv7P4#;gTT(ik2YB7 z@O1;J9W&bdrF6YYR$N%%Q2*=A$>Qbb=2+z4TH{%~?92><_+2?CiP+wbJSzcOPn4g=S XnVy=Q7#|xQ86FxO=0GHNeq6^qJP2qh8|P=G+z z0@o=sLEzHvDU9C>_^uc5Y%k!NUcgaaz!qM>VqU-`UcgXpefw%(+2+ZyZxX&+UEkVQ zTAp9JDtM_|>#r-B!OOg-s<~EO$p~EJ+{(uK>vFpPJnPbDM=CF;`OY#wOvHZN4?A|ABns%h(Vv=i@u&P@N2Lk|<_)|py diff --git a/fixture/1/3/8 b/fixture/1/3/8 deleted file mode 100644 index 988cadf04812220595128cafb1d5c8672a7cf127..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 178 zcmZQ#oWOK~fq?;pHvzE-5YGZ)jjj5B6;AZBFf{on%QdVkXE<=4|IeQT=g&83UFBUB zVy^L5ePPIdPmQOn#{E*dUL`9oEO4m*_2y*p@^f=6@^7v2EM9hIhC%$UoRdXMPfgMA zpQYnjwB*DD1^cQuCkhuI>yglpTH{f;=tzfvyj9MLf`x}#IQXS>JPH;ZXke(Xt*$ID dEiTN@&CX0uO-_uDjgAZt4G#48_4Z_B005vXNqPVP diff --git a/fixture/1/3/9 b/fixture/1/3/9 deleted file mode 100644 index 890932a821faec6e2120a560911646baee450c05..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 194 zcmZQ#oWOK~fq?;p4*{_V5U&Dajjj5B6;AN7GBo+9>ls{?T^+^<6h#6llTkx|hxv2pPUiAl)}j0~zCjv@eW>>CpR diff --git a/fixture/1/4/1 b/fixture/1/4/1 deleted file mode 100644 index d4e383145f9525a249bc69b78b8e03e1b5512ef1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 142 zcmZQ#oXB*7fq?;p`+!&kh|7RjW2^pOg%i2V3?XbOscGpMnOWI6xq0~og+;|BrDf$6 zl~vU>wRQCkjZMuht!?ccon75My?y-?CQh0>W$Lu)GiJ`3J!kH``3n{J9h5cy=U*f{Ra*nI(+2lvEvMk45}WEA^!#l$5frKDwK<>VC4($G_w=f0Na=n+a diff --git a/fixture/1/4/11 b/fixture/1/4/11 deleted file mode 100644 index eac7834c39f4b28828c9dedd214532c996736940..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 57 zcmZQ#oXB*7fq?;pErD1Bh!ufYW2^pOg%d{^7z%uR{rm$0gMvds!x&f?m<1S2Q+J!H GG6DcVB@6uk diff --git a/fixture/1/4/2 b/fixture/1/4/2 deleted file mode 100644 index ad3322ba6d0546ae2c4cb5937a9ff9076433793f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 145 zcmZQ#oXB*7fq?;pCjzku5LW`R##a5m3MX2b8B*9zoIG{<%-M72FI>EI`O4L6*Kgds zb^FfUd-oqaeDwIq(`V0LynOZg&D(eHKYaZ3`ODXD-+%o4_507?fBzX6nV4Bv+1NQa wxwv_F`S=9{g@i>!#l$5frKDwK<>VCkx|hxv2pPUiAl*RscGpMnOWI6xq0~og+;|B mrDf$6l~vU>wRQCkjZMuht!?ccon75My?ys;jK6uCK7Mva__cwzs&sy1Tr+zQ4f1!o$SH#>dFX%FE2n w&d<=%($mz{*4NnC+S}aS-rwNi;^XAy=I7|?>g(+7?(gvN^78=!03}F46cZLxQ~&?~ diff --git a/fixture/1/4/5 b/fixture/1/4/5 deleted file mode 100644 index 3473d58b21960a708b95fe4c6ba91040b3317e04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 145 zcmZQ#oXB*7fq?;pCjzku5LW`R##a5m3MX2b8B*B3eEs(Q$IoBC|NQ;;pMjBynT3^& zor9B$n}?T=UqDbuSVUAzTtZSxT1Hk*UO`bwSw&S%T|-k#TSr$<-@wqw*u>P#+``hz r+Q!z--oeqy*~Qh(-NVz%+sD_>KOitDI3zSIoRNu%fo0hgHUnk=qKzX; diff --git a/fixture/1/4/6 b/fixture/1/4/6 deleted file mode 100644 index dc63deef03fec56662f68a33afdd90afb6809650..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 142 zcmZQ#oXB*7fq?;p`+!&kh|7RjW2^pOg%i2V3?Xb0kx|hxv2pPUiAl*RscGpMnOWI6 zxq0~og+;|BrDf$6l~vU>wRQCkjZMuht!?ccon75My?y-?CQh0>W$Lu)GiJ`3J!kH` s`3n{J9h5c&BVx{>ftB?04V}JbpQYW diff --git 
a/fixture/1/4/7 b/fixture/1/4/7 deleted file mode 100644 index cb1f21a65e89d3f193467cdeecb168b26f151984..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 145 zcmV;C0B-*R0g(d80002U0001y0000K0001a0000ewJ-f3$b|y{WCpywzQ4f1!o$SH z#>dFX%FE2n&d<=%($mz{*4NnC+S}aS-rwNi;^XAy=I7|?>g(+7?(gvN^7Hid_V@Vt z`uqI-{{H|00s{mE1_uZU3JVMk4i69!5)%{^78e*98XFuP9v=b&0sx?q5>Q|U<~~L4 diff --git a/fixture/1/4/8 b/fixture/1/4/8 deleted file mode 100644 index d295458b4aa1ee2b91103a4256703bf5dbb3f769..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 142 zcmZQ#oXB*7fq?;p`+!&kh|7RjW2^pOg%i2V3?Xa^ib~2Vs%q*Qnp)aAx_bHshDOFF zre@|AmR8m_ws!Uoj!w=lu5Rugo?hNQzJC4zfkD9`p<&?>kx|hxv2pPUiAl*RscGpM mnOWI6xq0~og+;|BrDf$6l~vU>wRQCkjZMwWj0~zCjv@d)?kAQ2 diff --git a/fixture/1/4/9 b/fixture/1/4/9 deleted file mode 100644 index 51792149b3ab364b5f738edc7e4afa289b6a9ed2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 142 zcmV;90CE2U0g(d80002U0001v0000K0001X0000ewJ-f3$ZZ1vR0f2FhKGoWii?bm zj*pO$l9QB`mY0~Bnwy-Ro}ZwhqNAjxrl+W>s;jK6uCK7Mva__cwzs&sy1Tr+zQ4f1 w!o$SH#>dFX%FE2n&d<=%($mz{*4NnC+S}aS-rwNi;^XAy=H~+e03}F46a|e=rT_o{ diff --git a/fixture/1/5/.zarray b/fixture/1/5/.zarray deleted file mode 100644 index bfb0d7e0b0..0000000000 --- a/fixture/1/5/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 2 - }, - "dtype": ";f-W0RaNcGfYHSb9>nno%o`sJ*P?yN9W zKGrR!9W+P7Q0YiJpR`fT3IoMM&1~F4G8zU72jcI?-HW{&b0_+C)UC*y5jVoGhg}Q3 O8geE0a?quVj0^yjYCb>! diff --git a/fixture/1/5/1 b/fixture/1/5/1 deleted file mode 100644 index f8618d97fccc15103e1dd335e86e1bc6cd59b33c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 190 zcmZQ#oWgX1fq?;p_W`j85HAB_jjj5B6;8ZmVQBJE(-OETd%f$>)sQ>lmWbyKIb1d?2t??{gc4me_{H~mnMN3aj(eR(8 z<5{%i!~_NVsy8PJ7a!}9(2rWKT-~GHNeq6}z=?0Sbu7SlB8S z1tAoDJ%M26AJ+Dd{}U>iG`huhvUPwZPUua%*4K3-TL+Pu2s8MPTsYm odDrsdUCWYpEe+na#Cg|Z<6VoCcP$j&wSalo{Lee*J?3Np03HTid;kCd diff --git a/fixture/1/5/11 b/fixture/1/5/11 deleted file mode 100644 index a43aa52ba609b36b3755586515d5e4597ed7ecd4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 63 zcmZQ#oWgX1fq?;p?SWVXh}D5uW2^pOg%e*H7R diff --git a/fixture/1/5/2 b/fixture/1/5/2 deleted file mode 100644 index 70c08a68f1e94659b6e379e69ea05c41e5f60f3b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 206 zcmZQ#oWgX1fq?;p&jGOr5N`uwjjj5B6;9-`GBo*U88}?!{f`Wc&l~U@hNBH(p`o)? 
zJSRbs=3jaJH3f$_l0dyP*?DEH{^x}nsqy=sP1m-?&-rB3B8 i38fDCsvC;!(pfVU+r*c=@?G7Wuc-*H%}SmlhZ1 a=VoW7rzR)H$3{nnhXx1w`+9paG5`RKv`oeT diff --git a/fixture/1/5/4 b/fixture/1/5/4 deleted file mode 100644 index d2bd6841217110e05e8b7de2ac7938d4334146c7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 195 zcmZQ#oWgX1fq?;p4+F6X5U&Pejjj5B6;6nl<8^T^+^<6h#6llT^WoKp>#P7;ES+w-j6b=7bI-W&KPE1g+ zuX=N$aPhGo3H_)w9)*jJbO^{><(w#3c&LSgUrNWLV8MX~hWgs-%JS0U!u;Iq%=Fac T#Q50g$nenMK!0CvPeuj+=yyb$ diff --git a/fixture/1/5/5 b/fixture/1/5/5 deleted file mode 100644 index 879f5ed9bf4cb65500efb67daf12a7c5670442d1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 198 zcmZQ#oWgX1fq?;pj{&g=5U&Gbjjj5B6;9}~GBo*U=oyr|GHNf78Z(y@v+g7 b;rmz3wp!V3wW8W;dA8NEXse~(R!bNF^qop2 diff --git a/fixture/1/5/6 b/fixture/1/5/6 deleted file mode 100644 index 907d599728100a7c386efb1cb7f3f09f4fd94443..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 188 zcmZQ#oWgX1fq?;p_W-d75HA5@jjj5B6;9k`VQ4y|s_|i#HBblv7P4#;gTT(ik2YB7 z@O1;J9W&bdrF6YYR$N%%Q2*=A$>Qbb=2+z4TH{%~?92><_+2?CiP+wbJSzcOPn4g=S XnVy=Q7#|xQ86FxO=0GHNeq6^qJP2qh8|P=G+z z0@o=sLEzHvDU9C>_^uc5Y%k!NUcgaaz!qM>VqU-`UcgXpefw%(+2+ZyZxX&+UEkVQ zTAp9JDtM_|>#r-B!OOg-s<~EO$p~EJ+{(uK>vFpPJnPbDM=CF;`OY#wOvHZN4?A|ABns%h(Vv=i@u&P@N2Lk|@EK^AU diff --git a/fixture/1/5/8 b/fixture/1/5/8 deleted file mode 100644 index ca283ebdb982345dffc8927292fc43371eb74d4e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 178 zcmZQ#oWgX1fq?;pHvzE-5YGZ)jjj5B6;AZBFf{on%QdVkXE<=4|IeQT=g&83UFBUB zVy^L5ePPIdPmQOn#{E*dUL`9oEO4m*_2y*p@^f=6@^7v2EM9hIhC%$UoRdXMPfgMA zpQYnjwB*DD1^cQuCkhuI>yglpTH{f;=tzfvyj9MLf`x}#IQXS>JPH;ZXke(Xt*$ID dEiTN@&CX0uO-_uDjgAZt4G#48_4Z_B005%vNqztT diff --git a/fixture/1/5/9 b/fixture/1/5/9 deleted file mode 100644 index a7896b6c99119c11c315b170216751a59b3bee0a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 194 zcmZQ#oWgX1fq?;p4*{_V5U&Dajjj5B6;AN7GBo+9>ls{?T^+^<6h#6llT6lIyakJvEL%~s zYR$S0o3?B#+p%lUz5|Dj96NF9%()Afu3Wou>(0FgkDgRKd-3YcyAPkLzI^)yIdT>D diff --git a/fixture/1/6/1 b/fixture/1/6/1 deleted file mode 100644 index 80d1c2f44d4fdb859de9353f35ab78477a34aa3d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmWl|2Q~r#007XhBRLb3V&W2pC5;%Bk~U`Cgp5g3rp=f&XWoKEOO~xz zm9=KwhD}>?^0w{RwP)Xff<1EhOA}-@9uHz?d<6;>DORFXnQ|2>RjF2^R-Jkc8Z~Lw zqE(xASsglc>DHrHpMC=d4H-6K)R=J-Cgn_-He=SDc?%XTS+-);nspmCZP~VC*PeX` H4juUgs}&{= diff --git a/fixture/1/6/11 b/fixture/1/6/11 deleted file mode 100644 index 96e125a1d2e06e646dfb64316f9011e33e2f3160..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 66 zcmZQ#G+;Wxz`y{)PCzUI#9Bc7U%-dOm&K38pCy1LkR^yEm?eZIlqHM>C`TswFl2NwoQDjTmBNyj+Z%*Jsun5%K`FDvl8J5Yi z!YXU5lOxXtn{2WD_w2IAJ_Qaqb~_B&Y1l3!cH3jrn7#Jd?|_32IqZm|ju|)MxJf6R ObjoRGOgZbE^Zo)@`#3@X diff --git a/fixture/1/6/3 b/fixture/1/6/3 deleted file mode 100644 index 4dc3dbf1cf114ff1b02d761d20f57e3268b30333..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmWl|2RecQ007YcbsjU6m7To_$=)l;7Rg?PYdM1B$osw*5)SI35|6*me8nx=wYY^6` zNwbKkn79_L+O+G?sY|yWz54V^7%(U)Wk}kv5u-B3WR06JY09)2v*yfOuxQD$6|2^) z%h|AL%eK57y9)N~J89kxCxW;rc9eLYtFm{i&`EM CyB-1n diff --git a/fixture/1/6/6 b/fixture/1/6/6 deleted file mode 100644 index cacb7fa1cb0bb7218e866917a7dfb89968330e20..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 
literal 216 zcmWl|g;hcU006-6jpQ^I7NVG_U|?WlU>9IFb|Cfwj^H@zy?4sWeFs7C|L1RoN>!@W z2&q-4UV}zqO(L4Lh-%fQU58FxV!HL{6_?Pb-+)0$LxznQl`>}Bgh^AT%}ARyXWoL0 zMN5{gShZ%|hD}?xW##1U*tKWhfkQ`*oj7&o+=YTmSFYW-b?4rLM^B2LmArWM=G})+ GUw#1?L^&P+ diff --git a/fixture/1/6/7 b/fixture/1/6/7 deleted file mode 100644 index 6d42cb62e4970e2cb034963b752254fb373cf5e9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmWl|2RDKN06@{N_edf$JNyT~-z*~|n-a1jBYRddy60R%6QqM6Xw3beWQu8Km}QP6 zDdt&VktNbBv%)HCWLRf|O}5x(hh6sACrge4@*Hx+F$GRI<&1MKD00aaC9b*QmOIMa z^S~odJoCaUZ@lxtCtp;kQsbLCKl~CH61B-@!?qZ))u?T@8?(bsyX>~dUgP%J?|_32 MIqZlDM;&wAKV~^VP5=M^ diff --git a/fixture/1/6/8 b/fixture/1/6/8 deleted file mode 100644 index f4b0a50c2da7eb96f7eaa47c442f7ce741f58525..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmWl|2RecQ007Wm?;?~fA-ilTd+&(I-h1!MIfCP)_kAx^77zUfK~Vblzg&f|h)Pwe z)u>gcUW2GcF-@Aah-=lRU58Fxx+Ns_=+&p+fI&lsrHn|+7&T_xgh^AT&6qW3-hxF- zmaSN|W?j~XO*vb(AVPC@=o65*AUdMy)#a z8Z>GW)hwn(t2XUAbn4QrN3T9{3H=5P8ZvCes4+0 diff --git a/fixture/10/.zattrs b/fixture/10/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/10/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/10/0/.zarray b/fixture/10/0/.zarray deleted file mode 100644 index f9d15917dd..0000000000 --- a/fixture/10/0/.zarray +++ /dev/null @@ -1,14 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": null, - "dtype": "^k7{-kf0`r4}j1n>-Fp8)&%My9Yz^tqftH5$PT{KOL>aRYB2M+)LIq&;E&+kM;%r2!z+r287jM;rF zGCp=)dr-5B-+NG)u{?d!mc##fi0MobRLJupe3v~}S(Zw9?+E&1tH$%AN_hV`uHRXF zbj5fn>&noz_T|W#6XmQwiH(t~TZi9Ou+M1(UMLP`=E~Wx5{s^e1y!-{S?CoB12LJ^ z?0*hZ;&Oxh_Ib`xAjR_PlFV7dc}k4@nXXWB?gjXzNs_^673Zsv7amI0WvS^yjo3{p zK^pqeqNio$k1uz%^reF)da(E6FCG2WB3)h`6esKHQ;(m+%W5NKb@W@0(2kzNx31LF zw*kfNr?!;5G0=Ylmdvb1=4iw--NUJHP80Jq;Zfzy&-b?*nahZlP}G6A4@TxQLGikv zvqEiRP7`uP?dy9J%*<=XhV1$N#CvAuwqT)BBFMu07JR+YrW~5Ikb@PEKh67=?68uD z6|M36^BdOL$i)WL-q4fx{WkKkqd-3=ZF8xeoa{*J*cWguaFCY+qS1gL2e~;gn)p|f zz0FB}PV`B{WB#{Ja&#eRx_s!L(nX#w%;1Q3ldBs=zjq64+~n&4)=U%ekh2GI#S?-& r0`A=JyAD^L@YJ;5|7hw=ZG)?qV++i5#!8?hQ9?lw+mp z`_+S#9M>vm?yw(}AB!sHBAo#43_U>uVGsIO9-@ z9tB!%DasN|l`y;S#-oJa%ZIU+5lWVy8RXjAFg$ECioixVn>f=7{Vgi!+k=Oj0G3)WZMmzMdya7TGK`r7SX?X zScqC$>LA!*=A%PxPPxr;^3EJd*A#NJ zlp`$3_xn~$mnK_W>WYpaL_Br76(lF<$t&aB zq6wwC^WAI|gutR4ed9R7Z(sek@dpT3KPL$ge9sK;X-y{#%OaiYG6FmIw3Xh*ely&!h#j*3)9O9qJ?G^AwH$c z=4&;<)WUzXt%gu9g%GuU$=T8d!W7-2jX8~k`!`K2kGvy3KI z5H`p@t(ScwjJ*{LAt+;B4`_WOcu#e`J2XNVswz)yR}(G<&-Huummq0qGCt<&2{2AA zKUj9ufU&Ua%K?QUqb+%>*Ct~|vN=uhn#HiJU)gDI&S;DjO&I%itOXb*Hw%PL7>NyMdA^J>Df5T2!j&P_;lbGawteYwZ-!Qkp8%uSsJ1CAfDusTkhdzBp$@tx zcL-w?jv70dMlif`PyW%BGrqX|Y4{$+IN!NffZ>wd8{V*=(Q6iHeJ7D|qVC1df+U8$ zI7>Q1TqQPxarnlZ&@Gvac|KVJ46%(x*^HRe2@hxIGiKNg9GjqIh!!K)7(4bgnRee| z7#O+tzbs=^N?~jYnx9fw&CnRlS$(pGA>E^%u}%_U{OTAk@oi*WPKr!)e8=!KYZYJ& zJ`oFHWNmx&{zngEnYQdkPamV_y0{GExHJqy(imZ=PN`V{2W@vtQqkmIG}OvoCQ(@}6GY&aGB^`|G{ zt;yG#K_9}2=Y9eNlY?E`Yk~<~%J!OtVT5Cq)gLk=3ECdUl`&C-mdPs#K{14gT&1ns z5(o=7CJPYi`X&ZerVs+-LvAZ(5aw(39sQL}kXQ`m5WE5!6<*{K{>ZHBt<5JCi6J;S zs2|EIA^aFg+Z0{ySIv9BZ?44+TUHZd_>5M^a#^?LRhc-On~rOEQFBkey44? 
zlVCKdmD|aL&I@M(>-q@M7IRFC2MD6Z5aDUuM*%|ka^LKIV+7U4j$N+fgj#=F1FI>5 zzs&s~`hN&=MfwdYGK?~Ni3!7VL|%X~otn6!OPO)q`1|MjMU1WOLDe@k7~dkglh0~1 z#5!~tsHj^SYQz|JGZkQDOk69sHe;AwP>VOTVDy@wbX2ouB=t_I{dX zWf-CVhk=r)!FX_>Pk`a8=yH7L07Ja#5Tn#}-ICQG8Ls0+@72c`Q|Hz1$&54d#iKGD adS>iF;Z9Q>bjc{4UYR6Fx!p6JBik?~o zveT7eG$wQyMOn(45{`F_EF9fJSb4;|>-APbgGyKSQ)|MZhc*HP z759qG*^Y#Y34?zZoC#hTS#9BNg!vNX3@;Bt5y$yCcoAG<#@BE1BTQ=r2@vkoQRAWz zg1xWP)v++bw|RB@-$W3k7A;W(OS}BhvRFd@@Uq(534}y31QVkf_v6Wgp7z75y;BKi z4!{56m`TtRMF?%BP4U{fgm5>Bvs@uz`9zTbp+0@p;M)>Hkj1r<)^dUp%fysd63U}b z*b6T3)cUmKaw9?1Xd&G9=@cNi%tOSkZo*_<*>%4p~=b2`^o%f=eyH~b2C4vzphM}YR)ixlO(Ol!L>XN_^?~%w@DvB_w zb1ROiq%!<%46T1>Fyw}^1Q?}aAq)?L_?)T&#%$|&NI?;!AVj(hW4AaAL)55b+}u+q zz`&Sk;I>AFc+(a}ikbQ!)lP<4Z({RI7o+dgOzLnCBSAbW!?5PP0OQ30_n(DBjL3gY XkFS4WXy;0jPJd@K?R?;IaDwq4lyjG` diff --git a/fixture/10/0/13 b/fixture/10/0/13 deleted file mode 100644 index 570115c870aa89d87408df433e44a27b1b0cea5d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAni7(ax9L61`IcmD_Qlk)-sl2u<=@CQaNTP$vD&;7I&>BXhnieuCg$|RmbB2b} zfkjK9e80VmP%7o9RjQZzy+6-Bf52zY^Bm}3uz1sWH|8AA_-Z%NjoKwD0)&8;!>O}p z5Y$2kH_9~$m70DAgW3e2%C-+L<`e#U;!d4D;p*t^gzJWc^=C~42&2;6c?lMTb6x7a zK~@C&xcG8!DPcfwB-Y-RkbZBsqq#l7dPnEXxsC*ya8ZDy!rG#bZUnQnmWMjL2+#To z>>7Lt3X4)dg29s1m)QY?hs{6p69Wk$VhGxrG8><8LUo0w5~2yaJR4grju57aB82j+ z;ywS83EonP)#o(A@6L1q!sXb>$M-V{u6mJ~WjTc5+J66>JVK8D7PF*6g55;Rx6mSj zs8LEd<#0oQu)NR6cj;Zi>y&IGof<--soEE%dV*>5k?NsFLR;|Au~$umC=G=PK~m8! zKzQKc^!{8IA!wxPRx%T2Wtqnx>>*THUv%9%KoINrL{N=QdTTjMDAoNgK=7*F7o##x z_~qB_@KsJIkZ;iHR%SRC)W3eH#`uEe*D5s`+3&7~pVwxrN|`Uf=ruVxJzSr0qUlfj zHbaJ0aM&d$6NWg81w*ll#EN0;@nlj}%6K}wQh*`0v6`W4y>R#AwTy<249lA?428u7 zH-?sO%%fB<#@(91GZDTFKQRntd7G(o0HY)yW6J^=;ypqbKSUA6rDM69N23@{CK`rs z;uxQw#0xO8rV4?r`kv&AX^ft-PWQBQ#&LJWG7L*`7>1~k$2hv8P=K+hW46nJB8GU= zQbvT%FWFcnL#HOBM0ST!w_O(9T+P@k9+jby-zdPiy~gI%i6+Lj-r_4!t&D%klCYia XjAG-{w(GhWQ*|)D?~i=Gu!r#7$KqA)w-3ij2?ufyq1)UnBB5GO0Ovs zz4Tb6QYk7mb-!Pgy11iaF~#-z-QU^g5BThylcKLq(O0YBmg^h*O{0Rwt8q!skP3<+ zmp=_7DmW?f8y=6U@YccddfIOl&b24+y*Qp=8zEU3AtAJx9RDFRBt#W|uJ)Klu-Gk2 zT53Y5|JAz7!i*4lYwbJ(3xe_LqA%aA2xYzV1qcCgsTZ0S6ZEa5+X@{C4{Kx1?>G~5 z774C|KYGDWBHaiXIbEmyHxRtU5JmZF(tuYPt($$M$pUgQ?{fN@;qAtOR@<*gE|v}HmN{>aL?ZQ zc~mjMqwVH3c{$;0n1O6_HQ}c5sRb+Q31S^D2NAa9M72M zxh2M0!VqUMWaw7WPGwB8&+2<`%y`f`Q-Gmsqii1R(>jkQI%L^7ZS?YXQx|+Ar*&Vh#Jw1Jw|Z?jERNcZ2q2Sh&N4O zY#r;We63)ND!mdauQSqIDtF{48SBNPG6rMP1sKV5OiP4Lu(BrptWO@}om{&a1&sJf ihssfa7oYa#J(h&d9`Cb()`Vke zcSBapA=pbHylJ^OZ01Nfu-Bwf+nF%KBqj5Q8{w%WLWo+oYxW~gg6Pm!=}WkmBNriT zUGnh65r2YASmYnMfuKyhYPCT@SZ`(i-6M=J(vV&|e+NO*h#;)ijS(UAmwcJ>Gmenw z9r&dqfv}?ISyf{aL6y2Rp(=%tX; zW6gvcI{n+CTL}Rr+Z;Ew5&n5~h!BcZe$O13;Fo$od-`X>FI$ITeHB5PrI#SB(nshv zKKby;AR*)C9}$A4jr=i;4+C8@Pv|iYpIfVrkufwDn~fMR-@6?5F=gyOSiWZAREDJ# zMq_wyXAika~V<{`Hb817N7oD e#85oHD#FlB|VF>p!ASgX`Lgz+C=50-ZT diff --git a/fixture/10/0/16 b/fixture/10/0/16 deleted file mode 100644 index c424842c42ee51e3cadb73902d00cd8bda13a0ea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fc~DIO6vkzXC`)SIkfqIQWemzvK3YbaN@0|xqC_gPj8w`pRF*8Mj6o<#)TlJJ zHf53)io$ys_nedp*^&k|a_>F!`~UpT`M$r)b|mgD7I3+{)jObEz~VFB#_o>=YuE?lCfZd+FIPDh#S`3W5eyar>ElV+Ic_(1TI?ax!?**tdq6_b}3)p+5Q$&EN zv%l+!9|E4VJ=crs5wP>HQ*&UifbkZ0b3FzSD(cK5=MEtRZ@J)TqD9adB@N+r{((1t zbO~#hbzf~&5C(N`+}<#ea6=X$c-guQyQ(DoR+lxMv?5%Jo*_bTHpmPKvLSSq>f3nE zCnWnH(U`xGFi)fJiK#Q8Ib-YD(XIqp!-FurZMg{HRYKL@XFh}&^SM>Uz69gCoTRJ( zLT%V2-=s~1?K(+jF<}HnzGOmp;1wZ4*!Z*KsNEidcIujy%Kd~wn|i~safH?Cg@p(5``u zYw0`cJ`H9>cUg*2Gu$0NoOwB%VVH8U&vhh2u8j_(diEF*#(7zk5w1FzJVl@3U}pa( 
zRl(4Yk}hX_tjVj~KanA?oy<5DY9zu4EP4BjO>FLm66r9%4lp!c9%(A}dBge|h4a zX^cajk_p2neWnP*vP(*w(eIFDoH&QkCm731@51L|^>Hq)$ diff --git a/fixture/10/0/17 b/fixture/10/0/17 deleted file mode 100644 index 29b6c02a38..0000000000 --- a/fixture/10/0/17 +++ /dev/null @@ -1 +0,0 @@ -S? V V?]NX?ȳZ?bm]?6P;_? Wb?Jd?BgBg? i?W{-l?*5n?LGq?s?'v?xxxxxx?L1z?Cc}?j?ƻ]N? uÄ?m]8?@@?B#?O ?q?n׃?aB(=?5n??4TnY? Ο?9D?V'?)x9/?j??je6?w?K z?]g?Jd?3ں?OO?lvĿ??/d:?Bɯ?撢/%?[?4?``?4և?'A,p?w?ȳZ?m]?Uj&E?)(? 0?\R? Z?vĿ?JO~%?7{??AVf?c?k"Q?>4և?;?HS?&&?w?`t?3. ?jO ?ںq? Z?\\?T?(LG?N??Ο2?}?uAk"?I$I$?ݮ'?3})?ÄPz+? g.?j&E0?=w|R3?55?v=8?iܲ:?aB(=?_ ??2\ B?sD?FF? \ No newline at end of file diff --git a/fixture/10/0/18 b/fixture/10/0/18 deleted file mode 100644 index 6723998f090023939749148c049eeea54e8b21c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fi7%IN0LF_hM77E0iYgTCOA*yTp)6e)5 zsWXZ-`vQplN{BTT^PY7E5tDV^Eb#~?-ua|ldlEvF70M#S#gXv=C1HfGYgNO72x4*O zNeNKO!0fzn*WeA(UC35XYod zsv3w5KB|&sO@u*VvjicxVcbGIbaDKU+D2T?7~V9ogODxGy&~-F59<%SA@q{wpE~$f z@}?4$+Fp0s@t%l{@+fNjNbGFYox1ijVXcS|n&FY1i@J&Snw9Hr_7eH3UwNuu#A&4v z!mWI7n8qMss%`lC_z*GREiXefD8mqn#%O_~E@LDF)@R&*Q9Vw;KskuOH`v@N(h;ak z{5-)#Pav5$gVQDp1lY>yV(L4YKEG<3e^?uKp@Zi>eE}60;dY(hX}Y0>s(E+5ioIi{8v*gAiwm) YQi1yJB%>Qm z7qU{xB+By*k&wz!Io|hu_W1)od!EPs?bbdQ5JuMHd`g!P?GX*-3+#z|burU#E+@{J zJ4z7#!2^F?bRrg%7jD*aAx0PmKa6xGS^{TI?Oj9M&iin7&wApx=H>3zjf961Vn){J z2i2a0Mt{{r2XCU@IqFn_50R0A&RKrM;V-{7qy-SGkyA8wJ24@4rv%Z{Vt!)qZsL*E zn~u=EL~=yJ+Hd=by>+hn{=vjD^I@j14ibt+DDkQ6h=k;AFwfOCjED>TdM7)a@XO0E zo)k&gX!?dHMG?b2j6Y~c6U~{j36axxMuLcPUSYH^mT*rQaP<8-$@@#b*c49;zyZf+ z34~HdB5}3lonBEg5o&cwf^dm&Ye~OMnAPblH@PBttDj`Wr4yAwkvc=K5lLl>4j#!O z_89zZ>by=Y4a}Ax4Dz;TH02Nmq0O;oiKyOlzSC7Bxny4nc+ zKCP;4?L?cCybMvK3_~ay--)euJ-&85#JrdrmA8Kqn#w_l*Oq1%Qu~SQ@W$Vx{}GY3 z=e?sf1U9N=69J2$ArbKZU$ME~T?>>LPXW#EvzUM@0^rK2x9-ckcGE|9U!THX{DiYj`vE-59 z^~D$R_Wcs5XNkN_&r7iF_9S_xM1ojJvU+!`1XT61E3mR>BNR;oMfe1Geu01b< zR``$@7G>RN3L|lD$hYxi5(&XlFNXy8ZOZ6X83~W5S{@Q#|D>EV=pu0`(X*m?goLq* z-n(cyiQ0_uIYt^3L|Tk(s{w^y1!n_J+fcY-?8ZZ3ZJB0Wn>U3f%l@x1VH5(Y?&+8$ zQ4lOTvMJ;}-x3{HM#0W~PO)h%g-%5%gd8ZCadw2l+`*&kR>~<9AF=Z8R;S^p5TVf* z(Qtp24UK4drnJYEhGx7s4~L%*$NTVyP}#?*K?2yI-|=aBaE7U^Kk!7MIl zVBCd+V38%_;F~gYlVd1{-}9P}eNNzTT@emex_-+un?)d|rz3*EWT3%>1ow!^YX_RV59Mrd$AXfT+YdzvXSQX|h zRsr!_!~c4i6H(U3$PKQT?eZ`hLnVxX)MNW3iwk|6RlES#)^i38q^7t%rq&#a^&24RF{uSdwyaN?KS zkeYrGL}jvULZr9vlaRa}XAIg9MYzRxZF_x?Flx9|y(ot0Qf#unf0$6#5lft|dZm^d zM+BN2lOPc4T9tT$&@Jt6H}oXYtam%*Kq67(7uLT!i8xv?DIh3?Sf|li(R7xW>YXM* zXk_^$RiqPddWQdmV;fC<%f6#xHc7#|SK!-f?TJ dfO*OMcQ(cX%2`YW diff --git a/fixture/10/0/21 b/fixture/10/0/21 deleted file mode 100644 index ba57279a90a63a2c8c3adf610c01baabb2334fa6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fiBHdQ0EVT*RgP+MOowBkRhFX^-f~vXQa?i&CCSxAYjiP6YDkvo(2NdZr5r;P zk&;S>P^8}Pq+I2w=0r zsQIlF5U&e0yg#ibuIfB0cXK8}Jc4Gwav_}aEG`HaOwsr<;K6#L zTh?$RUZian5Gh@H78AA-{tlJ#vF^mu^HIHnJc)5_i`;{~h_5!i#U0*6L9}c_#5L>_ z5IfAK{dM*uY(ks16bBH)D=rkx46Bqlq=3W%F&uqoXS(qAEuZiq zlaMW3vIs4mg`c0_AU?YPTZJ4VR~aHweebyGw~37kPto!|UzZ(h9-2HaDnu)pUvWf6kv}h_?h{h|aex!05|p#( dN|0C4)RS=f%C`RSu@bg-ju%Lf+t{ry;Xe|ooBIF& diff --git a/fixture/10/0/22 b/fixture/10/0/22 deleted file mode 100644 index 51a98b25b1a7dfc7d7201777077360fc6b9861d5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAniBFGl0LGD4%5=CwD;7dg5jyZtIu>E1gc_B!rg9BQIZ~3!G3!*U=K7YVx&+`mSOR{`Fnt0pdlI&$d6x#LedOD7XJyg8bVFKaZ zxTXBsBx14>qED1`EZc&pt33I8_*CMovA1ucH4(8<|9-!j#G*S-Cho8$#^}c!{%lXQ 
z`^^y$kBWY+d^wjm(Z8U~-H`}e9z6b@6XAHqB*Dg&81}32d!8GiXt)!%QWpz|BQ2UH zgO(Bjc6Z~VJ&2iyBEJ0gB(xi?Jwm;SH|Ae%d{`yCBV`jJrgDvdSYbT!lcPUjzOi=6 zjdetCS;EyRK}4;dv*GDr;*4MK-Vq_h&Z6thNnwP0|IGqo^m4PJ@U28!Zj-^+ZA5uT zW>i2pk>TRi^eTb~P1SW<97Q;^ROH{^LkzWx76|Y7SrPd$#Lb5GcVpv-!{+(U>G6br zWY9&OL}Esz+z6p%+_3%cQQ~#jaRH&UkxcB@(>T8}jqvg-8&sV^$QDjnM336E`r=c> zbB|V7<`B6`i0}?y?NR55g)Ro0j$9DlsWmUu^N61+S%i3KJ3sqd0g)cxtr2*c2x+(~ zAm%BB5JMs-+CD5HRL|aL*_RR}M)ER5k}?dTXgnaM>pc<>1N`=8#6Bh(m4gtM)Y<|! zRT0r1c~7Mp;hh^eb#)yvsl!;95Z%tN1Vl~BG5?zl#A(%n3adB74qJ7Lv+sxn@ukNN zKM?=xAbyxxt23@)o$y|j&J);)N)yn~Qm zI{T0CF6>M!>?GEzUHNI+L&)#Q>?1)l*Yvi&ngnGQbqVq+-5L`1r7ZilSxbVaYM?-Z Y+=h>igsy|T$JFXcsIIp;;5tOYKTnvTK>z>% diff --git a/fixture/10/0/23 b/fixture/10/0/23 deleted file mode 100644 index 93aaf87e1b6d3d5411c8152cba084bb8f3516b04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fiBFGl0LE3WGAo2oq9h&8q8fT4C8b)2qcxQpiSMWy8GphGf4 zY0Xs;A=$IX-MGOov8uF#CVm^m^Y>bJ?6`xBDc%w;X>lXudGsQ zbHdNwd-9DXg!wtmNFys^R9EBsjHN`IWxRu?HBp>kBOqeieoy$fl2~VQ{ZNP#A5)*Jc5ElgrR3e~0*NF`hv$!i2;T&y zr8c_>v$o32n|q0oCLsc%HGFPRW+-u`fz1;S5ivTM7K!16XRufLuqeW?Ms9={RBzbv z`3UjEFHRu5MH?^Ti37?5X->xp2anQ!wMm3*vE(H2txx}P;c4QYUHgKWXNgorh(O78 zXiOTh+)~v)_5z`iQ1_reo#<4E5M{=T&%Dbf62p52*5?sE4fz6Mk)jY{MDSGOvLfMK z{q&@12~ngjFGEBth9MLhw}{!w6$0Xq`@W>G+eBlId=Mh5uXBTcEfHduQT4KpSe?2- z-}ycE{krFyiDls>abuqeZ@G>a#0PCXw}BSo z)~;6q;&}C0?GNq5CUuu18#{=Fe)2|wY$F7UEv(zN~2Zy(vkN^Mx diff --git a/fixture/10/0/24 b/fixture/10/0/24 deleted file mode 100644 index 8e16623744af2c4e774d208f27b19ba23b16837e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAniBHaP0LIHTbU1Rv>r6>Va&#y?rbDd7TNqNg3VG$IwGJuPAwvr_3XwuWC7IEi zW;r@WQhJ>g@`zY^LxlPMex7~)fX|-iaVTAudt9FwAcuJ4{~$DO5K&yP=lM?~B1zX_ z-nL=HYOl_mcgDo@vzPT&DTtn(fjb_JCK_DE35biS&m3=>5WC+W&ayHil(rsybIb|T zeZ7N6PA0lErB9Nl5>*!A_Pwo$%t&hiv9s!9ulAXQ+ovx1KeTqL};=6~Zoh$^P( zpVahJKtoFiaLb5QAC7N1>l$_X0T(fyNesDaIC`Wi}=6(L1L`<{z<+bww zp&MCrgNKNES%kQtm>5@kgxIBOY+aB>ELI;A5EJA=i0^)SiVG)+iv04}(Wi-%I?^)4 zc6k^=);LcL*X9a{PtNP3H(er1kB)l#>Wc7gsdx7-Bm!)buRJIs>=NCFI9(_7HL?jB zC*2YdMG>K{r_@A3rDdK`86mzgFzF6qsX86@x15mcxKGp>7|n03AkO<$3y7%vc!Ro! zgr^Qd7Ct6Sl+r_pZ)XafW<3?&9YNW-8sdbrY5%b=h;2t-2?*zwwEBKcg!GP>H^j$; zerG!05&vf)q*WSQi4766YnQeWwv}H5gwzJU60f(c?OCiPu9l44Jo!5zS!8w*EBy}6 S8rn@v&38-R|3`Ss`G9}-_m<)S diff --git a/fixture/10/0/25 b/fixture/10/0/25 deleted file mode 100644 index c43b3e6f7b..0000000000 --- a/fixture/10/0/25 +++ /dev/null @@ -1,2 +0,0 @@ -Peu?r?z+1`?MC ?!J ?Wb?55?-?n׃ ?B(=?y^ ?ɯĀ?i*?k"k"?c$?7 [V'? -^N)?ݮ'A,?.?Pz+1?X3X3?+6?B#8?ғ_;?v=?y5T??LaB? D D?'LG?xI?pQ7L?n*N?Ak"Q?S? V V?]NX?ȳZ?bm]?6P;_? Wb?Jd?BgBg? i?W{-l?*5n?LGq?s?'v?xxxxxx?K1z?Cc}?j?ƻ]N? uÄ?m]8?@@?B#?O ?q?n׃?aB(=?5n??4TnY? Ο?9D?V'?)x9/?j??je6?w?K z?]g?Jd?3ں?OO?lvĿ??/d:?Bɯ?撢/%?[?4?``?4և?'A,p?w?ȳZ?m]?Uj&E?((? 0?\R? Z?vĿ?JO~%?7{??AVf?c? 
\ No newline at end of file diff --git a/fixture/10/0/26 b/fixture/10/0/26 deleted file mode 100644 index edd19678d309c5bec971f68038fe1c5707831aef..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAniBHaP0LGi6RyoR%D3z#?gFcEjy1ZYsO zrYLv0ma9YWgVdTMbol;$o_+p+&z|QAZZS0V>``IioELlV{ZV05(xDF9J{8&;T~=Hi zP@%$nkbu}9an0!D5W>H9XQHkKp%`!Vd5b4}Z8Z|#`vLb@a?<{#ci?EB>Rpg~0bZcz{M5}&hxZOOWJn(GO zrTIi^<$A}dc7&g{!P#sO);`TjOAdr4FD731ISGhjr5ZwAiP+qV)2gM!imn}6 zeoEo(Fk}0358_X{)ClpgImh$eA72|mR=xcHJhxmyi5#MNDm=iCS;iCl@o;x z?t4-ziD)zRuWC04&+sY%VOA4a_NAJT-m(6!@ZS6-{>42)p5;Cvtx{b_IC+n|GWQWN zzW9lNklL8^oVc}YQCn6caU`eLZP+V9vWR;lyzRH?{c0jcq)*+o-N=cJ2mB`UW z6e>dNm?Pgu*Fvo#-|zSB^9OwP-Ww5qxXGwPz`OD#wwJpEWGi|^1RV6gsd4JJ0O!J; zVZ-|b7^qD9w69-4uUo^)_CWzvIj1wdhXkY#u2XAJB0?QUiV&;QAHIJ&n$YPCUv8;P z{DjGsTvehZNf@g?jyT(TdG~ou!grxZow^pW^vEO;LcOj!qfbG!7#th5dkS$k%sSw+ z9uZ&O|JrpL;jXxAQ9Xk&_76<8F(8H)8j2A0$}N7zbBJuWlvh{g5s^7AW|NJH4TGvF z2_}TTW7)qEX2hSgD4+f2#EXu_B19Ux7x*kCLXvJIe^^G?w(b~k0-;zq)vMH+_z@#D zLOiNXG`eL+oH4K$A*41^))Pz0A9!jw5o(ICXR*$rw`kGtLfk26O9hwex3L7q6wdZEM3D`!cy5g z`eGcR>ZaSKd7NmzA(;^Q1E)lYqYi6yLXwEBX~Uwvogs`m3O~Ci6T`5_{>?c;uHyo6 zz2)n~;#4BSG($x6j&W_tyhO~X9dD<1mH4GEJ%p&-8#{hXHgU0R`M$^;B1pTpu{)Po z<(DTyXcz3tYRo63cevaminliIskl$bvlJ21D*464Vu!KUW|t5uX-`B5sf`Jx#O)P} z-o=-T-ihs2e=7;eBD9K_XS#jN_gBPF^yEO#*Th>nM5g{p%eD2y{=L#u3Hcr$iRrQk T(c>FZGqsthxIfqYY%B2}NjICm diff --git a/fixture/10/0/28 b/fixture/10/0/28 deleted file mode 100644 index 7b802c59ef6268d8f8d22fa6cc6318a136623879..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAniBFCJ0LGp5rCc2{N)+jmL`BpinWJPxI%bt9trY7JMx@p%O3p+~k}1CADv?~3 zv#A!Eqp(o#QZ{i2#EAO)+hdh0GGlQAzxbs==~o0IFtff z-5*S8>JU(#vomX1r+_^jGymNGA|S~5yNCe$Jw^8~{S=__X05fkO2BKIaR-j~2q;c? 
z(XI1WKvMnwfYiQ(w~0qpZ-2rfM$Ixtjp(hsnc1cw>hw1M@Yf{rLu^+z3??>}^7g`E z#3Duh#GBfLk*|OHG(F;LfxZY)t*Z4g8bxHgr&SySXC)4LrnWW3}lUP;$=yM2vMXy(Zy;5vD4?x>D(y7s~}rTKbA08c}DGyBYNDl z8vAY$y^l&JL|%Km2(i&=x<*g};g-=A`8<&@Y%XkAluUF@UgLNtg^=smMI5esHc**P zgd1my5YEvH>$3I{+Liqr1|KBe>q-wH%7SD2{mLeGmsp2JCOvIx=Q9dv6* WEpheCXw#&6Vpn&A2oWF`Li__lJD>#s diff --git a/fixture/10/0/29 b/fixture/10/0/29 deleted file mode 100644 index 203b228a5844459cdf7d8df5335546d86662abf7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fiBHdA0LEQ~ktjrtBnnXpldfk%Dsz-XD`6$ExuRKjA<}Awu%gmP2T@VU(Nc?Y z6((hs`|5p)6qQQe@9){?5BThP9`kd{*MF)LP-nk;?)3t7*0-ltH40pf+ZVm`y+B0c z?3SXB0&7jbNCb=`Qi?8p6Zl)bK~Gl*Jk!^TI^Hah>i@WD&`*JYvcqeFehD~?wk`P4 zE-+zhheY56%|Uy#2sP96cb`WSwc(rXJ#>hx)#D{Z zxcDnzu}F5Ov$gom5*RKi5L$6aDTry@eUa#)+&S3+ba4A%)LBSMQQ2+n{ diff --git a/fixture/10/0/3 b/fixture/10/0/3 deleted file mode 100644 index bc547b943471d455f4f998b49d6f526a0516cb7b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fi8EDk0EH{1ycba-Wk|G1jl9+|=CmOcrIfuQDtXVOFwIb&N!yqS*WF86$%g6|;g^lG5cy5H!0 zm!3kp@w2DrdMVgt<@b4yq(R1SbL_UI5qMrDLZfr)K>7+#8b`0y-s%aa;W#(*(*+HU z8ZK;S@%4`gdK@)7ai*J!y3>4a1mb zzdj{{3mrF73T+vpsg3n|2CJB;gdx2Q+TeV#M9D(32()Ekzw+LIfr^FD zc+{!XgGI0$7Qa3^92}-$aeT}7vg=wFPVG!nNH&XFS%gL8UtioLEdFV&)M523PLI%w zu$U(o!lGg9?(T3U2X)?32Zb$%5%$tD917%NILI2o9Nv_xMK~n7Eij7Eau{7bWWI4W zhZ656o9QABUJW@XA_)im`iKgXdJd`YB@+%4qIyI)P~R%OOqB%w@fk^+4MBVFx=Yb6 z1c&z8HJW)4$aMq}RAiLT+Nvf99itT?=*jgtEMyU6PPCx66%ouTlpaFxaAxtCiaLU* z+X;)eHxUeWT@+{0Ly%XgI9sXUv0(Z0hp{$18pU^5y6}h#h?!F5&O;&162wDV#Y)X% zNy~%M>Qo-@;<7||NNr3g;$faLePIp9k7mG;(tg86sWJ*_4_3d zX|*Lc{EvV~^ppWYil+-W5x(jjRad7IR&off=yyj?TN6)fPkuL@O`KEsu1lCp?3G1` z6{?38du)k`MzKk>C%*YRND%k`7lJIEVx_wi5#kj*^{zAFm~Vb$fh%Fsr~Z7&jgU3m ziCbwN5=3&xAoHQCiH&x54##*BHt~^N-@OSv^<2+wzC^QSS9#l7qBu%pLd4dtmmt4JsBnUIF86^=riJr4> zM|}z>s(LjKAu?Tj-Zn-O+tP-+c|=Rz9S@2s_YouQVkC$UaSJ1hVhPPVCLbn}EQ^+A z#1Zl=34~@9sS)TYns)z8Azp_ZlOQfjZM3Hn2Mh-m`lS;-{>q_`GYO4_a}M!Gd(Nxt zxkSCEz^pvttQ^U^cb)#&0%Ey~=Bb2yk4wZ?S%kP}yX16tF_97Xd*G&0Vrx^G1hH5y dgfNb>u)TMKcwf_+V_!ksn4npPNR)>m{sE6ko1Xvx diff --git a/fixture/10/0/31 b/fixture/10/0/31 deleted file mode 100644 index 427b2b0f059f69c49648ddb0d84efd021200928a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fiBHaP0LE2Ij;Kf`DwfFmGNM#WPi(zXwx;9V%+XuU(rSz_rNc-$6OzV~T8>D$ z%2jh8Z_-Mrkh9nEeS{*)@qK^KK7YVx&+|l9^?#ImOTfeCu0)`BWJ>Oq`vT2nQ%t(` z0%y$Hm#(Z4*c5t7|Grj0Tex)M{CWY^52pjQ4FZ2O&m;o1*?SgUX%Z+f1m1IdA+XNd ze0))hKw!qTy~ExJI5w^Qo%v4So7zohVGy{L@KGX=Q+-tZ?Xy6PduM0a}9h&mhMQKMIyzb#?3xN1kN zO*k1mX$TQey}10?P{L6O@iq3zrmW$_&C1;$dN~sZtOJ5Jx)3WA5dvjb?4wk~0JHey z7h{R{A>$=Pg;EHyz2iu!k2|5$%o}`eGBGLJHgSwMVPU9#eRwLNX!sDP(|sjGO5?AA z|7J*U_1X2S{Rr2Dn3o@B5?y*1zwiK}#^L3O`q@N&tkHysubd+xW?A=bb`K>SB5qDU zp(WZ%6N{YY5;sge`|X=YD0M6(q6>~cP0|rQ9gz~kMl-A+dI|Av|Kr{*%ZT#UopE8& z(%Z}b@%8dc*6LOLFNCydxj z>{miWwFY(f%^{|F8J|ih_c%nnRYZu3DvzvJ`Gj=n)D{v8^+giGO(}#hkF{4_EGC{_ ctJ^iUgebP!A(tVNlwk-(;{q|tv_eAs2OF)O=l}o! diff --git a/fixture/10/0/32 b/fixture/10/0/32 deleted file mode 100644 index f09a926dc3f4c29f080b0994094dd430b390b951..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAni%-pQ0L6>QM0vk%UOl9;lD4jt9aCvD;^tjat4Ug=FprW)G=;H{MP4f*l#DRB zNkghjOJdA1Rmr%e{a(ZoHV{$6kaB<*6+B! 
zpRcQ=sFt(uExHQ2<$37X#Mt;K!{a9%Hk#gw=naM58eovT`HUH^Io9AH1YEF z4+7@{-G{rh3T)18lL!PF^WF`kfW2Sn%)Q?Per66U>TVP;wA2mW*de{OyOTtxz?wAQ z=7?VcGoJmC2#j#Lb|Juo_>veu;7&KrHr-F33OJhp@C# zLc9sRzjjA|qPS?=^PdBWbW^WstA-Fu|BGN^Zh=LRhUjjLOsE-2H298@5a-lFh~$?2 zIj;7EPCKhto&#Z*)+26&6VcgN`gq?2Le+33P9(ZXh}G4tW?$V2KkHM`%RPwUu_3k3 zClhV@As%zRh%(FC?7LHm1EGouv7%_YgqULb+hFe}y#tFUW&0D&IdNJ2W)fH09sZ`y zBGftph~Ug4mGL^lwPl`!=%($P8N86F-+8aAb}^CPxFu{xFtN$e^Ik~^F*otMvs)Np zTYWy`bU5)tvs^+{#f%EcSV1W7=ow8USZ0jh6eGQrS>gy~mB<9*vx$Dm%QeI;-8u=O zv{A8vh;0A3%R7ni^v(HtDVb0#9JUfo?+4vHnnGOmcsO=IDzQ@y5!5*C%kOl;#j#61 zl~C`omw2j*5N9>xc04*Dy<=W~@;^k((Pv2r8?_LkW0{5K>@lM9;+?G{PZGyWHp^v* bcy$;;)i^^8ZO@kwtv(USQ5OjHriH{mC-a{( diff --git a/fixture/10/0/33 b/fixture/10/0/33 deleted file mode 100644 index 97fe96ef7876231f775ff1e26da1f19015b86879..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmbRE^r}bj#`pHAX8r3&ke?^%{$)P-`Cg;MDOi4SC+WW+xyxA$YPFZR6n-d-{A$BliL-`js(xY5q!+I#yGpR$&$yz$#TR1 W)%6$XZZP-&r6Iggax?^BApiiMK#hz5 diff --git a/fixture/10/0/4 b/fixture/10/0/4 deleted file mode 100644 index 854be0ff012856a2835d0cf5e413e7208cf39a8a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fi7(c19LKe6lxVHxNV$trEfuY#H)5qEDMyZJGDjp7@+3M~wMAq?F;_}M$wN$ zVi5`xd>AfkRtOU?$Th+l#1--+3<?_#LwZVoJX(DXir|q)1uatLJJe#ecshk*WXy diff --git a/fixture/10/0/5 b/fixture/10/0/5 deleted file mode 100644 index 9c2fd9c4f6010510283fb9cb8ab00b9d6b72d5e0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fi7(c10LC3fEhVMs5M`4&VvbbeNfvMC3R#9EcPUh>AsLFwkxq1&ccsG>IXWif zd%RYe=#sO&MWIrynz#G+`#t;o0iQk3)3aA`+rp8+;&#~vH4q&5d2-;njDWcuI9TdM zFg+zefFPKV9i(dfN;V z$oD)N9`~7Gw6-#(OGV(KI%`;|Ns+lc@#c}q6rIW2Jp2tP%*d)<+xZmxv@8WEnpaPq z)Z;**oBds0NXgGbMoFXEw&8WYGqDBgZ##q7H;|&xWos*S8Efj@^ zLgO7fC`KNLA{0x<+gy7+K#?KScdZ+xc%G?xmZ_pxAQi$8JB?~MO=f7=e4{|efI+7q z$5v^=;MXG#!ysumFnr(cD!|}=)ii5~j3IYug6R}5hCZuLci;FkSj9fwcsGn8q596d zoEQehk4u6H!;EEV0t_KZk!8Bs3{_3*-3QJw{MEGYx>v-o&eJe1w}e5eqlV#qtFq9! 
zfx%I)MSvmAr~L6?J41WksgU|^2IFq=Aq>&;0#8PaGTaMZY3`z8m{7d1ahfKF?|U5q zj?0!`Kir?mA-?0Z2?uV~&yKR-kY;h<5LeN6{VODY`-t4Z>@N|*C3iQct| zGFN?%Lape~h{D7=6e=pe-|yMy5BThP9*4&pRhRV$PHQ@>IA}=Fu-aUJ!1B}>g^3Np z;i`_&-7|>zSka4E(;Nb$UX=iW|KilUR862h8seN)PM}e~ zwdDvQkZa(`np%R)RhBwt?+7}Q#^v_46D;AK0tAWU>tAK}6V$nv`iE*!%s8L*&rOd) z(U6dCZb(sP5@_?=j6!F-{Aran#g<%|0L6`V?)%ZV%ow_57pFv9GdPAv6?n=Rk{^mD3{68W0t{A8w!wAo z3?U~bja7LwJR`ra#`rNz)NNVlrC^Zih+rs4ySaNLj$xoaS%AUS_|VyWC423<;UNt6pUgMTRWroQ(*B`Q&QQJgd;Hr*k3Eunnyw$v8xdrez$D>r!ot zTsZWMLZ6>-=kS)oar-KI?EE->wTe&WknR!AktT_7e8{g~mz2z5+gaM?qvVL3pDn;a jr9wCi9|YcRDdyPoSB~N`j{D2SWjIFS#9=rjjb@JjEYZI9I`KQ{>BM8fLStcK1Go$E&NzIn!xK5vTUGX~Wf{3xPC5sJo?+SP}{D0DhOD~|fy2S8Na2dr{*`MvGqFAlcctUaFXR82(eAR9J#&!y$ zh)d-eT@-;2qBn>1QdBGVbvh4Gs5?1_8!9QhPpM28@|q<1O%jHm(?-^18ZbDxzmtcW zFq}_*9I(Zl;Vr*5HQ$Cotiy>Re9w^;&s`ZBvb+TtB%f?|AN6C{Yr1g2MaFPb`Y&NY z7{hpp!su5F!Z) zYL(nphWnc1K@sf?f8DwS7*uU6>Sf4gMV6z341KzZ%`YbzR2KPa9A`3jjYLW~IzBjD za5vyE5yKG@pjk7vlB2Fz^;8b=9!?y4L=lb~N3(Q2y*WlIPnMecacr893UH)|g>ZBo rUlm#y!?E(2o@PuuN3_P2unfm@aTpF!BcH>cmI`p(nUgwky^`ZU$tRG^ diff --git a/fixture/10/0/8 b/fixture/10/0/8 deleted file mode 100644 index 7a783b85c2f8a8903bf401f702c4e3f34dc025d4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmW-fc`Vm)7{@a@q_nbf#hT*xi&R2J$J_CEVY{YYt{gXRd$ zv>#2)5vA|v`6_2vPcP~jXTK=2;!Id2(8m`Qcpp%+28FRNds9!+p-wHo*qOOVcu zC*&^DjcQ0CIF@G!5I*gh*2=v~xH=x*m7GPe&H7LneuL0w;}w4BHX*sQ*6eNirq(!5qmp3kml^cEn$R?(Zr!UQNHNzFbezRR2&$pO%A2i(W2u|~K}_TH z8$x-@<6pkrgaaXF4X*tJ(PHBu;cl6hui+=c?mcm)^Tr6{Kg1#ATo0^!_mi;QMrsJ8 z_s$Z8vI-(Rto7HArfKr%RpnPlX!A%sq9edVE(OAayYpLjlM#<7r%<8FJXQ|%3!mZ9 lAU%eMq_L65(iU3*9#4Xc`>Gsx_=~jyk9lQR&ZH}N{0Eg^m-zqy diff --git a/fixture/10/0/9 b/fixture/10/0/9 deleted file mode 100644 index af3426735571252135eae72e9f19110768978bce..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmXAni7(ax9L8JO93_qNRyv5ZPS@xVk6N{Iyc6jpg-}|oNK`E8pn0RH$dQVUw>E3! 
zNYo-E?EC$xBr{a1Nwkm<{oY^CK7YVx&+|NxkHe8o1oz&3I|S{0Vj?^BP)%2p@N)q}7BF;{U4q=Z6t2vLZzYO%B~7 zanXe64)5yFSb}LRU-ph8)XiV-?WiDxRuwL?zD$_8f6>6)YlP=o86t$h2RaG;*#sSV z%=Wfi!sDJk<60%bD=DC-AdjGC)m(C;fS_!2K6{~ru=!}Y2w_lWIeTvfA*=ZISEm}n zhF!9Jn|gxE!bl(_XO1{dY9iP=1nd89A#}8>LO6eV%fr{51he^7#|pa$Z(pulce9sp zQW7DUXwCf)K18ThCgu8!5e~bki7;kJg)qt!ziAjxX82orHfrlJv>WTQ24#%=ur)y+ zXE7uVBgXh*QxV3!ovC%V7BgH&CZ;OP83P$nei7D;4E!|nTg6z{?mH|8qbo*b!nkCz zNrbWTJhV+iB zj5}?gr7_Cq(BOpf_Mu4QDE8y2XnTUWir}(nu gSpuY+8VUHK2=+K&DnMc0Vd$|~fHiOV4s!wj07!k9?EnA( diff --git a/fixture/10/1/.zarray b/fixture/10/1/.zarray deleted file mode 100644 index 22d0521bb4..0000000000 --- a/fixture/10/1/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "zlib", - "level": 1 - }, - "dtype": "9>DD.G۽*}+2>v]{P{P<}ҖyLT"|=WYJycJx8Y|tx)еș?[Ui^(fٯUY 1SryQ YyJPrIh竐JeCM*AѢe/w&z/"]^'Sj#eu[Ў÷ӷ1̬Fh3épN4QˈPԁF3A񺠳BmbNrE{kƆ{f`R"|Ѧ?찒Г6@?'msHp-pр 90WI~NIcNrt!h|E.wEw lLq<ńkg]K^ 'JmsǤe}>C3 YO5?g { \ No newline at end of file diff --git a/fixture/10/1/1 b/fixture/10/1/1 deleted file mode 100644 index 02b8839bbe65f8d04de66bb69964faa0a975165f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 735 zcmV<50wDc(0UeTwQw;$C#*-p1HF8X7rbLsb5;_Ree3VX+X&5>YHIBTB4ou{!B$KmJ z5%1WyZ+G9m&DG)Pq>)?6HBq8#LavgeiQWC?_Xm9QeZS%0Jvk}2FJb%cVlNws9IvwO z4L5F-W2Nf*)q|8A*D7c3upgA;^sd=F3N-Ai_+00&z-VTm@AL!(+;Tstq=gE^Dw0R* zYZM?j<4}tp1zK(?$`VYKFuU)@qlDkfhq0CsN~C?8VXuEvfeMQQjmWt*Do8^v>K*D+QK&DIe~9wS7a@J2-x2BZe3ul zMnq$adzPOXd986gYCQU6@ZF_A4gH~_RmZEFsR}yWG zUvGAVk(jnLori?5#%V2y^mW|h?j{nqws_ik{2?K<$epEuU25$6USAD1Wsh05JWhji zVl;R{IQ;xlrGdfK+SOiN8Z4_`&lF9hutzLHfq9d**(Z!bd)L&;;v*Ey`xzbzYs5h) zWSYEqDj%XC-eg6if3ENh4SVqz8e)xn8ihesJT#t#2M>^)G{)`h2^ukG;E|Hb2D>mw z%rG0zgfd{yXrlhf7<{}WG+{79*T}@xzqODI{X%H$GMp|`nyzu&XZAMn}pJRPww?pZk!(x*8K5UlRqUONti z=8!ov2R#T0W9Rx*-h|otS{1c^gl8)f4iyIwc6Il!x)@9_N(~brJg{i$ijE*eJaxMj zBq!*}E92av38lL8-E0(uz@i*|<2b@^U;Vc62MAX`CkYUI&kXNrO(zV?BAx3p2$z~$ zv`aDxi1lgC&LZ?peUzV;O*nnmZs+cN!lDqR0HIxN^w0e|AvN#VE8Akif)(lu)5{5> zg=Q5YKBdd%Yc;{t!hf`_hEOks5Vd{D+0q8W6y2hYIgNz-H%%;$yd#85A_Se!;~s~0 z5K1y*Gdw>KHpo7$mwh6Py%h@~C}Um^XniAiPj$UJG(s4vDo<@!6D|hN^?UZ0AZcha zKIZBPFitH$Sa#Hav9RmQ0fix>EqSZgCSyjjIZg4J#jvbj*=cXiXp9t182fds1sEnb z3xrM>bv~0Pb}VM>987C@zKk&`^M|s+l_AyP!Pxw^ed%#;hE|N90HfHbwka%t5m4oj zw+S;gZ}N-mst1YZhpICy{ZY z?#0i7B!;~>OFBbbB{qX`_{N;jEt!mYK3M_`v5iI9jF{624`=5yX4nlJo1kQf79-af zJN7l1cHd$c7`gYqEMrtkVQdPTpHf)O&=}2GeX@oj-J_ndP7-1K>KHEZZDd?dicEBT z$M7_36<`cL5es2tZF}_oM-O9}w(LevAEW2GxD4aCGz>%17-6VRsaXIAZFgOX|I3hW rs;R7pJrxv&Pc@tjR4}ew^2Xdyg&OIoDs2B{BA~*5e$JGMlfOzR diff --git a/fixture/10/1/11 b/fixture/10/1/11 deleted file mode 100644 index d56a6b14d90bc79132b72a5b6750f4b28df2ef9b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 703 zcmV;w0zmzE0VR@)FP8xrg+oP6?vJHw3N^0?J7+JZ0}egXuOgI(Kef(c#9_L_xZgkzP}A2K5e+8)N0F;Rq;$twv# zF@%U*rLEf%2n#nR3lQr1CI(if5CY>vZYyRG=45twe+Y6#`VA^Fj52$P3Bz+lUVt&3nz*7%nQ`6t`{(*ajIHfK)i*U5 z-y*t`&uTNoI&>MRs9PFp#29un6<}mcTr0OWW0+k~i#N1j^qQY^RI_Fz^-ih&wPhH@ zh4+s-Fglj?m%Mah9BJ4gz*y$rY*XUNXqMiv;JgoGf8niHiGB=mmSBct6^}5+o>VEz z&5?|`#!&(cNgE4e8150?S7s$Jek-uH!}T)yEi9=hg4Y lj5G4ZqcR+NX6$=r7+;PZRB4v(#yQQwH+N;b@eiDpni$7|Qzifa diff --git a/fixture/10/1/12 b/fixture/10/1/12 deleted file mode 100644 index fd2f26059f2ec73e90bb71af1cdd6a09d6736989..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 700 zcmV;t0z>_H0UeTwFV_JW##1S`tf)v7%~7KRzhrvKxn#nY6}g5QN=gYOrbC5FjukCO z)U+Wp&7Dl{iWTp>bjZ=6R*q0g`u@JpK7YVx&+}N-?=8BeL>TaGJ#k)*a9PG`$59Q! 
z#=FIeo>~O5)0Kj=A8+KFt|RDQk`2=|CUh7@S<0Fcj(3bK9Nj`#dBnTx^;SZIN>}z% zYr>(2HUb0{_lnKgj)aN{gMSyC30@glZQ*W&`4Z&}FAqWy$N4#U5nN-&*KhJ8Olt)R z5bo4b}c z_QR{aQwe7dzyIQxNzfBT2yLZJ@!GkBa5sswTp?lkM3DfYK7G~T+Y&;M#kG>wa)J`e z#FSSO%A-%%3oh`~`n2S7BSF+?A>8-r6d<_FL&UCb!en0AcC%i>ZCiZ>oj!u?aAuEu zKVjseeBRF?LaL$Egdl1EEc603f^b3k)$^QRgbk$*SyyHWFI}sGPR?hDbu4DA zNx$>gNr};Ft|q_;qjPDx8VvR5DIY~GMy+PB@stiDptf(aU!S4iSGG7Mf)OQ#p`-cLHXxSKT;r|klE4t} zk;qsoiZH5kD~_q8GW=}}t$$}QuGZ0mSPK@p=MM7j)Pw>S(# z)Tm_K+*2pOz?f;^wnm0{(-uaGnff2qPKH@;V)INFqwmyA>TnMuK|CtMu;#r0_H0VR@&FV+DZ#vP?OYP#@JqY##ger+>xP8&XH5hMqte`Y z2^NHNUFy9-Rs{RF_;PP4VL)#r*4~zoes8y0QF>lx!oN8bYF}+83pIf@$-S>Y+wLTkz1aS51T{ z4TT9oQqe9zc;Mmm{#+L!Xr$^^G81NHna3aOAyipkblo~Y5bO9vP>oG`YdK6P)%`9& z@T%PxqcTqT<=5@-RZb|7Z_w&iW;hqrzkaC3_=4rvDm59|@2-ZQ*Ji9rnJ>WTH90vw zT%U2G=}-GMLxxpw*d-?uhB%7_L$Qj)iec>WWKvbicsjgNfFZWAnxSjGaQEZ2jE0U3 z%bP9?g~bIohLqf{@(-I~EO5xxvRF$`sSo2hdEqa+_=%K{nVJwg~iL=ncNW4W70 zqZm#m8isG;7@wcS3ox>#3W2Tqp5%*ZjGnSi_q24zad*Ws3`=nshNzLpIJ%-xfU&4! zw#$MdhIrFbMug5U*;pk*rzWICc85{7T^8M3&Dbj*m7$T}D8RVA#^%+DCdRhj;ww?D ijDN|Ju$}FUV&l`c>$(_Ibuhl~k9@wchw&dHtd&Z;S75LJ diff --git a/fixture/10/1/14 b/fixture/10/1/14 deleted file mode 100644 index 0c5434ef4e..0000000000 --- a/fixture/10/1/14 +++ /dev/null @@ -1,3 +0,0 @@ -xO=斚c%IPi<{!պݲ:6@pyT15y9kls5+ b/UV¥ԍ;݄nקs_b{ lbofh/-gW ">1B4*@7a6Ϙ%Zg1dVNeL{ox:Skl6ѿ5V;GW3t'3|;2n4pxgƸx 7#&vdp\nFsL.0V~*QXdg[!aь=Fqlg7M`l|`**3\*w(ţ2csLKN&O\Ilv:bD`X%.wlbc \ No newline at end of file diff --git a/fixture/10/1/15 b/fixture/10/1/15 deleted file mode 100644 index 1e89e57760..0000000000 --- a/fixture/10/1/15 +++ /dev/null @@ -1 +0,0 @@ -x%OuNf!TEtQ"]^)KB.]V&H9TF(2o6,z~{پ|RCVβ6St7щKbձw0>@%"P;r_\%@=jRfCyVp,ˆMW)ԕБ=ﳁքciwCV!K!i8lH8פQۙ0$%$BZk#OWK#!l]LXLjAAXoe#0Ґ^;A҈ ֍!|3#YUcA4*iWu>Ԣ1!PT-(`{+~g/\&* :* M+JBćZi\צ:B]e!a˼c̈́:ۢ[ %7yuU~z?{g/[`}A G6> !1]3O1[14/߻1e?XfTFc}V8k6 v;"hVWTƸ~]N*,-c_9c C- Ð4e"g 3ҥ0-ft$)c$Y:%0lFFp_*czpL3; sRXP<*Ћ4wQa1pIX<_w \ No newline at end of file diff --git a/fixture/10/1/16 b/fixture/10/1/16 deleted file mode 100644 index d5dc7cbf4d1e7baba8ae81f9e53d7f2fbbd5b2d1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 709 zcmV;$0y_P80UeTgP)z|8#$}5rOKRSbrOj()49ZeIT1J{mVU(q!L@Kh3RLU|`mMp1^ zK`2Sos5G`VWs(+(!h0C^oRkXLk_I($?>+PT|NPGRzQ4dYhO4j}|@x!`D`MbH@~ z4dHhFfj57232T>iUu{(o26b=T-Y}7HLlz-;*}4t8swDhYmo=TVB3z1|AwqCA$P5Xx zA#|4N+j!0=B>NxHn7@!PPowXNsWYKDW9!+`t^`@bgD|~qxd`D^Le<}AK7<(axmCr! 
z1mn7#q^tl!ZP+B=q)mkFI!R_RVFX3KWI}l06(K^{__O1v-5!E=>YA0x{e(iBdc(1C zgw^VWU;2&^G z5|Tc=bXi_Nn3Lc#ZowS_nH5VQzScTNx13-db~)l-CEAT+BpJ?sUu713exT6V_5HYC}MwJ8+-uf4ZXNC~d zoKr{p*7iSgDSmj*-;-NCsxnvN0G zst9r3*dnYlhS=*{*F66;u_W)Tgc#YUxjHGH=I5w9N86I~fL`Zgc zZ`C~lUG_T{-Y3j7?`!g_h_@*@_ER1bWo_HiF4PbSPUe4yJtF*KYbAtJx_NRh?fz6lG_^yb(SU~TCP&pOl%6CTDh!MdY8YpEq_fYZ8*Fkp7?C|enn4Q)*5Xc z-9ac8=iU>mJ&sNp{D~NwJumL?7ow|E2~p*G(`)xPB0VLb;^hxwPurM@>wgi8RS{xD zY|`gNJw$uM%8dnmMCsK25`lQN5CK2k!5C8m0Y|G@A5Iwx3=US75oj(`h7nLTh6|kV zF_Q>*Y9GCQYA#@_9z@{pS`=1T3)E))vY0bgAg8k+>e_gLFjvJyU{%UwiNKh));t3n zflkNtkx_O6l`-ogyQd4JHyAQ-mVjD^gMdrKky{%a1x$6$5&^yC*Kn8l0wqD$TM8En n#FZ%z5!h}t`g$e==J?c`m}#W9eCZiCffqf?B?A8e9oC-922@Pd diff --git a/fixture/10/1/18 b/fixture/10/1/18 deleted file mode 100644 index 64de2010c804b157fdc8444a8d481be43898faf7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 701 zcmV;u0z&{`uUqXQ6&*~WjAAuODXEZJhhiejm`I1K z%;svgNNgom2+4A!WAc1E)`?2UsNVN|_W1)od!DDzbrgn9ZpVoK@ z;;GqszfB25R`}mXm5GvfZP~)5$%NC4BZ>Jb#LWE*`wi2G?sD0LsM1cCAX0p`TSR6O zfrXPU{mv#kqsk z^pfVEI`~%drV^CeUU%B@o`{X|C~Evj>}=JYy7n_+t%wks;gOw-x{3ChmFsTy68WlM zd8%K;X{8Xtt$c5o#voy;ZTR~55Ha8_FGDma!w`zbXn~_HVe$2Mc<)uD^B^ZvQo(;%JU78kjhax-uHd>`2#+Cp2z;});<>yM%LqeN|zAr5e?-F?1_7I zG1G4@C(fBWN)Z0R1Akp~A{LYvZq{-kMi>S^jC3Vh0%uO`T|?Z?`*3#8dg8d|Z=&8g>QsRbk&%MVS$@RfFTXaV1rV!|Q#5uvF(G!R1kuxC zeq!)$;*r&xj?leCazw(~Z~KY8b*}mT!NfB2VWzJR5{gDB@u}>Hgyd~7&($`JhztCB zCp(<*%gZpH6iL`<`i3V(5yL%`%Avq z6i*Dm0mo+vgi=Q$akb^0UQsd;YIRA1aEWkhNxw{()#)rZxgvS1pJc_Q6O}=cIzz7! zNo9);9?2s182oJNyiP0)%$6Vw^0sF*QQU%5XJ6I2P$%j*v$Bs3kxJ~X_cHJ zV!pE~ZSs9$SW1ZmA-ADhO5C!W({Q|;I2O}k`=^qSE%rYlrdw?p+V+(A7d~d!<{F|_ z36ZWoZL{(@5fbE(TKs}oRrYt#>_%d|B0_Wr?5iHvOjP7fn|uDPGiLG`$zIHvtyqFu6w|^3v%0Yr3DoL`{O}qskQQL=SwB+XP;PsPqqcxj zhpvFByVsdH`T||o3?&50du!ZG#|k7moxFF!L||94{1Ab~9sSW^W&(P4cfWS41>~3R om?TixJVip_tR=^ypmdTtXnE7M;G#{6;kOC zmaOajZXp$-qWi#-N_icQ#A3b2@cX^*K7YVx-|ttx=R|7sqe@sFo;}twREcB3`Od~F z65Ncj=qidUvY?RQ0mstA0r^^>Lqnod$u7 zKtmn^tABiXyu_A3!>Rk9pSTh5{}-_gyhvb)RDa00n%T9MC^2-sZxa62T2K)ZH1 z4}qYfsK52K1ZJB$tqJQQaHm4`pw1`(2W!EEK<~pMHe7>5q*LJVazhde8f|z;l<%H_ z2saY0Juib+_>dSDW!-2BBXMrXxA9~W3Bgh?hXnU+%IH-Y36H2+9ui;wq?|M8B5^6v zv!Z!~gt3a=yJ$Iy+KllzMj8}ET8wS00fk=$X9G^#P`G03#zSFknPy#^H-#q4{;x4% z6auR5>6j!@5G*>fDdauh5*=4Y!One7v1u)ZPDLn$94MG^c7(#*!K3R|$|)2dvGVR# zr{Sm&q0tx7aDSBzjc9qMw8xc(X1q5KjY>r!Gt)ZdN=%nFkKC;(*gvK|Pi0_rr5N{S1V_@=Z`+Exm22b7QtM*wl@M-V0zv#+9 z<|UXgxb{Ajhk-@F#;2DO8NB+WJh?ub!O`$G-GNdDO4GSJFV`?A6zb??uwj;Y$)J=$ zk66|%mN5uhSg&NK&O$9W-C{_eMTy>lLuuA5obMW?+PkvoUo-JwSi~ZRt{P1bWwCf; zA`c6)ZkMz4It$?)8Ko@vGZt>DVWF6%gN3k4rj*4)zpZ9lWGuYK)Oa`uZCusokoNNy z>0r&lEG}qZ+=YW+ktO2bn=*5gV88vPK&9(>1zN0 diff --git a/fixture/10/1/20 b/fixture/10/1/20 deleted file mode 100644 index 258a408bc09e1390f433e65f6574be31046e5f7f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 706 zcmV;z0zLhB0VR@&FV}GZhLun$AyP?&A*O!1D5EzWLg`Qo!E73M5f0r6YI|9Y4cQRO{ublY4aBdccrI%guh_jvP57h<6@#MqPpSBu?=ukCk6 z*e)f?9K-iydk{(SHcjI_iLDJE7o74UX4|CaYWfhvqt-|eEmiuve)tkMO`g>Ut|#I` z56^qQkyu~qbZMnOF-^a3*pmQapsW!{ye$ZlAo4UUoUDV11Ky1n(n1K&tfV0ZVT5I` zN667|;+NZyntl;PWwLBSq_^*rkh~pd4B8MyxW#vEdwq~FYPeLrD2C`#Y_h+9m{8Uc zOPsEHrIs5<1ezR^AQ0+Wm3V^CE$weN^d!-&cRS@kB2nZQ*1tQ6I9f0%ASi`cr_ox` zbe5RvohCtOWcegjq!Vv?hX0*&LGpI1*mNV4h)RxmG%=f4(GIB*VuEAO$&puxzPM`= zgxrQ|K5<@QR=%^4*b!N8{iTSIEjHekyiJyM*W4vKLk9aUzDJZOLnP|$wVe5Y*y3lK zkoS<7RnYO>^f95M6p_5WHWX`B5;rmp%?~{%VtcA3h_%W>h$+bnyhJUb-mY?IRUJ|5 zATLAYDu*HdYe?RTmgR~T!XolqQAR7#TPYuecxtR0o$#4R3wiu$KqnDaa%gG9SHeY4 
zHX+9N{WSFZNp$32iF(sR+*jZ7VaYGzl$WV{*&kwaX5BUWegeumR0Xu%mh3ZA7Z7I# z%Am0Ao~yQ|K&->=%h5vxeBMKu+Z-34#5_FLa#8 o2rQT0aciuAdCB~DHpT+VSxg1wRSeAqQuDWK9iJfZ5BNl#t(Htyz5oCK diff --git a/fixture/10/1/21 b/fixture/10/1/21 deleted file mode 100644 index 1dea1a19660f2c232c0802b45cc8df30e468e104..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 706 zcmV;z0zLhB0UeTwPtS1xhNZ(*j%spDhhw2tmZKEja#qe#KSLNL$<;+`bTLY5NS5f( zj1FR@977b5l1hhAq~7nOT;-_f@cI7UJ%7Nn_k9C2Oae4ah-K^e=AbDtK0OKT=ES!y z#br+mqR?UFu=-iV+4IkRoGb~iw!^hg=MwXjAx6afPR(9GG&fuyWNb&=GV|J#xQK`f zHL4o5l-N}9)+)lDm|+x`+~Y{7`K=TXuM0K2KdmON>O3lUb0$JOf@Z&RA)NCpE(jM) z(fBgp!Fr-w)^HlVB$^CeO>PDedA`F>Pd!8&FMReVDTHv- z2^R=&59`9nBZO++PvgF$M6G&OjDIANsqp;S5=8{38LZzNLpXKS5u#~%md0L~Sd!HDn z6d`Kt*Jk%WA~F&*_4Yp|4z(5uh&9SWh)K~__BAC$XMKB?V;NCmDla3vmBSE9jcQ`C z;cEdg#`jd_**8R6fqW3+p|N omqywWl(XnckXO;vlW_XVw*K(361I1a7f6uX*sU+&KN6{%`%}zTUH||9 diff --git a/fixture/10/1/22 b/fixture/10/1/22 deleted file mode 100644 index 1112800ad456b3e91f70129d1bd0710b5f77af4e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 701 zcmV;u0z&8kMxBat%p2Qj*Fs>r|}d znwH90BxiDTs2P=G^t}Bdcjf5veP7Q$f52zY^9)Q&vV1?9c-!KV>}5g}+V$;vI*y1v zRJ_(<0^!}brTp3?VzLsVPn2{l+k&X8Jo$V0RN}0$w{M~~5wTJKe!rQ-qB~C}?yx1s z=*JxXY)`cN%@GifihiwpIhQ!mzo5+BkqBEJJpP^&;dsU*!N!#s_N(!Go*SWPxD&Th z7Ym3ZEt)2SmJ$JWcjKZxh?$2XzWnwiv>UBGLcNJM=3j1nSS7q8WfLN%a*cpkVLbAa zqd#H3v3ALgbwqDj!qq83M6I5);pt%Fj9>5G5h29RqU+5`VT61C%>rWdaJ2*;&s?@0im>!OzhXwIKMKD@bW7gRGmS{7EW12 zkJ_~Q;#0(Pk5*Xb5V=Z-@D5+?QRj$-E(V*9ToB%=H80fjh@UE1gm`E>Kl@t&ksjZz z5qOyhX}BsN<|%~`Ln0^IJ}e`W}|B-W~3`Dxli$nVJPBSACQ^tQg51Z5U=3Gyo48WQ%UEc>=u jOM<6rpg@A$hL4Vfu7kVB)aps7uD3bhIz+-hPne)VuN7ch diff --git a/fixture/10/1/23 b/fixture/10/1/23 deleted file mode 100644 index 86687919272f2eb43404053378add5a79c51b303..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 701 zcmV;u0z&LzV=F6ZWx6A3_ zLgK`)tWs-p!q47&@{J{g`8mx3BdCnD%=_iAFO!NAVfk!aF+TkvcxksU0X5TP}00%DEEnAhf>gwE#r6$R^w-qOf? 
zT`!_u*8M%@Yq1F*=zRiQ$B2uvhx9D8jHt zZiE)`2u2*q7Y(4 z@KocnBH>;A^rUGCQKT*}LqscvAru<7h}p^&0^*PRzNE0*L}QM85F)FubAx{^5n`87 z^|Fpwow`Ba`93jCl1+#ni$?;Y?$}XJ(FsyomsRRD5dp^iy62mTW#J`pW1k6cxsDga z2W>sKffnM{u2%x$c=cKB5ADPzb(bR>JBWpT@rM1qsABh&F!`I6tLVoGIPefGC z7Xjhkm)EKFjW7`JNd8XzPSLum(nly}=_lk>dIpI7$5y@DGAO)T|Jt|yCFC|-hDrE( jc+dEHWeIm58y&P7Awjk%9wlMNu8X#_)Fk`|hqs-O-#%b0 diff --git a/fixture/10/1/24 b/fixture/10/1/24 deleted file mode 100644 index 45e2b33cca1bcfc7045fdef853a108f3bbf6ca0b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 695 zcmV;o0!aOM0VR@&PtI`w#>+KyIC8}6Oi4&`bSOQhL#)MH7*e?kdF80J4k^|lLkl$u zkwQWxnbDhOIXXsCdYu*Wh*)|Diqm-SXDh@PE+J06WD8eGN+h>NMu9B-NsyWbzqvN9u- zwjOBw}9wjYmn-*oOqge?^n$VB3Bc$#?6O_wy?i<%a>RZ*)hd> zjqt9@OF6fW_@M|85S6Miz9}1tQ|g90eS(M`hAESFsR&p9MTfoy6UGHnBf?upz4B!U zaa$QCAfz_#hZBL?5BnXX2z!^T_QGgFvM`S&np%dGoY+fTvw1jva6FMHM|f-Ieg2(9 zOtbjqwetX>8(DONhlqMvgt(xX7*~6Q*rjT0U64jBRv!}(6XZgO?|yoU3nz(+{PNh* zr-_p~(lW$$c^E?0I8O}M<_d^U&g-K$T_Q@4j(Yp*itui!clRzN0&J46JSZaU65WS5 zT_^N4vI!a|-4YN*5uvW9)I>t1Wu8$PA-*v%=?-D3Ivw`6oRI6dPt+M0&2OzB&ihph zh^YK{gSv-=rw&3EJ|;|*(nE-EX9}HWJr&*^LD{();)Jtl|FJKKZAV`T2 z^p2P}#K(kwXFA^z|7RhjRT^7~4H2_zm$nhMm0tve)CRs1ueYr2S*#_lmWih*A?aA_E}?`< z7&=&UbeN-5uOqoC<*YY76e-tS<@l$@?&Wa%{%qn^k%r7>E^)d4 zR>IZ!M3j?cLb&Ie3WzzqT{p&A5dHGA(-W+SC+QpG1}rAdsPwu+mk@i*pSW~^u#br^ z^Oh4+8*K%|S6N@Q{R*Nc>{{pTl|+^@*xAxvcpDgBD{vre{M#lPIum0mvLlbKC8+Hx zfpW!2he&rKxu~|}rw6gQ|Db-Lg0OH}e4uqBp_wZ+LcHuLw0pRfxFq)#5K1$HiU?HHH(SLLON-(Jgs!>}qQmj8R%Q}WmUAUUCzVL*k(MF6)x!{K zjbp?xRhB?_o4Ew7KS2~nk9byhiim8ewzkb9TxAibO7n@?Vb()T&Jq7=B@?1vzd%6b z`}tTT7ZJP5Cua}7EWEXw285RolNG7H-B$>89oLDweS=J!%ZOu+Hv~jLPH11%O#(e8 zTNm9C-g4<7#QU^7lbJO{X`5$8mXb&`8~bPE17cHjoq#ZFh^_uzPe|_weoD0L{+-(X zoKVlwB)r8c_07aOzgd;bUlB&-Zv=$Y#)4Mj(S|kO&Zvmfg<2jHIta-ko{{an0qaS?6S%d%p diff --git a/fixture/10/1/26 b/fixture/10/1/26 deleted file mode 100644 index 385173c4fe8bd7a3747e9b4dc6c6728bbfeec648..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 694 zcmV;n0!jUN0VR@&PtI`w#+##7Im(eJm8g(~UYSXr3SIK5oYOUCQ5lksHM*nZC@F?6 zv(RC#bT|^GD0jJ*t3&UD)S4r7`2K#Leg1&Yp63Z}F*Nn;QDNbn7klshQDIclp$^+V z71|nIR$LrVp~8HSfY={#&FJJ1!oPNBqOJy^7;p7?k0zlW)V{n|n|M>1R~j;cDA8VL z(5^$o`HU72D@*D>JsnF}^e4GH>l1&KxO9C2@hG=vob^QFRM*AbXN-w3hhSksxE`J& zAPigHp4YG-x)evp?4Cy4N%V;7wj{C|H9u^!B7)5CEO|SNu#4DLtNn{JaqIO~U`6=o6>0;m5pK1O`2kW7ebt!#nt_Fiofn@emi9(mwL zK4IHe+Z}Y87^#e3`=Nl4>o`YT?fgEuu82snKQADB(>HaNULa;Qo2;_DObk{?4<}cr!&2W1f&nHQ)^HnLLEnn5UbN4zJEHJ z(CG|cZmCTCgvpg$RiY$G7^^>yINN%8_jygiccDj}x)!nY$RrU$y{QJ z3UN2gI^eS&5ntZ_+I1S?uDEMaJ%cdz4@|W&AchwjiV*e6Eq=yxh-|l%S6Ai{kvT4A zlZ}ZDgQ_VBCWO9Y*}oBH#GkY%pZ(^jsm_$(ztl5Qk_SVq{k?ig?ap;$Q8 ztJIqK5hFE1JgQAJx@AY4F|ZdQq&8C46HCe;cxpHiYKpLDvCg8mXwmON+$m^F2--x% zDPQ{aWeedZhnSzE^|;1^P#SzWYxOpw&LQqZkr$CAix3Ao`ZizlC7h67JY^R#GbuoX z_$?PgJU4A>j|e7GW6o{t3MGPTrDX^kc^J`K(ug2f7Ab*;+7`}MhltaDo%iyh37>*2 zUBg(yQrSEDVjQ9BrrV}@oM^rwnGpE{r$mUO4r_Eml8CKo!=k>OA&fc-Kf5Or!?4Hx z%{fA@;{tKLM*a_@ysBgs9vbJAOIvnxKGHl6cN%Y`NhOyhq2dYmk=sxPecf* zjR~d1?G=mO#g~iTiS1T@D+$RWw2GK#x_!*|SHw{C9PpX;~P>lwV9~6KiB+hEAbymH=Dk}vQaPq diff --git a/fixture/10/1/28 b/fixture/10/1/28 deleted file mode 100644 index f5814dd45164986cef4fddccceddb7169a298522..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 699 zcmV;s0!00I0VR@&PmTcq#+~)0Tpcn>6zP&gMbsmiqhv%nW|b(d6zdR1q}D1*&O}U- zDZbqMDz6epRs`W7%MP$3DRU9xNB63{E4>BTVcl4xg zA4ljqmwf(aLVV0v?-gcBTyHiLAu?>J3~&`DS+LVS=5A+8xe zX$oIOq(`UBeY=`iRVgh)O#43!WQ}m*Wl4kxQKUZ6#cBhw)920U+$h4UAX`g6mM~X& zM(vIxdfc=c`)(1vk4h#)UVFR+VZqWr`~yNepaoxnQN92G diff 
--git a/fixture/10/1/29 b/fixture/10/1/29 deleted file mode 100644 index c65ec5a0f7fadadbcc17b707cdbaece5a3d5880c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 701 zcmV;u0z&V1k7l}g_4@7d=M`0RNe^K;ABf2tEuXTN*y^#XO)x2IM$3S5ob z7rpeoKt$v0mZFaWYfZmM1dJk5iY|Q<_*=a}Pge*$)7OeR-Yk&n|F~(;Pl154!)t|fl~DyUsv}CoLO(MvizUG_Kf;G%l?E-*XeON1BsDN ztHY+N6CV?Yd`cWb6pH(fBZm<&*1^}KGzhP_`D3~@iMfrxGJ6A2gp*aLSa6NY8W8qD;GF6}X};!!Vo^VzIGtFz$9W;)Cl1y;SVUN>LTJYQPR+HI-VN6V z8!aXZO#F79UqT#Hi4YsB-&h=VAk6gRlY5+qLH;Wx#A{U{#1-w5Do+<8%qMWc3s>o# zZ!VW1WEXN6LZ#tJJW2PG5GkEH=99J%`y8L2J?Tx@C&u&+_LbgZiFc?U(QMUQ@oNWB z604XH@eR8r#5R+ee_i|utMCt-D)tj2s?U|%93VbSS!aABkWkeTBE3r={z(cWJhdYw zgsG2ZY4jmtV1Ap?zbK+cEn7Z>$a3>-`xZllrt7(T#SzY(wZ$)w6Vn_|N{Ehx6*0x} zgz}DgXNhF1;&qt`gld-agtAI}GBMty>B!Jjf?*dGptRARMx2^5_SW_c!gp`gq_dQqyS_Es=_&CaUi_a?V9-?) diff --git a/fixture/10/1/3 b/fixture/10/1/3 deleted file mode 100644 index 213eebd6daa66ff2e8a63cada572bb2c96a36bc1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 725 zcmV;`0xJD@0UeTwGgWZ_g)61J7f~W*NVG|fyw)-1v>_Cwl)WMa zB~7JxN!qyE@7{={6{B_9^BR$nB2=h*@0srp_~xABDg@|qM^bpXY@!H-_(!cnJ5&@* z{L5UItUd zI5+dt1r3cFE^L%<8jVd$rn&vjrqN&fGp0aC~6R!sl60?MzchHj7$Wghk|EU)&@t{%Nh$ zVf8FdkI;*-m?sy)qG9ar?r313g)N5>_R=yO3glrp$Qr>M-ju6FI3&6)FpAJ} z7+pPNzHv5(67MIQ=^_qZ4LK(w2?zc9hzgT>4yo@Y6AlxidPF!--zvRKl?49r8A+TC zL3{7IOVKU_hxXbvnt2e&bp#PqWR%a^swM~>qZJ|O$@MubWD#Uew4k>Y5zH!-9zyVN zX7QMcI)bR%35&Nk5e#-+6lc*xkXNZVTdCl&VEOZhu{Jy!#dlb`@Q4eDnNsD>Lm|x) z#6w!eO3hWQVV*L5VGYOQx@5;X46A=?|1V^ z%URPAuMl7(M*vgs)#t%%0Rc0lrwWko5hNf(77;M@&S-6cO+r HT7`+a!t-3d diff --git a/fixture/10/1/30 b/fixture/10/1/30 deleted file mode 100644 index e905d02969fb7b51fa95d5c4a18c05bbe9de4193..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 706 zcmV;z0zLhB0VR@&PtI`w#-nmnj*@b}5hhCJN@YEilqO*v%4U?Tjbi+H%82>7ua4m{>}LY zfhA@yBm_onPdHWez-=S#@O{k!M+0s)eQOa2EZwzubDMyzuJyT=b^!zL z4-x_``KQ7jb_$gA`Mq33fkUpw4rQMN{L*XkW_%Ht*?DmAsUCrT_KQMH`UD!{e@F-v zsIS=d`y~)*wIw(FkAO$?lmSAEm{ixid4o35t4MqDT$gwdy2@txAR z+YBL`0-}O04kJuU=hsd&B06=t=bSJmD!dBQ4NQrYeE+}uM-zd4VoKyI& zOPEXSl|_gZs)rVPY>A0Ru}QQizWF;y5cmHVf-Ie4rMnXm;uSpgt~247Z+>KfD`C>7 z{(Q-ekTu+iTWKB=L~_R<^P#JWjdphq$9NJp@sVBMy$L<_T+eO3M6+dAdD~i|I7(we z#MZ8tAl4|xd~^yREJL5JEDt39Dvy-S43fMJmySCVOvrU?B_c|$y-y4w+;zev2s5u4 zB@sJ`p0jU9eF`V4dNmIrGF^P$HbxTL(uTWvL`&Ws4~i=H5hLwlB!~}j3nPnS3C%ku zA10D4i%At=l35|tw z4)I5O&a3OWM7^iLtUThZ9Lc+Po&MMYV!4avsf2uwOT<@Mgt%wBs7NL%mdN`uqEt*zY`s#prsLhr(Ob^aYK$(3 zlE#u+j!3!6RdXM2(n_e1v)A!`gd)oEeSgnBf52zY^F&tlf0TPmz{BRQM4)$MO752X z0?lPpOuF>~XUy7{uB;K*6naYkzE(h6xOC$DdI8lBrvtSO0)I5mBm%YBdlp@35-2bP z-gA2)u+H0jd{K))V8*q*!`=uuHm&`g`A*=Q+D&I+5V)1_Q6i92eN_GJvp|gFg3M)K z1bpM{el~v<7+6&^Ke%0>)jIw9ogV_1B4)Tw`z5fWtV<#gVRrue=)VH)p|Nuh^&ol` zj;juF)7xpECJkM<# z2xXRygmIPlWa77#e)*?V;-PM{gfO&Nm`{akCI0;g|U!D zUMnG#j4-)LL#j*VQF%>ygyehBU6RLqe?MoRKj5=-PFLXSJ?l#Z9^^WIZ_^8$G`?FD zUM8^C@3_99T)_X(BHP(_1vDS}rI%L-2yK-D%G0*bFE9vXKJ&g|QzNj_sbh3joq%`J z#niq}1gs2EpHu1u-dfw}IvWM9#=ejU>?%EI{q~hWh*dz!;wAyNF!PTO-UygoDx2-| zUZBA=@$&T#0_OwWhr6^2Y|d?y2m~7Q-VLLGyQ?F^Oh7e2ti(q1Ifklvp=x&Tms2NE#_>Pef z=hQ-o+FB)Un6)zz(LU)>2m>r>Io zJ&574A+^sZ6K(n-9&^2jGRxZRyHkk+p^6EyqG-B=m}2_dVDBfr1B)kR`xDJMaasLl z5?9(C{-(|%)H(u);LIbH@jAk_WuAoSrtO;F_GW6C2U48vB}Z%UP%Zs zH}Si(TNq(meLmxKIPpWXTtZaEj0(wEK`8I&8BHWuW{lqyBfXVb;s|Ax$OPiEiGIn; zHN-95Itih)QL%xDZ2!2+JBjf0&G~vMnNTbowh~S62i-iHLR|KEICelPu~Q8Z)Hvrsz>m~_x7n~{p(I_es6El?jd5g?Y%wE4aTa?JKo#h*Vqk2@9j5N zmblK_``*6olE+4y_RHhW z0MUE<>h-O>SI)h+_hYax_PqGsUNP{;jeVEj+kaiS(az-Bd;1fgvX-p8@!o!pvkI5| 
z?f3TiQ|2@cFDP6OE|uE9r6?*_@zfrm z_%R|L#pZQl5egxyPL3kO>+1Zf2sH*1xd=m)O-SD&0|sfLvt-niVP3tt0E4fJc1bQ6 z%A6E%#DPJU_Ni;z9)@jQ&qH;57%pp82oo^KHNqLh74jqu3BA7cM-v&Em)QGmPhk*i z>Si$nmCa55Rlsm>c+9Y|ltFQ=Xu_~Nng}rDJ-qgLM+3vq)cFMc7Yvrxht+>~Gn_lK zx4fyJL7roT!DzPbn;02G#+;#8sRn9R3f)&*7+? zOcvk}YqV!_IGy%*f4zX?>JzJ&xKa+$!ljC1y`yrmG2uwMIAr;^o})v49D3>xf=jwN z!uI7V#P@T^-(!ShmRy9x-EilV>6ua#2dwe#RF&fAO$`AltmQ$ZNLk&h=V~g&f4?S? EsyBvZFaQ7m diff --git a/fixture/10/1/5 b/fixture/10/1/5 deleted file mode 100644 index 4b24bd2df3869e7f49d2b7d1455047fe29ef47cd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 725 zcmV;`0xJD@0UeTwFV=AY#vMg1C8g*PWs^B#j#T1F7H{VYS%xHcDO9T=8H&o0PIQ=e zrNb3DIws_MyjGd$lC!--)iTi?!L%~}GZ1)Ih?8whrUEk04vLU8B$9J9k61X>@|?)&u+ z_}C13+YA!O_dFUN_nBa{wlbwlMc|@3YgnmCk-0qa=8?%1oyprg{0%6~$f{o3`4s!K zECnc=iL8lG;4{BA$Mqk=@c)9KC4f6-}p0F#XjA5 zH;f^n`p&zY7zV|UOM(f*jAdy83?WI8WxCl6RZZ*N2hK74)wJ)rSH!T+(=aZ#gh8sK zhT(myve3DK!BMY8fFaGN{PAErLwnw-kos;0<8JXG4AJufPezO~+zVc5?xJFtP`t2l znkI+udmRCe%a&h1+@HxIzT>nB2X58Rj&d|G7~?IaePb=mr>w9 Hd-RTY%h+ZI diff --git a/fixture/10/1/6 b/fixture/10/1/6 deleted file mode 100644 index 8df6aa4ab51aea5e90e751941c01ad91c369f733..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 712 zcmV;(0yq750UeTwPtI`w#w%nhVRVRA6e&&dIyT88R1;p+a+RF56kefADjf=|N%Fc% zm-8)&-nEG`SACB{t?1B*!o)fhDk{I<@7d=M`0RNehsPULm-PrvYdWkrXh_ho+FXFZ z^3)iGi4DQws*cg!GJ+@iH5nC-1p1yO3o={@e3VyMum^#v@%T;`AA&#A`~?V{cetJD zQ4pNVvztXC2wFc+Da(u}m}focc{qh2YX66Ct``Vc(TiBq90H?Wl>mYN;?%oTO`tv+ z;+$1Zpi#cH;O^-s+kdSU}NKs}IX!F~QLT9`DX_Ym_mRy+t#f^63t}sW6UaR56b*>an``V_@ z^q>$e{&-Wg^c1?i^rNs@l>RYKK@lN^qVkdN1bIBg1sD85LAP%M0VvU0G7B7W)t-ZCU80V36vFU?@ntxqBpzVW2))fWg)H(Aj(? zgVJY{RctoHrz{8IAq@7P%s0+eGsMi&{-IIMP`&qi{M$-~sds7w7y`OV?!`4Ti0|0k z&M+!0_PuQW{&?ZKaio3@MSpw diff --git a/fixture/10/1/7 b/fixture/10/1/7 deleted file mode 100644 index 2ac06323d0b2625beeec07b0d61fa80af63be253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 720 zcmV;>0x$h|0UeTwFV=AY#vL7cEeS~}vMxp?nIqMsM2;=3gS4i+Z!950n03**T+_ka zQiya2QIbMb-tR%R78M=dMmaLqYqeIt_xJ4c2YmKCPmY!%M@u0`zgOGZo*_Bx((b*g zR?2bq&D8^m>IARn#ZCHa6PWt>r`paV2+MO>CNU+b|7HHW=FNAb@qzUG04YsZkfVew7h?tPK+&u=u?*G9{KEW~0L_X#zn@{Nh6U zWCFbw4g1*{1P5pKvu_0icRksmM`Z+mQmX_AJYHvBkv$>EG&pJD_<}&;D{a*2Ah5~Z z=Bwx;i0`o)uInY>W_st+h6s!Ul>!98WuL0nt5ZB2X!6k2rkK8_^vibx3YA5@2}NN| zgwADiis31rk{}z34Pqz~Pn*4+>qhaudB%}GZ;ItJ2Gtt;D569WipG@M)rZ3IN%P-c52Jx!-@?Y1^cFQB-(#PCsY8O2xGpY5okSgq1{LUH0}s{n<3 z)ouO8b_%12OXV3|6oC(-H;436R4ey&IuB8(J2{6NDk;2AsZ1F1nk4y65{94CM%HB- zFgUoslZToxoKJonu*IC=Ex$H3--bb~!-*k$&yf|+T^Sm(yagB}pKNy@^<&s;x^Tcn z#&A>mFJVC#!+43p=vNHGmLF{uE%6L#RuyhJNetcL=>iOv)p5sM3m8=IFfLR1WbTP8@qg5sn*2vvfVZIYuf^mYVu;Y?_b? 
zaHNQZaC9AC6vGSRoW=uRsw8oUM499bE7!FY*pTnP)3UJ(+lR9y|lH)(gCy>k| CR!=Gb diff --git a/fixture/10/1/8 b/fixture/10/1/8 deleted file mode 100644 index 226233a422c09db60d3a854e685b81f1f2ce889d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 714 zcmV;*0yX`30UeTgEZ1=u$1^&lw6b!=n&S71R6<6_+wrrGp_C#ejg}l)G?dCsY}Jze zNNJ&i<_OKSA5F~>rSIqYDraapI;0hoE~D@F`|S1q^LpOT`|rNU%Q3Yac4T|6c5mXK z?AfX^VH}25nt1;1=3sSf{^!?&9OCYd32> zuSzl>1TaKsXUZ>xG1PlT3otBJ9zG~bVDRmB{5GAzP;6+H(w@aIbyUl+_%?&0@JHpv zB8JS7j-C4}8Q#ll1sE*D@}p-O8O}dSUHq0~XqgDP_pqBmw1^*MIDXEy-D`}Ys?H$T ze2QUK8it~R$XBBp1b3H_><&%Bw&v^eU4(EcP{Q)M zyri7{gj&BP9!WlgfZ0U-Q+@=^0?~v};T$AD@E-0tZy83INp;(y7eTl$t6!!bO>k?q z8u%7Vkj{-KUYDghCmS+eMKJA&-%DqasIv(DYoJFwB`cN2tgV1N=6@KV8A-S{J zUQtL`d)9vLh9Uy#mx>^!);Le2l3?tY8T7rH&@`iN-K!!4s-eTmo2`Un zshj{oOyl$$LV3&MU%uUh10iM&uKfhjV&fp;Zkd*^;U~iGJ#nV<#t7p-#3AHd53GCl zld#=JY6ztF&Ju*O3L-qL_1BN4Y4Ye*8Y zB1H&I4&5Vh(S+y@@9NN4f@v&Y_KqXe&0p{Bs33$^6)v*AOqjWU(ZJkmgy&irB80#P zItl&R1RZ(I_O@KY;G;cbhN8NIDdM}!`Gbzv-wrW3cCnz zU#?ttvzKsE5+Rsq&HWHQM5t9J<@$^f4!fv{FlI=FFv=6ZX&6st_*;55YU?qy8|$+M zWsLl=H9;R|F(eHm#`t1W5yripsdcv&Gh9a|rYg)C0~t|%5!Q?h{511h#aP$wJ1hsI zD@J9)xMZ?Pgt7AF6E7V(qb*?B!XY=tInC_8Pumzvm3q$_JQ-3Qdl|=n3~tTwXUt0s z5@FO>w01{^FhT^^;(cKZS=ftM*GNX0zKyGGG{djxu7S7>Chj!&G9`}jXgEQHu_I$# zROcmz^p2~HJ8hq*mZmeLS+W`Z;wnkGjO&$l;U|=gwE=k|3{@L$1&qY|DeqU8FqS)O zs4R3V7-uea+y1R)EV0<5{_cDC#`+e;D8JbcdjJ#9LfAq!}+l*Z*M<)mvtxzo^AV(TTfTW=-;MYBw rh=4Sw__E|#0;HQ73HYK2_BdcFKw;it=&@LUHE;P2a{>PVNPU>?eDqlW diff --git a/fixture/10/2/.zarray b/fixture/10/2/.zarray deleted file mode 100644 index d2e6713e1b..0000000000 --- a/fixture/10/2/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "bz2", - "level": 1 - }, - "dtype": "OVfB*hYD!Js^|N8m=9)JID|NsC0|Noor*3YH? z@6mt0|NX!L+}z7BKn9v=rb8n}gFtDcKxi@z41j1fXaLAEGyup05Xfn!Kmnit1`|yU zGH4k96shVo(<$5Xw4k6J(bk(X_w>Z@gZQPfTXI?+g%{f;FC)|dooGY_C>8({ z<#hm685Lg;0D*mZQXqg+*w$-Dtl*5cM z_fp;)Za@IP(*#KX=)+8=CXARsIQau3+%jECa{_>j@q#G`C89E9f>J>V<3^4gHP&e$ zRDjqY6Yz)wXM|24GPtmGtIDx}(>@}AHduOu)uRK~_#CdXHwWk(Dc}8^{fd~9>w!Mf za8-#$Z?O<)iLkL7@!KxQ#SC-~Pa&<5JWjWw2Z`aHwNeW;(IEI#6H-6o?ntK!5((Sy=0!|NsC0e);|1|N8&`*8l&1|M&m*|NsC0|Ns5} z|KI<=|NX!M63lBhaaBA`O-xM~1k)yr34jw!nrLJ)V47&a0qSTY69~zOWWbs;Dd^Bf znFdc$=+Fk514c~=Re~2cS8vTNeUaG#O;7! 
z@XLof3}dIA55f4dIb}~m5QKsrHOa0`F_45@QkcUU@gj%-GXM;V)D)*E0K`B!2y>iZ zB$7zRBCrGyApoTS0j8M9LI4cH5-6C-bGCD$*-g$Z4N#wSwP=1yScS`%6SfYxEei~= zB3OQKu-9@!lSi6~{?P#d$GQ-VC4vB-a_pToiBS|qfT+I)G;yaq_?KWZ2%D+T6IB7C zl%(@E2Ja^dp#%jIS4d9;PzR(z(NJ!Iv`Uo+?<&%(u0zvSu5dtzon{J~;$Ar5yC4oc z2FNA=;|cs5<{^Vf03sj^WeI^!86+YY!yp?JR7tH(rySAIe+pEyYI-sr=BFs^NHl1v ziR{JGa{4WNTUCqicb(N$B5FsmNub3+9?25scy0w>`N?-d%sdP;m8whDDxOvx(kiV} zTAd~8A1v>cH-~n6t`i11@X&p69Px51&n?h7*J(WKSy?e)51`G`SOL-0fKx0t;^ByO zCPE9=qqd+mdf%8p%_^2|(}7n;c!^WLaR7|@N}`EM5hjp5Z_-c+lu!ZfYJ?I*p&ka% z6UwCXRvj?*9NE(!mh77Y{BT?bM>wwjho?-rWNO(*#+Rv<1Yp4VNm4N0Z%BU%OMDEV zD~~pbNOoj=j0%XM6BSr#glOqY~JtF`BCIA9?sxn}h zCJ11L03-EG0%&19Gzfa0(gvEIP>P-ur86@xJg5LPG5}}* zXaGH;We?Q@2+%z>14Gj&HYw#a{ZWlgG=8W+0001J0gWfApay^q13(6V0000Q0004@ zpa1{>0000001X2`0jhqKskH`!Ks3+{nlTwP(9x!YOqe4~m=M7YF){-LG8lss1k)fI z4FJHI8Vw$x06k3@m`sdABh&|rh1J#74qEDBkDRM;f|WEGmMb1wugt5J3bGssspu1WG81qKH8YP#}}(#Q(MxM@iGqOSS2- zeN{k97~#X;0;{rf1q1ujaR&*SJVCfXBom4#r~H$o2TLu|1fx?WU4D=WGAb!>LVyS) z4@a#=w~3lm(~0WXxd8|5CJCroAVeGiAFVs(>Vp(OPyrFWSq*3wB>WN~9m0j7q#Q?J zc^&w`|8c;@;myfJdGqn!f$Y4EP5}|~q3hAr5au4K=RxLO+bE+S!dM2;r^TUVSSjtR z32jS9&_4nk>hga0gogB;f{4Qy1*SqqC?Y(Z3E#@xa65J9bb20bxwNRhrCa011P77zf421Wyn#S;%7 zo$yRhM6#bCLyx>RFu%MgL(lT#Ajzsrsw9z!1(5+{EI@10h(3T(2^@hRL<42z2>1>H o1VCAx;q|@w803Okm(ocqoB}!?f}muBq!0fWaz!{$km+J3Y+%8F!vFvP diff --git a/fixture/10/2/11 b/fixture/10/2/11 deleted file mode 100644 index 579788c52e90558ddd84d186475935e66f0b87c0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 946 zcmV;j15NxwT4*sbL0KkKSxv2}*#H0zfB*mY|Nrm*-v9g8|L_0*?f<&}|Nnpg|M&jS z@BiQbzkk328grYQU`0oz2AL)Z#M1&`g9sWJm=jDUnFdVIXeXwgr~#3QjR2W20GOH> z(HLmcO(vdz3F$CKN2tJhqcWIkU?Qd>j1v<8Mgm{~Fh-0_nGG^&fB+eYqtwK~GGGE} zrkWn6L(rNUc#)<7GGbvck3fw!Ls6zC6A|iR6;IPisv0x^00*dko~EeK&@{x!lRyms zG}CNT)IpFKMu0RPh{?98>IQ+6O{wBQ4K}8l3_~Eu2kMN}@`0y>^#-Z>r6!CqW}alXc}TP(40R46e?E* z6#(sz!8GRF;)OA64J^yeip)Zd$~cp1{)jCuL1Cf+c{k<0kgXbzF9*vLU#MRS;HjXo z6fRuZ|M)p52Bn{|(n$hhKK}BksT>n|)9bS_ssTClChq`gBg9yl?cUD;^(pe=KQIE# zgca=44J$x4h&*6s|KwU(5LWq#=VyT*6ha_^2q1z8AfQ1&f(Rgj2q1z8MG%N65KtpT zh(r)U1Q0<46bL97^p0@m|B@w24>%sQCQg|I%mEEgfS{1cLsSK8z;GN8(H1|j1dKpc z-GWfdEl)l!8Oza#*fv30%TZ*%ARwr`pp-l$7D8W8!GoDa$pfj=0O>n}R6+@%Hu>CwPI0K#&AVRQclrB?HF&T7R${ z`AZ~bQ~JcC84>^&aUkP-LvOe|NaETgaHo4<1>HFiF*INz5r*AwowE z@8AFZ|Np=O1Di^@2C8~xB*Xv!000Jn4GjSG05s4UgF=7?fF7pQc}+9`27oCU3?m~(6G2qJ-J{@Ele;B|%x~X$51Oy@cn`5AV&)K`G~C{I zgbk3~yw=wvGsPsFB28tPkgnk!$7qIbDB9zQk5*Hb*yytzVXy|7Cv`9t%|KV^69>dOt5|(YqrkVw!HZcfl zCFeJ95PFxB;v>|46h@jZK@uO5ha6f7DXQAmPC+O1@2 zOFyRK@6G{S9GG0+0Y|Dlr3g!Qc%#o+GBa>O!Ess>Vidq+4vfXfqhcfpBhO%slr;mVf`E@qaF%xJ-rs>799?MHoM?{f>% zI8P%$CLWfhU`a^_p3dkwg%$t+cHaj$LJ*j;y#yf$LeFLZ00Ov?r`p62K_f&lCm$Dc z;Kc=kN?`;xY-l7w285Pv0jnjk6r#jN@RG{H7XH-BhSz&U8kj- dG_tJByovf&vT3{!%Ac41F64@Ep&-4&oeV$9gw>S?soYJ*ijM4F+X0B8UJ01Q9?05k?in<=7A z28NmjgG~U!004TQsu=)1MvR#~AU#K-9+9Kd>v(@ipA8eth48Zej%kYs4V7$Z!9guxjG1YtDO z5CMo{8W}QalOVu@)B%$SXwiWN9lq*YSm8_TSj7s0SQ_Sav}=81PaBf)_W$QXqiUoi zeR?w}_9$Z=AvUCxijy_xq%;AXsFqd#d`ceT%Xqpjwx%nV%r;26>+^l!jl!c<=**j4 zq+*YZi8%%ZW8E%djTGk`fuS`H&{Kko4C`QuwBEeV9s@yT+4WvJW~~S!0U;oQ2!TjQ zAc6=eAfSQ@3>iouf(Rg#5Kv&yQWX*s2q1z8Ac8BB5`$0_aQ-}50bK7(taZGVK#wy# zx!fb|Jn;}EYW`xX4#T6a=WzK#buxD_1E+3Ms!=FF|GgX6A$GlzNhgm2V{*Ev;F%Q; zg32~tRiWuO)e_22tny>gQZ_!;kZ%}#24a|z!Jt?Kwj`?}zjzAJ+>|x5P**G>3KX2V z>qKA@;;=uAamLfUds8?myRkqfq9*J_rAi5|K;c&)lUx+7$OO0`A-~Fi#aPYQZ_ipA z>-|VzC9MrGBO;{UDp|J*Aw1)tmwLf`#Y#f3oC85i6hR??s>IP^^OImvpGU$HkEt6? 
z54<3$O>{kBM`X7|Mw&54>_3hLVI71N*i4Y%mNnxbXX_!Dl2e`|T$W>ejWNL>coBve z9?23MU8T+!BfCiUV=w_J5|nO* d4o37xpV^5yuy%sX7a(2#7ji{7P>}PNVqE2_ikSca diff --git a/fixture/10/2/14 b/fixture/10/2/14 deleted file mode 100644 index f632189bffc83ab09179737767f7181064a1c4b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 943 zcmV;g15o@zT4*sbL0KkKS=*c=EdT)J|Nr~@|2zKw_y52B|MlPdfB*k~{=NVAzyE*# z{{R2q-T%M>8FD+O-H55^CcAoSDFBT&f58ZaXW!VNUj zKpRuYF#({+0&I;FK;DXB8%;ynP>QFfQ8t2MG{nTok%@o-Gz=yXG-6>i!Lg`-36mos zh`^c*g9y_m+9F^k6HJT&Hinu6WYY-1(g>-EGys?g(*iWvsi&wzYH5Kts7#GC(V792 z^bJ75WMCr!3=s5~nhhSNCIA`(Nq|pC4^U+BMgctp!K!|glrjOP5uu@@AYmB>gFs{r zFov2iAT-gSGy@5vO)_F>r=uvyX_F=h$)ghjXc!=58Z^QIlSBF5HGux>B=QsTXn;c@ z?cFSRL;&{X+Odw}zPZZ+!Y#lyFrN-s$HQz|)Zmah1u*%E0&taZ+VFt5l7jNQ>9ft0 zsh|XJ3hF}UN5)n#kyQHA0F`A=2ycm!x5+}SwK=?z#31*s8t#^`aaBl<%~BKnbe$z_ zs(*2Yhx?}WtpX921%U(-p#?$}+^Rln0t7&Sf(Rg$2~dI&0t7&Ss6hmDkRT;OR3L>= zAVdfVBLTPM|GZy~-UR1VYjV!x0SS&BF&Uz6vzVPhQ=Asm${YZ|o2ylnPuQWc0LEV6 zLahv%`X>d3B2q%~1t>ZZQdpkCWFa8UE~q|ocE+5LW};w1AQmW(h3WE&2A1KFCgbT- zU$7((9sy<#f|M2%wL@V=VO|h?5;8~y5})J(8cQ%`ElR*3n=FCiP7Hu^h!BDTfi$oY zCJ>VF3IpIlC{qd^0m~zaK=M@! zUpql_6bdKe#W`QiVc~}Um{A`LbCm#)G-CJ$kd{B`EpcQPNkpR0FtSb7)&c#(GK>)V z;sl}iAsRGiQR@6o=Nl3MM-W!P9~?k3RDq{pc=3q=KEPoeG;}}1S^VR;cbauIt?2BG znJ>Ey{^);yOy{4XG)&j%+1cZE=3th2@MRHA&TgN#o{fZJTEB}*28*wba7{SBxHu4o zV1fuBf)31?GQK@x5yu>Vy;FCWtYE=(ZLp1{$tHVUsY#c#AOGFhmbEUX8o{7at^rl7zs32HSS$%#DhViGZ+Z-Cji4hYn0 Rl{Np1xgwk>NNvs$mVjOltD^t_ diff --git a/fixture/10/2/15 b/fixture/10/2/15 deleted file mode 100644 index 58849ffde1d47a734a79aaffeb4330d7315a9199..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 942 zcmV;f15x}!T4*sbL0KkKS%v)QEdT)@|NsB}{r|83fBpaZpa1{=_rHJs@Bjb*|GWS9 z|9}4e|IWYy3g%*PBB!R4P|`4f+L~ZZ13+XN8Zj`MFn}_Cq&!bVOo50DgCLnn!(27o<7O*Hh3cMgS3n(?bGcV3?Qy00dx60GNYJ37|3>Fr8=}SF1+%!%mZUd|*ZgrO?PHNtg~tB>LsPwxEBtr(Ev#lxjt&1(kiZ=Qx~ zyc>OLSsA@kihEiocT%P3G~2o^bI=Qr*d$BZX=;IdD{!S5-5f!XCOacz>)wyXg~up9 zl}BON`O{W8y8>_`0sr1g&v~TYK zn)*t&_=%d*?MWa9-^hRKTW+`Cpimw5W(f5oUxc`76e>khmMhgMOp8oFV9Nx{qlzKH z-~rn*A}Yvb|BO1a63q>vvqmlroI z1Iv}GOq3;PlLbI8Ex=TTxU36RyQ@WHp&TS$2o+u~Y!M0qR?d`ymmOe4DHX7M|Myc% ze*j%U(19F!&eS9_q4o}Hw)(!Qrz)qe4{d3GG_k|kG)v+#x)orh%H!)Isqv{Fnk%HJb-+U5-6f)ACQQE QQ)hU)k}1N3gfHhoXzn4HM*si- diff --git a/fixture/10/2/16 b/fixture/10/2/16 deleted file mode 100644 index df44eba1660423766a0545b28d1e60b971dcdd2d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 949 zcmV;m14{ftT4*sbL0KkKSrkhFqW}Si|NsC0|6l+AumAq{{r~^}`~UC%|6l+8`~Ua< z|NH;{|9!v$8;3c#-4RsrO$n1m2*8*C(rCZ|GGxG*nqZ9@8h%vG1ZjW(KxAZThLZwe z9zY0SntGagX)ufcX&4jJAuuD#5mfyYm;o3BG{j;sjW9--jWIMZ1i=8r-iSR&8W<)7 zG|0dwpwmWx00w}JC!hc(O&V#mO))gnPt?#wO-)ZxfiM670%*V!OeUIQV3-JcPt#LC z1i>*dMokz@1YiI(1k(_}695UGqa?vF2w(`>2sKY?l{8{#28}ct15GqE&}adpK+{Z^ zMnULgMi2pz^g}~H8VwB%8fmm9(E*X54Ky+sjXfY4q+|f}fKWC`10i$9uLAjb3jozKt4c=oyYfAh2))L2KtPHG-JQ>I}#|anNKK2*FLoEt|3S z28MNdt7QTnm1yQ{b(fIpfFgT|L?nVpBoIg;5ky515eWnmK_n6gBoK(AB8U(YR3!aT zRY);vg+PE(T2aX$MFc;@wA30HjsV`E5o3k(PtnE~E+kO);u3sh67xZE`9ZPI31UQk zr(lsD3a~o_2?mc05P%~eVgh2cm;g2fhyn;m{eZ2$qE5AnzT(FI9(2&)%mBdydPr-HRGIw<9_IXlnC4Bu& zdRVNFe~IM9J7NJ@=1$(~G1i;sF|lCte}}gA_aRsnN%E%5o2!$V{#SAb1rop7gP1*f zU@vDjz|N^aYgdw!%byy5aG}nOJI{lF$o1D6pf%16YlJXHGJddafD~2mKG-!?IA9+^ X)#&#sIFTZNzwvh@Q-uiuM6en#Or?vV diff --git a/fixture/10/2/17 b/fixture/10/2/17 deleted file mode 100644 index 752b42ea74ad248563eb305fe690ab1807529c3d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 899 
[GIT binary patch payloads elided; several stanza headers in this hunk were destroyed in the dump, and the recoverable file deletions follow in git's binary summary form]
diff --git a/fixture/10/2/18 b/fixture/10/2/18
deleted file mode 100644
index 9ef7f62c8d9b3c998142ad6576aa992e30555edf..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/18 and /dev/null differ
diff --git a/fixture/10/2/22 b/fixture/10/2/22
deleted file mode 100644
index 014f210a384073bb80b0a4a1cf5852a9b2538485..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/22 and /dev/null differ
diff --git a/fixture/10/2/23 b/fixture/10/2/23
deleted file mode 100644
index 602e0b4cad454e2510f85f6d6a1a610af84c8ebe..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/23 and /dev/null differ
diff --git a/fixture/10/2/24 b/fixture/10/2/24
deleted file mode 100644
index 43f0ad5afc18d81d1ac9970dac20e21a841a5a00..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/24 and /dev/null differ
diff --git a/fixture/10/2/27 b/fixture/10/2/27
deleted file mode 100644
index fd4fa76c11a70b5bbf4aed20ac7a2754b1c89514..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/27 and /dev/null differ
diff --git a/fixture/10/2/28 b/fixture/10/2/28
deleted file mode 100644
index 8146743d744a969be2a01a09e720da280eefe42b..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/28 and /dev/null differ
diff --git a/fixture/10/2/3 b/fixture/10/2/3
deleted file mode 100644
index 94433a02d9ed3458449c8392d330731d866d6f3b..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/3 and /dev/null differ
diff --git a/fixture/10/2/30 b/fixture/10/2/30
deleted file mode 100644
index 012abe57774bdc4bb7abc4165267b84a03b54563..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/30 and /dev/null differ
diff --git a/fixture/10/2/31 b/fixture/10/2/31
deleted file mode 100644
index f4140756513d29a90973b8789363314645a75ab6..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/31 and /dev/null differ
diff --git a/fixture/10/2/32 b/fixture/10/2/32
deleted file mode 100644
index 30c092f1293384fe0d54c6e5a242a9196e078351..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/32 and /dev/null differ
diff --git a/fixture/10/2/33 b/fixture/10/2/33
deleted file mode 100644
index da09b1483383949536d330d2264f108ddf399e86..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/33 and /dev/null differ
diff --git a/fixture/10/2/5 b/fixture/10/2/5
deleted file mode 100644
index a2be0a3145dd9f90b7357b39a684f6fc6dcae61a..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/5 and /dev/null differ
diff --git a/fixture/10/2/7 b/fixture/10/2/7
deleted file mode 100644
index fbe504f6ea9eb7e08aaf858df45325b606ab1b38..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/7 and /dev/null differ
diff --git a/fixture/10/2/9 b/fixture/10/2/9
deleted file mode 100644
index 509948066db30aced8e3987f98040aa47c9506d4..0000000000000000000000000000000000000000
Binary files a/fixture/10/2/9 and /dev/null differ
diff --git a/fixture/10/3/.zarray b/fixture/10/3/.zarray
deleted file mode 100644
index 7c96637c83..0000000000
--- a/fixture/10/3/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 0
-    },
[the remaining nine deleted lines of this .zarray, "dtype" onward, are not recoverable from this dump]
diff --git a/fixture/10/3/11 b/fixture/10/3/11
deleted file mode 100644
index 0c7b53f71cc16bee202328c81277ea3d10b28c55..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/11 and /dev/null differ
diff --git a/fixture/10/3/14 b/fixture/10/3/14
deleted file mode 100644
index d9c2b1a636b0b93317b99452737cb539a15a68c6..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/14 and /dev/null differ
diff --git a/fixture/10/3/15 b/fixture/10/3/15
deleted file mode 100644
index d75b4e18464c6606391437331e12ed9c05027db6..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/15 and /dev/null differ
diff --git a/fixture/10/3/18 b/fixture/10/3/18
deleted file mode 100644
index 8676baf71742ff187ffc542bbf8ae4df80928ce1..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/18 and /dev/null differ
diff --git a/fixture/10/3/19 b/fixture/10/3/19
deleted file mode 100644
index fbed7565d0088c79359f91dba8d2969c0114f47c..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/19 and /dev/null differ
diff --git a/fixture/10/3/26 b/fixture/10/3/26
deleted file mode 100644
index 75c308f80729a30f86b8c43763808e63fcef7c89..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/26 and /dev/null differ
diff --git a/fixture/10/3/27 b/fixture/10/3/27
deleted file mode 100644
index b7c172848a7e669043beb83a470523518bf5beb2..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/27 and /dev/null differ
diff --git a/fixture/10/3/28 b/fixture/10/3/28
deleted file mode 100644
index 6b8b66835ba61eb9a9162ddf92c6e9940bee4df0..0000000000000000000000000000000000000000
Binary files a/fixture/10/3/28 and /dev/null differ
literal 0 HcmV?d00001 literal 792 zcmV+z1Lyn#0gwnF0{{RZ0{{RR0{{RN000000{{RhwJ-f(AOf`)0A{heIxwV`0`+=8 zlkEh(aUp&mfyU%(S!!k$Wa?;3y7SriR;a*2WyCoTBFn!d=8?z@x2GUlVlYKc+Fp>c z0H*+{08R%mu&|Q(6sEXwqE+=JI5GDQ>G(`oLnf@Dk0c=t!{OvVc(b94F^1m&E6(kT z_zL>KL^!+n2*Mm0L2iBHA-FeMNq&JL910AX@&!o3lL6)B189;%3f~gHla5_K(J_7# zvrM^=4CMDv>VpqK2K+uUlUMgf<+qWt*+|*+yC^Rb3c@$PifmBVtapA1|3cW&8ua_I zJPZ(#eg_SmU2{vnh0>I;;3)GeXuKk5yb^_cu;>vidi1{#&|Q=Ey{f!81PH|EvJjY@ zR{bq7NYJkEd>T{`+He>7GT@LIV6EV5`7rhnf^vxFyU?M;56i`8aS2Y$J%0&3<19w< ztu!Ny-ISlg$)KUL%eO+ZSs~f{DlJWFS}}f!DZ4snfP4{XwiZAc{iL}!_)7R5?zF{4 zn?46F3xUb09|g!J3XqMj;RjccsUaVVQKGDJ$-khaNg|;NKd(Xx0PUg1r>>vNRgr0F0{jod@5RoK&XV=_PE9UPQ)Pl*G)E9gH>3Le?hdDBW+y;K1 zvchRr4!^JK*4K6O`Icyy@Fk7Ew*XGlOVHP=eqoCad^}K$5@nTsUfmm2j~_mr6H-Z* zsQw&3&Wx}FW{P|_evoQBQSk3}M4@i6d^LzV{HZ~GyCQWmfY{=r5g_*b)8m(sB9@UN z`ePzc>0U+obl0T0H+(Q2?zF{4{V!>F=w;FQURF#xcK!HQz3LaX=*uxj$T3IwSpxM2 Wi#mKO+1Q8d69)nSgFvRJ3rqn4vw7tJ diff --git a/fixture/10/3/29 b/fixture/10/3/29 deleted file mode 100644 index b73644907742ce12682ffb0733838b42ebfeb8fa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 795 zcmV+$1LXVy0gwnF0{{RZ0{{RU0{{RN000030{{RhwJ-f(AOg)70A_K!JTRn|0z-4} z#7_{ve0XDYLev8_vUHCVu;Up7Ku(0)bt=F?TWqAG&H(FN$-Z5>sPhVsH1j|>Q;RD* z39|sG0H^>yeeWQga zmzI(*K#Kz=izIviR9pz}a`;vBw*~dp#4jQ)J!QtM-$goXTCjfnD&BZfyz%6BkRQDg zOIdyc0clah&xhYXM_e_~I=_iY+7=+&_IhX6_*VW1GjWEzWPS)2 zJ$WZQ_d=5bd5x=l}DI=Pc5zWV-B*_dL#lP@$2WE_o z4@Kp~qT`~EoZ71JyWmFgym&>bdp#Wq|+_RVDglLc~zK zPX2R1B5K@~FZSld4fD*m0b$A1rSrqCYr?K;eCP6M=hgMW$Qa#d!T-XNTVk93i#H&C zj0HX$tvo=4ihde6St8t^d_Wq8(yA8zA6M;RpCF0ASTTX>cdrm!B;isJMr-6h^xl8 Z@=;g7Df(~B3UAB`4gvs!K&GGzOaaLEbN&DT diff --git a/fixture/10/3/3 b/fixture/10/3/3 deleted file mode 100644 index 2da24f454163bd62618fd4c91bf579d750b8ca8b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fc~DJp7=~Lcxi@LCMbo0l+N46pG%r!9C?YNP2raipQe&pnZG=oCR1(RQQdd$W zXZxL_lw_?Y>k!d3NJB_v%{k|r=llPe_kHzrPZ(J1>*z@DDE=ZLP?K*+;psAK5ejkl zx`*{TQ!omsbvolkLGvtSi|2X@9-DoRevP5f`ED#JNTZMx{raQVNeUCbYeXp2?2Gp( z+V2^hjh3Wyspa+iUE0ruH zi(o4jwkvP_(|2Yev}et(@?;SzAB!KIc87<@usFW+OKr89#q7^a*ZN!*&9VrKZNL3+ zm9Q96o2xdqvN&y^6=5-74unP9xPASblpItAOYIa^91LuwW;m3{#c+@{LOHy+t`gyp z;O06iOwGZxQE$QMTn?2!54O|g9K74|PHZC_v_WBYMy(uDUr8n$CWjA*aG*ZbdmAeW z0^%|gIZJ}igX>CnI1yx|*tDB?63B6c5Y%N}ci5#O*kGm>AsEQ_JtE`~WLuA?BgzRJ z%A|!5+?`WlR@Xuher@lPs4fECMT=v{4-gd8E6&y{c(^Wqd^g&XN4vO=2~Ip>10$z3 zc<@k2wS@4HS}|AgaQ}Fxsxg&EM{JG=4=Ihw5g33IH+G;CB#I6gF~RegW}KEnNlidlGZJx_W*M)u278 z8}v)Kr9`-;8J&2O?ixNISm+lc6SBKz{=iBYdg= z{3t(z7Be@vUH%0s8C}|d_!ZV9GID0|m0WRFg?IcU9u97DNxp;{Y2aLm@SVD1PF*qI zfse#!T@jy2NsfwE7=I~=hd@O`|M;X0vB%^iurj6w_Vf)f1Na@yd}G@$z7>x;f{oB$ zb23UcO>LN@WYSW^Mq!x_(!C6j!5nJMG)!OB8~o=_4uR>;S)h%WlRms bXH#+lZ~5j6p)hzD0|0{%=2EPIVlFTRSMP25 diff --git a/fixture/10/3/31 b/fixture/10/3/31 deleted file mode 100644 index 21537f68ae5e4253ae72a24e405d8d491c3fdefe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 802 zcmV+-1Ks=r0gwnF0{{RZ0{{Rb0{{RN0000A0{{RhwJ-f(AOZy#0A?}0K0u=l7CzpAdlL8?TUWygT47Or^{b(l%U z6D0w)0I2|~077nB)vG4IgO+~kj#9sfbZCflXnq5QurNZp#qZ%5FjwTDUqcir_U;h) z?F0Bnfmf7YJ}}Z$C{g|HDQq^;WAmFQEH6)3o?k9el&M+h_fDFi8-GfE>m(idVpioh zP;O}m3Wwh~Ol7LLSigT=SQJs?^V|1S?0YKuZNso2 z2a*~Gzr&RUThFN9+-5d(F>FTl+zz{TN+Lr8or&ef}OE~kLB!cF$?vtEZBNR#r!N>7tJV&PX)seHDBKNQL-=_REPXI z5qMH1lP|aBlG}3maan*47Q|;V$A}X%i~mN=0M=*6cRL&d=8E*&$fiY4A$&FhiB^L$ z{3QO72pZr>h4Is%oRMJ2#~;$cMHe>7Kf7DZLZ^JQDmFky8-7`vCTAAT_Ypn zh|m6x62V`k2mQ7& zyQ9^I^t*=Wtcuu@Uo>wN6pd#1J;T(p1bFq^HzkFSG(Eq2(032|`W2(6hM-XC7fgu@ 
zEQ&n&)!S;7NN@PflfiP6eKlUjRcB5-u%& z0F}VOrJFyL6z1gf;sf}iz{`rq50a9C#VCSbK!8qw0G<5$u@a1?+3>sgiUonjIapnx zY|`(_vj#;a^|!_}upPgA8UpmTS|xrKTXn+Lo`0p>bEVwl%V@$Dh3|r2b+kc`^@LyD z5+=zx=1)0Q?f9efTln<}gVx4hF?A(^0`O6ApD4Ibf9gf%$aTt>@?#|!P4lD90u?1% z@u3jBbzuelC%6xH)T1wL_Wd!6@JVE|NMy5oCMBa96%l^~7&yOUp09KQ$$|FJ58=kI z$_2qkx}w2kDfI(BmB7KJ8=fyki_EIY(5Fj$K$rNycS2T&h>iMl1frl*FCW{+EelHG z$4TJvr3x88$rGi47>YjJhdb(#>Az`BylGANMV{=$*&6eSr1_CS`NU_dPT1OGUd*Rn%cHc&f% z;_pA|h6$ncF+;ec{S2h?C(D}%I-JH?(aVf8>pQ>@$Wz0 zYheSm^Y1^hyc%gL_U}K14b;w``0qbiUX3&r`|m$2uSS}R{qH{j@B;u3K!ugO0$MNy D2Hlj4 diff --git a/fixture/10/3/4 b/fixture/10/3/4 deleted file mode 100644 index f0f560c5bbe203bbd7a280f18ec334cb6dbdddf4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fdrZx77{;TvQp(Kglt}j?tWZXHDm9{#uBev93MCauM~W`Prl@uhwQf?aA|{v6 z_i|i{5|IrFCn^*n$&vW|&b!a^|7XwhPMnaZWvZ^GCdOWf(l6my(#q(L$KJ~)EvQMlPI|1C45I1z0wK+%0c)5di!h28go+XhY)$&MWX1E9FS@7ibP zNuhtMF#VK2MZ~{DMmxhO>fNFQD8{qARvE@qc(pim4@xNtCQB0KnG}8N#&imDC>-$asEliF+BvW20ID3X`zhsILtaF(rG)?+}W{o)FoY77EcK?kPp!4r{r`Yl`wZ zolVAl6yJu#qgb&>Y=lB6)ud2^;bmhg(^vZ4j$vxEqX0v= zj&W5H7;1ggu-lVCnfsx0wLin^&Id6jVGJh?>xBszR2p#%;tIu5hBQTZ^PWtGHY<epe5hcyfu3q=!#^?Qi`L-CDs{cBqo2FH%3nLTB2cG+d{ ztBc`CMnGL_FN3O%AqGqBS+A0m3<=&E0vzPTM=c9I4q?RNQ3*$gf(sqSfrNP%b|LYA&yBZ5sqLB@4Msba#Tbv2z{n2$B$}>fE+HWLgdJ?SC|Ic G$?+dBM3TS& diff --git a/fixture/10/3/5 b/fixture/10/3/5 deleted file mode 100644 index c4348b42b30f70f693d4f9c660d81b9ccdb89bb6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fc}&i69LK9iWJ*bhq_D|4)H)QYw`20iv3k(7O41=jImWuQ%Kh|2uFBDRMxsd3 zy;S-hHOEp!x>$6`5r*}+f4|>nuh0Llz29#g?bCXeGA%9f9fa@N+gM@kLSS8SbFCH# z4*bmMYm*Z&j{^yX3WABrfdT{}bV-3<1VPalt0lHE1i!uY+qL5eywktzdXY+y)$&~T zmWrUyq~=meCV|sNwE#i##Ul^w3JIEfc7({v2+U{sjUQT*-01{Cc{o4o?2P|P9A zyB%gy>>F((K+&+$Xk3>wg`w)Zy2O(r=t+m$Q8`87#QNqy1%<{hFTyd9!t26`KVu>& z&UQx&Q1n`Eu(=yc;Sjd4;aDm~V!nm{CKZKf;gm@+b>7gCv1*E~yPM5l6;PB)N1-#4 zz3N0Qg^znuZ*UVu&cU!)msX1R_eBwk1-kb6ulp#D%1u1Whbh{WnrBK4#Vjchh8Sb2 z<7&WAyWvK*{uBoN>~x1-O9p?b7zRnhnc@2uPXUHy*Jh`=$r(<))0;hB!O&y-sp?Gt zgKf-HpQ><%xVx1D>Cp_0KQ0R<43ie73NVBwM&2}3F_hG=S=M)+;jhf8qdJ#i^$OG7 zrwSOPI7%4?ANS_C*D|A2 zVbII7sxy{x_zmg{a9pwZ`r+PW4sji4EIDweX4*Du4yhJr4zU#zPmZ|>QLo;u;Mia1 zE5IS9aV~&kg4gt`+rv46k88Uxj^+?8CdG36GJF*IK7~WRuH;{R8pnC*INrXDw~NZ< za5NW}$|238l;g1^!eKUGbiARFW1IC4i>wxo@|~Rm9HXQ_I27OV-kJ_`b zXWzqBD83sY~= z5`vP$2c5Gj2xJu-8^Z{JhYVq>Y6vn`Sd20KK+u}3liSlmu!y${5G0MQdzIZsQ0r9| zcu13C`njwDH$4i)yM#0|LyB_aVC!B}iZNR}9zD0B@XeJAP+V)7+!5|T(QP@DpA z@`2L@Dh8GRddt{shA&y-LKy77n5~;r!Voi4yG>TX@M71`__tLIQ*KrZFa&j!-i~Wv z5ZAGxg+VJk_M=TZgH%f&gV;*F21gt0uN3KWEKZmtz#*pLYs&FPrzyzRio<-(vmq@x zhiLJBDMw{(s!g#Ahn~@)Cy8Dhe$sK|XQIn4fa7-(6`%40Gr}+jR1nw=nJj`Ap~!0!vzRd{MmLSC6*v&y~FI_M1t0Y z#fA143A9^P?B`?=?432jJ{1s@d$E0wN(rV@s{{xIt%S<5oDf6AbwUH)?hf z*ye8b8|ouScyIab@gM;=)jpdxPGGoSAwY1jw7+VtGR4EuCQmI5ikT}*K7Q7v5H0GA zC<<$$G%uS|e4E;NGt`!1opcmQ@uofV+$ml+tA!2wP%Kj$Q)=+1h?Ybs8dGYm4}?=_ zcICFMkEPhHlPEw@CIv#Fxayt!G?T*ZosDT`0Y!$1{-cAX6ayjCTTn$|Eo#(Loc!7* zKq0rfqtn<)VGwnxEUS-V|AS+j4h>RND*}2qj#DT*ZH&}cQ23k@O&Ic;Wcf`phOaXw zR%h!nIC#90A2wn*ckxM(vpGW#zcw}BmO+Zci6PQAY}+zS4exFDS2=glF5?=cL{UpgvU6ByDgE8KIE8Qw)^3NTnyA3y3=z#y)}u#}d(-qPKLkkeF6+(8jA)QuCii_$uWjut)%AXzZpb}d?k+4 z+1n?M$T+&*IGp#;UUV diff --git a/fixture/10/3/8 b/fixture/10/3/8 deleted file mode 100644 index 878574d10b44456531c2da2e89161e08c8218f36..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fdrZx77{<#%5*cSMsm!9zafyz^a(l=zmnBj}35&VbnUhJ1h(a69adr?HB4sL- 
zHFi!GD=FpoeSc9Mxvg@|F=<3;<@@`+`@Da@&-*-2Kl^io#w+aXr1jz3y(aeb0gHg8 z=fftfd?LW~YM$AH321fm2>xpopxNT^<#~&Mq#HJlfZ9*u$b+2%#!kMU7i7nV%Fd8p4zg>2pGzadVpeT!MTSgz=494&iD1{cn$# z6LhkngpsA0$1a2rERn;4j;$v|^(DK+L=qgYOG*fZ0XsN^un(^iy`u@esewz!=n2`X zvJr|nLSVJ#-S-57{O%D##*oqb%Tovo3(`1*&#QWzGSUg@KXr}AjRZeqbC&KR;myoY z-G(cK;|-Pm+APAPc>e*@a|q;;CxHW&ahd(9RJARZ9p7bhwo6H2+<}!jZKyrj& zZjCTqswQkn6*vURjlC}j1y%R{gj)$~cYBpDc}tK~__PpiUHc#eq8Qmr6jVE&nXr?eve0|tGW A4*&oF diff --git a/fixture/10/3/9 b/fixture/10/3/9 deleted file mode 100644 index abd9f793d8b739b8a7131140892e0cd3b6acf6ea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 804 zcmV+<1Ka!p0gwnF0{{RZ0{{Rd0{{RN0000C0{{RhwJ-f(AOaN_00!~ALO@&=0QKK- zYMqcCIAX@+x0H;Hh=9t7o14@=4#bD_2#yBK>1e8(eF=Iyg}|{No7g0Chm1Cc7a2S- z3=*z#o;wn&0IUF}04Ta#D4w}Uh&a94=Qb)_I7&g!+qS+b8P&6v4*A#DzUGerhMLFlM48A4rA};e;`WIlUfXTBJu>+H<@0kk14K!;? zJV%wXQKjs2REafp$O50AFp61zQO`{s7e3bl8}uCeUiP@^!t!||jXfDK2z_6$QupMA z+B^9MAo&L1J;82??I`tjFozmqi|u^`JhSr(y@!OnQ%TZy0pxJ++I! zf}paV16ie!Y3ZH=cf^TMG3j#y);`?2r=@Em-j5E z4^a-Mre`+} z2E4(N4YGK-dV4iNmx#o?tx&KsTgUenjW=jqeOImEJLTM)dM%<}i{4T+7Jzwtyk&5a z5OI3nQ)_FL6xorcqhbsOi=A75A@ArEQnani<$9AkFw^w@zCF-J~7d3;pcb5z^&_&~P;+R*t}$UBuJ zMSDElyLRCKJq`fK3>31(JJ66HI6=1WKnohCU*bCq!YT}6#@jEd$(Ix`Z$7LvM!^Jc zy)Gfv1&%kK+49Iu?ky)v4KG9z-fhB68KKqV{jR$sAchjd`^=vvaFQW!xC2!y`0m}U ifHrh;cym9zS)%3oZc9-BIot~a06GaVqCqfI9{?$~lXuYo diff --git a/fixture/10/4/.zarray b/fixture/10/4/.zarray deleted file mode 100644 index e59bb3f057..0000000000 --- a/fixture/10/4/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 1 - }, - "dtype": "DGS?_J9|rxky_hr*UupW#6o15vILg~nxZbB95 zyvQ+|1#Z|vtQCLhQoNJM{4r*m&;>efqu32XcdXzQN`JKJ9#V(Ac$2Nj;`|peOlEg^d3k6%F3_GJ+BeUgFW`IX%X$pr-NWU z8TQk!jA^0~f9+h#IHwhZ@?g$As~L#)WYa;f9*p{F>!Ml_$bUBNrCbxrfjID|UKPxP zJMyVu7tVz}^s8bS(1<_wtz;Y1ib43VW**f5EiJ??`NYUAllfl6D#&^*;FCuAtX>hs zcPehkA$rs;hu}Js>_(sYQmh4DxDji_9(T|x!EPJKbRo`qC)9y0+J`gXjXLI&KNSY2UbW@u||Z*g*Sbar=md3$_)et&?0fr5jB zgoK5LhKGlUiHVAfi;Rqoj*gFykdTm(k&%*;l9Q8@la!Q{m6es2mX?>7mzS8Bn3+!IQHxbG%il*bAgVVsI|k;-Rk%S5+5u&M^s^Me2JE$vA)aLy}RoW}4PBDYN8b_<|2%3t(} zAGAr{a|oR>$6WD-8?Z&%Zv>ey#98ix7_CCrYyp-lz*Xvh6sbSdY5tQay;A3V5vDuP zW%`dKxlZJH4x>2DV)u+8woBl33ZFE|Ui66`vq;^HL;A64kn2aH{#vyWY{(;j+cuKz zNu>Z>w-av4CV|~Jl<-TZ0$#Zlam*-#-#eD_O{oN6yccuMDuv-anDkGp2V%Y%bHC%(M z()l4sZE~VebQ}sqh9T7iS(fi{%x@=tAl>dcY8a< z`cbFBwOWR;1Qq^+nfEQo^Gsh7rwWu)RE`L z;yf$Xi*quJ|DT$gsPp4+f0WwCy&b{wueLS%N<3L#4Ow|%)czyN2>S>MHVU9ZD zjBlDab#%k5)`Dm^18KI8caCkG(_R$gZY0b8 z>F$Zmb32OTJdNczzuZ5$Wqwy_yqBp0_tyufw=L{0OY|{U5;yC`SO*k_ACMn3Y`TOH+4^$006DUHwyp& diff --git a/fixture/10/4/11 b/fixture/10/4/11 deleted file mode 100644 index 043f541a21574c8bbfd96f4cd3720dcbccb858cf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 653 zcmZQ#oXDZT%)kJ`y-W-YA`A=+r9fI^tNz~v1*TYGhA*l-n<5#O`+c3`{BnZ%y;hye zRSL)RME4|fZ4Cdn)aUao#}|F3cbc^>R>&XC5#E)^u|Dk2Vy}-g?4R|T+-lS~UnYAb zTX07_+uD%di#*;>vwhNSbhBRVY>D)tOo45&EUSZlE^vQ4Wv11gI{AZ1EX&+qcI%ug z5ZoO4ZHCqDTKNM>%uC&0bm^SU7u*!`ZMx;{8o2|B%uC%~bZVc?6WA2|b-Kl^8rl7c zOiSFJcWR%>d!KXdK} z9x-1DG@|5L9G?`Y`Sn0gxbnN5tpPI6 zH`jTIK3G%k%y(;Pp$+@xd0D26XJ#bp|2jG`M&tATt}w+nJDUBao^7o45Wc^v%#ruT zl6-5ni*qwf{-2(fr2FGYf3(`Cy`3TQueUY%NuU#APixExbJP)MdUO5I+8Ir`5zcxN%8tl&e*R--=MfZ>hU#&0_X&jSX=?9* h>)CtgI9$(yk7sILga=g z$!iR*Q1O#B)_5AbUH_m$hbKEBC^V zStanihf*HtwSY_)YL&F&hT)+{dw; zhjLv-N>_^iE~~_G#?4{-^#V0igjR0H6RQ2;LEn5jCNs6HzEx8;?20Cjd){D zI3o`EF)FR{Q?IJ5f&RCBq=UhUu0>OnxCZ5)!N_J*gppV0Hh*_ 
QPFyfxGav*I0>&j&8fB~}ssI20 diff --git a/fixture/10/4/13 b/fixture/10/4/13 deleted file mode 100644 index f401b46eb2feb868f5d8a095047dd2bc98caec7e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXPsLObKx)`t9EBj2zYmj)ua-;f ziupU=@llJ~@l5VDejg?pUn!H=8U1&j!=q-^;~89QeBVzrx>6>-GwRPg`-jac$1*rq z`@WlCbh%V~N7SFW_79p=j;3?2@_E;9c)3JuN96Chb`KhrkEY4=GnG7#I%B_4Wjbfg z`@}15J9Xv>wtUOH<9E<_nPm6B!pC7Jtk)?_W~+D|cfon9#%$h(=O_8Ke_?f8}Z zAn2(1D%pOfQjigwRHk#)yidB~zEfwuV9R%q5hlwdd;S$Fz1`X3Fa2y&t*6L?)n!h+ zHh8lO_dn-#Fc&;zbvtqq4C*Anp zsi}!NKMwarseatk5iI{|Yom|EoP;^HAVivy0U-O)P}532W>Hi*VhiNnbw#S?xZ8m^!CP~wKJOYBAoRk zncv+!vVLYuew3@e6zhlEM>ouFD~xtClwtpP=lG_%?M1O3#o)J$d+^xl3s-O5fAaF(=O2F<+2DH2Z5-ix gipp!?de&~*z6Yx3#*Le|?3o1^6hzNvbDuK-0BIL9$p8QV diff --git a/fixture/10/4/14 b/fixture/10/4/14 deleted file mode 100644 index 56e6d8e66e1e42f00283e8a15bd8c44d42c533ca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 632 zcmV-;0*Czq0g(tG0{{RZ0{{Sc0ssIM0001B0ssIgwJ-f(AOf`#02CyeLFkV*-GnO9 zcpt`X6uV*wuU7q}OY)gN=8ZJlf+^5;9mH!AxnKvaRQsYy@RvU1i!#}OC(d;n!f6n< zUInaD`JhMcl|18#FxP%1%ySyRXAZSo1FBK?o<;1FI^l>e)qEoes!Q#UFw%4tw^{m} zKHq^O#AXMnOYDv?(Q_5HSo)hi-+&^-We2HB?2Rwcaul{#`I|i6fFZ+W2B=EwjW5u0 z6SY_PnmpZqA;M$^r%CIKF3@okwO06?cAA(IAq>t4`9wkVqp zfb(Y9OT93o6^Q$7;8MjnsvVF5b?92nKe8p43VrWl)JM83pAv-kYTQr3G^ZJi|8eA2 z$vmzhlm>b1UeQCgDVq*~^JdsfzA&Q}i2H8fQ^h!%W-`o)o4C-c6FRUz!SgH9Udv3y7f^WMm_o{4l`Ni-h{^xw+0po(>1OEw@2 z_TkL7p^SH8OgAD8_~OmDqm6iDPB|kE`Q*>Jq>p-KPdX(K`{mHPrjdJQQ9LIR{O8iW zr;>eWQavaW{^--bsg!USnu& za(8@zhKrDunVzGlt+Kbg!N$wb)Y;zS4-^?5N>5Z-g^G`qz{Sei+S=OM+S}Ye2LJ%1 S0zenIgTN!;5%7o<_f=_nBry>H diff --git a/fixture/10/4/15 b/fixture/10/4/15 deleted file mode 100644 index 784b5e51f55a400e2c0481a895912a503c4101a5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 653 zcmV;80&@KV0g(tG0{{RZ0{{Sx0ssIM0001W0ssIgwJ-f(AOc+z0OTdgaTvd446|AR zs89BsL+X(@--a#IdLhVf7QJK&u~`46P4t^W=#Mttge%f{AI5DIyJ88jR{f+)@|i&A zjWpYWDbRNw#A_0{U5DDSa1ygs_n11|eILML1f@smiY(4=5wleGm^s^h9>8J)rAFwAE6r~ZvQ+h# zIof<4zhMKUM(Bwv%x(~|Q}vcO+ItoDPBXX4y-=F{2iV`)=S<#yG1U zkOOw;Tg^bTC726+@M6_Sx-Fm*h4^aSPr@~)8jb%G->P;&4A`S@ItJ05XfyxHmSHXU z#E)7g@x6#tAM3V(O&H{_dqxu7sdPUJ)}n1W1kjsiGX2SvU@P~+jaek{yN6O9>9v4M z7vipYMG)Mlay|*wplmk-&Y5K}{Ku1CD)qp?s+E9iRzNEj0qoYnte1goS3)fp0`1tt zt(b#uSwk)v1n=3zubPE#T17A$1@PO(v73f*Tt+b*2lCvT}U$?2=m^_w4RA{ zUrIC|3iaU1wV;c3U`sY33-;m6x1o&B)Y;zS=5Z- zUSnu&a&~)wg^G`qnVzGlt+BSdz{SeX)Y;zS=5Z- nUSel#advxvg^G`qnVq7itg*J-+}z#XKLii}AQjKyuv^?TmF_kT diff --git a/fixture/10/4/16 b/fixture/10/4/16 deleted file mode 100644 index 7e2fee61c71ea03bdd926cc727f6d0aa9828e581..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 648 zcmV;30(boa0g(tG0{{RZ0{{Ss0ssIM0001R0ssIgwJ-f(AObZL07NDFphxbNJmZQn z*M23;a~i>D4z*kZs!{l#MeLJ0;fOERd?U(n7{6u>vswYDPxhTe>XA6#hAq^4A;@nQ zy<`irS^v|gdQBnp$eV9M7U{f@V>b%kv4mJF|I(&;O(65fnr=Z-^p-c-dmO%C0i#9c zh$_o%53o}7l{eUW9KBxwqD1D0Da&jQuu=1sHrRR_y(?PaEd3eMl7Ft9C*S*rabe2GX5rH2-DGF_!#d z$SsrkUc@Sp^;^LwjPh8$BZ==+x*&z?QMVj-Vof(94fx~DxTB7FV@^3F5BcQJx}}eL zWl%dM5c}rPyQYzRW>GvR68z`Vy{M9XXj48Y6aMMczNwUdYE(Ze75{bWVAV>%HLD<% z3xM`(;8x2)wkx3)i~)A+Vb@H-H>@JNz{SeX)Y#qP=j`$J`v3(B4-*(2A}1^{HakE? 
zN={Q)U1Dc!advxvg^7-nn4O}htg*Jcz{JVU)Bgel2MG)g5fc^|8yz1aBqu2>E-^DU iI6FN*LPSSNOioeW-{3z5005){@C!KEaEEkp36%!w7cLY4 diff --git a/fixture/10/4/17 b/fixture/10/4/17 deleted file mode 100644 index 756eb192bd4fe7249d1d2ae194270213114e14ee..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXO~FLw7QnQ`9T&=Rio@8Z%5 zANfzmX68DGJm1}yXv%SCeRG)R-wR91z2!a~osnZN{A^cmf(iTWbxom~f6gy1^OF5| zWO}xp(9@kg3C3)<);5M{{64p+)Klie;pthnf=_mI#~ZQT+`|*Q?tj3-k4}@Hn6_S1 zuQ(}{wHqYlzW^j;0unkQ2@+cS461u7e650h4@|zD5GHwA0 zNo0bA{>};GKhbFYWo^2`&B;Ej`zs7zFNqbu(CPGNYo5l#Ss}c~8Z1ApNtL@X(TjO+ znf}W~(W2+t9e-`kRl7eki2F#L`G-}>GS~V&n0A-wJzo$de5Td@$Hr`xyVCBeV1o5X7$d#)3ydf>WR zu$F36kwh~{Uwt9ELHDQ zZuuXvTCX~bzx7wa)2IusyA75~PhhM5kaj!pnDs`rIf89}3ZKVZbl+pNTxJq`?Wgp+ zK__fCY0MSw_!}G+5fvMsl#-sAlUG<=R#8=3-_+9H+0!?1^3)l#=FVTVWckW9>o#uQ zx?|Vg{fCYmKY8Zdh09lO+`4=J(Ua#d-@NlqlET3FfIJGr<$c<|t%J+lCVg6OkbLK#v31Nu73 diff --git a/fixture/10/4/18 b/fixture/10/4/18 deleted file mode 100644 index 4bf52cc80d045784d5bef5cd49ab8e0b4845d069..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXOT8iLh-om*JqA^rZ)v`kxp z$J@K&3|Viit`AcCd3HgGyVSdbQ!{M%A8qT5HDI~Ex-L-l$C>%XZjx^gOi8!qd$hG9 zMxXiGs@edR@2BS#xk|p-KRL~c_u-cIXg#K@D{BIjznz*}=r{j^^_?q3uoJcG;#Eep@8#pL%T&t9j_YR=DDx%S!&z+YZZkimux(=E%EZHJ>&6g4rCV z^s~k@|D~MLpIZDP?y|=| zX@{-a3&gq^%3dd4^FC;{MsXTXnsxQWh#H0bi?QZ@+-x(1@tm_@tDy%PkR9s3% hUQtC&Q(Mo#*woz0=HbJKkL;NR7!*W5+zQH&0swpjIRyX! diff --git a/fixture/10/4/19 b/fixture/10/4/19 deleted file mode 100644 index e49859e290af2d0e40718e4d1c2fb09fadf8d84d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXNr60i47O10#9u(>@-m+{Jq zYJa7#C+8G6i@(}8F~x%W{-(A_U53lctNawdo|v8QB=&OegcNhGdmCFLbpBsjR_Uwo z<@oG8N6{C1`jgE#?`~)b*Zy~DX@!sc=VP;S9Ymh*?n^S|d}!2kS*7%tc=`^`sMUXb z=DxF^@W2oxbW{u^wCXoVs2?PxTmll}hy)3}wF3$@UIGcFZ3hW?&jAS;fP{{Sf`nH7 z0txlq2MHz1UhDT{+Fh#md|{OEnKt_$o3d5zO%LEYTx0frMUwQDUU!C_#X3*tM+ly7 zvHiXwQ~AzRKaPV{CU2J|NM7o3{kNk~>&e_O{*z7CU)QB8-kR*ow!hNo_0l-;3ti5C zw&iO)njOOTJMoy#q#v<|HT%9q9Z>E57`8{L<8AOx`Ic7!TcsPH`)rb^d+fPhr0Rj& zYQge5PAm9|Z`v>AD!697kUjU3#XRQB^QN;H(#{x8|C@9|Z_3a3quLX`#T-=c{T#7R zrSpC0ZiTitL4HRp*Q?CrZ~2w~BT`wK{}#Q7z2vdic!lg_j=Iko_kvH@ZPuJ8-1)EMW!x3d{U$5r zr*PGO$+{nU%6_Z8ql=rTw_iX|Xn0gid}2ykW_DgdQE7QqO?_iaTW5Fggh^AU&zduT z;gV%5SFhc$Y3ue~d-fkZa_q$EvllL1y?*P?{fAGUy?Fih!>6y`fBpH-$jrvU%_|@z hDlR1>uc)l1p{;9R^ytx}$M(zu3<{#ZZcWON0sxL%Iz0dY diff --git a/fixture/10/4/2 b/fixture/10/4/2 deleted file mode 100644 index 415bd3bdd6c04bd7d1b24d01b9dc44ec109cd342..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 664 zcmV;J0%!dK0g(tG0{{RZ0{{S+0ssIM0001h0ssIgwJ-f(AOf`$05m50F=(IG4Mcmg z^pGpZV*Z&m&};{zKH7B=tVrX38n#jGh$O#T_L42hWB{8t(ryW*K-_l|u1e*B9k^8Q zizdQd`IIorX9Au%)o=`_L*9B7u}tWLAG%iYjVQ!l`<61zX#}8BU~PJbm7=e_b8K5VvG`c~^PSsoDY43LYg9?xrog}Ltg+hOQ#^?U+}50)LHDzP_m2b?j+TkwS%S0e1UflL?S ztad>Q)}U)O0LqnLD)hgJR3GWIen}JFsB=9C(w%2A{Kt`8C-J<7Q5)v4dqoi1rEfX} z&zWQ|`NWP{Bks6^O&H>>cR~!;p=>t+%$8v*_P~pb=tH0SRKIxS}>y|<6m_zQn;tDZXfTzy(4k9*5U2%k%tiRLe`VJyBNnLP+ zn5)0j=K2mHHA!4>gqN$o(&qXNAvH)`aD$htzS8FT4Iwm0TyKMys=m_Y`3)d6NLz1% yma4wd<@pRCG)G%+f|jbi(dGEAuCK7Mva__cwm%6F03a3aIK~Ng9OHyvTZT02sXyib diff --git a/fixture/10/4/20 b/fixture/10/4/20 deleted file mode 100644 index b0844ae4612d779d82aa0ac99b5db1df64389d21..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 
zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXOf);EV~{k^!P+*|I`(V008 z!q0Z~CYo^EUe^?=`RBsoaxdABM`mQ(3q9T0lVHq#Yh7cA#_#is$~c7q{EcK9ie`s2!t-zD*-EoGjH`de#tNlE?p!kSr+IIGcmA|~_ys_=O zZ_s#A2_&=)B;++4B&6Q}5)w%T3H<~K_1*&s6&(f%g|7e!*?@#DD1wBxvVnwVy#@)@ zgM@@rwjOV=`n)Dh{>DUamc8W$FBiv%p6_t{y(L%u!OS3@qjeS^SEa~ao8ZZ`r&RC7 z!YJXhZT3GmWvkws9>{gL*6jVtBjj>~w8ZrCp7%)4T>fHnJ~`5dN< zv&J+3r<^jF`aAKs&ZHl4hc)}ZL>*A;{usVjspDPnPWjeX0o$Y-pZji-sC(kMUbO0g z+Zw_0yG|?kv=)eVF_gYexaxhtY_-BP?#6G~55vwlZqr#P-ose_Ch5A*A@j9L(|MbI z$F36u|zL(#k=GieupjBDbM6<`I+}5^1RDV{UuWUELHDQZ}}g!+Mqg1p!Ijb z)94FsyA75}Pc${Rvaxq`cJuJ|4G0PikBW&)Ois(l&Mhb^Ew8GnYiw@o=_oFLG;P3OBqrCSIaq8 diff --git a/fixture/10/4/21 b/fixture/10/4/21 deleted file mode 100644 index 9168d638345c20e9eb162e7c5d155d8faa3f71d3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXNi?$YlLP0g_3f4r?L)_~>u z>bfA+pJ(P5yGgx0I3?Yh@6p!I7z5^Ot7-#Pew?0HD$S<1uo*R_DxK+U<+UI!*kYao8G&6^%oS2 z4hg4hWeZyl60+_A2`LnUgjhjBGhcy(>du3NlDB|_JU~KsbU{J~g+M~fzJr9iK|=Bc zAR(4eppf>HxnTk)n{2+W%TT;E#h2|srO}(EapD)dod0gi*L*ZPl<#<>)#tTo@;4@V zv+OH3c(pi2?0kpQ?=5-i4`v4Q9IdzbxH?7l`UFqrJ*E0D7DkDjZFBg!DM$6*j6kj< zwPqhyCP`l{xoN+YyWpD5LiXIt7V}uL&YR9=NIPRR{crLKy(z!qk7`f+7IR3w_jBYv zm97t=yA|8s1nrP*e(ASGs^O{kM)8_Q9&3dw@4KuLD7)>roTuo9-D1xCD^?5GvM-v? zWy&~bJoA6*DYoj5X?FsT*=$swBiR0@=y}W~_q|5TWhb%Me#*ETbi#JC#$4f!f5k82 zE_?1XUMV+)v+hgg{g9LPTeRkjbp0=V6@S(1faxlQsoV`;vmb<=cG#x9P^_D=>`me| zpM&OW6sPkve$Ua?)i*LRx3sZybawOb^7Ri236G44OH5A7$jZ$xDk-n5scUR*?da<5 zpEPCq%sKNGE?&A~)!Ox&wrto@P*d+_+_i&t;ofBN$M=bwKJ g%&Z(-y!=8U;*!#GPo6w^YR@ddpdkA0mQb)10A=|$eEM~qeUhSv&^~9Wf zXR()iC#0Bj-`mt0q4WRpvMOJNFUM!+If=g9)1Pd{b$4S+xc0wGODla8J|CNv>nQSK zcVChz=ba7B;aYz$E~)UA|8#U_j)U;?U44lr9Jkjug=zk|F!QBV_Z{83bMgfTg_1V2 zgf9E;J_97AQwtP2AP5p#`VA!1bsHp<4-#Sy0SUdZ1PPs$0|{*c3As%N32E1Wgai^n zLSI2bowxej8Fv-yKARsYbh_2<`^GHgJJb9*4pp1HTb>|wxySAQjw0v0&1MmM?q$n)ELj&!XEUaqF`Dr=`K11oUkOLGCw`APq|y61 za=%K~hp^p>?T^CGIBnNmB;LzZ@iys(?_rCz$}{+yf8;%mJmiUX1!5uwqVT`uU{uaH6z3ic&tg4}f5B7(qf) zpMiv`PRrzeTAdT<8!|D8qJPv(USoNBiDwmw7Y_7p$%169Uvmc~n5 z>~{IPy+HHvoHs!`m=TrDz<<55@yX0G62X2#Tdf~fSvhIo3 z2GQz=Zfk@p?mF)^ULiM`qwaI&z2K8}TQuj1bp9`S8GprVzsV~3sa*A6vmS(=a@eZ9 zK(w2o>~+F5?*nG56{m4Ge#?0ncE)kL&O-4Xrt&vQ*L@F}uT`4C+w>#%al~2Y9eRr; z`j{)OfrZ``tN=kC1+4jnyy>dg6zSFYW-efPnmr_W!# idH?atw;#X%GBB~Sb3J?Z?72O&0E0s6-fd5&umJ!cb2wB0 diff --git a/fixture/10/4/24 b/fixture/10/4/24 deleted file mode 100644 index 2ad2f4c56e2dade8ca4553263503f7b1df09c3c3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 650 zcmZQ#oXDZT%)kJ`T}%uNA`A=+ML=3(tNz~v1twQvhAXPp{ExPF#uzYPUsV^V^5gXU zA~(r52PUUm@jcwy5v|X3ZDnnM%J)C*g1<(D>ZBe=mSC&`%DSbUTC*N7@)!vCI=G^x-wMObN?BkE$z!6x$6_XnD>_H zzgQG4a<1Ls*XA6x`!j;LkJOoeSeYzywcmqrcZu%v1(CvMTJ3*q%u=~KEr9b-jp@4; ziBea3-56H#m)vq#%3XNfW)Vl;Wy|?2Sr^P^Gp3(0n(;6Br2f=j3CFZ2evdt*(f1{C zziQWqusw?HZ-aKowY>D-D&6qRXQO!SV~=$rRrg(23zXe)TFzU1!)^&z{#EORY&n{*$QlF1z%XO7*i=eMr3(aMXH(>TH45-v!U2FSzYCTqZq{t>$Ce zoxtNZ8`b9swf`x49(&1suh9zGNgTDGGwueTu-mLLPq_16@yoa?p8HHz%1_~}|B`h- zRLMbhQ?+V*0v5#uI^qweu2SZ5z(;;NvY{sIeCS} zWfj%64Na}>ojrXMCr_I(d+vfoOP8-)vu@+&Z98`FJ#gsA@snrHUA%nl#_hWg9zA*f d^3A)CpTGV1{g>hS^XD(@foW~YjcM~7qyV~#Fv9=< diff --git a/fixture/10/4/25 b/fixture/10/4/25 deleted file mode 100644 index f052881bac934af5c9225a61903d6c63b3ab5325..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 649 zcmZQ#oXDZT%)kJ`olFc2A`A=+g+N+ktNz~v1twczh9jz%msR;HemOBa&q?&rNw6xMk;q&oXd5$74cK0Wlao*X`9Io~6;?fFl`A^4Y<~j&J-_@6B%5i6X zQ<&!83rotqyUBxC{-Iw1)XTKf+qH0d!&s2n83 
z9Rm{j;5cb#k@nMh;ew}{ZNIJ0RJuLYkNrTE@tbAw5|_GN{%tSNdORnL|3stpm$m5% zHz)hB?yoR>y(CurLZ{Q8t$7*`XNB+{Yq0#ZCROgnL@(yOW%@4{MT?$mcl@$%#$9;bb`eM370daoSr^RaFs7e1p7Af`r2f?33CDCMevdt@ z(f1{4ziRh~usx-(60Uk3FkP)MmAm0v_Jgp~j@z^siuEv-y-B?8bI^Q^(sZ7t@41h{ z&pPeUT_n-VT=6#fhTma}b;>jOnt$d!i9F}BQ-6tMKTFm7lw1Bstk$c};&1&`@HFaz z>u!Uk(i7OKKcwAeVqxdx;S&@VlaQ8`S5i^e)X_6EHnXs{wRduH_w?}#3=WHkij7Z7 zNzcs5D=aRnsH&}RYH9E6>6Wo=)=Pz2aeC3*T8#iy=v1{-CLr0FEJag{C<*PSt e-M#RdUH)fu==mF3rjqt-yfQqX(RA>dsmzx%Z=6bL25tGEGTxD zdUtSYhBg1AZJn_O%-2`d1*-lyGr!19^6h~s>DGJ?w|2znKQ?K-s#bnNGHVxi%-Vl` z^FKIFdTiVR5|YRS3H=2LO?(6rDmxAmie3W}asUZkQ3VO@(Ui&P4;2iUt#omNu2nFPUk;c z^EDpM4&gnn)c!Vjr(DY`|EZ=6RD@3~8qgr~gShp*Q(w{1L4Q-=Yty_kN1lr`-8IbeBTgo51a|O)vbmNY+2~ z+8|c*(0#3NMc^^(jcRiQ+x`?jkGbf+$7s3CB=*`*>34%p*lyC8E8Ow7_(j}hkA21~ zDuXyb@T_r!2tKn+gRi7Isc~^B(&-`NT?qqq+9|L;)nzZy|n`h zHC_SDTvmc3;LFBip#p6hV@wIx^W!OS4;qjlyVS0&3{>-S{ZU8?td zVU+NhHv1o&vQ_R)58ygnWA=VUlJu2ccZQwCI#1_E2%c`S{k|bn`OZ{7j)PSuZVey8!SXv!EBK0U+ArlQxMsbO zJ@=BuJm$>vraFtndzs4LCf)EoWU*Fx25<9^yvGseoOkLimh5A$e3x?5?}+7km6`l4 zzw)0%op;@(zf@`hYt@I;+W|+dH>k}PX!~9GEc&9`9>Zla6WMD%rr!xVZnH^!u2B2m zq8G83JoXx|ke&SQ!{@I*e*O8+$il|S%_kr%DlR1>uc)G?sjX*VY-(<0WAEtV=IQMh z5EL376%(JBl9rjBS5Q=1UR6`y*wWV7-8*5@)akS4%wM=<*~-;xH*DIveb=7-2ag;( gar*3qOINSox^w^G%a<=-*)t0;D2UF<_MXiK0588Yy#N3J diff --git a/fixture/10/4/28 b/fixture/10/4/28 deleted file mode 100644 index 4ffe4ce864ccabd7c249a1ee60cfd091cd9b4ecf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXPG@*G89?CDQ35 zj)w2SbbiTCVyk1 zH_P5~gO`hAM9+6P{@#+S{$OSh&(S)IkE>E-uTAh|+Ec3cVqui<**5#1o3d5!O%LQc zTx<4zWs>xjzF7=uXAGzRO*)}BS7hw2Pti zb;4Ee17@ogrg1lZ%YGPk#&Mg@Lh&BP@;6D>eGZwgRhrJ*^dt9C#95~ux{D=xnJeBU z-|#zZxlVZ|U(3(DC(mBIe*59m*YCgn{AXll*(&CFlp-anRDhXT)b?>>a`m-ZQZ_e h&%T3)kDWMu_QItr*I&JQ_1d0UfI&fY*7a#hya0PJH|qcZ diff --git a/fixture/10/4/29 b/fixture/10/4/29 deleted file mode 100644 index b572e7d96e6c4bf3396076b81dabda6ace26462a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXN@Y&X|51grl#x3JVh`u(A4 znYIFtw|B)EvffxzAEfs4?1B<^sdopbX4vpQ-qsmwz;brzxI7aMzhtuyZdFl^l z2J;-PxA+*oSE=J&@J{*GR{`6k8=w1blBj#)xn8vDf!i9v^1Dte_)2ctFXb+{X0wnz z_p-%2maOxpvl-IP7)}41d_r%^ulS?d6TihAQt$m7xlg6*L+EbBwl_gLWSd|5ZINnt z>b+6C=8?zRCz0n}cIq#Y>Sw8XpL)yxsMQA5Spu!U3!X+_aNBLLOnM?)^~bb3fyZn% zs?QN@|5Nll=92qfqvf)b*lRy!+zmQmyIEtdaL2#m7jc(8_ZhF0o5ET5CG&pBN&78Y z^F_M;m%fU>>UHzZy@!vVy?Fih!>2FbfByN$$jrvU&C4$&A}%Q-r>Lx^p{=WLWMXb< zW9R7X=Hcb*9~2TE855V7oR*Q5n_pB?URhJu*xcID)!RR5%Ji9Y<}F;jbj7N*>o;xL fzH`sM1BZ{EICb{*>(_7WnFSaWL}#qib(aDF(djmM diff --git a/fixture/10/4/3 b/fixture/10/4/3 deleted file mode 100644 index f24be385d4d0c25fba475a27f5611cf39b7120cc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 664 zcmV;J0%!dK0g(tG0{{RZ0{{S+0ssIM0001h0ssIgwJ-f(AOf`$05m2$*K!W2Mc{lF zvrg)TA-q`gk1EDt{g^b*YX+h{*>n)AN8)}NwNUJbBfeVoku1n#|C%<@Z3v`4+jbJI zN#uYVw^HtjCBR(wlP=0-0h~C~ZwjVD-FXzSOXq?fxmECtC&OO8M9WgxU*&A`c>snD zlp3KYt~I+t$WPW><7)7C{e%mV7@j4pGPys-P19Q7XYO?Sfe4QkoFl0)wmrm3(O2GO z>vH&i28Iuv-VRT?n&Y3bkMhwqgyqV-C4x5W8j(y=W7@X%xU~ z7Qt*6!)_VGZyUyO9LI7V$#fseb|K6RATvi>Zi1Gnz0u_O3?DN`T5f`sslCwT_zNF1 zMp|xxm8reZty1Tr+zCQ^N03gYG9OHyLj&Z`TEn6D4^*i4H diff --git a/fixture/10/4/30 b/fixture/10/4/30 deleted file mode 100644 index 4bbf72f8e6cedff6ef9729d5e2503d5e03639aa0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 
zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXP4<`%k0yxu=4)spAI=JqH( z#w#nT{gu9*oKxT|{%YUER15C=o7y6E87?oc@>BeJVotu3*vq{WQp~yTZETIu`G0v? zrLV%5h0tuZ12_8pM2jJt|;pUsaH zI^AmbePfpLooW6YhpJ89El-fT+~f9tN0IiEx#0pQn{B?X&rrNQ#h3j+rSY4kaS|81 zT>fq=(0nu}RJiiK%PN7g+m6e5if-5~=FGohwSX=AqWN5=jB~~_|EHcZnD!^}xbCDM zaYr=!zeXKU>-iYISGnU|$WHmz*8$sP8lU@amZ*E;wO+LPf!i9P^1IF}`ATltFXb-0 zX0wPr_p;?YmaGG&s}!bkH+;>05O&&OoAyGnZpN}ViPwA%ny*ou&eQlk=TZ0>r|r6n z#Cw@4-X`7fJ#4X7c?MtekG#i`=bU%yEs^YFseGSu)BlLodX-uHt-tc0MqO~-Ww2Cw z0&De$wA%s4tT(EizjWpL%{%uVJbwD()!X-K^}5AU00ay&MgRZ+ diff --git a/fixture/10/4/31 b/fixture/10/4/31 deleted file mode 100644 index fc6577913fbaacf7e1c003ccf21a2765e051473b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXN`rkr;+G>2*Zy||>pTkg}* znK=%^&v*4EnsD4+-xR9(=fdK0FS(CLW@Ot7J=@unV9b7NU1O-m@AHewJY_!|o}OhV z_;g24yb;^YwGAQazs@Zz^^kdgcv_~dz?1FWafYln*3<{99S}^~#2m8po7?mkmR+~C zYtG8$9}q|c34H|#b>0FA!S%8Gj$by76GJ%AqJqHO@gM|1KKtf+!K|(jR zKtlUKLVPD0t-h>HlfOC1n{{8g;j6{5V&^-Zes9fFe=sYU=UBbPr`0KP*C%)}?%u)9_ z^Iq^tyDggYL^}VMyo|r%wcliw{8X;`uUQX5PdRMWULe}dQ1&|En)d;-)r!-&8^7f| z3_IhvU1yk5OrCv8!rU_b>=i3 diff --git a/fixture/10/4/32 b/fixture/10/4/32 deleted file mode 100644 index f7901c68a92a2332f0e618cae4d902b59eae7fcb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 651 zcmZQ#oXDZT%)kJ`-AoJ&A`A=+#XwqPtNz~v1two%hAXPS&Mqi%mwtC>YNieU<858B zhAh`t*9EEmJhPzKP3rBzDH+y$kG6Kk7%*R7RU4@C1d}tlpf>N6*c}!-%idga1npCFKF=>*J;nqJ8o)KpOMMk z$Dgo~5hOJA8Azz=G)O3Z14zgPBy>X)B(#?gB(&%=NT?koB%K2iVh98YJv9RfodOBP ztp^D?Pbt!VGA~@JU7zU1ythpM#iD4DbL|elHs`3_pAp1; zq|W@q%4C_IPvLu&JKu%ul5c$-xJ{<%h3{s`x+h*6M5`aVtr4oY>%5Y`4s-M8^voMm%mB6?t935tUGOaWg4=GxWzrMbYCfjj2|RAIQGJe3`=6rcv6tNU z8m*9>#8LY>Ug2M!%Qe(KEmi&w7QxPAA*qo>bby?Ouf%eNoD|1vPKvUBn93kr)# zNXsfHsj6$~=o=cFSyk5OrFw>n#NUv_v%a diff --git a/fixture/10/4/33 b/fixture/10/4/33 deleted file mode 100644 index d692ddcf7ea65b13503c56f582161e609b8ce57e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 262 zcmV+h0r~y{0g(tG0{{RZ0{{R90RR9L0002)0000ewJ-f(AOa-^0JIE|WjGAqvxZqM z0Mw^@P9gQkoNq%G>b;O;Hw)gfg;^{B1|#K+0Z&e7A>*xTLU;^gP)?C$aN_W1k#0PpYsKL8340G9;6m#|?0lmt)` MKuG{40h9z#GU~l^{Qv*} diff --git a/fixture/10/4/4 b/fixture/10/4/4 deleted file mode 100644 index c933c6c0d176ae5d3148562ac1a344bea9d5d624..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 660 zcmV;F0&D#O0g(tG0{{RZ0{{S&0ssIM0001d0ssIgwJ-f(AOe*W05m50l`+g{1D`t9 zaSf>6=JadOqu63gU}vqx(8LG@ji^&D0RaTU#$8_mg6yh%^GO7 z1W}*uJBZdKa=#5*srE#Y;4OQ}7i6*kPMhgBh0-8yya`vO^FWSL*;M7+as>C26ehGj_TB zK!e5#O_0EmOSUn>M?b5iNf@e)K7yROky66={ai7fkC{~2A-wiuy%VPPE zC%shbf*G(!-gOJ2InZbRl`O+r@P{6?OyhbHs6W+g1DY|&U-pe7x>4wV7Oh0vatNO` z&1CzMDZp0kgd4I+;CBwCJJM+YmoCLz^NApLBF%Xu&w3@$d?(UcEw2K^E-6 zmu*5A?ZcUFL>ceIns7xM@Wz~RM;!9VopVSY^vR!eN+0#hpms|k_spVqO(Xcuqj^pw z`p>0$P$v7)rhHN-{nDs@Qz`z`see_w&*Jt99Wg~%Y=D!eyUybF1P&M?EH*z$Q(b3q zdW4RcqO7&Q$Xk!Qe0+m udW4Oap{upO$kN>A@cIJ{7a=RYzP`S{zrex5!#@TP03c<(g)Oo!FGd<95k8Rs diff --git a/fixture/10/4/5 b/fixture/10/4/5 deleted file mode 100644 index 9dc6363299b750a5f8e46d8d4ff3c3bd31e4a865..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 656 zcmV;B0&o2S0g(tG0{{RZ0{{S!0ssIM0001Z0ssIgwJ-f(AOdw000bu7DtE;bVXpj2 znB_Bp&mC&G22`Q%J&V{Tbixl^tNBKh;xK*78E3QtP@d~Kht(o*z6@EX^+S-}EPBTk zW3m2Bn&>ry(I0HO2v(!!#QlRZ~xB5MS#0N@_(G*yl z-XdkF>@RY)`8#Jm)jm;rRXbevh_H8zXC>y%nwtR*&Se`=PGTn^f!9PtdVzIK_?9H*1VyHYf&~E 
z0OrfEm3v@CD-iYCz@>_BRXQI8>d&;9e`HB76#3r7sg845J|hV3)3}|3XiYO1{o=^2 zl6PG~Ck*n}yrG9|Q8yd`=FG8{dtpW^5cb=^rHgR4PUU$i66F2Obx zQe0+ld4r9Yp{lgL$kN>9@A(4_7a%G$K1fnqWp8YPe5C9;>g)OpNTf8*Tp*QjX diff --git a/fixture/10/4/6 b/fixture/10/4/6 deleted file mode 100644 index 52e4fb034cf2e6a236ecf8c6dac342ff064cb53b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 655 zcmZQ#oXDZT%)kJ`{Y(rDA`A=+04)8^<{O&sGfnLo? z@zl)>esf-#w_aAv+rt&H^s{6C9j%IE!inqude3-f(sW)ndna4Sq7Sy+H`PlH3B;}b z=|1(5LERb2^sUSR^WIpsUsW#L#}l>UtMkPBI#nmck~jYM-N)$Cahfk^>J91mMNc(z z)_*pu+{@tHc8WJ(%5|x@g-_J8*L^gt*!|zB^#qUqq-&Bf3m&Uwt^HtJzU!}J%W-bM ziB}||=RH!*So7YfZ08^Q=3|_`6E2HK&3&MfzUrM}$&TN4jYmVR6W**UK{)6ZDq-{TW&Cs)KeY4Co!v3F5>ZjiYQ)AKVMr`II9=?H$i zb8uOAVVJc7+v^KkXE&sJ8HoORaCB9FX_UPR*ZV6w=eK0~nMnRy>2N)dWwz1DgztT- zdxBmzh_7?KQ_Qu{{9Njv$yx^^-nYtZ_Ig;szs&YZ7UK;4W3gYlm3I0+s}))8bhChc zuF2`7pA*#gg}!N$+~9t%lxMNkh4g<@bq+^;?2zB;^SDZIh5fZ$=9&2}QXh`aE^w3n zczjNgyX@x^^NKy?znq$1>ZS1Q^ujV9rSE4KRro3YJh!CEU-j38rPYCIe=aVs4bu2~ zd1YOQ=Km|J8bY-huB~Yd*I~M`wmCwN`R4kTD1Fx38``1`+3##>k2U5IQ_wQDbq|bA z&8ukYnYLi%=Do)+-hT4#2NRE&ye3E{Dg`7nbv{Vu!Yz;twQ0g(tG0{{RZ0{{S$0ssIM0001b0ssIgwJ-f(AOeLG03;@w=r)AWA8oq` zR;2PjjoT@9#1dew`$?DNGJwt-X}1MZpzb`0*Clhn4qU4DMU&w!e99PQvjI<>>Ntke zA#c43Sf=zskKHSI#uQ?({Y#nWG=k6`YqMe1!_d9;U1W1d{5>=So zA7Z8HEN`>+IeowbMvBc4RF~QwVWj9QZn5<^d%prjiOdh=$*z-mUPCAj^Vq$kh;33h z9RldhvX^~gM=TQe+`*@eaaKGb2JF$cn}KFaFc$mZ#jB8YT0kWV@YTAXg=$YU8vo?U zuatRSL@5sR*}kKRZc{iO1L)1On0;bMEfVBL=zkWiMB8!+pf}BA z`;sZZSMG%yvPs}~4y8QPY5A-GQEd=sic*lz}%Gswy@rM%{D^ zqB+oJ{gf-hS@4G*wM*i85T`!XYy+7v$6oaa-m{5bGz#CfieNSi;kJxoHx1&rjbk|u zJrn4@lxaT{>cEw2Ko;!5mux~8?ZcRELmBVHnr}rL@Wz{Q zMjZ0TopMMX^U0ocN+0#gpms|k_ROK=?e_r-6(1-vJVsDjV{LbVi^g|nBXPeCTB!C!k>4$Q$QER>|EEp#n?mW3H{OIR(s>}qZWX&?39wiGrA+df zK2 zy#Yjr%MMYM*c)D=hUy;jzcHj2iTrNiRK_{1 z9+3lf>08c0vnH7fe(+<~NV_he6NUI|-B80ds2h!pBf3%Oeif`k+HnY zQz-t_see=||LWDitCfIjRzWKk0qobote1jqS3)fq1MS$uu9<^wSwt@x1n=6!ubPE$ zT17A$2JqX)v7Cl+Tt_k-2lCy>vYm*##n0H{?DhZ(6CEcpJ4H@dVr+JRiIbhDvAe|1 z*y8N={{#sR6Br#LCoC~GJ3mB8PE%N2VrOh|c6xt=iH?()oua3#v9-Ft#L3Rn*xlmi p?C|ya{{#sR6Br#LCo3>DIzL3t&d$%!(LV(c03em)a559NuxYT^J*EHv diff --git a/fixture/10/4/9 b/fixture/10/4/9 deleted file mode 100644 index e91b4f3ce76fd20e9fba586f0f078c78c7436f09..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 631 zcmV--0*L(r0g(tG0{{RZ0{{Sb0ssIM0001A0ssIgwJ-f(AOfut0OTXAQ~IGu@0LB| ziZR%KCe3sj!DtV*T?DI9_@74Ylsn>xFV=k|%W@gNW(~Ak0;y2;okZ)BIpBva)O#Yy za2LL03$j@Nr%v^oLg|n<-i0jEc_7Db6})2!uvh-2O!JyR=Z-Z_@R2jsb{M)_{hvVM zgeAvm3an1=ku%hG7r9&foya^+acJ+LB`2YT&a(nYr_ zoezTaXWC4^GNc!Y{BPk@$2qJYkpy<>T+TtXCYcL<@nhCWyDp&+tpZ{{#sR6BisICMz&CIzL27PEuD~VU3cPoXE}6*8>O*64BDrKL`K-Bmvq1 RoDA*>FbE(75W>YJR2soeD%Su2 diff --git a/fixture/10/5/.zarray b/fixture/10/5/.zarray deleted file mode 100644 index 2e158ca6d2..0000000000 --- a/fixture/10/5/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 2 - }, - "dtype": "B+CefK5|BVL~W9`IT5*KM}|^5y&-RV?Xh3*MpNFW@tar6?yhv3u|pZvb-8; zD)w)2KgluP?Rs!>b3Zu+FQ?$;bap?f*neO~TX%Rr8LZG5tk8LTKZ$=K(-B5|eLwnA za*TJoet$nTli{Lnn1F#l?fi*)h(A4de5+D3iHSe;;3S9zZiICpJmz0!0ryp5fjWm^&KZ8`Q z%;wRRmOo#K0Xsl%mX<#|zE!E2440Qb7xSqX^Qo7YKl3O4g-l17m_N^4QuL+dn3+GT zk)~q*fti^|( zo6peZv$OfkY(6WS&&cMpvH47FJ`0=Az~-~JeCC$V+VUA&K3mIYYWXZJpP}WmvwUWj z&&u){Sw0)fXJYv*ET4hpvqwI2&iDH<(9boVVW_7kybJ#$A$u~D?-XO(&O#r1 zi5$< z@B7BZR*elU?VjlYYGl9J#iK^!C!;>MJT(l43Ra$~RAbSYc~nQY8oMXe8Z0y=LD_^y 
diff --git a/fixture/10/5/10 b/fixture/10/5/10
deleted file mode 100644
index e52da02587527edcea4ca6d56e3defb85c894e66..0000000000000000000000000000000000000000
GIT binary patch
literal 808
[base85 payload omitted]

diff --git a/fixture/10/5/11 b/fixture/10/5/11
deleted file mode 100644
index b3e9756f14ed044f64bf58a3cf407f6782858ec4..0000000000000000000000000000000000000000
GIT binary patch
literal 816
[base85 payload omitted]

diff --git a/fixture/10/5/12 b/fixture/10/5/12
deleted file mode 100644
index f0449581adc6feb5c13fb3666b4813c56f33cae1..0000000000000000000000000000000000000000
GIT binary patch
literal 816
[base85 payload omitted]

diff --git a/fixture/10/5/13 b/fixture/10/5/13
deleted file mode 100644
index 739cfd43590c5770f8df535a6f6a7afc85c7f20c..0000000000000000000000000000000000000000
GIT binary patch
literal 806
[base85 payload omitted]

diff --git a/fixture/10/5/14 b/fixture/10/5/14
deleted file mode 100644
index 28ce09def01fdd75397de77b1ca64174b0fdfd31..0000000000000000000000000000000000000000
GIT binary patch
literal 808
[base85 payload omitted]

diff --git a/fixture/10/5/15 b/fixture/10/5/15
deleted file mode 100644
index fab49c58588c17b7242cfed0e010d9ab36b07065..0000000000000000000000000000000000000000
GIT binary patch
literal 805
[base85 payload omitted]

diff --git a/fixture/10/5/18 b/fixture/10/5/18
deleted file mode 100644
index 53c25337fcb7f9c46f97b966443d1dc8ae3da995..0000000000000000000000000000000000000000
GIT binary patch
literal 797
[base85 payload omitted]

diff --git a/fixture/10/5/19 b/fixture/10/5/19
deleted file mode 100644
index cac3a01a485e81271d9699be80ae8f1307643782..0000000000000000000000000000000000000000
GIT binary patch
literal 798
[base85 payload omitted]

diff --git a/fixture/10/5/2 b/fixture/10/5/2
deleted file mode 100644
index b1fcb3b44545c3bcec796a4b1b06c761003fb211..0000000000000000000000000000000000000000
GIT binary patch
literal 816
[base85 payload omitted]

diff --git a/fixture/10/5/20 b/fixture/10/5/20
deleted file mode 100644
index 6bb0977705cab0936d231069382be6c3bfded6a4..0000000000000000000000000000000000000000
GIT binary patch
literal 795
[base85 payload omitted]

diff --git a/fixture/10/5/26 b/fixture/10/5/26
deleted file mode 100644
index eaf26bf39acd6744550cafc4229a2a314876a6ab..0000000000000000000000000000000000000000
GIT binary patch
literal 787
[base85 payload omitted]

diff --git a/fixture/10/5/27 b/fixture/10/5/27
deleted file mode 100644
index 9916c50a8151ba3f00140997cf33018e025edde3..0000000000000000000000000000000000000000
GIT binary patch
literal 792
[base85 payload omitted]

diff --git a/fixture/10/5/28 b/fixture/10/5/28
deleted file mode 100644
index e488e585962870ea196cc5af64a9c1ce15a0ff16..0000000000000000000000000000000000000000
GIT binary patch
literal 792
[base85 payload omitted]

diff --git a/fixture/10/5/29 b/fixture/10/5/29
deleted file mode 100644
index 8481c43c80f1d50899b7bee99d3eb2e644cefdca..0000000000000000000000000000000000000000
GIT binary patch
literal 795
[base85 payload omitted]

diff --git a/fixture/10/5/3 b/fixture/10/5/3
deleted file mode 100644
index 947508bcb04284322ae4f4676d1210ab22de9762..0000000000000000000000000000000000000000
GIT binary patch
literal 816
[base85 payload omitted]

diff --git a/fixture/10/5/31 b/fixture/10/5/31
deleted file mode 100644
index 10b3e5e7440b41af01b83368cbabe850879a9d8c..0000000000000000000000000000000000000000
GIT binary patch
literal 802
[base85 payload omitted]

diff --git a/fixture/10/5/4 b/fixture/10/5/4
deleted file mode 100644
index a47ba50c13621e13ac94dc2fb7d3419ef511d9e6..0000000000000000000000000000000000000000
GIT binary patch
literal 816
[base85 payload omitted]

diff --git a/fixture/10/5/8 b/fixture/10/5/8
deleted file mode 100644
index ab7dba8b2aeb573401bb8737d23dd9b9a34e81e6..0000000000000000000000000000000000000000
GIT binary patch
literal 816
[base85 payload omitted]

diff --git a/fixture/10/6/.zarray b/fixture/10/6/.zarray
deleted file mode 100644
index d8b4f0b8e2..0000000000
--- a/fixture/10/6/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "lz4",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "<
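fixture/10/6 holds the same 100-element chunk layout under a different Blosc configuration: lz4 at compression level 1 with shuffling disabled ("shuffle": 0 is Blosc.NOSHUFFLE in numcodecs). Because each chunk file on disk is exactly the raw output of the compressor, a chunk can be decoded without zarr at all. A sketch follows, again assuming the '<f8' dtype that the truncated hunk above does not show; the path names one of the chunk files deleted below.

    import numpy as np
    from numcodecs import Blosc

    # Matches the deleted fixture/10/6/.zarray: lz4, clevel 1, no shuffle.
    codec = Blosc(cname='lz4', clevel=1, shuffle=Blosc.NOSHUFFLE)

    with open('fixture/10/6/10', 'rb') as f:  # one of the chunk files below
        raw = f.read()                        # 816 bytes, per its literal size

    values = np.frombuffer(codec.decode(raw), dtype='<f8')  # assumed dtype
    print(values.shape)  # (100,): one full chunk of 100 float64 values

Blosc records its own parameters in the frame header, so decode() also serves as a quick integrity check that a fixture file really is a Blosc frame before comparing its contents.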
zO+}|)SSR{C(nYnR&-dGVulN72JADj+U14_VVx9of=S4BaoA{G=Az6fmJob z8&B2=%(Qwg5ino1>*T(d0-q{p8yRW@uAAt@ZE6tM6Mm)sN0UHA**f>-Zv~w6CZ1?| zFVJT}vqYe&;OOf6tpcT;Aurr0u-)6#wfwU{NcO{go38>BTDSE+`dvWV#Vuw~r$Alm zFNr{*=A5(kAAtnB6-VN_1pJdm_Y_*h$eM=bp*lo|W!B?odc@t>c@Dn%#Gy)qoJs>C z*5plV=F1r$Le=miu4eg5h+QrE*8S!aOPz1**b+cEq$afeTtFCTrUa}ECK_zpF1}qv zlq4!9L~`{K39-;}$S03*!Zx;c?!^eAyCSW8LZtLI_8fLBicsT-CgMvke9VX;{B&1K z2&+Jw()hK+_v7!(KgSWbJCubG2fc#cy-XlhW*PbTCrR&?yT!LQ5d)pKNQmZ?=?TTj zgtCrNJBXdO#hwRJ2(^}ULTN=N0t3tX^}lx$uVeN~h_f<{_nE|2<6cD}*+fuSMZZS} z3B_VoF43hk`PHQ(#FGF4`+VZKdeXaNvEk4{!q-b#DxuEf4Dn4BA*!5a=Czj)2U7m_ nT2@9x*OyBOcQp{gG;y3$)n(%2!#BAu*NDp&N;5=;S`6_I&k3AW diff --git a/fixture/10/6/31 b/fixture/10/6/31 deleted file mode 100644 index eb38719da4d15adf237d64caa51a162b1dc955ce..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fc}&l79LB5PQqB%xsAMFHQ97BP*hUautrb$NSNR5l6} zX?!2rHw$cb)1O|_BH){KbD#MK0n5g?-keVYop$!21{#68iC-iF`8CJwI==}-S_bE= zX&3N_9{IDWLtuDy?K1x^fmY+pTlaeeu7%B;Jm(+1ZJ zZ!ob2TXL<4J5_ty3?~o=jeY!*t%>!D2!XQ7 zG3#szQ&ntg^Hkzwp#(zg>^@fN?ns1sEgf~qnV6YpmNdnUFwoSzKRTOGG~9`E znH~}%t>O3ZfAgfb-NpDAPhxUnV#UNoXycvx)Yu36D?b zNN>+KGsf>D4k$;gYxV0NoKMVg)h?A#=5d7hsE80(Y@Kr76%x|nS5Pq#qArmT_DUdx le$*J-D`$vTH=kusJx`o5+9jJIQj}r{MdLCtNv}de{0CcYo}K^z diff --git a/fixture/10/6/32 b/fixture/10/6/32 deleted file mode 100644 index a06a738ae9b9141a74b76d689e48de1a41e6717c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmXAnc`Vm)9L9f+j&-0*^rJ&bXoQVKds@iWO3|IfGLj*xbvQc2qMD{+x*J_OS?8*s zWGF|Wi&RS#C4HWXiLUkeetYlr{{OY-dGz(1`;RryXq2BK|7XCuy_@a`Jj%2G-k}pX z+4X+Os$zi+KF4+SB?7+LOUBPF6VSGqq?bGp5N?$UC`sG7@KUuv)=SUZRy6`^?7GL~ z)CqVdTuB}LOu)E0;&bu~fffrZ|DFbc8_};M0=tV2TC}_o2r~9dUfLw!6w>eGqh(jj7h5izl_41EIV3gg>i+&p7OML9mQhnlnmF;HtK7?X%xgQY`ebj5h z0K%haQU0MpgpsjwM04=N4Jkv2>jgU+dxsI}dLA>@ni4Dii(pFLrGd+}gh5w$T#Y4B z?>$yRoL2)Ol3MrYI$9I{Zgcvdwjm~@^@$l}NAxrlJ>55%P&FKh6Y)+GVqI0c;a3;J z$Kq6Em@8o(9aP&mmFUo!y3Y3?ij8WIluaiN1S=-Q>VlaPVw&FXYHJ_q9dO0v8i6c9s9ET}tFPYzvtcNF>_2 zm){8@=Er}xcM2iKS6#?Fvx@kk4U-U+QKN%0R};!Q`bH9QMwycmqolV|OAMj35*|l< z*65bKUQgWf-zXuJG#+dw!aG0i_Dmq$y>q`7CJ~B-&32;c!|=ODlZmUYkH-y7C3dMt zEN__c<##&aV5=;ZQ0I|BJXb}CbJ~e1PYy`$sJEYdvx#}S90_5i210bN9H>2ajHtL$ my4~_5aZIySHbca!#Sp5-Sz=^ozJzG^3QdZ*NT@TtO#B09v!Cez diff --git a/fixture/10/6/33 b/fixture/10/6/33 deleted file mode 100644 index 71a5c79575c852f4e06bac1c7d5b66af35431b41..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 310 zcmZQ#G~iHRW?%qeGe!mm5e5bZIUxQ2Kl8+=S3QC^zPC>`>tAjloy&r>pvFFA2_KJZ&ZtT1K-u~;tjdmv2 z-rJw}l(l5#jraC*oK?8wZ@;(CpK=$7-rGAropzP;!Fzi#vjFS*NAK-lwQ(-|^5nh! 
z-s^|hvYx-UpP*5bbN}Ug`;^VZMr?Kka_ u%GCPy-X7$N`9FZ}j_WlM{r%ovXT963uD?KcgTV(V4die#{QnUE76bqkS(3;A diff --git a/fixture/10/6/4 b/fixture/10/6/4 deleted file mode 100644 index 978e881facf1ab526ee20ea6bfcfc03cbf742c62..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fdo0y)7{(PFC0c6^QgV$_4Mi)}lVVXwZpmeujD$iVr;Bn~wMAr#Vh5!dx-BK- zdpQwBq}aNnl8RDEb&~r1&b!a^|7XwhPM&0|VWg^}BF0*X!YlqnQhg)xp02Dj>1c$_ zHub^w{zkM7Jkm)XYs8M2!gXd-2~={$#z{nL!h{~@75>g zN};HRtnJ~_A!O;&Utd@mlWr# zH3JOZQ+yo}k7D&wu@MTPRKr3whG%ufHF4SumP!$Z1jnd1vrQP}>7LSWRtz&*>;xD> zG!4p&z)tXf;jen{^eH|&Y_$omP1^{Q_2xm zD}D~g^*@;c9AX=f&Tx37`uAQe;W*c{D(OHahiKtd!?Dawqs)SEWSks!_}jwKr92KJ zNo{2L3yxh|i&PE`a46qngky?Qgky`Td((uf9A)9QK~J>g_+FtaAjcYIA#!9{_Z#_I G$?+fl=8|9l diff --git a/fixture/10/6/5 b/fixture/10/6/5 deleted file mode 100644 index 9de34ad2a1bf0a4cd87d2f55f90ff927aaae8db5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fc`%f59LAUJ(4>@z4pGgNS&DH)#M3cpJExXRhLXyeqGK{hM%#r}chWUXNufiI zVvzifH60^L$=S%QQcX3s`+eW3;L)V&^l zTSL%gQhxbhI)UScECGU~OGlo}$s?$3+a4q@Ca{>XZm6-6AR>6)i90m}4{uJHb*P?T z#0Oc0UkibcUAMPgH$m3!r~Sh|6ATvLOKQ>)c<9cV-;-0ME{MN-cnn2j;$|;@Gm2Sc zX{-HoioGLj1SqPPj~m_ML}8@)o>kyUvGGNd>ro{|-h`^!02PJKH#@{3fWqry%Ab)T z6lYtb1SmSJ*4q@uQrK^Et3IZtNXW7D+oYioEgaJ+Ce7_XGAfHAbXToKdoD$hbQJnS z87osNDb~6tb_CW?Ts^QY*14Wy;ISw|F@LyS&buy(qe>Icl0k}wRNbpo9mNbO5Qb=D zs_!y}p>qAL3_~*p!;Cch4l4#fsTc-H!-?Vh7Eb|&B{yascU3Z+>@%1-R>ja}`>FK3 zKZ9-b%eAG!3~_}Id(xs9v_Gy0CJd%-Y5|6=31Rt08istrs26h{$5@AHnU?v)JA<7)&M)IPVLb=NV}rJvqf-ppXxEG~p0a@xjIp@R%%L5r8t42u#(Pb@9udqDn5^f%Fp5L8FpcH-W%Mj;;2?+6yWn5dagOuSadf_opA(tM z;b0*yl|!0G5yx{$gkwt2xa4XrN4VmL<%K$qk{!(g9K)nQI8@)VJIx0$(NRdO5tSyCg z<9DHARLDxGmYPetP)YCme)swQ|Jn0Ani>wH&9r1P@$H3os`OP~&?7jZ;k0D05y9&f zWj$44rRtqHTM2_EX#U8-^>(D%OUaLJ7zKy{IYttC)59p361K=5m7kN|
3H1^|L=j z8EvT?^T|IyJi;cgL-D_$kbykSP`^Kh>!Ue_*!Dl1NS)t;p~XLRc!qQquIj&byyo@Y zpZ7nvpo$ejyWYPI3n~(t+y1`^GT}RhW!FDr#Zk6Q#GpT?l?r?a!}!0nW?k7rU*kWjIB>2f z_~1Wd`OQvFb@#s=Xu~y{I{rTfgSSpE3FtoqxL_XpiGF$_T#_U95^a#s_H*wCkYXpO3uIM`VJGZ#s z5dW5(&E~(}Xo9d0INU$IRM{Dncm%-I9WBo46XCz7nL1p|F4jK)KE>6o^x?lV8{3Pd z#Pq+;4T}3*0QA2IboGum%lW?q?bCGALFB(b4=`L7=kst>VLU zNW?!E?FU=1)9*iHC=k*2hweZ8;LX%Q#?e3fz70oc(bzv5T{9QIX7fJ2T@QrPojXbM1Lq-q^Pr2s%S^D_xmQ-;4;>a<+Lk^DbUf5Q=B9qT{CcTsWcl+8cK zp=qYSjQYPt$g;@H#`eEobNqRV=5ar$9qQE$wBNto81*3l)6736H(n7s_4PkhS3Yw&cI6D$;zP!sEZjmm3^ORPw)Gye!KN z%j`dRU`k2%$KSs*-X5qu$nifiN~7Z9@8CZ_!TCDQQeFz&z3 znoVR9`RKoQkb7pz)#g9W$APxIkO9CEY+h`4?2SJx18yE=<>Ei!PJk+4`Sd@~86KNw zKlne8e&t0tt-!xL?~x3)?@fMVfOqA?QCL_PFf;Bj-QnR$n%}Ve`KV>WW8; z|L;H6d%DYT@bACKn9r1m44c0&?_KTC?jpQT4wc@lO)#Pp`i$%o%}q|L4D;z;}`;PW8VJCgvGF z*!@3m1wL4H5A;9nM*y-_BjdlB>2b4`_FUcJBHG6%5kI@Ledu+V5Hx8*->nWS{*%F#be8p=>|P4T~|#7D`@ zywpEZeqU0}ROCMrMM@cOSlhqY8U%3hiv7PFjiY7q@$x_R)V4*)o3=mLay1mjE(XBt zlyBMKj{rcmMuChAq~5>j<+zp}g9AW_fzIB%CfGj`PybhF@yWj+8&50 zMftzVJ;hY$`T9TQy@+SsYuLZEItoFrLE1l!;z(39cKbhw$~uOY8{0oE`~^U_*a1MK z&`h-5ZoxnIx=O40sOG;rrXv6D(*nSI({WuZJkvks#@}l*Utg9qhl^c;_egf7L$r`k@UX?TNi1B`rSX)oKF)bGV8zIV(COYNWDLjh8|gm=GQ;jzAKJQ`|dx~ z!snLk^8i4u1?2$WT+TnkHStg%IorRz;_(*ri26UoK$+qE#PB~W(Ze>Ajm1Bmv(QO2 zMb1Cvv_m|=#KfoAp6v+F%-f#And>JSYSHs zY~nwXCKiXODdxYPB?b^;zVScNe!oeaqUyhhEP%ixY}P;cc=k~o`Pjd62{YfVXLmnK zcvO_PYUaNJUqB2PmoOJ)huBg9j#<>m$S&~1ypIpyLQ}Mr)(I1axdiOtAjk_+LvIjto zn2aiMB-OumqTr?wY^Xm`@gUuyJ@7v*h1#phCh$M{s&-S{(#Aja8aB_yJnz41OJ}$( z7yUnkQmN{z%;i6$N+eVAI?O*Z)bzhZJB@xGU-1SP!OV*$mhQ}KBcHb z+=g!;tLi@rnY)Gf0L?#YJ!l$1F7Cey3eRP3mEyn8GQKKe@#`f8o@vPantA?T=Kt(;F7`9X4=1LA{InN397ciQ>P%CLNC+Kj^=% zxUR$ux%$6X+jZf4x7xq0CCr{Zw(mdMGIm!81LMDN&asPc5ZJ#B!asYO?e#w>0UHI@ z@#nv-b6-e7&f`CF-{&i(QTxB5Fdnd?>h8a~3Vn*TQOm!}?}`JCSo1%wnqwb_Gwr_w zqAGs=mF>SpMa&>MZqYw<7gvsHwA(-9yq7M|k?_C8U!JgDv*5p%ZP#}8V(~vKJaRQr z;qt%GQlmh@+SNZk@h)VdkKn&Y)mxjpE$lyGMErTX>eJFahcRQKt#74^UV)r`C(Rw z8>7EytpdP-ssslR#pb`?Pp=CDaosDQ3>-1msA=|%h^kg#! 
zLixY9%9Z>E*zP}c%ldP?3irRyInHUS+u`gYt4DM;PAhU=}pC`X7E3)+)w>m z^w&S2k+@7%%;Y~HSpD-TwBf%e{6G7B)#pFP!Fs+X-|@d4eZH6FvLcwRcYywdOwxYtwrhP2Rt&D#D6kO7Xugsfn}aT=T!+q7KgsQRYA2BK$){ eS_Ht++9nn^E%3h5i+1@{sP$WGxZRJ08?}orXAMC%Z5i$Q`Dzv}ayl`Z2o`64-WmKJ*1hhYa ze4^1YV(7oYhJeX=HTpjm9H}%VvE@HZETtQswX#1(QD?wGn&7|I$i=>wtm?lBHEa7E zciBIxVY~iEQr^EmJe0K{9>G8FQac=!WB$MHD0?ds^x(faYJoN{?YqC@WkOI<`0hX4 zd_TRdgYCazqcRiu#rMC|S=wAwcHBQN-R7eJX0X3Eq9~bYjr+eN#*9@e!TP@t%YGL% zv*f=q>=pxfaJ9b|Ya|PUI`%(A1jNBuO!2=kWPXRKJ>fshaS#@zU;n?Mg8H%h8P`9t zuPnCs8u&lK+1pWJBf>xR^)|JdVdcLm*Ui*-xbr{#isy9{RMtN?uwwH3sKY--@Y_|Q zNbtX7k1CS^NY_7#_&(Y(>E*wH7zN0x?$bXIcREBL+NeLrY+IL#Sn)rvirV+1ZsNa^ zgs&;f9mBtciHBkRgo^2EPi_M`)R zc;!D#?^3SiU;RJ%$?*>)+xovV*4})CJ@mhK1BPkj8^XVD8S+W!YNi{rn$5cX)zCh9-4GkIt^)3-mMn?3w9p5;IGC!riK zLG!HnL@o*;0N8!ITk@bm8v)eyu zmVi=VH{(Ccx8|Y^fdxREnxOt{KK{SKbyXtrlJ!5f zzbWp^gz3Ms+AqK4hWfulg;nYb2>ic0erK3o)cil~R~~q073@ET0EK*Jap}KNjy`O` e9_YXSVJvV87py-wBvgPPfa^aFT(#{5(bB&}Gp!*2 diff --git a/fixture/13/0/27 b/fixture/13/0/27 deleted file mode 100644 index 980146972021d017a2147d0439ae3a36e937fd6c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<4ZVTr^fC+WXeu4kT58}W5a7N3kYsWu( z=GLWCJJLT^FWWuFL;F7!8Ts%@ddELht6~tCVfMe?trdvs5757IOzKNM$MnAqjk$o` z?8d*d@}&XhiuOM_qLlDEWZgf_7dS_&5a+-C3{RnS^x{8kxR7g?_07z*TSKU7$!cr3Yw&*{CfdwRdd;33tUnmvh)zv>O6g%hkjrBhrc5@)P zDdaz;7Zvz3|Ls4UyECb!pz%K*pxOaSE8jnMK)jNaGVwpf)wAr2TIRn*qFAlAX#>C^ zK4}u%(&Rt5`cW5O{MSFWW6z;F0_?wpfF4Q#GxWbEGr5BFxxhcQ_h?QA@#a7IeX(*H zqu@Wnv#6kVQ1HJfYgKv3bLqbr3p9;vY~?@ygay)$o3cMK^N90f_V7RAs4~iYu++b! zAwlXds^>qsfGuSEOr*ah16vO;A?Lq}Jn4#pv+X}{&D>k`?5dD5&QPv zx5hu`o&2#q%f7#f&pjuhwC_LDKf;6EQsO@{I1=sn1l>QCAgR9vJ0ujPSmZxiG;^g! z)9^oY3dYr^i|jw(fB`)|m*PL+9y2RQ0RTYyUfk zLUD9$67xUuQXUnyR`ovy1NGfdj@3Uvwe~o2&@z~&g8#*<$`J;IQ74Ce*oYkeDJ?z&50H6 zFv>q=p3X~h9OXYZx$D0#>LT{(P0c@o zLx%mdA=^JWzJr&uvd%y5fyrYT4az^$XY3rNulc`l65Ul9vGhL$Zq#nf*5N;e(~T41 zZ0^5fAtB3zj{!Tu?AOF7`RU|cmp7*~L zb0&E>WCFm!`qI$Ba+nCa_(qG3Rn$L$Q8j4yOQgR6Rg*R@ z=k>pe@pr{AT+~0;7bNi#bnrj*l9=I+obkV=g+mRyQO&<5ER!ElIom($#&mk7GWb7T z4IR*biSoaXC{KCzW9dJQE4L5(<<`H@L8<3uj^)3+0)*aDVC}!7JPubh`sBY!wP7V( z@!vlW&o}d%C(XZ; eH0M8%xcSEK_58mfy**QRGuc1w&J8m{xcfgbm9%yM diff --git a/fixture/13/0/29 b/fixture/13/0/29 deleted file mode 100644 index f2ec86b99bcf1474afc6d72c0a307e1d40e28be0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<3m85BNe9=%OUi?2jNUD(g4dy>LRoUSXXz)LZNu4E+ z9QnV)xj^XU>g~T-mLnE2;PyWfo+3UlyY4@a+k7Fx^&-*_(2`L&z7P`MB$i~q_VCFv`l_EtAiRC}e0Uns9PUXLBLU1jC*5bbrVi(jpv*&}%GiSNHU|1=9^5b;0w2p^pC9MHe+ z-y=I?h0ebs*iSRg=;}Xwpk#9H-ul0oeIDHHY59 zxsR3sH_1Q3fWp~^*x$c5C&Hi5Mf5+iQWE{H?ZLmIqXSwJzwkeC|AT`!!}?mf1i!y! zg_|S8C)vL%ul;1V$jracCUD9Oeeyr>JJkG_LbSh?0oJ$QK;yq*R-oL8$l}nBKpF&MxZTBgQ`Eo5a8CHZ89_H~c@k;Q+L>lkC6jgz`qvJodjE2o!TZ_qo4n=s?QtI`Tgv zc7Tu6hTgxVSy`s5y5qmnx?9h6X7E4Jk*&8_smkzybNfHWLUop|;eOZoN`H4aYx!(BZTB$+*9-hIyim67j#*1F)F1-M+uI`~2D{l<>cxg8AH(?#I8m z5Je|YCYUr*vh}D40_ucT-v|5RpSal#_&IaqehYS z7`MNyAp&s$zWF~abJBhII@G^kqs#Z-8PLBBxCbkBvh2TU$pt@DiP*pNI>sWjy!*dU z?ego{2?4-LIvV*>{o_B#>uD!b?fbu*=aRQp``JG@vpV<@1mM4z!y*u$5bi%S_ltuN zll#A~Bl6?kB;!9-^u7b{IMqKAvGzcBGuJ=G^weN|`|&?i#(GXw{;@wqop6mTl>@-` zmU!GRzT7{qIUF8@b<{tf#<=a@PtU)Zcci?{@9Mu93NTzv8P~t7A_aXmRQErR>v)sm zZ|%QZcB-?v*!#bB2Im6-e)qo?@&j~E5~9D_Z^y&UyZS%o8GpuzqX9s(Lw!%O&g4Ji zP*UezX79i4x5{k50PMdsMRft(2JAl`g5L9BPW3CuOfs^32WwXR#M6v03J2#cGH^3=buw=|t&AY#AA(H1p^x%$7ur|Wj? 
z;@LlJ-G$>vd-*@8vJ1nX-sZo?llM4eg6h8_q{9g zNc6vmFcBroz5zft$wL!ert3e$iW z=X-KlMLY0aYy!>(jsS zNy?3hdRPw(8X>kfH-}ApGQL{BA?%O}2T>6oc5AZ*3PaFhc>+C-TaqeVr zSoS~6o^BG1`1?PohSEJ&r|v(7`MATf7vR5MPx}#$vBf`kKos6X4g$amL<@EfXXHQG ztjF3R8^b@N6YbXxhWS4Yi@RKz$>+Z*eU+pdf8ali%|?N;4e-C4kEw-I3R@}di-10N;Uhh8^N4;gl`^vvuCvp(c9|J(YjLZww?X$nm{uL&R z-n>6hUpup`=HNda(Fiaf$?89bMQ>vw_Om}VMM%d&G~ho0P1wsGX2(Ad&=w~q>gd0c zlC3{E#qhryU7n0QVCFwKR1ON1M)*HdScYX{82Y~}cR8;2^Y*{61gp@88~48pkJ0h_ z&FDX;Z(iuEdFwyM@W1pcKJULvv$arjr2#+=UDBpXqu9Uv1Q?lS`1U_-LQme1-|0VF zXPybesP4ZEHWf4s!P>u{2VY><>g&I~p{es!g7v?R4sHX-G|<0Tm0DVNE%U#e+W*w7 zWX8XK-?jc@?fyTeguV7eOXff5?_>`G)ZxFePn+XwaOA)J{xE^`tJ%M%4j=cekpMuB z2whv|Y4N`RYU0$<`&_@&zKoKA$lX7VkU&kOa`QiYmi%>kO4Yv~m)Jo)&hEccipmKt ehtfaiY#i3y2=zZkh(3qwcG15*)Q-J^-0Hvh2(Ee@ptDjQT%;@e%Xbj{ZNiSV_Y%K;=Iz?IF$md-uPV3*So~6#l=Ij474IBKAKC zEp?JUQ_nxYgRR>-EBZfPg({dnYxTe8{a?qop~SziXJn0#ckn+YV);pHTIavvEIUGv zJn%moc}S*uZoj|phu_xtyPQ8g-Bt`K@bbUk802@g7SKOiry^0+iUB~a9{Ffl)!08q zviT&=Rq4N1OKNB%RQ5lTxlKUJUg*F2KGKY%iTgh*H3Ew}X7s;DLaO!+bkRScE61kB zYxqBeeH(cDyz9R;S%|LsNwq(A#eTU#gz`U;+FX=Q3j)AzdU_lPsqH@^@bJ1>g0{cG z9KM7;68AqsX(W>t-v3*x^!j{xc+Zpyz^o&~@) z{oTKU1E3%+5CFhe6Oe@uPq@G6MP0jttIa==yGI!-yvV;_iT2uLThc$!RdGTnzqLPW zK^aA*9QMEXD{bQ|LDRoi%e0D%D7`8KjS~8B}nqe?CU?vbRq@b4(C7A zJU^~{HeJ8bc-YLGwBbJ`F`7!y_4vOWvcQLcg7H73fH;-9=-Izzg14RBg#SO*n5<+SR{sCkJ+zUDZE9$TjAC zP~bnggR<{sSLZ*+GEBOcz4JdFskpF!8S}piR%qh)n3zAxhXEc7HtWAh5n21(hS|TA zSf5u~Bu diff --git a/fixture/13/0/32 b/fixture/13/0/32 deleted file mode 100644 index 866cad9fb504568f722d0a8d64dc0aba38f02a27..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<2(l})(wn&3YsYR|WF@8mzd{Ggth^x{8fMtohEA?v@^jH$Sk?D9V#=zvvt zwCO(vcg_5RY1+S(WQ7#3{@1@Eea@wn3gy3aM3~rPtMtFKZ=72bP~|@jSlokDPU=7S z+Qh{;qV2!BjMaLvwe!E}?D+m0`0c;A?1BOQIJ`gme!xzY9QMBgO+b2r3-Uj`mJ!@q z7V$qWFGap22-mq6|O($sn|jl#oa%Ju3hDe&Gf%`riEx{KlVRQ z5#eFVFyFsee5|R9e89gdN)&|2dc@y zQ}w@G1?MB>{qMhj5i$IKNdQ0~i6pszh~hsE+R}EeN0GlGfi-OhB;mi4_->gdeAGY7 zO#I8ok=j4KxRz(A{>(p5tpI6u8u34bWtBBfz3aa@bbx~20`R{a_a?7uu<$=-qZ6gb z<^VtCIA&#;!}LGk+ zKA1nVu}v_)Hu1j(WGFDP8uUN#MCW54UE@Dt)FQ;E+3>%v@gVPgOSC^_tJhZp z69PcPfW)i}6Y4)ysa7?)1Gc{?JVV1f?$AFZJL({jfR(?Tnl!9ksLemWDkle2iP^uH zNhU$~tKUEVxE+g3_~1WPEA>}(Fnd43hN7Kz?ZH1YA+k9m&(%LFWF^Hc-{8MPrteLw z4&OgD+$vEG_vk+iP)BhWaQ8phzdYAAVAMaCWd`cR$Kk&yC$*j!TDre5R{^b*N%Fs5 zFfi4)s6X2$&NSz1+V*o<1aK`0PJdxpc~K!oR=PYrMkW zGORx-AojFxB>BJQW1l%@TJpbmPQ(U>u=GE1_<1n5?!&(iwhSQwKGQ$ZXvV|V=KMdb zX|?+eR=~f5f6Pi>mB+ss$tbrVO!Geh%hgoNN{K&nRms5qdelF|4t7TC2;9G#dd%_cul&D=16zvgN;OT`BKPFy_B|B1E@stm8jX-r=t1fBC=t zp10%*E!w{jE^;%8c=*4MIdS-nndmk>%Mdd&11U3)ik>o$6A_rfkr) z4)s5-I=OUlRNy~(CXAl9lhi->l*g5^=Iy^HGY1xqS=m4C#L$S$sQ*6{q+|UrKK8$% zldyINUFSb@L1zF^X6rwad^0?{oBKbx))hcQIp;qdnbN|{-i1Hc<&tR;4BJ0yez{@O zC(*yZ=t!y*g7ZH{ozUf;$oRi<#~%eqIp4phAKuH*jod$KCx7W7yz)OMvaUUPI*h-) zOqG}D2syx4n8eAmAYa@lX&iqyY#<|z^Q+1tOnx}=fR(%Zj=rD%tI zbo;+}Sm+73HPb(96f)Clm*qb+uc`fP`S(9?%2t+Vi0eOACI1&k=IK8j&)kXJ@zOu1 ztjOzWGyK1;#{~z~?a@Daz--*N#JoR=VaRt}dhkCWjM?+ZDB?d+$!f8xpTNKK-|}sj eRR2Go&XXsOChI@^mO+Q=8!*=qC{Mx|NMK$K=0ANJPw}<-<)3iUr-zn0)EGw{FouXF8j z1N=V~c|E+hz4O02n7X2EjP5_%@rBiJbKbwruW`bWChflmzVDat5%oVjRBU{|?X*8| zN>DKCL#97Ucx{24yV1W`=X+jPo8rHgX2_x9qQ<`+LLb1e&seb&DS zk9pItTk1c*1Z@4%>XE-}xn&nyJJr8A5f@}UH;cb*Vc{o>hV(!Fj&??3QqR9@z(c3c zn6f{NPG^FJqt#^yhq|I1aged#}%M+VNB8}2{cC)XU| zdj3CTIi<4`-QmC9q&s31Y;FjK>9ygqcqiHU*A7)Q^RgAX+l4LAKyRDck#dW+gN<%xa&V9M!DY-2*kfx5X`y< zbL>AtH#1#zFWbMxGqzM2HuFD_VbRTMx!6AzEwG1&!0$iMWvas`vDiP$7NiARobA8t z*K7({V%NV7OAknueDuHFUy!zWqQ^fwha24TX7|7J|Gg{T&eA`mU0vtT^bEic5`3ii 
e>ghkpcLp4xsPVs*_a7yDhVH+Xa#X7@^YuRisGN`?RfTn8Cm5pws-C z9KJs+&9IF@*!Mq7OMf0XoASRd55zr}H|;-v|6OvAs@Olx!m`iaG^l{_FY3P+qIG6k{qjG$_KZY%eYZc98IPI0spY@H?o)(o0QbN2?$yVP zqTRn)aN#^S%eye5WuhUvdZYELjTC-*-e(SC;tw%fn-Q)blP zI`_Y`i+LRMugE{DM;%dqpzJ?uPqs8xk?y}B>`5B+htogtD2$rFkmx^%OjOLNM7ckx8!qx>#Ph#a0*a`>67#I*=EI2U8sPSn2~6{O5-y4b%Kx5~zKHu}HK(VngiX8OND+?$D;ZtK5o!v^E; zdfdO747*ip4Vu3+X&i2o73M!kwZIdsMa(}&XX+GNo#sCrOc4}#rQ^T+5Bbh>AF{uo zGN-j^P1nEdBTemmMD)K!UQLgUmH)r~7tao!Y308mC8jY6*#p4u%A9^fAo9O{N7p$A za^XK#cfFsD563@BiT*3`>Xkp=h)#)b9xQ(&ayyl*L=$)9JtW zK=V$^!O=fYpmL#lr^7#qUZUgxkjg)th<|6qzu3R8BV`c9Z`41EXNbnUaM?d{_vPdB zh>jChI?yw!=SA#`C|eCy!}D6YRg?8MuGg_|8Ast^=fAYN#zScWg7?41z|zHlckVwF%kfbc zTKB&@f&_rvj_5xO3c+-&8}C0!cC2cMZt_32eGa(TsqnuN93Lna2gN_x5+z`GIOD&v z1hs`AU+lldJjCQIbf!ORE)(%+3+g|X2t15;3**20(b`ob5!FBHu+pDN4DG*?zCr*X zvGBhh-1&9{!_>b#9#(TB?a;p_J#=q(UgbYn)ECk|2K2vfAN*nBL;*lM6**KNt^B_f zk?)L4G0i_Ia4J-;KGMHJjL`x%zvRD0BzvW|YR11gk>-|Vg5^Jg+$R_Rdg?#i5u$n^ z*~`B+Xj$F^Jk>vA*+-+`NcF$%EHPjtmdC%i?iS2zJ=s6v@-Ok4Lju6TFJm{{wzNOY zxg$TohsD3od|PTLQS?6#A0?y{WW_%Ypw|w9wcbB1eEV>4V9~$Y7FW8xIpjaXA}Lfw z{^36~sdCZX=k&kRn(9*t=hHvCJSXOK>heE`)fU${d+$GM?=bU+2b@1|&iw+9mefBp z%A=9(M(sZghKBj`UXx%N=O%Je^#%~;o3+o(TE zd{Cv|mHNLQG&6GTfkDT@<{`9{KzN3Gc36{UMwR;YLhv~oSnvrCgiS<8iv@$Qxpy9s@2zrVEbnw4O zWf}F>zxqE40}^a+6W%|)23p@yyahmXQhkNY!T!I0S-*o4n94utERo4Z6yHCT2xvc= eeVji)PXtPRPVn#n{TD+TLvi(I zGV{Nz^+EomkNm$wZ4Ugw{WH*k5KE$oIdRx}A~i&dxugq2)LB z{_($9TL_)Jt@ytgUUl$AXn(&$la)=t%)~$5B*$1xKe@lz+#&jH3fwV|XA||SEzv({CG<2$^zc8%X}eYWw9-G8j@tj> z_v1fqSOe@A@q|CD6yFsWHQzr@{)6mpits-s+tyGg5&l2Z9atw;U-Lf&RV}9xG_1c| zWaV{@Q1UNiGoKBcTZsWgHfVd`%0^&boPXFQ~5Awf&#|O2+4&py1GFf;N?G?lb^7k8QniN%~BBWJLW%1c{%p z-qJtDB$H=Vm-RoCd;lKi-P}K@tAV^Ms^!0kOQR^lc=JC=j{T$>mi9j!vDA2^)#<-A z{9cB3V+KICu=kB&W<|eXVSNCBS?)h<@6yX&N9R8UmqU>UR_4EDG5%o9<=nr&$+L}2 zRk1%80>&@J>;1p+HCeoJMe08=ns?~=RQ|t#>;-V`364Jm6_TT^e&xSz*`%(Th2lS? zd+4Vnq5;5U_LzTOnEJm)NO0z)7~{VyG8mFxn&3ZEyI|;2UCcj_iPL@b&G0{Kkxq+H zpSwR{iwa!oDw4m3v5ECBCGo$#b=Wgzm(0JHwFmgx`;)&6YHB(oZ~(w?$qh!WeC)p! z&N7vtr`<)S~$)VD~dSMWcm7?Uuz)AGMNTF4I{AJRV>QH>j#gyz4R z*tb)`z~{e6E=iNE)8{{cNcusY)xN)-=P&3bEB3$c)}nfM7`ZgK;Z%hCk; z8~49&QyX9B+3&wa1lvvhT>wA_a&N7j-{h9LT?Z**||tkiEY^ zhf5p52GPG?gjh6{KIOk|<=nXW=jT69u)i9RD(*j6>4^2T$nZbg=H*y@Ak{y5`JLMw zdgDK+d{{RU4A#FNOkH{RY}tjWJORUb>ohxET1 zOpBrO6Y{^3lM4KmH`u=$al1Lwrt3e(iNjQ)Wz|3RrvPaIqU*ooxTWxwzo$RWi?zca zLiN9la}_K3q3gd_G4or?gZjU?Bo;6ha@#))l&++M8PdN6&$YupFvP#8vF>W6v;HkO-`zhyjWC2f!v8-S06-LaZSTL;k%1c^0{Oo{0Xl0PAoRcBf(F%; e-q=6)gfSP;@cTc)Zj%@XN&7z;Y&#`*3F|-gpQpqnDZJp6M!3HmmcGc+8vj3HeljlF=RQH%6 zr{F(9GOf*<3dBE68eo`?73@EBq1#%pAnHFSU{8vbN$Nis*mUleq|v|4o-0>~7Sg}m z^8XY*ipsx*6M4_Sp5MP_Q5dm*4ZA<5Hmw7-2Y0{5q73mUd-^{jz;lgqC*Z#_SR4Nv zlkdMlkMmXBspLPox*f?xPqIIXg&s?Y_T|4F6CzF*2kAfLtmW4L+tI&XEUSfhWB5Of z8m&r}XYRjQ@Lnzzmg2wbIYk9ESHM4_r(sKcx%EG02dJz8)#ksvnWIRn7}!7IQl!J2 zpYp$WRP|xcRqa2*?bB4f+vLB-i3NT9%;P^IH&QnCbLBs4q-wPe4*kEChN|W`%gR4T z!^>=$F5tg=$}ilR=ifh*KUA&sz5GA8Aag)$hw(o^f^!7eNs7NUhTJ7QCk9 zmHs~|5i4(-P~|^F2rBht)}X)S`gJ8zFx|g^s*Jb}Jn=sdJU1d+$i_d*RYgZgwfjGG zv{um80Q-AERa8bk@QsVTH(L)XLCvSob$h$tyP3A)!RRY z^TCy*x9UGSL1*$kll8w=m;*bEQ1U-EwTTQE^0~h}Yav(D7xcdx3%@TbCF{S3%r|li zFYdpU=MPgB8`S0kfvgtn%VNlCPy!XF!%vb)g ztm8kW5;;}_qvgNBDS18r&i=nr*(TPa8da%FC zf{!~Pr`A77pDb740`1REV{p*^A-}P4e!5f4KRB(HS<6Hm8w$! 
diff --git a/fixture/13/0/40 b/fixture/13/0/40 deleted file mode 100644 index afbb988b866f030f83f288a282df4342c75e3405..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<2=D(zStjr70B(W}+0#MeJ%ytyyWGF`IAyX259z{4(l7XJF(aS%~)?a0CsrkQ_DYK>$ z!{EQr$wHsFG5$Yo5~TZ{p4~s@f?kKYUY)=0p%JmDO!hzSP?UdBQu@E*Vbixji`u^> zSQj8IGXg-mEVN{Tm8rjo=E83-Ag#YAHV5X%-SR&Ov95G}`tv_rA>d3R)AYZL19nu- zW7|K9nlaP27yLic`K~M_l;pqLNpv^p)z`l_=yxEef}KB3@w3PhIOabWp&3NC*Y>~L z%UH>D<_N%zO29fwmCHW^b@hF3r}96o5aBT-_tU?XXPKjJ@usiNElb@IOn_kKC}u<^e~>^Z`h3hO`1p4n<(Ci*|2 zp_P!da_m1egeW0CZUDe@)y2(S;M%`O?Njm7l+Hhm{jzj_fUZB6Do&{Vw*J462hb%@ z!SuhbMvI6AsOi5(UMc^8PO-la@D2*t@&3O#E|Db*HRC@kdZ+HO&9}e#?XnE^u<*aC zd0uLD$H+e}fcF`5npsdHTPXn7yAE58c1R(o?3W z>*l|XvFnU4nAkrU6Mj6z8k4_!yUjM7%>Y12LHQJE_|U&P>b*@dWeUKd`&^4tMgYK$ zd=9l$BlEu_PZ}JF6AM6k=|Toz&7!~XC~ljJyzxH@zS&BL9q>O1lS#rrMfAV!>Ls0{ zu>8L+WW&JJjpRQJ>!(3|!vVmn(tObSNbf%oQ+aS!_r1SfHb!5!w&OpXUQP>{Ytujc z5dkwt*497gjS)wozq&tO@bFy21=2t1==*bYn5RF3&C~-42KK+aZ%>f=G}*tvoGrw# eTLC~&%MM+68q`0)z-BRdNAf>&PScX;b??6}MX~At diff --git a/fixture/13/0/41 b/fixture/13/0/41 deleted file mode 100644 index 557dac5368d7f277881ceb188586265e2e0c1e07..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<4f%fWg>0`R}n++b8!LG!=guQ&khPW8X8R!SgP_4&WH+?$5^+08%t1VvrE z#n!*?lx^4z1nfT}IqNrx-P6BSua(HvIRn6>f33JTWZXZH;0t+4J^;WA&N(ASamzmi z%Af!BuIRsmdq#)j!p%Q9# z;={jnzT0$l=9#~ipC(lpBI&;~9Nq^!YWKf+cmtCj2I4<4X)~G*udBTAyoy)%ft!Md{j`qLbPGL28Jfy#GXXtx;4c9+}3!6ix9sR#4zN{U$ z8u34GIhoc1$>zVH_%>zG8Ogt^g*NK%Amcyi;!ELd^nJfoPBzpkboM`5o1_yiALKu2 zR>9Et-A#IQhT)RZU7z5d6Pt$oZ+flg&RF4kUN^lJ7rCD8&XpHSIs=2tBCrm+HUo zCwUbdu=PK2X&{IuSO7qiD9tFiapOOdPV%_r%lALh*R=)Y@asPsFP#ulXU)Ge0iG2> zMCHHi-@XYe&-uR*2vm9fcHF<9=5oithe(gVAWyqb;eC9vg3do5;xXV8t%*Vu^+3>$&wN&w* z-Q~a5vwqvvO#QzvW*_iJKIXq1v=Trgv*thE){;>vRM|g%BYU-{aKt~BpigJ8zxO|! 
[GIT binary patch hunks omitted: the base85 payloads in this span were
corrupted during extraction (line breaks lost) and are unrecoverable. The
recoverable headers show deletions of the binary chunk fixtures
fixture/13/0/43, fixture/13/0/44, and fixture/13/0/5 through fixture/13/0/9
(each dropping from "literal 800" to "literal 0"), followed by the start of
the matching deletions under fixture/13/1/.]
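[Orientation note: the files being deleted here are compressed Zarr chunk
fixtures. Below is a minimal sketch of how a fixture directory of this shape
could be regenerated, based only on the metadata recovered from the
fixture/13/2/.zarray deletion shown further below (chunks of 100, bz2 level 1).
The shape, dtype, and data values are assumptions, since the binary hunks and
part of the .zarray were lost; this is not the repository's actual
fixture-generation script.]

    # Hedged sketch: recreate a Zarr fixture laid out like fixture/13/2.
    # Assumptions: 36 chunks (files 0-35) of 100 elements each, integer data.
    import numpy as np
    import zarr
    from numcodecs import BZ2

    data = np.arange(3600)  # assumed contents; the real fixture data is unknown
    z = zarr.open_array(
        "fixture/13/2",           # directory store: one file per chunk plus .zarray
        mode="w",
        shape=data.shape,
        chunks=(100,),            # matches "chunks": [100] in the deleted .zarray
        dtype=data.dtype,         # the actual dtype is truncated out of the diff
        compressor=BZ2(level=1),  # matches "compressor": {"id": "bz2", "level": 1}
    )
    z[:] = data                   # writes chunk files 0..35 under fixture/13/2/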
[GIT binary patch hunks omitted, as above. The recoverable headers show the
fixture/13/1/ deletions continuing through fixture/13/1/44 (each "literal
811", except fixture/13/1/44 at "literal 387") and the deletion of
fixture/13/2/0 through fixture/13/2/35 (roughly 1 kB each); several
intervening diff headers were themselves lost in the garbling. The one
readable text hunk in this span deletes the fixture/13/2/.zarray metadata
file:]

diff --git a/fixture/13/2/.zarray b/fixture/13/2/.zarray
deleted file mode 100644
index 586073abd4..0000000000
--- a/fixture/13/2/.zarray
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "id": "bz2",
-        "level": 1
-    },
-    "dtype": "
[the remaining lines of this 17-line hunk, from the "dtype" value onward, are
fused into the adjacent binary data and unrecoverable; the binary hunk for
fixture/13/2/35 continues past the end of this span]
z0MGyc007Vc000000LhR-JkZph)Sg4sn-fA|G$!eyo`Pqf>!q`fj|JW^v}8S!k2}z@CpfAJq8H^ zX+WrpPF(jT|K_0Q8z4&3H4(+%A)2XVAx#6zO>|9_54^3GN=i_5ASQE z;pf~p$hrWN!jJ?;1e3fvZz?{p#G_-DPn9?{s%P(;^~MJPod2lt#J`>K1SclC_OZfY z8!>W055!X>5=RZdjVwzj6{XG$_}kL`jQ|UdlJ9_)HhLqBR6)F!-*(XXpoKgLA~Y@D zgN7JW`^qQb^gCH#j<6>rZ*Wi7lH~8$r{ye(&={bHx&1Y(z9S{jTwsaT>%mI_&&9ZA&4 zQEA9xY6J4eaN(R3zhsLKIpF^KvtrwT??GUr`5OiNHg6KR(1Mw2tuHbOU{aVN3HU}N z3EE^7y@1B(n&59wJw)_GeZJx?)MPy7xtC4>?L8`8OfZZ;xRbj;fyy(@F0A}F+*v6G zv7SeNkpwsKV}fPtsRe-_{4}fK+uTsR%kCsrtf2)$p(~G2)?JP(P-8Su#&KQ$0tb_O zWGHw8-6>O&kwB_xB0}Z1KcjNUnQ)6fb>t`E2ae9BD#Ul>Oa zUTVwzji*Vj+bCFR{xJwzYLvLIsSJhvn>;rw0h%OzXvroNil{EZe?N&{iShu?WJ2Lg zExSpl3r!FV_3;TQ1^-FuDoiv>D_uvC2>Jm_NlY3x060g5ny7vN&y^9H1+w2YNY17k X&QMYq1dqMryyEUirwS4Wh|zsOuNvS3 diff --git a/fixture/13/2/36 b/fixture/13/2/36 deleted file mode 100644 index 1127358b92b92e1cc68a2432686be6b5dae67eeb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1038 zcmV+p1o8VqT4*sbL0KkKSp)+Xga80c|NsC0|F8f5{r~^@|NZ`d|K8sBzkJ_sfB*0Q z|NsC0|Np=OG230YGVVcwXc`R$fB-ZAGynhq000dEpa1{>27mwn007VifY4|f00Te( z8B^H=X*Q2aJx@%KVH#~F(3vm<7(mc64K|vdnuA94rf3Zb=uM`Y5tB#iJt*}wG}F># z%}*j38K}q_dQ9~u36o4uQM765hJ*kB0imD(JwN~e&;v~X02%;j000Jn0000Q00007 zs0{!B8X5pH9XzJdF)|uYO-H6p37{}W$OAwLwAA%IKzcMZfMFQW(?OtUM$s5*WMML7 z+L;*)j3$^SA))Frc!=7JX`mSm(6+H^KiBGdTATw|oO)QxIu+&$u;OvJwILs_7ic$6 znppM73MYxn;et^gCUvl&)fgdli5hg`3wac?B9~U}fhN?YOj3 zOZ44aeC2MGU}69)3RD9|A_WKXBLw{;?P36feCGbf+e#`kKagc=^}KTfc6rK`i9&H_ zc=XM$5aa>;e1(q-@}^AJ+AKIAguz03uB;w;7Yu$7dNpdCPg#IHQ7-6Y1~l9n zgswgUs$dMSF<`F!FM>Qk*Yu&s@Zzf}$-<0iE+{#&R%X*>yw)~I*^kfjEpTN^Vr2?! zB&PpvkFb)i?3`1o%sNhR6r_(}Kj(19NWe(4@w64#dFAz%RXxd+7-a(q;R@CWP4Kf=9*)P>KGBaij`@xfX{o zpjEGE3&$8C@KgCXB(1ed*0D;(p1br;EnITD64e}nb4{%DAdAF9!wm7FA_6FlzPXtV z-vMGr#HFE#buUR^hxoOD^=Vlrb$(Ua0` zL5TG-WO|x30L@2=GBnYR6E!qFH3K1|)b$3*Jtu&XvRkxX3#)Z;K;$D5pO<1+=KQ6=CPrefq5r% z)tqN7KKzhm|LxVUxbj0IE%Pm}u>5^+7JwOh<2KkbL`@8s8?$)fc3z?!Lx6gCVF5-P z;e)Ey!5S~fDHWf9qJ%DlfAlB*nLI*p*d2u_j^tEY^&BnaF*P8H1kh-Q>)X42LIXw9mSB}6r72^j zNw3n9vCb4wdi=v$-ie;}v&7t80rOD~l}zOV%L}$TbaSv$IrOkY#KFdj?(Q*U9tMjL zibk%vwe3iZNF3m1xC}>_)uD{7NL=d%C+ diff --git a/fixture/13/2/38 b/fixture/13/2/38 deleted file mode 100644 index c7b8efa5281953305d69f199e8294d054c4b68a3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1019 zcmV36951J05kvrpa2s900001pa1{>14e)`2AMS4 zhfO^rMnK374FYMjG&BGJ029yv00x3E0AdD7f@y@n03!*20003n&;Zoa0GI*>fY8%J zK=lEj13+j101Y$%0MGycXaE2J02%-QXaE2J0000003gjYXpc}6#3ln%6DF7%MhT4x zfi{x~kYE5cGGH1e6UYK>6Gob8nq<@%Ks3qf88UhRX)<64H>o`XDD;u;!m8v^K5P(_ zk6tx*>M)s*JpJu}t+Ybgvt0Tz!S#uxiUyA1X2x02pp2!2prHiGSnTeT9iR}@Ns|_ zM-m0E9%-I!g(ijOh?d$i>a&>!L2u+y42LLU9HO4Wh!(#I=YODkX=beq*37s7a5$-xg^74YY#l zKE)983yC=&O$cKn9fYz}!Qk~?IE=4@6*g@S`Z+4>q1#xJRTQHJkL(H(wm1M=m3Ki1 zv_|gu*aoNfXV{x*u2Fh``fYPTtLeBvX*e>A6Dh$G=XH4p&euv5Rw^g2MZ;06tk{#wrT p2WTsphzpAp`W}anezua$59hGL1FUz_bwqxPxgwk>NMQLs{Q#!kyS)Ga diff --git a/fixture/13/2/39 b/fixture/13/2/39 deleted file mode 100644 index b83c29f5d206ac4718468bb881e7d4dd11d53d36..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1012 zcmVG z0BA4()6|9pGawTNiKYf5+Dy|EPf~gi9+N<500E!?007V$00T_`4FRA4000dDXaLXv z&;Shp000000BAtaGyu>50B8UW4H^Id4FCYp(V%Dm0000000u#zXaE2-0000D4Gx(A z14bYX0004?8UO$Q27mx)8UO$Q01W^DGynh%28{p!0iXahHU|}ATIM>u4n${~9ze1b zd?aE(pk{Z1t1p&E6wU;+O4kzR70GIY`;IrD!Lhcc8vdYM)o`ex#@-sChI)+&ws0Wy zpm-R}(BX43Zwf*-fvto}MKS#<0I;j+g_>LnlxKdmX*p2`wz~yY*C$vC-HAGpDok*K ztYK}vE^S!Os1#EuTEm4kkc)LUUD{SDY_AK6j=n$`uuJEJ*Plb&84CYR0Xn9DSBM$} 
zP|)unNEQGlP6|#0UyvNuMxe7Uc_ zwDWah3N*l10W2an8SpM?44zSU;&!+*EAeN~Mcg81qtj9eJI=<$R}DnU@83MO;t9z|@Y`74^GIZ}KSCJenXD>kV9V{h<4 zXoaN8{odP$Vb)I(*zrie^TZ}4)mFb;hzzmFQ^1j=2FV0RsHySZ?LWU3DY--E^{uQTsNykwiij}(1cn1m2hSK{Sw$$sj-*{zRX>%ogrZ$MSmJhIFXXdM!f!l;y@flMZLm zWpzdPt)#r{jN;w43yI{AhJoT4*sbL0KkKS!&sP_5cAU|NsBr|Nnpf@4xr||NXb$|NsC0@Bjb5|NiSw z|NsC0|Np=OHfMI!wb9cM+9sMBCQnTS(?%MennnPa382$P382jc9;P$~fS3k|88Trp zF{ENq}qCk>SAFu!g?pE zk>~+3VKNOgW`PZ)%!A^T1*mK*7f7vfE7FT|ju_o38@|7WDh02{C;*08Fq7gyq(}p~ z1d?^s3>7&T25*!A1~(zHrDB5pMT3-*7nhOk8M?G268BgF03r5`#ZYIHPZ>PE_=g8b z_B9`{wbVcY4N``f0aU|52NhrRKFfUIlvr^(V5j&}dvx2mElHXS9KW$oC3t{?fqVlt zMWJ%t96ts=^Z@!O2Ww>pj4qdXy)hJ@-EskNLd$Np3E?5Nx+|cOL^C~Nj?Jgc?LP_A ziz-kNCw#@rXIH*aC>*&>yo%Y{-fVx{d}pK5PY&%5&Olr51gaQwNP#DgfSiW8K6(&) zSxCsI;cAYt1|jmd2u6#xBM7_r)(t$?_q5|G0)|Rn$x;j}v40}bkcdaZ6jIjf_BgZh zK+ZX$mYSl8s4S2MVo{0RaE;u(}$hyvWVj?<|~z&hyIcTRca=L%PD38y*aRK?p$ zIU>Ws(<9geS4LSN((?;}yPiR_YOh$5Sybnod-F=$egkO6pi$uEfIzqJ;9R+b{%y;A zpHj@9HAJbOOX*HXGEx?JRNd-Q1t7V#YBqdL93oEdqA@VYHq);l* z7)<%%{{kN&?s>eBNwa{fW{EBiTFfW+UyW{lwKL6nK%!2{WFI)geSzk2Be;E?Ww@#2W{&5a`gHV(v(% K3K9)lFMhyKoY{#0 diff --git a/fixture/13/2/40 b/fixture/13/2/40 deleted file mode 100644 index 37e311ec383b31bc39e203745d712e7d98611f6f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1014 zcmV>T4*sbL0KkKS%yUn{Qv-z|Nr~_{r~^}|NZy(|L^ZR-@p9-|Ns5}fB*jP z|Nj5~|Np=OJ2jo%&ASi)&>A!VX){0o04JhpfiM670004^U?uO9DPy<0RpbY>34^RLRjDQ0`00LncG!cN9044;$03M(KXaGzBFalwq z9Wa_`Xgxp;00TyV01SWt0000000Te(27mwn000000000011b54*~BGUVyrUd1hZ@o zwwqv68NhTLAkb)3WY!2dql7Her!=9Fa)|)qt~}^00?hYW(u+9>B5e-}%=*Y;k|DVK z(!jW(c)qcD#2fyP7ULbg2=eHlVrD>SM2)FL25H#W z30jjG*mF>##hr|qOotoc3(lB-`suY#5mp?wRm2pKKn3|Yyq{qgFxtPwwE&;;W_~;) zkh@jQ(t;g|1qJ*<=mZ^*yI+6G!=Pt|)h5{X{|gqdwn3ayDo{WX2AKQAf$Lh->JDJm z?C~&+d5FjDc6%3_iAg^ECD2VLcL=}NSYFp_N zdrq8A8Cg6iW-Uj3k#-c7b-!T}AMd}xh#60>I4?H*8~{?j^`FWK)eBf4kH)OI!RO9AiohCN;iS>{!%K!iX diff --git a/fixture/13/2/41 b/fixture/13/2/41 deleted file mode 100644 index 67ce91617ec20e95ddae0ff624ac86413908793d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1041 zcmV+s1n&DnT4*sbL0KkKS*v(nyZ`}o|NsB{`~TnjzyJS#_y7O5zyH7Q|G)qL|Nr~@ z|L_0*|Np=O8#;2=b4Ny)njVz&CTJ$qdJRkf0BCAx9)w^|K%Rg!0GTw}5s`!d!JsAp z1i%2BN$EC#00TjyWVEOqyiTrkXuNCQnFo8UO$Q z8USbj0iZNA0B8UPfB~QY0000001W^D0004?8UO$TL;;Y{10V)~000000000005ky5 z007Vc0MGyc00Te(0B8U*0jeJq07$hI+g81m$3U>l1KgiS0zl4R=A{c~5-=NQzGrwL zT+^B90_JW)9~eklz7w-UU_lKmGKe7jmi(P$kiZIG#)WLit~jzk$d~7D6pAy0zYy}#}bnn?!g5tT85OaS-pE7GuCPy|a6@nWnB#@;A0j~Hh~P%!LNcCM`0 z^XtFK-HaM66yJgoIfX4mT9et$4)g!x7w~daIqW!DygrFg-1p5DcyE5g2}{Q_h=UX1k)Vu~Y`Ip32eCV|jgA1Y z?t~$L%F$gwdI*6C7CZCAR@S zrwsI8_xdr~7>WmKHg#bD*q~Pr$8f&J2B6xXt88*F`ByCVDG?VH!W|`B zu4Lv|Tw{80h2y^jVqwHQk6EUxv(gk)eQmOPOdKv>lU;{t`F`_r9nO7u00!hGMni5&@G_jOrZ&nwMfVNLWPhXHowY z!WDc0PsN6p@e|bEC!D1<7aCRuO6#1T(2$HCZ@HHF;xl9`D z0k;totrUm_=15qQKu_=px5e#2KEQEX?Xm3(bFMd z*#9__O0&n;taxZ(=!OJxBA}zq0G&fHXwSO5>>@t^2WLcrt`sD+7|Fm>v*`@CA$wN} z%cnL`wF=!eJJ$sOG6=v(d%+I!!6Iv|qKABeBv6d@9kThHe>$atL1Nq}4x6_e$S)@1 z*?~gM$o+Uc&CPOWme@qF1tMUHQ?W=(m2O8&{DsyyXm>n^RHu`qh7{VK4ufA&^ZLF$ z8UKM5F?7#$*DOZx5V|f3602K?GX6j|6B>Gw_uEK*?$C{&Fpbx%?wGA?eS~rVVC|fz{p^XBrr* zA->&#)CXI{5kM^VOk)UeA`2W}2+SuTWx$rjeE%Xv2ogiGB7fv?^-;TqTDM$0XWSy>Tky%IZDXSor2;PC$3`AqsbNFxz b>fArj$^j(FX|j(17yMnx6yZWZ|BgD;cNMf( diff --git a/fixture/13/2/43 b/fixture/13/2/43 deleted file mode 100644 index 4092d8e14cbf5ccbd28c9ea3edf45abc1b5dadbf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1043 
zcmV+u1nm1lT4*sbL0KkKS)K2GCIA6AfB*l#=l}n|{omhz|NsC0d;kCc|NsC0|L_0* z|NsC0|Np=OId^u>s-h25Q*BKqdO#YUr>TjiPe26Pj1vG&4K#X2n3^#esAzfyng9lg zf$AZXO*CW}MkYokBWf`{0McmMG)<}LGgASeVq_qCfW*Zyk&-U00000000000004_KmZK@0005d2AK^227mwn00000000000000000000 z00001pa2GjfB;k0zl7n^=8~omBK?xUbEr|}Q7{o;=>xvii`S3<<7sb%7v@(%p7?+Y zT*x1r4l~EKTq9jPM=dNUj#3B+nBUkw(RPawgC=K%kbM>q9a!qM1QJw=gk* zk2U~E+!0Rp9@-7b!Am3*jvCXV6ns(85{WAUXz(*Y&}3g@-}CW-&UGa2Ql5Vvawp_71!1$eeZ zXlb?DEmzVYGM_Rb4~Yuq23z0xhpWpXsf{wfY;%A7Zw1g z*tQ6>BF~X7z|b%sm64%RFqwat!s3?B(?W(ocNDCMIS1NLVOI_UB>cFeWq@+ltuYU@ zU~C82jne>q&2TComefVpeGa%Rr^-0kW2CX+kmn=Ab$`5 z^S4Sd?;uk3(l2S9?DRz=7AO_VoU>O@MpaN}@20_9<<80xo`kO_mv9jB1bgFq1-)Hb zq};)#TyZmTn@)eKxGLiNahMe$oyk5PkYmHrAMjcuK@&CWOVzjXCLpXxMUmJ+60ST1 zqHYQYC+@j|lq|T@b2JEPam3e+q+Gk95O^@j#u_#|TaYuFs73a#fb-cGG@kHKXT(Gz zJvX~NT~TID)KVfc_B`MdF6`uXqVNWd=};bx7(4X)yG6>@sdFSjFC*|is9P0#ky4w) zbjAzS>LEWwwNhPX0fE^Ggxs7Qf-9Uf35RpMk;U&&;g3D7+SDs1u+5GyX;!+dzUCmp zm_>rUAOd5~4!>onGYBon$R(dZHhKu2nWE5shY9?|h^oX6FPqLW#yX@NG<&(Xip0r3 zu`4u+s1g!}GSf(j(j`MY+kxQPOY{Oqvks2UAeSPQ1Brvo-oAbls$<(}#xU3$p&`5C N?ntK!5)-}e#K2sh&c6Tv diff --git a/fixture/13/2/44 b/fixture/13/2/44 deleted file mode 100644 index 6cdb9a3ebce7b8726d84fefe99e8e1bc535ad97a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 518 zcmV+h0{Q(yT4*sbL0KkKS?~sLe*ge;fB*mcdz-$&A9wBDI&@lk&t0TBng6}+@uu(7 z>vezq|NSrl6D%AfKr{ek$&dg520?@X(dqyJkkAbpJxqYe82}GR8f0klO{h1jdSV?t zCPqv^$Yf{@GH7TT14AdMG&IN>9--<1>M;#58VrpGqytR`)CZyf$R45U22)L_$UR4- z44D}j14Blc8X91YG-w$#8USGc2AUWM8U~Dkf*zrhBSwu41kjZzP+^Mpfb9{0AK;tn-^l&vW66qb@>W21gaU%e0oB!{=)%CsB*1^G6uBA zjtDSuq(F_xJUOT0Tr@OaeLpI}d=0j4PcGOg&1*iyU3vJI`peExT^kh5qdHF0rE#IYLJ^BftX`=-xRpG?~t+{a_LPTk|O-4y+6Sp4N(6=0|`{+7zn1>CE#H?CG?3UK^FX7$rRy2 IL%_CWoL2rkIQk07D4G z^Z=S*BTYR|DU&8?1|foaYIzMBG*8hzO#sSzo~DL@ieLZ9#GynhwfB*mh00000GynhtKmY&$00000 z00w~20008*=mt)d-9M~VbE5D`^!n@PfQmk|Y7 zy@e*oMb8rN0ukJItY}qW#XqP69RWrG38E}O!Xd~7iZYUBJc#}4kGfz5Hmq|}q+8H? 
zHJ%gWd~qppf(Z-6S%~v@M2UY-vhykOU#1`34?_P`Dq-b zl^5Pr2LbZ0f_BX#WjoMXaqI#!5!GQxAd?C*Z$SzKn@1FuRPsV0%Q(&`Jxg5|xE(@+ zgnK>s5gB9*D~)F*K~B^4TUE7ws$t*v9~D%qfi$kP1mKnNp)bJFwvXK%ht} zY|Zkd7&(d%C&)6{*07B)U0}95jPZ#P_86S-oycS2Qq1i6bgck*+96hmjz7Aw$Wf|M zn{Cy|(M^Hhrbg+Q2OMt%LA)8n0mGq>K>#ecvI0}1wV7RlG-05xnKfY*u-g_m8IxJW zgW^m`iZepW4}tRLLZcBGfZZx7kZrt@QWEARZ%(bejP{yXE1+k5UC9*TLP60cYB+1; B$u0l@ diff --git a/fixture/13/2/6 b/fixture/13/2/6 deleted file mode 100644 index 114349db262b6cd6607454d82a9b35df1ffd6580..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1009 zcmVG0000027mwn8UO$Yz-Ryf27qV)00xFY z(;@)K(9=Ue27mwn00000007Vc27#u4XaE2J4GjZ80000000w~5L#B*P4FCWD8Z^)Y zL7)HzKmY&$00E!?0000013&-(003wJGyovYO+2QV1kXfdGyqMYn4W~h8&d$71Tmn| zFcScnMn*;i!8Fq*nJ}0DL)5@%8UPbDF_AQ2(DeWmv|&i^i-wf0Q3vEzM}~5%M51lK zcF0rO|GHub;iI>35Cn{>m1>bvGa)`P+r+b2BMrMDU~dL91_~Sj^S=%|h_c?iS!Bx= zU0{Q*QJvav^ppTSeA<&Mh(g@rJp@HaJ}Ik_FW~TW;F8iq)Qt~$AXq}LQCN0u-qI%# z1U=UNzlXzP4`bP203wifJIHuTqGm~Q!X8QC4ZQ$7gww=dI|MNSAKwVock&D|l0vHq zQ@neL8MNpZNw?iGF`8_&=Z9|izLi>uxUJ?h==TneBmC4gRADdZxPT=yxC)|=jUl!2 zE2DN25uoV>Ast2rSJV$D3ds?}@Qm3yYn<%{Yccr-);ucnA&3IVkPs2T2);2aH+aEO zT7u*UH4FoF=0~=PY!AnEBnMt`VFhI|tON$AAV~`;xi`tvu)15p%rsu8i;YmvRjVb6 zk06X7F9YuPBHEE$uwJs_f}J2+dgFHf!wJR~4hm{#pvPtYwCGgHUm!3Q9)J`Xs80## z9?D{TVJ*iJ1UiZkc|lfhQGo!VU z9iHZQP2mK1Mc9}y@=$6(cyBRMe?(j7B0#2EPLD8}=o-c*MRGQg2{*&GAQ-^LA`pk# zLS0i0)1 zNRGrSP@&h2hXjb&QG^69RMS`&T)YqF!mj-Xi13-QFMT38RFdQIbT4A) z(#aecP0-Q}ed9TGJAG1pF8UsK8 z000^Q000000Wp$pu(X=ybJEI|8Y*ZPxKp>K0t!^xNHNF&#!&>IZ2>m!Y6II;@H`m6 z%bJznH>1!;iY5QU#tyD<-z9C>daltO4bdNxdX!K775IzHIhhPR0EoaSgMkZDiL(+2 zus~ge9zvq5n_C)$NQ)2PP=zEc_=+GybuT@;O%c$o1-%#*Tv$|J^j9Z(f1fa}zb!uC z8^?48H|rhN0Wn?1V20-08VhkE0}2Mq=ni0gAQz}Z@<1f}2RnQUOQB;0n)}cUbb+n# z8$%`PZE#a0pImM5694%K3x()Q_D0>7W;u>0U&P~4a_(L1yV(rtP8;X^rT_uyB-3!`16g~XZ#vkV1M1m zzTrxsAm{`ZTY!ipONH~L4zS!rwNJGu4mV-M5+<|Ts@8~O{6X)3^iLzqrcpJ9QhfW- zN#m31xFeO=Ncp)Tc_q-4!J4Io+hdXe^of*)-ZcFwT3{M$qeaoRh=X2$NzDj`*oIdS zZ>CV|^ska6hd=JZP)_s12Xj(DV1kmbh>BX1oBJW15raNpLEyXc<5<)Rk3s#G^70)C zzy(#D=EB4`(s;MVPlzcgQDD9mQT1SD3^s)k#Kn#rAr0ZY1;CO~t~M})!~VexPgTga zqb7UTWuZR6kJ@~F0a0!twx5vI% zuQzyIwAO0k&cP>x)D~tu{CFT8S{cQV7Lt&H z0OtmdCfLjM22RjABaY+5-YDTr!~~X`Yk*Qo2ow#j*&6RZrWB>YVL}q{@xTY?7ji{7 JP>@wn(+~al$nF3D diff --git a/fixture/13/2/8 b/fixture/13/2/8 deleted file mode 100644 index ea8c5a6adfd3aec325691d6a9fd4d8c69e6bbd2a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1023 zcmVzW*nivGhhLh7Iz#~kUOaLY(BO}m8 z45lUknqmV?jW(Jmr1T?dU;qXtk5Q@mq62A96BP6SnrXBqCMIY`i~s>J0GeTJG)LFyU+2msJD01W^%XaE2PgF`?700E%TXaF?O z0MkGK000000077U&>8>%)6u4rKmY&$00006fB~QY00000000dD0000000000XaE9~ zippLPDoQHdY>U0Y3h%_R0B8Y$!@Uj&EIJNfMvT<5Gd1-c0 zi5OwF2QNbmCdybv`Zu!UM2WjfaUxp)z#!w+J9Li%2)277X~1V7;c|8p1Bpa{+9*Jd zW$GYA--kV`SVy&wz*;^hDWOmhG1@D`6iS=m42ND^ruXZj4 zTgv#RqTyIFe?;*Yh|meiPo>}bI~@oVG9^@N7bNh!%*K>t`>-T!2H};yT#pctLGL)b z8kTO)MpzvNe~Eygp&~1`KtcsdMhM;oIZpJYM17g-K@rHu1{xZVbRYUTJeUSM1s%Hw z-%dP&nq|5S0+|iTxO@q06-_1`ZajSbGeLDL{G|ln;G0o82rPn3k`^&XiW*Ve9|inw zkOB#MGV2_XW+H_VR$nz)mQ>Ln@pRn_AyouCC8tA=N|Q6lL-;RH`Sp*w22uwGT%t(P zM>#Q`LO;P5_Tv{kKU=LL39m>KVn(40=?HiQt>QbxzOPu}=QF=0K*4CT30!RFkN||& z3`o4p{L5jB_ai4nljwH~ND_R5ee3V-q`g>TW>EeCfDXVE_hCTaAP@&(T~nbsaTf@o_~xT#MSir0`na+Kb657l0K^d9rP zIjm{<6xo#l#G9W9Ia2~81jL|PW1LU8zHkCn=zX;L%W$WIIJ5{Ecdk5gkZ>`5B!WFA z(80DMT^j6Oig9~}jbLJ23H>HYG^rNfF^6NO@pE0L1lFiu=hi+n$dz0q te0-RvP}{?P*avK>)!Ct1R$LKeSd2ugRYcs*4NWQDF64@Ep&_Vrx$=ouxWoVe diff --git a/fixture/13/2/9 b/fixture/13/2/9 deleted file mode 100644 index 
index f35df9cb86f63e16b5aae4d176aff7712518b91f..0000000000000000000000000000000000000000
[GIT binary patch omitted: literal 0 replacing literal 1031]
diff --git a/fixture/13/3/.zarray b/fixture/13/3/.zarray
deleted file mode 100644
index 739e2bbe2b..0000000000
--- a/fixture/13/3/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "
[the last eight lines of this hunk, from the "dtype" value onward, were lost in extraction and are not reconstructed here]
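The deleted .zarray above is ordinary zarr v2 array metadata. As a minimal sketch (not part of this patch), only chunks=[100] and the Blosc/zstd compressor settings are taken from the hunk; the path, shape, dtype, and fill data below are illustrative assumptions, since the hunk is truncated before those keys:

    import zarr
    from numcodecs import Blosc

    # Compressor matching the "compressor" block in the deleted .zarray:
    # blosc with the zstd codec, clevel 1, shuffling disabled (shuffle=0).
    compressor = Blosc(cname="zstd", clevel=1, shuffle=Blosc.NOSHUFFLE)

    z = zarr.open(
        "fixture/13/3",   # illustrative path echoing the fixture layout
        mode="w",
        shape=(1000,),    # assumed; not recoverable from the truncated hunk
        chunks=(100,),    # from "chunks": [100]
        dtype="<f8",      # assumed; the "dtype" value is cut off above
        compressor=compressor,
    )
    z[:] = 0.0            # placeholder data so chunk files are written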
z;UDepg~kH)<-o$EiJ zDA9gdAH%<$`d7!oP58ej->fS@ccMQ%*xuLI_~yTBiL0U?Pu{<=6Zrj8_P@V6ze(l| z_wc_#tgHXW!}-608D?CtuhKtvaE@0qd7D2?zI&vkU5CG)&pa$MSl~a%{rW2J8s)zb zS5l-tN7X+A5}2nHX7xWw2#F;cP0+utlKwg1WBxz-jqRG=n5jQQRPxlYM(e+X2w;tz zFzY|M*MLvN(7eAA1p6;P*YH35uJ!#xblN}4szF;O`_#YluUx|uR`@@VTF0ZZ_20k3 zVG#{5QvtxU@cL@CE$lx}7f>omW!67p{fHqQiu6B!UThewHtfF@Fy3COqR+nvc=M+0 z3fI3FG423i;q5;U^S8mn)8s$k(r+e|e#<`^!37wRNhAmndmKbhd!qjtJ>);e=k-wpk={RNf6&WkOXk1T&xu-$ z{qsLIh1OTYimbo3v!5*BvdF(7JoV9$LBYR^mn$i*;jq7RRuh&}A>Y3XAw62~jLSc` z^Z9C3ySTsL$q?K?LdU-jC3VC~S>HdNHoY(7X0t#3T+mK?yWziZ8da<8Zs@=0eAfip uFu6Yk99nc}d0g?zH0{{RZ0{{Rp0{{TE7{1`OLDoMuM2PLzVC+9ISrjMy*Rj82In;D- zG{QeoCnA=sa{52(yLnQ;gz7)i9^88${Ovyz<*tdk=+M6&@*W4OlXbrmt~3s4_SrwN z%Ob0!XXC%q9BypN=K{cW!$kqNU)euVqBN(2!|p%a+;)CvL##jDg*SVw5#zrf!euHT zD%HQWCq*~sIRL<8hItp0Q`Ns^da5d8*YUsgXSUOJW9GjK;%Cx0Jn@XP~g9^xlYXwy79l)o+}>WAm~5$`jT`gg1|q{|A%_X z=mNmG;i8l$-0weLN+aH8bFx2C4dOZHrszN0V9V$g)Xcv*miQFVtoc8x$d?e^aM-^I zr`)|V76rh<4)3A3b?m<`d`uLE5#T?!=-q(wT*N$@Et*asSaE-q<+|&x2J(<7L)wcGRG~qwcK#c-4M8`jue>B6KCD6YHy{&M83h=*i ze4J*A!u3Bwh9Rt@wEsU{Q5KI#==(p#Wjf*BUgkfdK!S!L&HBG);$l+b!Ti7Y+NCJ2 zjN!j0ded8F2&}&?mhGV*h`7IU1aU0f%*a3MC`2`{_U1#(JRB-2Xq5 z&K=1$68%3(cUs1_tkgef^6|e%;}hE%Psl$G@no``)3CqQgROx8 diff --git a/fixture/13/3/13 b/fixture/13/3/13 deleted file mode 100644 index e90f904990f4394c5b17c3a7b54513be30e71ebe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{RAgY;V|5O%+9ViuqxeD1&Yd>n|9IsLz}>kZCc zEa<;^Ws5PGr|iFCp403!o7O+sX@OT7rq@49k6+h<0n|UATP;HZS@yq(7sU#;>G8jU z>(^SyyW_u;6=+D!vX?*Y==ADla@D_uR$tMA;nP1>XOo5vH}5};HroL=`tQG!zm8eD z!tTGn{<++FQ^7x!$Tzh0|FplLx;9~=$>%@z=vp7N2!B5)Qv%8?+510vbEaSKXZgQN z{^EBI9oawO*U8rQ9Q;3J2*BoH$>=}Ar<9Qi{qeuRc;szjt@ytZv}+Q| zkI_E_bDAnxM8iK~djFa7xAQ+pD7Ap%jNw1tY=#J)2j9Q3Zfwlfui?K}obR0l=JdY^ z5f`}vkH9}>4WzoBc=o?d1Je_v9_>HxevSuR+TuU4reF81*5N-n#C|U{CiTBDx2`V$ zGQ+>FMp7SQ>e#=mzY^<&QMx|}aZ=cm>EJ)KBiSoCo@8uP#Hv)H>7L+8J_N;bX!cjLcsQblb=Wd^{gnMI-C zW4AxrL-N>sEA79)buDk{@995l%YgvX9_~NAvD=~usm(tt`r+m#9M3-}rWf>l@Welt zGh_+2Y39HD3wx@otmD7_5FQ2aEA>BBXuJ>^hvz>XMM~`sVah+^vjj7|Gvhx^+}#{k uMz6n%(BSe_tj9kMpiERq2ID{dNbrgvm;k`Fx>^et|Li{pZgWHGvg$v9x2!S% diff --git a/fixture/13/3/14 b/fixture/13/3/14 deleted file mode 100644 index 9afd0195841191c9b853726f804ca142cf66e72f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{T^fl>Qq`{=(SJ3I>5=kq^>aHxn6y#c^V2os6O zmexPtJ^Ha4a-=_?7dgaV&eOlK{H$09)ssKZu}qG-rQ$yg|CouJ^pMv1Mojq z&4tzLLE66(rC5ARKkz@I@O)yjm*hWINN6{wmfOFO<0iYfPPD)6eogts5|zIkPQevE zv-v+JozN6SKk~mo21&eP3EaPI7wT%HQ2D>3;g;-l_JRoTA`M&SpbECxU)%v<1f z!u!8;b`hdAoa8@`yQsWEYXd;oJs5HCL-9Ws{n4_`2&g~Zjmx&E!}7nQ238GQ;PgL8 zsUH5k{_Q_C#ofl9dHBD$A`RGlf7HK@uyQIXO!L1s=)ww2;0C}Ol;yv2tMk9_9xAL5 z{<=TlNb-gGk>Wp9I0Wc0v(&#TW>)BG{PjQk>Sp|vcPB3H3i)kMg3a zx$Zy9_E01@Wz|2%3~23SQTIRHp;|2lBHKSyv~O28tG2)Px(;^DsP{j19}DAK4Cg-+ zz%McZ)$~6Xa|jP`0Q0|e)?gD-hwHyi>BO%tDC)o8+Ev+fM8`jvde$O|F#SKg-t}4_ zIr6_BzFP>u`0PJS=!=86J@db7q!DHr)_>d*#1(M}FOLvfjUaS-2-F!s5Rv(-Yf zu3(~&}svEzgdAmO-K$YY@ uN7cVXWc>*f!qq>dzDJ7lIPkyRRYX9aJ?B57MfIH<$e_O@EtF43b?CpqC9WC( diff --git a/fixture/13/3/15 b/fixture/13/3/15 deleted file mode 100644 index b514994a7a297bfd209bce9ea9684f9658e2f0d7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{R#pO-9W{op@D%>LjRedxat-c8pA6X(BDuo)n3 zIP<^%y#`+2LHoaNx6P*A7~wxnu(_mNrt&{~utqwd*4)2O$VoXCjp@IzR(a1T|NOtP z1{dT7V*$XxK-+3yLFYg0d5koPmH)rSG^k09P4d64dBhyOBho)c9&#TJ_qIQWCX#{d 
z$J9T0|8Gx`SA0KfJO{hu*VeyH5I&=_H1fYATg@@&O60#_Osc8Ef6~9E`mYPuKK{SD zBsx{9De1p`hIy53($~Msn9tDv3+BHk3CPK5?A55X5~M!<(*5~Q|UjMQgow$ zF8DvBC4)L%Ao#yL@tqHS?b^Td3^-^ir1L*B@<_;t?)$%IHMYam(el4v^b<)NwBx_8 z5)E+1S^z+^1fMluFYmwFQ8rLiSMfhA2ISWOk(|H1d6+8TSMtBBlejYqW(Gi0>442S z>c~I6-uU9O6WBjPYT%Nu8sR^omxv_AKHWbBDo{LfHvvH9_DtJ3ll(uR4)5ru&+Wg9 zutq;wCICQBviLZCXXd|6zce?PpVU8>I0F!CIrqOF`(>N(mGD0oXz6>-tJA-v)|bAdmUHcf|4`-{IQ^ij@~zU99?DD@r6 z7V*D0i=K|!HP^oisZ%524f8+7zmRK_SoXilu=H+TbM-%ZmYH9eTEai5I8_hV0qH*+ zA4M`isp>!PLL`L%we3FyKnWmm@RmQw;uz7l%&I>S-_@aToa{fbdmXYQsqnvPuTGPI z9P__1ug5oTz1_dWM~*rZaMC{pBn@NZyZ%2bz%K4Ek*GheTH*Ci002NS;9(@8e&9d1 z&r&sr1>L{mpt`4ITJk?Kzj+8D7SF$|Ltx9$6VN|iF)rZ7VfDY)z5t(mBly2^E5gl- ux8Xkz?G3TtsPR8|vqe92A^$%#U%Lol=IOuL!>ILsYxh4ht^0WS2lYR(1f`Dv diff --git a/fixture/13/3/16 b/fixture/13/3/16 deleted file mode 100644 index 783084f0458cb198f5cc1b70dbe8246a6827ad1b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{S7rzBW5Z{NRAizAtuHTFOM!Vg0QH|{?e^o7G= zM)E%<$;t`ARsldc>hhRRobf-Z^WzI%7y-aGpi;-wq|`s2tY&kb5&6Gqn`lM1V#7bJ z=<(YA_qRVJipK7st>VAaBvbwmaqz!Bgg2Xd_~5@Nqj=c~l-0ks#sO2b+rYoQQom}q z{klIqCw1^&y5c_+cw%PT0Mx&7vlOFndGtT6y4h#{lcT?5GUvo?mF+*8*gPDaeAB-V zOaRLr^94X5IhX&Z=iNW~mc89BD*8W8wP@7EH0Hm7H*PZ=p4`713l8@MBhbI8)q^jz z$OAy9GZLN&w4z{UgP0hvUEI z1+|m+NZ&u5IFvD?0P#Obu~GH@H|amueT|f7t=_+u-L$GgoA1A(*&Bb96A-}NL!g&O zLGHh-c`;=T(bvBy%dZ1?IzEBin5AI@P-ft4q?3h5*3kx@eMk(!#%K)3{1b>(D=g#GP|hWaz)q3)jUgAn?EYq*_<3 up8da(K}{-kwc@|^3Ft~Dz4kvau(iUfwfDc$YLFYDKj}Z-o~mA(BkVsMIIm3r diff --git a/fixture/13/3/17 b/fixture/13/3/17 deleted file mode 100644 index 730569fccda3339f7ee9c2963ad456ccb260151f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{SfT;u6=p94S$o{Uo(o$x;?`AB-|i|#);gIf<; zh}S>o4Jt<07fe4eGR^5XMe9G?5{xgd`tv_@R7|ka(#1cG5m7E{~Sk@G*a!VPjZNwz;+*;2;* zHpo8&H@q%|^v}P4Agt84Oy55$`SwpQ8sNW;2kWv-p7X!Q;w??vlJ(f7Z=INZ@-FX=z>#v?@XCF#FZ zf}HnC>hV9FBG$*C=gB`?5&VW#qtZXU#Pgo1xadE=47Da|1lT`>A2|Zae)~T?f~g4W z_4q&DC8oR7dGo(>IyVpG*~P!u%R$WK#OgmvzTU6RDek{D(Es4Z7Wuy~!+?#LQ|P}) zzcb}}d%eG6JC;)?LJ+`BUhVXK((%6$lK*Albo{@*cr69RSn@wdgDkJC&h0<%`svI3 z)6qY%Adt{tnz=tXb-ck2Z{a`fpQ@u=^6$Ti9?ewM3-mv=D!x8G(6>K`Cle+4TW0ATk}7lYRZ;(_x!&|doHVhi=saS z_935VGUvY)f!>l`kl?@Z_z$vl>ES=$WX6#i`u4vkpGm)#3c){icnBSnsDPkLW+3+&K?JO3*)|ebrYBKJdRK+P$2`)5^cFd2%zV__IHeQMIbw ulhQxSIeSqg4%9yc%b-a5V&^~6>xQwfclAI016BZbGw?rXzwdG8d0g?zH0{{RZ0{{Rp0{{T?4PH2oAN0SnXmJZe`|>{%&jDenGwMHxx8`Bu zUeLc|ReY7`bDlpNTSg!1$*@1Q!#M3n1>CADMe{!%V59&p-ZrmVAg6O|diK{*#8qGgxN>}XgmiNDb zYb}HYQP;nGq`_4fz??r5;Db>B_WHkpF?*=a`rzv{K$=HWj<=y&Wnvh}~I z!MJHIyUxFb9Ux7w%!WUXJ~>$5@!&t#Z0TcEX6rv3%1kbLE8)NUneq-X#>qboZI^=z zT;e~=_VBriM904gyXyZu`P#o{zF%o`&E`L&C(gWNzsA31OUzJ1C-c9)v&%dC_w_$~ zluKN5Ve!8i!cDA{F}T0lNjxvcm()LL)1Ag4BHcg!BiiNww#7fxxy(U%i3h-;5?THI zdgi~u)i%p(_R>E~A;VFK?YqAhu{8mbU*JF8U3wr3SnxkSmWp%8HsC)BZpz0PROi1H zVFT)EJK?|Gi!nf98u~vYY^rn&?Z&@IWp%8pY`DK*HuG=iP~blnc;V})%kw{!bG;HM z*X+NGR#KK=z~8??F6 zIljMpBx<4_mCZkK4zdC+cICgtpp|oC>hr&a>U$}EFuT85mazZWJj_2wH8A(8m&L!a zG8v!D49Y)>+UP&Yv)XtLyXZf2nZ6R1Z|grD-ypuw74Sd&o&v@&(BD6fATQbq u&elJcy}-I&7XLp2a24fg6X`$vH0-FG3+umfeUCT_&;7r*Y|gnA-p;>9rK*qs diff --git a/fixture/13/3/19 b/fixture/13/3/19 deleted file mode 100644 index f9e07b2bf7f199ef1e0e725f2c72fe31d3a2f269..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{Run1%h~{o_BW(!V0OukJrkRP-$I0rbC3V@9oP zSLZ*L6D^0VC-OfRsglgadgDKacwQ?%o7z8vDz75&|G>Ww`CNajaNWP*kS+-_Bk4ca 
zj_Wwbs^UK-#V0w?@6^A_?Q_~YRs=w4l6U4JDEPm6E|nE}w#~mv!I3seujs$8c93sY zme@ZI-}8R-F8{yO4Zr2U19MM1PKx5Zf+|$2OdcUFq?B+jG zDg}^+3DrNu7Vc*2cJ4n%qjF6VS--!T?-%lmI>*1oohp}UKJh<;VMUYmM&v(aQM1nv zM#jGkjkh$Ziq}7YwB4Hhr^i3L08vq@=G{M1&d-mlPxL?S4+qsoWY)hgXkd6Ppya=~ z)drP4n}o*V%!@B~0O&1p#D-1om};ru@P z48K1u_kL2Cg519b$9pTD2+luzZ^O#l2K+yefypuB3H?9F)EvS(;;TO#gV&OUk?TLL zPquX=m&-rfMn++0H1WUBUKQB`REHt8a(h{LO;O{>tEn;R+^yWXChcRqZCjGy@NV^Wg+u^^e|LZ>K zU)sMmG^gY4G4?+XAIw50VduYmp<{9#+qJ)(Pg7Zm=$;`rN$hv+}qqm>Lo5!=7;^VEy5W~Dy}(flOI$Fe_<;-vck diff --git a/fixture/13/3/2 b/fixture/13/3/2 deleted file mode 100644 index 7339400331da9982b103de547d778a122cadf24e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{RE(uT>#LEyinAk^Y=6Y4*>pyf<7;Mc!vEo~e7 z`t!fX^+={mCj7r3iRr8J``$kaA2fm8M&Li%&|~5O4DUb0%oLR2`_w7jg;bvGKpxvZLAVWbZ!}L+UyiH1|Jfv>MxE+RMM; z!TQ}$3HCqU=o)ov0`@;FMm%KNsI@KKbBdh)+zvVt5bpyj{4i1rTK{qw(}w>q?vV!A&Nsq{g_C(*wrPM^)E)$hN92vB~Ww?UgZsF6h7Nu?^Yk9Pz(Q!L0+w1I@pojY1*WDCIwuCM(li_3Xd? za7Q^0rpmv>s9u~2uJu25M>eZ*p5VU)3@-D-)O|ner?yV754*pjRI*x*3CO>bdEO`% uvid&d0g?zH0{{RZ0{{Rp0{{S*NbhPt&E~%V!O29^bMn6$Q(AtEd)PlMqrfc8 zAMih{r*SPq|Kq=E3I=A*-^;&ALTS#bi|Rie7Z8V-r|mzGjdhRz~(>jLHnqPB=Ntxy$8+Yto}a>^!=$viP1lj69?ePTIWCVt;kKY zVCcWyqzVt1%g4W)w5C3QAlN^JWz3=4-u%C~;~=Rc*6hCrkaOgTHRr#lr`W%;%XcW%$s^LHD>3Kn35cR*?WUom3Wwk#F35>au9pgVj z(vr6rc>8d<@CRX1z%cP#gjjj zwSEri zj&s3iy7j;2WKq3HGvU8(njQ4Nw&1@b4Spdz-|W9=Rb#SVKlMKd1uQRBtMb2wPAu@C zD#E|nnGe2X5bVE-FpfZN;L5-F^5%91jqSh6^|7JUZL>e8nYHQJlGH!tRj@)d0g?zH0{{RZ0{{Rp0{{TP+$LPi2F^b^c*sHHFW|o~NX@7`{qDcyPY*$P z?bE*+P;?ArF4RAz3|3XD1@J$(puvV*K+wNV->jd-AGE*Wq=V(jx#>SR<>jrYB(gvB zp9@V_YuG
3H1^|L=j8EvT?^T|IyJi;cgL-D_$kbykSP`^Kh>!Ue_*!Dl1NS)t; zp~XLRc!qQquIj&byyo@YpZ7nvpo$ejyWYPI3n~(t+y1`^GT}RhW!FDr#Zk6Q#GpT?l?r?a z!}!0nW?k7rU*kWjIB>2f_~1Wd`OQvFb@#s=Xu~y{I{rTfgSSpE3FtoqxL_XpiGF$_T#_U95^a#s_H*wCkYXpO3uIM`VJGZ#s5dW5(&E~(}Xo9d0INU$IRM{Dncm%-I9WBo46XCz7nL1p| zF4jK)KE>6o^x?lV8{3Pd#Pq+;4T}3*0QA2IboGum%lW?q?bCGALFB(b4=`L7=kst>VLUNW?!E?FU=1)9*iHC=k*2hweZ8;LX%Q#?e3fz70oc(bzv5 zT{9QIX7fJ2T@QrPojXbM1Lq-q^Pr2s%S^D_xmQ-;4;>a<+Lk^DbU zf5Q=B9qT{CcTsWcl+8cKp=qYSjQYPt$g;@H#`eEobNqRV=5ar$9qQE$wBNto81*3l u)6736H(n7s_4PkhSd0g?zH0{{RZ0{{Rp0{{T=D(AMHM)JQ;E-vA{Y}h~0hKs#t5bD1(uE8n* zisZli?}~t)&hI~r26WF=3*|r5oZAhm%h$g+fwsnH0Q$djsoxuOhUdR<3qNGQ68}GS z=@lyG?e0I=HyBO_$M!$8f0UfHV$eT(O-dY-ch3Yw&cI6D$;zP z!sEZjmm3^ORPw)Gye!KN%j`dRU`k2%$KSs*-X5qu$nifiN~7Z9@8CZ_!TCDQQeFz&z3noVR9`RKoQkb7pz)#g9W$APxIkO9CEY+h`4?2SJx18yE= z<>Ei!PJk+4`Sd@~86KNwKlne8e&t0tt-!xL?~x3)?@fMVfOqA?QCL_PFf; zBj-QnR$n%}Ve`KV>WW8;|L;H6d%DYT@bACKn9r1m44c0&?_KTCd0g?zH0{{RZ0{{Rp0{{REO@|;~rQ$z$b>?jpQT4wc@lO)#Pp`i$%o%}q z|L4D;z;}`;PW8VJCgvGF*!@3m1wL4H5A;9nM*y-_BjdlB>2b4`_FUcJBHG6%5kI@Ledu+V5Hx8*->nWS{* z%F#be8p=>|P4T~|#7D`@ywpEZeqU0}ROCMrMM@cOSlhqY8U%3hiv7PFjiY7q@$x_R z)V4*)o3=mLay1mjE(XBtlyBMKj{rcmMuChAq~5>j<+zp}g9AW_fzIB%CfGj`PybhF z@yWj+8&50MftzVJ;hY$`T9TQy@+SsYuLZEItoFrLE1l!;z(39cKbhw z$~uOY8{0oE`~^U_*a1MK&`h-5ZoxnIx=O40sOG;rrXv6D(*nSI({WuZJkvks#@}l* zUtg9qhl^c;_eg zf7L$r`k@UX?TNi1B`rSX)oKF)bGV8zIV(COYNWDLj zh8|gm=GQ;jzAKJQ`|dx~!snLk^8i4u1?2$WT+TnkHStg%IorRz;_(*ri26UoK$+qE z#PB~W(Ze>Ajm1Bmv(QO2Mb1Cvv_m|=#KfoAp6v+Fd0g?zH0{{RZ0{{Rp0{{R4#a$#AYvjKvQWNrTJ?1}?pMXtgY}`Kufcnv) zD&fC*>%-f#And>JSYSHsY~nwXCKiXODdxYPB?b^;zVScNe!oeaqUyhhEP%ixY}P;c zc=k~o`Pjd62{YfVXLmnKcvO_PYUaNJUqB2PmoOJ)huBg9j#<>m$S&~1ypIpyLQ}Mr) z(I1axdiOtAjk_+LvIjton2aiMB-OumqTr?wY^Xm`@gUuyJ@7v*h1#phCh$M{s&-S{ z(#Aja8aB_yJnz41OJ}$(7yUnkQmN{z%;i6$N+eVAI?O*Z)bzhZJB@x zGU-1SP!OV*$mhQ}KBcHb diff --git a/fixture/13/3/25 b/fixture/13/3/25 deleted file mode 100644 index 2d77f6ce937191e900b2b2310674ebc2fc736a2b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{St$Gttm3;n<7w@M`<1=K%B`wT^4M)kkGrEMEW zBJ{sd$V%09SMtA~h=V*0yXe0>+=g!;tLi@rnY)Gf0L?#YJ!l$1F7Cey3eRP3mEyn8 zGQKKe@#`f8o@vPantA?T=Kt(;F7`9X4=1LA{InN397ciQ>P%CLNC+Kj^=%xUR$ux%$6X+jZf4x7xq0CCr{Zw(mdMGIm!81LMDN&asPc z5ZJ#B!asYO?e#w>0UHI@@#nv-b6-e7&f`CF-{&i(QTxB5Fdnd?>h8a~3Vn*TQOm!} z?}`JCSo1%wnqwb_Gwr_wqAGs=mF>SpMa&>MZqYw<7gvsHwA(-9yq7M|k?_C8U!JgD zv*5p%ZP#}8V(~vKJaRQr;qt%GQlmh@+SNZk@h)VdkKn&Y)mxjpE$lyGMErTX>eJFahcRQKt#74^UV)r`C(Rw8>7EytpdP-ssslR#pb`?Pp=CDaosDQ3>-1msA=|%h^kg#!LixY9%9Z>E*zP}c%ldP?3irRyInHUS+u`gYt4DM z;PAhU=}pC`X7E3)+)w>m^w&S2k+@7%%;Y~HSpD-TwBf%e{6G7B)#pFP!Fs+X-|@d4 zeZH6FvLcwRcYywdOwxYtwrhP2Rt&D#D6kO7Xugsfn}a uT=T!+q7KgsQRYA2BK$){S_Ht++9nn^E%3hd0g?zH0{{RZ0{{Rp0{{TQ(5i+1@{sP$WGxZRJ08?}orXAMC%Z5i$Q`Dzv}a zyl`Z2o`64-WmKJ*1hhYae4^1YV(7oYhJeX=HTpjm9H}%VvE@HZETtQswX#1(QD?wG zn&7|I$i=>wtm?lBHEa7EciBIxVY~iEQr^EmJe0K{9>G8FQac=!WB$MHD0?ds^x(fa zYJoN{?YqC@WkOI<`0hX4d_TRdgYCazqcRiu#rMC|S=wAwcHBQN-R7eJX0X3Eq9~bY zjr+eN#*9@e!TP@t%YGL%v*f=q>=pxfaJ9b|Ya|PUI`%(A1jNBuO!2=kWPXRKJ>fsh zaS#@zU;n?Mg8H%h8P`9tuPnCs8u&lK+1pWJBf>xR^)|JdVdcLm*Ui*-xbr{#isy9{ zRMtN?uwwH3sKY--@Y_|QNbtX7k1CS^NY_7#_&(Y(>E*wH7zN0x?$bXIcREBL+NeLr zY+IL#Sn)rvirV+1ZsNa^gs&;f9mBtciHBkRgo^2EPi_M`)Rc;!D#?^3SiU;RJ%$?*>)+xovV*4})CJ@mhK1BPkj8^XVD z8S+W!YNi{rn$5cX)zCh9-4GkIt^ z)3-mMn?3w9p5;IGC!riKLG!HnL z@o*;0N8!ITk@bm8v)eyumVi=VH{(Ccx8|Y^fdxRE znxOt{KK{SKbyXtrlJ!5fzbWp^gz3Ms+AqK4hWfulg;nYb2>ic0erK3o)cil~R~~q0 u73@ET0EK*Jap}KNjy`O`9_YXSVJvV87py-wBvgPPfa^aFT(#{5(bB)QLasLe diff --git a/fixture/13/3/27 
b/fixture/13/3/27 deleted file mode 100644 index b9084fb29ca81e59d1406a85fb89a247721ada0c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{T+VTr^fC+WXeu4kT58}W5a7N3kYsWu(=GLWCJJLT^FWWuFL;F7!8Ts%@ddELht6~tCVfMe?trdvs z5757IOzKNM$MnAqjk$o`?8d*d@}&XhiuOM_qLlDEWZgf_7dS_&5a+-C3{RnS^x{8k zxR7g?_07z*TSKU7$!cr3Yw&*{CfdwRdd;33tUnmvh z)zv>O6g%hkjrBhrc5@)PDdaz;7Zvz3|Ls4UyECb!pz%K*pxOaSE8jnMK)jNaGVwpf z)wAr2TIRn*qFAlAX#>C^K4}u%(&Rt5`cW5O{MSFWW6z;F0_?wpfF4Q#GxWbEGr5BF zxxhcQ_h?QA@#a7IeX(*Hqu@Wnv#6kVQ1HJfYgKv3bLqbr3p9;vY~?@ygay)$o3cMK z^N90f_V7RAs4~iYu++b!AwlXds^>qsfGuSEOr*ah16vO;A?Lq}Jn4#pv+X}{&D>k`?5dD5&QPvx5hu`o&2#q%f7#f&pjuhwC_LDKf;6EQsO@{I1=sn1l>QC z zAgR9vJ0ujPSmZxiG;^g!)9^oY3dYr^i|jw(fB`)|m*PL+9y2RQ0RTYyUfkLUD9$67xUuQXUnyR`ovy1NGfdj@3Uvwe~od0g?zH0{{RZ0{{Rp0{{S6VUbpSTmirbai#92lJ38>2&@z~&g8#*<$`J; zIQ74Ce*oYkeDJ?z&50H6Fv>q=p3X~h9OXYZx$D0#>LT{(P0c@oLx%mdA=^JWzJr&uvd%y5fyrYT4az^$XY3rNulc`l65Ul9 zvGhL$Zq#nf*5N;e(~T41Z0^5fAtB3zj z{!Tu?AOF7`RU|cmp7*~Lb0&E>WCFm!`qI$Ba+nCa_(qG3 zRn$L$Q8j4yOQgR6Rg*R@=k>pe@pr{AT+~0;7bNi#bnrj*l9=I+obkV=g+mRyQO&<5 zER!ElIom($#&mk7GWb7T4IR*biSoaXC{KCzW9dJQE4L5(<<`H@L8<3uj^)3+0)*aD zVC}!7JPubh`sBY!wP7V(@!vlW&o}d%C(XZ;H0M8%xcSEK_58mfy**QRGuc1w&J8m{xcfh%q_vCy diff --git a/fixture/13/3/29 b/fixture/13/3/29 deleted file mode 100644 index dd0efb2d26c13863cf40607fd0846070a2aa78dc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{S}85BNe9=%OUi?2jNUD(g z4dy>LRoUSXXz)LZNu4E+9QnV)xj^XU>g~T-mLnE2;PyWfo+3UlyY4@a+k7Fx^&-*_(2`L&z7P`MB$i~q_VCFv`l_EtAiRC}e0Uns9 zPUXLBLU1jC*5bbrVi(jpv*&}%GiSNHU z|1=9^5b;0w2p^pC9MHe+-y=I?h0ebs*iSRg=;}Xwpk#9H-ul0oeIDHHY59xsR3sH_1Q3fWp~^*x$c5C&Hi5Mf5+iQWE{H?ZLmIqXSwJ zzwkeC|AT`!!}?mf1i!y!g_|S8C)vL%ul;1V$jracCUD9Oeeyr>JJkG_LbSh?0oJ$Q zK;yq*R-oL8$l}nBKpF z&MxZTBgQ`Eo5a8CHZ89_H~c@k;Q+L>lkC6jgz`qvJodjE z2o!TZ_qo4n=s?QtI`Tgvc7Tu6hTgxVSy`s5y5qmnx?9h6X7E4Jkd0g?zH0{{RZ0{{Rp0{{RU(O3pim-Rme534o@>*&8_smkzybNfHWLUop| z;eOZoN`H4aYx!(BZTB$+*9-hIyim67j#*1F)F1-M+uI z`~2D{l<>cxg8AH(?#I8m5Je|YCYUr*vh}D40_uc zT-v|5RpSal#_&IaqehYS7`MNyAp&s$zWF~abJBhII@G^kqs#Z-8PLBBxCbkBvh2TU z$pt@DiP*pNI>sWjy!*dU?ego{2?4-LIvV*>{o_B#>uD!b?fbu*=aRQp``JG@vpV<@ z1mM4z!y*u$5bi%S_ltuNll#A~Bl6?kB;!9-^u7b{IMqKAvGzcBGuJ=G^weN|`|&?i z#(GXw{;@wqop6mTl>@-`mU!GRzT7{qIUF8@b<{tf#<=a@PtU)Zcci?{@9Mu93NTzv z8P~t7A_aXmRQErR>v)smZ|%QZcB-?v*!#bB2Im6-e)qo?@&j~E5~9D_Z^y&UyZS%o z8GpuzqX9s(Lw!%O&g4JiP*UezX79i4x5{k50PMdsMRft(2JAl`g5L9BPW3CuOfs^32WwXR#M6v03J2#cGH^3=buw=|t& zAY#AA(H1p^x%$7ur|Wj?;@LlJ-G$>vd-*@8vJ1nX-sZo?llM4eg6h8_qd0g?zH0{{RZ0{{Rp0{{THZ0^`jKLWty^k#WN;*USXTkfcFssF!Pz2uV% zNb|qO!d0nO7~sDV@>{9gNc6vmFcBroz5zft$wL!ert3e$iW=X-KlMLY0aYy!>(jsSNy?3hdRPw(8X>kfH-}ApGQL{BA?%O}2T>6oc z5AZ*3PaFhc>+C-TaqeVrSoS~6o^BG1`1?PohSEJ&r|v(7`MATf7vR5MPx}#$vBf`k zKos6X4g$amL<@EfXXHQGtjF3R8^b@N6YbXxhWS4Yi@RKz$>+Z*eU+pdf8ali%|?N; z4e-C4kEw-I3R@}di-10N;Uhh8^N4;gl`^vvuCvp(c z9|J(YjLZww?X$nm{uL&R-n>6hUpup`=HNda(Fiaf$?89bMQ>vw_Om}VMM%d&G~ho0 zP1wsGX2(Ad&=w~q>gd0clC3{E#qhryU7n0QVCFwKR1ON1M)*HdScYX{82Y~}cR8;2 z^Y*{61gp@88~48pkJ0h_&FDX;Z(iuEdFwyM@W1pcKJULvv$arjr2#+=UDBpXqu9Uv z1Q?lS`1U_-LQme1-|0VFXPybesP4ZEHWf4s!P>u{2VY><>g&I~p{es!g7v?R4sHX- zG|<0Tm0DVNE%U#e+W*w7WX8XK-?jc@?fyTeguV7eOXff5?_>`G)ZxFePn+XwaOA)J z{xE^`tJ%M%4j=cekpMuB2whv|Y4N`RYU0$<`&_@&zKoKA$lX7VkU&kOa`QiYmi%>k uO4Yv~m)Jo)&hEccipmKthtfaiY#i3y2=zZkh(3qwcG15*)Q-J^-0Ht+Ca;?S diff --git a/fixture/13/3/31 b/fixture/13/3/31 deleted file mode 100644 index 947c1305c69d365026cacfc4940a7eb4cc08a536..0000000000000000000000000000000000000000 GIT binary 
patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{Rg(;A+HHoQMTC$Mui`uV@6LvU~d{mZ}qigI-^ z+44Ubpzp=L{_8(#mA3yRYQn!>e@ptDjQT%;@e%Xbj{ZNiSV_Y%K;=Iz?IF$md-uPV z3*So~6#l=Ij474IBKAKCEp?JUQ_nxYgRR>-EBZfPg({dnYxTe8{a?qop~SziXJn0# zckn+YV);pHTIavvEIUGvJn%moc}S*uZoj|phu_xtyPQ8g-Bt`K@bbUk802@g7SKOi zry^0+iUB~a9{Ffl)!08qviT&=Rq4N1OKNB%RQ5lTxlKUJUg*F2KGKY%iTgh*H3Ew} zX7s;DLaO!+bkRScE61kBYxqBeeH(cDyz9R;S%|LsNwq(A#eTU#gz`U;+FX=Q3j)Az zdU_lPsqH@^@bJ1>g0{cG9KM7;68AqsX(W>t-v z3*x^!j{xc+Zpyz^o&~@){oTKU1E3%+5CFhe6Oe@uPq@G6MP0jttIa==yGI!-yvV;_ ziT2uLThc$!RdGTnzqLPWK^aA*9QMEXD{bQ|LDRoi%e0D%D7`8KjS~8 zB}nqe?CU?vbRq@b4(C7AJU^~{HeJ8bc-YLGwBbJ`F`7!y_4vOWvcQLcg7H73fH;-9 z=-Izzg14RBg#SO*n5< z+SR{sCkJ+zUDZE9$TjACP~bnggR<{sSLZ*+GEBOcz4JdFskpF!8S}piR%qh)n3zAx zhXEc7HtWAh5n21(hS|TASf5u~Bd0g?zH0{{RZ0{{Rp0{{SHl})(wn&3YsYR|WF@8mzd{Ggth^x{8fMtohE zA?v@^jH$Sk?D9V#=zvvtwCO(vcg_5RY1+S(WQ7#3{@1@Eea@wn3gy3aM3~rPtMtFK zZ=72bP~|@jSlokDPU=7S+Qh{;qV2!BjMaLvwe!E}?D+m0`0c;A?1BOQIJ`gme!xzY z9QMBgO+b2r3-Uj`mJ!@q7V$qWFGap22-mq6|O($sn|jl#oa%J zu3hDe&Gf%`riEx{KlVRQ5#eFVFyFsee5|R9e89gdN)&|2dc@yQ}w@G1?MB>{qMhj5i$IKNdQ0~i6pszh~hsE+R}EeN0GlG zfi-OhB;mi4_->gdeAGY7O#I8ok=j4KxRz(A{>(p5tpI6u8u34bWtBBfz3aa@bbx~2 z0`R{a_a?7uu<$=-qZ6gb<^VtCIA&#;!}LGk+KA1nVu}v_)Hu1j(WGFDP8uUN#MCW54UE@Dt)FQ;E z+3>%v@gVPgOSC^_tJhZp69PcPfW)i}6Y4)ysa7?)1Gc{?JVV1f?$AFZJL({jfR(?T znl!9ksLemWDkle2iP^uHNhU$~tKUEVxE+g3_~1WPEA>}(Fnd43hN7Kz?ZH1YA+k9m z&(%LFWF^Hc-{8MPrteLw4&OgD+$vEG_vk+iP)BhWaQ8phzdYAAVAMaCWd`cR$Kk&y zC$*j!TDre5R{^b*N%Fs5Ffid0g?zH0{{RZ0{{Rp0{{R%y&N6KMd-gy=FG8>4)s6X2$&NSz1+V*o<1aK z`0PJdxpc~K!oR=PYrMkWGORx-AojFxB>BJQW1l%@TJpbmPQ(U>u=GE1_<1n5?!&(i zwhSQwKGQ$ZXvV|V=KMdbX|?+eR=~f5f6Pi>mB+ss$tbrVO!Geh%hgoNN{K&nRms5q zdelF|4t7TC z2;9G#dd%_cul&D=16zvgN;OT`BKPFy_B| zB1E@stm8jX-r=t1fBC=tp10%*E!w{jE^;%8c=*4MIdS-nndmk>%Mdd&11U3)ik>o$6A_rfkr)4)s5-I=OUlRNy~(CXAl9lhi->l*g5^=Iy^HGY1xqS=m4C z#L$S$sQ*6{q+|UrKK8$%ldyINUFSb@L1zF^X6rwad^0?{oBKbx))hcQIp;qdnbN|{ z-i1Hc<&tR;4BJ0yez{@OC(*yZ=t!y*g7ZH{ozUf;$oRi<#~%eqIp4phAKuH*jod$K zCx7W7yz)OMvaUUPI*h-)OqG}D2syx4n8eAmAYa@lX&iqyY#<|z^Q z+1tOnx}=fR(%Zj=rD%tIbo;+}Sm+73HPb(96f)Clm*qb+uc`fP`S(9?%2t+Vi0eOA zCI1&k=IK8j&)kXJ@zOu1tjOzWGyK1;#{~z~?a@Daz--*N#JoR=VaRt}dhkCWjM?+Z uDB?d+$!f8xpTNKK-|}sjRR2Go&XXsOChI@^mO+d0g?zH0{{RZ0{{Rp0{{S9GLSV!Q?Ebo$1*>Q=8!*=qC{Mx|NMK$K=0ANJPw}<-<) z3iUr-zn0)EGw{FouXF8j1N=V~c|E+hz4O02n7X2EjP5_%@rBiJbKbwruW`bWChflm zzVDat5%oVjRBU{|?X*8|N>DKCL#97Ucx{24yV1W`=X+jPo8rHgX2_x9qQ<`+LLb1e z&seb&DSk9pItTk1c*1Z@4%>XE-}xn&nyJJr8A5f@}UH;cb*Vc{o> zhV(!Fj&??3QqR9@z(c3cn6f{NPG^FJqt#^yhq|I1ag zed#}%M+VNB8}2{cC)XU|dj3CTIi<4`-QmC9q&s31Y;FjK>9ygqcqiHU*A7)Q^RgAX+l4LAKyRDck#dW+gN<% zxa&V9M!DY-2*kfx5X`yAtH#1#zFWbMxGqzM2HuFD_VbRTMx!6AzEwG1&!0$iM zWvas`vDiP$7NiARobA8t*K7({V%NV7OAknueDuHFUy!zWqQ^fwha24TX7|7J|Gg{T u&eA`mU0vtT^bEic5`3ii>ghkpcLp4xsPVs*_a7yDhVH+Xa#X7@^YuS;xwqv2 diff --git a/fixture/13/3/35 b/fixture/13/3/35 deleted file mode 100644 index 18b648803de88306b1b472fc882230d093f678e1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{RpFCb!7cho=b*;kk?73)7T9^(z#74W~({$S|Y z$L>GN`?RfTn8Cm5pws-C9KJs+&9IF@*!Mq7OMf0XoASRd55zr}H|;-v|6OvAs@Olx z!m`iaG^l{_FY3P+qIG6k{qjG$_KZY%eYZc98IPI0 zspY@H?o)(o0QbN2?$yVPqTRn)aN#^S%eye5WuhUvdZYELjT zC-*-e(SC;tw%fn-Q)blPI`_Y`i+LRMugE{DM;%dqpzJ?uPqs8xk?y}B>`5B+htogtD2$rFkmx^%OjOLNM7ckx8!qx> z#Ph#a0*a`>67#I*=EI2U8sPSn2~6{O5-y4b%Kx5~zKHu}HK(Vngi zX8OND+?$D;ZtK5o!v^E;dfdO747*ip4Vu3+X&i2o73M!kwZIdsMa(}&XX+GNo#sCr zOc4}#rQ^T+5Bbh>AF{uoGN-j^P1nEdBTemmMD)K!UQLgUmH)r~7tao!Y308mC8jY6 
z*#p4u%A9^fAo9O{N7p$Aa^XK#cfFsD563@BiT*3`>Xkp=h)#)b9xQ(&ayyl*L=$)9JtWK=V$^!O=fYpmL#lr^7#qUZUgxkjg)th<|6qzu3R8BV`c9 zZ`41EXNbnUaM?d{_vPdBh>jChI?yw!=SA#`C|eCy!}D6YRg?8MuGg_|8As zt^=fAYNd0g?zH0{{RZ0{{Rp0{{Rs=gTNQNcg`?V-30h$n?M5%30AU75u;RY-(g3 z6w<%lx98*BN6kO~0pm6eu=Kx_Qx~GOjLg4KCM*{6FU&t7iVukbUgkf+Akv#c`@=sD zK9#ddCDlI$ncEC~C!@a-Z*jpVI`F?U9bXC8bm_kz^#zScW zg7?41z|zHlckVwF%kfbcTKB&@f&_rvj_5xO3c+-&8}C0!cC2cMZt_32eGa(TsqnuN z93Lna2gN_x5+z`GIOD&v1hs`AU+lldJjCQIbf!ORE)(%+3+g|X2t15;3**20(b`ob z5!FBHu+pDN4DG*?zCr*XvGBhh-1&9{!_>b#9#(TB?a;p_J#=q(UgbYn)ECk|2K2vf zAN*nBL;*lM6**KNt^B_fk?)L4G0i_Ia4J-;KGMHJjL`x%zvRD0BzvW|YR11gk>-|V zg5^Jg+$R_Rdg?#i5u$n^*~`B+Xj$F^Jk>vA*+-+`NcF$%EHPjtmdC%i?iS2zJ=s6v z@-Ok4Lju6TFJm{{wzNOYxg$TohsD3od|PTLQS?6#A0?y{WW_%Ypw|w9wcbB1eEV>4 zV9~$Y7FW8xIpjaXA}Lfw{^36~sdCZX=k&kRn(9*t=hHvCJSXOK>heE`)fU${d+$GM z?=bU+2b@1|&iw+9mefBp%A=9(M(sZghKBj`UX zx%N=O%Je^#%~;o3+o(TEd{Cv|mHNLQG&6GTfkDT@<{`9{KzN3Gc36{UMwR;YLhv~oSnvrCgiS<8i zv@$Qxpy9s@2zrVEbnw4OWf}F>zxqE40}^a+6W%|)23p@yyahmXQhkNY!T!I0S-*o4 un94utERo4Z6yHCT2xvc=eVji)PXfV4MvA diff --git a/fixture/13/3/37 b/fixture/13/3/37 deleted file mode 100644 index ca46ea9cedc356fdea4851707c594b6126a5f689..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{T3Xmouc2m8N=kzHNg(d)mmkjtPRPVn#n{TD+TLvi(IGV{Nz^+EomkNm$wZ4Ugw{WH*k5KE z$oIdRx}A~i&dxugq2)LB{_($9TL_)Jt@ytgUUl$AXn(&$la)=t%)~$5B*$1xKe@lz z+#&jH3fwV|XA||SEzv({CG<2$ z^zc8%X}eYWw9-G8j@tj>_v1fqSOe@A@q|CD6yFsWHQzr@{)6mpits-s+tyGg5&l2Z z9atw;U-Lf&RV}9xG_1c|WaV{@Q1UNiGoKBcTZsWgHfVd`%0^&boPXFQ~ z5Awf&#|O2+4&py1GFf;N?G?lb^7k8QniN%~BBWJLW%1c{d0g?zH0{{RZ0{{Rp0{{RLVsI5zJq5sXCNQuu=FUIqvVA7nLhe5`G=lt7 z`o6#BBHKSkb=beLQA>%p-qJtDB$H=Vm-RoCd;lKi-P}K@tAV^Ms^!0kOQR^lc=JC= zj{T$>mi9j!vDA2^)#<-A{9cB3V+KICu=kB&W<|eXVSNCBS?)h<@6yX&N9R8UmqU>U zR_4EDG5%o9<=nr&$+L}2Rk1%80>&@J>;1p+HCeoJMe08=ns?~=RQ|t#>;-V`364Jm z6_TT^e&xSz*`%(Th2lS?d+4Vnq5;5U_LzTOnEJm)NO0z)7~{VyG8mFxn&3ZEyI|;2 zUCcj_iPL@b&G0{Kkxq+HpSwR{iwa!oDw4m3v5ECBCGo$#b=Wgzm(0JHwFmgx`;)&6 zYHB(oZ~(w?$qh!WeC)p!&N7vtr`<)S~$)VD~dSMWcm7?Uuz)AGMN zTF4I{AJRV>QH>j#gyz4R*tb)`z~{e6E=iNE)8{{cNcusY)xN)-=P&3bEB3$c)}nfM z7`ZgK;Z%hCk;8~49&QyX9B+3&wa1lvvhT>wA_a&N z7j-{h9LT?Z**||tkiEY^hf5p52GPG?gjh6{KIOk|<=nXW=jT69u)i9RD(*j6>4^2T z$nZbg=H*y@Ak{y5`JLMwdgDK+d{{RU4A#FNOk zH{RY}tjWJORUb>ohxET1OpBrO6Y{^3lM4KmH`u=$al1Lwrt3e(iNjQ)Wz|3RrvPaI zqU*ooxTWxwzo$RWi?zcaLiN9la}_K3q3gd_G4or?gZjU?Bo;6ha@#))l&++M8PdN6 z&$YupFvP#8vF>W6v;HkO-`zhyjWC2f!v8-S06-LaZSTL;k%1c^ u0{Oo{0Xl0PAoRcBf(F%;-q=6)gfSP;@cTc)Zj%@XN&7z;Y&#`*3F|**y{O>; diff --git a/fixture/13/3/39 b/fixture/13/3/39 deleted file mode 100644 index ab5c72a814d0840af0f40d5b4c36076c7bc66dea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{SC@~C$*BkVtkEy3PQyWKx6N26E-H0MA4c2X|I zaMM4WovZMwm+8N1he`Q8YWqJAaUZGeUG+a_$nD4(-RQq2e~GtOz~n#K=a`2-VfR0h zLp{Bx%i2FDM@K*qnDZJp6 zM!3HmmcGc+8vj3HeljlF=RQH%6r{F(9GOf*<3dBE68eo`?73@EBq1#%pAnHFSU{8vbN$Nis z*mUleq|v|4o-0>~7Sg}m^8XY*ipsx*6M4_Sp5MP_Q5dm*4ZA<5Hmw7-2Y0{5q73mU zd-^{jz;lgqC*Z#_SR4NvlkdMlkMmXBspLPox*f?xPqIIXg&s?Y_T|4F6CzF*2kAfL ztmW4L+tI&XEUSfhWB5Of8m&r}XYRjQ@Lnzzmg2wbIYk9ESHM4_r(sKcx%EG02dJz8 z)#ksvnWIRn7}!7IQl!J2pYp$WRP|xcRqa2*?bB4f+vLB-i3NT9%;P^IH&QnCbLBs4 zq-wPe4*kEChN|W`%gR4T!^>=$F5tg=$}ilR=ifh*KUA&sz5GA8Aad0g?zH0{{RZ0{{Rp0{{TVcLZv#xb;8(`4F{7UhF^4S@*dhXYs!XphnHk z9?(C5_$?vSH2}bQjRR))ZRo$8W>g)$hw(o^f^!7eNs7NUhTJ7QCk9mHs~|5i4(-P~|^F2rBht)}X)S`gJ8zFx|g^s*Jb}Jn=sd zJU1d+$i_d*RYgZgwfjGGv{um80Q-AERa8bk@QsVTH(L) 
zXLCvSob$h$tyP3A)!RRY^TCy*x9UGSL1*$kll8w=m;*bEQ1U-EwTTQE^0~h}Yav(D z7xcdx3%@TbCF{S3%r|liFYdpU=MPgB8`S0kf zvgtn%VNlCPy!XF!%vb)gtm8kW5;;}_qvgNBDS18r&i=nr*(TPa8da%FCf{!~Pr`A77pDb740`1REV{p*^A-}P4e!5f4KRB(HS<4iv#V(U diff --git a/fixture/13/3/40 b/fixture/13/3/40 deleted file mode 100644 index 82cda9bc3ca57a2fdf334f25943c72e2fd4163bf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{SOD(zStjr70B(W}+0#MeJ%ytyyWGF`IAyX259z{4(l7XJF z(aS%~)?a0CsrkQ_DYK>$!{EQr$wHsFG5$Yo5~TZ{p4~s@f?kKYUY)=0p%JmDO!hzS zP?UdBQu@E*Vbixji`u^>SQj8IGXg-mEVN{Tm8rjo=E83-Ag#YAHV5X%-SR&Ov95G} z`tv_rA>d3R)AYZL19nu-W7|K9nlaP27yLic`K~M_l;pqLNpv^p)z`l_=yxEef}KB3 z@w3PhIOabWp&3NC*Y>~L%UH>D<_N%zO29fwmCHW^b@hF3r}96o5aBT-_tU?XXPKjJ@usiNElb@IOn_kKC}u<^e~ z>^Z`h3hO`1p4n<(Ci*|2p_P!da_m1egeW0CZUDe@)y2(S;M%`O?Njm7l+Hhm{jzj_ zfUZB6Do&{Vw*J462hb%@!SuhbMvI6AsOi5(UMc^8PO-la@D2*t@&3O#E|Db*HRC@k zdZ+HO&9}e#?XnE^u<*aCd0uLD$H+e}fcF`5nps zdHTPXn7yAE58c1R(o?3W>*l|XvFnU4nAkrU6Mj6z8k4_!yUjM7%>Y12LHQJE_|U&P z>b*@dWeUKd`&^4tMgYK$d=9l$BlEu_PZ}JF6AM6k=|Toz&7!~XC~ljJyzxH@zS&BL z9q>O1lS#rrMfAV!>Ls0{u>8L+WW&JJjpRQJ>!(3|!vVmn(tObSNbf%oQ+aS!_r1Sf zHb!5!w&OpXUQP>{Ytujc5dkwt*497gjS)wozq&tO@bFy21=2t1==*bYn5RF3&C~-4 u2KK+aZ%>f=G}*tvoGrw#TLC~&%MM+68q`0)z-BRdNAf>&PScX;b??8QRI>m8 diff --git a/fixture/13/3/41 b/fixture/13/3/41 deleted file mode 100644 index 109deabd3739a5079ad1fe4a3325d8200eca6dba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{T?%fWg>0`R}n++b8!LG!=guQ&khPW8X8R!SgP z_4&WH+?$5^+08%t1VvrE#n!*?lx^4z1nfT}IqNrx-P6BSua(HvIRn6>f33JTWZXZH z;0t+4J^;WA&N(ASamzmi%Af!BuIRsmdq#)j!p%Q9#;={jnzT0$l=9#~ipC(lpBI&;~9Nq^!YWKf+cmtCj2I4<4X)~G*udBTAyoy)%ft!Md{j`qLbPGL28Jfy#GXXtx; z4c9+}3!6ix9sR#4zN{U$8u34GIhoc1$>zVH_%>zG8Ogt^g*NK%Amcyi;!ELd^nJfo zPBzpkboM`5o1_yiALKu2R>9Et-A#IQhT)RZU7z5d6Pt$oZ+flg&RF4kUN^lJ7rC zD8&XpHSIs=2tBCrm+HUoCwUbdu=PK2X&{IuSO7qiD9tFiapOOdPV%_r%lALh*R=)Y z@asPsFP#ulXU)Ge0iG2>MCHHi-@XYe&-uR*2vm9fcHF<9=5d0g?zH0{{RZ0{{Rp0{{Se;Tm+=sO!J(>oithe(gVAWyqb;eC9vg3do5; zxXV8t%*Vu^+3>$&wN&w*-Q~a5vwqvvO#QzvW*_iJKIXq1v=Trgv*thE){;>vRM|g% zBYU-{aKt~BpigJ8zxO|!3xZf`ebYZ(1p#agc;dgm$|Hu}W#+#SYH$YDsLj98<^Qw( z&)mO29a;?2e(pc{QM8)<_}V`lzI-u;b=1GwTZ{F}1=7DgF;C3&faX6Y%EO`2Ebu>Y zuKv1`e*M3Hi8Pc->czkMw?ShV8Q`yB&@%+^2uzC&a5!K^<{FT2G8AJD%ZSw25dN%TLu_VItR zQ~bXIetc&a($YV~`js1T4Y9xC;4^DZ3iQ9N{1GI>-T6NEJ&J{-0nZUU=q-ZRl>jL0ngmR@A5w~Gq^QgAH2U+QF)oo-}1i+ z9dWn>RrJ3b{NE08`^CS;rrnplP3=Fr1H0&MNaerJvQ~b}YUe+Y{e1k$$+Ob{6RN+4lpO5)uvFN{Gm^PnQsNlbHJaIxhBJsba;5OfCbIre3Xs8;+#{fW+QW*@- uht@yn#6qCQoaDdK&-5e{G~K_Z)O>si1^2(QU0sL#7V^J}u8>IFq4dAOQL>Q$ diff --git a/fixture/13/3/43 b/fixture/13/3/43 deleted file mode 100644 index 40741db4160df61de1d03608bed431b8447fcd70..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{TmG7s<`P0>Gkk%ocomfSxA)=Uv ztJgnk6y#UDgy_F^s;7rnU-G}mHo$#PdGfy|BD5OKF7v;>7a75Z^wvK~7h2&B8~{MS zK!R+d!SKHYakQ^+;?=)-??MJ?_{l%Xw(NC8+tI&XPm@~?Pt`wUZY3g`Y2v@(ReRw~ z#pS=Yo`uxyR>!~K``hVZ=F~svnActwh4eqNU(G|y8rVNKM)Q1DBhbH5H!dUHmFhqA zD`U)Q0PsKX9P2R&YSX{u{<1%7^e}4y zefGcb92uK|{@Fhr?vc)m)%QQhX1Rs_82LZ^pIpzdsouY**L7r|O6I=+;9BacUH(6Y z`r+$05&1veHrbX*=-of#{PNHvW&6LOn~!b!X5+tNT5@DJfAPNubI?y||NK7}mEnHI zX!yVEe`uk;+x$Na7f8~q1G~R+FlWogDf7QcOvg{!lkY#@Qlah8B;-GSkxa63LiE3j zhot>b*_pq5saXJ20`@<(xjOvYJKDd5*@E{f;QBwK{`=QL)#tyL=zs3f^ZCCe;$^gh zCZ|6;9wgz)UiZHbn9m8F_`g3fmFI@}5OqH?QXEhqN$9^cl_y*k5B@*t0evFNNN~Tc 
zKhcBXsOvwlI+S$|xa~jt#iASXuMzX(ZEJ#WN&t<=)wOkuBlJq|;ORQ&7@7qu<<`y*rk-|h~hscrVIK$)9t@i^L+(I0mr{RsNk@w@7BMKf7@1# uxAVUh8B=Hwfbl<fW*vDTVYRkW?DYj(* diff --git a/fixture/13/3/44 b/fixture/13/3/44 deleted file mode 100644 index d84757279befb14f6ec86ac0f9b39fe8761e11f7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 396 zcmV;70dxKW0gwnF0{{RZ0{{Sw0RR9L0001V0RR9fwJ-f(AOckj02CIsK2^4a`QSg` zxi#VM?BlV8jd)Sxq)$>0&JxauC5Z1ri;s}^Oc>BMXeyJM*>d3#< z2Rh<2ocBM0JahFGd&55t<^b{*DE2=90RUmVDwqOvr@guW diff --git a/fixture/13/3/5 b/fixture/13/3/5 deleted file mode 100644 index 6b1f005bc67584ac66c2b3f82fd60cf43963444b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{R7nRfcYjLJV!LTYi39AT?vEV&28ekD@;AlSdMD*$e$v+2KQtLCfgMC!kq3ZvU( ziSj>NG@=ZYMcluDci&sL4go+kga(K8XXQUjlyE;zR|UXe29s&1L+n4Hl@u1kH>?C z-}|iKZSX%Ny2R+{$|n&7`G=bo~&&FMd?mJ(InpX9&nY*7BS^Y_~+!uh|p|HP1gJ@h~3#|jXGg5|$tv5jLyz4bpGKhJ*9bOgX8 z^z#Agrr5u-_mHqFu=Ky+S;l==r}aN?^J^PP-TFV^-`a1izyiSG2YpNu!G&P=BfRrJ4{n>#V)+W5a`;@Z0Q`@X*sBIe*# uviLvNJ$S}b_wv8pu{7~@YsNn;^+XpF@8-X{#b+HiJL|vHtL$$f2H-zzaH#44 diff --git a/fixture/13/3/6 b/fixture/13/3/6 deleted file mode 100644 index 69a34308a414fc621807533294a747baeea17181..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{RSUFg`&Oz1y0q+=tWGwHuv>kCsYGWI{MgpQ_6 z3fn)=$=|e1@6Nv)?!HO9J=Q-b>eY~NN&rB3Kahp&9OyrJ5ZJ7_Tjsy$nV!K8D3ia{ z3{@Iy&A>klYGYBFO65P)dVB4Esm?!IY{V%)Q~p1|`P-Hc1kJx%%ek1bt^2>1Fa{H> zb<02ZNB=KnJKVpo!L7`K5ct1Z$fmhOgXX`aex?@G@7BLhIEq-KJn}z)ev_7ByZ684 z@HW5UV%9&Nt`=B}!vR2(I7heLVaC6k!q8`pB+9?YyK=;62{n2V$8p2^vR{ZUHZR#kW<+ScHzI#Ufl-uMe9Gw!F2nZsQ$m0BeSyZ zc<{fydF@!1jlnJPaDw9|L;F3>X-$x2KYaGqJ4Upn94tp-hZlW#>&5& z1ZGEG{OUip%D>~C$LzmwPwhOpM(@9Hri5xgd-A`(n8HUAZ1F#;gv3+gwCcb5em%H& z0`NameXcxf0K-2o0yx3Bpz%MBwCFjdukpX+xe{qP1mwRs9S|lc4&6Ttg-YDml-s|B zw4H)s@&mwyq1#i&^!2~b_)S3O&+)%@n_96~pzyzFDW3K!(d9pHo)xS#lGQ(3V2H5B z!^A%T0+$ZK5dc8b_@OAc&GJ9}@e6Bx82L_wi9n(Mb zUWGapliI)SL6G89P5VDuTCXBzH>yA4b?gXHr}MwcXr%Ik>hHgmtS7TPLIOZ+l{Ht8mIsLykWcDnaTJ1lGEJpbfNB_U!`n!(+ diff --git a/fixture/13/3/7 b/fixture/13/3/7 deleted file mode 100644 index 6a9b4472342facda845c191a40d36dc89bbe4591..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{S6fYh39Z}z{G6hRM9R&YOussd{T!0|tG(O6w> z#<9QH_mt95AN0Sdi$7gCUdKO>css^q@ZG;ABw zjE_I>{n#o!z39KV)V9_gwaGs?&@iJLzt+D~hh6}bRpUR%!qey=x3523gju||$=N?B zux8H^@ZLXWgL13DFWo;eM~CRT=HfpEhj8i7Q`*1boR<{htOCGclsg;J)8fDP zsTG`oY3#p1_P2LI;L|@T(@<6g5X?W#+)weUZqdJ~WQ$y(7W_Z>_?k-DfaJd0qnnnFYHk`ou5Brq&d|qaOFQX4Kfd~KhQtsXPcNCy3fC$oO4O+NZY?|MRQZ2 zK+?aZUBjnn2H`*Ptou(%Qr zCh>&R*W^Drk}<$EpX0x_Q=+J}LFT{u(ygfgU+cdu*a4LfyS6_T*kghsOZmUL`&($t uf9$`>yLJy#UERM4tWv1V;_yEPR2P`^>cGFXvMQJV7w|vBlP76&0?0o@OR9YU diff --git a/fixture/13/3/8 b/fixture/13/3/8 deleted file mode 100644 index aaa1307bed88d85048483aa630fc030fb3ad19b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0g?zH0{{RZ0{{Rp0{{RD#ckv2THe1Db*3>^G9^TZ@Yv#XXNp;_~N!h==?-{=;tKL7; ze}pvwP31oi)IT{s%;LYZYgxOCt?|Dxhj@m(bGAS7_Pv4H(Az&!)SR;1Y~;VT42$qK zA?iOT#u&(#dge5_AeCG0=Mfi8NCP1e8u+e@2#9o;|SguWQR3jV+6;GM2@ zeEz=@geqe!Z~MP)KGEEj>DWI4A%OqG@7q5jnLayFhta=Ye(s~_iTghc&c!DmmhV5o z0hidYlKH=SNCY!L1IE9)_OS5g5$?aUp*4r-e%e2mr$OfwP@lga9N^qrLF~WKXK6Z* z9Q!{*4ko1_W34}qhWAZmpxVC=j}PZ@Gtj?;y%={8=J`L!UHEP(DeXVn?gY;3$Hc#A zLs#1}zwf{A25(Lr!s@@Ke37nf+upwjYp<`fI^aLo%cEn0HRwN3WVRQgH|M`gpzzVt 
[GIT binary patch payloads omitted: this stretch of the patch series deletes binary test fixtures, namely fixture/13/3/9 and the numbered files under fixture/13/4/, fixture/13/5/, and fixture/13/6/. Each stanza has the form "diff --git a/fixture/... b/fixture/...", "deleted file mode 100644", an index line, and a base85 "GIT binary patch" literal (~400-820 bytes) with no human-readable content.]
z_wc_#tgHXW!}-608D?CtuhKtvaE@0qd7D2?zI&vkU5CG)&pa$MSl~a%{rW2J8s)zb zS5l-tN7X+A5}2nHX7xWw2#F;cP0+utlKwg1WBxz-jqRG=n5jQQRPxlYM(e+X2w;tz zFzY|M*MLvN(7eAA1p6;P*YH35uJ!#xblN}4szF;O`_#YluUx|uR`@@VTF0ZZ_20k3 zVG#{5QvtxU@cL@CE$lx}7f>omW!67p{fHqQiu6B!UThewHtfF@Fy3COqR+nvc=M+0 z3fI3FG423i;q5;U^S8mn)8s$k(r+e|e#<`^!37wRNhAmndmKbhd!qjtJ>);e=k-wpk={RNf6&WkOXk1T&xu-$ z{qsLIh1OTYimbo3v!5*BvdF(7JoV9$LBYR^mn$i*;jq7RRuh&}A>Y3XAw62~jLSc` z^Z9C3ySTsL$q?K?LdU-jC3VC~S>HdNHoY(7X0t#3T+mK?yWziZ8da<8Zs@=0eAfip uFu6Yk99nc}d0Wt_60{{RZ0{{Rp0{{TE7{1`OLDoMuM2PLzVC+9ISrjMy*Rj82In;D- zG{QeoCnA=sa{52(yLnQ;gz7)i9^88${Ovyz<*tdk=+M6&@*W4OlXbrmt~3s4_SrwN z%Ob0!XXC%q9BypN=K{cW!$kqNU)euVqBN(2!|p%a+;)CvL##jDg*SVw5#zrf!euHT zD%HQWCq*~sIRL<8hItp0Q`Ns^da5d8*YUsgXSUOJW9GjK;%Cx0Jn@XP~g9^xlYXwy79l)o+}>WAm~5$`jT`gg1|q{|A%_X z=mNmG;i8l$-0weLN+aH8bFx2C4dOZHrszN0V9V$g)Xcv*miQFVtoc8x$d?e^aM-^I zr`)|V76rh<4)3A3b?m<`d`uLE5#T?!=-q(wT*N$@Et*asSaE-q<+|&x2J(<7L)wcGRG~qwcK#c-4M8`jue>B6KCD6YHy{&M83h=*i ze4J*A!u3Bwh9Rt@wEsU{Q5KI#==(p#Wjf*BUgkfdK!S!L&HBG);$l+b!Ti7Y+NCJ2 zjN!j0ded8F2&}&?mhGV*h`7IU1aU0f%*a3MC`2`{_U1#(JRB-2Xq5 z&K=1$68%3(cUs1_tkgef^6|e%;}hE%Psl$G@no``)3Cp!G_4^3 diff --git a/fixture/13/6/13 b/fixture/13/6/13 deleted file mode 100644 index 12cc81de7c785ee55288fe84a2e62650220c7bd6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{RAgY;V|5O%+9ViuqxeD1&Yd>n|9IsLz}>kZCc zEa<;^Ws5PGr|iFCp403!o7O+sX@OT7rq@49k6+h<0n|UATP;HZS@yq(7sU#;>G8jU z>(^SyyW_u;6=+D!vX?*Y==ADla@D_uR$tMA;nP1>XOo5vH}5};HroL=`tQG!zm8eD z!tTGn{<++FQ^7x!$Tzh0|FplLx;9~=$>%@z=vp7N2!B5)Qv%8?+510vbEaSKXZgQN z{^EBI9oawO*U8rQ9Q;3J2*BoH$>=}Ar<9Qi{qeuRc;szjt@ytZv}+Q| zkI_E_bDAnxM8iK~djFa7xAQ+pD7Ap%jNw1tY=#J)2j9Q3Zfwlfui?K}obR0l=JdY^ z5f`}vkH9}>4WzoBc=o?d1Je_v9_>HxevSuR+TuU4reF81*5N-n#C|U{CiTBDx2`V$ zGQ+>FMp7SQ>e#=mzY^<&QMx|}aZ=cm>EJ)KBiSoCo@8uP#Hv)H>7L+8J_N;bX!cjLcsQblb=Wd^{gnMI-C zW4AxrL-N>sEA79)buDk{@995l%YgvX9_~NAvD=~usm(tt`r+m#9M3-}rWf>l@Welt zGh_+2Y39HD3wx@otmD7_5FQ2aEA>BBXuJ>^hvz>XMM~`sVah+^vjj7|Gvhx^+}#{k uMz6n%(BSe_tj9kMpiERq2ID{dNbrgvm;k`Fx>^et|Li{pZgWHGvg$ujXsgly diff --git a/fixture/13/6/14 b/fixture/13/6/14 deleted file mode 100644 index 09cbd44f31fe9b8fc30bbdf2984c1818578df594..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{T^fl>Qq`{=(SJ3I>5=kq^>aHxn6y#c^V2os6O zmexPtJ^Ha4a-=_?7dgaV&eOlK{H$09)ssKZu}qG-rQ$yg|CouJ^pMv1Mojq z&4tzLLE66(rC5ARKkz@I@O)yjm*hWINN6{wmfOFO<0iYfPPD)6eogts5|zIkPQevE zv-v+JozN6SKk~mo21&eP3EaPI7wT%HQ2D>3;g;-l_JRoTA`M&SpbECxU)%v<1f z!u!8;b`hdAoa8@`yQsWEYXd;oJs5HCL-9Ws{n4_`2&g~Zjmx&E!}7nQ238GQ;PgL8 zsUH5k{_Q_C#ofl9dHBD$A`RGlf7HK@uyQIXO!L1s=)ww2;0C}Ol;yv2tMk9_9xAL5 z{<=TlNb-gGk>Wp9I0Wc0v(&#TW>)BG{PjQk>Sp|vcPB3H3i)kMg3a zx$Zy9_E01@Wz|2%3~23SQTIRHp;|2lBHKSyv~O28tG2)Px(;^DsP{j19}DAK4Cg-+ zz%McZ)$~6Xa|jP`0Q0|e)?gD-hwHyi>BO%tDC)o8+Ev+fM8`jvde$O|F#SKg-t}4_ zIr6_BzFP>u`0PJS=!=86J@db7q!DHr)_>d*#1(M}FOLvfjUaS-2-F!s5Rv(-Yf zu3(~&}svEzgdAmO-K$YY@ uN7cVXWc>*f!qq>dzDJ7lIPkyRRYX9aJ?B57MfIH<$e_O@EtF43b?Cp2)~&h# diff --git a/fixture/13/6/15 b/fixture/13/6/15 deleted file mode 100644 index 2a28698d6dda71c20c8be4e43444e44d51b0448b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{R#pO-9W{op@D%>LjRedxat-c8pA6X(BDuo)n3 zIP<^%y#`+2LHoaNx6P*A7~wxnu(_mNrt&{~utqwd*4)2O$VoXCjp@IzR(a1T|NOtP z1{dT7V*$XxK-+3yLFYg0d5koPmH)rSG^k09P4d64dBhyOBho)c9&#TJ_qIQWCX#{d z$J9T0|8Gx`SA0KfJO{hu*VeyH5I&=_H1fYATg@@&O60#_Osc8Ef6~9E`mYPuKK{SD 
zBsx{9De1p`hIy53($~Msn9tDv3+BHk3CPK5?A55X5~M!<(*5~Q|UjMQgow$ zF8DvBC4)L%Ao#yL@tqHS?b^Td3^-^ir1L*B@<_;t?)$%IHMYam(el4v^b<)NwBx_8 z5)E+1S^z+^1fMluFYmwFQ8rLiSMfhA2ISWOk(|H1d6+8TSMtBBlejYqW(Gi0>442S z>c~I6-uU9O6WBjPYT%Nu8sR^omxv_AKHWbBDo{LfHvvH9_DtJ3ll(uR4)5ru&+Wg9 zutq;wCICQBviLZCXXd|6zce?PpVU8>I0F!CIrqOF`(>N(mGD0oXz6>-tJA-v)|bAdmUHcf|4`-{IQ^ij@~zU99?DD@r6 z7V*D0i=K|!HP^oisZ%524f8+7zmRK_SoXilu=H+TbM-%ZmYH9eTEai5I8_hV0qH*+ zA4M`isp>!PLL`L%we3FyKnWmm@RmQw;uz7l%&I>S-_@aToa{fbdmXYQsqnvPuTGPI z9P__1ug5oTz1_dWM~*rZaMC{pBn@NZyZ%2bz%K4Ek*GheTH*Ci002NS;9(@8e&9d1 z&r&sr1>L{mpt`4ITJk?Kzj+8D7SF$|Ltx9$6VN|iF)rZ7VfDY)z5t(mBly2^E5gl- ux8Xkz?G3TtsPR8|vqe92A^$%#U%Lol=IOuL!>ILsYxh4ht^0WS2lYRHwWTir diff --git a/fixture/13/6/16 b/fixture/13/6/16 deleted file mode 100644 index 9f4ff0660bdc102dc5391520ad4cc11731d3a204..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{S7rzBW5Z{NRAizAtuHTFOM!Vg0QH|{?e^o7G= zM)E%<$;t`ARsldc>hhRRobf-Z^WzI%7y-aGpi;-wq|`s2tY&kb5&6Gqn`lM1V#7bJ z=<(YA_qRVJipK7st>VAaBvbwmaqz!Bgg2Xd_~5@Nqj=c~l-0ks#sO2b+rYoQQom}q z{klIqCw1^&y5c_+cw%PT0Mx&7vlOFndGtT6y4h#{lcT?5GUvo?mF+*8*gPDaeAB-V zOaRLr^94X5IhX&Z=iNW~mc89BD*8W8wP@7EH0Hm7H*PZ=p4`713l8@MBhbI8)q^jz z$OAy9GZLN&w4z{UgP0hvUEI z1+|m+NZ&u5IFvD?0P#Obu~GH@H|amueT|f7t=_+u-L$GgoA1A(*&Bb96A-}NL!g&O zLGHh-c`;=T(bvBy%dZ1?IzEBin5AI@P-ft4q?3h5*3kx@eMk(!#%K)3{1b>(D=g#GP|hWaz)q3)jUgAn?EYq*_<3 up8da(K}{-kwc@|^3Ft~Dz4kvau(iUfwfDc$YLFYDKj}Z-o~mA(BkVuw+OF*Y diff --git a/fixture/13/6/17 b/fixture/13/6/17 deleted file mode 100644 index 3f29f217eb6e4cbb7ce7c0debaa2f1d8b3242978..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{SfT;u6=p94S$o{Uo(o$x;?`AB-|i|#);gIf<; zh}S>o4Jt<07fe4eGR^5XMe9G?5{xgd`tv_@R7|ka(#1cG5m7E{~Sk@G*a!VPjZNwz;+*;2;* zHpo8&H@q%|^v}P4Agt84Oy55$`SwpQ8sNW;2kWv-p7X!Q;w??vlJ(f7Z=INZ@-FX=z>#v?@XCF#FZ zf}HnC>hV9FBG$*C=gB`?5&VW#qtZXU#Pgo1xadE=47Da|1lT`>A2|Zae)~T?f~g4W z_4q&DC8oR7dGo(>IyVpG*~P!u%R$WK#OgmvzTU6RDek{D(Es4Z7Wuy~!+?#LQ|P}) zzcb}}d%eG6JC;)?LJ+`BUhVXK((%6$lK*Albo{@*cr69RSn@wdgDkJC&h0<%`svI3 z)6qY%Adt{tnz=tXb-ck2Z{a`fpQ@u=^6$Ti9?ewM3-mv=D!x8G(6>K`Cle+4TW0ATk}7lYRZ;(_x!&|doHVhi=saS z_935VGUvY)f!>l`kl?@Z_z$vl>ES=$WX6#i`u4vkpGm)#3c){icnBSnsDPkLW+3+&K?JO3*)|ebrYBKJdRK+P$2`)5^cFd2%zV__IHeQMIbw ulhQxSIeSqg4%9yc%b-a5V&^~6>xQwfclAI016BZbGw?rXzwdG8L(0 diff --git a/fixture/13/6/18 b/fixture/13/6/18 deleted file mode 100644 index c06ec478576a621eb4952db4396302950cfd1e7d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{T?4PH2oAN0SnXmJZe`|>{%&jDenGwMHxx8`Bu zUeLc|ReY7`bDlpNTSg!1$*@1Q!#M3n1>CADMe{!%V59&p-ZrmVAg6O|diK{*#8qGgxN>}XgmiNDb zYb}HYQP;nGq`_4fz??r5;Db>B_WHkpF?*=a`rzv{K$=HWj<=y&Wnvh}~I z!MJHIyUxFb9Ux7w%!WUXJ~>$5@!&t#Z0TcEX6rv3%1kbLE8)NUneq-X#>qboZI^=z zT;e~=_VBriM904gyXyZu`P#o{zF%o`&E`L&C(gWNzsA31OUzJ1C-c9)v&%dC_w_$~ zluKN5Ve!8i!cDA{F}T0lNjxvcm()LL)1Ag4BHcg!BiiNww#7fxxy(U%i3h-;5?THI zdgi~u)i%p(_R>E~A;VFK?YqAhu{8mbU*JF8U3wr3SnxkSmWp%8HsC)BZpz0PROi1H zVFT)EJK?|Gi!nf98u~vYY^rn&?Z&@IWp%8pY`DK*HuG=iP~blnc;V})%kw{!bG;HM z*X+NGR#KK=z~8??F6 zIljMpBx<4_mCZkK4zdC+cICgtpp|oC>hr&a>U$}EFuT85mazZWJj_2wH8A(8m&L!a zG8v!D49Y)>+UP&Yv)XtLyXZf2nZ6R1Z|grD-ypuw74Sd&o&v@&(BD6fATQbq u&elJcy}-I&7XLp2a24fg6X`$vH0-FG3+umfeUCT_&;7r*Y|gnA-p;=jR;n-n diff --git a/fixture/13/6/19 b/fixture/13/6/19 deleted file mode 100644 index 2264f96869e914dd18e859312070c65a9c8c1a44..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 
zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{Run1%h~{o_BW(!V0OukJrkRP-$I0rbC3V@9oP zSLZ*L6D^0VC-OfRsglgadgDKacwQ?%o7z8vDz75&|G>Ww`CNajaNWP*kS+-_Bk4ca zj_Wwbs^UK-#V0w?@6^A_?Q_~YRs=w4l6U4JDEPm6E|nE}w#~mv!I3seujs$8c93sY zme@ZI-}8R-F8{yO4Zr2U19MM1PKx5Zf+|$2OdcUFq?B+jG zDg}^+3DrNu7Vc*2cJ4n%qjF6VS--!T?-%lmI>*1oohp}UKJh<;VMUYmM&v(aQM1nv zM#jGkjkh$Ziq}7YwB4Hhr^i3L08vq@=G{M1&d-mlPxL?S4+qsoWY)hgXkd6Ppya=~ z)drP4n}o*V%!@B~0O&1p#D-1om};ru@P z48K1u_kL2Cg519b$9pTD2+luzZ^O#l2K+yefypuB3H?9F)EvS(;;TO#gV&OUk?TLL zPquX=m&-rfMn++0H1WUBUKQB`REHt8a(h{LO;O{>tEn;R+^yWXChcRqZCjGy@NV^Wg+u^^e|LZ>K zU)sMmG^gY4G4?+XAIw50VduYmp<{9#+qJ)(Pg7Zm=$;`rN$hv+}qqm>Lo5!=7;^VEy5W~Dy}(flOI$Fe_Olcbvf diff --git a/fixture/13/6/2 b/fixture/13/6/2 deleted file mode 100644 index a61bb29fad7886c62e20da779ca63e791bbaab5f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{RE(uT>#LEyinAk^Y=6Y4*>pyf<7;Mc!vEo~e7 z`t!fX^+={mCj7r3iRr8J``$kaA2fm8M&Li%&|~5O4DUb0%oLR2`_w7jg;bvGKpxvZLAVWbZ!}L+UyiH1|Jfv>MxE+RMM; z!TQ}$3HCqU=o)ov0`@;FMm%KNsI@KKbBdh)+zvVt5bpyj{4i1rTK{qw(}w>q?vV!A&Nsq{g_C(*wrPM^)E)$hN92vB~Ww?UgZsF6h7Nu?^Yk9Pz(Q!L0+w1I@pojY1*WDCIwuCM(li_3Xd? za7Q^0rpmv>s9u~2uJu25M>eZ*p5VU)3@-D-)O|ner?yV754*pjRI*x*3CO>bdEO`% uvid&d0Wt_60{{RZ0{{Rp0{{S*NbhPt&E~%V!O29^bMn6$Q(AtEd)PlMqrfc8 zAMih{r*SPq|Kq=E3I=A*-^;&ALTS#bi|Rie7Z8V-r|mzGjdhRz~(>jLHnqPB=Ntxy$8+Yto}a>^!=$viP1lj69?ePTIWCVt;kKY zVCcWyqzVt1%g4W)w5C3QAlN^JWz3=4-u%C~;~=Rc*6hCrkaOgTHRr#lr`W%;%XcW%$s^LHD>3Kn35cR*?WUom3Wwk#F35>au9pgVj z(vr6rc>8d<@CRX1z%cP#gjjj zwSEri zj&s3iy7j;2WKq3HGvU8(njQ4Nw&1@b4Spdz-|W9=Rb#SVKlMKd1uQRBtMb2wPAu@C zD#E|nnGe2X5bVE-FpfZN;L5-F^5%91jqSh6^|7JUZL>e8nYHQJlGH!tRj@)d0Wt_60{{RZ0{{Rp0{{TP+$LPi2F^b^c*sHHFW|o~NX@7`{qDcyPY*$P z?bE*+P;?ArF4RAz3|3XD1@J$(puvV*K+wNV->jd-AGE*Wq=V(jx#>SR<>jrYB(gvB zp9@V_YuG
3H1^|L=j8EvT?^T|IyJi;cgL-D_$kbykSP`^Kh>!Ue_*!Dl1NS)t; zp~XLRc!qQquIj&byyo@YpZ7nvpo$ejyWYPI3n~(t+y1`^GT}RhW!FDr#Zk6Q#GpT?l?r?a z!}!0nW?k7rU*kWjIB>2f_~1Wd`OQvFb@#s=Xu~y{I{rTfgSSpE3FtoqxL_XpiGF$_T#_U95^a#s_H*wCkYXpO3uIM`VJGZ#s5dW5(&E~(}Xo9d0INU$IRM{Dncm%-I9WBo46XCz7nL1p| zF4jK)KE>6o^x?lV8{3Pd#Pq+;4T}3*0QA2IboGum%lW?q?bCGALFB(b4=`L7=kst>VLUNW?!E?FU=1)9*iHC=k*2hweZ8;LX%Q#?e3fz70oc(bzv5 zT{9QIX7fJ2T@QrPojXbM1Lq-q^Pr2s%S^D_xmQ-;4;>a<+Lk^DbU zf5Q=B9qT{CcTsWcl+8cKp=qYSjQYPt$g;@H#`eEobNqRV=5ar$9qQE$wBNto81*3l u)6736H(n7s_4PkhSd0Wt_60{{RZ0{{Rp0{{T=D(AMHM)JQ;E-vA{Y}h~0hKs#t5bD1(uE8n* zisZli?}~t)&hI~r26WF=3*|r5oZAhm%h$g+fwsnH0Q$djsoxuOhUdR<3qNGQ68}GS z=@lyG?e0I=HyBO_$M!$8f0UfHV$eT(O-dY-ch3Yw&cI6D$;zP z!sEZjmm3^ORPw)Gye!KN%j`dRU`k2%$KSs*-X5qu$nifiN~7Z9@8CZ_!TCDQQeFz&z3noVR9`RKoQkb7pz)#g9W$APxIkO9CEY+h`4?2SJx18yE= z<>Ei!PJk+4`Sd@~86KNwKlne8e&t0tt-!xL?~x3)?@fMVfOqA?QCL_PFf; zBj-QnR$n%}Ve`KV>WW8;|L;H6d%DYT@bACKn9r1m44c0&?_KTCd0Wt_60{{RZ0{{Rp0{{REO@|;~rQ$z$b>?jpQT4wc@lO)#Pp`i$%o%}q z|L4D;z;}`;PW8VJCgvGF*!@3m1wL4H5A;9nM*y-_BjdlB>2b4`_FUcJBHG6%5kI@Ledu+V5Hx8*->nWS{* z%F#be8p=>|P4T~|#7D`@ywpEZeqU0}ROCMrMM@cOSlhqY8U%3hiv7PFjiY7q@$x_R z)V4*)o3=mLay1mjE(XBtlyBMKj{rcmMuChAq~5>j<+zp}g9AW_fzIB%CfGj`PybhF z@yWj+8&50MftzVJ;hY$`T9TQy@+SsYuLZEItoFrLE1l!;z(39cKbhw z$~uOY8{0oE`~^U_*a1MK&`h-5ZoxnIx=O40sOG;rrXv6D(*nSI({WuZJkvks#@}l* zUtg9qhl^c;_eg zf7L$r`k@UX?TNi1B`rSX)oKF)bGV8zIV(COYNWDLj zh8|gm=GQ;jzAKJQ`|dx~!snLk^8i4u1?2$WT+TnkHStg%IorRz;_(*ri26UoK$+qE z#PB~W(Ze>Ajm1Bmv(QO2Mb1Cvv_m|=#KfoAp6v+Fd0Wt_60{{RZ0{{Rp0{{R4#a$#AYvjKvQWNrTJ?1}?pMXtgY}`Kufcnv) zD&fC*>%-f#And>JSYSHsY~nwXCKiXODdxYPB?b^;zVScNe!oeaqUyhhEP%ixY}P;c zc=k~o`Pjd62{YfVXLmnKcvO_PYUaNJUqB2PmoOJ)huBg9j#<>m$S&~1ypIpyLQ}Mr) z(I1axdiOtAjk_+LvIjton2aiMB-OumqTr?wY^Xm`@gUuyJ@7v*h1#phCh$M{s&-S{ z(#Aja8aB_yJnz41OJ}$(7yUnkQmN{z%;i6$N+eVAI?O*Z)bzhZJB@x zGU-1SP!OV*$mhQ}KBcHbd0Wt_60{{RZ0{{Rp0{{St$Gttm3;n<7w@M`<1=K%B`wT^4M)kkGrEMEW zBJ{sd$V%09SMtA~h=V*0yXe0>+=g!;tLi@rnY)Gf0L?#YJ!l$1F7Cey3eRP3mEyn8 zGQKKe@#`f8o@vPantA?T=Kt(;F7`9X4=1LA{InN397ciQ>P%CLNC+Kj^=%xUR$ux%$6X+jZf4x7xq0CCr{Zw(mdMGIm!81LMDN&asPc z5ZJ#B!asYO?e#w>0UHI@@#nv-b6-e7&f`CF-{&i(QTxB5Fdnd?>h8a~3Vn*TQOm!} z?}`JCSo1%wnqwb_Gwr_wqAGs=mF>SpMa&>MZqYw<7gvsHwA(-9yq7M|k?_C8U!JgD zv*5p%ZP#}8V(~vKJaRQr;qt%GQlmh@+SNZk@h)VdkKn&Y)mxjpE$lyGMErTX>eJFahcRQKt#74^UV)r`C(Rw8>7EytpdP-ssslR#pb`?Pp=CDaosDQ3>-1msA=|%h^kg#!LixY9%9Z>E*zP}c%ldP?3irRyInHUS+u`gYt4DM z;PAhU=}pC`X7E3)+)w>m^w&S2k+@7%%;Y~HSpD-TwBf%e{6G7B)#pFP!Fs+X-|@d4 zeZH6FvLcwRcYywdOwxYtwrhP2Rt&D#D6kO7Xugsfn}a uT=T!+q7KgsQRYA2BK$){S_Ht++9nn^E%3hd0Wt_60{{RZ0{{Rp0{{TQ(5i+1@{sP$WGxZRJ08?}orXAMC%Z5i$Q`Dzv}a zyl`Z2o`64-WmKJ*1hhYae4^1YV(7oYhJeX=HTpjm9H}%VvE@HZETtQswX#1(QD?wG zn&7|I$i=>wtm?lBHEa7EciBIxVY~iEQr^EmJe0K{9>G8FQac=!WB$MHD0?ds^x(fa zYJoN{?YqC@WkOI<`0hX4d_TRdgYCazqcRiu#rMC|S=wAwcHBQN-R7eJX0X3Eq9~bY zjr+eN#*9@e!TP@t%YGL%v*f=q>=pxfaJ9b|Ya|PUI`%(A1jNBuO!2=kWPXRKJ>fsh zaS#@zU;n?Mg8H%h8P`9tuPnCs8u&lK+1pWJBf>xR^)|JdVdcLm*Ui*-xbr{#isy9{ zRMtN?uwwH3sKY--@Y_|QNbtX7k1CS^NY_7#_&(Y(>E*wH7zN0x?$bXIcREBL+NeLr zY+IL#Sn)rvirV+1ZsNa^gs&;f9mBtciHBkRgo^2EPi_M`)Rc;!D#?^3SiU;RJ%$?*>)+xovV*4})CJ@mhK1BPkj8^XVD z8S+W!YNi{rn$5cX)zCh9-4GkIt^ z)3-mMn?3w9p5;IGC!riKLG!HnL z@o*;0N8!ITk@bm8v)eyumVi=VH{(Ccx8|Y^fdxRE znxOt{KK{SKbyXtrlJ!5fzbWp^gz3Ms+AqK4hWfulg;nYb2>ic0erK3o)cil~R~~q0 u73@ET0EK*Jap}KNjy`O`9_YXSVJvV87py-wBvgPPfa^aFT(#{5(bB(z^R3qa diff --git a/fixture/13/6/27 b/fixture/13/6/27 deleted file mode 100644 index 816dd2f6412d11e5d05f0aa0cbd41b98087d1efa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 
zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{T+VTr^fC+WXeu4kT58}W5a7N3kYsWu(=GLWCJJLT^FWWuFL;F7!8Ts%@ddELht6~tCVfMe?trdvs z5757IOzKNM$MnAqjk$o`?8d*d@}&XhiuOM_qLlDEWZgf_7dS_&5a+-C3{RnS^x{8k zxR7g?_07z*TSKU7$!cr3Yw&*{CfdwRdd;33tUnmvh z)zv>O6g%hkjrBhrc5@)PDdaz;7Zvz3|Ls4UyECb!pz%K*pxOaSE8jnMK)jNaGVwpf z)wAr2TIRn*qFAlAX#>C^K4}u%(&Rt5`cW5O{MSFWW6z;F0_?wpfF4Q#GxWbEGr5BF zxxhcQ_h?QA@#a7IeX(*Hqu@Wnv#6kVQ1HJfYgKv3bLqbr3p9;vY~?@ygay)$o3cMK z^N90f_V7RAs4~iYu++b!AwlXds^>qsfGuSEOr*ah16vO;A?Lq}Jn4#pv+X}{&D>k`?5dD5&QPvx5hu`o&2#q%f7#f&pjuhwC_LDKf;6EQsO@{I1=sn1l>QC z zAgR9vJ0ujPSmZxiG;^g!)9^oY3dYr^i|jw(fB`)|m*PL+9y2RQ0RTYyUfkLUD9$67xUuQXUnyR`ovy1NGfdj@3Uvwe~od0Wt_60{{RZ0{{Rp0{{S6VUbpSTmirbai#92lJ38>2&@z~&g8#*<$`J; zIQ74Ce*oYkeDJ?z&50H6Fv>q=p3X~h9OXYZx$D0#>LT{(P0c@oLx%mdA=^JWzJr&uvd%y5fyrYT4az^$XY3rNulc`l65Ul9 zvGhL$Zq#nf*5N;e(~T41Z0^5fAtB3zj z{!Tu?AOF7`RU|cmp7*~Lb0&E>WCFm!`qI$Ba+nCa_(qG3 zRn$L$Q8j4yOQgR6Rg*R@=k>pe@pr{AT+~0;7bNi#bnrj*l9=I+obkV=g+mRyQO&<5 zER!ElIom($#&mk7GWb7T4IR*biSoaXC{KCzW9dJQE4L5(<<`H@L8<3uj^)3+0)*aD zVC}!7JPubh`sBY!wP7V(@!vlW&o}d%C(XZ;H0M8%xcSEK_58mfy**QRGuc1w&J8m{xcfhGRkbVt diff --git a/fixture/13/6/29 b/fixture/13/6/29 deleted file mode 100644 index 24fcf39c38d2fd4da84ff2389e502d522454ce5f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{S}85BNe9=%OUi?2jNUD(g z4dy>LRoUSXXz)LZNu4E+9QnV)xj^XU>g~T-mLnE2;PyWfo+3UlyY4@a+k7Fx^&-*_(2`L&z7P`MB$i~q_VCFv`l_EtAiRC}e0Uns9 zPUXLBLU1jC*5bbrVi(jpv*&}%GiSNHU z|1=9^5b;0w2p^pC9MHe+-y=I?h0ebs*iSRg=;}Xwpk#9H-ul0oeIDHHY59xsR3sH_1Q3fWp~^*x$c5C&Hi5Mf5+iQWE{H?ZLmIqXSwJ zzwkeC|AT`!!}?mf1i!y!g_|S8C)vL%ul;1V$jracCUD9Oeeyr>JJkG_LbSh?0oJ$Q zK;yq*R-oL8$l}nBKpF z&MxZTBgQ`Eo5a8CHZ89_H~c@k;Q+L>lkC6jgz`qvJodjE z2o!TZ_qo4n=s?QtI`Tgvc7Tu6hTgxVSy`s5y5qmnx?9h6X7E4Jkd0Wt_60{{RZ0{{Rp0{{RU(O3pim-Rme534o@>*&8_smkzybNfHWLUop| z;eOZoN`H4aYx!(BZTB$+*9-hIyim67j#*1F)F1-M+uI z`~2D{l<>cxg8AH(?#I8m5Je|YCYUr*vh}D40_uc zT-v|5RpSal#_&IaqehYS7`MNyAp&s$zWF~abJBhII@G^kqs#Z-8PLBBxCbkBvh2TU z$pt@DiP*pNI>sWjy!*dU?ego{2?4-LIvV*>{o_B#>uD!b?fbu*=aRQp``JG@vpV<@ z1mM4z!y*u$5bi%S_ltuNll#A~Bl6?kB;!9-^u7b{IMqKAvGzcBGuJ=G^weN|`|&?i z#(GXw{;@wqop6mTl>@-`mU!GRzT7{qIUF8@b<{tf#<=a@PtU)Zcci?{@9Mu93NTzv z8P~t7A_aXmRQErR>v)smZ|%QZcB-?v*!#bB2Im6-e)qo?@&j~E5~9D_Z^y&UyZS%o z8GpuzqX9s(Lw!%O&g4JiP*UezX79i4x5{k50PMdsMRft(2JAl`g5L9BPW3CuOfs^32WwXR#M6v03J2#cGH^3=buw=|t& zAY#AA(H1p^x%$7ur|Wj?;@LlJ-G$>vd-*@8vJ1nX-sZo?llM4eg6h8_qd0Wt_60{{RZ0{{Rp0{{THZ0^`jKLWty^k#WN;*USXTkfcFssF!Pz2uV% zNb|qO!d0nO7~sDV@>{9gNc6vmFcBroz5zft$wL!ert3e$iW=X-KlMLY0aYy!>(jsSNy?3hdRPw(8X>kfH-}ApGQL{BA?%O}2T>6oc z5AZ*3PaFhc>+C-TaqeVrSoS~6o^BG1`1?PohSEJ&r|v(7`MATf7vR5MPx}#$vBf`k zKos6X4g$amL<@EfXXHQGtjF3R8^b@N6YbXxhWS4Yi@RKz$>+Z*eU+pdf8ali%|?N; z4e-C4kEw-I3R@}di-10N;Uhh8^N4;gl`^vvuCvp(c z9|J(YjLZww?X$nm{uL&R-n>6hUpup`=HNda(Fiaf$?89bMQ>vw_Om}VMM%d&G~ho0 zP1wsGX2(Ad&=w~q>gd0clC3{E#qhryU7n0QVCFwKR1ON1M)*HdScYX{82Y~}cR8;2 z^Y*{61gp@88~48pkJ0h_&FDX;Z(iuEdFwyM@W1pcKJULvv$arjr2#+=UDBpXqu9Uv z1Q?lS`1U_-LQme1-|0VFXPybesP4ZEHWf4s!P>u{2VY><>g&I~p{es!g7v?R4sHX- zG|<0Tm0DVNE%U#e+W*w7WX8XK-?jc@?fyTeguV7eOXff5?_>`G)ZxFePn+XwaOA)J z{xE^`tJ%M%4j=cekpMuB2whv|Y4N`RYU0$<`&_@&zKoKA$lX7VkU&kOa`QiYmi%>k uO4Yv~m)Jo)&hEccipmKthtfaiY#i3y2=zZkh(3qwcG15*)Q-J^-0HtK*RMMO diff --git a/fixture/13/6/31 b/fixture/13/6/31 deleted file mode 100644 index 4e1a5e146c0c8c6bb74f33051eae9062c4753a1b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{Rg(;A+HHoQMTC$Mui`uV@6LvU~d{mZ}qigI-^ z+44Ubpzp=L{_8(#mA3yRYQn!>e@ptDjQT%;@e%Xbj{ZNiSV_Y%K;=Iz?IF$md-uPV 
z3*So~6#l=Ij474IBKAKCEp?JUQ_nxYgRR>-EBZfPg({dnYxTe8{a?qop~SziXJn0# zckn+YV);pHTIavvEIUGvJn%moc}S*uZoj|phu_xtyPQ8g-Bt`K@bbUk802@g7SKOi zry^0+iUB~a9{Ffl)!08qviT&=Rq4N1OKNB%RQ5lTxlKUJUg*F2KGKY%iTgh*H3Ew} zX7s;DLaO!+bkRScE61kBYxqBeeH(cDyz9R;S%|LsNwq(A#eTU#gz`U;+FX=Q3j)Az zdU_lPsqH@^@bJ1>g0{cG9KM7;68AqsX(W>t-v z3*x^!j{xc+Zpyz^o&~@){oTKU1E3%+5CFhe6Oe@uPq@G6MP0jttIa==yGI!-yvV;_ ziT2uLThc$!RdGTnzqLPWK^aA*9QMEXD{bQ|LDRoi%e0D%D7`8KjS~8 zB}nqe?CU?vbRq@b4(C7AJU^~{HeJ8bc-YLGwBbJ`F`7!y_4vOWvcQLcg7H73fH;-9 z=-Izzg14RBg#SO*n5< z+SR{sCkJ+zUDZE9$TjACP~bnggR<{sSLZ*+GEBOcz4JdFskpF!8S}piR%qh)n3zAx zhXEc7HtWAh5n21(hS|TASf5u~Bd0Wt_60{{RZ0{{Rp0{{SHl})(wn&3YsYR|WF@8mzd{Ggth^x{8fMtohE zA?v@^jH$Sk?D9V#=zvvtwCO(vcg_5RY1+S(WQ7#3{@1@Eea@wn3gy3aM3~rPtMtFK zZ=72bP~|@jSlokDPU=7S+Qh{;qV2!BjMaLvwe!E}?D+m0`0c;A?1BOQIJ`gme!xzY z9QMBgO+b2r3-Uj`mJ!@q7V$qWFGap22-mq6|O($sn|jl#oa%J zu3hDe&Gf%`riEx{KlVRQ5#eFVFyFsee5|R9e89gdN)&|2dc@yQ}w@G1?MB>{qMhj5i$IKNdQ0~i6pszh~hsE+R}EeN0GlG zfi-OhB;mi4_->gdeAGY7O#I8ok=j4KxRz(A{>(p5tpI6u8u34bWtBBfz3aa@bbx~2 z0`R{a_a?7uu<$=-qZ6gb<^VtCIA&#;!}LGk+KA1nVu}v_)Hu1j(WGFDP8uUN#MCW54UE@Dt)FQ;E z+3>%v@gVPgOSC^_tJhZp69PcPfW)i}6Y4)ysa7?)1Gc{?JVV1f?$AFZJL({jfR(?T znl!9ksLemWDkle2iP^uHNhU$~tKUEVxE+g3_~1WPEA>}(Fnd43hN7Kz?ZH1YA+k9m z&(%LFWF^Hc-{8MPrteLw4&OgD+$vEG_vk+iP)BhWaQ8phzdYAAVAMaCWd`cR$Kk&y zC$*j!TDre5R{^b*N%Fs5Ffid0Wt_60{{RZ0{{Rp0{{R%y&N6KMd-gy=FG8>4)s6X2$&NSz1+V*o<1aK z`0PJdxpc~K!oR=PYrMkWGORx-AojFxB>BJQW1l%@TJpbmPQ(U>u=GE1_<1n5?!&(i zwhSQwKGQ$ZXvV|V=KMdbX|?+eR=~f5f6Pi>mB+ss$tbrVO!Geh%hgoNN{K&nRms5q zdelF|4t7TC z2;9G#dd%_cul&D=16zvgN;OT`BKPFy_B| zB1E@stm8jX-r=t1fBC=tp10%*E!w{jE^;%8c=*4MIdS-nndmk>%Mdd&11U3)ik>o$6A_rfkr)4)s5-I=OUlRNy~(CXAl9lhi->l*g5^=Iy^HGY1xqS=m4C z#L$S$sQ*6{q+|UrKK8$%ldyINUFSb@L1zF^X6rwad^0?{oBKbx))hcQIp;qdnbN|{ z-i1Hc<&tR;4BJ0yez{@OC(*yZ=t!y*g7ZH{ozUf;$oRi<#~%eqIp4phAKuH*jod$K zCx7W7yz)OMvaUUPI*h-)OqG}D2syx4n8eAmAYa@lX&iqyY#<|z^Q z+1tOnx}=fR(%Zj=rD%tIbo;+}Sm+73HPb(96f)Clm*qb+uc`fP`S(9?%2t+Vi0eOA zCI1&k=IK8j&)kXJ@zOu1tjOzWGyK1;#{~z~?a@Daz--*N#JoR=VaRt}dhkCWjM?+Z uDB?d+$!f8xpTNKK-|}sjRR2Go&XXsOChI@^mO+d0Wt_60{{RZ0{{Rp0{{S9GLSV!Q?Ebo$1*>Q=8!*=qC{Mx|NMK$K=0ANJPw}<-<) z3iUr-zn0)EGw{FouXF8j1N=V~c|E+hz4O02n7X2EjP5_%@rBiJbKbwruW`bWChflm zzVDat5%oVjRBU{|?X*8|N>DKCL#97Ucx{24yV1W`=X+jPo8rHgX2_x9qQ<`+LLb1e z&seb&DSk9pItTk1c*1Z@4%>XE-}xn&nyJJr8A5f@}UH;cb*Vc{o> zhV(!Fj&??3QqR9@z(c3cn6f{NPG^FJqt#^yhq|I1ag zed#}%M+VNB8}2{cC)XU|dj3CTIi<4`-QmC9q&s31Y;FjK>9ygqcqiHU*A7)Q^RgAX+l4LAKyRDck#dW+gN<% zxa&V9M!DY-2*kfx5X`yAtH#1#zFWbMxGqzM2HuFD_VbRTMx!6AzEwG1&!0$iM zWvas`vDiP$7NiARobA8t*K7({V%NV7OAknueDuHFUy!zWqQ^fwha24TX7|7J|Gg{T u&eA`mU0vtT^bEic5`3ii>ghkpcLp4xsPVs*_a7yDhVH+Xa#X7@^YuSNYPW>| diff --git a/fixture/13/6/35 b/fixture/13/6/35 deleted file mode 100644 index 0c4deef60a5e2be8c9551e11c7bd3fdd7c103449..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{RpFCb!7cho=b*;kk?73)7T9^(z#74W~({$S|Y z$L>GN`?RfTn8Cm5pws-C9KJs+&9IF@*!Mq7OMf0XoASRd55zr}H|;-v|6OvAs@Olx z!m`iaG^l{_FY3P+qIG6k{qjG$_KZY%eYZc98IPI0 zspY@H?o)(o0QbN2?$yVPqTRn)aN#^S%eye5WuhUvdZYELjT zC-*-e(SC;tw%fn-Q)blPI`_Y`i+LRMugE{DM;%dqpzJ?uPqs8xk?y}B>`5B+htogtD2$rFkmx^%OjOLNM7ckx8!qx> z#Ph#a0*a`>67#I*=EI2U8sPSn2~6{O5-y4b%Kx5~zKHu}HK(Vngi zX8OND+?$D;ZtK5o!v^E;dfdO747*ip4Vu3+X&i2o73M!kwZIdsMa(}&XX+GNo#sCr zOc4}#rQ^T+5Bbh>AF{uoGN-j^P1nEdBTemmMD)K!UQLgUmH)r~7tao!Y308mC8jY6 z*#p4u%A9^fAo9O{N7p$Aa^XK#cfFsD563@BiT*3`>Xkp=h)#)b9xQ(&ayyl*L=$)9JtWK=V$^!O=fYpmL#lr^7#qUZUgxkjg)th<|6qzu3R8BV`c9 zZ`41EXNbnUaM?d{_vPdBh>jChI?yw!=SA#`C|eCy!}D6YRg?8MuGg_|8As 
zt^=fAYNd0Wt_60{{RZ0{{Rp0{{Rs=gTNQNcg`?V-30h$n?M5%30AU75u;RY-(g3 z6w<%lx98*BN6kO~0pm6eu=Kx_Qx~GOjLg4KCM*{6FU&t7iVukbUgkf+Akv#c`@=sD zK9#ddCDlI$ncEC~C!@a-Z*jpVI`F?U9bXC8bm_kz^#zScW zg7?41z|zHlckVwF%kfbcTKB&@f&_rvj_5xO3c+-&8}C0!cC2cMZt_32eGa(TsqnuN z93Lna2gN_x5+z`GIOD&v1hs`AU+lldJjCQIbf!ORE)(%+3+g|X2t15;3**20(b`ob z5!FBHu+pDN4DG*?zCr*XvGBhh-1&9{!_>b#9#(TB?a;p_J#=q(UgbYn)ECk|2K2vf zAN*nBL;*lM6**KNt^B_fk?)L4G0i_Ia4J-;KGMHJjL`x%zvRD0BzvW|YR11gk>-|V zg5^Jg+$R_Rdg?#i5u$n^*~`B+Xj$F^Jk>vA*+-+`NcF$%EHPjtmdC%i?iS2zJ=s6v z@-Ok4Lju6TFJm{{wzNOYxg$TohsD3od|PTLQS?6#A0?y{WW_%Ypw|w9wcbB1eEV>4 zV9~$Y7FW8xIpjaXA}Lfw{^36~sdCZX=k&kRn(9*t=hHvCJSXOK>heE`)fU${d+$GM z?=bU+2b@1|&iw+9mefBp%A=9(M(sZghKBj`UX zx%N=O%Je^#%~;o3+o(TEd{Cv|mHNLQG&6GTfkDT@<{`9{KzN3Gc36{UMwR;YLhv~oSnvrCgiS<8i zv@$Qxpy9s@2zrVEbnw4OWf}F>zxqE40}^a+6W%|)23p@yyahmXQhkNY!T!I0S-*o4 un94utERo4Z6yHCT2xvc=eVji)PXd0Wt_60{{RZ0{{Rp0{{T3Xmouc2m8N=kzHNg(d)mmkjtPRPVn#n{TD+TLvi(IGV{Nz^+EomkNm$wZ4Ugw{WH*k5KE z$oIdRx}A~i&dxugq2)LB{_($9TL_)Jt@ytgUUl$AXn(&$la)=t%)~$5B*$1xKe@lz z+#&jH3fwV|XA||SEzv({CG<2$ z^zc8%X}eYWw9-G8j@tj>_v1fqSOe@A@q|CD6yFsWHQzr@{)6mpits-s+tyGg5&l2Z z9atw;U-Lf&RV}9xG_1c|WaV{@Q1UNiGoKBcTZsWgHfVd`%0^&boPXFQ~ z5Awf&#|O2+4&py1GFf;N?G?lb^7k8QniN%~BBWJLW%1c{d0Wt_60{{RZ0{{Rp0{{RLVsI5zJq5sXCNQuu=FUIqvVA7nLhe5`G=lt7 z`o6#BBHKSkb=beLQA>%p-qJtDB$H=Vm-RoCd;lKi-P}K@tAV^Ms^!0kOQR^lc=JC= zj{T$>mi9j!vDA2^)#<-A{9cB3V+KICu=kB&W<|eXVSNCBS?)h<@6yX&N9R8UmqU>U zR_4EDG5%o9<=nr&$+L}2Rk1%80>&@J>;1p+HCeoJMe08=ns?~=RQ|t#>;-V`364Jm z6_TT^e&xSz*`%(Th2lS?d+4Vnq5;5U_LzTOnEJm)NO0z)7~{VyG8mFxn&3ZEyI|;2 zUCcj_iPL@b&G0{Kkxq+HpSwR{iwa!oDw4m3v5ECBCGo$#b=Wgzm(0JHwFmgx`;)&6 zYHB(oZ~(w?$qh!WeC)p!&N7vtr`<)S~$)VD~dSMWcm7?Uuz)AGMN zTF4I{AJRV>QH>j#gyz4R*tb)`z~{e6E=iNE)8{{cNcusY)xN)-=P&3bEB3$c)}nfM z7`ZgK;Z%hCk;8~49&QyX9B+3&wa1lvvhT>wA_a&N z7j-{h9LT?Z**||tkiEY^hf5p52GPG?gjh6{KIOk|<=nXW=jT69u)i9RD(*j6>4^2T z$nZbg=H*y@Ak{y5`JLMwdgDK+d{{RU4A#FNOk zH{RY}tjWJORUb>ohxET1OpBrO6Y{^3lM4KmH`u=$al1Lwrt3e(iNjQ)Wz|3RrvPaI zqU*ooxTWxwzo$RWi?zcaLiN9la}_K3q3gd_G4or?gZjU?Bo;6ha@#))l&++M8PdN6 z&$YupFvP#8vF>W6v;HkO-`zhyjWC2f!v8-S06-LaZSTL;k%1c^ u0{Oo{0Xl0PAoRcBf(F%;-q=6)gfSP;@cTc)Zj%@XN&7z;Y&#`*3F|*KZm59( diff --git a/fixture/13/6/39 b/fixture/13/6/39 deleted file mode 100644 index 9a448ecd776bbb171a4e1de76368c006d99e17e9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{SC@~C$*BkVtkEy3PQyWKx6N26E-H0MA4c2X|I zaMM4WovZMwm+8N1he`Q8YWqJAaUZGeUG+a_$nD4(-RQq2e~GtOz~n#K=a`2-VfR0h zLp{Bx%i2FDM@K*qnDZJp6 zM!3HmmcGc+8vj3HeljlF=RQH%6r{F(9GOf*<3dBE68eo`?73@EBq1#%pAnHFSU{8vbN$Nis z*mUleq|v|4o-0>~7Sg}m^8XY*ipsx*6M4_Sp5MP_Q5dm*4ZA<5Hmw7-2Y0{5q73mU zd-^{jz;lgqC*Z#_SR4NvlkdMlkMmXBspLPox*f?xPqIIXg&s?Y_T|4F6CzF*2kAfL ztmW4L+tI&XEUSfhWB5Of8m&r}XYRjQ@Lnzzmg2wbIYk9ESHM4_r(sKcx%EG02dJz8 z)#ksvnWIRn7}!7IQl!J2pYp$WRP|xcRqa2*?bB4f+vLB-i3NT9%;P^IH&QnCbLBs4 zq-wPe4*kEChN|W`%gR4T!^>=$F5tg=$}ilR=ifh*KUA&sz5GA8Aad0Wt_60{{RZ0{{Rp0{{TVcLZv#xb;8(`4F{7UhF^4S@*dhXYs!XphnHk z9?(C5_$?vSH2}bQjRR))ZRo$8W>g)$hw(o^f^!7eNs7NUhTJ7QCk9mHs~|5i4(-P~|^F2rBht)}X)S`gJ8zFx|g^s*Jb}Jn=sd zJU1d+$i_d*RYgZgwfjGGv{um80Q-AERa8bk@QsVTH(L) zXLCvSob$h$tyP3A)!RRY^TCy*x9UGSL1*$kll8w=m;*bEQ1U-EwTTQE^0~h}Yav(D z7xcdx3%@TbCF{S3%r|liFYdpU=MPgB8`S0kf zvgtn%VNlCPy!XF!%vb)gtm8kW5;;}_qvgNBDS18r&i=nr*(TPa8da%FCf{!~Pr`A77pDb740`1REV{p*^A-}P4e!5f4KRB(HS<3`WUC1P diff --git a/fixture/13/6/40 b/fixture/13/6/40 deleted file mode 100644 index 9788320779f7148c1ed0ed2b94c1b7a692a9b638..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 
zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{SOD(zStjr70B(W}+0#MeJ%ytyyWGF`IAyX259z{4(l7XJF z(aS%~)?a0CsrkQ_DYK>$!{EQr$wHsFG5$Yo5~TZ{p4~s@f?kKYUY)=0p%JmDO!hzS zP?UdBQu@E*Vbixji`u^>SQj8IGXg-mEVN{Tm8rjo=E83-Ag#YAHV5X%-SR&Ov95G} z`tv_rA>d3R)AYZL19nu-W7|K9nlaP27yLic`K~M_l;pqLNpv^p)z`l_=yxEef}KB3 z@w3PhIOabWp&3NC*Y>~L%UH>D<_N%zO29fwmCHW^b@hF3r}96o5aBT-_tU?XXPKjJ@usiNElb@IOn_kKC}u<^e~ z>^Z`h3hO`1p4n<(Ci*|2p_P!da_m1egeW0CZUDe@)y2(S;M%`O?Njm7l+Hhm{jzj_ zfUZB6Do&{Vw*J462hb%@!SuhbMvI6AsOi5(UMc^8PO-la@D2*t@&3O#E|Db*HRC@k zdZ+HO&9}e#?XnE^u<*aCd0uLD$H+e}fcF`5nps zdHTPXn7yAE58c1R(o?3W>*l|XvFnU4nAkrU6Mj6z8k4_!yUjM7%>Y12LHQJE_|U&P z>b*@dWeUKd`&^4tMgYK$d=9l$BlEu_PZ}JF6AM6k=|Toz&7!~XC~ljJyzxH@zS&BL z9q>O1lS#rrMfAV!>Ls0{u>8L+WW&JJjpRQJ>!(3|!vVmn(tObSNbf%oQ+aS!_r1Sf zHb!5!w&OpXUQP>{Ytujc5dkwt*497gjS)wozq&tO@bFy21=2t1==*bYn5RF3&C~-4 u2KK+aZ%>f=G}*tvoGrw#TLC~&%MM+68q`0)z-BRdNAf>&PScX;b??7!1+t(3 diff --git a/fixture/13/6/41 b/fixture/13/6/41 deleted file mode 100644 index c978d117188414165a103b43fa323835ffcc5724..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{T?%fWg>0`R}n++b8!LG!=guQ&khPW8X8R!SgP z_4&WH+?$5^+08%t1VvrE#n!*?lx^4z1nfT}IqNrx-P6BSua(HvIRn6>f33JTWZXZH z;0t+4J^;WA&N(ASamzmi%Af!BuIRsmdq#)j!p%Q9#;={jnzT0$l=9#~ipC(lpBI&;~9Nq^!YWKf+cmtCj2I4<4X)~G*udBTAyoy)%ft!Md{j`qLbPGL28Jfy#GXXtx; z4c9+}3!6ix9sR#4zN{U$8u34GIhoc1$>zVH_%>zG8Ogt^g*NK%Amcyi;!ELd^nJfo zPBzpkboM`5o1_yiALKu2R>9Et-A#IQhT)RZU7z5d6Pt$oZ+flg&RF4kUN^lJ7rC zD8&XpHSIs=2tBCrm+HUoCwUbdu=PK2X&{IuSO7qiD9tFiapOOdPV%_r%lALh*R=)Y z@asPsFP#ulXU)Ge0iG2>MCHHi-@XYe&-uR*2vm9fcHF<9=5d0Wt_60{{RZ0{{Rp0{{Se;Tm+=sO!J(>oithe(gVAWyqb;eC9vg3do5; zxXV8t%*Vu^+3>$&wN&w*-Q~a5vwqvvO#QzvW*_iJKIXq1v=Trgv*thE){;>vRM|g% zBYU-{aKt~BpigJ8zxO|!3xZf`ebYZ(1p#agc;dgm$|Hu}W#+#SYH$YDsLj98<^Qw( z&)mO29a;?2e(pc{QM8)<_}V`lzI-u;b=1GwTZ{F}1=7DgF;C3&faX6Y%EO`2Ebu>Y zuKv1`e*M3Hi8Pc->czkMw?ShV8Q`yB&@%+^2uzC&a5!K^<{FT2G8AJD%ZSw25dN%TLu_VItR zQ~bXIetc&a($YV~`js1T4Y9xC;4^DZ3iQ9N{1GI>-T6NEJ&J{-0nZUU=q-ZRl>jL0ngmR@A5w~Gq^QgAH2U+QF)oo-}1i+ z9dWn>RrJ3b{NE08`^CS;rrnplP3=Fr1H0&MNaerJvQ~b}YUe+Y{e1k$$+Ob{6RN+4lpO5)uvFN{Gm^PnQsNlbHJaIxhBJsba;5OfCbIre3Xs8;+#{fW+QW*@- uht@yn#6qCQoaDdK&-5e{G~K_Z)O>si1^2(QU0sL#7V^J}u8>IFq4d9y0d0Wt_60{{RZ0{{Rp0{{TmG7s<`P0>Gkk%ocomfSxA)=Uv ztJgnk6y#UDgy_F^s;7rnU-G}mHo$#PdGfy|BD5OKF7v;>7a75Z^wvK~7h2&B8~{MS zK!R+d!SKHYakQ^+;?=)-??MJ?_{l%Xw(NC8+tI&XPm@~?Pt`wUZY3g`Y2v@(ReRw~ z#pS=Yo`uxyR>!~K``hVZ=F~svnActwh4eqNU(G|y8rVNKM)Q1DBhbH5H!dUHmFhqA zD`U)Q0PsKX9P2R&YSX{u{<1%7^e}4y zefGcb92uK|{@Fhr?vc)m)%QQhX1Rs_82LZ^pIpzdsouY**L7r|O6I=+;9BacUH(6Y z`r+$05&1veHrbX*=-of#{PNHvW&6LOn~!b!X5+tNT5@DJfAPNubI?y||NK7}mEnHI zX!yVEe`uk;+x$Na7f8~q1G~R+FlWogDf7QcOvg{!lkY#@Qlah8B;-GSkxa63LiE3j zhot>b*_pq5saXJ20`@<(xjOvYJKDd5*@E{f;QBwK{`=QL)#tyL=zs3f^ZCCe;$^gh zCZ|6;9wgz)UiZHbn9m8F_`g3fmFI@}5OqH?QXEhqN$9^cl_y*k5B@*t0evFNNN~Tc zKhcBXsOvwlI+S$|xa~jt#iASXuMzX(ZEJ#WN&t<=)wOkuBlJq|;ORQ&7@7qu<<`y*rk-|h~hscrVIK$)9t@i^L+(I0mr{RsNk@w@7BMKf7@1# uxAVUh8B=Hwfbl<fW*vDTVYRkWQ+O`D% diff --git a/fixture/13/6/44 b/fixture/13/6/44 deleted file mode 100644 index 579d8b627890432b58237730b3f29c311516e99c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 398 zcmV;90df8U0Wb(40{{RZ0{{Sy0RR9L0001X0RRC1|68^`Rknor;6LEGHR12<^*=a3n_6q= z=08%TsDC2R#XoCA3*c#iqQ3#ZANuM3_`i<^6PE?8^}nAvZ$S_Q=s%WjC4d*mb`15%my>pvl^s|D|h_`jUl01C=>!#|-k*Z?Qf z*gvZa1F~4H;=eL`*q8Fv^FKK~O2ldq*1y`~2$(;3`@ffdsT%_7$iLMGI^r|q^*^Q+ z(K_xM*1y>IM63hA@V~(WIIu?=pk sB$Mi#_dkI=bM+N_!#@t@0P+?n_CEjs00000000090RNy+000000LhTNxBvhE diff --git a/fixture/13/6/5 b/fixture/13/6/5 deleted 
file mode 100644 index bd091ff96bcd101c1f7f150c8ace2bf788abd538..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{R7nRfcYjLJV!LTYi39AT?vEV&28ekD@;AlSdMD*$e$v+2KQtLCfgMC!kq3ZvU( ziSj>NG@=ZYMcluDci&sL4go+kga(K8XXQUjlyE;zR|UXe29s&1L+n4Hl@u1kH>?C z-}|iKZSX%Ny2R+{$|n&7`G=bo~&&FMd?mJ(InpX9&nY*7BS^Y_~+!uh|p|HP1gJ@h~3#|jXGg5|$tv5jLyz4bpGKhJ*9bOgX8 z^z#Agrr5u-_mHqFu=Ky+S;l==r}aN?^J^PP-TFV^-`a1izyiSG2YpNu!G&P=BfRrJ4{n>#V)+W5a`;@Z0Q`@X*sBIe*# uviLvNJ$S}b_wv8pu{7~@YsNn;^+XpF@8-X{#b+HiJL|vHtL$$f2H-zCA*hM~ diff --git a/fixture/13/6/6 b/fixture/13/6/6 deleted file mode 100644 index 3e897bf99ed4f620204f7e40a6868fe7ac657f8f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{RSUFg`&Oz1y0q+=tWGwHuv>kCsYGWI{MgpQ_6 z3fn)=$=|e1@6Nv)?!HO9J=Q-b>eY~NN&rB3Kahp&9OyrJ5ZJ7_Tjsy$nV!K8D3ia{ z3{@Iy&A>klYGYBFO65P)dVB4Esm?!IY{V%)Q~p1|`P-Hc1kJx%%ek1bt^2>1Fa{H> zb<02ZNB=KnJKVpo!L7`K5ct1Z$fmhOgXX`aex?@G@7BLhIEq-KJn}z)ev_7ByZ684 z@HW5UV%9&Nt`=B}!vR2(I7heLVaC6k!q8`pB+9?YyK=;62{n2V$8p2^vR{ZUHZR#kW<+ScHzI#Ufl-uMe9Gw!F2nZsQ$m0BeSyZ zc<{fydF@!1jlnJPaDw9|L;F3>X-$x2KYaGqJ4Upn94tp-hZlW#>&5& z1ZGEG{OUip%D>~C$LzmwPwhOpM(@9Hri5xgd-A`(n8HUAZ1F#;gv3+gwCcb5em%H& z0`NameXcxf0K-2o0yx3Bpz%MBwCFjdukpX+xe{qP1mwRs9S|lc4&6Ttg-YDml-s|B zw4H)s@&mwyq1#i&^!2~b_)S3O&+)%@n_96~pzyzFDW3K!(d9pHo)xS#lGQ(3V2H5B z!^A%T0+$ZK5dc8b_@OAc&GJ9}@e6Bx82L_wi9n(Mb zUWGapliI)SL6G89P5VDuTCXBzH>yA4b?gXHr}MwcXr%Ik>hHgmtS7TPLIOZ+l{Ht8mIsLykWcDnaTJ1lGEJpbfNB_UDtGh1% diff --git a/fixture/13/6/7 b/fixture/13/6/7 deleted file mode 100644 index 5b5077c38645a65c173d66f5fe1cd78ed0c4d1da..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{S6fYh39Z}z{G6hRM9R&YOussd{T!0|tG(O6w> z#<9QH_mt95AN0Sdi$7gCUdKO>css^q@ZG;ABw zjE_I>{n#o!z39KV)V9_gwaGs?&@iJLzt+D~hh6}bRpUR%!qey=x3523gju||$=N?B zux8H^@ZLXWgL13DFWo;eM~CRT=HfpEhj8i7Q`*1boR<{htOCGclsg;J)8fDP zsTG`oY3#p1_P2LI;L|@T(@<6g5X?W#+)weUZqdJ~WQ$y(7W_Z>_?k-DfaJd0qnnnFYHk`ou5Brq&d|qaOFQX4Kfd~KhQtsXPcNCy3fC$oO4O+NZY?|MRQZ2 zK+?aZUBjnn2H`*Ptou(%Qr zCh>&R*W^Drk}<$EpX0x_Q=+J}LFT{u(ygfgU+cdu*a4LfyS6_T*kghsOZmUL`&($t uf9$`>yLJy#UERM4tWv1V;_yEPR2P`^>cGFXvMQJV7w|vBlP76&0?0oR{Hh%Q diff --git a/fixture/13/6/8 b/fixture/13/6/8 deleted file mode 100644 index 8c6b1e2ac8bcc13e6f62bf6b7fefee1370627415..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{RD#ckv2THe1Db*3>^G9^TZ@Yv#XXNp;_~N!h==?-{=;tKL7; ze}pvwP31oi)IT{s%;LYZYgxOCt?|Dxhj@m(bGAS7_Pv4H(Az&!)SR;1Y~;VT42$qK zA?iOT#u&(#dge5_AeCG0=Mfi8NCP1e8u+e@2#9o;|SguWQR3jV+6;GM2@ zeEz=@geqe!Z~MP)KGEEj>DWI4A%OqG@7q5jnLayFhta=Ye(s~_iTghc&c!DmmhV5o z0hidYlKH=SNCY!L1IE9)_OS5g5$?aUp*4r-e%e2mr$OfwP@lga9N^qrLF~WKXK6Z* z9Q!{*4ko1_W34}qhWAZmpxVC=j}PZ@Gtj?;y%={8=J`L!UHEP(DeXVn?gY;3$Hc#A zLs#1}zwf{A25(Lr!s@@Ke37nf+upwjYp<`fI^aLo%cEn0HRwN3WVRQgH|M`gpzzVt zpXomaUT&aa6Yak>!-`j?ljFZ#XG4=eG55deJ879dtM|VSX0`3|Ywf=-zYXYYS(?B7 z%ux!m&hNkX%)(A5`1e1M*nH~)*X_Rv@Ondzi^o5{{P7mIAN4;j)zb0juD3s-Vbhpp zO4q+XM!jb((ZWB$aAogYcsmZw{N uO4Gkb)u_OQfZM;x;qkuB4%I&%K-cB%eg8kkj;%lBxy`@b4^Lin+1$VVtGc)V diff --git a/fixture/13/6/9 b/fixture/13/6/9 deleted file mode 100644 index 9732486b4e9caaa7c36dbb7f6f3d58446b2aefa6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmV-01JC>d0Wt_60{{RZ0{{Rp0{{S5$LN8y+VVfehlQFVaj`$pF!;`Kw(CFjbS2Lg znD;-7#dUfsgZ{t0Q;OK3J)^(z)*df4YR$hGkHh3?%kIAn%@e7)vi`rL5wpa`cKN>n 
zKH8Y+x4J(o>>0Y!7U91U^c3lysOi5y%(#nWEVw^NYb=rxCg#7X$QN{!r1U=yD7~LZ z-s8V)^rHmK75G0lf(x#3yq>@O0e^{H+~mJtCogWIFXz8=kt$PwIQTy^{Yza$MCLzU zI&)knP3XU;o<2^^d%iygXUP7N-U7e{YI;;aPW8X@Efr#20_eXbOYxFT%jmzU_JkjT zX!Aee#a%Rlp#Hxp?6#;&*!I7L-lzQs^xQwjX-zATdCfmdsQNagV%EQtt;A4&YXCqx zhkS(0!Q($?ig=^2kn=y}Hk%%K0;<2+BPU@7W#PXYK_xIW2jRc5y#G>eukAnMT+g^# zy68XrSJdrU`^&!!m;B;xF5W*7VUb|O&F(*cNgdX;3*x_AL1F_j`qjT&bL$GimE1oM z|Kc}Vp8UVeBBmch%-uhhYf^DD0P?>cImk)|BJMwZzP`_?E&)JeF8=H@Pv*a(#b*a3 znDxJ`#K-zR2gkq5VO%Q&`p~~5sW(cdB;`LmWyY1b_02ylm@-Ne_tL-O4g2?VGPghK zOF$P^^zpyGg_iv7i0VI73Ntw?1c$pGoTRNB9Lp^5@=;ck{nBx?q~cNs+&Aq3w0p@9@7BYU!`EW8gpCI9EN{sQSNk(59xZQ31df^43K7VE!&e(+GS?)E>; zL|8Jxv+_TRCV6J~j_*I!pIr6y#M3_>{m`IE-Pk{igFjm?#Oc5F*=iS6dcr^LW0GrR u6Yjqpp1vA2+VH<1Pj~h_;rYKKo))PS{n!&o)9G~_ajoMZlPyhe` diff --git a/fixture/14/1/1 b/fixture/14/1/1 deleted file mode 100644 index f9efb20d8dafdc3637d4282654246cc8fa148bf9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 58 zcmV-A0LA}!0UeA%5dbg@!nEah!~p)ajP6A!1r3EZwi93krb*BP8RddK%_JvEwOM1) QDtDauGZ%axeTVTJ2AfP5F#rGn diff --git a/fixture/14/1/10 b/fixture/14/1/10 deleted file mode 100644 index 20e61eb7f40fd973ef134c1ee8d85fb7015b01e1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN5dbg?J00&5$5*ns`lva^8@@uUGp{bJUXZjj03O<299LQc1#BE{5? S{1s?_{}#^IPWk|a790yn6CA7n diff --git a/fixture/14/1/12 b/fixture/14/1/12 deleted file mode 100644 index ee34be88e955cd845bc68bbca84865dadacd24ae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0VRyd2>>t*!)7ynXaVm(OVt)a@SP4)R%o+iOFblQ=Mzj7@wir~vPwSx S6RMVCUJr+?zrX>F!W;*_ofv-r diff --git a/fixture/14/1/13 b/fixture/14/1/13 deleted file mode 100644 index e339ea9f84f6cdb1f65d3870ce84e4dbb8b04aaf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN5dbgS0$udaf`Sh8K^bb6waG7j~g!& S)m28co=ACI{=)~UlpMDoU>gAd diff --git a/fixture/14/1/14 b/fixture/14/1/14 deleted file mode 100644 index 48ccaa8fac1f7cecf3a98e6345a90fe3c376e775..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0Ts)^4FE6*!@$|1JiI{mp9Q0$0y@%?tSn*vt>plP@RUvUMB2KDHuX2j S1aF@U#@;aDTiXMU`5YrfUrA9?Lfh1h6CfJE}2xU~2hL(=29>o~AcTB_fT?@wi-RQXn^dM;XEeLG)t&xstiFD0a=h R)s??()s4T%_79v+99||l9TETl diff --git a/fixture/14/1/17 b/fixture/14/1/17 deleted file mode 100644 index 627a56d58fa35f38b099656393a79f468d41ac41..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0UeA<5dbg{!n83wVj%vtl?+D!)7b}=mOb)mW%HYE5ztVt<0>E3_>RfG>Z%zWqaH_L0^8N S?dhMQ_o!}8@%RIiU>q4D=^KXt diff --git a/fixture/14/1/19 b/fixture/14/1/19 deleted file mode 100644 index db91460f4cc86971a426eb86727abe23e401addd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCV4FEt00&62YXkhxUWoI>pAcxF}%Lo~3KWWN2S)%0KCJ5+)l0>js SKTx|cn@5rv-S-}ki5x$-`xv4C diff --git a/fixture/14/1/2 b/fixture/14/1/2 deleted file mode 100644 index 913a84235a089f9220a8df3ddb9ffe6b57e1badb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 58 zcmV-A0LA}!0Ue9M4FE6*0%s#USb+U!@z6#C9Bt~Pk<}uFHWqCG8qP})c=rWUNk8>DCpS@Ni`#-S&D27j|^9h+j4 SYK>KBSq?3=KIT7sK^wsoWg0#J diff --git a/fixture/14/1/22 b/fixture/14/1/22 deleted file mode 100644 index 081b4832c679c44c3ac5fc0eee57ba8df79f89aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 
zcmV-B0L1@z0UeCN4FE6*0%s#UZUOe6rB@{p0%$f!*DcB*rFCghTD}qyIv35}b&1Mp RjKim*<4Zis-9DWt98P5|7f%2H diff --git a/fixture/14/1/23 b/fixture/14/1/23 deleted file mode 100644 index 2f7c3d1e59286deda60f44d04d0b9269f4dc9f60..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0TqnF2>?I{!)6P4bOHCD<#GsG($?KMZ3A~}KsOWd@BOPJpnzmje$n6l S*+{)`H&7)%-Af;Wt{a#Qup3DL diff --git a/fixture/14/1/24 b/fixture/14/1/24 deleted file mode 100644 index a39a990994152bd53ebdef22f6a529bcd3c0e043..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0WFNt4FFLH!e%4=Z~^w8<;u$$O@yKe*CQp@td6=Vr+Gwk?DtF%Lux~w S&rtivDeDL2;JXiK3mb?$0URFy diff --git a/fixture/14/1/25 b/fixture/14/1/25 deleted file mode 100644 index a4770b131134b9e4029738ddba9c41c4784b93c0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0VRvc4FEw5!fHc2Xu$o~vd*lc2#X_K;|+BnEsNGk8> SYrC?4fze#JhUEiuAsgf&rW@P< diff --git a/fixture/14/1/26 b/fixture/14/1/26 deleted file mode 100644 index dab7a2275f7995915eb7ad4a85e9cd4a33d710ff..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN5dbgkO_V`(6|+EBR=4E& SPhETz_pVpxGvfht+8hGzeHp3% diff --git a/fixture/14/1/27 b/fixture/14/1/27 deleted file mode 100644 index a49fb69d27c32adc9cf9544f852acaf027c336c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0UeCN5dbg@f@aI{Z~^z9rPv=q4JDVm_Ml;)Xs&cxCub#70vmYG8Ly=0 R7p1t1wrK13@&J(-99{FT8i)V@ diff --git a/fixture/14/1/28 b/fixture/14/1/28 deleted file mode 100644 index ecd14e4895e183b4e8033c08abdf7617ed0abc26..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0UeCN2>?I{!)6P8bb;LMDXS7g6w$;e5k+5mrf}Va5 R1$-qD7-bL73lDxN8^Pk%7*YTL diff --git a/fixture/14/1/29 b/fixture/14/1/29 deleted file mode 100644 index 69c04131630bf018a3f0d03dc776f6e8009aa99e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0Ue6L2>?J0!e$FTT)_Qjx%4PVHEBkhWE+d`Ogc|QWFXYQS)-?xgIY3? Rj_t2izw)g{{s5h)9D3Oi8o~eo diff --git a/fixture/14/1/3 b/fixture/14/1/3 deleted file mode 100644 index 0145976e82a7bcae583224d37741c62a64fbc408..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0VRyf4FFLH0&622XkhxUWyiN-BH?4&t%X$3Cfgm#R0iieTyOz{9p}hi SS1Q+4Uj*~xzxF?W-W*o%;u_lk diff --git a/fixture/14/1/30 b/fixture/14/1/30 deleted file mode 100644 index abb5717e4068ea002e1d192a423fa717153d3baa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0WFNt2>?+D!)7b_=mOn;mWz)=#89Ip7VAou?eBEdRfgCum2YYtJI@6* S)3bJjDeu&%m3NM{2d;f+Z=xkxEqcD diff --git a/fixture/14/1/33 b/fixture/14/1/33 deleted file mode 100644 index ca5f4e3381c73accbd4d901d462a9717374b2d86..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0X2)s4FEw5!fLa4&;b3{vd%~wl#4iB*kxz1tWF91EpEpklSfdh1Ch1( RtNNJQPxGcxp?#{o9C>t*0%sE+x4`W`OAVA@3`VVx#84QtMx4eCJBgizbQs@v-d^Hl S4i~sLs-fiC)wDowl{OJx2Oc@>p; RtNm(*Q9`#CI|q?490k8&7kmH! 
diff --git a/fixture/14/1/39 b/fixture/14/1/39 deleted file mode 100644 index b8e3b69fa39f2740a3de4a9f76ea0d1a92e9a03f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 61 zcmV-D0K)%x0UeCN4FEt0!e%3VumJnda`jA15G(|?rE!*()1`%;LDa!e+v{t;#10ar Trnl=g869V8g{AESbl)4XKNcLQ diff --git a/fixture/14/1/4 b/fixture/14/1/4 deleted file mode 100644 index 243be6b33751ad092b73387c50249b86b7a9cd63..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN5dbgDJ6k8V{XVN)v6>0vu!OW%$2 S4dbr$$?lgSKhp=LyBwNL&>dR< diff --git a/fixture/14/1/40 b/fixture/14/1/40 deleted file mode 100644 index 4f5dd75b4bead1293a0e02df806462445a12664e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN5dbgvkVC78m2d+wJ5hx6kTtRW9F#* S&n&sSIoaH4k?V8{TD(7Z3md diff --git a/fixture/14/1/42 b/fixture/14/1/42 deleted file mode 100644 index 2fa100540c2c93ea23cd59577bd24229fdf1e166..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 57 zcmV-90LK4#0Ue964FCWL!fqoBJi!0ASac9=N@}TfoOsC?plF;0m}8DvDxOBtJe&Fg PeR?hlu=AgCfn^)jeij%1 diff --git a/fixture/14/1/43 b/fixture/14/1/43 deleted file mode 100644 index 64a51738c77c1ccc49bcb04f1f4da1a7f79400bd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0VRvU5dbg?I{!)7aeWP$EK%f$n+&_r8O$}&i3+?!#xf}@sthm;V0;5 S>&@}>1o`fd2TFNw%NxEhmK|;Y diff --git a/fixture/14/1/45 b/fixture/14/1/45 deleted file mode 100644 index fe8dfd118e965dd07392770e42cbb608a8fccfdb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0VRyV4FEt0!e(oDumJnday1uYkfOrU+KYOkYs<#z7B|4m1R0^0fj~k; RyF=wu=V+w&hzE-y91a^p8w>yd diff --git a/fixture/14/1/46 b/fixture/14/1/46 deleted file mode 100644 index 56fcc687310837396d6c3d9a6f352788f9edfa22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 61 zcmV-D0K)%x0VRyV4ZuJQ!e&D}ya4;pQs){uk06Jzc7@PvAz?8vZ10$;fRt$IraRnU TMOWb!_-m>1l{xzWi+>yYP*ojM diff --git a/fixture/14/1/47 b/fixture/14/1/47 deleted file mode 100644 index e058f7d6c4930abb6a0bab1fe8815fd314c9eb5d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 61 zcmV-D0K)%x0UeCN4FEt0!e(oDumJnday?^AK#HszZ8JjZKsK!#s^jEVJ}m12IaRe5 TeIY6T`?j*?a)ayvk9iz00qGiY diff --git a/fixture/14/1/48 b/fixture/14/1/48 deleted file mode 100644 index 26e7f4d6f7496805be6ecfcd932f60e5f55a6eab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN2>?I{!)7aeWP$EK%jFalLqlnst?`7gIIYm}Gcr=7&DAq3CrW7f S{}g>O_64x#?)m_Z030F}eH$kL diff --git a/fixture/14/1/49 b/fixture/14/1/49 deleted file mode 100644 index 78f15a2f6fd6687349542b402fe82baf9ff8a5b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 61 zcmV-D0K)%x0WFNN4FE6*!)}W_`IQ2 TD*9p%UoN(kG4zfDgb5q)4ZIo! 
diff --git a/fixture/14/1/5 b/fixture/14/1/5 deleted file mode 100644 index 184d34eea3f3341a1955db3c17d239dfc7aefd16..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59 zcmV-B0L1@z0VRyd4FE6 diff --git a/fixture/14/1/50 b/fixture/14/1/50 deleted file mode 100644 index 9cd75818a0dd7b65748b993c1c48adca19325306..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN5dbg?J00>RqEhYf81wVa0Y6L2IzOfW)9Bv5E8(yW|#XnICJ@u9ny QZ&7yn7x6y$0hb;d^35U{wg3PC diff --git a/fixture/14/1/53 b/fixture/14/1/53 deleted file mode 100644 index f3a2f2edb09c355c4224a902f1f52fa1ffc83eb7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0VRyV4FEt0!e%2pSfKmQay91zQ&4Euh89YMD9q8`Oor%z5HB4mmBRd~ S5tsd~UR|nZtI-34UK{EFup3AK diff --git a/fixture/14/1/54 b/fixture/14/1/54 deleted file mode 100644 index d1574903afe3f1882dfd3a738cc67e3c93e94322..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0UeCN4FEt0!e%3Vut4{p<>WLWu^<*mQxa=}+DLAd<{H0%sE+w}AW4(iMTBXAIV+^d*cfUg&t%7~M|{RNd%ATNh1M>ab?4qfr0=(;x0`>(@i Q>fngAgeNmQd3qomEd6;K5&!@I diff --git a/fixture/14/2/.zarray b/fixture/14/2/.zarray deleted file mode 100644 index 829927d4a4..0000000000 --- a/fixture/14/2/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "bz2", - "level": 1 - }, - "dtype": "|S1", - "fill_value": 0, - "filters": null, - "order": "F", - "shape": [ - 5555 - ], - "zarr_format": 2 -} \ No newline at end of file diff --git a/fixture/14/2/.zattrs b/fixture/14/2/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/14/2/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/14/2/0 b/fixture/14/2/0 deleted file mode 100644 index d9e1e751a8055d31f5c82cd2c1bdeabe14cd7cca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 76 zcmV-S0JHx>T4*sbL0KkKSy%^34gdfNMgU*{D1Zb2AOKKc162PNH9f(jJ=){j-lonc i1Dqk4lj4k;uvn$}MZ+Pfa}I4)_`8xR!i0jrI#6)0V;$lE diff --git a/fixture/14/2/1 b/fixture/14/2/1 deleted file mode 100644 index e7644decdf4ab63e8acbff0c06a8d7047ee51d4c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 69 zcmV-L0J{G|T4*sbL0KkKS)O`Z4*&ojh5#sl1OOlaNNM7NQ`1tS(k`3;wAO(nq)rPl bku<9FTS92IDKY$}lu^j8qGbEdOw4Dgy(@F$M(&jRqDV8IabH`NYZNauDmopc}U)^u(o`$)>K^ c-e7hm(CA}zPO6ge(K{3UL{wz|n5#Mf0Ik3qi2wiq diff --git a/fixture/14/2/12 b/fixture/14/2/12 deleted file mode 100644 index 6066bf062816de727d54e1c643e97aca3774ba87..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 69 zcmV-L0J{G|T4*sbL0KkKS(~0#g8%>q1^_641OOlaP-dD9JW!&m28R@MjtsPPnj@%W b)^ugojxz#DtkpsqW5wK&P81|2=arzq@Us~n diff --git a/fixture/14/2/13 b/fixture/14/2/13 deleted file mode 100644 index 377683fef98f127693b9676fd4cdea3bd552e967..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 72 zcmZ>Y$}lu^j8qGb6nPW7gMmT%7=r?XMgt3w3`lFpe6;u;`@P9_K7zM4t;tpuD_&R{ cb4t%|!Q^>!+PT{f@6&tIRj^P&^TzH10Fp!;(f|Me diff --git a/fixture/14/2/14 b/fixture/14/2/14 deleted file mode 100644 index 9e224d31ab7b72011aeb6445698cc463a7013e02..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 68 zcmV-K0K5M}T4*sbL0KkKSu3a`;{X5-h5#sl1OOlaP-dD9JW>q}E_FB(7`ehWkzEUG aO~8#2iLsu7)iWc|-Y(>daG@YqP)EkILKlhv diff --git a/fixture/14/2/15 b/fixture/14/2/15 deleted file mode 100644 index 
e2302f8001f0e8d706fe76ff702bd59e74838f53..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 74 zcmZ>Y$}lu^j8qGbJZ!~viGe}djUjEB8IpD`AZ_t;z4+sWeB!BXKTkCIbK@g%=S3 diff --git a/fixture/14/2/16 b/fixture/14/2/16 deleted file mode 100644 index 31b08440b5eeea636d69158c6321f8e9f4380a6b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 68 zcmZ>Y$}lu^j8qGbOybeL&cGnY#-P!_!l1z5aQP@(=29^MoumMb(!Po_muU88;8spHg+qG_0I%y81^@s6 diff --git a/fixture/14/2/17 b/fixture/14/2/17 deleted file mode 100644 index dd92ddd40340eb1d4d5253f933f54b704ba4bce7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 72 zcmZ>Y$}lu^j8qGb{G0tQlYv407=r+VMgt3j0z*JrLsp$8@9s?x9&Ji@*|yBY$}lu^j8qGb6w&?a%)lVn#-P!_!l1wqkTy%uJ}Wq_=%a;soa=5Qvs+&mUC&)t Z+;TTM!##L)AnV6+mEcxRMyLBB1^@*}7_tBW diff --git a/fixture/14/2/22 b/fixture/14/2/22 deleted file mode 100644 index f0bf2f6ab7a229ce780cccc1027fbd306c9a434b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 74 zcmV-Q0JZ-@T4*sbL0KkKS)W##p8x<2#sEM7D1Zb2AOKKifNFjzI`=&-sJT}#;7{CAk diff --git a/fixture/14/2/24 b/fixture/14/2/24 deleted file mode 100644 index 75aa48f43796f5b926322e3b8108a4606d344a5b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 73 zcmZ>Y$}lu^j8qGbjC0nqWnf@)V^CnwXkY=70cj0h^C!7j-&nbP>iNy)Q`UP%oECYV dBe6ZteW9N6sbq=6&mX<>Sk5KHeInI;0RU4t8!`X@ diff --git a/fixture/14/2/25 b/fixture/14/2/25 deleted file mode 100644 index 637eb9aeb996c788e8d6d91f3dcbe2cf17184fdb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 69 zcmV-L0J{G|T4*sbL0KkKSsjTqod5s@1^_641OOlaK-0wrX`tq_Nv6pmDZ(|3i>F}A bB#Cszj7S*H49bO!M~k^4oG3^R#F|ck9J3ky diff --git a/fixture/14/2/26 b/fixture/14/2/26 deleted file mode 100644 index c21ca8e226616ed038002846b647f791d85dd5e0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 72 zcmV-O0Jr}_T4*sbL0KkKS>P=9+W-I)#sEM7D1Zb2AOKKifNA2O+uIWHhsQ2fx{C^U e_34qcG-z3PFvldYhHUno@pmLsg$WJ<&wa4=jUHkE diff --git a/fixture/14/2/27 b/fixture/14/2/27 deleted file mode 100644 index 1beba30b600389d3e3423517baed5b197dd7280f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 73 zcmV-P0Ji@^T4*sbL0KkKSshSAegFU!MgTwnD1Zb2AOKKifNFjzIOlhqnL2#K5`x%h fwTTHywMU|{oStB+)vT_lE%A3GQ-uiu)dV--SU?+0 diff --git a/fixture/14/2/28 b/fixture/14/2/28 deleted file mode 100644 index 853ecc6baa7ec635b7b509b9aee173d49130bfeb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 73 zcmV-P0Ji@^T4*sbL0KkKS^BUAT>tY$}lu^j8qGbJX{^*%)lVy#^Au9(ZIr>zz~qukX5H?rF=I!?%^B0m-&{;A**&6 dP18+l%w~ENAC`KnZSkXb9?Q9eB=WO75&&G38czTK diff --git a/fixture/14/2/3 b/fixture/14/2/3 deleted file mode 100644 index f0adfbddf8b298bfef27c44c36371ef17d670cf5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 74 zcmV-Q0JZ-@T4*sbL0KkKS^E?tdjJ3fMgU*{D1Zb2AOKKc12RbWL&!k$wMn%j7;Op; g6;zyd=nJ6}4c8TX>D?Obv_*a{V!Z diff --git a/fixture/14/2/30 b/fixture/14/2/30 deleted file mode 100644 index c0b934a9e25668083182cb96b439f3c6008d9905..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 72 zcmV-O0Jr}_T4*sbL0KkKStKUyO8@{JMgTwnD1Zb2AOKKifNA2O+V=B>SY&lMaAGO2 e%aK<+i_#f6$hZz=gtg7%;_gVN3K9f_-Kk)PC>;d= diff --git a/fixture/14/2/31 b/fixture/14/2/31 deleted file mode 100644 index 75be21d1c86a531b1d9d253ea3a75e3f8f16a291..0000000000000000000000000000000000000000 GIT 
[GIT binary patch literals elided: deletions of the remaining fixture/14/2/ chunk files (fixture/14/2/32 through fixture/14/2/55 and fixture/14/2/5 through fixture/14/2/9, each a roughly 70-byte compressed blob).]
diff --git a/fixture/14/3/.zarray b/fixture/14/3/.zarray
deleted file mode 100644
index add9084d76..0000000000
--- a/fixture/14/3/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "|S1",
-    "fill_value": 0,
-    "filters": null,
-    "order": "F",
-    "shape": [
-        5555
-    ],
-    "zarr_format": 2
-}
\ No newline at end of file
diff --git a/fixture/14/3/.zattrs b/fixture/14/3/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/14/3/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
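[Aside, not part of the patch: the .zarray document above fully determines the layout of the deleted test array, so it is worth noting how its fields map onto the zarr/numcodecs API. The following is a minimal sketch, not the repository's actual fixture-generation code; the target path and the random fill data are illustrative assumptions.]

    import numpy as np
    import zarr
    from numcodecs import Blosc

    # Mirror the deleted fixture/14/3/.zarray metadata: a 1-D array of 5555
    # single-byte strings, chunked by 100, Fortran order, compressed with
    # blosc/zstd at clevel 1 and shuffle disabled ("shuffle": 0).
    z = zarr.open(
        "fixture/14/3",  # illustrative path
        mode="w",
        shape=(5555,),
        chunks=(100,),
        dtype="|S1",
        order="F",
        fill_value=0,
        compressor=Blosc(cname="zstd", clevel=1, shuffle=Blosc.NOSHUFFLE),
    )
    # Illustrative content only; the original chunk values are not
    # recoverable from the elided binary patch data.
    z[:] = np.frombuffer(np.random.default_rng(0).bytes(5555), dtype="|S1")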
[GIT binary patch literals elided: deletions of the 116-byte compressed chunk files under fixture/14/3/, fixture/14/4/, fixture/14/5/, and most of fixture/14/6/ (chunks 0 through 55 in each directory, git path order).]
[GIT binary patch literals elided: deletions of the last fixture/14/6/ chunk files and of the 1600-byte compressed chunk files fixture/15/0/0 through fixture/15/0/5 (git path order).]
z@_oP52R%HP{jLxC*?s$^ef+F9W}m)(4`b_-ANlCPNl$*_(|*$9m-^#3+b`*_`jbb$ de{q-Z_p#o5>f`&y61rZ*0B$zX9=NieLZ$ diff --git a/fixture/15/0/50 b/fixture/15/0/50 deleted file mode 100644 index 5aa841f699e171d348726bb457347e596f3f65be..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1600 zcmb7>!3x4a5JUG@?ZK-k2;$c}3w@VYm&_IhNs~!3^%!H^YF;(-dDgto`TW86diOY| zelGf>-c7&nBh($ diff --git a/fixture/15/0/51 b/fixture/15/0/51 deleted file mode 100644 index bac0eb913108e4874bfd480beff7ee8d4e09f04b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1600 zcmbu5!3x4a3`F-=?ZK-k2;$e9Lf_@lA-lB@WSrz>vfCKrR`sgdzpZN5-(A%Ata=~y zInnp}?{QT93~PAslg~c-(X0K;voGmces{lp@9Xm0?{wP7J>7ou&9jD=`1rhcH}Cz- zC%$=}w}wZa4A`FstS3brVx2kW|{En*0uL<=%tKMgQ zF4*%hdiUCSoK?SJW*)zeorih+5~FYbq{sYj-}<|p@^fF8C+)A^)8(~(%0phyo4vp5 zj~@4R`~ULeKI@sgeDu*X`<|rFe%H4?%=^LU*}vn?KlNAr$>(`<%AfqxeDY)7eZPM$6l@u{fzk!-aCxmeLB7Qsju_N!{0dZyS!;W{^lFg4?Z~cbwBvl z{~M=1-|pl7Q$71k`Khn-`M)VYJz_51jbUPyb{8sh{(czx_H)ANzS9 P@)O^B^TG5nzsvgo&uWG} diff --git a/fixture/15/0/55 b/fixture/15/0/55 deleted file mode 100644 index 69a07cf619da7fe5b304b4b87b2c013f5f53ab92..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1600 zcmeH_K?;B{3a0uLx!>bo_c#ITq*ZnR diff --git a/fixture/15/0/6 b/fixture/15/0/6 deleted file mode 100644 index 5c696799e38a7d4a103df4964329cb437620d3ac..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1600 zcmbV|!3x7L3`6s)+acGnL80{P>y17rj~aU|n4(CQl*}>4>zMeBcg%9kdV!yD_K4YH zzMt`ov$J29|NPu)Kl{6{dFNYiY&|&Z2hV%l^|L>?@~iGkKm5SmzRq`^vGvXa)2FMa zFZ;&$!DrZc#@@fnXZ%o=Y@9+K@&iw6{<@O-ucGfrw{#&tvBxc%v(=hIJl`jM?4S0NzRNRjOdfr}tU~pg0T2{BUm5>O;jOBkxr*G(-`AZwNxG7x9WG+9iB>borkw$ zxIP&F#MURC%X2;JUwH0S>2vQ}CXeTx z$M1pO^KjCqyxxcZTYb0R{f@gl^ts>nn63Bzr+WLr>HWOV?0M%gcY60>-mBxZk9p@e U+rR5u5BojS{w}ZmQl7ML2LpVEy#N3J diff --git a/fixture/15/0/9 b/fixture/15/0/9 deleted file mode 100644 index bad570b092569e8acac74aa6c9a9ce9fc06d2625..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1600 zcmcJN!3uyd3`6&;dhjX=g821TFqa20iaIDGq)pPXzO~j>ZB@tVtFG`+{CHMfZK_(% zTI+W6b8*Mve8SSxyz>*!euvY(NzcuveB77)iRV7-7q5I@>(h^ze#~;0*p=3T$^ zm@na^N1l2qFL^ib^3z{>>KE335GF7Dm%sF#zw*(aSo5ymy~o}Dw|?{|eyXp1`F`4m LeA0i~&-}zYpHqtY diff --git a/fixture/15/1/.zarray b/fixture/15/1/.zarray deleted file mode 100644 index 4b49834802..0000000000 --- a/fixture/15/1/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "zlib", - "level": 1 - }, - "dtype": "O.=Gu>>f$?zotԗoS \ No newline at end of file diff --git a/fixture/15/1/1 b/fixture/15/1/1 deleted file mode 100644 index 45aae56240..0000000000 --- a/fixture/15/1/1 +++ /dev/null @@ -1,2 +0,0 @@ -x -@U@Rƅai_۶f^3yμ}y3W9xNK>}^sߖokw~g켄S|o7}'&gULԇ}{:=˧_s*{폞[=ܾK \ No newline at end of file diff --git a/fixture/15/1/10 b/fixture/15/1/10 deleted file mode 100644 index 7437e5dfad..0000000000 --- a/fixture/15/1/10 +++ /dev/null @@ -1,2 +0,0 @@ -xA -0 ki S0iW[kqg#og=yg8O>WfcfYoջ=v{_?_J_w?eg|~7}`泾bewpw?3zzI \ No newline at end of file diff --git a/fixture/15/1/11 b/fixture/15/1/11 deleted file mode 100644 index a5b9b2c595..0000000000 --- a/fixture/15/1/11 +++ /dev/null @@ -1,2 +0,0 @@ -xA -P U @_(24I|./5G|֛֚y՟w}}ywM}w7ӏ{.O_p'Oz}O{ͩ7fɇ7;0ĩ_fѧ?( \ No newline at end of file diff --git a/fixture/15/1/12 b/fixture/15/1/12 deleted file mode 100644 index d00873aed2..0000000000 --- a/fixture/15/1/12 +++ /dev/null @@ -1,2 +0,0 @@ -x -0 Ca`>5+ȒeqzWR 
ok?zՉ^w7o~XO>u_3߼1f>3|g1N;O^3vxֻ?'y=R3ĻLO>- \ No newline at end of file diff --git a/fixture/15/1/13 b/fixture/15/1/13 deleted file mode 100644 index d4f2a83fa1..0000000000 --- a/fixture/15/1/13 +++ /dev/null @@ -1,2 +0,0 @@ -xA -0 kiPʐ Dk+'s^zT}c~Ogտ<ɳf:=ɇ~s>~owyֻޜwߓ>zya;n~irK~Oo=uz7>ߪ \ No newline at end of file diff --git a/fixture/15/1/14 b/fixture/15/1/14 deleted file mode 100644 index e88b8c90b8..0000000000 --- a/fixture/15/1/14 +++ /dev/null @@ -1,2 +0,0 @@ -x -@W_뻂2eȐ$sVgͣ]?kY__OOgw7$}w}ykݜo?}fys_sʟկs?ۗy'v}};<#_k׈ \ No newline at end of file diff --git a/fixture/15/1/15 b/fixture/15/1/15 deleted file mode 100644 index b349a1b7bb..0000000000 --- a/fixture/15/1/15 +++ /dev/null @@ -1,2 +0,0 @@ -x -0 ܁ YZnҹ6U^ֳ꺽?o.k[<=&{,Wǜ浞?3wϋI~3~7֛Ǐ|}g?[|7[oN?C{IS?Gy4/^᫾gS3Ss~~Iǟ{;>t8;~oy>wXOl籎_ʇn?^:7|zw?~;;}GyҼu3?z?iwȉ \ No newline at end of file diff --git a/fixture/15/1/17 b/fixture/15/1/17 deleted file mode 100644 index c1dbc1684e..0000000000 --- a/fixture/15/1/17 +++ /dev/null @@ -1,2 +0,0 @@ -xA -@ zث산z3`4!;1VsSs<3^3%23W~Oi^~}I}^wuhWu~{~<~;N#Ͼ_yUe{ɛo'ͽgۿ݉= \ No newline at end of file diff --git a/fixture/15/1/18 b/fixture/15/1/18 deleted file mode 100644 index fee75855f0..0000000000 --- a/fixture/15/1/18 +++ /dev/null @@ -1,2 +0,0 @@ -xA -Az*+ŭKA * R$tnkq^󿉯×99_[7M8{R_o=1S6u5/ݷߜާy#,{3{6}`색N \ No newline at end of file diff --git a/fixture/15/1/19 b/fixture/15/1/19 deleted file mode 100644 index e15f65db82..0000000000 --- a/fixture/15/1/19 +++ /dev/null @@ -1,2 +0,0 @@ -x -0 !גB +C}he̷ּx9A=0ѧ?g}Ǔi߿y8'I9/~0|>wNO߄o;5{=Oti?/0. \ No newline at end of file diff --git a/fixture/15/1/2 b/fixture/15/1/2 deleted file mode 100644 index 2d656020c8..0000000000 --- a/fixture/15/1/2 +++ /dev/null @@ -1,4 +0,0 @@ -xA -@zث(˾~[@ - -3:1:[\S^lZ{nNy;y}|3oNy_0w^b䥾w~֛۷>3vzyf7~z?s>ڟ3g9Os@ \ No newline at end of file diff --git a/fixture/15/1/20 b/fixture/15/1/20 deleted file mode 100644 index 773b02aea9..0000000000 --- a/fixture/15/1/20 +++ /dev/null @@ -1,3 +0,0 @@ -x -@ CWU\X ->XŁ2I.c5EE}uѯgo7Jփ߼+=x.&__G;qG>{oc뛷gY^oC=o j \ No newline at end of file diff --git a/fixture/15/1/21 b/fixture/15/1/21 deleted file mode 100644 index 7bd6a0e130b6a8bc099b29f2338030c737d23d9f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 177 zcmV;i08amS0j-hC3c^4T1ov0X!K)|;;@3NY6ok@d){wvuIz9c^%`(Qg)Vyof=U(&N z=Jf+#>)UOcdVTG8{Mr6Xb=dcJe#d*?{m?(w!`5S*_S1b@KbwEl&w2l1_e*;E!5jPj z67M|Yg' \ No newline at end of file diff --git a/fixture/15/1/24 b/fixture/15/1/24 deleted file mode 100644 index 333ff4b1dc..0000000000 --- a/fixture/15/1/24 +++ /dev/null @@ -1,2 +0,0 @@ -xA -@ !BB€Yt!Ym:ۢs7+y;kϳ7ow~c3?O_幫}C凷|w1~W=~=g~gUy3*>}W|5_? \ No newline at end of file diff --git a/fixture/15/1/25 b/fixture/15/1/25 deleted file mode 100644 index 0c81e7e043..0000000000 --- a/fixture/15/1/25 +++ /dev/null @@ -1,2 +0,0 @@ -x -P ߯CbA(F\]B(kٍ.cugߪWC?>K6T4?ۿ||[};Oy:Nwtw~O~yk|KKߧ \ No newline at end of file diff --git a/fixture/15/1/26 b/fixture/15/1/26 deleted file mode 100644 index dba3814a87..0000000000 --- a/fixture/15/1/26 +++ /dev/null @@ -1,3 +0,0 @@ -x -1 ᾪ{* -s 9M23c[իj~ϪY?|ٜVOWyyd篲y«o}9߱S|_sk|՗'>]_=ټrsx7o?|+}|y;=7Q \ No newline at end of file diff --git a/fixture/15/1/27 b/fixture/15/1/27 deleted file mode 100644 index 5b7a815277..0000000000 --- a/fixture/15/1/27 +++ /dev/null @@ -1,2 +0,0 @@ -xA -@D\UnEAO OM(w%eYrs[>111'}}/7?o?^ߣ~~߼Z:ޏ__~U{<7ׯo>m__yۧ幗~z|^^}v߼:Vy? 
\ No newline at end of file diff --git a/fixture/15/1/28 b/fixture/15/1/28 deleted file mode 100644 index 92c14290e9..0000000000 --- a/fixture/15/1/28 +++ /dev/null @@ -1,2 +0,0 @@ -x -@CWЫ(;Jʃ, KLmugu?ooO>om?;>3ny'go \ No newline at end of file diff --git a/fixture/15/1/29 b/fixture/15/1/29 deleted file mode 100644 index dacc0d449f..0000000000 --- a/fixture/15/1/29 +++ /dev/null @@ -1,2 +0,0 @@ -x -P ߯CłPd`E C6cmOGջj>Ϫ^'ԳV?[/_~0{_Ol=l]yf%w={3>3~;o?ϛ<͞~>#qCO?}7ɳo}c_ȇS~K \ No newline at end of file diff --git a/fixture/15/1/3 b/fixture/15/1/3 deleted file mode 100644 index c86f01f657834f6956d891bc7f5fa18cdc83aabd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 180 zcmV;l089UP0hQ1}3d1lELs4I~U36PgD3qK&6NomC4vqsu5R$*f-o_Z;Yku|L(9hD3 zn!V=y^jEdKS25q${NtOqPhTxR{^u;KPhWkvFTC)(_nP_ktKCOmezg0kvu~G=Z@+nc zb@JgwXCLpgs>2Jv>rbC~`{H+b`FGEUXMV=hcfT30JKw#RJheQ1_fwmfXI|g)$>%=N i-FxfHk1sE}>rY>IKE8eGj-UQjzIplTS^fZQri}2ZR%a>z diff --git a/fixture/15/1/30 b/fixture/15/1/30 deleted file mode 100644 index b070f41e8c..0000000000 --- a/fixture/15/1/30 +++ /dev/null @@ -1,2 +0,0 @@ -x -@ DW_Kˁ0sm^YOPO[ifϔq}u}oLo1׸g?{G%ݏ|捭}Ic>at~ħߘ|^ѧ~|s_: \ No newline at end of file diff --git a/fixture/15/1/31 b/fixture/15/1/31 deleted file mode 100644 index 18c9fd3e32..0000000000 --- a/fixture/15/1/31 +++ /dev/null @@ -1,2 +0,0 @@ -x -@CWЫT(|Pٺ0,!L"cfZ_{菋/c,Sϼ$޻z9yN]yӯ=a>ç}ǻ1g{덭7Nپ߿wGoL=Лw_g{GO߮wywy~O^)?iy~?yY'=ylz>ֳ>YT \ No newline at end of file diff --git a/fixture/15/1/33 b/fixture/15/1/33 deleted file mode 100644 index b1d98acbc8..0000000000 --- a/fixture/15/1/33 +++ /dev/null @@ -1,2 +0,0 @@ -xA -@ ͡ג@uh2`I"$˲1OKգ[ozunͻ8qϾ<~v>Yyk7~ zgy<'xv>c1>w.~c7&{ycؓ~x>?oϞo=<}U}IC \ No newline at end of file diff --git a/fixture/15/1/35 b/fixture/15/1/35 deleted file mode 100644 index 393382fadc..0000000000 --- a/fixture/15/1/35 +++ /dev/null @@ -1,2 +0,0 @@ -x -@ W탯҂ ~92+61ƣ[3f1U>sto~Vy=tǿobͩO?X~][7ӗuO^OovUSu9}N~1'u3X=t|t`?pej \ No newline at end of file diff --git a/fixture/15/1/36 b/fixture/15/1/36 deleted file mode 100644 index 1dea3a54bd..0000000000 --- a/fixture/15/1/36 +++ /dev/null @@ -1,2 +0,0 @@ -x -P ߯bA(s\RBc6[Kէj~[~7=Qϣг_O}cO}[٧eN~yz3ܓΟy/}·<}ל~wױwz U \ No newline at end of file diff --git a/fixture/15/1/37 b/fixture/15/1/37 deleted file mode 100644 index 4a4f90a683..0000000000 --- a/fixture/15/1/37 +++ /dev/null @@ -1,3 +0,0 @@ -xA -@ a@eK&ɒs~c - o5z'=/<뷚~߼O`Mw^w';`ޯo3N8}?}ۯoL>~·Gx3N|Ӈ7 @~ \ No newline at end of file diff --git a/fixture/15/1/38 b/fixture/15/1/38 deleted file mode 100644 index 5e55c34981..0000000000 --- a/fixture/15/1/38 +++ /dev/null @@ -1,2 +0,0 @@ -x -P ߯C HT:.0dټiqzVo:fݷZGNz};c<~]fϛw5o#|f}>~f{__wy`ֻoN.)Γ>"fo>1y| \ No newline at end of file diff --git a/fixture/15/1/39 b/fixture/15/1/39 deleted file mode 100644 index b80e96596d..0000000000 --- a/fixture/15/1/39 +++ /dev/null @@ -1,2 +0,0 @@ -x -0 CۡAa精cMW0A,:؎zyU=±8jVʷs5gov>o{_qG`u|s>cn}w{O|Ky>a9i>Y~_}Ƀ?*k \ No newline at end of file diff --git a/fixture/15/1/4 b/fixture/15/1/4 deleted file mode 100644 index 9f2b54481823a7acfd248aa44bee107a6b75beff..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 181 zcmV;m080OO0i}>Z3c>&o1N&8b@G1%g@%3({a~TOKT3AY&Y?AJ}EX!r)HM2kK%rE%y z+5XpaqP49hU{J58S`=h6?anj?D9_+j~am9$fC5URd diff --git a/fixture/15/1/40 b/fixture/15/1/40 deleted file mode 100644 index 65e84b7941..0000000000 --- a/fixture/15/1/40 +++ /dev/null @@ -1,2 +0,0 @@ -x -@ DWEP͂@U8-u5[kx+9{?WxOyo|{=C?c?}wNu>0``{o?IO?&4p~.o=}]?[^O7o~+3'~w_c%>Yo87o<ߟ~ѓN|޼/ \ No newline at end of file 
diff --git a/fixture/15/1/42 b/fixture/15/1/42 deleted file mode 100644 index 1ffee1d82742fd43211cf5a6e6a4f16e22490ccf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 181 zcmV;m080OO0j-h23WHD#1mCaf!K<`TD1Ln-I8cU$m8uYA%x1H}d5ketZB_g8sCpgs z{=rLqTaT*OFwgbv+b?n2m*%_va}QS!eRFrO?{Pl&@y8GLKKS2x`@?7blAe6NmwV36 zTi^MyFXeaNerD^Pr|Wll>~BA_^=9in1|Wd`O!~$ j@3Ws7Kk}qLdh%HB{pj&G-=**JINw!ve*1j@co2yEBAjX6 diff --git a/fixture/15/1/43 b/fixture/15/1/43 deleted file mode 100644 index 66a592c592..0000000000 --- a/fixture/15/1/43 +++ /dev/null @@ -1,2 +0,0 @@ -x -@ W냯EA9p@@B2\5j{]j~foZ=O?<3~1s/ٻoғ也;_{O^~}仟o|1[Oy/]~g=y<߇yS_aΏ}ͩ~f8 \ No newline at end of file diff --git a/fixture/15/1/44 b/fixture/15/1/44 deleted file mode 100644 index 39caa64600..0000000000 --- a/fixture/15/1/44 +++ /dev/null @@ -1,2 +0,0 @@ -x -@ W탯 _o:PB(f7k[իjG?}og^4ywqV47sOoL4뷞xz~Wa켎s=/ov^ӟu=t`w_[O|7~OG&wUg~pt_c&?|tKy=~j}y].~'i޺y=ofNN/Ӽu3y]//n \ No newline at end of file diff --git a/fixture/15/1/48 b/fixture/15/1/48 deleted file mode 100644 index b57b31cdfa..0000000000 --- a/fixture/15/1/48 +++ /dev/null @@ -1,2 +0,0 @@ -x -0 烯rpXb A&ۮcS_Pgz]u7[o>~3~w^ݷl=y|o>Þ~9!ggI$=y;|sOix?}}~gͷb~Ï \ No newline at end of file diff --git a/fixture/15/1/49 b/fixture/15/1/49 deleted file mode 100644 index 283c33c31f..0000000000 --- a/fixture/15/1/49 +++ /dev/null @@ -1,2 +0,0 @@ -x -@ kQį7e dW*,˘]#bϑ*^?+ʳ}uϟ<*Ư;a,;ԫ}g\?7[?OGf֭yp/uez<.~֛wl?](W|Ǐ+7_K \ No newline at end of file diff --git a/fixture/15/1/5 b/fixture/15/1/5 deleted file mode 100644 index 0b390ec3a2..0000000000 --- a/fixture/15/1/5 +++ /dev/null @@ -1,2 +0,0 @@ -x -@CWP< °$]1:5ck;·WKxx񻊝><]_x'0:zc}=<}}7f~cIO~~7/Gq|֏}Gold` \ No newline at end of file diff --git a/fixture/15/1/50 b/fixture/15/1/50 deleted file mode 100644 index 96ba17b620..0000000000 --- a/fixture/15/1/50 +++ /dev/null @@ -1,2 +0,0 @@ -x -@CW(; }WII2cj^5yּz8~.M$_O<|cϼg}IwUw'ol`? 
?w_>+U>ѫ̳/Y~w}t=~ͳw7'yG}wʗ<)\_߽?oɯ =g}.0zyUafyons_'~C^ϛu>뙻|3~׷>ٿ?Wysʟ|c}_e{uy胈 \ No newline at end of file diff --git a/fixture/15/1/53 b/fixture/15/1/53 deleted file mode 100644 index bf0e8df5d10364da64cbedf351654a877b208f22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 180 zcmV;l089UP0hN%^3c^qfL;I`xuvbwK#ILu?4vd2(cS4aYX_DTps&4!AU)_72`@H_( zYk#}+ynDXCq~BU|^f$)u@pG#JPyXh?tNT-a%ERCHl0WSyz4yUW-r4@v8@pe^X`lYQ zhxO)-ouByRKlx4XOWrDvo_oRUgUy4de(=+M_@{mH(Ho!jU(KWcq(9pSKlh(7`jnsU ix4-rFH*bs|{lL73{Su%2C!Rd~-NzWc`+WdPvW?8FFmH|k diff --git a/fixture/15/1/54 b/fixture/15/1/54 deleted file mode 100644 index d8d820fd2fd163e56c361472f6fa53b4f79f543b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 172 zcmV;d08{^X0j-eR3IZ_h+l6O_P}yrNJI;v44qrLt+g&S@0$JGYo5ov z26!vZS${2jJEpGn>wD+d^-cQ6Uak)PjQJ1VJB;3aI=%U+uk*>n-#GERylFoE<{Q%w zJ~;JtKls-F8>c?s?&JPbJ^M`gsju_-zbQZECq4eD|J2X^#`rrAocPX9|6~8DpYxNy a{W?q^`*|Po6W@CC!Spe|%liP&YKA=~>10^| diff --git a/fixture/15/1/55 b/fixture/15/1/55 deleted file mode 100644 index f2d7c6fc55..0000000000 --- a/fixture/15/1/55 +++ /dev/null @@ -1,2 +0,0 @@ -xA -0 $RA_o.@Y&TadKDl5sR#kg8tW?Y_abO[z1ds멋5scӽGut UKܹ_8Uv \ No newline at end of file diff --git a/fixture/15/1/6 b/fixture/15/1/6 deleted file mode 100644 index 1395b624a7..0000000000 --- a/fixture/15/1/6 +++ /dev/null @@ -1,2 +0,0 @@ -x -0 C!ױA>({-H1wrzqb񌳳_j?y[ol=8{?K?ݾwӧ/gy޿g2?}>]߼~k}Yo\3O_L^G~g; \ No newline at end of file diff --git a/fixture/15/1/7 b/fixture/15/1/7 deleted file mode 100644 index d62802524b67b8f8d22ad2184fb8f607078f0eae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 184 zcmV;p07w6L0iBV{3WGop1ou~yL#{zV2!6e@U|kBE(FlXIJ^k2q8Dnhc{ObL#dDW~n z^ZC?#&zTs0pP(M&rM_MH-QBm|`NrO-5B-g;H}3q*TTfm(uX+2y(?9vc|GQ7WTYnexKiPkZ(|zFw560j3Vaz^ypZ@?O>WUEkG;1ON diff --git a/fixture/15/1/8 b/fixture/15/1/8 deleted file mode 100644 index 2a8fc693ab..0000000000 --- a/fixture/15/1/8 +++ /dev/null @@ -1,2 +0,0 @@ -xA -@ az#X MT!HN3ST߷wJtc>'s=_3O~<<֛g=us[&pҧއ[}|_ӏ~s]/Gz)/y7_z&/џX߽~'~}~ڇ|;S \ No newline at end of file diff --git a/fixture/15/2/.zarray b/fixture/15/2/.zarray deleted file mode 100644 index eec9c32ce1..0000000000 --- a/fixture/15/2/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "bz2", - "level": 1 - }, - "dtype": "nF3E$P{Lq@sx-!@ z+jz{*1X&41Y6ld|)kKq9ZH+~5f+6@6;DLG2I$<{9 diff --git a/fixture/15/2/10 b/fixture/15/2/10 deleted file mode 100644 index c15ce3004e3448cc3ebe697faf0e7fcb04db2230..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmV-(0F3`aT4*sbL0KkKS%WuGa{vNkL4ZgAF#xhqFaWH=C;C+y07>eq7)%hAMwrz5 ztj`sRsAD53NlQ}_p<_95 diff --git a/fixture/15/2/11 b/fixture/15/2/11 deleted file mode 100644 index f1e95dafdb65e4602d8c661315c55c082d333588..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmV-)0E_=ZT4*sbL0KkKSNKtg5o8&XhyI7l<9GZ9@RskH|a_aTnB32b2^Q6_Xnz6k0Y2 W(``tJ2-s~^dM@OOaG@cbI0ka)FD#k> diff --git a/fixture/15/2/12 b/fixture/15/2/12 deleted file mode 100644 index b79946bbfbbc7b47b6ed66875bb9bd5d39699227..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS@6r6#Q*|aL4ZgAF#xhqFaWI3l=7(2kRLtL8hAb z;)V)k9ArU=OBR%vaia?u!y^oxB8Q3(5i}#Q95$o!rv*#_^iToiQ&dw#GDK2X#jt24 TwV1R(EBL#TDZ+$@hFsPt&<-v0 diff --git a/fixture/15/2/13 b/fixture/15/2/13 deleted file mode 100644 index 
12ecbc88e96d5ebadd4594bb3e43982d66f9a819..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmV-(0F3`aT4*sbL0KkKS=%pl@c;s1!GK5rF#xhqFaWku0MktaJxZob0urdx8lL}) z+}Sl3W~gk~O*AQyCba|<+ScC{L-47=4v-HJ4EO=+Cj?9@+N!C-si=^uSx8hAB`E^b VvPh(;DI`(;F64@Ep&_*5t)Fi!vg diff --git a/fixture/15/2/14 b/fixture/15/2/14 deleted file mode 100644 index a1b229da00f3d41cf0a5d4fe1914ebcc2c8ec8aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmV-(0F3`aT4*sbL0KkKSy)@rssI9#!GK5rF#xhqFaV~4OsbhM5DhfY6r_M@&hnF3E$P{LrElBm-f zpKyI5Scw{iYH5ka8i-ieTdldNJ0vtqW~`#FUbG^ diff --git a/fixture/15/2/16 b/fixture/15/2/16 deleted file mode 100644 index 58e8811dbd2bec789308062f9c1a4b62d566cd64..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 114 zcmV-&0FD1bT4*sbL0KkKSY$}lu^j8qGb%ucm9%D^<`V1oyPA;YEs1BNv$D}M&1oKTR;T;LQcl^pyy@#sd~ zE`da+WhyID)4Duv9_!uuQN*6f#^G1t;*Jgq{t)OPkba^7J!z^EfTn_kp`apY3Y8cN Srm96B#oUoj6eI)M`gOolr7eB{ diff --git a/fixture/15/2/2 b/fixture/15/2/2 deleted file mode 100644 index e31aaab9c467083ae2cb809c762404d94416e687..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 114 zcmV-&0FD1bT4*sbL0KkKS*4|@=>P&w!GK5rF#xhqFaWku0MktaJxZob0ZB;)nse2A zu~8X~G6>1AF)>@5w;P6YZPi2Kr;>FSad37o(!5IIpK1l<*x9pfn;{5NfgwPJ2rZJK UjMmY%%^?0RDgXj%L4ZgAF#xhqFaWI~C#qE%V^Dx;rhug=Xwdil zl)M|d?v(CvI+@#=>Bz=2Gnx+NKBc8LrweqwgUeNQtKQbC-a!&bfkmPNA)7L`nkzFD QU`OKaNT&)C6?hP(K diff --git a/fixture/15/2/21 b/fixture/15/2/21 deleted file mode 100644 index 97131774c8cc76bf65c9509d9c114755d1d8cd2e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS>!7-H2?x(L4ZgAF#xhqFaV~45~R}s0MktZ6%$4$Js#s4 zjg3WN#1 diff --git a/fixture/15/2/24 b/fixture/15/2/24 deleted file mode 100644 index 86e317f69d13717f2aa3e3d73a42ad9a864e5934..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 110 zcmV-!0FnPfT4*sbL0KkKS>Ec`kpKc?L4ZgAF#xhqFaWHfDf*RDCISJbngWufXw%gD z?0QDiW}S_s&25(5q<#==#QwJsCJ>3wq6zJt`hwyFJ{{|Eo{tFP*X6_6+sFV VMHEv@X`&y++>uTcBn~pF3jl$RE;axF diff --git a/fixture/15/2/26 b/fixture/15/2/26 deleted file mode 100644 index 752ad816a6527699176fe80e795f4f3d1e3da131..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS>BHhVE_VHL4ZgAF#xhqFaWHfDf*RDCISJbngWuN4K(|I zuWht#r%hwEn#(&yrH1D@u5*oUgnX)aj-u`^CuM#s!Ca&4L3nMmZDMTDr0$L*oz>3g TA|e6MKa05{oG3_dM~5(g2i`FJ diff --git a/fixture/15/2/27 b/fixture/15/2/27 deleted file mode 100644 index 4a4b29098d634c38c53ce8d7285ecde1aadf9e2e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 112 zcmV-$0FVDdT4*sbL0KkKS)Y*;Hvj@{L4ZgAF#xhqFaWI~2~@~!2nL#H3Q`~rA7rQd zwl><>t!qx!!5T3Z%+ob#nTBp7s;AOzFA~!69~5{fYw=qqIa diff --git a/fixture/15/2/28 b/fixture/15/2/28 deleted file mode 100644 index f0fb6e65973e72f6c6a4e41737b6e84f697812ae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 111 zcmV-#0FeJeT4*sbL0KkKS=qh2O#lLOL4ZgAF#xhqFaWj$15Gp$Khmhs0u+#Gr`>rL zqb!kQ2ojpilwz1wnS~XSSy}*31YRg;Q(~F5Pk}fBVu9$01HzK3mSBk~qKXP)K#3tz RP=y!qcO+AV2@RXuw9v85Drf)z diff --git a/fixture/15/2/29 b/fixture/15/2/29 deleted file mode 100644 index 063754ed843259be7ee490c68fb12034ce99dce6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmV-)0E_=ZT4*sbL0KkKS(Gmju>b;$!GK5rF#xhqFaWHTu*8R;j+z5ti)qc1tcXV WR~ diff --git a/fixture/15/2/3 
b/fixture/15/2/3 deleted file mode 100644 index d3f867afa6c1dd506679bda4bc9ac781f9350438..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmV-)0E_=ZT4*sbL0KkKSr`F~XaEA0!GK5rF#xhqFaWFsr_e#L*-ZEWkpH Wl+{pBQ&NB*#oUoj6eI=!V;TUji!1v8 diff --git a/fixture/15/2/30 b/fixture/15/2/30 deleted file mode 100644 index ecd18f1cbc7cb29de3cadcf88371e3e5e964438d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKSw!krsQ?0I!GK5rF#xhqFaWFwpS4tI0s*F)1Sv+1G~eDr z%M&Jq%4I>M16E2hvnC@er9kjO;s%j6iGb}#Y$}lu^j8qGb+?BRgf`KW>vB87EkYQ7R0mIr2UUf@DPBu6#$~eN+>C4luk4~%Sz6Kr5@e(v{XgT#lPh(@4|_jY+PS-x5~FV$XyEdbXD}2(@>~nB@`a8 TV{un!?A1E)B3Fxm2_Q=VAdxQ7 diff --git a/fixture/15/2/32 b/fixture/15/2/32 deleted file mode 100644 index 4e4bea4e733e473e3362cece419fbf4977631c6e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 114 zcmV-&0FD1bT4*sbL0KkKS$Y$RzyJbNL4ZgAF#xhqFaWHIVsRR97{L4ZgAF#xhqFaWI3%&KI-B=uDcCIuxZ(WaTy zI*lNxEodMh#F0dWSr+Fx#?Eb;&FWF|7l`&(WpZ|x>Ygfcr_u%CvTd_97G(lPsKVCB Sz*JdE{x0N-aG@c-vYe{okuqBV diff --git a/fixture/15/2/35 b/fixture/15/2/35 deleted file mode 100644 index 9c65ed33520e3970467f4886679d88986904f9ae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmV-(0F3`aT4*sbL0KkKS>Ev?D*ysl!GK5rF#xhqFaWH=C+$@l07>eq7)%hPgH1m2 z>|`27Nuic16BZQ8j4LY)A~Lelhe4)j$W01w<7XlMzLW83@LO Vt12RsLtAa=yOJrwgogKt5m;8ZD@6bR diff --git a/fixture/15/2/36 b/fixture/15/2/36 deleted file mode 100644 index 73e5ce294614b60c9b2aa600698d069f41637131..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKSx~Y8k^ll{L4ZgAF#xhqFaWHAPgJU8z(6$9KvXJ?F{$V5 z_c`0Tbi1cha$Ok_y6ZNzvejnGmi!3(B;b7$!7_D7bRV}v$qOwI8)@I7A TiX&lK+wi-RDZ+$;g^&!8QVufH diff --git a/fixture/15/2/37 b/fixture/15/2/37 deleted file mode 100644 index 04737cc384d4ca56668951898b6aab22235833eb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS#`h5d;kJx!GK5rF#xhqFaWku0MktaJxZob0ZB;)ntOYR z*1H-LV8AvujTJ+VZgXpGXH^f9o=MVOrNP->rFfOdeGpz-ZJ5~EvWT#%fmI74ODdq0 TEKF^PKa05{oG3^--{rmldTTBZ diff --git a/fixture/15/2/38 b/fixture/15/2/38 deleted file mode 100644 index da5c50df6c8aecff85d07c5fcf6c9aa423eed176..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 114 zcmV-&0FD1bT4*sbL0KkKSv$mf$N&OO!GK5rF#xhqFaWFsrV+8L4ZgAF#xhqFaWFuWm6^s0j8P+DIn8LesqjP zs0A_2ZX~-P*AkAl$4Z| WvZE9QK~+eL{9VZu;X*)C6sx-!? 
zkB4~?2}Ve);NCCxI-2mi2Gg1Hm diff --git a/fixture/15/2/41 b/fixture/15/2/41 deleted file mode 100644 index d2de6779d77f07c244f22733fba347d2f9e95fe3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 117 zcmV-*0E+)YT4*sbL0KkKS@3hb)&K%d!GK5rF#xhqFaV$sl}3+K)F2vZpnz$nk2`YK zF|chLQL7P+tx=7!7BixaixIsl2f+ub9?>|7gy1jaUWm9;>WBlP$DgtJq X5^5lcVk#*F5901frwS4t4tJWsoW(A> diff --git a/fixture/15/2/42 b/fixture/15/2/42 deleted file mode 100644 index 76aafea6e02c480503b960a02c5fb89df69e11b2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 112 zcmV-$0FVDdT4*sbL0KkKStZ#1EdT;x!GK5rF#xhqFaWH=C+$@l0Bx$MVK6C0Mwrz5 zU0x*?EmjJQiD1xH8K#>xxw)2R{8WC0!0K0$wRsOz;Y|m01;oY~hGP;Hi(_KRMr%_B Sz-&s|y%%ytI8cx!*#0ex94*%X diff --git a/fixture/15/2/43 b/fixture/15/2/43 deleted file mode 100644 index 21805082abab42c07fe2f3c7ffa063a7deff1171..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmV-(0F3`aT4*sbL0KkKS-i%D(EtKrL4ZgAF#xhqFaWHAPgJU8z(6$9KvGJhOlo)o2a{YI^=fUZO1n|v+$$&7l{2!;#|H9=)4P(_fRh_wryg&8S? WD{QHl3YE6ri@744C`fjOdS3ufWiK}X diff --git a/fixture/15/2/46 b/fixture/15/2/46 deleted file mode 100644 index 7bffa556c7940ec2b2944e8ca57369b5886d7aeb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 114 zcmV-&0FD1bT4*sbL0KkKS+}wMrT_wxL4ZgAF#xhqFaWI3l=7(2kRC7DAA^x zK6X+$B+*c^(9-0RSy8s!&CPMG>Y?#d!h4IjxgE*+SBYGU?LfS3nX=nWGf{|$DI-&A U%4Q~PnnCupuM6JU!HSfarYmKwC9 T8WPQnls*@7ML1B9SiO=fQY|sj diff --git a/fixture/15/2/48 b/fixture/15/2/48 deleted file mode 100644 index 0b173cd9de3c4da2e5f18102c49cc23b697d766c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS*WFIPXGdG!GK5rF#xhqFaV~Z2~^1SFo0>MfTXCJF*`iC zXbl>P#)yclRxPty+BS;OM%MTuA5k9>J4E6pP9fr6lH!llg7#+2+gNBQ38`jMq6id* Th=8Qj5901frwS4k6s<|%9F{N= diff --git a/fixture/15/2/49 b/fixture/15/2/49 deleted file mode 100644 index 48641b67396631a3911d499fb74a2c7965fc27bb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS-}3iLjVGm!GK5rF#xhqFaWHAQ}rsQOauc>GzBFn(Wah# zFS{7B*0eU2vkkK*nYLqxHMNgL9}#&DlI<>6aefzpaxb+4^4Qt3Z3;SyAmZdXC3VgK TqmCC{-S}O}6yZWc1N!w0+!il% diff --git a/fixture/15/2/5 b/fixture/15/2/5 deleted file mode 100644 index e04fbba5e7efe0f0d30b1fad2f5eaa52bd633f51..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmV-)0E_=ZT4*sbL0KkKS=3;L4ZgAF#xhqFaWHtTwi_8qs509Vmz32Z|ji_@jr#A5eH8mEq4*KvheAFG=fNLEI0|7N)c_tXL_||mFhfZaHAM*` TF%kqQzs1~kL4ZgAF#xhqFaWFus(?$w- diff --git a/fixture/15/2/54 b/fixture/15/2/54 deleted file mode 100644 index a38b27f6e0e007409fd49ea8ca3247c9a6f42910..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKS&3Aai~s^`!GK5rF#xhqFaV~4PgJU8z(6$9KvI$cr=Pz0 zPSoRHtvt$*8X_S?;C-Ax{;@-{|<@SwMuEXvGcqZC^qf-x%7 T5^PG=--XhqQZM8A5Gi|PJHKad_xgwk>NESp( FFaR_>C&T~% diff --git a/fixture/15/2/6 b/fixture/15/2/6 deleted file mode 100644 index 833e6ef997f66b8084d42d2cbe486aa0cf7df648..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmV-(0F3`aT4*sbL0KkKS?c940RRGVL4ZgAF#xhqFaWHfCRI$B2nL#H3QAF(=Y)5 diff --git a/fixture/15/2/8 b/fixture/15/2/8 deleted file mode 100644 index eb860ea3a405c13e3b0e078d534986026930ad02..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 
zcmV-%0FM7cT4*sbL0KkKS<&PpzyJbRL4ZgAF#xhqFaV~Z2~^2|fN7?Hq^O!PIz8%x z8m+KwTG`QN*tW4^c8RTmHZ*7QsDD9yK;%=gOm+_>^(PU2q!+cBw#LSQp(-evpr(|8 TNNJcbtN6Q;DZ+$?k0BlaoEI{a diff --git a/fixture/15/2/9 b/fixture/15/2/9 deleted file mode 100644 index 9c952f6a8dc7fe6673644655bcd8b337f5ea0120..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 113 zcmV-%0FM7cT4*sbL0KkKSv&-&RR989!GK5rF#xhqFaV|mnN>1iAR1|)Dr%x=#OJS8 zv_%sr!#IhUXo-}enne;PUyNY>VBYPHK;40@*xoaGX5pLmFmGClsw7n^vZ*a=D5Q#m TS*uf21Ngg=DZ+$-;3Yz+uFNo* diff --git a/fixture/15/3/.zarray b/fixture/15/3/.zarray deleted file mode 100644 index 7012a92e43..0000000000 --- a/fixture/15/3/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 0 - }, - "dtype": "NPK zKnP4hh$5gs!ioZGPX%U;R}#~LSD(|4b9rt;mE47N9Y^xY%n*unO%4w7#> qtjl*?-2R{ET!8WY7oB!}ZPGpN9D)75vL0*9o1a~go0Hi6j~@X0P*2$a diff --git a/fixture/15/3/1 b/fixture/15/3/1 deleted file mode 100644 index 4358c98c837eb44349d35870bff0765d2929a54c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 201 zcmZQ#oFL%9#=ro=Cm9$RM1XiB5NmAJ|C`{zYRk$ng((ThPXyv3hC(1&S^*@|fH=RZ zsHh4EK|~dhDyj-y(V*|iAi&JgafU%CK_DSvW|M48B5zlFn#N5=7MBa0a-Ei2F1pA) z5HPM+5!PA$^?+sH#!UO=Ngu>l_)Rt%&i&EjKTolUXVK-@j;ur_sT$4*dTL&T|P* uu&_PBTRqdMt!-^#o8+nR^&h96vpFeyh<@|HtAO9lnbczk~#i6?9BNGOqZ=V)XupzHyWpV&y@R|{^?GF zg+%$HlV8tT^EFG%7Rq^=^M&1FHlV`0j(p7%eOT(R?}NWlNw>FYMX`~UerKLDb{ BR(Svb diff --git a/fixture/15/3/12 b/fixture/15/3/12 deleted file mode 100644 index 2c341dc9bc983bc2b3ec6ed5ea4723f98c39c33f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 212 zcmZQ#oFL%9#=ro=R~Q%=M1Xh?5NmAJ|C`{zI+c~-3sVx1p9sWN3`K=NsLX9tTP!NFmaqZ!yuF(kPtAlsm+0zw>rqi#P}tja@y{j3e%*F z)&)y`$xyT9d){*BNY0-X6T77zoLtuX=4PXz>A8yw1ydgWF|{x`=HQdjE&D3dMrld@ wHh-hL<+Hfh&R&s_tW8j3Kbu}?7O8OI#mAcmjeT-!<{e7#Js&Ry}7-&7&~bH&USPZwI1&lfew-tTDA zwDa+<Nfqf901-pQeXf8 diff --git a/fixture/15/3/14 b/fixture/15/3/14 deleted file mode 100644 index 95993702aa3fd07c3be21e940eecce7442689181..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 196 zcmZQ#oFL%9#=ro=M;I6wM1Xh=5NmAJ|C`{zD$2^x!c++4msS9YBp^-%;v$B^G$55< zRaI0~R8$3|fMijX?}`TNm&^>zoE_&FgbD;2S+ch@DZ2zzB%JZGEk10t;&E@mwtf?L zopkG$Pj{|vOrIXV?3VotsfTM6mRxwjb1dFmuwD97$zqEivELabuI*I5G}G5YdT)mz mzd=Xly*bBZi%N>`&CF;KxUrH??CygzhJQa~r~F|5$qoQl#YrFl diff --git a/fixture/15/3/15 b/fixture/15/3/15 deleted file mode 100644 index d7550e03f7fd6f9958f53dde9e64b9f8820a54b6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 206 zcmZQ#oFL%9#=ro==NK3mM1Xi35NmAJ|C`{zn##&>geeKgPXyvBhC(1&S^*@|fH=RX zs0sqBs(?%|yDDTwfu$z{12e~pQw%~00to>dS-f`~k`m$X+UX+qxj58KaR7rqZ;>+s^mTRC&MTphQvddGmc29{oF-&twY#VBbz! 
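As an aside, the "compressor" objects in these .zarray files are numcodecs
codec configs, so each can be reconstructed with numcodecs.get_codec, which
dispatches on the "id" field. A minimal sketch using the three configs
recovered above, copied verbatim:

    # Rebuild the fixtures' codec instances from their .zarray configs.
    from numcodecs import get_codec

    zlib_codec = get_codec({"id": "zlib", "level": 1})
    bz2_codec = get_codec({"id": "bz2", "level": 1})
    blosc_codec = get_codec(
        {"id": "blosc", "cname": "zstd", "clevel": 1, "shuffle": 0}
    )
    # Each codec round-trips bytes: codec.decode(codec.encode(buf)) == buf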
diff --git a/fixture/15/3/16 b/fixture/15/3/16 deleted file mode 100644 index edb7e3b35b0fb4199eddd4ab63383a5978d191e5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 207 zcmZQ#oFL%9#=ro==NT9nM1XiZ5NmAJ|C`{zn#;;?g((ThPXyv3hC(1&S^*@g(tt!h zLsbzJ0;wtxE~<)MQLu9XqXIL>3LBvSgM%!bjc&SPkv6lQGZrtO^>J=nSs43~bDImR z4x5@Ex0!d+La^GGL*w#U^*55T6CN9^P3gT>yf#qSRcGSn+#AKq72Tc1iN~Se_KNogR0u!)IUCr0$=F$NqK8O{fL{DU?hV diff --git a/fixture/15/3/17 b/fixture/15/3/17 deleted file mode 100644 index e3602b87e2c095ca438c5c3c854c0f3e0b783a1c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 208 zcmZQ#oFL%9#=ro=7Z?~AM1Xh)5NmAJ|C`{zTFT0>g((ThPXyvBhBP3VUsP2Hq)IEQ zia@XkLIRmUP!zeMz&evbfSIG?41-XDK!QPKQ=7vR(Qi8{jEc81E;y*()u>$bD6&w7 z-TYRuzsupu@U{F)jNLAWNzT>Ku-06DY=yambjj6VuGAd9bLWrV)aR3{_@v=$%E$iF sMXn&ORO7s#-Tc=ltCbJLEayqEm>?TDMSZbt-rSAyY22lsp6})d08<1=O#lD@ diff --git a/fixture/15/3/18 b/fixture/15/3/18 deleted file mode 100644 index 25c836b8d6674199a1be33d4eff0b274f4a7a799..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 214 zcmZQ#oFL%9#=ro=*BBTWM1Xi75NmAJ|C`{zx|Ef{gt-vNFRcI)g&-mch!cUhilHbC zNaa@*L19%@QBf6;08#NP8m!+kD=;^%G9TvrE1J!Nn3TCH)ytLp2Nx64D%nFJv=9~N6q>3fpaa1%_kFc yopyvhK9P3v)s8Ju)|t|NEmLRB(0%-0U~!C#u)on3)0to2S-zVv_e%1=FTVg=##K)M diff --git a/fixture/15/3/2 b/fixture/15/3/2 deleted file mode 100644 index 8048a6aef56785fd02eeec7315208afa22f8ff91..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 211 zcmZQ#oFL%9#=ro=ml+rsM1XiV5NmAJ|C`{z+RMstg((ThPXyv3hBP3VUkD^hE2^rB zia-cRgPB!8Q4ok(F+o3*;QDawU5#k|2XJ# wZeg0+?-z59EZgikImPfMo8+652h;8vx3qsgc=4_9#=rhcr2qO?vvAk}04N1f?EnA( diff --git a/fixture/15/3/20 b/fixture/15/3/20 deleted file mode 100644 index 4622a055292dabff4756f812e8ea7f9867a30859..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 196 zcmZQ#oFL%9#=ro=M;I6wM1Xh=5NmAJ|C`{zD$2?*g((fl&rbppi9lS%PzWSTD~hVp z@~euffS?FO0&!81&x!);Oa=jFjullx2?7lUiA`+@G0HJs4#vX(X4BJ kYF1W5k!;H0N_($_oT*9k52l>{v}Li>{(owGv;Wis09~#`_W%F@ diff --git a/fixture/15/3/21 b/fixture/15/3/21 deleted file mode 100644 index 9ca0d4e686f6b059f5f171af759aadfd9e913eaa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 213 zcmZQ#oFL%9#=ro=R~Z->M1XiN5NmAJ|C`{zI+v9}g*gexPXyvBhC(1&T9F1M^BIb& zfUu|vM1lwi0?L3u)QSS@Oa=jFj%9faLID#FvS{ydQi|}i(GBioH#m1?V_AVx$lTex zFQuI>n{nSyZJEa`+qP#Om$&)vU+MEHAbfIcdPZ`+Ub=~oUBv;**vs#FQgS~?E|F#q sckm59)2YP1HN7RJ*k^uw#-cRaRgHYvrk{LWl6p?;uS@tYDu1CC0HKCTp#T5? diff --git a/fixture/15/3/22 b/fixture/15/3/22 deleted file mode 100644 index ad4c6ee27e20b6e30de71bb8efa1bf08746f51ad..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 211 zcmZQ#oFL%9#=ro=ml+rsM1XiV5NmAJ|C`{z+RMto!kh%;CjxO5Ls3;x8jz7+2qa1? 
zimIxLfFcM8;T6TNNU+Xi2w*;RhFQo#z~R8brqCN6NkMBO+DIun?Y{+lW@$6J r-M@-m)ANbW1wrr4YBvlz8fHl--@TA7ZO`-NUFUCm!+vMcWrx@S@t#k= diff --git a/fixture/15/3/23 b/fixture/15/3/23 deleted file mode 100644 index 20d5ec1dd290d7b8b5961ac52145b8d388b9fe32..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 223 zcmZQ#oFL%9#=ro=_Zb)%M1c4>5NmAJ|C`{z`j(YJg}D&OFRcI)NkE(k#8nJwKr+9m zsH(823J8jTED!}DimHm@R}@&kWM*LI>^Q?9gM=!!A&v@8tD7_*{{r8P06(z2-rYhP#>iF*b$p7qtv{J8pn`Fm;^UM6_ I#ZRmU0P<*9PXGV_ diff --git a/fixture/15/3/24 b/fixture/15/3/24 deleted file mode 100644 index ff106c6c36c9e985f01bef308d2af4f80b865c56..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 199 zcmZQ#oFL%9#=ro=#~BzHM1Xib5NmAJ|C`{zs>{ld!juH$CjxO5Lm`kXttd(ZQu#$y zMODZsctwMCCc_6NjuU4XgbEBcvSjaZN@fXu6VT(jo3Y2+RoYs_-&pg&_ul!1&pe-{ zJ!(8AHv7|-l?-ea0c;1oX4lBfnyoWM&d}6PA@A3QLoG+2ud}$=sJvw3XN6^Eb$ssb kx9;%zFFxp(vCa2xxyIyYtA4(5&c9S`^!ERU>sNjN0Gb<3S^xk5 diff --git a/fixture/15/3/25 b/fixture/15/3/25 deleted file mode 100644 index 06400dd841a6d5bfb76db7aaa3a2184dbd250429..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 213 zcmZQ#oFL%9#=ro=R~Z->M1XiN5NmAJ|C`{zI+v9pgt-vNFRcI)NkE(k#8nJMX+SE! zu&AmEh=2qL0GS{TPyz%(R}@&kWHw;tY^xGV5J)iSY%+~X6z`Iqo_LGRSgB;uBT40- z1%Eea7oYCQIH7sD&U`oHKI@0d^LXz`9&bwi#mqaet$1PIO5VBct&EKZKU+(DB&|Fr tpX)om^IH?MoNDHYosaHhG8?P&d%j#2y6p7x;_1)j{c=U_{p(=z2_&geRPuwB>v z7~F1may wH^KVmO3z6qI|?VAtl7)x7JqZ1@6}Vq?DLYu@+9_Mjs6hq_-snek7e(e0luhEKmY&$ diff --git a/fixture/15/3/27 b/fixture/15/3/27 deleted file mode 100644 index 1e13dd361b8d72d5581656da17c49905de65010b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 227 zcmZQ#oFL%9#=ro=j~N&kM1c4-5NmAJ|C`{z%FD)J!kh-==NAHr(h4Av1jLCzTvb$* zmS0p=R0Tvp0!S4>*+726iUjMIj10^i9Y2H|1R6F}wmj&W!QmdZCS_S^^N}ZqZzlGh zacb}6&s$}&rjxJNdh)@43C;JHeDRt0B*83ogC+hyPOPWL5qT|YDY3hq45#`I?9!UW^|=Ee2TjtGVR KKg00-`F{W&En0p6 diff --git a/fixture/15/3/28 b/fixture/15/3/28 deleted file mode 100644 index b4c5f301eaf3270c7f13d5884a81bd07bb08f2ac..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 210 zcmZQ#oFL%9#=ro=mlzlrM1Xh~5NmAJ|C`{z+RDoCg((fl&o2ZLr4>LT35XMcxF{{Z zs;H`{sHh4EfFzg!qM}w5SifX6VCGo1i&4lyz~REdCQ&azzbg@^crF)2y|`#}#d1!n z+ODts>H9=Y8|2S$crO0DV#aUdWAjUD_GtcSyt+TFiuK3C-H)x@tiL{P|92wR>t97L wqtl$sJA39WPZZWXbJK0Iy4vRjcMEQqeq;P9(s$x~`jMrscjEsYI`&5%072ANZ2$lO diff --git a/fixture/15/3/29 b/fixture/15/3/29 deleted file mode 100644 index 2ce27ffe106a76642b29e73d2c28d2310ca44eaa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 219 zcmZQ#oFL%9#=ro=w;321M1c4(5NmAJ|C`{zdX|+zgt-vNFRcI)NkE(k#6=8gKr+9o zu&Ao2sHzG`f(S4TSEC-Hb$` zO=(%7JZHKVE|!j-&78J@eO-kHuhiAg=_=pYR?UcH`y{M3Z!e>J;gkDj5b#R$; zWs@z>wv*2O2hZQU=x%zN&CT)d?<}Kb?ek1#BpcUQ`ng0JUKjYkQvQ|f5B<7-e-`~` F1^`->Qz!rc diff --git a/fixture/15/3/3 b/fixture/15/3/3 deleted file mode 100644 index 219841fb19140276ec754520e2e0a545e137f5cc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 221 zcmZQ#oFL%9#=ro=cNrKMM1c4x5NmAJ|C`{zdY6?!g}D&OFRcI)g&-mch!cUhiXjb1 z<`)%J6;%Nd1cIrms^}FB)^C|FFf|%k3pofl6wGYWEm-KXBuUkHN?!8p;BMyLA7+bg z&iu@D!Lfbr;=sqf3uYDM70-Y3z$5cspfX?kMTMz{O`iT_zcep5nW6u{%o)dz$v?<_ z@+{@}B@b%7 diff --git a/fixture/15/3/30 b/fixture/15/3/30 deleted file mode 100644 index 5294beb8359c7314d8eec9265d324a5f37f7b3f1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 209 
zcmZQ#oFL%9#=ro=7a15BM1XiF5NmAJ|C`{zTFc7d!kh-==O+P)L?A9=NXst-(xnwu zML<|pRRko!EFb}70;ND8d_{qECW8ZWi=vcJfsN;^Zy$7EKGo3jje=0?y$8K}~ q^u9MqXFFZ2%K1ea(-&x%)ft~JU-s>bwMp<@cMFz`str&7vjYJCyhoe> diff --git a/fixture/15/3/31 b/fixture/15/3/31 deleted file mode 100644 index e629ad018713513f610151069f1a6e0f58cdc0ad..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 208 zcmZQ#oFL%9#=ro=7Z?~AM1Xh)5NmAJ|C`{zTFT1sg((ThPXyv3hBP3VUsVVsODl@1 zsz9g+N&;CxVGsyj(O{j)z`)FL>=c7gLBPzW<04YZE=wYkjKfwls`OXQdw1ZwCZEEd z9~b%}XPhctcVV^3?e)or^Trq8cp fG9|g)&-kjVT9CAH%0z3)AMX`-*Zx1xT*MCmh@wC- diff --git a/fixture/15/3/33 b/fixture/15/3/33 deleted file mode 100644 index cf9640589faae4a4abcc648209212d5b1147671c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 215 zcmZQ#oFL%9#=ro=*BKZXM1Xid5NmAJ|C`{zx|WsU3sWJGUs?eql7Ki7h^rWi3X9T! zjQpx1FoaM=AWl_P^oj!Om&^>zoGZ>S2ssEiOgPEny(F=}Yk6u^A7hfz;pCZf*4P-V zEY5$`aB)HKXMXYY2Pf)IZMHvJA#qqD<=EXF3**|BEIV}nv1C>8%43Yq{q+KqXC!{O zd+^$wiz&_~#}C@uektZLa*$Cg(*FBGyw_Y*J@Lpwf&5Dgf7oW*L{EO+|3n@DSuRu# diff --git a/fixture/15/3/34 b/fixture/15/3/34 deleted file mode 100644 index 778578d653fab23a9ad0f742138e154bda489c20..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmZQ#oFL%9#=ro=Hy9WgM1c4J5NmAJ|C`{zx|NmT2~!e~p9sW73~4|zzpARJ5J;C+ z02xI<3?d-}n2cLdV4cYzz|65Mk3q;mz~REpCebS!SdMb3aYpwsve}(GG-G?h3ANqd z9XI!^#d??_{K=b@miD_cizXf8t(^CU y;c?Csku!X+&(=G+-LTKOVYAFw#W}`|AzW(XLdymBmwoB1@VZcN;zRPkEx!RJo>ON4 diff --git a/fixture/15/3/35 b/fixture/15/3/35 deleted file mode 100644 index cbddc72ec878ad3e70feac5f86202a5ad5b95d7a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 214 zcmZQ#oFL%9#=ro=*BBTWM1Xi75NmAJ|C`{zx|Ef{ggFVwPXyvBhN40sRa#M1l?J5q zi;9Y>fB-~-5r_d42Z4wc1=g7i0?ZsMau|dP8akWW4m9!h1w?+_!*iLhh4b)(ji)nC z+AliJV|l>EUSc=%*?)qyU*;TPt$$`Z@mXJb+2@p~-VJ66M}#dM{?(lO)=@kwTVSD= vg)`fCgT;J-XDSc#Do&bcvz&ce`Qpf%&vYv8H~n;emYJ2^zqxu7S3nH_atlxJ diff --git a/fixture/15/3/36 b/fixture/15/3/36 deleted file mode 100644 index 7047af084f891f5662a5ad7769958c3636417b8b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 201 zcmZQ#oFL%9#=ro=Cm9$RM1XiB5NmAJ|C`{zYRk&7g((ThPXyvBhC(1&T2Yh+r1GnP zps1(_Oo9j?tSSmwQDB|P;K0nH#4c2ju#u&@W1*o_+R;g(#U~AwtIPJWx)m^#vdqgY zy|KJ@#z`H&P1dPzK83L@nz`_FuE^GzJpKZH9Dz-7ZBKHqL@%G)*15&Uj>+%p!&S2r mr9FF2Jn4K{u|whf4@Of(>#q47=Ps{VVQiKbVE2pR6*B)vux495x1-TOHR#H*ob=`wDM>o~>#fe-JMYu6py-^L*Q1|X wZl15VI^NR~nSRMldMR7tf(*7kIi7A`mZlMS*oDg8(zfiW~-^1c8KxiA`+|%skm?IxEkpMDu=r~hjh+I%}EWJbX%=O<}mifIn`!Z1D va>n6%XCIk7=#c+Zl)x-~!g5n_N2-F7xmVBitfZF9Hy8c+{P|hVr=#ouZzoga diff --git a/fixture/15/3/39 b/fixture/15/3/39 deleted file mode 100644 index 13c3295ea3b823b12b774a593ce0c32ad62da24c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 224 zcmZQ#oFL%9#=ro=4;UC2M1c4N5NmAJ|C`{z`jwR-g}D&OFRcI)NkE(k#6=8MX+SE! 
zu&Su2s;H_8gh31l1j>Lw?1~2Km&^>zoG0E0IS4o`SlA?*vr+XZSK5qMikh}oZgRyyy5{|*K3Zx*LbwArup F9{^FgQn~;D diff --git a/fixture/15/3/4 b/fixture/15/3/4 deleted file mode 100644 index bfbd312140827c96af1a9e0ada1352bfe722d214..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 208 zcmZQ#oFL%9#=ro=7Z?~AM1Xh)5NmAJ|C`{zTFT0Bg((ThPXyvBhN7x8Ae~V`7G1=z1hdXo38)M-=lPiyyzBzYvL$ai(lzrdlZ!f0^HyJ5U7Ll2J tt07uUZq>o9Y$jjRPrF=~m&|zAtWrKt@PW1Y>15+O-#xFNOP;ZS9{>@*Po@9> diff --git a/fixture/15/3/40 b/fixture/15/3/40 deleted file mode 100644 index 0f21078343fbc79ada40d9bdea1c1b1aaf834302..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 213 zcmZQ#oFL%9#=ro=R~Z->M1XiN5NmAJ|C`{zI+vAU3sVx1p9sW7423|lw4y2vNaZsW zRUttj6U2*MQDB|P@PLWq#Tf>n0D+Y(f{n*?a-23^lFmM}CC?`LSV)+UnRC=%&x<+I zJjP#|ZB}QlX-}JHl`JOlM1ZSN<60ZJBF0`FL8727C3Nn12mh_b)!haEcuOJls+! diff --git a/fixture/15/3/41 b/fixture/15/3/41 deleted file mode 100644 index 9805e51d6b73e6e4bce83269d2f41fc622bb6d4b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 212 zcmZQ#oFL%9#=ro=R~Q%=M1Xh?5NmAJ|C`{zI+c}S3sVx1p9sW73{`1BD!&j&lvWf~ zRaF&L6#)^D24Nss6tSYfI+KBcnd8JRMxg|Ogo2$-x)(O29zEwJJVSz=x#!}P=E^h8 z_8&i_&b%BJ=96(r!{EmAw&;SitaZ%izNsvf4DXg~aF0|fP?oECb%F1eeL=&r-N|8w z7ixqH_zPXicxUa1Ta!B3{JCNAfP1Ts)1 diff --git a/fixture/15/3/42 b/fixture/15/3/42 deleted file mode 100644 index 18087bc9d18e195fce85c1f671d2e5e3900ed5f2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 198 zcmZQ#oFL%9#=ro=#~2tGM1Xi55NmAJ|C`{zs>;ex!juH$CjxO5Lm`kXtw;ls`3yxx zRYgEh1*Sk`@QMcOOa=yKj;wPGLIoc-u=wk^G*VG=JGLiR#<@`{9=3m6rcIl63w0vZmoa5=i@eoK)%ts&dC@i_a_{KXbR^B&&! z-5`@`@T;@JY||FWpyzVORvkZEdBQ?CQzZZJLjKt&ENuABXSoT?PU;M1& diff --git a/fixture/15/3/44 b/fixture/15/3/44 deleted file mode 100644 index 420081305c41e235c28d32cfc11d85d0d5d7ef7a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 220 zcmZQ#oFL%9#=ro=cNiELM1c4R5NmAJ|C`{zdX<%-g*gexPXyv3hN_~fLLj5GqN=DU z4amr6C@Lxf3KmrX1%MQoRaFF(0fERB0oIue0?ZsMau|dP5)QKL?n&hDQahF^c8$^C zf#Mz6hyWGyiT9KvpFL4NxJ9dUMR1S*lF~WzwPrn@V?H(e*2S9YSBy=cBtDzhT;Qzg zt88>jzade0VwPKlrQddg{j+#JA2*cR%pRP0OXB#oHNHP*%FQ<{k>L1J{XB7b9ph1U E0I7IT8vpIy?k(zndIMk6LQlD^Nf*}im~tyU zlTz!QWL@yZMUd@`M3Ra8@@4aM)AE`RTcuB#C1c&cG^|BB4rQ)!-* sT$kK2Gd9@hvG&%_TipDs1+0R-!gw-V%#Jp)F9xH(AXqeka)6d>e($e%>sO4W|Q~0o14f4 z%$GA*>$JzvFksq@WR-t+(i>hCBpoxJ+kUKP8fX7H4Xf2R1HKp;e4V+2dH&Nm2c7#D w)@m_5`|5ng;_r>#=fdvNdA?^)cGtJ_ zJ6`nY&J9ngS3U;wj3>!biob-XSn{f` sdr|tYEyZ(Gyf&UPm}UBBa?XnthjV<=Op{*;zxb~$ywc#i-{Cqg0K}n7D*ylh diff --git a/fixture/15/3/48 b/fixture/15/3/48 deleted file mode 100644 index 45539a3d2165f65684d05231b6a9bf7b637c9a14..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 222 zcmZQ#oFL%9#=ro=_ZS!$M1c4h5NmAJ|C`{z`jnL+g*gq#&rbppi9lS$kd|Kvq)RKR zii)a$07w8CAZ8JWE-C^_f8{7Xg F0|4f?R6_s& diff --git a/fixture/15/3/49 b/fixture/15/3/49 deleted file mode 100644 index 08c9cc54a372f84fd03ce3e864a81fc34ef7912a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 223 zcmZQ#oFL%9#=ro=_Zb)%M1c4>5NmAJ|C`{z`j(Yp3v(fmUs?eq3PD5~5a%-_0m(!l zE~=_3sw#qzU?!9Ulmda66$RFw49pE$+(HflFPvGucPI*qXwI9K`00UzyN+~p@iQ52 zKIhC=p9?EbJozB;djqH1rsjuIi+TP>WvXqx^YR9B`?B*_eLk=SSifPoe7x}6pB;y; yRorRx7khSMWm|i&?Lv9uXTxgG%L&Qt9G diff --git a/fixture/15/3/5 b/fixture/15/3/5 deleted file mode 100644 index 752d02813d89b70afecfe51d85e33502ecba7c84..0000000000000000000000000000000000000000 GIT binary patch 
literal 0 HcmV?d00001 literal 215 zcmZQ#oFL%9#=ro=*BKZXM1Xid5NmAJ|C`{zx|Wqeg}D&OFRcI)X+WG`m;@vffw-!u zsH&=}2nK-+5Fad(x}w0^Q-GP{M3s<(z>8$o=q(#lk8bXLby%^q;MdOi4NletImv0Z zy%KMnTm%Cir7b^JJ76 diff --git a/fixture/15/3/51 b/fixture/15/3/51 deleted file mode 100644 index 460aed3c4e2ea1ac88080db05f6f4bd3a79e1dd9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 210 zcmZQ#oFL%9#=ro=mlzlrM1Xh~5NmAJ|C`{z+RDoCg((ThPXyv3hN?m!Ra%h-B=Z@H zs;Y`W7z)8WkU-pu1nW!&17?maTcLmh4_UmIC<;vy(Vn)@gEgXja2WcMm6atMz%wXdw>(WWr6e diff --git a/fixture/15/3/52 b/fixture/15/3/52 deleted file mode 100644 index d6379c29ef7f513cd0ee1d6e902eac37398f793c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 206 zcmZQ#oFL%9#=ro==NK3mM1Xi35NmAJ|C`{zn##)1!juH$CjxO5Ls21+Dy^tW15)`# zRYg@$R8<6IR)wx;u+C)oz{JtMoTYHrIJNccZ{75Ty$rVU?NZCHvjYI0S4^A$ diff --git a/fixture/15/3/53 b/fixture/15/3/53 deleted file mode 100644 index 30f5633e64708598eaf1dc1852bd1822e2af4394..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 219 zcmZQ#oFL%9#=ro=w;321M1c4(5NmAJ|C`{zdX|;p2vZu6pI-4Z zSbSSiTxGhL?XqD{yQjmUdDeVsg}v|m?6`I&H}r<>W1b^@>G6~2sV2#J2~v00+?!w& zvBu5sRa@nTJsa<6%>8-wg`vN-U4!F}smIvUvsQWf|CwV~=qI(n$uih|&wZ-{W`E@Y DHb+&d diff --git a/fixture/15/3/54 b/fixture/15/3/54 deleted file mode 100644 index 2d51797e3e034e752888fe7a8c4bef38c0d72fe3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 197 zcmZQ#oFL%9#=ro=M;RCxM1XiL5NmAJ|C`{zD$B}ng((ThPXyvBhC(1&T9F1M^BIbO zpsJ{<3Pge#AgZb=Xhni`CW8SpM_ZLpLBL9u>?w)CT-hNe2d`xr+g@~T`aXH)6I1#3 zK2aY&+gj*tPvGe{mne?2a4c%({pa;-c{O{v#?F=2{}|rgQ9kG#<}-8I_XL~6HOFq` h%%9CD{5WxTw4LIm4y#1Qwsn8_%Ch%d?zZ1+2LRFWNH+ig diff --git a/fixture/15/3/55 b/fixture/15/3/55 deleted file mode 100644 index 6d89af542c46a32b380b7907611e878418012e7e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 182 zcmZQ#oFL%9#=ro=+ZY%aM1Xi65NmAJ|C`{zx|D^1g{ct8FRcI)X+WHx1SArHxT>hI zs;H={s>r~YVQ*U432u!K0Grq^>;M1& diff --git a/fixture/15/3/6 b/fixture/15/3/6 deleted file mode 100644 index 458f6fc13f132082f4e1e9925acc1ded7dfa9ba3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 207 zcmZQ#oFL%9#=ro==NT9nM1XiZ5NmAJ|C`{zn#;m)G7*TY((;Rn zs;Y`W1ds$`Fcq<)z}l06nd5|?P=Y|hf`d)E=L97V20ltOoL(_mVWau{ln0+0`!D;> z+T46#mi99j)54OTNaLCotJ{v3CQPKo6c`_ t>|JPVT3q+QTm4p#$-x7eR}R0(+Z4!o{DSSBORX{Q4!ph3bYszfW&r%6QLX?0 diff --git a/fixture/15/3/7 b/fixture/15/3/7 deleted file mode 100644 index d4f7f7c1f25672821ecf6bee2d9ac1b08dd6e17e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 218 zcmZQ#oFL%9#=ro=w-^{0M1c4Z5NmAJ|C`{zdX$yn3sWJGUs?eql7Ki7h>IA~fMkAE zVO3F8QBf5T0BHyTWW=o~uztz>fQfTe1%r@-fWv{pCe@0KNgFR2<*^wo>Rfrsvryso zJPDR3ED|Sj(h`|-}EYzHj7ew zvt;+2EpojpcQW^XIWK>8%Clb&Qux{`-lm?heZu&iKj)8qpKv(egUyF##{UU^zzhH$ Cgi??I diff --git a/fixture/15/3/9 b/fixture/15/3/9 deleted file mode 100644 index e7fdf454ded6419c0cbc15cbc03c62bd8622acf5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 208 zcmZQ#oFL%9#=ro=7Z?~AM1Xh)5NmAJ|C`{zTFS~Wg((ThPXyvBhBP3VUkD^hD~gJW zs(=uLfJ7CT9k8OnI+Ni86Gz862B8Fjgo1@lvS&7^x4L=6r3r~kD?9gfikWyByeOadpOAyO`emO@&D!`s{yi2uRG=)|;$0Wp;DfpNxjTc6!sy?Eq@8PCNhr diff --git a/fixture/15/4/.zarray b/fixture/15/4/.zarray deleted file mode 100644 index f0fbb7bdc4..0000000000 --- a/fixture/15/4/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 
1, - "cname": "zstd", - "id": "blosc", - "shuffle": 1 - }, - "dtype": "vDhNEEjW$~G9nfnVYKeVj~&BlB=q6Yg?{rwXGysl`6}Y z+S-N1#%QKSF> diff --git a/fixture/15/4/1 b/fixture/15/4/1 deleted file mode 100644 index f30413ed31bc1eb74ce0fa1e8e3f8bc833efa2ed..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 230 zcmVQ(8rR<(5deMzsX zul^O-7~CSpkqe#qd7jQQGV@aDPyY%4K)Oq<*6Y<Qdwps82}?W1vmkKfe`Q+wL{E|l^P?|0I&}fgZp$A5xWAL0@MM^ gz*;BwWhRhI8A0JM<_HvO&?q<+2Zg_wBTx)V4@4eV!Tq>)IY?DnS0(09recER_-yzE(r}L5n0M?Q!wf~Y{S`8qC5JCt5fO%%h`Qfnr z=kUo{^0IAbc;{SBF9R6>CO`p*>p}^DAShugGbkBkX2>~Ug$~?k*{!HO9Z d$fY(;tb^4kRtA7TqTp1-iC?f9+RA_c$N{qMTu=Z2 diff --git a/fixture/15/4/11 b/fixture/15/4/11 deleted file mode 100644 index 59299b5f9185368084aa602ef6660907983b5646..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 217 zcmV;~04Dze0g(_u1^@s+1^@ur0000K0002N0000ewJ-f(Kn1-803wbV3h+5XGr8@b z&shdDFaQGp12Y2ve9haW|DJvO*FW99Ecx1}?Yb}T)<1vytl#$h{o8y?zdYO8tPKE^ zOxctrine9R7G+7YL`xJU*^~{jNlYXrTP#FmvWYAv+a_Dc7K?2nF_9nuCqMy++d__9 z2A~G;9Kcpm$HYW4AIR+HmfQflK~EDbLeniAZe5M1GcOn{=0p&hnS({asix@y T4&$zd)|m~A8*?}a*bFlaXc1Tn diff --git a/fixture/15/4/12 b/fixture/15/4/12 deleted file mode 100644 index c6e513adf562d48c1e94810a3158098988abd399..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 217 zcmV;~04Dze0g(_u1^@s+1^@ur0000K0002N0000ewJ-f(Kn1-80CEo;3h+5XGr8@b z&shco12Y2y00RJUzb(tNKTp=L|KImXpM86?W^3}U>%O&Fo@ZaSuSv6Re*lmrk+Niw z7A4cPBvYnj(xxQJrb$UAO<9s)vMpk=$s%O2Nl3^xiAih{A=@Sr+hQUi5!)m}VzO-k z8vq`lf)ozON$~jq7Tk1TQ$th*K+u>YzyLXi(Gl7yK)r%^CwEvTkgN0%qDCoqus~55 Tn;QlSuHX>jLn)Vm*fzK^Hh5F6 diff --git a/fixture/15/4/13 b/fixture/15/4/13 deleted file mode 100644 index a5d2995158d721c56b12fe2df428506aaa369022..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 222 zcmV<403rVZ0g(_u1^@s+1^@uw0000K0002S0000ewJ-f(Kn3Lm00vMV3(!HyaeV7E zFwFT68UPmn7yu+klEEz&TX5VqMvSrG$T7%qv28&{j2mNP>ROU+b=CE{uT@Fvw_3W= zfWa+-jBT+$OGiff%$;v%kvlU0;4ZaaN!73aE>%?_1Q0*~0E7?#05XfrBQw+aXQtDU zE{}}d%AGzxHyHpWK!HsRxdK4Jm?UZzfO{Y_qVj`bTL`)kmKyTPs=v2f)pXiV2~0F1}P*NAt53RGDLzA3`#@@g%lA=2qhc< z7C;3+f#n@jIItQZM*#@elLwuH;o_i`hif=1lS?H`VGZqWa$RBqxm0V)Wu~0o=>rxx V9R^WwYBc2tQ%>*n0Sla%g94TMQuqJ> diff --git a/fixture/15/4/15 b/fixture/15/4/15 deleted file mode 100644 index 41bac71a4592f16e5c4e0d096125d3287719c67e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 217 zcmV;~04Dze0g(_u1^@s+1^@ur0000K0002N0000ewJ-f(Kn1-80Fn(E3h+5XGr8@b z&shco128i)GcW@KH{1R-+qQ4%_PyP^?*ISw%l3Bvvv>cOt?mE+0f3Y#O0p$Nk}Of8 zP1zz%l9EKpwrJY4AYu}WZ6aH2+hmI^L}DU~kcfzFi%mop5+WfBA^;^h1yH>J9F#!7 zWdMYmNel!56cGrp%NLkIObli#Ks_J{gamS(+$@_wE>&nSDLBK;xgZE|gPCMOAW?8? 
T(_m6?hMRLi5a0$g$p%0UQQT2; diff --git a/fixture/15/4/16 b/fixture/15/4/16 deleted file mode 100644 index ebe8bb5c8b945f57db2400f3cc94f610c278c7f0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 235 zcmVq833}FOco27OhiILWU-LgCW}cdwuq2z3klg4n{2Y8))Yl@PHN31QHyKo zrzFiOX^9dA7O@soNe#0!vO&3MJ)yZ1Q0?10fYcT2mk~CK)>w0 z<$Py5J5!d^>HIS%9RL|X0eArLfDKR%D#9?+fMrsZy)z>q42H?f!OB-=2ji2-rOGJ9 lwh82_y#kJ$<9ap7&5r>;SZ)9S diff --git a/fixture/15/4/17 b/fixture/15/4/17 deleted file mode 100644 index 7778055c1b08fa8ed07847d10bb6e214a7cef58e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 229 zcmVM zGXSMN5fLFoh!7!!hzJpS*`L$SXYWio+qRred1pAcoS68EC~-Ia9zVRf$}Ip9UZt$n2BY%)xe9fWd%>(*#sAfm~%d0e_(K f$jq0kp{Y3z2owcZmlN;rqe diff --git a/fixture/15/4/18 b/fixture/15/4/18 deleted file mode 100644 index d22639f5123a0e3aeae8261615ffd022d8ccc43e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 221 zcmV<303!ba0g(_u1^@s+1^@uv0000K0002R0000ewJ-f(Kn2|e0Addu3h+5XGik6t zpR)`wFaR(!05bzK0P?%7R4%Pp{QnjABCh-Yz0_j&l}j(Z_^oZ}YxnlwUgUD$1^`KJ ztG2T2N^)IUc2!B0Dp{>T8wh81?a;Anj2jptx X#>@v=je=8XrY<-~;cAe^%+~@fM8a3f diff --git a/fixture/15/4/19 b/fixture/15/4/19 deleted file mode 100644 index 66d900bd615d5152aef92396426028c8c1963497..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 212 zcmV;_04x6j0g(_u1^@s+1^@um0000K0002I0000ewJ-f(Kn0Zr0Ky9v3(!HyaXw;b zV3_kCAcOz{2%+R{%ihW7!?~R74Cm?3u*13K%)Ok$=?^(GA95xDK;O3NFX^jwCEZH8 zSA8u>3oJrJWD*hEB4n|UZ6UHPBt#+>i*1VqBmf#f0jvX9xG5(q7j|-hDgzh*_N9za zAppXF;lPWN$W@>V>|=ydQf1i$a;b)tn>(NT3TK!D6O}zk0vb`l3<^#Sq@3UR##flY O9GIx=Rq~KIBLGNoUQB0Orvl3(!HyBP7;9 zz%b`O9sn8u8~|-2wr&xPt`S|gZfkUnwcTQE#M;HWHoAx{x-Mc173*ZSu6lrgC z2DDBAL_kCUKmb5MKtKS~=jqJVpMLe4=j&E`Rn^=+t?5jwH8WTH^;G}qJk#s;spq*0 z0I%=ewQt*QU)ODW?f<&BzT3C=UO^Z_p#&p@BqHt&gfJz1}1L*^o%H&eTa%`JGF6B=-yFVNWZl}V4ZYWxY Wfr3-{lneXAk>GYJ4CscULm2~)Dq3*> diff --git a/fixture/15/4/21 b/fixture/15/4/21 deleted file mode 100644 index c7eb01d1c5ee0d693c31ff4b29c080ae0c627ddf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 226 zcmV<803H7V0g(_u1^@s+1^@u!0000K0002W0000ewJ-f(Km`Q`0G1FS3g9uP^n^D` z(%}yp02%-t0HimMBR87kIGdZdk=r)TGv|3TIgX}nn)U*(s_L(FNv$NQx?WwE-m2>R zzwa*??a|vj2l1F`V`3&65z+X(@p;7C=?nmfV cHL?lhQiG>EGs?Z7U=IbS2v2urlzRbIo=_NF?*IS* diff --git a/fixture/15/4/22 b/fixture/15/4/22 deleted file mode 100644 index 2ceb30769c3f96a70fa8c8ab8982a37a555df4a6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 219 zcmV<103`nc0g(_u1^@s+1^@ut0000K0002P0000ewJ-f(Kn2YO08&pI3eaKcoKrsh z|7!jrfDk|kA%xQ}d$K)UpYP4S?%VROPq#PklIBm|cF(?kX_MsrlWlGG{Q&@>6Vh>* zI0>D^j6<3U8QTeyIH66FaY81s8K8xxDIEYN zK*3E6xdAJALBkdpISeWTAl%IyeiH0b2rdJ;fv}uJE;SOTSTuoLRixnR2j2q^EkO~0 VfT7@OOTm#<_+XA$YTOHif&#wtScL!p diff --git a/fixture/15/4/23 b/fixture/15/4/23 deleted file mode 100644 index 2d272059ee46ce28dcb88bc86b2d1f9b47c5804b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 215 zcmV;|04VH`Yx{RYjv@Ro1aEk4) RnQG+A00`G*92KI?91+Q`PYM74 diff --git a/fixture/15/4/24 b/fixture/15/4/24 deleted file mode 100644 index 2ef403907e702de7bb89384350c739e6f966f3e0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 231 zcmV zGXSMO9sn2s82}LrnTTv6BHLmji!HW^EEbax*5MN#u5 ziqc$DFG=eqFbN3>?02Ta4mbvyO*^VHQo^K)Ha)9O@RGt<`mQ=OW6=FZdr zu&#US?)tj@*7x3P-+u3PYqx*vw!PhL1A{>!gAxp3kRoIVg+gJ3kRlWcgNz_Wgbb1t 
[GIT binary patch data omitted: deletion of the numbered binary chunk files under fixture/15/4/; each entry is a "deleted file mode 100644" header followed by an unreadable base85 blob]
diff --git a/fixture/15/5/.zarray b/fixture/15/5/.zarray
deleted file mode 100644
index 815d2839ae..0000000000
--- a/fixture/15/5/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 2
-    },
-    "dtype": "
[the remaining eight lines of this hunk, from the dtype value through the closing brace, are garbled in the source]
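The deleted metadata above describes a Blosc-compressed zarr v2 array: zstd at clevel 1 with bit-shuffle ("shuffle": 2), chunked in blocks of 100. A minimal sketch of how such a fixture could be regenerated with zarr 2.x and numcodecs follows; the shape, dtype, and contents are assumptions, since those fields are garbled in the hunk. (The fixture/15/6 metadata further below differs only in its Blosc settings: cname "lz4" with no shuffle.)

import numpy as np
import zarr
from numcodecs import Blosc

# Only chunks=[100] and the compressor config survive in the .zarray hunk;
# the shape, dtype, and values below are invented for illustration.
data = np.arange(1000, dtype="<f8")

z = zarr.array(
    data,
    chunks=(100,),  # "chunks": [100]
    # "cname": "zstd", "clevel": 1, "shuffle": 2 (Blosc.BITSHUFFLE == 2)
    compressor=Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE),
    store=zarr.DirectoryStore("fixture/15/5"),
)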
[GIT binary patch data omitted: deletion of the numbered binary chunk files under fixture/15/5/ (chunk keys 0 through 55)]
diff --git a/fixture/15/6/.zarray b/fixture/15/6/.zarray
deleted file mode 100644
index 593a140b77..0000000000
--- a/fixture/15/6/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "lz4",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "
[the remaining eight lines of this hunk are garbled in the source]
[GIT binary patch data omitted: deletion of the numbered binary chunk files under fixture/15/6/]
diff --git a/fixture/16/.zattrs b/fixture/16/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/16/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/fixture/16/0/.zarray b/fixture/16/0/.zarray
deleted file mode 100644
index b817ff3faa..0000000000
--- a/fixture/16/0/.zarray
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": null,
-    "dtype": "
[the remaining eight lines of this hunk are garbled in the source]
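fixture/16/0, by contrast, stores its chunks uncompressed: "compressor": null. A sketch under the same caveats (shape and dtype are assumptions; only the chunking and the missing compressor are attested above):

import numpy as np
import zarr

z = zarr.open(
    "fixture/16/0",
    mode="w",
    shape=(1000,),    # assumed; the real shape is lost in the garbled hunk
    chunks=(100,),    # "chunks": [100]
    dtype="<f8",      # assumed
    compressor=None,  # serialized as "compressor": null in .zarray
)
z[:] = np.arange(1000)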
[GIT binary patch data omitted: deletion of the numbered binary chunk files under fixture/16/0/]
diff --git a/fixture/16/1/.zarray b/fixture/16/1/.zarray deleted file mode 100644 index 15aa0309ac..0000000000 --- a/fixture/16/1/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "zlib", - "level": 1 - }, - "dtype": "^==Ϋ:z}ɡ9W]>듯<7Oe_o =7"+`uwn}#G7O!~b \ No newline at end of file diff --git a/fixture/16/1/1 b/fixture/16/1/1 deleted file mode 100644 index aa9e976823478659180c3f7384a1a43c8175954b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 155 zcmV;M0A&Ao0d>;B4Z<)Cg<%iEkv)2500v=#?7%!dabW^>$&sqzE2>*pk$%~J@5PZC zF~0lyeWqU-m-k~i^Q4dFYJ6UA%Xw}3>^C3&-jELEA>E#%ia6$&pd8dkefL+StKKp4 z{`+KoRof%ys_m0&{d2!=Z+!00l%ww*d`Op>^=_uy=bFah|Dwmrci!)f&pw&nf##Ku J?hAPR=w{1lSf2m@ diff --git a/fixture/16/1/10 b/fixture/16/1/10 deleted file mode 100644 index 0fe5bdce9e..0000000000 --- a/fixture/16/1/10 +++ /dev/null @@ -1 +0,0 @@ -x 0DQ1}L)$ԓS.T^|=QE.e6L6.]Svmc7-E>S'R>5Mu^o8ʟ}9e>k{'<5q4y=orΏ9Nȟ7 \ No newline at end of file diff --git a/fixture/16/1/11 b/fixture/16/1/11 deleted file mode 100644 index d5662c3206..0000000000 --- a/fixture/16/1/11 +++ /dev/null @@ -1,4 +0,0 @@ -x} FQ -qi-` -zzf -y1t_%Ro2ukԛ;uWdy~/uy.%{%s>9_)̏z$t/hz:!;׽&US \ No newline at end of file diff --git a/fixture/16/1/12 b/fixture/16/1/12 deleted file mode 100644 index b9f7cd8a97..0000000000 --- a/fixture/16/1/12 +++ /dev/null @@ -1 +0,0 @@ -xuҽ 0 BA;0 KLO;Ddt?D޶3uh<_8<ϗ eO3}\]Gt}&>yz͏Χ*_aeOr}tfoί\9}0}9=kS \ No newline at end of file diff --git a/fixture/16/1/13 b/fixture/16/1/13 deleted file mode 100644 index e632b0bed1..0000000000 --- a/fixture/16/1/13 +++ /dev/null @@ -1 +0,0 @@ -xu0 @“>BzxzX<$ܜtwukyΗqWSrq z+B9W}Ν?_Gd[C}aGT.Ǿ8>~rswV%y u齽Ԯ \ No newline at end of file diff --git a/fixture/16/1/14 b/fixture/16/1/14 deleted file mode 100644 index 5818cddc89..0000000000 --- a/fixture/16/1/14 +++ /dev/null @@ -1 +0,0 @@ -xu 0D-cRԓS.T^| 7Y i4?ӺL64}ёC]ܫ'sxUN\悔tDo ڠUG>Qyz8y< \ No newline at end of file diff --git a/fixture/16/1/15 b/fixture/16/1/15 deleted file mode 100644 index 7a453d6a7f..0000000000 --- a/fixture/16/1/15 +++ /dev/null @@ -1,2 +0,0 @@ -x 0+'}M) -C'!]Y.vzk=n˃x ٧n<y^-Y'1Z̃:ck|H澾w'G>tׇZ=3|$e^=ۿ#_Ӭݖ \ No newline at end of file diff --git a/fixture/16/1/16 b/fixture/16/1/16 deleted file mode 100644 index 1ea850689c..0000000000 --- a/fixture/16/1/16 +++ /dev/null @@ -1,2 +0,0 @@ -xu 0+'}MBzʇ -O"`Ivowu1{{lV9y_^<џ2熽zpNo^{p|j^|p}rt|YГt(C=rۛ9f}#7B2 \ No newline at end of file diff --git a/fixture/16/1/17 b/fixture/16/1/17 deleted file mode 100644 index 03851a6fc8d3dfdc602067b0f57754832182579c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 161 zcmV;S0ABxi0gceT5yCJG1z-(AnLS)*00vqU4 zx-ILo#qZo+kK^0hL0gBWXCA)RxewKUf|ZZZ-@a;2eIXzH P#L?v--@o_)>Zi&f9|Bq0 diff --git a/fixture/16/1/18 b/fixture/16/1/18 deleted file mode 100644 index b39626a2f1..0000000000 --- a/fixture/16/1/18 +++ /dev/null @@ -1 +0,0 @@ -x 0+$;T'|RAz5|e:lPώ߾zSnGp0fo-H4FWL`1kf#nkUkt!0EJM&^*}vDBve2z4mq89AdmQ*EY0oge70(Q z$L+hf=lFv2{;>{FvW4?xM5*jD2w* zpIP5OUJpG#A0DQj&V6P`55MK`>!+WQsxSUBk3RYNo+>@~obgZ}oqVX5E-`avUzNT5 I2WpSsVm540DF6Tf diff --git a/fixture/16/1/2 b/fixture/16/1/2 deleted file mode 100644 index dbb192a95e35d659ffc515a5442d067104c4c4d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 160 zcmV;R0AK%j0d>-`4FW+7L{S$)NFNtd0EJM&_CP&EBve2z8*&^x9Uh6*N~5v=*c)us z`^<58T-JMA=vMxz^Vj2Pz4wLxXqCD=RZs4`o#%JXp6~TJYc0Nsk!pB_vgsDIUkQp)u)-am&)VS<-Gf!$IIh+inEX6_3?dQ O{wt~dAL3uo0_vq6R9XfA diff --git a/fixture/16/1/20 
diff --git a/fixture/16/2/.zarray b/fixture/16/2/.zarray
deleted file mode 100644
index c4bed6fbc7..0000000000
--- a/fixture/16/2/.zarray
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "id": "bz2",
-        "level": 1
-    },
[remaining metadata lines, from "dtype" onward, garbled beyond recovery]
[deleted binary chunk files fixture/16/2/0 through fixture/16/2/55; compressed chunk data not recoverable]
[diff for fixture/16/3/.zarray garbled beyond recovery in extraction]
[deleted binary chunk files fixture/16/3/0 through fixture/16/3/55; compressed chunk data not recoverable]
diff --git a/fixture/16/4/.zarray b/fixture/16/4/.zarray
deleted file mode 100644
index 3c0a641923..0000000000
--- a/fixture/16/4/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 1
-    },
[remaining metadata lines, from "dtype" onward, garbled beyond recovery]
[deleted binary chunk files fixture/16/4/0 through fixture/16/4/55; compressed chunk data not recoverable]
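The two blosc-based fixtures differ only in the shuffle filter: "shuffle": 1 above is blosc byte shuffle, while the next metadata block uses "shuffle": 2, bit shuffle. A hedged sketch of how those two configurations are expressed with numcodecs, whose Blosc constants map to exactly those integers:

    from numcodecs import Blosc

    # fixture/16/4: zstd at level 1 with byte shuffle ("shuffle": 1)
    byte_shuffled = Blosc(cname="zstd", clevel=1, shuffle=Blosc.SHUFFLE)
    # fixture/16/5: zstd at level 1 with bit shuffle ("shuffle": 2)
    bit_shuffled = Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE)

    # get_config() yields the clevel/cname/id/shuffle keys seen in the
    # .zarray metadata (newer numcodecs releases also add 'blocksize').
    print(byte_shuffled.get_config())
    print(bit_shuffled.get_config())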
zL<9&tpurOWL^L2kG!e*=m-pR0#(jTzVq^+&iWFjG-ltdu4**OJbmb7j09FmM%{*wl ch?AG4!U6#Tr@)bg24}cX4BXfY1B#(I0e%m8ga7~l diff --git a/fixture/16/4/13 b/fixture/16/4/13 deleted file mode 100644 index 9b07d227e57a0e560a0b0cb90cb532357e811ba8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 277 zcmV+w0qXt&0g(tG0{{RZ0{{RO0RR9L0002}0000ewJ-f(AOf8S0G8J-5YPdDxZvZ_ zVlUO?NAN`z@g)E%04o45FL%-ZR@z@kulU84_Abkd=wh+B@-G&ZMZY3i5m_u1y+Ya> zhr^|CC>_db$S+)EoYAiKeH}Lv>BnRWnob^VC!G%yrHD)YQyO zUDwRaJb7BCB|h*o5e+fVVK|O6$7lf;08%qCLdHOi;nmPJ2eLU7$FnPWS;_*(z_CDn ba0{3L><1Zx%ph6Nejp2k0pYzf&9hNLvYI)@#;!>wmRhR@dUJ2z3Q#(`ueS^tO64tB6>hznkE{6 zp9oCgA5268BBF_ih!*1x#g7}Kz&r8yzQYF|3VetWABQ)J5e@({Gk!HfQ$+I|nH!)p c1MbR9%F9x4FPP9!dGBQq8Y=I-4C0X~Jq)aSy8r+H diff --git a/fixture/16/4/15 b/fixture/16/4/15 deleted file mode 100644 index 4cf3b530e1b22c80e74b36957373dc24c3f656ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 280 zcmV+z0q6b#0g(tG0{{RZ0{{RR0RR9L000000RR9fwJ-f(AOf`q0Opx85YPdDx}oDy z;&0WYN6Tvp2E!#kRM~U%Q)ZcE#Q{>zh@Ud;9jw6biC zmzU!)ay;{L9OjM76A!;J?(k2%an%>7_CZ}=1$8fQ)dlr+RlllW5EC5^L{kV9rVs)h zh=>9OnkY<%KtzFvi0BVQG~$VeWBg&_iQ~(>{4#T3j`0#P$6*fuR!9KY^_jt_aqY}u eH!_YHj+7UwuFeIpT%Eyz&Ka3@3~U|LfDHgJtF^V-25N)f1_K*_2luz_ z_HC&&h0@`taG*o@K$D2b7!_G$EPYglJjM{Iw3W&-Dvw|O9}jtqA2*jmh&w#*^APdy z@&sOr&&$jgDQ+P$#hB;ijYvckLWw2{QxBy;hY*PR^ixwmb4^t}Gcz?`Q&m4T^Gww| zbxqAP&oxhyesNZUX=S diff --git a/fixture/16/4/17 b/fixture/16/4/17 deleted file mode 100644 index a338c0774f972372519e2074fbb24dae743a5fa9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 273 zcmV+s0q*_+0g(tG0{{RZ0{{RK0RR9L0002_0000ewJ-f(AOd{{045kP5YPdDx}o#Y zVlP#u7tlo&@hSi;04@Nc>Ys&JJmLXW4@53d%YQ9P$pQp`<*EUA@FMcyL4B2607%sj z7Fh-|2t;NVnQdho8PP`c^k(+-WJE@rZIewiGczN^H9{;(EJn;MW`14>@$(~Q0y&28 zz;T{=$n%jQ!w4BLARcmzM11@Z!ihPKbBvJZXZ|@5!x-my2#3Tl5)n~t+qQSx@9ur~ zdTp=mwcl_1^=`km_x;^}ZQFkL_Fj7t5fR4-69ADgg63+5QeKuqupn>(zz^7h_5vP3 XFR%)@3*ZNAp}o(*0W^9b^bOnr&Jl0; diff --git a/fixture/16/4/18 b/fixture/16/4/18 deleted file mode 100644 index edabb5a4632fbe7bb5a98115a89db1ef01481b6e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 278 zcmV+x0qOn%0g(tG0{{RZ0{{RP0RR9L0002~0000ewJ-f(AOfWa0A`mi5YPdDxS{h= z;xE;tN6=Lj@g)E!04o4}0RGxm1K0ZPX4md^H?u*%z-j~VpayES3wptWw;$Ypu+`cR zw|eXA>93kfW5`k&S46JdZ<40vwDtGjbA20tHOQiv~5V^n;czC10MDZf? 
zGE*d;r^v%Y%&elRRF{Y@-RVwWL|0Mis;^R&N&z!Zc%DOeo|!Q76T*ZLCd}b5VPNaj+l4jjo!QKk*teqEg~hgtgpJh$fB!C>7pfCQj1<|t*Yy_Uh=A10U2-; zLt@CV|0W(5!y@9iVR1vgh!`?AGe6Kanbky8RMYlm+Y?Q;8I?^|WJP3GGtcwS%nSwq z%>09y2Mic6U}i8gGXrMk=aA$1FJd?|Lxyv47vFcoxVW2n;vxnT06`c8Cecj|nrZ-C jKvcdRCo_{5sw@y4gayb0UL#;Ouam diff --git a/fixture/16/4/2 b/fixture/16/4/2 deleted file mode 100644 index 26626c652fc21a3833ee055f3dd889bc14594c8e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 285 zcmV+&0pk7w0g(tG0{{RZ0{{RW0RR9L000050RR9fwJ-f(AOhV70EXEz5YPdDx}f9H z;xEtQ|t2z+=P diff --git a/fixture/16/4/20 b/fixture/16/4/20 deleted file mode 100644 index 7af6926fea318cbe9bd9c0ef727584fdeef8271e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 285 zcmV+&0pk7w0g(tG0{{RZ0{{RW0RR9L000050RR9fwJ-f(AOhV70Jd5*5YQ3u-n;`o zbek+J5E_iJpjv2!ILDixI>aga{$v7-A6?V<3hRP#hj2V8F*10uTUFAh@a= jzz$1>r}BN|gNVE=4X`di2h9YQfQ?`w*a%A$Pb^3Qj+J@s diff --git a/fixture/16/4/21 b/fixture/16/4/21 deleted file mode 100644 index 19c64de690f4b6c22aaf721dede28165d56af7f8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 277 zcmV+w0qXt&0g(tG0{{RZ0{{RO0RR9L0002}0000ewJ-f(AOf8S0A?315YPdDxZ(5B zVsF)?7tmD|@g@K%04)IXl+qUbeeo5b9RMK27rYP;#A5JQ+(p6S_Qjwr5R31F|1W6o zHz5R~Z+M~)G#%(p4HF&O2AV=B|IB=+w3kw5zU61W%yXIhv^;ZZTLOpm z`vx$u(XLgetW;V#=wjXzQpP6mnFx%b$2tN^hDYOS9zsUM8k}O{JWO0(kB5M{& zlC$Qss;WrNr`h-;q0& diff --git a/fixture/16/4/22 b/fixture/16/4/22 deleted file mode 100644 index 222d1144c6514a70565cd3b53b7cf7cd5a002fe3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 279 zcmV+y0qFh$0g(tG0{{RZ0{{RQ0RR9L000300000ewJ-f(AOfui0G3!V5YPdDy5Qr{ z;xE{Czt8 z?~io&=_$~iE(Pl0a1Vh(ASwl5z`$mMH){vBU3(k+_6F>3*MM~}xC3AS1YeWC?z$_5 zTrPFx>Qd!$AE|O=a+zGYBSlDkm#b3czAGOYfdYkz4o}n`Xu9+PB*{rq@F|{?f`XHr zVnI@H&M7DaNpdj-Ns5Kwf}}{1t0rGn%8|OdR22YGjRMrbKF6@sR1ic21O@W4lr8uN dd%@xWJYX7#Q9uvkg22IgU>e%UGdySvJOST^b{_x$ diff --git a/fixture/16/4/23 b/fixture/16/4/23 deleted file mode 100644 index ab3593f944dbc5c2b8f684750254a44040f26ae3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 281 zcmV+!0p|V!0g(tG0{{RZ0{{RS0RR9L000010RR9fwJ-f(AOgJy0LGaz5YQ3uLfix& zx=j{#yc)yGJt+Vx04)Gmoar;SxB68gy-NDhR4L?1#6sQd)dZNQmfxZm_EPwz)yZ{3L76SZY z0KfwvgaAN*0T`L*-47c%#+_MyXZeu-aAttk>-Cy}b{>zqJE!|GsV3TGSAz zmnZ~k4}m6XY3W#o3=tX9%A+D8Ln=$hkY157q%E4 zg~&V)aUsSt@x1R4Z)%tV1tJR6(xF5|fl3E8GtW~~*UU^+O+8a}&CD~;RddZVGn0qn z6DKeczdJGFAzli+yTcp<763UJ2i#%cd~su-WYTk#n8|<)n4Odtsw5B@Y!3_-Tmu0D U1i;N;dtj*G8U)B0BqKu>q9;*l8~^|S diff --git a/fixture/16/4/25 b/fixture/16/4/25 deleted file mode 100644 index 00320b1dbde5480d8e5698c485549554f52e7175..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 272 zcmV+r0q_0-0g(tG0{{RZ0{{RJ0RR9L0002^0000ewJ-f(AOdv<02Y=k5YPdDxS{h= zVsF)?N6=Lj@gx8y04f0g822CY4>Rj#W_SB*1!le0*7^npy&VACelxIjy8zrD*bf42 zsZZe$0!=?rm^y_J=no>&hlq%XbcpmyTj|Sq^!71i`9;RkQ}awsRsD6%%v1Hu z_1qn%xbyGs#=OikF#{L?xeS&XwL$=HjGDQW=O|5{g~!X%fCa1uIj{uw1(pGA0f+(l WAQ0@^Gwc%T09+U_mtD0QIsxkCp>+@d diff --git a/fixture/16/4/26 b/fixture/16/4/26 deleted file mode 100644 index 32f298c44a7da343d2c6164596c243785f6b74da..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 283 zcmV+$0p$Jy0g(tG0{{RZ0{{RU0RR9L000030RR9fwJ-f(AOg(?09F?;5YPdDxZ(5B zVlUO?7w|0rGO+8RQ&<#%wp$$_5xZG0urId0(TTn`S0pJ1v%B6$S2jzk<1p!9JFnbWP zk-Ix`+Z`D!QJ;wDhPHtMBxkW^kz`d>k=JC+ 
z`YN)>Dw6eekt8|E;$}9vaW?GChJ9m_Sbug+XCVn)ne^r;YiF?U48REWjm=cn^Rh!MYNBhSOgfQcay z2M+x3A;V+l$j5l(NW}1u6UPHH1PoyuT6L|g%Ia>tYSy>9Wer3Ghz3M}01=>R0z?Gp zpNIerz<>Y|(GMm90|HZqFcKe`BQszK7)B1v5fU*UGY|kmaWYgsQV|Wxa1NhMcfEp~z^eF%+04@OdKZbi7L|&xQw##7&AuJ)ZOAt8(>Fo{j;f8x}=pU8_StJ~K z5RqjG*VeMl>gr`?+y6HAb`T%2h|f4P<4ija;xjn-b7%U`*ufF!>4S&|BgKs&PBHR4 zMUJufQrx%`i(K5@xG}~USNFEe-mGTVR#jV7yOr5hR`zwP`T+!J0uuoOL<2w*0h%Tv z04ADfA|gNl002mgJX73_@A5J;zahp6AwD58$BDazxI8ly03aw$5?F>}H3H^TO4LA|=xL%(6CsmHr}L2M%yjyYPG&+#CzGE%^AOUxcS@$u{5;d=3Bw*^ z+@p+g8O5wvx!jGr?=JV_tlVce?21|0irH=3T?mImsWb&D5tTZH9_SE44~Gwvd_f@u zNiIl1u^>rKF8Cw`CrMI}VhWOTl0U|yjI*wZU3}=RjPrBd`!y5TFcK1kNo;0XjZ)!TK;;fEp~z^e6x-04x9_W3KivK=i%95lMZ(e#9}c%cLjS(pRzhFohPzZk zkPrfGS@q4TZ+7c$wJNKcK06aYc70t+COUu75w3GOWMET+6HMSv9C bt25Ap03dI`251n3&>83~jBMrsN`Z0L diff --git a/fixture/16/4/30 b/fixture/16/4/30 deleted file mode 100644 index f79c7666501952c8bad3f9ae6859ef8b8bf59aad..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 273 zcmV+s0q*_+0g(tG0{{RZ0{{RK0RR9L0002_0000ewJ-f(AOd{{0OlAl5YQ3uLc9aM z>NhMcfEp~z^e6x)04)G_{x$?zq^(F_dKU?Y`)x)3(ElLeE+U79+Xgv&L--}!hAZDi zZukRseXD+}+q!0UYX(Pbi`YlRc@Y2U|KQva@#)-l@cGXCIJa~0J7QyT;{5IwDb7rB zi%%gQpBR~9X$DbvLuCb=wL=6AfrUL`2|;24Et3A|jfe zhz5fR5KRQYnL^xmnR%Cgd44ArBOjT0H(ul;5&%~f0D&MXH=yjUfCq+I4Ri9c)C#0R XcyWeYU>BGdTm}2$40(HC#0@104cu~T diff --git a/fixture/16/4/31 b/fixture/16/4/31 deleted file mode 100644 index 6be833e447897611ebf41141827b508ba7a00c31..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 276 zcmV+v0qgz(0g(tG0{{RZ0{{RN0RR9L0002|0000ewJ-f(AOe*K00tQ{5YPdDxZvZ_ zVlUO?NAN`z@hJc*051UG0Jx%yibdSNB3o@Ox(tnhPVwIV+=hsOo6D=p_hnCfqwc!12a`M^-o>%%v3#7 zQ!_I&bxmC}GgCFsJT)`V%*-<|2adFniTPumXohL_V^|Ih4gfVrTFE+BTk9y!q`a&u a&%m=&!$96ZyI>t>;MwUg;0)PR7)lfX11A?^(woUnVD5tS-S-U01+SpMDzd< z5daf-q6yG65dcI4CL$t0L_mxoa0DCz!{HFbix3V0#}EQ>AYwpq2)H;A07V#sCgT8; lp*eufj8_H;>UKo1*-azX$A diff --git a/fixture/16/4/33 b/fixture/16/4/33 deleted file mode 100644 index 957dab52371c11394be4caa8e2f80c443e40164e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 276 zcmV+v0qgz(0g(tG0{{RZ0{{RN0RR9L0002|0000ewJ-f(AOe*K04A6(5YPdDxZvZ_ zVlUO?NAN`z@h1Q&04)IhmS044`7Vn^7pW|A7ZDM+_pb<1(e~y4?;^J%`mHRj5DT%8 zE&-)LQyan|9H`Ux-+_KN81%c|clWzHZ~zpv{rms>-R*&XdtVE{01PgPd7_Ph0}(SV z&&(6kY(yLQh?a&p#$!AT)25#&bfUwd6O|^q6A=Xpp@E2Q&wowLQ!_u$)YLUqQ&-hB zHS^5OGtW%T_L~jdPE32`z|GLoY~0LjBw`c*RT$nS>#2su5w3Hv%LoPLFwq%E$_v#A aECKX^7O(&y0L=_!fFPcsR1b_89p)32m37wu diff --git a/fixture/16/4/34 b/fixture/16/4/34 deleted file mode 100644 index 9b425adaad88571b5ae659b9e56ffe13cae844b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 280 zcmV+z0q6b#0g(tG0{{RZ0{{RR0RR9L000000RR9fwJ-f(AOf`q0EXH#5YPdDxZvZ_ zVlUO?NAN`z@hSi-05AZ(@4Mf>h~8Fwe-Wujg~&ogmPJ(lR=y&Z`$BYCEEP*%q%RgB zm1VihAAO>Jm;#li=}!L#dRy@Q@2+KKQ@|HfD~2 zX*o6=8>XdMnr(*X*oL8PY2%ie6ao>IsDbD}MAH;Xe@kBq)BO-S9j1mUHB(d9)XdZ~ zGgDJDH8oQ+&&)N?%*<2sJXMu8FcEQNVxDG(W;o1pnAw&#(DD)O(Z&G{06(+F73>uu eC}TjCi$P`(lNYK1ECj*q3}bg7M+fr3%;5ojUUTfC5obAVAdMuLBea@bO>Z0QG~20N}VD zd<>33eMDsRMjMft5pDD~ZBxCO{`xBVr7BXQr6np_%U`RMsJ#A{tWv+Is>phM7&hMx zV}`^$?4G!|?;#g6{3ar1E^c`9@m$;tyNLGN8`+l`QIVAu8Cgw#ZBzks-+lM<%=g{= z+2#F%-r03^Yh%?%-sy`i!Z+W?uQfKfS4H)1BS#54**dZJ|_V%{SyzyYLF=} btGLbpA5Fxd30NLh(;46sniv+BpJ`Z zi8;(N5g%=|9~r_eV1bX%f8M1lHG%lPq?hxF0$GUUe{B3ER{5c#FcXghwn4R_<69b#^0=6RRr z;b(|wnRz4Po!y-s=6_T8@TCLw(C`%K!wp22C{Uo!3-Qd4cxEv(|15-9%!tLzLd+xn z`9AZrw@kcz+YUFp%sjO7?A}HT762eX&U~xN2gbEjsWfw{xN%ZmmTFv~1QtqaE(GB- 
ajK|d+5Q?ORAzYtfJg(+|P)ce>FF*$INP9&9 diff --git a/fixture/16/4/38 b/fixture/16/4/38 deleted file mode 100644 index 1413f2cf3aabac82bb1f73381edf9e49bd477529..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 281 zcmV+!0p|V!0g(tG0{{RZ0{{RS0RR9L000010RR9fwJ-f(AOgJy0M-{U5YPdDxZvYa z;xAR^7w|x4TXEZOMTKa4 zEADOE3ek!RAu4*W=xAr6nPDbA5)nVmY~;fN7XU;IAc>&@VQkC+auSzY!-c#oWq}kB f1MUTxgTf#bFdPH|XFwOQ28atX2ZbRN$2kNkBgbz# diff --git a/fixture/16/4/39 b/fixture/16/4/39 deleted file mode 100644 index fcf89e6ba6932f6a54c0b8467dd227788bb53178..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 281 zcmV+!0p|V!0g(tG0{{RZ0{{RS0RR9L000010RR9fwJ-f(AOgJy0H#|t5YPdDx}f9H zVlUO?NAN`z@h$)@05SmBM=baO07V4&6(aHr`HEFDQ`J#M7kx#bv-8AHW1Mt=u4pu;nGCk9&YFqe!9IO zGc!~5OkH!$RnJp1*Hbgk%+yrPGxN+()iqN!GgH@GbcTCfl`_TFq*2+h3_z z7wg-t$p=-fuj?-;s1B-Mw*_7-=87)9C=Djv}h&z%>Y&UAjBzoH^| z11~WLo;PqH4$LtzGslg?@!`lZ$G|(mpf2d(Z}qCW2K9pr3?5wItKb4HaNT;)YIF!w zAo@TPO?03`AR;4hmRXyW)2)9;+Th@H{N*XefcF`CXUMs0Iz~4 dA&07RHZ=hRfpB{baZY)mDtiEj2gZ31We>jXb|nA+ diff --git a/fixture/16/4/40 b/fixture/16/4/40 deleted file mode 100644 index 0797de44390d9600527e822aa5d7e0a24a23cc6c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 276 zcmV+v0qgz(0g(tG0{{RZ0{{RN0RR9L0002|0000ewJ-f(AOe*K09IHr5YPdDxS{h= z;&0WYN6LOdRn%ycAy|3UMz|B;rEgiACm!7!d$eAOJ1`HO^(QmLhDD>GjN{ ayeuVv4`A&A{Qx-t1rTNhMcfEp~z^d|r%04xAc`@PcT5JE+kzafZ(zd^zx2U(WT!<9B18YIYfXd|29B z`9t-!*e9L|{Of zW8}!oZwMjs?t6?B;!gZ2PJB1Sed9z9aRwFuFauXOLW&sSGeGyHrJH gA}_0~Yc8DONEQ5m)q-`f7c37#pCMBR*+^9uqAm1z{r~^~ diff --git a/fixture/16/4/42 b/fixture/16/4/42 deleted file mode 100644 index 0f1fd68dbde4641001bcfd9547fa9ba325ba99ff..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 281 zcmV+!0p|V!0g(tG0{{RZ0{{RS0RR9L000010RR9fwJ-f(AOgJy0A?985a3||00tvI ztDI_gSzy&3J}Lk%04@M^ecjbX{2ljy5&w5Z#l_tb|MhXjT~`%%#9hSKbwu4oUGaBu zfA_`Rbwp-nMPzl^6B#|xWJDq*sxPaEs$5zsDzd6xDwkBLtSX|aS|VDPzKF_;EGlv# zya#+S-`vFpU;IqWaOR1bZy+i%d)kQ1=&~|0qRs3wx~R%%vjd(1U@$Y_ z!GIYsGXT$Eex7G$er5(d^UTZxHoTdeh@WGIa7e`PA;aQgVrD=*5CFC^3#gGQ2EfdG f*9>R!LX`!czz$&jz=Z(DASo~x*dKBLoI^kZRH}1v diff --git a/fixture/16/4/43 b/fixture/16/4/43 deleted file mode 100644 index 7d67f0e4896c64c2c9b060eb004b974518420b99..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 276 zcmV+v0qgz(0g(tG0{{RZ0{{RN0RR9L0002|0000ewJ-f(AOe*K0Cv|c5YPdDxZvZ_ zVlUO?NAN`z@g)E%04e}q?-pGDEAFz0keB=aVj&i-EWIM~-$lgzE<}iLmxXkx+zROj zp{1#ph{BzSCgcp&0tZyP3> z*%;F-KXJ1$K0a)W6LEVOrlE~E49{=4(?o%uZa9PkKl4=8*H!cURnIfm)J%OnKhHeR zOwG(QGjlzTF)hu8WqzJ%d3@!|g=BO}S&M99?#I0u_K1cw<+mXsHjP62?1c?u6W_AZ1ynTUzZr^{~ z065fA1N}sF58>$>D0HGJghfPtbaZ@&3>k7CvdCCk>5yMU?qj@shm0-(;>0B`E)O41 zmHP_TtQ#11=B4Xm*fKSQpZ;{vIGCBQN;{uy%loC6QQ7(U8xQvd(} diff --git a/fixture/16/4/46 b/fixture/16/4/46 deleted file mode 100644 index b27ac66dce98869b7a9a633a195aa1b3ddcfc9ef..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 278 zcmV+x0qOn%0g(tG0{{RZ0{{RP0RR9L0002~0000ewJ-f(AOfWa0Jc^x5a3||08%49 ztDI_gSzy&3J|+Ms04e~bTJ?9u|J@PyaU5}9+*j3I#NYQHRrg)oeOFXeMATLFb$5S7 zR7NyiR#s+4M1Pr8y)5g$$g1+Xt}2U)i0aFtRkDbRESHv+s*1=T`MwxJZYE}KZoq&6 z5yK&b51F}dKn(RXZU01@(c88eO-4j!Pm@*AL;>>yz<>b&GnfJM3jkeK&K&d|VtcPs}%jkcqgP2NwWQ4T6#}S7kg!hRvZyB?C5b!~h{Xs4P-mmI^?i cD9J#du-F+M0s=)z2Kt1>oZ+pg@Hohd8UCwYDpK+v!X=OgPMh4~Ot~4)b(+2#3EIcmhO# zz%+pY5zzz)UkH&RQ!K<7$Hc{mTL1t6 diff --git a/fixture/16/4/48 b/fixture/16/4/48 deleted file mode 100644 index 
bc1aca65c3ddf8df0b73efa98b498cdbdd81727f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 278 zcmV+x0qOn%0g(tG0{{RZ0{{RP0RR9L0002~0000ewJ-f(AOfWa00!7E5YPdDxS{h= z;&0WY7tmD|@hAW(051UK$y3rH1^24^9bC7n{<`Ycp!zK^xK$m1!5v&}LACx{pdJja zf))seaJUC*8>mEqK7^^E6v%ysJa=aPdFIY!`W-U4Go6showj5;{~a7R>~-2yKy$cohVF~h<=*r(t#)(E>QzwG08bO7yKzmK~C~X z3JQ{(6r58mNKyz2&T{!~e4BljdlV~YX1@g%0P0x};y?&1Lff$huk)J%BhJjBHl` diff --git a/fixture/16/4/5 b/fixture/16/4/5 deleted file mode 100644 index 5a5d88507d74b6081d877857d8266738c66059db..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 281 zcmV+!0p|V!0g(tG0{{RZ0{{RS0RR9L000010RR9fwJ-f(AOgJy0Ct==5YQ3uLizj< zwl-Nw8_-s@AsJ+Ev@C*X*|JTD@Jn?p58k z>pDb$i0ElRfM}W~B0zuuO@Jl>^fUl~2oMnw2w@~9evacAFhgcyW`;4t2$*LGBNM~O ffR8XTj0Xn*U`TGm$_AbJGS%=JA54~%7j;bmc#?V1 diff --git a/fixture/16/4/50 b/fixture/16/4/50 deleted file mode 100644 index 148309d2ed56a806cd94764553e0671516b96c6a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 282 zcmV+#0ptU0+lkaaG^X%{DW$TJK={@78b49@yIhV7Ao?IxxF~Te}ZHZTm#@ zfu24DqN&u-1JOND2&GAWkwrwLG8Pf}Aum6M$WrO(L*zc>2H!ZQ$ji*c4^PCq7kAzZ zahHdO0vC5$;5x_)wm1XP Vl?9*;z$-L&2A3#>YpmE705Gf)Yo!1H diff --git a/fixture/16/4/52 b/fixture/16/4/52 deleted file mode 100644 index a7fc0c5b7af8cc7ae4bf89e49cc52c80afb5cc15..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 279 zcmV+y0qFh$0g(tG0{{RZ0{{RQ0RR9L000300000ewJ-f(AOfui0M-~V5YPdDxS{h= zVsBNZ7tmD|@hSi<04)H^7{3fzWLd_r4cKH)10pk;Y$N;aiKe#+nEeCrZyR_6;DK$n zZ9hE#0TX7LrvuRpP4m$FGgFueVV>|4!arf2d7cnNhD`GGAQGWT7Ls3N8X{xJG7gt` zIW7?IaJ&?E=ZE*-BaVNgxcf{YGV{w5nJ2`VmpLvj@4oL7b0GkjpPvH#R#n?pTkY*_ z|9*SFx9wN8RduVaYI{|y?rpXAezk7zFEjB(6aYkJeR4WT$jhqW8L-nQDNvxp6HP=! 
zfhIb{GczquyR%F@?KV8T&%`^!L(5Cd7yv>bwr}MC diff --git a/fixture/16/4/54 b/fixture/16/4/54 deleted file mode 100644 index 444562941da636a8d869067c73d3c64ba5e0de7f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 277 zcmV+w0qXt&0g(tG0{{RZ0{{RO0RR9L0002}0000ewJ-f(AOf8S04A3%5YPdDxZvZ_ zVlUO?NAN`z@g)E%04o6PN&i;F(#4e*FDh5$7x6Dd7EzJKqKidEy69c*SA@u|+=?;A0-h&6dd-opzcOU58V9>#!cOBe;y8~#6W}i0lbC_)d z&2r?x91k?Z2PSUgi8jnev=JXO&qPF`FojDWK9nxe0W<&1OkGpYOwG*KOnuc%%{9++ z&CD}DRaec7;g4t|&3@#VBhfNPJ^~m3P#6NpY*YeZ%S?jvKG?>~QVk?K117K#7z5Y< bCP2#|*%>f_g}@lV1~36FKZEqe1&;-?U@dUy diff --git a/fixture/16/4/55 b/fixture/16/4/55 deleted file mode 100644 index 7a0bc8bcf1933b416fa96fb29c6124ceee532819..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 190 zcmV;v073r(0g(tG0{{RZ0{{TN0000K0001{0000ewJ-f(AOhtC0J6;-5YPdDxS{h= z;&0WYN6cLFgNheDu8kt2SGW8#_O?i(*f zUDXFw^;G~=0Vo($fvW4)V4mrZnfaM9rekzGX2#5O=do?p7c+Y^XO9I12^Oo%uRV&>M}3@mIR^!q8r!(>wv&#Akb+7DD53e3;+NC diff --git a/fixture/16/4/6 b/fixture/16/4/6 deleted file mode 100644 index b38481e47ef69489b50b136dd718516011bfd3b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 284 zcmV+%0ptDx0g(tG0{{RZ0{{RV0RR9L000040RR9fwJ-f(AOh6~0LB?J5YQ3uLisE_ zTiXLzAT%t?^eF%=05AYgXGYwS#iUo{*2I==`}=P%_bYb0^u-o4xwc5H-K5LcOuqJ3 zGm~op1JG(O(80F~eCw-K1#XGOh+ZLXN6g}R=EePrxn1sgM_|0b-79 i$@lLJH}XQtY9QR4n6yy&V$lJefwWNhtfIz=$dn#2XMGI- diff --git a/fixture/16/4/7 b/fixture/16/4/7 deleted file mode 100644 index 1c9e4000e981595029e6b9c86a0ef315419904ba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 281 zcmV+!0p|V!0g(tG0{{RZ0{{RS0RR9L000010RR9fwJ-f(AOgJy0Opu75a3||08%49 z8#!$2a&pxkJ}Uq#051S?t*X~LC;$dA7zB7wgF!`sC@_eM9~1}>2l$U8jyk^Tqpp7d zVsIT1*Kzh!k&$g=n{BljkxkcTc5O?EEV?WzFSRVCC0Z)7Xz4E{D!PiUy4I>9dYc$< z_Y8@NW9~05;uyx=5FW$sz8`jXGjVgnh`3>KsfZ?@!Z7R&2Vl842c2r-OUjIEd*FKw9V82_JF;1 fm6=jrmS$irSQ22O;0h2H1PO!K8BTf5K@30)alK-)nd{1@0})@=iWw{5dG`?m+yg8PH*yS;TY z1Ge=C)(`*mf!c8YFny<|PDFvG{(<_0khYUPAu}_59y0lPp81*oc`_}zKY6~-0eANp zqujV57WcUE821?8H}1qJMO^MqJic<{9_7XuVm!((M!66|+e1GDBBD-HI6O@gJy9gd zA}`h#$-1g0$zqYSW^uA6uZvYxoEM8VStQBHpJXkBD5abzpSZsw#vKd*JE&Gc%}sCa`pli@=jNH-bG^I2yQklMuGhJHg^0=rk8?!CftJJ2%-%A8OT;Y) zZexz|#6&~GVdDr2v_Mq{;DW(}0t3(*R76A+0v$pKPee~Ng(-v&fu;imnx73FAgLkXVpD@1l_5$NHV3Epa3wEGEm#_042l5Z1NopC ZSRHr)O9PBS5g>dZA2fs2$r=1)1!Q`5a(Vy& diff --git a/fixture/16/5/.zarray b/fixture/16/5/.zarray deleted file mode 100644 index 3a20db9137..0000000000 --- a/fixture/16/5/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 2 - }, - "dtype": "hdme;Fez-77Gz-Hu6X!}DcP1eyV^pzF2A#IclIZxZ6!HL z4PUQr7C#yJ^ug?kjM?u3?>1+C>oTfddHTxVrPI%H9_X9v9=;+ay!ZAszTERiuUT<2 mmt<%^%k+=_b#B@$yDjHF&+uCmxK)kG{k~?(2bur>wV44`*Ha4s diff --git a/fixture/16/5/1 b/fixture/16/5/1 deleted file mode 100644 index 1fd6bc7764d46ffa9ff928f68686748aab6461d4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 193 zcmZQ#oWh~N%)kJ`2N@U`M1XiD5NmAJ|C^w|^p}O9go%M6^r}6B%<)1X+G8JjH5AC5 zq~Zxghkn#EFhH619xDzA$}&!1(lnN45H#T7JaLRuZH5E$qes==lWceYusu+4-N|O6 z`{(vB2fgnC)z5bNPnSEjmz8@%`Goir3~k$_W-bPD|eT@iriLcb>~=rZQ8=5 lq7OIqrSb#SE`_F5@Y-GY{^^&WSKS{+Chz{9@ zUbP2`f)E%7uUOc=uAX56lcsl`pov1G`HqH$1YTEugK*P5Ipxw9%Zx6cY|i;1HmOl& z#`W8s7n4^<_ZVN_DY9>K`DM-G68ZCt+8?{Bou|B<{!PZ_ww*A0zj6P{O0)NiZH;Gh cK9Ab4aMq6Z7Z>oS8$M0B&=9qL(?9+{0F#kQk^lez diff --git a/fixture/16/5/11 b/fixture/16/5/11 deleted file 
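The deleted .zarray above documents the layout shared by these fixture directories: zarr v2 array metadata describing 100-element chunks compressed with Blosc (zstd, clevel 1, bit-shuffle), with each numbered sibling file holding one compressed chunk. As a minimal sketch, an equivalent fixture could be written with zarr and numcodecs roughly as follows; the shape, dtype, and data values are illustrative assumptions (the dtype line of the deleted file is truncated above), chosen only so that chunk files "0" through "55" appear, matching the numbered files deleted in this commit.

import numpy as np
import zarr
from numcodecs import Blosc

# Mirror the deleted fixture/16/5/.zarray: 100-element chunks compressed
# with Blosc-wrapped zstd at clevel=1 and bit-shuffle (shuffle=2).
compressor = Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE)

# shape and dtype are assumptions, not taken from the deleted metadata.
z = zarr.open(
    "fixture/16/5",  # directory store: .zarray plus one file per chunk
    mode="w",
    shape=(5600,),
    chunks=(100,),
    dtype="<f8",
    compressor=compressor,
)
z[:] = np.random.default_rng(0).standard_normal(5600)  # 56 chunks: "0".."55"

Swapping in cname="lz4" and shuffle=Blosc.NOSHUFFLE would reproduce the fixture/16/6 metadata shown below.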
[GIT binary patch data elided: deletions of the binary chunk files under fixture/16/5/]
diff --git a/fixture/16/6/.zarray b/fixture/16/6/.zarray
deleted file mode 100644
index c802c4589a..0000000000
--- a/fixture/16/6/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "lz4",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "<
[rest of the deleted .zarray hunk garbled in the source]
[GIT binary patch data elided: deletions of the binary chunk files under fixture/16/6/]
[GIT binary patch data elided: deletions of the binary chunk files under fixture/17/0/, each a 100-byte literal]
[GIT binary patch data elided: deletions of the binary chunk files under fixture/17/1/]
[GIT binary patch data elided: deletions of the binary chunk files under fixture/17/2/]
Nq{u@jC&w9T9{>kD6XgH^ diff --git a/fixture/17/2/7 b/fixture/17/2/7 deleted file mode 100644 index d0c40412a722a2ea74b7cc1c291cb1cf6d22b43d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 58 zcmZ>Y$}lu^j8qGbyymmpjDbP?07D6b0z*LBEKb`X&PACsRgUW1G)PgJDA8BG>v4Yf OPNg{-9@%QKvl##>i4;Eo diff --git a/fixture/17/2/8 b/fixture/17/2/8 deleted file mode 100644 index 2764980cddf196d4467625dcf5805c2531a61738..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 55 zcmZ>Y$}lu^j8qGbOxwARn}I>#07C+U0z-g<%2&zBDyPyM+dNK8^x33jsASMtSgsP> L%E^{J>%j&9-sBR% diff --git a/fixture/17/2/9 b/fixture/17/2/9 deleted file mode 100644 index 41e0d44883121966d7e00bfff1d3f09506528b30..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 55 zcmZ>Y$}lu^j8qGb)IGNK5d(w90frO?1%`k$mU$~YIX%l`4J3jk)s!x}95A}HC-$Vs LLnnvrQr9~G>~|BE diff --git a/fixture/17/3/.zarray b/fixture/17/3/.zarray deleted file mode 100644 index a11bf5fbed..0000000000 --- a/fixture/17/3/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 0 - }, - "dtype": "|b1", - "fill_value": 0, - "filters": null, - "order": "F", - "shape": [ - 5555 - ], - "zarr_format": 2 -} \ No newline at end of file diff --git a/fixture/17/3/.zattrs b/fixture/17/3/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/17/3/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/17/3/0 b/fixture/17/3/0 deleted file mode 100644 index 8a85f7aeef41a2863c4b79b9f4050690dcf1d293..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmXwxfe`>82m+;98=*;>IG$7wLF8Pz+EoRPA$NwXB25wO{{~GG2OldooSH;4PrDw{ CWC8~O diff --git a/fixture/17/3/1 b/fixture/17/3/1 deleted file mode 100644 index 4e7bf7a54ef8ba9a46c430bc5bfee307a58b9f17..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmYLBhYbKA3<7WU1TXPo85-NrhA;*82m+y58=*;>I3Dv(1p$|@w0S*8l2m-m@+7o(7FAmf#C#b+&T(+d(7(mhIoavsbJ#L4SH@YH5pOBpijaJ{` E0n`5i4FCWD diff --git a/fixture/17/3/2 b/fixture/17/3/2 deleted file mode 100644 index 916ad2d7a0814aaad4794fc00050f324b92c9f0b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmYLBhYbKA3<7WU1TXPo8BOd;3x&nY4ZwZWRDzl=tv!PVEBVnf`g4pl%e=%Sc2?RD Ec+tB800000 diff --git a/fixture/17/3/20 b/fixture/17/3/20 deleted file mode 100644 index b0e3dfd9ba02443c99dff08bab4aabbb1c32cb20..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmXYofe`>82m+y59id5@IG)nOlSh}2M-tkRBbj1n);`{yv=sMGOJaB+=062Umvd4{ E7t#{~2LJ#7 diff --git a/fixture/17/3/21 b/fixture/17/3/21 deleted file mode 100644 index e7d7273ea1caf88dd0e49f3545d0e22d54a66d61..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmZQ#oWz*Iz`y{)B|yx`2qZuN$N)kR0Tu&OAOR472?C`c3c(U!9!LdPBUA{i86<#Y F3;@zW0tNs8 diff --git a/fixture/17/3/22 b/fixture/17/3/22 deleted file mode 100644 index 7bdfe6b1bb63084edf15d7fb332fc0b3b803818a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 116 zcmYL>feip448p)$J;6)7Sg46^VOe0^@b;8ctw^RbBrzAyM! 
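[Note: the .zarray above fully describes the deleted fixture array. A minimal sketch of how an equivalent array could be regenerated with zarr 2.x and numcodecs follows; the output path is illustrative and not part of the patch:

    import zarr
    from numcodecs import Blosc

    # fixture/17/3/.zarray: 1-D boolean array of length 5555, chunked by 100,
    # Fortran order, Blosc/zstd level 1 with shuffle disabled.
    compressor = Blosc(cname="zstd", clevel=1, shuffle=Blosc.NOSHUFFLE)
    z = zarr.open(
        "fixture/17/3",  # illustrative output path
        mode="w",
        shape=(5555,),
        chunks=(100,),
        dtype="|b1",
        order="F",
        fill_value=0,
        compressor=compressor,
    )
    z[:] = False  # writing the array materialises every chunk file

Writing 5555 elements in chunks of 100 produces ceil(5555/100) = 56 chunk files (0 through 55), which is consistent with the chunk-file deletions in this patch.]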
[GIT binary patches omitted: deletions of the chunk files under fixture/17/3, fixture/17/4, fixture/17/5, and fixture/17/6; the base85 payloads were flattened during extraction and are not recoverable]
[GIT binary patches omitted: deletions of the remaining fixture/17/6 chunk files]
diff --git a/fixture/18/.zattrs b/fixture/18/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/18/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/fixture/18/0/.zarray b/fixture/18/0/.zarray
deleted file mode 100644
index 679fb143d6..0000000000
--- a/fixture/18/0/.zarray
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-    "chunks": [
-        100,
-        3
-    ],
-    "compressor": null,
[the remaining 10 deleted lines of fixture/18/0/.zarray, "dtype" through "zarr_format", were merged into the following binary patch data during extraction and are not recoverable]
[GIT binary patches omitted: deletions of the fixture/18/0 chunk files (0.0 onward)]
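[Note: "compressor": null in fixture/18/0/.zarray means each chunk file stores the raw bytes of its (100, 3) block, so the 1200-byte chunk files ("literal 1200" above) imply a 4-byte itemsize, since 100 * 3 * 4 = 1200. A sketch under that assumption; the dtype "<f4", the shape, and the path are guesses, because those lines of the .zarray were lost above:

    import numpy as np
    import zarr

    # Uncompressed 2-D fixture: each chunk's raw C-order bytes go straight to disk.
    z = zarr.open(
        "fixture/18/0",    # illustrative output path
        mode="w",
        shape=(1700, 12),  # assumed; gives a 17 x 4 chunk grid (files 0.0 .. 16.3)
        chunks=(100, 3),
        dtype="<f4",       # assumed 4-byte dtype; the real value is unrecoverable
        compressor=None,
    )
    z[:] = np.zeros(z.shape, dtype=z.dtype)

Each full chunk then serialises to exactly 100 * 3 * 4 = 1200 bytes, matching the chunk-file sizes in the binary patches above.]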
z4Q*sgTiM!9cD9Q#_OiFJ4sx(@j&igUo#bR^Iomldbdif)=_*&d$<1zYm%H6#iibVo zY0r4p%Uf(?mOT6!F0d-!yr5eG8keuvzx>G7O4WFB$Oh;v3Z*<_rNO&lSPkd<+c zY_f^>$NL8Fd6gh|K`QTf&j-HqgP)|U8U*RdKz4GFlTh*#Mlp(0g7Q?LA~mQy%@+K1~ZD$jA1fUh+#H!m`fasi6?vw)>6V>xSB%R08QjqU7XKLgCHZB$Vo0j2%`W6DM3j}QISei zrWUoSLt~l{K@_csrX!u`OfP!VhrtYCC}SAQIAWN}H0CmoSmIg2QW9Cs8aA_qt?Xeh O`$*y_$2iM5&hsC_R?6xC diff --git a/fixture/18/0/12.2 b/fixture/18/0/12.2 deleted file mode 100644 index 4cc857548c33a7116f5e30440419934221b4828b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007XRaYmdoGS0{*;~a6$kxe$)WRp!Cj?5$T$R^{AY_iEFo2-x(;>b8h zoHOFg_v*cg2!gjJo9buNO!t?+%~T}_G8}?+hJH(;JI>xb1c8XJ-?HuR2*d;D?jcZMGi(B31UiZ1*V;=W}=e^)X zZ+O#NKJcNBO!9>JNXKZicEskkL%$FsGsBw}1sLZV5|T!HQP0x;2cl zuJx>MGn?DO_I9wNJ?v>O2Rg{Xj&iiIPIQu!o#`xRyU;~0c9pAL<0dz|#og|4uSY!U zG0%F=^Ir41H@xS4ANb7YCYkIzQ%v)VU;XVL{~A&)2(p;fFms#7a0^?+(w4ET5mqtM zXlof`LmS!HR<^c{o$X>*``Fih4t1Er9qTyboa!{=o$EXkTIkLydK`W1QqmCb-DOE^)PMOmwqb z-0B|py3ZpX^_XWo>p7FW>NW3r&-*_0na_ReJKy`&Z+`c${|u@JL8zI{VlH!=$3hmi zh!K{yjFqioRimt9U7Og{X11}d?d)ndyW7wH4lu^yjxf$~jyK+EPIs>JobNK1yTWy@ zcZ1vA;ZFB^z=Iz5geN`k1uuHt8{YJR4}IhdU;4@qe)N++{OK>#)d+(0W-zPS%x+%u znct!ov$$m~XL+kz!$|8{-v&0fh0(URgB|T*PkTAgK@N7Lql|Te6P@G?XFAITE_9JA zUFB*wy2;J%a<_Xtts$cwSnt%LjNMsOXGSo10 zn#*ttTF8=?GQx^hva+?TZIq2{Y!h4A+BSB!i(TzwU;8=KVa7PdvBo*Ysm7b&9Ot^k Pr7knkwXSok+uZJd9*EJ@ diff --git a/fixture/18/0/13.1 b/fixture/18/0/13.1 deleted file mode 100644 index e77a9a119c49cab56d4660c145552cae2fb97bdb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO4b&tpa007V*#yK(0iHQ^AoR~bF(@dOWnrWt)W}0zw;v5s_7>8qOm>Q<0nP!@4 zCeAVW{&?TuU921gm$=L=ZgYpnJmD#Cc*{G!@{Lsf@RxstLGK{f|V?5KC&J5z1&jOaSf_T=ko|iJRIlw`VbApqc=K>eG!A)-QkVib`6|Z^2XTI>2U;O3|8LI|CCPK(TPQu7f z0SZ!_5=2st@>HNYHK<7g8q$cCw4yZ~=tw7e(u>{ELhd9g;PH~zuT;>W_xx-!V@swvg=N<3)Kq}u!;~zma*9avGSqUdM zd5EAeMTnvlrKv<^qNze7U!G$V$##L|VXbfYi*=+7{QGlKCCWXJnI2<`GATBV@&yan8sl zn{2{+^-?2(;HuY5^RDSW@u|;z>pS23&F}s&s1XDi%xDgCn#+O~valsBWvCUcWS9}w zFw%O~w}H)VVM{yM(N0F&(-`CI?*NB6+!2m%isPnb7T-?F{^pZYd%9PYB9@L)^dhh z#i~YG%i1=wu}y4k8{68&u68rl-u5xxK@N75qaEX9r#RIF=Q!8JE^(=ATPA?{y4JI)&1`Nv+uOnJ_AuJM_A|~Q4t1Dg9p`wbIo%n~bG{2)=5klK-VJVa zhdbTn0S}t&aZi}yInR5+YhL$;>E83c&wTC+-}}Lj{_v;2%or5}napf1bDPJ)7BR$7 zOIyY;D;sX4HI1@?4Q*sgTiM!9cD9Q#_A=H14m92oj&zh0o#bR^IokviUF2d{x!N^u Mc8gox<6e{e4|gBWkggjf86+7b86@2x$)HOH=`!e& zK?Z{i1{ow7BpD9<0pj!kt;3nLLyKxE+;t`y|Q+N(9;WfO8cW?n8;eWcCMQZ>6 diff --git a/fixture/18/0/14.0 b/fixture/18/0/14.0 deleted file mode 100644 index 83d345b61ed5ae53afb113eafdc6a9ce045423fa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007YMjLajOj5D&yIA@=cd1RAKoS9ALk$Gg372*h4*<=%EW)tTe*<_p} z&Kcpodix`S;D84`;Ym*!??o?p(_7v)!ABh!Fw*9>u%+$oU`Kn{(_Th9&_RxLl%t*GWT!aG+0Jp1i(TSs z*SOX=H@n4M?skudJ>pT%c-C`X@v7In>pkzAyKm%sgEhT1`p(M*P# z(_9v`kcBO2DN9?)%2qMLTGqCajcsC-t!-mxyV%t}_O+iw9O^K~I?nONIL+zCI?wqo zbGa*A=Xy7|)opHfulwBZF^_w~^IkCC>)!CD4}55X&wTC+-}}Lje)GGj{`H?hogm0; z7PFe$Jm$5CMJ;9-%UaH=R_Z7c*)CN@wRuoYod=$@|CZBW3r!3G0mU;GNf)0 zq&I`v&0(ndEnq>zEMZA2SkX#`Thj<5ZD=D~+R7+9+R4uLvbTL4$!R7|OfyancaCYs$%zx=n^13&?>3tvg&@`X>!uNjgqd)xVFGC`NAcGmrVNP>d z&_ae<(o&W-%t}_amf=R&z=k$5%9ggWqn(U4#-8?apo5Hcq@x_|Bquw?+0JpUaV|05 z)vj@^32t_ayWH&_4|&)lp7xAqP4==^O!2mNO!cu(eC2E3_}MRh^^br3XXe^Lkj1R# zF|YY7Vo{4()^e7&s@1G+q;;%o6Pw!1Hnz2$UF>Q%``Fih4soc%9P2p8JI(3NaJ~y% z=rWhP!ga29gInF^cK5o^{T}t0$35qHFL>2!UiYr|yzf(=ndV#H`QC4S_lM~sgCM;j zW;44v%x?h;THF$rw1O24vxYUTWj*WLz~;6v%Jz1!quuRcjQt(pK!-cRkxp=;lbq=+ 
zXS>M7#<|i}u6CoFOmK%g-DRQ&J>*GGdD8)P7kk;;KE^uOA&z#8W1Zqu Or#aVo&NtqrF7rRNMApv$ diff --git a/fixture/18/0/14.2 b/fixture/18/0/14.2 deleted file mode 100644 index 3af7cfc58c28bb2fd844877ffa53f4364ffc3677..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO5cZ&li1~!twc6P9j{T$#pCpgIkE^>+M+~6k3Jm4YEdBIEG^MQ|~@q?cPVL_0AjASPV zIVnIv3Q>|$l%^t;s7x(tQ-_8$qA@LLMQftyNGE#Ii{8XAkU@-KB%_$fBqlS9+00=v zOIS)AYgo%BHnWAD>|!?uImBU3ahfw+<_d}2;x>19#ABZDir1v_iO+oD7r*&KruspU znGkZ3n>-Yz2t_GFS;|p`s#K#c^$4R0O^KilZHc5aUFb?5`qGcV3?Y`$jA1NOn94Nf zGLQKzV>v5W$9m$~$~F?%!(R4rlw%y{9Ot<}64$uSJ?@iC3eR}XTi)@WZ+s_>bp8>9 zJ4rUOlaKrqpg1KcNhlSlNDXRIi*Opykmj_YCGF`z6y51TPofz>48s`C2*xvkiOgUo OvslO?7PE@g#PJ{NqSjXc diff --git a/fixture/18/0/14.3 b/fixture/18/0/14.3 deleted file mode 100644 index e2147f50919a8e0aa7aa06e89c2cc963c03d1198..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&Jt)Kh0LI~y;gSrOWRP@&B!eU$N4nvvEBc9r)PSH-*IQCv~zro@9{G(;xGJ%E4oVep=w-@n{XTM z#YsGfNAWnG#`Aayui-7ci}&#%KE;>#7C+)w{DFUPd3VsN3fJN&ZowTmjtB5CPUA^D zix=?<-oV><4i@em%v6L5#EZ(AXv#NHn5RR>|__aImBU(aGEomC5bCs01kd~>h z3}cCBGEOFP=rjqdcIAN?7? za7Hka1jaLgX-sDZ^O(;9ma&`_tYbYJ*v<}ia)5&z;v}ay%_T0A#4T=fhle~Om6yEY zHJ|uQ8bA5PZ!*;jg3M$g7rDtpAqo>lDN0j@N>ru_wWv)U>eGORw4f!e=s-t0(UV^E zW*~zY%qT`PhKWpKGBcUQY!J+QAE?2CNw3MHngP+UFk*~ Ped)(ghB2IY#*x5(flb@H diff --git a/fixture/18/0/15.1 b/fixture/18/0/15.1 deleted file mode 100644 index 0bd6ea6b7960de6c1ac23ada4f0ba61652fb70c2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W$Ws^;ubHq6#n>c4=6Ne-7$T%aLIA>&qI6_v&IpUm=O*YwNlT9|+ z#No(zuimP9L9p5+Q`~5(JKX6m4|>SMp7OM3yzCXPddIuo^Qq5#?mOT6!8E`7!+!>m zL6E`h<}jxPENCG^ENQ6WmN&v`R=0+c);G##Hn)Y*wzq@b>~0VH+21&aIouJ(JKhAR zIo%m1I?wqob(zaecCG8&>=w7W$Gz_JsK-3+InR5+>)!CD4}9n&U;4_|e)6+lO!t?+ z%~U@KGMmNR<}t5DENU^sENvMpTFJ`RvbJ?>Xd@fj%GS0q#!hxN)?W5@po1LjC`UWS ziB58|Go9sZ7r4+xu5hKRT<-=`+~#(7xZeXF^n@opAm2+uF`(yV%uk_OY-19O6)i zIo5H;JH@F^bB=RObg@fZYLaVAHq}jTc9*-|<6)0@)H9y-oL9Z(b?qG>|jS@>|sx1jdOql N9pOkvncxH``X4ts+d}{V diff --git a/fixture/18/0/15.2 b/fixture/18/0/15.2 deleted file mode 100644 index abbe3f082332b4e14ccf660eb01e09747abf4293..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007XBb7YfE=8;YIx^qUHbHth1#F^P-6GzAjal|=CoHMe?CY#J7n{2Yl zChyfttRDnxO)|yJrn<}B?(wikJn9+Gdd{m}^SXDv=Y5~~+!wz0gCG6jPk))dK@emx z#B63ahxsjFK}%TDQdY2{m8@wkYa3xBBaO19(Z<@|4tBSPJ&kjK10CT=M>)|+PBz|I z&UT@TTx^0XU1hTC-QZTYx!ry4_khPe;Ylxe(MzUz(_23Dk&k`lYv1_UFMjopfBk1@ z!yw3HW^pS23&F}s& z2oHjErZ`z3%g<$2{&i&wIh^-Z0JkKJcM0eCaDc`pM7! 
z^0$8si3oy>hML2i=CYuLENm%D8)hXdTgBSev96Iewu#ZUGR6*ew39vUWp4*M$ia?s Lv}2s?6sP(h-6-12 diff --git a/fixture/18/0/15.3 b/fixture/18/0/15.3 deleted file mode 100644 index a4dacaf65406b7278d220dccd06a84dc56397efe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&Jt)Kh0LI}f=`u){L6YIRWRPT#I3HayNS8s9LDyxFWRPT#bb|~A84NO9l0laY zmt>G+kYtc#kYtc#&?SR(vEBc9r)PWXx6>IaEr*Zr8NR}I_zAz^0{+G2@zQOm3fJNo zZpQ66fd}yjPT?s$hnMgw-oV><4imsqrHLk~vxE057KOVwk zcmmJh1)RZacoS#wKF;G)e2H)IBYwr7_y?DD2dye`4UXa_+=hGc03OEUcoNUzMZAL7 u@fP022lyDD<7<46pYc2X!bKeJ3A#ma9d5uaxC8g$Bp$_SJdNk^GX4jkh)K5q diff --git a/fixture/18/0/16.0 b/fixture/18/0/16.0 deleted file mode 100644 index 9ccac5522ea3b86af078e8f6ab825003b70a035a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007XBb7YfoMx1kGlTF;6BMwJ4*<^*Rkd;lGGcwMIb4E7VWRrQsnb~BM zP3DpJ>P?9Xf~ih3!MP^7#HA*g>>5+t>=w7W$Gz_JsK-3+InR5+G_QHxd*1hf&wb%b zKlss4{`8l>%@`d7nG7+9In8B33mIm(C5^DW6|86tYg)@Fqm41nW;VC2?QCy1yW7M5 z4sf8u9pOmFJHd(0aHg}I?*bRP+!d~Lo$KA;c6Yeb10M8{Cq3n9Q@!M6Z+gqyKJu|o zeC-?G`o*t)Gu^-bGc+a$vY6F8<~5&1ENU?$Eo~VqTg9r@v99%uwULdDx23J@XeT?{ z%ii{Jkb@oKC`UWSNltc(vz=pti(KpySGn3`H@eBq?sB(#JnRvVdd9P!^NLqZ^Nx4D z=To2g+;_hBgWvt(PlJX*kim>*GrKv=ZvhKh+!BUc)^e7&n$@jgeH$2M6Pp@mYunh? zE_St>eeGv|hdRvRj&+>lo#u3BnCLv`o8&T=o8ns6xz%lMcc1$`;BilQ(hFWR)$88y zrVo7RBVYQ;*M9P|U;OPK(+!CYg3N}R%iQKM%)%Bi!csdj~v1T&pwtP5RaoGXoYgBwk7hdbTnK@XYeNl$s&OJ4Sh$=))>2R`(XFMZ`} zKl#}-fBD-#hBOL-jE0)SoaQpjf)=ubB`sw|D_PlE*0zojHnFLZwlT`icCo8{>}x-x z9qKU0IM#8-IK`>Xajx@R;!>Bn+BL3qvs>Ki9{0M>BOdjbXFcb6lf342Q@!I|pZL^g zzV)5&{pvTr`_G_p5TrA+SFaWw}mZjXL~!?-5&OI zfCC-ma7Q@O@lJ4})12-M=exjIm%G9^*SX#eZgaak-0uModfXG9^nw??^<-M zz~{d3r62t0Cx7_UU#5=;f((Y3&FtndzXc4ln8ht&c`I1a8rHOyjcja$t!!LVZf#Mi#@t!aMotAG6KKSP@YK_)Ys%iQL%kcADml%*|WWvf`#I@YzGO>Jg#qikzC NyV}j}_OrhO{0|J?;k5t& diff --git a/fixture/18/0/16.2 b/fixture/18/0/16.2 deleted file mode 100644 index 59b7b262db495d84b9e98146714217d800fc1bd5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007YM$T%a;%qGr^!;x{$=Ex?SY_dWeAuIF9CeF+zn`|=9kxk~2O`LON zoRRfjy;;#gFxxpUa=w7W$Gz_JsK-3+InSHmRj+y7d*1hf&wTC+lYMWB zU;Sp9fBk1rF9!`(TiU4rnkIpl8=1sD_{G@ zR6qL3AO7^0A@zeGy&24Ac5|4|{1&jdB`j%qD_GG;t6Rfp>sj9zo7=+ncCe#8>}f9t zI>^C}bd;l=;6x`m!>^0MmM?HUG8>|hdts^&v@2zUiOMtz2jZ)`NXF_ z^R4ep_OoC7>Tmz}*NhE2g$;w7p(^}THp^a>8OIz96PIk77 zz3pRPhd9(>j&ZE9PIih@o#R~NT;fufxyH4wbF16j?mqW>z~i3qqzPUy(d*vurVo5* zk}rJeD^vVns%d`rhe5+2NN0$lW;L66%xgZ2TFl~>wVdUxYBeK`vaZoKwV5%twVmzl MW_Nqo-vJKvKiRV2J^%m! 
diff --git a/fixture/18/0/16.3 b/fixture/18/0/16.3 deleted file mode 100644 index 03b1b419cec8d427222ca60e19c73cccbfa8fc0e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXdTGDtE=GDtG$x(t#G;+(p4-5^~zNV?&Y3^EvGFvuXupi2fx2I(^B zl0lL|l0lL|l0lL|l0lN;Y`^E5p7HBl>J7r>@h0BIMSP6U@io53&-fjGv$XQ;X{0iFYzsY#IN`h|Kf_i|86z70k_~b+=b&f zg)=ydC-5|$!;5$oZ{QugkB{&fzQT9-3BTblT*8&{AY3hO#I3j;cjE*e#KSm;C-DrP t$IEyPZ{Y$yz$f?u-{1%QfW^b4Hw*agI1=#5qSCAuD8MoRLkOnN2ovW;WSmlTGFk=ZwrF zn{2Y)tJk+-5cD(Ap$>DnNse>8)12-M=exj#CcDCwZg8WU-03cNd&t8c@w8_=>lLqh z%~bE0=3}4u)Yrc8tzZ1=H~;$2pivNnn$>LPHJ|wnvzXyVSk`h@wVIJeTgMn}V%@*wbDPaG--6=_p4#(Me8rmb0DXVwbqo)vj@^Tioh4_qxyh9`m>-JnscB zn&J&_de8elFx}_A@Vy`W=nsGT%aGV0$Y>^WnA2Pqw2*}@VM$9_!HQNg${NPI0Q!oa;R2yUgV#yUz7)aJxI)=>ZRV$djJ( zw3oc>6>oXlR3G}t$G-HHul?j_zxdlf{xx&sAjo2}Gd+m}q|oINTAAbi5Os=nQ8%%Y`m-u`6BWYB#yrE$()Ydp+V& zk9pQ}p7)y9O)<^8-t(!?O!uwteD62E`@NH6l2o7~m8eNAYSW0uG$E3f zMA4oObfh~y=t&Iy8Ne`xGlF==F`lVRV>k$AOnOGnvIg7O|L>tRjhxY+^IJ*v%debA+Ru z;VkF4$~BU?%^mLYgr_{?E$?{GSHAI`e+1#Kk%g>eBQN>LPf?0doN|<>0@bNOO&ZXU zMuZbVByDL&d%Dt%?)0S}F~l;IVZ9rp7NrXyzDJ+ zd&kEVPxi5V02S1wXcYm1fKZB?s$Z9sTo7a5ix0uB(VYuZiZ#Ao1!}>O`p;0!o zxzV<>ypZ(%bf0<^6W9g)C_)OIyK;ROw3Qv~XeWEv(-`|Zz=6g(!jZ-~!FZ=R-5JhzfeTG^g)80QMmM>`o$m6Whdk^l zPkYA8Uh%4Tyz4!aed;sc`p);J_{~)R_}6qpng>A^vzpsH=Cz1LEoPWy4Y!I_t!7>8 OS>FgFjk1kxjrKp2I_9td diff --git a/fixture/18/0/17.3 b/fixture/18/0/17.3 deleted file mode 100644 index 47ffdaddc590e409203752c0e5e13563d1c4f4b0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXc+=#oLY43Z4eWw<1RB!fH0pi2fB3^Lqs86+7b86+7b8KmomOETz^ z;gSrJ43Z3z43Z3z4ANzg*zSM6=^4M?>3FETdAy9*@iyMU1$>0h@D;wpPxuXg;a?n1 zlrM*BaRY9_ZMX|3a0-v&2|S5s@giQun>dH}@F70I7x)H0;1~RXzi~xh(5f2O;U?UQ zJ8%ykz{7Y9r|}e?!%KJ#Z{a-N$3=XKFYzsY#IN`h|KQ4G(5(j7<7OPgowyed;t@QK tGk69s;1#@qvv?OD;A4D_ukk&8#_za<|8P}*&@F-+aTK@XZk)tJ_#X|7O#}b{ diff --git a/fixture/18/0/18.0 b/fixture/18/0/18.0 deleted file mode 100644 index 4d8f594eafaa82112bfef95d315baa5191349ad1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007XRP3942W|Mhjg{;gYn~XE!%*-R3%p;r3Bb#ip$v8(Gj*N3;9&ygd zCgY6v>a}kk1Rd;X4|^J6qyrr22uC`~2~Kp9Go9sZ7rMyBu5z_&+~j7rxZ6GM^@vA3 z=2_1fYrI#y>TU0s=p!Hd#1vnd>PJ8M**d)vpp4t9t`jdF}*o#Ird8SPx>xzuGYH^z0YcbnVY;eHQz z&=a2YlyP40q6uF2hDqM@zR5oGxo>>yJHPtP@Ba0lLCYY>Vpg*mVqWuE#G)3ntmQ0k znANOqUF%ujrZ%&=ZEa_JyV>0y_Ori{4s*C89Orl^INce}biNB*=n7Z5$_;LGlRMqz zZV!3bBcAq*XT9WQIku7ayxE<|eXCv%oZwETa!H#mYQBHEQQ=IJ_qh0J0m%7Ha#<<0; zZga2u-0v}ud%{@H8|PK8nP8%KP4bCPO*YlnzVWkP{A!wi{A;H0AjoVMbDPHy3t8AA NmbQ##tzuQf{15nE>=Xb1 diff --git a/fixture/18/0/18.1 b/fixture/18/0/18.1 deleted file mode 100644 index bec5f457fab19d5750b9b4a2e68356a11f8a9eb9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO4cZ?UQvWdfyagMBzdA5u*vdJdS%sjHmCeF+z&KcQc6Ne+4IOoVF zo6Pse`v$LF;~;2HG~MV<5Bf8Jfy6L^kxXDBlbFFwX0eb(EGCXstY#CN*+M+KN#GEN zIl^hqaF#@_aFyHK;V#KMA%$1G<_(|u!dL$Amw$vr1wlqKk&|46QjkIvCX7;)rXrPy zpawOmMFSerh?caXH67_hXL`|_J`83ELmAB&#uCdErZR`Q%ws9bSk7A3v7T*gX9s)P z$9|4+oD-bq0vAc*Iybn_10IsfGoJH~_k7?xKln+SCP9#vbYvkb*~mj)@==sx6epar zlp~TVRHZg`s7n-0Xi6K}(vB{4r5k7 zVJq86U=Mpa!cmTKmUEovD%VKjF88=k3QtMp4R3kJSHAI`e+123BNLg)LMXY(Lt%zn`p diff --git a/fixture/18/0/18.2 b/fixture/18/0/18.2 deleted file mode 100644 index 2b3e9d18fd7ef44101e75ba3aa232bc8fb2ed0ef..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 
zcmWN=2Z%@k007XBBhHL-MmCv8Hd!GKM^@&Md1RcCO`J0_&XIY}?+hIoMc7IodJCIoWt;Iokvmy2!;QyV6x|bd#G+ahJP2S@n-$;)2xws*Yi6QBCbx4!ee-~8?mgJwaH&h&pu5; z%;TOg&2ygjn%BMIeINMH7rykBAN}NKfBD-#hBOa?jAk;2In8B$3s}$+mb8=+RN3}-sa`7Usw%Uxlz>)qf+cev9O z4|vc+p7fNdUhtxqyy-1(`^d*W@wIP!>leTJ&AVyi=TNf^$rCu}e&Hm8)IjX1BQ2 z-R^O(M?C5=&v@1}uXxpK-u0gMedcpt_}&kG^oKwFW%{Tf$Y6-s%x(_D&1Zg#S=(^b diff --git a/fixture/18/0/18.3 b/fixture/18/0/18.3 deleted file mode 100644 index 58cc26eacf43221fecc3cf7d00d8389e1b0a7d20..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXeyC4(**BpD>#a7hM91{sud%5WL3>oQ2!4U!C!43Z3z43}h(WY8tU zB^e|cBpDB+wOn9=~=%w-5)A%3{T)0ynt8mI^M5@E1Nho?#_`-HrZs8d1M}$N1StHlTDnNaYmeTWE1C% zY_bXO)vMbk2Dn;~ehAnIZES0}9qnXid)eDQ4t9t`9phNXIn`-ScdqlC?^2h! z+_kPV&dqLdt9#t*K973LM9+H8^Ir9u$=>#kcYWeh(|qk4-}=R`e)F&Y3?hOctJ%zM zKJ#0^VivcA}of=+t2#V_$(hbF+J!DM z#+9xz)_6CV;C6So(*qv#kS9FpDU-bDB~!d%s`q{1L(_fk3*Y;}kN)tdzYK{Cf{bP| Mr@72+sD&)-fB5t7i2wiq diff --git a/fixture/18/0/19.1 b/fixture/18/0/19.1 deleted file mode 100644 index e39f3df09f25e04dbcc4e9d687cb3aa6a62fc2aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W$lT9{RAuD9XIV0oTojW_5Y%-5*vdK6j&des8IA>&&O*YxYIY*o` zvd4S%LL!2oo}o6inPIlHm96b$XS>+jKK6BpLmlQA$2!ibPIJ2RoNuggE_1o-T<-?A zyThHvd%y&bd%}}m@S>Nz=`C;j$j3f0(bp!K;zv{c;ZJ{=E;0zxo5AenFsB79Xdz2l z%F1f$ip7-v}Zi)6|Z{DyWaD@&wTC+lYQ$uKl{b6{_(H>%+w+XGMmNR<}t5D zENU^!Sk`h@v8vUqV_idRY!jOrWwb5rU`IRI(_Z#=kb@oKC`UWS$xd;qbDZlum$=k8 z*SOYoZgrd6-RFMeJ?b%!d(QJ-@VYm==>s47$d|q{(f58Z#czK1hiRgMAg$@lYBsZ* z&-@m!xFsxUc`I1a>ejHPq1Ly7VKz71*0!;&UF>Q%``XX`4s*CMj&r;dobC)~8tVcV zy4)46bb}k+6>odTyFT%$&rI@-$)@_r&;Ihaf6Nda1R2d_ NPIH;tLKe1&{{f6=@ecq1 diff --git a/fixture/18/0/19.2 b/fixture/18/0/19.2 deleted file mode 100644 index 82ff5be59d2d8737ad7f0d72d4588cd4e773e9bc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO5cZ%rj8E0gibJ=7Q=NuVlWRr15Hkn7pIkL$n^N4efI5W;U zvdJd<{qa7+YZ4j+O$nh5Z3(3lo#{ev`p}mMh7idpMib2>CNqWE%pr!wEMX~YSj#%L zu$4G=vxmJL;V8#A%Q?=I$W@X^;Wl?j^#Am+nn?L*|OS>S*N;dM6kNgy) zI3*}g1u9aV8q}md4QNPnTF{a(+7nJUy3>RH3}7Hp3}ZNB8OM00F`XIAV?GO5&I(o% z%LXbe{5zx4h#$-}ufCg0LXSKt^(qlUx*} z5QQm4Y06NE%2c5iwW&iR8q{AZhJK4o<4snDNJP=G0bHi zOIgNp*0G*g;@Cz!d)dc+j&Yn5oaX`wByo*o?r@iTr16AwUh{^xeBmqK_{%?na3{%5 O4)Rlgf|Q^nrT7m3vhbGx diff --git a/fixture/18/0/19.3 b/fixture/18/0/19.3 deleted file mode 100644 index ee558956457c1d61f639035841a485062c7e69ba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXey4U%qv$U%@F70Mm-rSx;#d5Me{pyq zXcfWrxEaTAJMO}LIEgbjizo3cUc{?-1MlE{e1y;N6~4ny_zi#IKU|pzy4BzY+=5$i t2kyZMoWi4c98cqUyo~dB3-93~KEW6G20!2z{DFUP#bD5?8rR__Tmq;!P*DH? 
diff --git a/fixture/18/0/2.0 b/fixture/18/0/2.0 deleted file mode 100644 index 66f7a4b96080e782abf45eba06edf66c65840993..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W$o_R)`8Rs0CM>g4HoFntdI3t^EvdJdS8F4t`%xtnkR>+ETMx2>V zHreF8dMP16aNShXyytzN`P>)2_k$n(;ZJ{=K1&c}FrzulX{ccqu%O|VFv4<{w}REI zZVl_(z=pQ4rO~#xgB|T|5993b00%n4k&berlbq}}M=p!Hd%GbW}vtRt`AOHH#%vpmV#4P4Ek9jR@5sMmWDN7q= zB`aIY+SakLO>An6t!-nho$X>Td)vps4sobs9P2ozI?d_MbG{2)>N1y`fP z?e1`=2R!H@Pk7Q(UhtxqO!bC0z3&4b`ofpK@}r;p>@R=&$Bfy7Ad{I5HJ7<9Xdw$5 zVM!ycU`3;>VNGk<&_*^k+E&Kc(N4x1XHR=M&_ND%l%pNvWT!aQInH&S2`+J|t6gJ~ No80Ucce}^E{s)eVo!bBa diff --git a/fixture/18/0/2.1 b/fixture/18/0/2.1 deleted file mode 100644 index 4a324364ef0d8a420407134a8102ce7ec5503b58..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W0Ben(Pywn(AxQ{N!i9_}f3Gn>jQHvKV4cbD7&h7Pg3`En~QqtZWq{ zt!*6}+r*|u+uAmEva_-FvbTL4>=5G|?HI>8#i>qnuJfGlGMBr;1lPIVM7O!!B=@=B zV;=W}=REHPuX)`Y-uHnIP4T%eeCs>k`_1qE@Sj0g5M(gStY$N>`OI%Ii(A47%URxP zR=0-rtZxIOY;FrSMp7fNbz33$`d(&Iq_K}ZG_NA{(^@AV%k^l`%r&kx!7Xkz z(LL@p$s->1m}fleIj?xtYu@#q_kHR!Q%v)XZ~fv|znSh|{~3}k2to}rw|UHK5sO;P zaLXED6{}j!I@YzGO>JhBZES0dv39Ymee7#L;~eTR$2!jOPIJ05obLh`y26#lyWS0M Nbh|s;>3$D*(EnAEpJ4z1 diff --git a/fixture/18/0/2.2 b/fixture/18/0/2.2 deleted file mode 100644 index 4a31b93890220c2faf539c19e557522ee509ef59..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W$ggYb78QElyUI#u@K)XE@≦PXvCc4h`Zg891O)<^A?(?X} zJnmV~dETpD^SbxE?*pIt+!wz0gCG6jPk)&)BnUE@*&OCHm-#JVK}%TDQbt(?d3oRIoMH-c8rsp>=b7?+c_?FiAzm#m8(s5lbcO-r@K7h zK@WMtlb-T|7ro>SZ+gpzJ~G{xzVfx7{OlKh`^Ud#2@Qg*W;3^W46~4hEn;cQSk_8b zwu-f^Z5pSMd&aX~_KH`%<6ZCh#HT*B!eV_q#Gm|*zSM6=^4K1eW(hR_kut0H?F8I-$PN{fMd7~ci~<NT%>&-*^`na_RUdq4QmAO7^0>9Ykv1~VFFHnW@G z0v5D{B`swID_Y6w*081xY-l4}+RD~;w3D6fX)k*l>p%xN!jX=0qLZBLEN45%#V&EF ziLP?B8{On)ce=~n9`uk&p7fNbz33%Vyx~o6`Orr`_NA|U?I%C`#ozw%ubHw3L1shD zX)beH$ifz}v}FvpvQ><*mbI;8W1HC2Hnug&E_St>ee7$DgB{{fM?1!`PI0Q!oa;R2 zyUgXryT-MybBkNu<{tOD&m$i7m}fm_vRAxns&~BWJ=1*ZGvE5o_kQ!cKMZmNK|0f$ z)lkFCYd-T^%;J`?oaL=xq}8l$J?q=R=C-h#F=r< z$R?Y3f4pz-j)n%oF^+SN^IYIMH@HbE_jy1X&q?PU@A<%Ye(;kFIf5V~A%v2HF!GU~ z0u-kNC8m zS;a;+v6-FhVmHYg#$>F;95OD_--7&wSxGfA~x0upr1nR>H|e zZVFMDB9x{KWvN0{s!@l!)T1#?Xi96^(3UQAr5kaI@IOoVXN9K`DHrZs8dBiy*j*u1N$R?X?GLMWi zvdQMXdPlPb!7-0}&huXIx;MP(10VXx7rr#j4}SEMKmFxzGh`2fjAk;_oaVBig$%Q# zr3|-%6|H2XHLYbs8`;<>TN!N!I~r?ud)U+d4sf6&9O)=0I?2h-a<+3^>=Kun=xUQp zc9Wak=`MGh;z192(o>%Hl9#>WZSQ#3RG;|N*S_(sU;OGf|N74$M-YUV#jNHrulX!u zQHvR28OvJ5s#dd(b**Ppo7vnL+t}7PJKM#0d)vps4sobs9P2ozI?d_MbG{2qaJeg7 z>pIuF#jS30k9*zcVUKv!GoJOFSH0$S?|I(`KJ&RReCK;V_}w4=G+k&Aq&I`v&0(nd zEnq>5Tf&l-v%D3oZVe-?Zvz|J!j?wa&h~b&tKIBwU;EkLVGehMa&teuRrr@f4Kpo1LjC`UWS$xd;qbDZlu Nm%7XZlU(Ck{{u55q+9?1 diff --git a/fixture/18/0/3.3 b/fixture/18/0/3.3 deleted file mode 100644 index 9e338b52ce761d6b617ee271e7f0249cbb965cd2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXc+=+ZftWY8spB!eV_q#JZyH%OO3l0mu*k_?gzk_?gzk_?w*&6yOaJ&%OSyT(QQgx~NN{>9*#JBhXzu*sC!WGe=RRq`LCftU*Z~~|CARfgNcm^-v z6}*9W@IF4o$M_6i;XC|@U-2jY!Id>Zw-|1~&A1(R<38MvhwvDl#Itx2ui{Od!w2{X tpWq98gYWS(e#hVV4_C#4Zgsd3x8hFRi&Hp*M{pKT<9WP{*YP&q!~d$5Hs}BV diff --git a/fixture/18/0/4.0 b/fixture/18/0/4.0 deleted file mode 100644 index 6b276724e189d22804c596a2adce981a2048cc84..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 
zcmWN=2Z%@k007XRd1RbBaX7NcCgY6EBb#jE%xvPE5oczFI6_vOnN6ITP3DnJ<`HLR z6NlrydWpG%V7(jM?hbdFcSIM?Ut6slM{HpZx3>)BNopGld30 zX0w>f+=d!q0Sj8(5|*@r6|H1VYZ+xj8`;>Fwz9RI>}(ff>}?+hJH(;JI>tETo$M56 zJIA>uxY#AGah-R)sN`#Zqlj&P*o9Pb3D zIo%n~bG{2)>N1zR)^#Sj)opHfpZiVnn8!Wgc`ta;>)!CD4}9n&U-;5gKlss4{`8k= zW(W&{jAk;2In8B0^BZANi&@-qmbZe{tzk{;+rWl4w}mb3U`IRI(_Y3n&_ND%l%tJx zqLYkwrn8*wLKm6f3Rk+y4Q_OkJKg1O4|>SMp7OM3yzCW|z3mZkF&M$ZCk8<}u8I7BbS3ma?>!tZWsdtZf||+r(&F+s3wbv8&zeV_*9@)L{-c M&asYjic_8De+!zazyJUM diff --git a/fixture/18/0/4.1 b/fixture/18/0/4.1 deleted file mode 100644 index 123f25dd0e38b702eb3b3bf5de25d4a0df9eadac..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO4cZEsWRp!+$O>86WRr1@Y%xkeHx+#!|6JmD#Cc*{G|_{ul_@RxsN z%@YLK$WAC>gj1Lz6r~JhDMwYR5kVd55=9J+X+kWmiK7#p=|XS%(3imsVJKr5%Q&Vo zjp@u|J_}gJa#pa8^=x1}JJ`tq4swV@PH>V7T;vkT+~6knxX%Ng^MaRr;3J<%=LbK@ z5E=v-$wUY_$wfZ$Q-I=>pd=NjNF^evK~18mPXn6KoEEgD9qs8xcX|*{e+Dp|5sYL4 z6Pd(JW-*(EEMhS$S;cBLvWd;?VmEs@%n^=qiqo9oGFQ0DEpC&-Lmu&%SG?v8pZP)> zzxd4`GKU307P69?JcLq^LKLPHr71&Ys!)|$)TRy%X+#VyX+{BygNW&T*a#BypW&Qn|}Lp7M<6yyHC| z_{MkA`9~1$8rjJ~2;t-cSM2R`(LFMZ`lKl#~T{`QX<^8`UA zGn>;~=C+`PENm%DTgFOOwu-f_V_lor)MiH5)^>Kao867FulNt}xzpu6Kjm-Qi9XJYb^7Jnjk4dEN_N^SU=o^Pcy8=5t^8-Vc8Chd=#g z`g}o*O_ MPIij3o#R~p0}`L7(EtDd diff --git a/fixture/18/0/4.3 b/fixture/18/0/4.3 deleted file mode 100644 index fbcc259a98179bbdad0bdf42561f9436a0ce808e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXd{E`xNAq|0zg21y1<2I;!tl5UV>(4`wB86+7b87|2ngF%KHWY8tU zB^e|cBpDDj(|3z1NHhxim<;#>TPU-1Y2!Iib;t56MY#Lc)J zcjJCMfQNAgPvTj;h*$6i-p0H503YLXe2wq%Gk(Wk_zzb_gKkmWgj;bZ?!`%*#-lik zr|~>q#%p*J=kXpc;uCy`O@ED%JGk5{#@H*bY tJ9r--;WK=N@9-0T!=LyUhwFoG5gfxUxC8g#1RlgAcpOjRIlP2d@jru#I1c~- diff --git a/fixture/18/0/5.0 b/fixture/18/0/5.0 deleted file mode 100644 index e1c15cfabbc7478ef779cf9b06c9991d3f3f037b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007YM9NEO(8QEl&& zP395at2aAe5X^C|OI+$Q6I|;$x4O;kCc4l4CV9-`p7XpHO!m4rO!b~=KJ}T;edl{W z_}w4=GzbfV42GD^?B+1ca3d^gF^gNy@(u5-N`-0lu{y59pH^tdNH=>;!($s67@#We5x zz~{d3r62t0Cx80ObVKq7K}JK(VNP=yVSWo(+!B^F(h5cyV>PQ=&-ym7xh;&fy&ddm McYD~=cn3Jp|01QWH2?qr diff --git a/fixture/18/0/5.1 b/fixture/18/0/5.1 deleted file mode 100644 index ccbcab4c7aea25a232868a40744efe96f0f4f72f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=1&l}m007X>IWbOiIHs9A#>sJVnsH7{GjU>Sm>R}0c}$#`X5z#&(@ZnXI2_YV zoS2UHwl_Oh5X^C|i(O)(t6k$-x46|L_qf-69`%^VJ?D8Zc-JNYV%aGhbkj_xU%xX5n&1Zg#TFl~>v%D3IvYOS6wx0EEW^-E@V|!!mW_Nqo-vP!s z+!2m+yc3*gywjawg7aPAGMBr;^=@#Z+uh+#4|vc+p7fNbz2s%Dc-uSPHO0q1@wIP! 
z>u10C)j$6ApBeH5K}Iu~!<^0@(hBmUXt!!-@JKD+4_OiEq z9OPh!ILgtEagvjr;!I~b+l4N2u`6BWYB#yrE$(u+dpztBk9x+lp7W~LyzV{ko9t7c z`P_HD_k-X3?hn)C4T7|WnAt3bna8|_Ti7BNwTxvgXBDd&WgY7pZ4;Z?%(k{O#x8cX zn|n9c0wFuw(iu(%~GX$319X?1H@)A}~Bp)G7_D`V|o OM|;@QUdB1lLH-B!yRN$c diff --git a/fixture/18/0/5.2 b/fixture/18/0/5.2 deleted file mode 100644 index 4132388d7ad890291dfd6ab75721c9f3d9fd62ae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO5cZi(KL+x46wC9+S!|Uh{^} zeBmp9_)8j@!h;|)SqLFFp%kPLg^8dPrKw0IDpQNv)S)p=Xi5}qXiI0h(3L*)C59mk zWf)@^%Q&Vmm1)E=hq)|f2}?<2HET#_6Pww|E_QQ>!yMr>XE@6hu5yjL+~Yn^dB$_z z@tzNS=LbIt@&rLT(vyws!BXiXFy=|pFG z(VIRDVlYD(#c0MbiOEc17PE!6(zQ*_X8NcH%{D*7mD|bRs zoWQNP6ZhgI9>SwIi>L8CUc!02fp_peKEkK?65rxS{E9#D53Y^{t?FiD&TwUdF3<6BqCSF5xqLh41hae#4*m7l#{yZV}vwTW|;N!2@^@kKhSBh3D`h tUcu{l8}H#me1b3V4Sv8cxQxGXRXk`_i(|M6x8W|_hf_F%$8ZkM;D273IyC?Q diff --git a/fixture/18/0/6.0 b/fixture/18/0/6.0 deleted file mode 100644 index 514c797570040bf6735adf444c7c770fe1e59f64..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO5b&tpa007V*(~QHZ!!gc@X{H(H#57aG)G#%1V)B?g#>t5j6DOt_C#RWanu&9a zlhce7?~nHp-iQK0Fp?N1Fp){jU?#JOX8{XY!AcU?z(x|;!A^FO#6b>ml2e@K5|_Ec zZSHWFCp;yYR9^Fj&wSx4Y5d_YnFvbViczgWhqBhs!^SK)TaSW zX-0F}(Vh-;qdPt5M}Gz|jNy!69OIe5G^R6ydCVuCWh`d}>sZeQwy~WZ?B@VU9Oncl zxxhs(ag$rz<`Iv1!b@I}%11u&nP2=SjSOKykdaK}Bo`qRq!3}0pd=BLrveqJK}~AW zkVZt&f|j(RBc13>PkPatfed0WF^pm~lbB2_vzSdB3t7Zs5?IA*64}INcCnj19O5uX zIL#T(a)qm0<1Y8OPcqL);SFzj$5+1boxl7eDC{Iz$wnUX5=s$@5>6zgDMMwdP?b8= Pr5@2Vp($->OFRAp$ZE5v diff --git a/fixture/18/0/6.1 b/fixture/18/0/6.1 deleted file mode 100644 index 3fe4d29f3701915d317c5374cee40017bcfe3151..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W`M#dSLcjg&!X6BK3WRr1593d-YWfNz{;m9VNI5YFeCY#J7n>aI@ zj5FfAS8rrk5R7uPaZYlw@y>F#3ti-5SGn3XCc4=z?skuRJ?s&Wdd9P!^Qzap?p^PB z-xQzu+;_hBgWvsOs%Zw{K@egVvl?n1^P0~h7B#|1%UISbR<)XStZO}+*wkjWwVg4> z+SP9Mv9J9c;!uY<)^Uz^s?(fqf^(hcQkS{hb*^`V+uZIBlicqCk9)$CUhtxqO!lU? 
zeCQ(|`_fmw_LHCe;xB*u$Mgk)AcGmrZVq!AX1E0`ZV5|T-U?PU${N-*+6FeXg)MDm z2Rquy?)I>!{T<*yhdaWNPH>`e&Tyvj&Ub+eUExYsxzSA~y3<|m_MnG6>?u!s#>-yu zs<*x4U7z^W6yNyPcYgJo-~H=9)67&b2r`?+T;?{9g)D3lOIg}TD_Pkp*0Q#BY-D4b z*xEL>wX)hg2x4GASCVA9j9`~H* zz2J3knCyKY_|WIR@TDL8=qFSC=`TYH1wlH~8)`PQo6r1)8DTMtTh?-xx0=?7O&w9nHUh}T^ zyzeug`@;8r@S{Kc=`S;e1wke=o6}r|TF^phvTc+nf) z^p+2OD3^x;D0nO^vga zt&O*no$X;ydpW>?4swJe9pywPIoVmxc8-f&>=IYI#w0hp#bkH8#}p5F*fdXi%F|x* zvRAzA9q;o})6&FLmO&-pHMxhq`fdN;Vu?e1`&`%N|7qaO3D z=REH#Ajxn^1{n;x!61VqgCv7486+8`%OL3n z=`u(%NHR#eL6Sj|LAnf**zSM6=^4M?xgb*B63*gHyo(R;F+Rgr_zpkfSNw^8aAjTj zW~2tkaU*WUowyed;9)$DGk69s;1#@%xA7j%;}d*=Z*UPm<9Gap|8P}2>{g4DxEZ(O zZk)nHcoa|IDLji8@haZHJ9r--;ZuBxZ}9_u!5{b=SJa2Cs&RlDa0~9hJ-8pI@fe=O t(|8^)<2Ag6bNCP!@HxK5_xK6F;S&DE(L~rSh7-66x8W|_hX?Tp{s*l5JX-(& diff --git a/fixture/18/0/7.0 b/fixture/18/0/7.0 deleted file mode 100644 index efbf0101af9a7385bca6332003c05f2154996e6e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W$%RJ(2&des8tdJGrh%@7yk$Gg3O~yGg&XG-=bHq6#o6IAd%p;p@ zvdMe(B8mh-cY7FVf1@1ca7Q@K@lJ5MGo0xH7aHSoR~Y9y*So<4x0`6Pdrk3(M?L0Q z&w1XfUh}&5yzc{_`P>)2_k$n(;ZJ{=zGx6+FrzulX)Z%8U_rwyX(=mO$;#HUwsmZ5 z6Pw!FHnug~&UUevz3t;b2RYbiM>*O_PIij3o#R|%UF;I$UFB*wx!ElyxzkP0Vk(_7v)%||}=m9Kr{XTSK>KmPTfnTrKM7PA^+9`jn*A{MobWi4k_t6AN8 z*0+JpY;FtN+1?IzwVM(4wV#m=aj3%_<5E2u!=QK&q%*zQ%x(_zncq;0Tf#8QTfvIf zu%@+aXd@fj(pI*%qn!-5hdu3Olmi^-2uB+21SdMlna*;yF)lLJI9D3)1~H}F^_w~^IkC3>)!CD4}55vFMR1MKl;hf{_?kf%vd4_GMU+2<~GEF7P7FVENvMp MTg9r@v99&}50aU<8vpaI@ zj5Fe__v%H320;&_?e72w8si8@I>Cufa)vXV4u)!~GsG#p9mv zq!+yCC2xAm+ot-+$G-BlZ~W{R(@gibf6QDg2(p;fJmxjTA{I5&2uoSoN>;XtwXI`a zo7mK5M%u=pRo@>No%R*MDX$9t7FUZixBJZ>Yr#v$SO_ OYZa?n&AQgJzW)Jukh!4% diff --git a/fixture/18/0/7.2 b/fixture/18/0/7.2 deleted file mode 100644 index f669fdd436b5e5561b85789f8bdb66d6328a3e4d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007YMh%+l2^)%(uSty=i_k-M{`b2oHi#vzpDk<}=K2BaAf4(w4EZRjg{Xb*yU> zo7&7awzZuxcCo8*_O_3M9pX^OIM#7aajMgt>pbV1;4+uH&h>6E$!%_TpZh)FQIC1t zv!3(3*Szix@B6@qzVM~5{OBh?`@^69G9)4hGMdR8<}{ZDENCH%S=IE-)$(!EtwvT-56JPtrw|?=fY5w+)f6W{j1X&CS@n-)+=81ns>eDeV_T<7rys{A5HhWKMaZnK?Xz2W_EKJW_}A8X;F(=#`bH~0a+;1B$b zE1JspL)Ewr$8a0&!o7G9592XBiD&QvUcno92k+tnKEh}C3g6);{Dw>T7l)%kw_4nQ zCt+*2>@BmKY tQJlilcpfj~b-aypIFAqUDZa$F_z}P2PyB-`V?nDLT#s9DJMPAP_#Z|1J%RuL diff --git a/fixture/18/0/8.0 b/fixture/18/0/8.0 deleted file mode 100644 index be72870e83b1dccdf814e5090742d55977538edd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007W$WSo(CWZu~!D`aJpO~x5jCgY53;>^q=n>go)Gvk~i zn{2Y)tJko25Hzx}t!!-@JKMzwBkgS;qa19sqa5uRCp*Qd&UTJ-UE)%gxz=^Yy47uN zH_mleTJ&A&Ub+eUExYsxxtNY za;Ll8?LiND*mzHQ+Dl&cib>w|mJfXBBh!5D3*Y;}kN)tdzf2z<1R2a|c5|51PzzYl zFvBfjc`I1a>ejHP^=)87TiDW8cC?e7?P)I~9q1sV9N|buInhZ@cBZqO?IIVu#MQ2G zt()EAR`=e+7Q6TIymlYQ(HQ+(+wU;D|=e(|?|{A;EXL6F%j<}$Z= zEM#GeSjy6tv67XoVl8W1$Hq3Xscmd)J0t9BH~ZMvenvaQp^kB^QayTG^H8MX-9iH5KA|@(~tfPU^pWf$pj`ci5bjf77JL&B381B)ofxj zTS#OVyGh{y2RXq>PLak1E^?h4+~ffddBjUz@tRM3<_o|1!(TFo2SFCHl83zHqbS8F zP9&u$O+_kEnHtoj77b`fBU;dsR&=Bjo#{bOdNGhe3}zIg8N+0z5XWrh5YJ+ku#`2d zWgT1D#&-6wmn05xm?NC#3}?B-WzxCDZSL@xCp_g1Z+XX8zVV%Z1QD*0jqKzgKLsdA z2}%-58Oln7V(3aNedtR+hBA!djAJ|#n8tKwFpv2xU^y#T z$p$vEi3E0%NHY6L;TXp`K`Q4-<0{v<&OPq)fakp6B_H_6Cw}sa--JX4K_)Vji`?X) MFhwXzI1xnh9}2#`761SM diff --git a/fixture/18/0/8.2 b/fixture/18/0/8.2 deleted file mode 100644 index 0e26462727411efb0e521cc30a776a2e196da945..0000000000000000000000000000000000000000 GIT binary patch literal 0 
HcmV?d00001 literal 1200 zcmWN=2Z%@k007V*XJ#JR>xLsMWQDA3GLJYjn{2YlCeF+_N1Pew9NADZW;WSm z^IpBi#e<-UO>JXa+ZkgQyV}RT_H(Gi9PT*BJHhGBaHg>?aG@()X`BhJcY}#;H_5&3 zbHB$t?g`I%-V0v$hBr;~z7Krv3t#%dkACu}zx-`Ti6F>eMst|cTo$mPg$y^sNXuE? z3RbhaHLPcS8`#_ywlvxfcC@=a?CAgpI>?cZa=b7^$GI+ciA#-lwQJnuX1AE^ zPIr0GLmoE8Q=aycm%U=Dx4q*dAN$1DzVWSJ{OULV`p+ON2r`?+tcIG$ycV&j#f-9) zrLAZsD_hfA*0!OIY-}rA+s00IHpX7|wvU4y;!wvp)^SdCn$w-A z8EzTNTFxp~wVHLTYdxFV%;vVUz0r2Ho89eae+M|+5sq|%6P@HtXF1!2E^@JPt}@;Y zZgi7L?l9T?9`K+iJZXv-yyzuwddpNF_|Qkb^p&suSGPAkNZK#DT LY!M?ZX_Wr~Q((TF diff --git a/fixture/18/0/8.3 b/fixture/18/0/8.3 deleted file mode 100644 index 6096c3dbf4dee3cb3e948307775a3ce545985815..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXe;Aju%fAn9;jGRR<%!63t>%OL57>(ULnE`ub4B!eV_B!eV_E*UQA z21y1<21y1<21y1<21y3#Y`^E5p7neCBSE+!oW@x^foJdnUcnnUkN5B)KE;>#7C+)w z{E2^YMN@b&sKE_5hC6UK?!yB(i8FW%PvJSdgxByE-o-_HjL-2kzQ@n_9e?Ap<{(@Z zuEP=BhC6W&j^ja`!lQT`Pvd#KjMwotF5m-vf-mq5e!wsI1OMRimM~jTjq7m~x8p9{ tixYSlkKi1h#Itx2ui{O-gZJ?fKEqe|4nN^H{Dn)nGWy@G7B}Kn{0EIxKd1lz diff --git a/fixture/18/0/9.0 b/fixture/18/0/9.0 deleted file mode 100644 index 675ac1b5cc42da4a069903828c801b62b10e4235..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007V*=NwrfD`cN>WE1C%j5FeJ#F^P-lTGH4O~yI0i8HgwCYx-s$v8*G z8JWj>^-32Hf-;u1vQ?~Vq;-t4u}zFN#@4nm&Q5l=m%Z)dV23!=F^+YdQ=R5?=Q_{% zE_1mFu63R3O>&#t-RFJ}c-#}7^nw??CufGTxca za-oY{Y@#b&zA(jizW1x&{BD~63`zz; zn3>FMPIH;tf)=u{r3|;U6|H1tYgyY!8`{XmwzQQocCe#y_OPeD9OxhiJIc|Hak5jK z>TKsY*Cj4>nX6snTDQ2>B=@-2eIE6g$35qHFL>P>-t>VFedJ4D`PvVD^pijQ=`Yij z3WD^8n8mDSGmm-AXAz59%(9j44(p3ye7nQd%qtex#*SNqu4ehzh*!yV^% zCpg_1#yj5yE;PXvCc54YZgjgl-01-iddQQW^0b${>=kc&$Gbl9sn1OIttqDZ#jpPH NuW5#c2SG-|{15)y!~g&Q diff --git a/fixture/18/0/9.1 b/fixture/18/0/9.1 deleted file mode 100644 index f8b36bc9625ef6008c69a38d8db9c0087341b4ca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWO4cZSq8`{#DE_9_2ed$L8Lm5USV~JuiQ<%zZ<}jDV zEFq3~R+GR+HnEvq>}C&#Il@uSaF%miC7OO z`7B@=%UMApYgxxuwy~Xk?B@W-Il)OTaFI*g;3l_7<{^)G&I?}ho)3KFJ3shIhR`6$ zNG5WUi`*2X5QQm0NlHHNYHK<7g8q$cCw4yZ~=|pFG(VIRDW(X0CW(<)`WD=8^ z$t-5GkVPzJC98;MJsa4_4tBDOgB;>8r#Q_SE^~#e+~y8=Z zRHhNbT;>r=97|b70&7TQGh5io9`>@2qa5Qn=Qz&=lDN(d?(=|TQhCO6-tvz3eB~S8 O`9~1u8rjJ~PW}Thlf#Gr diff --git a/fixture/18/0/9.2 b/fixture/18/0/9.2 deleted file mode 100644 index 102d6f0fb657ffa5013b0c99f31a5a6ad6584b3c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmWN=2Z%@k007YM9C2n=$cl4DoS98F*<=%E#+lhgplI7j9YXJ(U4 zHsQT`ktKqlf)%Y{O>5cEMmDyUt!-mxyV%tz``FiLhd9(Rj&+<p7FX;#IGi<{j_)#HT* zaBY-uYy+R4uLvbRwVa=dUu$GOgP ziA!DPTGtutR=2s`J??d%M?GeOr#<6YFL~K3-tx9-KJu|oeC-?G`o*t)^RNF5B7z{3 znGG|Sxy@rC3tPkpOIg~ARCSMb z3tVW7D_rR+H@MMF#<|mY4|vc+CVIjolRa;W*Sv13cfIF*pZVMuzW0M4{ozl48B!_; L(i>`4vzguh5h}tF diff --git a/fixture/18/0/9.3 b/fixture/18/0/9.3 deleted file mode 100644 index e8f62e2ad230a505b55e2fe4d6a4289365a32ca1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1200 zcmYk&F(`xq0EXd{E`xL#WH3nQCmAFeB;BA(2I(?LGF-Y0k_?gzk_?gzk_?w*xTG5- z86+7b86+7b86+7b87@iL?ti}NS--b69x1L9CviU>!lQTs&)@~Tf;aFs-ouCZ6kp<7 z{D@!iC;q{u^~LLv3S5ora5HYl3EYbZ@CY8mQ+N(9;WfO4cX1vc<8yqC@9{H!$KUu5 zmjz+BN?d~j+=4rBH}1oOIE}~gG@i%HcpYbP4joWYZL7BAveyoq=4K0d-{_zK_QC;W!L@Gp)whTUTLA6pGT&j0`b diff --git a/fixture/18/1/.zarray b/fixture/18/1/.zarray deleted file mode 100644 index 7da777f00a..0000000000 --- a/fixture/18/1/.zarray +++ /dev/null 
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100,
-        3
-    ],
-    "compressor": {
-        "id": "zlib",
-        "level": 1
-    },
-    "dtype": "
[the remainder of this 19-line metadata hunk, from the dtype value onward, is fused in the source with the base85 payloads of the fixture/18/1/0.* chunk deletions and is not recoverable here]
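For orientation: a .zarray file like the one deleted above is standard Zarr v2 array metadata, and the sibling files named "<row>.<col>" (0.0, 10.2, ...) hold the array's chunks, keyed by chunk-grid index. The sketch below shows how such a layout is produced with the zarr 2.x API; only chunks=(100, 3) and the zlib level-1 compressor are taken from the hunk above, while the shape, dtype, and output path are illustrative assumptions (this is not the script that generated these fixtures, and it assumes zarr 2.x, numcodecs, and numpy are installed).

import numcodecs
import numpy as np
import zarr

# Hypothetical recreation of a fixture layout like fixture/18/1/.
store = zarr.DirectoryStore("fixture-demo")   # assumed output path
z = zarr.create(
    shape=(1100, 9),                     # assumed: gives an 11 x 3 chunk grid
    chunks=(100, 3),                     # from the deleted .zarray
    dtype="<f4",                         # assumed: the dtype line was truncated
    compressor=numcodecs.Zlib(level=1),  # from the deleted .zarray
    store=store,
)
z[:] = np.random.default_rng(0).random(z.shape)

# Chunk (r, c) is stored under the key "r.c" ("." being the default
# dimension_separator), which is where file names like "10.2" come from.
print(sorted(store)[:5])  # ['.zarray', '0.0', '0.1', '0.2', '1.0']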
[binary patch payloads elided: the deletions continue in the same form for fixture/18/1/1.0 through fixture/18/1/9.3 ("literal" sizes of 480-485 bytes per interior chunk and 251-255 bytes per *.3 edge chunk, consistent with the zlib level-1 compressor recorded above), and then for fixture/18/2/0.0 through fixture/18/2/0.2 ("literal" sizes of 619-625 bytes), whose final payload runs on past the end of this stretch]
zsN#Wu4!mwRFd<7dBw`ij?mC+5fL||qwmDU-A_z>l#=pPP<8Io7aNroNAB>#&SxmpBF03|U1 D1L_(c diff --git a/fixture/18/2/0.3 b/fixture/18/2/0.3 deleted file mode 100644 index 64d7dbfa220a79bac2dfd61b5277e87fa1c8afc5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 275 zcmZ>Y$}lu^j8qGboNst4fPqo5{{PQaT3w4);37H}88BQ*6l!1q1GdHmj0=~|lw#|Z zTf|@gYE>`e1YR})215oz69xuJW(9^5>2E{boRyt@lzrJ0Tar>z4y9ZcTIcH!>Br!U{W?|=RI*B=ZmObjv{6QBAn^xHbS zB~jgMZfl{z<39@hua&o#^hk7`p6|DJX@Sn+D*5}TY$}lu^j8qGbJSHHk!@%TO|NsBr|MmC3V zfTR@T0KHu)6yLl-k-med@uh|RrR~J_MoJ* zo%4~SuE$S!oI34&_MBfUkBqCTu5W0>M5kksk+<)}^vc*XvvW(!D>Mp4{*tm<^0iA; zclVz1-&3d0sAW>g6jl#do1hoRBQZOr;K7d?=guec=5uQ87JjRCzyCqL|3uyVz~BGa z?yv%`An&D!UD6535`ljZY&cxHN*;p w5>y^|F-&RHVhUr~z?mWRK=nc710RMNjYhl|+oTK*yGi~RFLJdAm;g#z06O3SuK)l5 diff --git a/fixture/18/2/1.1 b/fixture/18/2/1.1 deleted file mode 100644 index 0c727a9d637bbb96f114e0ca4f9cae310b367fc8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 611 zcmZ>Y$}lu^j8qGb{9!fo5CfBU{r~^}_rL%B4;%4&-+Q3c0meOi&t)cac1~7PW=Tq5 zxy-q&S3;R#22*Cs0cU5$jzkGzjtdED?EhAEwj4^7lJH?@ld`|+ASsn;Gn0vdfq|ES zf!P5_Gn`;{;AN0#U`S-R03?$b7(1jG7#A=w&RW25fk6tWKnkc71lXE=1Tro_*dQ_^ zgJG)wRr`=7P%#sP6ocFXFbAZQPgA8vOXG)9r;_3lL#je&78Ol_gPdG0%7I5*Jb5yK zFw2U8fssLB|L#9Fwg0#zm_OWj^5QKgk5y1q(yT?R9=-Z3t*o7#UA%hp?$3W%Y+QU= zdM+e9(tM@m(j}gqV^Z)*qGHApPvJ|KuV{p9XXh7p_Ybdc?_a-t|NX}=6cQSb9&J{XH?Yw{g1J+ zW6y^uW&eQe>}IvLiyMjyWNpm-Tci(cKUg5PwlJINI_n0G6Uq+T9CMk>xN`&#L^tez m(AQ+cT_X6wL)qg$5G8y3U!2**&|v@QoyT%6A$j&-3kLv-ANV!^ diff --git a/fixture/18/2/1.2 b/fixture/18/2/1.2 deleted file mode 100644 index cfa2da23c32e409c3b3ea6a4aaccafe8ff7fdc5a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 616 zcmZ>Y$}lu^j8qGbd~x8DCIgdi{r~^>zyJMTkCiCD3zTwT+{5>rSw*m+hvi73!3<$F z0SO1W(Nr3?fOn@-u z0^=vt~4-60R=Od9Y9hNV4ewx2?9V9IFcR)CQWmC(9&VjMrGKcv_Qe5NsEc4gKeRb zppe8RiA#z=OB)27Dl320azts^KRA1i&$Y+F-NQ2=Feoxg>d=v6SFYW7_Ttr#Uw^rI z`DKkwR04KYbxmw`kGdT_LCITKTw&I4Q6rp+)b$ePQ(_23ug>>NcGbHOGLlkJty#1<$g^fCQsy2?@_wn6#8)`K}r scAN~l6QUiq9aLoL5PP7KFp24|%w=AQ#I_#ee|k^43Kl9}YY$}lu^j8qGb{L=HInt?Iu|NrxgR;<$M!b)^3(paUnfnn!{l?+fI#n!l3ZN{tw zVJB6F42BDA4Hx7VFfL$Vc3@zr>Sd9>|7xWJ16$(+W`!gMUbY4Xh7%@?9E*CTRw+RB zGcYJ}I0-bks4)mMIB*E+KjpfY$}lu^j8qGbR1?hK%)n^;|Nr;e|MmC3|HVbzJ;1n!?>X~U7K2%m2EK+Si3UC_ zmwkMBr4lk`8VED6T;Mh6nPtSdn1TIg$PCT~HD@0;24CsFA(Cv46EascFfcGMF1WzJ zV8Xb7t??C*$B@Fna9P1bil^ZMv$C@TPs0hI*inWIh6{`f7#NHgCxF!OG%GMLoB-+s zvQ2=PA;rjqGt~cY@K>(}23`iX1~!Mw&JI9v#tw!Ipy>=C3ipCRiQ#hgE51E0}F$LRzs7hqN=8&t7m9rY-MfZ)R}WPZryqH z=G~wFnB&uZ#Z~p2JJIP_-2EgzIcI%ipH8LO)-kHy;)WORKYaZ0)4P

f7E>X>oT%(5n2dG7F`p}mYUSuGt3@43EY-S5Ze8vun*+&TxVm2X? zXj%|MEDsY$CR3P77H=|(Z03_gIj1>8CEsy@Dy~vZ;$7O&jU=9u5(D9eA90I@5)&WHX-}mhv9CtYj6dsp2Zt+~y~02o4Pd?k0p}29UxqULuvzjA1N= zY^R95d`>ZkILr~E>S{+cZFq=S+VdD4n88f4n8({>vzR3;rGoQRa)lqL;wHDaO;6+1 zizNE<0?7Asu#$Cr#CmRW zi`&$`HxRgsVCqqy1`J^+!x+Wy8O?ZJV*I zF_*WP$0FWkG0XXY6UvZ3bPIHE~_i0Bv5_po1B=QX1SinLSk;nTiC!aN}$yL9M3lAA$&_ zE@2F05Q9nMWk!(BI5OD5PIghk7wo5$qm&VIzjm}Djz?%qJWr6oEM_x@9Nu98xhx}( z3tZ$9)%?gcY6vv*4tmpvzNGLX14(5#X>4LMTPWf)c2LYdN{DzsJ0gju1u?|(FmYru zg{frmCbP(9J~@>vdYj(xVl9(lFW?WZm;i6ftD=pV`S-8?HTr>+8&6118mCM4FW{rzx z$wlMJW#LM*aM3JW)-1WKapkgbrCGS#bALF0!SnO@yxt*!K;TEzC7kj^kP=^r0Xh=B27|saNc$u-JGnowbu$O(5@;Qeo=L8kR+#3kAp)GMdK?maLMgsF$ zz(R6(hg|YlMLw6g!c}Vdftv&c2LksJ%m4;5h!kFA7^#dQjcsga2gQ8K9!fYwDUo%w zqdC#ECWcrZCyq>JGK(zUWIov}A%`l?a*k@g+1bXod ziS(mCN#wDLeAe>;1#Dp}g#_KF9l?asfH0cSln7E8LmHEKg>)Fg-*}_gfW*7B*-1P~iF%Qy&7Cb^r zCNPmnOyze>V>WLvhhrS4j8puR)12oUF3{OPbfGIf_&Gi4!!PN}a#paCHT;>iY~n*U zbBDY9e{AYTNpIFXn-e(Qh_>Sw`=D*w_q_GX9E`xcVA&lTRjASh17|%{VW*7VTJNr4z z5suR0LG5TsTYgMCI?$0$%;pW|u#i8ph+LMkjPrcM1+MZRu5puF+@?;zKsGgTJwpJ?v#4jhbplW190YEr_8FZJEMrOl20oXEyU$z(P*)6{k4Izc|lj zu5gv^5!%s%-u!|-3}7IGSk3#aVIzNG6WiF%4nmq~M_t0XpGHLT5X~9OIL4E~t4tx2 znatuaM>t9aUviQv&T@_pj;fAyB7vvqP9o3Jn_QN%jC|f>H3e*7BR9FlZGs(XbqFDh zhJ-VW;fx@Sml;bslgVHYd)Y@RpL3XUPEbLN1F#KkiQ@@65KlJ}n9l+hlEXXXlE*6Y zxy%)=Qp*qABP#YHYrP5h(Ukw7n=A(4LcCy6{(kCB*nLzGg+7nF0FGgK1WN;~4{%9F&? zlc(uLHcQB1CGV2QTGp|i8g5WaAlm&y5cQ}}D9H>Zh0(l3DifHOquj95C;g|5tH9$75rZL(R;3RZHFOH^~6@2TMqcL~I3M}LwS!V4rbl2MFi R3tK5<7oSkXehzSu{{ix`0J8u9 diff --git a/fixture/20/0/0.0.3 b/fixture/20/0/0.0.3 deleted file mode 100644 index 580db00e794d497daadbfade137776b569da977d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmYk;QDjze9LM3U^|n^6^;WCaTD8_&CX<=TWRgrK$z(D!nM@{=$t0OfCNq;{GMUUw zCX-|`NhXuYWHOl~lgVTeLScp87f^SC@c z9!}wE+=mD82wugR>!O*&jrbaF!xQ);p2fu((J#Z5xCcMN19%Cq;Kcgq*W$~#8IR$2 zcnTLJqIndT;tt%6d+|K}g_m*lhS<7@8}JBzg~xF+GdlS=h1>8Q+=XZHcf5!zH^$ak zT!RPjGdzkDS<%VCdAJ$h!0mVnf5vmTbW?1d#8tQ#Kf%Lz8E0&cW;SlXS8*#I#~<(v zPHl<)aa@7B@I%~>7x7=5o*n%fT#uXZD1M74ao*Nw7U2@yj_=_Icn<%>e{fY!Y+b;0 zco@ILZ*cau=TK%kMR)xga6^Io$)Po_zG^pZ}59OjSF+5c?_51PJAEt;RXB~uj2E&V(Sua z#4qq`Jb`m}N9O=8#<%cY+=IX3A9x9$-VF@E{(+t2i@1nn~PFX0uO;779-U&hUN z48OxuxL|+0^HE%iJ8(Dd#q;>Xp(aFat+=lPqE4*!VZ4kp4o5Q^H{h$d n6_4W&cm}6Z(LatWa2I}v`|%?Fi_?!pzXsRiCOnGY;z|4;Zt$x& diff --git a/fixture/20/0/0.1.0 b/fixture/20/0/0.1.0 deleted file mode 100644 index 975ce49bff064168d566e43486e07bffea06f8a7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8;bRX30>|NBYnH^!h?!ZMS<5+TIp?(GoYs;6BIYkxUaGn~jP)ql_1A!;$K{C(Kn-m6+%4*iImO?&c zBSmbdn2_K=;2!D|Miat`q!m$&VJzdwsHYvBN#qH- zlf=^`lgCn)ko(D_KQ8HC&;VoBTi>LG=THy9p+R0i-gN7fE9j zqe-WT?G&?zPbuLbhbSfTUhRk?mUhI^kw@r67Bk3Z9&eJvA{MiRO3qP5HQ!Rhb#8Ez zqz2lNOn;sug~1GAC3L?Bwt4Vn3g8fac-a z5kVUsB!>3;k zXB^-t|Kb=Goa7YkTWCiII`cES(48mg!2;f9A)racKfMrUR+i`nGz77NH@Da$y| z1ujy{cU+~8Tim91q;~WnmFF2q8p9aQMmDjTVm@XECG2G%;jOfz8Bw$*nm8UJo=Hq* z3fa8DOmdh{E)|^Q6jgk~d1|;qE#0HEqX)@6LvKtr#9xy+-C6O?n7uc_n`m#L;(JMBoM z7f+EyU;5FXm8>G4^?X19TiD7rg5tCzn1(bWl;*S`f>Denoe8{322+{FbPjTeQjYT_ KWt`>=XZb(*3j$^U diff --git a/fixture/20/0/0.1.1 b/fixture/20/0/0.1.1 deleted file mode 100644 index b70544cd38749b467bf7ec8517a4398d020215c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8@qZ5l9>?*owAM(>jF_1bTFW_UIp?(GoYs;ZhoE!@$ z&BCEsa;$N3ESxlJ9GZngv*gg|`NQ)M_&i=eyg$)_K%fn6iRBSG6363oWfrrULk@4V zkX)9robz1ZA~k%^HEOxd9s1S@1fHWG$-KZ|QW(KVHnEv46!8f=DPb@B2)`>3Xi77p 
zXiYRRJWMQ8n94M=c#~OVvw$2bILRrh_?GijbCnu;)(r%nq8EugOJ9;0L^5kw%Q_19 zh)oo-gCaui4g~5^pD>ybP9&{}VjSa{Ksv88g-m9W#bJ(clybhNf=bR&MO=_}bS0iA z=}7|5kVr1eSWX`Avxa;&Qa~-YxkKl(KIV|N}a#_V{@~Gx2HQeGyY6-eG5V(h6k{CoX!+D7m z#xRyt3fVys#e7By2RTG3ks;a0Pv@CKR8Wghb> z;{@fLn)Kz{}>kX5WEj}3fCK3mzwc7hscM=%X(L@3Q^K?GwM zODdCijWnh+gA5LGh*FO86=j^}3}@+lzjkz?JHMg_z4;A&Sj-ZZvXZ~CiuHWJ25xYZ zThwhB2;5B&4Y;3%3}qO@8Od~M+9wokal$BXLMpZGss{Le`YQVd51-u<_u@K$p5&+HGbeay~DJl5B>Np{Ta-Q z3}HPVuz@Z7gRSi3Q+CmyiFP!k84u8$Xxh-0iM+}rrtv4HGmF{G;TT_XoKyUl)12o5 z7wHzR9o^~0ujx%+o}(Wtc#oB=)avmfIA4G z9`zZ_iwt2Tzh@NV7|#TD@+rI6$G_OmVUBQ==;qqdmRKI4BXK-VS7tGrIppv*3&~{} z%Q?>lE>gqyT%(rT+@Ws^?dV4`FEE%CMlh01Y-S5Ze8Nsj*vme`BebI#QM4wS7#=2; zDNJP=S-iDdqx?UF5K3dhNMQscN#zyBk;Y`wDPb@BDCG+dQ^pC(iD|7Jv2@`v z;^@H>#FNbea#+f{?+jEr}V4nGrKHPFl-3X*uV#Vm5Qgxtc%K4g8RB(Yx67JTH&Lr{#-AUpZ zlF4Hk%gN_`)=r*iR{6aD*~WQcm1G z+7VAD9wmWpJWe7xEFhPqyh|RdSWP}vT&J2l{76vCQJYXw7)UC^c#$+lGlq1E*g-LS z`HT_{ahOt~Yimag?RbzlI`Rme$YLhh%;!yVSj-ZZQo#i(xyE-?af{pBAt_Wlk{Q5r zq%ee`45NUJ6tbO(wWGsWH62C z%-|4*DdhxTQN~%$ah{I#w4)PU`3>FZ$y4-VF-us=O8&+w*7E@yxW#SmQ1jj(xQANQ z=RO)RgrN*$6n|ti6L^J*>}C&pImo{`#Bsjl1a0eUM>{(3OFGhpU(=Pj%ws-_c!$NT z;5}Ayg{xfSCjaLaHSP<7yQxV(`ZIvR{GK6<~2{X-@)=(V1DyW)8W$#X|B}#&RxliOW>; zJvRuN+E7C1O&|J_%JU2&jp2-76PwvWF`uxL685p5$osXUDKWGnmN*_Fp2)96P{2kC z30i1J2%*#=jBpweK^nsuK{_upmJBA5NeTPdPbpt;gfdQ2PF%Eh#M6mKNuV2#lSmE= z$Ym+-lE*4mlTQ`bspbwp6122E)FzY^29nA!UL=jtj3J#Oc2LY-KBI&~9Hx}$7_W(; z9S;&mM;@UQS6xWCrjYDGXsK!zf@Qg>2_z zirCE__7Wbe9T7y)f@s?E0PVlzGBcS>lF1~Q zOp?iDGLuXu$z+n5OeT}bWRk46&Uc=^^YZ(bWW*f_W>n%{d=C%eCHxyFGUKVk7jQEk z#S?f67p#e9F)qiQxCi&+dHe&f;Oazdox_cI7{A2hIA?8i@^Kos;hVS{&)~0k5m&B@ zt<$&`58|hI1Si%amPA-sY!H$*du8}Vh_g2(YY zJdM*EqhE$Aa5uh-2k;{Pg;#Oyrr2u0O?U*q#*;Xej7||Q#U1!I?!$BVJ6^_Bn`7%N zuE#_81s=o6?C9j-LfnFH;4VCkXYm5A*b-Z(a19>7Pw;cRiZ|v&GZ#1EYq%Xx;*aLdyo~?itZmV+$CvO`Jci%m54do9G!NsWxC`IG_wfS$iT~i5+}Jvg zFXHF;6@G(rcSL6|F2U{i7VgDg@Hf1KPv*te8C-`S<7apjXYGv6He7(O;_J8*f54yd zJU*I=trNH!-^Y*eF#dzr?TTg&zKE~jHv9&^$1}JjKl(>-CGN%d@E~5ozj0!B^y}~i z+>A%@1fIeL1<@?V<+u~~;C?)hf8Z5dy(hNL;YK`+U*d6`vo|{VIE~xzP27!V@K?Nu zEBD3LXoIDhrJY0xd@D1FBr|~Ra!2bY_Xsm$% diff --git a/fixture/20/0/0.2.0 b/fixture/20/0/0.2.0 deleted file mode 100644 index cd1ff6da6d49d132a8eafa612523ab886f43b13b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8@qfq#9>?)-Yl)eWm|4=y$Z0J(X)QTvEhlN591ADS!l7AmXq+4iC(Xj4SvWLH z4vmvz;iOqOG)oSRlVjneSvb`5hv#4LdOSYwPp2RVo}e=+JVQ59=|yjrvW(>v@gZv{ zW+R&jy)6jtpcWC-rvXtkC7NLjX9QWi##pkM#ANoekNs5e6-TJ#B&TQ>5(E#^o+KWp z6UjVH3iFuH0t$Gar4+K7BCc|c>xA4M1h*4PIQ58N00S9B2Cp!TOvaGKc6P9nGQMCh zVIMQajxh;}6M7)i`vCbP)nUFMO`5(+ra1ujy1cz z0O<@RgJL$ai4s0#JEiQQjHp`L5lt*Di6fo|Ng$g^OeTl7m_aUc$)l2!oT7^FI8QZK zs3G}I?MR^~&yz|&`jbW>t0`gwA5+X$woyWOZS9C4k|snELknWbWDHqMn@tfL)Cbmb|M>A`dKB%dV|u!;{TWF70-KoF)KA%szva2nB=NYWWf z2BUeEOvW>TiIlR3G7j-2~MnC$K#t>d6oso=UG+Wt53A_26QVwvCL&Vh6j#%3A0P%F-5jrxB>Etkn zcgSTSi&)HA&QZl>exRBg+~gKLBDA9?efcf@7|cryVIAw)z!v_&R(A0jyJ=KkJ0fY$ zeZC!+uy3(Cz=|LY}pf4*}$tu?J zH`cM4PuN23yR;*WhTKgfnsG198ObO{Gmbwno+-S^R1R>ELmcJb9OE?Ka)u5KwWA}Q z`4wI0#;@tlLKd-@<@}Wutl=Zpa)X=PqSoC(a3{5Cz+E(CFfTEL5&WK!jOBI4v5U{x z&3^vH0gmuBM`_bYJKEBoU($h2JV9q>^EPu>z+YI%QkJotOMK5|uJeCx5Zc%orxty9 zfxZmlcMN72!x_P5K4A+x`6s*B%RctgEK)m~)0+EfLpvU(JyUp-sm$Wf%w`_*S-@$& zAN%~F=JoFYDC4aIC^6QR-C zQHu!b(|{;<~EcNQt3r+(s+>pq%)KZirL5}O8AuR zl(L61qVCg7rkJg4qlEAl+7Uq{O^70f7Q~Xt7_yki8)P$$>Euw(VJbMmH&k+#b5xNS zs~t&n2%|3HG@>z)q%)KZM)N9}jAsH9DP<33 z9O6sLImU5L5Fe)<33TL966wN|bS0O$A)j&WE#`SVGi$*%R&~hn6sRtip%^! 
KH8;4)Eq(@n*Z@iZ diff --git a/fixture/20/0/0.2.1 b/fixture/20/0/0.2.1 deleted file mode 100644 index 0077456535ef1073c5d626b5fc82ffe2f4235546..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8@qZ5l9>?*o)>>j_BxaU0vz*gfa?)CIT5HLnadIr2G;181C5Ohzv2fBXgl3IH zv*ge?ITlWug+sIC&^S33PMU?&^M~gj@Oiv`cz?PD0)Z##P72S`lT`ZApCVSUk`g{* z9i?n$3nBFafjg;BI8A6uB&~=dlTnN&o7b2?4pW)NehzSuD!%3z)tu%Goo)*R9-%Xd zJV7^-d4?1gvWUeL@&QE@vz8LBbAy`%2L%Fm5JDJ@31N)@O0mTJy(ff^F(YeynUJWVpa=|f)%SWY2p_>f{Yu#rs!-K8DDgwlvGn$v;^ z(iuqx<9L-UCNY^Ql(UZtj_?(goZuv$0F6tIfbtf7`` z)KRZtAaEN&G@v1&3}P^8jNoO`8N*n{v7H^1v6n9>=MaZELUgEh#L$rkiK7dT(v_La zBA5BRLmo?5$}-M#ff}yzBemQj(8wM1rVo7?$nO}$a9&~r8`#Juw($?Pvxm>wOY<=8 zh@cJk6HNzxNk=9znJLWR&&*^VZ!@10oa7Ye_%G+V!Vg@fM`P_sq8GoRHv@QqfvjRR zYgo_U*uYjkWg88`wIh^f+(UC(b02LO!&t^Kkv}qt>AcAd4snm!W@iFVUMWCs6)aNc5(3HDr#&BL@1f%%_W0=6}Ok@wA zvzLSXi$fga8;;ZAZtdtuXMRN&y7464naA7AXEE=wgd$e3k}LecRc`WsZV}SVhEksa zyud()@q31o$tXs%l~38mZvM$0_H%%Pw7y3>+R&a~(1A`oLT9G)CNr4JUzo>27O|MK ze8)L1^FOX|og3VwXLIf7MSp(F0ERG>VXWh0*0Y7bvz49fVmD1&Xh$=mxR=(%@&N6b z!0Sw88h>IsvzfzOj`0o0Im3TA%SA46nQjr<(VY~Yr6;NMqd!HgU?nAd#5zjZ%oajg zYDay-X+l#XX+;#7jAAs|yv77_n94NvbAW?X@ioV&<}_#M6saAZN#qH-k<2rsu#iP8 zrjQRPqL{UmaGe|6B)FCThY-SOOgKXr$}lo`g-o&-Pc}Q*#cnG2lKoV2lqzDQw4*)o zJWMANc$`FLGl#k4^DYZ1U^#_cj|5k(B`h$W7Ph$n}sOe2@Km`xrF$fugqoS}yAxkxS7s3ZA4?MR_7&y&g^ z29riHYbjw9pHRwnc2GuG8|?@uf|f)QO?)tZmk(HGZHgPnkDD7mYlSfoV1o48YjoXNwaWhmO3;}j)jwE;m|A` znk9$E$+2+KEF798hsMdVaMCR9`N#9$??11{ zv4Ik{vW@WDf}kGtiJ}QjiJ=v-jAjgD$>udCk;62mbC5$ErjoBYK^12?N2fYL@Ccnr z<_Wry!ZW0@h{Y_SkPlc%5$h@DCbtNF76f6`C7eha6U9)5F`P_ZVKiAxB%9srVK3!; z$w4YOP9^O^g5Uw#lgPt#B8kUIW)5?iM?UYdhyqqn$Yri@m0Et{CZV?n!5xH=Mt=s7 z&Wj8sgHdEs!dA9X%4h7Rj02Pt6RI7t#B)FGNZ=t7$zdAP$>nY4kjFyusp2f>sOEbv zQ^O5vNx4HiQt8L@q%oKwq*KItirK;^l(3UsloA=H9Z^Knk{IG>OFUUjB%7(cNe;7^ zO)eE2r;;;#OBENnL^VluwIi9HJWUFH=u1BeSV1A{_>dwtv6(G|hHFO{5i}x_=CmN1 z3`UX31YRYJ$xLA?WgMWKV|+ygr#Q_S66$G3B3*fmBzo`^J;`Gs`7Gys3RuHh)=|R^ zY6+K7?k2pH;3pz5HDNJQ1e_|H%d4~m@;xuQtz<;^OHGbeaJtDNDC%ySKeHh3K3}Ovy zS;t2H#wND&DLZJ;NIN2E#@#fhHTTknag1jIZ}0~uGlRF7$q|lnjFbGEQ=I2JF3=@X zJG#=HU(thJJV$SqvW(@d=C7<_10S=II*qj>g!ma>vntmYa&aGjt@5Zp!`!l_4n z2J!-f7|!n)!Dz-XmhF7X4)*d-_HmFy9Hw;w4W{!)W-y1j%;N;#aFTQUhx1(K3RmfNw{~;I&Vm@L6C2VCI z;mx(9K2bEGDKWGnmeGu1EZMxqByyO>bPjTe!&LG$C#d2q=jhZzJ35oh6LcemXGmoc zi&;V;AFz@l)>F()ZV^P=9_kWKB#nt;D8m>|Ca*A>EGCl8ZuYR3a=zps6&$CMb}hA| zJ&8O_Cz5!aWacoJdF1mRizr|PgBsY= zF_X$&Uc=^^YZ)Gq{W>LrZwUbJdP)Ec6vN{IECBsb=-qz z@fW;=E7rwUHLk;h_z51v>FcADg_F1$U&UQ`8h^y|xO78ooxoMNA3wyycoqM}iHzts z;>)-VPvWi?~JXJxCYvE4UrM#_#YfF4`6SW4Hp}!T0bWUdF$0 zdS3L;;S0DKKgX}|G|t~0%|o~p-@vzVKmLlp<5gUfA6sW}BObxycoJvtiB29);dXo- z_uyIl1ux->y|Gn|>+m3cg2!<%eVI6t2Ys_%R;EYdEtg knmM=$x8e>wg+JgqTyiA(Ww;Xe;sV!Z diff --git a/fixture/20/0/0.3.0 b/fixture/20/0/0.3.0 deleted file mode 100644 index 5489d24b6f6e0a6bd99193434c923ae70b09852e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmaLZ;ZM$S6o&DmNRmpWQmG_mX&9D<#b{Vs8ivI%H4KwsvNEh%HLMzjRm0NCuxiz? zYBDSh!(>>R8kSZKtA=56^#`1Dw$GdAy7znay!a-I$YyMTcJ~O)xUUWPJ#>`FG=xFB zTv{ITjZH7D5BY{_7kpbcWk*lZXlvJso0T?4HVO~xk3MI;y@&@PLXjT~ds zP5Xi#V-r7KWHl0?T@|ewHO9u%Rm2P4(C!E=9Y>8#JMBF>j7{tWktJ9P?aFA^P;P9T z-1rN^8QSfk?ZrN0^PKhqFOAJycaeFR5A80{F5;50`Az$Szs6>(hsZW;hjtHW5An#@ zgijQSzzk@2igp@jjLlctH+(lXYduBQVLi0FO}m4;#>U@EBmjZX?l|oPP8yq!v`_eK zY*u)Sti&p4cY}5lw~US3BoTLbK)ZvqLpW?~-q7CSov~TyBeDpKq1|QL6LM%z<`iY3Fd>*!-aV#4lr$Jel_owm`dkv}W8lHlhB! 
ze-H-ka%p+UH#WVrKJ*)#!~otuNP>2?v^vxq8=pYlKk$Qg8MI6sGd3S+o#-+)%ck)D zK^(NJpjG0!v2h9F{R3BMx1W}V1IFeRtqre@O>{8tAH+brVp<7GjmAZj74egH5(s9(-wA0?B!`Q@z^Zvn7XjewN PhH_)$6v6uk&d}~ZRVJ$J diff --git a/fixture/20/0/0.3.1 b/fixture/20/0/0.3.1 deleted file mode 100644 index 28d426107ec7f0afd183551ba403a9a48afe2c8a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmaLZZ%+td6o=tcQ7TELQb{V+urv(AFj^WG!!Q~RtA=6KFsvFDqhZxBtQuCW3{y+P zs-Y&JNGY{VvLS513}8eBMf~6o?T*ookSnJh9F^Pt^j+7(cZEU(}J?J$yal!n55D)Fj zX%)C*Y&@p&|A8m8OQWSD!`QUa+R$!n7KQNt!4ha!NGn3Iv2mEj{|8RcE}52sRA|>k zYeoySiw@=ggBWO+N6SZnu^FNLz^Jj=7RLVviO{Z=R)>0H6B^F{2jS2zhjs>MjZGh| z9|OiFVLJaGtcP|FXqBikHog)3f8Y=8j?s?egt6(Mb)w7Itcc|QgIH)+LMug?v2l&! J{{weu_aCz6s&oJV diff --git a/fixture/20/0/0.3.2 b/fixture/20/0/0.3.2 deleted file mode 100644 index f3fe5decf5b39b316306b2f2592e06f784932470..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmaLZVNVEP6o%nbDn(LBl1inLhS4xA4NFVI($cWBG^`B6Xc(4;RjYtH4LkUrKw@n(y%nl{RhrD+x6wWpC9mMh{#rCLc1nHGg`C(?~x87vk(RCifF|s zF*ZZA?-({VDUKo=kP7YUX$@#JHUVQq0uco5a%m@!XKX&xdeLWW5}ZU*8;stMLcYt;fImV`g)`>1-6XPPX46)GeDy;(7jE&P+5ofqSyDVBZb{d;jS{vGp z%{*6;`G|&grL;1X8=Dc@Z;Tq7G&hkgNQZWhX;1Lf*o2J}2}cC9%cm8f(Af0T1~6!B zlE#ayM>4dlq1B?!*!a1NOoczRJ4!o-^P4$%(dh_QK3`+$$eX1TA(3ao^7*J(F!)7ZF95plyfXt#^D z8+(k+OWG^EHZ}|VL>6KZv^!6`fQ!cF5A84h8Jmo${C|)M?V4!KXfZaC{``Ls1?`Gx z#V9d0L$vQ0Ha02K`2Qdk+SSt<&}eJ|0{H(R2-@Y+P9V?Ne5UoH&)6gc^8Z01w7W&S zjXTE1Gl>5WyrJCz+Ck(Pn+{qhx{OWCbpAhxg?3kI6}V<>oPzoPzy;c6(Xz1<+O^W! z&<^e9h4BADG_)(Fm7(0&jL?2#)Yznj^8Z0Pw0lf@f~UqNYzF@yL_oWIS^)};O+Re_ zgT^K)jQoB!{gqDOD2i6^n9Q3IF-FnO*#7j&$oCm_S(nJOMKX=H^(pTEMJ;DW2%o5STk5-=lRQs=tW1ZD|L;X=0tz_@Y! fQwL@aOzu**dcfFM`lkxa6qwAlZgqe$UH$V9_A)ug diff --git a/fixture/20/0/1.0.0 b/fixture/20/0/1.0.0 deleted file mode 100644 index fdadbc43fd6d240663691eb345e066fb1a9cae02..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8;bX`J0>|Os?VOX=l9SeQPFj*?Mq*~f%t&aQ91ADS!l79>G)oSRlVjneSvWLH z4vmvz;iOsP&@3ElmK+)<$HGbXymT%tfqhu*gzp$*+#9$gWzY>CX9Qj zM>r1>!6-&Eh74Y1BAHAli&749h;qKH(UEu(=}r=hS;A8Cc#oCj zvz`KOaFbhvJP`yxr52&wLl{FC$}rM+nNg%Ofed!DhrN{WIi-|wjB+BQw4)i(v?7LB zIuJ)TbD2jDZ?l+OR**+Em$*y~-*bbYWf1&?5R&Ofe^PjXA*3>rGz!_uHj4O!-4t_x z62hakBZ8(pLL^bNB$`a7lf@j~B%6gSB8N)OP{l>QrJ8G8r-sB<+L1(Go+Ft-3?_wq z)>FV1KBADF?4pR!80`q7ArBBv6CS21=}aJlX}m@zvzW~s$~Z2#7FS)EBk9E9HKAYIg7J@d~5kg(=CX@!;PeW1}NgCsMg>7p(_b2WDz+m=UsAH!&=sHjqB8KhaU;r20Wo^|K>bb_>QY|iPeq-dhsm18Nl-lWDRRs$436jCbsi2JE+q^JL*!O zU(kR?{F26uWgO#~%Gk_rsWf&Xykd18PZ)|5bd)Q08F4|F_2p*ylkvvK>CNhc1WbryP$Yw6{IL--9 zQpMMtrJ74zCcdk7bS8-&^dy;n^k*fjSWN*Ruz^ChvW;2^+EJS@?xh~#JV*qi7|j?m zc$JA{GMy|+ImjW(`I6&Qa)v5mx@kun;^;^`iF7B4#VlbddA!F;@>x#-H@L|yLK6KS zY7xpkgfWDn3?q$~8AUo1$Y3{n*h>kYQ%V`fC?~SJb~GcJR>TlX2ja+PF7wFYZ5ETu z3i7Dt5|^prdu|XU*-%1ArXT%D;RS|}%1F{EWGmY!;uCgL%mGRW@1Y$LH02Q@iJ~RZ zWHOyB=I|!jEMyTmRC0zYF7hqaT;n=5B=*#fB>M6k$qZsJDde-B0=DoGh3sS(MT9=1 z9bq)&0m5m*!!#wG31l#h*T`fRvzbE~$0($yn?Cd?)-x7#@&GqCzRogU?l0h!dNnxOeUr5Wgq2y!67O*K_wASYDXl| zv?GRCx)4VW3s^`l@34$KR+CRHSGYU^4Z7-6tI457Cq~#*oe=UL}L+%wQ&E z9HgA%d`ShTIm20^VzeWgPIM-g?(`s$B`hVERlG+Y>)F6YZg7)2?r9$g{D>eLa4!uR z$}mzH&C8@Qfr(6_m_3wmfX^x8D91QXiw@e+k~Ta=6dienPRwN<^I6QFSi(x)Wfd2> z#AUAYUv6-hz|-!aAN?7?5Pri@M)4A(*~&JyvzxzD%zi%O08Kh-M^l>fOIpyHCuzfU zW-yaE{DHYF;%ydlnlqf`0{`YB*Z7|6bbm%W66wpY=*M7QUuNsZ3)(pK*XA{F9?pagtNC z?xG!S=)lu-B%W?0u!y%=%nJU@O4hKJbzI|nu5*X~ahG~=HkA4d<^_f@lHW3lv5aFp zyZD6N?BgHo=MaZELbI;g(VR%0pfxeHrvtNjlQ}Hpk1S#t%UQvBzU2Z}`488)#cl4; 
zCtf@HGKgO@nBk0IB%Aq&E$rlP?4p#t?4waP?PyFmkJ5|?S`o<OHF+^$Fzx8WF}Lgfo_L zj3<-Vm_in_$mS4-IYK30QAIW9s3E4ic61<)uEdi>FOpfta#oPf`>dgWO%!sA+uR|z zhyRCqgm6Eh3}*x*N#_;DlEGv$DP=GFDCY|fQNal+iAdCrNTO*+46$?}jvN-SkX+tj z8F{QGpIWYPl{$Xl7C}92D8ZyKkU^yKBEv~z4CxfHgPoM{DW#NgkaEJ3v?H9BJWd2r zv?ZD>W|7T&-Xe!3EG3s}&QZfp$sFH0ya^|c0Q(vV)jr% zNV0Z>(v*h@qXm!Ak_;x3$xL1+i@D5WJ{6pxlCykGH5a+WWnz14M;wXtB#D0XX8?Ju zCZCObKml9X#&&}GXh$#&xsMQ<@DNQ&V+`p`;#D%3&J1Q!#zD$C&X-hhnlqdw>N)L* zrW2isr8_-HWC=^jWfkv{$9gufksI8kj(ht0e+Z%h_tKD|3?r4%yi6Jsn8+lG*+U5j M_?$A1a*X5r9}`oFu>b%7 diff --git a/fixture/20/0/1.0.2 b/fixture/20/0/1.0.2 deleted file mode 100644 index f0ef2cd413f4fe39d8ea03cbc5d2caa49d4f9c46..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8VPMDw8prYfIk%IXw3eK-mUGN3X=Ws5M$C*H8YjoXNwaWh77oplL*wLFIB6CR z&5}bnITlWug+sG&XqFrrC&yBs7oXS9i|>oyQ@>ddJVXN`c#OtG(Sm3uGli+-@H(@| zWj+fy%^A*8$=6(>itF4UF)|3c(3KQ=(VJ8TFp$-(VJ$^`z$Uh{gPqiF9t8JNhcF(b zKH)q{1Y;P>II?(^$z(H=9FB05W0dnHr>Wo~mBd8_L0j69KxYz3rY9*ZWf{vU;5}AT z$VQ5|%MS!Cf}j@nP@7O5AdF!QX9O9%%osA6L>Bwl&jCvLoFkNRigKc&ogtc7+7L%P zok$>;`79uhw^>R)t0)fE4@3~9J<3aFqYLUtS29m}L3?rS?KbE1f$HL+wflN{#pCb=wT33*g-kxH)dEmhp+4%H+-p&cpo=Q&at!cfvE zWFtjv<0H1ShrJXN+DbdZXv8Cg)0AdJlF1~pn89mgGl#j%ql{CObAhj@;0jl{M*Nf7 zkw6kXNTx6S=ubYYC}0EcQ^*#!vW<`!?WjdP?kALnJWM0f8BGQgd4)`-F`XHdaF|j~ z@&#p_<2)CLX{{Zxbf6>gbfY^-EM^ILtmIwtS;u-daGN_+Q!_RQentp&xsQ4bVJK;g z;w91<&jcp2hrJYYh|ehDI43ws(>B@>Nh_WthW0#72j(!Bc`V{DEM^7ou#ziWi+L1&benVdd@jQcB$9gufnZL7zU3|=L>OQ3% z^=QB^X-H#!MH9v|fr(7zk4$40Z!nwVoZuv9`48u~#5Y`~eS7WbKv$lj8@+j!KCIv! zR`VeOd`qjB!#6cV>t!9$7%}MND+7WfuO7H z;T~!e$^(QkjNyzRgO?dYCX>iwANx5#DW7wMGEPxW)HB)&Y^;V`9~gbfY^-EM^ILtmIwtS;u-daGN_+Q?r-VR0SRD?MF0Q* diff --git a/fixture/20/0/1.0.3 b/fixture/20/0/1.0.3 deleted file mode 100644 index bb67029ff7ec974894dbd18ed12f583ed20846b2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmYk#QDhYW7>40ZlF4K;Gnq_MYn^J zGMSmoOp?hYnM@{=$z+mDCdt&@{^z@TuHG+yTgK|P-~{f(w{b6?$KUZXuHL?Sm2no= z<3T)vCvbLAbn$r)(OHjkaU*WQ?RW}*#B(^Y zGrmf33iskicmOZse>f`<{d#-_H{%KX7Ej~CUC}JYN!*R^;Xb^Gf8iCJ-W^{Ta04F3 z<9HJ1?upK3T!h>4E!>0W@Hf1KQzh|Li|g7XE;3nLLC-Db7i;I%c--j!34}OTB;3fPQuRRd`I(!*l!(;dj zevb=Eqqz;2;4XX@Kfnw4C;o$L4#wAcdW8orJ@@q7FU&*PHA@l}qi@dNxA58^*~O+_@b@g;l}x8m3M z9iG8kk3@erF2i^5ecX?K;NLj2GWzH6Mcjx-@hd!q^N&Vz3r^rpd>i-TdHfwOdxYNJcvi|1kOITn#st+g}4=W;BGvFzu-k&Rux}WIF0-9Gdzqlk4I-c&c%(m1-IiV z{1MOLM0I?X;uP-1kMICq#{Y2EiRjnkE4Ud?;J0`h7oLn}F;3!cd=K~GMf?k|;Pk2Z zx_}$-FdoN~IJYJ`n{g3t$G30~p2Oep5>B0tuUcG(2k;9#hO=s;vk@2IW_$y8;c5IC jFW}^v_^QM;xDP+YLwE(ROGh&YH{d4Rh9~g{Jd6JWkiFTx diff --git a/fixture/20/0/1.1.0 b/fixture/20/0/1.1.0 deleted file mode 100644 index 3dc5f4893401aa5560a903a9e3a6424292584e18..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8e|*RV9>?+TIp?&Tb6RpvYsoP)BW6ZoW=S(YPMU>7EIBkzj)g1D!l79>G)oSR zlVjneSvWLH4vmvz;iOqOGz*93o_{?5|2!VAKi;3{=0VV!HpJ19P9)HsL>97$#pLlW zE68Up>$t&9ZV^NT!H=jx2=`NmK@4UHX}riN(izVLcCw4zl<*k`DCH=}Xwo7G9-}Ey zv?7{VIuOS!W;2Hz-ew`WEGLhPT;eiSe9sN4xknHg1bygBKT>&~L5yG|X>4W-TPflb zc2dlKN@&nB2p*y#;XFzcB56q!Q<=tevUrnOWV3)APIHE{RPZeqspL9UBt&UPBFXfo z4=D^Jm3-E+jsiYpGllG+h>%v=QHM|-qyb?(LOAJ+X95|#%2YC$NfxCXI`&K0h5jV|rABc7f-LlXUYmI16{HEYA4%}LJjHRoyDK|5mT%#(DX2T#+JB`jqb zEBOnnSkDJ+;5K)-OU<|-_zAVB%g?FDP=+y_(fp1vOym_Nv4_3v;~@X!5XbqF6Ey3n z9TBwQaoW;}C+N&v<}she{Fx=J;5}Ayg{xfS7XRZmH97^skEuz2o@D?-_$@;j#Y>E4 z8y~Tq-TZ?+9N=>f(&!28XiQUnO*5itO&eb0b!IS!KQWhuEMhU|_?q)v=D%Fw1~<7y zkIvfBlYab${tRL;Ls-uTY+x&YXB#`&#ct|8sU7ub$S-L`6CR@}6M2P6Oy`fh#w=zt 
zhvR(73C{8#&T)}TT&7bO?dVJ*Ptk)u^raswc#oB=$}yTGXh&0`Xhk%!bRdpd z%w`Tbyv;&#Sxz1oxx{6v_?{b7bC00AcJ!qmsXWghMlg~zHnWAT6!8f=DP}(~@dcgSTGt64)O*Qw$zKTu7rBpXU?QW!`o!+C)bjA1O} zC}amk?Bi34ImBU(5Y|gO!ik^-k+h{9F=R56EavkT*(_lx%P8jp6)$xI=G8N5LzbD76{PI8Je N&hriBT;VF$_&>v$j`sin diff --git a/fixture/20/0/1.1.1 b/fixture/20/0/1.1.1 deleted file mode 100644 index b44378c87bc31ad69aa164b6ae4e5cd6340d4f61..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8@qZ5l9>?*o+c_sK=bVZh zoE!@$&BCEsa;$N3tZ~vT9GZng&mW$D!29v~;q!@V6$o^oBe8U&JMr`(fyFFgDfzs| zDhgQ725xYZTLe882>g_~1alt^No6R*NatlnlfeWgvYS2ZrIgP(L^;PePRrJTz@xMx zlJ-OqLsw#%!(8T(%R4M4kCo(ekxN{rmLIr5oi>5MPY9wv0~kmOFOo_cqey2f+t^Mq zpR$`04pK_f@Ic@}ni0k$v?PLdL^6%(%pjY$m_rVW$fc4qoTZxYxJV7xsU^Oxb|jF* zbMz;fA*4{idNxqVM{K2tT@(`>p&bng;Q^Ww%EN?_!2~9f$?Hrbi`isT&M}Tt!Pit$ z#RaN~X{Q~r#L=605_y&+@>oee>-c~IHnW97>O^Ws5DmDOV4840A*3;ibjI^48BAd+ znUrvlQp)&}a!zrY3L@HTMpxB90suk;@9+C66_%WgRtKr-V-tU8GduX0oz(BF9SvyA zuV_MZenSh!GLG>~=8sHaCT}o{BOIlSll+HMoZ}nL(kx!B?E5b&Pg| z(~-yNM0cK`2lH9LLYDFuma&TWS!8VLN1dJN)u1~ZJ`F`Uu7!Wee& zF+17IKiS72zThy;W3{6Nt$2*qMA3nc%;XJbF^@kppT#U;Dd+fx^IYbCT;T>cxkcY@ z+R=}J{FXtaGL&I#wQoa*4}ye?mKYkib*)r9T50$SU4vH5>RF8`;V>wo|u;kv}krX-sDZhxmfS9OvJhpprA3CF)7-=twNx=uSL+NMJEbSV}(cv5ErLvw<7j zeZmh7v@71~8BmUL=(?Mv=}|wy~XJK4mv09Hf+{y|tqm zVLU=hB4|e>)0oZ-vU!U+|;L%Xp*QMA++EjLTODK!kNNUGMUAjWHFxwEaViYso*@{ LQpFXna*ZDWjW>;t diff --git a/fixture/20/0/1.1.2 b/fixture/20/0/1.1.2 deleted file mode 100644 index 98cdd84f261aea29f399645bca8eec6ffe54597c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8;bRX30>|Osx!uk=X*uV#tr7j^nKtqNzf{|qKBIC$pGFj|pANwiibB<8KNlp>bDiCNzYoh5u z4Doa&fqBel0r|YmQVLj2Ay>J^b!z#6I|Q{31b#v=0~y3%(s-WXq%)QbwzGqsl=3Ni zDdRBZG<_@(c$j8{(~=0HXiqdVn8_@1d4qZ6v4niibAgLg^Bq^I;Wo7-wb71bQh0`e zq%w>&irB~|ius7`l(2_VLfUFaLqd6oriAe*;bbzIEV6l(8RRgRTq-!pDJuDz^Hgz# zYT_fcBY{MElSF?8kU{~gDP#j5P{dZYQA|)f?Fgm;_Y*=B9wd}>#*)D#UM7=iOedQ% z4pYtvzNCV)oTHMcDD8+QjxNO0lU^i}#}e{c#k&-+j`eJyhTGIqr+pysV}hv9eKa7I zVWcsJ7f5FU6PZK_dnn})pHap!j&p*rXzd6ml6FMViO$54!(4J%%v;}`5q0R#08$vrbEGng(Trg$+bCu?pHRX94swVlG1?JI3mzeiHnb&@X-p@Z zIlN8|3t7Zs&T@`QF7qu_+~6j+=-E*_66wcp>CX_JWhm=d&jz;eH@32ikJ(NASnX&) zV}3;wn)4f4FoB6oVk&=P8nbzgIUM6SCpg2uIm;!!;WC{b*N!-P@FYFy%hU8@1uI#_ zTK>X1HuE7{2z1hpI@IIm)Ta@@q%ory%^1e>2PQCuSD4BH4swX2{F7sx<}1$7rn7cL z63gRsqB~E}gM}<&G0S;}6|CWX)^dZJ+~OYpBM@hM_!;#W!m|uzB)?}A<9Lbj?BZi~ zv!8!(fFpdtQJQzrjuy1$G1?GAM`D@HYs_H*Z?cf3EMqyB_=d||=fB+G4tKdn-+1ll z$6$WP5QZ~?k!|iH#yJ<%~8uAMo(Uga2#yDPL zJX!paDa>FdvpB*R9OV@M;xy;Ez(r!ZYey^zbfY^-^dXt0EMqx^yvG`f*vKaCaF=@o zKjHsSmk=JHA;THLNHTblabz-?EcUXG{gm@LN2uT=r-0DG&7jVEOL2+dE~K#e9m)$i&XO+SE=DPwIub@j$~4JhJmCqj5Lba$R>*Ui0zcH zhf+ckwWA@SJVaB%c$9E5nM@Yhyvhu6m`g4doa7Xhe9d{PxI#7Yy|p8OM0%4%e+H03 z0jnuw10PVtR<==0P?B~8(}4R4p$QKXN;+f7U=lBr$uy>uO&NzN=LBC;!CB5xNmL*0 zh$fCM#M6^rB$CGx@>#{Z6tIrdrCXPiKHD-bfPnH_97V{Q)tY9UpsNxFM+~Rv`2=uj~)S*8ENMR_? 
zk;*7WGls2fqnO=%LJ0>r$RV0MtsSAX;1R-TLt7%5#&oio!|UX*kVP!!Ea#}?GT&0g J4Q_Ia{{wMHj3)p9 diff --git a/fixture/20/0/1.1.3 b/fixture/20/0/1.1.3 deleted file mode 100644 index 0ab8134cb97b020c7fb5fd5452ec5ec2f885d307..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmYk#QDhYW7>40xGMUUwlF1}FtyOEC)~dBmYt>q(NhXtIGMP-0$t0OfW+s!#WHOl~ zlSwj}Br}uAOfs1ylSyVWnM~H*{^z@TuHLUKk+Hf;oW_0lDIUTrc-{8ZWJVrt#Mf{u z9>-I71{W1ZzZ9o%H-3Z%@FM<)vvx$k9$&`Icm$8(NnB7A%|e{S?f5?K#dG*KUdHL2 z@pT?I;3515kK(*t(bxE0^UU3donzzaCFJHBdg9Uj0h@G#CQj!rJl$IbXAZo`xK z3!cTvJ@Hk6t8p)Wf(P+3URx5)9Nd7f;ubuLKjLYe*c<&4T#mc&L)?!S@L!ynjD8)y zgq!d%evc<`{=R5#!^OA_cj6vAi|6qYt}czQbNC`2#INx?oU=bVoA6fLg74rCJdMBO zU%0$1zE0q?xF0{quW;sp=&Z*ZaTC6QZ{rF48GpmY2jlA?uEIU|F@A=Z@R~!>%*Ge- z6?`4P!yoV`ytO>~yKouqzz=XA{)PYG6@2z^d|kke_!WMO$MMFB=xo77_%^>+v)E5|7~QRCG4r0(>3c!tMAI{)*>t+0po_#A)1zpW-3Bg4b1MtS%2X z;%m4SkK-vkgNu%><}*rh3U}j2cmOZre>kfu`t|rSZpI^c3{T>M^;(XkU zZ{jvQiND}koUDnj3S5nQ@e@3Vm+{)u(agaO_$qF}qxd79#))+FOK>^v!VhsjUci5G i=9%c%;Y+v)599ZE0_WF8a~m$kZMYNn;8{G6m+*h0C))=A diff --git a/fixture/20/0/1.2.0 b/fixture/20/0/1.2.0 deleted file mode 100644 index a601726f1206db0ab3b323b401d6498cfffe64cf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8;bRX30>|Osx!sbJ){>Lfa!$;WW=3LWG&3^}&5}doZhoE!^x&x_|T_`LXjQ(6WBedtRX&ohX0hBJbVY+^H|e8LXO z*vmc|Mg#&6(TH#!qbU)zB9h5WVJbPi!7TEa$9zt5iqll{Ef=WaD%Xf_6$o^rJIVAW zg;WNT#wu2`hGITqBPDF7l)90Dz`fKXlm}@@7>^RpSjI7)EM8+W*~}z|!yMr#m3+-f zsyI(IF;4^n?dd=qU5O`&UL>=Gr7WY64_HMJ8z|;BceqP%>pJq{Ogp$s1Mv%eF zj3tvvWKqUm_EF9k9HxR3R1)!|c0>|QJ7S2X3vuKzkNM>DE=wq2C56;*m21@UBew}^ z6A1i_U{V=K8pC*zbVf6V3`*EeDSP;gG7fTxa>Aa{j&NGgk_e(`OElTcB!^tyB9BEZ zCZ8(KQ_U5=r-qx{qL!p6?MP+-&ymUyhBAyIHc-qqKBk0S>}C%kZM7qmCOkqI&3T*_ zWHN~?X7D=M%waCMRB(by&hZUZT;eiUh>g~cI1=ee68-4U018-1A?x{&BDS!VZ3MN` zj$j&aKOr>cVVW?SF=Q~2SIJ}=)0x3R4pGi=zM_INoaG!*G1?JLCpr^L0zF7%5sS%Z z1@BY9TGp|io7|$7I_(32pAtlU?xO)i7|Jk4@jFH{fmfKwE_Snr1N?)79OFxl)4YRr zw4e=75k*Iyp%ZhMOD+rf6N^~Rd#vCRm$||X{>x1QPX_`&p$`4%&j1GV8-_5Fml(ws zwz7?#{Ec1g=W`CwxTAJ7p&7rTIjwn;HcVqWGnmaEnZp9!VIgNY%Q-IcZ!U42AGkrn zGun|zUw%zL2Jr%eS<5=svzfoLg&lm#PU?5kjs`U17c{0RzoZ!xc!h~fYRPV+C$aDnf*NXO3F(TVOnO9CnMp)bpMj}@%pFRW!Fo7hYsRy*oYkDpVYhCDo~9%5bfY^9c!z~7}4Mf zyJ<%w!g-9QM9_*zCNqVp#!$(pbf6 z)=MQos$+uY$U!9DyR>Jq{Ogp$s1Mv%eFj3tvvWKqUm_EF9k z9HxR3R1%S>9g#%Sju>L;LL7O_V?O!3%MuD$Ng*{{y=iF||No&bTYdOcvl4eF?X2i_=I5bNRjgw>Hq**vLOAd{bW8tJ( zI5fhcS#oHc91ADS!XcI%8Yjon`;YhE&+B=8{&=2VO@g2|eMskd29m)rhO>#yY@vjY z*-0t;*iXHtLGUp3iR5t_5lwSqn8akJkjop)B#(K_=M<+oLnYsEi7KvhgQTb+=u8(< z=}9lr7(hC!Sj`$X@*$fjW(OtIY8C|dQJZicq8<@EMkHex%Q&)mjY;G%gItbqlw(xz z6{onsMJkDp4uUqcC6P`fkwOnrS;A75QONtOqKFM_|;M=e9jTdIY|Z4F-8$X9Ic5bfsQ1S$2{hf&pRxkfRz+d#dU5_%@5oq zWGMFI@WhB366yte?3G8MMdpXFzIK*+j-~>%t zYey6kkiW2q<-E%Zu5guW+~WV-rbc`a{FIvXr62tn#BUkQ2wq|& zTiM2TcJU8(bAZn{NP{-o(U8XcnkKa1Nm??MX-sDpe_}QZc$^jrbLf8P6+BUVL zSsdpJPH=|*aF$Da%VpZN(~kCZ;aR%Ui{A8MIq$N9HT;dWY+^H8sPT+;)TB1QpbqtT znEH(1B}OuiKQNw2OlArP_>6-bllD0feJCf*37Z&g~3t7frS$5L1np={3{TL4 zc-qjGnY_s?=JRJ3u!N;7;}YL;nH&6%o7^SnXb*bPn?9uTJOjyK7{l4bX0}km$Lyq( zee9=RqIT3LlE-O8G|h=&5|f!iE^jcCJmxWcN+ zXYFW9BArMgg&w4`grzK_koQ?d5gXXZU4ky!5z4*PB8&$KCxc-OCzF>MLlzUsrj&i` zr;N`zLOCa?Ao^MDh#`*F#FIcr63Js8^U3EOmQcV-3aR2cH>lCbbdF_<9?rHBn|WIG>G%x?Cum$2^I5l%xMC4we2C5kL2lFf8pCx_Y0 zVJ_vIq=NH&%>}M-m1`uVXh$N+bSH(r^rJrotfY|jd_WOf*~WH4dT2)|b$Ng=8t@1W z8O3NanZT=LF_mdd=MaY};{;z)&RNcJp4e3Fh@(9nNT4g-NM;d>$!7)cQNUW(v7X!9 Qp_-aK{Xc|Ihx@6^kLb^gasU7T diff --git a/fixture/20/0/1.2.2 b/fixture/20/0/1.2.2 deleted file mode 100644 index 
7b13bbe17b0070f30a44c25fdbf6b0db3be7e2e3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWO8;bX`J0>|OsIp?&NoV1plw3fuol4eF?X2h^?XqFrrC&$7`vv6pZ92zIb!b!7m zXci95l0)O)dpex--;d%Oy z${^BM!&=r+#K&x=n4OeRr&$o(OI^ZwkOoBXD3Od~JQK*`4W^LIEOI!?F^*HgH=L%D zi&PQcJP6v+omcl>Ho_j4wG#Ij5)~x`lSc5Jy|$NuV={CJ9?ACKwcn~p$ua0gT9ehGDd)Ui9!k*NQa2oRn5j3MYQDici zEN1d1+011g^C{;P6rZas@LPs5idPuTHny{a z-TZ?+9OMfQ(I`$k8q<^~XhtiZqBYZ*!A$1xC+4z%XDa`9UbY$b9AQ_EaF`jvx2{}k~OSl9XI)r zTh#0j1izpbb-0(h4CW<$e&rnGM2M~OMK5|Zty>DQlp~{r6zsoOFz$k(V;rY~Z#YdQ7pWpXQ9Ig` zNEedmMQ>7A#&T9rz=y1%kWCa(ql|rna2~=2Olo zD!9P6RC0x@TqB`}b|jKaPkPax0SqLc)fBLik0@jt+u1=#vUY@0pZf`;5f9UtF^naH zNxV)b)0x3c4s(PuPVzP7oZ~zfi0!Ezade~;33R6i$t+$OYHgcOg1ifqz McM(E8?xQ|G0S^$03jhEB diff --git a/fixture/20/0/1.2.3 b/fixture/20/0/1.2.3 deleted file mode 100644 index abdd000dd5d4aa68895d7a9b1fa44133a5955233..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmYk#QDhYW7>41^Op?iDGMP+fty=3;t4?dxTBo(vn#p7`NhXtIGD#+r$t0OflF1~Q zOeQmvnVHGVB$-T-$z(D~CX>|N{^z@TuHLVqFl}`ga0?#66L<#aZC%Zz72*W$!1r(u zUcf)_3a&1Sud}!b58z=uj^KZTL3s!gKg5Uc%+u%@I^d`-{47{b1*uaaS^_W z@8J9RGyaBu;)=5Ps=*ETIevvl@Y+Ms*?{x#ReS??;P?0wUcja0@pS}O9O` zn#0k|#uxDw+=eIdJ3NPrjzoVCF30!rBix67;y*a^X!INK1>Ay1@C2U0c@=4^E5r%h zf$!lSynuh;6#oTW~RM$9Hizp2y$uGEUXRR~>G|{rEK=#aXAL lvk~XxR(uP0;#vF!FXCivd{yFF+>2k}L7aXjI_q!_{tv6(*i`@k diff --git a/fixture/20/0/1.3.0 b/fixture/20/0/1.3.0 deleted file mode 100644 index 3a5de5d37018aba5a6ce6a8501339491d7e0aa13..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmaLZVNVEP6o%n58ivKNGz^PXDwT?;kV>VJmWHLpuxe@;hGEsPYHAp*8deR%s$pr> zuxiz?vTB$N!_+WZ46BCW_5;p2+x6*w_I`eMx`|{W3)*$jy3u27{M<$S5diH3wmg0IFV(NknSlAv8ZtpSb3 zW`>u@Ow59I*|Z$w8k={tKJ*)#$b}+Nh=z7mv})8C8%J-ENtg`n_R{uYzp;5n>&0_p z6XYWjj1Xv7POCtru^HtnG8zuhE{&Fs3}e$l>%=2tHP`9_B;4JX$^qjLj$7XABsd_yCc0NPu>=wA-jNHcpF0ro$Q9 z9i|<@QDgI(_6Bc_P1q8Va6~}6tF&vlZfwQ}icG*nXt#^D8+(k+W7-ouH8x9@iUeXQ zv^!6`fQ!cF5AE;&zD#>XWE*Web{L!cv<#4BU7Vx`DRghIPZw9B|+Y{rC&jKw%;x09BMENIt7>qZZ>^IOIL2LaHo zgjR|&WAl^t3%`v`Y8d|?Y=w5Mv^KOG8`p6DKX8Y3CuxN!GB$&>FZgO~5+nHkAPL&l z(;CodY-X(H|ASf3E}NEvTx0W&)`xy$6B)_>2hq^3idKypW8)ab{|A$y-Co*0>^C;g zXuWuDY=WZs{~!d~mD4IvX>3NV;r{~%XqQGyM~1QKpmpMrvGI=K{{vrWcb0Yz#l~ic zHjEKtlN`(c2Px35iPnr3W8)IX{|ED-T^=nT1;*wR?K1|9P5fH^KS+RfwY1x)Gd51~ K{D0sK?fwCMJlb&p diff --git a/fixture/20/0/1.3.1 b/fixture/20/0/1.3.1 deleted file mode 100644 index d5f29c88ff6056a4b43c49da1a59c20db82ec9b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmaLZVNVDE5Qp)@(lCsMVKFSJR4SDsQ7V1>#VYD=?8iti&YSpk<8J31&)vz=g zrdAD8tAuxePfY8Z_?U*PVx->c{U0WQr|WIxiOT??%hZN|obkw^dnpcM*4ZK)ZZe0Sb-H0BsOM#%7a;$YyMTc8_R}QDbbJ z7mLinTxgd~%fU%w(?#n>kFkmN6p2ABv@54opwie(@e-K|Cuo;J%S4v3X{U9d)7XS~ zi>yQ_v@4}uN13s4SRyhOj?iu|Z68vN&1+f{nvIQ*kBBe)pj{E|A}$%5-?Tp%Ha3aA zB0G=-?Vi(WQDvk?KI99o3FHQ_-<_CLPX-R9@^cb-NyrCGi{~Fbj*NuhiOM} z)Y!bIeZWU!vpQ5H4B^o32JI$p8JlsdM8;zRv`eGyM>@1?p|zq7+WD{M|3M(ME2fp; zva$I~`-c%@lN`qXgA{02PisJP2<{|9rST{bNTCyh-Pts6bYCOVS;2eHtuoK}HKV>2a+{|8RcE`yedEMwD7>p-Wm z30cekgHUK!O1q9SW8)Cb{{u&8x0kjLsmA6ttqIM>#wUjV2Y%45h;|W|jLmP_9}F9t z#900xBtg69v|7{|8<%zbKX8S1XKA^}Gd8`nKJ*)#ggE{mY=m}Iw1=oRHZ$Y-e=r-` F{RaY^+xGwf diff --git a/fixture/20/0/1.3.2 b/fixture/20/0/1.3.2 deleted file mode 100644 index 5e0ca35fe53f2a15be4a184ad8770e2717f5fdb2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmaLZe@_Si5Qp(&7!AWPjF#p{rBbO>ib|zY4U45=v}#xx4O7FgYM7cDmR1d`hGEsP 
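The deleted chunk paths above follow zarr v2's flat key layout: a chunk's
grid indices joined by the store's dimension_separator, "." by default (a
"/" separator yields the nested, N5-style layout instead). The helper
below is hypothetical, a minimal sketch of that mapping rather than code
from this patch:

def chunk_key(indices, separator="."):
    # Join a chunk's grid indices with the dimension separator, mirroring
    # how zarr v2 stores name their chunk files on disk.
    return separator.join(str(i) for i in indices)

assert chunk_key((0, 0, 3)) == "0.0.3"                 # flat layout, as in fixture/20/0
assert chunk_key((0, 0, 3), separator="/") == "0/0/3"  # nested layout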
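The fixture/20/1/.zarray deleted just below records this fixture's chunk
grid (100 x 3 x 3) and its level-1 zlib compressor. As a minimal sketch,
not part of this patch, such a fixture could be regenerated with
zarr-python roughly as follows; the shape and dtype are assumptions
inferred from the chunk keys (a 2 x 4 x 4 grid) and from fixture/20/0's
uncompressed 3600-byte chunks (100*3*3 = 900 elements, hence a 4-byte
dtype), not values read from the patch:

import numpy as np
import zarr
from numcodecs import Zlib

# Chunks and compressor come from the deleted .zarray below; shape and
# dtype are inferred as noted above and may differ from the real fixture.
store = zarr.DirectoryStore("fixture/20/1")
z = zarr.create(
    shape=(200, 12, 12),       # assumed: a 2 x 4 x 4 grid of (100, 3, 3) chunks
    chunks=(100, 3, 3),        # from the deleted .zarray
    dtype="<f4",               # assumed: 3600 bytes / 900 elements = 4 bytes each
    compressor=Zlib(level=1),  # from the deleted .zarray
    store=store,
    overwrite=True,
)
z[:] = np.random.default_rng(0).random(z.shape, dtype="float32")

Writing the array materializes a full 2 x 4 x 4 grid of dot-separated
chunk files under fixture/20/1/, matching the kind of chunk files this
hunk goes on to delete.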
diff --git a/fixture/20/1/.zarray b/fixture/20/1/.zarray
deleted file mode 100644
index ca7dbfe466..0000000000
--- a/fixture/20/1/.zarray
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-    "chunks": [
-        100,
-        3,
-        3
-    ],
-    "compressor": {
-        "id": "zlib",
-        "level": 1
-    },
-    "dtype": "
[the remaining ten lines of the deleted .zarray, from the dtype value
onward, are garbled in the source and not recoverable]

[Chunk deletions for fixture/20/1/ omitted. Chunk files 0.0.1 through
1.3.1 are removed in the same pattern as above; the section breaks off
mid-payload. Chunks whose zlib-compressed bytes happen to be mostly
printable (0.0.1, 0.1.0, 0.2.1, 0.2.2, 1.0.0, 1.2.1) appear as plain-text
deletions ("@@ -1,9 +0,0 @@"-style hunks of raw compressed bytes); the
rest carry GIT binary patch payloads of roughly 340 to 1370 bytes each.]
s);|?s#=uxNy43<^1xzucTSH)sTm91jW&uq8PPh8N`0w@4Kg-KWNo3lZH2?qr diff --git a/fixture/20/2/.zarray b/fixture/20/2/.zarray deleted file mode 100644 index 5738808845..0000000000 --- a/fixture/20/2/.zarray +++ /dev/null @@ -1,21 +0,0 @@ -{ - "chunks": [ - 100, - 3, - 3 - ], - "compressor": { - "id": "bz2", - "level": 1 - }, - "dtype": "Y$}lu^j8qGb^z#Z(W?*2f|4#^9VEOlffw`T5K`WGj!9JvJ#oypti| zD%632ftP_n{@;~V4S!cPFt9Z+KtKuuL&gcC%w@jz_f}1=;;OD&kk&ZhrtHhh zka4K0m%U3`B+2AZ&axSoJr_;NRx#M&AdSQr!? z0}~J4pU5Z>Bo>xd_MKgP`tm(GHa=(WyiHire2}4mjYDvvhJm`uL%z*I1!4N!x<}{S zXPs>t5h)ePZ7gl^XJXG@hG2$($E=(!EfEI{o6gSBQOfoRR+o5qb~VF{9ZI(jSd>j# zJF|CV?ADfoqc4?qHzZtR<(;u3>9Lt+e3aL=G>gXAqb&ulFO{x&>B?+~Qrpt1Cl!;n zKm7a3sI=s?-7Sex3_%YT?$Yp4QM%ckb3OD@wnxwn@oWayjbc|Xw?~DfCFca)nyhqD zX(vMfhgqEWX=7DRYhy`4O(BJeLK+@Tu?nA(B4=u;@|;Nt3Q`F`6U#X=L4o1Bi;&E$ zNuu%7_ot^hfW|nV0-G}7(?rFS{0i($i_=!>>ztXQ^5#s6%My8FJxcZy_8W+5@3Y%8=Ii@L<%Wi%2 z`WuU)D|Rz>B+lF{Wj3RtX(zj%oL$l8OB1z{yF_N_JhSVF4t$U>i+z?$UsFrn`Ha$R zd+qh^`wtl9*?jyNnNWOzQGNBb`43p*jUMyvxS4lGMEA7JXTE-vA} z`*G5-6ACT13`%ZH9EwH-njtY84hh=$NtE9yc2t>EJ@-Pzo&=Q{s@aTXTTUF|aB}G} zzWh+2WvW4lCPzy`(xp#|PvuNL`KgQ8>GufRMIG4tKg;8Fhr`c%6<_{3cpm$gEZfg= zKF?HEz5jvNC;zjb`azWk{11d}*9hjBrmXQi`cGN1 zmu2?pi?wN!7c#$oH2*{OrahA18)g=G-6C~xI2<5No{F5QunsXTm4?~M52{b*8pSIzLllaf_|2OzsG*3=Gv0rt{)F-pWT-$(7 zxUjJ~$UVl1t^1+t1kc3`fiqdoyJT5jpROn<&Xjt_r!%%-SqGQ{# zXk(@Dh%?)=c3zz*n9G{b(Ve3mRNU!$plZu{7uQ2v2ZeSW(GFb$5@vCUXwC8~1PWeP z6cjVo1{#)ZwDs^}4$+S251tXN`&rJHFSR^{ENK+fan?EJOve-yaG5xTqFFH0tttpv0Q7}ogNlC|PtGW|w($~y5ps@))K&4^sDvGS# zSLbs$CVfpmB=RI;Z&diCFvSRAt0USW#p@KAU0iuMOv5Fr&jNdPE#smWJR m#6Yb|N$=L`)j~jBKpZt?lI6MLx$6J)o^%x~RG2emfg1q2wsGhH diff --git a/fixture/20/2/0.0.1 b/fixture/20/2/0.0.1 deleted file mode 100644 index c7be15409060d190ab01e8312a4d50c3c32828e9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1493 zcma)*dsNbQ7{XLP_Toelko$5we<c*SvKTCBjN8ZL?)lGb{;Q;4N=8 z@lxoTmA7)`I-)sU>O{L)Te>oBX;aQBwL>|yZRhNdo$dYO`JVSZ&v~BDKVN3Fm`dFk zMGB2-qUf~%QI|gdU(;_iYX|K10~k*YU~fWS`TiPs$Nr@nh|^nFl!Jwte_b6SPOoMF z1Y>arh}#}ux!B~kEpvJNyH_jLllb$SEf2fTZw|Hyi?1Y+iaiAGMMXt?9izy~2OhIK z202^MQ?j zgO$H;eXr&V&nNq)Yt`wK)jFOme|OI-MHN8yix63URM0=^(>(-vfDQf-}!nBpF~y zp#U?fB_ATl2NPU!3;%LaAU}7-Nm@W-9dCOZh65jb%ebaLwU)3xQfVjD={;Ntz!zA~54zxHYl>W{_0X7QS=*f*X z^cym;Aj4SA{{IOj)*&pJ)Qb6-m4`xB(jW3htDE=rq9GV|5p6d$piwH31R1~#K<)*l zB1s4<8)5?oGuzjd*oVB3<3lRClHm7ug6`VAOK#g+QxS++Eq$A9z+mIs9g)J*Ma5s0 ztIn%y^tm1+q|Ro$GBHP# zv%>XUGLa_}O){2vVV`q$ElBd#RlY4aF200HP?mLd$O1;}CPObsM9G&oRqdirMitbK ehXbOeGr&l3c|XmxPNm+^r9h!sD6^2MUi2UKYFL~A diff --git a/fixture/20/2/0.0.2 b/fixture/20/2/0.0.2 deleted file mode 100644 index 71e793fd35015ed1624618535ff53efadb13cf48..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1552 zcmb7?eNfVO9LK*R82B&+#0TsOl}r>B6>V-c36Xr_H1MZ_v#^Mev~1^fwVGlBLZbp7 z%E$~9%BN~I&E`WnXC`hHf;O#nS#xC%mOW@?nbW#;x4(9K-+li1+7`PZS)y zlIz3aR;EsP1M8vL|BMO#=D~Kbd=Y@J(*tYq3Rd0t>Zfc8C>ZzC0^k6EJ3)T7?JrOiC3)sGw49XvR_0ve)e6N5b{}j$*lF za*+nv9ctD9Ynp`>lBIQRmyS?5m>NHd7(tEw6b9y z#`AsTEH~6~InsjlL6xFQ6XY6)V2WJHEL6ekuIx?sNVRy>m#2i?_@c)fi&ho)-V2SW zGY&r)cij0a)5!ZPO&r8D9+jmCpweZ27TUHm*oI$vL(w*mKv2w4L{l_eNNTekOt@vN z;bQE6Pjl~ev)Iee^MO7TiYC08{E|d+Zz3wth=heh6Iz^b9^I`dJbQA@c2}Ct~*fSfg z#O8Qqdb+#8fQ!yqO({Pjl(HELVl*Jwc}&$f-;86+hjmo7S1gu(mr;sB)=+Q8jT+8v zx#C2$a{j=W?=O-{=Uo05L6i}T2_&-YiZBU8FQp~NzvSTXMJi1NPby(x1+v;p;btEU znuY|tIiaqU91S`aLr^;d33An`V~QKIB!k zRQF$A^%j;%@J;i>v`SJ*#z;8)!YI_gkPTS(BMhFT~H4f+a97=iY$Qwyq zdiB91Oq}iCFdKJ0L{;=7$)Q7Wd+WO0p=H>Y&VE^i5ZenEej=(q!Kp1zvKz@_?fE9x 
ziSGL99byx@T+7&@))6b=(R5W^au0jqpAOC8HwCIL@1e(gc3yx5MIPuaDCq4od468g- zSZj-$7+=~Ccv~`AFL5cANY3{NUQ1$DaC^01eXi5(>XV#IhndQefQURpZ8UTOMG`;_ z@h4sn1`HH;`P5M1AZaOn+J9y~_6Z)lOD(MX{|1GE6#&f7n)UGzy_+`uDclxTt5}h7a>f|W&hs4ARGMFP@6W8*v||# zB<{6P-6jFVo(Mqj3?u38)K_Lz;0IpY$}lu^j8qGbY~6WEje%LL{y!M3!VL~E{ye-xTF}ecA?3eUFRv6s-CwO`b)h`X z&JGL=hLX&_3=H?&CoOAM{}-xoImw@uYl5(_;ACH;M3bbH6DJNO2u?WQ#_QwQVI*ZR zgJDMFVqu4`91Qtc!4B0|mN`B(U-?suL85^H1Q-{9sg6}fxxcl7OfEAeWiI^xYL%e@ zlSH$d0#7q%%Rxg4HMJQHj0+c>5HwtNXSR9j4Ws2cCpJ9w6VVgOJ7}~bLn_$A*ikt3 zqMVmNti#SpEaKh^BppOMCLG`j+AxFbSVjU%gC}3ZnG+m`_+BUpx~$~jSjp*9*irt7 zE5Rc1rSqvJRpwRdQ|7WeGBoevXbIGEV$yO7ySIy`aNYj-_r&Y~S7Qn;)-YU;%oFMd_M#&@%r3A%55X!@c=O+;X=s z{dWG>Y4%wKKNjEG(|cj|uj16wh!aziE8XJWX>@ccDV|?_!oKI+O1XKLE_6)zGq3rS z#8j*26CJNC?l^z`mHqz5YrQ32*xo#|q3(X=)5R;xU4H7GO9`sb@ewxM`m%ppz>-6g zZ^l2fx8HVcg|I)9hyg*RH>oS5M!R<>G+F`e7?%LQ(>)tpWTbfmQp?Ok0g6Qq4l^lfC*9&M&4fu@U| z&baujM`6;Qi|-r+AIeL4edy0p@L*|9R+{?I=BnM#wzRyQgfnL@cAF?m7`qu{IPpqs Yb~0=>N#<~pfAr2{IhT;Yc1xE70IoS1R{#J2 diff --git a/fixture/20/2/0.1.0 b/fixture/20/2/0.1.0 deleted file mode 100644 index d76f8cc59827cfdc57ab54787e0024b8dc07648e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1524 zcmaiudsGr;7{)&k!E0VnL?T2)!j*-{TAp%4R1TVymvBcjFThCbhFw-!DyAZ&CZJ}z zHc?X%(=xly(wdqWDPo*zYnx_P+C@{l>l(Jdc4}wOIq!46_kF+fygx>4vX4(d3@Iw6 zojtb$m^*#=U+H1*`vGL{B!Gmf0pwlHt+XGExEVD-yf$lw3+-Lsbpi6_Z3k0%$T?kAxk$q01u>( zS6jFQ2}84e;LLJcnjR$4&?IJdfRv`q!B8X;iP#mBtQSfLNfJ6l(Q===qv)uI8BbJC z5y@Lx2Br+t3um$Z{u?ur7%paZ9sw}!fgBqEJ%wkW%xNx`j z*fwvbfT`-$6GC|&Og1Qu@ItA=2re*8Fl4=jMJ)kVrU=>VQ|fr8lZEa^UL_2ZoD;>; z^ElM8nOL^WDo(g_McBz<<=OJW=s2dc$|{bW3eCC3SdMPHnVvxE%v}zVo`SgK#77D` z)w=G<+EG&Q@%&}bEl`R2LYK}^ck6_JCptY!VxkpW@2 z5nEw!9$KDNuJkn6iNTQ7B%(yNt5LvnA0-4Tar6b|0?njLhfd2_tLuJ~Kjw&6xRR>~ zmb5vBj?%F2zxUwSxRvBxx7z>HZ z`y~89TProZxYbADs32H65OSm@4w7fNvN~MXtyaPSF6bkocAtPR+m+n|>2U7Z$0`dw zC!pdYiGJ=6z)*Bb^O_sJo|db8T^+nkeE$aFM@Ie^oc-$HL@5d>8`JR|EPe_?)C1Ti ztaRkCpX0KJ+x>n_&&V`f6<(pebZ(D*vVr#;bDDcebn8{~h6u;VYEy|9lVLDIuW_wJ5g3 zj(2z6oWsbPr5)*IzF<7tFaZx9MW< z+QyA1`qMLO{0s%mHKfYgYk+!QI5#S{qD)&ZjjCM1*R&gD&nFghPNrvmy&!=hvQFLU zpZ~Ma7~NI=G`_DfzMH!rT^c{$tBJ8K;Y5`M=0*&8wPoF4n{!r@w^TRnluB-XyDAT0 zEl^@G+;Ijj9ZAb=QgGHfjn;FTQx|)uJW3w&UKwQ=)HM0Y#)QVrzxNtP>kev;1uZ>v zRwqAOa}PR5tgQ>?)P<}eG^O#&3OUBo^;&C!rrbT{@ByxkDmEF|$WKE8lqk)kXyEG3 zH`OPbmj&GKp!$DoD{h30ssLuhrQ&dYZd1acf?#TB3~)lBhVyyRw`)?6y45}KU? z(Gtn9)QmhcbLyH?k+kCtXJ#vPF70BgEY0O5FJtJoj%WYu`Qtn9`#kUaKF`MtkEPRn z!l?Vg7zcZ;fT2ga?*Gt-n)MJGxC0QZ0wAq&nZ0nWDcGxD3AXA6R6?MKfi7h~ymbZ! 
zMrT)A5*EY?l)!)^oR-I8iCZr>Z7&X9rg+Vejnig=1MctZ7o3WmaY`X=s-3{KwhBeY2eqaY2Tt zxLheGGK{G2m7tGAM3i3j2H5pN0AlqiI{1-*MAFxtN`w-Cmn|Aq>7UcG^Xn>&#|jEBTq=1NSL9OnVL2%&51bf# znt3{iyV+7v({1Qh;_jV&)NTAwMlV_gb$FmNV*%PsXlGAqITN0tK)AE<>w6Bhu9u;I zFl>v{G?{HO_xZ|9f}}yB@dO!xZ{5=qqZB=RbO8B;TsSHzN3ysq6Iw{%CCa6T5WDCM zi{_*8MnoA_OnqMje~wTXO@(0GUYdLfZcL#@G3wyBW!tcejmX|M^-YW^iiXGndh5#Y zO6{o#3hYehTKV~lcEB302Jg^WXT^vv7KF>{wSzx-pGO_U!>1D9uoLa$Lo5x`0gw?< zV`dV^g0rNGixSJ@f`;7#k8Fhe z%5q8t5IZX?P`*mKMT_wu_wVEWHuVz6^SQtq*=5QmU_yr(kd)#B(GVc?$e>=7m1Ap} z0*EE0X0IOh7t|MA4@el1lVK!{4Pk1mqAV&_;^{X`Xnquq_2GoRTv&nzlV7Xr8)g=hggbaJ-`;jP1Q#lv_IX!e@5?gu^d=5280|sKEtzDa z$z^4&R4(q6vF_L5Z$(eu9eWnDrKdsO>JDkq)+pXsq^12Gk+T;>@q^v5+kF6_BOoT zkU54bR`h8{f+V=P`SA~-gL3xbt*iF)$e0$jMQCzO0o%(Jj+_=st2gbx4s8~BHmCVV zOVhpjqI5jY+{35qjKemI3+J$1%Wqf#DsA=gf!zt6Clf#HvXQfUbjTMBOv&s#kGGoi z^##jj9zK5L+~gAK=V%w={PPL{_0`SlRk`;H z@>J|p;i8(IhD+N*g~f*y@qgqqq)WI(HD^+s`m9RDP6MezcKVVfUzW#mXo{cU^ao7c znLx&z^qmNxCJ}Nz8>^coiO=17_$ZEQYq|}2C-9S_g<6&7L2rHNP!%2Zqj=~ZHfv|^ z6-d0!V@RAkrT2WXlW&gD>)F{hnB?{w82Lvc>z31XYrZ*<)|eP`ShnU>RKZGH^ov!5 zEhQPU3%?n+`dGWt`46(Rlq#m0;L&Ec-6??O%ksmaRebIF(zeS*T>Wd-6;q+Yjy#Ub`Px# diff --git a/fixture/20/2/0.1.2 b/fixture/20/2/0.1.2 deleted file mode 100644 index 4d63e676e60f872edbf920bae84e6186fbec520c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1493 zcmaivc~H`M7{-61cz}o^9+jiWfL9)IXgcXAAPHV6`Xj+G71u%2wbEuXH^Zw?J0LYy zZMRa=L@ULk-E=!vGS4zh%iLBaD>bc&Y~8k17u(~Hoo0Ie`Of>i@B4YaFO@4G5GWjn zC{B!HiX4FQkF0%3!+OgAU|$YEgU|1>_M2Au**gK);Ge^r|l(wvDhZ1N3eIq5tN5?xoyR$|b8B zhRk55c6-&KocAEGQC}(i+~~UZWng+7v75iWVr=%!fl$ni$*Ug)Pj9Sn?=(tVQ{JUM znaWh;sBz4R>T~9fR|}qt&1}Y#W4a3tB){0NcbP;Of}VvQ_r^ZJb_uL4@@R>qTI&~S z(5ms^j%4q=xbT*G*zeOSOGScU8XhERibBnV;BMn7ANx-WxGlVs2#EkvQRn`q`na$Su!Ph<#j|8ah_7;oAvx zhpbn@xH4rPgZ9EsmmXKu^@FVh$u1eANy0?qDDL8Hg6m)f>S(eTb`Am{vncj`4fddF znNx}7VU`0-0Qqb-LmFI{OtfX;eC*%G8$fCk8#T)dgXOq|!F^N$6NQm%h`4|?$|_KY zCUPoW@Wz8lEEX<8f^uTy^Dvr!ytF_u2GC%{GY`e`kO2}dL}t5Ar}YP32T266Mq=NG z#H_;`JX}AqlLBRo{6NMha$j+B(xq||p#>En)a}ium9ePnV-oH60dGhH(it@uJ=mjV z9mDwwZD1!#WL>Sqh%)V^C<)fc3|lC|tB?-(%F{IwgFSRSmi~WF$(rAz6U{@o<=)~8>;Ip{QJLp=D4!#0y#D@^(OMyV#7;xUL!_oI1;59jtF zvM736x1DD)5FwjQV`on(p%OOL4R8+J>x>+@sB}v-LaD$kZqcOKd zrsC0#5zn_lK#xzcBmJ7|YSSCZWe}@U4lT-lfgC z|1pI93M+qdGlYW!TlScF*f!boz%b3GB`~V+n2F5M(`zPUGZPGlX`N;sVkIV$2Zlo| z>7c*HvcWJsHDiX`RuMgYAxYbXgGG?c%5F(Yc&7JhH|Kk51ATg{R=eI;Svk^We?aS?#3xfoU)zO|PUZB_hJrwHf@TL_SKg)MTYAhR4myT{OW zB&jOEirNds54UqS3*i@%wAH9RHGh`-AK}XRcU4l_Hzjf&LgVuI+PO%|o;JX$Sdhm& zc;<&f3%c~gnNAU8pl2Wume!Tt9uv6>;K4CyDRtBdglex4LWdPP%q%yyXyY$}lu^j8qGboU-@wCkEze{~=%%Zg7C{#^X5>s{$8xE}Q?#v%}<2^7mB}{)eiy zHT$rfoXjwT@Q7V{i4m=-oEs{%6-4V&9XgY>F7~yn8*fr|c*3q^w4g|Ziz6zagTvENq(jBwn1>3Z zLP{fJhY-UBi4$xtlH!ka91c`UA7?g9h|rqgpy&}%v@~RmWspb{Bi9l!)^5Yb2Aza7 z9d6bj4i-%yWD@nz;TB6~W{5a&CPIvBi2z5C{pI2>6&5$^w;W(%U=Y|H_{RADJhzPG z7tzt@*%#ls>U)n}_=IbVu1MJQ{hiX6&N_Z9>gf^T(Rr!(a>Arz6$}k542HYP>~@vU zNGwn^-Eg8XW$9;!_dh>LgI= z>9-UzomhBf)^;uRsa-sEDHlthkfm$zTaHU^%29$#CMMYl95R$MX!F>9NWx9|n7~PK z0TzR{=RFoDPY27S$)r3#+%spZX5W=-eAm?NWDN{$7=t+3#-o-d#y~-gis6 bFU9k|6e(H0FP`|`<*j&;t3|*BP$~of?@t?| diff --git a/fixture/20/2/0.2.0 b/fixture/20/2/0.2.0 deleted file mode 100644 index 099ced7cded57dec400a91b4f90f8f157e6e0c13..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1496 zcmb`DYfO`O6vqF((E=;jmm*jklv!PR=<|K29Loz5U*=MsaJITp;a?Z)~J2?RY@s=(A zvCdJkyHg|;0MnMY@?W7tFFpoXgdWKGfQkCFyPgO8S%X2{%EZM`2>>Lfj_F{~!;)qA zJOE;V+QlM~R>cSp4}i&e;I{z7;@s*T291k|XMpRtYJhF}L`vUX0uTm$lW`TL2dEWJ 
zxPu1R*Qs@4FEs$`P>=CI+quq?qp)!v6#SEfX@`}bxRePYiU z;h3&~57_nOtpU(}KfAUgpg2fGhvxl1NDxE@tH;KQW;!tRYWPj*nlQZMvCp>hY+Uf| zrGzuB*loQ|XOHYJRwR{qX=h#7Pz#m@gz=6Cq*Zq}YAC{?eqrxJoG?FtTvbU;K(hBy z&~D-FUfNCqb*O*IHu;h}ve17Mx|F3{i_8D2WSytDIsytEif0U-&7l|0tcc~dFI7XD3u}~N#9gU@`b5K`&}HZWpbwXW*LH@5<_#U^{-5R!Z%PF&_UaMeIYWX z{FQp?O!S(PzMzfjuXn0(r9pM>^vXi-_uCxrOluH?gX9cLSuf4|XiA2|LoPAGH(nUj zf~qn6CQO^MI8Gwn-`h_^5I!u*wN-_Y*uMiL1_`l#0JjJ%4MB zrnIhJP^PA+t|firTPgJ!7@(eOpG=hUm#MWkldRL;{9)yDI_6um<^_Z4;u$gg^Dj>X zcQnk$vc)$2>DWWT>t`<4U^Bo>e(vXuv&1zKfi@P3%sF&K;J_T#p^p&gBdV!m{>lfa z!}tjVr+e4Qj4a#YgoYbrWNgR924wB@+?&eIT!3Xt6=q>s=5c#$i$!I;1OWALJP@Hj z&;{2&?gMsuHwo&L6z18UJ>j1^5AYw>vS@TEJiJ4K$Z?Y(X9W!-LF67xvkuUWJ|T6f z?L)2d5S>8wz-iWQd?7IF3c1%+ z;cb>^^0vb|x*M;zPOHej-JHY^sY~WZ8UY_c!FLbFgMw{avbNITeCdaJ=PB4$Y zlG&*1G;Y0j$HY0k5WGy0`{#HmKmxu2dASBXa_M>roQP33CwH62g(0=m&Nnc8RWA<{ b=W#r~t)N^=<3{H7PbtHQRKoZRr+fbZq$Xad diff --git a/fixture/20/2/0.2.1 b/fixture/20/2/0.2.1 deleted file mode 100644 index 4d50f923f49903639674d92f4be862ae7fdbf7da..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1492 zcmZ>Y$}lu^j8qGbyp@=@f`N5={eME>0?Q8>20eBL_O7W84E&z2Y?i<3ubRTZ{P$%j zLk0s7)c^Kc$^2;og9(TO11adtkH@yUVNZ6-sd z!;}A9E6iuAnwWfF={WhsrvT=xs-y;(g-{yoAP}3W|Ng_!fd4nW7BDdI{$CjamB&U| z2LMIYe1L>9X9GlmVCQ5A14_!~4@-{%jL@QKpX2`0mTV}N0d7nj(~S)~i!_SzS#KC9aR#si zIS3e>)(8unt#Zm^LR!)Sr_Kj^*jOC(8kJdul$sP`op$xyREtpZoZ0O#Rb)dHJCnk6 zhlv?5sat9@fKok1A_|urxYvsdsm$l)iKdcyu2S zVD00MOZs*zXo}g{NBJHTRXX>?cuY!ElyJ6iyA`ggySb^ziKRhVS;(U)XTmn8WJQoc z%JK{jOxHX1DkW+Ib?JLdWa(JI!kFB2C24l1SHNu(wMiygfdg-V_+ErGLgmlg%xwovO3V(0=Jyj5b^Etf-18jK9w0bL$5yv}ZR(o}L0QfdG> zFR&|jsit$zg;^lik#BDL?Yeu$iBUek2T zaoQ_^Wol_Nx2RoS?K3s?>1U(g3pIEZi+)Zl{3|qhiWZmr>T+(MoYkk&LqiiQSosCK zR1l_PW-_Ska@9J~5d8ML+BatTA)g*|6u&ZH1t{%^W<K>s%&jvi;m$=nc$B%mKCsPZ;QRz#OUAivF7=;rT-ER3a#na z)hkOo{zQ4=N!Qupt?kkkz6D*YzyA|SF1(_3{nXuy(@sB9VETPuRzCl3q0_4WCKto} zTBE-I5J^nD)^c5Ib^i%9ca^Ty(@rma`gVx|)8qXTkLxWTt=b;xvisk~u)SY@B|Hpk zxg)@IF3c?N5yR$x$IkziaXFQvoMps7Yai?8iukP*>qU~X%DtH_p6pcGfp zz$Yc{pd+bRbvdSiXTmN9o(X%GTu5UJ)VFb9^N3~Gkkb0vF>=N!)kkNJ1?r1sNBE=) zeLljF(t3fL%_HuXz(vVcBd=Hnk&~JwOh>P*UDjCnh{4F|1#7d4e%2YD3430^cv0CC z7$=E40P&V)1EWraG9(F9YkgVMk~OAx8%w(EiCmk=wNz?eMT6F)kQc07OQzaxQ9Jo) rqi@LK;G;8FP06a*ob+l+?g0}v1_1?NnZ#Jry;Et9hDWZ!_7DaD2!vkU diff --git a/fixture/20/2/0.2.2 b/fixture/20/2/0.2.2 deleted file mode 100644 index 30fb04234d87471cae2af80c2158cce542c6acf7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1542 zcmah`X;71A5PrY#5g`pi0)qq)0)`L}l^iNmX+M%M#sGqlPy^c1n1M)3rHB>}G=w5& zYyw1xAcRv0Wd=b!5HuXI1f#?R>N#qy7C989VjYG`gZ^m$^_kh7-FNrdXLnv^yqHQ2 z6_8^EIV&A|fc53Y|Ai5L=MF+Gsb7kx>hgpqqreaZe!6c(F`ZtZ$38N$g#6xA?8Jsh5^+o@&N-<4}pF( zY)1ot;c9tP8wo(KP}Hg!0F5loAW+`&5@mrZA>R28fcsHl?IjHbXe3XV zmy}v6_4kHJ{JEULAQQ~(M&C^Tf!nG6pdbvTkim#8e2-U;-^ zh@94-evDz;?5IuJyLAfY(y3x6eS}V3mM>NJF^!Zk5^_8Yt2!4M$gbb8p-o9l0J!aN z9=QO+>lLWHcM<165K3v!E68XzT&tZ_cOEg^*COE1E;_lPb}wz zXmx$6lCzYg-6@P&7a43SBEdNDVM33s`i5GnZXti}UVh<0qDt;tl#8Wprv69+6lLNm-@r zTvAUbjolsk6g%^!?F!h^IoL^y^T8TY z*>WP7CCZ=P@>*(BSR^fHmUzVYnCj^Sd%u5|%eNtB!#syDlnwF8=DIov4oa5;tmXe* z0w1gF8;VAz-h5ZueRq)qJ-PAYqeV; zJkWhSVdW3-~|C=ylw;U*HB`?^Krm>3)6TD*eV>R`Ty5zdUP8 zOi&6=_r5as->3{3$*5i?J$WD@w>3Mj7Y9f7Zgcm*Bh=!Ko#ZC%wbA#-Lci1z98##V zTNALbohqA-|Ktu`o)^@7|D-LKng7$FMBgL8`&8#k&#O;p#8GeR**;Gq%DXG=aCxVE z+(Q)1_v38GRaV)rD28MCSvjxcWhj^N%{6Hq9lGF=Egb$dW?voavz@{08<8N%gU^xI5e*AQf5z07X8Pjc{X7jrfyl}VKaMK_JEaw}-M?)6O>KUO+B z!f*fHBZ0YpLAV^_j6eg-A#3#3GTkNfvtdozL&4;Ey9*~rs!&zFCodh+RPGI!OF&1h 
ziBFl-h;kMD2zeLcG;DoFs!@4~`0b+H*5C&YVAxs>XxU;r=q8PMo1$ro`bdgP7E0np z^Mc9Nh??+_INh}7;<;D+SW-aYd91u^pb6Sj5@W8@m`RO_I2|pbGHze1rlrI=j89{Z zku!u?b$lkk$~&OY66dy^K$I)Dm?qLChvHlftVjWp=q35xE-K{Qst?Ysp!h$O)}rDt l8dm_E^nM~#(OteuS6dNmj%1i4W)^nooql67u^HQ5{R6dEg|Ywu diff --git a/fixture/20/2/0.2.3 b/fixture/20/2/0.2.3 deleted file mode 100644 index 3a818cf458ef5c1d8a478746f7484c0676508d55..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 647 zcmZ>Y$}lu^j8qGb4BM=-jDdOOe+XEG8ysM~d3c9JR}cgHuMoBd=D)8(`Tu$~E?`{1 zz`($)rq-=uOCo@VkBuXVpvI$5U zJFaXHkThakCM>{EcPSvFPAl`J-105CAq*02Z4E&4fF?Qs2}aidv;V4EMj32vz5M^Z zR$REi*4T2{A(3&Jo8rU?i3|tae2tQlq>@Bl&$Bu7(Ay$$$IEgbubm~@o#L!sCLv2C zwUyi)y_hxHI8`|hoNb(Rl8-GZlGWPP>ZFIl@}Om64pX@~T4n|)T$s3_yx@VwoQEfh zp6p~iy|K*aj^>Pc%t1b9_a(a>;eB{_(i6)fnVlL9T}d;IHD^pYm6meUkWs`-QP7fw zNqB;bQl|?`u*k`oK1Y@X8>?w{a4ZVsSS0AOh)J{m{KCMS zwRzjEE7v6@)ywQ3{C4osiFL2D4+=Q3y!^iEWUBRO;?FNjJP_LY2m8Yitu@DngLo;TLYOEu1O8i zJ{!#$;K;|Lv_heYw`GMwI~ya5fVa?sCK-tUfvP_Vg>{PzT9h|WN#yi0=t<`E66jef W;S|)8Ea4myds5_~lVjAGNizUc>k}0K diff --git a/fixture/20/2/0.3.0 b/fixture/20/2/0.3.0 deleted file mode 100644 index 45f09ab33645a1fed1a7716700a9768789786b08..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 728 zcmZ>Y$}lu^j8qGb{1A9HiGiu-KLp&x4Gu6>9z4gx5**6Da;u3^h9vV>h0Fz+3#53N z1sF0IQZgs7xiv^iGA`qM#gJjd<|}3EHgCec$^QRCJ&kWI{u;2VL5cyWj)8%30Rw|{ z08azMpnuC4qs-&?s}kg+T^>lhaSnL)QDKzQC#KaA2PM%FSooTt|*=BXq^=H%kvOR(Xxmlc!=m>HP zY;e&LaDg(M4hn1piXQ|bFaxB3MNo`kp~C{X*}A%Y>(?#)QkA(WQf9i^tmjYHU0StU zMtu9uC52oa9i=V(i}t$C5nrgPaY}RLv5Nt>7&gi7vT_JLFlWQj?iakD=1gF!`|*>} zN8xFjApg6Wdl((t8X7h?>aMbO`jzoFXc9xf6Ga8PvZ!n3cC${0OQ*(8jcY$}lu^j8qGboc!-aIs;SRe+bx(8ysM&Ja~?WCDhhowvq{(e}Ju8!)3J@4!n}Q z3@L}CGFvV#b4X!eY%wvI$>ugI$wXq7L9gLUpZZ^`nr|)s8nCKCN{WHmt$~4oaRCF6 z(FQ^llR%6My>{ET! 
zC3D&Ts~YP6Rx!-*QD&47W?(d6nBjIMq0NWUz;PMNWrYJQ2NaVlA-XFr%D-r=ZKR^Rt&;wEr-}UH_br(gSa; zrl6J*xs#C&$~)RUg%$_}FI<+O$iK)@?xc;!?8UZK|F%2dk8e0vDjr%G`tnaq%GyoF z>8a@rr&h1a-g|%bw5jaPx!Jk@4mO0gzHQ(Bv+B}p@#*C=lGemV*3K-sG-sLdve=}@ z4ocQa)>^NHUjIrmS$3lP_=)40k&@a`*S4-b#lXNPz#w-47^v(D49pD-)u7IYrAd91mqbrmK3I@6W`4CC% iDJz*en!1#@lsK9?niLi(ESgtXt`gkJ$!Y(ttpETpeiV}c diff --git a/fixture/20/2/0.3.2 b/fixture/20/2/0.3.2 deleted file mode 100644 index 46922e63ab0923eb3fd7c2f180d96a33f5396c18..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 736 zcmZ>Y$}lu^j8qGbEIpRFn1QMPKLmU)#|a#mDv$H=u!MXKsOooN$lS_nA0laDl96Hp zB-v)na2Ckml}a&WIKp7U_$npAXNJScS^gduDoyHtt(t7R%KwVe@>h#ePIg=t_F+mn z|2LGO$7fc`kppfF%)!b&K59MlI-9d*%rr`1$?TOfVPL5aG&D5H+-hi&#K6GtKa{~% z*Yx{;wMHuNGh^VPH_4v22FNF)ej2&#qPMJYmdf+N-2Z zHhHs)^9nAs$myGl&=P3n0tHHNpuAEvjVm>(kG>FpT3&0c{)?hlzaDUTVH?YczJt$ z>gnzKxb>duA3FW(YfEhG)!5|S%fg<@->%JFAHUu8YFwz*pL<4|-aftiUZl`w~*in&Sw4Ha>p~Ht#E9$>(esS5?EExP&yuA&mMTs3=nB&GSjZ~K;^^Wi5Gddx;vy0V ip#@n5S%EwkAPtmss;a6$m9sze&SN>35Z{ioCJ6wpm=h%c diff --git a/fixture/20/2/0.3.3 b/fixture/20/2/0.3.3 deleted file mode 100644 index 036ccd59c08bf7c5a82bad8fd744db042bc98c01..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 288 zcmZ>Y$}lu^j8qGb+@IH&$-rn={~ruGaDxpDyVH~=FkfK+1O6+sGT0iv@|bcM2>Y;{ zzv{!l|7isSgEE5}Qvw6WBxSXkhLXQl&hlNpsw&rHvEpJTUc-N@dVCm-QY0oWSQb+H zaap|ap3XF{$;NY!oJ~D9^K@qL?dZIf-j$DyLBItF+*u}hssIthLI(D~RSVc<*cr5A z`F09#XbIW$r>))R=%Pl3o4-Uwx2nWG36~H07+jw8SwX#d=cI4T7pMNZBl;sHB+|Ju zVt((B&qrKb8kB&bLdL~|Nzz^cXe^5Z14GOswHZoI0*6^z#DDCbq+!k2LQU3 BbDsbJ diff --git a/fixture/20/2/1.0.0 b/fixture/20/2/1.0.0 deleted file mode 100644 index 51d1e0b4e3f18d663d0753edc36eaef7e77a9887..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1512 zcmah@do!BX+)p-yMI(#CU%s8>{L z9*a<25!S5_hf-8ED5o9mRCS%#n1fQr!uH4h=sCOR-h0mdeD1xU4=s>OCcAKm-W+KE zqdtJB*!dAP~8}i0RR*LZ2q;|M|IT)0Lu1^peiw* zD-|Ft@|@>}d{xCK)|R}gA3YoLs_~|BF_b~vROA%Jkm8VXja8Hbc7%5gnsS{#`Z!|j zu&u-#1hFVP+m%Q($FoWda_qS1^6DH^&Y^NqhM2|3E_x1UZ|E_34L+zCU!Wa9MU^Dy z0LqfFXbONc7&5}#MW}$?1PD{0Pyo#wfVz^ZOr=j1lZS1x@{NUjCW?x2*gPp!x&__J zt4{^m4zK{S^;6WpnEH7S>3G-V?9YXsX&$VRhb^k_p^tCYjnrP${sHfGvb(NUPe-Td z9*Froz@W!}wh_Lj({#C76Gzy@?(Ks@#21Av1g8m`aeWk;wEXh^S83+(b=Wy5l?p3R>TVU`bHPCs{tti>RA;nwS{>S-8|Cz13Jn9f`BE#03cSL;XT~yYW#p zE(~Z`x^?5x0VB?8cL{VI!Wa@3c0Z!imRDuv`sFBbB14BMNw3Tx>fmA}lE}dP=02mK zt4vWOf!{Vj7o*kMirbn8DIBJvy1(3DvsukWoKwub~p_vWvR*^ zG%G?FY3JFrlUpx!R znq$~l2&M~(kS~M{+1@-tn3*m}Rmmr8JL9@R%x~yk2(6+b$)#435e++zO(RS#Y)Cw% z!wtdH*Iyea#jG=JeN67`IKGo~FiYIkQ2X-y`f!F^YbxmWUg!Oz-_gtB4>BId-X%rh z-@T1p#oqftkfHvofBl2jBYsx}g`rou9deS#z55wuy>m%Y>64T~p09AS;d{IAWp) z$95Y0zNkHaz6>lME}XD=ja=re7&xACOjXUwWqndo7Xe4L?yB3lf@F@&TUe3z@{=vd+x(WVIVKaWnVo<^=lKi+KSl}<89 zQx|I9CwIy29v!lc6V%f0cyt8~Q&oqS9?q_nAwS%BJ0q{`sd&8-B5J9dM|m%&_!V9{ zq93-BCz}SE1#DYeFZ4`9NXYZ}Yi=FA_954kz|k9%w)G11JYTG>Q(FhDdcOaVkg74g z_+-o{p+B><3G*(=x+dINTI0(?dArCDE=A4zdKc2VYF*lC z!}ZPCev-v1k#A(%F^(Shr)zb6HmB8Pd|`vNA{HyJ?9Uub%arR?SqN+S7)Dr`46H$d@IQyI1JhZwo>X%^sE7)~ylUE0p^O07J8IWku!2U50Ep0Oh%7-opMubR@$%{K}G^Pd8@D nx<4_qxTDq-biXS@{^n^D=}PBm^q= z5f2Ul<&av8iUl+ycz{I%1S;UDcvLzFPM}%^ffAS=KRVNIW_RDbxAWfHKTF@6;O*@n zK?;pXig14oU}u;9cXaS~R6wNyNXR(=nJan|RJ$O2&5?rd7F?@k?xeCyMS_!Mz{_F_q(BacT449rrC zGuAF}9Ma13C@{D-0PZEdV@ByeMvgbb=*9Kw1Q>FZZv(Z-qwFUVA5U4bcK+7j;1{Zw z?&SOJ0%n-gRS5m>H&-gG|GUU4NB}1G*KagdFV2+kfo~N>eEp59G(m@uF3Sj-4!~%u z_W;lVvsGy71$Y3i{fqyP6R9|tV~PF1L%~_83oeHqX>)wi6w}@#p_@)^^6;tyxKg*; zW8-viksPPSEnvsBDIyKSoE&Qy%$!Mqctmm6Dr3AvoGg?WYgp0N@#q=SR>H2$F1#KZ 
z)yEl=jj8!~y8>fdSV1VgzzDW=#+;;P$Mmrjwoc?jnKgUN(YO=YUimrRe2r2j6F`BL zKvhUG|MBu6Yzjj`WNMMnuC%+kQDdBttdTZ;k2$em8=7s-BRawl!pAuJy35IghbV@M zZBe#LgM(m&P6`gE<-?7*=M2nrb%arEAQ70h9kb8d}P;|d6-Semh)QLJ;3s4dvb^(dd?T86HXZn%JX}xCfPa%tU>JenoVnM8!)IT_-=`MmAl1?u|x0BTOsW z`7NWO#H1?^r~9jg_^pz;zOOo3<{i1%uBV6JLwaAxqYvJmjIkPi`7?nZaJpoW7agwc ztQeVIFcrM(&BIUK9r=1}mb=j&??l?jrZqG+{V-=z*I1F0y}@(-!XpO=)*xI{S_~IB zvg%o>nH&Q7l%T8}f$}cOBD^_=TLc#Q%e$UY9UdZqYU8a zgXeRf7DH9--JRFYn~0J^?;WpHs(hm2_ZMDOZH=g&p6`F>UDOSE`)$J?i%1jusnMNN znt|>(J3B+rv)jyymUP7lq3CIH@|kS4)kI_42e~RYPae7Lc-;J!EJ*riD*Ll9l48SB z()(1+b@IsVxo8_|Sba<5mF<+pmV&UP)`s-527p^S?DyOd7BZ0}4DVUWe=;ni3Lx#u zb_ICx^b2ekul_xHtfukAyvZxq5vnhGeBAdObg+~YZ5e(NUe_Pqr$1i}oxK?r47FxP zI-8=0WLkHsDfeDZQ!pxIqDfRi5>C?Fz&QR0u*Ihp`Ip-=Itl=W$H!mV(v|Kv%>-N*=`Uo*P$@^otjY}uX;AO3R>ah zAr*ePMO?KnRf@^~3F2R9-GqpVm%pSz3ax1_Wcr)xt;t5Xyc&=`Fqg8^4#qG+ynK@W zPcBB=>-16qdeBw4<$k)1mCTCVntg6Vv57*>Y&Rs4E!Lsj+3q_vIvGiw5U^yO!lVlF HjX(LH3iD|> diff --git a/fixture/20/2/1.0.2 b/fixture/20/2/1.0.2 deleted file mode 100644 index 861e85808e6e24f96ff8890df81ec370d75f2175..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1486 zcmah^dr;C@7`=e7D!GCWR4gBa3O+V9GTUr~pJ;-Q>_^nZ6huT3RhSB*N25O9uuO6a1E*DiC zcBm%uY5ib`&R@icbAsqv*F8`?qLD|#Y!0c4Iv&E1wSj<|-77LUXF|FUbvz33CU74B z-oq%`Mfp1ObuHTQwn`!UG+!bW5;5Y+mDLR}Vj-g1K;fh402t1?Dh5W~AbcJ0pH>>g zgdNpWMjlI1o6h7wXDH<`@hlR%;ICx5O)viFyDRTkZL)VMeXtgr2`Z2p^IBOyt#=mjMnVc#A11Gs!_^ImS$zX>$>UZQVdK z>WxR!tR8i_J+N!Sn^f1IQ>A)JyvM%wM%Ia~v{Yq=t|zT{TaB|HL3<-|7v+7$4`iE{ zlcysQ&Zn}rj}iVna7Zppt{fevcYz2KGQ*GTS!?w)v?4SZI3W%jx084t_DPdA*|1nEqkYk?r9EX2%de$G;K&Foa-q>! zL2EuInZ$Em{edna6}ejcc8wow$QOkKS0@ZZD%(Cuq+#ZgBd{!sa55?Xak2+orj;d= zLzRUzFEY!!Aa84gvW5M9lK#e&|E_Qdkgu_<`K`lrYX=f zRs3ohXPS~yjKXw`00;9QtM0W5u@|+0l?qLP{$uF!M28NX3WO7M60}!l*_F3L=o0zM zWgmUV>>zw^1%FfFTdH*a>x6FUG<6Dm=+gInqM&z#guUHASBa;Z=U82IbpbY9x5WZF z_3nv`1YL0t;(Ys_=MF-6v`O%3oX-y0bhP66!*r1Nlk!#vovIqFw;z>uqnr!FOUHrN zp?ZuNAA&LSZf*f|%%a&mf_2~d?v;3Ol%Em>QllpKOJU4#*d83B8|p{9nPCrP55kIJ z0e=u~_8>}E!>PNWQDa4eY4)(Y)<2z~mX(xbsi1zaBhga`AKc=Dlb&qnMs7kka$DoF zwR$IHd39T_sUpt6-@*{gNmYt+s9zyhxAB2|DkekGlEMS8+@5zU*>P*X4C3NqaV)<ohDRQPgh8&k}6;V~9oHuY$}lu^j8qGbbjwdT%)s>L|NqykfZ#t6`~xvC!2!l=5AHB66*#%z{z_$s3m5zE zt?Jx=)#b9YfJCoBkB_pCFE1~%L;jWJ`$LwmoN)ecsJ|~~^F{TSJPUYkO?;K0>?_GH zXe%tY;_@kbFJ2R7hYLIk2D2EKT`^)fxr~KbkRfxnr%6hkmsFx$f5?RsmmL?JIFgab z=BC!ez|e5yLUkqhb2XlP1u*0N3nq^p{-5f5D$w~ zQbKoFVp_D)(URyIqXJLv>Tiv<8y|0DnDtPJ@i@bkwt1Ed1P?JS+KnP(^=*xsY?LP4P1QJ^Q~{DH=c#utn) z%x#?0$h@HPgQ`GBxg$gKg60Jcj(1s{G^R2!X|XI4R%nPK zOI5LD%$&KXt8>wWv@W&IS&lOoJ%pn?ehQ_&oFG7lyk82 zi_op(X=i_SM!w{bThpkQlD|e@N`A}jc{cYMjYE|UC;VgU^xd9zeu`=RNs%;Psj4RN z%%dyJ%!1SoTId~^^LF}*)*!XUzkxe8>~(qSG3(^xERQZPTNkg%w~ouaWH{k0Bg(oa zx%-MJm{C~Ow8F#4anh6vwvHisS_@W5Ts#<}XL*TnLhA;1R!5L@)B&!hrLVXa<_H*0 SFyg&ft`gkJ$&&Vs#{dA80T#mm diff --git a/fixture/20/2/1.1.0 b/fixture/20/2/1.1.0 deleted file mode 100644 index 8889dc6ea31e1c2e75f56693ffdb5417d44d4614..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1478 zcmb`EeNfVO9LK*r9Rd<6h@!3#k<_L@61uiZ@MlAO48;W*Hy3TDQm)!V)|n$QOt--2 zsg(vKqW0j>OQ&~bn%L~ZhaR-mEz87hx0ac;Wiqkbop<}AfA;y~d++bfGl>$+W+AlwJQL`DGnTQ{0hb93da5ujfUn}DMT_6%`EZt{|T z^T2cw-dM!u+}GkQ)9M7fI9fM&{JGf4KpX(pl{!py9IS3+z8vgQ;=N#J^6(4gk+ILI zfUU)|@;r|WaRuo;crUNDc%z(S7{SbEqdrp^xu(34bP7XyGu;1OaQ z^vjZ$Nm8yIYdX4c%`q2gAAPgyGJlz*UxKj!aarR((t}fbrMJ5_iWdo$S{m{qhh%8$ z5aGV-rcitr;}i|@o?#63ocxAdhajErMHB_}QCijcv!StJeSibGW05E@z+*jNP0$g= z>DLA7cvSd4x5@&`k@$jRspPR5K^keg@eZe=w$ptn!-Aocx@$G>oejP;owFi?bYWHUubYdhcs#(ZsJWYUw?EX6Ql1nv#mK 
zuI)KcQ6Z5(0hMB$bey}wM~3$7I2SsMMAst6d^6Z_=ED5^#6@;J$$|bfUj(V?QA=PQ zX=%TDjsyEY6VkUwyM@r>a7#xQ-xC>xTK_u_^A>;EQ=fT(D;c@-bFlW+y_vR>y~EE| zwFl-gZ@>6RE~6$YM}KY0AYJ)#we9_TcKEDf3PGouw@xX$ekr22$eS-5Nb)x|HSdt7 z#O0l+cIQ`BomRX+{yz6(uGd=T=20}?m_#H&t7-=N9tsC%2Ewd<*u$sp{vjNHY(>Ld zZ%-FpE(uVd)dw>xZlOJ|c>CluBB#{HH)eHjtcMaCP0WWgcXQZ^{zbCdqKZ>eV55D*dQSXQ5SKvSpiLTkWq&@kUym^o*BMj zB}EZR;iJiC*hh~Cob)JNA9R*G8S=z6rMo6t{X7kOt?Ah(BettF)BQ&xJ6u1k>6?Fg z^&qhq_SZt;s~sAgXG#*mt=ZZPUM?UzIwMdEaQRHRZ=kbcaQBndYQ!o{Y4UwuIlNM;VJR`Lme#^9)ATz$s*W)L1A0cbofxmtwA`ZuV- zINyR&+N3QX!&+4u@VQHyfGWWEl7l){{DIJR;yPbi||1@B_4S6B^tZh9(jXAi)rqpg!%VX;$sqC0kB{tX$ zM)A2rbCV6n9FV5ePI7xS2o5PT+f3$8k)=si88g@9lp+?{B<{(bY$}lu^j8qGb+_E-oB?C*ue}dow%MTd_%Zm&QA*&b|>Q?;qe3gII`L7lOcu@%YR%o6KqZBh=w3C2 zFff?d|MgmcDviuz@?j8WU^GA$LSr*2{<)=8@kx*6o$4FTm%^7Vrk`8s@A~+8gJ6(( zBZr~GjV_NylZnosts)%+Jq46>7=Jl1E z)73f0>88=CmZr_JW}#aOZ%5M{R-Qc~N|O|vj&d8fYm}Xsl9qI=)lbIYFpENm*&Atz zfE-sBhjvyz$qh4Z3iX_cRXxb2!65KJA%a6NqeCiU%VslSmLM07Da{*`8W_1*bY|!T z3i+Ig;7k({`_BlB`dM?*1!A3yPtm6joCYQ zVeqGR>kliA@pHFro~&YY=irj1%T_0xW1Lmxlbg0pI9bTrZ$(YK{K+qWosM3Ow5&^> z`Jzgp;%0mE!-~528MgX9vCs4sJvL`WN=8h+S(d!>L8qYKd9J4~8J=t0y}9`6GDE?V zD(4&1g;ed=-?&mA<;Xo#Fy~>Le3y#Lwvr5S6$ZNYWknb#B(wIw4aZEHFp6wafgWhk&tI84}N(vHLg&bdiqLMNWML~KpC0w%mxi>*=1 zR84~DDma!D$aO@A5yCeYvIFY+(SAVK`jI@>CPRUJiXUbO#S8pkO0RK#sI3iT{&H0P z$iHl{ii>`Cyw6#ceTrY2fO0RV_N?&)QY_Pt*v*;*)Tdi(3^M2v=Ukxb=skI+@?!g@ zEmD4T{n8abb%A57ZPR3)>*R%r>y?)Fo;oaWDApw}qfNu|+#;n8cB{RD;uofQohzQE wm>~1&wZMiI-qUm=e6Cxb7TCa7TG|oett_cB$>Ui4qjw(5xrD@~uDYrK05cqGS^xk5 diff --git a/fixture/20/2/1.1.2 b/fixture/20/2/1.1.2 deleted file mode 100644 index ef86481fe4c7c6446cc1a2660bbd9755b5e2fd22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1494 zcmaixdr(tH6vp?400|_OkO)K(Lna~*X#pb-TQxwyOAN>>LIP62fDnjCi`Lg*8igW4 zCka9kV$JY~Bthj}EvOVw!3P2o6tF@^5EQiv3aFLz2B*`p|8!^0**)jmJ>PF;eRxT( zuHIa`Fz){A6So0ctoz^9gMKdpp4I>gtpd^c(P4-|4{x@Zw1}s9S4#Y_~(P9PP~DR zC9zT9$p*G!Ku<1DVjKYZxbGEoNMh7QzprS`cQx`SCSYuss93=EX9IATLG(}fN>q%P zAr*ux#)@N6$s!_=od-~{b`aeq4**i8&P4sg`x{Kfl0MOrt<#IV%aZ<~{B)8E$5oIa4U!kWVFc*$f5^(MAEZQjVg8GhPxMu)XX* zhIA-sUwzY z)$*}+AU%kV@r>J7sxnP0P5UJ~G8KK^BRT}WB(C|<>}r=)lhMZAc;8mmjL!>fv-o#T zdPdFi#PMh5PWzx;n1+5HyFx!w8(yly?R`FtbcKG6aA|1KU{!7fEM|s7AaWNsY^ku) z!8f%(Fq9TR6*$uZTqLqH; z^8XUnW!n$K$ar7|>csYoe8?;*I!A#5H4pSED>QphQ9^@d%pM*ri?b!I^&i?+>0IwE zG^cR~$yP5$MHJ+(+wSgZa*s;g`#43MQ(WmPZFw6cp2b3!+D)G1Mo+1=Q|@0n%h5Q? zqQsP8ca>nQ${@&QVRGcSWdz~zx8Yqg+1nxof-5QaSNVFKVh#&0Kl-l4hGc*1_I{-n zH{Bsx9x*Y`?rfP1A!Er7+_;~f;n!{0JY5}8!yaIUm>3t{G^a**Iz`Zd^*H`2ogHdN z{k-METq63|ris_<3RRL6sy+VY_07i!j@z6p8YsqIe$zOmE0JKivtfe{NLMS*EGwz_ zP2tsDMtT6(TTeuICXZ)(^`Qv0*JK6E4nNn2D=qWjf)C05I&_k^n1b0EwzT6h4jg9Fq(}%787AAEmOBtCWZbAlveh5ES@RZWvZG06$A&f|E2RpW^F}NT}Dl z8U;JY0|VL`Q7}JCvpyj&J^|J9G3~N`WdHAcZQ`C=@eZ=kPJq}@NMVlW1e0pz)-`Xq rw|SDs5IlTR;nm?1NZ=_8Hu02J+>oqdpEOTf(dZc^P#-zUimR)T= diff --git a/fixture/20/2/1.1.3 b/fixture/20/2/1.1.3 deleted file mode 100644 index d7a32896871f982c24154484125b44fb6ae76329..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 660 zcmZ>Y$}lu^j8qGboVKCs2LrQT{r}gifZ#t6`~xvC!2!l=5AHB6bzG3S{MRIAHD9*B zuU7Q`4VlTTEN~&Am#5)?FWU^awomiEdZ@4XYX3LXKj5<2l9RtoU8GWsukxB)7BG|! 
zzO~HPDD%ms+?6ZX6c;Y`X%|867ma!;f=3--)vwP39*g8iYNR}b?%mbwnpSYGViy+wuGmk46HkWA-`y69N+)woyT%6A)#x6@eBYggdLp# diff --git a/fixture/20/2/1.2.0 b/fixture/20/2/1.2.0 deleted file mode 100644 index eccfc654fc3d06575101a0020fc940d16027dc50..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1489 zcmaivc~BB)7{)*FLd67B3fnP2LbE6!a&+62SX96Zg$|ERQY0ivN4KrDLv+PO#TLn2 zk2dilk1We$&D6})QCADEHLKlpGtDWF+Ol*i7wey$ooV)&`JQ>d`MvM^d>pqJ%}WDouH2px!5 zZ>zEu0$r4bW*~`)8R2^lN3ZbHrI!99+=_rKZK~3@d`NsjVM`S!&IHnE-3M*qc{DO? zWw?lz;6qf>yM;qOJ_A&m&cc9BHt^KuW~vat01Ef?gkiD?k7jKgcg>V)-1+ZW5N(16 zh=+V+;R#gP$Kd~((T1Dtl|?Jp%IE-6;=jVzKk~COB?BK|*!Ect0Brwpg;){ppqrmK z*by;V%xp>Y*q)GPwH_7V@29fo_(9nCo^d6SF6cXFZM}pdKe{HWVn> zbL`UuWDzIHOp0>($PjR6)8+0oUv&!3(L^Mi)y$T%^ZI0hzU%?(^@hyZ;_n7-DV*WM zilkzmmmxMy;DR2r%D@ZgU-CrSKkQQ{<=Gxh-zHrP5sk-Q06uMJaqauz7 zhxo}f1S>;^Fv@%;pd>Y9ms!-xt>mmMLRydfCG)H)tR2(tJlLAu&_C0yK!J=DHMW32 zl64v1XSOrQ01JtRk_kQ=MPqIDP^N<50kG!le=o6qzzIwpE23rL*;b4tJUb;NAYWQ` zYLhg7?F`=IB~#bRQXj-wiwdH`I3c1}15i4W!AAJ1(fUbNE=#`!B0;jPOoJN49t2i0 zlM4k`TO<_Ux-v&;z7-oq$f9ji6ZDN3?frd&Ib$s}y2_BNW?Bj5<(e>V2kFz6g^f6<@ zBT96fxQ(Z67B_f@9ImP@rAR9>tFvmb6VPP2#SRz#h?6S$@a?JLnw{~TyK(lLd)Az+ zIkBttnZF|By@j((jeUM>%>Cmm%M4i@sE*n;kdL;Utltt>Hq5--LX}8v+dFoWyI_W-Rl9IW*h9A|%DTHnZs#R{>Pq1B7%IfJQx4^}tA(-gq#f2)jrpm(CYXWTePq^d5v1T!X7;SQ8!7CS7 zm0$|-;L`NIJ-jG$eC7Ejbos)?4~$V(7^=NEi5#KbIaBy<5?5I;Do4|L%T>L;|kKX)9`uOnjql3H`ba*XqMbL6}!sxr+nHI+iRqifgW?L zwLbe_XoDlj0ffO{izl+Ev!9H14pC%Y{uL&QHW CM{6Ab diff --git a/fixture/20/2/1.2.1 b/fixture/20/2/1.2.1 deleted file mode 100644 index e91bb920b5fae3bb41ec7eea7a243d6fa2a1823f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1492 zcmah`Yfutc6uux9hzf}KNJ~H@H8xSn+|3dZ6$lbhe5K|CQ7lJm+F7$vNwM%zAzCT6 zX*!y*X6uHwJt*^)mX>0gXb*LF+-z-IAMI4Lt&9HHY5KEg=6rL$bI(2Z`{pvDQ{CPD zqR639P>MAR80+#@{x61L@9lwiCV&W<0OVu&lkDKpP0LMS{ac+HtOjaDBE;v0t<|g0 zCo;mD2GTv&H@(~bc9c!_&v++DlvI!MFY`zI9sSAVG;x_?xL|^hg(XVr1Iap@k)YS* zJTLlbW*4h-L!!HTg=rcUs642!gbkwpqVm8%`cCLhE}>DCKrPQJ6rjc{RH&U{z%#oE z06QL9T)$kq_Bw41KLg9$6%+M|e;%9xz|73|Q>gH>voA|h2ZRc+Uv@v|7LK&COiS~NW3`yn3eq4CB{g-$i90P>Pm#& z0w?%@Fiz0>n|iPm;>3CMd?m#go*xdnrTBPNq~_YU>$;LwmqfaV?j`xdnPw0CTI0Ql zb$CwC7SF6;J7P#x2_zVij~|yiNtFSM$pmZL8|`P78`?8?-3SoS$wu>(RzLR)YKmmC z7$7xn!5m_ChNoA{V#qY6aer@yRmx9Nsi#S{vxf6$i^~0 z5ZN+Wh$_7y$0c25Jx2^!WDeb7DH8~m;%do0&R+5IFqDpAtO+#SWL2^ZTOer|HWm@p zo*F=6yuY>qB=)nW)K|l13cW+}2*A;i_79&F9RAMX3tx&NoM0A)xB# zPVI2;q1#eaxixA?+}w!gk^KlwZ^Fqwa?tN|<3~ad{b8uKD!R+5`~n^|JMB?{$Gv#t z=9W$_pw>2MyI+A7-zNjHc-&GJVoj>=qJ1YYmzaHW_N%mOI(GqYly&?aXvyU3FY!Sv zvKCAcvO_(Max9j*LB`NcNQ|DbJk>mqQ5Q!U$V`l*j429BpB?a70FfQL&7~2o$&{VV zTQkSStp#={D{-sXYZK*JPEin&HrA(Z6z(1sHYbPOu|Pw(>T~Vt|2kT4UWkyZxaoRX zY25%UA2|J1tga#)7B-6t=LXFwAlYI8*fMWtD)OrcM@7O=*M`^XF%R`VBz!;pD+|s> zTzHX_d2!oMx;8f-VOT%1?n)BLO#%lRw)2r;VXBh<>WAnea_rn7ABYNv3^~VJ7oWW9 z$v{nJS#jrGfvMuAA+^rj0n@s;EUAG)V)~IIGF(;Zs#${su}0P#;%5IBmPaG2&?`O9 N%Jx{2K#8#q{0nO}REq!r diff --git a/fixture/20/2/1.2.2 b/fixture/20/2/1.2.2 deleted file mode 100644 index a73b53b404a2c5983d1d0049818d0c1f01afc18f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1570 zcmah|dr*>D6u*d11SL{JG6o3|NL$gb!jeQq10P8WhE`@s`eC(|)|Od@f+m|bh^~sQ zrDlrIV|&RUTV1Oy-OqOVd(WNUz4x5+oAWz&?wqJ}n8S%o zW2B_*k-1$3P7kO4*KCfH+)*$(3Q)?0?d>YT&`3gy|NPtp`dL-v#uG!zbsJ7PuE2?kywT1gRDA=}>XBCDx5C^*rR2`y?c zx)@0VkKA5VVhtP~|E&XWW^6^7IuS?akSQxV!>bt;C{L!MM`K@rEpeOD#=#SMAyg!R z$?$|JL8Z8>fyiWRStS5u*cPJH!7y3b1UpAF)QsZlSUi~n*{n!|WC&Oxuc9e}sdBPc 
zqQ=|Mi4mV9pQMnp$_A88ke!dJDhDvK$j-%0Xg&_hPywh5Kvo>&%=ZBfiIXv6j?T2a5US3XY{f4O$1;mG%l%Lx z&l0X26=(zeu*o$m(CoQ8TQQFOUe$FSNid9`$@1!rOVx+APiJKw$$sAigE^Vp2bSS^ z*5Uxk0n>AO{h1leEC+9}LF~{PAo1v+CpQzYS(v6$5lxp#bCsc(fIBej*u483y-2Ke z=9Feeb*G8wK3zgtYO$)7i9so}<5?-?Zy(6JMo-Bf|?gNanGD5uBP2GS$v>vq*6QMtJ9=76LaxEM>Ks|BU(jAMOONJ{04irZ%rT#{sGZKMp* zqV*KMhZD{uD#%te2V`f_{AmILEk;xBf6tTQZ_uA&BC#G})jAqoCM;P)eJlUcqYw`Y z@WMP;*IHMbEvyrUBb7x+Z?9GuD)!ZAJ%j~->$R}Mf5f2IG#V<(YqWTwmPnu?%=+5w zxUObHrBUw)DF3~4nubVHT|ja)g?llt`!t}IOb1(IEK?RRbY_I0Am zsUf!*itKE}h7>xes>m6R@zO+->_);-Icc%>K-K<^Ay|Z-x(O8!3ZHfqmd<>ah`x#Q9g zf1W)5Plh>W@bMp`?EBKSq1_1?9PiZUjGBgOaN&w#X;SS@_Q_u++MmCt9mj2}tLt1- z4t8^mh1Yz6SN-}t8!2bW>;v%|3MJCD5B@HSppK`{Pn+0LyG!Oi_IT+N-*IX0$l8~mu}BQ>4|&<##cx1>mBSCpeWU0eiNpf5q0E>z_b|d^5LHy1wMWXhl_fm zX`~`6p*cQ^QyEekdQz)OPOr!p%LgeZ3Unnc9Tutrk{|dy><%YmU?t&7r3Nnn_fFUZY$}lu^j8qGbWH>&}gMoS5|NpO70l|L|05LGZ0mf?&?l3KNcI>EMs_ZMtV1IAb z!u?kR4ka4QV&G{=;%W9Zl$h~p`TLLwUoP4I4R!0+e3kM!A7jJBxYqsToIlJ`pAD^$3{MwWzzwN^1 z4X-=@aE0y(>EElC_&2ue_JWsfst(M-CbIW9bichSU}AB!{qJz~#>Hvb7MXf7%vk(y{oa3#uBx7zou67QJ^pZUEi4RW z4cK?WF(9TyiA_Z}MPY@WfreJ=ngmwIV5X)OQHOXIq^bawWuLPWWEPTe42XIR6i#Gy Y%s!{Rsq*=}WV!3&MXnYB6F`X%0J1Y2nE(I) diff --git a/fixture/20/2/1.3.0 b/fixture/20/2/1.3.0 deleted file mode 100644 index 97c775a215dfafe0803c78d17c11b5fb15483b28..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 726 zcmZ>Y$}lu^j8qGbte^g783WU_`u}%9;6Dg}Ss26trm{yf9y3lgIoT+clyJr5tAR7e z0f!?84LA~*cvw<0E?ivZ!@zQYfkBOdf#LH1t06|x0jC%;80uCqE?{6V$*5{y38dND z92giFw;D+@E=V!SxWI5BKZI9`ry(QN|8J;Kf*bSKz{x&6^1nhEO^zI3F#wvxC~45x zalogCDFMVWU~qPuA+++}sugSv>{}fe>`eoJ!e~H#6$9VJmihNsRT%=`L{w;ef0(4m z)+jPlaj{Zouau@^^07V*Lk9*Sur>}Sffhv$=bJMjUThykgl1Jgpm32@g-DmIft;4i!cL z7Dok+1|?1g7a_xC^fc=Oa#qrH9SZ*Hh^zs$Tmz*Vs5==VSqH7(1i$;>~ODYO-(ndkCXCY}~I zurIF^6!1I|@Mb}Vfa$W;**&v7gqZ54-deudaqG`tGKXh&GP(FKu5LT0c<1M@Yyow5 zs|ngp8rFNxF6pkcYIyqO;S#Rz8{Y{Z|6x&EJFBPF@8E@RD`Wgt^)Hm)RciIpf6+(p zKTA)t{I-_1>AW$)y|}%&)SPQd*o0>n&fj?Y%)QrLUa6ET(5oxul*ppv2f9)<8gxM+ zI)#D3gMsS-kfrJXWHvBpF95O`co`TbsLje;HY@Wb5HI@`VwY(H3IT;h3LP398jB#b f5|Y$}lu^j8qGbY~R~?jDcx-{r}&)f#5#~fEbwI08`n66@c22m=l-Y9Ogak_p56b}tUNx2jiW7KxS9~?#Y?NI7s^zke zNQo}nk2hhwU(zRS=zn@jw6#(6#~K@jrHc}ecIg-H0bU%GbVKST$OZajqnd${a$9&T=7HQeHMPSN$#j2VrG4xUobS)_W?ee>2s{dcuaG+w@RT0zslc;3Bd z&%b;5TJf_;ue7nBJSkvys<}{0$J^z%?>z8ZDi`u{d!=R2g8u*gow4pxQz9q0SC_YX zX$t&TKCo1Pfq`)agX94Q)&;;&7HVKtU|`q+WHE&RQ$Yy>qY|U9$z>=yzCJ!aN5=^m r2u%u$6go6IG!{WRLnVRX1yNLf^v*;-5f#PvNrepn!N?%$ diff --git a/fixture/20/2/1.3.2 b/fixture/20/2/1.3.2 deleted file mode 100644 index d97aacb01489d7d24f6f08b5483a5efc9613a0a9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 723 zcmZ>Y$}lu^j8qGbWb?K^$G|kR{{Q##^6%yUL7*Hfa9}EXkle%WFyq3EnFg~=4$Mk9 zpeD?6&_Gz2fq_L*D&@!#2?ie)sRV`|U#W~^9cJ~fRwePS2zB6P;J?bt00dG-w!T2h z1jJ#x!ob$c>?>u$!1y+BLdL}f8Iu`U4*U&ZIZ*#H1f&=pFooJ*4drZ`*kJo=$z{j( ze=A=t5S+NIm&chY!B@hKZ9?Y6#eyvv5|^Y56c;uyCHOG-n$2G&tjwR6y5PR7*TH?$ zE3`TOi+u|dV@ZFsRFO?rARuGX(bKLyh6OE(JPA$$EsBOc$r8tmcOMjl00ssHh#S;I zScN8?m@;i)z{;Sr4q|K#8&p{t16G<&ndY)&6G$Q`a`LP*GcDJgJezJh({gRp>X6l; zU{=sdfekJ?P6q`xajpPNL;`L$~d60z~+gINT7g*MxcO;$dj<=*%g1f zPQ8A5K05ZHjJ?rz{%1gu80*G=cehUW5O)5vRE32t%jf+9w}fJmW$jl8-Wh0BUHsF~Fg@+LfU5st>FcJ?zwi6c zlJ9eqDT^sqj#pMiMqbmiAzU&dg)@MYuW-(_M~}aI=&YHX!nuNz@5|iVFJ6B4@Zs!R zWL3ze|K!L-P0u|Oou&w>dG4EPv_$%6u&VFLQ|FrMOkRn6T;(XmCH!HPb1TcL2@E+d z3_LFy7+5kIfPtCAz+lzDu&o5h=IdZ!xWvGq%)r1jD|6Y4U|*BVz6V}kzkb8(h>IYr oAd91mqd=fQpbLa{5dj4cC~#nst`J4e`}Cf46)aTXnN~js06BOG$p8QV diff --git a/fixture/20/2/1.3.3 b/fixture/20/2/1.3.3 deleted file 
mode 100644 index 85b9fb4765a983f1150eb134e0e871bfcf5ccc4f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 285 zcmZ>Y$}lu^j8qGb41Ag2#K7oO|NlfsM`y=>5a_@PHZbgH^_a=~%5gFyPfr_5#sQ|> zf2$TdoVd&=zbehtxmn@j0-jk(QW-`(&Ky$AZfeR73@ly_QVjNALoO>{F|qv_YVy%+ z$^~Wx_7w~aYzNyIWh}2OYPZXpD7C&xb*XN|Ysr+ZEnA<=a^L%zf#Dhh7cwKkxw$0t^DHLtf2! zHN-0+UIp>$%qtyUSI)#QFV->V-%cG zY>ZlC)SFRcMx8Kf%%~4Wc`!=MC<{haFe=O^J)c_nl;=|^pGf(H=MyKNHu-et6Pr(0 zd|LDAiBC>^QuE1(Pepu6^U2Gn06um3r2Ay_Dfda}6YbM%A3FOSvd^4-{@BNledO$O z$3AxKQ^!7L_K9JiGW(>k4+{H`*(Zd3K-lMlO}K2jWz(KbuWWKCB}WF3E7o%%v4BscC7L&A?l*J?qCRH#g%%lb;F)#_tqyi=pFlm5u@=bo{lU$P9`65^3iQI@6 z@igz@JRHqyI1QiSLZ^N9)_=}=>7)--ESqB41Irv(*1)m@mKm_DfMrL^e#?BvDmg~U zu}O|Ka*UByLR}#OF_!Y!2Ab$PuD~De={JP=S?U(J>(6868pkJcE2c>Zk8V8+mPzeW-aL@<`ad6NE2WfE71P4iQPz1YH z*`>;^RCbB7E0kTJ?8;;pCc7@#RmCnUc1^L1iCs(VQexK-yM)*k#IB=Vzg@mvxLriM zZo6W;V7p!e6*3SZ0}V1zoPqclXpe#53^d0;at!psKrRf_!a!pNGGU++1`;z+2Lo|% ztCm}|+?wT9o?EQkTICj=Tcg|(<<=LsytviHtu?o-xK+h1HMg3$#l)=yZXs~%fLmQ| z0dVW+R_<2OE!;nk{NuQ^orxlusLn)* zOoYfpbS4U9B0whkVhgfHraM( z+a%j0*=A?kBHI+%mdG|EwiU50&9)x4@vsfewi>q4ux*BILfdHDX4_iZezvi;m9~+# zedvyd?0Be-hunCmjfdEHsEmimcxa4=LU;&-hdy`+gNH77$byF+c*uc=8n|}KHB+vY za?O)#om}JOS|!&gxi-nQD6Tsusu@D{$-LViH3%#+B8w;(lkO~W>u+Rt#iLg)z3u&-W1`A;@EtYAp zOnYToD$`JzcFHtRrhPKalWAK_(_&f{)25gv#k44X6_2AJlX)|doA@J{je_j3o@bBnf?qAVA+`dQlIkK;jeRK91vagVR za`yGHFV4Ok_TjMahJ9uB!LaXzePs5Xuy4#h)V|X`pL=`m*>bOzdwTA*axc%lH145s z?~HqG?tyXdi+gPDU2$*Cy$S9~a4&*;U+yVzFM)er?genK%e+kHVKVQMd3EMNGVhUj zbmkp0Z_d0S<_R$`hxNi|X59|!&aBI=D_VzJH(MuL7hA_#w_2y#NX|xmY!qjs zIyRzXBRCtevC$eEz1fJ&Mkj1EW}^=_@?aw|8(FYX1sjDqr{`QN=klB@uhAQ+iH`aw?Qlc}`_=3X@ZKPC;_& z&M7vhm^ih>sWqpDI3>g>HK%kqmF5)h)b7;LDch-`Q?*mBQ>|01f#?jx$3SxiqGO;r z29h(-8Uv{@P@I9x3}nJUWd`zKpbiEiGtiiUDi}!2C_bZD8MS8=DWgUirDxP8qcj!R*Y&h%85};jAApYh*3n0S~H5vC;~=p8RZ+*8^s%S8$~qA_K%%^4Ed+dKYskv z$3JxbvE!dQ{-NU^H2=Wx&zXNv_~(Rw%=`nwKOg+#!9S%wpIrIW$|pXbO!-vGCsIDa z`2@zNH=nThbj2q(pP=~k#3v^{f%(M1r!Swnd@A4*mruV>NS}P0@N9x))16I-Y&v9< zolSsj`eTzHo78O5VN;q-GHi-r6PisbY)WAh3Y%P;TANs#PMb`dN|#`{^vWeyF0FA% zjZ1MZiE$~6OK>h}aVd*SSX>%&se(≦(?iflFX69dOBjOI^OnHF=&(@<@*7iae1c z@*+;e*PMs%a5bmlGhBw>PITLa{<`a^|NInFJd@%X1kWCL=D@QAo+0q;fM)_c3*Z?5 z%P3hk$udcnF|urtWr{2#WZ59g1XSQ*F2IOfH%E{<_=EQ4bh9J}C{1jiyc2Ej2e#}qi0 zz_3q-c`~e%VU`T5WEdsG92wTgFh+(YF${@eM+^gE*bl>e7>2{J8;031%xG9{7;e~X zm~2>Vm}*$iu+&0&ER@GWcq|mhLU1hf#zJT;bjCtvEJVUWBP=AsLK`fk!9p1<6lS3b z7LwqXD!)?sh03o`eu47qlV6zpy5yH7zo_^%#V;v-E%8f}p{b3%knfDq$B1yU6U~ zVAlq>Xt_1ZEm>~uxwXnIRc`6ICCaT(Zh3L5i(6dWYICcKTU6X)bBl>vOWg8uOMzPn z+}d($fLj9G;@#TaI-2FktVU)rGONz4LS_*%i_R=QX3d#}!>k)-*)Z$OtQTgvFw4v= z6K0i}rJ5BqEA^_+t6E;s@`}$ZR$lFSg~qEhUYYUg&8sh7dGX54D=S{Lc@^f>1g|7` z_2tz9ue`kC@``|0TUKGR>XKEKth%%6kyVbYvU3}Guu*BF=lYy;fj1bB$|_V~snFGmR@<1j|LQT;$3{ zYh0wpMR6_?w>;k@ z`4-7HJl_=gmdH0mz7_F}h;M7Y@$hYjZ)(2L@NI@~GJLyzBl>3hCiLy~&GilS?dRKR zB0DCkVm5)9{!^!?YQu$uNzDX)8=qVHye3MwlkTG}N@8 zX{TqhJd5QSEYDPVmdZ0!o{{lvjAvpz3jAX({Wkyns6f{z5U!Q%o?4xBLpM9+C+p`ai zeP`@5W8a&7U+nW@pPPMF>}#_x%)SZsNwDwBz6JJq*~euc0sFSx!{pv2_bj=0=iVdt z9Jy!bo+0<@+!Nwn5chz%m*!p$_i(s}<{k|9&fKfrqura`W8GWbQ{5}wJFm|?IrHS0 z7stFf^U|1y#ymLlz?k=D9+`O@%-dj|n0XY;3o{SQJO<`{d8g-{DDOgfm*-t3?=X3X z=N%;P?!063j)`|myj$~bh<8G~Q}a%TcWK`7-tFEUy|cX=dRKerde?f#`iRa)e0((L zBRW2s<0CmAt?`i>AI15|%tt1CROTZOKI-5jG9QiksDh8gtmCtem34d8k+N=-b$Zrq zvQCqAdDgjEXT`cU>zr8E#5y+XidaX)x;5*#tRrCEmUX^$y>+~Gw{=A8Y#Z6x$dHZd tY~;sAeQZQ$BRe*#VQsLVkg97N_IF$Z06P?&?j9Q43JUry~gMarpBPU$(d$tg`vKJIw zKy3`f#z1lgIx`Ro1DP2Jgn>R7sLViP2Et$Ae6r(Hnolx(Li0(5Pbqvl^GSqHA$$Vi)9I7xQ)v?{n_k(}XA>%$PT6G2rZ}6# z*aT;j7Mrrz^k$P3o1)kR#U?PD9@z9{Q!=E-7*;kxO?j 
z334frOMqNjbBTvbYA(@mX@*N_F0pWFg-a@2dR-d2q{wV+tHA;1~hN1{h|^ zuu6teGR%=-jSORCm?6Uo8Aix3Acp-g%!gq(47*{N4Z~m<_QEh1hRKG-hQWrdhN*@H z4LeaS6VWnJ923DY(Hj$?G0_>6d4D7!-0rOB>Lc44y1id|LgqGDGQyO`Lu#4aLs4Y5mzT?OnSVAlY< zcDs^x>2?|IqU|bL$dH8!S%{E@=q%L7LVPSVXCXQknqwgt7J6YJ7Zx(J&*dbfDDcC#Rv^~fwoX4#o_$Sgx<)tTkTEIPAvn3cmU9A=@J6~inTW}TUZ z!YnhhTC-TQRA6P9 zwMnk!xyHz~MXo7w?TBkeTw`<1hig4tTXW5ZYc*V>;TrDR?V9Zx>>AOv*R`N)scS$R z;jz&j8`-hZ8ymT?Q5zeXu~8Wtk+G2o8-=hD2pegzQ3e~0*=T}|B-ki|X{by)Wtu6| zKAGmpv`(g3GOdznluVOiS`^cun3lveB&HoP4Txz!O!Hw{0Mh`N_M4WQb~Ft)O*Sny z4R(!Ew~pj4ihWk>YqQUZeNF81vaf-C4D8#o&wzae?Cb60?c3dhd6~y$-U0Krywl`eChstLhv!`+?;v@1 z=N%&N?7So5-4O4Dc&Fyw4)1h$m*$-e@6fz6dPjR#^v?B8=w0jG>7D6a>7+R)(Q(oo zC&@W!jg!b(gP=bS+{2$DeFdAr)S+J>oi%H zXPqSL@T_aI&WUwRtYfpTh;>A)TeFUbb!yi2*74Tu*4ft8))}pPts7eB8mZ1mevCwC zBs)f`W28AFwJ{PKBgq-*%t$DVWM(7~M*3i+G9!%{34@Wuoa=MWm2-T~nR2d_b9>Hp za*mU8dd|H$hs8NJ=b$+E#JM)-jyPw;IX36GoIBv$mUF*zzH_~ExO2C2wvX<7gvdvB qJ_6*UKR&AS5gs4i@sS-LrTIvPkI;Oi!bd55bmk)wJ__L@5CQhZq?cL>L$tRsm^^t@?ix1bA6_8IFi@G6*xsGUPIpGc+<>WVp@n zl);pd!QOM`;aDDt{F0f)wP&wCG~}{&ojQNzPiAi3*!04Kr?0B= z=v#J9p8NJYBX3}I>el@yCHa(fYuhJ2eErpx-#>EY=DoZE;);dUtyk~Av=nsqox6M^ zqY$@fYJTO(%Xf8!%^fGsUi$Svr$}US?%v}U6-Bj8Tl;6e{QTEfEIe`J?xUjO@|u;+ zy?38~bd>N8UAlfJr=+lKZh7Oy+fPlUpdOQkc}xc8Ff@?lDD($CTh6 zQ-*p>1?DkTn8(y09&6NDAy{DjP+w4I!5{st=33`3YXES?Y j=Jl%^-`U)jsLcL7-#Yznwf!$dYg+*-dtbED2Xq$TG4+mL&26^Z^b62?DFK zScS!^D^|5x1;wf-Rw`MtCCjvR^e9NR@qM3IhDvML{8N?1<0vC zPSH7a$Ei7|(43m#lnkfNoKoRb3a8AR3gJ|lQ>Ifur%I#xjDls~I%U+JQE*0$ zF-nY4Z$@b`DvMEWMny5I%_uOV9vJmyRF_c*jN&qC%P0Uw{XXIOG|8tspBDL~$R|6W z1o;%mr#he1eCpv-nol)+qTv&oPb_>|;nSH1IcUs57#t+#pfCqPa1fY-yd0FkL0vZK+0@CVJew-nM9C&R zn;O}~$fiX$vDtLQrZt;>*yO_|HJfhOWW%N!HsLniHrY18HW6)lZAxuIZ8{A^XP`R< znlsQF1GzDfoPo|5$c%x?7|6^(Aq-SzATk4GFwmHRBp4`yfx=wkbLo^zdoF!)$&*WZ zE?siTl1r6ba&sw)OKmPCaS4e_Y%T?H35ZKST-tI8fJ?tix=XoBxJ$81MVDaz2=Y%L z|J3;>kALv^ht5B7`~$~7Z~Vi;KP&um=ARM%5#b**|7`G&2LF(`ERW@Vp2|@i!c7Ju_8F2&t^i7W9W?&S%*%LRA<-*Vn}*98}qa1jX?jc`#17jbaW1{YayQ3V%K zFl?1!stikI*eJt985YX0O@?VQER$hX45MP$6vLVr#>B8Ch7~c4h+#tvBVgD7!vq-C z8+J5|H;guHHcU2=Arloc5g`-xF%cgV?J-du6VWlz923DX(F+r~FwqGUnJ`fa6MZm| z2NQKL(Ok@9PlUts+D;+Ge{uJ~oeuPS~$@ym%{P5f%$7X!Z* z_+`Ma0)7$j%l8ZES8rD&yCB*1$Sy>79kR=iU4QKIV^<%$cG#uEt{isBuq%dLFziZU z7Ye&h*fq3kwM%GMY1hxLw_dsB%B@yzt#M0@TWQ=9<5n2Az_^vgEi7(bam#{R72KlW z)|Xoj+-l$!msX!9{TdClvkv@8s$}WNoQylUdrnpZ}=D&m!z zS3SH+^UC)M=~eHQ?N#j+?bYj*>s4!^KNj+1p*|LxvydGN)v=JAh1yss&O#|Hgu+56 zEL3J85ElAip)m_#u#lKludH%qRV%CZtTJU)DXa9X>SR@(=Wyq4=WOR-=ZMa|&ZW+w z&Yi~58F$CHIpf|K=f*fWzz zPriBbP0zPWzFG3El5cLlMe(i8wiLlMgwhgvvu#L<$S+2!$t`&$UplfpYDWYg}C0;@X>QQ(Tkcnwx7&TvOs&64$(3OW<0UYXV#g;M$gJylcB_ zx@nC}V`SPQ(+HV1$TUHw?J-S{X?aY$VVVuoYMAE2v=*kZFs+1XBupD&T51|<+UXfB z&t`ch%d=IUsq!q9XJ9uXIu!C(&`z z94Ey&iH(!iI0?>4W1J+$Ngtf#!ATvQB<7?GPO{)6Feg25(wBXu>?390DEsp4<7D3^ z`|#|WWZ#{APwaDIUlaS*>@#9t5&P8a>tSD-eZGB2`+ECq`)d1W`(FE8`&uLYF_Ir6 z^)b?%k?a_$j*;Yy)W%40MoM8M6h=B>q%tFcFwzGjjTs4pk;L44<(@0|TDiC9o+*qdz{P^U)n2&G`t;M>BjR!$)U6QsJW%J~H!B2p^UC$n+7=N2Pgv=D{-W zm3e&ToicCFJUH{lm?y@(H}kZZm&H6c^P-s7W*(S%56t^Augg3H=5d*~WgYfG3zi`CuUukbr7rrv(C%91lDyqr{`QJ q=klDZj1zu({f_P4*^-~RUZ``b-gnj|SLO;Va9DJ?Bcl9ZO(5A*>L0t^DHz*zOg zDlb;8S#`xKD^{sl<;1Eqs}xw3z$ye*by*d_DgajLR^?XVPSH69$*D(9%{g_*DML=l zIpxQxIH$~<(&1DNr^=j);S>y~$ecpq)RRm=QF}(+GRl@wdPcc2D$gi4qtqCc z#;7)Z5ueU{;^ET{pUixk;gbxX%6y7_f_-{@QhiE<$=r6E2(fY=X0ij!kbi zv9W25O>Q=gu}O?gZ8nM7fsU(mvonMmvEPcF3B#%F0C#BT~hrc=N~`*iSv&g z|J3mhoPXT-r;UHu_=n6tDEu?#9}xce;2$ynVDQfc|5z|d&m>nS<(Xv4q*5l~ndHf& zP9||OiOnP|CaswS#iS=DshNbtq$4I7G0Dp$0VZ{s#GAC6q?<&WG@CTEPa^vuvd^4- z0NLk{edO$e$3A!LW5+%->?6ZIW%f~FpA_~XvyTY-gs=|?&*iv$&tds1*K<^U%1!wg 
z7vpWN#j`k@OYtZU#Tz&S|8fF8z`fk)z4HRcSU9%AF%^!HaBPHQA{^u3*apWm7$(cG zScbteOqF4&3`1p@D8oV-2FfrlhHWuSi(ym@n_`$0!88EDXVFV028s-~j zH0(BvXqfF+Bfl8=wa70*ehu<~Cx$vunUo8AG;a3U2Nci>h zi}cH97cIMH*(J*^R(7qjOO;(`>@s6l8N0mL)x|C@c3H8jid|IfieMK6yB^quz^(&! z8L$h0UB6vNw=lVN$t_E6J#x#DTaDZ@*|JbA3(>NW91F#<5F87ou@D*yov{!Y3w^PW7YlK)&;|=>u+Rhx zNw81^3n{Qr0t+E9OO#om%mQUromrX8!ekbmS&+ZM93;mdvcCUWxL`&MQq`)p=#bt14bm@hZ)$CSEb|3e77b zUY&Ubz^mV@qgS|BMXzqJVy|GYUIQ62P$2^mGEkg>`WT3hf#3{8$3Slef?=Q+26ADb zF$0}2kO>2c8OVcy!mP4oRV}M%S(Rs1E2~&pg=ZBhtL_BjbmMa4aN~x?$;QRTt;PY3 zQ(YwIB0nyQbCDev)o~G=i`=-Vjf>d0h|EPOTr}n)5H9-QA~6?XaM1-9S@2EIH&?#p z`DV(uQoiB&=E=8CzH#!6%{MH*t@#GUwAp(VU3@ndpy+3d@V;ULL#+b&%v@ND-F%5%h7fiEY+5^)ZnAX5F z1Ev))jeuvKJnQ5cC(kT-R>?C;o;C7}k!OoML*m&H&y08m#Iqls`S9$9XEr>m;Ti4O z?Ag#W*0Z2zt7kvYNY8v8!sDSk99Mj`i9>?%FCdaWjj=>TCMEMuWKT!VF`IpH* zO#adN2g$!j{xR`yiGND`JM(Xde?t5-^G}C=IsCi*Bl>6i_xk7h*ZPP0clu}gh>wr< z_(+eB-h4F2M{<1R<|8#eO5-CFJ}TiO5??ECFI+K1a$wC}brwhy-NwUHqk z6|xZ_8^zhEkB#`)2+l@yZ1iR$7&dxgBNsLrv(X6~nXr+VjXc;W%spG~)pC!PdwK4) za*vgJctgF*>t5?r>r(4b z=W;oR%eg)0;GCo5+?#W3oLl3Zn{#8F6XRT)b7IbUa4yU_3(i$=4$Qd*&M|QA%Q!sa rNEvr$94F&88E0o4CF3R;C&@T9kpZyjtLu0HUMm1@;fwVa~mRG(9aq7(}D^9sNCFaxwr^1~2a*BadUQTs6MZhU8qw zj51`Dol$E>2{B5|C>=)SFe=Tc7)HS`3e70mD56obQ9+|xqgbO%qe`O=G-n_=29h(7 z8Uv*Ti0Kwt)HV4yFb_IwiMlb%nSe9Gigo==i| zisTa{pW1w4;uD)sM0^_J)0$5_eA?lY4xf6Tc%OEkYM+We(LTLCxjqH`Q|BK({-N^^ z9skVn&zyhQ_-BoO)cEJjKPLQR<{uCK>ENF-|5)%(1^Dh}hI+69JpJZ1QbF+SJ>0+hp5RyL8AULoV65 z|=Yg;rQdg@s61XoQ7C zSO~Qc&_btUvK))$7%azBIhM*XRF0K#jErMr9OL5H7RR(WM#Zryj!AJWf@2UId*GM? z#}YV(z_BgI1UMGJFiVD2GK`X8jSORC*doIS88*l;L52Y_?1y1K47*{N4Z~^}=EATR zhOsbAHcV((Y?x|TY8dJlEx%^@CCe{3e!cO_jbCT{GUHbnzr6U>#V;;?Y49t9Ul{xf z^GkwX5&ZJ;h%iC%Zh^g=g0#yDZstXO|Js)O`6D362iI7rSxaU2B4L2(X3!v0(i7);=XmFK=W6GQ&e6`j&biJ7 zZB%C?J~pDW5gi-NvC*83*w|=|jnvra%tj_`WM(4|HtJxbG8WZWj>G#U41oE78TjB{dK6XV*9Gh$p3WVjZ?H5IO# zey*7=g5{!DE^_4}HZEG@A~h}=<03IG3gaRzF3RE}EH1L(q6#h=bI}7AIdBn}ix9Zz zfQt;6#>uozrfD*bl4+AnlVsW=(-fJO$TTCS6)}y7X+BKrVHyw9YM4gDv>B$|rrD;| zrp2bgroE=2rU6YmO=QPJbxcIZL~cyf#zbsPRK`SPOf<$sAWZbZL>^3p!9*8KWWhuc zOa#G14?HvFSt-v*dFIKpPM&e{%#vr7Jfq|p6wjV`=EO53o*nVbh-W}N`{9`n&wkI6 zp81{`J;Ob_J(E2XdKP;Kkca+w$d8Bcc<7FY?05){hu(O|jsHOT_sKs`{^9v|$v;c} z-TCLpzefHk@h^#gNc>atFNl9Y{7dr>hkrNxJNlRVXY_COZ|I-wAM4-hpXww%PRipX zJWi5xQXD72aZ;R<&^YOglSnvegp))#iOfkGoTR}?VosXiq%iwb*_X;bRQBoF7s@_R z_T|}!$v!;$sMt5fJ}LIG*|)?#CHAe^C&WHA`?l;8U|#_HcKdYuj`q>^5$&6eM94^k zj3mfNbVk}^Bt1r&Gm;!5$r;Ioky;pug^|pRRKiFkj8tYM4n`t#kCuD0+>_-VpL?s^ zQ{~>Cd!pRabI*%=UEJg1o|}7B+@s=Nn|n;$V{^~Ty#?+maF5Hq0q$+N*Sp8Nx0`oo zo+I-bnP+ESA@c~CS7#m{^XSY=GY^M(H_Ssb?}d3T%sVsBgn4G>vF5GjspfrG%R5@$ z^?Aq2J3jB?yhG#N8Smh{`{JD!@7}z#;+>m!V%|;gF3h_x?-+RJ>(Z=?VI2(X(5$1aBU(3G7qqUmjd#KKgQQ&pA=f p={cv#xlGRGIVZ`vNX|iWuFW|n&apX1#JM5PtvScTxgE~w5CAw5Ec^ff diff --git a/fixture/20/3/0.1.3 b/fixture/20/3/0.1.3 deleted file mode 100644 index 3bade1f42de752beef196d65eac8ee76879ce40d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 448 zcmZQ#oWLT$$G`x>2N)R`L>L$tRsd;@t@?ix1bDylFl-T3XE0>2X6R;^$}pedJ;P51 zW=4kK*!1lOPfIhY>eqKpe*E^kJ9A+4>aF|vStONOX(wW@a{C@$|y# z(^v27vs*e(p1b@zBL{bMYX1I{mz6nn&D$r=e*N{oKUZY(=Do+oxfQjmTl??7{Oru* z8@_zwZf;&t`TWY}%Xgof^ErCYUb_B2C%>`EAD>x#`0RCcNki-Isq^3eWR`+@Od9Sn8HmSZ;U1HNcuXGdF$Ji{6k#4yf_qFE z;xQGt$5bI6Q-gV|QD=oc+!gd#cmd!P#Do9< diff --git a/fixture/20/3/0.2.0 b/fixture/20/3/0.2.0 deleted file mode 100644 index 2b2ea41f5214e6dc3211df9417fb04da4eda38b3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2168 zcmV-;2#5Cq0gwa`4gdfU4gdgn2mk;S0001B2mk;mwJ-f(5Dm3L01DwmAu$lZC3gU; 
zM4-@uR1^X&NkNSQku;U0t3l0 zP#gonF_4^r(ijMhf#M7V#z1feDl-rV18p$Sn1Lu5NX$TB25MlSFR%8z66IAWuk^gi znA2s@tlfRkl-i zP8o8_&M7}m^>M1ssX9*4af;5VG^b!Vh2|6rr%pI^=F|tLJUC_Ml<8FI)KRXCYGqWP zQKpP4WfUo+;*0`g6r535jJjgfn^91VdSa9lqri-6VAPjUT}Bx&ip!|qDBq~wr#zn^ z`Gn^aBA*WVbmtQwpZ@sd$EP))boiv^lMJ6?_>|_83ZGK=gu*A-r`IRfC)B6YC(|Za zHodaRl}&Rtsj*4UCNVaJu_?|bEjDGb35!i+HdU}`%%(7#9M}YA69StK*wp3HCYLn1 zr00?(mm;~8=aM3q61jxPr6Mj7af!_(9xm;0Y0V`XF3oUBhD*0gMVD-sf-c1_!7iyT zr7odf$LTnov+*@P=VW}0i}4Tc!Oi@FTW~Um;14`Z&wO~+!!sV9+3>7}XEZ!>;aLmM zSa>G%O!h43+3MNPGu1-0EHukPvMl7rLTxO>#zJN+RK`MNEab&PT`a`KLK!TC!9o`- zB*8)vECj(qUKUzlAqAFwvdoiZoh-9tStZLTS?0*HMwT(MEQw`EEIVQu5X*j8=EE`^ zmff(-hGj>~a?5bbXv=2HgqE?E0WDh{l*d7M9CXJ)a2)i;L2ev`#zAKsWX3@w93;X) zAsn>9K^h#C!9ijUn&2P_j;V4im1C$J3*{In$38iR$+1h0S#peuV^bWH;@A?$lsJ~e zF(HlxaSVuK0~{0JSOCY6j_Hma9jhH1Iz}5t$gn|%2{LStVR{V9W0)Mn;ur?UFc*fk zFpPy^B@82B*a*YQ4C7$f2ESNtLtBPM# z{A%-yiC;_n^73neUkd!%@{5391N=(*wflv%%aL7;>|$h>on3|OB4k&cU3~1?V;2s) zZrEkRE;PGd*yX~mGrLUKRl+XTE}&hjU2oNLiW}FJ+QW%HAIM}$?IM+DT zxYIb(MzC!3%0{kiG-o32NAzODI2!?zi}$?)y=t>~NW zThO=IH`q7Tx70V(MRi<6$3=85V&kGUE}C-@85fOlkr)?!aFGWWnYrkKi!8W^%ta7f z^uR@7wwbc6lx?JJ>tq`z+cw!o$+k(hNwN)!ZBJ}-V%rhhjM!GhHXpY2u#Japziqy4 zy=}N{M%!-Ngtp1H1x*CVM1M@=$3%BbWXD8xOytHyZA`?*L@G>_!bB)c6v9LxO!UD- z8BB!1L>F9x<=QLPT)BqIwNtK{a_y6Ao?PqXnikixxQ4|wDXv9v4T@_?Ttnj85!Vp7 zcEGhR*8;c(z_s7CqieZqxQ7&ZD3ON{c}S3l0(l6Khw^v`kB9Dfh=zw|cu0naSa@iK zhg5iIgoi|UD1?Vp52YSLO_OC>EYo0_rpmNbrlB&8jA>&`6Jr_|)3%tV#WX6WO)*W1 zX%I|%U|N`I3QS928UoV>m?prq0G?6uY?5b^JY(e9BF_|gM#!^4o(U5FX!$qGKUx0q z`M1hHRsQYyC(6H2{(14Qi+^1FbMvo?e^mTy^N)#tOZ@ZlZ-IXb{M+)6fPVx0OZvC_ zhqTX;eU0p6WS^aVh3q3_U!8q??Av1>4*PD{XTv@;`(D`R!oD;6OxRb#KGr^Qr3;KZqK?+)@ia%&$>73tXSt}ofGStSl4D<5$lLp$7UUubp@>3vaYv| zw=QYjZC%ki+eddkGUOvWANlc7A0O5EsE&{5_=wI&X+DDCBQzhO@X-k$o%!g4k39Iu u%txk=N*_Jv%DGm~^*LwCxl+!NaxTs}FwVg_hsC)o&b>JY#knWWIS~Ml2q?q= diff --git a/fixture/20/3/0.2.1 b/fixture/20/3/0.2.1 deleted file mode 100644 index 1fb3df30af22e2487d14522c5deb0221828014c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2173 zcmV-@2!i(l0gwa`4gdfU4gdgs2mk;S0001G2mk;mwJ-f(5Dncz01Dw)AuS2|dRlZfdRlHTVRkl?|t6-~!R=ozQ zGteIc`7sckf$SKlj)CS3)W$$^20AlP3Im}qkePu%80dq6$_#YDKx9t!IrYjZKBrDO zWy+~Nr#v~;$tgXj-ki$fl$%pgoPy$1n^Q=fI^qcbQE5iaFbd766-KEr>dYt+MujlS%qY~T( zsn4fWKB4mIluvO!jqwT2r!78d@#)Q{DLzT@DT+^FK0WXW%qK6O5ct&PQvja;__SqH zo=uZ%!n0|SO^R%~vuTh`f@}(8)0#~^Y*MqShD|hVO0%hjO)P9$VH0fA(5BZWpiQYw zsDEbpN0xtL`De~QZTutWpECX-{(<11F#jmEPA=)WRLLbuF6Ft@$R$QDEpiEoOGjK{bLodmK3rOJ>4r--T&m$x?h@|O?NaO# z(IwcWpi8PtK*!^EoX+348=vztZpO>F2oK?Ap20DAnM?2pE+)r#zbjMgvLZ?OccgMU`+JIL>o+` z!9*EMM8QN8OeDcXUnXi`A_j(uGAxu~pbX1o7$(Cm85YSfNQON!jEP}O3{zs*5W|ER z7R0a}hUqXYhhe*6NW*l)iiXjK&4#&#wT7`C;^Uz`9@68XIUbVZp*S8|;~_O3O5-6D z9xCA>5+3T{Ar2ne;Gr=ORqzl6zgqdl%CA*^k@9PlU!wfleibcr$U=rJRLDYpEY!zBd@NMQLUb%N z$3ihI1j9lvEQG>BCoE*bLLV&T!9pD@R9fh+T6Xc-<;t#BcJ0|!$}UoNg|Q2aU0>{S zvkQw|SL|xD>xo@X?E12+fn5yj;&&ebZlQ3?b*puYb<1??=T>P}pINWW;xlWFS!&EmV-}oQ zV$2F-)|**b%yKhp%&ZD#Q7{Y4EC*(Nnbl?10kgQg+T@icuQGXs=anR{B6)S^RU)tK zyei@q5wC`LrREh6uXcEq=G6?Z(7ZBwReM$R>h((KmFpGi)#;Tf=fa$O;2fB9Ud|zK zuFJUq&H-?4%eXw_CK-ok+#=%?8Fy#gAmaoX7s$9Z<9ZmUW?T*9Xc(7fTnpn^7`MVW z*tnr_uW>-*QsYqHX89(|w^+W-`PRlaIp50oM#i@|-@5q5#kVcKmHBqTH!|PEe1qUy zm~RSvOW>QAZJuoFWSgFCm29JATb^x=Y-42GBHNJIcEmO|+kV*Q!?rcsZrEnSwi>qO zw&Awjw#Bv)ZG&wK+NRnDv=JT~-LVm!jo#SEjg97PbjC(zY*fZZB5V}GMrJn3U?U7R zDzlLU8%3~@m}{wAL*?2j*Fd@U$u&=|U2@HmYn5D^;+hoKqPV8SwIr?~aV>~zKwSIb zS^(DoxVGh*?%L6{+_l*?*|pe3gIpxYMS)zT$3=NugvUj3Tm;8OZ(P*EMJ!yj!bK!p zG{Qw9T(rSO8eEjYv{|OfGA)*At4vd6S}M~-nHI`4P^NV;jf-hpOsirV71O4e#>BKG 
zrYSK^foTa$^D=FKX#z|OVA^gP(lp((MxHV9Y>{V$JR{`UAkX-Cw#PF)p55@whG#WA zd*PW2&sun9!m|>dk?<_&nd%wPvRanWvTT-Rtt?|@*(%Gj-{QL5+fqxAANuOXJ;QG`wrPxXWt+D{Mfg{J{|Vuun)~X8TQ4n@65gw_Mx!PwXe00 zwa>KgXJ6@FpL?&|<8yC~durTE;~t!QV%!Vk-kW<_+;ekp%)JWkQE(5;JqPZ6x!2|1 z0r$Af+hm?5^D>!-XPzYUBAIt*ULy1C%qwCZ5%Y$ar)C}x^LCh*X5I|*(9AQMSDRNf z?=??oo@*Xz-f5ocq&iNb9$oaDht9h^kwBnwU&b5fX- z9ykfiyFKqpc}L1SJ?}Vqx5>La?pEGdXWg51S*&xjE{b(ftZTCliFHS;W3#Tyx&zj6 zS+`}~Z=G*lZXIsj?V~#%CGwG-j{^A!kdNwogvUpBd_?D?G#}0I5t@%y_(+A1&U_@o zM2!!_n0gwa`4gdfU4gdgq2mk;S0001E2mk;mwJ-f(5Dm>j01DwyAukZPC3gU} z@b|ml{r-OUyWjo(e)qfI-|v3+yFZ3ZOG}fKmX;<-N|Pj|rKL$qKm7p+0uBNL?JP43x$|Zw4x1AQA=|VW2SsaWK#Z1Bn@Ef`P)E(sOE+Q>vWGb4rv` zp`5~VDw9)pPO&*v#VIOItvSWSsU=RSIW@$oG^f0r>T>FLN_Q%E3U_LDN_Hyt51oGs z`G=5y=KRCQKYRQm=bt(LiSv({e_r^AN$PN2hBb?>~m(H81{i-A2a))u+Itml-Vb>&(A)nPq=)# z<&!O+Uisw8r&d0}`IN?|H=n}z1jZ*fpR)Lb#iuJiiTO0ar!b!u_@uxmFrNnaB*3Qt zHsRTn$)-D-BH0AVCOexF*@VcZLpG_|G{mMfn|9cw!zMJFX4oXdrWiKOHpw={HmNoN zZAxu=DVIyQT#|EXj!SVat#L_>OK>iYaY>9zVO%0}se?;nE>&=ef=gm9HE@Z6OAAaQ zWzr~<@=V%fk|vYzOqyhpB$FbUQ z$1*sE!LbXDMQ{v)V-E~NW!NdhOd0mcFi(bcGVGFJmJF+8m=wdJ7zV|#B!(d|?1*7O z3^01FkRS&Iau6T~<#7-m2iIJ_aL@_| zsc_H;2Z?Y{2nTI&kOl{3@N1S|viyqWmny$f`Gv}_P=0~(>yuwy{Nm!*7Qd+YHN`I} zel77!iC;$^3xv;B+T`cS>VHXLzM%Z=QW!hD`wdWQsw`RGe=hiB>RJnD=Ei-PFajVTOFK%^l zi_NVnZc%Y7f?E*Wdf?WVTL|1b;FgzLU2gqmT{6p(S(VJHGs}@#jm)AmtB_fQ%nD)_ z5VL-mb!HY0vu>DWX4VU{T$mL!>ovPI6~rnut8!R%W)*G~(W=|3*ecknpjD|=KdVpy=fs?w;9Qt<3!GEn9GG(h zoD<+&0ORnC%VgZ0agmIJWSpIGiHt*J+#%!Cj2mKHnsGaf(_tK%aWjmQVO$L3X5(bz zV&hcffX1c9ohX-yaG6NXL~~3OXQDMGQez@G6OA#E7!!pt5t)fPm}tyI6--3IL}Dgt zU?K)4THqTg-$wbC=i4UVH2H?-+a%v4`4-7HC%!fDZOykLz7g?F&9@%D@$hYjZ$;mB z-*n$<-)P@v-&o&P-&7Cz@lYQR&3UMfhv;}n&O>cH#KuEwJcPnSCp=WHB+vQaZQYCVO-ndnikixxF*H5D6TCCe-ZqH;NJuPzWhVr-vR%;{Oj`X zx9^gDmh7u!U!8r9>}zBnoqdJuBV=C?`+(T@!@e{7aM*XlJ~R7X*yqB&pnb1>u6?L| zKKo7|+44~>AMN=lj*sB@=#7uwe1yhFXME)5qc1*c^U($$Y4A}7ABFiyf{!Bj2+T(b zeDvjBDEC0Q_sP9G_b|D4$vr#w9=TWN-V*nexR=DeH1~wK7sNd@_j0&*<{s`I(Y@Qf z*ge?2pnIu%Kle}@?Xi&_8|AT4oQ>qzD2|QbY?Q`EZ#F7nBN8?mVWTk{aj?+_8;RLy zf{nt=(=%_Cd8*9IGf$Lxq0GZGFOzw9=CPSq#XKtJt(nKfyd~zTnK#6|H1oX7>oV^* zPd6_&4>xZ%Pc|=h5uJ+)xrmU9=3K|gE zybItR0PnoKD|)wkr(0)d-6QMjtUF|#A?xU@^J85f>*lOOvo42qXV%5A4u*AR)}gTO xgmq=srPlqdL!HCr+%4y9IrqvrSI)I^4$iqW&b>Jo#yK#~xjC1`IV{dy5dcGDJy`$% diff --git a/fixture/20/3/0.2.3 b/fixture/20/3/0.2.3 deleted file mode 100644 index 89d89693beb1cef2ec8ffe8cce608d2bc61e5449..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 446 zcmZQ#oWLT$$G`x>`xqG*L>L$tmH}yvt@?ix1bCnFFf0*uW$3lE;Ysw$>$**SUc+wY9xfzhd3_n(xM zP}Z$&pZM_gS64~@$d#M-@=C!yCJphJ4BTU~FptSWJthzLm;%IOig1r9K|H1m_m~RQ zW2!KZslhy^-l(%eu)zAEzM#&6KU1xj-MzH!^(&q7d1o(ddHrgQ_1xT^JzM{tzZ?15 d=C(v-_V4+-lkZlVIbS|kukpWx_lE!c_nO diff --git a/fixture/20/3/0.3.0 b/fixture/20/3/0.3.0 deleted file mode 100644 index e499a63fa1327eb5d2b8e9cfc64fd4772ec2f3d1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 751 zcmV0Ooq9HGp#M00aUA z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjZDsw&&4Y$R!G+O{<{H8pKp`vH^yngE*s zW5F5g4VHP7zGC>*lWSkRRfpG-J4bbydp0Dt{_{zmsEv|kW^-WZkZ&GY-Phrw^i0wIWbp-xe83xg{v%FRe{k|i>6W(WGWz2{h;bi z#hXfO%~NWgN;3uGsSi(isI(NNr6ws^D#=nvO06xWwp3a=pL01=9K}xrMBWW~CuC># zuIwF|o4?xp#pY|oUn2erfoZQxds%9-7n8k~cnR1mz+P>4?y_@Noh}x4t+-1?sJTGR z^~uUyRpz3?wO}po%@~h#R*^lTo{0b0aReL z*`m!91=$S9WnX|hR5CQC9I60)_))Fw-#^Vpon zz7Jl*SbkJFTUQtn0RA-H$oo`F8|_{`fQV_uATFnCq-sOC*^S!~NxH`b?%WYn6 z>(WNywh6aMNHiOw*$(-bt;cLU*uZAn&Gv=nwKK1oS?ll`hu1bpS}oFQkdCZoWVIq{ zXRG~I^Tjz0&S`JbOq^EYG!mOe(==KpD5E_Y%?Vn8(Flw-K+k7+KEw0kGZ&w=IQwtZ 
zKT%o!VflCE=ipz1e+&lg6SOZ7j(s=w+3@N1W$nv}89V5;giOT`>Rv diff --git a/fixture/20/3/0.3.1 b/fixture/20/3/0.3.1 deleted file mode 100644 index 051f8277787b2b48f9af1a9d7c3ada2582d9a2fd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 752 zcmV zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBmN(=<)fR%BDtwzZ_KY1`J+(hrmXngE&r zU%~n6&6SC-N_<6P(^i_c$^>PrCtEqODzFuStp@11D$iATUR>qksuoqbtD>&LWtj@g zR9C1ROx0j227{gw^i&`mPu+OR#uV?V-BVg(mI|{J7^w?OSy-w9qjQVSDGG89$hjZ6 zv2(w3Vr!04b5xor5J!DD%0s1bl*Ua`GA_wDByn%!+{U$~^EsC@#Zml3MD`l8mk_VB zz0CG1b8}alyV!J%xJ$%cAu!E#X)a4m=3+9}5-tIA1(*xKoww|~RcDLETPxmD5o#W& zd7rGzt1^!YZ^1kT^AZ4h_vf9T8t-Vlo3X=sr+atxWi2mjb&#eYusV#zVXO_3)Bu@G>x$^s)~T&aixXhVE?j2evI>kQ zTQr%XAd>-^><8J{WWUM8);y->u{2{K9{cc^hf4n_{hOrZUy^@FesBNW{B(1<{1Wp^EXS6({;dwEdi_u!30Ooq9GjMY40IB$X z-|zc*dQpT_c3Wh^RVO~FzyR)Vn*{Q1hySAA%FHRCH8S5{x$zN+%FRhO-} zSWVbU!d4LweLM8cP>*jszU{bqecOH0ow>@)Rb{F;T(#jU4UwjTG}R*`Qx%zt2-R;Y zv#ES=o_h0?o2e2{k$7r^rlm41h3UysPL^s?ioj9>mJ%?}@%&B~Z*dj@N&Z6e*O9NZ zztH|Vo3odkz2bPG*z3ezCOW<2^lp=scTwI!*&TRi;9UVe>-MbE6Js5WbuV;N>!jA* za+j97vUEYX>%mBB`zaz*$7SdGTp=UV5;f%3e6#=n diff --git a/fixture/20/3/0.3.3 b/fixture/20/3/0.3.3 deleted file mode 100644 index 7c45c45e2daf1ba50560d60223b170d568d693a0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 230 zcmZQ#oWLT$$G`x>&lngOM1c4l5NmAJ|C=DdtIEdkgd>^ZIDRjFLgSod$^VC0f=UvUuSA5-?e?GH7`e}W^$={=e z_A3jwzxEf|EG}Ao-&t%qw|M?#bBWpilO>O9O92g*-YqW!G+1`MupH1}`NixCK!X*5 z1}hy_R|XoavRzsgXt3I9e)W_U0t@&am@^3d_#c1p4gV4Wq0|KqK36)TR=7-UY>+lN YF6QJSso`Ayh{NS>hgnPT`?aB00HgL&F8}}l diff --git a/fixture/20/3/1.0.0 b/fixture/20/3/1.0.0 deleted file mode 100644 index 60c0a550a0f41284af19ccf8a965eacd4f1e4ef4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2185 zcmV;42zK`Z0gwa`4gdfU4gdg&2mk;S0001S2mk;mwJ-f(5Dh&;01DxVAukZPC3gV) z|M9oK{r&#-x4+-t{`R-O-yM>cCMhi~ElpBdnxwQeNm4=_7VU4t4+#Pa0uutSpm_Df zD<@v5d3D4qBVMI><-;p9uQ+(M!7B}3g?Tl>D+ykKd8NRsFZ;~d2atXK*hkJjckE-w zK5_POV;?yCl-WmxeNxzm%swIP1HwMKeRBKYR>`s|mQ}E%_!NZ*r=dUt5H6qRHHr^<-w>ipYD93Z7FAfM8F^5GMjPd0q2;nSHRWfLu%;%stb z6P!(EY${{Zn@wJ9>S7ZYn{=CUn{bEEHmxq@x#YBp@bzFv){SVVVn%xWSAzyt{7&;uquW*F|3JU zObjz(SP{dB7-kw)8rCyRgJBsA!(f;M!y*_4!9a)%bjU!44D`o9ehk#dKz0mN$3S!p zB*Q>43GUZn(zexEd#;-7bf$=Mg zUs(LQ;ujRZp7`a&FQ8wqU#?%MU#DNDUmX0};FkuwB-s_oE=YFK*_FsHM0U;D1<0;H zcJZ)lhg~}CGP7%jT{7${vrC0tDeOXE*8#f>*!9*fw{*EB%dJ>$!E%d@TWj1>ee{la4{d4lrd-;nsG<{J>-e)yK=8xG%Y_-4a5 zG2bZo7UmlR-xl}=<{JUu2KXkxM0rev$3$`_f@7jLCWFp&rok(tP7 zqTNKgiE0xGO+?$K%C=Otp|TB>ZJ%t*vkjANmu$0S+Z5ZR*v4j?65Ep4wq~0U+k)5z z#I_H%d9ZEFHVd{@uuaT12evh^je&;kro*!wp5gFJhG#K6gW;J2&l-5fz%v7$74VFJWx6cOWf?BZc zXe<+BSs2T}2>&Gc7s)?J{?YlD$Uj8>&G`q&zd!!*@Nb8II{Y*9Z-##|{44WMg?}mh zL*U;5{|xwd-!A)f*(b}sSoXoPkBxn6>{DZ(n|)*K6JuYSeOm0xV&82a(LUS0pnb4? 
zK>JktQu|Q%IJvjUJx%V}xi`r@N$%CTr^r1z_l&q##62SJp}E(?Js$3zxktl2GxsF8 z7r{LU?tQtJz&!-+dASF`y&u`}Q5_%A@e!Pl+W3f#kKTMl#z$^G0^y_IN4}469~FIc z`$+at?4zK0rpzm49x3zi%$Pjmm7K!bT});&+@5H>L;9Zz?b=E1ej?Owk)&;U|&bmC-;jvE6x-;u&SZ8J(3+q-` zS7zM^>qJ;bW}TOH0IYkCmUFY5ljWQ$=TbR`%DFe^$T;Wb92e)dIM?Rf6z8Nk$L5^u zT(r zDK1X2IYq^(DNe09wZthkr&y;}r&Om%r#?94!KpE)EI1`*RGm?cjG{BDkWqw;nlp-z zQG1M%GwRH!8%CKK^};9@MwJ<5!l)8Pks0-6lmMf=e7fb6EuU)n)XFDTKCSZU&8IUy zx%u?PCoev=`DDeXDn7CKRQp8xH2dWG)cVBwRQlBOiL~j?rb{;2+4RUJM>f^jbjT({ zHWjid&88qWq1lwfCLA`M*%ZSj7&g7IDaJ3PZ;}vvCkL#^!6#*hqsT=KDK>o`@r^j?c=(X=Taz_@LbB|5+;}KT#Dop zB$pn!wC2(hm(*Mu;*t=T(p=i%k`9-0xHRUH1((EJa^O+}m%>~!;8FpX2)Kujd-k|T z&OLM7BgZ{)?pfm=HSS5{9uw{<;T|&g`0g>fr+3fLJ-2&=Cbcq&l}W2i8fB6wlk!a3 zWRfP6GMQw>q$(z{nbgE2CMKLJuK5;Sq956SSG=;2$n&xOo3$yEJI)*KnD6_AU_7WV<0;Qs$-xx26AJdHU?5* zpcDo|VW1EO0%4%vK)Zo-1LcmzatxMZuN*t&m?_6fIrhmhPmXnROp9Y#9K+&R6vv=A z_QbIyjv;aEh-0W@r(>pL930!=mkGXJpQ@k zpBnz5;h!1)Vd0+@{!!td5&jY3pAh~5;Ge^G88*u>S%$?jER|uX3_E358NQf$Spdz zlDLJ$ts`!sxdp_nA8wtwb;B()wS z$E-JIy_t2!EH|@8m?grj5N7da5zX4os?DO!n!QTp6)LYzd4=Z{D6c+wb?4P3uj~Zt z!mMLp9hh|ltQ%n6mvgzC!{yv9=i;1;;~boGX`Dmj+?#V@oCD+B7w3BCiq7%Q37xZ@ ztDS?Ld!2KQ%QG&Nad^gMG7gh*cg96B4w7+?j9W8qiE(Pi4KYrLacRcwFiwYYIgA@K z&Vq4b#yK#qfpKBR88EJZaRfZX$3uHOB=%L#~Lf=~X#>%%=meSGKvbt(9%1Y$IjcDBHr=2FA88wqddDifvYGdt#dt z+nU%0+xFV#+Gg73v#qpEgKZgX!{AyZ*C4s}$hAbSA#&}IYk*w){6JuH!)3lhD z#WXCY4NbdE6PgyA_A?DO4K?jF&Gc-OXPP|A_vd_*wOZHW=ug<5-YNI++ymv_C-?5$yX2modsEz#;$9T@)ZA0zUK01x+za9!ntLAH z>);*-_r%<*;2s6{!rWuv9+->fTr|igwi^^Qo!bL1xMCKwA zE*jyY-bFWiFs<~ zvF5GjspgU9eK601d1K~TFi*@xbtYx?v(S6TL8z z3lo)@$b^YXn25|oUnUY@A}{Z5d1uSJTHdwtj+J+-ynFNRjCXF{eeuqVcWvHT@ve$@ zY~I!0(caD8x!$$jvEG&5^}HjkyR+_+b#~T0vd)opb=DoS&X9G5tV^>lh;?Yz<**Ki Ob!XPaunvZGF9ZPXJWJ&O diff --git a/fixture/20/3/1.0.2 b/fixture/20/3/1.0.2 deleted file mode 100644 index 7cbed37e35d2631de2dda6bd3b6f4c0a3e99f7e4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2186 zcmV;52zB=Y0gwa`4gdfU4gdg(2mk;S0001T2mk;mwJ-f(5Di5`01DxZAutfQHFp58 z-{Zpl?)Ue*-~Il6_q*Ty{{C%-mco?|EhC#=MmD(=T1GY-2(){*_x%R~4gwAWuc&x6 z#VaXZsd=@;DAQA@h4b&SbY1N)pw5*zCm7Z0rtWsrFo>ii(!n3N) zDlb-bv5L*ADppakYRxJpR;gL#TGd*`T6J1wTII89%&H4kiTS6_KaTuE=O07AOFbt=gdDi{A1=H82)+TpECcP@Q(@qkol*}KLz~5<&-U_YB{y%R4b=gIi=^+ zn^S0UN6gli~=*t%P0dzbs6>3EuU=p6z7v1 zpWu8_<5L=+-h2|{Qy8DX_~iT4`;_zv_o?X9?UT@_*eBSgJex$>glCf`n=;vSXOkqG zBH0AVrZtxNEF)!^C(Aln#>p~EmQ}Khl4VdVdt#Xr%aB-h#4;n6 z0kQ0dWj-w1V3`KXGFUdjG6|MNurG9Q)*$C&#uprp2)=j!AJW ziepe5Q{q?>$B;O-ItFx1b*zJ9930!=SOv!@I5xpBMusgiOp#%O3=?EnAj9++md7wW zhS@NzhG8@eYhf4*!&VqZ!mtsBi7;${VFC;b;MXj_Wcd}#FI9e}@(Yz;X8bDS7a700 z_{GJqEq+n)Yl>e|{G$Du{Tlkk`W5tR^~>j1=~oB4EZJ4bE=qRM+11D{Mt05FMaZr} zb^)>Lhh0AGGPCQ3T{i40v&)5DE$sTTYk^$~?BcSEfL#OJ+H;GRTeIAPfD z))}|VxYg#C7q_~&g>-9o>*$v4R_zw;mg`pQ7HbwLvp$*S$t*jwE}3P?tU9wCnMG%o z60?$+g~Tj0vx1lf#H=&3aF}IgRt2*tm^HyHFtZq#^<`FZB9OUL85)K;SAQ28qI*4}=(m}U_hz_#7Qsq@DuTXh~=T#`LKzViN6(+Cj z1n0t>Yv3H1b6(CFaIVX_8{M*zEgQwzNREx*Y^26UX>9anBQZ7#V@HC}|_y zMnxOlHWJz>wh?Sxo^hg#!!u5kahZ&}Gft9mk&J_6+?sJrj8ij?h;c)VOEZp#aXXCD zVceK;7mO1#?tyU*j0-akfpG_n^Kwxi7x8hCoQvqVXpW2GT*SsjYh0wpMJHTj!bM~* z^5CN1MZSx27a3iI`_{@gR=(-^M#{HQzUBGG$+u0uY4YugZ&rL`^UaBGO?+GP&4_PB zd?Vsp>Kp3Y>01WhF!(m+TLj-A`1Zg=h)i_IM21ZC$3%Wi)W<}2OjO52bW9|}L@`VR z!$c`egu+B8Oa#J2A57%IM7Q0t&6aJiY;$E>E89%jR?0R~wu!MVjBQ|S%VHZA+pgFK z#kME5Ik8P>TWlL_n`+z7w$!!_wrQ{}gKLspi{u(4*A%&y$TdW+1#%6LYkyqh;o1(@ zbht*twHdC-aBYQaDqKt98Uoi2xaQ?r0M`Jxc3LdcV43#HG&ZKKF-?tWWK0`lni$iz zn5MU(R zp84>shi5!Iv*B3{&uDlS!7~V+J@8C{X9+w*;MoAr1b7y}Lv}n=$3t{HVIp^UsBUE&TiP 
zZ-IXb{NwVEfPVw*+p~|BeY5O?W8WM5+}P)4-x>SN*w<#C7yG){hqQ0E?`WTGUu_?4 zpKD)hAL||{_ddDj$vr#wF1csPy*l?Cxku-o68Dn0hr~TJ_ky?w#Jw~3aJXmYUIq6k zxHrK)F!va^_vK!fdj;I%a*`e=<#7@oC&4)>j+5Xx>CH)KoaE*t5>6W7BoR(ZI*E4@ z(n+_Ih)%N2Q)ON%^H7BM~yvAS1~cX^)Zg7%9$3a*PCLq%tG9Fj5O6kr}Ck zkw_THH&Sn;q<4GX(eiGVcY5Bf@=ld^dESZg4$r$b@4R@|#XC0ds(44myEX5ac&Fx_ z>s{*|>)q*{>7CEJG4C#TC+4F%A35?7osSIpsF085eALHBe0(J5qcb1j@R6C1VEE{T zkIH;>!bc{2MCPL|9~JNsmvy$Rt7YAub*-#pWu2aNZ`Pr)&doY7)_t+A&AKbrS+S1I zy4yOUb+&b|b+2`TNoJ_L>L$t<^XApt@?ix1bChJWPB z&3kz@#T5&yTd&@KX{qJxJ9qg;Ms04<)cnemm+$K8m^)6Kz4YsUPTk1l+`Y#yD(Y#Q zw)W3_`T4J}et6=>-A6?YCy8yHBF6ed#5k__?y)%G(L0Z;j^;l>V}QoQ=h*7>1h!hyLS6QK}%`X()!Mu zkKfu_xd$#>y_MCPUox|}_U!eChBnr&Q|GVz$!yCTn_hVE^i@?meap_tbKibvv=59< z-MaszqyyYzju4MI!9C^-@t6zTW3Didxj{YV4)>S`#ABXtk9k2n<_-6l57cA6P>(h0 ztPpIleyA^~GvUvdqvBGDNiNIEGda$l_*=H3M*8}d(_V%)N|&omwrqY}XKixG<*Qjs O+LOB7e<%EGvWM~=LpE?xyzP%)gJ-^4+0JXv(lJ_ z#;h}DwV4IRtS@GqH><)})n--FD&MN!s@y8vs@tmAD%h&mDLkh}Id$jMCZ{wxW#^P6ry@C3 z=aiaLO`J+|s)$oWoI-Pohf_P8I&(_QDGW}9IR(L~2Tp-G<>eFtr@H(j=bt|QiSth# z|IqOdoPXH(XN`Z}{6prS6aE?V&j{r_fOG3xlwvXwK6Kts8U9eG78VAPDXJu zYLiiHMqM##&8R0vIWbDjs3S%hF{+4BKci5iPNOs!mBA=6qa+v=!6*nm(fM@9r#YYg z_~geYIiK$MWXGpEKAHIx!>2NzQuu_zCo-Qx_yoeI4?f*=%ceb>UfJZzCOw-@*<{M5 zQZ~8S6vn1Do3hx1#U?hJqSyq*rYAPVHo-Q%HmNqHHvMekVABShG`J+mrARK-xs=Ey zL@v>}6v!n&F8y(dhf6zLI&*1;OEO$Cb7_T3DqKq8l9x*eTex=$?oz_QOP z`>?Uk8vDH2XN-Ns*vHL2TkNC7K3VLe+ox!s+&;5?Wc$SSVePZpM-|U%ct*ps8J@N9 zjD=?_JS*WD3C~7&Ho!9ho&~UMmSwUmi)Gm=%T!sG%Ca(+k+E!yWnC=eV%ZkUs#r$F zvMH7gEu$@)Eo&_UTE<#dTIRFNgJYH)tK=9Z#~L}t$gxF^6>^M_V}l$6;@A(zd^mQ) zF&mE6aO{O+E*xv&7z4)^IHtg{0*(=IY=B|445MY(EW_X!_Qo(bhMh6YjA3OA`(l_E z!@3y88+J5oH;ialZP?H-*RY^rtzV)10_E2yzcBfA$uCQOJ@U(uUyb});+GP?lK3UW zuONN_@hgX4IQ+Wdmj%Bn_(j35FTWc2#lSBvzYO?Qz(RX0q{l*eEF{N5aV!MKLTN07 z#zJQ-RKh|eEHuJGy@ik#;w^Mr$hHvCu2puavMZHccXo-gE0kS!c4e{)lU-HpqGHz+ zyVC4pV%HM8(Civwmk_&3yL@(euq({23wBws3(PJDc6~Xhkb?+0Xpn>A9K^>#dmIGk zpg9hb{ics%4g*S*^^<6YGA~ zq1K(&X|OJXbz;^@ur7ji5L`s(qC+m4bI~6c`Eik)i|)9{j*IHJ$jn7CTvXeQ?opx18H^?v-<{oYQmelyjz>E9IPSe#>XE{bzdoO|M2 z>>TXe>zwLb>fFyc4$f_GPJ?lhjEiJkopFhbLu4GCae<5jWZWO)co?_CxHIEs7$?Ix zGvihmr^2`t#(5csz_>2s0vHFtxZh&=2FtfszOnIbjc;$hjqy#4Z*IPA@lA_wS$xxd zEBcoEHv1;~7W>Bfw)&>p*2y+bwr#SFl5LZ0lVsZ>+Z5TB$hIT48L_R1Z9Z)4VH*$I zYS>1@wi&ibuq}dZ5NuOmTLRm>Y#U&k0NVnX=#Gi(n5d44+?c41iP)H^jETsYXpD(M zms)w@em#l-SH3{554h_ z8xO7UkO~i_@X!bkiSSSe5Ahy4dT2K-mT9m|du3WG(@>dq%Cu0XfimrrXR!B!L$mdQSfY$XNo*ag}s#A1(W4*_US@EBjX2hiBg?`|j-fVxJfLy4bg7pB4M6*r#S+6Z_KabL|V- z*V=d5XWCcVCuSc8`@-CNFO8Ut6 zQSYPNN4Sq}AH_a`ee`;V=iMmp?!4RNohI+>yp!ZzB=73HQ}eEgcWK@g@s5ahXx{Pg zZijbg-idjK!MiZ;Ab9t{J23COyhGq!myP6X)W=3~HmYMIIyQo{5gQw=vC*52$ZT}N zMq@VmV58qgNgL@lD%vQwPS3hl*5z4O$~sck;aS(oI!@MYvX0HVE7q-9_ry9U)~Q){ K#5yC^6%hb!QBGj1zk{mCv@}U+X=#$Av^1rqrAbi8-R=td2MGcU0uTcKu<_3t z|ETfLn}5dmM~r{m{G-J`ZT`XibNk2kPwXGqKd*mO|M>iqT7_p7C#yDDb!XKit0Y-v zXO$wW>a0?;%7|4(tV*-0hgCeRLbHm7RcBU(StY@$2v&hvmB1v_90`RH~Wy;2ZVio`xNby+XuJLY#-S^u~VmM%_$^K9dSy{sUJ@Ha4OBIF{d~XT8PjB+zdi&1SxNiiylQEWyjF)E2sNQ_dA@)?yH<-w>9MvWO|!KeyGQSiym zCq+Kh`6S4vKt9p=q{pW`KH>2R%_kZ@o%zJVrxiY#`9#8}5k86V>B}boK6%+x%O+Yj z&9Z5gO{#24WfPoDWNdn~iHl8JY;v=SicM2&l48?rlWbFL(`wVtCeN-G@Ar8i8W~z%WznB z!!jF|!LaOwWiBj3Vc7}GOjt(1vH_L}a4eT&xE#CX*el0eIo8TCG>)Bd%#34T9Q)#! 
z7ss$TcEvF(j@6D09itr!I@US{bPRRubj&m?lVO+)yJQ$7!yXyt$S_2P9Wu<2VM7cP zVptHvbQqSyFdT--Ff4{)FbsQOm;=Ka7-qn*0)`RrYnNZT{L1Cm9KYoF6~`|%ex>mX zjbCE?3gZ_TzkI)Xzj(iFzleU-e!YIVeg*9!W!EUXMA@~;E=_i2vP+U(k?ewGmlM01 z*u})IB6bn6YlvMu?Al?M4!bnimBB6yc15rYf?W^nLSWYcy9_wUkAwO+h>wHnIEape z<~WFrgVs1mje}4)=!AnzIOuoK(Luh0a0lHEvfX;+mMgbfxs~TuDYrG(j z#jPuDS#fL4ttW0daZAlDBW@LO3w7&s%XCZ3tqpEza4XC$32sF&5F!H|GLRty%^B#A zf&3Uq&OmkyRL4Lv3>3paFbq^?pcDo|VIVRCfiTc-)-JPjnU%||SZ2X8>y=rj%ra$G zo>^kd3S$-+v)atcVip#&*vx`r)|y$bS*}?@vr@BqW}#+{nZ>~@F|Q&ZW+IaIS-MW6oJ{u7YzEjI%RNk#Tj#2{JB_adgJ%F)ojBc#K0cj)rk(#<4JN zg>h!akuYwAaUzWSG7f-oUcS}xjh1h-d|Tz4D&JE12Im_Y-`;%V;@cMA+*WZNR!>}(@s+aTKn+4jRWAGW30X2Z4` zwxQYP!nPK+v9N7{Z3=7yvrT|)0c`s+(Jd3%GEprPy)ls+6UCXxjETya2+l-aOw`3h zTufv%k#3@*iD(lEO*ESbXd>1`t81WK`{bG@*Dkqc$+b$ZIdZL$Ym8h|;#v~dkhm7a zH6X72a1DoRH(ayfng!P?xJJRX2Cgx1ZGme9TpQq;01xT$P#zEA@lYHO!ST==525kU z84sE95D5>B@Q?@(@gCYeWb{z&p`nLp(^i?L%CuCbi83veX`oEYWEv*ZE}2Hfv?-=Z zF^!37OH5N@+7Q!(m=?q|(zFkzc`yxwX%|ehVA=!I9GKR?LWC?d$U=fF#K%H=ETqRm zb1WpsLUAnQ!a^-9#KJ-*EL6fmBrN1xsJ9UB*)7j(c~;A_R-Uo)Y?WuEJR9YiD9^lj z*2Oa}o>}p%if2?jbK+SO&zN`y^o;dv_006F^o;aOgJ&5$!(f>s%Nkk6$TCBg6|#(w zWqvH{V;LU-|6cj$%D-0r<@s02KT`hT`Nzq>P5xo=?}~p`{9E(yiGNP~Q}fS=e?|O5 z{X6|L{S)(VgMS+Q3-eEce-V6y$VZ2KWXMNzKKkP$KR%N4ksTk^@sSK4#qbdfAC>ti zg^y79h|EVIeDvG5%RXK9<+3l9eX#6%W#1|LOxc%bpBVeX*aybGHv6*Jhs8cN`=Hpj zX5VX{YhTd5)V`j5sC{Ghaj;L!JxT6Gau1Swb?zl{50QIx?g4Ud&OILP?Ql2z`HN+0C?wRBRLz{u~8ix#o4Hhjo8=-&PHTx^kyS6 z8-cLVZ=<4(avR|`nr$T8D7NmDb*8M#v(A%sovg#Nu99_>th=+0%{nO7tyzb}x+B)9 zS@**_AJ(N=H)b6N>%^?1VBG}k!mL|hodWB?Ttw$0KrWhd5gr%aagm&h-nht(i{e~l z<{}j?Dszzt7lm*UnTw1r(p^+^ZkBVhoQvgLD(6r+cgi_X&V6#ulXGs)X>qR2IVsLX OagNP7CC(*r4v7Hq7{TbL(9l!`2GHN8-54^4FV1VuiSXm z#w#{nxp`H_D>7cSdBw#mHm`KAj$Y+n5xttdlD%rZV!c|ey0glYRh_J|v#OF+l&q?= zijh@xR;5{m#Hu4!p;`6ADj!yzS!Kg2GpoR?qF~hotG=vSV3h)^ysQ#nRhNO{41~u( zcMJq)pf?6`W1u$!nK6)?fyN9Z!ayMmv>OO%Al*Pk1JMQ|I+f=XDyQ(A0_D^vr|z7( z8idN@Y}@ zQKF0rWfUl*+Kl326q`|0jGAK9no&%QT4IzEqgta_qgJCzqkKk*8HK^93r1P+sm>=x zKGFF^$frR*&H2Q~r#(LD@#)Mb8$Ox&{4#xXIDg>ejw zV_F=`;usdkaK~=PY{y{7hK{|C0Ub*n`x&;$FinPKGE9>#zSg6 zM8-p7JS4_LA3XGXDCwcxLq-qb9+Ev2dkFT+lwYO%BIQ>nzc~4|$uCNNP4Y{UUr_vd z;+GS@j`(H7uOfc=@T-SkJpAI|*9N~d_%*>V34TTJOMzbr{6b(MKoT@&S6DAz!_mdQ0tu3d5sl53A#bL1Km*Os`Z#I+%= z32`lmYdT!Z;TjItFt~QXH4CmiaLs{h4O}zeS^?JxxQLI7_P9uoi{`jUj*H^BNR5lq zxCo7lOt`3oi%7UA=_21ny^C-c-7d0CV`bVZ(^Q#8%Cu3Yi85`IX_`#SWSSMzs+dN_ zG$*DtF^!37MNA`N+7Q!D(@fJ!(=?ct!88n}MKBG5X%9?f$V7!qM94&bOw`9jd`wiw zM089v$3!qp^uk0gOoYNjCro6*L?2A_n<(j-EzfFsM$0o-p0)Cfm1m|rE9Ds}&%k*0 z#WOFSVe#yWXI4Ce;@K0=oOt$nCiKkp?B^Nk+36Vv&o+3b!7@meJ+jP^Wr!?0WSJq$ z09p3OGCumMTCna(cA}7f?DUg!@IVsLbc${>{Ni>`^!$~rnMCPOw zPEz5dF(-*|QV1t;ISGK1?$fg`mVL17%d-!aeR%efv2TohV(eqHZ;O3e>|3)>ihXMK z$@aze!S=28srCWw6SL2QePQlVa&MA*lH8+nZ;^Y7+?#Vxkb83O`Eaj?dpz7TbFYSb zG~6q5kA-_=?kR9DfqMwt>vAuEdtB!2nJ3FUJ@ed{*Ty_H=DC?y#ym3SwVB7oJT~)m z^N!}_<`K=C&6CY*&1215y}R?ylXsoGv-7T!ca*%V^Nx{sbl#+GysvrdY2YSt;SE{Sz% z)&;Q+h;?YziCO2tx-jc3SXaS1FzXsv$H2NTAITNoJ_L>L$t<^XApt@?ix1bCd-u`v^4gll&Aro~fBfyO6B@sM=V4)8S@rV9?%Pk_+v|A- zFJ8NyU0+Z-yR`oN&Bw+Dw(ipxuKvwx$RD3seE95jbt6OT?y2+N|711}j!oZw@U*mv zs(yXvW{eM6n0KG)S5&!@I diff --git a/fixture/20/3/1.2.0 b/fixture/20/3/1.2.0 deleted file mode 100644 index da79cd79dac3c18f97697a4cd7ff50029881c291..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2188 zcmV;72y^!W0gwa`4gdfU4gdg*2mk;S0001V2mk;mwJ-f(5DisB01DxhAutfQHFp5E z@VCGH{r>j1zy1CG_P4*^-~P7grEn!f%gCme!j)vV*-bAY)9-d9Q-2Er4gv`R2MKae zAO`_*keq|^I0%n};v59WL2wRY;h+@`QsE#n2aRx$2nY2JLOO`IYR@WJR>iVP&#F{b 
zp|UE^Do|G8S=DA07pt~d#b(tMtE5=9W|b1F)U0ByTCECNRa)h<%7ay6R#~tr%&9u3 z7&*1bDLSVHIVH%cIj8hECFj(cQ#PEc;gp$EEu3QERGCvGoFa3o%c%iQaT!I+s98qs z8MVqNRYvI<^=6bAquh-0VpJES+Kj4V6cwY`jH->Ijhc;ejcSczjWUh;8CCjp=aVI$ z?0jHeIsm&L&7UJ+jG>O=~tOu}RG)AvOiEDa|GwHs!DhhfQKOS+FV0CI>b(unEj2 z12z?~$;&@;{?X$fIseGSA9Q(Ymj|=;l*~f%^O4z5&K4bRz?c)o} zoLJVxGA5Q8v8;$?L@e`RSr5y2Sf;_U43=TAOoC+*EQ4T~0?QIuhQLF9Jk-ZSd^}{w zLv=hv$3t#B)W$<>JcPnSCp=`rLm)i#!9!yn+C8LuD0l3YW3C))<(MhQN;yW#F;9+l za*UH>SscUS*cHd1IQGObCypU;?1*DV97`QT9XlQ4;MfMoG&n}Vu?dbzuuvikA+pdR z3jwmw9}D@h5FQKNv5*}L&9IOR3&pUI3JayM5DE*4uuupKfv`{j3jwguVX+K@W!Njj zP#Jd0FjIz+F>H)sVhr12m=?pb7$(KAD272XOg3z2SZr9(Fx9ZsFb{@xFpPs=l>D0H zmn6Ry`K8FOM1BeKE0AA+{PN*f55IW$Rl_eDe$DWUg&vYLZYgl9 z%dG)!ahZk3tUG4eF$>PDH)gpp>&+}PX1SRq!mJQxfiR0V>uA<)R&5q-*6bB3uTFVo z$}2ptK6&NIt2?hOd1dF76tALq1;r~huabC$#H%!~fOv)Gl?Sgnc*Vi1Fs~|jMZqgD zuNZjsCFgqQkk0YW6`ix4tDS?Ld!2KQ%QFs?ad^gIGVYRbcg8_7?vZhhj9W8KiE(Pi z2{A5+acRcsFfNC2IE)iB&Vq4a#yK#qfpK8Q88EJZab7l>vymPf$=OJbjpEoS&PHl% zl*UGAY*c0=5;h{Uk#D1-yrz*z&8gjD&!(UE~0Z09~bR$(VUCu zxM+@xD6Rw$X&46nKT=R16v|FaxGVPUVu1srXni|v6 zn1;qQF{Xtv4UB17Ov7T@71NBS-KG^yi%k=n2Ah_ehMIPIrpdERo?-G#l4p@TgXCEv z&k%Wb$TK3I4e?BfXFNRH;h7H4W_TvUvlyO1@a%zS4m?BP*#XZCcox7j0G^$Qj)~@& zNRElvm}rfO)R>5jiN=^njETlf^qa^xQEnpKL`D+W5!DKR5r%_(#USHvhQzx5dBR zKi$9Fzu7;cf3kn9f2)5%`#jm#$v#f@+1XdgK1%l0*~iGfMfM@F?}&Xy>_fBfhkZWm zJG0M*eKqW(VBZA$B-r<5-vav-*w0Q;# ziF;}80dWt_JrC}6aF2s~VeVCMkAiz(?lExh%SeKZ6v#+`j3j5IJVwG}q&OqNF%q1S zSQu%AkyIFo%t#}QB*I9&k&s5>&D%3imU*$v(=#uXd8o|GGY^z`c;>a4$HlxY=CPSK z#XKqIt(m99JT>!J^H%eM=9T98%=2KLn0Xe=3-eK(j~MxAk&oznG{{GSd^G1HJwB52 z(V35I_^5`D%zV_sM=X3)<|7h5BJ)v~j|TXN%R5@$&GK%~yH(z)@=njYH}A}N=jNRk z@49%`=3N!nvGkXPqPK8d+CoT_NiTSx0AG znsq>|L$eNtbvLX#v+jj;F03=N4$L|R)_qy$WgP+Qx}3A+TrKBlITz;~9OvMiL*v{T O=iZzH_$R?LU%gCl%2(%mHXMPI;4FV1V2NiM< zAqNd|keq|~IB1W9;v6)`L2wRw;UE_dYT+O<2bpkC2?zZS@*R}4YR{@#R?)Ib&ni|{ zt+Fc5s!>+qS=DCM7puHj#b%WitEyPFW>pib)U0x?YOP|eI;}FT`dKArRR*iV3{+>J zM+S0aAUXpXGEgA{%^9eVf#eKyW}qAf!eJmY1HmxR3j>uI=!AjD4Af;H0|w%9>XuWs zoZ55Bl~b*p(sSy~sWeWxITgk!Fiy2Og~h2WPO&+4J5_Ybb_#atb;@-rbqaOrH0sW% zOh(xm70D<_M%5XG$f!d`(HWIy)DWZ4jM`z84x`SDl3`Q~qs)u~GwOj+Uq*Qub-<`D zqW~E7(=DIke467EoKI_fQsdK`Ph)%%<5L))exH1wl0M-+8GX8a8u}Fb1lyEn(XWJ)NCqZ6A_!zZ0cbX51V$_BxX|vo5F00U=swJz-&rj z69SvO{4?jDKK_yOPaXfz@lTw8+W3c!f7bY?%s(gmL*}0k{u%Sn@1NX1NB`h1xpJwM zOL{Jqa*32nc`kKwiIYp4T*Bhg6_?mtdg783m)2Z5;*t@Uinx@zgt~OPw813}E{VA` z!6gYUMX(Pd`y8?loqhh;$B%vH>~qIHcI;EfJ~HeR!#-yANnsxp_9?SZ2>XDr&j$g^ zqF4sSvL}`$u?&f2M=T3s84$~USk}QZ4wh}Otb%0}ESq3i1Irj#w!lMyJOs!?e>{}O zLwG!N$3t;E1jj>fJhZ|?Dm;|JLnAyS!b2fEg!B;aq1`cAj>U2emSd?LL*>{h$3i&< z%CS$5adB*mV_F=W;+Pc2qByq1F(r;Aacp%g=$Pu5&oK{GCA78$0< zutA0iGAxi`dkoWKSRTV{7*@kD8iutnjD=w<3@c$63ByJhM!>KEh6(VCmS3~{lI7Pb zzf}2^%C9qinenTPUtav`;ujacs`y34uPJ`fe$9Rn{bKzR`nCG?^Q-jBXV)dWEZJ4b zE=P7XvWt;jh3q0^*C4xs*agI{A9mre>xNx6?0R9B3%gp_)xa(Wb}g`r%dP@;5pZkI zty*r;ax0EoaNK(1mYZ8>+&be{n_FMp^5PcKE#9r&E!(ZyE!r*Dt=28ptWaiwGV7CB zc4lER>ylY@W<4^?ky%U3QesvTv(U^EVpb5d&dkbT77nv6m}SAN3TAzo<-n{4W_6im zz$`8c?Xi#^3+1s8oQ33AD2|2RER@DVZWbzGArclEVWFgjdJ7>fbX%xsA=|4}Ua9gb zl~;IPiSjCxS9e}z^2*MuDqd0XYKm8CUNP}%iC1Y}4e<)itDje;S3a-8yu#qs1+T!o zdf?TUAN@Y^eU$VO?jxg*ZXXSO6#ED^F3-48#^D*a$v92M-5EE@I7!AuGH%VdCdR25 zSHw6X#-$n8!#Ezs?J!QvxD3XH85hAg2*!aKm%um##(CLj&PIJ~Bxj>KHlkyrI2*OG z5gQw=u~C_gPS}XdMjvc6W~1Lmxs8rC!hLh)TPxr6d@JP}Dc|yZ>*O0J-!}P%#kVWI zvHAAIHz&TW`F6xNBfb^!E%goc?euMfZyJ0P^KF7}5`2r`B1A4arDQEY=^+Y{S{w#Bx=wyCxOZA)$IU>gV9Hn=v)HA${Ta!rwIiCjbE zS|HZ|x%S7k9Wm+uLV42p& zG&ZKKF|CYgWK0`l8W+>Hn5MM{Q$6eC87I#+c~;3YN}f&f zjFD%HJX7S^5zmZxR>ZR(p84>shi5iCtKk_9&n9>#!LtaSE$~c%X9+wb;MoAr1eoZK 
ziR_rDj)~rw$c>5GnCOg&%$TT*i9(nNgo!?wXg85=qTEEZiDnZKEjwkIDa%S(_Q^6& zmUXi1l4X`Gt0a$U;4ut5W`W0Wc}xM1A>iMhf3^Ih2^}lNw#~d+*9RVD);c*6Xjkg_wL-wk#~-~qw~&?cZIy0^RAC~ za^9VJm%}?8-kEs^!@C#Wm3eo6~ihRR*_ki!YUM2jadc4s-#s<<#Gy_Q?Q(R<&-O@ z@|-f|6rNLUPKj|Uj8kk*WpN6NQ)^B^aZ1f8*s0ek*D2H~pi`$)Vor5%D$J-lqa+y> z$tXIb5*dZas5zqm86{`bnNd89+F_KLQ8SE^VN{t>DvTmC%FCz(Ms*n#z$gGl9Y~ge zVi^dQf!++n#z1ZcB4eO225K|V76WN95SxK=1K|d`4HO$_Xdu`?s)14ip+4RD#K|W+ zpD6h>$)`G>7Wt&eCpw?fd@|w_nomA_>fzIwPc?j^;ggw9U_MRo>C2}DJ}L0Y%clW8 z3GgX^f8zXO$3JlXapRvh{(19{8UK{=4;lZA`3Ho5lK$EKqx&cKkL(|!e`1^RY%*mN zo=u)?>SWWMO_pq`WD_Nu)@*`elbTIPY&v37noU4#`eBm~o5XDDU{jb)6>Oql6PQg6 zY+_*30{hI_2atW_?1RTXckB~qA2{}TV;?v6DYK6X`;gg3gndHTXUsmmeR%uqF3EB! zmP@c)Lgmsam-1W!<4HlZT&m!nMeb4L9y<33a!(-l%(+L8d-Avkk9%skhlYF1+{3~>E8J7&9un>u;T{nJ z$EY|q#W5+4F>!2(V@ez&;@A+!gg90@)^qHGV;LO7;MfJnA~*)Yu?HR^uJdf}G~zfkyf!Y>nk9q`M5Uj^*KW!EjcY}w_? zu2y!jvP+F!Y3xE{R~Wm%*!9IOEOuS7%ZgpKU9?@ZU9MfNT|&D~yMA_=ZfSBWlUtbF zisTj~w;s8L$gM+e8FGt=TSMFu;?@qgbhwqnEg5dba0`Z85!`~{)&sY?+(O{i0kG7ir;PsVjJ?#?(%##J(ol5uOsK`~Cv zI3&g$F)qzGAjbVL&WCYg#&s|*%(x21Q7{h7xCX{CFm8cwbG`xcP0lwwzTNRH&Nn!| zz46VBZ)Luz@Qutj5x#}+ZOpgcH{Q41Hd(gCvJIAPsBAlBTb^y8Z2M%JC)>8zro}ck z+oaeQ#kMuul-QQUHYB#GwxzbAwtcY8gKc8AU9io9Z53QwL(Ku9>;U!nGBymAOX3wGpm~aBYBV0$dAV8ZFainI_9LRi>pf4V7tTOeKQPrcp6%ifK|zlT9O<7Mm6{Z8c3btu(D?+6T`pc~;3YN}e_HjFD%HJR{`U zAkPGO2E?-;p84?XhG#ZBtKpdo&suoK!m|dRG4O1GX9YYX;Mo8d*>X`W7twMN92dQD zksBAGanTtUnQ_q<7kP0}7Z>#|;$5`6Xm(N2MY4;8E@E92v<#GGpDgoa879jvS!T(y zN0vFVtdV6(EK6b;63c{G7Q`|jmgTSvhh;Y`yI`3G%PLs*z%mDxHL&b}WdKI3|K)A~hyTVvLl7CM8YvLah|Iqv^ z;vW(J&iv!y-wyva_@}|Y4E}xjC&9l6{(1SAz`rgZ`SDR7AMx=KoR8}Gh>nlme8k2_ zZazZcqZ2+d;iIIFkUsi-l=}$x(QThA`&!w@%04{%O4&!szB~Ik*=J`T7W=N)XT?4> z`<~e6#J)89jM#@}-)Y~^KGVK1`#9LQ!9FniCfN67BSbbjWFtd1lC#kt8~L$OoQ>?* z2+l?_Y!t&rFl( zG2XR#x5Yax-m!U?dxv{>dl!2*^bYn;^)B@eweHS3PS)93N6ETL*40_J$T~&V(OH*f zoe}HMtn*=A59`jXt6?1t>&&bJvu=WQU)C+KPJwk^)(x;ufOP?!i*wG7b8ybNajuPX NZ_b%+ZY)bL>L$t<^gGqt@?ix1bCP7FocMPsx!oA?mT=}Rzuyev3u&% z_dh)~gJaijKPadrty)^&dGqmGTW$Big{!x+>hMcu7T2D={?Jg@+I8yul|PyFcw^HG z51ziNs;_U^IeG5e?~De4(WzVapOiFI)~#)y`0({tS0n$(m7DkS8jC9yR<~Zg|I*UL z*>~>pjf|$;qN(|nCokXCH8XddID6^W|D5KL$+>%vUsSZvHf`;n`SSB$U(4{sjk}ME zTFGlxHuv6r{?XCeJ9O##ot!qpvbp7r7jHi`wYB%0xp?hgc00ku?9!v>Z)(~b+xAXh z`0+QZLny>!j&P4TK|JOR_m~UBW3F(Ixj{YV4)d4?%wwKVk9om8<_+y%(v&-(}x4X@V$Zx*T17w}rzSC{* Q8j-`wMqBH>?LV*s0Nszi(*OVf diff --git a/fixture/20/3/1.3.0 b/fixture/20/3/1.3.0 deleted file mode 100644 index 9bc312faf6c81722f0e224022a4e46c59573f654..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 757 zcmVC0gwa`4gdfU4gdi40ssIM0002p0ssIgwJ-f(5DlFc0M`1bHGp#M00aUA z2oNAZfB*pk1PBlyK!5-N0t5&UAfQB1GAzroq^W7!*0iRkt*LwLxIlnF{sWi*ngE;t zd*11Jmj}kX7w=r`1iTCI4uF=evTTLL%D2_mRy{)N2CWm6V;zolH*^cuDOi_a%vEBp z3R9(a)$gjbMpH4GYLSqsf=mU3io#SArjo$ssWnfjnF{d~h^IbSI#=l&r6uQ-oJ*3! zor^lRq0UltmYO4_VyP5Mq4+e;)3{Dl#!VR~1(!9BHxA3sQGJf$Git_BGLDK7mz>;d za*s)m>fY479G<=I>}99LUM%)n;q|UicY(TcmzBG!bamaub=Ov)xd6@e$HrVW=Ayxc zV6Fpm836Ovm$$rZoxNrDR+*x;60L>k$67wt>Y){3EeLBpNOKmNv(99BIIF{1945_^ zG%pg8c}M0M;c1wcVIBrKW5pQ@4%UgWOpH~6(^s3m(iG(@C|^Cf+I?m9l`dy$&O&?! 
z;y|~cx*tI#kefCMQm&_vBkm;9cJjTLOc%JahQ%U2ZuE{jDeZK z$_z#Z%x$o?!Pp?}d$i9{k$pt=4e?#rXJKE3H-EYLtIZdQzefBeBGWxg_W~Ru%mKn2 z9;EpV&2MMkJp9(-Hx83_leAkTB)c8i&4`C0gwa`4gdfU4gdi40ssIM0002p0ssIgwJ-f(5DlFc0M`1bG;nh50IB$X z-|zc+K>vpa21DXJt0Gt4S z*7CDf9~x`TSWAXhfwc&%HNeYRUC!c?MRr#2tT9Am9U99}kFj`+wFAq+SPjNvaONvB zUzNFP`|9_VFVa?!wt8e_t0G$wvAVF8g{>;Ux$4bTZmLRLMdGRvn#N@shv~^UC*zvn zs>b2QQRSHm&s2A)TujwsDi))j67^IlEKgl|%E}b)sjjDXf|d%j6rfT!ma?%_jhmzl z7G*FfU|EA<4R*!nuRVY1iSZYVzg~O^_$$C)0JiL#*?m@WsK+R)m z9z!$s;V}=7y-`|h(qfX5EQVyUBVrmB%di-RIfuzPEKV4T!%iG#qSIiU1`8f2>|w$l zB((XB&2MYoMEn-wHxQS0v$R_!Cc7=!O^KV-ZX9;oVCS|vx6x@^ahr7S=6 z|ET<%^3(m>`j`8&&(FR-H1^HdC&O1@AAx-XyxgJ79k_HLyF>2|V~A!uG@GFwv+e_G;nh50IB$X z-|zcQGJVj#DIZfv>K{@y2oRi${ z9Mw6Uo^g4`;dwF6#kdx@0OJ6Rn`J31OI?xjE!A79YtT`GjtVpi$5A(qvJsr7?31!D ziVthw)jq2}_xRk~Bja9-doX$g?hUvnfXiK5?$Xi~b{E)PUxwx?G#8;AbLp5X2Uml+ z7|gYx%v)sM8near*4kTYkLEd=*NDixA@hXrEX=DgkAj=E+N{M!Ys6Y2)(U~?tV?HE zYH}8nvzBD#&cdA~b!RL)W7WZ8G1iK)RD}8p)K{OZd{yNuDpy-y@xIauW^c?)Y*u13 z5}Pj5bXg`Smp!@6N!ji)s>^hGCd)G!o)?q3n5+d^fXM(%Hp^pJ9=kH;d#v|Z*Pwrb z{sqGE@5VnHKL!61{6ipSu`r8)5kp(-Y%w!Nhb=lxQINxc9QGq@!eJ5)i?C)eHG`!A z12Nc#!8}y@i_%|{l>C+CFCniZa3U+Qg?2%b6cG@7PqaqO+~2LK+X2a%4}6; zqr$c|8*er(K(GCI%}h9dyu0&_M?oNjSywgC8{!aS8wc diff --git a/fixture/20/3/1.3.3 b/fixture/20/3/1.3.3 deleted file mode 100644 index f1bd2af65a17618c9c08324be9304d1182a70f04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 230 zcmZQ#oWLT$$G`x>&lngOM1c4l5NmAJ|C=DdtIEc3g`-%V;kvcPd}ht`)B0MIzej8D zSJr8N?XSC8T(A1Rv;J~!gZ#_phO_@C8y(j+?*ANavRmG?`MI~*dSUbO+x8ZV*)6lr z8(U5P9dCVD-KP6}uBH7mb6O8Vnzp0lU#woB#j- diff --git a/fixture/20/4/.zarray b/fixture/20/4/.zarray deleted file mode 100644 index 98e3eba3db..0000000000 --- a/fixture/20/4/.zarray +++ /dev/null @@ -1,23 +0,0 @@ -{ - "chunks": [ - 100, - 3, - 3 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 1 - }, - "dtype": "k|aq;k|s%# zBuSDaNs=Ta3H9{HJ?}a1x$phF_xB}}l6l2M2;%?KMBinoG3wR~W<`i%mS8l%A0}_+@Bpd}3yH&T5Ob z!wyHLhiB={vg*4v^^Y3BDbkhHH8jCRMaLwZN(49Xd2sZ{7&yz#R`%N+z?Ix6t-4bU zPFzA#VSysJpx_WrWE8lrx841p2EduDWms&o1ebRyzxa9yI0}_tY`!=+_Fmt>LqXtL zp0{?s=>n&>T;F(&3AnU#>A4s4z)k;|;pL-%b8}~U`>?^)J*;nj)&fp#vAXUuJ#g_S z6H?EnfgAZgI`w-RoZU8i*PU+QDsNYDAJl=9lUGn(qy{eHC@1bjJh;A({lnizz?pBf zu-$40uIO5Ec||2S5m7N2Svhb42Lr>xBfxdM?(F^02hM1}X>2xKCr&t%#icRub|JQXDJ;@tWCqEF8_e)9bRop`&7^$( E0Ts+ej{pDw diff --git a/fixture/20/4/0.0.1 b/fixture/20/4/0.0.1 deleted file mode 100644 index 3dcc375cf4def11e67208c456cb748babd3bcaf8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 738 zcmWmCX*|?H9LMqhpS4@LuSICd5lIq45``ocQn`|qtXxSFVzV11Nk~GHR6;taBac5+>P$1mgaznCtU5nM zKv+anMpn*emv8XikjN-@{qu&_*KJ)Nx^R@;@2d}$1f-( z?6%F_o8=RsZ^Bco&BHf*$Wa&`e%a`RTj{RiNr z=`su@WpEJ(B4c6?gX?_X)%T?z+^W?Uw(B>5%e$O^qqqdz>^XvxQqtf8cL#-qM}TX7 z+0yZ@6P&>^Lo@SL;4;r-UA&YBZu;*GZPsjXo;$q!0|LQSKdotOY6hpdP)kqW09@jU zr1XqTaKk@Grv6NW+p^WkgXsyb{9Z-XlWK5^^B4;>G{JEX#Uvb01lRkyZ*XWBoXt90 z$IV;7UAumx>~1+YaS2Iz1x0Y7`@^C+TyX7gJ3fBu1!ua_%zCX2xZDdDi>_V+CqSi% ziiv~s_45ze7YeT7MPu8Wc5u2&^-NZnf=fG{o^w7I+~n^m1O?~5-2*|vRX(mlP;hGM z3lJ1s{ILWC1vl_x5JAB?Y;r_UaHV(35EPu;TzLcq$37U1py0ZDJ|ZYMt2NdL3a;=< z5rPsJ2ny~v3*Rn;NfN{3q|i$iM=0P|ln`1~O`XsrC@~(qC?1PLw`VxZ4pH9FSkwTr zmQ3cl5^NreNoO)LWIV>YL KLPFnC+V>wBtwgW@ diff --git a/fixture/20/4/0.0.2 b/fixture/20/4/0.0.2 deleted file mode 100644 index 471e6cce26ca8c38cba43a6f3811cfbf6b727761..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 738 
zcmWmCc{EgE9LDiGmKoXCv1WvXgd|DQBuSDa$rfX2LXspTqZuSgLXwaqNs=ZZNs=Ta zNs=T3g?qWGDZA_hQX+zHK9fD#dxs87@oOQI*luz!QVw;k^@K+ zoJ@`j!K!64s0><$JeR+X;!mcNnn+n3XEv+Wn;~IG%f;&!N&uNjnk3cNiHML>L$tjsR(mt@?ix1bDCVGNh$N*gu$ZoiV|J@AyLtnfAv8 zGJS^&@p2MhXm~a0xOh%z)!^JDpkMFA;0T27=iekw->O%1)p7HT^eO6RerL*R=N5M) zxAms1>8$&BUOW0m%F60HCFfL@-7I^vwtw2j3wr4mPggban4VYPH}iXLWMWzElhy5I z8_#Qn|0us=lP2|C@Vw9Ot=HU+TfCTN*kg3nzOE`=`1YFJW^a~c`k0)Nj{BLV=>4|1 ztN7pE+h&)|`O;%edK%My9#Z#zo4mYKukh@S^xbY%$DH@CNu0Wtujq>HVu|$oJ2Ulj zlNYDHD>xk~JTq%S_@2)vRbQ7`9rwJ>lO$9p=%w&O?u*O^sW%cY#GZ&e5V|99gYOE@ z1+Ft3C)kd#9AMhRup?~OG1uc~5~ppH`*g*Abwv8)>c@Ff$2~6pc{qK$*`}*b+jl&l zqWMPpm8^d4=8n{9b5%4MSexY;Y8Rv}5Oa{(VLF|why0jN4aegRPU40{e+8sj%cplXL&t67d2jGR)7x=aA9CDS$h+0jcT2q}Uf8ZoPv>rAu6MfmANw3;GQ zNnJxzZ>fG{RCL0r#I$qi{htR$evD53owC|!ZNJUI&7DgrtOqHtBRG-qClfl~cJ+PgAO1dKvf9*ggO%M@`@Bo}#n($JZdX!i0^)Ne%p&db{;;9wxQrf1FVuRFl$ zF3~euX$&s)Y+BBRTyPVACdqshaIQPt*n7Od@gCGRK5YW0xTV&3zs?S%#dliYmC!u<+R9ao~DB^bUR<0%yL?!e+BAxWcPN zrMJq!35$r*Wn{tm9|#CJ8VauMReSgQ9&m;$jLg=WgUddjQ*fma96=%r3JHVr+|Bme z?+>owNn`8FHgH;twGEaVf=fzH$;`?IH}-3M<{tsh(aG6kmnXRDdo}fs8^9^eSJqsl z1ui-!Ht}>4xPdQ&qd&*MS#Pp&*zO39d$Y9aZZ$Y5X*z;}3qBHppy0aRbt5P^(=}!Y z3NHV00fK@Pm?Maw;C%P_At<<(7p({iPJfvJf`UuW$V5!;!dPq`U6;vaAV1jCZjL1R;WN!^?njx!O)W3%WiW;%n*--6XXq%l$!$7yyGLl;6q-(1@J EKjNB1*Z=?k diff --git a/fixture/20/4/0.1.1 b/fixture/20/4/0.1.1 deleted file mode 100644 index e894c05b2e5a58a692cfa2f7c276b236271b9636..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 738 zcmWmBX*iTo7{>8;G-Juq$U31RdlEvCgpgE{Br(ZI#-2)2F)^W%BuQBkk|YU9l4K+a zi9(XJkR%D&lBC;*bG;wVdp*DBI)5HgJeev&5Ob%ShakiWf~dz=twd{vNfo9@5|Jtj zOB6MhY3i?B6?O7d-1+$Al+^z31EXW(Gyi67w%9uEbaLC{UUav({82^CvswvBDZ0FZ z`cjRf$3h|^qhim+b$$5Q_pN{A*QnV#^G%y=9CkQn-^eK_EGm0gE-+tEOk6^4G2MS( zK=6^HoD&hRUbl3<@9O>92hPOQ%zEP{a9P*0^Y0XZ zI>G7c=^L*#0e3kmIrCZ;xaq$$6ka}Xu1q&?A760QPiq=qHi1)BQ`cFc3ohntY~rQM z;D&}rrv6NWv)}IEvfCA0>4UPWC)MC&WaX4qRKbOXbE410fb03vJNR=5oYe+vyKVO1 z@^0ss+%E;UfGSE`Bm*w+P!Rig7`XPg9iO{Gv0Tqubj%_0|v2B I&qBicKmCG79RL6T diff --git a/fixture/20/4/0.1.2 b/fixture/20/4/0.1.2 deleted file mode 100644 index d2c46d98f03159a7ce417a586862788e0bb7d08b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 738 zcmWmBeKgZi9LMqB7?a24VIDIfAt6bUBuSDaNuFX!3oS{KWZIA<2}z#Pk|aq>NRlK; zk|arzBuSElBuRbx+rOuR0 z!w*Nq9F1*z+1~xGXW;W7IIGpx_8T0) z!I>;IwOnNdE;B1T?@~UvnZJa{EKzViJA7Gt0>RZkYG`@h3Ql){p79bBaEYgq(lau_ zjgE~^|Cs^jvelKb-3MIdovONr_25+LYT7!w;G&{q5>6(9>;E(`@_iJX-FkcHEiT}S zuk$NzSAvt4kyV+a3NG|u7>64LuJcV--^YG%7Aq}n*V%!~y_i>Ytr(mzMT9zA8eG6` zR>*-+aLvzJI$n2zGg@qHzQO`r+Szp8gTmfK~Qj!M>q%yuJ=P9f`YSI zYm1=Z3a=I+C^$(eDuRLw-XDUX;M!kxASgJq<>m+qF6TTCLBRrSw1m<$?&p03CjilE5ic#M?Ab>px(%nElx+Sppo F{~up2L{cNrNNL>L$tjsj_ot@?ix1bFZAGNdhwuzxV+I^&Ut7BcOR3uO8Z z7h3Qg$ID?)T0KdDJ5VS^(W^;^Ym&VagCm0zL%?UBN3%<>x2@gM)7@uzSSOkH$;x`W zjpz24-_F~0%;WNzhts#qZMy2TddKrAzR!3~`=;;vdD!T%SKX6uHCrdF=Ce9#vp8ncyXE<7ww#sg+xc24+1I4IKCbf6{-xInFI%?HNV~Ou zyWOv&PP=m+PFek~@NwC#W1iRVB+l5W_UWqg`Wxv}Ro_^@k}>Wz?ns|jo3ggItnh+v zxJ2rz%0FMuDQ}bf{$}0eKEsO!$rVpm*XwLOuf`_mqwq&=ip2-h7lsdXZ)jdnJ)w9& zc8BB!(G`LVcxP}cs9lh@ zK+FLsJcIQHdm38~2htM6669w{ZeYE_o&gm8%s9I-vZ2;$7Gnk@r_`b@ F69A}S*f;nRkI*bhUW$_Y@z$fT)`O;4NKUv+5C*U~pIG+l0%cswcn zbVly^yrFNy6Tc=25+68s4^O|H3~;sgxs6Ynz^Q9!>Mha-mvAgGH7y<7;MbwC@dI61wZ__hqXW3YD@B}}W#EKG zL}lgV!3FOPi8vGquI*)e_q!f&Cd*7MS6hM0IhR{-xe(m!UqWb(FgPDyzo0$A;2It` zw!UZsr@K(kc&Q1vlvAl$**V}we~wMh%z|^-=1Sk;1FrgRP5q+=aH=#lZ5>^3v2pRq zCsV-ne;ye5F$&IZgT3=s7jUIFI8}G5!AZ-=s?1dd7k)5;$%+Nn^|rh3Q$IM1RhG8v z?ZD+b&dZyQ 
zpy28r)*~o5tp(Z$3NGnHGJ=8|{yu`B;GDKNBPh7a+f@h(PDxn>LBYiwVInBF-j96< zit+LTpx_fC=puouGWfj$f-1wHVQdUPE1TrOjp8zRGHewNHH+LyVF(71##lKlcOsh0sB$BujjiPZ F{sI1RM*#o; diff --git a/fixture/20/4/0.2.1 b/fixture/20/4/0.2.1 deleted file mode 100644 index 9f1932544a3d3ede13e2fc132af9cc4c9e385e56..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 740 zcmWmCdoWaC7{~FmSWE7=MF_bkAtWJ$BuSElgs=z+2}x{{BuSE6LP(O3gd`+MNJ2=G zkR(ZNNz$)B-Z?Yx%$d(Q&-Y;KqYcz_4Ex) zcbZx4v(CxQE4o))QCUS{VddfF6A=~j^7aWl5fmOlZ-3p<_pX0OELwBp5$qv)x zl+^5-IfZwNmi{c0nJHY{JdRGzo=3a_jt4e8YixVf-qYJBC$FHoQB7M%H}+y&(zWEw ztn8^D(~G~C7@5f691c0Udw7DYf7;N}+6GQWR!&((6_Is=4o)sX zAz^TS{sEzv#1Rx6{Y)f+f*bujhM)xI z#e@}L!B??k(A?M^e*BIQ3W?$+3CuwnY2^q7f+;eb2+pEKP@Tk6giFY+6mMn^(i}aF zWETnEF-}mJqCGnBP3F80N&qEN>1VJ?6u3c&{%OHy>Q-~5x z%e8ddO?FP(opZ0{72PhbxL+wOA}TFIQK72(9qwWjY=RuR@rnEJd zHXCizFJ|Oi%`LoDH20SfnkOtNCGEM}i?!b`=tywg)B2W|t)1_>bQkFvFEg=NO-neN z$Vp4jzLGQcb9`oYPC!ry9AlTK?>-i|+9!3*FIvE9>*yLSH3k=dCL#3#2i)k7vFSfE z;2gI(dF*6>E5BD!^SBnAlCp}XmNvKuc4XY?cyN9F10&x@!C7yxaoFkzuHa^2+1+w* z7FO%6!ErBV=iev*$Hy-qEgEovq_nK+VKq28c?C6f4RE1h;V~y;!S#IX9r`*9j=s*y zZnHhOyzBYJcS^vCiiuNX<-qwL35}36Vj*l$69Qh*tMnRT< zH)(>D$fiexF__hq9%W~_JpL9uW|Ah^X(TrStGN^!wMd@B-yz5pV3Eez=_GqvKsB@6 Lg^)C+%lP~Qb$Ld& diff --git a/fixture/20/4/0.2.3 b/fixture/20/4/0.2.3 deleted file mode 100644 index ab017d2cf31e115862df69e386d59ff01354ad15..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 481 zcmV<70UrJW0g(g{4gdfU4gdh*0RR9L0002V0RR9fwJ-f(5Done0CwhBK=6{+0XQz` zJm6d*eQ@x?ndG>j^MG@O^ufUkXOiQB&I8UB(gz1GoJo!gIuAHk$f~NUq9}@@D2k#e ziee%12SNZr06_p)6TRY}SC+7EsdL~=6{r!Q4VVd#1&9HFk-Xko$W{xT;+|7Bc{kKI zUixMa%lx{?Sr^S>pjn>q@2Pd{w9X-}xi)OBk$&;eFIxyI0QG_&3JdQ*%|I=HK43F2 z3$PE&4A=tb12qG-fcb#Uz%0N%Kr>Jas1MK#*aGGQHUqN&`vA>AEucO?Ghhpt57-RM X0_+1c1GNDAz|3^E01p5XG(;6?h%(;R diff --git a/fixture/20/4/0.3.0 b/fixture/20/4/0.3.0 deleted file mode 100644 index 1920087c7406c30152eed9dc5ecc91e75c133253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 482 zcmV<80UiDV0g(g{4gdfU4gdh+0RR9L0002W0RR9fwJ-f(5Df(m08|%RTU-FXzrX+@ zBP0NZhll{`>+AqdPf!4|v$Oyf7Z?C~dwc-i-{1g3Lqq_kr>Fo53k(2mZ*Tz8)6@Vq zH#h*Ao16gt{{UiRWB|s;$N(xUEC7y=kO1=Y^Z-^@SOB`ayZ{~_AOM1cgaGE}=m1Jf zOaQL0umBPh6aaR2cmUel+yFj5KmeknqyPp72moqpYyi&B&;T+sGys;Dm;n0w`~Y5G zU;x6y!~iBIC;*C!i~#QM@BmU%Q~ zbO6@Z*Z?{^JOG}bpa22`1OR4dXaLH~%m6MgFaVO1lmPbk_y7R`0RRF50s{jB1Ox;H z1qB8M1_uWR2nYxX2?+`c3JVJh3=9kn4Gj(s4i66x5D*X%5fKs+5)%^>6ciK{6%`g1 z78e&67#J8C85tTH8XFrM92^`S9UUGX9v>ecARr(iAt53nA|oRsBqSsyB_$>%CMPE+ zZKwbMSRe2e9TLcC!Y7R}alQW^V+FVSznc^S>xzF{mDRl=`*m1mHd6ahvfut~r8Qgxba zPEP=_vaW6DDk}huj*kHG^78;yR#yPIy1M`!9v=XKf`b6%=H~!ON=pE(uCD+Q5)%M* zc6R{T+S>p1_uCXYHI+^&d&fbGBW^`mX`qe`uhL@0RaF40s;dA0|W#F z1qB5L1_lQQ2M7oV2?+@b3JMDg3k(bl4Gj$r4h|0w4-gO#5fKp*5)u;=6BHB_6%`d0 z78Vy57Z?~A85tQG8X6lL8yp-Q9UUDW9v&YbA0QwgAt50mA|fLrBP1jwB_$;$CMG8* zCu^txLhJ+Z6&(^ND#BMqBd+)VV+;kihA|=mqeDPMz~l&s1dI*>5{a9FHRr1wzaKP& Rm4OriK@f-lf(QtKGBA<~hGYN$ diff --git a/fixture/20/4/0.3.2 b/fixture/20/4/0.3.2 deleted file mode 100644 index fb73b4d2c6b26ef2e2af35665793b2eec1cb57d0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 475 zcmV<10VMtc0g(g{4gdfU4gdh#0RR9L0002P0RR9fwJ-f(5Dm=@08|%XVPXKp#l`?A zDJlSrjgA2D@$vvvRaOAFxw-%x9UcIHfr0?!<>mlLNlE~$t*!tN5fT7&b#?&Q+1da+ zJw5=Sp`ri;1qJ|UX=(t>&CUQYF){#@m6ibb`T77{U0wjd!NLF}B_;rfiHZR1?d||j zQBnZ3wYC5l85#h5eSQGo;o<;9MMeOqsj2`B4GsWsadH6E)z$zwIXVEGot^*z0sv%X zW&p^^$^a}aE&z~`k^uDe_5fH}S^&Jgz5pO0A^?Pih5+d4>HthlP5`j6vH%nn765p8 
zdH~$r-T**BLI9+trT_>D3IJ?vZUE5H(f~9yHUOBJngIO${r~|10RRF50s{jB1Ox;H z1qB8M1_uWR2nYxX2?+`c3JVJh3=9kn4Gj(s4i66x5D*X%5fKs+5)%^>6ciK{6%`g1 z78e&67#J8C85tTH8XFrM92^`S9UUGX9v>ecARr(iAt53nA|oRsBqSsyB_$>%CMPE+ zYp4K1>;v!>9TEsS!dFEjuJ`|A3e;F7UM1c4y5NmAJ|C=DddzX!2k(ZiofTZ*$5kU?P4i**`CMG6E z#>R#QhXw@&1pxs84ueitj!jT(bm_m{JU2GC^2ES{fj%BNf&%=roSB`sEtt@tC?gTW z%ECO&fx%&F*RR^F%gPt0UZ11bzw62;m6axG-=CjcW@5?Z8@c!AMh)*ZeiJ?7^Qx!C zXkF$OS$cVvZ2PJ$k61Fdq?SEDvBY4fov+c{orW*Ob5=0iVE-U0D>P#TN4)8ymw)Sz yv2JfORAOB0?BH_X2}g&^xh@fDff-Gv&sWRr?>BCl+9<;4DA2*s#KF>bIv_8^O>118=1;Zf z=n65ZHPS(cf+LScC7ej?dfVOqX<+30sP0BR6H_yrZMJ!r@{6yRR8&^6Q#kmRQiVlC zeSG}_4+e!tM7BI{?R?YK_px7PovOBuuJLA*^s^bc7xRj)70>*hWn09~!^`Kg%hh|2 zPr!jd*2DVdXDuDCJHaU_t7vV|2A6gwJ?BC$xanUr1jz=@$=Stgw>P-D2dt*2&EO~~H9G$?G->#^=Uk8r1Tv%e2B)G66;c>^}!S#OV z8~Qp7&d|u%(#jfK;nkwDTjk)mm+%M*(ZKod4+uRR2Cn^8N6-6Sa2lFg2Ad4QWo73S zTqy*%@R#6P%ni=N(~G&!A6(;;rnZ;u;N%q))z@o)OFo^Fc|Hr=_{8Mg`~o=poem6l z4{$a2Y8xImf)l4p$jZrsi;0a(I+YA=@XOHH&v9_(7M6B9?7@}ZEUUg-15RL>px8=r zaKRy=QPDBry5IE-d>#a+r*B}k#T;Dz<${tMrQkR?xeyeb-(Dtyf@^)zhM?e7)zlFb zT*kRf1O+$yXAVKZxwO@&@2l1b^}^x^O$r^qbQ znHA3RX3!Y)Jdq+w2Yxfj2{Jp{F)E_Z(}15r&lD-6v~V&xe8{QjEYc||g5j;@ObDnO H3;F&7I50}+ diff --git a/fixture/20/4/1.0.1 b/fixture/20/4/1.0.1 deleted file mode 100644 index 4632d222f06e800254d58462830a6bde3989d1c2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 738 zcmWmBc{r3o0LSt7#o8pfSDT~@IU@I&`v^&r#3n0QcO^-ZvLTfum0Ss_kc1?aWF;v_ zl7v)3k|ar+BEj{;AUeWd9=|3|ReyXsDs3+6Q zKOiu4Z&=-<`j%&{on77F)HO5>jn;rmIiH%HlM8O@_cSjb1)Rq=PhUTOaJ3KXnwnd{ z$;m6ItyTwj{6xapb1C3Pzm84*ngZvv(V4N;16;-J${J2BIQl~Av+}K`@RpHzJZ~Ir4_iW zi`n@Fh2TgYUcq@n;CxuVyLN|wd)nC4{;~tyDpfUIJ$-P=XU=9`$O6axIXO#^;N099 z-aCB2RX?a{c=8n7(q+q)RaSwEI~t#KIvL#X=aGpYTyPHS9bLD$fh)UNUUk13oFq+3 zcF9t35eFh;V&lNQeb+nmX&9WDxrO~Y2XI%e=9iR~ffJZ3h@jwtckV(^aBVNz5fq%Z zjxK_NOV7weP;fJUXAu;f*LH6N1y}#L0YSkjDk&oL*^rRr9zqBqHsqEh39(5Vk|as8T9+hAND`7HNzx`HNt+}| zk|arzBuSE666)!XGiT<^nfaXgeGeCz!YwRB5dT*%7eR;-1ks3HxmTcuU^s`(cPh>25W zWaR?)1%(|BkB*6Lebv_WzPta+fS$gA**bF@Tf5Ax?EEVQB{$h~e+j<%{34=a-aCB) z_6CL?3afw8(DJgi^IaD>-PL*~re@$W&S&Ob&IdO$J4fN=14rNC?Y}1gT;1dP<`*sC zlvPx9Ts*z#;41G{)jp~NC%sfgQArsbD=IqSWFok} z&;7$cM!;EbuyNY#46gV_NyVK?aKej3q?Sm73pp4Xd6WgN<85c}r#^7TYfUVztik18 z$}75F433+JBD7E#oUfmM@PQC;P0yR#-*kY}TBU7hWDG9tYPC!ryLBR#>4@OXMZLixA6r8~t zLj(nveIW-y!4V`Ef`aqe#XwMS4Nn^p6rB1>4Fm<3oRW&5;KnB=5fq&3Ha7$XSAD+* zL6y)(4c9d>0Ubouhhv1%nc_EC;DJ_{xviZ&p+a!Sc$qO-3=UOGmMxjV)4}UY@h5wt zVo7&S1c$*9WlI-I=Hi$^2_VPFEYi)4naf~N>9QG;Y@R0G0E#a;K_*AhSWG&@z?~4$ IrHT9f1IXn|T>t<8 diff --git a/fixture/20/4/1.0.3 b/fixture/20/4/1.0.3 deleted file mode 100644 index 7c05fdac9112d86b45054489630378b19b17c407..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 479 zcmV<50U-VY0g(g{4gdfU4gdh(0RR9L0002T0RR9fwJ-f(5Do1O0EQ-6K=6{+0XQz` zJm6d*eQ@x?ndG>j^MG@O^ufUkXOiQB&I8UB(gz1GT$pB>X{MQGnrWt)X2IiOdUJF1 z<|erO0YU&l06_pv=L}++V;_0zqg^aCZU)7#IEfW!+f+c@#kZOO6`HOym6FnPD4QYol8n^zM~yQ@!J-cNQ^! 
z8e%f|Fu+{sS$L_yqu?@OC!r;R4g$-AoP(7HI0h;UZVFNo*btx$^^KRl*~2owE^^jI zvlwXIDxW;Hj-A#y#5Kz%nQNq9Jaq4taa-NvsCy8)WQK$!CZtHRhY&(c_UuU#Vv;5#Ns@%dk|f!Zgd|ClBq2$fBuSDa zNs=Tt)0^jXOG>UrFY7zAJo)8ZJ;d^kz6IEpr{mbEHwH= zOhRH(_s5=rZ-b*_^Rijx9~<0I8F+cf3W~KFK?fKLxJF$UNpDA?Et5`PEAKw z4_xZGwCqbc;HLgeb1dKl=epC4xz`IE`%!J<^Coc8YvzN3i#-*Gpx}mn3?nEwyDjzz z3a+fY0ztuvh>9U7xX`e01O?agsTV=P8Jn0QD7b>_g$N3cmyaJo!TBEyKu~aPZ`u)5 z786v&QmEiN)iHW4Y?Cg2&;ZSh@ES8r#?s1~kS92!+*x4^CYwf=C=ewm(paZ#ntIsm|d8nd9oV=!% zw(&}n)oZL%&R@vL%*wlYYx?)hJPMVEm(OFHr>~!X=bz=?@V$SjiuciV{q-5+}fzYc+0 zWwqLVy#u)18+nEIiokI%;t>%Q0~fL@G;;p|a4m0IyFPS-(>E}*uv`T${nF)}>$%|O z2y($fZgAc^e1dm|fU9{?+x)r(oXQGST|IqpiD!~h)6&6>{TiSBI|t5Xi>udmZ*XOg z%d4N)fMYC`QBqa`7aJFU>U1KwfiHujKgYn?t+U^>*#%sFL1F2mGH^n|BGOA3;3D=# z#vG0X*ZIDy|MLJiGjj_YTRU*s*K+RM%?C%Hzd%4x2wY%LaQNN`a80k8JKlAI)7H^7 zF*O5s;bLmm)ogGxe`cvPIyg_Jmw!MYxTjW3(PDJUu-D7X_RPa!C{ksqT73eI`s zCIkgn@~{*^!AVLmdLgSf3n4J2ql6N>lw{vvN)odQd8mu)CPJGjYUq7 zX)$i*>}b5!U=&DPq_)%jXaVFnnMS&E4suvbQ6|Gte2UUeXVHSlF*2RR*O@E>H$qUy HO2q#k5#vl2 diff --git a/fixture/20/4/1.1.2 b/fixture/20/4/1.1.2 deleted file mode 100644 index 3cd56ae46a1bcbc5c7d5f7d587d24f87a7863845..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 738 zcmWmBdo+|m0Egl46H7vy)`o;6_Ygt|A>@`MX$eVNBuSEl)+I?2lHAfJNs=}pN!lby zk|arzBuSExB-F1z=FB;B=FBrQFAtf*OBErA|D%_OAZP?ZG-6gN*O>953iHv4SOuL` zy2d7^*6VFDGPCk7Bl9aFIt@@h1|%^?vLd`Zf&C z(#qO#qZ7EIYsF>d72rhXQ^h4D!37-%4v&Zg*Z#Vr=R+?z14AQ=b(Y|AFR%-*7J=iL z!z&~#0*<+Bci{dYaE(u!+FrGTQ`gYcUu^&`^-NmM`CM=lza|Ni2OPuA-Df8gT+MxM z!;?mEa?9k^R;YuEJ072$k_v9{>(Kbm32+V@99_3Ez?I%AtG-tQj=o4-cBve=up{BI z$Kt?szwa6NG6>Gh+``V@0bIeALe9-naDwxMXkv750s8_&4~Kzkec9Iat{a@Lp1$c? zGjLhiIr*0hz|H<81m+5Y^WNd(zc&C}{o{s~7p>q_RMij^T+-=e1O+!bHjbd+T(-C( zD7dP-)d&hsT1FN@!9~Z!A}F~2&jSbw&c@abLBW;W;2X@#F;-}8YiCa=6TC5=tOyp9O?Q-GOK|v_`28tNa)Qi? 
zVwhr?OV^a)NM!JJ@EcQTBbG`GFjMND8a@ulj2W~k=Z16vzht~LR7~>%=aHf CvPwh% diff --git a/fixture/20/4/1.1.3 b/fixture/20/4/1.1.3 deleted file mode 100644 index 36df7124a88d32d10e673a0e4b25750c9388fc90..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 478 zcmZQ#oX8@;$G`x>_ZS%%L>L$tjsalxqw zXR}U(^Y2ZXUpzmi=!#{URJy=g7S=ye znUdnNn@{iF$oA=q{rrgZ$=o`-UrH(a8n@Tx+or5;4KqwM`(v^v=O%*Ma~5 diff --git a/fixture/20/4/1.2.0 b/fixture/20/4/1.2.0 deleted file mode 100644 index 352854b406fc959e8b46899873a4339e7e1c01ae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 741 zcmWmBeKeC{7{~F)wj|~yNfMI8^0s9{XqNYokR(Y@_I>Uj{xQN{WoBz< z@3P)C=W1@@t)lXK6(X}lrRT^fC@T5y4hT6A$~_v{^s>4AT}MxEpWbqPmMPn2wQbt@ z^z17+1vd*Pr>2Exib&0t2IsfSKX`u#xW*SvZExGb=`!_9RdHhCjYU(t`GF@##8Pd$U*hQL`_+c-KogUi2BP*Pe3PC`;jZoWLYpnbvNha_F*bUCe*u=up3S8!;ti0>_;7DW&Ra^p`&kow}R85GYnT4flEG< zl5sH;+}Q7NLWl&;eUpdxb{}w653B2+HGorDtg5B011{!xY+_O}xPh;Oqrb+$IjnVb z+vpCi_)baXgDP;c^W>BlselVR6drvn23+TduKq6r;LOY|5ENYQwLAm`Cn`ooP;dcz z0udBk^Q#sF1*dOdh@jxoFJvGnxaq$Hf`aqf=8d4>>YmmkC^!vGEd&LZa5@n|!HxVJ zMNoB=)5J15-iL`32DpVW9@Z4A&2fEe6tTmd2n~V~MQ0nbnH;e+$u=sFG)UGXmvRLJ zpR3JbahMFTBFS(n4Pz#`j61>Q6WUoUM;y#GNeV F{{f5POKt!F diff --git a/fixture/20/4/1.2.1 b/fixture/20/4/1.2.1 deleted file mode 100644 index 2afacab6c803e8e80f5b7afe1dffe30ab6e7a593..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 741 zcmWmBc{G$!9LDk23?a*qElQH?SqE7%woqhCk|fh2Nl20;c_m2_l8}TXNkXy&~{1&5!Dh>4Bs``AD7b#&_Iw8=(OD{C9a z9ZuPIata?5RXncb;^q+&7Liyj>2t(4@MKWf+3@y`&fX7w!(T?$YwH*pn^>Qln z_Uzr~dDsiw(`OAWt!?1s6%^?jn&6@@#Uv&rgPZ&@HUDP;oWpiUSLPmY<&P@rpF9O8 zzDhz?P99w7nXssf(clJ$h9|yHg4=9iX}`?@T;BcsvWMm1_?HQ+SSb$9|9C(M84B+0 zyPkp1gW&WT2IiYKgUh&?nR_n}9NQ9h8XrG6k3*h*$Na%HziR2~ehW@bovyb*A6&xK z#I*DbaItC}UI&Y6&?= zHjqk829qAkLC`w5Tk)GtEhT5kWoe diff --git a/fixture/20/4/1.2.2 b/fixture/20/4/1.2.2 deleted file mode 100644 index 1d560f93e00e380dbd73c66597872bab35303ff9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 741 zcmWmBX*d)>9LMpoyGeE(Ns^ExiXP0@O^Of&zOa!mA!+b>t?sy zn|VdWB^8gjfrG&A_EyO3%KY1CH=e_~{IAo;$q! 
z_WFaXd0yM}x*42`s+x|j9=P~~#MFyv;6{Fr&ip0dTsFCSZ1)6L{;;C@Sq(UuC9+D& zD&QiIN5!3q2iNztfB4r3INP;$&Kq6872GK-dr%HebiSDMVi|D3heE<5BEhwP=;-~@ z2hMo4iM5R_xU6g0`L_$e@y+2EnI{U)caLAt!C-KWubSH4w}aEt)?pbLgG;%Pnt3$~ z+|=|8?`%GB>}?)CyM4h`J*{qd*$7T?xsoPR3ta5!xa9LG;D&wgpR16r9EiO#}s(bS@b|!HtbiAgBVK zu8bP$IKaf8b&=5kHydGCrs!{pC)r@iM2XNM)QLg72AiHD)XE5^BvZAiMP!m&$5Cao z*i2)3j!-kBkupGKQcF1FoO+^~!6KURd<8;vj8;lHl};@t6P$Xk3YVq9WYhgBI5YtV GbBTY}_)Nn9 diff --git a/fixture/20/4/1.2.3 b/fixture/20/4/1.2.3 deleted file mode 100644 index bf11e16606a1dba689bf677efe1c1449ebe4f74f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 481 zcmZQ#oX8@;$G`x>4;dL4L>L$tP6BC-t@?ix1bF}QGE7s8w0|(=I%9$b-|>eQGVPBG zWcm&lV&xc=g!q=MT(%deKNPs?o4TA-{SV_eBYGSt#O6tw+qHdzmn0fHSS2Qt9`Pj zyUg&yu28$mEII47pF6Ur@g^KU6a88-Ty{%W<+j>{TmFwDGH2WW*`2MN>|4@Ze{Sxk zlR@uyyqx3eQ~X9Hzt*Iu_1W1^Z&%gwn4VYNH*;G!CcSZkAXUF^-+=+I$7PArg1QmS}8Hb&xchwAJj_QOEajH_fiv)>Wko-L|nl5UZ}6 zyr_2foZndr-fus57VFJCuyN+Pj~9&@wHR2M^&i+87$Yit0{&(Hue zGc*8}mzV(h`}_c2Utj>j!^8k4Cnx}li;Mv7@9+RpQ&a%9x3~Zr8yoPH4-f!yb94aK*Vq6$J3IiMpP&E&0|WqOXJ`P*%gg{SFE9X-lav7V_xJ!> zTU-FXzrX+@BP0NZhll{`>+AqdPf!4|v$Oyf7Z?C~dwc-i-{1g3Lqq_kr>Fo53k(2m zZ*Tz8)6@VqH#h*Ao16gt{{UiRWB|s;$N(xUEC7y=kO1=Y^Z-^@SOB`ayZ{~_AOM1c zgaGE}=m1JfOaQL0umBPh6aaR2cmUel+yFj5KmeknqyPp72mmK1CnzW=DJdx`Dk>{0 zD=aK5EiElBE-o)GFEB7LF)=YRGBPtWGc+_bH8nLhHa0gmH#j&rIXO8xIyyT$J3Kr* zJv}`>K0ZG`KR`f0K|w)6LPA4BLqtSGMMXtMMn*?RM@UFWNl8gcN=i#hOH52mO-)Ts zPEKs7003Yg;01I563A&nBHSbr+x!191_ifWFjEA>O3ermi2-5tdq;&7NiaZWL@ogi aMYg!BpdmFy0KgFh6%+vg6-)q8cqIe$yqE(3 diff --git a/fixture/20/4/1.3.1 b/fixture/20/4/1.3.1 deleted file mode 100644 index 49774937378d115a433a493f8beeb417a9a3fad5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 479 zcmV<50U-VY0g(g{4gdfU4gdh(0RR9L0002T0RR9fwJ-f(5Do1O0E8E9Zf^k5($fGn zHa7s7nwtRq{{H}BVq*Zs#>W6DDk}huj*kHG^78;yR#yPIy1M`!9v=XKf`b6%=H~!O zN=pE(uCD+Q5)%M*c6R{T+S>p1_uCXYHI+^&d&fbGBW^`mX`qe`uhM~ zUS9yg!ovV1CMN)iii-g4?(YCmQd0o6wzmKo8XEw8et!Vr;^P2CMn?dts;dAE4i5lv za&rLH*4F?zIy(TJo}T~#0s{bLW@iA&%F6&PE-wI)l9K@S_V)l;T3Z0UzP|t=A|n8W zhKB&@>gxbaPEP=_va@~ zD=RE4EG;c9E-o%FFE21KFflPPGBPqVGczmAD4uF_Tt}@uHs@ls8UgJPnVJ9q diff --git a/fixture/20/4/1.3.2 b/fixture/20/4/1.3.2 deleted file mode 100644 index 648f9102dfc06c4520f4a567884583c4ec38a82b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 478 zcmV<40U`bZ0g(g{4gdfU4gdh&0RR9L0002S0RR9fwJ-f(5Dn!G0CX2{adH6E)z$zw zIXVEGot^*z0sv%XW&p^^$^a}aE&z~`k^uDe_5fH}S^&Jgz5pO0A^?Pih5+d4>Hthl zP5`j6vH%nn765p8dH~$r-T**BLI9+trT_>D3IJ?vZUE5H(f~9yHUOBJngIO${s3TM zVgSU&#sDZODgca)jsWoS@&Hs-RsguUx&Ry<9sq!Wf&k>@<^V`ZN&u{_t^g1b5&(2{ zb^zGf+5kK~J^-Mhq5uR11^{SjY5>g5&HykmG60m7mH_zq`T$&AUI4(s!T=;CCIE{0 zD=aK5EiElBE-o)GFEB7LF)=YRGBPtWGc+_bH8nLhHa0gmH#j&rIXO8xIyyT$J3Kr* zJv}`>K0ZG`KR`f0K|w)6LPA4BLqtSGMMXtMMn*?RM@UFWNl8gcN=i#hOH52mO-)Ts zPEKp607C5p;01I55~x{1B3u<;@Bhab1-DH-nPh^D06rT(8f6+E9d=b$insU?T-;62 UkX8~N$8udBN1=E$&9$#nIk0P+iU0rr diff --git a/fixture/20/4/1.3.3 b/fixture/20/4/1.3.3 deleted file mode 100644 index 28728f8a1adf3db64b4e419f5bcc2c9d84757f0e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 316 zcmV-C0mJ?R0g(g{4gdfU4gdf=0RR9L0000a0RR9fwJ-f(5DnD`0D{gmG(c}oB;US$ z`_8s+-@bkO_U+rZZ{NPX<=eM!-$pEnOeTvgktLDIWU?f(B*EXWU%&P(KM1AL`@XK* zwk^xDB+|OBqminrB2k*AD2j$c=XsiDS(YFWNs{9zieeaspAQIv9?$oEosQ>u98TAD z9LH^&rfHi^mSq@rU6*B9RYg&sHZAbS>nu}Tu*eF(d@*&&4a=;tH_8wL+_AaJ5)TZr zd&?CYJo3UYV~lOG!!vITo^r$idz`-V!~}<&u*()7OfthUXS{v#!4#sNrE{z4Z{#taWV{pxTK>nJcRo)cwKkxw$0t^DHLtf2! 
zHN-0+UIp>$%qtyUSI)#QFV->V-%cG zY>ZlC)SFRcMx8Kf%%~4Wc`!=MC<{haFe=O^J)c_nl;=|^pGf(H=MyKNHu-et6Pr(0 zd|LDAiBC>^QuE1(Pepu6^U2Gn06um3r2Ay_Dfda}6YbM%A3FOSvd^4-{@BNledO$O z$3AxKQ^!7L_K9JiGW(>k4+{H`*(Zd3K-lMlO}K2jWz(KbuWWKCB}WF3E7o%%v4BscC7L&A?l*J?qCRH#g%%lb;F)#_tqyi=pFlm5u@=bo{lU$P9`65^3iQI@6 z@igz@JRHqyI1QiSLZ^N9)_=}=>7)--ESqB41Irv(*1)m@mKm_DfMrL^e#?BvDmg~U zu}O|Ka*UByLR}#OF_!Y!2Ab$PuD~De={JP=S?U(J>(6868pkJcE2c>Zk8V8+mPzeW-aL@<`ad6NE2WfE71P4iQPz1YH z*`>;^RCbB7E0kTJ?8;;pCc7@#RmCnUc1^L1iCs(VQexK-yM)*k#IB=Vzg@mvxLriM zZo6W;V7p!e6*3SZ0}V1zoPqclXpe#53^d0;at!psKrRf_!a!pNGGU++1`;z+2Lo|% ztCm}|+?wT9o?EQkTICj=Tcg|(<<=LsytviHtu?o-xK+h1HMg3$#l)=yZXs~%fLmQ| z0dVW+R_<2OE!;nk{NuQ^orxlusLn)* zOoYfpbS4U9B0whkVhgfHraM( z+a%j0*=A?kBHI+%mdG|EwiU50&9)x4@vsfewi>q4ux*BILfdHDX4_iZezvi;m9~+# zedvyd?0Be-hunCmjfdEHsEmimcxa4=LU;&-hdy`+gNH77$byF+c*uc=8n|}KHB+vY za?O)#om}JOS|!&gxi-nQD6Tsusu@D{$-LViH3%#+B8w;(lkO~W>u+Rt#iLg)z3u&-W1`A;@EtYAp zOnYToD$`JzcFHtRrhPKalWAK_(_&f{)25gv#k44X6_2AJlX)|doA@J{je_j3o@bBnf?qAVA+`dQlIkK;jeRK91vagVR za`yGHFV4Ok_TjMahJ9uB!LaXzePs5Xuy4#h)V|X`pL=`m*>bOzdwTA*axc%lH145s z?~HqG?tyXdi+gPDU2$*Cy$S9~a4&*;U+yVzFM)er?genK%e+kHVKVQMd3EMNGVhUj zbmkp0Z_d0S<_R$`hxNi|X59|!&aBI=D_VzJH(MuL7hA_#w_2y#NX|xmY!qjs zIyRzXBRCtevC$eEz1fJ&Mkj1EW}^=_@?aw|8(FYX1sjDqr{`QN=klB@uhAQ+iH`aw?Qlc}`_=3X@ZKPC;_& z&M7vhm^ih>sWqpDI3>g>HK%kqmF5)h)b7;LDch-`Q?*mBQ>|01f#?jx$3SxiqGO;r z29h(-8Uv{@P@I9x3}nJUWd`zKpbiEiGtiiUDi}!2C_bZD8MS8=DWgUirDxP8qcj!R*Y&h%85};jAApYh*3n0S~H5vC;~=p8RZ+*8^s%S8$~qA_K%%^4Ed+dKYskv z$3JxbvE!dQ{-NU^H2=Wx&zXNv_~(Rw%=`nwKOg+#!9S%wpIrIW$|pXbO!-vGCsIDa z`2@zNH=nThbj2q(pP=~k#3v^{f%(M1r!Swnd@A4*mruV>NS}P0@N9x))16I-Y&v9< zolSsj`eTzHo78O5VN;q-GHi-r6PisbY)WAh3Y%P;TANs#PMb`dN|#`{^vWeyF0FA% zjZ1MZiE$~6OK>h}aVd*SSX>%&se(≦(?iflFX69dOBjOI^OnHF=&(@<@*7iae1c z@*+;e*PMs%a5bmlGhBw>PITLa{<`a^|NInFJd@%X1kWCL=D@QAo+0q;fM)_c3*Z?5 z%P3hk$udcnF|urtWr{2#WZ59g1XSQ*F2IOfH%E{<_=EQ4bh9J}C{1jiyc2Ej2e#}qi0 zz_3q-c`~e%VU`T5WEdsG92wTgFh+(YF${@eM+^gE*bl>e7>2{J8;031%xG9{7;e~X zm~2>Vm}*$iu+&0&ER@GWcq|mhLU1hf#zJT;bjCtvEJVUWBP=AsLK`fk!9p1<6lS3b z7LwqXD!)?sh03o`eu47qlV6zpy5yH7zo_^%#V;v-E%8f}p{b3%knfDq$B1yU6U~ zVAlq>Xt_1ZEm>~uxwXnIRc`6ICCaT(Zh3L5i(6dWYICcKTU6X)bBl>vOWg8uOMzPn z+}d($fLj9G;@#TaI-2FktVU)rGONz4LS_*%i_R=QX3d#}!>k)-*)Z$OtQTgvFw4v= z6K0i}rJ5BqEA^_+t6E;s@`}$ZR$lFSg~qEhUYYUg&8sh7dGX54D=S{Lc@^f>1g|7` z_2tz9ue`kC@``|0TUKGR>XKEKth%%6kyVbYvU3}Guu*BF=lYy;fj1bB$|_V~snFGmR@<1j|LQT;$3{ zYh0wpMR6_?w>;k@ z`4-7HJl_=gmdH0mz7_F}h;M7Y@$hYjZ)(2L@NI@~GJLyzBl>3hCiLy~&GilS?dRKR zB0DCkVm5)9{!^!?YQu$uNzDX)8=qVHye3MwlkTG}N@8 zX{TqhJd5QSEYDPVmdZ0!o{{lvjAvpz3jAX({Wkyns6f{z5U!Q%o?4xBLpM9+C+p`ai zeP`@5W8a&7U+nW@pPPMF>}#_x%)SZsNwDwBz6JJq*~euc0sFSx!{pv2_bj=0=iVdt z9Jy!bo+0<@+!Nwn5chz%m*!p$_i(s}<{k|9&fKfrqura`W8GWbQ{5}wJFm|?IrHS0 z7stFf^U|1y#ymLlz?k=D9+`O@%-dj|n0XY;3o{SQJO<`{d8g-{DDOgfm*-t3?=X3X z=N%;P?!063j)`|myj$~bh<8G~Q}a%TcWK`7-tFEUy|cX=dRKerde?f#`iRa)e0((L zBRW2s<0CmAt?`i>AI15|%tt1CROTZOKI-5jG9QiksDh8gtmCtem34d8k+N=-b$Zrq zvQCqAdDgjEXT`cU>zr8E#5y+XidaX)x;5*#tRrCEmUX^$y>+~Gw{=A8Y#Z6x$dHZd tY~;sAeQZQ$BRe*#VQsLVkg97N_IF$Z06P?&?j9Q43JUry~gMarpBPU$(d$tg`vKJIw zKy3`f#z1lgIx`Ro1DP2Jgn>R7sLViP2Et$Ae6r(Hnolx(Li0(5Pbqvl^GSqHA$$Vi)9I7xQ)v?{n_k(}XA>%$PT6G2rZ}6# z*aT;j7Mrrz^k$P3o1)kR#U?PD9@z9{Q!=E-7*;kxO?j z334frOMqNjbBTvbYA(@mX@*N_F0pWFg-a@2dR-d2q{wV+tHA;1~hN1{h|^ zuu6teGR%=-jSORCm?6Uo8Aix3Acp-g%!gq(47*{N4Z~m<_QEh1hRKG-hQWrdhN*@H z4LeaS6VWnJ923DY(Hj$?G0_>6d4D7!-0rOB>Lc44y1id|LgqGDGQyO`Lu#4aLs4Y5mzT?OnSVAlY< zcDs^x>2?|IqU|bL$dH8!S%{E@=q%L7LVPSVXCXQknqwgt7J6YJ7Zx(J&*dbfDDcC#Rv^~fwoX4#o_$Sgx<)tTkTEIPAvn3cmU9A=@J6~inTW}TUZ z!YnhhTC-TQRA6P9 
zwMnk!xyHz~MXo7w?TBkeTw`<1hig4tTXW5ZYc*V>;TrDR?V9Zx>>AOv*R`N)scS$R z;jz&j8`-hZ8ymT?Q5zeXu~8Wtk+G2o8-=hD2pegzQ3e~0*=T}|B-ki|X{by)Wtu6| zKAGmpv`(g3GOdznluVOiS`^cun3lveB&HoP4Txz!O!Hw{0Mh`N_M4WQb~Ft)O*Sny z4R(!Ew~pj4ihWk>YqQUZeNF81vaf-C4D8#o&wzae?Cb60?c3dhd6~y$-U0Krywl`eChstLhv!`+?;v@1 z=N%&N?7So5-4O4Dc&Fyw4)1h$m*$-e@6fz6dPjR#^v?B8=w0jG>7D6a>7+R)(Q(oo zC&@W!jg!b(gP=bS+{2$DeFdAr)S+J>oi%H zXPqSL@T_aI&WUwRtYfpTh;>A)TeFUbb!yi2*74Tu*4ft8))}pPts7eB8mZ1mevCwC zBs)f`W28AFwJ{PKBgq-*%t$DVWM(7~M*3i+G9!%{34@Wuoa=MWm2-T~nR2d_b9>Hp za*mU8dd|H$hs8NJ=b$+E#JM)-jyPw;IX36GoIBv$mUF*zzH_~ExO2C2wvX<7gvdvB qJ_6*UKR&AS5gs4i@sS-LrTIvPkI;Oi!bd55bmk)wJ__L@5CQ=B1rmM$ diff --git a/fixture/20/5/0.0.3 b/fixture/20/5/0.0.3 deleted file mode 100644 index c0796dc1457926a3f37cdc28b847d22a3a6c616e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 450 zcmZQ#oWdf&$G`x>hZq?cL>L$tRsm^^t@?ix1bA6_8IFi@G6*xsGUPIpGc+<>WVp@n zl);pd!QOM`;aDDt{F0f)wP&wCG~}{&ojQNzPiAi3*!04Kr?0B= z=v#J9p8NJYBX3}I>el@yCHa(fYuhJ2eErpx-#>EY=DoZE;);dUtyk~Av=nsqox6M^ zqY$@fYJTO(%Xf8!%^fGsUi$Svr$}US?%v}U6-Bj8Tl;6e{QTEfEIe`J?xUjO@|u;+ zy?38~bd>N8UAlfJr=+lKZh7Oy+fPlUpdOQkc}xc8Ff@?lDD($CTh6 zQ-*p>1?DkTn8(y09&6NDAy{DjP+w4I!5{st=33`3YXES?Y j=Jl%^-`U)jsLcL7-#Yznwf!$dYg+*-dtbED2Xq$TG4+mL&26^Z^b62?DFK zScS!^D^|5x1;wf-Rw`MtCCjvR^e9NR@qM3IhDvML{8N?1<0vC zPSH7a$Ei7|(43m#lnkfNoKoRb3a8AR3gJ|lQ>Ifur%I#xjDls~I%U+JQE*0$ zF-nY4Z$@b`DvMEWMny5I%_uOV9vJmyRF_c*jN&qC%P0Uw{XXIOG|8tspBDL~$R|6W z1o;%mr#he1eCpv-nol)+qTv&oPb_>|;nSH1IcUs57#t+#pfCqPa1fY-yd0FkL0vZK+0@CVJew-nM9C&R zn;O}~$fiX$vDtLQrZt;>*yO_|HJfhOWW%N!HsLniHrY18HW6)lZAxuIZ8{A^XP`R< znlsQF1GzDfoPo|5$c%x?7|6^(Aq-SzATk4GFwmHRBp4`yfx=wkbLo^zdoF!)$&*WZ zE?siTl1r6ba&sw)OKmPCaS4e_Y%T?H35ZKST-tI8fJ?tix=XoBxJ$81MVDaz2=Y%L z|J3;>kALv^ht5B7`~$~7Z~Vi;KP&um=ARM%5#b**|7`G&2LF(`ERW@Vp2|@i!c7Ju_8F2&t^i7W9W?&S%*%LRA<-*Vn}*98}qa1jX?jc`#17jbaW1{YayQ3V%K zFl?1!stikI*eJt985YX0O@?VQER$hX45MP$6vLVr#>B8Ch7~c4h+#tvBVgD7!vq-C z8+J5|H;guHHcU2=Arloc5g`-xF%cgV?J-du6VWlz923DX(F+r~FwqGUnJ`fa6MZm| z2NQKL(Ok@9PlUts+D;+Ge{uJ~oeuPS~$@ym%{P5f%$7X!Z* z_+`Ma0)7$j%l8ZES8rD&yCB*1$Sy>79kR=iU4QKIV^<%$cG#uEt{isBuq%dLFziZU z7Ye&h*fq3kwM%GMY1hxLw_dsB%B@yzt#M0@TWQ=9<5n2Az_^vgEi7(bam#{R72KlW z)|Xoj+-l$!msX!9{TdClvkv@8s$}WNoQylUdrnpZ}=D&m!z zS3SH+^UC)M=~eHQ?N#j+?bYj*>s4!^KNj+1p*|LxvydGN)v=JAh1yss&O#|Hgu+56 zEL3J85ElAip)m_#u#lKludH%qRV%CZtTJU)DXa9X>SR@(=Wyq4=WOR-=ZMa|&ZW+w z&Yi~58F$CHIpf|K=f*fWzz zPriBbP0zPWzFG3El5cLlMe(i8wiLlMgwhgvvu#L<$S+2!$t`&$UplfpYDWYg}C0;@X>QQ(Tkcnwx7&TvOs&64$(3OW<0UYXV#g;M$gJylcB_ zx@nC}V`SPQ(+HV1$TUHw?J-S{X?aY$VVVuoYMAE2v=*kZFs+1XBupD&T51|<+UXfB z&t`ch%d=IUsq!q9XJ9uXIu!C(&`z z94Ey&iH(!iI0?>4W1J+$Ngtf#!ATvQB<7?GPO{)6Feg25(wBXu>?390DEsp4<7D3^ z`|#|WWZ#{APwaDIUlaS*>@#9t5&P8a>tSD-eZGB2`+ECq`)d1W`(FE8`&uLYF_Ir6 z^)b?%k?a_$j*;Yy)W%40MoM8M6h=B>q%tFcFwzGjjTs4pk;L44<(@0|TDiC9o+*qdz{P^U)n2&G`t;M>BjR!$)U6QsJW%J~H!B2p^UC$n+7=N2Pgv=D{-W zm3e&ToicCFJUH{lm?y@(H}kZZm&H6c^P-s7W*(S%56t^Augg3H=5d*~WgYfG3zi`CuUukbr7rrv(C%91lDyqr{`QJ q=klDZj1zu({f_P4*^-~RUZ``b-gnj|SLO;Va9DJ?Bcl9ZO(5A*>L0t^DHz*zOg zDlb;8S#`xKD^{sl<;1Eqs}xw3z$ye*by*d_DgajLR^?XVPSH69$*D(9%{g_*DML=l zIpxQxIH$~<(&1DNr^=j);S>y~$ecpq)RRm=QF}(+GRl@wdPcc2D$gi4qtqCc z#;7)Z5ueU{;^ET{pUixk;gbxX%6y7_f_-{@QhiE<$=r6E2(fY=X0ij!kbi zv9W25O>Q=gu}O?gZ8nM7fsU(mvonMmvEPcF3B#%F0C#BT~hrc=N~`*iSv&g z|J3mhoPXT-r;UHu_=n6tDEu?#9}xce;2$ynVDQfc|5z|d&m>nS<(Xv4q*5l~ndHf& zP9||OiOnP|CaswS#iS=DshNbtq$4I7G0Dp$0VZ{s#GAC6q?<&WG@CTEPa^vuvd^4- z0NLk{edO$e$3A!LW5+%->?6ZIW%f~FpA_~XvyTY-gs=|?&*iv$&tds1*K<^U%1!wg z7vpWN#j`k@OYtZU#Tz&S|8fF8z`fk)z4HRcSU9%AF%^!HaBPHQA{^u3*apWm7$(cG 
zScbteOqF4&3`1p@D8oV-2FfrlhHWuSi(ym@n_`$0!88EDXVFV028s-~j zH0(BvXqfF+Bfl8=wa70*ehu<~Cx$vunUo8AG;a3U2Nci>h zi}cH97cIMH*(J*^R(7qjOO;(`>@s6l8N0mL)x|C@c3H8jid|IfieMK6yB^quz^(&! z8L$h0UB6vNw=lVN$t_E6J#x#DTaDZ@*|JbA3(>NW91F#<5F87ou@D*yov{!Y3w^PW7YlK)&;|=>u+Rhx zNw81^3n{Qr0t+E9OO#om%mQUromrX8!ekbmS&+ZM93;mdvcCUWxL`&MQq`)p=#bt14bm@hZ)$CSEb|3e77b zUY&Ubz^mV@qgS|BMXzqJVy|GYUIQ62P$2^mGEkg>`WT3hf#3{8$3Slef?=Q+26ADb zF$0}2kO>2c8OVcy!mP4oRV}M%S(Rs1E2~&pg=ZBhtL_BjbmMa4aN~x?$;QRTt;PY3 zQ(YwIB0nyQbCDev)o~G=i`=-Vjf>d0h|EPOTr}n)5H9-QA~6?XaM1-9S@2EIH&?#p z`DV(uQoiB&=E=8CzH#!6%{MH*t@#GUwAp(VU3@ndpy+3d@V;ULL#+b&%v@ND-F%5%h7fiEY+5^)ZnAX5F z1Ev))jeuvKJnQ5cC(kT-R>?C;o;C7}k!OoML*m&H&y08m#Iqls`S9$9XEr>m;Ti4O z?Ag#W*0Z2zt7kvYNY8v8!sDSk99Mj`i9>?%FCdaWjj=>TCMEMuWKT!VF`IpH* zO#adN2g$!j{xR`yiGND`JM(Xde?t5-^G}C=IsCi*Bl>6i_xk7h*ZPP0clu}gh>wr< z_(+eB-h4F2M{<1R<|8#eO5-CFJ}TiO5??ECFI+K1a$wC}brwhy-NwUHqk z6|xZ_8^zhEkB#`)2+l@yZ1iR$7&dxgBNsLrv(X6~nXr+VjXc;W%spG~)pC!PdwK4) za*vgJctgF*>t5?r>r(4b z=W;oR%eg)0;GCo5+?#W3oLl3Zn{#8F6XRT)b7IbUa4yU_3(i$=4$Qd*&M|QA%Q!sa rNEvr$94F&88E0o4CF3R;C&@T9kpZyjtLu0HUMm1@;fwVa~mRG(9aq7(}D^9sNCFaxwr^1~2a*BadUQTs6MZhU8qw zj51`Dol$E>2{B5|C>=)SFe=Tc7)HS`3e70mD56obQ9+|xqgbO%qe`O=G-n_=29h(7 z8Uv*Ti0Kwt)HV4yFb_IwiMlb%nSe9Gigo==i| zisTa{pW1w4;uD)sM0^_J)0$5_eA?lY4xf6Tc%OEkYM+We(LTLCxjqH`Q|BK({-N^^ z9skVn&zyhQ_-BoO)cEJjKPLQR<{uCK>ENF-|5)%(1^Dh}hI+69JpJZ1QbF+SJ>0+hp5RyL8AULoV65 z|=Yg;rQdg@s61XoQ7C zSO~Qc&_btUvK))$7%azBIhM*XRF0K#jErMr9OL5H7RR(WM#Zryj!AJWf@2UId*GM? z#}YV(z_BgI1UMGJFiVD2GK`X8jSORC*doIS88*l;L52Y_?1y1K47*{N4Z~^}=EATR zhOsbAHcV((Y?x|TY8dJlEx%^@CCe{3e!cO_jbCT{GUHbnzr6U>#V;;?Y49t9Ul{xf z^GkwX5&ZJ;h%iC%Zh^g=g0#yDZstXO|Js)O`6D362iI7rSxaU2B4L2(X3!v0(i7);=XmFK=W6GQ&e6`j&biJ7 zZB%C?J~pDW5gi-NvC*83*w|=|jnvra%tj_`WM(4|HtJxbG8WZWj>G#U41oE78TjB{dK6XV*9Gh$p3WVjZ?H5IO# zey*7=g5{!DE^_4}HZEG@A~h}=<03IG3gaRzF3RE}EH1L(q6#h=bI}7AIdBn}ix9Zz zfQt;6#>uozrfD*bl4+AnlVsW=(-fJO$TTCS6)}y7X+BKrVHyw9YM4gDv>B$|rrD;| zrp2bgroE=2rU6YmO=QPJbxcIZL~cyf#zbsPRK`SPOf<$sAWZbZL>^3p!9*8KWWhuc zOa#G14?HvFSt-v*dFIKpPM&e{%#vr7Jfq|p6wjV`=EO53o*nVbh-W}N`{9`n&wkI6 zp81{`J;Ob_J(E2XdKP;Kkca+w$d8Bcc<7FY?05){hu(O|jsHOT_sKs`{^9v|$v;c} z-TCLpzefHk@h^#gNc>atFNl9Y{7dr>hkrNxJNlRVXY_COZ|I-wAM4-hpXww%PRipX zJWi5xQXD72aZ;R<&^YOglSnvegp))#iOfkGoTR}?VosXiq%iwb*_X;bRQBoF7s@_R z_T|}!$v!;$sMt5fJ}LIG*|)?#CHAe^C&WHA`?l;8U|#_HcKdYuj`q>^5$&6eM94^k zj3mfNbVk}^Bt1r&Gm;!5$r;Ioky;pug^|pRRKiFkj8tYM4n`t#kCuD0+>_-VpL?s^ zQ{~>Cd!pRabI*%=UEJg1o|}7B+@s=Nn|n;$V{^~Ty#?+maF5Hq0q$+N*Sp8Nx0`oo zo+I-bnP+ESA@c~CS7#m{^XSY=GY^M(H_Ssb?}d3T%sVsBgn4G>vF5GjspfrG%R5@$ z^?Aq2J3jB?yhG#N8Smh{`{JD!@7}z#;+>m!V%|;gF3h_x?-+RJ>(Z=?VI2(X(5$1aBU(3G7qqUmjd#KKgQQ&pA=f p={cv#xlGRGIVZ`vNX|iWuFW|n&apX1#JM5PtvScTxgE~w5CB=$EdT%j diff --git a/fixture/20/5/0.1.3 b/fixture/20/5/0.1.3 deleted file mode 100644 index cdf8fc7f010586e03cb87ef9c3ba34fe2574dcf1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 448 zcmZQ#oWdf&$G`x>2N)R`L>L$tRsd;@t@?ix1bDylFl-T3XE0>2X6R;^$}pedJ;P51 zW=4kK*!1lOPfIhY>eqKpe*E^kJ9A+4>aF|vStONOX(wW@a{C@$|y# z(^v27vs*e(p1b@zBL{bMYX1I{mz6nn&D$r=e*N{oKUZY(=Do+oxfQjmTl??7{Oru* z8@_zwZf;&t`TWY}%Xgof^ErCYUb_B2C%>`EAD>x#`0RCcNki-Isq^3eWR`+@Od9Sn8HmSZ;U1HNcuXGdF$Ji{6k#4yf_qFE z;xQGt$5bI6Q-gV|QD=oc+!gd#cmd}O#E1X@ diff --git a/fixture/20/5/0.2.0 b/fixture/20/5/0.2.0 deleted file mode 100644 index c3603a9349bb05577e9f260057c0570f38f035c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2168 zcmV-;2#5Cq0h9y~4gdfU4gdgn2mk;S0001B2mk;mwJ-f(5Dm3L01DwmAu$lZC3gU; zM4-@uR1^X&NkNSQku;U0t3l0 
zP#gonF_4^r(ijMhf#M7V#z1feDl-rV18p$Sn1Lu5NX$TB25MlSFR%8z66IAWuk^gi znA2s@tlfRkl-i zP8o8_&M7}m^>M1ssX9*4af;5VG^b!Vh2|6rr%pI^=F|tLJUC_Ml<8FI)KRXCYGqWP zQKpP4WfUo+;*0`g6r535jJjgfn^91VdSa9lqri-6VAPjUT}Bx&ip!|qDBq~wr#zn^ z`Gn^aBA*WVbmtQwpZ@sd$EP))boiv^lMJ6?_>|_83ZGK=gu*A-r`IRfC)B6YC(|Za zHodaRl}&Rtsj*4UCNVaJu_?|bEjDGb35!i+HdU}`%%(7#9M}YA69StK*wp3HCYLn1 zr00?(mm;~8=aM3q61jxPr6Mj7af!_(9xm;0Y0V`XF3oUBhD*0gMVD-sf-c1_!7iyT zr7odf$LTnov+*@P=VW}0i}4Tc!Oi@FTW~Um;14`Z&wO~+!!sV9+3>7}XEZ!>;aLmM zSa>G%O!h43+3MNPGu1-0EHukPvMl7rLTxO>#zJN+RK`MNEab&PT`a`KLK!TC!9o`- zB*8)vECj(qUKUzlAqAFwvdoiZoh-9tStZLTS?0*HMwT(MEQw`EEIVQu5X*j8=EE`^ zmff(-hGj>~a?5bbXv=2HgqE?E0WDh{l*d7M9CXJ)a2)i;L2ev`#zAKsWX3@w93;X) zAsn>9K^h#C!9ijUn&2P_j;V4im1C$J3*{In$38iR$+1h0S#peuV^bWH;@A?$lsJ~e zF(HlxaSVuK0~{0JSOCY6j_Hma9jhH1Iz}5t$gn|%2{LStVR{V9W0)Mn;ur?UFc*fk zFpPy^B@82B*a*YQ4C7$f2ESNtLtBPM# z{A%-yiC;_n^73neUkd!%@{5391N=(*wflv%%aL7;>|$h>on3|OB4k&cU3~1?V;2s) zZrEkRE;PGd*yX~mGrLUKRl+XTE}&hjU2oNLiW}FJ+QW%HAIM}$?IM+DT zxYIb(MzC!3%0{kiG-o32NAzODI2!?zi}$?)y=t>~NW zThO=IH`q7Tx70V(MRi<6$3=85V&kGUE}C-@85fOlkr)?!aFGWWnYrkKi!8W^%ta7f z^uR@7wwbc6lx?JJ>tq`z+cw!o$+k(hNwN)!ZBJ}-V%rhhjM!GhHXpY2u#Japziqy4 zy=}N{M%!-Ngtp1H1x*CVM1M@=$3%BbWXD8xOytHyZA`?*L@G>_!bB)c6v9LxO!UD- z8BB!1L>F9x<=QLPT)BqIwNtK{a_y6Ao?PqXnikixxQ4|wDXv9v4T@_?Ttnj85!Vp7 zcEGhR*8;c(z_s7CqieZqxQ7&ZD3ON{c}S3l0(l6Khw^v`kB9Dfh=zw|cu0naSa@iK zhg5iIgoi|UD1?Vp52YSLO_OC>EYo0_rpmNbrlB&8jA>&`6Jr_|)3%tV#WX6WO)*W1 zX%I|%U|N`I3QS928UoV>m?prq0G?6uY?5b^JY(e9BF_|gM#!^4o(U5FX!$qGKUx0q z`M1hHRsQYyC(6H2{(14Qi+^1FbMvo?e^mTy^N)#tOZ@ZlZ-IXb{M+)6fPVx0OZvC_ zhqTX;eU0p6WS^aVh3q3_U!8q??Av1>4*PD{XTv@;`(D`R!oD;6OxRb#KGr^Qr3;KZqK?+)@ia%&$>73tXSt}ofGStSl4D<5$lLp$7UUubp@>3vaYv| zw=QYjZC%ki+eddkGUOvWANlc7A0O5EsE&{5_=wI&X+DDCBQzhO@X-k$o%!g4k39Iu u%txk=N*_Jv%DGm~^*LwCxl+!NaxTs}FwVg_hsC)o&b>JY#knWWIS~M`;3&ud diff --git a/fixture/20/5/0.2.1 b/fixture/20/5/0.2.1 deleted file mode 100644 index f08debf82585dd541b44d71c10424326325d85cd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2173 zcmV-@2!i(l0h9y~4gdfU4gdgs2mk;S0001G2mk;mwJ-f(5Dncz01Dw)AuS2|dRlZfdRlHTVRkl?|t6-~!R=ozQ zGteIc`7sckf$SKlj)CS3)W$$^20AlP3Im}qkePu%80dq6$_#YDKx9t!IrYjZKBrDO zWy+~Nr#v~;$tgXj-ki$fl$%pgoPy$1n^Q=fI^qcbQE5iaFbd766-KEr>dYt+MujlS%qY~T( zsn4fWKB4mIluvO!jqwT2r!78d@#)Q{DLzT@DT+^FK0WXW%qK6O5ct&PQvja;__SqH zo=uZ%!n0|SO^R%~vuTh`f@}(8)0#~^Y*MqShD|hVO0%hjO)P9$VH0fA(5BZWpiQYw zsDEbpN0xtL`De~QZTutWpECX-{(<11F#jmEPA=)WRLLbuF6Ft@$R$QDEpiEoOGjK{bLodmK3rOJ>4r--T&m$x?h@|O?NaO# z(IwcWpi8PtK*!^EoX+348=vztZpO>F2oK?Ap20DAnM?2pE+)r#zbjMgvLZ?OccgMU`+JIL>o+` z!9*EMM8QN8OeDcXUnXi`A_j(uGAxu~pbX1o7$(Cm85YSfNQON!jEP}O3{zs*5W|ER z7R0a}hUqXYhhe*6NW*l)iiXjK&4#&#wT7`C;^Uz`9@68XIUbVZp*S8|;~_O3O5-6D z9xCA>5+3T{Ar2ne;Gr=ORqzl6zgqdl%CA*^k@9PlU!wfleibcr$U=rJRLDYpEY!zBd@NMQLUb%N z$3ihI1j9lvEQG>BCoE*bLLV&T!9pD@R9fh+T6Xc-<;t#BcJ0|!$}UoNg|Q2aU0>{S zvkQw|SL|xD>xo@X?E12+fn5yj;&&ebZlQ3?b*puYb<1??=T>P}pINWW;xlWFS!&EmV-}oQ zV$2F-)|**b%yKhp%&ZD#Q7{Y4EC*(Nnbl?10kgQg+T@icuQGXs=anR{B6)S^RU)tK zyei@q5wC`LrREh6uXcEq=G6?Z(7ZBwReM$R>h((KmFpGi)#;Tf=fa$O;2fB9Ud|zK zuFJUq&H-?4%eXw_CK-ok+#=%?8Fy#gAmaoX7s$9Z<9ZmUW?T*9Xc(7fTnpn^7`MVW z*tnr_uW>-*QsYqHX89(|w^+W-`PRlaIp50oM#i@|-@5q5#kVcKmHBqTH!|PEe1qUy zm~RSvOW>QAZJuoFWSgFCm29JATb^x=Y-42GBHNJIcEmO|+kV*Q!?rcsZrEnSwi>qO zw&Awjw#Bv)ZG&wK+NRnDv=JT~-LVm!jo#SEjg97PbjC(zY*fZZB5V}GMrJn3U?U7R zDzlLU8%3~@m}{wAL*?2j*Fd@U$u&=|U2@HmYn5D^;+hoKqPV8SwIr?~aV>~zKwSIb zS^(DoxVGh*?%L6{+_l*?*|pe3gIpxYMS)zT$3=NugvUj3Tm;8OZ(P*EMJ!yj!bK!p zG{Qw9T(rSO8eEjYv{|OfGA)*At4vd6S}M~-nHI`4P^NV;jf-hpOsirV71O4e#>BKG 
zrYSK^foTa$^D=FKX#z|OVA^gP(lp((MxHV9Y>{V$JR{`UAkX-Cw#PF)p55@whG#WA zd*PW2&sun9!m|>dk?<_&nd%wPvRanWvTT-Rtt?|@*(%Gj-{QL5+fqxAANuOXJ;QG`wrPxXWt+D{Mfg{J{|Vuun)~X8TQ4n@65gw_Mx!PwXe00 zwa>KgXJ6@FpL?&|<8yC~durTE;~t!QV%!Vk-kW<_+;ekp%)JWkQE(5;JqPZ6x!2|1 z0r$Af+hm?5^D>!-XPzYUBAIt*ULy1C%qwCZ5%Y$ar)C}x^LCh*X5I|*(9AQMSDRNf z?=??oo@*Xz-f5ocq&iNb9$oaDht9h^kwBnwU&b5fX- z9ykfiyFKqpc}L1SJ?}Vqx5>La?pEGdXWg51S*&xjE{b(ftZTCliFHS;W3#Tyx&zj6 zS+`}~Z=G*lZXIsj?V~#%CGwG-j{^A!kdNwogvUpBd_?D?G#}0I5t@%y_(+A1&U_@o zM2!!_n0h9y~4gdfU4gdgq2mk;S0001E2mk;mwJ-f(5Dm>j01DwyAukZPC3gU} z@b|ml{r-OUyWjo(e)qfI-|v3+yFZ3ZOG}fKmX;<-N|Pj|rKL$qKm7p+0uBNL?JP43x$|Zw4x1AQA=|VW2SsaWK#Z1Bn@Ef`P)E(sOE+Q>vWGb4rv` zp`5~VDw9)pPO&*v#VIOItvSWSsU=RSIW@$oG^f0r>T>FLN_Q%E3U_LDN_Hyt51oGs z`G=5y=KRCQKYRQm=bt(LiSv({e_r^AN$PN2hBb?>~m(H81{i-A2a))u+Itml-Vb>&(A)nPq=)# z<&!O+Uisw8r&d0}`IN?|H=n}z1jZ*fpR)Lb#iuJiiTO0ar!b!u_@uxmFrNnaB*3Qt zHsRTn$)-D-BH0AVCOexF*@VcZLpG_|G{mMfn|9cw!zMJFX4oXdrWiKOHpw={HmNoN zZAxu=DVIyQT#|EXj!SVat#L_>OK>iYaY>9zVO%0}se?;nE>&=ef=gm9HE@Z6OAAaQ zWzr~<@=V%fk|vYzOqyhpB$FbUQ z$1*sE!LbXDMQ{v)V-E~NW!NdhOd0mcFi(bcGVGFJmJF+8m=wdJ7zV|#B!(d|?1*7O z3^01FkRS&Iau6T~<#7-m2iIJ_aL@_| zsc_H;2Z?Y{2nTI&kOl{3@N1S|viyqWmny$f`Gv}_P=0~(>yuwy{Nm!*7Qd+YHN`I} zel77!iC;$^3xv;B+T`cS>VHXLzM%Z=QW!hD`wdWQsw`RGe=hiB>RJnD=Ei-PFajVTOFK%^l zi_NVnZc%Y7f?E*Wdf?WVTL|1b;FgzLU2gqmT{6p(S(VJHGs}@#jm)AmtB_fQ%nD)_ z5VL-mb!HY0vu>DWX4VU{T$mL!>ovPI6~rnut8!R%W)*G~(W=|3*ecknpjD|=KdVpy=fs?w;9Qt<3!GEn9GG(h zoD<+&0ORnC%VgZ0agmIJWSpIGiHt*J+#%!Cj2mKHnsGaf(_tK%aWjmQVO$L3X5(bz zV&hcffX1c9ohX-yaG6NXL~~3OXQDMGQez@G6OA#E7!!pt5t)fPm}tyI6--3IL}Dgt zU?K)4THqTg-$wbC=i4UVH2H?-+a%v4`4-7HC%!fDZOykLz7g?F&9@%D@$hYjZ$;mB z-*n$<-)P@v-&o&P-&7Cz@lYQR&3UMfhv;}n&O>cH#KuEwJcPnSCp=WHB+vQaZQYCVO-ndnikixxF*H5D6TCCe-ZqH;NJuPzWhVr-vR%;{Oj`X zx9^gDmh7u!U!8r9>}zBnoqdJuBV=C?`+(T@!@e{7aM*XlJ~R7X*yqB&pnb1>u6?L| zKKo7|+44~>AMN=lj*sB@=#7uwe1yhFXME)5qc1*c^U($$Y4A}7ABFiyf{!Bj2+T(b zeDvjBDEC0Q_sP9G_b|D4$vr#w9=TWN-V*nexR=DeH1~wK7sNd@_j0&*<{s`I(Y@Qf z*ge?2pnIu%Kle}@?Xi&_8|AT4oQ>qzD2|QbY?Q`EZ#F7nBN8?mVWTk{aj?+_8;RLy zf{nt=(=%_Cd8*9IGf$Lxq0GZGFOzw9=CPSq#XKtJt(nKfyd~zTnK#6|H1oX7>oV^* zPd6_&4>xZ%Pc|=h5uJ+)xrmU9=3K|gE zybItR0PnoKD|)wkr(0)d-6QMjtUF|#A?xU@^J85f>*lOOvo42qXV%5A4u*AR)}gTO xgmq=srPlqdL!HCr+%4y9IrqvrSI)I^4$iqW&b>Jo#yK#~xjC1`IV{dy5ddXJJzW3* diff --git a/fixture/20/5/0.2.3 b/fixture/20/5/0.2.3 deleted file mode 100644 index 54709f08ff9a7e0bd801fe518570e77dc07a662e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 446 zcmZQ#oWdf&$G`x>`xqG*L>L$tmH}yvt@?ix1bCnFFf0*uW$3lE;Ysw$>$**SUc+wY9xfzhd3_n(xM zP}Z$&pZM_gS64~@$d#M-@=C!yCJphJ4BTU~FptSWJthzLm;%IOig1r9K|H1m_m~RQ zW2!KZslhy^-l(%eu)zAEzM#&6KU1xj-MzH!^(&q7d1o(ddHrgQ_1xT^JzM{tzZ?15 d=C(v-_V4+-lkZlVIbS|kukpWx_)5!dU0Ooq9HGp#M00aUA z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjZDsw&&4Y$R!G+O{<{H8pKp`vH^yngE*s zW5F5g4VHP7zGC>*lWSkRRfpG-J4bbydp0Dt{_{zmsEv|kW^-WZkZ&GY-Phrw^i0wIWbp-xe83xg{v%FRe{k|i>6W(WGWz2{h;bi z#hXfO%~NWgN;3uGsSi(isI(NNr6ws^D#=nvO06xWwp3a=pL01=9K}xrMBWW~CuC># zuIwF|o4?xp#pY|oUn2erfoZQxds%9-7n8k~cnR1mz+P>4?y_@Noh}x4t+-1?sJTGR z^~uUyRpz3?wO}po%@~h#R*^lTo{0b0aReL z*`m!91=$S9WnX|hR5CQC9I60)_))Fw-#^Vpon zz7Jl*SbkJFTUQtn0RA-H$oo`F8|_{`fQV_uATFnCq-sOC*^S!~NxH`b?%WYn6 z>(WNywh6aMNHiOw*$(-bt;cLU*uZAn&Gv=nwKK1oS?ll`hu1bpS}oFQkdCZoWVIq{ zXRG~I^Tjz0&S`JbOq^EYG!mOe(==KpD5E_Y%?Vn8(Flw-K+k7+KEw0kGZ&w=IQwtZ zKT%o!VflCE=ipz1e+&lg6SOZ7j(s=w+3@N1W$nv}89V5;X zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBmN(=<)fR%BDtwzZ_KY1`J+(hrmXngE&r zU%~n6&6SC-N_<6P(^i_c$^>PrCtEqODzFuStp@11D$iATUR>qksuoqbtD>&LWtj@g zR9C1ROx0j227{gw^i&`mPu+OR#uV?V-BVg(mI|{J7^w?OSy-w9qjQVSDGG89$hjZ6 
[GIT binary patch data elided: deletions of fixture/20/5/0.3.2, 0.3.3, 1.0.0, 1.0.2, 1.1.1 and 1.1.2 (each a "deleted file mode 100644" hunk), up to the header of the fixture/20/5/1.2.0 deletion.]
[GIT binary patch data elided: deletions of the fixture/20/5 chunk files from 1.2.0 through 1.3.3, each a "deleted file mode 100644" hunk.]

diff --git a/fixture/20/6/.zarray b/fixture/20/6/.zarray
deleted file mode 100644
index 99654feecb..0000000000
--- a/fixture/20/6/.zarray
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-    "chunks": [
-        100,
-        3,
-        3
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "lz4",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "|
[the rest of this hunk, from the dtype value through the closing brace (10 more lines), is garbled in the source and elided]

[GIT binary patch data elided: deletions of the fixture/20/6 chunk files from 0.0.3 through 0.3.0, each a "deleted file mode 100644" hunk, up to the header of the fixture/20/6/0.3.1 deletion.]
zd6`TmFp)`=vWGGb@j2z3;3TJrenLB9Xhmz{=|D%4n9Dr!Sj;=*vx1eZ;wsmu<~BbN zv$~H>a&8L)dfP);O;gj0Yh^9PFG%b0OR!n0$Gnm7l znad*HW-;eD&jqgVKdy3%@3~F)R@%{%zWkPc4CV!fu#WX?UU%m2~Ki~v;2p1T;^M@&>>zslIX@Wbf*u`(U%phWEE@q z8|&E2M{J>H8||n?9ezn&>hmiaFp^P>W;}mj0#kUMsT|-Shd9o^Il&pe<}57}w4)X6 zd5R8nf|exGj`qyvP3EwWzp#j>L9F^|%OSXvOr6kca4v-lIUna=_ia)z%t z%O(EHWv+9Bn{<6jJG#-E-_VBv3}g^%_<*%+;_qx`J3H7(?GD;ehe#fzK2bbEW5zO$ z@#OFtQ^;i|vpC8zj#J53oS}+~Tq3cfcC;sjE_5Z8Ui4-u%UDhk@3V$tHnNG(B<-j{ zIQLMS2p%AkVGL&kS-iqnvYAW{d)dc+j_?IXso*q~#CFn-I1*?}BFS_nh50OCAqBk4 zQVLm35!boFO+u3WKZFv--GnoMfea#pml#GSW5{AVJJ?AXpRt#64s(R4&f3wK7@8AH zJZ(rImzm5WkGGglK1(Q|ii=#Ln(w$ykYYm#A(dYACXMGAKsrOopqPzpqJ)pxPAPjR zBch9TMAC?di6WY2#E{Koa+tvzf0AWipw-L?%(n9?Cex z=ah4Tlbj;DyLQCTiq^!_fsQ0GmwDu|n0Lr$1uI#_RjyIZZGIr=VSBifTJ)noX$;{- W(izDpMzfV|l(3smDdhkMImC|y--o~e diff --git a/fixture/20/6/1.0.1 b/fixture/20/6/1.0.1 deleted file mode 100644 index c18ec22c7186119fee88461abdd953120bb2cfdf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3616 zcmWmG@qZ5l9>?*o*3LOGOHNu#PMRgnjKs`{nUT;qITlWug+sG&XqFrrC&xn4EF798 zhsMdVaMCOsnuSBN|SvdE+AD(}}=kfaC{kf+`>wD|f4Fvx0QR+to0uRuDa2}yC z5i}=~Nla!6S-i8rA&7ErMDG0{0S3GW{7q3NJB~ zR7R0TA=}ta5udY%Vh&P5*i+gOPBR`Sf+$)MO(rwQVlHo!%_0_)LpkTE;0ixbNfkG# zCNWAolIX_^Br}*Hq>#@B3fRi06taum6cN%&J3?v1LxjX~(b^G50^LcZ4}IxJE-T4nJs*+JX11`Epw`+EOdaYHLPH*;5vhzK zjS0L)I#Zd(bc#7h3CH=GQqFLeb40~xM>Or}KrCJ9MgogiOb#pffLzwHj`dV=lWJ zhi3pvAC&T)}{af$2v$PKzaqa6wK=6U)skQW)mTGp|i zP5gz;?BFwYQoFr&)S&^tq#=!YlqQU0JQJA0@0rR>-eML$>)hftcjy(b9laUAuNlZthB2Ite8MKS^H+AThrR5herN4yKsb-km35HESr~V>VLAHnvmqS?#Dr zDECvJFdinH(Trg%8NALUGMPaZhd9g;%J_zplyja6V!CQaTjJzTyz2oS=+|1nr0Ofvl$KngE0lvGBMMj_kSP7$B8hhh#= zLRg}9gwu@2i6DwrM3c!3vY5-;WV49Hoy-loOP3j&CXF5|_C`Y)|co zBZ2ND(ucnEBbSxrv7V2}XER&aN>DHD2&N8o385hm(uh<>k;VjGBb}*CV>-ngq=e&q zO(|zM%Q>Q+(~fA`(}7sJ(v1Wbv6viI@Bz83WgY9O;wIJ9=8ttBU|ug8(kvWe;m|BOG)|6%lV;%% zOAd{bW8tJ(I5Z1~X33#(axD7Xi_h!l#rMVUsdi6H^*XhK;Qu11+aw4crXCSIPD7$- zMl_R{%oMVDgPG(okNKS9G-s&b8!l4GHLjBw83di^ObR{dNhcRkwlW| zP6|s{$};kKpH*yNBL&>yET@gm^3!9kpi~yF@@}A4@HEw(2g(~@F?LlrU{WuU?Q1J=XJ7}%^c=Z%1O#N z&)1Z5nJZi+{weK9Ac<}y)0;l@C6ATlvz`ywz!tW$jgT1as6ibbAe8z%LIctnNe1J2 zl?hB`8q+D}5G9=8OG-J*InEQ)QafU4M|7PqOQdTbE< zln`ojKXn+yVA2@D%cL`wag1j-dnn={pHs{+j&p*>t+XSO7Cc1^ZFz=v%w`UAS;(JR z#B$zc1(&(PRc`V>Zc!~R2!29!deeu#4CJ>AVmL1`f-P)i8@u=iyE(vT9Hf40?Px$F zenn%N^CT^p$~2}ki$5}(1-#8d&T@|PT;e}m<_6z$lP+zvBZ*%8n%)fH1qQN~b*yJI ze`5)|wpN9OBMvP?~FIeO8b-!On73}qOb_=wHy;P32YFZ zg>2qnCOOPwKBqX%87laOi&S!r>m+v6j?SdegPx?)kN&J;HESr~LpD*!c6LxRQ9Ei8 z#zWL4oX3b@6r&kKCa*DxEM}0+VUBQ=GQQ#zmR7_O zPX`jnVIK3z!(#3{SgH-y_pEO=%2Kq&Qjga)KDk_^W4DifH>G^SI`Axb#G zmy~jrbDSq8MLS|?M|7PqOQdJq2(A=Ku6>M)4Gq%ne* XNoOqM7|(9@P{ct#rjjTb`4Y?mDG(pH=ArY~m~StSZgttWrNB=&5ubeq509&fS2y5me` za7HGz81}i+%b6Ip(`+sOv#a{Su3;CFYvEDby}V&K~rGoO&BO^m+2kj0);% zNX}`@u@-&S%!sE>hU9EVH^`~iu|=Od-^?hZ_Cs=xVhZHkjg2}NI<2^{=X!bOxSKfc zHkt{pm$AlCyWBo=)cNkvhfCe&sQbg-J$1`bkCX)WqkmgfYGgr{Ir8cPIr7^a`CX>6 zCB1<6L{2U=Tg#x%gKX&`o`5as@AyTZ|Ge4SP3l{aExo`S*5ZoHj3nxG$PVt~0oZ~5 zjBoUnUNAGtsjD!8NjzsQve?WxMs0`WyoWB3Q-8!JeR(BjMlrPylJglRAUThenxBk~ zT5>j`739?Kuu4z*Mf0=Ep)P{te2ig`Q~#vLQf7K2XB`?qPW=i?tlKY{?@6Q1hUDzS z0LXcGQ5`;y$y>?0T1d`W%rnDw+3ZN$ zKQ^$99kO_?n;8|<)sURin3J3pW=1@9GMdqjZmeJ(TlBeam>Ff%ehgw1Q`l!cDq!{# UM=d#<(1t}UTh;5eK(^b diff --git a/fixture/20/6/1.1.0 b/fixture/20/6/1.1.0 deleted file mode 100644 index 6e705644de7fb37430ba49ed121f6e3579fb4f63..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3616 zcmWmGe|*RV9>?)-&N(gToR*x^T5`7v*ge?IfRpD;m|A`nk9$E 
z$+2(}3x{UOp>c97oW#PRSvWNJyg#1*e;$w5AMa1K`(mossT~CW7fy83AZSS|;%P^F z66rz`^I5<`@_CnK6tJ2#+~6j+2qJ>uKB^JI1Jt5F0~kmKFENZvMl*&T>|_@ue8zrC zIl@sIH4B2rX-pI?h$fD<#504L%p#Y!nNJ=|$>$=MxJ)JAbAu}G5kv+-PkPatG+v-T z=?o=IdRe25l;f0NhF!>q>#r_@>#|E6tJER6jH@Kf|fy0lluvwE)Nn) zIz!1|6t9rUcqTBBV)jwOVZNZ06P%=s$X41BMJ#QIqa&S2AcuM6vV?cYV+AW&MFrQX z8VKLefP6W+}B!< zWV(gLZVIH@~J2{TaYO*75=C*v#M9!VY$_i`q|VM;+?(3mVXf$7#%1 zUS%9p_ybd!!Axdxj4wIP8UD>#E^>*>wC|`L9Z2G7y3&(g^ky0Fv79yhg|%#C6Pu~l zNjs`ji=R@PdOS>hhVe4P8N=@w%OoZJKB>-7m}FI z0v3|byDX!C)vVzLH@QX7+5bZ|LU@2$^k)DA$>1f1k;!Pru!EiKqJ+=bPbo(@N~1*W zXiO9>h$fD<#504L%p#Y!nNJ=|$>$=MxJ)JAbAu}G5p>axUi2o77wAtqL&;zxo7hYd zpRj{s_EAE;B<-kAIFHeYNSYJHBqlS3Y~ExBIm{!MQ=H}u<$TLUD!5K1iBCJ7BvR-> zPg3be8U?Io4TXHjMz*n?B0{=qM=e5mhnlVh|H71e8bh0Vs2uCU7D^78q z3zQT0jCRD6KxYz3raLL*v6OsP@jeBtX9I;)agU&z?cshxsLO+dlFm>v7{x1OGM))c zq?mn_aF{PB%)4W%Zj z^dpTSyhu7D7|AHMv7I9J@+rj}#iN)M9_>#VrWe)SxhIJxx7UVi&)GO&U1ls zuJIie+~y8?*omUGg`IW0NIS`sreVrC>}mb4a5nuSBNQaxv3}Gl~yuv8b8P5cEv70@V@C64c z;BR8ngDiFAbAo|je{-p2{ zgGps1X>4ID+bH5Qc2Ud$N@x@w2s}(XjN_E^4X3H#0+qzH(T-T+=t(?@JVz3_tRRoId`Lc<*h~R6BDEukI^0h%4SA3d zQW;4a<9Lm9CNqT$ia9_DNBN3U$~Z+i5pA_2l4v>+Lw9-*M>Y$|VL9)S%WBrJmMX4O z&0PXfc2Jx9s6#S?NMQsolgb#zGLAxaQpA2frma&`)E>OueexQom+~F>X?X@F`fxJjE!x+v8HnEukcJL{M>}4POY50V8gwT{` zgwm2$gfp2bWH6Jr$YdV#SwI=5DCa!iQNb0ia*ggCv?Go_JWC=2c!7beW({lE$lut+ zc0OSTwL5A@9UAaU8q$Pc)08ocWgL_E1CyD-o6O`eM>xt!{!JO@_?Gi@h}Mp1y73g< z>CH3rVJXX4&MN-GYBul@8@bIL?oz8$AnW2Nj~dX&kb&Ji=giQA8HazUFtEIAq*vrR~SV) zV;997poB&}wWBd%JW6vSXhS4ZnZ|Uoc$?W|vydE4bB422@;w)+;yTsD$7@FdNjy(q zk{Lt_`K)6-1$@jF3fV~!!M(Jj9w9tLBSLwEFwz;%1TuJosbn&XEJ``Xamx9I(^PPQ zN@5bUBbGRN5>Fz}kwh*l$YU)ZlFueKQ$UTUwIhf++)pqKd5{oN8A%%Bc#U)>GldL_ zIY0?V`HE7?I7K-Ty|p8fXgU!?cX|*{V8L2d4%4#^B6 zg%P|=Dq|SSI11TG5&QX^Vh(eJqlEU+jxfS$O#~h2NHm$uB8vsQLpDoU#&RmSKqc4s zfhuluhr1*`s~t%Ur6sKhXEIaBU?y*o W$voz>fHF=|&UwD0f-79*8b1Mu1CBBP diff --git a/fixture/20/6/1.1.2 b/fixture/20/6/1.1.2 deleted file mode 100644 index 2a8a85a6594c93a1497770c30930ac9f70acbcfa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3616 zcmWmG;bRX30>|NByWP$?t>v84l5<)!VrE9njKs{6W8tJ(I5bNRjgw>Hq**vL3x{UO zp>c97oHPrEX33#(ax9!Q3x{Um?ztDwU+{VH{RZ6^8yprI2>f4D;+h8n?dd=wUFk+L zy-8sy%UDh!@3Mv>HnNF3+~po2QGvjZs6!YJQlDWAX9StN#8|SJ#ANoekNuSM8Aqt# zB&UdK5eT%PC9$+4js&`p$UNq=fPCI&DFv*ikgHtdI<Ab)& zG8jW9+u6ZRO8JDnlyR7H8b2NgJVFy9X-*U|v?Z1q%w!h1yvaQBSVBJMxxhuL`If8H zaGP3^TWLoMsXWU7(iloQMQmgf#eB$iO4vgwVXd{JKH)q}V12MD7P4-rlV zW5{G8uaLzwrjtz>hbiX-Ur@nW&QVEBjCRBlPiGS7K~Iv%V+r}J;vEWD$9gtU!)ClypY(A{mTl0uw1=52YO9Q_48TaZV5ss~wR<(}oy2(usI-m`g5; zd5b(&u##0&afNDb@f|e;+64ml5kx=wlgbdDCykMeVl-RXMlrkjm=X?fkV7V>;Q);SF+F$RZYVmUC2cnQy4#1~<7ykM`P;L|=YIKL+z0Ls-XpHn4@i zvXxzY#BS|!tb*iVBl+R>0E{DP)L(Snvt;Z>$G zi$5@%dCX@4r}>gIT;$(e;wsm;PPatu=uQex(}w{JWDsk3kF{*#FKlKzJJ?B`uG&$T z`uvmzH0BYSFqW4Y$7FuT6lO4!SsdYWj&h2Ba+>p8;39F|w4(!wbfp`~^d^O+EMqx^ zyvrJj*vKaCaF=_8Jn8>XhcF(bKEoK!2r_wzv1Boc$?Rnx`zhx$j!?l#P7&2zJ6aM; zJK{*73yI8QJ`2d_ZI)8NY6`i^HLg?3_uL`)DH}=%0~p94(s_YlWH5$IwzGqsl=2CC zDdRBZH144tO^Bp9QN+-eSY|MjS>*C2^T=Zf`JCqh7pdl3u2REoYDw;?9Vw*pECWbm zDCrcjkxdlyA=@co52b`9X-9p+d6>pT@EDO~F^S1!^BObAVJ^8;aFSD0@)hT);tJIy z^wN$*lITS;{pe3B1+1o!4ZKegTiHf2!O7YYLMRUqMk5{~oD9a0$wXcui)l zyhsM)nZQI!*h49Y_>?k^ahwxGJgps(MAL>CI?{=Fa+pgli+PJYRGBZhLn@naVGc(D|B$=5^OD5Ca-@eNgFWu+&eDC>vo_Gom%iXs>-Wc&P(wGCr zn7jO5kN0Nq@&8Zpp3i0S*=g+XQDetrjh(`FtVxS@6`0@QtT|k}Uo)n`b;xY+uw~D) zc9%&qcD~Tq4}5hg)*Q)nXPGwnQoh^78S6i6Y#&QQ7{faL#+uZqV-+SW+}LVzJ(@9# zH&|djB~#bkWDmN~i$Sd77q*xy%WB-&r;1z$Il&C(SaW0tcGi-~8E8i*dO%gXf;Hy+ z$90cVawVkaB&J!5%F#U%$xcYmHgtfVb`i_Wd2)4+B61MYa}*Pho{@Qao&>V=Y(gvO 
zY2V{BGZ`mpcKYU#eUP357y&))A7&yvIwL)6(Fl6l*LcUe^Q68foty*d*^PeC)BeOJ zbHRM=dzD-R>G>QlSxY#jXHFugL3-Z7ebCc>!w=?sUfrXFTmk9%1kYHDIIVlcku9X> zP22`O?I(O?E~h~EC?E$QJs;sQq-WR}Jx>f-dS1s3(9^!fM`qG|`ZviYdm%j^U>NkY z8`x&9qEO#;jobiw+83B-%@%3jRB|Sy=RNd+p7uM|nG2lNJ<7?|ke*L5%UX_iWi zu!1$_{6XELlw1kvIf-f3qAuwkiDW0FXB#@OfJH1b=PA`aipW7o&rwWZi}lFMdY%Nb z^lU;a=oz|WW%@VC2R-GEz1l1f H{p0)xL@(SY diff --git a/fixture/20/6/1.2.0 b/fixture/20/6/1.2.0 deleted file mode 100644 index 852f35f1d489ea7542a33789d86c6090c39f7e69..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3616 zcmWmG;bRX30>|NByWLJsYb`lxE$756X=Ws5M$F7OG)oSRlVjneSvWLH4vmvz;iOqO zGz*7j$)Ry_ESxk8hi1v4adIr&J@?}I3qCKt-=KS9gTq1tf&XhvYV$y#H+@Lwc?Ocf zFov^{O>Cx=kJ&*Pd)Y_*s6gN$8W722G$x7`L^FxWOd*#ym`NVW zT;eiUh>z8d1d`}MGJWYse+pPh zA?x{oBDS!VZ3MT`ju1k*pD-HoFpU_+Xfm0=t7I{iX-wxJhbZSbUsAyt&T@{JIPHj~ z109K{8{J7_5sS%Z1@BS7TGp|io7|$7ptga)j|iq7_YulqhA@F7PcEY2Q&hI?$D8=|(EO>BDl~Wd&>aGi%w%CN>j@ z*N!0S@>A+jpND9`2wq|&tC3 z&f3w0Xr7=YakQl!GkKF)%;yg*Upg*SXCd?vk3Q9eqgWc?OcfFov^{ zO>Cx=kJ&*Pd)Y_*F51z6NFJjxQM4eMNla!6xxB$l@|eebPI8LVRPzlNsNpKtNbIT| zT}h!AsiZN0bXKvNH5Bt98!2HsrPO&=JL(e7gVZO2M~P$%V;M&_uQ7=nW{}Haj&PJp zzTzZRoTr+&Zrag~1Ui#QGCfIQ2}@ZSF`svh}a*bMk;5NZM zY$ze5F@SW2@*)|GVlP+zBb-J&LIh2DoMvP( zk!+^(IyuZ{4s)sC1eKiQYpS@!Wv&q4OFI%sq6f+Jr62t%U?qjD=L3q^!dA8soT?om zgmOP&G~{6#F^bV-GJ#jgVk*;^&Or`Q&T+n^f-{`u95KDMBbE+yB%W?`Cy7NYCZ83& YM*(YD$9isZi&}#E_&)?wkNXJaf48WLfdBvi diff --git a/fixture/20/6/1.2.1 b/fixture/20/6/1.2.1 deleted file mode 100644 index f2613159e04e1038937b265382e2d48402852fa0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3616 zcmWmGe|*RV8o=?dBNv*ge?ITlWug+oXVjgw>Hq**w` z!l7AmXq+4iC(XhkmK+)<$I|=$c>n#pp4aD(=ee&^i~DOl6a@cQpPr3^pclPK;{^tg z&QON2nJsLkkWbh}5&Jnnt;Rv{7`2JuNg5DEQ=*y3BqlSHHSxPRe z$fKMa+@yjZxkt!TLGW`zNu@t&4B;ix8ObOzC}1ar?Bg?vILr}>sn<+9B8a33QN+-a zSf(?BnatyDvRTX$a=6GP%DB$=lyir>RFM3%cBIgc=SgJ{gBij`Hj&Q`KBj;@>}4Ne z&9x((x;#NW8qt_YGMPXY(|CjF%waC`DB%>PT;Llna+PabC%%PtB#=ZmlIcTV`jN{j z^4P$KY-AhT*+EE*c7#%shY6z&k5iYCj3R^ayhbKdn94K`bA)0}@)aeV<2)CLX{jBt zw4*)obfGIrEM^HgtmJ)iS;u-daEH58P$f19entq@sXL3E#|6RtRG|-j z>Bm5R!ytz9G9%c=c6P9vzq5yfe9j^2wAPNgG~`z_q8U%qoGDCY8ngK$b6ChbEaDvJ zxxf|v!&Pqa1GnkYMmv(|&9CXh0A6Gu>sZeQw(>W&v5QaHP4#EAqb9Za1$Ah^FKNg) zUS&L!`2$m!#aqng1YdHJv;3QLT;@Bj(6+61w4*c6(S@G$qBkpekCm+DudHJ;Ti8nF zXSJgW)p&^N)Z#H}Gn|(h!B~FJI3_ZQ$sFW!4so1+ae~vF;VjMCX-9L~@Cf5SXvQBJRL|NoB1pthj&>@F006+oEzMvf*-j@ zNEbJhP*Ukn8bf%AbVf3Y3<}suA^Z4@A`WweV(N9(jtC-YLKHEyB$nySU?%f;n`{=d zgd8q%i88M9J>}ftE)^svX-5kEc%D=SF_fq?BY`BkkxU=@(vMtLk;eu;WFy|NBJJ(5TIcY69t+ga(mNYXGGb4tDL$l=2I5`$hnuS9wIW$g=g_CCC z&@3F9C5OhzA)GV|hi1v4adPzBi{~%+y!d`Y?r2`4c32SnUnD&r4}xCwCW99lKqf;O z#zr==nNmJw2W9MKAN3jq!9&z1n#X8JEX{~xB9oX*E^jfDJmxW zQl1EcPIM-X=jlm0{mEbzt64)aAG47Xwo^)2;~=<~+C=am^@!q8q8ZH?#*)JuOk_GU z$mIw}IYtHFaEeMUP(@;sAZSe+lIcha-RVIZOIXS>3i*tRI~LSlm8XVf5+dk7<( z2Z$h(p$sF7R~bz<6Ud>Az3k&KUvh+UPEtW^Q|*W&fmS4vLvgpy8wG8n?kWHORbWKqI)O4-BblyQ(l944xnc0?0HQ(}pyB?(Mt z2D!}T9r9SjV)Civ0##h)2dcTnZGt%M=s_C&c!6{VF_<9~v4LW?@d+jDVmEsTe^NUl zh~yEXXiO7g$YugLOyfX#}NG6qTbf*t}=|=%8DP%n# zQN$LuvW;3#X-6n^xu0+v@Gy~#WE5G9=XJ7~!c?YlkV72i1Yc9mSIb}$|4q%&k8=EfVHe+J-4_`@N^K=)Nvy0vA;Q;^S zAjkQN6Eu$3ju=|-H1V|MS=uq1Im~4te`XQOd7l+r<_cH2$^W>;oh^gl7u2K=ed)(Q ze#0P!^9m!_!dAAilfScz{d~az8YE~(B#n52#x&j zf4IyIe&i-yT4_frz4hMM;Ch1i{32feO9oBzp|E%Y+^Haw$YB7 z)aIAep&k!WpW(d12*&bz#xapeOlCh{aDZd{i{qT)G-qi3jCQo34bRY)6gttF1-#2b zmhl&svx?QM;RZi)lNxP<;OE>)823_}0ldUOhVfg5Gnz4sWe1fhrP{wWAHmbR>oD^dOBTEM*yme8?(_*g!EM9krtdq1;0l;XFVDnG9tZS-i?< 
zvY9{*W$a}ihxw8tlyi~_VpFsujs#kfND>`LCXac{C!hCNLIEo&q?&77C+Ot=5JD~P zCX{sglfe*PCXEL;a#*Nf=C`AipDe{hHNH~ z!!+JxI

yeSSxWx0zD~uWPI3E7kY3X9T`3uLS zE7xym?%cg6WV^VzW!>Uk>HV)4IB(z6adSy&g6Y=7iAiSlj$5`$S2yn3Ex-1~-+hnk zf6B-x{(ab|XYeGYFD~KPe|L*^XBR(NS>HgLnldw2Cc!&*MWP?|++52m{y|qkxxt;G zuQ9VRi!rM)n^BI*j&&XD2G&iiTUZ)6ImB*=JrH{#_CZKN_k&8_v0jP9Hp&0uMXnYB I6F}(;002Gq*Z=?k diff --git a/fixture/18/2/10.1 b/fixture/18/2/10.1 deleted file mode 100644 index b1bb6ead353168675c1512f49e70ff699191ab8e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 616 zcmZ>Y$}lu^j8qGb6b_U1Vqi4+|Nr0n{qKMO`;UwGeSmQf-*e`zoD&ihQjQ#8k}~k& zoXo)BYr@vdWOCsE$036rjswj9L!Ar_d}gH>8L0IL&VRLlSM9R1ucQkDF9WX>0|NtF z;{qVbz_`E!#AFsQVK~9dV8rZ@0i>iDG7KFTE&x(M1(O{Z*cyN;4H=A57#9E;3=jht zE-(nKs$W&NYFPtN=!B68(98w~BQ}Q;Lv!z$ekx| zWmR=gOmx=&V0f%ge(wA=4xPGp-#N8P z<+Cj}m3({og(E)xXl?tx6Hzs+TX*c(DIT8m`I-K{=C^fr{~KG|3=7-rU!^b=N zEbBazlD1_l%-5OmG4FOR_+Yb$RE{IOp&M3#ai{ppxgXjknn4G?39Of}NZ2#z;$8s(qrmVv&9RP7x{!jn_ diff --git a/fixture/18/2/10.2 b/fixture/18/2/10.2 deleted file mode 100644 index 90469c40613cbe1ed517d1ef338061ec7db01ca1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 612 zcmZ>Y$}lu^j8qGbOcicuU|=+>|Ns5&_rL%D-v<))SP2KlJ$#RwHH{Kxuv})j%)q%= zm??pQ<)D#7Gs^)6Mgs;0frI~7Ejg08m?_1?*_f2+bucq~zp~dBkA>D^o)j%Rv($wD53rS_pDDbs;N<0uzG*6Zq*a3yt8H8n%WjEd-O*k;KstE+@~ZL zFR`d(a`|4#RW$X@+b>_g{r&f+uD+qIy`!hMZ_?x`)27duGk4yi#Y$mT}{}9;B`JTs2 zqtN2PNfAaS#_2P7-hAM>$Ts6b)@z1>4>>u_IUStBY~SD1+b{k*%Xss#z6qDFNZopz zo{^W=`;YPHG0V!i+!pCC${RD9F0uY+UB(?Cwn8@{>cKX~oTgi>+qetFUg$oEez1eF ipy?6oG42WC8hUvT+awa(B+@N*PtrWp(yTFug#iH57yjP> diff --git a/fixture/18/2/10.3 b/fixture/18/2/10.3 deleted file mode 100644 index 58ce5260e0e03ed390620a568ea8d770031d62d4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 246 zcmZ>Y$}lu^j8qGblw*~=!@%(O|NoPVG*)T-TC`%7Ru@)c1H;Y@M;IU=LyE0&vD=JU zNtvCC9akzaFfcA$0F`23k*Y$}lu^j8qGb{QuZ@8Us^s{r~^>zyJMTkDa*tz4pHY;~u`}%v%LJ7c-@#7|fLD zQS(vuH8hcwO1bPd%j8fJvrkX2ArFVdtVE-P`Ttf;>@blsx!AdY;mDUO|F8NQ&ATvz z!J~lz1Q-|anlPALU|?WVVC=ZSki@{)0%S5s%mT}v4PeON50C`1dBJ?3vW5&^h72f2 zih*(AD~7AV|F8OARRxMkF-|aH;FSWhm>qxwTLYK^Nq_`;dV7=ueH&U$4kAN~7KMcY zO_@}HOP|gTAjEQm>3uYn1o+AE$H#t-^x}}*3i^4Y09*s0;x^A4t@H~ zZ0+pb+&%g9)b zKYsrG=PxEHIVB@AD=#0NHCy)7T)EMxv?620%U4=%yt0YOsk!+DHjUD+|M2+4B&Bja zd!GLCRn|1UXjIB|O*i3~!|9var~eCls&t=y&elTt>T9_(hVPC@ z-23?9NB{f_wfi1_{v|1?{G&wfU(ex(OadJjCTjT1(F_sO<(}TgXiwHcV@Z;jB<&n9+1zy1AXlBC)Oh=$(mvA}aPjE~hpC E0Nq#qKmY&$ diff --git a/fixture/18/2/11.1 b/fixture/18/2/11.1 deleted file mode 100644 index 50dcebd9b8383926d8b2a9a0ce7bf9ee191334f6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 619 zcmZ>Y$}lu^j8qGbWckOklYuFu{{Q>m|Nig)43_xlep?%{jRyp<_I%~vWV z(SRi*L2ZUlkD4%}gfaum0W~Ix#)dYvhW}qfjgm5#HAoqy95mou{%@7~l3N)kjEn>r z7}y#Z7#J9r0ZF3_AWMpIf|LoHLk5syNMUP`lmyA2bz#V0kOa!|0x<}%HDo|Eo&a)y zg3MRgR?Yvns&1tQ(5wk;jSCnuE&yeuKx!En7l0{{h6@auCQGHIOoAS=G9}D{!#Pd@ z0V^C$4pIX;iZV5b2nJ{{I?NPwTByXJz`)3$@SpjgNJ?5sSw&Mz+mvZDvWgaM*ksz+ z+PQS)+LLE5zWw-3NKDGi%BEyP)`CYYH*UsoOPKvlx#UM>H6(E>IV-W30POw{^RCZ0w*(FD; zL;r`af;(eolNEP>*b3zX{Y}?c*KsF^ols$z&~%G+8>fIu!gl73i+M~E+w!`1D$UXG KV5(<|X#fC|ECeqA diff --git a/fixture/18/2/11.2 b/fixture/18/2/11.2 deleted file mode 100644 index 9bec0785fad5d08534253d1c07e9a0a06bf65f22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 619 zcmZ>Y$}lu^j8qGbOkl9{VPFdT|Nrm*`upGig9tDSl_vUfL4d6x1FVbT1dsy~ z{K~ef{?+nViyAd$f~HSY3t~kF>B7ECCf}Rb31z{uHCrx z;?8jUAmHR@$h^7f`Sbv1XZU_pSf`H 
z5}$>!bJy!_Zv@~lv%U!?|*ia8`HlOpW}SXc+pP7<3>Q4SsqTMlexn9!)ibd~8E({-jBOl~X_I5mV4R33OSJ~-?qWpLQc_0B{;5fupr H{>cmg+=~Jh diff --git a/fixture/18/2/11.3 b/fixture/18/2/11.3 deleted file mode 100644 index 84374a0067a69d4077dead4da098426df01fd78a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 258 zcmZ>Y$}lu^j8qGb{{L&Ou0<=>gNRj_M8_hH4GcS*8`v5ym@t@JR$#ce z%z>?EhJqmjgVPkAnI=*OJ#Ic1X0$0^F!8lznAfo#QWvzlG19A z_&=wA%{r4;d|+SN+4<`CW?t?8<@8OYXz5bd>8aZ}g5O3vJ$)lF@#IWhsVU)6zoT*w qi9gs?s9)Y|n)Z*o)Do_$~Zr?o|aLGeKBNs)(64vN3$o&f+s6mLWT diff --git a/fixture/18/2/12.0 b/fixture/18/2/12.0 deleted file mode 100644 index 364c40f38a8b4685f08bc054d3a4ba55a1a7ad28..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 616 zcmZ>Y$}lu^j8qGb+gTZM%?{g3z9m(xQFjKvnoTz#fDiXyoM%6 zE;D5=T-L12B4yyC%)oJRfuR&no4{p`6ZfBnoVe`Fz|bu0AGqbh|4<(TVFoS*1_lOR z1_l!#bpc4RHJo4*V32TNNMc~@U|{P7i86CdVUS|rcL6E^DFXp1#sy#|vjUiF!oI>Z z_LzMbS(^M%7np|fCdHzg$8wfbNll8_WcJ9dG>qp_aAm~b@TP}4-1drNn#e3R*p{2 zUc7qq>(75oHaIL??%W|992%yRaJ#yD;uDU?$xnpTg2SU@)_waAG$uX2ynp)q{m0M0 z|NJB+hoz)v$_&rS&I1cqXk;{Z2>bd6Mn=Z~wR%Tquiky|3CF`nNl%`pPErf@_76@@ z&shKF=9$Gy|DC&NQ!8ZBIk{=7!^RB##B;{;zx?Xmy5r7W-ls3_#PqQNwejEoV6o%y zuM@)c2Txk47accgx3($q=VDRnRaokyCDOGtX=BEuhpLZ!EYswK>{uE&IfN8c5Y$}lu^j8qGbT$t0L$H3(F|NsB}?|=XM{~ttvS#aX_zV{&Z0meOi&zYwfB`EvM zGBjvr%E)ZFtn6#TCd|NOz#wqqkiiTNh9d{%|E-$k#%REllE89V&Hr!6Y{x0LGHqrW zxiBy=NHH+5H2`S_h72ABDaMHl7qB@lYmj7Iz~+#_xFCbsfq_BGg&~7M5~zX~qznky z8Zsc7P5?PTLAeDitCs&;`D@7npqvBavIfS*4nUamRDda(Wvw)YmFPPe12h_L#Iwo&7)MVSFiJ? z-Fy7xsp0jT4l1coImF;|GkNOZIj#mbL1UbE~%tVQZ;$=F)}ktckR)$ z7xwHG7gv7WV0XW;y~A+hF~5L6X*_ox&R@Xok|`}UYr(^d>nh}LatQP;ag@8E(lDvH zy6@;J?g+sH?vC}2*-bXw34$M@74{#Pz~sc8A*A4OFt^ErQ$zQ_y@Z3!Y(6vpckfi1 Mqu~*-%|)C609zFK7XSbN diff --git a/fixture/18/2/12.2 b/fixture/18/2/12.2 deleted file mode 100644 index 59bb4a0c37d5f55c24f3170486adb29843457c90..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 621 zcmZ>Y$}lu^j8qGbT*32VIs=pY|Nr;D|NUQ&l_&nG7*G%$bx zFE5y~Rp3#$tn84%(;&sBz}9dgLkcX+HHATnfqe;(Z35;qFfLpm#q0o)WME)xV92m_ zFb(wbuCYrv`5S1+}@&@AJgX9m^}^XJ7am-^M49 z_=x?x2=8MyH;>e3Y=vxX9ht8f4=-F-!T+X-MR7}q;M+zOCNHVz39PGB3Ors++W>?+ zCO*_{RDKk#6#r3$$-`w^%S29Pq1!?pDpR}*Vtot_^GccS(tFZXuuw5T@}eF95`Okj diff --git a/fixture/18/2/12.3 b/fixture/18/2/12.3 deleted file mode 100644 index ae24d342982bf20a51b7318aebadf69ba0993d41..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 248 zcmZ>Y$}lu^j8qGb%wh~rV_=M||NnT!Dy^=?KY+v{5DSUuSfl|I*ubzeBP)O*g8>eh z6^=0QGDt}>FfcnvvGp=LTrfFt+1J*=;qSjyD;K0NTozVf211~E#t97E6(8Rz226l! zR^)IJXb5Tez@nhQ;kbZ7B2ag1vQe#8 zOXLneq3#Q5mn&tOG}ZX$cOL9mv-J6;w6n)PJ}dk?(bJ$-=lo^!{D;TO^=hrYy?=au ZeZAeZ^rsCBj(=iLiad03T)`SU0|40&WB337 diff --git a/fixture/18/2/13.0 b/fixture/18/2/13.0 deleted file mode 100644 index 81d5c6d98153498741246174ee92736116f38a2f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 623 zcmZ>Y$}lu^j8qGb)KOa|%)n&&|Ns5(fB)Ct{|_R-EC_M;do5Jp0OKCM=geCLm(4Os zN$8dEnZa_|;XEG zzjlYs{)rj57eu*L~P< ze1h`S#Y;S9`u4v4$Fyh8UfbBz-#wL;ziMj!);rjrZss(%v=VFUnqg2iYvrqhN^5j- zT5m_+<&~|iZ?G2@S80^F^X5}jRm-X5)YQM5`RX>Gc6k5Z=NE%hhu{*Hm5o*nL93YV zJ>t<@clhO3=JJD$O>Ae|)HITxu!Tj`{%4#~U|ig=h1ugp<~-&Xj9dzC6HJAgasyNz zcrk2kn9``leCtC>gZD3$7EgicKRhKSIqEv9Eb?sG>N2@gL+F}Nkje`0n2I(jgTro; O|HX@3EdnNh5*PrV`s%v? 
diff --git a/fixture/18/2/13.1 b/fixture/18/2/13.1 deleted file mode 100644 index ca2915e63f61e6fffd40d2f9a8340ddae560ac7a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 632 zcmZ>Y$}lu^j8qGbJktJCfPu;U|Nr0r{_lVP`#*>PvmnIpeeaI<(0t`q%l5ycyHlMa;1;!2rHU$QW29TT)!%?;dwgxF6)8qmJ;{qUkB8B1P z1hxhsi-AGP1V}J0;AL4g|JD3eUSKh~1(^#MfLY9L4PY8ZaJAZ*Wb_%ew6rSrqCzDd z6-|K&OAJYZjzUZ<9ZC)k6B(`q35m3spu=`C1%(C%g@)*WdF$>yd#|o+M>AmMS5HOsS(AY9-_5vQ6P+im5)QpRV->rMkA*ece z>NKB)v9-N(|MAJi<<;#wcVAxLzP^9{{^N%k93H5usX4JSl^9e>Svfi|h@|?4mew|0 zJpA^ZUVi@i6N0L}{S#--o&z)>`u)W}GAa@`o=z!}f2DIJZHZS_%Ci+gSxeK_L?8e5 z`yX>*k-hwczk0`CSJ~(9_?O@IpTYk@pPa&iV=_GxPg&G=xQZn_TP)J)YE*bWK&yo1 zsZ-HLYZt!=bzO<%W0MSY9;h^UI!t1$KQNh5o#{MN0Lv0?2hISY4Y~rV4c^WbiS0c$ SiL;#Ec`WA=l9=#ci2(qeHu$Un diff --git a/fixture/18/2/13.2 b/fixture/18/2/13.2 deleted file mode 100644 index ce1cc59826f64d53b2abf8d094f969d53271c238..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 620 zcmZ>Y$}lu^j8qGbyq`WNfq}`Y{{R2_```a!66JTHq6ZlF@I7bV>dYX_Bq8i@;ZR1( zWnY7yw#Jss1`Y`UNkbzeo@NJ=Bj*2B2K)F*C>SWK^~~#hRptLT$k%3h=c}ZYlZzM_ zq!=!+H849aU|?%tV32TNYZl;XxWJ}h!f=9@!H5A2j6;B`*c@LmFfco0K!l_i7XZZ| zfZ2gTZoviBQ2Vd8K@&_EGJp~$KpCh?s3Hip)Z@gIQ#ryCm(bvq9F7HxJUTiRdr5;z zf)f-ba-0zGXtm=qC$FHUuAy&W zXl`Mth{4TUQZX_y3m+*tcL;0i>ggohGCX`DG<2eh&(fpk7=w;}{A7Ga^6ItgckkVQ z{N(A&SFhiH`1tkP_uqeN{_!dJOqsLfz{-iqY!?D9a>;uJ203lpwP&KWc6QP9^65*x z6COPlKXv+yLAo>d+xG6CcFo^fCZ_zKxWs49<;}c7E3RhqM(w!z(S5N=xVZA&_ww=+ zcJC3t{+RiNK|!1Rous7ZABVp*D4t;Hs4r?VY1B%bRuHg6>nztbrgJP?gdRjbl={c1 y$}&T!ATptkr9kI^7o#T23b72;glSL(52kl8ACoiq*p|V)Q)!NdNB-t>1`Gh3ll9C1 diff --git a/fixture/18/2/13.3 b/fixture/18/2/13.3 deleted file mode 100644 index ee80f055cee0bedfbac721cf9b5a4124ef83c66f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 251 zcmZ>Y$}lu^j8qGbtx_z}Ugduxn?( zoQn631yKDgiX2V?919o}83Y*_yi^!C7$zt%B*xZEE7XqITIBts@ag&Gad-bzetoaK zyrJ4p{pJ?7b+@%eH8#DV-dTUcC}f9#&5d8nl!H0^W*0O{6c7ytkO diff --git a/fixture/18/2/14.0 b/fixture/18/2/14.0 deleted file mode 100644 index b56c92a6c0dab58151d230a1d9e8a90d2be25cba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 618 zcmZ>Y$}lu^j8qGbG!_y`WMI;$|Np=K{`bHC>;HoYFzf#JyWeZ!?7s&X_i!tutzw+) zklEQGB_YgmBy#~<c6(=JWAq7(@7Z*%}kVMV|9+v<@#TQ?GtzP{`=c4lV9Xmay8kKSth^@Kfau$JaO5PBy-fRb9@$ zbMnoHYxX4y8D@SJ+ji*DliGEyPhWUUsjGMT^+!FxzN`Ib#DOS-@HQT2wS-wr4#EMx zjpi&5!~`OqOlLaCvPYP~$I*!8jIe_TQx@k2wFh&U4zWBb>ND)KIQGALr_vk^56Mn0 Gg9ZROYXj;4 diff --git a/fixture/18/2/14.1 b/fixture/18/2/14.1 deleted file mode 100644 index 75ddc0e66fe27a746350bd4850fc870799ebede3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 627 zcmZ>Y$}lu^j8qGbY}c4QkAX?E{{R2?zyJN;k3#(3_Z}f|fN>AsbLNQ#Gfgh5%`%ek zH8IItIN7a-QIgkCDs!=#va;iZBL>P$CTz+M(*Hv}E-Sk+8Cy3?S(XY|U;BAZ0)$iVN5p zF0g@k%s{aW1}R3a$dCZ3W19uUZ6FpyS9e!n(%J(H zc{0g`0?rN&6Io1HI0bl}6a^+pT*3|27#Nxu6dI!bGqWqJYrA{Y$}lu^j8qGb?BqVoz`&$k|Nr~n|MmCLi1ND#fdhJl4GUV}LV3^^f%rO6FNSm*Lf&`G`X78mYoN;opni2y8g9!rz;{t{Y z7l2HL6ow0ujEfx@%mNsA9DpJzAh`>M4s4AJ7%naVDwAk-$Y5Y=kYZrSTmaMv0=z&D z0|T2cbEy5-;IEzzARfb4US2TGz{`*UVL-`Kb22V+91acTP@91U7ae9~0$)#Ej{wBiHi`o<=QPNmA7 zw`T3S9bG+#mFrW{=hD(|6%{|LtN+ygvytGa7S`1_&?+ixi8vLNRd_YDbnDe`Wz!ZP zVEgp#H#fgPqflgJ%NK)>Mn*-Oi#LBM+49x2bn7?f^bB+R2k8~O(#-7+A3r#>emSJS z{zkq0#KT8qatcqLl8H(Do7Pv?@Zu%^=U-O44#n;Gzs)_rpxmeL=R)%X>U^CwN|6tB zeoQ~2xretr diff --git a/fixture/18/2/14.3 b/fixture/18/2/14.3 deleted file mode 100644 index 
1cb441dade1b27c0d172954288331fa3579ea71b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 244 zcmZ>Y$}lu^j8qGbY`mJgkAczu|Nob(w7M3pSp5q~fLS1-W03}&y@6rph9eA6z{|ka z*28k-s}Zj!Oo)N4fq{XM`TMU`0U1&mymAW|GMF8Jl0f2u;4OFAHA)>2rAz`XiX1FJ zDYgTOj0`qh3=IMcIPAse$Nj7Q`!{H7ceuWS%S5AAZ*c?h*&c_dWIph!)!L?e?_j^# z-0B@)#Lq80sdIS(lV+xH+B>G=SNe^6Cj`#tv*lm#rz`fj!lZ74`|Wb=7wTNq7Pd$v Rvi^xZDe}Y$}lu^j8qGbyx8{U9s`qP{r~TO|JUC~BJO^#MG-i_xQ8bpWijWn89hEdKFVs0 zY)l4f%EAnck|q*vj64hiNf}0o#(!2ZN=jv9b~d;%%={Uk=3{cukSBlv3^E`TLk2J7 zGKCWij1vqQ7&3qq1GB?rCuRqjnhOjJY>pia43Z2|42;X%8Yl3mH83!kqyWXGR{mc# zf0Y-+oT@gUFc7dgnD9VYU@ik=c(0VAMAoK-ESE9hm52ld#ek+(17EB#z@^cFi(`TY zqr*&5Cv5sLO}xx!V}Q(Rbn>@X9&#rR9~a)Xly7_Fqz}b@lGQdr$qF z9>B2a)}t40Zeho+Jr7SWfBu7`qG4j;#7&bnUEX}qIp{Knkk+ANr=GoflkrA~rEt@x z&n6}%o!QxM-{pPySU6ep{Pml+A3uHmF8uL_%_kV+)nB}14hgx+DH@@Z+PZY@hJr`> z`uXh%5BU|`UcWA{{JHnQ!9z!n9Y1mM)Y)^q{4X*EOujNl$C#NpXXlvvHXdG3%b@7; zBgy*p#{&mtY#fgrmw)E)G_@}+`b$RQ{oD4{Q9aG1v=yCL?Z$U`S* IH|fn40I$FQoB#j- diff --git a/fixture/18/2/15.1 b/fixture/18/2/15.1 deleted file mode 100644 index ccc6dfb168578761381a4ef112b6bd1f4aee6db0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 611 zcmZ>Y$}lu^j8qGb+;TEcmVrsS{{O%K```aYB7X0Ck0Nk@aSyjb$ciHumo;2m=D_2o zrmT=;!sf$qak5~OM6=_>CJDz06221b|5i=xoXm27fwNbb`ERI!6pyJvAOqt928Ij< z1_mi!AiwSwt<(UkWVisK(8v%iQ%ep_j&W8vX_imB<@x%uUlRW8%` zG>vB3@7nd7TRJ+sdiCzpufNFOQS9t8Jdu*pv*+NPmW+%Rk3+$SQXYD(TOM}mOw?`e z?uq3U`uR=V3mcmZHXPG4(Eo7ql;q5Xd=(ciN_jLYvrG^QaBsTIGDE1qi&2wh<&~pL l9pJ(?Vlz|@Ol}I`^iXwfD(p#?DgP6DQskkN(`D5iB>;Ox1-JkJ diff --git a/fixture/18/2/15.2 b/fixture/18/2/15.2 deleted file mode 100644 index 80880c78dbcf024459f0598fe86ee5ed79c69d9f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 622 zcmZ>Y$}lu^j8qGbl=)_z%D^OB|NsB}?|=W--~SIHz^wASUu(NUDCL4jqNbsx zSHFpA3N732xA=3(vQJ-ww)O;`&zL7=^YVWD)~LN{C-)w_dH3NzCe~?UQf5|mQ85-1 zonE}ubf2IcpO{=&T-x9<(`m=iO`AS{F)1zG`t7@UMa7PvzpSdOcmMfoTW43-Q*Xy9 zqSw^ck#H;V3CFdt>yeSYTeofBy=SkAKfB1nm`V4!MLKhJb0`1JvlrLW`|qy*@X9^8 zUw?ksKVB79y1H*~5G#ay9$T-v3 TlOpiHd#BPI4G+F**-Z@qZqV%@ diff --git a/fixture/18/2/15.3 b/fixture/18/2/15.3 deleted file mode 100644 index 2d3fe040a6e0140a119f092ec41dc67a9e9024c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 245 zcmZ>Y$}lu^j8qGbtbbgxgn`lP|NqmAR;<$MTC^WTfLR@jG~jHYzy^k$8;&qQ0kgw} zi;LZ6FeUMN!h{&4fGkF~@4r?}U|`^Bc57hZ(Gp+R5~6aTdJ;?w=^+1C6E+S(nipWrglsMT9sfBFSSH<`)&Zc(8* z(RzgI3L=ngG^%*gi>{tBd+%;&Qq6Cqe!e S3``sh_2NaY76B7L-UI*=DOGL& diff --git a/fixture/18/2/16.0 b/fixture/18/2/16.0 deleted file mode 100644 index fc49e01554256836dab8ac3067f64fa44db28395..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 622 zcmZ>Y$}lu^j8qGbEIl6lhJlH@{{R2`-~axvhZ1+c*TUIQQ3u97JPBnBdS+!Dlr#|R zY`N^%a)9Hq+blMYBPk|{5U5f4wHVF&eS;+A^py)UR4LQ$orpp?NC< z3NU2s;Bh#@%fQoc0VIBbS;0t3!33;?t$~3T$VicBU|^iUxPU<_B}0jE#bCkKOp+um3Qy6dD$bo0J9q8gx8ID7bHhj8-oBhkX5n9M z&VQWtT-w>?gvps!!HG+ZQvOWt|IAxhr1zfn>yHBmB`TVwWl~c5p9P#gBmd{(+<7J@ zj+smj+&*FpR1_vMU1jMJa`1FiW0|9H?dc=|kH)MfH%q6E>rQg Puv{g$m6JWD)b0QPMWpuR diff --git a/fixture/18/2/16.1 b/fixture/18/2/16.1 deleted file mode 100644 index 02926743e7c407fb39bf99687c4493bbd89f957a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 623 zcmZ>Y$}lu^j8qGb)aY}%%fQ4_|Ns5(fB*Nt|NS3C{QJG{J%|Fcp~L~kJv<3zD^4~V zC8)VMBydhvYvwR9xy+Q%rp6$wz{6r9F_XuKv;W?zCLUq6Ua15%6DISg!5k+WC#!js 
zEnq+aCTt3Z%nG~=M$8Vp3>TOkOc+k^G_key0u_Nc7cMX`@G_V%Fi3&;N0=2ZFfMCg zIB|giC@x_d^8ah_S5F6!fxHa7CcHqJtpUUXNrM2h1LI-^#+Y?0P8?jcmqSCCVOY@B=)lEtqDz!RZ3anjrT~Kj69WUI_>!e;0vZXASh~0GkgRX) zGb#A<=5LbJlk)99JBug3d;d)%WMlm^HZiqn%eJeVyKg^!!y_W&#*1&uUV834dCMyx zk*SSO!DC_B_w9!bHhg63J8|Z${Nkm{oa2*=OZxf6CH3n1&Rwdri%aGD{r8{PB9>o# z%T}%1v~7n)quI7Y>+;qei+_-q_=s0FIWzmy_FsR2BJbD#mvBGX+2vEyR9Ek!**SfN z&-;hQUpN;p@%0T-aB}fDF;V;M-3Q{=A2YMaAC}m0SXo8>h@`9ho{h8T*zbJo{KurE zvE*PWW2s|lV*t}a*6XY;Ts`6mq6%7p%Qt^6- T+-Fn%C-$VsLnnttmfR%(U|sy5 diff --git a/fixture/18/2/16.2 b/fixture/18/2/16.2 deleted file mode 100644 index 2d9f35a924a94b35d42ea30ee937c303b83c3cea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 622 zcmZ>Y$}lu^j8qGb+{W#8hJlIi|Np=L>+gRD68}NO{qnnD3W+$txQ8d9eT6WS0aHeb zp~MU|mJ^9a2EHblEe8Y+B}y5z`7$DO7Z{iw7)%%#Or$uZq<}O7FT(|9X9u7F!vzM03?QFbfT!7k zZ59KwvI9^lL&8=6zaf8v0)R>x7?_QU})e_X#68$P^h%Zt#6%TW_D$DZD*IavAK14 z#JqRke){Frz5CB!-?aZ!f|y=HVp3*Sc2RK&$G4yCoZOPqvdXIJrskGzA_k30tKIrG z%Q-!J_3lp}o0_K1oHcv?^2)|0RYzCPk+E^L-1+ObbxqBlzyG-YiIsCnC{$H7GQDa8 zsz^<37I(yO=qaiv650=zQXF(LUE#?wN&Rq=JHvK{zD7BwyG(X04crx+96}0@XY?etDNF9td(u^~ KP%%fWcLo4|<>Z|J diff --git a/fixture/18/2/16.3 b/fixture/18/2/16.3 deleted file mode 100644 index f8760e122607b3eaabf7fe80aaf470319d30678a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 247 zcmZ>Y$}lu^j8qGbyqBc+oPp7;{{L&Ou0< z`%0N)E}LbNxrIg1gMopqfq_Aa+1UXk%j59(->Q`hQWy*k8H|7s2qYO8cFojFb-e@P zFtjLgI57r+nF0+Q4GpS73@i*Di~_$Rr>##qySiNO-u?|4cW?hFd~$X{e>UQFCSc2Ckg)YA4@u-pOw8Y5pi diff --git a/fixture/18/2/17.0 b/fixture/18/2/17.0 deleted file mode 100644 index 1d2d10edc3f0c24b94a492b327ba257c4c91f941..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 620 zcmZ>Y$}lu^j8qGb%=&ymmVxol|Nnpg*WdpRChmT(g|iW&2N?J87^Ey_U=UWDX_CNb zz@RwUhk=2CQ7S>rjpcxVROV!1`>Z7iK%pMarutV44Saf1PEN57017~W352@9!0b@B zg5d}QvoEhy3a<%+2?GGtOmL;Za2nju3@~J|cn&0ZdIQTnqtf zt^q3?O%9R?4{10yvj{k{{cCIQ=$|lg#>`m@7cCYH4NpzaPt*(ip-^9$eU3--G z-hi>WySGmx;>?@ByuPuCyPezL=TCb0Nc~Lk>h+rsA6XAH>Hg!jt{#P=Td#gAo3`ED zDS!P3gO7$qMXfxi&z!$-@ygX}H>DVFC)}0e{luW;Arq7ERE=rQgO{(^(=)99Fod6w zx${IXq0Lv|=J84M{!1w?UmwH-T=|AC!fpzuFKB}1h{{Q~<3atU$; zat%Tc)EKH9d>UpkzG5_ED!j4iff|Rcim-~Xi`*j4O`M0+rUbfni$wp^d(u^~P~qzH G1y%sDBKlJR diff --git a/fixture/18/2/17.1 b/fixture/18/2/17.1 deleted file mode 100644 index 631dd65010bc62ec83acd4d2e8f6fafb4493693f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 621 zcmZ>Y$}lu^j8qGbG=Dht00ZN{|NsB*fB*X*kof=a_rCW~Hdp{k9AMnTlTg&-!*a4g zSa7m1Qv%0 zWT%)&G%h%i!qyc*L*v554nUa;ybLBL3=GT;KxJ$VK)xZv2@{6Q1q?E-;v!0O|pm2m}`(6ypM*LeUp5eth(BSSWD`J5+LVn#jrJ zEaf5?AfVzX%E;ux9LV7jgk3RVQ5gq^2B7UMadu8_Nhz|dyrRCL5f0nJ8IN%7+kZsL zqAo6NCX>pxZ9jILobu}RC+%kuyB-V6zt&F9E?&KP_vb$>HZDFbJyT9tuDtp3S5{Tm ztKZxX*A^b(+_P7-Nhjgrjh&XxpErNu5WI5Zrt0p!`;YlgUc6*^etBiQy?_6t`}5cD zzoy94wso6;+OsdAA2!Z#Ui)9XG}L_i^&b%xGj9BptY4ItHrf7%udGs5)`V?4*?;-Y z|7rG}bMHPsIi(_Tb;Bk}6X$e}XRj0I&XbZ-T)sl;Xmf(|-%UU&zreh_xxRtDg}sBl zhkXLe4t@=O1I`Ay0$~NUhAIZ%hB-`s^^y|U=dhgMTp-sVoS@E7Y$}lu^j8qGb{C@h47X#z}|NrlQ|NFllLX_V{u;Hu&jC*(tQW|C&r5rL~n89*@ zfhoaFN{s;s6dPC$nw&V`|JRFQR#L_R1~rD6|5o`LsQE}5@m&EaKmqJaq~upIPLN_q zVQb)JR$z8uXt)4YAjQC7!oa{dA&G&3&4I0fA#>t_6lTr^3=F)IKykKJ^Z%`UHK_rt zlyR{Gm}cOGP*9RJZL6Y!N!TM+pnI@DQz;EDRROgbYBMz49UVlJ7cAi7oH)fx!PkI1 z*dx%$$Rglm_h`{$m8Yud>gg*pSH52TM)$CAbq!BoGTXDXSA25L`NjPcI_=i&*EgCU zpLNK7`VEOFsi-+IbMDQ%ukcVYSvh%W>g;7{%U-N_skQ30&YHK(!UbBNzB99PM4ZW4 zvvKDyDdqN8Z{7;ryZ_*c^65(+^YhE=XZrj1&tLyMy=m31Q@5qHtqYFa?b@^dz`>(p 
z1_no-_MSO^;o=n;`}hQD7sj2tc>ggJ7B=*2SpH)0m|^vs@$Nl-c?FR#eCsyo#PCQK zDO=mD$au*%Z$Wl;+qnyM-xY$}lu^j8qGb>^r7^g@Mtr{{Q0@tF*cntypE>wWwo}280b3fD#)Rc5Yb300mME zQYI;veN8U=JF+clYXFO8K;#7M>sPJHkYwQTao}m-0ZFkrFsPoHS3||5gB=$`gTMktyW`WXre=q4OuM>!dVaiwNx>%;x1ePgme265-qB_7Y$}lu^j8qGb47?q8n}PA||Nr;D|NUQo|NC7C1!kiV2N?J8yyK{9xj0$ifZ7Zn z2DKTS%Vs4d%u1NSFq2Ji8B1mduOtuWgp|wnU%h+;4lqh(95R@p{%@6uFw0@Lne15% zP{25W--FG;h;f2MLke4iButor;erB?eSsH9WL%J9xWK@W!pp$cCctZw!C)i>m2`c?cF-f#^>VEsBt#g;IJ$iC`c=GFS^3@N2?w7D|{@!L-*kNM*NzI0w* z!KKSgOWXDxxMgL|&-_BJ&@kTpvrNiU{+B$Lo9!Jk{x=?O*wUEUbcJ;lcZB#Ku^qY% z(F)rdWm&gz7YIIxUeMpY$}lu^j8qGbY*^oy&cOKo|Nr0r{_lVP``_>VU<#8sz_^FUps1(E%}34GppAin zB`M=FqX7dDv@uFaFmSdUx%~O6qXEkSw;2pRv&>iiTg9NpaB?Dl07wZOur)Ab@G>i$ zun%D{Vqmzy%V5In?7+amz}8?QWx{Z=AzS%7)8dL=<0XWxvtMZSC2!ci&MdYh!ck zkh+b$ci#os9NAsH*S2>YqOkDVQ@e|c&%>Mot0xtz!3AxI#ZoQD! znTXQ`vG)=Z9`efePMtn;;o>Fs(ee2O?d<%5a(Q+84!QiXsjc1n??3akj`DSzgtTr% zoL&%n|3Si|UfGG0r!d5(zhL(Eky?Ck{RZB(8wBpM{b#V)aqyIj%wL9AuNn0&luuS~ zZkcE2RJrq{LCR#eJH^IdnB6^0{x4h+z_%eMr|sH}yu60@4DTEMAE;ocaHwdgI8ez@ z=}_5Fd7z5HhhZk;OGXo>9O=_SflXXSjY$}lu^j8qGbJZjQ;j)C!0{r~^<_rL!I66Jrv6b5mCaSzWG*%fXxW~iz82(w7| zN+n4MGZ~tEH96q8*k=Zd1Oo#TvyZRzUoQq_Wyj9Wh6|THzQ3As+0{Y}9 zl{hjuL~NJ}&SVM*OtIu>nQ)9kc<4iu7KcKU)(^fn@BaK}P?A%!b8<^c%WCmh)irCi zv|_orGdutOW5-XNzo58&gL}{mZZWC;s^JEbYZhG5s|OCuZHeWmfR= z@e6^aKB2C)t^JEHzuK~On}pfB9|eDOYU@)T@A3S?vdGOVY$}lu^j8qGb+>$FfiGk7P|Nqxri&m`C>RQzKNdrWI*~o+e!_Eyy7@$DPB;(>@ zH(x`OBS)k{dSId;nnn8lt5psR84Muy-6_VU%DH-#$UfW2V#6?iofIHxDoGUww0X**}>wqmQW<*C-qjNj#x(c!JlW z@`B{E%M+rTAD@WZ+y2SZbynevB@5>K;=O;6Y4VHpsWb8q@0-_TKgrj-L;0u5l{gNC T1_t?~cP9FYsOU<&r8EElJqKVy diff --git a/fixture/18/2/19.0 b/fixture/18/2/19.0 deleted file mode 100644 index e8c6cbcb9d90da943b3d2461fbdeb8aacb26ab59..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 621 zcmZ>Y$}lu^j8qGbT(F7v9Rs6R{r~^<_rKr$UJIiBVj~VP?%}y2(`YcuBy*V$qlr|? z$z==-j3$?TdIVceHZZ6$Ffg!4|J9l?qm4n$r`IIqfcn2x>Jt(r*?f5dfN~%p1))+v z)CKzxxdkQ+Cm5I&7#P?Z6imQ!Yz+*&Kt_s00|Vm(#sv&gDH)OshcXx#nB9DVwrQ>W zzw*^o1_l$LDia2_W(SBauvRz`o|5AAw~4bSlLra~dKy_e+#H%**cK@%c_ag2QL0c2 zN5>>3DQC(d*C7sr!#oC^>(=XcrKRf|8W$CpRM*tDw6^v3^-mKsDyyoSHE+?rLq~Ft zp8dzryz!Iq@d?+i-?*>jJ$wG*{fCb~m@VP7U-I_sJF@rW5)={^DJ3mYNm<(3Gi&yo zdGi-6S-Nc1>NOiSZrZl}+Z{%|f8q1}*Zz}J)APTrZT|7+-~T2iUro#E4@K;I*sRtk z`^cvDCF|6=3vPE$h`Z0(+}hn^t*#+CbAf;ZzXGqULRMCDe1ddz!rgm}LJkifv8!vE zZ95UM)4aSR;Uw<|frpn0KM8#Nq)?^tt#$PpX=w#($L;@_jvgpyD0e7tC_muCFq6@Q zDTlp*y@llg=LIIW*Dr?PkWH+*JTz%pqm*A%UQ^%lD)X&!26Wu1^; F001cu=VSl? 
diff --git a/fixture/18/2/19.1 b/fixture/18/2/19.1 deleted file mode 100644 index 7f19ae7452db64373a728c08f7754575904f8813..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 615 zcmZ>Y$}lu^j8qGb+;VKP6a%Ao{r~^(fB*Zv@BRMw|Ns5p4`ibg2N?J8B($&aX=ao% zP-EnoX~1Z5fMF)%feW*JG|L4n`>iw3)zQhvEVS zk0vQ5mJYUs0v(<_nbd)CT~_8j9cY<&}S4Wb%)nXa<_XI;Y` zBX&S{LbSuSgVHPoVh@xT^fTRMsS#o*uX@tovOv#$@gi4?fC-?41pvqk B@}~d* diff --git a/fixture/18/2/19.2 b/fixture/18/2/19.2 deleted file mode 100644 index f782802c22b82221d5ba4b77dbf9dc42e5e1de56..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 619 zcmZ>Y$}lu^j8qGbtPxl%#K7oV|Nr~n|MmCF?}908!~w=VJO)XNIU9T!r3|GKIF|`_ zNE!0-Ffh#Im2gOCxWLSmAngBb6$1!x-2NKuvwX^@l&oAs=dEBN1_oXR2DV-(o6Vta z1;dF9wjPBH#sxqz1rx9YTLVZHLyANL1LFjsgj7m~B*UQ$1_ov~Uj_z7t>wSwtqcIG zWMFH21!16);mOB)7B34wF-hVQ3@S>o2!XJIsi2^cvy`(+prfOU%R~XV0+>S5X-NS` zHV%a*-d|^A=kDBd_TfQSfU%r0(_1i0{qmNGAy7%bS zyHEd_*@eX=67EdA*>LVWTU!VF%Z5e);j3)>4shi%7nex?VR)p)=NFVSQ*m*MMM-=A zL?)rUMQWdnRJ{HK<@yF(|I2o;;ii$aTf_YiEDtyr2rsa|AbddmK-qx+hJ}oG81FK= wF#lt`$LPkqk7)v{23NxUI*mh45?;=n9m^tJr$oB`KYC}PpNNV)i?&Mx0I`wyR{#J2 diff --git a/fixture/18/2/19.3 b/fixture/18/2/19.3 deleted file mode 100644 index 7f316c2647b96418b01bc705581cf69f0ce93493..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 248 zcmZ>Y$}lu^j8qGbtX#FQm4T7<|Nob(w7M2`EYetI-?eB3h>b>UVA#3g2m=^MnPf0z zNM$ZxAtjZ$Y=+}X1qKGj1q@OUNd^}F`>$3yWG-N9R=&c(29yJW3k-d)EZ#j7aS&)x zY$}lu^j8qGbv|5*9!@y)+|NsC0?|=W--$y6zey@dyIxz0xd(OPo&DZ2)XQNS) zgtM>7i9=E*87W7O@bt_yXl&@2mBBNEbFna^#Qc9NJx+Gq684o~wCVWbY=6~ZhLp+4 z$%+gN3~UWj3{pV)0t15-n_8O!LuLm9n*xJG149x}6bO#8HGtGGFqm9mU|ayCPoyxM zoWRxqWC699013tg7g$y;|F`njl7$mMf*A}9nLq}!1Be4M7!24NE--NHII&eiGlxq^ zMMYC!!V*J@p`$1xlZ#-0fQo~uKufD)FDg`0P+($EXkg5(?VZObr#ElietmoYMaMHV z;!kv(c**&SOPgEUhe;wzDr!zl%bvZq2VIYOvht_|?7aE%oBj@8V(dpT;=NcLrZGG~A!J)D7 z2@ajw)!nmizy6fU^*H%STH13lz2uOO|8Mc=e^b-5bMuQ!%WD!YnJCq(xH4F3uJglt zo%@{^oquI_ZSlt(Jw4aE5BBY6@9&Yn;Mih6|6*sC;l^Wr0jJMM-h9Z{;krz4iBzYf z8tW=f3svU-YcJ?rh&-_QUW6^AVcm02c;El^o7ndvIa9HEA&!ySoj5{ZY~ RO^@E0=qIAWw|XW&0|1JA0pY$}lu^j8qGbTy@!M3j>o){r~sB|NY;ON&MdT9wK^xaSz{fW>FqxACAjD0w*UV zs44pzWiE5;HMzLp#APO4gJv~nhMpd^Sw<4#zaaq^KjrfZ{L0W_WchlPuNem z$bWI-CE+U~VPat|1{+OGwp#48+-tkf{RGogE(w=APbDTgYh|r*>r>1xtaF)YlT-7k z)jR8TUf%m6@ux3e8Q#8o|M|<;pTBazILIqxonk*}XHh44Yvg{E05c!DVvaiWy9%F~t OlOhkDoL2k1x&Q!eyYEf_ diff --git a/fixture/18/2/2.2 b/fixture/18/2/2.2 deleted file mode 100644 index 51aa2cd330e6956629643de7b8a97ec5561a8140..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 621 zcmZ>Y$}lu^j8qGbbla1_#=vCv|Nr0r_4mJHA| z2>T#Ir3M8Cg$73R^6fu&{<7Zv$M)nOj?7;H66qWs8J(G(tGZU5?p}Rn&6&4n-|@ZY zH(+?;#L8=H@1WqZ>eQ`=&F-fWPg-+uk6WU{#NQ*!3cBb`&OUc3G; zDe1}6)c?g_OiH$V^?jW)hj;UWiXRG29239Zf49!<{Udw*8Gq{z|JW=2`s3e({`WkE zhVJqTMVpSBTNHm`|GUVl{^M0MtKUq;8Gi3LA3xsJ;K?!V!z4wO{5EMJGuAb%>sU9i xZel6m&fw1Bcp>&c@PYD!=mj1O6B_dwm6+UQh210)+j^z$ix;_C1WW)WF96yx`%VA= diff --git a/fixture/18/2/2.3 b/fixture/18/2/2.3 deleted file mode 100644 index 03d19aa58901083ffb134a73a5bc3560e709d742..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 245 zcmZ>Y$}lu^j8qGb{Fb~@mVq&>{{QP=U5i$%(!wA*7HNP*H!$qnu!;c!*cumPNU_bB zmAPz&K&u0@1A`R91qQ~&4j|c!3mpFbTeWhrn*+130y7W-#ef9I#H>eG!W@7cFyL?! 
zXi*SVU=(2J;9yF$VNhTy*E{xCq4msc%c>AhVU6ImS>JSSZ7+Oz=kk70r@7hY`j5CR zs#zZCCw6FT=Cx$mu)N=)>RadG345!1ELFu4m&-M2sy`{>{1U$DpX8VO$3ffxpiPT^ P?4G1~sHJ_gtj7xgUL0J9 diff --git a/fixture/18/2/3.0 b/fixture/18/2/3.0 deleted file mode 100644 index 597d3812b778d0e1ffde55b90924902765b4c5db..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 617 zcmZ>Y$}lu^j8qGbl)hoM2?g%j3&yaN=~A*;09mUHbzIT77;B0UjuSrfJ>tT7smt*Mu(ZA zP77Hs>ocr&hth_{Znc6C?)p~mC zjW!!^XQ^#zY$}lu^j8qGbjJ*>-m4Qj`|NsB{-~axHM*QCQ9xi%-aSz{fW)+spJ_;vNE(o$?Cy3s$0Tx@<&i*Bs}tuk z@*o$-3K50{U5yT0EGN1|In-ughw1_h42~=eB1euMJA3Z@wd*(TKX~}!<*SdMKL6CH zVlnt17|6`)=<1o8ncEO?EUm2lLxI3YK_ej}M+R;?35!`#iOFnz@~K4*ojN=+O{rYd z&Rw~B?bhu(4<0^x_WXtZ&D)QkK1;s;`12R}4-QO?<`xpR3|)>UUqzMKu0=%jZP~g_ z!R^(rKhxyYG>v9jF7=M9)6ZZ3!Qdmm{ITOYbJm=>9FRV%-Q|kMS&u1KPM!@|a(TJ@ z>$i1>ZrzEw%fI)*-aq|!KOFeSqhnAlB7aQ&P2=P#MxWV=7#Fl;Dsg-{g=qXx?ou?>%83=X&R7@RFER|#(A KWW9UzX8-{AulYLw diff --git a/fixture/18/2/3.2 b/fixture/18/2/3.2 deleted file mode 100644 index a995e289ed9fc1a1eccffa5824c4b38ea035ba09..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 624 zcmZ>Y$}lu^j8qGbyctXk_?(7&`??sz|?4@)at~!3_BELU|2M>ic!qx2^wyoQ? z@7}Zb{NnQV?w)=3-s{`@_s?JdVEwX5mmW5sIqT0M?Ck2MH*e8~gp1e2l;7sA>kw3} zt@Gy>R#+Hw=FVH8A2a3e%(?%qu-VG0YSwOPh3p(YS>_jvTncUzgqGF+n%=~}u;AeD z_Q#*(_diHVYO`}ldCHcSk)GbzthhvRE1q$y30wx3slzV8L(B!Z_BC7y0^ZpHT_ diff --git a/fixture/18/2/3.3 b/fixture/18/2/3.3 deleted file mode 100644 index 3b9c9c8359aaaa07ad1dac7bc3e05266352c32fb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 247 zcmZ>Y$}lu^j8qGbEGYlj$-o#`|Nrs&Ra#w(R-h6ci!@*Y8yI%Zcz=~aih&o18H^Z! zfX$haV-kqLz}CRlxPZZg*Tm$qvxCFmf2&q5c5`4BR$vA~pfr%++OaWPF<>o7F%Sr} zC~`PxIWRCxVQ^w(&|qv}VNh&-8N*}pMfgwB+1=YeJU%TqHA(Nm$;D~!)Jm-n$)~u^ zu$7v6Vuzp1%&($*Hd{#*{tNi|V^YE6)ARi{Z7%oY$}lu^j8qGbobxwx9|M!@|Nr;D|NUQ&Ox*omi!5+}aSyjb$chUWOpY+CsWnb8 zxgcpU)1a+cz=W5V!Gx_>Qqm-WbK%5=lmA~?DajzooYWueJ|!W`{MQOoiDOL(E({PL zDaF9JtO3Yo6EI@z;AxP`NMdU+0?C<}FmUjK7z~UH7?>Rx7$n&g7#LC*G8ouqO0hW@ z%mT_>oD3AtTKWIg@>i1@co{A*02N9B#TgimFaUW#jS#@Bz`)?Ewy`kT<(5Ez8s`y3 zR<0IdAL5~iV1NdrqbO5@2OfRE?zFA zwQkdv+}hR!55J3>yXbdJQVSNBSlfR1aneMs`n{7{?N@t8r^X*Fa(eec`#<;V4;wRF zE0RtaXGojhtl3w@bX37>M(yNo{R06@>fRh(yJ1I&{dT2SZ{k+V|9;r8K{rM)jY$}lu^j8qGb+#+R~%)li7|NrlQ|M$N~CVuaGk1TM2aSyjb$ReYZ%gPLGjU5JR zy@o~%62cCdjgm$wM-n9jBple>6!Oo8G?I+N`8eA1%00Oqg z1q_VK8YCHbfoumu1}Po~hJ;{+bH z1_lO`6ri}!s`^!RD>WFZfC3CqtqhC{fMy~ANuwFZdIgUKOqj65aH*)1(?pI-5+p!{ zPDUmdK?MbmhAd4^E@vr%u#4kypM{g%uKi~mHvagtX02}SguC}tyc=fo3);DPP4&7l ze|5r_Z$H;p+&K95q51CT=F2xsFbF6#3@i#NYCPiGDWbP%*@}#Z->!XU@f6l>ZJ#_P z+S=aP=h(7i=dOMG&#ta-9v+|G-amc&{^JiPOm@8A;_rWfQ$%mtta)ov6D}Tp^Hw)6 zZ{3MW>Wxj|;?gqPVz-`u^y=GBA(PJf3JcE-Ct`wY$}lu^j8qGbjEjvq%)q2n|Nnpe|L=eQ*WX7F<#!S6?|%<4?%`GlS>QG+R^BXUK3^q5Q}{Yk3$CI1Sw`UhZ79UZVe0!h71fGQc@;BIbH^k$qWo83>O#}GJt$$ z0iI?Dwpk3!$_^mi30H&vU-iGL$sonJ0H_?K3Z#j_gqMLAXbJ>KNii_^DlM9FE1%)A zkFbxDlPHHqh{CJ{Qs9D)Mh7mA4iSb0KAiz898C@qfNxR+99ei$GFH6Q{A()rzW$(A z)@^Q)SGrg3#;yu)*f>kS?acPGOU^qS%qWTK^;>!H=*fpqU$JrVY3UjH1%(xrFdRI> zxpLJ;5uMbdXFq&m@k~lheU_2&GHc4UTet5#c=+hq^A~U4YTtkO`1RZO-+z4n!w~sO zVFA0KyMKV%rfoX{c57!BPA{Ln_(c4}iK_h*Cx=H&e;XU0H1E`(0fQ_ zc6$1-S-_{!n59FlVOCQP=MEi#wMPtfS#}70@L@D$IU#J|(UiftLhZoprh_aGR2)u7 TGE3b5SgsP>%E|il>KOw7>%I!{ diff --git a/fixture/18/2/4.3 b/fixture/18/2/4.3 deleted file mode 100644 index 5706fb3825564afc24d3e62953be6a43f0ee5f94..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 246 
zcmZ>Y$}lu^j8qGb6qU=v$fyKdrQ;30u!9#v}-|6M~a{E00Z0$BXH{Yoza`VgU+iTv;{92{3K&~rP zY$}lu^j8qGbeCoPkDFc&0{r~^<_rL#z6L-JY!r2hf1B`ok5>h%RyEQKBX`IZG zlpt{A$i>dimdk2xf}P6*65P~;1t%_ZT)1$V{a3A7%nm0P`?4=eGPLzwT(@f8VxQT; zrz#CZv?efszy&Y~B$yr88ZMk*NHJn|;AyzP>|nyckddJPB!Pgffq@rDrARa|Fiv1x zz#x^9A<1wk1E|T(mw|y%YvuoyuckCEkbPcQCP`c(0j3B5EfrwR)XZG1oxPT&YxkbLhomg( z{K7h!RJNH_aDJHb;^l{rpMGjBVl6rtenY~kH8LkRue7YZuD+qYqjSQ8G_eXoAtdBqY{7c`qeBbN;^(X$ZiOH$y8SXp5JE7&KNAC}} z{A-b$jeX}#I2kNy)vOtk+U>K?%(CGh!_WB=5y>yVO5AwJen;^nM~CvCp8Yq@{^NPl z{A-F(Li_{IgBq;!I6YJyrZz=zW~d(2`mwpwLX}|((|ML9LJPbO>ac7PdfY$}lu^j8qGb%*yv$!oVb0|NsB{|Ns8)fBzdo{NDE-&W4B{VBEu#te_^$+1X@h z&@;qZ;7*crJ6pk>Q;AJoZ%4K9UFk~=0E_UFR=w)`e02GjL$Y96-%1ANrN-;37 zH7;Oa;01DpR{mQ4YAFN52_PE|GC&G#9l(6K1q=*~vhp&Ej~chSNL(ToDmDp;bR@L2 zDhaM=aZ+-YA|AR-VgT9PpdB0GPN0 ztm-;-G4+_4Tj;fE%f;2rmv28|;Sq7-#>>de%9p6{HLvFoJ9p{YvsZ5ns{a@= zxpGTRF)2O%{A*=Z!Z|iR`Om`AGAU1a&RmFm?Cg^HiZ|Hd=D`gma&`iW(zEw7i=PiY zR=_w}`Foq8p_`e-$y1Uu7i>P()^YiY)UvjX8;WDZ4k$0!<|xNfBX&WBVM5bw)*aj> wGWXt0yb!I>&t%7)A;jR}n0vJMPq*}MG4o$KKN=L6|LHyHDp;s+<=$Km0OWM_$^ZZW diff --git a/fixture/18/2/5.2 b/fixture/18/2/5.2 deleted file mode 100644 index 1a305ea2d8d1d6ea1e881633ce458c3ad03c4d5c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 621 zcmZ>Y$}lu^j8qGb4E$c%#=s<8|Nr~n|Nrj;iFyc8eiy+8vkoxs;YmpKHA-;fY&0~I zVBlQXVw7>&jdK~J1j7u6q(n(cUS>CeBmYk=U`b$L5N4S9GoXiYna_+i4+a!ql99n+ zB*nO}gH0iW;er%H3R{B|vqR&;#STDa7kC*=Oc)rL9e5cS*cyO*LxvM344DfUj7%7S z;%uCu_E+sgrZBLtU|axHZUn>_fX!#ITgPHGUydM&OA=QiSUI^|R5W=q2|)puhDIfh z2?8n(rXnq^ioHa`iH;yAaPZ&Sb?Uam!FTK4vvNyLFe&7D^EbChW&8G@ol~@L-+dK* z`}0?yU7vn4TRVFvXBV&Dy!-PX78@6zmYykRuG|%sFxYAuW)_yDr{{A2LE@vNUKx91 zleWHT^LEPip1l-f6O(eP`0F>(4yPZaySY&_pw;9*swfcl4nw2Qshf)lSmgh%=1vZ$kifX0w{R_ E049CwTL1t6 diff --git a/fixture/18/2/5.3 b/fixture/18/2/5.3 deleted file mode 100644 index 013056877699edb022cdc3f1979b8e1d29970ad6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 244 zcmZ>Y$}lu^j8qGbyf$%>F$1If|Nk#n>31z!u}TX{bS%<-_=xamAs(k7wP>?wtqA>^ICin^t3;vmAG_H6~~hKPem7S UC@^uzAH6ftPejFZNk-2B0JXeVT>t<8 diff --git a/fixture/18/2/6.0 b/fixture/18/2/6.0 deleted file mode 100644 index 3f238f07712261c5f73615d34ffd5a553e19d681..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 627 zcmZ>Y$}lu^j8qGbe3MyP!@$H?|Nr~n|Nrj;iTeBB?|!cZQD8QlIKa4vCn06Av#_s$ zva&E|=VV`rnI=h6nag@i)P$KNCEOS#SPuC3^!TXx|5@eC*}%YYZ26KCC+AO9WAHIK z*(IsK00z7eih*(A1O|o_iMGZGQVc0<4N^c3uZcoN#s#3f39|z)NZkcy2L{FkKt7Ly z5if&O1_Mto15li?Db)X~|5a562Dt?pKxI4)Kmr{wPR_WvNaN(>CXqIiHj{Qyrv(iy ztxAGR46#5*Atsg%B?pIz3|E4L!0JJINQABo0tyTa0<2G#JXL$Ho~h{+8hS1AR@6Oq z0eyCEg^Y(ke@Q*zm~`*{GxhU#?>~GOu>NJynFqI|@5n?l?zY=+f7tnm`w5RTK4$|i z2Db7B$0sJ|7Z%C5zL@BAEGem1cJk~w_TK&hjk3LGFU8ozr2PD4ZC$-*&tERlAAfWN z7}htg*3MqD=B@79clyj72ZXeFQp>NMI(P5UvrpfCvvc#m?~-`V@XjISDf@dJ`on!9L_=TZWa7)E=mUj+wtt{ggo<1XU=b>K3i;TW`3tqkE+jn4k ziuXUEx1421b~nXd)O@J-VakuG4AX18TD&^EddhicDLzp9An-$wLAZvag`Y$}lu^j8qGb{Jp#WE&~&D{r`Xe_y7M5B;No2_j})a5Cvw#i35y#c#;*=7}REP zPFCZb?4#z)a&lq=XUmBTiH2+p3~FkEEs2s6^{-Z%Bye2znZedDL(TrGX9L5@32hz> zD1h0qBZ=7|#YmEI08RbAj^Ffn0ZV0PeTU|?$i@(meIm@s56 zU@$Ua0E)A5hWriw8t4F208|VnfHVj&zJkzSft0Z2l3Pxqip-fjAeg12;-uu_xWo`E zbY@Y}6gbGq<)R$u(c;o(LIyMyV{m9-U~s7S_CDi(A)tB1%C+nG(=#+8PVZrS!ZGRi zx%294*Kgc^AYi?{dwqNV!ThNUTyI24Ma_wsb9e4N`3DL~N=ZvnmONYbT;ru?)*)5_ zwLPJ0w_e!T>>e+0@X#?OZ*6^pd2M~`w(Yz3?4_TaUecbwwR88`#nt_1-Os(Zg^S0a zkjbfyH$&^%vlp>1E*?G+e)Cq;KRtW%@cP}a|L_S*9>?B3!y>n^h_mM;e=%%no44TFb!lnk>>RNNx)-7kY(H4Q z^pGV*a6ybiZ<7UgkGO(f(8IOL3cXB^SaJkUT>L+A@&9D^|856a1Q_a%-kInpqGI>% 
H{&xoebF1~_ diff --git a/fixture/18/2/6.2 b/fixture/18/2/6.2 deleted file mode 100644 index 1b7e7487f29594d53c121a9ae5a8e1b7e9e91b6b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 622 zcmZ>Y$}lu^j8qGb+;&gKje&`^{{R2`-~axvhY;m=5o{RCfpHH{La0-cRA$Q&6Ny>8 zQYn|+dYl=JnB96M4J9}yD`d7L2qZ9NF0lXWwXEUdvNoR?{+_SQCUab_U$t0G_~c}k z6NU~9AixZvfGjp=0iFh)W(5<56TBd9!-wdv;DHQTc109_v6g${Ledf|59-NX8TI{pZ@vgmilXwA36Tt3I4zF{NyPm znS4_XFR!p#njW#eBSj|pZ$SUv7p^t>AK1j7B>p>C&!%T^{Dh>X^S>S;hscCUO+MTP zLIqxonk*}XHbgF%#uO!-?l@x#QxK9T#vn(5gE<_#Y$}lu^j8qGbys|ktmVwdf|NqmAR{YZHTC`#nkmy*X0iwWc7_ot2=Y}H;P+*dB zaj{#kR8qo><{)PWu%HR!0uar}_WjqY2|SI<8ZsCsE?{75U@&2jlw@$*&@ZRrEdf&E zB+#PB&;?Sdo6W5<7I``6(l_q&UBxm_sLXxpBH~xe{{F%I)Eb?`b#tmXmi{yqwPg|X Z{*pfz diff --git a/fixture/18/2/7.0 b/fixture/18/2/7.0 deleted file mode 100644 index 0f42c6567c067dc3d4df6403fed09b5b1d50ba1f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 617 zcmZ>Y$}lu^j8qGbTx(|Tz`*#q{{R2`-~a!wzyJO2_gWD37oBim+{1giS;g1nP)dTZ zurdRKnwv7`GL{1j3>+7mG8@_yPEMSx%=m99X9EL+o14@O{;xqi!Y3!IaTq!-U7Y%&!7!4d2u42B{eAqoovnljg2S0|nj5bU)tPE1mYJ?lq9sC*QGdeJz;atikqR_;lJSX;~$U`Tm JEjR8m001v~@WcQB diff --git a/fixture/18/2/7.1 b/fixture/18/2/7.1 deleted file mode 100644 index b16655eea8f5b935e9067e58de1709ed647a0cf7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 622 zcmZ>Y$}lu^j8qGb+)-_Hg@N%){r~sB|NY+&B!2IE52C zsLe`fU=wD_Y)Lu5!R#Q(!;~P{a&np4OkU&rtCk$-l`^@^z?5<1Q2ncrBNslIWH$0l zV1NN$5QEu)f!BnW;ob^n1zv^|CcMlFKtTl)kQ4)30|Q7+ibMkg;{?V93{oi>k_?A3 zfSTNV85kI~R{mf4Y7$Ux0Z@??5Q6~NPzV!9vfL|hKfX-!jHiH&*yldgh=iUr?F GR{#Ke&+*{^ diff --git a/fixture/18/2/7.2 b/fixture/18/2/7.2 deleted file mode 100644 index a1af96c5da0a7742a0f3b1ae1a6dff2a61e65086..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 622 zcmZ>Y$}lu^j8qGbv|9Sdhk^0i|Nnpg*WdpRBFgWADRklh;~pM^l%@nX&IVrtpBW4+ zhm4XE4cr(Q7z8J~u^jM~m}$WLZI$4%88h74fTHrR7N|)voSbkHr~m|bL4*m2NMUPh zR;XLSaDsu^mmz}}B&GnCU|?$iD@&1RU|^iUxPU<_B}0YEakN^THw#Efu29!woWMpR0+i^o}1{f|n%&;&;VPQa1CK7B?;bI7IX>{OX zIk8HV1F90Do@COM!@#kL!yx|r`tu(yJW^i#MD>|khDOFJmoD+dB$Lv;Z@>QdByn@+ z%_pUudGCcaPla#ZzWe&^`#;M+Y+Sl>?a8wjEXvA%a0m(t8VMN%8wZz|gp?||hFuT8 z$+KC?>fRgi84?!GCz4ZBpJimc6uhRob^Ff!2M+~LS??bnU*F!pe*6CWj~6((+JuIO zcT1*c=9rXTy`lfXpwNB7iIbH=OZNB# z>)j{fJLb$;^8AHjvx-UX{j)P?N&aG7I!CIiaqT*3X~pe3r1yWgcc1YA<3mP0<|E9< zm`^aDVm`xsj`;%fC8h+H6Z`?33xxApcRBb7_)PGbP}QNY$}lu^j8qGbyfyvUB?d;j`v0%By4J5)rPZ~lW03}k0<)2b4GcRs9ASV0wpmg@ zWR%&~A~gvtbm0P2#^LY3RVx>ym@o?~Fase_lyL$B*N<<8uXGkLFt8|cI0C+6T}w{x;KwFwsws~?@I6!XfmBwM)RF8c>hHyM^UclNid z)kd?3H{AKv=RT=UEOB|GZsx})`hh1tznEXorT=n%uf-RJF3&kt46Z7VTLc{2#6Om+ N1h;bXuB^#b007BbSv>#% diff --git a/fixture/18/2/8.0 b/fixture/18/2/8.0 deleted file mode 100644 index 788319f576021a90bf39d4ac04eb5155cf100997..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 624 zcmZ>Y$}lu^j8qGb{BkC1KLg{F|NsB~ufPBQ?)O?C^&P}UCk`;~;kk0sD07*xGXu+s z0}L~bBxWU=B&abkGP4{sk(4wtXyXZH_`9mn(BuTOFlWcbl}Yq;#dU;>oQXk5710Z3oqWiSEqnH_i;7}y$sd_#s4CJdPi7>rCn z>ex6#?XTL0ECH%K0mLAkFktHdX2~sJVBi%#@ss1LqTrS`6DV|dW>RQrRXozrsKhZr zK*hmnafe5c#3gFMJ`RN@9)W}|L&Hs`+st-Z?Y7%*f7tnm`w5RTKJ5z@E?Oy|cIeoN z*rR9vv2h$+-2PlZd6jo@dBuEv^ZfGq?fVb9`CnQtu5P}3`w0t=h>jU6IVCkaCw$&~ z`Af-^tx7D{u7^+2in?^|Mq_F1+x>^WKm4<=-X|ufX3hTdufNa#r@wfW8n3iPeJYbl zXK=pbhc3}}9lb|S#J3-6pWhZydnEglKBkLDu{{WT)+1ZVg9daIUUXX1NKA?V}%prhb zA>%DZ7p5Lo1+DY$}lu^j8qGb+`e#DBLm~J|NsB*fB*a6?|tt<6qt=h9AMnTb4Aj@*TBbT24~BW 
zBbOOwFs1O!VDMpJ5N2?j$>!tc%p&nW)IQYY0LzgB3=C@Zt0sIBK4Fw;6L69N3YZ-j z7$g}^ur=^9E?{7gUuD9;V8X1x%W#2#*+Buw0RpxL23{bQBGJIWI02|iDkVdb;ZOzx z1GAei0|Vo#<^NaBU*!d4Eo)rBz>opd$iQF-H0Aol3_v8{35?VMrMf4m}_8j@k zaPi7l(Qjhr;^xjD42q13wv=?_OjJF1$bZ3d|MvdrC&L_O&H5i5b5}NAK7NjKyMUUS zO~B5`#L2VTV|vw z**OggKQYgq!)oObvD3oBBjKdQPmYY6e8m%9KC)G?i^|DqyZ^-8#eBcx?|+O*53C>9 zFxV*AUl3+cbEs&jI8ez@>EO#SoADW=1@k%Pe~ea46@2nl!VASk$Hc PPwz=r!9vA+XHzBs$~^nK diff --git a/fixture/18/2/8.2 b/fixture/18/2/8.2 deleted file mode 100644 index 1627b4bd694bb8ca57b050dd84ac64eb4f0b0620..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 619 zcmZ>Y$}lu^j8qGbTya3(pMmlD|Nr;D{{<4|cY#zrh>cDhVBEuVAXVXzRAvWTLk6?M zg$yH;3rW07OfEYxJMc6}Wk{$wTwK;;|JBQ1qf$6k%AjA#R&mp8&VQ@sY0j9T`G6x- zfq}tP>hYjCdKO zG8lMz8Gz!9&7uDPL+wMR0F|5o3IJ6xfW$!nq#X!=wwUlVIxciQ+ABDrsnNg}4E+>2 zG(r?+B`oM_bl~DxA;Pf0r(g&v}MO9%wN1jIWRcH`*Lu2Wb_-WieFq`H1^qhTl;S^ncuSA+;j5EkhMuy zPv0=ixVm||-~CIcAFTZIuRna<3h9`V?^}Lx{T9(!voGn~h3Hx*lcvY-gzofTf1~E7 zcZJpKV7=4TL*ITFBuN_cMzg(RUY_{+?WX#N2NrsSSBPkFi?!Z=SY5+ep~f(yX+O&e zVFO=AQ|2ygIbv}O5WSyynVBGcAnzLfvj)`kXy!#oCO7|T_HTRFLU In7(`f0LTpe=Kufz diff --git a/fixture/18/2/8.3 b/fixture/18/2/8.3 deleted file mode 100644 index afc90604f68d298f54782ac581e86c63b2fb1637..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 253 zcmZ>Y$}lu^j8qGbeA%jD&cJ9{|NrrdRa(Cmbu7|YrPZ|v$VMkNFzlRh*5Lv#TjRn7 z7g8897#J={Wk@kFwkQQKfWZZZ3zr=(F1T>PB{t8iPQC1DB%!Ljy|(i+sY-$z~nb^!VI@w$8{8zL5Fxi^e52 z|0Sl?;qFKLlDcp2xz}Fd{Nn1{kUe%noTulkjrun8z`a^O`&07vnfi%o`45jzx0_pC i{_*+s{qyS;x;z(jdA?y_5McVJ_oS;}q2gt(fII+g(Pt6> diff --git a/fixture/18/2/9.0 b/fixture/18/2/9.0 deleted file mode 100644 index ee3d37c45f871325f036e1d38740a0a69cb9a49e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 626 zcmZ>Y$}lu^j8qGb+#tJMf`Res|Nr;D-~C?yzyALBzu1WbjC*+Av3c|eE}NC$uD$U&ow1O^{rfs~Vt1`LcQ8EkF#Uqh}OIlv&;WO5-X@!u*R;jBvEm2GWp8Vn%7 zIDy%LfvthzLgs`NAO%D~KC=Vk0tSYY3z!`m7cO=Hs=C0-U}C}m6z63CDPg!^$Z*1h zA#(wPkqHA(oQ*fs{%^?NP}=|o#syM9^+07nZ~>^{0^`C3Kqlh^AYlR&4tO!KFxgd1 zMTE1dNr{y~LrEcksddF9QLYwYAL>Iz1_4I~1_zx5iep~zoS zvv%E%uAY6g{KrqrD(}+r@*h=IziVq3IKBJS85$mue?Rua#!YNTx9-@vOMLpG{|q&C z&in$&HOK6))b4xyd6Ln8XLAd&hKrY^W-X3*U{#SlUs_r@m*MWcGiNI#pP1MuyL*Vu z&~=Dfu)kqzqYUd`mJ-1S@eiUN^f&!)y1}}MJ45V-3d45Byrz579od|``3#5MdJGQp O+!rr$wFsC1N@D;gE&b8} diff --git a/fixture/18/2/9.1 b/fixture/18/2/9.1 deleted file mode 100644 index d1fa23cbe4d725d9bd485e67c8848ba3fe2eb4ad..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 616 zcmZ>Y$}lu^j8qGbTs-mpYzD^b|NsB~_j}*_{qKMO`;VPCz_^F!^_)gSlgo@o1`Iwk z7f0(cqN8i1@5ybLA` zj1%~;GNdpoNHLrM39@zYg4HlCU|@D&V31@}U|>jL$Y5ZbDaGbsFbgPmaWYWcYvsR{ zzm_bJVqkWB1r(D4Vi*7$%>Xo+fo%%spNxw#shpf#&QdOd3JM-gDlD9?KCP}w6P6fK z9a=01Sm?yUC9rAItVOpTy;jyX&Mrs~PcOgz{6|Jb$4^elMkZJP3&Gth)^FG-VfH}$ z2Xp)53CeT4!y{(;UfMHf@7%rj_TBq`w7G6Cdv#NDOILRf-v#3+JvK2le`QtAl$HOe`1QN`??1cS zJ$z>GK7ISo%q}c0QSc!prCHyAg`?TtK`JUibro-ILvC)vOo#mZ=JpQOvd*2moLW7i zYg&JJ9zWriS5)#<;CQ5U^<18<-?kktaab52)L?x;wW0h#1%r3PhlY;~9~(X~d}{d2 v@VVg&!Y$}lu^j8qGbOuSt7gMsl@{r~^<_sj2p{|ltB69*Xg@ED|YF7uU87C4k>D8V4i za}tLFL4@|ls@uy6_k6!0=+09hA6 zybNCUSI!CyJPmBk3MN1yCbqU-pfIxokONe}%V5I5AO+$dVOF@nxU7NU#08K>3Dr>l z{~>=vCIAhRVn_iQ$H3-rLJ~wXPGDem1{wwdK}t$op?WOGOxl33Lrp+H#lczS*n|^a z5-m<1$yA1k;w=J>qAVfehh{BW_2|`SZDZ?#wC@S;-k<*<@KMl+NhM7!ZAHe4SF2v@ zyk+Uyz2~5Wxw5*pOTpjXx$BrzIzN9YpYq}Rk4z5p#|`D>?LRmqDmrHDIB`+PDCp8> 
z;mMn?gp^DbH4VM`O{^(5FRwM8A>!bUh#Nm8XYM@Q{xUoJO`gDACGYm>GvY$}lu^j8qGb+@JgJ4FjXe|NpPM7Imy&u}Z6J5q83WVdsV;3{a3E#nv_}bJ>hp zNl5}r7#NtH9T*rcK;%*!_x@Y8gxRf^t#JVZTjRnOkTA1DMEHq`LaIGM0xgOhECCRu zK$5?KqoF}rh=GN{gOUHttR0n1DxJkIuWzrHtF8X|{abdr|ANE4Nq65^8n?hd@zgcwVNB-r0=TDkylL`*7 T2rx4L*gZ+}P)qCY{O<_>h*YO<3@fe5k7f10EH}Mbm@D8uLh0|T`5FYopE4bU)PT*_*?tOd2_J-{V+vByj zYfsnStUXzKvG!E$rP@Qa7itgG-lsiGdzbbs?LFFawAX0Q&|aZELVJDo`0VZ3qq8?> zPtM+&JvDo2_QdRk*#oneWe>~Vl|3kXPxhSb9oaLoS7gt}UXMK~7DPr}~ZUV}XbdvJRP_6+Q`?FHBau&4HDhxTUyf4}~G{q_2@ z^;heU)?ceXR)4GhNd1la6ZN<0Pt#wfKS_U){viD&`a|@0=nv4}pFcl;cmC}B)%kPt z*XEDSUztBLe`Ef*{B8Nu@;BvA%3qW}C4Wi&ko*Pt1M>Id569n)KO289{#^XE_%rcW z;*Z2%hd&N~8~*72D*RFSi~D=<=itxnFTo#zzqP*se**sQP1}>U$82xep0e+Bz036u z*SlEnV7+_w4%NF;?@Yb>^v=_}PVX$etMrc2yGHLAy<7B-(7QqJ1ijnyPS3kM@8rCT z^A66tH1E*7JM#|AyD#s&yu0$w%DXD>oV;uDj>)?s?})q`@{Y&59q)9!oAFM@yBP0O zyi4&8#k&yiK)n0#4#T?(@9N&oy^HWp?j78_1@GS8wY@9wj_sY=yWP9*@7bTTzh-~O J{*3(<3joi9ej@+? diff --git a/fixture/18/3/0.1 b/fixture/18/3/0.1 deleted file mode 100644 index a96cb78fa6975061fc0720486012b9fb17870415..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 678 zcmV;X0$Ke60gwc+1ONcA1ONc00ssIM0001v0ssIgwJ-f(umdF&018X|2_rzZpaE(# zOSK!T-B|6$YByH9vD%H*nvxAu!XZgIq-4MMJ#YY?0G|MFc4lApVkh=t=k;CJ^;wto zSXcE_NA*w#^-st2OSkk#cl1U#^g<`}KIij2r}H_N^EFrVG#B$Q2lFh)@+*h(CwKBA zH}WF)@gC>#8mI9YXYmzR@evpC5ZCYw$8fq!_}t^p;Ba@lfwz6_0KN-*$o7uy8QTN4 z_iNAB-mN`bd$sml?X}utwO4A7)ZVB)PJ5g7H0@2=le8CUPtjhYJw$tf_5khu*~7DU zXV1>wn>{ysZT8IUmDwY+*JY2(-j+QodsFtL>@C?-vX^8}$X<{=AbUCXaO~aKgR%Ew z&&A$}JrjE+_B`x$*yFHQVUNOI-QI&e2YYgR3HA`|z3mOy6R^kjYNz&T0e{2(g#88k z)Ag6@57%F;KUjaS{!sm$`ZM+S>Ce+&r$0-7mHsIGHTq-px9E@1-=IH1e|!G){N?$R z^B3n2&R?28G=FFQ!2Esr^YVA)&&pqwKPP`p{+Rp~`6Kc-yH)R0y&LsT)VomcG`-984%53x?;yQ<^bXOxL+=c|`}5AvyFTyi zysPt$&bv16*t}cwj?B9;@5H>@@=nXUEbpYei}DW2yCm zT5ok$Uv*L+by44RP0w^mk90^+bVNUNK>u?;zjHgUb2o2uHZOBBA9F6>axI^7DUWg` zPjVy=av=Y49KUfJe{mOYaT70b67O&h-|)IqINjy$;BkLDg1epV0=^b|%l4G*CEFXe zCu}d+p02%Id${&u?ZMi6wTEi&)SjumPkWyBI_+87tF%XHuhAZ(y+wP3_6F?<+S{|I zXD`p5oV_@EaQ4#dq1ijL2WIcfo|nBVdsg^a$Mvd3hv$R3fsA$vUbcI@fco3STj zFUFpVy%c*W_CoA|*!!@DVei78g}n%S5ccNw7VIh5gWD^xM_{jQ@7weC27ku>iv1D$ z>-ER$Z`U8Kzgd5>{#N~|`b+gE>MztEsJ~2qnEo#PLHc|2=jiXypP|1(e}4Y@{PFp# z^GD}z&L5knZv-R%PJ6G>oy)*T$)H_n|I=$ocZqqwT?56Z9_7J3#O9 zyud)_MXA9>4nv;{WBM@ z-G1=&)%!0$|7B+qlvdTZboLET%r33(oIH2=#@$EH-+cV`J0ml2#|&>Wz-KXZ8I?#$Vldo$-|uFag8xiWKP=DN&rncFf) zWp2uxl({8yO6HQx37HEr2V^eC9FDmgb1>#!%(<95F=t|~#GHq@4s#slD$G%sn=r>< zZo!;_xdC(EoVNgP3+T20ZVRZk0BQ@Ewg6@eShfIV0e8CYa^2y&lXVyC4%S_&J5+b4 z?m*pry7P2*>CVz!r8`G=jqVuT6}lsIH|UPf-JUx=cXRIK+{L+5bC>20&0Ux~Fn3?> zu-sj_vvT+3&dFVqJ0o{R?ugv=xZ`oRrMMGu7vc`YU4}ahcNgv; z+&#E+aChL&z+Hj60CxcH&Sjg!Hg|0f+T61_XLHBqjLj7bd$smx?akU_wYO?d)!wK* zQG22GH0@>D!?YJ^57OSFJw$tl_6+U)+4Hm4XV1=Fojp2xZT8sgt=S{9H)c=F-j+Qr zds+6R>_ypwvX^8J$=;DYAbUUdeC*xWv$0oW&&6JgJr;W<_DJlF*yFIbVNb)}ggps+ z5%v`9CD=o-=k0ZS+#R>OZFk!4rrk-qi*~2%F4-NjJ7IUh?tleM~Cx%xZxXX>xi zpQpc0f1Lg*{Zaax^vCFL(VwEfL4Sh&0{!Xv%kzilFU}vFzc+tq{?7cF`TO$c<*&=1 zmA@)~RQ{U$G5K5aN91qFpOC*De>(nh{K@!>@dx8C#UF~l6MrE7KKyz3yYOe>ufm^$ uzXpE{{tEmN_#5zd@7tcYy=!~c_NwhU+iSMRY_HfJvAtn?z4mzR?Fs z_i4`4T&FopbCu>O%{7{1G`DDu(A=OoL34ZN^vva%lQS1*4$fSfIW%);=D^H-ne#Gt zWzNc6l{qJKP3D-)6`3P4H)M{-+>SXNb2H{-%*B{fF_&Tv#axIv5OW{qFw9+;voQBy z&cR%RIRkSA<_OFIn7e>)3&6I3YYVWpfM*MEwt!{c?poZjxGQl-;%>wphr11T8tx|C zNw|w}r{FHZ9fG^=j=K{u$8B!goVK}XbJFIb%_*BpHis;Gxb|-C+1h)x=W4Ijo~gZ3 zd!+U{?Qz=Mv`1-g(w?NfMSF_&6731v3$zDlFV7yHy*qnw_TKEd**mjmX0Obim%T1~ zT=uH$QQ4ca$7FBGo|3&GdqVbt?CIFcv4>+X#vY8l7keo7PVAZ3`>^L>ufv{&y$X92 
z_8ROl*juniU~jtc>84b}2%t7zH#`K{8p8%f#bHnC@%>|p=HK%JX*PN`mSaY!EQq7^7 zJ2eMt?$ey7xl41F<|@rOnrk%2Xs*y4p}9eGeCGDd>6x1|Cuc6soSL~bb7Hv zGKXdE%AA$CCv#5bn#>uQD>6r9uE!jYxgB#f=4Q;vm|HQYVlKs;h`A7RAm%d6VVJux z2Vw5PoP)Uoa|Y%L%yDyXa{+$K_8Yd}uKfn>w`adO3wOTmdfoB5vvpVNj@DhPJ63nA z?nvE@x)XJ`=}yyKraMV@k?tVfCAvd&cjyk#-Jd%@cX#gW+|{{rbJymM&0U#0GIwL{ zxZG{I({eZEPRd=BJ0*8X?vUICxdU?d;||B&jXN86FYa92wYW2JSK^MuU57gkcN^{~ z+)cQXaJS%2!Cito0e9b>H`i^B+uXJ}YID=(q|GgxQ#O|@d%5;-?cLggwfAbz)!wN+ zQ+uWMJnePb{Z#Lve#sf$=;GZB6~yjgzW9u)3KLhPsU!1Js5i__E7Ad*aNZmVb8+{Fwug)Kxzd3(w{?`1d`5W^m<}b{j zmcJ~2SpK5?LHT>~hve_bpOL>Ge?IV_ zEjK|+RbOdyeTR{orLVce&DY`U_5TS46CEcpJ4H`fV{LbViGS;u2N4@3FF8X^S7B>)e~FWwr?I=m&t#|&_yph= T5Eh6J@(7!QYN6)H5B9+i)nP+8 diff --git a/fixture/18/3/10.0 b/fixture/18/3/10.0 deleted file mode 100644 index b3280121df5a6b0d5fe4f85dab269f47a4d25daf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 726 zcmWl~Yivtl7y#g7#%hvI*w`>;aos|9W2=LAa~;FR%yp&PGG?gLer=E72O@btw`LJy zm%3jf$Wjr}Gndyo(Xe(Yi^Z~5h&b-Y{E$gx34T0zq6XAOd!hy5zpf}jh!+Im(Z8qO zHT0W31Ft3su|*GJ$CT_Th`QL5;EJTVzNbC?9_zOxpcgJ3HGcioBAga32qb_&7;^HR z_}Iy7@PLzV!+a;N!VD+hg1rvD89ffZ5idCSS~NR&8KyY+DjeqED==i|%dyVR3-N%R zFT{L1Ux0$01NJg5qKEO>C^0?@&5UPZ3gemh*2bq}$i}B(osCb$5*we0`8J+{f{l;I z$5uWDJyt#nB`Y6^8CE_VQ>?mS_*>G&p;yuk#p{wT25ThU-~nU-3+sHN?&m8zkd zDxiYQV4`v;r}eO&*1;Mog;FYpVp<7Bv<#NfVpvRzU_KQ<0nLTEGzW~72YHkOIg||< zWB>!DLpr6w6q*c^DHT#_0*s?%NTwu6qC`lbc!(#>-~VQJTXCDAt=MPq#rX`aajie{ z*=w~qxS?s^tdBdJyWO{yO7=wE8~oYWGjqufOPBkWQpRqmtAj@xht7QORVLsxPp;p*;($XFMN!N}0k6RaYRX z;ZNqrs%F3Ygfg1ts2c;f>o;XCskA(Cb7LG!R`UX7^_MepD_pJNL&iZYS{)o1qLVwd zE0IO_OPy!F>AOuH(JYfDMItSC9DQFpUYTx+rSes+Fyd%=Z@>51^)hlrTq+;a;vkYsazD79MNW#7}re5d)658scG^_o7M?N)?RBbbN;_6!qJrvp8%i$pa6FS?grcmnBz9LZBEp+)Lf}KQgfZ=IL&REqck^ZPSV_>IYo1c<^;_JngcYK zXAaNYojEvjZ|2<0otZN;S7y%3T$ed6b5-W3%uShNGPh(-$=r}RA#*|Ibj;u%SbuDe-xvhHHtsk%#bhw3iW9jLoccbM)j-C4SObm!=<(Vd~YLU)Aj`rPrk z+jB?fZqA*YyES)e?$X?exeId#<}S+}mb)u=Q0|`GIk`J>XXLKPosYX7cRcQD+|jt3 zamV6r#hr?~5qBc)Lfm<{>u|^6uEHIKy9sv;?iSoB2z%Q0vh88pi?#=C@7W%*y<>aE z_I~a8+UvDvYp>QGt-V%ztoByzk=h%zCu(ogo~FG_dy@7d?Lpd0w1;T#&>o<@KYM=m z?(EswtFz~3ugxBty)t`b_QvdS+1s+GWpB!!l)Wf>O7@cMA=wME2W0QZ9*(^mdp7o7 z?77%$v1ekh#2$$~ZV$xXhdm5?7xpadJ=k-w*I@6!o`Jmr0e{v0sQpd*WA?Y~PubtF zKVg5t{&fB2`or}X>kro7t3OnKr~XX+efsnC*XhsFU!^}ve~tba{Vn<<^f%~F(BGav zJ%4%r56$11KQMn^{=EEM`Lps@<aJH3w@h)f}q1Q*)r^KFxWWyEJEMuF{;Nxkhu0<_gUbnj197 zXKv4&p1C=5a^~X9shLYNhh{F!9GJN;b6DoC%vqUxGUsHj$()h7B6CFMdd%^d+c8ID zZpNI9xfOFN=2Fawm(19*uRB|JweD!$wYp<?i$=N2z%Z3xb1D*qqa9~PukwHJ!N~z z_Jr*P+XJ?jYY*4ntvy(Kul8K+o!T?CS8C7GUZ*`ydzJPm?M>Qaw6|za(cYjvL3@Gr z^z7x?!?PD>56<43Jv4h~_RQ>k+4Hj3WzWi9l|3qZP4<}VE!iWoH)K!9-i|#TdpY)G z?8VrFv6o^G#omd%Z_nH7_B8Be*u$_FVGqLIgS`ZM2=)#Hf7t%6{aO2a_UG)c*`Kk$ zVt>T`dj0YG+x185Z`Plzzg2&#{!;yk`U~|3>MzqDroT&nkp3S1Ir=;FXXvlcpP#=z ze|-Mx{L%TF^T+0I&7YdTF@IwI!u)CZ%kqciFUlX3zbAi4{*L?^`TOzbXk! 
zQ(b3qdxejgqpi2W%hliM^Zf@Z5gR2hIYUiXVQY1NiIbhDvAf03+2if^WT+7M0N@u8 R7Jv^xsHM~pI`Yna-~+vPL{%%zz_ zGZ$tK%-okbEOS@ptjs-`b28Ut&d6MmIU;jC=6KBQn4>W_V@}50ia8Z?DdxI4ZtiW) z!(4|s4s#XeD9lZmV=%X1PQl!OIRSG4e#`b7w%@J=cgOCG-4(n0b?57@*PX4qT6eVW zTHUd_TXje3Zq%KqyG?hR?lRp;x{GuN=`PV7qPs(PfbRa>`MJAuXXmcYotwKhcWmy; z+>yB(bI0Xw%bk|HDR)xtqTDIDOLB+gF325_yB~Kr?rz-KxO;Ku;;zM=iMtYa-<^oN z5O*N%GTdRfyKo2L?!lddy90Ly?h4Fxo8vaOZI0U9v^i-3d&2gD?E%}%wTEl()*h_A zS9`AZPVJf6E4AlouhSl+a8I% z5qli=HtcEGo3JNgFT$RJy##v*_5$ny*n4;F&e~nIyJvUK?wSREz5aOp?fRqjH|tN< z->N@Vf2saN{e}7i^_S@n)8C~(NPmz19Q_^oGxS&J&(B|+e#$Sy;8hwb$C#UTd#)nf-s3A`7w)pa7o$p8#{v=AO+tn?p8tY|hx+uQ^|Hz29_dF?VCm#@vfJ7jrG@L`yuDe`!xb9-z z!Mb~OhwARsovFJ|cb@J#-C4S;bVuo~(H*0^MR$bm2Hgp|+jFPqF3+8uyEu1n?$X?$ zxjSR_?0YIk{_c$KI|+9Y?jYPHxI=Jv;10muxo>mW=B~|I3wy@)itQ2G>$S&gZ`U5Jy;*y* z_Eznw+Do-3YA@6tsJ%>knD#F1LE3w?=V^#f*59i? zSAVVkO#PMmBlXwmkJI0#KT3a-{v`b^`cw3m=ugmJpg%x=dH(SH-T8y__vX*d-{B`-`@>k`L%HNbfCVxx*l>80(6Y>}2Psd-5KOBEC{$TvQ_(Sn`;?KmNh`$hj sApSD^Vfef72jTC*pM$>xe+K>v?0wtww%2XX+FrFiYJ1N1n(Z+Q0F_OLb^rhX diff --git a/fixture/18/3/11.2 b/fixture/18/3/11.2 deleted file mode 100644 index 81296ea26a2160cb9d68f3c4aa05240356dbbd9c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 710 zcmV;%0y+Hx0gwc+1ONcA1ONcW0ssIM000240ssIgwJ-f(umdF(018Wd3K~Fq(Ew@Q z*7jQ4+H38#uC>?NcK^Sy)@J4dpa7o$pa65z=A_L&3T&ZG{Ugo;YS(&RcM`fF{fiL$DEA07;`Y@Qp};4J25w6 zPQ+Y@ISq3e<}l1fn1e9)U=G3DfjI+nFY)aX-U4^d?wZ{(yEArI?2g!7uRC6MyY6V+ z&AO9yx9U#SU8*}#ccJb;-DSGNba&|v(%qvwM|X$r4BZvF^K;kdj?Z14J34oB?%3R| zxl?mD=1$CAm^&?ZS?;jhMY)4=_v8-A-H|&ZcR%iY-1WG#aaZGx#$Ag$7I!P|x;yT! z#GQw`4tE^xD%?@Hn{dbAZo!>`y8(9s?gGqho6|OzEqlrKknJ7Y1Ge{T&)43qJzIOV z_FV0?+GDj>YLC?3s69@5oAxyAP1=*R7imw?UZOojdx7=hIH^r@u>omi{XJIr?k#$LO!nAECcNe|-M-{OS3d^C#yo&YzmU zG=FIR!u)~x`|^k7@5-N*zbAiA{+j$5`782Ai0BCiDq5uE@ diff --git a/fixture/18/3/11.3 b/fixture/18/3/11.3 deleted file mode 100644 index ec7414359330d1aadb36714efdfd39b79b1995e7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 163 zcmZQ#oWQbyg@FNt7c($0hyZan5NmAJ|C_LZnU{rO3LB#~1GlKWrm?MiV03DJWpnTJ zg{!yjKY97?^N+t-b@(Nfb-g{FR&c9=~||>HD9|dc5L_+NSoN!LjLu)vf(A7q8uZ@buODDJukiFgy?!5NqH! 
PSf+R-$m&y2-2-L-3=%;% diff --git a/fixture/18/3/12.0 b/fixture/18/3/12.0 deleted file mode 100644 index 167f799aeae0006f550e10a7a0b23afa897878aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 714 zcmV;*0yX^t0gwc+1ONcA1ONca0ssIM000280ssIgwJ-f(umeRE018Wt3LQ{5K?AYE zF~9DP-71dlD#q;!$F5HAy=PW2UaSY90H6S$0CTqHYR%D_b2Zm$j@4YLIZ|_@<~Yr5 zn$t8lX-?8yq&Y=%iRKW^1)2ji_h$~z+?_c)b8qI{%(a;_GgoGg%v_f_E^}MvsLV~7 zlQOqtPRU%7IU#dF=77xQn8Pu5V-CjLi#Zo_-<&ts&5f87F&AP^!(4_r4093YAk00O zLojz>&cNK?y8XuOw`#vp`)%59%zj(;o3h`A{U+?UU;%fr?qJ=$x=VG3>h9DXsJl;h zp6)K)S-Puq=jg7{9izKKcZBW+-SN5GbEoHS&YhgQICpC9(%hlB3v&nN?#msPyDN8A z?w;H^xodJ~=;$@4}viy$5>^ z_8RON*ekF{;EvnfwmWTi)9$3*MY~gWm+TJNU9dY~cfam(-Ql{s6@RDxO#PMm^Yqv0 zkJDeJKT3a-{uuo&`cw2b=ugmJpg%o-dH(SH#rcEt_vR1H-k`L z%3qT|CVxx*i2M!t6Y{s?Psd-5KN){9{$Tv2_(Spc{gwD5@i*d+!{3HK4Sy5YAA wQ}CDI55Zr6KLCICuI*XdtG4HCuh|~6y<&UB_J-~8+S|3KYme66tUXx)0A{9&i~s-t diff --git a/fixture/18/3/12.1 b/fixture/18/3/12.1 deleted file mode 100644 index 7e72cbb95af02e1f4e8a4e1f5f5d0f962faa3bd0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 712 zcmV;(0yq5v0gwc+1ONcA1ONcY0ssIM000260ssIgwJ-f(umd#}018Wl3K>9p(E#gO znr&^bwXMC@Uh7(WqW@Rc+7tPs0H6S#0CT(Mbj{_On>8nEF4ml?xm0tg=0eSZn)@_| zY3|aTrMX9Qj^-N88Ja6JM`*6k9G|&8b9CnB%*mNsGpA-Q&77FIFmqt$vdm$byD|r5 z?#Y~!xg&E%=8DYunCmgeW3I*=jky_fEaq0sshBe{S7MIDT!%Rha~tL;%uSe+Ft=b% z!CZnl0doQ70GvYGDYBgc+bN=*0@^8_odVe@j-A3-ced_o-O;*pb=T^S)m^DOQg@^7 zINfc!({wlKPSRbZJ4JVi?hxGtx&w6g=MK-^ojW^sZ|>aOwYf8MSLTk)U6(s9cU$hL z+)cTYa<}A8$z75=A$LLUfZXM{!*O@x4#wS!I~RA{U5YytcOmXT+^0eAvR7n}$lj1W9(z0XbnMO8ld%_LPsQH1=k1Z$8?h&1Z^NF3y$pL2_9E;- z*h{d7VDG>lfW3F$?y%imyR&xp?9SO;vpZvV#qNmR`MT?M$18uV{#N~|`Wy8p>Mzuv zroT*onEoRDLHc|2hv@InpP|1$e}4Y@{Mq@d^GD~e%^#b;HGgFO#{7x-+w!O7FUy~l zzbJoD{*wG5`8)ClcEB6R^i^Z`+=>y=i;W_M+`6+e@~GY%kazu)SYON7?| diff --git a/fixture/18/3/12.2 b/fixture/18/3/12.2 deleted file mode 100644 index 6c0796123eef46cbe82616f5f81fb0234fbe948b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 714 zcmV;*0yX^t0gwc+1ONcA1ONca0ssIM000280ssIgwJ-f(umeRE018Wt3LQ{5K?68$ zH}iG7!m(S$v0cTuU78nEF4ml?xm0tg=0eSZ zn)@_|Y3|aTrMX9Qj^-N88Ja6JM`*6k9G|&8b9CnB%*mNsGpA-Q&77FIFmqt$vdm$b zyD|r5?#Y~!xg&E%=8DYunCmgeW3I*=jky_fEaq0sshE44J27Ws?!%mixejv{<|@ol zm}@Y{U~a)2fw=*50)Er>Tejb@{TA&vXum!C4cTwUelr$#w(e@((YkYW*XoYdU8y@# zccbn&-EF$lbT{cv(p{uGMR$qr5Zwj319bQ24$s}4J3Du8?%dq9xifQD=8nu=mpd+Z zTkfdbO}UeDx8zRAU6MN?cR}ue+~v5#ad+bm#@&lM7kA&Cch}vGxD#;~;!eX|hC2** z5$+(|J-9<~ci_&z-MMaa+~%syQJb4K$82udoU*xLbHe6=1$(jfVC}uyL$!Bm&(z+h zJx_a`_AKpH+M~4BXphm}qCG-;gZ2dN?b*|_muFATUYtERdujI2?48*Iv-f4s%ifhe zD|=P;oa{B(W3pFdkI3GTJsx{I_H^vc*psmrV^76iiaiv2CiY6~k=X07$6;^79)-OL zdlL2*>?znwuqR+Iz#f1*Yk| w2Y(L!8vGgfEAU5PkK5k1J#Bl__N47a+f%leY!BI9usvXVzxHzN;o7?u0ROd!{Qv*} diff --git a/fixture/18/3/12.3 b/fixture/18/3/12.3 deleted file mode 100644 index 55fe15573f2b84ef67dad4be285dde7e35cedc75..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yY70gwc+1ONcA1ONb~0000K0001u0000ewJ-f(umc?g0Hg-#E&%iW2QLvD zB`-NcO;=%Sb$^MIou{$8#n0K}?e_sN3l$$JGd)L9TV-!~gN>J=tF^z$)7|Is`vox% z86zz>K}%I%X>)ytk(;Hjxx>xZ;p_GP2{IEMCowxkPg!GacY%wQpQ*FG$7HAw_yph= T5Eh6JW(S*t%^|hOoA<#FBmG06 diff --git a/fixture/18/3/13.0 b/fixture/18/3/13.0 deleted file mode 100644 index f6026d0896d8b4b19bdc4f5f75fc4bdb32059e58..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 716 zcmV;-0yF&r0gwc+1ONcA1ONcc0ssIM0002A0ssIgwJ-f(ume>U018W#3K~Fq(EyEF 
zdo9kkw%5AWUTa&M=l@e{?X|!MrU0J+p8#{B=0eSZn$t9wX%5p|q&Y})kLD1~9hx&V z_h-(}T%S2Rb9LtE%(a_nyw9I9hlQI`&4$54TIV5vO=77xonDa4r zW6s80jX4){E#|&C6muu$Ow4_l^Dx(8&ca-UISO+P<`~Q^m?JPZU{1g+xXohQEVRvH z+AO5aBHApJ%_7+>kj*04EP&18*DP?&;?^u|&EnN8T+O0Ycb)Dy-EF$7bVuoK(jB9_ zMR$ts2Hgp|3v{RFF3%mFyEu1n?%v#?xjS=b=I+a#m%A=^R_?0YQMqe!$K-Cw9g({s zcS7!V-08T>aVO(0#vP117I!P|RNRfY6LA;fPQzV>I}CRb?jYPfxI=Jv;LgC^xo>mc z=B~|Io2xeGY_8cHv$8<66Y>}2 z56EARKOBEI{$TvQ_;c~6;xEM?ioXzlApSo5Vfef7XW{R`pM$>!e+K>v{1Mpmw%2Ws y+g`OjYJ1c6nC&gwQ?@s3PuO0tJzaab_HgaR+Jm+CY7f=ksXbGBp!PoPc?tkM=!7Q# diff --git a/fixture/18/3/13.1 b/fixture/18/3/13.1 deleted file mode 100644 index 96bfab39a7d0cf848b2ff9c6f6ceba465954d40b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 717 zcmV;;0y6yq0gwc+1ONcA1ONcd0ssIM0002B0ssIgwJ-f(umfEc018W(3K~Fq(Ev}A zwJl4xw%6KgU2Ctk^Z$!w?KRd1rvRS-pa65F=19$rn(H*jX>QXTrMXFSlI9l8DVj?( zCulCv9H6;8b9my&t(a3Wmts!DT!=Xka~b9^%w3p+F!x~2!Q6p419Jst@og5~X3=dH z)@D&{7Sv|ZY!=LBv1}H^W-)9Q!e+5+7P@AUYZkI*5o;E(0(YS9KHYh`!*qA)&eGkZ zJ4bho?hM@(x+8Sg=Z??ao;x~sbMEBat+`Wkm*!5)U6?yCcUkVR++DeYa`)uU$=#7V zBX>paeBAZ8<8fExj>g@LJMXT$rvdv+ei#7*s?%5o&xnpz2=6=ojn(H-ZYp&KDt+`fntmak)dzmOU+dQ}(3nMcGrbmt+sg zUXVQ?dq4JY?A_S2vG-!n#a@fO6niN4PV9l$`>^L>@4}viy$X8{_8ROl*ekF{U~j-( zw>xfk+wQ2{O}mqJx9m>YU9vl2cfsy}-Q~K&b$9Cy*4?W+S9hrHPTiRbf0q6#{ZaaB z^vCFL(I26|L4Sh&_WbGj%kwAaFU}vFzchbn{?7b?`TO$c(nV{K@!>@yGqK_*?Nu;%~&Ch`$Yg8vZi;N%)KK2jMTlAA-LFe*pgO zecQvfcWuww-m^Vtd(HNY?G@W2w%2Qq*WRu@T6?qhWbLimQ?-|BZ`7Wsy-)!FJM@FZ diff --git a/fixture/18/3/13.2 b/fixture/18/3/13.2 deleted file mode 100644 index e2c88d523824acd2cb34c93bcd78bd48282da607..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 715 zcmV;+0yO;s0gwc+1ONcA1ONcb0ssIM000290ssIgwJ-f(umepM018Wx3LQ{5K?ABd zwyWM{7RPoK<93B(w~FVz7r?k(3DX0j0H6S$0CT41O3jg)^EB6Kj?-MFIZAVr<`~T_ zno~44Xim^vpgBErdFJrU#hHUM_ht^w+?hEub6@7X%ypTwGFN4e%3PB>?=3vZqbKKn9T!}dnb0g+B%x##{FgIaN!d!$o1#=1J5X=Rb12Ffu zYrk3ht=ezSerxs{v)_vSM(np?zw!EQ*KfLhoAsNl-(m%Kq3%H4eY(qZhw1Lp9i+QQ zcaH83-5I(obm!-;&mEt;I(KyL=G?KlTXU!8Zp@vSyD)cJ?y}rrxr=fKDY1*5#CuuLz zo}#@(dx-V|?E%{RvxjHz&YqpUH+ydO+U%LxE3-#tuge~ny)AoG_NMGf*;}%wWG~5{ zki8&#K=yL%;n=&e2V?KWo{PP=y%l>Z_D1Z9*bA|zVK2iThP?=T5cVGIA=o>xXJGGL zw>xfk)$XX>O}k@ux9m>Y-LN}hcfsy--Q~K&br=MX{ds@g--$mHe9p(E#>Z zd#!6}wza+1w)R?T|G%)-UT6BK0G|M#0CSDz7|ku3D>O%FZqOW`xjl1w=H|@FnTs>0 zW-iSfnz=A@VCKHeVVS!!XJzimoRhgGb4KQh%n_OEF~?(W#~h8h8FMn`T+FqYV=-4^ zj>O!EISz9h<}}Prn3FIUVNStZf;j|p0pnA@VL%(kvtc+J#<5`-8%D8V z_!>s9VelG8tzpm_#;jqm8pf(&s2awoVTc+=s9|UtMy6q43U`R^4&52L19bQ2&d=SQ zJ3DuE?%dq9xnpxz=8nwWm^&_aTkf>nO}UeD7v)aLU6MN_cR}ue-2J%2ad+d+#$9*E z-NCp^afjmW#2tvc4|g8!F5FqTt8nMwuE8CHy8?Fv?gq?to8vaOZI0U9v^i;W%jT5L zC7TmA7ibGPPT&Apm)HFs*x)Lf}KPjj8-IL%d>qck@udxQ1_?FHJ?vzKQN z&t9B8ID2pQ(CnSrGqd+)&&ytyJu7=v_NeSN*<-S|WRJ++kUb%LJN9(!<=B(47h~_+ zW3jhlPsQGdJrR2$_B8Be*u$_FVGqLIgFOU$2lfo?z590O?e5y0wYzF}&hDDsF}o{v zN9=Cc9k07xce?Io-O0L(b*Jhs)g7w4P z@7Nx&y3K~Fq(Ewyy zd#$}Vy4$+eUTa$m)c-HC*0qlhsQ{kxf*jc=D0Z+b1&vl z%$=AsG52B4!(4|s3v(6bD9kmOV=%X1j=%w3r~FLzz;xZG8_qjERpj>+AUJ0*8R?u6V0xzll%;||AN zjJxm7yJK;;;!efgh&vH?A?`HXWw^s|7vT=V-Ge&>cL(ka%zc~lHrH*=+FZ3cYIDuz zn9VJlBQ`f|PT1V8IbCzP=48#qnu9f$Y7W)hsX0({pXNNxU7E8rS1Egi_6Y3_+T*jg zXHU=GoIN>varV^grP)KX7iJI4-j_Wrdsp_X>^<3Yve#tK$X<~>B6~gdc$* zPsZMhJr{c|_Dt-R*dwvmVUNS!hCK><6ZRzRE!b1Amtar8UVuFSciHZ+-CetbcK7Vg z+1;@_V|T^weBJfB<8@c-j@I3*J63nA?o{24x)XI5>Q2*LraMe`lI|kiK?;9>{{H;= z`MdLH=daG6o4+=HZ2rppk@*|*$K`L!pO(KVe^UOU{3-cM@`vOv$RCiuAAdOhZv5H! 
z^ZvR&?oY*Eia!*8A^t%8efY!hcj3>%--AB~e+~W&{1x~ku-9#m+upW4YJ1c6r0p%+ zQ?{3EPuO0tJz#se_Hgap+Jm+CYR}c)sXbGBrS?4Sb=u>!S80#Z-lV-odyMuL1pqw% Bc!dA} diff --git a/fixture/18/3/14.2 b/fixture/18/3/14.2 deleted file mode 100644 index 47c658888550945c3b05af5aabefefa09b8e6aab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 717 zcmV;;0y6yq0gwc+1ONcA1ONcd0ssIM0002B0ssIgwJ-f(umfEc018W(3LQ{5K?5R= z?JCCYX1;D$ICiTzwrhItJ+q2&tEmU20G|M#0CSM$9?dzLLo|12&d}VSIX`oK=IqSX znWHn;W{%C=nmICaW9G!nZJEbF+Enfk5NZ=`TxlePL<}S@y3VVk33hfcv>$Asa zZ_gf`y*Ybw_SWpF*-Nu0W-rVhn7u4}SoW^$LD_q<=Vb55o{_yGdp`Dh?D5#Eu}5QX z#$LC_?Y-@x*gLUjV(-JAhrJGa7WOLaQP^v+$6#;49)Z09djjsX-DSJOb{Fjq+TF7| zWOv8zjNSdZ^L5wj&emP6J6dQ2<%raMh{neHatNxF*^e}Vo0{r&mF z^LOXZ&fl9qH-ByZ%>0%4BlFkgkIUbdKPrDy{-pdZ`BU&C;_S5%2nJsw56}3(6J%eM@3#(iEXD(j5{ov`V_g{Yg%Wf$s zt*US7>>Hk#U0UBcdG7L!yN{l~`S|U3Mk{Vnc}-(m_rU1X{L1Fu=_xA&ey}_c7ZB^< RH)Fc$bar9XiSYji`2p?fL$&|_ diff --git a/fixture/18/3/15.0 b/fixture/18/3/15.0 deleted file mode 100644 index 3487ff45ac0de9ce33214823a74ee129e373b406..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 725 zcmV;`0xJCi0gwc+1ONcA1ONcl0ssIM0002J0ssIgwJ-f(umhbH018XE3Lj83KmvU2 zul=>Z?yvp2b!+GE{9S(n-?_%`{GESTJFNhn0Gj~g>Wrf^ZqB$iMr4#>D4<9v*}G0w)g8spx^$ru-7oQiQN#-SJ&VjPHZ zAI4!AcVV1`aSz5h7}sE&fpG=K5%`VUZ`*#;_S>}Ir2Q7{H)X#i`wiJ|!F~hw+pphn z{dVg&Tfe>f&DC$Melzu3sozNb*6BA+zis-B(r=S~ll0r7-xU3p=r=*X1^Nw8KRCZP zKQ})#zcW8Gzb`*8zb-#3zbZc}za~E>za>8+zac*%za2jvzZ}2Zulv29i(iW$i(iQ! ziQkAHhu?;uhTnvrgkOZ8f?t9kf?t3ifZuV~##tLzZJe`l&BiesS8N=yal^*(8npT=PtcWIoZagWA18rNu?p>c)A5gONL9G`J}0(Wcf z)ZC@H6LS~l4$NJaJ1los?x5T~xpQ)No z+?lxhaOdH!!<~h@3U?Il8r(6sTX09G2=Wv|6aqgYNoqIdS;@paJD$b2K zC*oX)a~jTNIEUd}gmVziJvfKp+<|ij&b{k)$L+4#9ksh@cg*gV-6^{pb|>sE*qyGs zTz9zcV%@>Idv%BE?$n*ByH9tX?mFFBx~p_Y>8{Zoqq{|Sgzg623A)>Jr{^xu-JCl) HcX0v$C_Z3LQ{5K?706 zv0cTuU13hW8@DSQyHvgR9$Uq+owWz10H6S$0CRZe?#$VlgERML&duDJIWu!*=Df^x znd357Wsb_+lsP7IOXif!4Ve=%7i3PyT#h*$bKYDx$IZ2vV==d4j>O!EIT3Rk<}}P@ zn3FIUVGhDvf;j|p2j&3G{jJ+?)_$w@8@1n>{l@IKWxo;oZP;(Ze%tk%uHSO~ChNCY zzrp$~)o-YNJM|l=-#-23>9sbiro3Q>v6~9uErgWyBT*e?qJ-#xI=Mw;?Bg~ zhdU2<9qugLRk)*Y*Wix9-GVy;cLVMO%xRm;HivC4+8ngGXLHErj?EdH`!(lluGgHc zxmt6y=333Mnp-tTYHrk=sJTsZn&vXiNt%l^2Wc+R9HO~HbAaal%=rm>ZuZ*jvDquL zM`mx#9+$l>ds_CU>`B>+vZrJ($sUruAbUXee(d4cyRm0ukK23Old-pAPsLt}JrR2$ z_CV}q*u$`QVGqLIgFOd(2lfo?6}anm$L((09ksh@chc^b-6^|Eb|>sE*d4IDTz9zc zZr#DUdv)jP?$n*ByHa2A^;qq{|SitYy83Azh(x93jJU7mkw{?PoL z`2+L!<?tk{LT3L{=B~ze=h!7{F(SG@kipX z!ykvg4Sy8=Cj3eGTkxmgFTtOHzW{##_OR_;+q1U!Y|q(Vvpr*b#rBBp_1fdLw`-5q z-mE=Yd#mJwJPW_W11C*{ichCjhyO BiLw9y diff --git a/fixture/18/3/15.2 b/fixture/18/3/15.2 deleted file mode 100644 index f359f8e4e09356a35fa6a0dbbe2d3baf2c290e64..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 720 zcmV;>0x$gn0gwc+1ONcA1ONcg0ssIM0002E0ssIgwJ-f(umg1!018W_3K>9p(Ewyy zd#%0JrD@*QUTa%A|DRfGFS>oJ0H6S#0CRoj_{{B@t20MuZq6K=xixcY=Els4nF}+g zWiHDcmboZ%Q0AV@A(=ZeXJqcjoR7I4b2jE`%zbk(=3dOXm^(3NVy?uThq(@O9Of#_ zQJ9-B$6#*3oPxOla{}f9Ok>+Lv`r)1G^9-<+BBd|BiS^NP2<=!fKB7qG<;3t)--HQ zqt-NBO{3K`SWTnUG)PTj)HFCvW79M=O=HqDBuyjIG!#uE(KHYRcX;mZ+}XK_GXJ0^Ea?v&gOxf601-cOdRE++n!8a0lV;!JUJ<19t}Q3e0_*^ETIQ&e~kHIcjsw=9tYbn#o)vt-D!wtnOCbsk$3=C+aTLou<1?cbM)X-9fs0bcg8f(4C<>KzD!c`~-h){@VPp z`785B=5Nd&m%lB4TK=Z|N%@QNr{pimACkWye?b0z{Neb!@n_>t#$Sv-7=J1LQ2d?v z1M&Ca&%@t^KMQ{q{v7-@_+#)_;E%xHfW2;e-1fHZQQMogCv9)pp0d4Ud&2gD?E%}% zwTEl()*h_AS9`AZPVJf6E4AlouhSlGS;uJ`o!wFF8X^S7B>)e`Kf-_yph= T5Eh6JNQccKwXiwJ@A1J8F2Y4P diff --git a/fixture/18/3/16.0 b/fixture/18/3/16.0 deleted file mode 
100644 index d62131ba9089347358c0882810c6cae7c7fd3c53..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 719 zcmV;=0x3LHQp}OJMXT$or)qB0oT#}_bDHKd&0(60 zGzV$!(Hx?=Lvx1a{>=HA>oaF(uFf2txi)ib=GFvzTlTc;c*Pv4>;t#@^c=jlCIrGWJ&Nsn|=gCt@$e9*DgRdl>dE>_OOju;*a!z@CA<0(af+ zxZQ2Lqjop#PTJkFJ7sst?u6Y1y90KY>kil5tvgtEukKvkow_r1SL)8wU8g%vca`oa z-A%eqPHaroQtr{QnHpM<{%e+vE*{2}-Y@CV@U z-nBhzd)4-w?KRtDwpVPA*xs-`UVFRtbnVUBleHIXPt{(kJyd(4_CW1@+QYPWY0uK$ zqdiA^jrI)f71|@T*JqE<-kv==dvo^W?5){TvzKOX%$}INFaf_s`3=f%Pkux4+mYXl z{PyEFAHVhZ?d`YkH}5wYzs2|s#&0QpL-E^*-$4BK;WrPzUHHwyZxw!X@LPl582nb? zHv+#6_}liU?JwJ(w7+P7(EgJBA^SV_2kh_HpRd1Lf42T={ki&U^~dV3)E}w8QGcBN zHvMV(oAf8?FVdf)zeInC{sR30`up>T=kLy+oxeAKZvNW*nfWX8N9NDVUza~F0RY1( Bm5l%Z diff --git a/fixture/18/3/16.1 b/fixture/18/3/16.1 deleted file mode 100644 index a0b5d656ff8759cbb1e779de6a9c3742952e670f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 729 zcmV;~0w(_ z<*v$|le;E&Ozw)@5xE<3$K!6tosK&jcQx*4+_ku4akt`*#NCKH5qBHzG~8vllW-T| z4#Hi6I|O$J?f~39_w5|EbJxyUJNN9IvvbYP89P_(9Im03fv(Cvnx9Xg# zbE(dWIv463sB@XlVLEr|9HeuP&N(`F=$xT*h0ggo*XJCcb9K(qIXCAVn{#WwdW(jNgl&i{FW#iC>AIhhK*uhhK#sh2Ml9 zgWrOmg5Q9jfM0;SZFk!4vfW9$i*^U?F4-NjyJL62?tb0*y1R8}>#o+FtGiZrtnNzP zk-8go$LVgf(=KX-WU?%dhAdvoXJuFc(0^g|e z4JzN5@(m{6Sn>@e-x%@@A>Rn{4ISUe@eLf`pz)0v-;nW*72ikj|6eR?ujR}Kp8%f#pa6Gg?#$ekx%+bG<*v(}mAfi;RPLJGF}YiE zN91nEoshd7cRKEJ+|{_FaW~_R#odZK6?Y@6)80Cu=U&oT|B0bExJ*&4HTxG>2*K z(wwEaM{|zm8qFD+D>O%FuFo8wxjl1q=H|@F343Dp!t8h`$ei82&E&S@?VK=islwpMk#ue+2ft z?RDGZwpVSB+TOH1W_!!_lYR}Z(r#(-5o%Ss4 zRobJp*JzK?-l9E1dxQ1_?d{pqvzKR2&R(27ID2aL((IuL-@x*XE8np4jVIr5@{K0n zF!GHe-yrgh9^c^cjUC_MzG34VHNN5E8!f)U;u|HtLE;-DzQN%e8@{388xy`E;TsXY zq2L<{zJcHy0lop?8~?KXVf(xG2kr0KpR>PXf5!fb{rURq^~dY4)*r3ES%0klR{g2^ z8}%pZFVvr=zf6Ca{v!QB`g`<;=>-GN$K@%M(F*`+1Sz~Q?fs2)&sk6Pu(c9(k`2#`?7a=P(KS@(vXJn`l_yph= T5Eh6J{6e*`IoKS;i}S$`71~7m diff --git a/fixture/18/3/17.0 b/fixture/18/3/17.0 deleted file mode 100644 index 272727a3f933dfc9641d53191cc6d72d46a51c10..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 717 zcmV;;0y6yq0gwc+1ONcA1ONcd0ssIM0002B0ssIgwJ-f(umfEc018W(3LQ{5K?AYE zv0KHlUB$Rv;h0}{FY>*&%qqt1s=5cB0H6S$0C!66lH4J=6LJ^i4#?fx-FN5R)wrW^ zH{*`Q-HJOEcO&jZ+=aN)aF^i@!(D_s2zL+e5ZoQOGjMmV+Z?yKYID@)rp+;%TQ;X` zZrGf#xnOg;=5o#9nu|3DYwpz?s<~5hrsh7)d7A4qXKAj|9HqHNbByK|%@LX#G$&|o z&zzpQJacm9;>^LBOEZUN?#vvRxi52G=B~_HnX3|eMfQm74cY6l$765DUXDE+dpGuA z?7i4?v3Fw6#9oO#4|^TScT(=6+(8L{K>mLG`S|nxxU7i~}3Ua~!8 zd%^aA?fu%rwRdaJ*50c~-1WvR7q~%HEW{CVNcwmIS};_)W)eZ@=C6&Bkvv zesl3#i{DuMR^m4jzm52f!*3gY)9~Ab-z5AN;Wq`pCHM`&ZvlP-@Y}y@f7br0{W<$< z_Q&k6*dMXKVSl{-cKzx4oAoE_FV>%`zf^yy{zCnM`up^U>F?5?rN2jij{X|`8Tu>q zN9eE5AD_QHe{}xl{K@%S^QY!7&7YXRFn?hFvixEByYdI+@5!H&KO}!g{)_|w4GNN) diff --git a/fixture/18/3/17.1 b/fixture/18/3/17.1 deleted file mode 100644 index a53f2567b2dd95b0a4669c2a9caf352ebc76560d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 718 zcmV;<0x|sp0gwc+1ONcA1ONce0ssIM0002C0ssIgwJ-f(umfck018W-3K>9p(E#gO zd#!Elwf0(oS~L2f0G|M#0C!LBoZL0JJ91~_uE?E_yB>Eu?r_}QxU+Hh z;?Bigi#ro{CGJSvb-3ejx8aV$-Gn;{cMI+m+$FdZa2MbXz?`=6cQ9nyWQOYp&HCtGQKkq~=D=iJIFqr)e(JoTRx(bCBi|%^{jQGzVzz z&zzsRJ9BpC>dd*BYct1YuFM>nxiNEG=C;gf342QRlI$Vb6S5a%56E7($L;CZo3STj zFUFpVy%c*W_CoA|*!!@DVei78g}n!R4)z-C8Q3eZN8rxeUAH@Kch&Bw-A%h=cDL+K z+1;=^VRymqblv5;!*v(y4%XePJ5+b4?o8c%y7P3`>CVz!r8`P@jqVuTExIFgH|S2# z-JUx=cX{sQ+{L+rbC>20&E1(hFn3?>yxd*6vvOA@e?|U?{0;fz@weme`?K*^C>@9^;s9q-8T4jk`@ z@eUa8c<~Mt?>O-e6Yu!&4iE3>@D2;_sPGO7?`ZH22Jcw#4g&8O@D2fg-2S%xY5SY@ 
zC+#oVpR&JXf5`rV{Q>*?^@r>4)}O7vSAVYlTK$>&EA>a}uhSo=zfFIX{wDoN`djp; z=r7Tqpua$Wfd2CQ;rYAs2j}n2pPRoke`fy5{CWB7^2g<`${&@#DSu7=nEWjX0JU_K AGynhq diff --git a/fixture/18/3/17.2 b/fixture/18/3/17.2 deleted file mode 100644 index 1a9ff8363f264f2b5994e5ae32fc1c86ab64545d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 718 zcmV;<0x|sp0gwc+1ONcA1ONce0ssIM0002C0ssIgwJ-f(umfck018W-3LQ{5K?68$ zS2%X7IJT=8x10I8E%3cZ!fqAERzVM-0GEu?zp=g zcR221+`+heafjmW#GQ$|4|g8!I^0>ft8ho*uE8CHy9IXy?grcmnA0|wZ4TR9v^i*V z&*qTL9h);Y_iN7AT(3D>bG7DZ&9$0iHMeSx)ZC~!QFEK-G|gq2lQb7;4$@qrIYe`Z z<^aw8ne#JuXU@)CojEsiZRXg_m6;+aPZs=HHnrtUu7dAjR#XX&oe9i_WQcZ}{9-4VJQ zbSLO;&z+vTJa=;L;@rWxOLK?j?#vySyDxWM?ylTfxvLU?MgEBV4f*5ox8qO8ACA8p ze>VPJ{JHpR@n_<%#2<;j4u2f}HvCceoA4*$Z^55}zXX2*{sQ~~*u%DWZO_`?vpr{f z&GwA#726}W*K3d0-mX1bd$aar?XB8VwU=s7)Ly7PP_G{?0r~C6Z$5tWe(QeY zew*=|jNfAXrsB60zoGao#BU&e`|ulv-!A-S;kO6BIry!?Zw7uV@Ed_YZhzbUwEa!{ zllB+wPuX9xKV*Nw{($}c`or~i>(AETt3OwNt^Q2?mHH#~*XfVb-=;rGf0OJ= ztF^z$)7|Is`vpZ086zz>K}%I%X>)ytk(;Hjxx>xZ;p_GP2}TngCowxkPh_YN_yph= T5Eh6J)1l_DIj9!MFZjU^I$uSN diff --git a/fixture/18/3/18.0 b/fixture/18/3/18.0 deleted file mode 100644 index 3e10d186cb68756bbb87faf68267096b444865e1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 716 zcmV;-0yF&r0gwc+1ONcA1ONcc0ssIM0002A0ssIgwJ-f(ume>U018W#3K~Fq(EyEF zd#%0Jwf0)u+H36pw^>_ztBin|kcChk7mdARFv zXW_2G9fi9FcMR?p+!44Na3^4n+uXJ}ZFAG+q|HT}Q#O}u4%u9=Ibd_Y=5Wp3nzJ?c zYR=VMt2tA1rRGS@b(-Tew`q>j+@v{4bBpE_%_W)>G#6+N&|IE5Jac#E;LN?5b2E2l z&dgkyIWKcv=D5sNnWHi{Wsb?*k~t-FL*|6c1qpjJ_GawK*kiG`Vo$~1h&>T|A@(%v zW!S^87hw;=-h({^dk6Lm?7jPT=k4y=owd7ach2sb-7&i>c1P@P*d4FCU3a?fX5GoU zi*={!F4Y~XyHIza?mpdNy1R5|>F&{;qq|0ThVBa85xVPh$LDU(9i6*5cXIC5+^M-s zb0_95%pI7!EO%J$uG~SndvfRG?#P{yyCQc!?t0wu2!AjBT>Q28Gx1mAkHlYxKMsEz z{wVxS_>=Ir;7`F{f2B+KaRYX)n@Q05Ojb z^T05V3-horj|cN`FpmcFFffk-^C0l&?XTM(x4&wC)c&UZG5cHgr|fUopRm7Rf4cs1 z{o(qH^#|+k)gP+AQ-7xZKK*(6>-1;ouhJi-zeazI{ucca`Wy5o=x@)Tp1(YQa{l7{ y!TC$`hvx6hADF){e_sBs{8{;{^5^8Q$sd!yB7a2whWzpP`~JMY8-F(bY6JibkcpuH diff --git a/fixture/18/3/18.1 b/fixture/18/3/18.1 deleted file mode 100644 index 8a46a33236431bd50b1e3f99d1d150b31544f63e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 725 zcmV;`0xJCi0gwc+1ONcA1ONcl0ssIM0002J0ssIgwJ-f(umhbH018XE3LZc<&;SWh zEXtx(ibYwJO0g&lk-zsK-iTsR!iD(rrvRV;p8$6{?sDAWxRY@g;||7MiaQi{C+*cj3;$U4=UbcMa|s+!eSZa0hp;+c|FMwwV{~rOIYs9NofC8}&^bNl@|?qS zF3veP=iZz{bMDMJGv~gX^K!1sIVvPBF zuFf5uyE%7k?$+F?xf^pQ<}S>gmb)x>Sni_SLAiT!hve?aosqjAcRucV1b;C8Ui`WE zJMm}Yuf(5+zYc#K{wn-Y_?z&@;BUd7f9;dxcdz$tp?Md2;w5Mn<(H^3`Kzo4p{_Nq| zyR&C!@6Dc@y*7Jh_R8#$+3T{$WpB$KmAxr@QudbYDcMW1CuA?k9+16n&)e$)$E`R{ z#c?T)6LDOK<3Jpj;W!M(T{sTHaSx7jaNL383>;VBxVPi}b^EjSSM875U$Z}Ef6M-e z{SEsQ_P6U#*I%wbS%0zqVEv`~L-lv+57ghMKTm&`{w)1f`g8Qx=#SA~p+7=@gZ}vZ z?fKL5H|I~zUz|TRe`)^E{Dt`g^Y`Ts%ionhD}PV^ocuNUGxAsDkH}w-KOTQO{&4)= H__GlJgQ|)u diff --git a/fixture/18/3/18.2 b/fixture/18/3/18.2 deleted file mode 100644 index f4817a3ba3ac6bdc9d0d4cf0559220c0ac639604..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 714 zcmV;*0yX^t0gwc+1ONcA1ONca0ssIM000280ssIgwJ-f(umeRE018Wt3K>9p(E#>Z zd#!8jwYIg_+8pfvw=lA{wR!rU0H6S$0C(P9cgNk`xU+FrlQbJ*st%|V-cHs@^a*qpJsVspOcdd=~gt2IY! 
[GIT binary patch entries deleting the compressed chunk fixtures under
fixture/18/3/ and fixture/18/4/ (fixture/18/3/18.3 through fixture/18/4/9.3,
each a "deleted file mode 100644" entry followed by an unreadable base85
literal).]

diff --git a/fixture/18/5/.zarray b/fixture/18/5/.zarray
deleted file mode 100644
index fe1bded976..0000000000
--- a/fixture/18/5/.zarray
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-    "chunks": [
-        100,
-        3
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 2
-    },
-    "dtype": "

[GIT binary patch entries deleting the chunk fixtures under fixture/18/5/
(fixture/18/5/0.1 through fixture/18/5/14.0, in the same form).]
z^ZvR&?oY*Eia!*8A^t%8efY!hcj3>%--AB~e+~W&{1x~ku-9#m+upW4YJ1c6r0p%+ zQ?{3EPuO0tJz#se_Hgap+Jm+CYR}c)sXbGBrS?4Sb=u>!S80#Z-lV-odyMuL1pr7h Bc!>Z2 diff --git a/fixture/18/5/14.2 b/fixture/18/5/14.2 deleted file mode 100644 index 3e4cf65c39f2e88b448617586dd751321dfdd1e3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 717 zcmV;;0y6yq0h9!=1ONcA1ONcd0ssIM0002B0ssIgwJ-f(umfEc018W(3LQ{5K?5R= z?JCCYX1;D$ICiTzwrhItJ+q2&tEmU20G|M#0CSM$9?dzLLo|12&d}VSIX`oK=IqSX znWHn;W{%C=nmICaW9G!nZJEbF+Enfk5NZ=`TxlePL<}S@y3VVk33hfcv>$Asa zZ_gf`y*Ybw_SWpF*-Nu0W-rVhn7u4}SoW^$LD_q<=Vb55o{_yGdp`Dh?D5#Eu}5QX z#$LC_?Y-@x*gLUjV(-JAhrJGa7WOLaQP^v+$6#;49)Z09djjsX-DSJOb{Fjq+TF7| zWOv8zjNSdZ^L5wj&emP6J6dQ2<%raMh{neHatNxF*^e}Vo0{r&mF z^LOXZ&fl9qH-ByZ%>0%4BlFkgkIUbdKPrDy{-pdZ`BU&C;_S5%2nJsw56}3(6J%eM@3#(iEXD(j5{ov`V_g{Yg%Wf$s zt*US7>>Hk#U0UBcdG7L!yN{l~`S|U3Mk{Vnc}-(m_rU1X{L1Fu=_xA&ey}_c7ZB^< RH)Fc$bar9XiSYji`2p~DL%IL} diff --git a/fixture/18/5/15.0 b/fixture/18/5/15.0 deleted file mode 100644 index 31a27b4a3a717701cacdf5a5c5b260c6612a9a18..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 725 zcmV;`0xJCi0h9!=1ONcA1ONcl0ssIM0002J0ssIgwJ-f(umhbH018XE3Lj83KmvU2 zul=>Z?yvp2b!+GE{9S(n-?_%`{GESTJFNhn0Gj~g>Wrf^ZqB$iMr4#>D4<9v*}G0w)g8spx^$ru-7oQiQN#-SJ&VjPHZ zAI4!AcVV1`aSz5h7}sE&fpG=K5%`VUZ`*#;_S>}Ir2Q7{H)X#i`wiJ|!F~hw+pphn z{dVg&Tfe>f&DC$Melzu3sozNb*6BA+zis-B(r=S~ll0r7-xU3p=r=*X1^Nw8KRCZP zKQ})#zcW8Gzb`*8zb-#3zbZc}za~E>za>8+zac*%za2jvzZ}2Zulv29i(iW$i(iQ! ziQkAHhu?;uhTnvrgkOZ8f?t9kf?t3ifZuV~##tLzZJe`l&BiesS8N=yal^*(8npT=PtcWIoZagWA18rNu?p>c)A5gONL9G`J}0(Wcf z)ZC@H6LS~l4$NJaJ1los?x5T~xpQ)No z+?lxhaOdH!!<~h@3U?Il8r(6sTX09G2=Wv|6aqgYNoqIdS;@paJD$b2K zC*oX)a~jTNIEUd}gmVziJvfKp+<|ij&b{k)$L+4#9ksh@cg*gV-6^{pb|>sE*qyGs zTz9zcV%@>Idv%BE?$n*ByH9tX?mFFBx~p_Y>8{Zoqq{|Sgzg623A)>Jr{^xu-JCl) HcX0v$Gm3Yl diff --git a/fixture/18/5/15.1 b/fixture/18/5/15.1 deleted file mode 100644 index b5df95f9739c4c7b30fbf2c1399926e46afdcc11..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 719 zcmV;=0x3LQ{5K?706 zv0cTuU13hW8@DSQyHvgR9$Uq+owWz10H6S$0CRZe?#$VlgERML&duDJIWu!*=Df^x znd357Wsb_+lsP7IOXif!4Ve=%7i3PyT#h*$bKYDx$IZ2vV==d4j>O!EIT3Rk<}}P@ zn3FIUVGhDvf;j|p2j&3G{jJ+?)_$w@8@1n>{l@IKWxo;oZP;(Ze%tk%uHSO~ChNCY zzrp$~)o-YNJM|l=-#-23>9sbiro3Q>v6~9uErgWyBT*e?qJ-#xI=Mw;?Bg~ zhdU2<9qugLRk)*Y*Wix9-GVy;cLVMO%xRm;HivC4+8ngGXLHErj?EdH`!(lluGgHc zxmt6y=333Mnp-tTYHrk=sJTsZn&vXiNt%l^2Wc+R9HO~HbAaal%=rm>ZuZ*jvDquL zM`mx#9+$l>ds_CU>`B>+vZrJ($sUruAbUXee(d4cyRm0ukK23Old-pAPsLt}JrR2$ z_CV}q*u$`QVGqLIgFOd(2lfo?6}anm$L((09ksh@chc^b-6^|Eb|>sE*d4IDTz9zc zZr#DUdv)jP?$n*ByHa2A^;qq{|SitYy83Azh(x93jJU7mkw{?PoL z`2+L!<?tk{LT3L{=B~ze=h!7{F(SG@kipX z!ykvg4Sy8=Cj3eGTkxmgFTtOHzW{##_OR_;+q1U!Y|q(Vvpr*b#rBBp_1fdLw`-5q z-mE=Yd#mJwJPW_W11C*{ichCji92 BiM9X$ diff --git a/fixture/18/5/15.2 b/fixture/18/5/15.2 deleted file mode 100644 index 52e6895e8a962df0231bfd849cafcbeed0233454..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 720 zcmV;>0x$gn0h9!=1ONcA1ONcg0ssIM0002E0ssIgwJ-f(umg1!018W_3K>9p(Ewyy zd#%0JrD@*QUTa%A|DRfGFS>oJ0H6S#0CRoj_{{B@t20MuZq6K=xixcY=Els4nF}+g zWiHDcmboZ%Q0AV@A(=ZeXJqcjoR7I4b2jE`%zbk(=3dOXm^(3NVy?uThq(@O9Of#_ zQJ9-B$6#*3oPxOla{}f9Ok>+Lv`r)1G^9-<+BBd|BiS^NP2<=!fKB7qG<;3t)--HQ zqt-NBO{3K`SWTnUG)PTj)HFCvW79M=O=HqDBuyjIG!#uE(KHYRcX;mZ+}XK_GXJ0^Ea?v&gOxf601-cOdRE++n!8a0lV;!JUJ<19t}Q3e0_*^ETIQ&e~kHIcjsw=9tYbn#o)vt-D!wtnOCbsk$3=C+aTLou<1?cbM)X-9fs0bcg8f(4C<>KzD!c`~-h){@VPp z`785B=5Nd&m%lB4TK=Z|N%@QNr{pimACkWye?b0z{Neb!@n_>t#$Sv-7=J1LQ2d?v 
z1M&Ca&%@t^KMQ{q{v7-@_+#)_;E%xHfW2;e-1fHZQQMogCv9)pp0d4Ud&2gD?E%}% zwTEl()*h_AS9`AZPVJf6E4AlouhSlGS;uJ`o!wFF8X^S7B>)e`Kf-_yph= T5Eh6JNQccKwXiwJ@A1J8F-}E1 diff --git a/fixture/18/5/16.0 b/fixture/18/5/16.0 deleted file mode 100644 index e19cb63b5980ddefc95acf44d70eb892e1b275ba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 719 zcmV;=0x3LHQp}OJMXT$or)qB0oT#}_bDHKd&0(60 zGzV$!(Hx?=Lvx1a{>=HA>oaF(uFf2txi)ib=GFvzTlTc;c*Pv4>;t#@^c=jlCIrGWJ&Nsn|=gCt@$e9*DgRdl>dE>_OOju;*a!z@CA<0(af+ zxZQ2Lqjop#PTJkFJ7sst?u6Y1y90KY>kil5tvgtEukKvkow_r1SL)8wU8g%vca`oa z-A%eqPHaroQtr{QnHpM<{%e+vE*{2}-Y@CV@U z-nBhzd)4-w?KRtDwpVPA*xs-`UVFRtbnVUBleHIXPt{(kJyd(4_CW1@+QYPWY0uK$ zqdiA^jrI)f71|@T*JqE<-kv==dvo^W?5){TvzKOX%$}INFaf_s`3=f%Pkux4+mYXl z{PyEFAHVhZ?d`YkH}5wYzs2|s#&0QpL-E^*-$4BK;WrPzUHHwyZxw!X@LPl582nb? zHv+#6_}liU?JwJ(w7+P7(EgJBA^SV_2kh_HpRd1Lf42T={ki&U^~dV3)E}w8QGcBN zHvMV(oAf8?FVdf)zeInC{sR30`up>T=kLy+oxeAKZvNW*nfWX8N9NDVUza~F0RYZj Bm5~4d diff --git a/fixture/18/5/16.1 b/fixture/18/5/16.1 deleted file mode 100644 index 00a88e0d8948646663c7a8250409f0c8e8a87003..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 729 zcmV;~0w(_ z<*v$|le;E&Ozw)@5xE<3$K!6tosK&jcQx*4+_ku4akt`*#NCKH5qBHzG~8vllW-T| z4#Hi6I|O$J?f~39_w5|EbJxyUJNN9IvvbYP89P_(9Im03fv(Cvnx9Xg# zbE(dWIv463sB@XlVLEr|9HeuP&N(`F=$xT*h0ggo*XJCcb9K(qIXCAVn{#WwdW(jNgl&i{FW#iC>AIhhK*uhhK#sh2Ml9 zgWrOmg5Q9jfM0;SZFk!4vfW9$i*^U?F4-NjyJL62?tb0*y1R8}>#o+FtGiZrtnNzP zk-8go$LVgf(=KX-WU?%dhAdvoXJuFc(0^g|e z4JzN5@(m{6Sn>@e-x%@@A>Rn{4ISUe@eLf`pz)0v-;nW*72ikj|6eR?ujR}Kp8%f#pa6Gg?#$ekx%+bG<*v(}mAfi;RPLJGF}YiE zN91nEoshd7cRKEJ+|{_FaW~_R#odZK6?Y@6)80Cu=U&oT|B0bExJ*&4HTxG>2*K z(wwEaM{|zm8qFD+D>O%FuFo8wxjl1q=H|@F343Dp!t8h`$ei82&E&S@?VK=islwpMk#ue+2ft z?RDGZwpVSB+TOH1W_!!_lYR}Z(r#(-5o%Ss4 zRobJp*JzK?-l9E1dxQ1_?d{pqvzKR2&R(27ID2aL((IuL-@x*XE8np4jVIr5@{K0n zF!GHe-yrgh9^c^cjUC_MzG34VHNN5E8!f)U;u|HtLE;-DzQN%e8@{388xy`E;TsXY zq2L<{zJcHy0lop?8~?KXVf(xG2kr0KpR>PXf5!fb{rURq^~dY4)*r3ES%0klR{g2^ z8}%pZFVvr=zf6Ca{v!QB`g`<;=>-GN$K@%M(F*`+1Sz~Q?fs2)&sk6Pu(c9(k`2#`?7a=P(KS@(vXJn`l_yph= T5Eh6J{6e*`IoKS;i}S$`7-mHO diff --git a/fixture/18/5/17.0 b/fixture/18/5/17.0 deleted file mode 100644 index 6506b22b0275bcf0942fcce4be2aab1d438ed771..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 717 zcmV;;0y6yq0h9!=1ONcA1ONcd0ssIM0002B0ssIgwJ-f(umfEc018W(3LQ{5K?AYE zv0KHlUB$Rv;h0}{FY>*&%qqt1s=5cB0H6S$0C!66lH4J=6LJ^i4#?fx-FN5R)wrW^ zH{*`Q-HJOEcO&jZ+=aN)aF^i@!(D_s2zL+e5ZoQOGjMmV+Z?yKYID@)rp+;%TQ;X` zZrGf#xnOg;=5o#9nu|3DYwpz?s<~5hrsh7)d7A4qXKAj|9HqHNbByK|%@LX#G$&|o z&zzpQJacm9;>^LBOEZUN?#vvRxi52G=B~_HnX3|eMfQm74cY6l$765DUXDE+dpGuA z?7i4?v3Fw6#9oO#4|^TScT(=6+(8L{K>mLG`S|nxxU7i~}3Ua~!8 zd%^aA?fu%rwRdaJ*50c~-1WvR7q~%HEW{CVNcwmIS};_)W)eZ@=C6&Bkvv zesl3#i{DuMR^m4jzm52f!*3gY)9~Ab-z5AN;Wq`pCHM`&ZvlP-@Y}y@f7br0{W<$< z_Q&k6*dMXKVSl{-cKzx4oAoE_FV>%`zf^yy{zCnM`up^U>F?5?rN2jij{X|`8Tu>q zN9eE5AD_QHe{}xl{K@%S^QY!7&7YXRFn?hFvixEByYdI+@5!H&KO}!g{)_|w7&elg diff --git a/fixture/18/5/17.1 b/fixture/18/5/17.1 deleted file mode 100644 index 387ad56c7a6214925dbf3fb6eb03ac07a40ba8f1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 718 zcmV;<0x|sp0h9!=1ONcA1ONce0ssIM0002C0ssIgwJ-f(umfck018W-3K>9p(E#gO zd#!Elwf0(oS~L2f0G|M#0C!LBoZL0JJ91~_uE?E_yB>Eu?r_}QxU+Hh z;?Bigi#ro{CGJSvb-3ejx8aV$-Gn;{cMI+m+$FdZa2MbXz?`=6cQ9nyWQOYp&HCtGQKkq~=D=iJIFqr)e(JoTRx(bCBi|%^{jQGzVzz z&zzsRJ9BpC>dd*BYct1YuFM>nxiNEG=C;gf342QRlI$Vb6S5a%56E7($L;CZo3STj 
zFUFpVy%c*W_CoA|*!!@DVei78g}n!R4)z-C8Q3eZN8rxeUAH@Kch&Bw-A%h=cDL+K z+1;=^VRymqblv5;!*v(y4%XePJ5+b4?o8c%y7P3`>CVz!r8`P@jqVuTExIFgH|S2# z-JUx=cX{sQ+{L+rbC>20&E1(hFn3?>yxd*6vvOA@e?|U?{0;fz@weme`?K*^C>@9^;s9q-8T4jk`@ z@eUa8c<~Mt?>O-e6Yu!&4iE3>@D2;_sPGO7?`ZH22Jcw#4g&8O@D2fg-2S%xY5SY@ zC+#oVpR&JXf5`rV{Q>*?^@r>4)}O7vSAVYlTK$>&EA>a}uhSo=zfFIX{wDoN`djp; z=r7Tqpua$Wfd2CQ;rYAs2j}n2pPRoke`fy5{CWB7^2g<`${&@#DSu7=nEWjX0KlY_ AH~;_u diff --git a/fixture/18/5/17.2 b/fixture/18/5/17.2 deleted file mode 100644 index 3f651865228beeb4ff5ea308abffa07f57db8e33..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 718 zcmV;<0x|sp0h9!=1ONcA1ONce0ssIM0002C0ssIgwJ-f(umfck018W-3LQ{5K?68$ zS2%X7IJT=8x10I8E%3cZ!fqAERzVM-0GEu?zp=g zcR221+`+heafjmW#GQ$|4|g8!I^0>ft8ho*uE8CHy9IXy?grcmnA0|wZ4TR9v^i*V z&*qTL9h);Y_iN7AT(3D>bG7DZ&9$0iHMeSx)ZC~!QFEK-G|gq2lQb7;4$@qrIYe`Z z<^aw8ne#JuXU@)CojEsiZRXg_m6;+aPZs=HHnrtUu7dAjR#XX&oe9i_WQcZ}{9-4VJQ zbSLO;&z+vTJa=;L;@rWxOLK?j?#vySyDxWM?ylTfxvLU?MgEBV4f*5ox8qO8ACA8p ze>VPJ{JHpR@n_<%#2<;j4u2f}HvCceoA4*$Z^55}zXX2*{sQ~~*u%DWZO_`?vpr{f z&GwA#726}W*K3d0-mX1bd$aar?XB8VwU=s7)Ly7PP_G{?0r~C6Z$5tWe(QeY zew*=|jNfAXrsB60zoGao#BU&e`|ulv-!A-S;kO6BIry!?Zw7uV@Ed_YZhzbUwEa!{ zllB+wPuX9xKV*Nw{($}c`or~i>(AETt3OwNt^Q2?mHH#~*XfVb-=;rGf0O AP5=M^ diff --git a/fixture/18/5/17.3 b/fixture/18/5/17.3 deleted file mode 100644 index 99ef1b5e5093833d55ad976cd175cb3fb3e079d3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yY70h9!=1ONcA1ONb~0000K0001u0000ewJ-f(umc?g0Hg+SLI8V(kC~&b zx53NR-|6%H2SX7XB`-NcO;=%Sb$^MIou{$8#n0K}?e_sh3l$$JGd)L9TV-!~gN>J= ztF^z$)7|Is`vpZ086zz>K}%I%X>)ytk(;Hjxx>xZ;p_GP2}TngCowxkPh_YN_yph= T5Eh6J)1l_DIj9!MFZjU^JnKb~ diff --git a/fixture/18/5/18.0 b/fixture/18/5/18.0 deleted file mode 100644 index ef9d223bd0d1203c2579427aaa3cba56dafbfe49..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 716 zcmV;-0yF&r0h9!=1ONcA1ONcc0ssIM0002A0ssIgwJ-f(ume>U018W#3K~Fq(EyEF zd#%0Jwf0)u+H36pw^>_ztBin|kcChk7mdARFv zXW_2G9fi9FcMR?p+!44Na3^4n+uXJ}ZFAG+q|HT}Q#O}u4%u9=Ibd_Y=5Wp3nzJ?c zYR=VMt2tA1rRGS@b(-Tew`q>j+@v{4bBpE_%_W)>G#6+N&|IE5Jac#E;LN?5b2E2l z&dgkyIWKcv=D5sNnWHi{Wsb?*k~t-FL*|6c1qpjJ_GawK*kiG`Vo$~1h&>T|A@(%v zW!S^87hw;=-h({^dk6Lm?7jPT=k4y=owd7ach2sb-7&i>c1P@P*d4FCU3a?fX5GoU zi*={!F4Y~XyHIza?mpdNy1R5|>F&{;qq|0ThVBa85xVPh$LDU(9i6*5cXIC5+^M-s zb0_95%pI7!EO%J$uG~SndvfRG?#P{yyCQc!?t0wu2!AjBT>Q28Gx1mAkHlYxKMsEz z{wVxS_>=Ir;7`F{f2B+KaRYX)n@Q05Ojb z^T05V3-horj|cN`FpmcFFffk-^C0l&?XTM(x4&wC)c&UZG5cHgr|fUopRm7Rf4cs1 z{o(qH^#|+k)gP+AQ-7xZKK*(6>-1;ouhJi-zeazI{ucca`Wy5o=x@)Tp1(YQa{l7{ y!TC$`hvx6hADF){e_sBs{8{;{^5^8Q$sd!yB7a2whWzpP`~JMY8-F(bY6JimxQV3z diff --git a/fixture/18/5/18.1 b/fixture/18/5/18.1 deleted file mode 100644 index 0eba36f848a2fd05db3492e2fc8107e33d47b4c7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 725 zcmV;`0xJCi0h9!=1ONcA1ONcl0ssIM0002J0ssIgwJ-f(umhbH018XE3LZc<&;SWh zEXtx(ibYwJO0g&lk-zsK-iTsR!iD(rrvRV;p8$6{?sDAWxRY@g;||7MiaQi{C+*cj3;$U4=UbcMa|s+!eSZa0hp;+c|FMwwV{~rOIYs9NofC8}&^bNl@|?qS zF3veP=iZz{bMDMJGv~gX^K!1sIVvPBF zuFf5uyE%7k?$+F?xf^pQ<}S>gmb)x>Sni_SLAiT!hve?aosqjAcRucV1b;C8Ui`WE zJMm}Yuf(5+zYc#K{wn-Y_?z&@;BUd7f9;dxcdz$tp?Md2;w5Mn<(H^3`Kzo4p{_Nq| zyR&C!@6Dc@y*7Jh_R8#$+3T{$WpB$KmAxr@QudbYDcMW1CuA?k9+16n&)e$)$E`R{ z#c?T)6LDOK<3Jpj;W!M(T{sTHaSx7jaNL383>;VBxVPi}b^EjSSM875U$Z}Ef6M-e z{SEsQ_P6U#*I%wbS%0zqVEv`~L-lv+57ghMKTm&`{w)1f`g8Qx=#SA~p+7=@gZ}vZ z?fKL5H|I~zUz|TRe`)^E{Dt`g^Y`Ts%ionhD}PV^ocuNUGxAsDkH}w-KOTQO{&4)= H__GlJj`oT! 
diff --git a/fixture/18/5/18.2 b/fixture/18/5/18.2 deleted file mode 100644 index 2874d7160db817dde447e5faf196f9fc4dc1e597..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 714 zcmV;*0yX^t0h9!=1ONcA1ONca0ssIM000280ssIgwJ-f(umeRE018Wt3K>9p(E#>Z zd#!8jwYIg_+8pfvw=lA{wR!rU0H6S$0C(P9cgNk`xU+FrlQbJ*st%|V-cHs@^a*qpJsVspOcdd=~gt2IY! zZq^*Dxm9zj=0?qlnhQ0jX)e_+(w#;dn%Q7crF3KE~xg>K)=8gn=I`(qx;nwdnfil?0wkt zuy@L}zu)AP)!0vM0;kvtZ z2kY+DovXW3cc$)2-Fdp}bjRtg(jBF{Nq3Cy7Tqbj8+0e=F3_ExyF7Pz?&93RxqEYm z=I+d$nY%A{UhcZwS-GonN9C@`9h18ycSP=n+zAPPHU4P)&G=*Sx8hI5--tgEe^#f)?ckZSAVVkSpAjyBlS1xkJI0#KTUs={v`cH`cw3m=nv6fpg%x=fBx|N-TAZg w_vX*dUz8<66Y>}256B<)$K!8D02b(u82|tP diff --git a/fixture/18/5/18.3 b/fixture/18/5/18.3 deleted file mode 100644 index dfeed47717ba5a85528a4db24c4ee6cad2872f11..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yY70h9!=1ONcA1ONb~0000K0001u0000ewJ-f(umc?g0Hg+4MgU`NcY%wQ zpQ*FG$I;v6@A(5q4HqFRH9tvHU1xE7g^!t|t+&C;)!*s!{Rc=98znC}Lrqs6)80Cu=U&oT|B0bExJ*&4HTx zG>2*K(wwEaM{|zm8qFD+D>O%FuFo8wxjl1q=H|@FnOifbW-iT~n7J@>VCJ&SVVS!! z2W9TboRhgDb4KQh%)QNhbKabexg2vi=3>mjn0pa>AND-#b=bSGXJN0xo`byxdkppp z>=D=-aJTJF+g-LhX?M}?pxq_ALw0xU4%pqVJ70IV?rh!Fx^s2c>Wbob{D&)uCnJ9lsH+}yRfGjmtwj?7(`J1%!y?x@^N zxs!6YgGdnWcu1iw}Ijlypeeq-?4g5MPU zHsCh_zXkZq_J{57+8?yPXMfKAj{Ouh$>1zgmB^{$~BL`djs<>TlGasJ~Eu zn*K8VVfu^o2kGz8AELiQe}?}4{Q3Fo^JnL;&L5q>Hh*mX*8GwA8}ldTZ_A&Szbt=J v{-XRr`AhPL)Z+|@gcKqr1oAD>(FUFsWzZ8Ec{zUwR_yZ9D3S5ed diff --git a/fixture/18/5/19.1 b/fixture/18/5/19.1 deleted file mode 100644 index 10480804fa613fa2f892450a922550d983af3b1b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 724 zcmV;_0xSIj0h9!=1ONcA1ONck0ssIM0002I0ssIgwJ-f(umhD9018XA3Lii=&;W#D zQ5L0AEXtx(ibYwl@O!V`2nDnVCH%|b0jB_;0G|MNCGJSvjkxP@$Kh_n9fi9IcM|Rv z+$p$Aa0hoM;O^~QwsY9dT{{Qu+_Q7e&K)~v>|C*PzRvYJ$Ln0JbF|LQI>+kVs&lH& zjXEdlT&Q!J&Sg4>>0G39kj_0ihv?j)bB50SIp^nGpL2H3)j3D!T$^)j&aF8|=G>Tb zV$N+jr{!Fhb5hPlIS1ujl5diU&h0p-<6Mq&GS0;~2P1nR_CD-+*u$`Q zVb8+egFOd(4fYJ|71$$i*X@qm-L^Ywchl~q-7ULQc9-l<*j=zYV0XFhaNXUygLU`n z&eh$iJ5zV1?mXRfy5n?L>5kIfq&r4;i|!QN4Z0I_7wAsUU7kBUcX96E+`YL&b9d&> z%-xqeFLzz;tlU+(qjJ~ej>+AUJ0f>O?u6WRce%SBcQ)>7+|jseamV6rMSmOqH2h`w zlkgYe55ix9KLmdV{@(rq`~ldzwr6dx+Mct$W_!%`itQ2G8@9)5Z`Yo#y;*y*_G0a+ z+Do;EYA@6tsJ%~nnD#F1S=xKF=V-6do}s-$dxZA-?D5&#vqxud&YqmTHG69I((H-Z z3$q7iFUua5y(@cA_MYrH**mglWUt7ckG;6PZ!gClj=dXuF!o;Tx!6OocVf>($5}Y8 z!f_OiYj7Nc;}#r8;J5+D!ToLf)ApC`PugF!KWKl+{*e70`vdm(>(AHUtv_3Twf5%Mf1Ca^{Z0Cl^cU$*(O;rJM1O()0R8>>!}E9N&(7bQKR17E{>=Q9 z`6Kh!<&VqXmOm*O`DT87i~`2T(UW2bHV0-&Hb9gHFs;y*4(Q(S97iAOwE;= zBQ@7)j?>(xIZAVr<|NH6no~5FXim^vpgBNudFJrU-I;?k_h!z`+?hEub7khd%ypUL zGFN4e%G{JWCUZ;Xl*|p86EYWMj+^r_*JIAcT#Y#z0ed9&M(l~$AIVBC+jZOovOQ3cc|_{ z-GRFMbcgBg(w(KdM|Y0y8r>PXD|AQbuFoBxyFGVw?&jRdxm$Cm<}S^hn7c4{VD7Tq zVY$0<2j%X`os+vGcSi1t+`Zj>cix?jyBv2o?qb}*xO)+QAO1Z2b@;RJSK*JsUxPme ze+&Ky{0;aMu%~S=+a9*PXnWB1p6wyqJGN(R@7JEMy`&R>us>mc!Txmp<@&?*7wZq!->W}Vf2aOT z{eAlL^w;Un(qE-NN`H<182v5!BlI`uPtf0Xk!Q(b3q zdxejgqpi2W%hliM^Zf@+5gR2hIYUiXVQY1NiIbhDvAf03+2if^0Zt1QA7rQy_yph= T5Eh6JhoD-pIj9zh*WiO65Q{}Z diff --git a/fixture/18/5/2.0 b/fixture/18/5/2.0 deleted file mode 100644 index 3b658ade3f347b55b0c699b680c9a1a0c8edd618..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 715 zcmV;+0yO;s0h9!=1ONcA1ONcb0ssIM000290ssIgwJ-f(umepM018Wx3K>9p(Ewyy zdvUb4wb#1VUTa%>t-U7wf2n<@0H6S#0CTG5Qq7^76EzoV4%A$xIZShx<{-^InsYRF zXwJ}Fp*cTuedhSg)tRF+H)oE`+?qKxb7SVj%!QfLGM8lz%UqN>D05Hdkjx#KGcxyM 
z&c|GjIU92|=4i~dm}4=wVvfYzh&d5+8|F03WtfvN7hw*f(=Ja>5R?%ct-dvoXJ?#!K;yE1oP?z-G@xvO$V0fCPS#wkIamREp!PoPdD^?QXKAm} zo};}+dyMu9?Gf4=w8v*}&z_#WIeT*U;_Ru}OS6Y&FU%g8y)S!M_O9$%*?Y3*WUtAd zk-Z{&MD}{@@z~q3M`Lfso{YT}dn)!)?1|V5u?J!=!ybmc3wsdu9_%^TJFxfdd3yo& z0PMZXc8Bfm+8wmJXLru-j@=o%D|YAWuGbx}yIOa&?q=P!x?^>>Du0{)H2r1zlk^wq z57J+vKSY0r{s8^``SbI4=g-bxoj*5!ZT{H&mH8v{H|CGa-U0wzq6g*U018W#3LQ{5K?5{) ztC&-F$8HtJb`|4xg=4phLtO8@;zAFk0GD05Hdkjx#K zGcxyM&c|GjIU92|=4i~dm}4=wVvfYzh&d5+8|F03WtfvN7hw*vYHIZqps5 zyGeJF?iSrCx=VB?=q}J5pu0SGc<%1p!MS^L=jQIrote8bcV6zg+;O?9a!2KE${mxt zC3i~hhTI9c3v#F9F2@~?yBK#c?q1xXxI1xY;_kzphr14U7VawCQMhYx$KYEth|LX~6E?SNPS;$nIazbD=3oVTp!PoPdD^?QXKAm} zo};}+dyMu9?Gf4=w8v*}&z_#WIeT*U;_Ru}OS6Y&FU%g8y)S!M_O9$%*?Y3*WUtAd zk-Z{&MD}{@@z~q3M`Lfso{YT}dn)!)?1|V5u?J!=!ybmc3wsdu9_%^TJFsV9ufX1b zJpp?G?y}usySsJ=?e5u~v%6z=#_o#U`MT?M$Lp@v9j&`rcdhPN-K`3LoBlNYW%`r! z7wHeuU!p%me~116{r&m#^LOXZ&R?BBH-ByZ*!-3GBl9=rkIUbdKP`V#{-pdx`BU;AaE yx4mwA-1fHZQQMogCv9)pp0d4Ud&2gD?E%}%wTEl()*h_AS9`AZQ0<-CGZg@$iiG6= diff --git a/fixture/18/5/2.2 b/fixture/18/5/2.2 deleted file mode 100644 index 81fb914284572bc02ed9e902c4c51c22ad9404d1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 713 zcmV;)0yg~u0h9!=1ONcA1ONcZ0ssIM000270ssIgwJ-f(ume36018Wp3LQ{5K?AYE zvAfK#-71dlD#q;!$8HtJHZAYH7g!IW0H6S$0CTP8Sk0}PD>X-IZqyv7xlMDL<|fTa znu|22XfDwlqPakGfad9F322^xg2vi=5EZvn0qnjV(!G8iMbMU9_BjCahR(xM`3Ql9D}(9a|-6Xxo(b| z`&+l)xcye`H)_94`;FOe%YIY#+pyn+{TA#uUBBh}4Oe%l?oQpAx&w9h>CV&Lr8`S^ zmF^tfHM(PTSLlw=-Jm-@cYE&i+|9X@a~J1M&0U&1G|7Y|htQuQ^_GwdQEe%?f*?_C)Q4+S9a`X%Ew0 zq&-M`kM0O56WJW zJtTWa_JHjD*z>V>W6#E3jXf88E%sRKmDnS(H)4;&-iAF5dlU8~>_ym9u$N#D!5+8w zwnyNO+ugQ1ZFke|q}@fkQ+Ai^4%uC>J79Oe?r`1Ry0di$>+aQ^tAC#UI{k6_tMo_d zZ_*#5zeRtF{s#RC`U~`@=P%D6p1(MMaQ@!>q4_)WXXfwApO?Qbe^&mg{89O9^2g+F z$sduwA%8;tcKqr1%kd}UFUB8?zZ8Ec{!aXX`1|nZ;qStqg}(}a4*nYaG59O+_x*W) v0sa8|-MhADZLiv%v%O|}%=U`y5!)NK$7^rbp02%Fd$RUo?XB8VwU;UYbufgs diff --git a/fixture/18/5/2.3 b/fixture/18/5/2.3 deleted file mode 100644 index 8ea7629dda993e43c92b45776abd60c14640be55..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yY70h9!=1ONcA1ONb~0000K0001u0000ewJ-f(umc?g0Hg-l2LR*k_W=kC z6(1=xJx5VnWp8>-GN$ z3KJbCF*`+1Sz~Q?fs2)&sk6Pu(c9(k`2!0L7a=P(KS@(vXK{OlkC~&bw`8ag_yph= T5Eh6J+6C2u&7tPVC;7n-41z<9 diff --git a/fixture/18/5/3.0 b/fixture/18/5/3.0 deleted file mode 100644 index 3fd3ee54a9c2a39193fe2f089965e90c905a89d5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 718 zcmV;<0x|sp0h9!=1ONcA1ONce0ssIM0002C0ssIgwJ-f(umfck018W-3K~Fq(Ev%4 zwXMB4+S}S|U2Ctkt-W^pe=(R3r~sb;pa64}<|fTanqxG#Xim}GpgBQvf#&qg<(b1X z7iSL6+?zQxb7$tv%zc^jGS_9!%3PH>DsxTdn9MDiBQiH+PRQJjIURF3=48ypn1eBw zVh+XJi8&B+ALcyFU6`{lS7FYi?poZjxLa{Y;%>y9h`SAU8tyXONw|w}2jR}U>+ZO_0e1rK0?cij(>9lFPTE|w zIcRgq=8(-Dn*%oYYtGl)tvOqBwdP#SwVGo!S89&b+^9KDbDQQg1$&D2673<{3$zDl z@6R5dy*qn$_TKEd*=w_BX0Oa1nY}K1T=ur?QQ4caCuMKRo|3&JdqVbt>;c)!v4>;t z#vY8l7ke)DPVAZ3E3xNcufraPy$X92_9pBx*juotV9&r_fjt6u-tM~Hal5N_N9}If z9kaV-cgpUD-3hx3cBktu*B!3ASa-1QUfrR(J9TI3?$e#8yH0nP?kwF^3V((E2>lKE z+aaC;@GZY+^%r!R`T8p4DA7=0H6S$0CSb*D9ufpYc$7bZqXc}xj}P+=Jw3# znaeXLXD-ehoVhe}Xy(q$ftmX<=Vk87oRzsMb57=(%rTiOGDl=?$Q+Nk9dkP7X3WW$ zi!rBSF2x**xe#+8=040}n7c4%VeY}4gSiHC-5fXfHYZ>%z#M?zu>E%JH*3E=`_0*J z&3-fXTe07W{nqO@Ucc@7jn;3oev|dvs^3)omg+ZAzlHh@RCkc>9^E;*Lv(lO&d}YT zJ3n`Q?(E#vxubK}=8nzXnmaOgWA4P#|V{upFj>O%FI}Udn?ljy@xRY=f;ZDI_g1hg|z+Hhm0(0Evw#{jqn>HtH zF4~;3xny(5=7P-uoBK6~Ywp&Zt+`iouI5_JnVKs#M{2Iq9H+TW0eg$~6zwJ26SNm- 
z571toJv@7N_TcQj*>kgZX3xxCnLRIiUG})_RoSDmH)W5>-jY2fdqehw>;>7=v6o{H z$6ky*7<(`FQ0$%9GqLw!&%<7aJqvpk_9*N%*kiD_VDD`Y!QO#A0DJGc-C4VMx;u3T>h9B>r#nn{m+mYDe}?`F{So@> z^T+3J&mWz?Ie&8g*8HjYOYge=7b){E7Gr@u%T0!yksf2!9a%9{hcO-e31e;BUa6fIV${+4ivQMcae6 z_iPW@-myJnd%yO4?e*HTwO4D8)?TYUR(q@VNbQZ<6ScQ#Pt#tey-9nL_96uUVt-VR*CrvRS-p8#{2<}l4&nu|0CY3|V+qPatJhUWgv`I+l8 zXJ@X?9G$r~b8P0;%#oQJGbd(l%bb?EEOS!kqRc^=OEQOK?#LXFxgT>r=5EZ{n5!}8 zVy?v;i@6eWB<4oUahTgMr(tfwoP@aua|-6ZId6`@+<-X&C*XF1Z70xnf@vp^c7kXp zPI|S6?c^GCf!N8V|2IZPSM?< zJ3)7W?)2Q{xx;f8=MK)@n>#djXYS10eYx{;*X7R2U6ng3cTMh?+%35yayR5o$lZ=R z9d|kIWZcELgK?MQ4#nMxI}mpt?mXOGxU+Cq;m*NbgF6Oy2<{Ht8Mr(5ZO+@=wK;2Z z)#jYdHJf8LS8R^h+^{)bbGznr&CQyVH5Y46)m*AMRCA%`K+Sy$dyn=U?KRpnv{z`4 z&|aTCK6`ui=_Q33A*~7ATWe>{UlRYPUNA`^D71{H#*JF>z zUX48(do%V}?5)^Su{UB*#9oL!4SN~(FziLxgRu8t&)e(vxIF=T0rmjgX}imIhwU!f z9kjb=cgXIJ-5I<4b?57@*PX4qT6eVWTHUd_TXje3Zq%KqJ5G0-?lc8|ivAM)A^HpS z2k7t5AD+KEe|G-f{JHsS^JnI-%paM*E`MD9w)|1~oAM{+Z^@sMza)P`{(}4g`OEQ# zE{4w}j@TcItF}jN zZ`vNSy=8mK_J-{V+Y7d*YcJOxuDw`$u=ZZJ=tF^z$)7|Is z`vndU86zz>K}%I%X>)ytk(;Hjxx>xZ;p_GP2@ew;CowxkPg!GacY%wQpJb>I_yph= T5Eh6JHUgW2&7oSz%l*L*dZR;p diff --git a/fixture/18/5/4.0 b/fixture/18/5/4.0 deleted file mode 100644 index d6a7fbf9120d4c61fb848c6eb6aeba0b48a38b1b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 719 zcmV;=0x3LQ{5K?68$ zS2%X7IOgl!v0cTuUE$cRMR@N8uv`zP0G=ETf} znFBMIWe&^Sl{qMLPv)G=9hoySS7gq|T#q>(b2a8@%*~i%F}Gq)#oUNF5pyBtG|Xj~ z!!Q?N4#M1nId1N4ZowRZxdC$me$)0_w%@S*7VS4^zdid6*>A^wGxpoB-+cYn>o;4! z)%uOrZ>@e~_1mi7Nc}eIH&MTB`c2bsnSPV>TcqD0{g&uAM86#hcX#gW+|{{zbLZx+ z&7GOMGIwO|y4-QO+j2+cZpxjMyCru@?vmUIxeIa!Hm7WE*qpGrU~{_W za?RnIi!}#p?$sQsxl?nd=044Nn(H)YX|B>7rMX6PjOG^25tZdWrM*delJ*wuDcVc4CulFw9-uuvdwKTo1ON%@ Bh`Rs) diff --git a/fixture/18/5/4.1 b/fixture/18/5/4.1 deleted file mode 100644 index b4703b98eccefa9a956008a71c3adc90b313275e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 719 zcmV;=0x3LQ{5K?5Sj z?Fz?k6?5wD*sbE&u43G-s=fD~Nvj8@0H6S$0CRul{LJ;4yEA8JuFjmBxi)ib=E}^G znHw|5Wp2xymboc&Qs$z}DVa+$hh#3u9FVymb2#R1%-NWGG3R2g#hi(`5_2TxI?QpH z+b~CAZo-^|xo^&!>*fy38JH{ZTesi1{kH8lYQIhUP1NiurmHN%oZ=HVQ^joFhDE&6+H%7lL`b|-HdhYVv;klD@7v~Pn zU79;IcW3Uv+p}OI~{j3?quA>xKnYL;ts`K zh&vE>AMP;RUAVJw_u$UKU4y#?cM9$j+zGe~a0g%x+uXG|Yje-$oXs_xGd5Rjj@VqU zIbL(S=4j2$nv*rRYEIQ$syR_}q2@r%Wtzh@cWDmN+@m>1bBE>(%@qoJb@u4&&DmqK zw`Nbx-k3cxdtvsp>}A=*vKM6!%HESbBzs5pjO_i`^Rd@s&&FPjJsNv0_E_w#*dwtw zVo${0hCK~?8TKUXMc9L|*X?n8Z+ivy2<#2G+jghzF58{7yJ&aN?vmXhyE}FV?C#f{ zue)1!w(e@(xw>n0$Lg-s9jUufcbx7v-D$d;bSLR9(w(BaM0bep1lh&3QTdzl$K-FxpOU{Je?tC({OS11@rUCt#vhEo7k?=JPW+kp z`|#)Cufv~(zY2d8{u=x-`1}46{2}-|@CV@UUbj7Kd)4-+?KRtDwzq7L*xs-`VSBsw zbnWHZleHIX57u6)Jyd(A_CW1@+Viw`Y0uJLr9DS`jrJJr71|@TH)yZV9-qBE0RU;+ BiIe~U diff --git a/fixture/18/5/4.2 b/fixture/18/5/4.2 deleted file mode 100644 index 2737c12928050c65c4311fe67c4a107e08d7009f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 718 zcmV;<0x|sp0h9!=1ONcA1ONce0ssIM0002C0ssIgwJ-f(umfck018W-3K>9p(E#>Z zd#!72j<&wmw)R?kt!sw=S8)2N0H6S#0CR-q2F(eY<1@EsPS4z&IXQE2=G4rknL{%d zW)95UmpLqRSLUqDJ(+Vd*JRGfT#-2}-(M9hVl12LCj z4#V7qIS6wP<{Zo^m`gB+U@pKMfVnrwb^~lTxOM|*H+XgfXE!)@17kNRb^~8G=yd~L zH>h<3S~r+=16DU!bpus57Kxw~^`=dRA3 zo4Yo5Z0^e3k+~al$K`IzotC>PcT(=6+$p(Ba);zD$Q_WoA9pzJZrs_pdvWLDuEm{+ zyApRK?mFCYxZ7|?;cmj6ggfr8!5xFU0(S)N2F!Jv<2JW#j@sO`Icamt=9JAPn-ew{ zY!295t~p$Dx8`8Yy_$11cWTbmT&X!vbDicm%~hJCG&gCE(cGdrMFD$y_VVoE*^9FW 
zXYb7(n!Pi7X7;}9dD-i-XJxO-9+kZ&drbD0>=D@;vL|G3$DWS89D6eMV(h`#ORAIVBC+jZOovOQ3cc|_{-GRFMbcgBg(w(KdM|Y0y8r>bbGjvxde|7%o{LT4e z^S9it@tDH zH{wsk--bU8e;NKH{6+YK@WQ~v=?bl(O#lGM0@tMW7?I|k+ey}_c7ZB^< Rmt&aioRwJIar|MkJOGY`LB0S0 diff --git a/fixture/18/5/5.0 b/fixture/18/5/5.0 deleted file mode 100644 index 12f5325892f01bacb8fee2c44f4666b05de97fb0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 724 zcmV;_0xSIj0h9!=1ONcA1ONck0ssIM0002I0ssIgwJ-f(umhD9018XA3L8*(K?15c zwyPMoD;&Ga{MxPJ*sfx{*bl&>^`-!u0GW5WfsR48IG%?#KP!Pr)z255X_M55Vsb zvJC;-5UvdY+7O-%!PyXw4Z+wDiVeZn5PA)P*AQwAfz}Xa4T04VRt-Vb5Jn9_)DS`q zLDLX24FS^-A`Jo35FQNy(GU&|!O#%?48hM3`V7I&5b6wpPIF}D#>|PC<1)8pPRrbs zIVp2d=9J7OnL{!cWDdyOk2xH3H|A{2y_j<`*J94ZT!}dna~t6ZQ-C1NO`H!}YuMgY|p$bM-s*GxaO= z^YrWVfWf*XFFvRhx4**KCg2T(LP~bHnC%&Fz}gH8*Qc)?BPPRdcE4P|by!12y+) z4%6JFIZJbo<{Zs6nlm(4XpYcapE*8rd*xF3}yLyFhn}K84b}|>3i=nsve{Opa7o$cWds{+@-l2b0_95%$=6IEO%J$qTE5b zdvb^5?#P{yyB~Kx?t0wWxT|qTY22i7lEy_Er)XTFafrqR8V6|HpK*A`-5F;mduH~^?2*~?ve#vg%U+c| zDtlA*nCvatQ?fT?Psm=7Jso>F_HgXQ*n_e6Vh_dMi9Hj0AND-#b=b48S7DFB9&Qi9 z-h({^dk6Lm+>JHW2 zsXI`2pYA-}UAnV$SLx2tU86fjcZKc<-3_|qbGPSC&)u9mId^dae_{T>{C)Yu@^|IW z%HNYeCx1=;jQkb(Bl6edkH_DRKN^2C{$%{E_*3zh;!nh1h(8d28U8T*UHHrWeShBH zfhxQEZ{n_)g*JsbpUY$KUdv5mH?6C>wxSZQ^PRqF| z=cJsAa!$#)B^gT%2oh&cwMA=SZCEaE`;d4d*DFn{ZCT zxd-PQoNI8-z_|kF2>f;X<`#qu0LFVxBg)Lz4~+Y zck0j7U#UM&f1Um~{Z;y-^f&2`(chv!MSp|-1pNj2)AN_-56@qmKRADH{?h!R`8yK; Dxo3vU diff --git a/fixture/18/5/5.2 b/fixture/18/5/5.2 deleted file mode 100644 index 7b0d9b690ca6c019081d84d183ee26f1f99ef271..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 730 zcmV<00ww(d0h9!=1ONcA1ONcq0ssIM0002O0ssIgwJ-f(umi;v018XY3Lii;Kmw9V zwN$D_qfRO{Qd3Q}sMJZNT9olSmprZ{JFWno0Gj~k*qmE)PR%(o=f<29b8gEyE$6bF zlX5P~IVk6noI`T%$T=YAew_1h?#4MA=W3jDajwNV7UxQwBXMrTIS%JGoYQb_!Z``& zBAjz@uE9A5=L(!7aPDn5Zo_RGPTO$PhLbj2wBeKumuxs>!vz}-*l@pw!!_Kk;cN}} zYB*QJwHnUUaHWPLHC(6RI1RUHI7-7!8cx!1i-uD)T%zFw4HsxQK*Qx34$p9R0>3jq zGruywFF!B8E3DnBZ}CO;;>B|jp+AwMC%9X}nv96uSq7(W=l6h9Qd6F(5Y4?hpT z3qK3L3cv36ez{+QAA;Y3Uw|Kg-*eZ_SvyzloU?Py&M`Yz>>ROk!_M(Kx9gm)bF71o=kIp$d*XW#~bA`?kI@jkMpL2W8(K$EgoSb)J z?!w%Gxyy2g1`PKQo`MLSE3CDFg zj>~adj-ztil;flvx8yh_$0a#V$Zv0=`oq^>|8t56 z%c>h%yZT2a=ax5iPo2MV^WNhZZ$EwilUbBkTv6N9-ZMBhy|B8qf9B$~+Yg?;djI9; zzwBay(yIEF&c5M^*`@WJljknqxclh&n~&dqXB6iamDe=3bq|bA%}-e&@Pp-nxPVv( Re^Xo5!e;@x9pcZMsvK<;wf;kdhT z2jlL=or}8@cP8#i+tdm;8f>}A-)uyyFjksykJ8qwYlAg}T#pm+21EU8FlmcaQE6-5t6! 
zbob}Z&t0E8J9l;N=-jorV{^CWj?CSdJ27`#?zG%x34clcko+C_1M>Id&&S`5KO28F z{#^XE_+#-`;*Z4Nh(8X08~!x>eSh9x_t)T$!QX;E0)GSk1nhC!+qS1|Z`z);y=Z&N z_LA)(+Y7b_Z12||uDx4(w)S4_x!P;BXKJt19;v-fdz|(*?NQpBv?pnA(Vn8cM0}P@6H~ay*GPq_Rj2?*(Wv|N~mpvdZyZ&_j&H9t|7wb>eU#dS;f1&0x$gn0h9!=1ONcA1ONcg0ssIM0002E0ssIgwJ-f(umg1!018W_3K~Fq(Ev%4 zwb#1VUTa&+qph#C*Sf|3i^ZM~p8%f#pa6GP?yB5TxpQ*Y>u+3eYvo`l^ z&e>eEIb(Ch=7`Ppn&UOMYmV03tT|b8tL9YArJ55p7itdFT&6ipbC>2I%{`iPGi@g$iB=$z^aoF3ir(w_A>-M<4273(l7VHt&8?Yzfj@#X~J8gH<-yousdLPzwU6|-MX`N_v+5oU8_4&cctz~-F3R-bhqh_(%qyxNq39x6x}7d z6Lc5o4$xhmJ3M!H?%>?LxpQ-O=FZGrnL96cUGBI9e@y=WVasp0&Mdd(`%t z?J?V1wnuDl*q*SxU3Q^+s{3{&8yFkuQ3rm$cN3Z{@?3Ie7OU(AC-tv_0St^Qd3t@c1|`Q!4p0Cow+k}_vOyZU6+4N{+RqN`6Kc-h{2!9Ix68s_f3-AZv@7}dNYkSr9 zob5H+W42dpkJ#R@Jzjge_H^yd+LN^xYfsf)sy$SDq4q%SecHpccWKYk-lIK7dyV!C z?G@T1wAW{k&)%LrI(u{WgWn4LM&P#rf7|}F z{bl=;_809B+F!CiWPiv0fc^dY^YwS@&(>e9KUaUP{#gB$`Xlu>>W|akraw)8ll~GS;u8xb2NFF8X^S7fLV_yph= T5Eh6JHlpTGE!Z5$qxQiM4q-%k diff --git a/fixture/18/5/7.0 b/fixture/18/5/7.0 deleted file mode 100644 index 89fc1ebe39d3b5e4919e707ff7fed7d48afc29c4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 716 zcmV;-0yF&r0h9!=1ONcA1ONcc0ssIM0002A0ssIgwJ-f(ume>U018W#3K>9p(Ewyy zd#%0Jwf0)uS{`kEEw%q&SYi910G|M#0Czs_dff52vvF7Bj>cV!I~I2@njljbDNMVeDImuL>rT%b8XbARUW%-xx@GxuiB z&0L!~GjnC;$jo(_<1)8pj>_DWIVp2X=9J7O341yAaO~aKi?Ih|@5LU9y%T#T_CD-+ z*z2&@?Qwe-_8{y%*mJOVV9&r_fxB;a-tM~HS-Y!tNA0fJ9kaV-cf{_7-3hzfb*Jkt z*PX1pSa-1QQr)4tJ9P)@?$e#8yGwVL?ke3mx@&aD=&sNmp}RqMeD3z#>A9P8C+9BC zotnEecWCay+=03Ka);&a%AJ+FCwETnn%o(=D{@C9e>DDP{K@!R@u%W1#h-}35Pu;4 zGW=oq`~D{UN%)KKr{FKaAA-LCe*pgOW!uBHcWn>a-m^Vtd&l;S?G@YewbyHp*IunX zT6?qhSnaLaQ?)m0Pt;zhJxzO=_Au>5+Jm(BXb;iep*=%;fA;+B_1UwtS7(pTUYk8O zdu#T{?2XwIv$thW%U+f}DSJ`&pzI~tL$Y^dFUTH{y&nN%crgYSV`wo36Jsbb1`=cF zFa{1|*f53*W3VuW3S)3Eh6ZC`FoptSATWjjf8GAL{cZcB_BZWM+TXH2Wq-;3g#88k z1NN8e57*zVKUjaS{#^Z?`ZM)c>d(_(r$0`AmHsIGP5NW>x9CsN-=IH1e}Vq={N?$> y^B3n2&fl9qG=FFQ%=~@%^YYi_&&pqwKPrDs{+RqN`6Kc-U018W#3LQ{5K?5{) zt2nl+7`H1NyH(7oyJNR%`Q9U82UZWD0GcV!I~I2axjS=q z=HAS?nQJp=X0FT}nYk`=T;{gSQJI@ECuMHQoRYaD0ed<2aO~aKi?Ih|@5LU9y%T#T z_CD-+*z2(Ow)gFMdlL2{>_ON|u!mspz#f3Tcirx+-Br7zcGv8V+1;``Vt2#tgx&4B z({-2YPS#zlJ6LzA?oi#Gx&w9h>CV&Lr8`S^mF^tfHM(PTSLlw=-Jm-@cYE&i+|9X@ za~J1M&0U&1GR*;}(mW^c@%n7u7~TK2N+N!g3C2W2nG9+JHydqMVq?EMJ8z4*<=Z!LZ^@mq=C zNc`5}Hx9pT_|5yR`;Gez!fy|LbMV`N-wga#;IG>sx4&(F)c&UZN&8#&r|d7;pRm7R zf5857{o(q%^#|+k)t{@sQ-7xZO8t5I>-5LzuhJi-ze#_L{uccy`Wy5o=r7Qpp1(YQ yc>d!2!TEdhhvx6hpP9cee_sB&{8{;{@<-*b$sd!yC4WTzhWrWnU018W#3LQ{5K?AYE zv0KHlUB$Rv;n-c~*KQTF_}+7DUAhOJ0H6S$0Cz#|fZYAK%W;R}?#3OAyBBvZ?oQm9 zxGQnz;jY6Shr932yX)>E+(Ec|aEIXTz@34+bKT~+%~hMDHaBgK+1#=@Wpl&kgv|w; z(>0fC4%b|)IaqVA=1|R@nlm-`Y0lGJr#VY=mF6hTHJW2Iw`h*g+@Lu@b9?6W%;lMr zGZ$wL&Rm)~G;?R>z|4J_^D=j3&dOYsIVW>X=9mO~Joa|%>DZ&OH)Bu6-ikdHdnxur z?1k6^v6o>F!`_9x3VRgxChRfTTd=2KZ@`{_y#ROF?y%imyMuQ3?9SQUu{&dT#qNCF z^}6GASL=?}-K;xScdPDH-Ho~vbrLv(lO&d}YTJ3n`Q?(E#v zxubK}=8nzXnmaOgWA4P{$l;9`b+hP>MztEsJ~BtnEo#PS^9hQ=jgA|pP|1(e}w+}{PFqQ y^GD}z&YzsWHGgXU()@|}3-br&FUud6zbk)G{+|3f`8)DwLW6(1=xJx5VnWp8>-GN$9uplWF*`+1Sz~Q?fs2)&sk6Pu(c9(k`2!yf7a=P(KV+y7_yph= T5Eh6Ja3Qs@IoKS?yZONn%hg0Z diff --git a/fixture/18/5/8.0 b/fixture/18/5/8.0 deleted file mode 100644 index 9afca5f89e82ba35c4fcdde32259fd212b1d5622..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 713 zcmV;)0yg~u0h9!=1ONcA1ONcZ0ssIM000270ssIgwJ-f(ume36018Wp3K~Fq(Ev%4 zwXMC@Uh7(Wt!*ujwuJouBDOIfp8%f#pa6F$?oQm9xC3$b;m*UIhPw=R815q6LAZNx zhv4qOoq@Y^-{!o{U7NEuS8dMOT(dc5bH(O}%?+F5HMeU{*W9c*S#z=GRL!NDLp2v_ 
z4%FPIIZShx<}A%UnsYSQXwJ}Fp*ccxedhSg?U|!9H)l@H+?qKxb7|(p%!Qc)GnZu! z%iNVYD05HdoXj1WGcs3X&c|GjIUaL0=4i~#2zw*;MC^sw`}VxOZm+@~g}n)T4E7f6 zDcBpZCtxqY-L^YzciHZw-9@{Dc9-l9+1;@_V0XXneBIr;vvpVN&edJ3J63n4?nvE@ zy5n@W=}yz#q&rD>k?s`TCAvd&7w8Vq-Jd%=cX#gW+`YMTbJyn1%w3r~GIw3>xZG{I zqjERpPRiYqJ0*8X?u6V0xdU>S;||B&jXM~3FYa6fe;)oi{Biih@ORQB^Ps6S1Anf@^SMf!vE_vjDN-=RN4e}De`{Pp>>^H=AO&R?59Hh*jW$o!4@6Z5y_ vPs?AHKPi7v{-FFN`9tz|zv^)t0H6S$0Cy?wP~4rk3vmbH?!%pT*WGb<6z(S6 zNw`~Zr{FHZoq)RlcL3(F&0U+bHur4K*<7EyHQ}y6beu>2A{Zps~#yCru@?uOh6xeIcq<1WV?j=LClFz#Lie;@ul{B`)_{xtk$_>=G# z;Sa)Jf^a$MvS(zk$R3fs9(z3YcI?sEo3STjkHy}KJrx1Jz5Vw6=KW^jw+g>e z_^rWj41Qbi8-d>j{3hT}+h4XnY=6=Ip#44jL-u#<&)DCuKVN^n{%rl#`lI#N>W|gm zsy|YHqy9wwZTi#nm+4Q^U!*@se~JDO{T=!P^!Mk_&)=OtJAZZl-2AoqWAj($kIdhg xKQ4b;{aJ{rJQ2cjM2---|yNe=Yt_{F(SG5deDAiK74j diff --git a/fixture/18/5/8.2 b/fixture/18/5/8.2 deleted file mode 100644 index 766413f4ff7c11e776db7de6980057fe87e3d50d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 714 zcmV;*0yX^t0h9!=1ONcA1ONca0ssIM000280ssIgwJ-f(umeRE018Wt3K~Fq(EyEF zd#%0Jwf0)u+G}yPH8TI7THAmRpa7o$p8$6)?pWNdxGQl-;%>wphr11T-<^fK3U?Il z8r(6sTX09dP!RCO?{hGrycWchp+^acPbFJn~ z&6S!XHP>m5)7+*xN^_IuB+V_FQ#6-oPS9MSIY4uH=J3qjnS(R;X3ovrnK?6aW#+uh zb(!NbS7naM+>|*cb4%uw%ng|nG8be{$6Ss%907YM_D<}X*aNZmVb8;!hP@1X81^FU zLD+k+hhXo(o`Jo0-|oEKUAwb(SMAQ(U9&r8cg60A-3`0rb+_wI*WIi;S$DDSRNbYz zLvSgd)W4(?Lpgnwufx**q*VyUwgjxdhOZTtF=dKuhkx_y;Xap z_D1cA+S|0JX)n{Bq`gRckoFSoA=*2%2Waolo}axtdv^Bf?77)%v&Uwy%pRG&F?(G0 zw(M!yo3bZmFUp>hy(D``_JZsI+554FWADbEjXfB9FZNsnZ}9Mj4sYP_1`Kbw@CFNS zc<=@XZ)ora18*qs1_FQH{<{5f`>Xax?Qhy2v%h73%KnD^3HuB7r|U1*AFjVxf3W^u z{h|6h^=Inu)1RlmPJfpED*aLVYxKwHZ_yv2zd?V3{`UOo`OEVs=P%A5oWC@GX#URp wf%*IL=jHFppOwEVe@_0I{4x0}@<-%v$RCft9e+ChX8g(ci}APOPsLw~002XY`Tzg` diff --git a/fixture/18/5/8.3 b/fixture/18/5/8.3 deleted file mode 100644 index 3711a77c4451b1058b2033994a3554ddfd2cff32..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yY70h9!=1ONcA1ONb~0000K0001u0000ewJ-f(umc?g0Hg*<9{^KbXK{Ol zkC~&bx53NR-|6%H2Otp}B`-NcO;=%Sb$^MIou{$8#n0K}?e_s83l$$JGd)L9TV-!~ zgN>J=tF^z$)7|Is`voEo86zz>K}%I%X>)ytk(;Hjxx>xZ;p_GP2_q97CuFD)_yph= T5Eh6JhhTHCIj9!MU;e=l#Rf!p diff --git a/fixture/18/5/9.0 b/fixture/18/5/9.0 deleted file mode 100644 index 76f72ee79e4da1c487fe0b43a4522dfe7219b461..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 711 zcmV;&0yzBw0h9!=1ONcA1ONcX0ssIM000250ssIgwJ-f(umdd>018Wh3K~Fq(EyEF zd#%0Jwf0)u+H37a_y4!BwB^7Dpa7l#pa6Fj?kL<%xNC67;BLVkfx7{B0_M2QZJX0J zH*HSZT(mi5bIInA%>|nSHuq}|*W9f+TXV1GT+OwbGc{Lgj?`SIIZkt%<|xfgnv*oQ zXim{wqB%ixf#v|s<(b1XcV`aH+?zQ!b7$tv%$1q*GS_8}%UqQ?Dsxljn9MDiQ!+PX zPRLx4IURF3=5Wl#n1eC*Vh+XJi8&K)V9&tbyKi^i?ylWg zyQ_BR?5^1zv%6w<#O{XO@w(e}r|WLkovgc9cdG7E-J!Y*bqDJ1(;cR}OLvy;9^E;* zYjkJmuFxH!yFPb(?)Kc#xtnt*=WflNn!7Z2V(!A+fw{|ahvn|d9hAE#cTVn(+!?tm za_8f&#~qKm8h14AX56v3TXCo2Zp594yAXHX9e1}Oe+&K;{3ZAk@E70@z@E0fYNFtG!iwr1nPbiQ3z=r)e+Ko}|4g>7MYqQ5@ugo5qy)k=S_O|S4*_*N_WiQH}lD#B*NcMv4 z0onVphhy)?o{ha1doK1`?3vgru}5OB!ybpdZx6%Xg*^)ab1*Q60&^hn=k2fCAGg11 zf7Je_{W1Gn_NVM`*q^Y!V1K&)a{b}@i}eTV@6{iwzf*sv{yzPA`s?&(>95isrN2gh zjQ$q=5&9eSC+Kg_pPs)we{%lf{K5H4^M~f|%paJ)FMnSCuKZc~tMcdMugM>izaoD` t{)YVV_}lTP<8Q{FjK3IvD*jUZq4*2&2jcI;--bU8e;NKJ{7LwW5CBnGikkoc diff --git a/fixture/18/5/9.1 b/fixture/18/5/9.1 deleted file mode 100644 index 272b374ea5b13e4d38359fdede7aab1bb172448a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 713 zcmV;)0yg~u0h9!=1ONcA1ONcZ0ssIM000270ssIgwJ-f(ume36018Wp3LQ{5K?5Sj 
z?Fz?k6~}fJ<93B(w~FPy_jV?;$h8Nc0G|M$0CyPfF5FqTgK+oY&cWS*I|FwG=DN*s zo7*-=ZEo6}w7F$-%I1>I37ZQx2W&3a9Im-rbFk)K&AFO8HD_wB)SRcePIH{*D$P-v zn>5F0Zqb~gxj}P+<^s*>naeYWXD-ehoVho1Xy(q$nVI`C=Vh+ToRzsMb5!P<%rTi; zGDl=?$efV59dkP7a?Ht?i!lddF2x**xf63B=041M2zwLuBb98s;&d^<O%FI}vwpci){Ce-8c{{4w||@JHZpz}~h! zZF|}Fr0qr9gSMA!582+aJz#sk_I&N#+OxG+YtPkQt36hGrS?eejoRb1w`ot)-lRQA zdy)1O?Iqenv=?X((B7XtJbQQc?CibSbFYxc+NZ`mKQzhQsE{&xN8`pfkv>o3+HtiM!$sQymx?_J%4)s=KRU|i}R=EFU=pCzc7Db{=WQS`MdIG9p(E#>Z zd#!8jwYIg_+G|})+5ewLTRwfC0H6S#0CyYiG~8vln{X%LF2bFHy99R#?gHEaxI33^ z4%^(dIcRgw=A6wPn=>|7Y|htQuQ^_GwdQEe&6;C1w`xw++^9KGbD`!m&1IUyG#6~k7MVW&#mt+pf z+>tpTb3f*M%-xu?F;`>G#axRy7IP)$NX(50dlvR8>`~Zru-9OZ!Crwq0(%4Qy4`WR z+jd9oZrYu+yJdIE?vmXJy9;&)>@L?GuDe@zuuhou|7_cbx7j-BG%m zbjRp!(Ve2ZL3e`g0^RAk%X5e4F3ugCyEk`e?#|qqx%+bG<*v(}mAfi;RPLJGF}YiE zN91nEoshd7cRKEJ+{w6$aR=ir#T|;f6L%o)KHPZ-e-Zv5{5|+X@OR+Pz~8-Zd*1f0 z?OEHaw&!fG*&egKVtd5)hVAj%+qI`_Z`Piyy;ysy_EPPk+6%P@YVXq?roBshmi8X) zIofNqXK1g`9-+NHdwllx?9thqvnOY7&7PXQG<#z9!t8(AC-tv^?Pt^Qd3mHH#~H|mem-=;rJf0Oc1w`Q!4p<&VnWls_qdOa7Gn sCHWKb7vvAfUyeTeVM{Jr>d@pt0S#9xU&4}Ts0zP}8A82&B<06r{(KmY&$ diff --git a/fixture/18/5/9.3 b/fixture/18/5/9.3 deleted file mode 100644 index 4fe8fe00f7671bc549453adae624d3d526f4655b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmZQ#oWin!g@FNtmohLghyZaf5NmAJ|C_LZS(b%i37erZgSD%FWO8nKWB1hgD>v^w ze)0Cx_dl6cc*Pa9P3=8{W77+(Tl;4&Uc3F^>8tl&e*Vj@Dk!b0Z|UqCo|s))-#K~i z@{PNXp1=9{?RQ2sZc%wnV_WyY=+ykm=HBTGS8v^a^77s1AAhr|^QWv3_`&i(TtKXY RpE030K=+2sg!hk{|D24;y0IuH~PkSUeSWg(DYa*>-53K2>PN>YjlDiBE&)u};!8qkmy zv?Q7ibfgnK=|v0!7|0+-GK$emVlq>h%^c!b#9|Uy$tqT}iOpU9FLmu&%SG?v8pZLrde({?>1UW4m*~v>j@)Jf8iV{v4%2J8SRG}8NsY7F$ z(3IA+p)FnLN;hKZLtln4lwpiz9OIeBbY>9GJm!%9MP?c&#QI~qu zry0#@K|9*hf$sF6C;jNp0ERPykxXDBlbFdYX0w2WEMhq;Sjh%9vWXq+WETfH#9>Zy ziWDwzkxN|X1~<9S10M2%m%QQwANj-&e)5aJtN{U82_iRn$V(`N38NIH2`7?@RH6no zsYOE?(U@pj(V9+lrVBCjCYC`AW(cDh!&s&;m1)E=mv|CbLL#eK!&*8w%^e=|gs1!m>z|cc diff --git a/fixture/18/6/0.1 b/fixture/18/6/0.1 deleted file mode 100644 index 513f1c5b308ee5ee8f8d1a54805bcdbc8b893355..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWmD=ZnYz0D$4&ot;gbGqTCvJDWIX#F^P-lT9|+#5p6I%p=Y@vWYX}%s4a7%nDf{ z^YP*7{R`gEkjkm|rVfJtPLV1I0twQPnJi=_j68%>m?9LV3}q=tHL6pC`ZS;+5wswZ zDB9D3?)0E10~p94Mly=gOky%qn9UsK63r4~Sj`&NvW2Z|BaS`9bBMznA(7LZA&JXe zA(`9UA%(|0;Wck~%V)mumEZi~FX=*pAUzq#P7ZRCkNgy%I3*}a1u9aBTGXZvjcGzt zTG5&|bfPm|=uIE`GK8TFV=Uts&orhpgZV6AA zE^v+O+~6MfdB8KC^MZH0=L6sP&JRLDgCLYtWF!-r$wh9$C`cg+Q;O1*p$b*0MqTPr zpJp^Cg0{3Himr5{JN@X-0ERPykxXDBlbFdYX0wRJM6;4rtY#CN*}^V%6UPA#a)^_h zB9V(+B8i*aBAJIgB88W{;x!-n#Akl;i{GR%XIj#cm26}uoV?_tD8(pFIm%Ok8q}l~ z4QWJUB56r0I?$0$^rRQP8N^_QFq$!pWeQW7#$4txpBR?1jJ2#|J=@q$Eb;6mfg>E{ W7-u-kIj(S(Yuw>3_jtlnp79T$)t2A@ diff --git a/fixture/18/6/0.2 b/fixture/18/6/0.2 deleted file mode 100644 index 28033a48a91263740cfb2c601a3db787bf2785ba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWmD2aAXS0D$4oIcLV1*~B>`^T<3h&d4}NHrZs8O*R>4#F?2#HgV3#I7c?wWRp!+ z$O_?6@ADJhkTg-DNufdT-*Li%AV^OJ!pTk!@>76yh#7IUlnn_G%3bUC*JP9mj39DJdS~jzVtt7FBWDawL6i#uPGhF5h zSGmJo?(vjoJm($n`M@{6^MfFb2q6=h$wDr2lZV0-p(v#&O&Ox7OckQ3O&uE1m?p&0 ziq>?ZGhOISANn$cp$uaz;~390rZa8CpgI| zE^>*>+~PKOc+3-?@`ks(;|pK;#vlF?r1c~j$wUrvl8b^AqA-z^q!bmYL=-itMKm!q zq!F>SB#sVrq!T^qMQ;W%m?4a23}czXRHhNnT;{Qar7UAD>qulP+t^Mrd)Y?{M@i)j UXF11Ju5q1v+~)z$dBIEm0a&D#XaE2J diff --git a/fixture/18/6/0.3 b/fixture/18/6/0.3 deleted file mode 100644 index 
387c1eb177e91b2f8c3187a8d4d084a227891248..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 438 zcmXZZF(~G790u^;|9#)m6ZYr>AFnhHoOOOl_q?c()gY@G*q2Br_jE6p9LR zLpGG8L!70GdYWmco39Ko%ulA6W0`e!IOL39{Nau#Ueh6yjPgR9r^J-;nHm~trITLz z8Dx|RW>{dAP4+qAf@^Mg;F&i=If>dR&R2>l=L>Z-(MA`2d}oL;CYfcCHMTk6luLed x%OfvFqGUxB=PHGi@`-91XrY51zVU+*#+hcG6*k!Ah;y#^n-0hKNw4{T{{Y;ELdpOD diff --git a/fixture/18/6/1.0 b/fixture/18/6/1.0 deleted file mode 100644 index b729e5389797496b027d14642905edf091a8aa1d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWmDS8NUd00iK_R8e9Rn^2UPB}R>yCDe>fC`zaqqeg6Eme|B5MvYK2w#FuuAe10h zY(mwD{dxG_?&U7I3_%gWNx^}@f62Vc7zn)P4QYHPonQRs4BZM5}Bo_rK zL}5x%nlLI-iOSTZ7PV7VJq9%!(Ng&!cmTKhO?aGD%VKi4tGiAF;95P8{YDcbiVMFKl~+-*(b?LD0#?B zK8jL|;*_Nv;Z&s>5kyj#dNidOQM92g?dVE3y3?0_#4?1T3?rVgjAJU(n9e-rlfW{T bvx4<(U?Yj_U?=-Iz(G!Ml2csZBA56FVUnB7 diff --git a/fixture/18/6/1.1 b/fixture/18/6/1.1 deleted file mode 100644 index f99e50873dbf71af265a1db3e62e83203fcec115..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWmAWsHac0D$4|g3~5WOfzwgX{MQG;vAF5G}BBo%{V8fnP%dgKF2iEObt`R)HKsf zJbpaC-hXdsNcr$x;X&}rWB>A zL}jW_n>y5`2~BB6ENy5@99`&2ANtadp$ua<;~38brZanLU--=*{t}To2-1+2EMz4cdB{sXic*Z? zl%Xu;s7f`eQ;+&IpgApQNjuupfo^oC2mKkqKt?c@#FdR{(3{wRSr!F4TAsDc$Yp1-t&R){NN`cT6#jsLRPYohrHyYD8(pFS;`Sb zRjN^)deo-@(KIK9SlZE^ZgeM(e)MMm!x_OyCNPmn%w!g`iDwauNn|Cf*uX}T*}+bB zae#xQ^8fz?C%M2yE^(7v+~y&Vc+5*)@tRM3<_o|1!(YNO1VK0%i693#DL_FA5lKl( zQGtq7q9(PdO(Po9gqF0TH67_hXL{0$-V9~0V(CIxy3vQe^kXQ)7|uAxGlA*MU?vM#NIc6~K_cr| z&jz-!ogM6BKLXi-$biljNqkSI?x|1j?|I(0p5gUa<@UBMCB0fwuB2Pec;=gzn5wU| z?C??{MY3$a-aX`Ca*(Qsou>rD(zIE%t)NUpolJ`>(}5lgU<5HtVGc`J!xr{&f(zW> z0nbR_$5o-<`A{^uHOEmvrXXLYn5zoZq6uy2LLY`OiV4hM0V~+RE)H>qE8O7`FZgs7 z_GW}E%Oew@HwdEyRj5ZZ+R=@E3}XzFn8hMiv57q#;~dwx#}nfCW|z-b`L`>ZK7=Ba gp$3g;MJIYOh$zM}jd?6%9XmL{sjH()-~abWzXnN1vj6}9 diff --git a/fixture/18/6/10.0 b/fixture/18/6/10.0 deleted file mode 100644 index 7bdfec3b267d4c31bf91886a63cfee984b7e09a0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007YMIE2u!yqaM(iviAvzXN|^O)Bn7PXjBMjKpxl9jDuZR=Rq#x^nD*0wRh zPIk77z3pRPhd9(>j&+>lo#u3BINt>>bcHKj^8TX=05j(z~i3qq!+yCC2xAm z+dlHKPkikg-}=R`e)Eri4Pt^IqnXTXE^`}ZAq!i?2qTTMoaK$Rn$@jgeH$2OGn?DO zMBCZkZg#ha{T<*yM>x_^PIQu!o#kxjxY#8wHOV!uHO0-Qy3<|m_K=4?;%U!#)+=81 Sns>eDeV_T<7rys{AN>bTjmF&o diff --git a/fixture/18/6/10.1 b/fixture/18/6/10.1 deleted file mode 100644 index bc1287d64f27ed07244b3a6d831a6fe8f328673f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007XB6SCsWY~tJ@n`|@CjSkxe$)WRp!+ z#`Ef>PE{dfS4a^2pKNGY5ackY1uSSG!z^jI<*i^vYgp4-HnfqAjk2Y!>|jSb+0$P3 zc94S|;%LVh<7B5e)j7^J);O2A)HSYko$+op!M*Ntzlk0*$~v5s@9)0}Rs^PKNem$}?^u6Kh8Zgac)J>Wr;Og6={p7Xrd UyzUL}`@o03@TIT(=qEq>4?&^GV*mgE diff --git a/fixture/18/6/10.2 b/fixture/18/6/10.2 deleted file mode 100644 index 00c9b8abf3a39b09d4148364e1b7b8adc71b4664..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007YMIwNFd6KCI@nN2pCN1QV<&d4U4Y_iEFMTgJ*(v8r{fYdxFV%;rYh))>3k)mZ!3 z*M1Iln8O|CcqcgB8P0Tp3ti+&R~cu#8{Om%ce=|19`uk&p7fL#y=1aCz2$8m`Pe7E z_Kk1-;#X5l^^bo|7aj!Z&0sdOo5TDTu%N{)VM)tb-U?Q?hBa+qLmL@oOIz9A4tBJ= zJ?!ZK2Rg`+j&ih$ajDB(<6765;1;)<=pOfa#G@YbjAuRP6|Z{DyWaD@ z&wTC+-}}Lje)oqz4I+XdjUi?Td)vps4sobs9P2ozI?d_MbG{2)?h04B-VMgP&F$`RpZh)FaZi}!c`ta;>)!CD z4}9n&U;4_|e)6+l{N-;`4UG(fw5BtQS%ZVRJ~ 
zv7POWwVU1TXMYDc+!2m+f)kzOOlLXUMJ{%Uajtfao7`-IyWDM}hdk^NPkGuiCVSZ{ V-u8}ned1G}`PO&7H^py$_aALF$m#$9 diff --git a/fixture/18/6/10.3 b/fixture/18/6/10.3 deleted file mode 100644 index e8277427113d4293edbb2ae8f8c513e82231d061..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 492 zcmYk&F(|}g7{~G7|D2@$r$k=YWjIIozmqUXq9hq486+7b86+8`%Wz2sNd`#L70P3+0y_IH2-9pOkvInhZ@c9ye^Gr@%}a;2+W?It&yXp*~3_J9XHWU41k z^MV(>&?6 zJ>YRqnCdytd%^48@TL!Z=p$eF(pP@)qo4fgFMk^v6$BZ~Xby9l%YqiNu;G@pl#y02 z%IemzrVVUpv@L9DD?8ZHSbNyhUJi7SgB|5)$2i$3PBqRs#=FSHE^)PMTx+6R+-kDB XP4SS2Jz|=tJnbbfd&S${@vi>>>O06( diff --git a/fixture/18/6/11.1 b/fixture/18/6/11.1 deleted file mode 100644 index f58668427fd569a7d0a21e82165517e37f0fd186..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007XBv$M%24rkn%*<=%EW|K`e*<_PVoO5Io=Nxf&Mx4DzHrZr_tdN!M zdG%7Js2GwM5(NJr^JnQG_~ma?mkEM2rZtOM&1PQnncq;0S=E_|o%b6L8stz=DWS=&Z7 zwu!B5Z5un=#jf_Tul*eAFo!$N@lJ5MGo0xH7rMw8m%GCCZg8XVZgac4-Q!*ld&Hxj z^_=Iu=5=p)-v>VQxi5U_dq4Qm@BZ+osUm_P#MEXovsuh-9`jnnqJ~<=a3id26{}ju zy4JI)&5X3I?QCy1yW7M54sf6&9O)=0I?2h-a<+3^>=L6~=_*&d$<4;P!<{C$&;2HQ z+!LPkf)~ByO>cSIM?Ut6uYBzrKl#}&{`QZ5O5Tf#8QS>6g( zw}v%sU_%?(!j`tOqn+$*FMHd^!47e#V;t)^r#j8)&U3yCjCPqZu63R3jdQE|wa$=Uz_ diff --git a/fixture/18/6/11.2 b/fixture/18/6/11.2 deleted file mode 100644 index 8a443675a1c8048c6aef83cb8ddc4a88b38fa463..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007YMT%4IrHgV2b=ZtJJkIW;RY_iEFn~Za0lTGFkXJ#H5XJnIcj;xRs zva+66FC=%kwNg+piB^inZb3J`h z>~(K=(+55@#pk~8rSJXVN5A{SpN5nTg0zO3*(_!?k9o~!QHxpJNXuB(%2u(eb*yVW zV{B?OV{L0YyV}j}_OrhO9PS85I>Cufa;CGKZM+Lz3(;SWI!@VGDtE=GDtE=k|Y@<86+7b86+7b zQIZTo21y1XgCv6tl*A1(@V+9Rdi8vs@6)&M)8jEk-c7HR@NA?^Ntf(zX(mGaS*ibW zRw4&vbMs{j9oZ68q85#4MJIYOgfUEE4og_WHuiCX3*6!nulRHnPIiOl-NT%8_xA~s z-_2H>V)$Pz>dG%$Rv}wWYdxCLj&Ag01QVFS0#>krT^!*I*SN*H(@LIkC# gLLHjWhA#AB7~`16JeIMJ9US5mSB?(uVt*g_4N>1m#{d8T diff --git a/fixture/18/6/12.0 b/fixture/18/6/12.0 deleted file mode 100644 index 6a94bb8c4c874795415f43d1ef60c765d129a714..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007YMTsCoLHgV3;Ip@eGn>aJ`$R?X?vdJc!I2@TrHgV386|zDc*<_PV z*7NFxq>BmN6B-2nr}{7|2p;*^SHAX*pZ(%j|M=ISLJ(v!vsnx?w|OjV5sMmb86%9g zqA^ythBa+qLmL@qOIz8|PIk7Zz3lBE2Rp>kj&ZD0oa!{^I?wqobGa)_aJ?Jc=5}|u z&;1@S+2f{o)^ncss@J@3ns>eDQ=j?VcfR+7-~HiFL!yHqy`g3^yE)8fehXOK5|%X5 za+WvN%2u(qb*yU>o7&7awzZw{cD0**?Pq_7IouJBcY+h0;Y??_&_ynGm8)H2qMO`o zlDpjPArE`RR8M-!3tse+H@xXB@B6@qzVM~5{OBh?`^(?{F+;^5$Y>^Wn#(W?TFAnd zvb5nwS;1(lTFvU#v%U>%ZVTgVZwEWt-5&OIfCC-mNJlx^Nltc(vz_Bym$=kru63OW zZgH#I+~Z#NdDLSjd)hOe^|DvI>TU0s=3}4u)HlBMonQUtcY~N9NN0#y%xX6CnAdz3 zwV1_?u&j~BSjkvxTFcrtvawBUWoz5m*)GQ0+dlSnh(jIbSjRcuX-;>B^IhOVSGdwu VZg8WC?r^6`9`K-tO!0)N{sR|G&Bp)$ diff --git a/fixture/18/6/12.1 b/fixture/18/6/12.1 deleted file mode 100644 index d0bf64fcd33e41271955921aa3f2ad7d6d937aaf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWmCcZP3Il@s+aheqV0T8{-PXGV_ diff --git a/fixture/18/6/12.2 b/fixture/18/6/12.2 deleted file mode 100644 index b41751688caf7535ea0b181cbf5dcb1c1d2989b6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|1&odX007Vrzno^8adP6En`WBHW14Yt;vCaVoERsknK&`c#EFS>?6j$2YM7d4 znrWW5z0|29Ly4gv_&>#G;X&}+7rys{AN}D^e;KM61ZhoYRsj9hHn)W>ZEpuV8f_1II>3R(INTAAbb=F&Gu|1_bb$+9kA3B9-}u=te)W%k4I+XdgBi_aPIH;tLKe1&r7Ue3 zBdlbkHLYcB8`;<TKsY*Cj4>nQL6@I=8yb 
z?e2A-`%U(!$2{X%&w0hGUh|H3z2_63`pmb!^S$5v?hjK}3W79-%xo63n#a86v#7-k zv#jMTZ)K}k)jHO-o=t6LbKBa^_I9(o(e|^y103owhda*kPH>vjjdz~&UEp$8xYG4* zaHBii=`Ig=&_kwp+!LPnf)~B+4R3ni2R`(LFMZ`lKl#~T{`QaQB7-2k8O&}Db6UWH z7P5pTEoHbBjj*~ktZ4%q+Q^o+GRls2va>zyWp85~WUM0{pT2R!}V->q=aWf%8_)%9m{yAApV|M zQvOWj0{K%NuU<`Zmh(48-^o*)W@%E-apaR-RU^CJlHH7UbfX^;jA9bASi~x}u!m!u z;~IB(!keWM!+lpIu{h0Dy6oJ1*+o2+A&goyq7|L!#UO?;j%mze8SB`=0Zws&8{Fds zAC`(!f&s%YWoJ_R;72(sQHLh9p$mN&!U!fXg9WT$1G_lF87^^)N5t{PE^jFOvn!jr h5CH^HjRv%!13ef(6l0je9G0+#ZR}e*JPG}O&^IU@NzDKN diff --git a/fixture/18/6/13.0 b/fixture/18/6/13.0 deleted file mode 100644 index c4a9e86d15992f7cd39803fa32ecf5b597ce86c7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007XBbB=7X$v8*0!Mw9Ra8tZi*)SG(EWzV@@f!yN7i$2-A^&Tyu)OmKmT zE_a1Vu5-N`-0lu{y59pH^tdNH>3J`B(Hq|MmJfa8V_*5&H-7euU;X1>gUUe=W_mN2 z-5ln$fCVjNge5F#1uGh94QpDYYiGOI)!z28uR|Q_FvmL1@lJEP zGo0sq6I|*tm%GNbu5+u~-0oiYx!&MA-F)V^fZ-N5!g7|kg4L{U4eQ&$hBmi_F}Ab49qeuo|!924i5JjO9h4O7!hGtD&9 zOg?|SZ}3v5ib%UNZ4mr-lyeb5aGncX=LR=^4yUi4-VgBikT#xRyCOl2B#nMW*f zEG3@RtRaz2Y-S57>|!?uImBU3a*ESj;xbpb%^mLYgr_{?E$?{GH@@?ORF#7uHEGC9 z7P69y+~lD!MJP&X%21X_DpQ3z)TJI#G^H7BXiGb~(3NiVr62tn#&AY3o(W831~Zw( z0v57}1eUXcB-XK>WVW%LJ?v#4M>xtc&Ty7uBRiqwB_H`IMsdO^M|mnxjq22(J`HF{b6OBhdpZz9cY4s10SsgiBN@eLCNY^Q z%w`UAS~ YV>&a4Wj+gtXBi13vX&&au$5%~0ij&e!2kdN diff --git a/fixture/18/6/13.2 b/fixture/18/6/13.2 deleted file mode 100644 index dc345659c912b078a658b55ff8e07d4459c89d2b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|2Z%@k007YM9NAV>3>2;ChT1pgoNVnh&J^0K$Q?HwQc#HYUTt?&HmH@_QH3xafpn8mDSGq3r~ zZ&8aGZduD&-UzE1X&vhtZH!HfwY6<*Yn)x|YH$14*TD{PsAC-KIHx+z>CSV$3tZ-M zlU(aM*SpQ_rkLtJ_nYQXk9o$kp7W~LyzV{k`@rYE@TDL8=qG>r%io4Z20;chn!}vt zGR%S&vVg;F9wyk|0S)6P~#@NzUwziXXYDNHRz=NHR!zBuV)mPu+X!)A{#5=XCzRfKhGknj%^_AW|SK_H^E5jV?3m z)>l93Ix6b5RMeLx@hm$g@4S$p{ePQH*+ud}Gv@16*b))tG>B@lM75y{eHg+RCXvEC zma&d)9N+}!xW+x6k;ae3tU0yb*CAgW)itx0F>4i<>(PP^^k4uZNMIVXSi~wev4=yP z;sQ5#zzaUfZa5+1FMF=z4N&N0vKRY$<&Ms7lf99rt>{EA1~G~WBr%62tYHiLIKmmO zaEB+n;oFjuY}Ygr`ty8Of}$c}QE`qcQHN%@2Dq!guzq!Lloq!zVlL}QxJiq^CtmdyLIO7=41g0~CnJi!-i}(kRCfBY2 diff --git a/fixture/18/6/14.1 b/fixture/18/6/14.1 deleted file mode 100644 index 19ca505c11435ab9be0e0525347c03d6f0a56ec9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWl|1&l}m007X>9mg~iC&tO?c8+N#kBJl0Of${IiD||;F*S^1n09iSadMi86VpsH z9nafd$`lnscZ3GP|Hn+M5(K;5<6)0@)FjV%)+=81ns>ctvQK^HbKm*i4}SNDKMjct zf>2YL!Hi}yr@0I_!h#mEq@^rvWvduzEo)oH#x}93t!-nBo$X>*``Fih4t1Er9p`vs zo#u3B80UQBUFLFEnBY3syVY%Occ1$`;BilQ((_*MqBp$hEg$&MN51rxul?j_zxdlf z{xz&>5Tr4!S})T4+sDBU zaj0V)>o})4)oIRko^dX5smomBS`*yt7Pq>`z3%g<$2{&?&w1W!UiXH{-uHpeec?+# z_|Z@P^q0R)RV@fo8)hamo5gT*o5w;Hwuq%IV_74uYLs=XYdxFV%xGh5YdgEz&F=QI XzXKfZ2uB+01SdMfna(oa1upa-1FP9I diff --git a/fixture/18/6/14.2 b/fixture/18/6/14.2 deleted file mode 100644 index c7a7ba29a99984ea3d89a04de5f1cdeaf1b0a463..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmWmDcZZd!x86<%p;r3Bb&@4^T;NfY_dWeA&!i5#No&$n{2Yl zCj0s0eT0`jT~tU?ND%y&%KmCWaDZe|ImU6$a*p#{<2pCE&jTLvf|tDFBcJ%pFMjif zjMal66Pd|LE^-q_AqrEHQj{i=ic}(+8pKea1~jBOEoeyxI?{61&+$3WqpM8Yel$MJ{ofTioUjk9opV-td-peB~S8 z38I4_9q9=rE7{0PKJpVzQHl{kS;`Sb6{=E;+SDPI#x$WdZD>mzUFb?5`qGbhhBAx< z#xjnnOk+Cpn9l;1vx1eZX9FAA#&&kFmwoK#2uDffG-o)=6|QoPyWHbG&v?!Y-t&Qv z{NN|Q2&oYS8OTU>a*&e(6eNt|l%OQ#sX!#vs7^F>sYiX9(v0S`qdgtyMt6G9p8*VH zI3pOzcqTBB8O&rB3t7ZsR|_^7By*4yj&p)E&U1l_+~6j+c*r9j^NQEJ z;WJBC@# 
diff --git a/fixture/18/6/14.3 b/fixture/18/6/14.3
deleted file mode 100644
index 04f7674a62796cb8ec4c11e5b4d92f4ce56fbf81..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/14.3 and /dev/null differ
diff --git a/fixture/18/6/15.2 b/fixture/18/6/15.2
deleted file mode 100644
index 069715943192394cddb3f1596391b06b2b04055c..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/15.2 and /dev/null differ
diff --git a/fixture/18/6/16.0 b/fixture/18/6/16.0
deleted file mode 100644
index 39522d6a3bb22dd61d7666976956d44942663a78..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/16.0 and /dev/null differ
diff --git a/fixture/18/6/16.3 b/fixture/18/6/16.3
deleted file mode 100644
index 63b1ae0a08b65a4862eeb36d05418e52f2d777c5..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/16.3 and /dev/null differ
diff --git a/fixture/18/6/17.1 b/fixture/18/6/17.1
deleted file mode 100644
index 441a53f8de3e65760879d06ec234e3675a2b3c81..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/17.1 and /dev/null differ
diff --git a/fixture/18/6/17.3 b/fixture/18/6/17.3
deleted file mode 100644
index 0091099f3ee8829d719787ad0bc1d87c0f1239d8..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/17.3 and /dev/null differ
diff --git a/fixture/18/6/18.0 b/fixture/18/6/18.0
deleted file mode 100644
index 52661be3310ced649cb267c7c1b932e4e38c9bd0..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/18.0 and /dev/null differ
diff --git a/fixture/18/6/18.1 b/fixture/18/6/18.1
deleted file mode 100644
index 2d7e1a698927e8bdbe678154d5bec9d42b6565ef..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/18.1 and /dev/null differ
diff --git a/fixture/18/6/19.0 b/fixture/18/6/19.0
deleted file mode 100644
index f235c049df231c25b8fec3983380fb6db12c2e88..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/19.0 and /dev/null differ
diff --git a/fixture/18/6/19.1 b/fixture/18/6/19.1
deleted file mode 100644
index 4dcde71687df5f6aaa9cede18abea0273df6238d..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/19.1 and /dev/null differ
diff --git a/fixture/18/6/19.2 b/fixture/18/6/19.2
deleted file mode 100644
index 71fdd559373d57bd4351301ed96f553b3213dc75..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/19.2 and /dev/null differ
diff --git a/fixture/18/6/2.0 b/fixture/18/6/2.0
deleted file mode 100644
index bfc324060db47ad6015557775fba4c42a83244a8..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/2.0 and /dev/null differ
diff --git a/fixture/18/6/2.1 b/fixture/18/6/2.1
deleted file mode 100644
index 2448c942f8fa8aac45cc878f362f305318092f50..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/2.1 and /dev/null differ
diff --git a/fixture/18/6/2.2 b/fixture/18/6/2.2
deleted file mode 100644
index e20c304d34eadfe3f1874154186993e070901f71..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/2.2 and /dev/null differ
diff --git a/fixture/18/6/2.3 b/fixture/18/6/2.3
deleted file mode 100644
index 1c315b6268b4858880800e734cf0e1a5054847df..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/2.3 and /dev/null differ
diff --git a/fixture/18/6/3.2 b/fixture/18/6/3.2
deleted file mode 100644
index 0350903bc6912e415c54643dbbec7dee2346d86f..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/3.2 and /dev/null differ
diff --git a/fixture/18/6/4.3 b/fixture/18/6/4.3
deleted file mode 100644
index 37f68c26aed593255edcc9c65b2342ff63c975dc..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/4.3 and /dev/null differ
diff --git a/fixture/18/6/5.0 b/fixture/18/6/5.0
deleted file mode 100644
index 77680f8fee3c1fe6d593f2f1833f06fc8436f15c..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/5.0 and /dev/null differ
diff --git a/fixture/18/6/5.1 b/fixture/18/6/5.1
deleted file mode 100644
index 8ff39ce121a27371025cf00a150775cd81982704..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/5.1 and /dev/null differ
diff --git a/fixture/18/6/5.2 b/fixture/18/6/5.2
deleted file mode 100644
index 7bc617aa1c2d3d64f622a28d58deb64416edcd66..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/5.2 and /dev/null differ
diff --git a/fixture/18/6/5.3 b/fixture/18/6/5.3
deleted file mode 100644
index 1c9091534bdc668ae56dbe50ad454ec20afc20de..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/5.3 and /dev/null differ
diff --git a/fixture/18/6/6.0 b/fixture/18/6/6.0
deleted file mode 100644
index 9d73283b62f586e2bea204f090f82805a8cd17e0..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/6.0 and /dev/null differ
diff --git a/fixture/18/6/6.2 b/fixture/18/6/6.2
deleted file mode 100644
index f89d3012b94e1ddcdd395c7ca26bcbbf828017a5..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/6.2 and /dev/null differ
diff --git a/fixture/18/6/6.3 b/fixture/18/6/6.3
deleted file mode 100644
index ba2783f86913c273a9e1a96cae53587b5046a4ac..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/6.3 and /dev/null differ
diff --git a/fixture/18/6/7.1 b/fixture/18/6/7.1
deleted file mode 100644
index 640599b13c1401482aad273b4e3f7316def466b9..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/7.1 and /dev/null differ
diff --git a/fixture/18/6/7.2 b/fixture/18/6/7.2
deleted file mode 100644
index cadf1afc3f2a585c166eae0b179d27e09a77ebd9..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/7.2 and /dev/null differ
diff --git a/fixture/18/6/7.3 b/fixture/18/6/7.3
deleted file mode 100644
index 978b04b67ca6472d28ecc02b32244445d047bfde..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/7.3 and /dev/null differ
diff --git a/fixture/18/6/8.0 b/fixture/18/6/8.0
deleted file mode 100644
index 1e18577531174f52cd53f6089a010c507b89f22f..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/8.0 and /dev/null differ
diff --git a/fixture/18/6/8.1 b/fixture/18/6/8.1
deleted file mode 100644
index 24d720bebfa0ebf9dd4127e090685db122912b49..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/8.1 and /dev/null differ
diff --git a/fixture/18/6/8.2 b/fixture/18/6/8.2
deleted file mode 100644
index c62a624d5b2a18b5ad2b8abc74319251b97c3f41..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/8.2 and /dev/null differ
diff --git a/fixture/18/6/9.2 b/fixture/18/6/9.2
deleted file mode 100644
index 00d04c97a59dddd29d066db43734aafc6a8f2b26..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/9.2 and /dev/null differ
diff --git a/fixture/18/6/9.3 b/fixture/18/6/9.3
deleted file mode 100644
index 8f088400e1823b8ba4386ef5c21af8fada5d86cf..0000000000000000000000000000000000000000
Binary files a/fixture/18/6/9.3 and /dev/null differ
diff --git a/fixture/19/.zattrs b/fixture/19/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/19/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/fixture/19/0/.zarray b/fixture/19/0/.zarray
deleted file mode 100644
index f439a51fec..0000000000
--- a/fixture/19/0/.zarray
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-    "chunks": [
-        100,
-        30
-    ],
-    "compressor": null,
diff --git a/fixture/19/0/0.1 b/fixture/19/0/0.1
deleted file mode 100644
index 00f179961259f595eb60888bf6483303065f5c5f..0000000000000000000000000000000000000000
Binary files a/fixture/19/0/0.1 and /dev/null differ
diff --git a/fixture/19/0/0.2 b/fixture/19/0/0.2
deleted file mode 100644
index 56493b6f13ebf7bb713f32c54b0a5bcf9a2956be..0000000000000000000000000000000000000000
Binary files a/fixture/19/0/0.2 and /dev/null differ
diff --git a/fixture/19/0/0.3 b/fixture/19/0/0.3
deleted file mode 100644
index 8a794e2ecb25412898a4305695fba090b28a4589..0000000000000000000000000000000000000000
Binary files a/fixture/19/0/0.3 and /dev/null differ
diff --git a/fixture/19/0/1.3 b/fixture/19/0/1.3
deleted file mode 100644
index a752ff7577ab01cbc306aa5c8dd66be82096a0ac..0000000000000000000000000000000000000000
Binary files a/fixture/19/0/1.3 and /dev/null differ
diff --git a/fixture/19/1/0.1 b/fixture/19/1/0.1
deleted file mode 100644
index afa407c733928fca5ec2c168ef2d58b65da34291..0000000000000000000000000000000000000000
Binary files a/fixture/19/1/0.1 and /dev/null differ
diff --git a/fixture/19/1/1.0 b/fixture/19/1/1.0
deleted file mode 100644
index c112add40da8f32512bf1611caf1ec8068f7fcab..0000000000000000000000000000000000000000
Binary files a/fixture/19/1/1.0 and /dev/null differ
diff --git a/fixture/19/1/1.1 b/fixture/19/1/1.1
deleted file mode 100644
index 10c8d7971c0956870abcc77da30923484642f2a2..0000000000000000000000000000000000000000
Binary files a/fixture/19/1/1.1 and /dev/null differ
diff --git a/fixture/19/1/1.2 b/fixture/19/1/1.2
deleted file mode 100644
index 87417d96527844a2c54e855e21afb07dbd5b07ef..0000000000000000000000000000000000000000
Binary files a/fixture/19/1/1.2 and /dev/null differ
diff --git a/fixture/19/1/1.3 b/fixture/19/1/1.3
deleted file mode 100644
index aaf0be0deb5ec77b32aa94907ee0799662c01320..0000000000000000000000000000000000000000
Binary files a/fixture/19/1/1.3 and /dev/null differ
diff --git a/fixture/19/2/.zarray b/fixture/19/2/.zarray
deleted file mode 100644
index be8c857048..0000000000
--- a/fixture/19/2/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100,
-        30
-    ],
-    "compressor": {
-        "id": "bz2",
-        "level": 1
-    },
zETWMdg~%s{AT$xB#lJ5>C>sTx_mv2W_@4vjcM(Gfg`gcAUZ9f3I`T;)0@Vc|{3j@x z@9tG8>7+qr88%Su8)+=V-9skR;cH*>imAg#<@v^h0Nxjov^XI^K=ZbhAzr=_QAg7P z!ksRaDu8$B$;*BT7vi%8#Yh|$gK zjg>3IIzOTi%Yac1jm5Fq(u6UFvcU;Mn6tykSj5vYaRIdjp|e z*pD+|0v1TUSYv4lqlr!!hj1PjHs^)?3V;|NNvsiZTZs<|LZVRlcoi$-L2Mi*y68=f zg*^g*ag>4rVuzv*u*bHhWKu zA~q-%J{*S(OFWAx0yr_!SO1BV9C03ddFtSaRG3Ty=1V$Z+3A{?)3Em%?fpf^Qp?&i z51uf!H~k#4sD8a!yQE=#r(XSgPVCMP8*{Q=3X4jV1)i+!7EYa~-TA$}CupW!`_rhv z(V^m3Cqzb1nby+txcc(kEAL!QzD9AQ1>+SLe&z2S6Qge2u-$N?;%ejHPe+NP-;ntn z-BNXPbi8scJu794-x$d!oE2OBMbc#-Xse=1`F ze`$J4THiB!D_k?v+TaY&>ddoM4Cn@02E(^K&?Q}cqEBpdg&%*Er_XW>)V{}h5IZuo9v0Ty)NP0n|-c%3kC2lvSe;h?kSy=qTKV##%9|O9nNJf z6?f~burzSA8xWG+WYd77B=+f*>YPR>yq}hJ=NK)-Gln1hJhiytq&I)alZ;( zeTzgXofJ$E|4JaN#l|cuzjIr~P$seQpKXcyGv-y9^=B@2G+MVdGt&tV+21z9E~-v! z#a~qJ()A56mNaVE)W6Nww%GJDYS-<*Pu$JyDp=ezpS}|sw_~}eI&KG<9gJ@|P^`40 z>276Cr^C7$1dSom*dueAPSP>20oxj9j^>X6nz@qXPU4-NBXO6H>j4jZDS>bxUrUDr z*1oV{9V8BE(HP{8$ZA;4y-_0V?q%GV>_;Ym_1!b<{PLB}Aset04um*e&!cXm&gggA*6er)ZrV(ruvBuS5)LcZT+l;Z}({j;|wnr%nyk0?k!#nY*kclR#d zOq%Ri9I>LxQ`pHrHIyXw*^8YhLfV`TN9OI}3N7(ZVa`*B!%>;_o)tm%F37NMVM@X* z>V2B0R%#m0cpC|RHh_Gey|B2gp_y9IgK<)D;j-V}6&}J8XH#P>H85hz-C=9reqFH( H|C94CK^#0t diff --git a/fixture/19/2/1.3 b/fixture/19/2/1.3 deleted file mode 100644 index f6cf02426b4a3c4fc9134e7b6d3e9f6f50c871e6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1150 zcmZ>Y$}lu^j8qGbJUmUTih(`#KS6MTHSUuJM{s{=)v8tNoP2r>C3+Z*QZkp#P~&V+ z^O==olw@*1t;a`=G?Fq(O8Eb6YKV`S(aOaN zMkzcj7aKBq4W$g+9FCk^Hlu!3Th@yI|FxF-J7pa3@iob8G+^*)bDYcobT?JDpQ*;6#=vNx3{q}1%b?j2s8E;zC^Ca7 zDS?5rDYJo5Dp6tvNONt31rY4@4n3OmQ33>{wrRG3Bp4V(O(UL$B7qQyFask@EKDcK z0VKw?3JNwd^RTINb8~Yrb8~Y#Fo=f2v;di*p@|@Ms|5@Wpy=5E=7NL}=IjS^!I~g; z^)!G)IGJD;L1?fbNCN0J8C0nSU{M&sQUsOV#Ni~+D`k>#3dG;Iki$u1QI}x-t+RQ% z<2T%Rv#r{9-R=AxciwHUKHWWgp?Om4y}0}F4-(o8G9EtRn4}i4GU*x5EVF`_k6#EZ za+|1?@=9b?*v71ElYob9%@i9?}@0~Qhtv%55JLX6bTu1)NbvZ!QI>71e!v9npNcaBxXFBXl7 zOT1=UwQ5aW6ScFNO=sqotQ!uEyxVT)BB4Bs$9*8={2gd6W8WkX2A*}HNDc+Lm}O_t z)6>)SG!}JNfGrnNfV+XgRpiRa$!hL0g~w$I8$iMWU=qXvCe%XLnSBB+iomF90P%rz zGqU9%_Hm#fP(`eYirzes8V0a(Fbhm@N=hv10>&-OcCc|k8I47U&I`fAvmXe)JQNA& a66kJy>dLXGMN#~p-jl9^g^KMuI$r>b3$P#n diff --git a/fixture/19/3/.zarray b/fixture/19/3/.zarray deleted file mode 100644 index 7fa06a1c0e..0000000000 --- a/fixture/19/3/.zarray +++ /dev/null @@ -1,21 +0,0 @@ -{ - "chunks": [ - 100, - 30 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 0 - }, - "dtype": "JT z(r;Nat_Y6@9?c%*!7~f$mQU=M$N3HM*WRS&cq7UoGfZ{MA^Xu7_I?YtI9I#LJB1$1~o%vKN(qTK-A-)A(P4r&?IvPs4jR^)CLx z{A2yi+c)3a|5kpWygj~aOomu2xQi|p56WB?m+zq2;vZ#3OTisew z8O;Ta=q&LW*`Rs(7Mdz9khPi%ZlN>94B18V@=bJx`0p}?x!@-Hr1-e(ka-zElf_45 z2y+2|J|;dSb1^Rq(dps?GNY+Lh(0QgmJOPgZ=ln}1=7~}g3IVYF++NBez^v{UHo?` zW4@pUy-j>vdT4(6653CEM2eU%xP?=Jqw_J&K6>pIu<_ap&&SDF#BICD{uO9Rn z#h+%}fB46dKHsd|jOmk0Th5kbWoG;fJ#n%mIjcn6{*Y`YHQ)$(AeKB&s=~e30sF}> zNyWG)Iv|ETL!#qKbO3?;GwB$PpabyaDN-Wty%xZc3rRiLC!@d)@+s0iY{@7PNX{iS zUk!fIm5tM8}ql0NcnXNyo5+5dca)MoPrK9|ru$M@d-h zlVQM@oJtDCmJ9~({%oeH9#QHmt5!EBSp&{=Tt9ee zHqcZbQtBdeRW~PC*NXFrp?*i{Gimi`GtMe1?%*HHnGIa14=5d!x{fyQvz{;R;2osR z&KT?cOCL+CN1D-AN^!@vgC}O2jP=mcR;lYqbGS9OxZ~E9FY+0Kp$_oRPIY_Q?SguB!45vy$PG{W%oW@53eS0hBFHxcQ!vl?Mo z`c2|78(t$srZ*51ZST~=UFmg1tgTlqM5JFOhT6`mg`w#+L?0VoEeuJoB0AgNsf6%! 
z*YmyuWTzIVmFFryczQYU)!P$FKlgM$V(JQ1>F1X2O^jQ4KH%q?4kGSZ0S5eB(l-$U zS56H0Ij6f4Jy-CHugYyLN77&V{qNzgcElH^{~}!4Q$E;oDE*s`*P5^XB$l;22w(nw z6(3&_7rwYayIadC+R!P=_Agp~rQONrurBC2%LpBopJ=yJIRD(xHkaY@E#tJNa8Awz zZCx2Q+%iNHZQ^8aj8~NHTQv948W+#dE{qqJMRu4av|Fk(l#Q{hGQWId8trO$G4jH2 z-FcsIV-oGkrs9x|p^Ed~i}MF*wTnft3qytHy*lP`w9BfZfQ`Yd^Pv2BG_9ti2zp`g z=y}iZ`CYV2wMAPul=0^`Eza$vRp%FhFDN6=yLZg_(JrPGZQd9NI`5i42clJl7r9*+ z@H+1tKIclSoLDdY_qU~spZ&S?>l0o}Q2Ej~U;MBkTm1R^(vK>b@%*=#r$ZtLGmbxx zIX5KoViNdMn3y3En2F^pG4??bh#AT6!8{!lZDH==-@}|66nQek_zjqtLD6PrD8CA0 zSBf?kqGACixrU0laB-TTLRo5jJ zf*H)o9o5eRfYA`VM>AH&e0|x%yVF(J-@86$0p003 z-T!F)frWRw>r{W+`ZWvac2|D?-Sq+suc<4yA6P$P0X20o`}ym8Ej&>dz5mks77Iw! z^+o^J>&s?dV^>!H+4V6qsIiOM&su+A=H2Sb?9W-hW(M8r%IK%97npfByH56>SU+M0 z-RvUwlh*f|c|g~({=@5A%pjmEy`Q+gY~l&Kj`ri%$4nq$S6Y9>`U4a1MptS-a{ZbK zbfYV!AGTg#;x%+7_e0l@m_Q9(N&VpUy(ZrEuEc(~^(`jQ^{#||$J+87ue!^tAHOy> z2deG@^+&8dnB!gS^6W>hU7G`4?Ap{1TPv93Rdu=dL)VVXfvUP(`@w5_=XjM}&i!s{ zw^!d+lzs7SmzDP)L`7Zy(|v!Wy}MaK|Ki@h$rtZ)D=PYlU;MoN{m<*S`@g&?N!A-; zsYX_f#9eRLPyLNmA{o&cVyM5cawRo71A%H_os=Z&40!4@RL6*D3U2E9)7sB_CN5&0N)E`;q$2H@INa|D8$)4m91C)B4 zmE7Y#V(_DW#v=5L3>$o@M_6G!HNyrU>L;viJ;}p{t<*y-_a66QgE#df*8A>}Ap?YZ zfMxEk88UcLV^}|TCl49G)M(a7xBHL*M2%uS=^hz0Y@zODecN3#Xz-+>SU0J)$&tPciL9;|KM$pZ!_stZdu^XF4}=d;g$7%6Oc`U&;dTTjD&8GG8@IqCn;*MADt z@WUH0o9cQ(jU&`F5yEv__fx|^-f+BC2@+h_c2y=guenwxR5<1rCW5)|)`TiJabxza zsy?B_ac^M)#xVPXgO`I>MPj^DWZR>e=qb%ZWV1aODf{6^uJ zjj!LTcnMjKdEz4-|+ zH)qXn2p+H@Us2U0L^|m66I-|^rv0?R*>=ZcRb#y3&3x`eRQcH%;dn63KKod8EB?uw zd$|)4l z$0}ib^PBmc2~;_LMyLrU*=dheH{x%;xtBA6EJx1xX@Wm>5#AED4Ywgtq1abfQa>T> zW%&9CFN=`FZ6PRU?5o<;k4VdOUvJ?#QQJ@(3^k8=bvZSL^nmW`CCn2chuQ*AO3bU8 z)F@H`-4`VMT+}w$21VV&ytQ|7mcdZK)xpo>*Y=*(LBVYg6Ap92{y}x}I{GxYy(p zD)1;ns_%as>}*>Sq+B5G7}(`Z+%mAsiMYJr0~grOx2f(EgXiBa)u)^zK3MPx5?Ib7 zRrh0qm2a2oQobY>Ecomc7|yqi-p2>ud%MI>$s_Jv@Cg)*o=1+}#{^fsUE-zugSc$; z*)Di`zHQ`wL@@pB(zTS&i4TlE+XU_Bkt4g7k1bBUTcXCXP4c=`l%5*Po|9LsT6EO? 
z>{)r?YP*ga!=9FBt>)>d1om_J(N&a=if2#C<5w+ODvqs{N3OPOsnKk;JZLpfOU1H> zy!S0qTY?d)9n*B)L zX=@*&?qz=`Z?@%)QTMRhg_*o!M@3uNBn5 zuXlg=x2_zmC}Gd;d{y}Fsp1s7^@lfJ6agQmr!Kr&s@#xyhHfX@HF$MyvREQmO-2;$?LpLC@{2V3iBdw6aHyv$>ij9HWBg+ zw=&6jkxhggLwzPLuTw-|7%pe}F#LnUp?B60{0*N|$n?k>0@Uys1xN3^MDR6y zLh++VULtHYd_-Bz?yM$23^A0SvLmYrU_%r|lHGZcu*HC)2(u$E5;hxxDV*%iDuRc> zpF+-#tRlD>AQW77XC=YK;6d@rF4X_>gQfGP;YY%kb-givhiVX?`o}+3Hze0ZFmk34 zJZL7MMjZAJdaS;lToZxFnF`^#WkS`6kNrRNs0GQc6l7(xGsUekxgx^IoC@Z>qW~%d z(f?(Sx<0uiqLn!X6(ZJO(W9r*`n3Q-Dzf-oLp=%}>saKrp8Qc*7Lv zC<5bO(WB-iXGItpQ~tbO3NV6*@XzW|UrRn2(aM;D^4ciS5yWo)_#XAuJ4`bu(g1cEWOm3N5(9Y%!sf9O`%CdWq@=~EEiSqd2txKZ-g497h*^KVHq z^*M1oGx(MyLr;r4ZRX#UoYbF)OEZISO2~Rr9NNqWB**lJ<3MIGAW7E~<6fKiLdj7* zK5pCu7E03e5pfSq{2P)~Ju;4G0^g9N=wWfEP5g$xt^qVI%>-_cBs*qsASPI{Ljk4ni$`nA|!7f1pLQNI$~x1g<;9MCVt-dP~kOJenNvE>We zI!TOvI+nUXs*||rH^=UrBUMV`@_%nC{rtddy%hIsW{Gw1C;j34nZR@Y2V9%8@(nYK zTF&Rq$MTycuijANm?nN54yC8WGUxaeIE#+5pE=7f#I@@vG0bUx7A{XmAuylwkK#}| z3Z6O1kH=ZG6dY5_kHocWDbY+dKM0qnrC^!Ed@meIOTjP)_#dJz_?_tYv&5NXo^$a~cj=SKvy1HEtkLCXEC|iA@ zgYW15>NvI9sDsCFUpO*XGjwnQ_a{f{>UJF*&z*3@t-jF0aolmop4CPzJesR=1g>Ui z;aKjF!*g}J7LMWmk7M2TVjRAY+vj*?YaEA1aer{k*fPf9kz9#GW7|FskKlGWWVRP$ za5VR!<9l1<7<@0c!_j2R7=!QO-gjKJZ6Aa0=Kjl3W_zK5hjZ^ZPT3kYa1^)Ek!j1& zz{9vgN2+bR29D$k9C5Z6gK!WR?f8DBaS*NZ zz@53S4$qbSi~s)5>c$c7OZfk${JHZ(A@`SmE$#cy(8eL|H~;f`;?Ms$$~GP(EO)K$ z|L_u>FSx5HwhzyM^1bgW*mn8M>pa07MWMZSW<1aPj^eca(aggGZVSM0OPr(ZJ!JVm6vcY2)R%~PQ5 zkES0o1lJUM>}}ILhW9l^xc%<*X@=mcBFqj}RLP>E5-9op#prL%QIyBG8^Q&7*r?R&2M^rccuaHHvNa6VqvQ?;3@l zoivT63oa>q?1!g8bni4({ZiwaLWa+;UzeNnN= z4x2umEvQns+o98G+1^zOS37tboh_(TINRN%fF_I-m32mQBkd-w)$|P6h?GG!VV_3D zpV2g=)zKKHXNX3q3{Zz1jtV-Xxt>--Lzte0G`h*4>adTaJ``&NX|7UaWty|ptun2G zW?Xm{-1trksKSU*FN-zxX(hDQg=ervlN727i;YqgYwFSpX^e$uI~t!$fzdF0RCBS0 zpO#BQEIbQn9F{^y!!S`5#Ts5(7R_jU=HJ*W1xCUmqOyuL*V0bXT8+=3jcrosNZ9VE z_+rh~w4*eJ@tJQUAO(iQkWoR!nk#9^G=%Zl*2YUx=x|s_)Q2KXZCX6dIR6aNcvi}@ zKyLTtTkpzFTk4zoa;<rraV_Nw#w549uN4H**fh-VF-xt=eWv|WkjeS|xv$AnB zq_K}`Wyv0z>u>dCT61JPGvroZhLt8eZLYuBchY)7mS%?B>?2!AGPJoK=sRXTECZP# zKwr9*D0^+H7xo>s;$`C|h_Els8X>e zB)zlnS$&^}H9$(8`>d+Z&AL^3a86&@=VIL~-8uJJWnWzB@6DwdKfbm~4?cUj^kv^q z*2ASULFcyr==w6N)bMgq&&hasthBlBm6aC9F=^{?;d)vuXHHvzTh!6^b7r-LxDFjH zhBK|r!sY8|1kQ8qQCzr=hUZLb<8g~x8jhpYM&dfOv}lf68-&Z((y*Litrsp_OT%ym zv>&1u$7%aGa_t|{9pkhpj#T?HI)9uN$?4VVqQl2&5u9$VB6@L*hUPrdc1CxM(e`q_ z(>6!vkI{TMhqYeV@L}3k&d1shn8hKQH;1VG1Jf}?gK%QCFERN;G%pTbtHXp3(ZC#x zR)JX@q=7gQ+D=TzAZ-h0x3(FRKS=ZBAhmUv@Il&UPKdSwv#6wP;=r_pm<}b)gA<_5 z!sIJy?i{H0C?;GYd|>vjjIFOgjXTsP$O8+`cyXQ4DkqT@>9Pc*%-&S3#Fpo5o9DP7wgGO%^*Ey9N#AVPJ|yp?m;DE SJ+Gyvlg==X`w2yxo&FET)&eE~ diff --git a/fixture/19/3/0.1 b/fixture/19/3/0.1 deleted file mode 100644 index b5078d22920edfc226223d4ed961002ada3cca27..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6864 zcmWmH_g@kSJeg1~e0p)U)WqT#1|2Jc(q@=5)q}21@ zTD>^;@5E)h+wYVP%bPQdjxF;ul?MLvpa1xnR4>(BOr`zTg$X-IG1jzA?N>FDTEe^P z4^KZ|9@JM&I6(~F(i_xQNjOS`ZoM1OKbW9NwBD*5&_9rHhzQ)eJfN?faFD36r8l6j zlyImLQo{0G)6z+cbSoGDei517RJ#o+Vfwz;dYc;QQqT^(B{I9Fb{SGq>pQDOOO13Y z_!am?XqKkd1XeHeP1ABjM*<7R4V*^Jz-mQcb+T`Q)=pBSenF4H?;~b*YAIlKk}pE5 zGbvK9pw+->#LQYP0IdGNH$n@O6nUb6VemWO?2OtOuzIO4Ov^DT@_0dsffL`%Tul|M zUgGPcwS$U0T99M#`>+{EP3oXt?CYx4iHbZ@aMi$R*bJyP;-G%t_ncNOWW31NPAdsD zbGYD=LB)`nuG%Aq@j~A-S|?F6>IMD=x8Rrbf20Z zgX`00?%D3cCrmFWLU&*^AL@_1_32ahjQ3~9O#>CSJ20wG=?~xaY4dwp`|xqo^9rpU zSh-K$59Re~(|bSmX2(pu6|!v@#RvamaBUiNPqqgiGj&%8w_y)`vVY{QO&i@?+M69U zbx}OqhL!oG|L|R#2Hq3y!ADIU736Ig*$4GQd2L$%-pjq&5mS3b<~EGvgZweLI<0r_ z*&ckv^sFLu8}`5_>POz{^oe_{y;;7gg~E0lR_YV{!*_N1_&xF-oNszkp|uSw@$vnk 
zygGgK9&uifs>$SB<33w>l1i%Oq;ne=9;9m4a?-f0g^W~E4JUqTxL@W4XiW;ogDacg;fBXHb8e8{pS^xPb`g+%?f1&-sMvbWjQEA>;?7OP8 zr(Kr^0jjYlGK1Uo1_3IuM>C=A?*;$|V>L6a+bahE2VxIp0^2VS0F+}7W@@zS4FHs4 zp9^>=KV-0MvfGAu2R>{vZEm*p@>Vx8m^RnjI(b7Io3%FA+Me+0H!^B%(%b5J`5T)x zHfe3Oyrhkc8k>|h8t?qZCc_5Z_JDVCBZFasYRl&x*x0PLNoc#nTa{*1+r+nFctg_7 zDx2803|_r7qsj)^mc+}KZdTeL+GsuJS2s&+PPIMgIk}oqVq@Nx-*aGfv)IP8?M~0? zN=C5_s14IIw6b~M#;7f$r+y{lz74P~sV9GBv*;-lVITJ*Yh>u%VZ_F!pH6&HTbs?yyAOiiCvBdHM^6v8S@qN|U&w!|-e%TwXIYT9s8P%9ZV-DW-&P^TT6xUb`PVEGb?& zG9H=&B5cn4jnzSS$2%+^dS%J*Xo?YGaejET&Uv@B!?Milstk&z011M5ztKAIZb^qF z*(*`DpG46o^v(~D)Y)oIZ%LiVOGJF!{1fh0*m|u5h z*Q&#^)GJ5^O`;qpJec?6*O~9CcUYEq{UqB*QH~Ps%nuLOfp$N(TNZn{$#^Kr5kkhi z-*6ppcf8&5zL$dxkD6wA*~`*VM>Ppo<{O6Uba#Jk|4`^Q;G$@_2!)5Z|lb{&65Cg07=vc~=PTAn%c(Ve~75Os^tkQ$Mfh&49=DZp8#Euhhktf59(6h`qU|$F>iJp z6ezdFz^ERjOTOz<<~S`Ld>nLMu7!b>d*m%CuTPoce)P(Z*9s5x70H`3UZM@%Z8PCq%ZldO#yMj9{4E8QBKZ=kv&jL%4<{l zxR*V%BOrTuW;TrEfm|9~ozlZSQ>R?S7YZ8SCN(>)6|hE6Ln+mYavrZDXoxVu@3hik zEYHPtss+UnUpUbaVuIXhrNJsZYuBl#DJStN6AdsE=uRsYM)r(Zr=Fzf;0wna{7is5 zt>v&nPs4Sp8RY<8WxT=1WPiJr0wa1Zu2D@WJ2{184IU;7+pP~_w>)dtsK%7l9F?&K zR}*Qf@Rp&XUbhI1zP?N$;j&U0~<3ZQi6 z6pl35m>{=XAHXho)~-@@DfKxjBMp`&(CyYzSg2>rDpiL<%PHhHoH7As1Ezp(X@9Zs zSg{D`n)aTRj;#^_UDEzwA+c9PKV%r*z#ymbLP_4o+#g*({TDyOgI?+%%W9a=Xw{ zG8N)7#c)Hfknm{YUDp0l^(s4+0`>?<7%AkZPm({Q5Hh>kg~ZfFN7T0UC);l*>QU02X#Q|L2=I5iRdIluZ7+&?gp?8OKU@z4Otf3w4{)%( zgL=aXu(!=X9oHmXiEbRK*9H96{;|++(D=Z{%(-GBbZ*l8XYEgzplUy!@sM=Jt~e8V zY0^AKTZ9R!^6N0pm(JJ}$3f===2x^in4n5Ow()uCjCFA+^pe2*qIMZ3sKW0z;{(ze z%VJOH+=O|!_H|4U-LJ-YXnp2Pu`Tq{g!u*SFia55k7}I1K69$r7&@*1wXA@e9k9JZ72)swNMI1-cTy%0S_uxsS6j{>CX1c}UPtf6tb`Oj*L@wi=e{yo_+008(4OH+ zNa6EiuYcP6wmezzT>G`#-r{md!E>$G4tu@Jlljl*^f&+czVkXfq4~L-^!@DJ>j{hc ze_#7jc^8#X|9s|;f2n@^Mno#(x|bF(h)Iwgu4`!mqdy6<#XVo*Vg3++Y;aGP*qCYo z=q#>fsRn}(fUI#1OQjfn0musXcnOdBV*+G}V=kp*swY4exauV&1~CCTgQG76WArCL zr*V`eDCUoG&?y{g$r@8V4myb|UIJn)hvL<7W=nUoJ`Bbm!Wl1J&uSTrSHl@B#b>1r z#&2rh{`w~#*Wxi7j~n~5QqJ}G^zfEz@1(rl<4ycWdGEnL@5-O`ZBAkT;5}V_nBS)% zKixxG*3R!cBscE4w)`-!PgSnl6SAzG*LP5^)nl{#Ft<-xuF|8qtexA3l?tg4*D018 zc14&6`6^<$V*eCoL%KyQ7wm!%1;L3}&e(UtD2S_w<%E4B^o4vCvLINI5De)SvK+A! zLURaC$O2|bRMG8@1+z3k#ds!F-b{%2qVxxq<;IDkv32c}U z4DRN$bg;ofb1;t2I*ttx9tXSfS;w%xLS^vRVb)QsmvGymdzht-g$h?3aKo%4SZCoI z2iIYi78WcVboe^7#DeN?#4ymP8%0pPjf)IFl)MmnV&eki6zWDH^!Ubk#zB<40D5%8 zow1g9qX2qj!;vwZD9?v#Zd}B+@@`HsJw!Ddz0ezNPTit(Y$NYYoJ+B&bmKhs596zH zNSf#}wvIOyr;61ThB;Uc#;IVB3B9tcX(8otch+gAsttMk@umQKYm7BDq$JLLon~Hr znCCy<B*uFH|njDfC_idG?U)|foA8E3)PtUd{ zg+#~ouF~|XpY-^TG+EmRXInoAiHy6mN;^?q+r#HKow2vhwk{0`igRD39j|`S{_ia7;t;pE-WA%B>Wm)$;U=K{M3(ja5Qn%sD{ood z$&zxs$A*Z}og^v28*PL!x)UY2`0r9tb@$Jb9Q=D}Sao-tBn#gs6;*Y|NUq|U(y*%T zXh|ZTAQe@1Uy(%PW2Ir0-IpYhcn_(lqWhvG2yY|}tLToD{Dl9$E~0mbOWg49*Td-D zp%Mpt-@1s_eL-S{XRe3Qx`QNUc*44f+8rn{z{jqKQM>&m$M7EOqVn$Z5_P=MdRTe) zPZA~k_calv+gq}i^L{Oi((NhP%IRAZJ?wUue9U34g+1(clPu>D)npa+&#_w*OgaziTh2xmr+|TgG`m1$6xwx?G7*x-aPUWyu}@09=NIT(tPWe z0jBaz)tAS&Tn3m*HOP>kj^ z+h?5i=AoFw*Q}mBYEK=C(YSW{Sy{WqP>lLDvuAhPHwI%4T{C`my}fxbM(vuxv-tMZ z!I+FAFLpWjYc0;RG2Nq_N-@{@dfD$TF_YqU=QlA|KgAs6+!a52ks<05BcCRg<-gDn zpN=La9?5@kNNgN^EwL=`g{oLLIwbK(-iw1`t!SIXvfLNSVwGsk#3Q*c=z<#msm5%J zQiwGxcscZ2D$SMNTf-MMUbjexSTlp?Lwi$c&h%A=%2=bT1rN+F3(gFULDLNBZyAN7 zjS!0(Fq<5l9BPQB8PJCqDx;0(Ebw49DL5{4F^LACcQOh`8f`3)VD^LHOQE$%G+lZ< zLuI7V(gF%*mj;K1#w5{n=rl$lzwwj>5X>$K_762oq8+2>GgSDErWX4S?BZb0&_xtY zn~q@=4mTQEEI6?52RntbpKThxpY@~|?#WtQsnBl0xD^evjlvhQp4^8! 
zv1(T;v|5tfREOF6;UcJXhz$tu$jT^!pJiQG(LLM}?v^sd)(NL(Wfa0svJS84s<(K% zNe9{5;n`Uk1#m-_U|CnK#ojGtkUeKT$Nt|6+~zI8ttTEMkIp_?3{P(fY6)!VgwI>Q zVb8Gt(fv60q2l%vcPX%P!YVW{-cdk{swCGZ&z){K4b+>k3JUac6i}ne$Vtg_CmVEu zmE%_afgndgc~lYkWb&L@g9=b@-0G*me}e^-sJrA<^qfh<7lX<%E6>0`!GecTx5@SB zIpc6mL0QywauRyZuwl-ia@5Kx@DW%*j!Ge)M9&#C@D21vtsDaHf(4|g zc=Bq}9H8NMgUS&r+rW6R;6c=7a(&XAZbPGi-iVb|pchzB8Wl!PN}AJYs4%GHTb&LB zfdwT|0pycObH^GA4fOa{W`X~95EMswkylZ3+6}i1Du=C%1OIdo-0#1t_;sJKF1q}L}mXaMdZGRbh@Jdq9SPDNE%VmAF23h|NHth zy+2&xw*P)Tg5DphaMGxD@?e(oqKkRo`eB5KMMLg_xQ!MWh z)~3sR-v0IR-1lLH|GnS(|2g{Q>wcHwiO+Pxhv)e|hZPok^yPE;KI#gSJ?!%HJRda$ zU=O~0F3(3rp}hxLexB=dK%usGV)w*a-4um0@ zYS$s5|dQ@lM;JEuT0umJSnjwaAog0VUrSDLZ@t`^U$QkhR`N^(K#lNoFzPw zHFXvXB-VsxnQF%&zT_yuTlTN^v0;fe!CfY8FCLa0A-KriwZn!bS_DVgNc*8-i6#NN z8#jLqMJalvw|i+m1a-9N)rsBEc^ed^@YV5M|9MT+(ZW|pcRlC-Nu(6KI~-8zYf$LBpi`}+q(zq4iGeC=IywFI%R^Y?0CEDK%EjnS*G0n6%Kto zzV9?;neAQlS%Vev7jb6;Zt{3*@79>5hxfbf(uWaPB5e(B{CIiq%c!NR_lxb)vWRQM zi)(Ph$1{7kMlB)UmD{D{2sBY=4Q}vwVDHO_lr4b>-i>q**$K`vs_?D-<^|nh(BK(LttMFrwbN9Xs zTbg?RYpb+4!h`sB1+M)#Ywy;urIGi$tj-}o3j;M z6k$y~zcQ=wICSsQkfn}y*;a62ggNoR%Iu-Xo_lkHmfGIew}J~IfW)EYSyg7zg34qw z!iCq#t_*vY8IwXk%e=TyC}@Ve)O4~d!WuITQ|MOA^9w41<_H&jCz~F|%3MsQTQD6L z3MZOFT#%h?T3AJ9Z8H5d^W=icL^I3<+R3JdkuzhG=_i>w3x(s&elEaHc6nG~reQMO zjCo)|WxUzPWxs<>2_t4MqUk2go%zDCW)GKz4)(*aTbZ?Jx-oNgUS+J=)rHr=E(^=d zj6u^4nQ!L{N1GuoH63hnSaPNznr^@xnpYWZKIek(V3We)G8dER0A}ZW;YhQM3$la# zAna0RZ4zCVSwF8b(roDh?Fh~dQ_eh)INOJjkMoQ^gtPjxzGkO9Nv3rcy(W#JWE6A{{XLsK#u?b diff --git a/fixture/19/3/0.2 b/fixture/19/3/0.2 deleted file mode 100644 index 18763859cf96e8678cca57ae24146a4a683eda87..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6860 zcmWl8i$4>J!@k@}a;dRlBey8R%&bMnQOChtr>GIzT(dOdAbO`1b88s8*v?txD7kHu z&Q@RE&1KlwIn|pe$0@rUb)3DX+Lzk5-|xK7=kt7?KjFCasYG@bTYz&ZhU)m)VQD3=Eu_txyOignuG#KUuA!!u0NDIb1B}9yVTA)EOi-f?@u?1QXW}e_S zTDCw7#LN(EM_~)J0F0QhXY`Ydb__F4(DKJ*G=Gec@RnaDqxoUR2v7Mi8SN;BM-cNr zNohwgLj)dwOiJ^`^b@-IWm1|C=4V11A10+8#&i>|^FPhgyfHr#iuq&nG%pO3faRCX z(>yVqgd{#}p5}pRBSi2&&C%R3ErdY+*c|N;rh(wbFPo#eVQvv@`LH>fE9N?35C79F z4T8Bw(DKG+X<$qx;VrLhmga&fBRu83m;~&_93k{`J0}5_7;gfTdu9?~fpI4^a1Txb z%rOu`CHI9GV1{ub6mUDm08@+u;T-pj7+`|2Cq#1(iUG!${R9N}g$Q7T*-P-@c8UOo z7&8KpdqxD#{(2eOA-g~!!W#sF@ZECi`z7z0_uHNwv-DShC=aE0)dDuzCg zK1>uoR!M0C=ZEu!U8)$`K-zGY@TyAMIdFD3O?Yv=v0=b-xI+-LZrL#4KHMzuU2h~0 zxDDSFSgu==2Oz`Mg7jzwhO9j%mmh}Tp!vw*Dw~ciJz~O8`<6FzRf&Z00IiA5B zetbmppmZ#wAhmbk>6ssHWi$+boBHzL#``Nd8D~;|8jv3>>%8)5vU6{$UpkLCHrZ*J z>XY70ESv1KNcBu_Bf=&-%~RdduM;!e|4U^Joswrp z2`k^^;S*e{HkP5YV1#a(XLdX6@@6qU!KrF_c~k<1>3Vr)En#_^f%pVq)!Z^v0uI#O zxWo~M52@X|)WpquL@iMd~%y4;h2JENH=9x8x?O8Ue2}|AN;Svs3-Cu^zfPHkQ zcxH`Z37f^Z1p6w+^5`_!Q|G}mYY2@oa!u|kX{c==C-UBEpeo*{{Cgo(>XM|n zSz^|Kr+~8siePRwQXvtB8k`?+8X`UBWcVVlxV|e3hq&cPZZx zKxbVLn(1P*+sMnS#fo^R@@2uO#0945EjDXG=B)-Q;(_IJ0;t3#P;;}`tQncPs$GwF zC>IFOHArItv<7J?7@cwP(_|N$H6iz{V&BLO%MS>sA{S$gGjX;CX|n3}Ms83Z-vafN zr-Y*4psqnxiz#sQD(VvS=f#u|^fD>~nzNV!L;nvd4(hp>5{&*06$Vu;q(IRNC_m`W z3n@Y9Ig|@DXCWmJJ&oE2^;}2^K#NfS2~x>Yj-mgB`a9@nS&BbefciZsN0#D;eu!EO z@|2|c^lQX^IcJ7u6EvDNQ+yzJt0Jq?%9hMzc|u zf_|P)@j^3D89_PoDW2#KR9ukfe2NFU6%`hwnoDs-LU;EUS_Xe&^7|h6{F;*2*d=9N#Y%7W1Nzb!=dm%j`VaIaGLB=VH~a1SzGr~2 zF*o~d`o3X&&XU&lTla-BUS-A9_FMG@Fdk<~Z}eOC9cFZ8#oXvO?{j5b&5~a4H|=v| zoX?86-f!Gz!#JKLt?4)HGiQLZ-Xj!jcRAuM+enW1gRNLZ{LY?QM7(C-T|}&~YZnnO z+4x1o3wGop;yK%W5%H95w21hXtyn-TvZodhGWOjC#5}up0Wr(QFCb>vk*K_-b7gA8 zU+knjMBXvBkhnAn+?#iV-A|-V0xk2r*-YZuNuWiZJG+4hm;{>VLD-eVB{9$}&xu_? 
zq=|v1c@FGz#Is_cNuE7Bng|dBjq~=i5yT}C&?s*&+lNRK0S)ua*g)c05pYkQ5!=Nn z55?vYB_iOL+yxe|NFo5f&PB0!1PK@TBzK{UcTsX5$jU`^@$w~ifK|B*OkS>p0mS5@ zn7nLBD-fBxz~JE|WS~I4Lnathhr?xs)6S&X?8q$AazH!tt+t#(|YnpPlCmf*VElPGSX=9+qO|O(9uVLu zH^1(5(9uS$tyR;+>9wBcho1Ko*_Ln6BPX2St89+d|A6R~b*EduE!m(&K5~Abvhl0W zMR>@%)2ziM8=aBE&KFfSN9xZadZpbd*4-r=9g%&`zA77^`tJ}P((Xj->m?i1$e*0w zD{Z{%LlC|5-6(5p$wqr5&G~`S#&#%MqA`<=Zi`k_j&-Lcdq-CwOh$X zYh<0XuhPb?-W1_6*Zqz4o{|kptpVczfNYR8~_mw)9HA0x+?mkw_x<)Qe+~4g|$99e6Pu$tPs+KWFawizO=hd;y zk?e`q-Q#K*V+1!r-UX>+86#N}HM^fxGWy7ciHhASVNlZ7(bg3@h8WYn^GlA9Etuis)B=cZ((iTiiH_o)Jj@bGnq8nxIs}9^6 z6VVMbEvntN%0%=%nI_e?TQJef2!)khUSepi2>5Z)^|EOhl+22FZS^cSF{G9YEM0Uh zG(7_)cSSt6YR^p!uDt{NxZs*+3WJiF5qVZY_(WjsoMW)WHPG}{pm}pdrqwns(V=;}VVd(g9#?{X>C(yhpVvnP7O+=~{AD4Kr_P%5AjH{37=|JPL4QfwT(^OUt8h$aLU!iR25)e9|5=G zV-xq)<~s&YxdKgR0?g|o{&2ngTS03Hx-*Ecp$grTQ>~ zl@)&@mXcngOm?R`3LnmSp7!@Fv1p5}PY+TiyV1>s!E>Gw{w5_Bt+Ax^ZACJKz9V=z z>lx;+Dz>1+mZ$R-$u9H_LGY|6!hfdN;&$xi^b$p~6Mb3mP~r*m|GC(rB{nZTNRbSr z&k2Gho`L?iiY=OBGt;-%lO5;+K}t=mu^_l6)==EwrGfrN)K93wxyE=!PA~@{wBp1@BZSp>Pz94wY6S#J}5~?0zz;SDJmG){s20Ux+zbZed zhUoAMYd=&zSNYN5>1#!mqpBeq{QO#OB~|4|gQu-ssJx;Y>V%(NJ6nlX`E|lm*5WIn zs-X^e^4fQm_A0*)c;Z@ku6&}A9Qu$ozM}?!-{3}P5L+$XGHLpsl(ytx<{TjIP zigKt8{@og&60P)WgP&Tnu7oOwTH)WWnO52>{aWGQtbMKcpctaS!`E~b&lP?Y_=z=5 z#i(NFHvIUSx`L|kyA2OrTdTOD7;1rstSwcb6@D%7;5B&#R58>H4_Z53p?Gtt7QSyS zsAB5P-#6fU*L*APzPWS*Zn1W#qV~<-*WqSspbGq(OV{BhYjzcpZ~m@<8?9MZxWBnn z^Gl&(uwfJ+2Za{?F<3devq2A^_6rn;;O0|&)i-B4xWJ=pW}zet8Umma|` zwLA5-=*1O{cgf+mcr9MWDV8Atd~O4o@e>h!mO73(?+v~Zon*;ywa#yem7v|OW?FC zSwq)hrb~b{be7+BnDLTz8Z>LD24=WqnuaW3N3WwS^Xx*Y6HMh|Z;@9qw15@;3MI|8 z3!x4(vln|WdL=>MwL}-8F5>N+s7s7qNpG-Mci_9`=v*ys8c*j!49gho&d+$|DjiEk@e&zA3_h~P|E+jhGC-_~L zao0y?cH9H6O0P1nBx;G*c)Qc)^(M}s@J$sJL5PuQuF4pY9V{5bEP z0501mQ=?cXo+*>OC;^qF^X{?WGu!0$DC&uJWs(QQwesVfdlVS9O>T>7I+3hQIz+Ll zES+|QmV7(v%87PG zk~8H^#Yc%d96Y8aw?q}5NLD0)D8E&dO5CB~PA$1P3V*^*k>p4bReYRrKL#$-lAEGT zDo)iz8C7`HL}5-mTTcQ|xD^L#q6{iZXWWl~&uGbwQRoxx>q!SFcPc(kyL*FST5>~F z?1|*{O&0XB{4qkICa|EFZK(X>02qX0-9hxb>icqKsbZCaW1Tm%B{AbpZUqswh5opkJavY*o zwb=?U&LJ{p&c46KZK}NZniGKdT5?z08J9Qr7Nuuhbw;kpo9wdq$vHfxm;!bMP z`)t$aAcsixdf;~I9K?-OtGC#8n1i^INcvsv%UKA7RHc8XrO!gZq;ma=Hgy)_Lb{@# z*E-BXoJp7ULhVZl#EImoZ`*=Ta?D9?`s-W&7IVxSubDi?E1NC)*v zTW~SQm}IMu*!s7KV?^4g58V1i#4#jU=-sx;|JgT5CVJZ~xag1ZV%_x0-hYbX_+{O{ zR{qMRu*NUx9)$&jI#~zmd zeO(t`wRBBFUqz=l<3 zn?P==2)w7tX!Ae8a_{2sTOTag34c^?tI=?dV7GME8yWsW+i*MKN_D#e?aX(SosZYH!+#8ChL>>!2|p*i_Ws-c-SZ8!&f<&5=L4ThuNhwZT+I(Z!#Tk@&bqLo zl=hts;qePP;~o%Va_oIs@hjQVzT}WLenCfE55Y3W{xBJG0i7m6E=Y5FW3f zCGH|225%2y$(OSweL*2@yn^O99KjNA@5q{1R?hVKhZOVr4Or&f0t1#Yx1cHRJVA=H z2e1se76z>0YOV?usv7uBwQJJg314YgL*~WUA)85U(ogM4r(k3nNvU z4kTI=BXn04bs&>8-wKUX8Y(hDb6luU7EzILnjqnnQqzu%)%XhUDvR2Y(V9cTTBW89 z8KnUU@yen$$MMi2Yh3?9tQu^=M@}4KZ{a$AF_{-|K@)tk;IeaDRch%}R z@z2SOz2!$&hluwkGc3ywul`8Po6N8%_gHNqdQWDUm%FZBBmODQFe`Uny+phx&M+-^ zT+JZni8D;f53I%!y~P>E~4Ro`G5ulhcN zg^O<#9#-FBRN>?eLN7Igfx*Q$2tCxT3?xoY79LWQ8BVx(vd~ps!}x-g*9*bw3dU<} le7(?FO=LX5%Ip4l8S)t{YE)K6W?-H+H{Y-t&)^5aj6*HPHV|Y>pr(S!jTmW6`vB=b1^1YS}`*U zQ7SPFo&?#wEbmIZaN)K`lTAfIeZcMt*JROBQ8QyYILA(Iw%y$yzUT7~e8WN;!zVIA zLjGTEWJpLtNJt3x4|m?%{f0e3T>U`^_RF!YiSa)lT|ba{w66I?VOk#kcJqmIeQ$7U z!V<$G!)W1zNcvlbl;UhtDf?bu;YC^)A)Jmg#1+sYuRI;OuW0IWnWyUJl_x#z ziYB(pJdyuiaoloWVQ;$3WAW`A_PZ{2`+Zr+ZTrIE|IYK(=RVJUZS&k>|IV}7exGNa Lws~ey&&dD)d^%n% diff --git a/fixture/2/3/10 b/fixture/2/3/10 deleted file mode 100644 index 13126ed0065f11a6eba5471812d558638e0e7aa8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 262 
zcmZQ#oWL@Hk%0k(*??Gtfq~&25NmAJ|C=y@L6x0>P13?Zq1=^GdqJyMR*0i03VO`B zbV`tyCYV@h34nNuaWbu>9Pnw?;KS_QP{KWZ* z@e}1I!cUl=e181+@#V*dA8&rVSh@TGPfVk70khu$RttvmVzcmK6Z2vt@nVDTMS9nZ zbha1WwVdkB$^OW0PW$TunY_c9J9ye3>dk3gT_BQo$a4n=`$M@o&DjM!c?UIju(T^o ve_U{-?6h6lH!jO?sOLy>h>L7Yau%QQNW!}<3iQGBfS=a;z|qv5ZG%A diff --git a/fixture/2/3/11 b/fixture/2/3/11 deleted file mode 100644 index 24fc9f1c5fda5defab7202854c96c878ffa7ee23..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 85 zcmZQ#oWL@Hk%0k(Lm3ztM1a^9h&8tA|4o>{Fqe^G3zH8E1A{LR`vI{(5C;HpAP@%u WaWD{v0C6Y~hp{j)G6)AdN&)~BehU=< diff --git a/fixture/2/3/2 b/fixture/2/3/2 deleted file mode 100644 index 18d54b12f9435517fdfcac6ef245d2dc7fae05ba..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 236 zcmZQ#oWL@Hk%0k(-!L#Rhyd{wAlBHb|2JU*Lns>q+e|}EgN4#e{~tO9inxN%6!{%ZQ!nGVE%D^FH`>0m3yYNfAX7mQoB+m|4HYb f3GAQj<{f`sA(Q_&b5BqEN4HBW2 diff --git a/fixture/2/3/3 b/fixture/2/3/3 deleted file mode 100644 index 36f4f433a08786cfa6c75937995fea9bcae9635f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 240 zcmZQ#oWL@Hk%0k(KQJ&bhyd{oAlBHb|2JU*Ln#{r+e{;+1@B534xIT92KJ1cDia)- zI8_2<oKk}04lrB`#JJPs=|M^2JnfBWSQhJ9IcW|dGmOtr?Olz^bJ21XQkx<+1&K@$j%7h5kcwrnppPcJrgFE&;$He@f>e_o_}yhwX_k!E|5 zdU}znyHNN;jXN!q3k3NN*;=q)e;{$EDY$@>@1U#&bNB;}I}O4GjC=?F&N=^;<@uv1 zneOO9Exn@)cL=9H@{;M4E>zMx(zt{F`9mw2_S*$gdWREtaHlJlKk12?s9faicY@VY j;rrv7nEuCw)_%vITF8_?&WY(=Txjff?5KsrcO?b@(8*qb diff --git a/fixture/2/3/5 b/fixture/2/3/5 deleted file mode 100644 index 2be7e6d7f7f2f4575c5b4838c97013eb983cc04b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 258 zcmZQ#oWL@Hk%0k(nSfY?fq~%_5NmAJ|C=y@ft8(sP14*zq1=^Gdx4v1h$AvGofqJx zxpYbplvrsD0jmG2wHP#kP`%iYy;%Qwk?!##?d3(9*XRE3{M7iV@>Aib%uk7*B0mLw z^8DoZ$?}t-koiNEn3m=O{&NRSE!eg{5Q%AWE?_@*K-7ZC`~gc$19O3Sdai@}gTp+=XCQ?0k6wN?22CJ5UTnR**s{IYJiXY|z1Ud2*pR(g|9O$_@gnWzMVjqJ z>gh$Q?n2=YHSV-bE)e89WNX2G{ei@trr-ijzJszB%;66>?lcG&F!CMvJLmjYmgkS6 zWV)jZwe*fI+##I)$V;YEx==~)NaGIv=MSx9+HV&~=^ako!JV#H{-h^nqH>Y5-w9Sr jh3}7RV)`E!TKgS;Y9UkpI47oeaiOu_v7;6e-<22uMX6ut diff --git a/fixture/2/3/7 b/fixture/2/3/7 deleted file mode 100644 index 5e6d967987e58d052b51214ce7d562e5bdcb5c80..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 265 zcmZQ#oWL@Hk%0k(Ie}P&fq~&85NmAJ|C=y@!IqtY?WTo+Lb)rW_JUTisEmaWBFwCS zQ5*#|img%tgP?uWZ8aG{F#VC2Os90AlHQTV9sJK9TFJEEE|AhYoVbHK{h^jj>+Av{ zy+e*W*q)sPAcwHPJhxRGf}$8N$&(B=X^GKw)w2`tn*pq uS>`j#GtXy|XPVC_&p4k!o?-s%{Thyd|bAlBHb|2JU*Lo6Eu+f76HhMaPS|4^`hL5TT^pt}oR z&2I_%tG@8cPtF=O22CJ5UTnR**s{IYJiXY|z1Ud2*pR(g|9O$_@gnWzMVjqJ>gh$Q z?n2=YHSV-bE)e89WNX2G{ei@trr-ijzJszB%;66>?lcG&F!CMvJLmjYmgkS6WV)jZ zwe*fI+##I)$V;YEx==~)NaGIv=MSx9+HV&~=^ako!JV#H{-h^nqH>Y5-w9Srh3}7R fV)`E!TKgS;Y9UkpI47oeaiOu_v7;6e-<22u>i}W9 diff --git a/fixture/2/3/9 b/fixture/2/3/9 deleted file mode 100644 index 36c472f966946b87c3e7b3448038082a5c5f7087..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 254 zcmZQ#oWL@Hk%0k(|1mHyhyd|3AlBHb|2JU*!&5c}Hc2xBg{`s-*^Gap81{=DSkL+a z%#>?Olz^bJ21X>P_~{L^8iOVf9xt|DUToQ3Y@S|h>RxQDUTnx-tpB`7_jr-^@*>Ul zBK7nlRd=EAhZ=WUCKm|u9kR7xzy3htPE&9JC*MI?3+C_#9CsRo3mEwh{GD_DE6ejo zQ8L}pg<5(?7w!;Ff8-_8DP5?fccgI#|MQ1dGVQkur1TCa?%+;WEPv7yGf}z7+3y6a krNZ~eH8K5<3$6W*Kedo4f1DH3ySUKU@7PfbiSJ4b0E(bqQ2+n{ diff --git a/fixture/2/4/.zarray b/fixture/2/4/.zarray deleted file mode 100644 index ca8bd7c8d8..0000000000 --- a/fixture/2/4/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - 
"chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 1 - }, - "dtype": "{kju;v!p6YJ#LU9V#?HaX#m&RZ$1fl# zBrGB-CN3cC$FHWq^zQrLCi@r*B|rWNcz;W^Q3=Wo=_?XYb(X>VvWlvjx`w8f ywvMizzJZ~Uv5BdfxrL>bwT-Qvy@R8Zvx}>nyN9QjH!}+ZvjBsOm%!dlcFX`l2rE(m diff --git a/fixture/2/4/11 b/fixture/2/4/11 deleted file mode 100644 index 98bb22123b91394d8e9eec1c62e9597c44093cbd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 58 zcmZQ#oX9ePk%0k(tr!>>M1WWch&8tA|4o>{aFl_ez{l6mKOitDI3zTTfrWutfI-&l IL3FSX05O*f@&Et; diff --git a/fixture/2/4/2 b/fixture/2/4/2 deleted file mode 100644 index d59f36b7c1c4e4824cdd1fc42c2899d162b0d243..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 149 zcmV;G0BZjN0g(id0RRAy0RRA%0000K0001e0000ewJ-f(kN}+n0CWb($;!*j&Cbuz z(bCh@)z;V8+1lIO-QM5e;o{@u<>u$;>FVq3?e6dJ@$&QZ_4fDp`TG0({r>*|0RjUA z1qKHQ2?`4g4Gs?w5fT#=6&4p585$cL9UdPbAtECrB_<~*DJm-f0RRIK07ObKujF|K DSk6CA diff --git a/fixture/2/4/3 b/fixture/2/4/3 deleted file mode 100644 index 8b43466bb0b2a5c82e30a50280c5e708b85e4940..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 147 zcmZQ#oX9ePk%0k(Co?cGhyZal5NmAJ|C=y@p_iEu$;>FVq3?e6dJ@$&Nl00IC+KuHNwU diff --git a/fixture/2/4/5 b/fixture/2/4/5 deleted file mode 100644 index f8a731c57cc1e604ac155e8c3c5ed97ba74d4c97..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 149 zcmZQ#oX9ePk%0k(r!p`whyZad5NmAJ|C=y@VJ{h>D3zNJ>e|$jZqpC@LwdsH&-JXliNe=<4Yk7#bOyn3|be xSXx=z*xK1UI667IxVpJ}czSvJ`1<(=1O^3%gocGPGBGdbje8tLDtJkbuw|>LMO`ErD-L`$l&Rx5i7?>Db96ULL64(F<;yxGv diff --git a/fixture/2/4/7 b/fixture/2/4/7 deleted file mode 100644 index 8c7aa6e1cbfc4d5b5cc4b03d94913266c85da219..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 149 zcmV;G0BZjN0g(id0RRAy0RRA%0000K0001e0000ewJ-f(kN}+n0CWbty}rM|!NSAD z#m2|T$;!*j&Cbuz(bCh@)z;V8+1lIO-QM5e;o{@u<>u$;>FVq3?e6dJ@$&QZ_4fDp z`TG0({r>*|0RjUA1qKHQ2?`4g4Gs?w5fT#=6&4p585$cL9UdP70{{aM07OcfD*-(N DmPJKo diff --git a/fixture/2/4/8 b/fixture/2/4/8 deleted file mode 100644 index fbb54e30b2bdbdd4e6f58f1fb2fa6ad4e8ff7a40..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 147 zcmZQ#oX9ePk%0k(Co?cGhyZal5NmAJ|C=y@p_iEu!D00IC+KuHNwUB`5jFMILgHf8668 z=lI4oo^gy{+~O6d2roL(hc5J>1O4Yd?>WzRuJfGZ{N^^VIn8G-^O(c@!|j5ZgL z*_tTP20?~Ejd1XQU4cOv2%p!M-z`4=x9oQI^4hYS$?bnjuSciXmR=2Z|66j|Tb*^i z#Zh&=j~&i`9`fZ|92Q^qp+))612%n&gY0@A8kql-->+sYuRI;OuW0IWnWyUJl_x#z ziYB(pJdyuiaoloWVQ;$3WAW`A_PZ{2`+Zr+ZTrIE|IYK(=RVJUZS&k>|IV}7exGNa Lws~ey&&dD)f3jXM diff --git a/fixture/2/5/10 b/fixture/2/5/10 deleted file mode 100644 index ec0d175e38191e3d84a6e94dc82cdb453fc6f35e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 262 zcmZQ#oWe4Jk%0k(*??Gtfq~&25NmAJ|C=y@L6x0>P13?Zq1=^GdqJyMR*0i03VO`B zbV`tyCYV@h34nNuaWbu>9Pnw?;KS_QP{KWZ* z@e}1I!cUl=e181+@#V*dA8&rVSh@TGPfVk70khu$RttvmVzcmK6Z2vt@nVDTMS9nZ zbha1WwVdkB$^OW0PW$TunY_c9J9ye3>dk3gT_BQo$a4n=`$M@o&DjM!c?UIju(T^o ve_U{-?6h6lH!jO?sOLy>h>L7Yau%QQNW!}<3iQGBfS=a;z|qv6zFRY diff --git a/fixture/2/5/11 b/fixture/2/5/11 deleted file mode 100644 index 4b3e48bfe8a05cc9620ba6797382b810a771a698..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 85 zcmZQ#oWe4Jk%0k(Lm3ztM1a^9h&8tA|4o>{Fqe^G3zH8E1A{LR`vI{(5C;HpAP@%u WaWD{v0C6Y~hp{j)G6)AdN&)~C$_p6) diff --git a/fixture/2/5/2 b/fixture/2/5/2 deleted file mode 100644 index 
7dfc01aea25d6d6f8b01af62b346185042ef07c0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 236 zcmZQ#oWe4Jk%0k(-!L#Rhyd{wAlBHb|2JU*Lns>q+e|}EgN4#e{~tO9inxN%6!{%ZQ!nGVE%D^FH`>0m3yYNfAX7mQoB+m|4HYb f3GAQj<{f`sA(Q_&b5BqEN4oKk}04lrB`#JJPs=|M^2JnfBWSQhJ9IcW|dGmOtr?Olz^bJ21XQkx<+1&K@$j%7h5kcwrnppPcJrgFE&;$He@f>e_o_}yhwX_k!E|5 zdU}znyHNN;jXN!q3k3NN*;=q)e;{$EDY$@>@1U#&bNB;}I}O4GjC=?F&N=^;<@uv1 zneOO9Exn@)cL=9H@{;M4E>zMx(zt{F`9mw2_S*$gdWREtaHlJlKk12?s9faicY@VY j;rrv7nEuCw)_%vITF8_?&WY(=Txjff?5KsrcO?b@)V*GX diff --git a/fixture/2/5/5 b/fixture/2/5/5 deleted file mode 100644 index 1a8d24be8b3e4796a033f878e3b47ec22158f340..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 258 zcmZQ#oWe4Jk%0k(nSfY?fq~%_5NmAJ|C=y@ft8(sP14*zq1=^Gdx4v1h$AvGofqJx zxpYbplvrsD0jmG2wHP#kP`%iYy;%Qwk?!##?d3(9*XRE3{M7iV@>Aib%uk7*B0mLw z^8DoZ$?}t-koiNEn3m=O{&NRSE!eg{5Q%AWE?_@*K-7ZC`~gc$19O3Sdai@}gTp+=XCQ?0k6wN?22CJ5UTnR**s{IYJiXY|z1Ud2*pR(g|9O$_@gnWzMVjqJ z>gh$Q?n2=YHSV-bE)e89WNX2G{ei@trr-ijzJszB%;66>?lcG&F!CMvJLmjYmgkS6 zWV)jZwe*fI+##I)$V;YEx==~)NaGIv=MSx9+HV&~=^ako!JV#H{-h^nqH>Y5-w9Sr jh3}7RV)`E!TKgS;Y9UkpI47oeaiOu_v7;6e-<22uNr7MN diff --git a/fixture/2/5/7 b/fixture/2/5/7 deleted file mode 100644 index 6ea8540c2fd7551f6882a054f9a8eb64a2c74823..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 265 zcmZQ#oWe4Jk%0k(Ie}P&fq~&85NmAJ|C=y@!IqtY?WTo+Lb)rW_JUTisEmaWBFwCS zQ5*#|img%tgP?uWZ8aG{F#VC2Os90AlHQTV9sJK9TFJEEE|AhYoVbHK{h^jj>+Av{ zy+e*W*q)sPAcwHPJhxRGf}$8N$&(B=X^GKw)w2`tn*pq uS>`j#GtXy|XPVC_&p4k!o?-s%{Thyd|bAlBHb|2JU*Lo6Eu+f76HhMaPS|4^`hL5TT^pt}oR z&2I_%tG@8cPtF=O22CJ5UTnR**s{IYJiXY|z1Ud2*pR(g|9O$_@gnWzMVjqJ>gh$Q z?n2=YHSV-bE)e89WNX2G{ei@trr-ijzJszB%;66>?lcG&F!CMvJLmjYmgkS6WV)jZ zwe*fI+##I)$V;YEx==~)NaGIv=MSx9+HV&~=^ako!JV#H{-h^nqH>Y5-w9Srh3}7R fV)`E!TKgS;Y9UkpI47oeaiOu_v7;6e-<22u?yO?Olz^bJ21X>P_~{L^8iOVf9xt|DUToQ3Y@S|h>RxQDUTnx-tpB`7_jr-^@*>Ul zBK7nlRd=EAhZ=WUCKm|u9kR7xzy3htPE&9JC*MI?3+C_#9CsRo3mEwh{GD_DE6ejo zQ8L}pg<5(?7w!;Ff8-_8DP5?fccgI#|MQ1dGVQkur1TCa?%+;WEPv7yGf}z7+3y6a krNZ~eH8K5<3$6W*Kedo4f1DH3ySUKU@7PfbiSJ4b0FIDeRR910 diff --git a/fixture/2/6/.zarray b/fixture/2/6/.zarray deleted file mode 100644 index ad63df90d4..0000000000 --- a/fixture/2/6/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "lz4", - "id": "blosc", - "shuffle": 0 - }, - "dtype": "rFVvr$*8DW$(V~jJwBpIfdCQFVP^2{>F zJPQ<9WQk=~SY?fMHrQl~ZFbnD$R7I~P~wmy${ce-g;UNr=YmVFxaNji?zrcHN1k}* Qg;(Bq=Yvna_@>Ga|5*POQvd(} diff --git a/fixture/2/6/1 b/fixture/2/6/1 deleted file mode 100644 index d6595c09c50a6d88b781368f11ef024cc54060eb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWl|hc*IG007XJ~dUK94&?|_32IqZl@Q;s_3xD%$GbjoQn&N%Cw z^DemPlFP2R>YD3rxapQzx7~5qJ@-BE&?9r^J@&+cr=EH4g_mA=?TxqIdGCXdKKX3X R7hiqz-48$g^4lMO{R1s?1`rq`~yAAC2Ifx diff --git a/fixture/2/6/11 b/fixture/2/6/11 deleted file mode 100644 index a7fe035f0f44117f00084d4d60f62b9efbede97e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 87 zcmZQ#G+>#)$iM)?;S3B6B0y{p#Q#-%SQr?5f!Gg-{ed_Dhy#H*2#AA$I0T48fjA6^ Np@5U&zgqy94FH8;2y6fV diff --git a/fixture/2/6/2 b/fixture/2/6/2 deleted file mode 100644 index b4642ab8642daa5ccec7f6426ca3ecf37df07301..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWm9MM6SR006-E5ych@TWpM-C*=qp$L_#ZQ0!Lh7#DMZiA9s~Y!t%3DW*dRGt4r_ zJPRzc#4;ZqrIB#kuDObe}~XrrADI_aXD9(qaB 
TM?V7$GQ=<=WEf?PaVGc!m_0Y2 diff --git a/fixture/2/6/3 b/fixture/2/6/3 deleted file mode 100644 index 16746f482ff739a61455fa66e2bcf6fdc7afc720..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWl|1yTY4001!Xe2xVMVq<}UfmoQ>g@|1knAlJ7sm|T4A{egx2?hfHf333G8f&dH zWY~HeY_!RU%|>mp)i&Ghu+uJMcH3jGefAqS;edkXI&hg`(@84Gutg5UI1_EUr{|#vk zwbW5h1C4}<&_pvWw9-a9Lr<`%l1(#fL%?-EQanA#fJdx*_ U7hZYeoew@K@I{dl-~8~)9}K2V_W%F@ diff --git a/fixture/2/6/5 b/fixture/2/6/5 deleted file mode 100644 index a8450d8a9608a2724fab0196874d7b6ad5ed2a2d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWl|g*roF007YcUejIErWq!to5_vocrB0MAu-KyzVF0QHt`sR@c+qYUwpOXo9}-3 z>6hRB_-lwmh>R^WVYwAnT4l90)>>!14K~_jvn{sTX1hr{?6k{nd+fE(ep3!O=#ayX zIO>?=PMCJmDW}aiu&U+vH1L{p3UjP6A diff --git a/fixture/2/6/6 b/fixture/2/6/6 deleted file mode 100644 index 0cd2ea38cd1ce10a7208dcfa8e81e18835d9383c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWl|g*HP0007Xp9be>Ym|p0%6w_TZ%`n~F{G98abHky^ib5y|{+F!^2SK$FYpk`- zdK+xC$*3{oCTzCFR@-d1!%n;Gw#Q!k?03MVDbr@mnsd-0haEBRsAG;h;iOYeJL9Z# z&b#2EOD?+2N!ENdjJ3c diff --git a/fixture/2/6/7 b/fixture/2/6/7 deleted file mode 100644 index d50cd5b39a6e75a32dc692dff36177351b157812..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWl|1y%x4006+h3wDd0ckrYh!Gl;}cNbt|cVWjk?@b)_CHtcg{tq}9hcM)@BaWIj zYD3rxapSL?wB=a-d*?H_rODsJhtG8r=EH4g_mA= z?TxqIdGCXdKKbm6MPGgM-48$g^4lMO4T%sUV-qGVvD7llt+3K6tF5utI_qt)(UeU# R+hVJ2w%cK+U3S}JuYa?3KdS%$ diff --git a/fixture/2/6/8 b/fixture/2/6/8 deleted file mode 100644 index a6dfed6b2a91ee79e2a6f58f5a5387ec8707447c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmWl|g;hcU006*`IEZ4S7?>zxg58Z6D0X-E0*>IQocG?9gyN;Yp&Yt+&CLjW*eAi*Z|Rv)v9m?J{AvJtplnWuN^HIOveWrp-9wsAG;h;iOYe zJL9Z#&b#2EOD?&M;?3Psb`*h;iXqzd*iKl-uvLAPd@wN Rt8c#h;iq4I`{S>FmH_sWN)G@4 diff --git a/fixture/20/.zattrs b/fixture/20/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/20/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/20/0/.zarray b/fixture/20/0/.zarray deleted file mode 100644 index 083d0e26ee..0000000000 --- a/fixture/20/0/.zarray +++ /dev/null @@ -1,18 +0,0 @@ -{ - "chunks": [ - 100, - 3, - 3 - ], - "compressor": null, - "dtype": "|-R(wZePOK4^!&61PWa!y)KZY?=!77ooChi1v4adIr2Gz*7j$)VZF zv2fBX9GZngv*cK_lVjneSvWMSXMa5Z{ysjB*W>efe*=L);1=o;!W}dujJs(@DkB(4 zIB)Q+u#jBdWf^&_CZEe(;VO0f z$j=7|rU4-gWDtW%;U!W@V=U=xV>>%2;WPG9%3;cgyfqMLLtCPGkdDOgD6!0B7PHCb zZRV51QgS)VInGnV_gtoyo7B;#ejxBHeM#a429nHhQrN&oHc`YUY@?Vxln{2Cb~Gb` z)Op4Q7(XJhG|a1eH|r4QHw55;epHX-7{Ic!oYCGJquVSWP|!e8>h0 z*-8Uk60ecLbY_rADTgVeoUf?hG-s$HCRjUS>A@4k5zo^k zki$}PS;hP0v5xf=P|Ho~2x<@r+)gl!X+kK;3@3##yi6Jsn8+lG*+U73_?%LXah!6Z zT+R+e(}hQfp*xS$gDmEe%@W=rhZU@371dm#h8z4qE%jae+Xy0&0VFYu7fEImqZvaX zTPb2UpHj>L4swWa4@oN`XwL&g(V2(o!gOYk$z0wdi$yGE38y(j6&LxAYOZme8^qtK z9SQX3c@i1IP=>LN^%Ss$k11p)yVyUCNPmnOyf^XXAW;Nmt!2K zoKyUl(_G+NF4Fxj?dU;oenUL{c#i(8U?rBUp@W)X{7!gBt~3fAxeYq`dC zZcr~Y5V)24G~`YiF@&KEVq9sHA>>}4PO zX>NR5(2};?M>{(55S^IH>r7)de`XHzS-?V0@-?S8&;Pi@GN~<#(OL$pTDt&4Qyl+ffm|Pj}Y#lAz|E2Gg2ABNYZ(Qabz%= zDePq*`zhlKj!?k~D(To#J30}|V{|2sC+W$27O;?9-ennitR|n!T;VEp{7B$l+d~6F z7|0+7lfp}+lEzrl*~WHuP{L>IrIf>z5gD!>ZHeYVIugU9#4?jv%qE++nNJQ&$>l8P zI8P1VbD3IhQb(Uw+R>LJUSJ@}3@3#RY-AHfe8M)0*+U6o5!%s=2wD?K6di~rgUL)G zlQ)=27W2raf)i9y#W$R#noHCW*IGMzlE5?cA&~(jk;iKCDd0mkP{>w_2#(Z_5JG86 
z7~!-cf;7gG&LmzVgXzp5lTr>-Mmb+m!D-G=MNAv*h@}Tl5Jx;ulRyqj$z>JqlgB#N zQ$Q^@sUxVZ{~v;BOcO#$W;iK~;bqd8z(gid%pOWO#OIW9jN_COb)R-b(}hQfp*xS$ zgDmEe%@W=rhZU@371dm#h8z4qE%n>kP=ZKg07(qvMUolCXvR>;R*KlorxbI5gB&9K ze(i{$Jr58?XC9^t)0sggb9sv_7O|KmoaPKwT;w~dxyE&F5Z_)q66nwKBr=4d3}YSZ PDPRj9Q^-zsv74U&wMzto diff --git a/fixture/20/0/0.0.1 b/fixture/20/0/0.0.1 deleted file mode 100644 index 8ad54874691dc275be5fe723d5ca8551c91b787b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3600 zcmWmG@pljO9>?*ome$Nd%q(eUqH*QU!j)#>qV>2xJpO>sd7kt7yn_OPz@5}4jC-g@1Wkx!1S1(m z2Cp)nOr|iE{T$#RWqidk$~ny$+SUpL9;F=#JV{3qd4_H*U?Gdh<9(Ks&l=WpjqBW? z)*XSs9Rv|dUBVd1AO@4h%ZwnMab&QAo$R87FW65hM=2vFC=h5#E8=*Bw#4%U3Cv

5^GNAgkEW8bJs=WQbTEa2S&1V_gw&^^puSBZ_k45bK|&6grzd9@SV;_+6C?P~>(_?umA$=(qfLbhR7B z(p??XW~`+UEq?h^wDuLbKMSO=o|o}HM?n!Q9YytnP2)V0C?*as%QH`<*tTr8^Pq~N zSQ4QaqTfQVt0`9LHEA3ur8w_dAwcm_3WUP()v@Z|breZc_H3$ap?I;;`+IIDg-P}T z<-u+WNn?PbY?h_~gLddA&%e41K1H=3>kS!ldQ0MNm@@Rr&l$v7F*wH@D)g2!DDR6V z49!EX0t}W*ZNeJ77$TD=j8gkEJSIP{#0N5r*KKw5Q7}kx#4;47=WQQOWEgBz3NW}$ z-gl}%#h?mUXPJ=2(4HwSgu%Yu%y&*HL;Otbud)h;+MQpLUR5(pxmhQ`5ZrV3c47;I zxQ_Lm3|i3%Z*966q*?|T#8#R#IKIOET9Gct!Xt(P9AX-NrW`NFv<2H*ahR{E`llu5 z5G|S)aa1;>*%Z5S=uL`xlI+FdFC9nz74+E!a{Oo$m&zf{BZebg65)7P(6~B9$zju7 u))AoMI50O$fP+eba2VYS$#1>GvEz>i?v!(sEEb#L7)}(6;gB?1IQ|1||B+V! diff --git a/fixture/10/6/7 b/fixture/10/6/7 deleted file mode 100644 index 3b8d86f5c70126c7cb0ac51d53ed16596412fdcd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmW-fc`Vj(9LF6UdMrtjF2_2sQhG8+(p#<^TbhzsQ;6ltB&>Cybvd5t(3C5kbf}P0 zD0g|j2eK9w9W=_3B|TPa_51xkdwu?Y?frghsLQoXG}Y9^w-P>At6xBgLGRWzy(3E4 zWZipNp;F@btMj|k)CoG~9{d%gO<)`lq_Cb#5R>n+RAx+2{lQ#-Kx>JPZ>5}Id-(WZ z`g(%vx4M#dyA%8vt~IdPLIA64rP_f6iulv6pF#=VJ&F+^u=wq}KQoCS(a~;3NGd^7 z%Hks13gsuF@figE!0&yJjPp)~|K`bRA6TL}97L+W(e z3FLX3{QJ8JQhHa8KItRirg|r`MhF&$sRRfjO5T^RQ>S=5RPU*)O)+(K@y9Rv6rx47 z5k*mDoX%NuiqXmKH^Su<>!qVeOE&GC?N0HgVOr#X55=-+!)mnw6!DS>MP25jReNJ7 zblUTp9g`?_&Pf%ZxGe=jp}ORq{w$lqt;fnVyO83%i9uCF3B{+-Kbuibu}ajap*Z}l zS%5-md1p>t8--!qncEk-DZ(BmY>euos89uVIgL=LJ2=G}s3?4niY5&C^|FF`8N;_J zV{3Et8SFe>E2E7VPG&p}b~b0|7o{Jha`_&t%q`D^R_rWA&(mG|9q(;0eVvjrGdR2)3uR>&Z(V_^wH zi@odBu`&j!mTCsEmAqz#2bw>_i;2S&E4(WgPzj D2=pTuEMHS!;i<%M?0CVpl?xf_ zs#E&^>M*GMJQ~f67`!%lsNa<_+={mmU>JM6Y~vm$21n(YPIEVgi}H^l|2!GGFSJdQ zAA{M8^6Y!T4AI)z7H1`J1ax_}cNHCE18@ zCBjsIut^e1cweO^b6HDB-xEJ+X+yA_6h9|)qGz2d8O!XrK&!rd~42@q7D=i z(z@FmyMtT5syv=!%A_bcI)rk(Y|RQK z#iXQS&+`jOYaKc)DN{+O-~03I^ZftW_xl|`&QilrRYgVl=Hfq;tKje^f@@ER)-E|= zFsb#Uy(=MW*$xqc{l|9)=6Dgh!|hgR`Vg*6&sFX9BiI!he`p9Iw0NXkuLvf@kEr|Q zg%K<=qeKX`Yr2Nxq6yK>-fu%=31+c;**lIPM3;GSN5?*Pg zix2`H>Rjl{BIw9tb~NS?o^I93ogddTvu-xHJ;Mgk!@W7uJ0Ex~rpA-zBKgytsY5zd`*dsNj%SUR`- z$kR^3r!t%Mw|WT2B@u$DmciGEK|-Y>DaU7w5aOgJ!k8fi!YE1nu3<8X;b-Ytqpi!( zu6dt1AYa)*Q#$XWH=8`xRzkf=ubc48)?Nz$FHTnD;evX z_72Ix=!{XCFfN&H5@FbsJ@?X)GaCJu%pY`NoYlsdS7Ml*b$-J2zLg9+}l+9$^`9uHj* zVeCv-Inj2BA*~~oaksH`YEc?PswIojC$^H5!?^LrF5;MivCcnNgrQ8sC7+S_Amz)d zLdNpV8rkzo7)pyNrHs=TyKG0^G8S3vR_~~0@c%<(=!T3JpBaXF&jRn&Go*PmF%C*1 zjA=tURRNuhqO_P)*B-{+)qOsz`xu&1AdK9Tb$@lo7~4&p-i)dW7%f#cBOqHUMu4O- sMZoX-G7$mS9pj6W^#w>XH4@OCaA@m(GXV+a&4ybS3b10gA21j24~RXPqW}N^ diff --git a/fixture/11/.zattrs b/fixture/11/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/11/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/11/0/.zarray b/fixture/11/0/.zarray deleted file mode 100644 index edd48fa52f..0000000000 --- a/fixture/11/0/.zarray +++ /dev/null @@ -1,14 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": null, - "dtype": "i5G8&9-/>6=.8幗4/7/=^=X<9=6:8U+ \ No newline at end of file diff --git a/fixture/11/0/1 b/fixture/11/0/1 deleted file mode 100644 index 13a9154b89..0000000000 --- a/fixture/11/0/1 +++ /dev/null @@ -1,3 +0,0 @@ -L<>Ĺ;8052U8=F:4u?q<޼w; :}'3??km91|8⽄0y<: \ No newline at end of file diff --git a/fixture/11/0/10 b/fixture/11/0/10 deleted file mode 100644 index 53ef107b49299cdc5603422529e4fc4ab5e3088d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|_^z-v4RFI9Ij#xo0x%ؽ)=:/,^=ԹD; -96N50=8·O?f->,;d%O87p909W(+=v:ѮC_4434#4e,!Ѻڽv;w7/=-$S:4=;1ȼ \ No newline at end of file diff --git a/fixture/11/0/13 b/fixture/11/0/13 deleted file mode 
100644 index 15ff462e5a..0000000000 --- a/fixture/11/0/13 +++ /dev/null @@ -1,2 +0,0 @@ -K6u8Ku6eB=<Lw4:oH=4V⻟=̷E8=?i:q<0ոJ9?ҩ7ղM9u@ݶ8N=96Z8 ->9L:49 >^6TD<< \ No newline at end of file diff --git a/fixture/11/0/14 b/fixture/11/0/14 deleted file mode 100644 index 09b1a1b3c8..0000000000 --- a/fixture/11/0/14 +++ /dev/null @@ -1 +0,0 @@ --+;:%:1<7Ĺ>D9q:J2<:5.C5ռ5Z?*8 96 ;8_U5o1¾02ൣ@=5=,/6w<>A5--8?7Ͼh8;:<ߺ^8o}'Sˮ>@ 6fb \ No newline at end of file diff --git a/fixture/11/0/15 b/fixture/11/0/15 deleted file mode 100644 index 4f8776cf9c2e8171b1a2948cc71c962e7546ff10..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|`*x#74?K3^x1FJ-y5Jk7EzxKqB;t82KuK43rbynVcEw1>L&GCH+=G!Q&O zy!JcPxlBNOK0U9syPv%MI>@JLJ83{Fy6n6uGL$@qyKg9aJ&3-DGg7$nFRnQeGYY%| zxV1e)HMu$gGiT)yIZ{3Jmk04 COkk)0 diff --git a/fixture/11/0/16 b/fixture/11/0/16 deleted file mode 100644 index 2cbb62bcae871469d3cbb52716f1f52ec2df8bd4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|_QJo~X}xnaDuxU4-^ygWdUbzVWq|zScNsxkgg diff --git a/fixture/11/0/17 b/fixture/11/0/17 deleted file mode 100644 index 15fafb4084..0000000000 --- a/fixture/11/0/17 +++ /dev/null @@ -1,2 +0,0 @@ -!W&n^<:;?@^A130?&=WϺn ->5w=K<`Ź޾#(8,7:<]2>88_8Ÿ?V4;3K7I?4);:q:>24ḥ0Cw9s<u{:_@rp>4M%">0(..=. \ No newline at end of file diff --git a/fixture/11/0/18 b/fixture/11/0/18 deleted file mode 100644 index d8f260531ef77dfa0b7d9501e75ba149a40d9eca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|_wwrH{9zEry~JQX^BJGQuUwpfxE3`sIj1;~wv99-JRvu5KioUuKK#HiE-$qJuyMLq zHax#{HY~aUxCA!~EjK*EyDF_5JI6K8Hg7zDw(q!`ILNSlJnJ?6xN0>yyOTSYz2iJ0 zHAJx+Iu|%nJxM*IHTk+#xR$I`JWaRwwPio8t`xa9Ixnk_y(&EVxs)kftX(r=z+JUl C;##l( diff --git a/fixture/11/0/19 b/fixture/11/0/19 deleted file mode 100644 index 00f8d0cbe4..0000000000 --- a/fixture/11/0/19 +++ /dev/null @@ -1,2 +0,0 @@ -<=];'.<<4 ;l;<8v 4QͲ<8 j2\5޷ŷ/!.O;a<־9=;>*8; -9}!6pt7x?<9;9370;yb2. 
-b*8E;:`}>g97=P8pU3-:ӿ574L706lm \ No newline at end of file diff --git a/fixture/11/0/2 b/fixture/11/0/2 deleted file mode 100644 index 4299d85edb795581a889a0ebdb374a2e5a898527..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|`_Jg+==I3ls96&t;DJWxD8z41DCHW$3py!tg=wn#spJCrvsI8!~wI{!D= zyC6bpya2VHzE3#TFdsVgzH~fxy2Y?`J1#U(HDx;7Hg&aBvB)wiyZAevG|4Q*Jg~3U zvB9@}HSaW4xv;TExum)4zGSn{zG%1SxHUWvycfC~wxqavJ&wE*woo+w侼G<0;0X:t39:=t4ᴘ9޼>nѳ6~3,:P5˺.<N0'.ƴk9`0<?_(a8u=N㸒0!!b== i;Q3>轨T?޹\<% \ No newline at end of file diff --git a/fixture/11/0/21 b/fixture/11/0/21 deleted file mode 100644 index fb5e25e22c..0000000000 --- a/fixture/11/0/21 +++ /dev/null @@ -1 +0,0 @@ -@%<6@} >>`9m69C<8Sȸ?ʽܽB=K1A>@yGA(45Fn@t8M/fI7@9-S2i6x8?;d29s;9!O.;7b88qi': \ No newline at end of file diff --git a/fixture/11/0/22 b/fixture/11/0/22 deleted file mode 100644 index aa6a8b475c..0000000000 --- a/fixture/11/0/22 +++ /dev/null @@ -1 +0,0 @@ - 0~?;(`\2$8:@RΗw(?[>6P0V5%t8025&:30o6?@I8E6I4‹L9qJbg< \ No newline at end of file diff --git a/fixture/11/0/23 b/fixture/11/0/23 deleted file mode 100644 index d0fe213d8f853e04733deeaf2720d6e8abd78996..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|`Rz0EYQwWBt7weUKHyP-T_r%=6dIbE{VzEU*@JRv;}wx%$6D)v2mx^K6x zy3D=qGiW|qvJSiZF}FAYv7J4Dx|g)PG)guMDjmJ8K#j8xFYh=Qxk)Q)xu!O2v`sWH zyrQ~xy|1|@!RoX|xYRJVDF;0;L5(~>x8=95V\9<Dbx*j`PJ~lZmK6fqnJ{&;rz7{ixEy=duH$*AoxzMK4Jl8PṠ448v8V9ej0*863)=86w9̶;<"2\7@3M;14jL=68 \ No newline at end of file diff --git a/fixture/11/0/27 b/fixture/11/0/27 deleted file mode 100644 index bfa91897e8a924f5f9ae5bd65b3150155ed3726d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|_RH2SarJm5gqzUw@_E{;9RIn*l^I!?ASx_vsqx+FQ@C#k@gxLY-Uwiv-k zJ!~-=HKNrqeynv=ul7J4~|mG)g&Y zwud@kxM#T>z6ZF5x@xxNIOo3Rzy7&lH=i^gx~)7!xn?)cwez~yxZgR)HNQK@HFCZ? 
zKmj%$I0&|MJYT%wI4QtuIZ?cCG;TeWI_5llJTkb+IOe{8xsp98wt2q?Kz}$MI@r4d C)Lvcy diff --git a/fixture/11/0/28 b/fixture/11/0/28 deleted file mode 100644 index 99ccc787f16e6f61ed7f8cc1c937abf4a4c921c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|`OG#@mQx^J-nzKpfXJNmhyKfk>yv_QGUv3xz?yEwNzIrc4^F}b^ytcN^s zzLdE`zf3siJl?SeywAJAw9vFmJ6gH+HsiGQxk;;6:-B+66=)8C<%889+44 \ No newline at end of file diff --git a/fixture/11/0/3 b/fixture/11/0/3 deleted file mode 100644 index 905f9b7612..0000000000 --- a/fixture/11/0/3 +++ /dev/null @@ -1 +0,0 @@ -˶%<_8<6ۿݷ514_AI91:#o6+4E76R,7Bۼ?<ص̸q@3s%;56+<6Ӵ3&_<˷=35$:y9J/R#>i0Ҽ \ No newline at end of file diff --git a/fixture/11/0/30 b/fixture/11/0/30 deleted file mode 100644 index bc51790d37..0000000000 --- a/fixture/11/0/30 +++ /dev/null @@ -1 +0,0 @@ - ༬KG2?: `9/7{<8X@:$"U8(M5y>b74=HO/ 7Z.9XE@9><19>=8<+:A*\>¸28_6 @a3'f5ܶ-߼e~?V"l4:@<>a@{;.38p<5ƽ974{> \ No newline at end of file diff --git a/fixture/11/0/31 b/fixture/11/0/31 deleted file mode 100644 index 05cb06ebce88585c27f93d8f15558cb448168c76..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|_=u_ij|EQ-D2F`>5GI@d4^E-}1Zv%@׺AF˾g?0~7d169 \ No newline at end of file diff --git a/fixture/11/0/33 b/fixture/11/0/33 deleted file mode 100644 index b10dfd066cfed29dadae058e732508b22a1be3a0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 200 zcmV;(05|`qKUu(4ti(L{udTo;y+SpuG+{YFx+pY}yD2o#Dv`Q8KGVL4u)exhKgu{V zxq7s6Ka#r-zo;|?HB>obITb(OzR~x*zMr+3t35q>zos@Hy9GHxyGlMa zyY0YGx-PU-JPEz@ystZey}Gd!vGy_mx@x;zH7mPuz3Dv9HxfTaIupGsE)cw%z9_fa zJ7zSHy|}lMu)@BzIa0Hby~@33JQ%jaJW4(nJVdzvzydWdK!~XJzh=4Nv1GgQOu}ZkfL3A{?x=KB6Ju5#gK&d&IwS2Oyw05@kJm?X{J8C&)yxzX@ywtW# zFfh5GIXSo-G=?wmKFzypK*6|Nz45YpLMc1bI9)m@x;Hf-89YDrt_`-Syn8UPG|s$0 zH+C})JlQ-6E+xFBx(q)|G5kLIHwZd|JLJ6SH1oWVv>!Pdym~nPy^}mqxg5N{y`#A9 Cy3j0?S:4>;MԬ>;l0ʹ0?089:;v@./=8{:`:>6<;1?A'>7@8C::`vG8M90>J6=9= +53Q>0B V.:Q(4`639ԯ>A9L38g1ڹ \ No newline at end of file diff --git a/fixture/11/0/36 b/fixture/11/0/36 deleted file mode 100644 index 993c47dc49..0000000000 --- a/fixture/11/0/36 +++ /dev/null @@ -1 +0,0 @@ -8й}5A9_Tz@r>=26<ѹC=~<26)-;[9'/U <{-;@8R>ټR@r \ No newline at end of file diff --git a/fixture/11/0/37 b/fixture/11/0/37 deleted file mode 100644 index 2fc46c23b9..0000000000 --- a/fixture/11/0/37 +++ /dev/null @@ -1 +0,0 @@ -|8lkX=O$ix7R:;I-2;̹N#=-i@g;r6P7D; 88<<뿾]5lU603@~'6Y=d57>F>û>;eY3V+44*b;95O-=! )D8 \ No newline at end of file diff --git a/fixture/11/0/39 b/fixture/11/0/39 deleted file mode 100644 index 263bbde985..0000000000 --- a/fixture/11/0/39 +++ /dev/null @@ -1 +0,0 @@ -7ӵ;n=7jvc8J38⺫ٷ̰Q?$8=;˻A øZ(99;;)(<8B9u >ᰉ:}$;A4#8+6:!:3/=MҺ2I?6Q8<7?`kF9c:1%?8P*289=Q'+=8A \ No newline at end of file diff --git a/fixture/11/0/4 b/fixture/11/0/4 deleted file mode 100644 index ce9d4b174c..0000000000 --- a/fixture/11/0/4 +++ /dev/null @@ -1 +0,0 @@ - 8D29ٽg>:ּ/ p9:=?{::x7 81L70o6)@8967䶭%0Ӵ|:9v<66TB<&P_к<$ \ No newline at end of file diff --git a/fixture/11/0/40 b/fixture/11/0/40 deleted file mode 100644 index 4eb39f7e4d..0000000000 --- a/fixture/11/0/40 +++ /dev/null @@ -1 +0,0 @@ -`,>n*;Q<8!0y28J5P<\,7i8;;߷;2<7D>9~86h׻<9/y/><7=j9\8!8. 
[The remainder of this patch deletes the fixture/11 test data. The GIT binary patch literals are garbled beyond recovery in this copy, so the deletions are summarized below and only the legible metadata is kept.]

[deleted: fixture/11/0/41-44 and fixture/11/0/5-9 -- raw/binary chunk files, literal 200 bytes each]

[deleted: fixture/11/1/.zarray -- 17-line JSON with "chunks": [100] and "compressor": {"id": "zlib", "level": 1}; the JSON from the "dtype" value onward is garbled in this copy]
[deleted: fixture/11/1 chunks 0-44 -- GIT binary patch literals, 211 bytes each]

[deleted: fixture/11/2/.zarray (its diff is not legible in this copy) and fixture/11/2 chunks 0-44 -- GIT binary patch literals, roughly 156-305 bytes each]

[deleted: fixture/11/3/.zarray -- 19-line JSON with "chunks": [100] and "compressor": {"clevel": 1, "cname": "zstd", "id": "blosc", "shuffle": 0}; the JSON from "dtype" onward is garbled in this copy]
[deleted: fixture/11/3 chunks 0-44 -- GIT binary patch literals, 216 bytes each (130 for chunk 44)]

[deleted: fixture/11/4/.zarray -- 19-line JSON matching fixture/11/3/.zarray except "shuffle": 1]
[deleted: fixture/11/4 chunks 0-44 -- GIT binary patch literals, 216 bytes each (133 for chunk 44)]

[deleted: fixture/11/5/.zarray (its diff is not legible in this copy) and fixture/11/5 chunks 0-44 -- GIT binary patch literals, 216 bytes each (130 for chunk 44)]

[deleted: fixture/11/6/.zarray -- 19-line JSON with "chunks": [100] and "compressor": {"clevel": 1, "cname": "lz4", "id": "blosc", "shuffle": 0}; the JSON from "dtype" onward is garbled in this copy]
[deleted: fixture/11/6 chunk files preceding 27 -- GIT binary patch literals, 216 bytes each; the deletions continue below]
diff --git a/fixture/11/6/27 b/fixture/11/6/27 deleted file mode 100644 index cbb7c8a57bf797dc99191611e4cd9a01419052ed..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216
zcmV;}04M(f0Wt!}0002U0002k0000vH2SarJm5gqzUw@_E{;9RIn*l^I!?ASx_vsq zx+FQ@C#k@gxLY-Uwiv-kJ!~-=HKN zrqeynv=ul7J4~|mG)g&Ywud@kxM#T>z6ZF5x@xxNIOo3Rzy7&lH=i^gx~)7!xn?)c zwez~yxZgR)HNQK@HFCZ?Kmj%$I0&|MJYT%wI4QtuIZ?cCG;TeWI_5llJTkb+IOe{8 Sxsp98wt2q?Kz}$MI@r4=KVba; diff --git a/fixture/11/6/28 b/fixture/11/6/28 deleted file mode 100644 index 4a9bc40525a638825aa5dc2f6fcea95d75944479..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k0001sG#@mQx^J-nzKpfXJNmhyKfk>yv_QGUv3xz? zyEwNzIrc4^F}b^ytcN^szLdE`zf3siJl?SeywAJAw9vFmJ6gH+HsiGQxkxlA(nD}cJ)x=g<0Gz7g89KATGGRe6` zJBPMby^6H5IAys9J1V*pxi7qUInk=uyH~a(!Z|v3vCzQOy}!0Xy;(Z4Ho-MD!8Ev) zxuUzJwt>9kG9o&0Icl)~KaV_suD7>ZJqSFcy;wZ)u;aSxzfH75qFFwuJF2)lHk>+u zEwZ>mxGOfCHn6lk!707K+ zyh%AR!09?8yKgoty{9xqySTX$H{-SzHXE{1EFU+TwfMV2xX3(jFT%QQHhwoLIMcg! zxoWw^wNN;{GyyhiJ%c%lu)MstxiGY%I{&6Xx!t*RtRFjFK9;h&soT85KbyUvnaVuHF-W_H#9w;xJaN+r0mKUTaVA8fk8G#S0QIuk&*y52k-wgx_7Kzp+lzLq;KGXywrwG+Ff SJRUX1y?;5!H#D_-KJvJ60$rp4 diff --git a/fixture/11/6/31 b/fixture/11/6/31 deleted file mode 100644 index 8c13b505209ca0a8461dd6432f83ec4d302b0126..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k0001Ju_ij|EQ-D2F`>5GI@d4^E-}1Zv%@tLy=}aME><-ys~@%zwz@gb zGFY`NJE{LVPr1FCwehHwrrYyvU}Cz_YSByKOa~z8Sc?J88L}zaKQQxbv~s zx#mARzNobITb(OzR~x*zMr+3 zt35q>zos@Hy9GHxyGlMayY0YGx-PU-JPEz@ystZey}Gd!vGy_mx@x;zH7mPuz3Dv9 zHxfTaIupGsE)cw%z9_faJ7zSHy|}lMu)@BzIa0Hby~@33JQ%jaJW4(nJVdzvzydWd SK!~XJzh=4Nv1GgQOu}ZkfL3A{?x=KB6Ju5#gK&d&IwS2Oyw05@k zJm?X{J8C&)yxzX@ywtW#Ffh5GIXSo-G=?wmKFzypK*6|Nz45YpLMc1bI9)m@x;Hf- z89YDrt_`-Syn8UPG|s$0H+C})JlQ-6E+xFBx(q)|G5kLIHwZd|JLJ6SH1oWVv>!Pd Sym~nPy^}mqxg5N{y`#7hH(%KR diff --git a/fixture/11/6/35 b/fixture/11/6/35 deleted file mode 100644 index 3cc85dad68ffcbbcc0a939e6b9b97da70538755d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k0001?zhk&&JPNY5ygoB(Fw{R&I^;AyJ58|EtfM}Q zzP>vbud1|cxa%;IsiU@^xu3tvxf!_EFxo%uxEQ>*x#%#qIMg|xI>9@3v>QOJwf`zaKk7y%)dHwb(wfIk3N9y;Qb( zK!dw-yBt0}GB!NXxq+@jJ)^LGJTf*Zye*{KJ6ky?sgb`gzg4~!xCyrBy%#(Vy?ec; SEz3I`K&d!VKH0odKytso4`Xlu diff --git a/fixture/11/6/37 b/fixture/11/6/37 deleted file mode 100644 index 23c09296f677b2bd66053b8b429d7e71ffee3f41..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k00008xO_NlzH7T!y!W)QJx?TQx5dGDH&QyyzDcYt zGPpa;xdFRQwfJyNqdH|IMDx<9+fxtF!~x^TG+J1ef} SG-$Z)xRNvpFgZFkJ&3sxx?Lpz diff --git a/fixture/11/6/38 b/fixture/11/6/38 deleted file mode 100644 index e4db14a18fe3fbbcb8db84dcc54c00904e28c7b1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k0002wI#RqQKuWrXIa$2kurEG~x@kaXoz6RQHc&UT zy^*)cJc6}xFfqPOxPCk#y}dcGF^#i(wA;6Cws1S@wmUz|G@Q7}JN`JHz9YP2uU5V; zK$JSvqI|W?xZA((zMDTwKAX5iI}JGhIJ3Pxzq>r^zrL{IwOzLeHEg+6HiR%UK#RS8 zC+9XAv{}6eJ!CZzy#+VUKBlOMm_|OsyXij3J7v3Bxih#{zbiBdyT3Fly<$7(Ii|V6 SHBT+>Jt4f{upqg;DMUC+t7M`8 diff --git a/fixture/11/6/39 b/fixture/11/6/39 deleted file mode 100644 index 76ccd88aa5c8751aeda5b78f5476bf2333f200e9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k0000txzn|}J8nJYH)^nUvtu|)Gnu}WIO4jiy{x|K zvJt$ez`V8Dx6H5zxlunPIR3X1tf@VxJC?o6yFt1Mx5KzvDC0RfJ3F^JZU$Fy%IB< zytlhIxxOj>HH)^`wFkH{Jhj2_v@Sj)I1xP(!1%l!I5a+yyfn6KHqX5*I9kErH}<LQ`Hx{75JI%g}GE%w{abT?g diff --git a/fixture/11/6/41 b/fixture/11/6/41 deleted file 
mode 100644 index 132d74acf32f44d900f8098315d0877dc72273ef..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k00013JAXOJIh8(vJPNTfmCra_Fr+zlz=l82D}BCC zyF$QRyp6CAK9V+cGm5;Ky85}MxuCUcv~9Itxl%9)wPLO%Ikqz~DSWlKGS@w&x^K4C ztN}bFGu}HRJiItUyrn*-JrOL2y*|DYG@ZHLzEQq~w>>hSJ-;^^wiGs|H0wDXsUCHJj*w4Hx2CQ2K_9OXKLj{#xWu_~pMt#Uy2ZA#G)_Olw_-Pg zzCJwTzGFHrt+}txv^2f`ys|yJG-&8E!RAsIi5A}ztgEly0AIzJJqySxjH`6xW~Myx^cMVy2CnUzz@3= Ss=6}7J}y3IzM{WnCS$onlxt)F diff --git a/fixture/11/6/44 b/fixture/11/6/44 deleted file mode 100644 index 4157846bd646ce5ba1b5f1c75e4f52f9fd3c2184..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 132 zcmZQ#G+;Wxz`y{)EkG;+#5q9xKj`Ql1(hV@ZM**MklZzOTc}0A9)+FB+g{DfG7)o_ zVsXU6Wy|&5Ry)@3bT^y5Z=Xfu`g@kAcbe^P+V*5yu~mZAeEWCXHy!HTmSnu$@{Y;J Z?YHfe_oQwK+QY00G#mmr8T_?5HrO^lzJ)zeFa1Jhn1zJ6Jg3J-RecIm@}RI9Iknw|vS! diff --git a/fixture/11/6/7 b/fixture/11/6/7 deleted file mode 100644 index e007eca7ae62d6fa7773baae33bad7ae31e1ca91..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 216 zcmV;}04M(f0Wt!}0002U0002k0002hIVQaGI7Gd7x~@4(JT*0xv^&7CvPC!%waBIM zu_m?KEM2t5J!-&Sxc0gOGw>{ZKF&Wcx)~5iGGe+1IjXZoym_?yyCyuFKh`rJFX1q| zJf%EmuM0FeIgPy9Jn%lLJo~)izpFVFx|lR1K0&z(wrw;qy%4%xCeO6qIEX=5F1)&| zJzKtxy~DVNwDh3BbI;1m& zH6=Z3x_3P%xKu#OI9#`yt=X@xMR3ZxLL52 zH2=WCz-zTsvJEoQx!E8LFyOfkJgPBZIA|<2K*c!Ew~;!HzNjx7H8C((wGF&cx`-}x zycIu4!7a5BFAq3WyNW!My@$B>I4L}yy-GnYyFk4LIFq&Tx5PLczR$kMxKKQAxMn`i SJcl%JHG?!9G2FEQv`D<$m|oKW diff --git a/fixture/12/.zattrs b/fixture/12/.zattrs deleted file mode 100644 index 9e26dfeeb6..0000000000 --- a/fixture/12/.zattrs +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/fixture/12/0/.zarray b/fixture/12/0/.zarray deleted file mode 100644 index c33bba9220..0000000000 --- a/fixture/12/0/.zarray +++ /dev/null @@ -1,14 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": null, - "dtype": "!=j ܾ?k>=A?n?3?唾SþM=1d=Z@q>+Ȇ>7M?92?CIZ?a'?Tfpf]>U$aC>n6zZ~?=?lp? 
?1vdVeԾVIR@G>P$?zzV==????p?^?^w@p*>>h@Fv.[Q?z?3$@y!= ܾ \ No newline at end of file diff --git a/fixture/12/0/1 b/fixture/12/0/1 deleted file mode 100644 index 9b9429fe11a97eb027061eefd26fe9c031de96e6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM{T2dci+*`huslFvVgE73osZ)Lqyih(}tksQC_*a5%7>xDl<70o{{fil01 z$#_0=>3F`eCS5<0>&CtlmiE8!cz{1OVw=ACI!wM{1>ZiLrnElCi0M8Vo6x^RY7Rg3 zTlBv<0!2R_Cyzg?E3dy^D5yTAQHMXxL^8m!k&Hijw)H$V0))OK>U%$3 zS=~RsulPPz>dim=(Z;`@dn!K#EC;^2$VtBm-}b)~o*X{{6JS4Q$E!a*mzF=ojzz!M zHKjj9EHAQ;^;iPByYd3jlDlUg2BJn^9Vo8$r!&M&W*nr_wT=LlU%;--J!o0 z7-v1DdE!5&OH4lFrvE=fc6-0x^`pN(b%VcdQ-!_DL@q!rz|_A!q8>l)Ff+fQEM>mT z2BE(;4`Dyo>NCBrF^fNXb2C14Kli_cTZ}&e)d0ZxSnt2f&1%0CV`snAu(Cg&LodEk us-Hg!V-dfLqM|;nwemh#F)}~S3hur#cEP@qq?o=yg44gf)T_Nia>Tw+U&}E7 diff --git a/fixture/12/0/10 b/fixture/12/0/10 deleted file mode 100644 index f83a3a63a9fe05255e30c7f2f6fafff4ffbd2ec5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}Zj{HBbr8qxi$pgRlmR7s+p};>!DM&xK^1eSqBKN*?FE+o1aWcOw(apZ7 z8Hm4(BbPoo@t-~vU8_GxP~pE=vT?tLI!-@f;5$CZf;T<(#i2hgMuEQqjyk^pwOGG= z(*VFy0SG>;*#*C2pF6)h)@i?kVUfHRZA`zJ;$l9k8S6gb(w4qlmASrJ@jX63R_wf~ zLaINr^P)Z}Jd8hcN*q7!!KlA->JGoo0Y$xDQeHol!uCHR;`hEdRAN2+#!Nqo+-$wW zane6Ygsi`}k}W?55Gy|cc9TDjDSkb?I%Gdw9)G_2Cr!WRdvd?tf4INM`Qg53d!Ig) zAXUDgw@N;Q5S~8(dIY~~#}z=ES3y7Krb)hE-(bFH3W2|3tkS%fR|>%Lyr4f?Lo~n= z11mi{Zj!%pfHS|1coM(sdiFn3H;upQ3~E0Nh?GCis1?A%haJDc0=_?gN~pgaVVOVh uT#~#pL3+KOQI0*6Wv@Q8q;$WwRfWD2zr8+?AvHd+&9Xk=`!~N_-XTAyd&5Wo diff --git a/fixture/12/0/11 b/fixture/12/0/11 deleted file mode 100644 index 3e466a751e6ce2c3840e7f68bd2064fd69f3b84c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}-f&o5ZRLZ~i2ME8(KV`pni)g`mpbs)c!b!I-Ag?c^uZ%4nJ zi?F`JW?etiW*I*Ge+)k>;i5khu--l*+4aB4i;6ytJRZOQx92|!ZOlHm@fg4D1`$7% z@VPw(=G(uG@mxRcqR+n4*0n#8)&9N3I=DYTjM+XWV0k~p&-=bQy|g|<_1wSq6#~8? z1-`!%O_IOB3uM1p5h*{D2JSKhIKxj+D^Z8`jkB|NtC}w#yY-qmjOU8k=eeU>ju6s zVo$zZxH3MTvF|>sw1Yl;0*^jpEIGcjWX(My7hAqx`{%#^TPQ$?X*xfuw0ghvlxsd$ uPOQKD2mQXyvAw@Dr%}DxY0AG|-^M-)rY=7KwNt-y!6ZIdMPa|q00Td%Tgtis diff --git a/fixture/12/0/12 b/fixture/12/0/12 deleted file mode 100644 index 489b1377b4347766141dde110f0573aa585f0248..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|BNie_i`AI)M;^)5&qkzA$?Tx?qJ5WAc8V*0Rua7=SOai~P;xfQYh8{hZ zy-7cHjVnM^O4+_1QYyc1_@h2ECxgClKchc%TY+FAKp`gAg@r1skKn6hI=omlVbR)lrEQCGB{uMpv(bqp~p&>omPYpm; z`=!59gao~RN<+UbTK&HJJo3IW7&gB}cHKX97PdcZC(b@V-txX&=q|r4=RH4Tt(iVS zO%%SMiwZw89b`YNydAzVj!C~!8mYg_Qh+`vv!v>e35LJcjC<@A1DI%auP2`U}5^swO}lLta1P;I2MH%gH_q*3Ld8 z&oRF~G2p+m`3%3yUzWUE*CoGWo}|CFia)>Nl7v6Pi9bILhA%!1Ocp;f$4)=pwMjqMH|#$45E=G@n7Y3Q`~W_o)+s>T zKA1n3`BT3JdN)6$Jhi_$hp)em*NMFqISoHpvrj)hKT|*BPyIeam(IU)lX<;~`*%MF z_u@VQdOAK!kf=VJ0-!#@f&0IC)000GU*f%EBsD*>j}yRRo76u{1(d&4r6NG0p$9`kbl4B5sE%-eFeTBR7XDzrjNgm$yYsFXvx2_5X43R diff --git a/fixture/12/0/15 b/fixture/12/0/15 deleted file mode 100644 index 8f26830f6b4d4f4cbe450ff5dec038e79193f97f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|~+pInl{wzPGjnBUTeXGB$;-$Y)<2Am=;IhA7a7 z>hiu?zN0?k^8`PVb5}od!f-#L96vr1Rdl~X5hOplE^@wBf-=CB(#Jld?mNHOvFg6_ uOvO9s-NwJ%}qv diff --git a/fixture/12/0/16 b/fixture/12/0/16 deleted file mode 100644 index 4235871468..0000000000 --- a/fixture/12/0/16 +++ /dev/null @@ -1,3 +0,0 @@ -D*?7?"A3ɾI>?C:t,ݸ?*d?E?>>H씾dN ?x>(An4^_x3?9,?+ ?Q >Ӂ_/9m=\T9?=G9Į>DV @{8>X?U?ʿоXX>Y&?T>?w?= ? 
`?@^ -߽\|s?|׿i?t ?pee?ῠq_ -y?ǃI=E?s}v&̾V'>D[84?D␾"?N?2O?k|X?;9@@K?|R?Wf? \ No newline at end of file diff --git a/fixture/12/0/17 b/fixture/12/0/17 deleted file mode 100644 index 68d2a64bd1..0000000000 --- a/fixture/12/0/17 +++ /dev/null @@ -1 +0,0 @@ -~?$V?4?4ugg?T?9#?~Gx?q>[lz???->?_!np?AQDT]?G7?WU@.>b)' >>׹>F?D)J>@kŕh>柱=J ?2d>e?#-?=,k > ;j1]>Ţ?*Y?%?9@>#?{>;>8+?' >3_>`f;?+F`ڿ0>yag?׿8v?+R?[=b_2ܿu>anQ2:?HR?r.&˾1,ſo?K?O?]>t޿@6>x >I෾?b ?`UE? \ No newline at end of file diff --git a/fixture/12/0/18 b/fixture/12/0/18 deleted file mode 100644 index 97165ea436..0000000000 --- a/fixture/12/0/18 +++ /dev/null @@ -1,3 +0,0 @@ ->_>M?J^s(>fvH>(?+??: v?F=>;? ʾ wF??+7 ->w?ۨ| -'?O?[o0a>"?;@f?5þ{˿?4?m~>E?1DqĔO?p?If?u?r7b=BûD>oX=j>F">@߱?0=e]>F?R\?OEr??Z?Q;-q>6;?i8p5?%Qȿ">s>#;=a= *& x??T׾Իǿ+WG.%?8ǽ1+{>ՓI@T?ֽ?5xn.CƳ=򊥿ꜾZ_?Y -j 3>O?35 ?w>oē6-?S?@r.q`?ſ?Oz>E3?/w%#>sZ|OzNŚ?μ> K*m_?<b2???BA}>XGŵc>>#@ I3K?>4-?'N>Q[??R>_8s,@8?'#?>'Nѿǿ̾ h⪾g?pɋәl(?%?> >vk>!Ƕ?g>G?J־{G+v,a? \ No newline at end of file diff --git a/fixture/12/0/2 b/fixture/12/0/2 deleted file mode 100644 index c65cb4faf6cff06b9430bc9d8bff4748f2756447..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}Oe-u9){JK4l4y!+mijO_E><_=}Q*1p3V>v%|_~O5Z8aKa@wK2a>ZPGqK z{02XKP|rOAq8`8wa?`(ys&u|hU;w`X`ma0nRPess48%Y1&l11_@|r$<_)|Xym@_}Q zib+46U2Z>Zo*KVv{fxgkI0C>Fr@6jq3`f72uarO0I+MOu_2(T6{c1v9_MNz^{tD?-0vALPEl zJ`ums{31UDYfe7WcFH}?dLTe(uHHXnnfOssjfX-m*T$xB8$JI*U!I)e$>Aqw*f%1)dN85caOg%9S^)Sbfmuk usd>D%)X_h86N*2E1W!K>*G9f0h@n3fj3qw?&dxr&TkO5Z0i!;)m6JX!sliGB diff --git a/fixture/12/0/20 b/fixture/12/0/20 deleted file mode 100644 index 1fa5d6a28cde7f17e9befc183ad5aace817600be..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}Cg*-o6FeASS^iDtNumrySIMF}k+N!=&`ozDiJ3qcOld8Vj?yJ90{uMtk zUA8}wt&hKq19?9vOdG$Fe#*Q9;{?8OZpysmH}XDf7=ypuilsj;=m0;#=v6;wvPZw^ zu#UecW0yZ6F_gb~;}|}5Zh}8x*a5y4#g#v?35&jOf}_8SlfOP54og1H2P{CS@^QZ# z@_@bq8auyw=Mlb;Fgv~rPhY>+yzf4UNL#+iIIKU{2PVIHLGC|XKaD;PyP`kda_l|# z*hjxN?1sLdfw8~6YvjEhIWRudM>{>3LaINCwiv!*Zt}nO(PBWk%P_yL|93wqi@?9; z7<)cjXG^~grZm8L%#glS5ly|KBVfL@@_0V?{E5D6o{c@%F)lvl77)GM-!8s-#$>;i zEf_xs<&eH!N4mbaB?CW(3t2vqzB0d+6`((UIGDd&2ZX;r87n_FCE!1y&=bE?UDG~Z upP0Y<(QUuq>yST7MY}$>SizI@!N?^M=1YXvG-- diff --git a/fixture/12/0/21 b/fixture/12/0/21 deleted file mode 100644 index b5e98ec68f6ce6855bb8c759053e135ca40b7467..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}^rnWy2qzbI11`X-tdc!vMkznP zQB^;*AQ?X`S#>|Ggv~#gO^iP*j!8bs*$KWw$Lc*84voI!439r4brL^x9K*h~d#OJq zah1Lo9&*2e0WiF2`cl6xt8YI(ZdkwRM(n+;kZ?cVQZ~P6fzm#}F?qj|KYBhFHxWMx zrYybOQVBgAQsX|-o{zrCxnn<#fv-O~0ByelFXBJHVw=9QM*lwpaFo8o7BxTkwlhDo uO~$^fy_7wvPPjiA^`yR;$ko2kq?5mmrN}>6DE7Yen3+Fh@)^G_l$Ado+Pe4v diff --git a/fixture/12/0/22 b/fixture/12/0/22 deleted file mode 100644 index 44a174c497..0000000000 --- a/fixture/12/0/22 +++ /dev/null @@ -1,2 +0,0 @@ -*N?i;-'q*W?n$>U"*2:I-#9?S A>>i;?؎? ÿ&t@.># ?apm?P?,lw=ATo?9=o>/ ?>ܰE?ݾ|d?m&?w?=]P?f?Ҿ> |3@o*? 
-@TWpA|< Ϳu2Ti}dE?ϛIf??mq>׿ M1|>NHz(7p؄ӾV>7>|?!>>X6ajӾ(:"*sXW?;?iCď???E :(>JZ>?t)W)A* \ No newline at end of file diff --git a/fixture/12/0/23 b/fixture/12/0/23 deleted file mode 100644 index 923ef95c259195c70c1f8abfe248fc75d1ce7b83..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM~D2|-Y+StC?hR46+`>VhHygfg~ zu8_W>X4t-uj6J_yW5vEX2$4VDt`k1G3YWj7NLYzOL3)H`9 zpzc5MrHnr+OZq;#cmTe_DyzR}t*O3Cl;u92TaCY{*N(kOewV*??Zm$2BCWq@n7Y3+ zAg(_CY81bWx%j?aDI&j-M=n5Vd>cP+`8GeIfkwYqHvvDgFKNCikVwCH?Dju&q7py+ zw*^0m!+5_pmZHBGyKBAC(GtH2)*L?zTS9d>d^1?o&*u_7HzOp?q$ArH_re+R%O!zaA6I>EkEKsLS!3ZOnx z6eT|~-jUd_I1^+rFv>WIJeBuKwFcKAIwVZ6SM?2W%dEC@esffB!>KUlw*kTSjitYp8r z0SvuzD!9ILI0L^Fp8>z4;C??53cWt`#@fGP3t~SNJp?~xKk7aqPGdidKz2ONy&gYA z&+Wbj`&d6ISzkU#K(N0(!~wqjfK$Ge`y@W^z{EdoU;Vx5Jw87sT^GKs u6Nx{oxD9lN#RWkAJ<2>lN&CNLI+;KJT0p;Mn1w%J z`1n0q1IfP_#DzZSHb6c0Mx#xG2B14rM14pTv)zeX;QyF zNdLYSP(#1qm6kuI8we|{(U}x z=+(Xu7A!sK<>o#nf4o1UzXQK=q!T_Qe_lVDM7Y1ixc@(U>Xtta65GGSCB8ldfDpel zam&8%bhAEwbA~_WhVH&gr{}(;jJ`g*YZ5=M>43jbq1-<|uq40a^maehMdv;XI2%7m zyC1(Gr`*3XQm?!ckBz+SKy|-U{%Svn5L3NlA5XtL#}z*zlN>*kVmH6Z-<>~Ckqg*U&ExxT*d@P)szsYySf&ImyFNio0Cl$gJA)PO&y*P=er zd#gT4Ny|S5>8`&%e6qiAm?gfm*?~RByS2Xhd>udx*q**s{0zU@r~W+)jCenywK>15 zgBHFX(SJYY1BXB7*E#u`BRh5bLB`?J3-GF$TbDNj<(HkIOxAN>jgl7(72xD!ab{3=O^_q`bdj zWS71GNHsrfk0rjfRbjppe&aliM4vsnnK3^gCo?}G`n5mFqbok2lGMM2qFX-<$E-hT uj2%FL(YZfm2>ZS(tv^1OkBYubB+$MGYv{c@lcYcQY0JNAQ-Hr&7@R*@FveQ| diff --git a/fixture/12/0/27 b/fixture/12/0/27 deleted file mode 100644 index e79cd846aa..0000000000 --- a/fixture/12/0/27 +++ /dev/null @@ -1,2 +0,0 @@ -?(K?iy?dտD~(;B?ZC?n)l* _'>MֿT}c}?{TP>H?S8I?D??Cj>~1%=OF?:B?7)?Ӑ?9T1ɾ?g>>#?aJ'?>/?_g>?!;>Onӓ;g-׾@;N>?:S޾?M>7;G+? kZJ>?>M?(p> -8c>秫C-cjTҿH>gl>d?o`"?r|<0@ !A7n'?h@?vIm]?g鐿0>>>> \ No newline at end of file diff --git a/fixture/12/0/28 b/fixture/12/0/28 deleted file mode 100644 index d1cb3660147a0d2c0de9d412f22a5a434cbf7e05..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|7>6t$#2iL#M?*Bbr#hSm$@ytcJRZP64oE-tXNkTjn6^GTsJ1^y zD84?9el0$ho?9KgN{!XLgU=%7FHNX5T%z#czqV9UREV$41T zl()P>#vDJxn1R3U_XxjW|0zFWgaW^FX+b}nFjhX3Ipn??cA>sev1L1KcVWJ!@j5?K z7{0zJvJ$^R25CP~A=tjlRSLfz6(7KI4u!u;uvWhSMhd?k-Z;N4se3;Ya7MpCTr#|yCAGi(z(>C!-ORor z5CK1qS*AVnk{7;xV)wonYp1`^dKW)|!Vy2p=^ekP@{GT3F2_IUrhGnbKg&K3Tj4(Z z>Y2Yx=)XTmm(IReaZ*3*t-3#YsHQ!yI+wo%l^?+6SslIa5`aFgtrWi@@d`dER@^=f ua(+Jy4QjqIxH-K6<2JsBnh8A(U<1ElnFPPV_~^bu1Ry?==~%z5=;prFQNnfr diff --git a/fixture/12/0/29 b/fixture/12/0/29 deleted file mode 100644 index 4fd9f62569..0000000000 --- a/fixture/12/0/29 +++ /dev/null @@ -1,3 +0,0 @@ -?mze?Du5[]?Ĝ ?Խ=5NR\6e>ŢXq?E?S/Ñ?Ҿ6Ռ?9?H?Q>D\?9 >顳?k>N?dUluJP?GV9?f?=&qq@=w>w] U?'>ׇ>?T,= ?ݶ>hJ?,B?vfڿ"E>Xk?WѾ @Vs f -?.l>4z&?l>=?nGC?s!}Z1 ->JTP?Q!?@?z>13?jf٢?6Kvb?h?nj@?$?Y|B@?s>Xž9E?v_ \ No newline at end of file diff --git a/fixture/12/0/3 b/fixture/12/0/3 deleted file mode 100644 index 4b438ca7983eb90dfd379d1757236f10e056ce09..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|9n+m_~U`D@7HGjVz&~Cp9opB|$$a?bs0=&z1W>=%d4fNmASXXVX;nX~PAxv2Sf;*g zQHZ@Z@7+FJTL8d^cwaxhMjJqS?)5&M)sMd08YRAeAvHf~K7_u3VO~G-Q8K<=YTG|z z;@3X_45_};@1?!xKDWGo=MucOigLePD~rDl_@6%{ev3a`NQpl`7LL7fE%HCy5V*W_ z*as*+jnpvi&~)x){G_ zND00vXNkW;S^YmM>6Ja-DU(0Kfc!peiYvVc(!afusX4zJqvpP=|A4=D^^U(ShXp{I u&5k~woV!1&M@zqqG@!q%PkO)38Q8w`fE7IXp%f0mu0@!rz*eTb5=k4HPSyNj(|VoC^Em-1^++zEr36@D(b$fpeH|+Ck8(`7;wKBfbl`JI&W67e zu(3X@>=nIArI5cf`-?Tr^&TPMH`|Z9S 
zT=qUM=zPEBAB4ZW3=KZt3SK|#bbmh{;=4WL5G=pCBaJ?B?hrqk5{5oU`JF!r(1E`N zZ8E=SG>kv$i@d)t6$-yPMmxXWH^{&6qd&eK4L?6^6d*p;vP?e%l9)e-amBvvWhp>P z+OWR7p;^A4CN;mMo(n(Syp+B_lA*m^BT~N4UunL)6(T;ggXX@%e9pczQ;9#6NS40) uV8lMVwPU|&uH(Lo#GStrUhO{+53oM~a2P+Us|i4NF-1RV8Zf@*y1qVD3%|Ai diff --git a/fixture/12/0/31 b/fixture/12/0/31 deleted file mode 100644 index 1ba45c2c63a2a6268aae94fe3c44771efe04e8af..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|4F0MakTw%ZE9+tnH>CZmNI~Kn=q7cAw7@xlbbDzGFVMRSduAe<4RRBP@ zn^eAcW{y5P9nin^5C6O%9IHNo2(Z5y8gIW)boxFD+zY>K0n)u?a0)*iTmnF;8)Lo| zb>cr5x%$6v>=!=14XeI8PYAyrcPKxF#aq7Cm;u1APs_i;);K?}kkP-tvOhmQiT^zj zUoO7x+oruP#{)myx0b(cIW0Xp+mXM2k|#caeyv;n`C=)-FO$a~I^c}#nB`Lqwk&{2yL#IA!E}p+d2cJKQ u19v|v+?T$?Bw;@Q`2Rod^@hK>F%Ca)GyXsFEGIuaA3?vYrF1@4Sg$`Ddc7e4 diff --git a/fixture/12/0/32 b/fixture/12/0/32 deleted file mode 100644 index a53c50a21283ff75e645ad27858ebb1fbd817502..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}J1cJX`jrP2dQ+d8Iq#?g4T6aF#M9#gLvLruB3z9z@?4rNAO^Q9snpwT@ zOHe*-d40UP412#kHM&2@yzoEw01Q7f9T`8tF$O-g9e6)}TpYkj9(}v7mlD5MQ58No z+lxPt?VCJwlDNKOp0mGQ%-_9a8Z1AJQCGjXHUPjUjS)Z}oU1-Cc;gM7aiG*Q2K@?$>%n~^?a$M?T}kgh)d`hdRV z@a4VpMpnP)PO?60b%sCaS0uluoIyWIk=MSjufMxNl(AxR~0{BPiVg&@zFd)&1t@=s2x8=$!WYFHA%jdh)uurX%0Q> zIfcFw+g`t&Q6oNdTDm^AL#@72du~6ZCx<>$RiwYVFRMRNSwuepJb*vB4$VGNofW|C u+KWGd{CGVj*|a{KRti7v*Y`hB)eFBNIR(FF-C4ik^N7Ebn1;Wh2JOG#O2QZb diff --git a/fixture/12/0/33 b/fixture/12/0/33 deleted file mode 100644 index 57dad6a2ff..0000000000 --- a/fixture/12/0/33 +++ /dev/null @@ -1,3 +0,0 @@ -r>rp?L@4ZW(ޝ?\>z >rҖ?YB5>+>˦1=I?0 -=v=˿?FD^?(d 15]>q??T?W=JmZ?}E>+ ?K|=(W?")>j}?z -><=~zc=?3[ξi#ZϾԶ=8zuj"=?t?ͧ ? }Cde=p?ſ6z>6?ѾW=P?)͐q=k?ň?q?;n{E>PE?ֆ'Kw=^3=?!F?oJږ_?+7=ᐿF?C@˖&$ \ No newline at end of file diff --git a/fixture/12/0/34 b/fixture/12/0/34 deleted file mode 100644 index 3c685db58ae6f7969bfd1433cae62c5562774473..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|@0fs(}M5(_;6Qe%ox%599BLKiXmX*Jb-s8S~xe7lO@MAyTS5H5$iP*pH zp)9{EduG4>hK0Qzz?D4KLa)C?4dK4soD@Gw+91F92~EDRGub~$x&A-BQwqQ#L+L%L zusT3%EA>8ic$YjY2B<%{Q(ry>R)3LaIIw-ow9&f6c%5 uIy}F$#za01>&U-*7Xm(Sn|{BB9bCWaSf)R%!5BTtUuVBYQ9nL(9K63*F2_>< diff --git a/fixture/12/0/35 b/fixture/12/0/35 deleted file mode 100644 index 6565391175..0000000000 --- a/fixture/12/0/35 +++ /dev/null @@ -1,3 +0,0 @@ ->?g)? -@3/>`1^:@t?[Xc?qp+?T?V t? (Hk']p=%*xd?!?,h>7e+˿Y?%;VJ!,??/?eCbm>7>?f?owG @ig?>=wM>NKM>>G9*ѿUVMc?BʿA> 7F>IΰҼ隿aж?|ܿg_? 
>NJӺ?-C?>8mP?=[?9^ԡ \ No newline at end of file diff --git a/fixture/12/0/36 b/fixture/12/0/36 deleted file mode 100644 index 4880d11394bad3e6e1d818ba2412a35824b82fc9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM{~j3&OrTCG0sbsE3~f$Bb>;L*K_Op-nmqX0ipp~*g)cmTjM&xgO-W~x3Z zeM~+vAWFaN)jU5bN`t=kzG6T6;hI02eU3h`I1WDw#2dffXJkMBTVFn2T`#}IgylcpBho%X<3vC4jsQND>N&n2KYu*8QhPodUyQ#9SVKPxn2$fZPD#G{ zFh0L~EWj}U diff --git a/fixture/12/0/37 b/fixture/12/0/37 deleted file mode 100644 index d96b7f93224a75e64a0dc5aac70eaf764f802ddb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|PhOIxjmMT72Yau_R7#F`B)}Fr+`OLm*8JoWTUqn85$UMK9jm$h1zm`9n zBi6rFdk4T9dY?ZXSi8T=n8LnO#T!2)mvX)J7nZ%UM3g<9G9f^P>*GKBmodMBgN8r# zXaPTh-JiS{YG}W<8RtI!v7ElMRl>f?UPQld^nX8libTIJ`$E6&CPzPB%M-pIdGNj4 zHH1G`C#XN5E(pM8-2=XigS5ZL3C6zpy4<^fueH89JcGZMx{1GDS~@?bbi_Z~-UGm* zy{EnaVmm*Div_(&jgG(2p!B|Qvzk6hNS(f$hD5!3_ME&@HwM3}3Q<4DdGx+5vw**9 zvJOCkQlGtHXePjWo1i}#GN3={66!wKvLQb>ISN26dUn4Ra0os)AN)Vmo(n*9aJ#-x zrT{=Sc}+hsW4}NA5=B3AW1K#jW(`36QU|{Te)B(6;sZclc&Z$hnT)4@c2KT5gxwpO@u#A1`$7aiJiU^#U8%7-YPz+O~dK{ diff --git a/fixture/12/0/38 b/fixture/12/0/38 deleted file mode 100644 index 98effd7ff1e158151237b00301df7139a6ba7cc5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM~FLj%C^Y;r$Aaq_+FUVA?~!p1%ab|$_sN=iPNyG=gK^SnO*QGh*eijzK@ zg`vNput2@lCb~auj@v$(UW&iaOnJYh(mlV@=YK!;WMDso5?#QHt){(kE*CxuPjq^id{)0E!!W;DxEQ}ti0HpXKuSM$Y3aWI zl2?1eHES?Wn)EHxWOc87e;@ zT8Td2Y+F95ix0j&KPEpu(Xv1NI;lQ&!g#;)EM>pj>N!8{t#-d$;Y~jJ>ae~CKZ(De u4^=j1gV^%-n$SOXOqP9O|vjM*KPm8|gi+R6MA;=8? diff --git a/fixture/12/0/39 b/fixture/12/0/39 deleted file mode 100644 index 6d42fb1981..0000000000 --- a/fixture/12/0/39 +++ /dev/null @@ -1,3 +0,0 @@ -oS"(_>cljP>Ț[__U -@^)뇽?ο4YuϿN&;$>\;L>@=6gS?ڕD>L=ұS>+=9?}7~>V.=`͏{=?- -ֿ(UNpt|>?JU??/?"f?V79?>&6w~iq?<,A?U:#?@㾅1t>x??YA?ӧ9P?H|=>?gM??猵V >\?Wm= ?0?n N6?޾St>G]?cD?|?0>#>. S:>t쏿9࿗}? \ No newline at end of file diff --git a/fixture/12/0/4 b/fixture/12/0/4 deleted file mode 100644 index a5df461dfe..0000000000 --- a/fixture/12/0/4 +++ /dev/null @@ -1,3 +0,0 @@ -c3? 7?IPޔ?J -=0ÿ8B?=q@}?.=B?C?=Ӿ-d>?~Q?X]t;\?ct?W[`*.?r>e?)ؒ?kq?y4?#IͿ??ٝ?LA? %>ö>}f?X?-=?u>ʯZ>62PtX>`? uq?~֝aQ@? -?H?`?f"?y>P?*j{?\qrQ+? ۿsS!?;{DOڿ"?^ݍ=SRE?eA}?Fq]pnEG೿G&>G2 \ No newline at end of file diff --git a/fixture/12/0/40 b/fixture/12/0/40 deleted file mode 100644 index e4f807ccc853f44b30763effc868d3cfbd59a299..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM{fJWxH)1Xn(fJq|xhZ+pK0m(IRQg}^@wG5WqQ9*Dk_V=KR(s6D@GfQ>)% zfgZrO{pr5z(Pln#Fxfv5Pm#YpAg8~HFI>O(qBB3$N&r4dsJOq|wXHY7k1@>aemA+$bt$Unbp+g?9b5S~6bjjg^KSYN*;o2S3QVb(rGg|WXCe5XI^ zq@lm+r#8N@tYN uLo2?Zw}n5<6dk|$|7bp!>bSoh8v(zwZHK>~L7YGGklsIjo3*~xp/x \?.?X=_w -%~>K ? F>`?՚g?R?:2 ?KR9?&> P>Ra(u=G0x?^t?W`k?q??>{J,¿?]eS+>*ʾaR]?>4?t? 
HFg_??сx:?щ?h*?>Ó>HAֻ*Fc@*n >'?$&I7$D?J@p>W -ѾxyB?=53nJ?Td]7?wL8fH?l?D@<{>>X>>3@ \ No newline at end of file diff --git a/fixture/12/0/42 b/fixture/12/0/42 deleted file mode 100644 index 8b46168136d14296e213f655c2bad27c621c663c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM{|1d_hvA9X*WD)YYZ`X;|kaPvQ+@!h@tvkSdOHjzGPgC;=y%nCsLQlvk@ zp*BFs&vCy4=qEo8Nt-=*apJ!U8|^-vP5ZvVNyI-+K+3*iUgy4;c5Ns+%yJO;mkfDgak$OpeK^|wF2|5&|?MsGha;U_*XH?==@uYy08Z5O|-7W+Rb zx9Gn4%uGJ13Bf+{3i>{B(n~*}yg|R4TQ)!S#S}kqd-pjXOhUfE-%h`l@bx~36e+(1 zmvcUsh0ndn#j3y5^=`k&KF>b3IR?L=^bWrdl0&}<;ySuGpoMA9NfR&wWq#Jd&?~V diff --git a/fixture/12/0/43 b/fixture/12/0/43 deleted file mode 100644 index 73d8748d677e3590281ac591fff1661f3af44e2d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM}65;(u=V}`zg0h&G>JU6}b8%sZ6wz$4LZyrCC|BpXADvUlhV5+=CC)7SQ zK%PFkD1bk4HcLOTN=`qJR5U$8wXQxYBBefQi<-YZ(uhA$tJS~VQ8quevxq)Aj*-3; zO2|IAPS!sCWv9QOL$yC1y^}qYBoshzE=oV&R7yQ+rZB(P|KhzmZ5ci{le<4K1D!vB zDd#@hbmBi92?4-(^6Wk|xu8GdBn&@vnX11=*|9&p=R!Y2pRYa%^|U>qG5pQFDwKa;;z3y!}) zMsmJSwC}!VCxbtz&tX3m;AFnqokl=@adN(50Mxy1f}_7v@}#}zM3_E}l$XDJRR6y7 zZKpn-TL!<|@jJiYIY>WF9HhSrL5sicqBTC{omfAkZJa){LS??@Tb#ZrR)If-=UYF! uE=)ZNB#yq5c@RCFDLTKU#~Hqjr24+Pht9oDN$9?|AqGDLgHb;nYQnzW+`d)- diff --git a/fixture/12/0/44 b/fixture/12/0/44 deleted file mode 100644 index 0f4fbe6750246851b99bdb78599f0d5ab41b1387..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcma#N?!W(rj+uRyp{?E3BfIyB32m?!TPCo-OXC0j%QmL_+11b63#?kS@5*Q9{iojD zv}ws|w68e6a&KRfxINQ($9>OJCGF2ov#^_T^z^<(DlK;J9&+uk%Gj`f*Xu=lEV!2L zSNSMm|E6DZZ=T*Z`|Bbr_X!Iu*|Sw<&;EBoz5A0NT-zV`y3>A(*Xw-;?yj^~>eoKt ptA5OWnt%7+Du#7-O?(~uEB~r;s!bv|sts_2^5uCr*MoqteHF`a3 zf^0t~VZA>pSP8z*tggQ`Fmyf#cjP}fw$Hy|V06DJXdFKmNgY0Fh6}%lWnn&NKzcur uWXZkFcs{;L$`(KEfQr60K9WBgwgEucmt?=z0HDA8QNBKEi=@BYx~9MVal_UC diff --git a/fixture/12/0/6 b/fixture/12/0/6 deleted file mode 100644 index bd4fbccf89022f865d01ce0b521180f25deeb126..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM{=CIi5cw!^;HAP2qYaRk3`_OUZOKPXoVyEBZdN2;RS_uaCc&L{mL}9=tyZ%fUWP;ugR6sJ_2F z*(JZ#11`O7MX8%3!c)h-N&Llrq+Ydhy z7P`MzF9W?D=wm<6TZccw=e<5M<8Hn>x7R*B%1%E`Iqf|)7ivCc%zwVe8>_#9j55Ag zS1rFChAhA1hqpgOnR7ps#Mr-2-HN}?B)YzN03^SrgrYvoEuX(yVs$?$KQz6VSFXMp zoNBzD!_~eF_5r{!WkWpQIHbNyS9HIXc#J;cmXq#M$ogO9&kj2b@e{# z%0$0My3IWgR0P1?17JQw45mMk`Qg9UM9{uv=wH7$YQeutVgtaUC6~XGF?hc07Lz|u z=)%9Ix|Tk<>0mzM0@gnsFA_iH>mWZ`1v5W*nry#+q!Pc0^^QMIKm)+=t1iFwSE|3P z-ETe>-PgXFKOjHbIN!Zm=Ec9N`@g@ZGRC_aX+^)WS_r@XJ8(buPeMN=9Ogf~_MyHo zA|$`v9M(QMkPp8DF9*N4^G?1;jnh9GH#9%n4o^QU&don&3}-*)(U8A>D`P(z(}6#o z;3hxnAxOSu+zUU(&855mWO~0v$vVF-Cp16R<7dD5!B4-=-9SG800}zv? 
uQZ_$!;cC9Zb{RfmATvA;puN9xcd)+rmP$N|G1EOcf!)6q@<2cQnFKzgy2K#> diff --git a/fixture/12/0/8 b/fixture/12/0/8 deleted file mode 100644 index f20d0033a725c9179ed44eba249245fd3f071a79..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM~07KgvRN9I3?bFsb_G?2azMJF-5AmyJLAt(U%#sG2`@ zui(EH_zgfR>1sW}ScyNe+h0Cm(o(;u{}jE9Y4pD!Ixs&uYDPYfCq%yMus}clu^Bxy zSsOoLT6RA-nhd{l-BrKYeB8e%0sB2ndFMVmY^lEj&$U04gi^o1p9a8NR=+>Ni~PTA z)OJ5gn>at1oTEOJ)CIp&w*WuEG=@JEUVOix^LanZO>RHS3YI>(xCy@^dT&3>AOb)4 z3WC277QDWetx-Qdsp>s_0gFFT?5@5e`G3FsV+=r&C)dCF>i@nWJC8p8Z+t(seI-Au u>k~kA*YLg7dtbl*_h7#F)1$sr-iy6ggnPf{w@5#A-Mv29#-YD{(04z7C&c9d diff --git a/fixture/12/0/9 b/fixture/12/0/9 deleted file mode 100644 index 7a3eb5aee70cb291df517e152c9429877fc8e095..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 400 zcmV;B0dM|6y-&Y@W|zNY*|I$Z0pY)_=@vhauQk9Z!h^pWp2=LOO`+F0rfs*^0>dzo*KXMt1Uq5CHuYwAU{A>zuG^(;RHVyFPJ`5{q4Ta z7q-1RP%yubPJ+E>T&}DQ8 zoB+UBo^c>uqJ0>nSHmaINx&#J$rHW)vU=V(8;p|`!A zv@k!bxgWr@!7RX{-j=`Joftmx9mGG#E|9;Pidw%!ceOn>5&XWymkhs+%h$gBN~k}5 u-Ht!JLclx%#?wEqPXa&AvXei^OkBTVS0}&q6e_=Oo3Xu>XLUW1`lUW2>&B1( diff --git a/fixture/12/1/.zarray b/fixture/12/1/.zarray deleted file mode 100644 index fcf798d35f..0000000000 --- a/fixture/12/1/.zarray +++ /dev/null @@ -1,17 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "id": "zlib", - "level": 1 - }, - "dtype": "!=j ܾ?k>=A?n?3?唾SþM=1d=Z@q>+Ȇ>7M?92?CIZ?a'?Tfpf]>U$aC>n6zZ~?=?lp? ?1vdVeԾVIR@G>P$?zzV==????p?^?^w@p*>>h@Fv.[Q?z?3$@y!= ܾƤ \ No newline at end of file diff --git a/fixture/12/1/1 b/fixture/12/1/1 deleted file mode 100644 index 07917e896b610d27869253017193b1a521d2e14d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~g-Zs=n3PqCO^)&p(JO(LbbbWxZ62fj;b!9KYe%0l&iQg+D_T z%|9=JGQW+s6M4phd<5Z<-TuoEI$`SGQhEsj6Zs|^*lBL zguW!|dp})S-9Nvt_&!$Z%|HCn#=oC?DnA7*2fn(8Dn7%-Q)4#pctGz>V#J*5p F%Q0?=&r|>a diff --git a/fixture/12/1/10 b/fixture/12/1/10 deleted file mode 100644 index 481f147b514fb265a5e06f65b882ab0fa6f05d3e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~n86{6DXyI6q{`1Hbo{R=e_{z&}SRNI$spzCS}E_r7y4Hot~( zGQTX*&AzA^h`)>@mp(c1pFR{_t3OCk;lEh2aleK-PCsGbJ3h#QH$C>np+7E0fxiNd zI==w5SigMJ0KifK2tKRX1;1mTJHI>DX}^PEk-QddOuw1pVm_)F>ptPqmcCq-xxQNQ zJw8BI?7XQ$sz0;yqCP1+j6ZWq96#;BsK0XR4!_O;MZI29UO$w=_CF%x_r5q(VmzCV9T zsJ|RxnLqGclDslOdcB@ejy;oQuRgS-bicM$g}xKNy*`j3H9oP;vOeJZH@{roAwQ>k F!$_yN$L#4#$=RXQ<%s#jA z7{BcX5kHmixjhEv+rN$RTtDri&%V;uwLg*7{=LOIxIaLQ**+#|U zlRwJ1oIl?f7rk7R6~9o}&c9Y%UcZ~M3c%hUX}`(DwmvR!PQ7Tb6hI~Wy}p?A<-XPk zh(Fn-_P>m(y}yF-DL+UZNWRUYFuo({Z@*B6bv~WiPQP^elszy>l)p#DI=*z50YER2 z*}k6Z2EH(2PrhBaGCrQM?>?)vgFbu$k3M58Ili-G%{?L)TfSfW=fD43C_sp5IzOtk zdcX9PYd%*_tiSvR{l3kyy}vW3QN7t|%D-OU#y$$BE8E5 zGQdoR9zB-5Nk4UsD?n9B*}fi9D!*^|qdqeygT8P-qd#=zPQUQce!hTDU_b5Qoj!D; zls}e*-@Z<>p+C9!)W6!;&%cznqd&FO@H>~fYd>zc%ROwj=DU{sslTY++q@-7u0I?f zXg#f*EbgubId20-BG7(d>0Bfp3&ggwXp6+P$C*FS5a zAwAkp4M0}=rN2^y1igPsL%%Ir{l5D=^1d<{Horx7-9L2}wm)nq&OSii^1fW?F2613 zJwIcunLa^H6uzK~3O_R)WIwCC9lkM+NxxAVslUrofIcX+3P3k1j=yKSj6Xo}EWbmv z^1f|2YCkfug1*r=em>EcYChG6EWaHkWj~uee7&@~vA@-MWWPRlN5Gl&R6VBarau7* zhQHS36+cvS+&&C@K))xg#6KpQi@rUm06^|t(Z3_iiN34b8NSz9CcSuJH$UroSwMhA F#pH@q%E|x$ diff --git a/fixture/12/1/13 b/fixture/12/1/13 deleted file mode 100644 index 096a4829b44484897c465e257f86c346051efcec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 
zcmV;M0c8Go0RfN!Z~kun=stOq#ymt&jK2c!@xL3(l|Kyn3%`h}CO{rTUO(dCu0BJ{ z$vz9#&ORj1F~2@B;J>r^48O}?mb_cnCBI{yq`$U`KfmIVgg?TGKR*qIFFp-S7C$oM zkiYhzOTS({pFY2nfWPPe-92cUp}&MC)4!;1JU=~lgFk$IA-{#5!oT=c4Zx|!>p$Zl z_16g zNIw?rzdpKs_`f#nz`wZ1>ONE8yFaan13tcA7r*am>%Rz@NWbeJ20p$(Tfg(=cE46& zQN503xj)58FF(m$l0K6HK|bpl4L_=k;k`^}AqKR-h)>%EeG;XfvyoIe;k^uD5$ zPd~6cIX`>OaZ5NxvjD zSUwi#vA-By^1U+I9X~*?j=#6DKfVdbOh3}q7C%NUl)r3Wzdkbv4?lw^-9313i$4YC z7QZSn(>_QgmA_Bn%)iU@M?au)s5}ux?Y;{2SC}DslQPvQ9l4Qg1;TGBfq}r9ltAAA!k8 z-oD<5)IY27%)a09>c8ZGP(M#`ZNEa3TRu7+FurMi;XmbMFTUr2lE2Fg@V<#ioxgoa zR=$CV;y=yJpFiEA0zgGz^*-E)56tM?VdwkH3$}S3O&3$-lA? F#74C+$cg{} diff --git a/fixture/12/1/15 b/fixture/12/1/15 deleted file mode 100644 index 47f4892ad8d4c33d0be54e8445090c91c94b3322..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~l+ltUeR|EI*}<&%XeDtG}$`rN2?*HNMB-vcFz(us?E7-#(~X z89(f0%fH-)r9aM0COHNL%rf4^}h-oO2#oIjw(Grv0lx*gqd!P17r%pc z+drk#kw3TU^1fQWqdwyE1V55b~<##XIQT#=qoX4nBG(qP~5RV80P)Jv~}j*uOU7nmt$Y@IH-6oIX+XNk4?^ F#WIb8$`Akm diff --git a/fixture/12/1/16 b/fixture/12/1/16 deleted file mode 100644 index 893273da09..0000000000 --- a/fixture/12/1/16 +++ /dev/null @@ -1,3 +0,0 @@ -xoD*?7?"A3ɾI>?C:t,ݸ?*d?E?>>H씾dN ?x>(An4^_x3?9,?+ ?Q >Ӂ_/9m=\T9?=G9Į>DV @{8>X?U?ʿоXX>Y&?T>?w?= ? `?@^ -߽\|s?|׿i?t ?pee?ῠq_ -y?ǃI=E?s}v&̾V'>D[84?D␾"?N?2O?k|X?;9@@K?|R?Wf?8 \ No newline at end of file diff --git a/fixture/12/1/17 b/fixture/12/1/17 deleted file mode 100644 index 7b82f5d416..0000000000 --- a/fixture/12/1/17 +++ /dev/null @@ -1 +0,0 @@ -xo~?$V?4?4ugg?T?9#?~Gx?q>[lz???->?_!np?AQDT]?G7?WU@.>b)' >>׹>F?D)J>@kŕh>柱=J ?2d>e?#-?=,k > ;j1]>Ţ?*Y?%?9@>#?{>;>8+?' >3_>`f;?+F`ڿ0>yag?׿8v?+R?[=b_2ܿu>anQ2:?HR?r.&˾1,ſo?K?O?]>t޿@6>x >I෾?b ?`UE? #| \ No newline at end of file diff --git a/fixture/12/1/18 b/fixture/12/1/18 deleted file mode 100644 index ecc2687456..0000000000 --- a/fixture/12/1/18 +++ /dev/null @@ -1,3 +0,0 @@ -xo>_>M?J^s(>fvH>(?+??: v?F=>;? ʾ wF??+7 ->w?ۨ| -'?O?[o0a>"?;@f?5þ{˿?4?m~>E?1DqĔO?p?If?u?r7b=BûD>oX=j>F">@߱?0=e]>F?R\?OEr??Z?Q;-q>6;?i8p5?%Qȿ">s>#;=a= *& x??T׾Իǿ+WG.%?8ǽ1+{>ՓI@T?ֽ?5xn.CƳ=򊥿ꜾZ_?Y -j 3>O?35 ?w>oē6-?S?@r.q`?ſ?Oz>E3?/w%#>sZ|OzNŚ?μ> K*m_?<b2???BA}>XGŵc>>#@ I3K?>4-?'N>Q[??R>_8s,@8?'#?>'Nѿǿ̾ h⪾g?pɋәl(?%?> >vk>!Ƕ?g>G?J־{G+v,a?m \ No newline at end of file diff --git a/fixture/12/1/2 b/fixture/12/1/2 deleted file mode 100644 index 78f392aee5bbb5c213113b3d61464ca4bc1d4ba5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~mx%6h9pNx;>8$t3Qm2k3F{R55MeFY&`~JIX`yz;=hL)H@}g! 
zF~3i3(mp`^20wgI&piU79>5NA)4z+VbiPes0KWnHuRHZr@V?v(#6R%Q62JoTnm&E_ zQ$GfnGe5bCNk5%kZa;0F8oz7(jK4WJ0>BifxxQ%(N57e`lt0lrlfG8ugTL+}n7^oz z{y$vqw7jhlNWY>9{yjv^@;^C*$G-31FF*C&mp?8V^1uIXBfqfGhd+%4Gr!15)IQoP zLcd`j#=B0mIcPCn9h%010`AV6rY-allS{lAz7>d{XXlL$v?+HLO;#z5I!L`)<2hSU_X4xJHH!q{XY_NBtI3- zTD}ui0l;iju)hVK**{vX-al%ou033r;=chRi@&7T&%cL$)W0FO0YI|V13>F{kG~}y z54sfl)ri77(R7wf>k~TfWIStUuQWCck+>?mt{VjXn;$ zqCehp>^=9`N541hhQ6PHvA?}*m^Fh0~rJ3W{}sy~Xh7`|d|^1t@cVnDgeFu$(< zcRwhLz`y1gdp=udOTP`KG{AYxkiJzBO}(NcV7|5Tcs}?1iN0%|jXl>fEr zF1~ukWWSaz7(WN)kiK3=y1uw213!ifSw4}zGQX7-pg(>%n7>>Hgug!-D?c_R;6I_z z6Teek(>`9Gn7{kcZNJ~^kUvXByFRw$ls_sAi$5>B1;7XunZFo_j6Od)*}r)6hQB>% F#Ti1~$rJzp diff --git a/fixture/12/1/21 b/fixture/12/1/21 deleted file mode 100644 index 6178369a2d67cc9797bdeddf50537b655d5d43fc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~oqyZa#!Cx<7%+CcjI4 zj6HFP!oRE&EWZkfu0APbK0l~Z4nOb&0>EAK>At+5cfa&Fp1xMNOB|^jlSayk3T7O5aGe5IU#=fh)ls&0VxIY>7q`sNR)xOZAlfR9n$Uj&p_P+F(nLlLm8NV)+l|LTZ Fy7>KHz#0Gm diff --git a/fixture/12/1/22 b/fixture/12/1/22 deleted file mode 100644 index 04183b5b15..0000000000 --- a/fixture/12/1/22 +++ /dev/null @@ -1,2 +0,0 @@ -xo*N?i;-'q*W?n$>U"*2:I-#9?S A>>i;?؎? ÿ&t@.># ?apm?P?,lw=ATo?9=o>/ ?>ܰE?ݾ|d?m&?w?=]P?f?Ҿ> |3@o*? -@TWpA|< Ϳu2Ti}dE?ϛIf??mq>׿ M1|>NHz(7p؄ӾV>7>|?!>>X6ajӾ(:"*sXW?;?iCď???E :(>JZ>?t)W)A*w,T \ No newline at end of file diff --git a/fixture/12/1/23 b/fixture/12/1/23 deleted file mode 100644 index 85a99ec544b60d2825551dd5616402152bc3da86..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~pS+T|aPpyFZzRtUr#!Xg^Sgfxk>Lm%p3O=f7!86~Dh^vON?3 zfWMQs+&(AZCO>KL^1ic$tG_hDus=)j4?S>s_&-Q7y*(uxCO$*ycE7CJ*uL3@$G_tH ztH1udJwL^+kiMd3*uIa9J-=OJ#lATRkw4z96F#~Mm%pXsgFg|@+&}I~{XUp0WIvrk zoIjxp)W2zEkBedhCKu2Q$LsSMLrEDU%!V4 zOTL=@3O)$VZ9jxrWb$sK&Aw~(MnApkh`;nCNWVCC_&qpbyuOa?jlV)H2tRFs62GE9SihK% zGQI$;WWTuq483wHxW02Z1HTlX0l%Z*em@Zky*~8D+P`88Vm}o<1V3dz>OLV(V?T>P zc0A6#9zR6S?Y;*4SU)LQUp`1cu)jm7gg=%41HNHLiM~&z;=Vc^y}pGdYd#Coe7{Y| z$iHYnh`)w#!#*9;CB7Yfjz9SDguU>tS3gW#F~9c8-o7^r96Ta{7QR3tcE099U%#^R z%D+vDAwNIv1V4!;9ls9!!9LS)D!vJqjXxjwr@xKF0lxi!Q@)k^BtGxJ#6N9c{k`cu zK0hX17rw0%i9f5lKtDsMs=l9t&%WqV`9878C%-W@Lq7N_w!ZUsygtj}B)$MmF~4tS F!scz5$J+n^ diff --git a/fixture/12/1/25 b/fixture/12/1/25 deleted file mode 100644 index 1b9f0f0359d3e769536796d4078547d4a133475b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~n4+jXyh1bv{>oTfM{x*FJaY)xVd;1wj5i$~-_x`@dy6nLqzp zK)+^~g+E~U_&r(!$-fxHg+AyuKt4ZhlRt>81wRUl6Fww=UO$>dxWB}>|37=`mOl;>+rPsl zzCHzj5Wh5W%f9b)vp#-vhCk+p?!HT>=f0$jzCOEa5oYCnh&Q@vv!Prp3J6+a=996yv|H^0f> zoj*^J4?rJ=i@!x0Jid&KtUk%_5kS<~JHMQWaKDB~a=)BcoIh?fHNRz|S--P_Y(B?= F!zRb7$It)( diff --git a/fixture/12/1/26 b/fixture/12/1/26 deleted file mode 100644 index f5301aec4e7f74a786ab5f83e8233443b47c8507..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~iaOojx-ci9fhQygd)-em)Nk5^^2tfBqF~8B2n7?w= zfIp|#qCU}kt3F9d%RdI`uD?EfvcGVcCBC!Sfj!5&wZ8g%9Y74&p1xK548Pf@{yhtf zct4`GIlrre7QP@X1?c- z0Kbs_QNL<|mA|fYkv|wY!@i+~nmz%v>A!1AfIp&m7C*-xtiNragg!5Bn>)37=Dv{0 zIlxy7DZfG3nm)m948H~hW4^kTzdwb%p1t-l2Eb}bJ-#82%RO;QQ@?x|JU>z@yT1Vp z4Zb3zyuV>&m%ac{#%6$L; diff --git a/fixture/12/1/27 b/fixture/12/1/27 deleted file mode 100644 index 2105c15f4ca3d3c4a6f5e648ea9268a040f1b7cb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 
zcmV;M0c8Go0RfN!Z~n{?x<9tmD8IQ&(?4m`c|ZLZiN0jp8^DEu)xSi3D8D<4LO+ID zLqBfP?!GD0Y`!WB1-;z-U%s4=CqCa8_Pi5K*1rXdRK6D1;J@)nfj_21KR=Q~YCe83B|WZI&Av{sx<2`x4nI1#i@#IG-oC8a zA3wWI+delt|G!5o7(cZQ4Zmwz2EUF<&pwdv!auw|O}-xMmOm(PxWAR*1wNaC3comG zkUpEWm%itxtG`1nW4~%t(!YvGr@s{K^*->2fWEkAY(Cgz=s%r_m%eXcB0q9`<~-BS zFhC$7LBO}TH@_WjCqHPuK)(^{5I>p~cD+e$T|Z~(kiV1tFh22_?moTq20oVm7(M{` F$LA9_%?1Df diff --git a/fixture/12/1/28 b/fixture/12/1/28 deleted file mode 100644 index 92350f40832861f0db5a6cf843deb182713f3100..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~j8*nLj57*T2l}|272jNI&*xiM}Y9 zwmv$jwm(QHzCMnAEk2f>LA~(WlE3e1T)wra{6D-Lz`hH@AHFE)pg;0R#lLgF9zSbf z%fEMG%svH_x4c5e96!UDfxqte2)|(eDL-O_0>5)2362CzPX+KaQ*uKkE3cnr|AHZ@Bg}+L$R=)s73cnuSIKM5adp{F! zM!!H@Gr!PdA3o^=jlbnE$Ui1pO+Vm+mA}}(fj{qHoIlYplD{q_mA?r@9KUfTwZHwq zN53K6%)TKI0Y8perakkL7ruRB_r4fwr@zp87e9f*5kJc59lxjYjK6L!$3N(%d_HbJ z%RUcV;XeH8nZHcvzduNq&c0Z2Qa|jix<7iTraiAZm%jy-AHd~V9lh@ofIhCR6u%+y z3O*@T+&&F*em@KiYQ8bJIlTenHok|N2|W#91HWOJ1i!)f=)OV(AU=}mSii04=DyWY F!gkWE$4meK diff --git a/fixture/12/1/29 b/fixture/12/1/29 deleted file mode 100644 index d3cf090614..0000000000 --- a/fixture/12/1/29 +++ /dev/null @@ -1,3 +0,0 @@ -xo?mze?Du5[]?Ĝ ?Խ=5NR\6e>ŢXq?E?S/Ñ?Ҿ6Ռ?9?H?Q>D\?9 >顳?k>N?dUluJP?GV9?f?=&qq@=w>w] U?'>ׇ>?T,= ?ݶ>hJ?,B?vfڿ"E>Xk?WѾ @Vs f -?.l>4z&?l>=?nGC?s!}Z1 ->JTP?Q!?@?z>13?jf٢?6Kvb?h?nj@?$?Y|B@?s>Xž9E?v_dǞ \ No newline at end of file diff --git a/fixture/12/1/3 b/fixture/12/1/3 deleted file mode 100644 index c97825a4974e256479e8c8c7c9d1bc0ebd7d41bf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~jD^3cv1PM!!onf4?5kZodkiK)xN5w7zKQ9=|i)F~3Z4l|FG* zus^TYGQ3k$oV~nMZ@+46WWMoSm_LWhgg+t8cRv}GZ$D|9-#!Lw%|6thdOrPe)xMW% zt34{lTs~G8B0ok`%{}+J20$R&n!i#dK|d+)D7~bp3_JG(P`}rCf4qx4eJn61=vGa=%+lD}NpM85#C z{XYM?7{6yo3BD<3iN8Ww{XZ({l|A1nlRv_M{61`oE4>KPzrB*FIlmjD=Dw@{fWLS3 zj=wF31wfk3jy|89yFaQ&OTUaXpuen7dcV#Y*uM0D6+I!38ot=?MZfK%vpsB>Gd`R- F#Tufb$wmMG diff --git a/fixture/12/1/30 b/fixture/12/1/30 deleted file mode 100644 index 869d74e79bba7d4d572ebc6db0b51da13c7006e5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~lg8k~|)lWxm&^D!<@!RzLbR(my4RfIs6XGQZaa|3COGfIqb= z>b}Z*M!z?Hk3Wv^F+RG>S-;__7rq*N(7uZGQor`yfj<- z8ox@lx<7U`-92JByuYU=UOr}^CqI-Y20u9%aK9LU@jrYMz`v^<<2_wR-aprN7e9Ow zRzDTahQAZAu|BNq6}?KOkiS!PT|eTp{hCWC6 zoj(cCfxiW9GQVduj6dp&yuU9M3cor=JHOsH$iMKTKfWCeKR<00AU@TyOg{sXm_LVc z#lG!jDL_ivu)e&ZS-zhpHNT~v3qRewl)gWbp}kxqQohe$X}-J_B0jW(=Dxyw&b~8K zi9eJ`mcIL7#6G*VW4~#x75WsO5pT7fhpT3e|MLk2V zpFJa006@2!RK9m+jy^jb(7*K$|GXd^t3H7Uu)i4^Z@*A<`aTNW3%_gu(!FJH3O^lO z0zj!7W4;x2;y)O<`oC}N7e2lXtG+u=2)`b8C_jb8TfWwq0l=?M%fG_bI6tqD(Z9d4 zKR-T+|2+|3F23&DroAr513%oimcMN|Ej>Ehk-vYECq99HB|o<9DLz`w8$T>GGrxnd ze!cFmbG~R?o4(TaOuyI{-M$-bj=vAu*uN`cGr#0MAV2@EZ$4nypuf1_&%RhiF+ZuG z<-T2cwZHs)5I{lqD!wQ1l)trKFFwssue~Tw#lI{#zdaY$dp`q!ZoW&}1Hf4juRf?( z8b2Bz7QbRW6u-ED5J02EA-^eFcRtxf&b^tkBtJ?El0O^lqQAOL ziapDkS-tQ}P(E#WeZ0C1d%rw2x20pYMct3tz9KcB)eY>xh z62Dhb6+SrIi$9R=)qxV~eav%g);-@RlSEI*A=SHHM60Kg}W5kMcDt3EK~V!j>> zvOo6U5IzNfsz1btpFe;jqrYl|>pm&g+&_}1f4>Zae7_eoQNMZeV?P0#kv?O`_rHFS zu0HzrW&RLq8!Z zwLho#qdbm^&A+d@qP_wx8$Mr2Pru?<6+d53Xulxw(L6-WX}+nb9Y00MX}ljbNxqbb zO~3SM4n6BRg}xHoUca4DBR+Imx<0l;t-exwZa<_ahdxtPq`$f^t3Og%L_YyMfIqno z%|23{6~OJrp?L@4ZW(ޝ?\>z >rҖ?YB5>+>˦1=I?0 -=v=˿?FD^?(d 15]>q??T?W=JmZ?}E>+ ?K|=(W?")>j}?z -><=~zc=?3[ξi#ZϾԶ=8zuj"=?t?ͧ ? 
}Cde=p?ſ6z>6?ѾW=P?)͐q=k?ň?q?;n{E>PE?ֆ'Kw=^3=?!F?oJږ_?+7=ᐿF?C@˖&$sx \ No newline at end of file diff --git a/fixture/12/1/34 b/fixture/12/1/34 deleted file mode 100644 index ef6e3a2354cf4bb2f5c300ed5a2a4a565c2abfbf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~lk@hCYo%slP=Nqdw=k^gkOT0Kh$#mA{VO5jIzVeH^*(oampm&5s6V(b}3X_P?q)#=ao>b3ei>?LHzaf4{`B^S{2<5x%v2Y(FY8^}aasxWDn~7QajWramtS zpFfCggTA*0hd;lpaX2|oj-Ee!qqtT)*mAra!I07(L2gXTL^KKR$FEyuVj2 F$5Um4%W41s diff --git a/fixture/12/1/35 b/fixture/12/1/35 deleted file mode 100644 index f4b39eafdb..0000000000 --- a/fixture/12/1/35 +++ /dev/null @@ -1,3 +0,0 @@ -xo>?g)? -@3/>`1^:@t?[Xc?qp+?T?V t? (Hk']p=%*xd?!?,h>7e+˿Y?%;VJ!,??/?eCbm>7>?f?owG @ig?>=wM>NKM>>G9*ѿUVMc?BʿA> 7F>IΰҼ隿aж?|ܿg_? >NJӺ?-C?>8mP?=[?9^ԡysI \ No newline at end of file diff --git a/fixture/12/1/36 b/fixture/12/1/36 deleted file mode 100644 index 3def4de46f9d417bea758f8058a5b700ea1a55ca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~i)rCceX3tv>H{8o&gB>OP?0(Y=XGl0Flo06$Qn$v&ER0KhWO zhrilpsy-=wOg=FnO26#YJU=N)gTD5@Vn6!fnm?O;jy|wB4nGRS8^7LXWIz8~Up`*s zmcBg#oWI43_`XX6j=%o!LO%>QoIjqBYCrWFA3$}w>%RB67(b*om%kGHNxsJ!y}vJY z5kCO~w!TJw1HfKgFTcfv}HRdp;XqjK2t2Lq7|c zk3YLkNxu5!1;4=UJ--4j;XezI=f0aiCqJ7XLcS<^dA_5@L%)n=c)xOMJwGcje!o)B z>^=~LxjpNwKtJ&%9KR|;ia$h@5WZC2B|uf3&cCa38$cIk0lqvYWIndC8owIR$iD+c z2ftH0yS`sAJ3e-Sk3Z^8Lp~iNRX*6u#6FFo;J=#9)W24pzrVm8zrI|Q@4xGnFu%(p zufNu{`95Y8;64QInZD~W^S|2Fa=x!Ys6NPxPd|`v;6L*!k3TNEC_X0(lD_URKEHb` Fz%gx8#6bW6 diff --git a/fixture/12/1/37 b/fixture/12/1/37 deleted file mode 100644 index 32893fc5bcccd41e552cdf6dd75e56e41271ba3e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~jzEL_T=PJinQZ z%sdspmOq;#*1uJI2f!P8pFbT~yT8kr!oE|*8$TqMa=rBzmc6n>ls%m?AwY%e<3IbC zF~5R?hClUa0Y8G>pS%}pXuq}@=RW?ioW8SF!oJF0M89wJe?NPQM87ZlLci`NM?YT6 z6TTmL@V(nLgg;j&s6U`C2*76D1HO!dw7OR=AAwM`d3P3G-cE1&H2tGI;{6Ey5 z3qW*myS`AS06;Z)O+PSWzd!sEML%<6oIaUm4M6--2fqY<^FLJL13+GQu0N&blD$Js zdOp=GqrZ^2=f3ym9zTMfw!hVfn7$?O_&=Tz9=`8Qgg;IO5kGi|oxT&r9=^HWDn6-A F!|JRu$prua diff --git a/fixture/12/1/38 b/fixture/12/1/38 deleted file mode 100644 index caa67b1aba26f0473d7b06cf67f6a322d7221cee..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~pW{1HkcYaz8+di6JioeiIdB3I7J-^cDe?RtQU_XNrUBHX2roC}4 z7d{G4cE4Bb$G$0Esz2sa(LPMc|GvY`KfaS!X})hAwGz>^=#rp}zbgyu4RvFTc)w=e+j`U%$4us=P^tx<6V*1wX+Al|Dr6sK2*2 z5kH<8DnB4vi9X+KTRy3a557J>COcljP>Ț[__U -@^)뇽?ο4YuϿN&;$>\;L>@=6gS?ڕD>L=ұS>+=9?}7~>V.=`͏{=?- -ֿ(UNpt|>?JU??/?"f?V79?>&6w~iq?<,A?U:#?@㾅1t>x??YA?ӧ9P?H|=>?gM??猵V >\?Wm= ?0?n N6?޾St>G]?cD?|?0>#>. S:>t쏿9࿗}?SR \ No newline at end of file diff --git a/fixture/12/1/4 b/fixture/12/1/4 deleted file mode 100644 index 60cc292a07..0000000000 --- a/fixture/12/1/4 +++ /dev/null @@ -1,3 +0,0 @@ -xoc3? 7?IPޔ?J -=0ÿ8B?=q@}?.=B?C?=Ӿ-d>?~Q?X]t;\?ct?W[`*.?r>e?)ؒ?kq?y4?#IͿ??ٝ?LA? %>ö>}f?X?-=?u>ʯZ>62PtX>`? uq?~֝aQ@? -?H?`?f"?y>P?*j{?\qrQ+? ۿsS!?;{DOڿ"?^ݍ=SRE?eA}?Fq]pnEG೿G&>G2j \ No newline at end of file diff --git a/fixture/12/1/40 b/fixture/12/1/40 deleted file mode 100644 index eca2cc90c31a94a158af5e623bad77ff7d103386..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~hNFP(9BCS3ZwD4nIq8d%pme&b~^8z&{Ex`o1q7h`y9#E5Dzp zJ-=#zjX(2&9>BN#>AvgHWgq zvA-33r$6eXp}*>p/x \?.?X=_w -%~>K ? F>`?՚g?R?:2 ?KR9?&> P>Ra(u=G0x?^t?W`k?q??>{J,¿?]eS+>*ʾaR]?>4?t? 
HFg_??сx:?щ?h*?>Ó>HAֻ*Fc@*n >'?$&I7$D?J@p>W -ѾxyB?=53nJ?Td]7?wL8fH?l?D@<{>>X>>3@.R \ No newline at end of file diff --git a/fixture/12/1/42 b/fixture/12/1/42 deleted file mode 100644 index f2dca581f1b66ec73669a868a54c0377a6123b9d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~iz0lD^{~bw8mh^S~1O;=c+T?LM4M`@X?R#6M0z%D!S==f0Uc#6P?Y zhP^G>ls}|J**=vc;69G$dA`<=06)s52EGu+O}|Nl)xD(Gq(A332|tLqL_g6~z&e5n z_`k(hoWF_qlfJo0k-tnl2ET!T55L~X2fr`%w?DuCSiOrzZ$B^LCq6GXwLf;RfPOg^az!9MZ|`aW{fOFy8zLBE?@Hb3>n6hCl#_c>AVLl^W7(J-PT)ta~Z$F4OxxaZQYQK2MNxyh% zhrdetuRSee0Y5V|`@a^q^1c_bq`tOWlD@-zq`#PJpuSo^X}@|itG>Y;+`rznr@l;k F%Pnfv&tw1q diff --git a/fixture/12/1/43 b/fixture/12/1/43 deleted file mode 100644 index 8ba2dd4a3a9f724c31d3243e638a239b6e76165c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~m4NIKS#+hQ5ITnm!ynH@)*4OFv+?xV}7Z9zT@-k3Tyqj6ODC zs=P!e)IK#po<6%MfIo3IOFywnPCt-TG(AGKu0ARvr9Nqkn!i2Lh(A!P)xX_QHb1tr zh(0=wk-ij4$UeAE);|4Zr@x>>wLc!clRc9p6hLn-N^?NPpg-ay3_o<4s=r3ru|K`%LO(;FuRaO&v^}9Q z|3A)`<37o`cfNqOPrq*MK|jlRd_UWKXFZb24!=u3sz0bLW53B3{6A?5JikSsqrW#l zlfP99j=w-ga=uTr@4jXygFmRxVLuh%WWL&+MnHaXa=u~!)V*$kqrX$~q`l`vm_Ch^ zm%n^e|Gx8Wr#_xr2EW_!JHOvKNIy;-q`wM5i@)xoH9qB?SU;m}oIbNcWxnQHoW3bm zfj@=kTR*!lOg#!Dj=qz55IvnKI=`jI8NQ9A`o6k{&b>}a=)Sfg20sLYQ9m7O!oJ?z FzE;gt!<+yB diff --git a/fixture/12/1/44 b/fixture/12/1/44 deleted file mode 100644 index ba549052b4537640276c08214649f4198d35740e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 198 zcmV;%06G770gJruzyF4gnSGX_t=-ilyZ4C+ZLk+xCa}Lt;{X23Hm3X8)z8}ttXj11 z%4g>Nr{3MPX~}A|uQqUDkxR&l$`6yxkreAVzp58Y5>mn=n2@5RQvsGr#{&zvW`;#AB+aLJ4(|(KB>wO3A zuC!O`*FNB@e$0NFfA`)hhIMvLd>#8M|FGGwE!@Aa?(&TN7pBbG&oImZ0MX}Ku{C*i ACjbBd diff --git a/fixture/12/1/5 b/fixture/12/1/5 deleted file mode 100644 index a96d45d4f46e947722f3edce3c5e9c162d5f3323..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~jm7O+I&y+rO!(PCpk82t8I<4L}Ahr$3?b5I+a-{k>YpI={R@ zaX%sYYQFi5l|QmL%{~C-&_3NV7(c9RYrkC8g1hzrW7T%RWtw(?7#mJ-#|@CO-w?Ucbj?oxS)1#J^g?Nk2fXBR-T7oWIvb zO}~FNdOd4`Y(FMpy+0~g3BJ#)uD>-fbUp`nlE3V<96qm41HXSO`aZG<-oL1?kH45iQ$2njygv!c!9Gpm z7QgqXzP~-$CBM}JF1>B!7QnVoo4}9KYy*0Y6n{|2^+`y}ozO zBtKW%4?hwXy1!R11HB#SV?WPZhd;vSy*@JIZoWFV*FHVUPCrdK?L9RYYCdMnf4;{X ztG|MbGQL+=Ex#RxEWhK2w?9Ogb3c^C*uPKRioebzy1sb;B)_JFqCU(mpTAmSbw4RT zG`*NtuD%(ZYP_Dq)xHe&0l+Y2LpRo2tQoP1i;+`U_L_(razJS;lI~J(7t8pU%xnN!M{sl1Hhssm%o!S zc)siwlRr=B!oQ}vmOi=ZU_Rmk);}IE5*S?xRAV1qU-@RJq#lNcizrUw4#=9D6MZd9H2*3V2a6k7?LO&!N z=0Cjlp}sI8B){Dp);>Cr55EI12fw)UPQFKt(?1$FG(X!8Pd_Zq%|B)gXFulAkiUK_ zV?P?xfj^z#CO_&SNWNv<3qQuqrMv)SdcQ@*zX-M F#385S$lCw_ diff --git a/fixture/12/1/8 b/fixture/12/1/8 deleted file mode 100644 index e02434fc4bd705d92617f0386c9636f476f1db6a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~o;Lhrhi?=0AyZvAz~GkiHK`BtP>e@jskcETRlnJM+`lLR`#nr~=RP}ZslNixwLg@EQop~S2EbcZ zzdyl?{J(6}c0Wp+I6s)2qdt_>1;10b06)PrhCdWue7~Ucc|XfdZa>QkmOi<-3BMwG zZ$Hc+0zdZ(g1-OFk{i$789uD&Drf4}@=3_y}6*T4Ge|GptRk3Rly zd_T2)B|od{6F_y>@V(W0U%&tNV7~X$qrOz$i@jHbd%xzlNI!Mmy*}B-p}&66cRzn8 F#N~wB$*ceX diff --git a/fixture/12/1/9 b/fixture/12/1/9 deleted file mode 100644 index 4356b3c24f6a7ad06fac622b8000165a39bd1d87..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 411 zcmV;M0c8Go0RfN!Z~j5OPrra>m%n7$vONR=;lHfu7C(=#HNYssgTES{$v@`EFTX$c 
[tail of the preceding GIT binary patch hunk; the base85 payload is garbled and not recoverable]

diff --git a/fixture/12/2/.zarray b/fixture/12/2/.zarray
deleted file mode 100644
index 7d45c627eb..0000000000
--- a/fixture/12/2/.zarray
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "id": "bz2",
-        "level": 1
-    },
-    "dtype": "
[the remaining lines of this hunk are not recoverable]

[GIT binary patch hunks deleting the bz2-compressed chunk files fixture/12/2/0 through fixture/12/2/44, each a "deleted file mode 100644" diff with a payload of roughly 541-556 bytes; the base85 data is garbled and not recoverable]
[the remaining fixture/12/2 chunk deletions continue here; payloads not recoverable]

diff --git a/fixture/12/3/.zarray b/fixture/12/3/.zarray
deleted file mode 100644
index b6bf3d831a..0000000000
--- a/fixture/12/3/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 0
-    },
-    "dtype": "
[the remaining lines of this hunk are not recoverable]

[GIT binary patch hunks deleting the blosc/zstd-compressed chunk files fixture/12/3/0 through fixture/12/3/44, each "literal 416" except the final chunk at "literal 219"; payloads not recoverable]
[the last fixture/12/3 chunk deletions continue here; payloads not recoverable]

diff --git a/fixture/12/4/.zarray b/fixture/12/4/.zarray
deleted file mode 100644
index f00b0f7416..0000000000
--- a/fixture/12/4/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "zstd",
-        "id": "blosc",
-        "shuffle": 1
-    },
-    "dtype": "
[the remaining lines of this hunk are not recoverable]

[GIT binary patch hunks deleting the blosc/zstd-compressed chunk files fixture/12/4/0 through fixture/12/4/44, each "literal 416" except the final chunk at "literal 225"; payloads not recoverable]
b/fixture/12/4/9 deleted file mode 100644 index 9e438e403f34c6f1b1da6cfcb0de040da69b6691..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmV;R0bl+C0h0uf0RRAy0RRA?0RR9&y-&Y@W|zNY*|I$Z0pY)_=@vhauQk9Z!h^pW zp2=LOO`+F0rfs*^0>dzo*KXMt1Uq5CHuYwAU{A> zzuG^(;RHVyFPJ`5{q4Ta7q-1RP%yubPJ+E>T&}DQ8oB+UBo^c>uqJ0>nSHmaINx z&#J$rHW)vU=V(8;p|`!Av@k!bxgWr@!7RX{-j=`Joftmx9mGG#E|9;Pidw%!ceOn> z5&XWymkhs+%h$gBN~k}5-Ht!JLclx%#?wEqPXa&AvXei^OkBTVS0}&q6e_=Oo3Xu> KXLUW1`lUYWy2$MS diff --git a/fixture/12/5/.zarray b/fixture/12/5/.zarray deleted file mode 100644 index 701f196389..0000000000 --- a/fixture/12/5/.zarray +++ /dev/null @@ -1,19 +0,0 @@ -{ - "chunks": [ - 100 - ], - "compressor": { - "clevel": 1, - "cname": "zstd", - "id": "blosc", - "shuffle": 2 - }, - "dtype": "bsbi=e&(Vx>P4Z=}E1hb=$qy3#*%0f9f#0`I@qXCuEP!gszoMu9(ea;m?C zY=XaKDJ43*5fp!WKWXlWRT>-~2u92tmJv*!Vvmob|qL)c3r;g+IRfGx|TL<&?ft!o$8D zP4YeEi!r}sj^sUB$QMAeahX0V$c8?h$Tz=D$J#$RGWb73Nm@UhVJAOSW^le{UC2J< z%vHaEB#^()VZOiIL()EzZm_>L(Eq-t%X+<9%YHvS1{A-we~v%6Y;ZrKAc?=fxQ{=J zF_FH8cK^O)2amp1z-7C-fYiPS_Ex`1Qb4~3M-D!ZP$a&^{RTgJdR9Gzxt%?W!WTaq zpS3?f!MZ2J6l-$0ZrpW{V diff --git a/fixture/12/5/1 b/fixture/12/5/1 deleted file mode 100644 index a60dc1e99d139e457291498512ef2b371fe128a9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmV;R0bl+C0hR=i0RRAy0RRA?0RR942dci+*`huslFvVgE73osZ)Lqyih(}tksQC_ z*a5%7>xDl<70o{{fil01$#_0=>3F`eCS5<0>&CtlmiE8!cz{1OVw=ACI!wM{1>ZiL zrnElCi0M8Vo6x^RY7Rg3TlBv<0!2R_Cyzg?E3dy^D5yTAQHMXxL^8m! zk&Hijw)H$V0))OK>U%$3S=~RsulPPz>dim=(Z;`@dn!K#EC;^2$VtBm-}b)~o*X{{ z6JS4Q$E!a*mzF=ojzz!MHKjj9EHAQ;^;iPByYd3jlDlUg2BJn^9Vo8$r!&M z&W*nr_wT=LlU%;--J!o07-v1DdE!5&OH4lFrvE=fc6-0x^`pN(b%VcdQ-!_DL@q!r zz|_A!q8>l)Ff+fQEM>mT2BE(;4`Dyo>NCBrF^fNXb2C14Kli_cTZ}&e)d0ZxSnt2f z&1%0CV`snAu(Cg&LodEks-Hg!V-dfLqM|;nwemh#F)}~S3hur#cEP@qq?o=yg44gf K)T_Nia>TwJ7tN6X diff --git a/fixture/12/5/10 b/fixture/12/5/10 deleted file mode 100644 index a30f143f530e0e29b3c8418c1d464020844332f7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 416 zcmV;R0bl+C0hR=i0RRAy0RRA?0RRBAj{HBbr8qxi$pgRlmR7s+p};>!DM&xK^1eSq zBKN*?FE+o1aWcOw(apZ78Hm4(BbPoo@t-~vU8_GxP~pE=vT?tLI!-@f;5$CZf;T<( z#i2hgMuEQqjyk^pwOGG=(*VFy0SG>;*#*C2pF6)h)@i?kVUfHRZA`zJ;$l9k8S6gb z(w4qlmASrJ@jX63R_wf~LaINr^P)Z}Jd8hcN*q7!!KlA->JGoo0Y$xDQeHol!uCHR z;`hEdRAN2+#!Nqo+-$wWane6Ygsi`}k}W?55Gy|cc9TDjDSkb?I%Gdw9)G_2Cr!WR zdvd?tf4INM`Qg53d!Ig)AXUDgw@N;Q5S~8(dIY~~#}z=ES3y7Krb)hE-(bFH3W2|3 ztkS%fR|>%Lyr4f?Lo~n=11mi{Zj!%pfHS|1coM(sdiFn3H;upQ3~E0Nh?GCis1?A% zhaJDc0=_?gN~pgaVVOVhT#~#pL3+KOQI0*6Wv@Q8q;$WwRfWD2zr8+?AvHd+&9Xk= K`!~N_-XTA9GsUO? 
[GIT binary patch payloads omitted: deletions of the remaining fixture/12/5/* chunk files (11 through 9; literal 416 each, except fixture/12/5/44 at literal 219). The base85 payloads were fused together during extraction and are not recoverable.]
diff --git a/fixture/12/6/.zarray b/fixture/12/6/.zarray
deleted file mode 100644
index 7c766fe673..0000000000
--- a/fixture/12/6/.zarray
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": {
-        "clevel": 1,
-        "cname": "lz4",
-        "id": "blosc",
-        "shuffle": 0
-    },
[the remaining deleted lines ("dtype", "fill_value", "filters", "order", "shape", "zarr_format") are fused with the next binary payload and cannot be recovered]
[GIT binary patch payloads omitted: deletions of fixture/12/6/0 and fixture/12/6/1; a sketch of the zarr calls matching the two .zarray hunks above follows.]
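The two .zarray hunks above preserve only the chunking (100-element chunks) and the Blosc compressor settings of the deleted fixtures. A minimal sketch, assuming zarr 2.x with numcodecs (both listed in the repository's requirements) and a hypothetical dtype and shape, of how arrays carrying exactly this metadata could be written:

import numpy as np
import zarr
from numcodecs import Blosc

# dtype and shape are assumptions; the diffs above do not preserve them
data = np.arange(1000, dtype="<f8")

# fixture/12/5: blosc + zstd, clevel=1, shuffle=2 (bit-shuffle)
z5 = zarr.open_array(
    "fixture/12/5", mode="w", shape=data.shape, chunks=(100,), dtype=data.dtype,
    compressor=Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE),
)
z5[:] = data

# fixture/12/6: blosc + lz4, clevel=1, shuffle=0 (no shuffle)
z6 = zarr.open_array(
    "fixture/12/6", mode="w", shape=data.shape, chunks=(100,), dtype=data.dtype,
    compressor=Blosc(cname="lz4", clevel=1, shuffle=Blosc.NOSHUFFLE),
)
z6[:] = data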
[GIT binary patch payloads omitted: deletions of the remaining fixture/12/6/* chunk files (1 through 9; literal 416 each, except fixture/12/6/44 at literal 220). The base85 payloads were fused together during extraction and are not recoverable.]
[End of the fixture/12/6/9 binary payload omitted.]
diff --git a/fixture/13/.zattrs b/fixture/13/.zattrs
deleted file mode 100644
index 9e26dfeeb6..0000000000
--- a/fixture/13/.zattrs
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/fixture/13/0/.zarray b/fixture/13/0/.zarray
deleted file mode 100644
index 4a07bdba84..0000000000
--- a/fixture/13/0/.zarray
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-    "chunks": [
-        100
-    ],
-    "compressor": null,
[the remaining deleted lines ("dtype", "fill_value", "filters", "order", "shape", "zarr_format") are fused with the next binary payload and cannot be recovered]
[GIT binary patch payloads omitted: deletions of fixture/13/0/* chunk files (literal 800 each); a sketch of what this metadata implies follows.]
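A minimal sketch, assuming zarr 2.x and a hypothetical local checkout of these fixtures, of what the two hunks above imply when read back: "compressor": null means chunks are stored raw, and the empty .zattrs corresponds to an empty attribute mapping.

import zarr

# assumes fixture/13 is a zarr group (its .zgroup file is not shown in this patch)
g = zarr.open_group("fixture/13", mode="r")
assert dict(g.attrs) == {}        # fixture/13/.zattrs held "{}"

z = g["0"]                        # the array described by fixture/13/0/.zarray
assert z.compressor is None       # "compressor": null -> uncompressed chunks
assert z.chunks == (100,)         # "chunks": [100]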
zIzEBin5AI@P-ft4q?3h5*3k zx@eMk(!#%K)3{1b>(D=g#GP|hWaz)q3)jUgAn?EYq*_<3p8da(K}{-kwc@|^3Ft~D ez4kvau(iUfwfDc$YLFYDKj}Z-o~mA(BkVt`8m>10 diff --git a/fixture/13/0/17 b/fixture/13/0/17 deleted file mode 100644 index b304d7c7e813f04e490367cd02f5c51f0f70a558..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<36T;u6=p94S$o{Uo(o$x;?`AB-|i|#);gIf<;h}S>o4Jt<07fe4eGR^5X zMe9G?5{xgd`tv_@R7|ka(#1cG5m7E{~Sk@G*a!VPjZNwz;+*;2;*Hpo8&H@q%|^v}P4Agt84 zOy55$`SwpQ8sNW;2kWv-p7X!Q;w??vlJ(f7Z=INZ@-FX=z>#v?@XCF#FZf}HnC>hV9FBG$*C=gB`? z5&VW#qtZXU#Pgo1xadE=47Da|1lT`>A2|Zae)~T?f~g4W_4q&DC8oR7dGo(>IyVpG z*~P!u%R$WK#OgmvzTU6RDek{D(Es4Z7Wuy~!+?#LQ|P})zcb}}d%eG6JC;)?LJ+`B zUhVXK((%6$lK*Albo{@*cr69RSn@wdgDkJC&h0<%`svI3)6qY%Adt{tnz=tXb-ck2 zZ{a`fpQ@u=^6$Ti9?ewM3-mv=D!x8G(6>K`Cle+4TW0ATk}7lYRZ;(_x!&|doHVhi=saS_935VGUvY)f!>l`kl?@Z z_z$vl>ES=$WX6#i`u4vkpGm)#3c){icnBSnsDPkLW+3 z+&K?JO3*)|ebrYBKJdRK+P$2`)5^cFd2%zV__IHeQMIbwlhQxSIeSqg4%9yc%b-a5 eV&^~6>xQwfclAI016BZbGw?rXzwdG8{%&jDenGwMHxx8`BuUeLc|ReY7`bDlpNTSg!1 z$*@1Q!#M3n1>CADMe{!%V59&p z-ZrmVAg6O|diK{*#8qGgxN>}XgmiNDbYb}HYQP;nGq`_4fz??r5 z;Db>B_WHkpF?*=a`rzv{K$=HWj<=y&Wnvh}~I!MJHIyUxFb9Ux7w%!WUX zJ~>$5@!&t#Z0TcEX6rv3%1kbLE8)NUneq-X#>qboZI^=zT;e~=_VBriM904gyXyZu z`P#o{zF%o`&E`L&C(gWNzsA31OUzJ1C-c9)v&%dC_w_$~luKN5Ve!8i!cDA{F}T0l zNjxvcm()LL)1Ag4BHcg!BiiNww#7fxxy(U%i3h-;5?THIdgi~u)i%p(_R>E~A;VFK z?YqAhu{8mbU*JF8U3wr3SnxkSmWp%8HsC)BZpz0PROi1HVFT)EJK?|Gi!nf98u~vY zY^rn&?Z&@IWp%8pY`DK*HuG=iP~blnc;V})%kw{!bG;HM*X+NGR#KK=z~8??F6IljMpBx<4_mCZkK4zdC+ zcICgtpp|oC>hr&a>U$}EFuT85mazZWJj_2wH8A(8m&L!aG8v!D49Y)>+UP&Y zv)XtLyXZf2nZ6R1Z|grD-ypuw74Sd&o&v@&(BD6fATQbq&elJcy}-I&7XLp2a24fg e6X`$vH0-FG3+umfeUCT_&;7r*Y|gnA-p;?(hpBo1 diff --git a/fixture/13/0/19 b/fixture/13/0/19 deleted file mode 100644 index 44945581894b72daa308105b907c4af9984e5b37..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<2Ln1%h~{o_BW(!V0OukJrkRP-$I0rbC3V@9oPSLZ*L6D^0VC-OfRsglga zdgDKacwQ?%o7z8vDz75&|G>Ww`CNajaNWP*kS+-_Bk4caj_Wwbs^UK-#V0w?@6^A_ z?Q_~YRs=w4l6U4JDEPm6E|nE}w#~mv!I3seujs$8c93sYme@ZI-}8R-F8{yO4Zr2U19MM1PKx5Zf+|$2OdcUFq?B+jGDg}^+3DrNu7Vc*2cJ4n% zqjF6VS--!T?-%lmI>*1oohp}UKJh<;VMUYmM&v(aQM1nvM#jGkjkh$Ziq}7YwB4Hh zr^i3L08vq@=G{M1&d-mlPxL?S4+qsoWY)hgXkd6Ppya=~)drP4n}o*V%!@B~0O&1p#D-1om};ru@P48K1u_kL2Cg519b$9pTD z2+luzZ^O#l2K+yefypuB3H?9F)EvS(;;TO#gV&OUk?TLLPquX=m&-rfMn++0H1WUB zUKQB`REHt8a z(h{LO;O{>tEn;R+^yWXChcRqZCjGy@NV^Wg+u^^e|LZ>KU)sMmG^gY4G4?+XAIw50 zVduYmp<{9#+qJ)(Pg7Zm=$;`rN$ ehv+}qqm>Lo5!=7;^VEy5W~Dy}(flOI$Fe^j)1&18 diff --git a/fixture/13/0/2 b/fixture/13/0/2 deleted file mode 100644 index 19173a2e557da12facf1b479f95657c4eb503709..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmV+*1K<1$(uT>#LEyinAk^Y=6Y4*>pyf<7;Mc!vEo~e7`t!fX^+={mCj7r3iRr8J z``$kaA2fm8M&Li%&|~5O4DUb0%oLR2`_w7jg;bvGKpxvZLAVWbZ!}L+UyiH1|Jfv>MxE+RMM;!TQ}$3HCqU=o)ov0`@;F zMm%KNsI@KKbBdh)+zvVt5bpyj{4i1rTK{qw(}w>q?vV!A&Nsq{g_C(*wrPM^)E)$hN92vB~Ww?UgZs zF6h7Nu?^Yk9Pz(Q!L0+w1I@pojY1*WDCIwuCM(li_3Xd?a7Q^0rpmv>s9u~2uJu25 zM>eZ*p5VU)3@-D-)O|ner?yV754*pjRI*x*3CO>bdEO`%vid&GjdhRz~(>j zLHnqPB=Ntxy$8+Yto}a>^!=$viP1lj69?ePTIWCVt;kKYVCcWyqzVt1%g4W)w5C3Q zAlN^JWz3=4-u%C~;~=Rc*6hCrkaOgTHRr#lr`W%;%XcW%$s^LHD>3Kn35cR*?WUom3Wwk#F35>au9pgVj(vr6rc>8d<@CRX1z%cP#gjjjwSErij&s3iy7j;2WKq3HGvU8( znjQ4Nw&1@b4Spdz-|W9=Rb#SVKlMKd1uQRBtMb2wPAu@CD#E|nnGe2X5bVE-FpfZN z;L5-F^5%91jqSh6^|7JUZL>e8nYHQJlGH!tRj@)+$LPi2F^b^c*sHHFW|o~NX@7`{qDcyPY*$P?bE*+P;?ArF4RAz3|3XD z1@J$(puvV*K+wNV->jd-AGE*Wq=V(jx#>SR<>jrYB(gvBp9@V_YuG