@@ -463,70 +463,66 @@ def maybe_decode_store(store, chunks, lock=False):
 
         _protect_dataset_variables_inplace(ds, cache)
 
-        if chunks is not None:
-            if engine != "zarr":
-                from dask.base import tokenize
-
-                # if passed an actual file path, augment the token with
-                # the file modification time
-                if isinstance(filename_or_obj, str) and not is_remote_uri(
-                    filename_or_obj
-                ):
-                    mtime = os.path.getmtime(filename_or_obj)
-                else:
-                    mtime = None
-                token = tokenize(
-                    filename_or_obj,
-                    mtime,
-                    group,
-                    decode_cf,
-                    mask_and_scale,
-                    decode_times,
-                    concat_characters,
-                    decode_coords,
-                    engine,
-                    chunks,
-                    drop_variables,
-                    use_cftime,
-                    decode_timedelta,
-                )
-                name_prefix = "open_dataset-%s" % token
-                ds2 = ds.chunk(chunks, name_prefix=name_prefix, token=token)
-
-            else:  # if engine=="zarr":
-                # adapted from Dataset.Chunk() and taken from open_zarr
-                if not isinstance(chunks, (int, dict)):
-                    if chunks != "auto":
-                        raise ValueError(
-                            "chunks must be an int, dict, 'auto', or None. "
-                            "Instead found %s. " % chunks
-                        )
-
-                if chunks == "auto":
-                    try:
-                        import dask.array  # noqa
-                    except ImportError:
-                        chunks = None
-
-                # auto chunking needs to be here and not in ZarrStore because
-                # the variable chunks does not survive decode_cf
-                # return trivial case
-                if not chunks:  # e.g. chunks is 0, None or {}
-                    return ds
-
-                if isinstance(chunks, int):
-                    chunks = dict.fromkeys(ds.dims, chunks)
-
-                variables = {
-                    k: store.maybe_chunk(k, v, chunks, overwrite_encoded_chunks)
-                    for k, v in ds.variables.items()
-                }
-                ds2 = ds._replace(variables)
-
-            ds2._file_obj = ds._file_obj
+        if chunks is not None and engine != "zarr":
+            from dask.base import tokenize
+
+            # if passed an actual file path, augment the token with
+            # the file modification time
+            if isinstance(filename_or_obj, str) and not is_remote_uri(filename_or_obj):
+                mtime = os.path.getmtime(filename_or_obj)
+            else:
+                mtime = None
+            token = tokenize(
+                filename_or_obj,
+                mtime,
+                group,
+                decode_cf,
+                mask_and_scale,
+                decode_times,
+                concat_characters,
+                decode_coords,
+                engine,
+                chunks,
+                drop_variables,
+                use_cftime,
+                decode_timedelta,
+            )
+            name_prefix = "open_dataset-%s" % token
+            ds2 = ds.chunk(chunks, name_prefix=name_prefix, token=token)
+
+        elif engine == "zarr":
+            # adapted from Dataset.Chunk() and taken from open_zarr
+            if not (isinstance(chunks, (int, dict)) or chunks is None):
+                if chunks != "auto":
+                    raise ValueError(
+                        "chunks must be an int, dict, 'auto', or None. "
+                        "Instead found %s. " % chunks
+                    )
+
+            if chunks == "auto":
+                try:
+                    import dask.array  # noqa
+                except ImportError:
+                    chunks = None
+
+            # auto chunking needs to be here and not in ZarrStore because
+            # the variable chunks does not survive decode_cf
+            # return trivial case
+            if not chunks:  # e.g. chunks is 0, None or {}
+                return ds
+
+            if isinstance(chunks, int):
+                chunks = dict.fromkeys(ds.dims, chunks)
+
+            variables = {
+                k: store.maybe_chunk(k, v, chunks, overwrite_encoded_chunks)
+                for k, v in ds.variables.items()
+            }
+            ds2 = ds._replace(variables)
+
         else:
             ds2 = ds
-
+        ds2._file_obj = ds._file_obj
         return ds2
 
     if isinstance(filename_or_obj, Path):
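
The hunk flattens the chunking logic into a dask-token path for non-zarr engines and a store-driven path for the zarr engine, with the `_file_obj` hand-off now shared by all branches. A minimal usage sketch of the two entry points follows; the file names are hypothetical, and it assumes the datasets already exist and that dask and zarr are installed.

```python
import xarray as xr

# Non-zarr engines: the requested chunks are applied with ds.chunk(), using a
# dask token derived from the file path, its modification time, and the
# decode options, so repeated opens of the same file reuse the same graph keys.
ds_nc = xr.open_dataset("example.nc", chunks={"time": 100})

# Zarr engine: chunks may be an int, a dict, "auto", or None. "auto" silently
# falls back to no chunking when dask is unavailable, and chunking is applied
# per variable through the store rather than via ds.chunk().
ds_zarr = xr.open_dataset("example.zarr", engine="zarr", chunks="auto")
```

Routing the zarr branch through `store.maybe_chunk` (rather than `ds.chunk`) is what lets the on-disk zarr chunk encoding be honored unless `overwrite_encoded_chunks` is set.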