@@ -76,13 +76,13 @@ def __iter__(self):

 class RefsItemsView(collections.abc.ItemsView):
     def __iter__(self):
-        return zip(self._mapping.keys(), self._mapping.values())
+        return zip(self._mapping.keys(), self._mapping.values(), strict=False)


 def ravel_multi_index(idx, sizes):
     val = 0
     mult = 1
-    for i, s in zip(idx[::-1], sizes[::-1]):
+    for i, s in zip(idx[::-1], sizes[::-1], strict=False):
         val += i * mult
         mult *= s
     return val
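For reference, a standalone sketch (not part of the patch) of what this hunk touches: ravel_multi_index flattens a multi-dimensional chunk index into a single row-major offset, and the strict= keyword added throughout this commit is the Python 3.10+ flag controlling what zip() does when its inputs have different lengths. strict=False just spells out the historical truncate-at-the-shortest behaviour, presumably to satisfy a lint rule such as ruff's B905 (that motivation is an assumption on my part).

# Standalone example, not part of the patch.
def ravel_multi_index(idx, sizes):
    val = 0
    mult = 1
    for i, s in zip(idx[::-1], sizes[::-1], strict=False):
        val += i * mult
        mult *= s
    return val

# Chunk (1, 2) in a 3 x 4 chunk grid sits at flat offset 1 * 4 + 2 == 6.
assert ravel_multi_index((1, 2), (3, 4)) == 6

# strict= only matters when the iterables differ in length:
print(list(zip([1, 2, 3], "ab", strict=False)))  # [(1, 'a'), (2, 'b')] - truncates
# list(zip([1, 2, 3], "ab", strict=True))        # raises ValueError (Python 3.10+)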
@@ -286,7 +286,7 @@ def ls(self, path="", detail=True):
         recs = self._generate_all_records(field)
         recinfo = [
             {"name": name, "type": "file", "size": rec[-1]}
-            for name, rec in zip(keys, recs)
+            for name, rec in zip(keys, recs, strict=False)
             if rec[0]  # filters out path==None, deleted/missing
         ]
         return fileinfo + recinfo
@@ -349,15 +349,16 @@ def _get_chunk_sizes(self, field):
         if field not in self.chunk_sizes:
             zarray = self.zmetadata[f"{field}/.zarray"]
             size_ratio = [
-                math.ceil(s / c) for s, c in zip(zarray["shape"], zarray["chunks"])
+                math.ceil(s / c)
+                for s, c in zip(zarray["shape"], zarray["chunks"], strict=False)
             ]
             self.chunk_sizes[field] = size_ratio or [1]
         return self.chunk_sizes[field]

     def _generate_record(self, field, record):
         """The references for a given parquet file of a given field"""
         refs = self.open_refs(field, record)
-        it = iter(zip(*refs.values()))
+        it = iter(zip(*refs.values(), strict=False))
         if len(refs) == 3:
             # All urls
             return (list(t) for t in it)
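To make the arithmetic in _get_chunk_sizes concrete, here is a minimal sketch with invented shape/chunks metadata: the number of chunks along each axis is the ceiling of the array shape divided by the chunk size.

import math

# Invented zarr metadata: a 10 x 25 array stored in 4 x 10 chunks.
zarray = {"shape": [10, 25], "chunks": [4, 10]}

size_ratio = [
    math.ceil(s / c)
    for s, c in zip(zarray["shape"], zarray["chunks"], strict=False)
]
print(size_ratio)  # [3, 3] -> a 3 x 3 grid, 9 chunks in total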
@@ -878,7 +879,7 @@ def get(self, rpath, lpath, recursive=False, **kwargs):
             data = self.cat([r for r in rpath if not self.isdir(r)])
         else:
             data = self.cat(rpath)
-        for remote, local in zip(rpath, targets):
+        for remote, local in zip(rpath, targets, strict=False):
             if remote in data:
                 fs.pipe_file(local, data[remote])

@@ -918,7 +919,7 @@ def cat(self, path, recursive=False, on_error="raise", **kwargs):
         ends2 = []
         paths2 = []
         whole_files = set()
-        for u, s, e, p in zip(urls, starts, ends, valid_paths):
+        for u, s, e, p in zip(urls, starts, ends, valid_paths, strict=False):
             if isinstance(u, bytes):
                 # data
                 out[p] = u
@@ -930,7 +931,7 @@ def cat(self, path, recursive=False, on_error="raise", **kwargs):
                 starts2.append(s)
                 ends2.append(e)
                 paths2.append(p)
-        for u, s, e, p in zip(urls, starts, ends, valid_paths):
+        for u, s, e, p in zip(urls, starts, ends, valid_paths, strict=False):
             # second run to account for files that are to be loaded whole
             if s is not None and u not in whole_files:
                 urls2.append(u)
@@ -950,10 +951,12 @@ def cat(self, path, recursive=False, on_error="raise", **kwargs):
         bytes_out = fs.cat_ranges(new_paths, new_starts, new_ends)

         # unbundle from merged bytes - simple approach
-        for u, s, e, p in zip(urls, starts, ends, valid_paths):
+        for u, s, e, p in zip(urls, starts, ends, valid_paths, strict=False):
             if p in out:
                 continue  # was bytes, already handled
-            for np, ns, ne, b in zip(new_paths, new_starts, new_ends, bytes_out):
+            for np, ns, ne, b in zip(
+                new_paths, new_starts, new_ends, bytes_out, strict=False
+            ):
                 if np == u and (ns is None or ne is None):
                     if isinstance(b, Exception):
                         out[p] = b
@@ -1061,7 +1064,7 @@ def _process_gen(self, gens):
                 for k, v in gen["dimensions"].items()
             }
             products = (
-                dict(zip(dimension.keys(), values))
+                dict(zip(dimension.keys(), values, strict=False))
                 for values in itertools.product(*dimension.values())
             )
             for pr in products:
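The generator expression in this hunk builds one mapping per combination of dimension values; a self-contained sketch of that itertools.product / dict(zip(...)) pattern with made-up dimension names (real values come from gen["dimensions"] in the code above):

import itertools

dimension = {"time": range(2), "band": ["red", "green"]}

products = (
    dict(zip(dimension.keys(), values, strict=False))
    for values in itertools.product(*dimension.values())
)
for pr in products:
    print(pr)
# {'time': 0, 'band': 'red'}
# {'time': 0, 'band': 'green'}
# {'time': 1, 'band': 'red'}
# {'time': 1, 'band': 'green'}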
@@ -1105,7 +1108,7 @@ def _dircache_from_items(self):
                 subdirs.append(par0)

             subdirs.reverse()
-            for parent, child in zip(subdirs, subdirs[1:]):
+            for parent, child in zip(subdirs, subdirs[1:], strict=False):
                 # register newly discovered directories
                 assert child not in self.dircache
                 assert parent in self.dircache
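One spot where the choice of strict=False is load-bearing rather than cosmetic: zip(subdirs, subdirs[1:]) always pairs a list with a slice that is one element shorter, so strict=True would raise ValueError here, while strict=False keeps the original truncating behaviour. A small standalone sketch of the parent/child pairing:

subdirs = ["", "a", "a/b", "a/b/c"]  # shallowest directory first

for parent, child in zip(subdirs, subdirs[1:], strict=False):
    print(f"{parent!r} contains {child!r}")
# '' contains 'a'
# 'a' contains 'a/b'
# 'a/b' contains 'a/b/c'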