Commit a9a2d84

Fix B905 zip() without an explicit strict= parameter
1 parent 4dee569 commit a9a2d84
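
B905 is the flake8-bugbear rule that flags any zip() call without an explicit strict= argument. Since Python 3.10, zip() accepts strict=: strict=False keeps the historical behaviour of silently truncating to the shortest input, while strict=True raises ValueError on a length mismatch. This commit opts every flagged call into strict=False, making the existing truncating behaviour explicit without changing semantics. A minimal sketch of the difference (illustrative values only):

    paths = ["a.csv", "b.csv", "c.csv"]
    sizes = [10, 20]  # one entry short

    # strict=False: historical truncation, now stated explicitly (what this commit does)
    list(zip(paths, sizes, strict=False))  # [('a.csv', 10), ('b.csv', 20)]

    # strict=True would surface the mismatch instead
    list(zip(paths, sizes, strict=True))   # ValueError: zip() argument 2 is shorter than argument 1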

18 files changed (+88, -59 lines)


fsspec/archive.py

Lines changed: 3 additions & 1 deletion
@@ -61,7 +61,9 @@ def ls(self, path, detail=True, **kwargs):
                 paths[p] = f
             elif all(
                 (a == b)
-                for a, b in zip(path.split("/"), [""] + p.strip("/").split("/"))
+                for a, b in zip(
+                    path.split("/"), [""] + p.strip("/").split("/"), strict=False
+                )
             ):
                 # root directory entry
                 ppath = p.rstrip("/").split("/", 1)[0]
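
The truncating behaviour is load-bearing in this ls() comparison: the query path has fewer components than the entry it is matched against, so the zip is a prefix test and strict=False preserves the intended semantics. A standalone sketch with a hypothetical is_under helper (not fsspec code):

    def is_under(path: str, p: str) -> bool:
        # zip stops at the shorter input, so only the leading components
        # of the entry are compared against the query path
        return all(
            a == b
            for a, b in zip(path.split("/"), [""] + p.strip("/").split("/"), strict=False)
        )

    is_under("/top", "top/nested/file.txt")    # True: comparison stops after two pairs
    is_under("/other", "top/nested/file.txt")  # False: second components differ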

fsspec/asyn.py

Lines changed: 11 additions & 6 deletions
@@ -397,7 +397,10 @@ async def _copy(
         )

         batch_size = batch_size or self.batch_size
-        coros = [self._cp_file(p1, p2, **kwargs) for p1, p2 in zip(paths1, paths2)]
+        coros = [
+            self._cp_file(p1, p2, **kwargs)
+            for p1, p2 in zip(paths1, paths2, strict=False)
+        ]
         result = await _run_coros_in_chunks(
             coros, batch_size=batch_size, return_exceptions=True, nofiles=True
         )
@@ -469,7 +472,7 @@ async def _cat(
         ):
             return {
                 k: v
-                for k, v in zip(paths, out)
+                for k, v in zip(paths, out, strict=False)
                 if on_error != "omit" or not is_exception(v)
             }
         else:
@@ -509,7 +512,7 @@ async def _cat_ranges(
             raise ValueError
         coros = [
             self._cat_file(p, start=s, end=e, **kwargs)
-            for p, s, e in zip(paths, starts, ends)
+            for p, s, e in zip(paths, starts, ends, strict=False)
         ]
         batch_size = batch_size or self.batch_size
         return await _run_coros_in_chunks(
@@ -577,8 +580,10 @@ async def _put(
         )

         is_dir = {l: os.path.isdir(l) for l in lpaths}
-        rdirs = [r for l, r in zip(lpaths, rpaths) if is_dir[l]]
-        file_pairs = [(l, r) for l, r in zip(lpaths, rpaths) if not is_dir[l]]
+        rdirs = [r for l, r in zip(lpaths, rpaths, strict=False) if is_dir[l]]
+        file_pairs = [
+            (l, r) for l, r in zip(lpaths, rpaths, strict=False) if not is_dir[l]
+        ]

         await asyncio.gather(*[self._makedirs(d, exist_ok=True) for d in rdirs])
         batch_size = batch_size or self.batch_size
@@ -662,7 +667,7 @@ async def _get(

         coros = []
         callback.set_size(len(lpaths))
-        for lpath, rpath in zip(lpaths, rpaths):
+        for lpath, rpath in zip(lpaths, rpaths, strict=False):
             get_file = callback.branch_coro(self._get_file)
             coros.append(get_file(rpath, lpath, **kwargs))
         return await _run_coros_in_chunks(
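
All of the _copy/_cat/_put/_get changes above build coroutine lists from zipped path pairs and hand them to _run_coros_in_chunks. A rough, simplified sketch of that chunked-gather pattern — the real fsspec helper takes more options (e.g. return_exceptions, nofiles) and this is not its actual implementation:

    import asyncio

    async def run_coros_in_chunks(coros, batch_size=128):
        # await the coroutines a batch at a time to bound concurrency
        results = []
        for i in range(0, len(coros), batch_size):
            results.extend(
                await asyncio.gather(*coros[i : i + batch_size], return_exceptions=True)
            )
        return results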

fsspec/caching.py

Lines changed: 1 addition & 1 deletion
@@ -642,7 +642,7 @@ def __init__(
                 offsets.append((start, stop))
                 blocks.append(data.pop((start, stop)))

-            self.data = dict(zip(offsets, blocks))
+            self.data = dict(zip(offsets, blocks, strict=False))
         else:
             self.data = {}

fsspec/generic.py

Lines changed: 2 additions & 2 deletions
@@ -135,7 +135,7 @@ def rsync(
                 allfiles[k] = otherfile
     logger.debug(f"{len(allfiles)} files to copy")
     if allfiles:
-        source_files, target_files = zip(*allfiles.items())
+        source_files, target_files = zip(*allfiles.items(), strict=False)
         fs.cp(source_files, target_files, **kwargs)
     logger.debug(f"{len(to_delete)} files to delete")
     if delete_missing and to_delete:
@@ -361,7 +361,7 @@ async def copy_file_op(
             u2,
             os.path.join(tempdir, uuid.uuid4().hex),
         )
-        for u1, u2 in zip(url1, url2)
+        for u1, u2 in zip(url1, url2, strict=False)
     ]
     out = await _run_coros_in_chunks(
         coros, batch_size=batch_size, return_exceptions=True
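
The rsync change is the "unzip" idiom: zip(*allfiles.items()) transposes a dict's items into parallel tuples. Every item is a 2-tuple, so strict= can never trigger here and strict=False merely satisfies the linter. A small sketch with a hypothetical mapping:

    allfiles = {"src/a": "dst/a", "src/b": "dst/b"}
    source_files, target_files = zip(*allfiles.items(), strict=False)
    # source_files == ('src/a', 'src/b'); target_files == ('dst/a', 'dst/b')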

fsspec/implementations/cache_metadata.py

Lines changed: 6 additions & 2 deletions
@@ -105,7 +105,9 @@ def check_file(
         perform extra checks to reject possible matches, such as if they are
         too old.
         """
-        for (fn, base, _), cache in zip(self._scan_locations(), self.cached_files):
+        for (fn, base, _), cache in zip(
+            self._scan_locations(), self.cached_files, strict=False
+        ):
             if path not in cache:
                 continue
             detail = cache[path].copy()
@@ -192,7 +194,9 @@ def pop_file(self, path: str) -> str | None:

     def save(self) -> None:
         """Save metadata to disk"""
-        for (fn, _, writable), cache in zip(self._scan_locations(), self.cached_files):
+        for (fn, _, writable), cache in zip(
+            self._scan_locations(), self.cached_files, strict=False
+        ):
             if not writable:
                 continue

fsspec/implementations/cached.py

Lines changed: 8 additions & 8 deletions
@@ -574,12 +574,12 @@ def open_many(self, open_files, **kwargs):
         if self.compression:
             raise NotImplementedError
         details = [self._check_file(sp) for sp in paths]
-        downpath = [p for p, d in zip(paths, details) if not d]
+        downpath = [p for p, d in zip(paths, details, strict=False) if not d]
         downfn0 = [
             os.path.join(self.storage[-1], self._mapper(p))
-            for p, d in zip(paths, details)
+            for p, d in zip(paths, details, strict=False)
         ]  # keep these path names for opening later
-        downfn = [fn for fn, d in zip(downfn0, details) if not d]
+        downfn = [fn for fn, d in zip(downfn0, details, strict=False) if not d]
         if downpath:
             # skip if all files are already cached and up to date
             self.fs.get(downpath, downfn)
@@ -595,7 +595,7 @@ def open_many(self, open_files, **kwargs):
             }
             for path in downpath
         ]
-        for path, detail in zip(downpath, newdetail):
+        for path, detail in zip(downpath, newdetail, strict=False):
             self._metadata.update_file(path, detail)
         self.save_cache()
@@ -605,7 +605,7 @@ def firstpart(fn):

         return [
             open(firstpart(fn0) if fn0 else fn1, mode=open_files.mode)
-            for fn0, fn1 in zip(details, downfn0)
+            for fn0, fn1 in zip(details, downfn0, strict=False)
         ]

     def commit_many(self, open_files):
@@ -670,7 +670,7 @@ def cat(
             self.save_cache()

         callback.set_size(len(paths))
-        for p, fn in zip(paths, fns):
+        for p, fn in zip(paths, fns, strict=False):
             with open(fn, "rb") as f:
                 out[p] = f.read()
                 callback.relative_update(1)
@@ -886,8 +886,8 @@ def cat_ranges(
     ):
         logger.debug("cat ranges %s", paths)
         lpaths = [self._check_file(p) for p in paths]
-        rpaths = [p for l, p in zip(lpaths, paths) if l is False]
-        lpaths = [l for l, p in zip(lpaths, paths) if l is False]
+        rpaths = [p for l, p in zip(lpaths, paths, strict=False) if l is False]
+        lpaths = [l for l, p in zip(lpaths, paths, strict=False) if l is False]
         self.fs.get(rpaths, lpaths)
         paths = [self._check_file(p) for p in paths]
         return LocalFileSystem().cat_ranges(

fsspec/implementations/reference.py

Lines changed: 15 additions & 12 deletions
@@ -76,13 +76,13 @@ def __iter__(self):

 class RefsItemsView(collections.abc.ItemsView):
     def __iter__(self):
-        return zip(self._mapping.keys(), self._mapping.values())
+        return zip(self._mapping.keys(), self._mapping.values(), strict=False)


 def ravel_multi_index(idx, sizes):
     val = 0
     mult = 1
-    for i, s in zip(idx[::-1], sizes[::-1]):
+    for i, s in zip(idx[::-1], sizes[::-1], strict=False):
         val += i * mult
         mult *= s
     return val
@@ -286,7 +286,7 @@ def ls(self, path="", detail=True):
             recs = self._generate_all_records(field)
             recinfo = [
                 {"name": name, "type": "file", "size": rec[-1]}
-                for name, rec in zip(keys, recs)
+                for name, rec in zip(keys, recs, strict=False)
                 if rec[0]  # filters out path==None, deleted/missing
             ]
             return fileinfo + recinfo
@@ -349,15 +349,16 @@ def _get_chunk_sizes(self, field):
         if field not in self.chunk_sizes:
             zarray = self.zmetadata[f"{field}/.zarray"]
             size_ratio = [
-                math.ceil(s / c) for s, c in zip(zarray["shape"], zarray["chunks"])
+                math.ceil(s / c)
+                for s, c in zip(zarray["shape"], zarray["chunks"], strict=False)
             ]
             self.chunk_sizes[field] = size_ratio or [1]
         return self.chunk_sizes[field]

     def _generate_record(self, field, record):
         """The references for a given parquet file of a given field"""
         refs = self.open_refs(field, record)
-        it = iter(zip(*refs.values()))
+        it = iter(zip(*refs.values(), strict=False))
         if len(refs) == 3:
             # All urls
             return (list(t) for t in it)
@@ -878,7 +879,7 @@ def get(self, rpath, lpath, recursive=False, **kwargs):
             data = self.cat([r for r in rpath if not self.isdir(r)])
         else:
             data = self.cat(rpath)
-        for remote, local in zip(rpath, targets):
+        for remote, local in zip(rpath, targets, strict=False):
             if remote in data:
                 fs.pipe_file(local, data[remote])

@@ -918,7 +919,7 @@ def cat(self, path, recursive=False, on_error="raise", **kwargs):
         ends2 = []
         paths2 = []
         whole_files = set()
-        for u, s, e, p in zip(urls, starts, ends, valid_paths):
+        for u, s, e, p in zip(urls, starts, ends, valid_paths, strict=False):
             if isinstance(u, bytes):
                 # data
                 out[p] = u
@@ -930,7 +931,7 @@ def cat(self, path, recursive=False, on_error="raise", **kwargs):
                 starts2.append(s)
                 ends2.append(e)
                 paths2.append(p)
-        for u, s, e, p in zip(urls, starts, ends, valid_paths):
+        for u, s, e, p in zip(urls, starts, ends, valid_paths, strict=False):
             # second run to account for files that are to be loaded whole
             if s is not None and u not in whole_files:
                 urls2.append(u)
@@ -950,10 +951,12 @@ def cat(self, path, recursive=False, on_error="raise", **kwargs):
         bytes_out = fs.cat_ranges(new_paths, new_starts, new_ends)

         # unbundle from merged bytes - simple approach
-        for u, s, e, p in zip(urls, starts, ends, valid_paths):
+        for u, s, e, p in zip(urls, starts, ends, valid_paths, strict=False):
             if p in out:
                 continue  # was bytes, already handled
-            for np, ns, ne, b in zip(new_paths, new_starts, new_ends, bytes_out):
+            for np, ns, ne, b in zip(
+                new_paths, new_starts, new_ends, bytes_out, strict=False
+            ):
                 if np == u and (ns is None or ne is None):
                     if isinstance(b, Exception):
                         out[p] = b
@@ -1061,7 +1064,7 @@ def _process_gen(self, gens):
             for k, v in gen["dimensions"].items()
         }
         products = (
-            dict(zip(dimension.keys(), values))
+            dict(zip(dimension.keys(), values, strict=False))
             for values in itertools.product(*dimension.values())
         )
         for pr in products:
@@ -1105,7 +1108,7 @@ def _dircache_from_items(self):
                 subdirs.append(par0)

         subdirs.reverse()
-        for parent, child in zip(subdirs, subdirs[1:]):
+        for parent, child in zip(subdirs, subdirs[1:], strict=False):
             # register newly discovered directories
             assert child not in self.dircache
             assert parent in self.dircache
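
ravel_multi_index, touched in the first hunk above, flattens a C-order multi-index: walking axes from last to first, each index contributes i times the product of the sizes of all later axes. A worked example using the function exactly as it appears in the diff:

    # for shape (4, 5, 6) and index (1, 2, 3):
    #   3 * 1  +  2 * 6  +  1 * (6 * 5)  =  3 + 12 + 30  =  45
    ravel_multi_index((1, 2, 3), (4, 5, 6))  # 45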

fsspec/implementations/tests/test_archive.py

Lines changed: 1 addition & 1 deletion
@@ -322,7 +322,7 @@ def test_walk(self, scenario: ArchiveTestScenario, topdown, prune_nested):
        # prior py3.10 zip() does not support strict=True, we need
        # a manual len check here
        assert len(result) == len(expected)
-        for lhs, rhs in zip(result, expected):
+        for lhs, rhs in zip(result, expected, strict=False):
            assert lhs[0] == rhs[0]
            assert sorted(lhs[1]) == sorted(rhs[1])
            assert sorted(lhs[2]) == sorted(rhs[2])
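
As the test's own comment notes, the manual len check predates strict= support. On Python 3.10+ the two forms below are near-equivalent; the difference is only the failure mode (AssertionError up front vs ValueError from zip). Hypothetical walk tuples for illustration:

    result = [("a", ["d1"], ["f1"])]
    expected = [("a", ["d1"], ["f1"])]

    # manual check, as in the test
    assert len(result) == len(expected)
    assert all(lhs == rhs for lhs, rhs in zip(result, expected, strict=False))

    # 3.10+ alternative: zip itself enforces equal lengths
    assert all(lhs == rhs for lhs, rhs in zip(result, expected, strict=True))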

fsspec/implementations/tests/test_local.py

Lines changed: 6 additions & 4 deletions
@@ -172,11 +172,13 @@ def test_urlpath_expand_write():
     """Make sure * is expanded in file paths when writing."""
     _, _, paths = get_fs_token_paths("prefix-*.csv", mode="wb", num=2)
     assert all(
-        p.endswith(pa) for p, pa in zip(paths, ["/prefix-0.csv", "/prefix-1.csv"])
+        p.endswith(pa)
+        for p, pa in zip(paths, ["/prefix-0.csv", "/prefix-1.csv"], strict=False)
     )
     _, _, paths = get_fs_token_paths(["prefix-*.csv"], mode="wb", num=2)
     assert all(
-        p.endswith(pa) for p, pa in zip(paths, ["/prefix-0.csv", "/prefix-1.csv"])
+        p.endswith(pa)
+        for p, pa in zip(paths, ["/prefix-0.csv", "/prefix-1.csv"], strict=False)
     )
     # we can read with multiple masks, but not write
     with pytest.raises(ValueError):
@@ -189,7 +191,7 @@ def test_open_files():
     with filetexts(files, mode="b"):
         myfiles = open_files("./.test.accounts.*")
         assert len(myfiles) == len(files)
-        for lazy_file, data_file in zip(myfiles, sorted(files)):
+        for lazy_file, data_file in zip(myfiles, sorted(files), strict=False):
             with lazy_file as f:
                 x = f.read()
                 assert x == files[data_file]
@@ -291,7 +293,7 @@ def test_pickability_of_lazy_files(tmpdir):
         myfiles = open_files("./.test.accounts.*")
         myfiles2 = cloudpickle.loads(cloudpickle.dumps(myfiles))

-        for f, f2 in zip(myfiles, myfiles2):
+        for f, f2 in zip(myfiles, myfiles2, strict=False):
             assert f.path == f2.path
             assert isinstance(f.fs, type(f2.fs))
             with f as f_open, f2 as f2_open:

fsspec/implementations/zip.py

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ def find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs):

         def _matching_starts(file_path):
             file_parts = filter(lambda s: bool(s), file_path.split("/"))
-            return all(a == b for a, b in zip(path_parts, file_parts))
+            return all(a == b for a, b in zip(path_parts, file_parts, strict=False))

         self._get_dirs()
