Skip to content

Commit acdff6c

Browse files
Change legacy string formatting to f-strings
1 parent b8aeb13 commit acdff6c

27 files changed

+90
-102
lines changed

fsspec/archive.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ class AbstractArchiveFileSystem(AbstractFileSystem):
1313
"""
1414

1515
def __str__(self):
16-
return "<Archive-like object %s at %s>" % (type(self).__name__, id(self))
16+
return f"<Archive-like object {type(self).__name__} at {id(self)}>"
1717

1818
__repr__ = __str__
1919

fsspec/asyn.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -426,7 +426,7 @@ async def _process_limits(self, url, start, end):
426426
end = ""
427427
if isinstance(end, numbers.Integral):
428428
end -= 1 # bytes range is inclusive
429-
return "bytes=%s-%s" % (start, end)
429+
return f"bytes={start}-{end}"
430430

431431
async def _cat_file(self, path, start=None, end=None, **kwargs):
432432
raise NotImplementedError

fsspec/caching.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -222,8 +222,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
222222
self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block)
223223

224224
def __repr__(self):
225-
return "<BlockCache blocksize={}, size={}, nblocks={}>".format(
226-
self.blocksize, self.size, self.nblocks
225+
return (
226+
f"<BlockCache blocksize={self.blocksize}, "
227+
f"size={self.size}, nblocks={self.nblocks}>"
227228
)
228229

229230
def cache_info(self):
@@ -277,14 +278,13 @@ def _fetch_block(self, block_number):
277278
"""
278279
if block_number > self.nblocks:
279280
raise ValueError(
280-
"'block_number={}' is greater than the number of blocks ({})".format(
281-
block_number, self.nblocks
282-
)
281+
f"'block_number={block_number}' is greater than "
282+
f"the number of blocks ({self.nblocks})"
283283
)
284284

285285
start = block_number * self.blocksize
286286
end = start + self.blocksize
287-
logger.info("BlockCache fetching block %d", block_number)
287+
logger.info(f"BlockCache fetching block {block_number}")
288288
block_contents = super()._fetch(start, end)
289289
return block_contents
290290

@@ -606,8 +606,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
606606
self._fetch_future_lock = threading.Lock()
607607

608608
def __repr__(self):
609-
return "<BackgroundBlockCache blocksize={}, size={}, nblocks={}>".format(
610-
self.blocksize, self.size, self.nblocks
609+
return (
610+
f"<BackgroundBlockCache blocksize={self.blocksize}, "
611+
f"size={self.size}, nblocks={self.nblocks}>"
611612
)
612613

613614
def cache_info(self):
@@ -719,14 +720,13 @@ def _fetch_block(self, block_number, log_info="sync"):
719720
"""
720721
if block_number > self.nblocks:
721722
raise ValueError(
722-
"'block_number={}' is greater than the number of blocks ({})".format(
723-
block_number, self.nblocks
724-
)
723+
f"'block_number={block_number}' is greater than "
724+
f"the number of blocks ({self.nblocks})"
725725
)
726726

727727
start = block_number * self.blocksize
728728
end = start + self.blocksize
729-
logger.info("BlockCache fetching block (%s) %d", log_info, block_number)
729+
logger.info(f"BlockCache fetching block ({log_info}) {block_number}")
730730
block_contents = super()._fetch(start, end)
731731
return block_contents
732732

fsspec/compression.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -39,13 +39,11 @@ def register_compression(name, callback, extensions, force=False):
3939

4040
# Validate registration
4141
if name in compr and not force:
42-
raise ValueError("Duplicate compression registration: %s" % name)
42+
raise ValueError(f"Duplicate compression registration: {name}")
4343

4444
for ext in extensions:
4545
if ext in fsspec.utils.compressions and not force:
46-
raise ValueError(
47-
"Duplicate compression file extension: %s (%s)" % (ext, name)
48-
)
46+
raise ValueError(f"Duplicate compression file extension: {ext} ({name})")
4947

5048
compr[name] = callback
5149

fsspec/core.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ def __reduce__(self):
9292
)
9393

9494
def __repr__(self):
95-
return "<OpenFile '{}'>".format(self.path)
95+
return f"<OpenFile '{self.path}'>"
9696

9797
def __enter__(self):
9898
mode = self.mode.replace("t", "").replace("b", "") + "b"
@@ -195,7 +195,7 @@ def __getitem__(self, item):
195195
return out
196196

197197
def __repr__(self):
198-
return "<List of %s OpenFile instances>" % len(self)
198+
return f"<List of {len(self)} OpenFile instances>"
199199

200200

201201
def open_files(
@@ -498,7 +498,7 @@ def get_compression(urlpath, compression):
498498
if compression == "infer":
499499
compression = infer_compression(urlpath)
500500
if compression is not None and compression not in compr:
501-
raise ValueError("Compression type %s not supported" % compression)
501+
raise ValueError(f"Compression type {compression} not supported")
502502
return compression
503503

504504

fsspec/fuse.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,7 @@ def format_help(self):
275275
for item in args.option or []:
276276
key, sep, value = item.partition("=")
277277
if not sep:
278-
parser.error(message="Wrong option: {!r}".format(item))
278+
parser.error(message=f"Wrong option: {item!r}")
279279
val = value.lower()
280280
if val.endswith("[int]"):
281281
value = int(value[: -len("[int]")])

fsspec/gui.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ class which owns it.
7070
same name.
7171
"""
7272
if name not in self.signals:
73-
raise ValueError("Attempt to assign an undeclared signal: %s" % name)
73+
raise ValueError(f"Attempt to assign an undeclared signal: {name}")
7474
self._sigs[name] = {
7575
"widget": widget,
7676
"callbacks": [],
@@ -141,7 +141,7 @@ def _emit(self, sig, value=None):
141141
142142
Calling of callbacks will halt whenever one returns False.
143143
"""
144-
logger.log(self._sigs[sig]["log"], "{}: {}".format(sig, value))
144+
logger.log(self._sigs[sig]["log"], f"{sig}: {value}")
145145
for callback in self._sigs[sig]["callbacks"]:
146146
if isinstance(callback, str):
147147
self._emit(callback)
@@ -153,8 +153,7 @@ def _emit(self, sig, value=None):
153153
break
154154
except Exception as e:
155155
logger.exception(
156-
"Exception (%s) while executing callback for signal: %s"
157-
"" % (e, sig)
156+
f"Exception ({e}) while executing callback for signal: {sig}"
158157
)
159158

160159
def show(self, threads=False):

fsspec/implementations/cached.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -283,10 +283,10 @@ def _open(
283283
hash, blocks = detail["fn"], detail["blocks"]
284284
if blocks is True:
285285
# stored file is complete
286-
logger.debug("Opening local copy of %s" % path)
286+
logger.debug(f"Opening local copy of {path}")
287287
return open(fn, mode)
288288
# TODO: action where partial file exists in read-only cache
289-
logger.debug("Opening partially cached copy of %s" % path)
289+
logger.debug(f"Opening partially cached copy of {path}")
290290
else:
291291
hash = self._mapper(path)
292292
fn = os.path.join(self.storage[-1], hash)
@@ -299,7 +299,7 @@ def _open(
299299
"uid": self.fs.ukey(path),
300300
}
301301
self._metadata.update_file(path, detail)
302-
logger.debug("Creating local sparse file for %s" % path)
302+
logger.debug(f"Creating local sparse file for {path}")
303303

304304
# call target filesystems open
305305
self._mkcache()
@@ -322,9 +322,9 @@ def _open(
322322
if "blocksize" in detail:
323323
if detail["blocksize"] != f.blocksize:
324324
raise BlocksizeMismatchError(
325-
"Cached file must be reopened with same block"
326-
"size as original (old: %i, new %i)"
327-
"" % (detail["blocksize"], f.blocksize)
325+
f"Cached file must be reopened with same block"
326+
f" size as original (old: {detail['blocksize']},"
327+
f" new {f.blocksize})"
328328
)
329329
else:
330330
detail["blocksize"] = f.blocksize
@@ -547,7 +547,7 @@ def _make_local_details(self, path):
547547
"uid": self.fs.ukey(path),
548548
}
549549
self._metadata.update_file(path, detail)
550-
logger.debug("Copying %s to local cache" % path)
550+
logger.debug(f"Copying {path} to local cache")
551551
return fn
552552

553553
def cat(
@@ -604,7 +604,7 @@ def _open(self, path, mode="rb", **kwargs):
604604
detail, fn = detail
605605
_, blocks = detail["fn"], detail["blocks"]
606606
if blocks is True:
607-
logger.debug("Opening local copy of %s" % path)
607+
logger.debug(f"Opening local copy of {path}")
608608

609609
# In order to support downstream filesystems to be able to
610610
# infer the compression from the original filename, like
@@ -616,8 +616,8 @@ def _open(self, path, mode="rb", **kwargs):
616616
return f
617617
else:
618618
raise ValueError(
619-
"Attempt to open partially cached file %s"
620-
"as a wholly cached file" % path
619+
f"Attempt to open partially cached file {path}"
620+
f" as a wholly cached file"
621621
)
622622
else:
623623
fn = self._make_local_details(path)
@@ -700,7 +700,7 @@ def _open(self, path, mode="rb", **kwargs):
700700

701701
sha = self._mapper(path)
702702
fn = os.path.join(self.storage[-1], sha)
703-
logger.debug("Copying %s to local cache" % path)
703+
logger.debug(f"Copying {path} to local cache")
704704
kwargs["mode"] = mode
705705

706706
self._mkcache()

fsspec/implementations/ftp.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ def cb(x):
156156
outfile.write(x)
157157

158158
self.ftp.retrbinary(
159-
"RETR %s" % rpath,
159+
f"RETR {rpath}",
160160
blocksize=self.blocksize,
161161
callback=cb,
162162
)
@@ -172,7 +172,7 @@ def cb(x):
172172
out.append(x)
173173

174174
self.ftp.retrbinary(
175-
"RETR %s" % path,
175+
f"RETR {path}",
176176
blocksize=self.blocksize,
177177
rest=start,
178178
callback=cb,
@@ -321,7 +321,7 @@ def callback(x):
321321

322322
try:
323323
self.fs.ftp.retrbinary(
324-
"RETR %s" % self.path,
324+
f"RETR {self.path}",
325325
blocksize=self.blocksize,
326326
rest=start,
327327
callback=callback,
@@ -339,7 +339,7 @@ def callback(x):
339339
def _upload_chunk(self, final=False):
340340
self.buffer.seek(0)
341341
self.fs.ftp.storbinary(
342-
"STOR " + self.path, self.buffer, blocksize=self.blocksize, rest=self.offset
342+
f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset
343343
)
344344
return True
345345

fsspec/implementations/git.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
8181
"type": "directory",
8282
"name": "/".join([path, obj.name]).lstrip("/"),
8383
"hex": obj.hex,
84-
"mode": "%o" % obj.filemode,
84+
"mode": f"{obj.filemode:o}",
8585
"size": 0,
8686
}
8787
)
@@ -91,7 +91,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
9191
"type": "file",
9292
"name": "/".join([path, obj.name]).lstrip("/"),
9393
"hex": obj.hex,
94-
"mode": "%o" % obj.filemode,
94+
"mode": f"{obj.filemode:o}",
9595
"size": obj.size,
9696
}
9797
)
@@ -102,7 +102,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
102102
"type": "file",
103103
"name": obj.name,
104104
"hex": obj.hex,
105-
"mode": "%o" % obj.filemode,
105+
"mode": f"{obj.filemode:o}",
106106
"size": obj.size,
107107
}
108108
]

fsspec/implementations/github.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -79,9 +79,7 @@ def repos(cls, org_or_user, is_org=True):
7979
List of string
8080
"""
8181
r = requests.get(
82-
"https://api.github.com/{part}/{org}/repos".format(
83-
part=["users", "orgs"][is_org], org=org_or_user
84-
)
82+
f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
8583
)
8684
r.raise_for_status()
8785
return [repo["name"] for repo in r.json()]
@@ -90,8 +88,7 @@ def repos(cls, org_or_user, is_org=True):
9088
def tags(self):
9189
"""Names of tags in the repo"""
9290
r = requests.get(
93-
"https://api.github.com/repos/{org}/{repo}/tags"
94-
"".format(org=self.org, repo=self.repo),
91+
f"https://api.github.com/repos/{self.org}/{self.repo}/tags",
9592
**self.kw,
9693
)
9794
r.raise_for_status()
@@ -101,8 +98,7 @@ def tags(self):
10198
def branches(self):
10299
"""Names of branches in the repo"""
103100
r = requests.get(
104-
"https://api.github.com/repos/{org}/{repo}/branches"
105-
"".format(org=self.org, repo=self.repo),
101+
f"https://api.github.com/repos/{self.org}/{self.repo}/branches",
106102
**self.kw,
107103
)
108104
r.raise_for_status()

fsspec/implementations/http.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -655,7 +655,7 @@ async def async_fetch_range(self, start, end):
655655
logger.debug(f"Fetch range for {self}: {start}-{end}")
656656
kwargs = self.kwargs.copy()
657657
headers = kwargs.pop("headers", {}).copy()
658-
headers["Range"] = "bytes=%i-%i" % (start, end - 1)
658+
headers["Range"] = f"bytes={start}-{end - 1}"
659659
logger.debug(str(self.url) + " : " + headers["Range"])
660660
r = await self.session.get(
661661
self.fs.encode_url(self.url), headers=headers, **kwargs
@@ -812,7 +812,7 @@ async def get_range(session, url, start, end, file=None, **kwargs):
812812
# explicit get a range when we know it must be safe
813813
kwargs = kwargs.copy()
814814
headers = kwargs.pop("headers", {}).copy()
815-
headers["Range"] = "bytes=%i-%i" % (start, end - 1)
815+
headers["Range"] = f"bytes={start}-{end - 1}"
816816
r = await session.get(url, headers=headers, **kwargs)
817817
r.raise_for_status()
818818
async with r:
@@ -831,7 +831,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
831831
Default operation is to explicitly allow redirects and use encoding
832832
'identity' (no compression) to get the true size of the target.
833833
"""
834-
logger.debug("Retrieve file size for %s" % url)
834+
logger.debug(f"Retrieve file size for {url}")
835835
kwargs = kwargs.copy()
836836
ar = kwargs.pop("allow_redirects", True)
837837
head = kwargs.get("headers", {}).copy()
@@ -844,7 +844,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
844844
elif size_policy == "get":
845845
r = await session.get(url, allow_redirects=ar, **kwargs)
846846
else:
847-
raise TypeError('size_policy must be "head" or "get", got %s' "" % size_policy)
847+
raise TypeError(f'size_policy must be "head" or "get", got {size_policy}')
848848
async with r:
849849
r.raise_for_status()
850850

fsspec/implementations/libarchive.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,8 +122,7 @@ def __init__(
122122
files = open_files(fo, protocol=target_protocol, **(target_options or {}))
123123
if len(files) != 1:
124124
raise ValueError(
125-
'Path "{}" did not resolve to exactly'
126-
'one file: "{}"'.format(fo, files)
125+
f'Path "{fo}" did not resolve to exactly one file: "{files}"'
127126
)
128127
fo = files[0]
129128
self.of = fo

0 commit comments

Comments
 (0)