Run pyupgrade --py37-plus x 2

This commit is contained in:
Chris Mayo 2022-11-08 19:21:29 +00:00
parent fd6c960ace
commit b6bc366af0
28 changed files with 47 additions and 47 deletions

View file

@@ -232,7 +232,7 @@ def better_exchook(etype, value, tb, out=sys.stdout):
if value is None or not valuestr:
line = "%s" % etype
else:
line = "%s: %s" % (etype, valuestr)
line = f"{etype}: {valuestr}"
return line
if (isinstance(etype, BaseException) or
(hasattr(types, "InstanceType") and isinstance(etype, types.InstanceType)) or

View file

@@ -197,7 +197,7 @@ def get_index_html(urls):
except KeyError:
# Some unicode entries raise KeyError.
url = name
lines.append('<a href="%s">%s</a>' % (url, name))
lines.append(f'<a href="{url}">{name}</a>')
lines.extend(["</body>", "</html>"])
return os.linesep.join(lines)

View file

@@ -370,7 +370,7 @@ class MailtoUrl(urlbase.UrlBase):
The cache url is a comma separated list of emails.
"""
emails = ",".join(sorted(self.addresses))
self.cache_url = "%s:%s" % (self.scheme, emails)
self.cache_url = f"{self.scheme}:{emails}"
def can_get_content(self):
"""

View file

@@ -423,7 +423,7 @@ ignored_schemes_other = r"""
|whatsapp # WhatsApp
"""
ignored_schemes = "^(%s%s%s%s)$" % (
ignored_schemes = "^({}{}{}{})$".format(
ignored_schemes_permanent,
ignored_schemes_provisional,
ignored_schemes_historical,

View file

@@ -508,7 +508,7 @@ class UrlBase:
else:
host = "%s:%d" % (self.host, self.port)
if self.userinfo:
urlparts[1] = "%s@%s" % (self.userinfo, host)
urlparts[1] = f"{self.userinfo}@{host}"
else:
urlparts[1] = host
# save anchor for later checking

View file

@@ -180,7 +180,7 @@ file entry:
)
+ "\n".join(
[
" o %s - %s" % (tag, desc)
f" o {tag} - {desc}"
for tag, desc in sorted(checker.const.Warnings.items())
]
)

View file

@@ -46,7 +46,7 @@ HtmlCopyright = (
HtmlAppInfo = App + ", " + HtmlCopyright
Url = _release.__url__
SupportUrl = _release.__support_url__
UserAgent = "Mozilla/5.0 (compatible; %s/%s; +%s)" % (AppName, Version, Url)
UserAgent = f"Mozilla/5.0 (compatible; {AppName}/{Version}; +{Url})"
Freeware = (
AppName
+ """ comes with ABSOLUTELY NO WARRANTY!
@@ -87,7 +87,7 @@ def get_modules_info():
if version_attr and hasattr(mod, version_attr):
attr = getattr(mod, version_attr)
version = attr() if callable(attr) else attr
module_infos.append("%s %s" % (name, version))
module_infos.append(f"{name} {version}")
else:
# ignore attribute errors in case library developers
# change the version information attribute

View file

@@ -227,7 +227,7 @@ class LCConfigParser(RawConfigParser):
self.config.add_auth(
pattern=auth[0], user=auth[1], password=auth[2]
)
password_fields.append("entry/%s/%s" % (auth[0], auth[1]))
password_fields.append(f"entry/{auth[0]}/{auth[1]}")
elif len(auth) == 2:
self.config.add_auth(pattern=auth[0], user=auth[1])
else:

View file

@@ -53,7 +53,7 @@ class StatusLogger:
def writeln(self, msg):
"""Write status message and line break to file descriptor."""
self.fd.write("%s%s" % (msg, os.linesep))
self.fd.write(f"{msg}{os.linesep}")
def flush(self):
"""Flush file descriptor."""

View file

@@ -34,7 +34,7 @@ class Form:
def __repr__(self):
"""Return string displaying URL and form data."""
return "<url=%s data=%s>" % (self.url, self.data)
return f"<url={self.url} data={self.data}>"
def search_form(content, cgiuser, cgipassword):

View file

@@ -34,7 +34,7 @@ def get_package_modules(packagename, packagepath):
for mod in pkgutil.iter_modules(packagepath):
if not mod.ispkg:
try:
name = "..%s.%s" % (packagename, mod.name)
name = f"..{packagename}.{mod.name}"
yield importlib.import_module(name, __name__)
except ImportError as msg:
print(_("WARN: could not load module %s: %s") % (mod.name, msg))

View file

@@ -325,7 +325,7 @@ class _Logger(abc.ABC):
"""
Write string to output descriptor plus a newline.
"""
self.write("%s%s" % (s, os.linesep), **args)
self.write(f"{s}{os.linesep}", **args)
def has_part(self, name):
"""

View file

@@ -70,7 +70,7 @@ class DOTLogger(_GraphLogger):
"""Write edge from parent to node."""
source = dotquote(self.nodes[node["parent_url"]]["label"])
target = dotquote(node["label"])
self.writeln(' "%s" -> "%s" [' % (source, target))
self.writeln(f' "{source}" -> "{target}" [')
self.writeln(' label="%s",' % dotquote(node["edge"]))
if self.has_part("result"):
self.writeln(" valid=%d," % node["valid"])

View file

@@ -113,4 +113,4 @@ class _XMLLogger(_Logger):
for aname, avalue in attrs.items():
args = (xmlquote(aname), xmlquoteattr(avalue))
self.write(' %s="%s"' % args)
self.writeln(">%s</%s>" % (xmlquote(content), xmlquote(name)))
self.writeln(f">{xmlquote(content)}</{xmlquote(name)}>")

View file

@@ -66,7 +66,7 @@ class UrlAnchorCheck:
else:
anchors = "-"
args = {"name": url_data.anchor, "decoded": decoded_anchor, "anchors": anchors}
msg = "%s %s" % (
msg = "{} {}".format(
_("Anchor `%(name)s' (decoded: `%(decoded)s') not found.") % args,
_("Available anchors: %(anchors)s.") % args,
)

View file

@@ -39,7 +39,7 @@ class HttpHeaderInfo(_ConnectionPlugin):
headers.append(name.lower())
if headers:
items = [
"%s=%s" % (name.capitalize(), url_data.headers[name])
f"{name.capitalize()}={url_data.headers[name]}"
for name in headers
]
info = "HTTP headers %s" % ", ".join(items)

View file

@@ -73,10 +73,10 @@ def unquote(s, matching=False):
return s
_para_mac = r"(?:%(sep)s)(?:(?:%(sep)s)\s*)+" % {'sep': '\r'}
_para_posix = r"(?:%(sep)s)(?:(?:%(sep)s)\s*)+" % {'sep': '\n'}
_para_win = r"(?:%(sep)s)(?:(?:%(sep)s)\s*)+" % {'sep': '\r\n'}
_para_ro = re.compile("%s|%s|%s" % (_para_mac, _para_posix, _para_win))
_para_mac = r"(?:{sep})(?:(?:{sep})\s*)+".format(sep='\r')
_para_posix = r"(?:{sep})(?:(?:{sep})\s*)+".format(sep='\n')
_para_win = r"(?:{sep})(?:(?:{sep})\s*)+".format(sep='\r\n')
_para_ro = re.compile(f"{_para_mac}|{_para_posix}|{_para_win}")
def get_paragraphs(text):
@@ -104,7 +104,7 @@ def wrap(text, width, **kwargs):
def indent(text, indent_string=" "):
"""Indent each line of text with the given indent string."""
return os.linesep.join("%s%s" % (indent_string, x) for x in text.splitlines())
return os.linesep.join(f"{indent_string}{x}" for x in text.splitlines())
def paginate(text):
@@ -191,7 +191,7 @@ def strduration_long(duration, do_translate=True):
time_str.reverse()
if len(time_str) > 2:
time_str.pop()
return "%s%s" % (prefix, ", ".join(time_str))
return "{}{}".format(prefix, ", ".join(time_str))
def strtimezone():

View file

@@ -61,10 +61,10 @@ _safe_path_pattern = (
r"(%%[%(_hex_safe)s][%(_hex_full)s]))+)*/?)" % _basic
)
_safe_fragment_pattern = r"%s*" % _safe_char
_safe_cgi = r"%s+(=(%s|/)+)?" % (_safe_char, _safe_char)
_safe_query_pattern = r"(%s(&%s)*)?" % (_safe_cgi, _safe_cgi)
_safe_param_pattern = r"(%s(;%s)*)?" % (_safe_cgi, _safe_cgi)
safe_url_pattern = r"%s://%s%s(#%s)?" % (
_safe_cgi = fr"{_safe_char}+(=({_safe_char}|/)+)?"
_safe_query_pattern = fr"({_safe_cgi}(&{_safe_cgi})*)?"
_safe_param_pattern = fr"({_safe_cgi}(;{_safe_cgi})*)?"
safe_url_pattern = r"{}://{}{}(#{})?".format(
_safe_scheme_pattern,
_safe_host_pattern,
_safe_path_pattern,
@@ -195,7 +195,7 @@ def url_fix_host(urlparts, encoding):
if not urlparts[2] or urlparts[2] == '/':
urlparts[2] = comps
else:
urlparts[2] = "%s%s" % (
urlparts[2] = "{}{}".format(
comps,
urllib.parse.unquote(urlparts[2], encoding=encoding),
)
@@ -255,12 +255,12 @@ def url_parse_query(query, encoding):
k = urllib.parse.quote(k, safe='/-:,;')
if v:
v = urllib.parse.quote(v, safe='/-:,;')
f.append("%s=%s%s" % (k, v, sep))
f.append(f"{k}={v}{sep}")
elif v is None:
f.append("%s%s" % (k, sep))
f.append(f"{k}{sep}")
else:
# some sites do not work when the equal sign is missing
f.append("%s=%s" % (k, sep))
f.append(f"{k}={sep}")
return ''.join(f) + append
@@ -381,9 +381,9 @@ def url_quote(url, encoding):
k = urllib.parse.quote(k, safe='/-:,;')
if v:
v = urllib.parse.quote(v, safe='/-:,;')
f.append("%s=%s%s" % (k, v, sep))
f.append(f"{k}={v}{sep}")
else:
f.append("%s%s" % (k, sep))
f.append(f"{k}{sep}")
urlparts[3] = ''.join(f)
urlparts[4] = urllib.parse.quote(urlparts[4]) # anchor
return urlunsplit(urlparts)
@@ -397,7 +397,7 @@ def document_quote(document):
query = None
doc = urllib.parse.quote(doc, safe='/=,')
if query:
return "%s?%s" % (doc, query)
return f"{doc}?{query}"
return doc

View file

@@ -87,7 +87,7 @@ def main(args):
def get_regex(schemes):
expr = [
"|%s # %s" % (re.escape(scheme).ljust(10), description)
f"|{re.escape(scheme).ljust(10)} # {description}"
for scheme, description in sorted(schemes.items())
]
return "\n".join(expr)

View file

@@ -132,7 +132,7 @@ class NoQueryHttpRequestHandler(StoppableHttpRequestHandler):
list = ["example1.txt", "example2.html", "example3"]
for name in list:
displayname = linkname = name
list_item = '<li><a href="%s">%s</a>\n' % (
list_item = '<li><a href="{}">{}</a>\n'.format(
urllib.parse.quote(linkname),
html.escape(displayname),
)
@@ -247,7 +247,7 @@ def get_cookie(maxage=2000):
("Version", "1"),
("Foo", "Bar"),
)
return "; ".join('%s="%s"' % (key, value) for key, value in data)
return "; ".join(f'{key}="{value}"' for key, value in data)
class CookieRedirectHttpRequestHandler(NoQueryHttpRequestHandler):

View file

@@ -40,9 +40,9 @@ class TelnetServerTest(LinkCheckTest):
def get_url(self, user=None, password=None):
if user is not None:
if password is not None:
netloc = "%s:%s@%s" % (user, password, self.host)
netloc = f"{user}:{password}@{self.host}"
else:
netloc = "%s@%s" % (user, self.host)
netloc = f"{user}@{self.host}"
else:
netloc = self.host
return "telnet://%s:%d" % (netloc, self.port)

View file

@@ -54,7 +54,7 @@ class TestHttpbin(LinkCheckTest):
def test_basic_auth(self):
user = "testuser"
password = "testpassword"
url = get_httpbin_url("/basic-auth/%s/%s" % (user, password))
url = get_httpbin_url(f"/basic-auth/{user}/{password}")
nurl = self.norm(url)
entry = dict(user=user, password=password, pattern=re.compile(r".*"))
confargs = dict(authentication=[entry])

View file

@@ -64,7 +64,7 @@ class TestConfig(TestBase):
patterns = [x["pattern"].pattern for x in config["externlinks"]]
for prefix in ("ignore_", "nofollow_"):
for suffix in ("1", "2"):
key = "%simadoofus%s" % (prefix, suffix)
key = f"{prefix}imadoofus{suffix}"
self.assertTrue(key in patterns)
for key in ("url-unicode-domain",):
self.assertTrue(key in config["ignorewarnings"])

View file

@@ -37,9 +37,9 @@ def pretty_print_html(fd, soup):
if val is None:
fd.write(" %s" % key)
else:
fd.write(' %s="%s"' % (key, quote_attrval(val)))
fd.write(f' {key}="{quote_attrval(val)}"')
if element_text:
fd.write(">%s</%s>" % (element_text, tag))
fd.write(f">{element_text}</{tag}>")
else:
fd.write("/>")

View file

@@ -125,5 +125,5 @@ class TestFtpparse(unittest.TestCase):
for line, expected in patterns:
res = ftpparse(line)
self.assertEqual(
expected, res, "got %r\nexpected %r\n%r" % (res, expected, line)
expected, res, f"got {res!r}\nexpected {expected!r}\n{line!r}"
)

View file

@@ -165,7 +165,7 @@ class TestParser(unittest.TestCase):
Check parse results.
"""
res = out.getvalue()
msg = "Test error; in: %r, out: %r, expect: %r" % (_in, res, _out)
msg = f"Test error; in: {_in!r}, out: {res!r}, expect: {_out!r}"
self.assertEqual(res, _out, msg=msg)
def test_encoding_detection_utf_content(self):

View file

@@ -70,5 +70,5 @@ class TestGTranslator(unittest.TestCase):
continue
self.assertFalse(
"·" in line,
"Broken GTranslator copy/paste in %r:\n%s" % (f, line),
f"Broken GTranslator copy/paste in {f!r}:\n{line}",
)

View file

@@ -40,7 +40,7 @@ class TestRobotParser(unittest.TestCase):
else:
ac = "access allowed"
if a != b:
self.fail("%s != %s (%s)" % (a, b, ac))
self.fail(f"{a} != {b} ({ac})")
@need_network
def test_nonexisting_robots(self):