Review comments on black linkcheck/

This commit is contained in:
Chris Mayo 2020-06-01 16:07:21 +01:00
parent 8ce980ffb3
commit b974ec3262
4 changed files with 8 additions and 17 deletions

View file

@@ -138,9 +138,8 @@ class UrlQueue:
self.cleanup()
self.queue.append(url_data)
self.unfinished_tasks += 1
cache.add_result(
key, None
) # add none value to cache to prevent checking this url multiple times
# add none value to cache to prevent checking this url multiple times
cache.add_result(key, None)
def cleanup(self):
"""Move cached elements to top."""

View file

@@ -436,9 +436,8 @@ class UrlBase:
if urlparts[2]:
urlparts[2] = urlutil.collapse_segments(urlparts[2])
if not urlparts[0].startswith("feed"):
urlparts[2] = url_fix_wayback_query(
urlparts[2]
) # restore second / in http[s]:// in wayback path
# restore second / in http[s]:// in wayback path
urlparts[2] = url_fix_wayback_query(urlparts[2])
self.url = urlutil.urlunsplit(urlparts)
# split into (modifiable) list
self.urlparts = strformat.url_unicode_split(self.url)

View file

@@ -63,9 +63,7 @@ class StatusLogger:
def internal_error(out=stderr, etype=None, evalue=None, tb=None):
"""Print internal error message (output defaults to stderr)."""
print(os.linesep, file=out)
print(
_(
"""********** Oops, I did it again. *************
print(_("""********** Oops, I did it again. *************
You have found an internal error in LinkChecker. Please write a bug report
at %s
@@ -80,11 +78,7 @@ When using the commandline client:
Not disclosing some of the information above due to privacy reasons is ok.
I will try to help you nonetheless, but you have to give me something
I can work with ;) .
"""
)
% configuration.SupportUrl,
file=out,
)
""") % configuration.SupportUrl, file=out)
if etype is None:
etype = sys.exc_info()[0]
if evalue is None:

View file

@@ -336,9 +336,8 @@ def url_norm(url, encoding):
urlparts[1] = urllib.parse.quote(urlparts[1], safe='@:') # host
urlparts[2] = urllib.parse.quote(urlparts[2], safe=_nopathquote_chars) # path
if not urlparts[0].startswith("feed"):
urlparts[2] = url_fix_wayback_query(
urlparts[2]
) # unencode colon in http[s]:// in wayback path
# unencode colon in http[s]:// in wayback path
urlparts[2] = url_fix_wayback_query(urlparts[2])
urlparts[4] = urllib.parse.quote(urlparts[4], safe="!$&'()*+,-./;=?@_~") # anchor
res = urlunsplit(urlparts)
if url.endswith('#') and not urlparts[4]: