Remove spaces after names in function definitions

This is a PEP 8 convention, E211.
This commit is contained in:
Chris Mayo 2020-05-16 20:19:42 +01:00
parent fc11d08968
commit 1663e10fe7
63 changed files with 270 additions and 270 deletions

View file

@@ -23,7 +23,7 @@ import sys
from xml.etree.ElementTree import parse
def main (args):
def main(args):
filename = args[0]
with open(filename) as fd:
tree = parse(fd)

View file

@@ -48,12 +48,12 @@ from .logconf import (
import _LinkChecker_configdata as configdata
def module_path ():
def module_path():
"""Return absolute directory of system executable."""
return os.path.dirname(os.path.abspath(sys.executable))
def get_install_data ():
def get_install_data():
"""Return absolute path of LinkChecker data installation directory."""
from .loader import is_frozen
if is_frozen():
@@ -70,7 +70,7 @@ class LinkCheckerInterrupt(Exception):
pass
def get_link_pat (arg, strict=False):
def get_link_pat(arg, strict=False):
"""Get a link pattern matcher for intern/extern links.
Returns a compiled pattern and a negate and strict option.
@@ -101,7 +101,7 @@ def get_link_pat (arg, strict=False):
}
def init_i18n (loc=None):
def init_i18n(loc=None):
"""Initialize i18n with the configured locale dir. The environment
variable LOCPATH can also specify a locale dir.
@@ -127,7 +127,7 @@ def init_i18n (loc=None):
init_i18n()
def drop_privileges ():
def drop_privileges():
"""Make sure to drop root privileges on POSIX systems."""
if os.name != 'posix':
return

View file

@@ -163,7 +163,7 @@ if os.name == 'nt':
Beep = "\007"
def esc_ansicolor (color):
def esc_ansicolor(color):
"""convert a named color definition to an escaped ANSI color"""
control = ''
if ";" in color:
@@ -191,7 +191,7 @@ def get_win_color(color):
return foreground, background, style
def has_colors (fp):
def has_colors(fp):
"""Test if given file is an ANSI color enabled tty."""
# The is_tty() function ensures that we do not colorize
# redirected streams, as this is almost never what we want
@@ -210,7 +210,7 @@ def has_colors (fp):
return False
def get_columns (fp):
def get_columns(fp):
"""Return number of columns for given file."""
if not is_tty(fp):
return 80
@@ -226,7 +226,7 @@ def get_columns (fp):
return 80
def _write_color_colorama (fp, text, color):
def _write_color_colorama(fp, text, color):
"""Colorize text with given color."""
foreground, background, style = get_win_color(color)
colorama.set_console(foreground=foreground, background=background,
@@ -235,7 +235,7 @@ def _write_color_colorama (fp, text, color):
colorama.reset_console()
def _write_color_ansi (fp, text, color):
def _write_color_ansi(fp, text, color):
"""Colorize text with given color."""
fp.write(esc_ansicolor(color))
fp.write(text)

View file

@ -22,7 +22,7 @@ from xdg import xdg_config_home
nt_filename_encoding="mbcs"
def get_profile_dir ():
def get_profile_dir():
"""Return path where all profiles of current user are stored."""
if os.name == 'nt':
if "LOCALAPPDATA" in os.environ:
@ -46,7 +46,7 @@ def get_profile_dir ():
return dirpath
def find_bookmark_file (profile="Default"):
def find_bookmark_file(profile="Default"):
"""Return the bookmark file of the Default profile.
Returns absolute filename if found, or empty string if no bookmark file
could be found.

View file

@ -24,7 +24,7 @@ from xdg.BaseDirectory import xdg_config_home
nt_filename_encoding="mbcs"
def get_profile_dir ():
def get_profile_dir():
"""Return path where all profiles of current user are stored."""
if os.name == 'nt':
if "LOCALAPPDATA" in os.environ:
@ -48,7 +48,7 @@ def get_profile_dir ():
return dirpath
def find_bookmark_file (profile="Default"):
def find_bookmark_file(profile="Default"):
"""Return the bookmark file of the Default profile.
Returns absolute filename if found, or empty string if no bookmark file
could be found.
@ -64,7 +64,7 @@ def find_bookmark_file (profile="Default"):
return ""
def parse_bookmark_data (data):
def parse_bookmark_data(data):
"""Parse data string.
Return iterator for bookmarks of the form (url, name).
Bookmarks are not sorted.
@ -73,7 +73,7 @@ def parse_bookmark_data (data):
yield url, name
def parse_bookmark_file (file):
def parse_bookmark_file(file):
"""Parse file object.
Return iterator for bookmarks of the form (url, name).
Bookmarks are not sorted.
@ -82,14 +82,14 @@ def parse_bookmark_file (file):
yield url, name
def parse_bookmark_json (data):
def parse_bookmark_json(data):
"""Parse complete JSON data for Chromium Bookmarks."""
for entry in data["roots"].values():
for url, name in parse_bookmark_node(entry):
yield url, name
def parse_bookmark_node (node):
def parse_bookmark_node(node):
"""Parse one JSON node of Chromium Bookmarks."""
if node["type"] == "url":
yield node["url"], node["name"]

View file

@ -30,7 +30,7 @@ extension = re.compile(r'/places.sqlite$', re.IGNORECASE)
# Windows filename encoding
nt_filename_encoding="mbcs"
def get_profile_dir ():
def get_profile_dir():
"""Return path where all profiles of current user are stored."""
if os.name == 'nt':
basedir = unicode(os.environ["APPDATA"], nt_filename_encoding)
@ -40,7 +40,7 @@ def get_profile_dir ():
return dirpath
def find_bookmark_file (profile="*.default"):
def find_bookmark_file(profile="*.default"):
"""Return the first found places.sqlite file of the profile directories
ending with '.default' (or another given profile name).
Returns absolute filename if found, or empty string if no bookmark file
@ -57,7 +57,7 @@ def find_bookmark_file (profile="*.default"):
return ""
def parse_bookmark_file (filename):
def parse_bookmark_file(filename):
"""Return iterator for bookmarks of the form (url, name).
Bookmarks are not sorted.
Returns None if sqlite3 module is not installed.

View file

@ -25,7 +25,7 @@ OperaBookmarkFiles = (
)
def get_profile_dir ():
def get_profile_dir():
"""Return path where all profiles of current user are stored."""
if os.name == 'nt':
basedir = unicode(os.environ["APPDATA"], nt_filename_encoding)
@ -35,7 +35,7 @@ def get_profile_dir ():
return dirpath
def find_bookmark_file ():
def find_bookmark_file():
"""Return the bookmark file of the Opera profile.
Returns absolute filename if found, or empty string if no bookmark file
could be found.
@ -52,7 +52,7 @@ def find_bookmark_file ():
return ""
def parse_bookmark_data (data):
def parse_bookmark_data(data):
"""Return iterator for bookmarks of the form (url, name, line number).
Bookmarks are not sorted.
"""

View file

@ -24,12 +24,12 @@ except ImportError:
has_biplist = False
def get_profile_dir ():
def get_profile_dir():
"""Return path where all profiles of current user are stored."""
return os.path.join(os.environ["HOME"], "Library", "Safari")
def find_bookmark_file ():
def find_bookmark_file():
"""Return the bookmark file of the Default profile.
Returns absolute filename if found, or empty string if no bookmark file
could be found.
@ -47,21 +47,21 @@ def find_bookmark_file ():
return ""
def parse_bookmark_file (filename):
def parse_bookmark_file(filename):
"""Return iterator for bookmarks of the form (url, name).
Bookmarks are not sorted.
"""
return parse_plist(get_plist_data_from_file(filename))
def parse_bookmark_data (data):
def parse_bookmark_data(data):
"""Return iterator for bookmarks of the form (url, name).
Bookmarks are not sorted.
"""
return parse_plist(get_plist_data_from_string(data))
def get_plist_data_from_file (filename):
def get_plist_data_from_file(filename):
"""Parse plist data for a file. Tries biplist, falling back to
plistlib."""
if has_biplist:
@ -74,7 +74,7 @@ def get_plist_data_from_file (filename):
return {}
def get_plist_data_from_string (data):
def get_plist_data_from_string(data):
"""Parse plist data for a string. Tries biplist, falling back to
plistlib."""
if has_biplist:
@ -105,11 +105,11 @@ def parse_plist(entry):
yield item
def is_leaf (entry):
def is_leaf(entry):
"""Return true if plist entry is an URL entry."""
return entry.get(KEY_WEBBOOKMARKTYPE) == 'WebBookmarkTypeLeaf'
def has_children (entry):
def has_children(entry):
"""Return true if plist entry has children."""
return entry.get(KEY_WEBBOOKMARKTYPE) == 'WebBookmarkTypeList'

View file

@ -43,7 +43,7 @@ def guess_url(url):
return url
def absolute_url (base_url, base_ref, parent_url):
def absolute_url(base_url, base_ref, parent_url):
"""
Search for the absolute url to detect the link type. This does not
join any url fragments together!
@ -64,7 +64,7 @@ def absolute_url (base_url, base_ref, parent_url):
return ""
def get_url_from (base_url, recursion_level, aggregate,
def get_url_from(base_url, recursion_level, aggregate,
parent_url=None, base_ref=None, line=None, column=None,
page=0, name="", parent_content_type=None, extern=None, url_encoding=None):
"""
@ -125,7 +125,7 @@ def get_url_from (base_url, recursion_level, aggregate,
line=line, column=column, page=page, name=name, extern=extern, url_encoding=url_encoding)
def get_urlclass_from (scheme, assume_local_file=False):
def get_urlclass_from(scheme, assume_local_file=False):
"""Return checker class for given URL scheme. If the scheme
cannot be matched and assume_local_file is True, assume a local file.
"""
@ -154,7 +154,7 @@ def get_urlclass_from (scheme, assume_local_file=False):
return klass
def get_index_html (urls):
def get_index_html(urls):
"""
Construct artificial index.html from given URLs.

View file

@ -30,7 +30,7 @@ from ..bookmarks import firefox
from .const import WARN_FILE_MISSING_SLASH, WARN_FILE_SYSTEM_PATH
def get_files (dirname):
def get_files(dirname):
"""Get iterator of entries in directory. Only allows regular files
and directories, no symlinks."""
for entry in os.listdir(dirname):
@ -43,7 +43,7 @@ def get_files (dirname):
yield entry+"/"
def prepare_urlpath_for_nt (path):
def prepare_urlpath_for_nt(path):
"""
URLs like 'file://server/path/' result in a path named '/server/path'.
However urllib.url2pathname expects '////server/path'.
@ -53,7 +53,7 @@ def prepare_urlpath_for_nt (path):
return path
def get_nt_filename (path):
def get_nt_filename(path):
"""Return case sensitive filename for NT path."""
unc, rest = os.path.splitunc(path)
head, tail = os.path.split(rest)
@ -66,7 +66,7 @@ def get_nt_filename (path):
return path
def get_os_filename (path):
def get_os_filename(path):
"""Return filesystem path for given URL path."""
if os.name == 'nt':
path = prepare_urlpath_for_nt(path)
@ -77,7 +77,7 @@ def get_os_filename (path):
return res
def is_absolute_path (path):
def is_absolute_path(path):
"""Check if given path is absolute. On Windows absolute paths start
with a drive letter. On all other systems absolute paths start with
a slash."""

View file

@ -21,7 +21,7 @@ from . import urlbase, absolute_url
from .. import strformat, url as urlutil
def get_intern_pattern (url):
def get_intern_pattern(url):
"""Return intern pattern for given URL. Redirections to the same
domain with or without "www." prepended are allowed."""
parts = strformat.url_unicode_split(url)

View file

@ -28,7 +28,7 @@ from ..network import iputil
from .const import WARN_MAIL_NO_MX_HOST
def getaddresses (addr):
def getaddresses(addr):
"""Return list of email addresses from given field value."""
parsed = [mail for name, mail in AddressList(addr).addresslist if mail]
if parsed:
@ -41,19 +41,19 @@ def getaddresses (addr):
return addresses
def is_quoted (addr):
def is_quoted(addr):
"""Return True iff mail address string is quoted."""
return addr.startswith('"') and addr.endswith('"')
def is_literal (domain):
def is_literal(domain):
"""Return True iff domain string is a literal."""
return domain.startswith('[') and domain.endswith(']')
_remove_quoted = re.compile(r'\\.').sub
_quotes = re.compile(r'["\\]')
def is_missing_quote (addr):
def is_missing_quote(addr):
"""Return True iff mail address is not correctly quoted."""
return _quotes.match(_remove_quoted("", addr[1:-1]))

View file

@ -45,7 +45,7 @@ unicode_safe = strformat.unicode_safe
# schemes that are invalid with an empty hostname
scheme_requires_host = ("ftp", "http", "telnet")
def urljoin (parent, url):
def urljoin(parent, url):
"""
If url is relative, join parent and url. Else leave url as-is.
@ -56,7 +56,7 @@ def urljoin (parent, url):
return urllib.parse.urljoin(parent, url)
def url_norm (url, encoding):
def url_norm(url, encoding):
"""Wrapper for url.url_norm() to convert UnicodeError in
LinkCheckerError."""
try:

View file

@ -53,7 +53,7 @@ def print_plugins(folders, exit_code=0):
sys.exit(exit_code)
def print_usage (msg, exit_code=2):
def print_usage(msg, exit_code=2):
"""Print a program msg text to stderr and exit."""
program = sys.argv[0]
print(_("Error: %(msg)s") % {"msg": msg}, file=console.stderr)
@ -61,7 +61,7 @@ def print_usage (msg, exit_code=2):
sys.exit(exit_code)
def aggregate_url (aggregate, url, err_exit_code=2):
def aggregate_url(aggregate, url, err_exit_code=2):
"""Append given commandline URL to input queue."""
get_url_from = checker.get_url_from
url = checker.guess_url(url)

View file

@ -49,7 +49,7 @@ under certain conditions. Look at the file `LICENSE' within this
distribution."""
Portable = configdata.portable
def normpath (path):
def normpath(path):
"""Norm given system path with all available norm or expand functions
in os.path."""
expanded = os.path.expanduser(os.path.expandvars(path))
@ -87,12 +87,12 @@ def get_modules_info():
return "Modules: %s" % (", ".join(module_infos))
def get_share_dir ():
def get_share_dir():
"""Return absolute path of LinkChecker example configuration."""
return os.path.join(get_install_data(), "share", "linkchecker")
def get_share_file (filename, devel_dir=None):
def get_share_file(filename, devel_dir=None):
"""Return a filename in the share directory.
@param devel_dir: directory to search when developing
@ptype devel_dir: string
@ -428,7 +428,7 @@ def get_user_config():
return userconf
def get_gconf_http_proxy ():
def get_gconf_http_proxy():
"""Return host:port for GConf HTTP proxy if found, else None."""
try:
import gconf
@ -449,7 +449,7 @@ def get_gconf_http_proxy ():
return None
def get_gconf_ftp_proxy ():
def get_gconf_ftp_proxy():
"""Return host:port for GConf FTP proxy if found, else None."""
try:
import gconf
@ -469,7 +469,7 @@ def get_gconf_ftp_proxy ():
return None
def get_kde_http_proxy ():
def get_kde_http_proxy():
"""Return host:port for KDE HTTP proxy if found, else None."""
config_dir = get_kde_config_dir()
if not config_dir:
@ -483,7 +483,7 @@ def get_kde_http_proxy ():
pass
def get_kde_ftp_proxy ():
def get_kde_ftp_proxy():
"""Return host:port for KDE HTTP proxy if found, else None."""
config_dir = get_kde_config_dir()
if not config_dir:
@ -527,7 +527,7 @@ def get_kde_ftp_proxy ():
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def get_kde_config_dir ():
def get_kde_config_dir():
"""Return KDE configuration directory or None if not found."""
kde_home = get_kde_home_dir()
if not kde_home:
@ -536,12 +536,12 @@ def get_kde_config_dir ():
return kde_home_to_config(kde_home)
def kde_home_to_config (kde_home):
def kde_home_to_config(kde_home):
"""Add subdirectories for config path to KDE home directory."""
return os.path.join(kde_home, "share", "config")
def get_kde_home_dir ():
def get_kde_home_dir():
"""Return KDE home directory or None if not found."""
if os.environ.get("KDEHOME"):
kde_home = os.path.abspath(os.environ["KDEHOME"])
@ -572,7 +572,7 @@ def get_kde_home_dir ():
loc_ro = re.compile(r"\[.*\]$")
@lru_cache(1)
def read_kioslaverc (kde_config_dir):
def read_kioslaverc(kde_config_dir):
"""Read kioslaverc into data dictionary."""
data = {}
filename = os.path.join(kde_config_dir, "kioslaverc")
@ -600,14 +600,14 @@ def read_kioslaverc (kde_config_dir):
return data
def add_kde_proxy (key, value, data):
def add_kde_proxy(key, value, data):
"""Add a proxy value to data dictionary after sanity checks."""
if not value or value[:3] == "//:":
return
data[key] = value
def add_kde_setting (key, value, data):
def add_kde_setting(key, value, data):
"""Add a KDE proxy setting value to data dictionary."""
if key == "ProxyType":
mode = None
@ -641,12 +641,12 @@ def add_kde_setting (key, value, data):
# XXX todo
def split_hosts (value):
def split_hosts(value):
"""Split comma-separated host list."""
return [host for host in value.split(", ") if host]
def resolve_indirect (data, key, splithosts=False):
def resolve_indirect(data, key, splithosts=False):
"""Replace name of environment variable with its value."""
value = data[key]
env_value = os.environ.get(value)
@ -659,7 +659,7 @@ def resolve_indirect (data, key, splithosts=False):
del data[key]
def resolve_kde_settings (data):
def resolve_kde_settings(data):
"""Write final proxy configuration values in data dictionary."""
if "mode" not in data:
return

View file

@ -21,7 +21,7 @@ import os
from .. import LinkCheckerError, get_link_pat, LOG_CHECK, log, fileutil, plugins, logconf
def read_multiline (value):
def read_multiline(value):
"""Helper function reading multiline values."""
for line in value.splitlines():
line = line.strip()

View file

@ -22,7 +22,7 @@ import email
import requests
def from_file (filename):
def from_file(filename):
"""Parse cookie data from a text file in HTTP header format.
@return: list of tuples (headers, scheme, host, path)
@ -43,7 +43,7 @@ def from_file (filename):
return entries
def from_headers (strheader):
def from_headers(strheader):
"""Parse cookie data from a string in HTTP header (RFC 2616) format.
@return: list of cookies

View file

@ -19,17 +19,17 @@ Simple decorators (usable in Python >= 2.4).
Example:
@synchronized(thread.allocate_lock())
def f ():
def f():
"Synchronized function"
print("i am synchronized:", f, f.__doc__)
@deprecated
def g ():
def g():
"this function is deprecated"
pass
@notimplemented
def h ():
def h():
"todo"
pass
@ -41,7 +41,7 @@ import sys
import time
def update_func_meta (fake_func, real_func):
def update_func_meta(fake_func, real_func):
"""Set meta information (eg. __doc__) of fake function to that
of the real function.
@return fake_func
@ -53,7 +53,7 @@ def update_func_meta (fake_func, real_func):
return fake_func
def deprecated (func):
def deprecated(func):
"""A decorator which can be used to mark functions as deprecated.
It emits a warning when the function is called."""
def newfunc (*args, **kwargs):
@ -64,7 +64,7 @@ def deprecated (func):
return update_func_meta(newfunc, func)
def signal_handler (signal_number):
def signal_handler(signal_number):
"""From http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/410666
A decorator to set the specified function as handler for a signal.
@ -86,7 +86,7 @@ def signal_handler (signal_number):
return newfunc
def synchronize (lock, func, log_duration_secs=0):
def synchronize(lock, func, log_duration_secs=0):
"""Return synchronized function acquiring the given lock."""
def newfunc (*args, **kwargs):
"""Execute function synchronized."""
@ -99,12 +99,12 @@ def synchronize (lock, func, log_duration_secs=0):
return update_func_meta(newfunc, func)
def synchronized (lock):
def synchronized(lock):
"""A decorator calling a function with aqcuired lock."""
return lambda func: synchronize(lock, func)
def notimplemented (func):
def notimplemented(func):
"""Raises a NotImplementedError if the function is called."""
def newfunc (*args, **kwargs):
"""Raise NotImplementedError"""
@ -114,7 +114,7 @@ def notimplemented (func):
return update_func_meta(newfunc, func)
def timeit (func, log, limit):
def timeit(func, log, limit):
"""Print execution time of the function. For quick'n'dirty profiling."""
def newfunc (*args, **kwargs):
@ -130,7 +130,7 @@ def timeit (func, log, limit):
return update_func_meta(newfunc, func)
def timed (log=sys.stderr, limit=2.0):
def timed(log=sys.stderr, limit=2.0):
"""Decorator to run a function with timing info."""
return lambda func: timeit(func, log, limit)

View file

@ -24,7 +24,7 @@ from ..cache import urlqueue, robots_txt, results
from . import aggregator, console
def check_urls (aggregate):
def check_urls(aggregate):
"""Main check function; checks all configured URLs until interrupted
with Ctrl-C.
@return: None
@ -66,7 +66,7 @@ def check_urls (aggregate):
# and both should be handled by the calling layer.
def check_url (aggregate):
def check_url(aggregate):
"""Helper function waiting for URL queue."""
while True:
try:
@ -79,7 +79,7 @@ def check_url (aggregate):
break
def interrupt (aggregate):
def interrupt(aggregate):
"""Interrupt execution and shutdown, ignoring any subsequent
interrupts."""
while True:
@ -94,7 +94,7 @@ def interrupt (aggregate):
pass
def abort (aggregate):
def abort(aggregate):
"""Helper function to ensure a clean shutdown."""
while True:
try:
@ -108,7 +108,7 @@ def abort (aggregate):
abort_now()
def abort_now ():
def abort_now():
"""Force exit of current process without cleanup."""
if os.name == 'posix':
# Unix systems can use signals
@ -124,7 +124,7 @@ def abort_now ():
os._exit(3)
def get_aggregate (config):
def get_aggregate(config):
"""Get an aggregator instance with given configuration."""
_urlqueue = urlqueue.UrlQueue(max_allowed_urls=config["maxnumurls"])
_robots_txt = robots_txt.RobotsTxt(config["useragent"])

View file

@ -26,7 +26,7 @@ from .. import parser
QUEUE_POLL_INTERVALL_SECS = 1.0
def check_urls (urlqueue, logger):
def check_urls(urlqueue, logger):
"""Check URLs without threading."""
while not urlqueue.empty():
url_data = urlqueue.get()

View file

@ -61,7 +61,7 @@ class StatusLogger:
self.fd.flush()
def internal_error (out=stderr, etype=None, evalue=None, tb=None):
def internal_error(out=stderr, etype=None, evalue=None, tb=None):
"""Print internal error message (output defaults to stderr)."""
print(os.linesep, file=out)
print(_("""********** Oops, I did it again. *************
@ -94,20 +94,20 @@ I can work with ;) .
_("******** LinkChecker internal error, over and out ********"), file=out)
def print_env_info (key, out=stderr):
def print_env_info(key, out=stderr):
"""If given environment key is defined, print it out."""
value = os.getenv(key)
if value is not None:
print(key, "=", repr(value), file=out)
def print_proxy_info (out=stderr):
def print_proxy_info(out=stderr):
"""Print proxy info."""
for key in ("http_proxy", "ftp_proxy", "no_proxy"):
print_env_info(key, out=out)
def print_locale_info (out=stderr):
def print_locale_info(out=stderr):
"""Print locale info."""
for key in ("LANGUAGE", "LC_ALL", "LC_CTYPE", "LANG"):
print_env_info(key, out=out)
@ -131,7 +131,7 @@ PYTHON_ENV_VARS = (
'PYTHONWARNINGS',
'PYTHONHASHSEED',
)
def print_app_info (out=stderr):
def print_app_info(out=stderr):
"""Print system and application info (output defaults to stderr)."""
print(_("System info:"), file=out)
print(configuration.App, file=out)
@ -146,7 +146,7 @@ def print_app_info (out=stderr):
print(_("sys.argv:"), sys.argv, file=out)
def print_version (out=stdout):
def print_version(out=stdout):
"""Print the program version (output defaults to stdout)."""
print(configuration.App, _("released"),
configuration.ReleaseDate, file=out)

View file

@ -73,6 +73,6 @@ class Dummy:
return False
def dummy (*args, **kwargs):
def dummy(*args, **kwargs):
"""Ignore any positional or keyword arguments, return None."""
pass

View file

@ -28,7 +28,7 @@ from functools import lru_cache
from builtins import str as str_text
def has_module (name, without_error=True):
def has_module(name, without_error=True):
"""Test if given module can be imported.
@param without_error: True if module must not throw any errors when importing
@return: flag if import is successful
@ -109,7 +109,7 @@ class Buffer:
return data
def get_mtime (filename):
def get_mtime(filename):
"""Return modification time of filename or zero on errors."""
try:
return os.path.getmtime(filename)
@ -117,7 +117,7 @@ def get_mtime (filename):
return 0
def get_size (filename):
def get_size(filename):
"""Return file size in Bytes, or -1 on error."""
try:
return os.path.getsize(filename)
@ -135,7 +135,7 @@ elif "G_BROKEN_FILENAMES" in os.environ:
else:
FSCODING = "utf-8"
def path_safe (path):
def path_safe(path):
"""Ensure path string is compatible with the platform file system encoding."""
if isinstance(path, str_text) and not os.path.supports_unicode_filenames:
path = path.encode(FSCODING, "replace").decode(FSCODING)
@ -144,7 +144,7 @@ def path_safe (path):
# cache for modified check {absolute filename -> mtime}
_mtime_cache = {}
def has_changed (filename):
def has_changed(filename):
"""Check if filename has changed since the last check. If this
is the first check, assume the file is changed."""
key = os.path.abspath(filename)
@ -155,14 +155,14 @@ def has_changed (filename):
return mtime > _mtime_cache[key]
def get_temp_file (mode='r', **kwargs):
def get_temp_file(mode='r', **kwargs):
"""Return tuple (open file object, filename) pointing to a temporary
file."""
fd, filename = tempfile.mkstemp(**kwargs)
return os.fdopen(fd, mode), filename
def is_tty (fp):
def is_tty(fp):
"""Check if is a file object pointing to a TTY."""
return (hasattr(fp, "isatty") and fp.isatty())

View file

@ -21,12 +21,12 @@ See also http://cr.yp.to/ftpparse.html
months = ("jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep",
"oct", "nov", "dec")
def ismonth (txt):
def ismonth(txt):
"""Check if given text is a month name."""
return txt.lower() in months
def ftpparse (line):
def ftpparse(line):
"""Parse a FTP list line into a dictionary with attributes:
name - name of file (string)
trycwd - False if cwd is definitely pointless, True otherwise

View file

@ -91,13 +91,13 @@ swf_url_re = re.compile(b"(?i)%s" % urlutil.safe_url_pattern.encode('ascii'))
c_comment_re = re.compile(r"/\*.*?\*/", re.DOTALL)
def strip_c_comments (text):
def strip_c_comments(text):
"""Remove C/CSS-style comments from text. Note that this method also
deliberately removes comments inside of strings."""
return c_comment_re.sub('', text)
def is_meta_url (attr, attrs):
def is_meta_url(attr, attrs):
"""Check if the meta attributes contain a URL."""
res = False
if attr == "content":

View file

@ -17,7 +17,7 @@ import base64
from datetime import datetime
def encode_base64 (s):
def encode_base64(s):
"""Encode given string in base64, excluding trailing newlines."""
return base64.b64encode(s)
@ -68,7 +68,7 @@ def asn1_generaltime_to_seconds(timestr):
pass
return res
def has_header_value (headers, name, value):
def has_header_value(headers, name, value):
"""
Look in headers for a specific header name and value.
Both name and value are case insensitive.
@ -84,7 +84,7 @@ def has_header_value (headers, name, value):
return False
def get_content_type (headers):
def get_content_type(headers):
"""
Get the MIME type from the Content-Type header value, or
'application/octet-stream' if not found.

View file

@ -30,7 +30,7 @@ default_language = default_encoding = None
default_directory = None
default_domain = None
def install_builtin (translator, do_unicode):
def install_builtin(translator, do_unicode):
"""Install _() and _n() gettext methods into default namespace."""
import builtins
builtins.__dict__['_'] = translator.gettext
@ -55,7 +55,7 @@ class NullTranslator(gettext.NullTranslations):
install_builtin(self, do_unicode)
def init (domain, directory, loc=None):
def init(domain, directory, loc=None):
"""Initialize this gettext i18n module. Searches for supported languages
and installs the gettext translator class."""
global default_language, default_encoding, default_domain, default_directory
@ -90,7 +90,7 @@ def install_language(language):
translator.install(do_unicode)
def get_translator (domain, directory, languages=None,
def get_translator(domain, directory, languages=None,
translatorklass=Translator, fallback=False,
fallbackklass=NullTranslator):
"""Search the appropriate GNUTranslations class."""
@ -101,14 +101,14 @@ def get_translator (domain, directory, languages=None,
return translator
def get_lang (lang):
def get_lang(lang):
"""Return lang if it is supported, or the default language."""
if lang in supported_languages:
return lang
return default_language
def get_headers_lang (headers):
def get_headers_lang(headers):
"""Return preferred supported language in given HTTP headers."""
if 'Accept-Language' not in headers:
return default_language
@ -132,7 +132,7 @@ def get_headers_lang (headers):
return default_language
def get_locale ():
def get_locale():
"""Search the default platform locale and norm it.
@returns (locale, encoding)
@rtype (string, string)"""
@ -150,7 +150,7 @@ def get_locale ():
return (loc, encoding)
def norm_locale (loc):
def norm_locale(loc):
"""Normalize a locale."""
loc = locale.normalize(loc)
# split up the locale into its base components
@ -175,17 +175,17 @@ lang_transis = {
'en': {'de': 'Englisch'},
}
def lang_name (lang):
def lang_name(lang):
"""Return full name of given language."""
return lang_names[lang]
def lang_trans (lang, curlang):
def lang_trans(lang, curlang):
"""Return translated full name of given language."""
return lang_transis[lang][curlang]
def get_encoded_writer (out=sys.stdout, encoding=None, errors='replace'):
def get_encoded_writer(out=sys.stdout, encoding=None, errors='replace'):
"""Get wrapped output writer with given encoding and error handling."""
if encoding is None:
encoding = default_encoding

View file

@ -81,7 +81,7 @@ def get_response_headers():
]
def formvalue (form, key):
def formvalue(form, key):
"""Get value with given key from WSGI form."""
field = form.get(key)
if isinstance(field, list):
@ -126,7 +126,7 @@ def encode(s):
return s.encode(HTML_ENCODING, 'ignore')
def checklink (form=None, env=os.environ):
def checklink(form=None, env=os.environ):
"""Validates the CGI form and checks the given links."""
if form is None:
form = {}
@ -147,7 +147,7 @@ def checklink (form=None, env=os.environ):
out.close()
def start_check (aggregate, out):
def start_check(aggregate, out):
"""Start checking in background and write encoded output to out."""
# check in background
t = threading.Thread(target=director.check_urls, args=(aggregate,))
@ -183,12 +183,12 @@ def get_configuration(form, out):
return config
def get_host_name (form):
def get_host_name(form):
"""Return host name of given URL."""
return urllib.parse.urlparse(formvalue(form, "url"))[1]
def checkform (form, env):
def checkform(form, env):
"""Check form data. throw exception on error
Be sure to NOT print out any user-given data as HTML code, so use
only plain strings as exception text."""
@ -227,13 +227,13 @@ def checkform (form, env):
raise LCFormError(_("invalid %s option %r") % (option, value))
def log (env, msg):
def log(env, msg):
"""Log message to WSGI error output."""
logfile = env['wsgi.errors']
logfile.write("%s\n" % msg)
def dump (env, form):
def dump(env, form):
"""Log environment and form."""
for var, value in env.items():
log(env, var+"="+value)
@ -241,7 +241,7 @@ def dump (env, form):
log(env, str(formvalue(form, key)))
def format_error (why):
def format_error(why):
"""Format standard error page.
@param why: error message
@ptype why: unicode

View file

@ -14,7 +14,7 @@ import imp
from .fileutil import is_writable_by_others
def is_frozen ():
def is_frozen():
"""Return True if running inside a py2exe- or py2app-generated
executable."""
return hasattr(sys, "frozen")

View file

@ -19,7 +19,7 @@ Locking utility class.
import threading
from . import log, LOG_THREAD
def get_lock (name, debug=False):
def get_lock(name, debug=False):
"""Get a new lock.
@param debug: if True, acquire() and release() will have debug messages
@ptype debug: boolean, default is False

View file

@ -29,7 +29,7 @@ import traceback
#gc.set_debug(gc.DEBUG_LEAK)
PRINT_LOCALVARS = False
def _stack_format (stack):
def _stack_format(stack):
"""Format a stack trace to a message.
@return: formatted stack message
@ -54,7 +54,7 @@ def _stack_format (stack):
return s.getvalue()
def _log (fun, msg, args, **kwargs):
def _log(fun, msg, args, **kwargs):
"""Log a message with given function. Optional the following keyword
arguments are supported:
traceback(bool) - if True print traceback of current function
@ -70,7 +70,7 @@ def _log (fun, msg, args, **kwargs):
fun(traceback.format_exc())
def debug (logname, msg, *args, **kwargs):
def debug(logname, msg, *args, **kwargs):
"""Log a debug message.
return: None
@ -80,7 +80,7 @@ def debug (logname, msg, *args, **kwargs):
_log(log.debug, msg, args, **kwargs)
def info (logname, msg, *args, **kwargs):
def info(logname, msg, *args, **kwargs):
"""Log an informational message.
return: None
@ -90,7 +90,7 @@ def info (logname, msg, *args, **kwargs):
_log(log.info, msg, args, **kwargs)
def warn (logname, msg, *args, **kwargs):
def warn(logname, msg, *args, **kwargs):
"""Log a warning.
return: None
@ -100,7 +100,7 @@ def warn (logname, msg, *args, **kwargs):
_log(log.warning, msg, args, **kwargs)
def error (logname, msg, *args, **kwargs):
def error(logname, msg, *args, **kwargs):
"""Log an error.
return: None
@ -110,7 +110,7 @@ def error (logname, msg, *args, **kwargs):
_log(log.error, msg, args, **kwargs)
def critical (logname, msg, *args, **kwargs):
def critical(logname, msg, *args, **kwargs):
"""Log a critical error.
return: None
@ -120,7 +120,7 @@ def critical (logname, msg, *args, **kwargs):
_log(log.critical, msg, args, **kwargs)
def exception (logname, msg, *args, **kwargs):
def exception(logname, msg, *args, **kwargs):
"""Log an exception.
return: None
@ -130,11 +130,11 @@ def exception (logname, msg, *args, **kwargs):
_log(log.exception, msg, args, **kwargs)
def is_debug (logname):
def is_debug(logname):
"""See if logger is on debug level."""
return logging.getLogger(logname).isEnabledFor(logging.DEBUG)
def shutdown ():
def shutdown():
"""Flush and close all log handlers."""
logging.shutdown()

View file

@ -63,7 +63,7 @@ def init_log_config(handler=None):
add_loghandler(handler)
def add_loghandler (handler):
def add_loghandler(handler):
"""Add log handler to root logger and LOG_ROOT and set formatting."""
format = "%(levelname)s %(name)s %(asctime)s %(threadName)s %(message)s"
handler.setFormatter(logging.Formatter(format))
@ -71,7 +71,7 @@ def add_loghandler (handler):
logging.getLogger().addHandler(handler)
def remove_loghandler (handler):
def remove_loghandler(handler):
"""Remove log handler from root logger and LOG_ROOT."""
logging.getLogger(LOG_ROOT).removeHandler(handler)
logging.getLogger().removeHandler(handler)

View file

@ -81,6 +81,6 @@ class DOTLogger(_GraphLogger):
self.writeln("}")
def dotquote (s):
def dotquote(s):
"""Quote string for usage in DOT output format."""
return s.replace('"', '\\"')

View file

@ -93,7 +93,7 @@ class _GraphLogger(_Logger):
_disallowed = re.compile(r"[^a-zA-Z0-9 '#(){}\-\[\]\.,;:\!\?]+")
def quote (s):
def quote(s):
"""Replace disallowed characters in node or edge labels.
Also remove whitespace from beginning or end of label."""
return _disallowed.sub(" ", s).strip()

View file

@ -22,7 +22,7 @@ from . import _Logger
from .. import url as urlutil
def sqlify (s):
def sqlify(s):
"""
Escape special SQL chars and strings.
"""
@ -31,7 +31,7 @@ def sqlify (s):
return "'%s'" % s.replace("'", "''").replace(os.linesep, r"\n")
def intify (s):
def intify(s):
"""
Coerce a truth value to 0/1.

View file

@ -29,14 +29,14 @@ xmlattr_entities = {
}
def xmlquote (s):
def xmlquote(s):
"""
Quote characters for XML.
"""
return xml.sax.saxutils.escape(s)
def xmlquoteattr (s):
def xmlquoteattr(s):
"""
Quote XML attribute, ready for inclusion with double quotes.
"""

View file

@ -35,7 +35,7 @@ _proc_status = '/proc/%d/status' % os.getpid()
_scale = {'kB': 1024.0, 'mB': 1024.0*1024.0,
'KB': 1024.0, 'MB': 1024.0*1024.0}
def _VmB (VmKey):
def _VmB(VmKey):
"""Parse /proc/<pid>/status file for given key.
@return: requested number value of status entry
@ -62,7 +62,7 @@ def _VmB (VmKey):
return float(v[1]) * _scale[v[2]]
def memory (since=0.0):
def memory(since=0.0):
"""Get memory usage.
@return: memory usage in bytes
@ -71,7 +71,7 @@ def memory (since=0.0):
return _VmB('VmSize:') - since
def resident (since=0.0):
def resident(since=0.0):
"""Get resident memory usage.
@return: resident memory usage in bytes
@ -80,7 +80,7 @@ def resident (since=0.0):
return _VmB('VmRSS:') - since
def stacksize (since=0.0):
def stacksize(since=0.0):
"""Get stack size.
@return: stack size in bytes

View file

@ -59,7 +59,7 @@ PARSE_CONTENTS = {
"application/xml+sitemap": re.compile(r'<\?xml[^<]+<urlset\s+', re.IGNORECASE),
}
def guess_mimetype (filename, read=None):
def guess_mimetype(filename, read=None):
"""Return MIME type of file, or 'application/octet-stream' if it could
not be determined."""
mime, encoding = None, None

View file

@ -22,7 +22,7 @@ import re
import socket
from .. import log, LOG_CHECK
def is_valid_ip (ip):
def is_valid_ip(ip):
"""
Return True if given ip is a valid IPv4 or IPv6 address.
"""
@ -33,7 +33,7 @@ def is_valid_ip (ip):
return True
def resolve_host (host):
def resolve_host(host):
"""
@host: hostname or IP address
Return list of ip numbers for given host.

View file

@ -41,35 +41,35 @@ def parse_url(url_data):
url_data.aggregate.plugin_manager.run_parser_plugins(url_data, pagetype=key)
def parse_html (url_data):
def parse_html(url_data):
"""Parse into HTML content and search for URLs to check.
Found URLs are added to the URL queue.
"""
linkparse.find_links(url_data.get_soup(), url_data.add_url, linkparse.LinkTags)
def parse_opera (url_data):
def parse_opera(url_data):
"""Parse an opera bookmark file."""
from ..bookmarks.opera import parse_bookmark_data
for url, name, lineno in parse_bookmark_data(url_data.get_content()):
url_data.add_url(url, line=lineno, name=name)
def parse_chromium (url_data):
def parse_chromium(url_data):
"""Parse a Chromium or Google Chrome bookmark file."""
from ..bookmarks.chromium import parse_bookmark_data
for url, name in parse_bookmark_data(url_data.get_content()):
url_data.add_url(url, name=name)
def parse_safari (url_data):
def parse_safari(url_data):
"""Parse a Safari bookmark file."""
from ..bookmarks.safari import parse_bookmark_data
for url, name in parse_bookmark_data(url_data.get_raw_content()):
url_data.add_url(url, name=name)
def parse_text (url_data):
def parse_text(url_data):
"""Parse a text file with one url per line; comment and blank
lines are ignored."""
lineno = 0
@ -81,7 +81,7 @@ def parse_text (url_data):
url_data.add_url(line, line=lineno)
def parse_css (url_data):
def parse_css(url_data):
"""
Parse a CSS file for url() patterns.
"""
@ -96,7 +96,7 @@ def parse_css (url_data):
url_data.add_url(url, line=lineno, column=column)
def parse_swf (url_data):
def parse_swf(url_data):
"""Parse a SWF file for URLs."""
linkfinder = linkparse.swf_url_re.finditer
for mo in linkfinder(url_data.get_raw_content()):
@ -107,14 +107,14 @@ def parse_swf (url_data):
url_data.add_url(url)
def parse_wml (url_data):
def parse_wml(url_data):
"""Parse into WML content and search for URLs to check.
Found URLs are added to the URL queue.
"""
linkparse.find_links(url_data.get_soup(), url_data.add_url, linkparse.WmlTags)
def parse_firefox (url_data):
def parse_firefox(url_data):
"""Parse a Firefox3 bookmark file."""
filename = url_data.get_os_filename()
for url, name in firefox.parse_bookmark_file(filename):

View file

@ -50,7 +50,7 @@ class LocationInfo(_ConnectionPlugin):
# no risks should be taken here by using a lock.
_lock = get_lock("geoip")
def get_geoip_dat ():
def get_geoip_dat():
"""Find a GeoIP database, preferring city over country lookup."""
datafiles = ("GeoIPCity.dat", "GeoIP.dat")
if os.name == 'nt':
@ -85,7 +85,7 @@ if geoip_dat:
@synchronized(_lock)
def get_location (host):
def get_location(host):
"""Get translated country and optional city name.
@return: country with optional city or an boolean False if not found

View file

@ -29,7 +29,7 @@ from .. import fileutil, log, LOG_PLUGIN
_initialized = False
def init_win32com ():
def init_win32com():
"""Initialize the win32com.client cache."""
global _initialized
if _initialized:
@ -47,7 +47,7 @@ def init_win32com ():
_initialized = True
def has_word ():
def has_word():
"""Determine if Word is available on the current system."""
if not has_win32com:
return False
@ -64,13 +64,13 @@ def has_word ():
return False
def constants (name):
def constants(name):
"""Helper to return constants. Avoids importing win32com.client in
other modules."""
return getattr(win32com.client.constants, name)
def get_word_app ():
def get_word_app():
"""Return open Word.Application handle, or None if Word is not available
on this system."""
if not has_word():
@ -84,18 +84,18 @@ def get_word_app ():
return app
def close_word_app (app):
def close_word_app(app):
"""Close Word application object."""
app.Quit()
def open_wordfile (app, filename):
def open_wordfile(app, filename):
"""Open given Word file with application object."""
return app.Documents.Open(filename, ReadOnly=True,
AddToRecentFiles=False, Visible=False, NoEncodingDialog=True)
def close_wordfile (doc):
def close_wordfile(doc):
"""Close word file."""
doc.Close()
@ -155,7 +155,7 @@ def get_line_number(doc, wrange):
return lineno
def get_temp_filename (content):
def get_temp_filename(content):
"""Get temporary filename for content to parse."""
# store content in temporary file
fd, filename = fileutil.get_temp_file(mode='wb', suffix='.doc',

View file

@ -111,7 +111,7 @@ class CssSyntaxCheck(_ContentPlugin):
log.warn(LOG_PLUGIN, _("CSS syntax check plugin error: %(msg)s ") % {"msg": msg})
def check_w3_errors (url_data, xml, w3type):
def check_w3_errors(url_data, xml, w3type):
"""Add warnings for W3C HTML or CSS errors in xml format.
w3type is either "W3C HTML" or "W3C CSS"."""
dom = parseString(xml)
@ -126,7 +126,7 @@ def check_w3_errors (url_data, xml, w3type):
url_data.add_warning(warnmsg % attrs)
def getXmlText (parent, tag):
def getXmlText(parent, tag):
"""Return XML content of given tag in parent element."""
elem = parent.getElementsByTagName(tag)[0]
# Yes, the DOM standard is awful.

View file

@ -125,7 +125,7 @@ class ClamdScanner:
self.sock.close()
def canonical_clamav_conf ():
def canonical_clamav_conf():
"""Default clamav configs for various platforms."""
if os.name == 'posix':
clamavconf = "/etc/clamav/clamd.conf"
@ -143,7 +143,7 @@ def get_clamav_conf(filename):
log.warn(LOG_PLUGIN, "No ClamAV config file found at %r.", filename)
def get_sockinfo (host, port=None):
def get_sockinfo(host, port=None):
"""Return socket.getaddrinfo for given host and port."""
family, socktype = socket.AF_INET, socket.SOCK_STREAM
return socket.getaddrinfo(host, port, family, socktype)
@ -215,7 +215,7 @@ class ClamavConfig(dict):
return sock
def scan (data, clamconf):
def scan(data, clamconf):
"""Scan data for viruses.
@return (infection msgs, errors)
@rtype ([], [])

View file

@ -34,7 +34,7 @@ if socket.has_ipv6:
raise
def create_socket (family, socktype, proto=0, timeout=60):
def create_socket(family, socktype, proto=0, timeout=60):
"""
Create a socket with given family and type. If SSL context
is given an SSL socket is created.

View file

@ -40,7 +40,7 @@ from . import i18n
from builtins import str as str_text
def unicode_safe (s, encoding=i18n.default_encoding, errors='replace'):
def unicode_safe(s, encoding=i18n.default_encoding, errors='replace'):
"""Get unicode string without raising encoding errors. Unknown
characters of the given encoding will be ignored.
@ -59,7 +59,7 @@ def unicode_safe (s, encoding=i18n.default_encoding, errors='replace'):
return str(s)
def ascii_safe (s):
def ascii_safe(s):
"""Get ASCII string without raising encoding errors. Unknown
characters of the given encoding will be ignored.
@ -73,7 +73,7 @@ def ascii_safe (s):
return s
def is_ascii (s):
def is_ascii(s):
"""Test if a string can be encoded in ASCII."""
try:
s.encode('ascii', 'strict')
@ -82,7 +82,7 @@ def is_ascii (s):
return False
def is_encoding (text):
def is_encoding(text):
"""Check if string is a valid encoding."""
try:
return codecs.lookup(text)
@ -90,12 +90,12 @@ def is_encoding (text):
return False
def url_unicode_split (url):
def url_unicode_split(url):
"""Like urllib.parse.urlsplit(), but always returning unicode parts."""
return [unicode_safe(s) for s in urllib.parse.urlsplit(url)]
def unquote (s, matching=False):
def unquote(s, matching=False):
"""Remove leading and ending single and double quotes.
The quotes need to match if matching is True. Only one quote from each
end will be stripped.
@ -124,7 +124,7 @@ _para_posix = r"(?:%(sep)s)(?:(?:%(sep)s)\s*)+" % {'sep': '\n'}
_para_win = r"(?:%(sep)s)(?:(?:%(sep)s)\s*)+" % {'sep': '\r\n'}
_para_ro = re.compile("%s|%s|%s" % (_para_mac, _para_posix, _para_win))
def get_paragraphs (text):
def get_paragraphs(text):
"""A new paragraph is considered to start at a line which follows
one or more blank lines (lines containing nothing or just spaces).
The first line of the text also starts a paragraph."""
@ -133,7 +133,7 @@ def get_paragraphs (text):
return _para_ro.split(text)
def wrap (text, width, **kwargs):
def wrap(text, width, **kwargs):
"""Adjust lines of text to be not longer than width. The text will be
returned unmodified if width <= 0.
See textwrap.wrap() for a list of supported kwargs.
@ -147,13 +147,13 @@ def wrap (text, width, **kwargs):
return os.linesep.join(ret)
def indent (text, indent_string=" "):
def indent(text, indent_string=" "):
"""Indent each line of text with the given indent string."""
return os.linesep.join("%s%s" % (indent_string, x)
for x in text.splitlines())
def get_line_number (s, index):
def get_line_number(s, index):
r"""Return the line number of s[index] or zero on errors.
Lines are assumed to be separated by the ASCII character '\n'."""
i = 0
@ -167,14 +167,14 @@ def get_line_number (s, index):
return line
def paginate (text):
def paginate(text):
"""Print text in pages of lines."""
pydoc.pager(text)
_markup_re = re.compile("<.*?>", re.DOTALL)
def remove_markup (s):
def remove_markup(s):
"""Remove all <*> html markup tags from s."""
mo = _markup_re.search(s)
while mo:
@ -183,7 +183,7 @@ def remove_markup (s):
return s
def strsize (b, grouping=True):
def strsize(b, grouping=True):
"""Return human representation of bytes b. A negative number of bytes
raises a value error."""
if b < 0:
@ -203,13 +203,13 @@ def strsize (b, grouping=True):
return "%sGB" % locale.format_string("%.1f", (float(b) / (1024*1024*1024)), grouping)
def strtime (t, func=time.localtime):
def strtime(t, func=time.localtime):
"""Return ISO 8601 formatted time."""
return time.strftime("%Y-%m-%d %H:%M:%S", func(t)) + strtimezone()
# from quodlibet
def strduration (duration):
def strduration(duration):
"""Turn a time value in seconds into hh:mm:ss or mm:ss."""
if duration < 0:
duration = abs(duration)
@ -229,7 +229,7 @@ def strduration (duration):
# from quodlibet
def strduration_long (duration, do_translate=True):
def strduration_long(duration, do_translate=True):
"""Turn a time value in seconds into x hours, x minutes, etc."""
if do_translate:
# use global translator functions
@ -275,7 +275,7 @@ def strduration_long (duration, do_translate=True):
return "%s%s" % (prefix, ", ".join(time_str))
def strtimezone ():
def strtimezone():
"""Return timezone info, %z on some platforms, but not supported on all.
"""
if time.daylight:
@ -293,7 +293,7 @@ def stripurl(s):
return s.splitlines()[0].strip()
def limit (s, length=72):
def limit(s, length=72):
"""If the length of the string exceeds the given limit, it will be cut
off and three dots will be appended.
@ -311,12 +311,12 @@ def limit (s, length=72):
return "%s..." % s[:length]
def strline (s):
def strline(s):
"""Display string representation on one line."""
return strip_control_chars("`%s'" % s.replace("\n", "\\n"))
def format_feature_warning (**kwargs):
def format_feature_warning(**kwargs):
"""Format warning that a module could not be imported and that it should
be installed for a certain URL.
"""

View file

@ -24,7 +24,7 @@ _trace_ignore = set()
_trace_filter = set()
def trace_ignore (names):
def trace_ignore(names):
"""Add given names to trace ignore set, or clear set if names is None."""
if names is None:
_trace_ignore.clear()
@ -32,7 +32,7 @@ def trace_ignore (names):
_trace_ignore.update(names)
def trace_filter (patterns):
def trace_filter(patterns):
"""Add given patterns to trace filter set or clear set if patterns is
None."""
if patterns is None:
@ -41,7 +41,7 @@ def trace_filter (patterns):
_trace_filter.update(re.compile(pat) for pat in patterns)
def _trace (frame, event, arg):
def _trace(frame, event, arg):
"""Trace function calls."""
if event in ('call', 'c_call'):
_trace_line(frame, event, arg)
@ -53,7 +53,7 @@ def _trace (frame, event, arg):
return _trace
def _trace_full (frame, event, arg):
def _trace_full(frame, event, arg):
"""Trace every executed line."""
if event == "line":
_trace_line(frame, event, arg)
@ -62,7 +62,7 @@ def _trace_full (frame, event, arg):
return _trace_full
def _trace_line (frame, event, arg):
def _trace_line(frame, event, arg):
"""Print current executed line."""
name = frame.f_globals["__name__"]
if name in _trace_ignore:
@ -82,7 +82,7 @@ def _trace_line (frame, event, arg):
print("THREAD(%d) %r %.2f %s # %s:%d" % args)
def trace_on (full=False):
def trace_on(full=False):
"""Start tracing of the current thread (and the current thread only)."""
if full:
sys.settrace(_trace_full)
@ -90,6 +90,6 @@ def trace_on (full=False):
sys.settrace(_trace)
def trace_off ():
def trace_off():
"""Stop tracing of the current thread (and the current thread only)."""
sys.settrace(None)

View file

@ -32,7 +32,7 @@ else:
URL_TAG = 'Source-Package-URL:'
def check_update ():
def check_update():
"""Return the following values:
(False, errmsg) - online version could not be determined
(True, None) - user has newest version
@ -53,7 +53,7 @@ def check_update ():
return True, (version, None)
def get_online_version ():
def get_online_version():
"""Download update info and parse it."""
# prevent getting a cached answer
headers = {'Pragma': 'no-cache', 'Cache-Control': 'no-cache'}
@ -70,6 +70,6 @@ def get_online_version ():
return version, url
def is_newer_version (version):
def is_newer_version(version):
"""Check if given version is newer than current version."""
return StrictVersion(version) > StrictVersion(CurrentVersion)

View file

@ -87,7 +87,7 @@ is_safe_fragment = re.compile("(?i)^%s$" % _safe_fragment_pattern).match
# snatched form urlparse.py
def splitparams (path):
def splitparams(path):
"""Split off parameter part from path.
Returns tuple (path-without-param, param)
"""
@ -100,7 +100,7 @@ def splitparams (path):
return path[:i], path[i+1:]
def is_numeric_port (portstr):
def is_numeric_port(portstr):
"""return: integer port (== True) iff portstr is a valid port number,
False otherwise
"""
@ -112,13 +112,13 @@ def is_numeric_port (portstr):
return False
def safe_host_pattern (host):
def safe_host_pattern(host):
"""Return regular expression pattern with given host for URL testing."""
return "(?i)%s://%s%s(#%s)?" % \
(_safe_scheme_pattern, host, _safe_path_pattern, _safe_fragment_pattern)
def parse_qsl (qs, encoding, keep_blank_values=0, strict_parsing=0):
def parse_qsl(qs, encoding, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument.
@param qs: URL-encoded query string to be parsed
@ -168,7 +168,7 @@ def parse_qsl (qs, encoding, keep_blank_values=0, strict_parsing=0):
return r
def idna_encode (host):
def idna_encode(host):
"""Encode hostname as internationalized domain name (IDN) according
to RFC 3490.
@raise: UnicodeError if hostname is not properly IDN encoded.
@ -183,7 +183,7 @@ def idna_encode (host):
return host, False
def url_fix_host (urlparts, encoding):
def url_fix_host(urlparts, encoding):
"""Unquote and fix hostname. Returns is_idn."""
if not urlparts[1]:
urlparts[2] = urllib.parse.unquote(urlparts[2], encoding=encoding)
@ -229,7 +229,7 @@ def url_fix_host (urlparts, encoding):
return is_idn
def url_fix_common_typos (url):
def url_fix_common_typos(url):
"""Fix common typos in given URL like forgotten colon."""
if url.startswith("http//"):
url = "http://" + url[6:]
@ -238,7 +238,7 @@ def url_fix_common_typos (url):
return url
def url_fix_mailto_urlsplit (urlparts):
def url_fix_mailto_urlsplit(urlparts):
"""Split query part of mailto url if found."""
sep = b"?" if isinstance(urlparts[2], bytes) else "?"
if sep in urlparts[2]:
@ -252,7 +252,7 @@ wayback_regex = re.compile(r'(https?)(\%3A/|:/)')
def url_fix_wayback_query(path):
return wayback_regex.sub(r'\1://', path)
def url_parse_query (query, encoding):
def url_parse_query(query, encoding):
"""Parse and re-join the given CGI query."""
# if ? is in the query, split it off, seen at msdn.microsoft.com
append = ""
@ -273,7 +273,7 @@ def url_parse_query (query, encoding):
return ''.join(l) + append
def urlunsplit (urlparts):
def urlunsplit(urlparts):
"""Same as urllib.parse.urlunsplit but with extra UNC path handling
for Windows OS."""
res = urllib.parse.urlunsplit(urlparts)
@ -286,7 +286,7 @@ def urlunsplit (urlparts):
return res
def url_norm (url, encoding):
def url_norm(url, encoding):
"""Normalize the given URL which must be quoted. Supports unicode
hostnames (IDNA encoding) according to RFC 3490.
@ -335,7 +335,7 @@ _thisdir_ro = re.compile(r"^\./")
_samedir_ro = re.compile(r"/\./|/\.$")
_parentdir_ro = re.compile(r"^/(\.\./)+|/(?!\.\./)[^/]+/\.\.(/|$)")
_relparentdir_ro = re.compile(r"^(?!\.\./)[^/]+/\.\.(/|$)")
def collapse_segments (path):
def collapse_segments(path):
"""Remove all redundant segments from the given URL path.
Precondition: path is an unquoted url path"""
# replace backslashes
@ -371,7 +371,7 @@ def collapse_segments (path):
url_is_absolute = re.compile(r"^[-\.a-z]+:", re.I).match
def url_quote (url, encoding):
def url_quote(url, encoding):
"""Quote given URL."""
if not url_is_absolute(url):
return document_quote(url)
@ -393,7 +393,7 @@ def url_quote (url, encoding):
return urlunsplit(urlparts)
def document_quote (document):
def document_quote(document):
"""Quote given document."""
doc, query = urllib.parse.splitquery(document)
doc = urllib.parse.quote(doc, safe='/=,')
@ -402,7 +402,7 @@ def document_quote (document):
return doc
def match_url (url, domainlist):
def match_url(url, domainlist):
"""Return True if host part of url matches an entry in given domain list.
"""
if not url:
@ -410,7 +410,7 @@ def match_url (url, domainlist):
return match_host(url_split(url)[1], domainlist)
def match_host (host, domainlist):
def match_host(host, domainlist):
"""Return True if host matches an entry in given domain list."""
if not host:
return False
@ -428,7 +428,7 @@ if os.name == 'nt':
_nopathquote_chars += "|"
_safe_url_chars = re.escape(_nopathquote_chars + "_:.&#%?[]!")+"a-zA-Z0-9"
_safe_url_chars_ro = re.compile(r"^[%s]*$" % _safe_url_chars)
def url_needs_quoting (url):
def url_needs_quoting(url):
"""Check if url needs percent quoting. Note that the method does
only check basic character sets, and not any other syntax.
The URL might still be syntactically incorrect even when
@ -441,7 +441,7 @@ def url_needs_quoting (url):
return not _safe_url_chars_ro.match(url)
def url_split (url):
def url_split(url):
"""Split url in a tuple (scheme, hostname, port, document) where
hostname is always lowercased.
Precondition: url is syntactically correct URI (eg has no whitespace)
@ -455,14 +455,14 @@ def url_split (url):
return scheme, host, port, document
def url_unsplit (parts):
def url_unsplit(parts):
"""Rejoin URL parts to a string."""
if parts[2] == default_ports.get(parts[0]):
return "%s://%s%s" % (parts[0], parts[1], parts[3])
return "%s://%s:%d%s" % parts
def splitport (host, port=0):
def splitport(host, port=0):
"""Split optional port number from host. If host has no port number,
the given default port is returned.

View file

@ -15,7 +15,7 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Windows utility functions."""
def get_shell_folder (name):
def get_shell_folder(name):
"""Get Windows Shell Folder locations from the registry."""
try:
import _winreg as winreg

View file

@ -23,7 +23,7 @@ import codecs
import html
from linkcheck import strformat
def main (filename):
def main(filename):
om = print_memorydump(filename)
dirname, basename = os.path.split(filename)
basename = os.path.splitext(basename)[0]

View file

@ -60,12 +60,12 @@ def get_long_description():
except:
return Description
def normpath (path):
def normpath(path):
"""Norm a path name to platform specific notation."""
return os.path.normpath(path)
def cnormpath (path):
def cnormpath(path):
"""Norm a path name to platform specific notation and make it absolute."""
path = normpath(path)
if os.name == 'nt':
@ -77,7 +77,7 @@ def cnormpath (path):
release_ro = re.compile(r"\(released (.+)\)")
def get_release_date ():
def get_release_date():
"""Parse and return relase date as string from doc/changelog.txt."""
fname = os.path.join("doc", "changelog.txt")
release_date = "unknown"
@ -253,7 +253,7 @@ class MyDistribution(Distribution):
"creating %s" % filename, self.verbose >= 1, self.dry_run)
def list_message_files (package, suffix=".mo"):
def list_message_files(package, suffix=".mo"):
"""Return list of all found message files and their installation paths."""
for fname in glob.glob("po/*" + suffix):
# basename (without extension) is a locale name
@ -263,7 +263,7 @@ def list_message_files (package, suffix=".mo"):
"share", "locale", localename, "LC_MESSAGES", domainname))
def check_manifest ():
def check_manifest():
"""Snatched from roundup.sf.net.
Check that the files listed in the MANIFEST are present when the
source is unpacked."""

View file

@ -29,7 +29,7 @@ basedir = os.path.dirname(__file__)
linkchecker_cmd = os.path.join(os.path.dirname(basedir), "linkchecker")
def run (cmd, verbosity=0, **kwargs):
def run(cmd, verbosity=0, **kwargs):
"""Run command without error checking.
@return: command return code"""
if kwargs.get("shell"):
@ -38,7 +38,7 @@ def run (cmd, verbosity=0, **kwargs):
return subprocess.call(cmd, **kwargs)
def run_checked (cmd, ret_ok=(0,), **kwargs):
def run_checked(cmd, ret_ok=(0,), **kwargs):
"""Run command and raise OSError on error."""
retcode = run(cmd, **kwargs)
if retcode not in ret_ok:
@ -48,7 +48,7 @@ def run_checked (cmd, ret_ok=(0,), **kwargs):
def run_silent (cmd):
def run_silent(cmd):
"""Run given command without output."""
null = open(os.name == 'nt' and ':NUL' or "/dev/null", 'w')
try:
@ -57,7 +57,7 @@ def run_silent (cmd):
null.close()
def _need_func (testfunc, name):
def _need_func(testfunc, name):
"""Decorator skipping test if given testfunc fails."""
def check_func (func):
@wraps(func)
@ -70,7 +70,7 @@ def _need_func (testfunc, name):
@lru_cache(1)
def has_network ():
def has_network():
"""Test if network is up."""
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@ -85,7 +85,7 @@ need_network = _need_func(has_network, "network")
@lru_cache(1)
def has_msgfmt ():
def has_msgfmt():
"""Test if msgfmt is available."""
return run_silent(["msgfmt", "-V"]) == 0
@ -93,7 +93,7 @@ need_msgfmt = _need_func(has_msgfmt, "msgfmt")
@lru_cache(1)
def has_posix ():
def has_posix():
"""Test if this is a POSIX system."""
return os.name == "posix"
@ -101,7 +101,7 @@ need_posix = _need_func(has_posix, "POSIX system")
@lru_cache(1)
def has_windows ():
def has_windows():
"""Test if this is a Windows system."""
return os.name == "nt"
@ -109,7 +109,7 @@ need_windows = _need_func(has_windows, "Windows system")
@lru_cache(1)
def has_linux ():
def has_linux():
"""Test if this is a Linux system."""
return sys.platform.startswith("linux")
@ -117,7 +117,7 @@ need_linux = _need_func(has_linux, "Linux system")
@lru_cache(1)
def has_clamav ():
def has_clamav():
"""Test if ClamAV daemon is installed and running."""
try:
cmd = ["grep", "LocalSocket", "/etc/clamav/clamd.conf"]
@ -135,7 +135,7 @@ need_clamav = _need_func(has_clamav, "ClamAV")
@lru_cache(1)
def has_proxy ():
def has_proxy():
"""Test if proxy is running on port 8081."""
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@ -149,7 +149,7 @@ need_proxy = _need_func(has_proxy, "proxy")
@lru_cache(1)
def has_pyftpdlib ():
def has_pyftpdlib():
"""Test if pyftpdlib is available."""
try:
import pyftpdlib
@ -161,7 +161,7 @@ need_pyftpdlib = _need_func(has_pyftpdlib, "pyftpdlib")
@lru_cache(1)
def has_biplist ():
def has_biplist():
"""Test if biplist is available."""
try:
import biplist
@ -173,7 +173,7 @@ need_biplist = _need_func(has_biplist, "biplist")
@lru_cache(1)
def has_newsserver (server):
def has_newsserver(server):
import nntplib
try:
nntp = nntplib.NNTP(server, usenetrc=False)
@ -183,7 +183,7 @@ def has_newsserver (server):
return False
def need_newsserver (server):
def need_newsserver(server):
"""Decorator skipping test if newsserver is not available."""
def check_func (func):
def newfunc (*args, **kwargs):
@ -197,7 +197,7 @@ def need_newsserver (server):
@lru_cache(1)
def has_x11 ():
def has_x11():
"""Test if DISPLAY variable is set."""
return os.getenv('DISPLAY') is not None
@ -222,7 +222,7 @@ need_pdflib = _need_func(has_pdflib, 'pdflib')
@contextmanager
def _limit_time (seconds):
def _limit_time(seconds):
"""Raises LinkCheckerInterrupt if given number of seconds have passed."""
if os.name == 'posix':
def signal_handler(signum, frame):
@ -237,7 +237,7 @@ def _limit_time (seconds):
signal.signal(signal.SIGALRM, old_handler)
def limit_time (seconds, skip=False):
def limit_time(seconds, skip=False):
"""Limit test time to the given number of seconds, else fail or skip."""
def run_limited (func):
def new_func (*args, **kwargs):
@ -253,7 +253,7 @@ def limit_time (seconds, skip=False):
return run_limited
def get_file (filename=None):
def get_file(filename=None):
"""
Get file name located within 'data' directory.
"""

View file

@ -147,11 +147,11 @@ class TestLogger(linkcheck.logger._Logger):
self.diff.append(line)
def get_file_url (filename):
def get_file_url(filename):
return re.sub("^([a-zA-Z]):", r"/\1|", filename.replace("\\", "/"))
def add_fileoutput_config (config):
def add_fileoutput_config(config):
if os.name == 'posix':
devnull = '/dev/null'
elif os.name == 'nt':
@ -165,7 +165,7 @@ def add_fileoutput_config (config):
config['fileoutput'].append(logger)
def get_test_aggregate (confargs, logargs, logger=TestLogger):
def get_test_aggregate(confargs, logargs, logger=TestLogger):
"""Initialize a test configuration object."""
config = linkcheck.configuration.Configuration()
config.logger_add(logger)

View file

@ -48,7 +48,7 @@ class FtpServerTest(LinkCheckTest):
pass
def start_server (host, port):
def start_server(host, port):
def line_logger(self, msg):
if "kill" in msg:
raise KeyboardInterrupt()
@ -95,7 +95,7 @@ def start_server (host, port):
return port
def stop_server (host, port):
def stop_server(host, port):
"""Stop a running FTP server."""
ftp = FTP()
ftp.connect(host, port, TIMEOUT)

View file

@ -193,7 +193,7 @@ class HttpsServerTest(HttpServerTest):
return "https://localhost:%d/tests/checker/data/%s" % (self.port, filename)
def start_server (handler, https=False):
def start_server(handler, https=False):
"""Start an HTTP server thread and return its port number."""
server_address = ('localhost', 0)
handler.protocol_version = "HTTP/1.0"
@ -221,7 +221,7 @@ def start_server (handler, https=False):
return port
def stop_server (port, https=False):
def stop_server(port, https=False):
"""Stop an HTTP server thread."""
if https:
conn = HTTPSConnection("localhost:%d" % port,
@ -232,7 +232,7 @@ def stop_server (port, https=False):
conn.getresponse()
def get_cookie (maxage=2000):
def get_cookie(maxage=2000):
data = (
("Comment", "justatest"),
("Max-Age", "%d" % maxage),

View file

@ -59,7 +59,7 @@ class TelnetServerTest(LinkCheckTest):
assert not self.server_thread.is_alive()
def start_server (host, port, stop_event):
def start_server(host, port, stop_event):
# Instantiate Telnet server class and listen to host:port
clients = []
def on_connect(client):

View file

@ -26,7 +26,7 @@ from tests import need_word, need_pdflib
from . import LinkCheckTest, get_file
def unzip (filename, targetdir):
def unzip(filename, targetdir):
"""Unzip given zipfile into targetdir."""
if isinstance(targetdir, unicode):
targetdir = str(targetdir)

View file

@ -23,7 +23,7 @@ import linkcheck.configuration
from builtins import str as str_text
def get_file (filename=None):
def get_file(filename=None):
"""Get file name located within 'data' directory."""
directory = os.path.join("tests", "configuration", "data")
if filename:

View file

@ -44,7 +44,7 @@ def pretty_print_html(fd, soup):
fd.write("/>")
def quote_attrval (s):
def quote_attrval(s):
"""
Quote a HTML attribute to be able to wrap it in double quotes.

View file

@ -27,7 +27,7 @@ from tests import need_msgfmt, need_posix
pofiles = None
def get_pofiles ():
def get_pofiles():
"""Find all .po files in this source."""
global pofiles
if pofiles is None:

View file

@ -37,7 +37,7 @@ import linkcheck.url
# (Latin capital letter C + Combining cedilla U+0327)
def url_norm (url, encoding="utf-8"):
def url_norm(url, encoding="utf-8"):
return linkcheck.url.url_norm(url, encoding=encoding)[0]

View file

@ -24,7 +24,7 @@ import linkcheck.checker.urlbase
from linkcheck.checker import get_url_from
def get_test_aggregate ():
def get_test_aggregate():
"""
Initialize a test configuration object.
"""