# Mirror of https://github.com/Hopiu/linkchecker.git (synced 2026-03-24)
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2006-2010 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Aggregate needed object instances for checker threads.
"""
import time
import threading

from .. import log, LOG_CHECK
from ..decorators import synchronized
from ..cache import urlqueue
from . import logger, status, checker, cleanup

_lock = threading.Lock()


class Aggregate (object):
    """Store thread-safe data collections for checker threads."""

    def __init__ (self, config, urlqueue, connections, cookies, robots_txt):
        """Store the given shared objects and initialize per-run state.

        @param config: configuration object (mapping-style access)
        @param urlqueue: queue of URLs to be checked
        @param connections: connection pool shared by checker threads
        @param cookies: cookie storage
        @param robots_txt: robots.txt cache
        """
        self.config = config
        self.urlqueue = urlqueue
        self.connections = connections
        self.cookies = cookies
        self.robots_txt = robots_txt
        self.logger = logger.Logger(config)
        # All spawned threads (status, cleanup, checker); see start_threads().
        self.threads = []
        # Timestamp of the last W3C validator call; used by check_w3_time()
        # to rate-limit validator requests to at most one per second.
        self.last_w3_call = 0
        self.wanted_stop = False

    def start_threads (self):
        """Spawn threads for URL checking and status printing.

        If the configured thread count is zero or negative, URLs are
        checked synchronously in the calling thread instead.
        """
        if self.config["status"]:
            t = status.Status(self.urlqueue, self.config.status_logger,
              self.config["status_wait_seconds"])
            t.start()
            self.threads.append(t)
        t = cleanup.Cleanup(self.connections)
        t.start()
        self.threads.append(t)
        num = self.config["threads"]
        if num >= 1:
            for dummy in range(num):
                t = checker.Checker(self.urlqueue, self.logger)
                t.start()
                self.threads.append(t)
        else:
            # No threading requested: drain the queue in this thread.
            checker.check_url(self.urlqueue, self.logger)

    def print_active_threads (self):
        """Log the URLs of all still-active checker threads."""
        first = True
        for t in self.threads:
            # Use t.name instead of the deprecated camelCase alias
            # t.getName(), which was removed in Python 3.9.
            name = t.name
            if name.startswith("CheckThread-"):
                if first:
                    log.info(LOG_CHECK, _("These URLs are still active:"))
                    first = False
                # Strip the "CheckThread-" prefix (12 chars) to get the URL.
                log.info(LOG_CHECK, name[12:])

    def abort (self):
        """Print still-active URLs and empty the URL queue."""
        self.print_active_threads()
        self.urlqueue.do_shutdown()
        try:
            self.urlqueue.join(timeout=self.config["timeout"])
        except urlqueue.Timeout:
            log.warn(LOG_CHECK, "Abort timed out")

    def remove_stopped_threads (self):
        """Remove the stopped threads from the internal thread list."""
        # Fixed: the original docstring had mismatched quotes ("..."""),
        # which parsed as two concatenated string literals.
        # t.is_alive() replaces the deprecated alias t.isAlive(),
        # which was removed in Python 3.9.
        self.threads = [t for t in self.threads if t.is_alive()]

    def finish (self):
        """Wait for checker threads to finish."""
        assert self.urlqueue.empty()
        for t in self.threads:
            t.stop()
            # Give each thread half a second to exit before complaining.
            t.join(0.5)
            if t.is_alive():
                log.warn(LOG_CHECK, "Thread %s still active", t)
        self.connections.clear()

    @synchronized(_lock)
    def check_w3_time (self):
        """Make sure the W3C validators are called at most once a second."""
        if time.time() - self.last_w3_call < 1:
            time.sleep(1)
        self.last_w3_call = time.time()