Mirror of https://github.com/Hopiu/linkchecker.git (synced 2026-05-02 03:44:43 +00:00)
Print level in loggers xml, csv and sql.
This commit is contained in:
parent 532f2d0fec
commit 84f6d56a49

7 changed files with 18 additions and 3 deletions
@@ -19,5 +19,6 @@ create table linksdb (
     checktime int,
     dltime int,
     dlsize int,
-    cached int
+    cached int,
+    level int
 );
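With the new level column in linksdb, stored check results can be filtered by recursion level, which is the use case the changelog entry below names. A minimal sketch of such a query, assuming the SQL logger output was loaded into an SQLite file links.db; the file name and the urlname column are assumptions, only linksdb and level come from the diff above:

    # Minimal sketch: list only links at most one hop from the start URL.
    # Assumes the SQL logger output has been imported into links.db.
    import sqlite3

    conn = sqlite3.connect("links.db")
    query = "select urlname, level from linksdb where level <= ? order by level"
    for urlname, level in conn.execute(query, (1,)):
        print(level, urlname)
    conn.close()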
@@ -18,6 +18,8 @@ Changes:
 
 Features:
 - gui: Added CSV output type for results.
+- logging: Print recursion level in machine readable logger outputs
+  xml, csv and sql. Allows filtering the output by recursion level.
 
 
 6.6 "Coraline" (released 25.3.2011)
@@ -1111,6 +1111,8 @@ class UrlBase (object):
           Cache key for this URL.
         - url_data.content_type: unicode
           MIME content type for URL content.
+        - url_data.level: int
+          Recursion level until reaching this URL from start URL
         """
         return dict(valid=self.valid,
                     extern=self.extern[0],
@@ -1132,6 +1134,7 @@ class UrlBase (object):
                     column=self.column,
                     cache_url_key=self.cache_url_key,
                     content_type=self.get_content_type(),
+                    level=self.recursion_level,
         )
 
     def to_wire (self):
@@ -1167,6 +1170,7 @@ urlDataAttr = [
     'column',
     'cache_url_key',
     'content_type',
+    'level',
 ]
 
 class CompactUrlData (object):
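Adding 'level' to urlDataAttr is what lets the field survive serialization: CompactUrlData builds one slot per name in that list. A minimal sketch of this pattern with an abbreviated attribute list; apart from 'level', urlDataAttr and CompactUrlData, all names and values here are invented:

    # Sketch of the urlDataAttr/CompactUrlData pattern (abbreviated list).
    urlDataAttr = ['url', 'content_type', 'level']

    class CompactUrlData(object):
        """Store a whitelisted set of URL attributes in a slots object."""
        __slots__ = urlDataAttr

        def __init__(self, wired_url_data):
            # Copy each whitelisted attribute from the serialized dict.
            for attr in urlDataAttr:
                setattr(self, attr, wired_url_data[attr])

    wire = dict(url=u"http://example.com/", content_type=u"text/html", level=2)
    print(CompactUrlData(wire).level)  # -> 2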
@@ -41,6 +41,7 @@ Fields = dict(
     dlsize=_("Size"),
     checktime=_("Check time"),
     url=_("URL"),
+    level=_("Level"),
 )
 del _
 
@@ -79,7 +79,8 @@ class CSVLogger (Logger):
                   u"dltime",
                   u"dlsize",
                   u"checktime",
-                  u"cached"):
+                  u"cached",
+                  u"level"):
             if self.has_part(s):
                 row.append(s)
         if row:
@@ -118,6 +119,8 @@ class CSVLogger (Logger):
             row.append(url_data.checktime)
         if self.has_part("cached"):
             row.append(url_data.cached)
+        if self.has_part("level"):
+            row.append(url_data.level)
         self.writerow(map(strformat.unicode_safe, row))
         self.flush()
 
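With the level part enabled, every CSV row now carries the recursion level, so the filtering promised in the changelog becomes a small post-processing step. A hypothetical example, assuming a report file linkchecker-out.csv that uses ';' as separator and whose first non-comment row is the header; the urlname column is also an assumption:

    # Hypothetical filter: keep rows at most one hop from the start URL.
    import csv

    with open("linkchecker-out.csv") as fd:
        # Skip comment lines so DictReader sees the header row first.
        rows = (line for line in fd if not line.startswith("#"))
        for row in csv.DictReader(rows, delimiter=";"):
            if int(row["level"]) <= 1:
                print(row["level"], row["urlname"])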
@@ -64,6 +64,8 @@ class CustomXMLLogger (xmllog.XMLLogger):
             self.xml_tag(u"dlsize", u"%d" % url_data.dlsize)
         if url_data.checktime and self.has_part("checktime"):
             self.xml_tag(u"checktime", u"%f" % url_data.checktime)
+        if self.has_part("level"):
+            self.xml_tag(u"level", u"%d" % url_data.level)
         if url_data.info and self.has_part('info'):
             self.xml_starttag(u"infos")
             for info in url_data.info:
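The same filtering works on the custom XML output through the new level tag. A sketch using ElementTree; the file name and the urldata/url element names are assumptions, only the level tag comes from the diff above:

    # Sketch: pull links deeper than two hops out of an XML report.
    import xml.etree.ElementTree as ET

    tree = ET.parse("linkchecker-out.xml")
    for urldata in tree.getroot().iter("urldata"):
        level = urldata.findtext("level")
        if level is not None and int(level) > 2:
            print("level %s: %s" % (level, urldata.findtext("url")))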
@@ -98,7 +98,8 @@ class SQLLogger (Logger):
             "%(checktime)d,"
             "%(dltime)d,"
             "%(dlsize)d,"
-            "%(cached)d"
+            "%(cached)d,"
+            "%(level)d"
             ")%(separator)s" %
             {'table': self.dbname,
              'base_url': sqlify(url_data.base_url),
@@ -117,6 +118,7 @@ class SQLLogger (Logger):
              'dlsize': url_data.dlsize,
              'cached': intify(url_data.cached),
              'separator': self.separator,
+             "level": url_data.level,
             })
         self.flush()
 
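The SQL logger assembles each INSERT through one %-style dict substitution, so the new column costs exactly one "%(level)d" placeholder plus the matching dict entry above. A trimmed, runnable sketch of that mechanism; the column list and all values are invented:

    # Trimmed sketch of the logger's statement building; only the
    # cached/level handling mirrors the diff, everything else is invented.
    row = {
        "table": "linksdb",
        "url": "'http://example.com/'",  # pre-quoted, as sqlify() would return
        "cached": 0,
        "level": 1,
        "separator": ";",
    }
    stmt = ("insert into %(table)s(url,cached,level) values ("
            "%(url)s,"
            "%(cached)d,"
            "%(level)d"
            ")%(separator)s" % row)
    print(stmt)
    # insert into linksdb(url,cached,level) values ('http://example.com/',0,1);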