2001-03-15 01:19:35 +00:00
|
|
|
"""Handle local file: links"""
|
2001-05-23 21:20:44 +00:00
|
|
|
# Copyright (C) 2000,2001 Bastian Kleineidam
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
|
# (at your option) any later version.
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
|
# GNU General Public License for more details.
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
|
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
2000-11-11 00:38:04 +00:00
|
|
|
|
2002-02-24 12:29:35 +00:00
|
|
|
import re, os, urlparse, urllib, linkcheck
|
2001-11-20 20:27:25 +00:00
|
|
|
from UrlData import UrlData, ExcList
|
2000-02-26 10:24:46 +00:00
|
|
|
|
# OSError is thrown on Windows when a file is not found
# Register it on the shared exception list so such failures are
# caught and reported as broken links instead of aborting the check.
ExcList.append(OSError)
|
|
|
|
|
|
# file extensions we can parse recursively
# Note: inline flags like (?i) must appear at the very start of the
# pattern (anything else is an error on modern Python), and the dot in
# "opera.adr" is escaped so only the literal file name matches.
extensions = {
    "html": r'(?i)\.s?html?$',
    "opera": r'(?i)^opera\.adr$',  # opera bookmark file
    "text": r'(?i)\.(txt|xml|tsv|csv|sgml?|py|java|cc?|cpp|h)$',
}
# compile the patterns in place; values change, key set does not
for key, pattern in list(extensions.items()):
    extensions[key] = re.compile(pattern)
|
2000-08-19 20:01:58 +00:00
|
|
|
|
# if file extension was fruitless, look at the content
contents = {
    # (?s) lets ".*" span newlines: real HTML documents have the
    # opening and closing tag on different lines
    "html": r'(?si)<html>.*</html>',
    "opera": r'Opera Hotlist',
    "text": r'[\w\s]+',
}
# compile the patterns in place; values change, key set does not
for key, pattern in list(contents.items()):
    contents[key] = re.compile(pattern)
|
|
|
|
|
|
|
|
|
|
_schemes = r"""(
|
|
|
|
|
acap # application configuration access protocol
|
|
|
|
|
|afs # Andrew File System global file names
|
|
|
|
|
|cid # content identifier
|
|
|
|
|
|data # data
|
|
|
|
|
|dav # dav
|
|
|
|
|
|fax # fax
|
|
|
|
|
|imap # internet message access protocol
|
|
|
|
|
|ldap # Lightweight Directory Access Protocol
|
|
|
|
|
|mailserver # Access to data available from mail servers
|
|
|
|
|
|mid # message identifier
|
|
|
|
|
|modem # modem
|
|
|
|
|
|nfs # network file system protocol
|
|
|
|
|
|opaquelocktoken # opaquelocktoken
|
|
|
|
|
|pop # Post Office Protocol v3
|
|
|
|
|
|prospero # Prospero Directory Service
|
|
|
|
|
|rtsp # real time streaming protocol
|
|
|
|
|
|service # service location
|
|
|
|
|
|sip # session initiation protocol
|
|
|
|
|
|tel # telephone
|
|
|
|
|
|tip # Transaction Internet Protocol
|
|
|
|
|
|tn3270 # Interactive 3270 emulation sessions
|
|
|
|
|
|vemmi # versatile multimedia interface
|
|
|
|
|
|wais # Wide Area Information Servers
|
|
|
|
|
|z39\.50r # Z39.50 Retrieval
|
|
|
|
|
|z39\.50s # Z39.50 Session
|
|
|
|
|
|chrome # Mozilla specific
|
|
|
|
|
|find # Mozilla specific
|
|
|
|
|
|clsid # Microsoft specific
|
|
|
|
|
|javascript # JavaScript
|
|
|
|
|
|isbn # ISBN (int. book numbers)
|
|
|
|
|
|https? # HTTP/HTTPS
|
|
|
|
|
|ftp # FTP
|
|
|
|
|
|file # local file
|
|
|
|
|
|telnet # telnet
|
|
|
|
|
|mailto # mailto
|
|
|
|
|
|gopher # gopher
|
|
|
|
|
|s?news # news
|
|
|
|
|
|nntp # news
|
|
|
|
|
)"""
|
|
|
|
|
_url = r"(?i)%s:[-a-zA-Z0-9$_.+!*'/(),;]+" % _schemes
|
|
|
|
|
_url_re = re.compile(_url, re.VERBOSE)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class FileUrlData (UrlData):
    "Url link with file scheme"

    def __init__(self,
                 urlName,
                 recursionLevel,
                 parentName = None,
                 baseRef = None, line=0, name=""):
        """Store the url data and normalize local names.

        A name given directly (no parent url and no base reference)
        that does not already carry a file: scheme is expanded to an
        absolute file:// url, with ~user expansion and Windows drive
        letters ("c:\\...") rewritten to the /c|/... form.
        """
        UrlData.__init__(self,
                         urlName,
                         recursionLevel,
                         parentName=parentName,
                         baseRef=baseRef, line=line, name=name)
        # equivalent to the old re.compile("^file:").search(...), but
        # without compiling a regex on every construction
        if not parentName and not baseRef and \
           not self.urlName.startswith("file:"):
            self.urlName = os.path.expanduser(self.urlName)
            # matches a Windows drive letter prefix such as "c:"
            winre = re.compile("^[a-zA-Z]:")
            if winre.search(self.urlName):
                self.adjustWinPath()
            else:
                if self.urlName[0:1] != "/":
                    # make a relative path absolute
                    self.urlName = os.getcwd()+"/"+self.urlName
                # on Windows os.getcwd() starts with a drive letter,
                # so check again after prepending it
                if winre.search(self.urlName):
                    self.adjustWinPath()
            self.urlName = "file://"+self.urlName.replace("\\", "/")

    def buildUrl (self):
        """Build self.url from the parsed url tuple."""
        UrlData.buildUrl(self)
        # cut off parameter, query and fragment
        self.url = urlparse.urlunparse(self.urlTuple[:3] + ('','',''))

    def adjustWinPath (self):
        "c:\\windows ==> /c|\\windows"
        self.urlName = "/"+self.urlName[0]+"|"+self.urlName[2:]

    def isHtml (self):
        """Return 1 if the file looks parseable (html, opera
        bookmarks or plain text), else None."""
        # guess by extension
        for ro in extensions.values():
            if ro.search(self.url):
                return 1
        # try to read content (can fail, so catch error)
        try:
            # read the content once instead of once per pattern
            data = self.getContent()
            for ro in contents.values():
                if ro.search(data):
                    return 1
        except IOError:
            pass
        return None

    def parseUrl (self, config):
        """Dispatch to the parse_<key> method matching either the
        file extension or, failing that, the file content."""
        for key, ro in extensions.items():
            if ro.search(self.url):
                return getattr(self, "parse_"+key)(config)
        # read the content once instead of once per pattern
        data = self.getContent()
        for key, ro in contents.items():
            if ro.search(data):
                return getattr(self, "parse_"+key)(config)

    def parse_html (self, config):
        """Parse as HTML with the generic UrlData parser."""
        UrlData.parseUrl(self, config)

    def parse_opera (self, config):
        """Parse an opera bookmark file: NAME= lines label the
        following URL= line; each found url is appended to config."""
        name = ""
        lineno = 0
        for line in self.getContent().splitlines():
            lineno += 1
            line = line.strip()
            if line.startswith("NAME="):
                name = line[5:]
            elif line.startswith("URL="):
                url = line[4:]
                if url:
                    config.appendUrl(linkcheck.UrlData.GetUrlDataFrom(url,
                          self.recursionLevel+1, self.url, None, lineno, name))
                # a NAME= applies only to the next URL= line
                name = ""

    def parse_text (self, config):
        """Scan plain text line by line for urls matching _url_re
        and append each one to config."""
        lineno = 0
        for line in self.getContent().splitlines():
            lineno += 1
            i = 0
            while 1:
                mo = _url_re.search(line, i)
                if not mo: break
                config.appendUrl(linkcheck.UrlData.GetUrlDataFrom(mo.group(),
                      self.recursionLevel+1, self.url, None, lineno, ""))
                # continue scanning after the match just found
                i = mo.end()

        return
|
|
|
|
|
|