mirror of
https://github.com/Hopiu/linkchecker.git
synced 2026-04-07 00:00:58 +00:00
support for ftp proxies
git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@569 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
parent
d83f268bb0
commit
07da7a92b6
4 changed files with 51 additions and 28 deletions
|
|
@@ -1,3 +1,9 @@
|
|||
1.6.4:
|
||||
* Support for ftp proxies
|
||||
Changed files: linkcheck/FtpUrlData.py, linkcheck/HttpUrlData.py
|
||||
Added files: linkcheck/ProxyUrlData.py
|
||||
* Updated german translation
|
||||
|
||||
1.6.3:
|
||||
* Generate md5sum checksums for distributed files
|
||||
Changed files: Makefile
|
||||
|
|
|
|||
|
|
@@ -16,7 +16,9 @@
|
|||
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
||||
|
||||
import ftplib, linkcheck
|
||||
from UrlData import UrlData,ExcList
|
||||
from ProxyUrlData import ProxyUrlData
|
||||
from HttpUrlData import HttpUrlData
|
||||
from UrlData import ExcList
|
||||
|
||||
ExcList.extend([
|
||||
ftplib.error_reply,
|
||||
|
|
@@ -25,14 +27,24 @@ ExcList.extend([
|
|||
ftplib.error_proto,
|
||||
])
|
||||
|
||||
class FtpUrlData (UrlData):
|
||||
class FtpUrlData (ProxyUrlData):
|
||||
"""
|
||||
Url link with ftp scheme.
|
||||
"""
|
||||
|
||||
def checkConnection (self):
|
||||
_proxy = self.config["proxy"].get(self.scheme)
|
||||
# XXX proxy support (we support http and ftp!)
|
||||
# proxy support (we support only http)
|
||||
self.setProxy(self.config["proxy"].get(self.scheme))
|
||||
if self.proxy:
|
||||
http = HttpUrlData(self.urlName,
|
||||
self.recursionLevel,
|
||||
self.config,
|
||||
self.parentName,
|
||||
self.baseRef,
|
||||
self.line,
|
||||
self.name)
|
||||
http.buildUrl
|
||||
return http.check()
|
||||
# no proxy
|
||||
_user, _password = self._getUserPassword()
|
||||
if _user is None or _password is None:
|
||||
raise linkcheck.error, linkcheck._("No user or password found")
|
||||
|
|
|
|||
|
|
@@ -19,18 +19,17 @@ import httplib, urlparse, sys, time, re
|
|||
import Config, StringUtil, robotparser, linkcheck
|
||||
if Config.DebugLevel > 0:
|
||||
robotparser.debug = 1
|
||||
from UrlData import UrlData
|
||||
from urllib import splittype, splithost, splituser, splitpasswd
|
||||
from ProxyUrlData import ProxyUrlData
|
||||
from debuglevels import *
|
||||
|
||||
_supported_encodings = ('gzip', 'x-gzip', 'deflate')
|
||||
|
||||
class HttpUrlData (UrlData):
|
||||
class HttpUrlData (ProxyUrlData):
|
||||
"Url link with http scheme"
|
||||
netscape_re = re.compile("Netscape-Enterprise/")
|
||||
|
||||
def buildUrl (self):
|
||||
UrlData.buildUrl(self)
|
||||
ProxyUrlData.buildUrl(self)
|
||||
if not self.urlTuple[2]:
|
||||
self.setWarning(linkcheck._("Path is empty"))
|
||||
self.urlTuple = (self.urlTuple[0], self.urlTuple[1], "/",
|
||||
|
|
@@ -82,9 +81,9 @@ class HttpUrlData (UrlData):
|
|||
| extension-code
|
||||
"""
|
||||
# set the proxy, so a 407 status after this is an error
|
||||
self._setProxy(self.config["proxy"].get(self.scheme))
|
||||
self.setProxy(self.config["proxy"].get(self.scheme))
|
||||
if self.proxy:
|
||||
self.setInfo(linkcheck._("Using HTTP Proxy %s")%`self.proxy`)
|
||||
self.setInfo(linkcheck._("Using Proxy %s")%`self.proxy`)
|
||||
self.headers = None
|
||||
self.auth = None
|
||||
self.cookies = []
|
||||
|
|
@@ -101,8 +100,8 @@ class HttpUrlData (UrlData):
|
|||
# proxy enforcement (overrides standard proxy)
|
||||
if status == 305 and self.headers:
|
||||
oldproxy = (self.proxy, self.proxyauth)
|
||||
self._setProxy(self.headers.getheader("Location"))
|
||||
self.setInfo(linkcheck._("Enforced HTTP Proxy %s")%`self.proxy`)
|
||||
self.setProxy(self.headers.getheader("Location"))
|
||||
self.setInfo(linkcheck._("Enforced Proxy %s")%`self.proxy`)
|
||||
status, statusText, self.headers = self._getHttpRequest()
|
||||
self.proxy, self.proxyauth = oldproxy
|
||||
# follow redirections
|
||||
|
|
@@ -194,21 +193,6 @@ class HttpUrlData (UrlData):
|
|||
else:
|
||||
self.setValid("OK")
|
||||
|
||||
def _setProxy (self, proxy):
    """Remember the proxy for this connection.

    Stores the bare host[:port] part of *proxy* in self.proxy and, when
    the proxy URL embeds user[:password] credentials, a ready-to-send
    "Basic <base64>" value in self.proxyauth (otherwise None).
    """
    self.proxy = proxy
    self.proxyauth = None
    if not self.proxy:
        return
    # Force an explicit http:// scheme so splittype/splithost work.
    if self.proxy[:7].lower() != "http://":
        self.proxy = "http://" + self.proxy
    self.proxy = splithost(splittype(self.proxy)[1])[0]
    # splituser -> (user[:password] or None, host[:port])
    self.proxyauth, self.proxy = splituser(self.proxy)
    if self.proxyauth is None:
        return
    # Basic auth always needs the colon separator, even for an
    # empty password.
    if ":" not in self.proxyauth:
        self.proxyauth = self.proxyauth + ":"
    import base64
    self.proxyauth = "Basic " + base64.encodestring(self.proxyauth).strip()
|
||||
|
||||
def _getHttpRequest (self, method="HEAD"):
|
||||
"""Put request and return (status code, status text, mime object).
|
||||
host can be host:port format
|
||||
|
|
|
|||
21
linkcheck/ProxyUrlData.py
Normal file
21
linkcheck/ProxyUrlData.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
from UrlData import UrlData
from urllib import splittype, splithost, splituser, splitpasswd


class ProxyUrlData (UrlData):
    """UrlData subclass with proxy support.

    Schemes that can be checked through an HTTP proxy (http, ftp)
    derive from this class and call setProxy() before connecting.
    """

    def setProxy (self, proxy):
        """Parse *proxy* and remember it for this connection.

        Stores the bare host[:port] part in self.proxy and, when the
        proxy URL embeds user[:password] credentials, a ready-to-send
        "Basic <base64>" value in self.proxyauth (otherwise None).
        """
        self.proxy = proxy
        self.proxyauth = None
        if not self.proxy:
            return
        # Force an explicit http:// scheme so splittype/splithost work.
        if self.proxy[:7].lower() != "http://":
            self.proxy = "http://" + self.proxy
        self.proxy = splittype(self.proxy)[1]
        self.proxy = splithost(self.proxy)[0]
        # splituser -> (user[:password] or None, host[:port])
        self.proxyauth, self.proxy = splituser(self.proxy)
        if self.proxyauth is not None:
            # Basic auth always needs the colon separator, even for
            # an empty password.
            if ":" not in self.proxyauth:
                self.proxyauth += ":"
            import base64
            # encodestring MIME-wraps its output with a newline every
            # 76 chars and appends a final one; a bare .strip() would
            # leave embedded newlines for long credentials and corrupt
            # the Proxy-Authorization header, so join them all out.
            self.proxyauth = "".join(base64.encodestring(self.proxyauth).split())
            self.proxyauth = "Basic " + self.proxyauth
|
||||
|
||||
Loading…
Reference in a new issue