mirror of
https://github.com/Hopiu/linkchecker.git
synced 2026-05-13 09:03:11 +00:00
Make sure login form fetching uses a timeout
Also resolve an XXX comment about the User-Agent header (which is configured in new_request_session), but add a couple of XXX comments about using proxy and possibly disabling TLS certificate checking.
This commit is contained in:
parent
639ba0dba2
commit
b0435b3d47
1 changed file with 6 additions and 5 deletions
|
|
@ -79,11 +79,12 @@ class Aggregate:
|
|||
if not user and not password:
|
||||
raise LinkCheckerError(
|
||||
"loginurl is configured but neither user nor password are set")
|
||||
session = requests.Session()
|
||||
# XXX user-agent header
|
||||
# XXX timeout
|
||||
session = new_request_session(self.config, self.cookies)
|
||||
log.debug(LOG_CHECK, "Getting login form %s", url)
|
||||
response = session.get(url)
|
||||
kwargs = dict(timeout=self.config["timeout"])
|
||||
# XXX: proxy? sslverify? can we reuse HttpUrl.get_request_kwargs()
|
||||
# somehow?
|
||||
response = session.get(url, **kwargs)
|
||||
response.raise_for_status()
|
||||
cgiuser = self.config["loginuserfield"] if user else None
|
||||
cgipassword = self.config["loginpasswordfield"] if password else None
|
||||
|
|
@ -98,7 +99,7 @@ class Aggregate:
|
|||
form.data[key] = value
|
||||
formurl = urllib.parse.urljoin(url, form.url)
|
||||
log.debug(LOG_CHECK, "Posting login data to %s", formurl)
|
||||
response = session.post(formurl, data=form.data)
|
||||
response = session.post(formurl, data=form.data, **kwargs)
|
||||
response.raise_for_status()
|
||||
self.cookies = session.cookies
|
||||
if len(self.cookies) == 0:
|
||||
|
|
|
|||
Loading…
Reference in a new issue