mirror of
https://github.com/Hopiu/linkchecker.git
synced 2026-04-22 23:24:44 +00:00
Only read the maximum data size plus one, not the whole file.
This commit is contained in:
parent
67cec7b2c1
commit
06a25676c5
2 changed files with 6 additions and 2 deletions
|
|
@ -695,7 +695,9 @@ class HttpUrl (internpaturl.InternPatternUrl, proxysupport.ProxySupport, pooledc
|
|||
"""Read URL contents and store them in self._data.
|
||||
This way, the method can be called by other functions than
|
||||
read_content()"""
|
||||
data = response.read()
|
||||
data = response.read(self.MaxFilesizeBytes+1)
|
||||
if len(data) > self.MaxFilesizeBytes:
|
||||
raise LinkCheckerError(_("File size too large"))
|
||||
self._size = len(data)
|
||||
urls = self.aggregate.add_download_data(self.cache_content_key, data)
|
||||
self.warn_duplicate_content(urls)
|
||||
|
|
|
|||
|
|
@ -758,7 +758,9 @@ class UrlBase (object):
|
|||
Can be overridden in subclasses."""
|
||||
if self.size > self.MaxFilesizeBytes:
|
||||
raise LinkCheckerError(_("File size too large"))
|
||||
data = self.url_connection.read()
|
||||
data = self.url_connection.read(self.MaxFilesizeBytes+1)
|
||||
if len(data) > self.MaxFilesizeBytes:
|
||||
raise LinkCheckerError(_("File size too large"))
|
||||
if not self.is_local():
|
||||
urls = self.aggregate.add_download_data(self.cache_content_key, data)
|
||||
self.warn_duplicate_content(urls)
|
||||
|
|
|
|||
Loading…
Reference in a new issue