Use signal.alarm to Deadline utils.http.get_url and throw useful exceptions
parent be75f72356
commit 5b9ffe013d

1 changed file with 28 additions and 16 deletions
@@ -1,4 +1,4 @@
-import re, traceback, urllib.error, urllib.parse
+import re, signal, traceback, urllib.error, urllib.parse
 import json as _json
 import bs4, requests
 
@@ -8,6 +8,14 @@ REGEX_HTTP = re.compile("https?://", re.I)
 
 RESPONSE_MAX = (1024*1024)*100
 
+class HTTPException:
+    pass
+class HTTPTimeoutException(HTTPException):
+    pass
+class HTTPParsingException(HTTPException):
+    pass
+
+
 def get_url(url, method="GET", get_params={}, post_data=None, headers={},
         json_data=None, code=False, json=False, soup=False, parser="lxml"):
 
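A note on the new exception classes: as recorded in the hunk above they do not inherit from Exception, so raise HTTPTimeoutException() would fail with "TypeError: exceptions must derive from BaseException" at the moment the timeout fires. A minimal sketch of a raisable hierarchy, keeping the names from the diff (the Exception base is an assumed fix, not part of this commit):

# Deriving from Exception is what makes these classes usable with raise/except.
class HTTPException(Exception):
    pass

class HTTPTimeoutException(HTTPException):
    pass

class HTTPParsingException(HTTPException):
    pass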
@@ -19,6 +27,9 @@ def get_url(url, method="GET", get_params={}, post_data=None, headers={},
     if not "User-Agent" in headers:
         headers["User-Agent"] = USER_AGENT
 
+    signal.signal(signal.SIGALRM, lambda: raise TimeoutError())
+    signal.alarm(5)
+    try:
     response = requests.request(
         method.upper(),
         url,
@@ -29,6 +40,10 @@ def get_url(url, method="GET", get_params={}, post_data=None, headers={},
         stream=True
     )
     response_content = response.raw.read(RESPONSE_MAX, decode_content=True)
+    except TimeoutError:
+        raise HTTPTimeoutException()
+    finally:
+        signal.signal(signal.SIGALRM, signal.SIG_IGN)
 
     if soup:
         soup = bs4.BeautifulSoup(response_content, parser)
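The deadline itself comes from SIGALRM, but two details in the two hunks above would not run as written: a lambda body cannot contain a raise statement (that line is a SyntaxError), and a signal handler is called with two arguments (signum, frame). The following is a hedged sketch of the intended pattern, assuming the exception classes above, and is not the code from this commit: the handler function, the fetch_with_deadline helper name, and the use of requests.get are illustrative, and calling signal.alarm(0) plus restoring the previous handler are assumed cleanup steps. SIGALRM is also Unix-only and must be installed from the main thread.

import signal
import requests

def _timeout_handler(signum, frame):
    # Signal handlers receive (signum, frame); raising here unwinds into the try block.
    raise TimeoutError()

def fetch_with_deadline(url, seconds=5):
    previous = signal.signal(signal.SIGALRM, _timeout_handler)
    signal.alarm(seconds)  # arm a one-shot alarm
    try:
        response = requests.get(url, stream=True)
        return response.raw.read(1024 * 1024, decode_content=True)
    except TimeoutError:
        raise HTTPTimeoutException()
    finally:
        signal.alarm(0)  # cancel any pending alarm
        signal.signal(signal.SIGALRM, previous)  # restore the previous handler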
@@ -40,11 +55,8 @@ def get_url(url, method="GET", get_params={}, post_data=None, headers={},
     if json and data:
         try:
             data = _json.loads(data)
-        except _json.decoder.JSONDecodeError:
-            traceback.print_exc()
-            if code:
-                return 0, False
-            return False
+        except _json.decoder.JSONDecodeError as e:
+            raise HTTPParsingException(str(e))
 
     if code:
         return response.status_code, data
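The last hunk replaces the print-a-traceback-and-return-False handling of bad JSON with an HTTPParsingException carrying the decoder's message. A small sketch of that pattern, assuming the exception classes above; the parse_json helper name and the "from e" chaining are illustrative additions, not code from the commit:

import json as _json

def parse_json(data):
    try:
        return _json.loads(data)
    except _json.decoder.JSONDecodeError as e:
        # Surface the parse failure as a domain-level exception instead of printing a traceback.
        raise HTTPParsingException(str(e)) from e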