Change utils.http to use requests

parent 0b44788ac5
commit 68f5626189

4 changed files with 46 additions and 94 deletions
@@ -9,7 +9,7 @@ class Module(ModuleManager.BaseModule):
     def eval(self, event):
         try:
             page = utils.http.get_url(EVAL_URL,
-                post_params={"input": event["args"]},
+                post_data={"input": event["args"]},
                 method="POST",
                 soup=True)
         except socket.timeout:
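The hunk above renames the caller-side keyword from `post_params` to `post_data`: with requests underneath, a dict passed as body data is form-encoded by the library itself, so callers no longer pre-encode anything. A minimal sketch of that behaviour (the URL is a placeholder, not the module's real EVAL_URL):

```python
import requests

# requests form-encodes a dict passed as `data=` on its own, which is why
# the old post_params/urlencode step disappears from the caller.
# NOTE: https://example.com/eval is a stand-in URL for illustration only.
response = requests.request("POST", "https://example.com/eval",
    data={"input": "1 + 1"})
print(response.status_code, response.text)
```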
@@ -9,6 +9,15 @@ fn main() {
     });
 }
 """
+API_ARGS = {
+    "channel": "nightly",
+    "crateType": "bin",
+    "mode": "debug",
+    "tests": False,
+    "execute": True,
+    "target": "ast",
+    "backtrace": False
+}
 
 class Module(ModuleManager.BaseModule):
     _name = "Rust"
@@ -18,17 +27,11 @@ class Module(ModuleManager.BaseModule):
         :help: Evaluate a rust statement
         :usage: <statement>
         """
+        args = API_ARGS.copy()
+        args["code"] = FN_TEMPLATE % event["args"]
         try:
-            page = utils.http.get_url(EVAL_URL, post_data=json.dumps({
-                "code": FN_TEMPLATE % event["args"],
-                "channel": "nightly",
-                "crateType": "bin",
-                "mode": "debug",
-                "tests": False,
-                "execute": True,
-                "target": "ast",
-                "backtrace": False
-            }), method="POST", json=True)
+            page = utils.http.get_url(EVAL_URL, json_data=args,
+                method="POST", json=True)
         except socket.timeout:
             event["stderr"].write("%s: eval timed out" %
                 event["user"].nickname)
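The Rust eval module now keeps its playground arguments in a module-level `API_ARGS` dict, copies it per request, and adds only the `code` key, letting `get_url(..., json_data=args)` do the JSON serialisation that `json.dumps` handled before. The copy matters: writing `code` into the shared dict directly would leak one invocation's code into the next. A small sketch of the pattern (the `FN_TEMPLATE` body here is a stand-in, not the module's real template):

```python
# Stand-in template; the real FN_TEMPLATE wraps the statement in fn main().
FN_TEMPLATE = """fn main() {
    %s
}"""
API_ARGS = {"channel": "nightly", "crateType": "bin", "mode": "debug",
    "tests": False, "execute": True, "target": "ast", "backtrace": False}

args = API_ARGS.copy()
args["code"] = FN_TEMPLATE % 'println!("hi");'
assert "code" not in API_ARGS  # shared defaults stay clean between requests
```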
@@ -1,5 +1,7 @@
 beautifulsoup4
+python-telegram-bot
+requests
 scrypt
 suds-jurko
 twitter
@@ -1,102 +1,49 @@
-import re, traceback, urllib.error, urllib.parse, urllib.request
-import json, ssl
-import bs4
+import re, traceback, urllib.error, urllib.parse
+import json as _json
+import bs4, requests
 
 USER_AGENT = ("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 "
     "(KHTML, like Gecko) Chrome/49.0.2623.87 Safari/537.36")
 REGEX_HTTP = re.compile("https?://", re.I)
 
-def get_url(url, **kwargs):
+def get_url(url, method="GET", get_params={}, post_data=None, headers={},
+        json_data=None, code=False, json=False, soup=False, parser="lxml"):
+
     if not urllib.parse.urlparse(url).scheme:
         url = "http://%s" % url
-    url_parsed = urllib.parse.urlparse(url)
 
-    method = kwargs.get("method", "GET")
-    get_params = kwargs.get("get_params", "")
-    post_params = kwargs.get("post_params", None)
-    post_data = kwargs.get("post_data", None)
-    headers = kwargs.get("headers", {})
-    return_code = kwargs.get("code", False)
     if not "Accept-Language" in headers:
         headers["Accept-Language"] = "en-GB"
     if not "User-Agent" in headers:
         headers["User-Agent"] = USER_AGENT
 
-    if get_params:
-        get_params = "?%s" % urllib.parse.urlencode(get_params)
-    if post_params:
-        post_data = urllib.parse.urlencode(post_params)
+    response = requests.request(
+        method.upper(),
+        url,
+        headers=headers,
+        params=get_params,
+        data=post_data,
+        json=json_data
+    )
 
-    url = "%s%s" % (url, get_params)
-    try:
-        url.encode("latin-1")
-        if post_data:
-            post_data = post_data.encode("utf8")
-    except UnicodeEncodeError:
-        if return_code:
-            return 0, False
-        return False
-
-    request = urllib.request.Request(url, post_data)
-    request.add_header("Accept-Language", "en-US")
-    request.add_header("User-Agent", USER_AGENT)
-    for header, value in headers.items():
-        request.add_header(header, value)
-    request.method = method
-
-    try:
-        response = urllib.request.urlopen(request, timeout=5)
-    except urllib.error.HTTPError as e:
-        traceback.print_exc()
-        if return_code:
-            return e.code, False
-        return False
-    except urllib.error.URLError as e:
-        traceback.print_exc()
-        if kwargs.get("code"):
-            return -1, False
-        return False
-    except ssl.CertificateError as e:
-        traceback.print_exc()
-        if return_code:
-            return -1, False,
-        return False
-
-    response_content = response.read()
-    encoding = response.info().get_content_charset()
-    if kwargs.get("soup"):
-        soup = bs4.BeautifulSoup(response_content, kwargs.get("parser",
-            "lxml"))
-        if return_code:
+    if soup:
+        soup = bs4.BeautifulSoup(response.text, parser)
+        if code:
             return response.code, soup
         return soup
 
-    if not encoding:
-        soup = bs4.BeautifulSoup(response_content, kwargs.get("parser", "lxml"))
-        metas = soup.find_all("meta")
-        for meta in metas:
-            if "charset=" in meta.get("content", ""):
-                encoding = meta.get("content").split("charset=", 1)[1
-                    ].split(";", 1)[0]
-            elif meta.get("charset", ""):
-                encoding = meta.get("charset")
-            else:
-                continue
-            break
-        if not encoding:
-            for item in soup.contents:
-                if isinstance(item, bs4.Doctype):
-                    if item == "html":
-                        encoding = "utf8"
-                    else:
-                        encoding = "latin-1"
-                    break
-    response_content = response_content.decode(encoding or "utf8")
-    data = response_content
-    if kwargs.get("json") and data:
+    data = response.text
+    if json and data:
         try:
-            data = json.loads(response_content)
-        except json.decoder.JSONDecodeError:
+            data = _json.loads(data)
+        except _json.decoder.JSONDecodeError:
             traceback.print_exc()
             if code:
                 return 0, False
             return False
-    if kwargs.get("code"):
-        return response.code, data
-
+    if code:
+        return response.status_code, data
+    else:
+        return data
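Taken together, the rewrite collapses roughly a hundred lines of urllib plumbing (manual urlencoding, a latin-1 guard, separate HTTPError/URLError/CertificateError handlers, and meta-tag charset sniffing) into a single `requests.request()` call, since requests handles charset detection and exposes the decoded body as `response.text`. A hedged sketch of the three return shapes the new signature offers, assuming it runs inside utils/http.py itself (the example.com URLs are placeholders):

```python
# soup=True: parse the decoded body with bs4 and return the soup.
page = get_url("https://example.com", soup=True)

# json=True: run the body through json.loads, returning False on bad JSON.
data = get_url("https://example.com/api.json", json=True)

# code=True: also return the HTTP status, as a (status_code, body) pair.
status, body = get_url("https://example.com", code=True)
```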