Change utils.http to use requests

parent 0b44788ac5
commit 68f5626189

4 changed files with 46 additions and 94 deletions
@@ -9,7 +9,7 @@ class Module(ModuleManager.BaseModule):
     def eval(self, event):
         try:
             page = utils.http.get_url(EVAL_URL,
-                post_params={"input": event["args"]},
+                post_data={"input": event["args"]},
                 method="POST",
                 soup=True)
         except socket.timeout:
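In the rewritten helper (final hunk below), post_data is handed to requests' data= parameter, which form-encodes a dict body, so the renamed argument keeps the old post_params behaviour. A minimal sketch of the equivalent direct requests call; the URL is a placeholder, not the module's EVAL_URL:

    import requests

    # A dict passed as data= is form-encoded and sent with
    # Content-Type: application/x-www-form-urlencoded.
    response = requests.request("POST", "https://example.com/eval",
        data={"input": "1 + 2"})
    print(response.status_code, response.text)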
@@ -9,6 +9,15 @@ fn main() {
     });
 }
 """
+API_ARGS = {
+    "channel": "nightly",
+    "crateType": "bin",
+    "mode": "debug",
+    "tests": False,
+    "execute": True,
+    "target": "ast",
+    "backtrace": False
+}
 
 class Module(ModuleManager.BaseModule):
     _name = "Rust"
@@ -18,17 +27,11 @@ class Module(ModuleManager.BaseModule):
         :help: Evaluate a rust statement
         :usage: <statement>
         """
+        args = API_ARGS.copy()
+        args["code"] = FN_TEMPLATE % event["args"]
         try:
-            page = utils.http.get_url(EVAL_URL, post_data=json.dumps({
-                "code": FN_TEMPLATE % event["args"],
-                "channel": "nightly",
-                "crateType": "bin",
-                "mode": "debug",
-                "tests": False,
-                "execute": True,
-                "target": "ast",
-                "backtrace": False
-            }), method="POST", json=True)
+            page = utils.http.get_url(EVAL_URL, json_data=args,
+                method="POST", json=True)
         except socket.timeout:
             event["stderr"].write("%s: eval timed out" %
                 event["user"].nickname)
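Here json_data flows through to requests' json= parameter, which serializes the dict and sets Content-Type: application/json, replacing the manual json.dumps call removed above. A sketch with a placeholder URL:

    import requests

    args = {"channel": "nightly", "crateType": "bin", "mode": "debug",
        "tests": False, "execute": True, "target": "ast",
        "backtrace": False, "code": 'fn main() { println!("hi"); }'}
    # json= serializes args to a JSON body; no json.dumps needed.
    response = requests.request("POST", "https://example.com/evaluate.json",
        json=args)
    print(response.json())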
@@ -1,5 +1,7 @@
 beautifulsoup4
 python-telegram-bot
+requests
 scrypt
 suds-jurko
 twitter
@@ -1,102 +1,49 @@
-import re, traceback, urllib.error, urllib.parse, urllib.request
-import json, ssl
-import bs4
+import re, traceback, urllib.error, urllib.parse
+import json as _json
+import bs4, requests
 
 USER_AGENT = ("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 "
     "(KHTML, like Gecko) Chrome/49.0.2623.87 Safari/537.36")
 REGEX_HTTP = re.compile("https?://", re.I)
 
-def get_url(url, **kwargs):
+def get_url(url, method="GET", get_params={}, post_data=None, headers={},
+        json_data=None, code=False, json=False, soup=False, parser="lxml"):
 
     if not urllib.parse.urlparse(url).scheme:
         url = "http://%s" % url
-    url_parsed = urllib.parse.urlparse(url)
 
-    method = kwargs.get("method", "GET")
-    get_params = kwargs.get("get_params", "")
-    post_params = kwargs.get("post_params", None)
-    post_data = kwargs.get("post_data", None)
-    headers = kwargs.get("headers", {})
-    return_code = kwargs.get("code", False)
+    if not "Accept-Language" in headers:
+        headers["Accept-Language"] = "en-GB"
+    if not "User-Agent" in headers:
+        headers["User-Agent"] = USER_AGENT
 
-    if get_params:
-        get_params = "?%s" % urllib.parse.urlencode(get_params)
-    if post_params:
-        post_data = urllib.parse.urlencode(post_params)
+    response = requests.request(
+        method.upper(),
+        url,
+        headers=headers,
+        params=get_params,
+        data=post_data,
+        json=json_data
+    )
 
-    url = "%s%s" % (url, get_params)
-    try:
-        url.encode("latin-1")
-        if post_data:
-            post_data = post_data.encode("utf8")
-    except UnicodeEncodeError:
-        if return_code:
-            return 0, False
-        return False
-
-    request = urllib.request.Request(url, post_data)
-    request.add_header("Accept-Language", "en-US")
-    request.add_header("User-Agent", USER_AGENT)
-    for header, value in headers.items():
-        request.add_header(header, value)
-    request.method = method
-
-    try:
-        response = urllib.request.urlopen(request, timeout=5)
-    except urllib.error.HTTPError as e:
-        traceback.print_exc()
-        if return_code:
-            return e.code, False
-        return False
-    except urllib.error.URLError as e:
-        traceback.print_exc()
-        if kwargs.get("code"):
-            return -1, False
-        return False
-    except ssl.CertificateError as e:
-        traceback.print_exc()
-        if return_code:
-            return -1, False,
-        return False
-
-    response_content = response.read()
-    encoding = response.info().get_content_charset()
-    if kwargs.get("soup"):
-        soup = bs4.BeautifulSoup(response_content, kwargs.get("parser",
-            "lxml"))
-        if return_code:
+    if soup:
+        soup = bs4.BeautifulSoup(response.text, parser)
+        if code:
             return response.code, soup
         return soup
 
-    if not encoding:
-        soup = bs4.BeautifulSoup(response_content, kwargs.get("parser", "lxml"))
-        metas = soup.find_all("meta")
-        for meta in metas:
-            if "charset=" in meta.get("content", ""):
-                encoding = meta.get("content").split("charset=", 1)[1
-                    ].split(";", 1)[0]
-            elif meta.get("charset", ""):
-                encoding = meta.get("charset")
-            else:
-                continue
-            break
-        if not encoding:
-            for item in soup.contents:
-                if isinstance(item, bs4.Doctype):
-                    if item == "html":
-                        encoding = "utf8"
-                    else:
-                        encoding = "latin-1"
-                    break
-    response_content = response_content.decode(encoding or "utf8")
-    data = response_content
-    if kwargs.get("json") and data:
+    data = response.text
+    if json and data:
         try:
-            data = json.loads(response_content)
-        except json.decoder.JSONDecodeError:
+            data = _json.loads(data)
+        except _json.decoder.JSONDecodeError:
             traceback.print_exc()
+            if code:
+                return 0, False
             return False
-    if kwargs.get("code"):
-        return response.code, data
+    if code:
+        return response.status_code, data
     else:
         return data
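For reference, a usage sketch of the rewritten get_url. The import path is an assumption based on the utils.http.get_url calls in the modules above, and the httpbin.org URLs are stand-ins:

    import utils  # assumed import path, as used by the modules above

    # GET with query parameters, response parsed as JSON.
    data = utils.http.get_url("https://httpbin.org/get",
        get_params={"q": "test"}, json=True)

    # POST with a JSON body; code=True returns (status_code, data).
    status, result = utils.http.get_url("https://httpbin.org/post",
        json_data={"input": "1 + 2"}, method="POST",
        code=True, json=True)

    # HTML response parsed into a BeautifulSoup object.
    soup = utils.http.get_url("https://httpbin.org/html", soup=True)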