utils.http.Response.data should always be bytes
- add .decode and .soup
commit 2d21dfa229
parent 9eb51740b3
23 changed files with 127 additions and 149 deletions
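In short, utils.http.request no longer parses or decodes the body for you: Response.data stays raw bytes, and call sites pick a representation with the new Response.json(), Response.decode() and Response.soup() helpers. A minimal sketch of the new calling convention, using only names that appear in the diff below (the URL and keys are taken from the btc hunk; everything else is illustrative):

    # Sketch only: utils.http is this project's own wrapper, shown in the diff below.
    # Before this commit, the request call parsed the body itself:
    #     page = utils.http.request(url, json=True)
    #     value = page.data["USD"]            # .data held the parsed object
    #
    # After this commit, .data is always bytes and callers convert explicitly:
    response = utils.http.request("https://blockchain.info/ticker")
    parsed = response.json()         # parse the raw bytes as JSON
    text = response.decode("utf8")   # or decode to str with a chosen encoding
    soup = response.soup("lxml")     # or parse as HTML via BeautifulSoup
    value = parsed["USD"]["buy"]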
@@ -12,16 +12,15 @@ class Module(ModuleManager.BaseModule):
         :usage: [currency]
         """
         currency = (event["args"] or "USD").upper()
-        page = utils.http.request("https://blockchain.info/ticker",
-            json=True)
+        page = utils.http.request("https://blockchain.info/ticker").json()
         if page:
-            if currency in page.data:
-                conversion = page.data[currency]
+            if currency in page:
+                conversion = page[currency]
                 buy, sell = conversion["buy"], conversion["sell"]
                 event["stdout"].write("1 BTC = %.2f %s (buy) %.2f %s "
                     "(sell)" % (buy, currency, sell, currency))
             else:
                 event["stderr"].write("Unknown currency, available "
-                    "currencies: %s" % ", ".join(page.data.keys()))
+                    "currencies: %s" % ", ".join(page.keys()))
         else:
             raise utils.EventResultsError()

@@ -18,9 +18,9 @@ class Module(ModuleManager.BaseModule):
         if phrase:
             page = utils.http.request(URL_DDG, get_params={
                 "q": phrase, "format": "json", "no_html": "1",
-                "no_redirect": "1"}, json=True)
+                "no_redirect": "1"}).json()
 
-            if page and page.data["AbstractURL"]:
-                event["stdout"].write(page.data["AbstractURL"])
+            if page and page["AbstractURL"]:
+                event["stdout"].write(page["AbstractURL"])
             else:
                 event["stderr"].write("No results found")

@@ -21,6 +21,6 @@ class Module(ModuleManager.BaseModule):
 
         if page and page.data:
             event["stdout"].write("%s: %s" % (event["user"].nickname,
-                page.data.rstrip("\n")))
+                page.decode().rstrip("\n")))
         else:
             event["stderr"].write("%s: failed to eval" % event["user"].nickname)

@@ -171,7 +171,7 @@ class GitHub(object):
         self.log.debug("git.io shortening: %s" % url)
         try:
             page = utils.http.request("https://git.io", method="POST",
-                post_data={"url": url}, detect_encoding=False)
+                post_data={"url": url})
             return page.headers["Location"]
         except utils.http.HTTPTimeoutException:
             self.log.warn(
@@ -294,11 +294,11 @@ class GitHub(object):
             action_desc = "committed to %s" % number
 
         commits_url = data["pull_request"]["commits_url"]
-        commits = utils.http.request(commits_url, json=True)
+        commits = utils.http.request(commits_url).json()
         if commits:
             seen_before = False
             new_commits = []
-            for commit in commits.data:
+            for commit in commits:
                 if seen_before:
                     new_commits.append({"id": commit["sha"],
                         "message": commit["commit"]["message"]})

@@ -70,17 +70,18 @@ class Module(ModuleManager.BaseModule):
         headers = {}
         if not oauth2_token == None:
             headers["Authorization"] = "token %s" % oauth2_token
-        request = utils.http.Request(url, headers=headers, json=True)
+        request = utils.http.Request(url, headers=headers)
         return utils.http.request(request)
 
     def _commit(self, username, repository, commit):
         page = self._get(API_COMMIT_URL % (username, repository, commit))
         if page and page.code == 200:
+            page = page.json()
             repo = utils.irc.color("%s/%s" % (username, repository), COLOR_REPO)
-            sha = utils.irc.color(page.data["sha"][:8], COLOR_ID)
+            sha = utils.irc.color(page["sha"][:8], COLOR_ID)
             return "(%s@%s) %s - %s %s" % (repo, sha,
-                page.data["author"]["login"], page.data["commit"]["message"],
-                self._short_url(page.data["html_url"]))
+                page["author"]["login"], page["commit"]["message"],
+                self._short_url(page["html_url"]))
     def _parse_commit(self, target, ref):
         username, repository, commit = self._parse_ref(target, ref, "@")
         return self._commit(username, repository, commit)
@@ -116,21 +117,21 @@ class Module(ModuleManager.BaseModule):
     def _parse_issue(self, page, username, repository, number):
         repo = utils.irc.color("%s/%s" % (username, repository), COLOR_REPO)
         number = utils.irc.color("#%s" % number, COLOR_ID)
-        labels = [label["name"] for label in page.data["labels"]]
+        labels = [label["name"] for label in page["labels"]]
         labels_str = ""
         if labels:
             labels_str = "[%s] " % ", ".join(labels)
 
-        url = self._short_url(page.data["html_url"])
+        url = self._short_url(page["html_url"])
 
-        state = page.data["state"]
+        state = page["state"]
         if state == "open":
             state = utils.irc.color("open", COLOR_NEUTRAL)
         elif state == "closed":
             state = utils.irc.color("closed", COLOR_NEGATIVE)
 
         return "(%s issue%s, %s) %s %s%s" % (
-            repo, number, state, page.data["title"], labels_str, url)
+            repo, number, state, page["title"], labels_str, url)
     def _get_issue(self, username, repository, number):
         return self._get(API_ISSUE_URL % (username, repository, number))
 
@@ -147,21 +148,21 @@ class Module(ModuleManager.BaseModule):
 
         page = self._get_issue(username, repository, number)
         if page and page.code == 200:
-            self._parse_issue(page, username, repository, number)
+            self._parse_issue(page.json(), username, repository, number)
         else:
             event["stderr"].write("Could not find issue")
 
     def _parse_pull(self, page, username, repository, number):
         repo = utils.irc.color("%s/%s" % (username, repository), COLOR_REPO)
         number = utils.irc.color("#%s" % number, COLOR_ID)
-        branch_from = page.data["head"]["label"]
-        branch_to = page.data["base"]["label"]
-        added = self._added(page.data["additions"])
-        removed = self._removed(page.data["deletions"])
-        url = self._short_url(page.data["html_url"])
+        branch_from = page["head"]["label"]
+        branch_to = page["base"]["label"]
+        added = self._added(page["additions"])
+        removed = self._removed(page["deletions"])
+        url = self._short_url(page["html_url"])
 
-        state = page.data["state"]
-        if page.data["merged"]:
+        state = page["state"]
+        if page["merged"]:
             state = utils.irc.color("merged", COLOR_POSITIVE)
         elif state == "open":
             state = utils.irc.color("open", COLOR_NEUTRAL)
@@ -170,7 +171,7 @@ class Module(ModuleManager.BaseModule):
 
         return "(%s PR%s, %s) %s → %s [%s/%s] %s %s" % (
             repo, number, state, branch_from, branch_to, added, removed,
-            page.data["title"], url)
+            page["title"], url)
     def _get_pull(self, username, repository, number):
         return self._get(API_PULL_URL % (username, repository, number))
     @utils.hook("received.command.ghpull", min_args=1)
@@ -187,7 +188,7 @@ class Module(ModuleManager.BaseModule):
         page = self._get_pull(username, repository, number)
 
         if page and page.code == 200:
-            self._parse_pull(page, username, repository, number)
+            self._parse_pull(page.json(), username, repository, number)
         else:
             event["stderr"].write("Could not find pull request")
 
@@ -198,9 +199,11 @@ class Module(ModuleManager.BaseModule):
 
         page = self._get_issue(username, repository, number)
         if page and page.code == 200:
-            if "pull_request" in page.data:
+            page = page.json()
+            if "pull_request" in page:
                 pull = self._get_pull(username, repository, number)
-                return self._parse_pull(pull, username, repository, number)
+                return self._parse_pull(pull.json(), username, repository,
+                    number)
             else:
                 return self._parse_issue(page, username, repository, number)
         else:

@@ -27,13 +27,13 @@ class Module(ModuleManager.BaseModule):
         safe = "active" if safe_setting else "off"
 
         page = utils.http.request(URL_GOOGLESEARCH, get_params={
-            "q": phrase, "key": self.bot.config[
-                "google-api-key"], "cx": self.bot.config[
-                "google-search-id"], "prettyPrint": "true",
-            "num": 1, "gl": "gb", "safe": safe}, json=True)
+            "q": phrase, "prettyPrint": "true", "num": 1, "gl": "gb",
+            "key": self.bot.config["google-api-key"],
+            "cx": self.bot.config["google-search-id"],
+            "safe": safe}).json()
         if page:
-            if "items" in page.data and len(page.data["items"]):
-                item = page.data["items"][0]
+            if "items" in page and len(page["items"]):
+                item = page["items"][0]
                 link = item["link"]
                 title = utils.parse.line_normalise(item["title"])
                 event["stdout"].write(
@@ -54,10 +54,11 @@
         phrase = event["args"] or event["target"].buffer.get()
         if phrase:
             page = utils.http.request(URL_GOOGLESUGGEST, get_params={
-                "output": "json", "client": "hp", "gl": "gb", "q": phrase})
+                "output": "json", "client": "hp", "gl": "gb", "q": phrase}
+                ).json()
             if page:
                 # google gives us jsonp, so we need to unwrap it.
-                page = page.data.split("(", 1)[1][:-1]
+                page = page.split("(", 1)[1][:-1]
                 page = json.loads(page)
                 suggestions = page[1]
                 suggestions = [utils.http.strip_html(s[0]) for s in suggestions]

@@ -17,15 +17,13 @@
         :usage: <movie/tv title>
         """
         page = utils.http.request(URL_OMDB, get_params={
-            "t": event["args"],
-            "apikey": self.bot.config["omdbapi-api-key"]},
-            json=True)
+            "apikey": self.bot.config["omdbapi-api-key"],
+            "t": event["args"]}).json()
         if page:
-            if "Title" in page.data:
+            if "Title" in page:
                 event["stdout"].write("%s, %s (%s) %s (%s/10.0) %s" % (
-                    page.data["Title"], page.data["Year"], page.data["Runtime"],
-                    page.data["Plot"], page.data["imdbRating"],
-                    URL_IMDBTITLE % page.data["imdbID"]))
+                    page["Title"], page["Year"], page["Runtime"], page["Plot"],
+                    page["imdbRating"], URL_IMDBTITLE % page["imdbID"]))
             else:
                 event["stderr"].write("Title not found")
         else:

@@ -85,19 +85,17 @@
         :usage: <IP>
         :prefix: GeoIP
         """
-        page = utils.http.request(URL_GEOIP % event["args_split"][0],
-            json=True)
+        page = utils.http.request(URL_GEOIP % event["args_split"][0]).json()
         if page:
-            if page.data["status"] == "success":
-                data = page.data["query"]
-                data += " | Organisation: %s" % page.data["org"]
-                data += " | City: %s" % page.data["city"]
-                data += " | Region: %s (%s)" % (page.data["regionName"],
-                    page.data["countryCode"])
-                data += " | ISP: %s" % page.data["isp"]
-                data += " | Lon/Lat: %s/%s" % (page.data["lon"],
-                    page.data["lat"])
-                data += " | Timezone: %s" % page.data["timezone"]
+            if page["status"] == "success":
+                data = page["query"]
+                data += " | Organisation: %s" % page["org"]
+                data += " | City: %s" % page["city"]
+                data += " | Region: %s (%s)" % (
+                    page["regionName"], page["countryCode"])
+                data += " | ISP: %s" % page["isp"]
+                data += " | Lon/Lat: %s/%s" % (page["lon"], page["lat"])
+                data += " | Timezone: %s" % page["timezone"]
                 event["stdout"].write(data)
             else:
                 event["stderr"].write("No geoip data found")

@@ -14,11 +14,10 @@
         self.exports.add("get-location", self._get_location)
 
     def _get_location(self, s):
-        page = utils.http.request(URL_OPENCAGE, get_params={
-            "q": s, "key": self.bot.config["opencagedata-api-key"], "limit": "1"
-            }, json=True)
-        if page and page.data["results"]:
-            result = page.data["results"][0]
+        page = utils.http.request(URL_OPENCAGE, get_params={"limit": "1",
+            "q": s, "key": self.bot.config["opencagedata-api-key"]}).json()
+        if page and page["results"]:
+            result = page["results"][0]
             timezone = result["annotations"]["timezone"]["name"]
             lat = result["geometry"]["lat"]
             lon = result["geometry"]["lng"]

@@ -39,7 +39,7 @@
         if page.code == 200:
             event["stdout"].write("Importing...")
             self._load_thread = threading.Thread(target=self._load_loop,
-                args=[event["target"].id, page.data])
+                args=[event["target"].id, page.decode()])
             self._load_thread.daemon = True
             self._load_thread.start()
         else:

@@ -11,9 +11,9 @@ URL_RELAY_SEARCH_DETAILS = "https://metrics.torproject.org/rs.html#details/"
 
 def _get_relays_details(search):
     page = utils.http.request(
-        URL_ONIONOO_DETAILS, get_params={"search": search}, json=True)
-    if page and "relays" in page.data:
-        return page.data["relays"]
+        URL_ONIONOO_DETAILS, get_params={"search": search}).json()
+    if page and "relays" in page:
+        return page["relays"]
     raise utils.EventResultsError()
 
 def _format_relay_summary_message(relays, search):

@@ -67,7 +67,7 @@
                 # async url get failed
                 continue
 
-            feed = feedparser.parse(pages[url].data)
+            feed = feedparser.parse(pages[url].decode())
             feed_title = feed["feed"].get("title", None)
             max_ids = len(feed["entries"])*10
 
@@ -105,8 +105,7 @@
 
     def _get_entries(self, url, max: int=None):
        try:
-            data = utils.http.request(url)
-            feed = feedparser.parse(data.data)
+            feed = feedparser.parse(utils.http.request(url).data)
         except Exception as e:
             self.log.warn("failed to parse RSS %s", [url], exc_info=True)
             feed = None

@@ -57,10 +57,10 @@
         access_token = self.bot.config.get("bitly-api-key", None)
         if access_token:
             page = utils.http.request(URL_BITLYSHORTEN, get_params={
-                "access_token": access_token, "longUrl": url}, json=True)
+                "access_token": access_token, "longUrl": url}).json()
 
-            if page and page.data["data"]:
-                return page.data["data"]["url"]
+            if page["data"]:
+                return page["data"]["url"]
         return None
 
     def _find_url(self, target, args):

@@ -45,11 +45,11 @@
 
         page = utils.http.request(
             URL_SOUNDCLOUD_TRACK if has_query else URL_SOUNDCLOUD_RESOLVE,
-            get_params=get_params, json=True)
+            get_params=get_params).json()
 
         if page:
-            if len(page.data):
-                page = page.data[0] if has_query else page
+            if len(page):
+                page = page[0] if has_query else page
                 title = page["title"]
                 user = page["user"]["username"]
                 duration = time.strftime("%H:%M:%S", time.gmtime(page[
@@ -24,12 +24,11 @@
 
         page = utils.http.request(URL_TOKEN, method="POST",
             headers={"Authorization": "Basic %s" % bearer},
-            post_data={"grant_type": "client_credentials"},
-            json=True)
+            post_data={"grant_type": "client_credentials"}).json()
 
-        token = page.data["access_token"]
+        token = page["access_token"]
         self._token = token
-        self._token_expires = time.time()+page.data["expires_in"]
+        self._token_expires = time.time()+page["expires_in"]
         return token
 
     @utils.hook("received.command.sp", alias_of="spotify")
@@ -42,11 +41,10 @@
         token = self._get_token()
         page = utils.http.request(URL_SEARCH,
             get_params={"type": "track", "limit": 1, "q": event["args"]},
-            headers={"Authorization": "Bearer %s" % token},
-            json=True)
+            headers={"Authorization": "Bearer %s" % token}).json()
         if page:
-            if len(page.data["tracks"]["items"]):
-                item = page.data["tracks"]["items"][0]
+            if len(page["tracks"]["items"]):
+                item = page["tracks"]["items"][0]
                 title = item["name"]
                 artist_name = item["artists"][0]["name"]
                 url = item["external_urls"]["spotify"]

@@ -14,17 +14,18 @@
         :usage: <word> [type]
         """
         phrase = event["args_split"][0]
-        page = utils.http.request(URL_THESAURUS % (self.bot.config[
-            "bighugethesaurus-api-key"], phrase), json=True)
+        page = utils.http.request(URL_THESAURUS % (
+            self.bot.config["bighugethesaurus-api-key"], phrase))
         syn_ant = event["command"][:3]
         if page:
             if page.code == 404:
                 raise utils.EventError("Word not found")
+            page = page.json()
 
             if not len(event["args_split"]) > 1:
                 word_types = []
-                for word_type in page.data.keys():
-                    if syn_ant in page.data[word_type]:
+                for word_type in page.keys():
+                    if syn_ant in page[word_type]:
                         word_types.append(word_type)
                 if word_types:
                     word_types = sorted(word_types)
@@ -35,11 +36,11 @@
                     event["stderr"].write("No categories available")
             else:
                 category = event["args_split"][1].lower()
-                if category in page.data:
-                    if syn_ant in page.data[category]:
+                if category in page:
+                    if syn_ant in page[category]:
                         event["stdout"].write("%ss for %s: %s" % (
                             event["command"].title(), phrase, ", ".join(
-                                page.data[category][syn_ant])))
+                                page[category][syn_ant])))
                     else:
                         event["stderr"].write("No %ss for %s" % (
                             event["command"], phrase))

@@ -25,26 +25,26 @@
         page = utils.http.request(URL_TRAKT % username, headers={
             "Content-Type": "application/json",
             "trakt-api-version": "2", "trakt-api-key":
-            self.bot.config["trakt-api-key"]}, json=True,
-            code=True)
+            self.bot.config["trakt-api-key"]})
         if page:
             if page.code == 200:
-                type = page.data["type"]
+                page = page.json()
+                type = page["type"]
                 if type == "movie":
-                    title = page.data["movie"]["title"]
-                    year = page.data["movie"]["year"]
-                    slug = page.data["movie"]["ids"]["slug"]
+                    title = page["movie"]["title"]
+                    year = page["movie"]["year"]
+                    slug = page["movie"]["ids"]["slug"]
                     event["stdout"].write(
                         "%s is now watching %s (%s) %s" % (
                         username, title, year,
                         URL_TRAKTSLUG % ("movie", slug)))
                 elif type == "episode":
-                    season = page.data["episode"]["season"]
-                    episode_number = page.data["episode"]["number"]
-                    episode_title = page.data["episode"]["title"]
-                    show_title = page.data["show"]["title"]
-                    show_year = page.data["show"]["year"]
-                    slug = page.data["show"]["ids"]["slug"]
+                    season = page["episode"]["season"]
+                    episode_number = page["episode"]["number"]
+                    episode_title = page["episode"]["title"]
+                    show_title = page["show"]["title"]
+                    show_year = page["show"]["year"]
+                    slug = page["show"]["ids"]["slug"]
                     event["stdout"].write(
                         "%s is now watching %s s%se%s - %s %s" % (
                         username, show_title, str(season).zfill(2),

@@ -35,11 +35,11 @@
             phrase = phrase.split(" ", 1)[1]
 
         page = utils.http.request(URL_TRANSLATE, get_params={
-            "client": "gtx", "sl": source_language,
-            "tl": target_language, "dt": "t", "q": phrase})
+            "client": "gtx", "dt": "t", "q": phrase,
+            "sl": source_language, "tl": target_language})
 
         if page and not page.data.startswith(b"[null,null,"):
-            data = page.data.decode("utf8")
+            data = page.decode("utf8")
             while ",," in data:
                 data = data.replace(",,", ",null,")
             data = data.replace("[,", "[null,")

@@ -24,11 +24,11 @@
         term = " ".join(term)
 
         page = utils.http.request(URL_URBANDICTIONARY,
-            get_params={"term": term}, json=True)
+            get_params={"term": term}).json()
         if page:
-            if len(page.data["list"]):
-                if number > 0 and len(page.data["list"]) > number-1:
-                    definition = page.data["list"][number-1]
+            if len(page["list"]):
+                if number > 0 and len(page["list"]) > number-1:
+                    definition = page["list"][number-1]
                     event["stdout"].write("%s: %s" % (definition["word"],
                         definition["definition"].replace("\n", " ").replace(
                         "\r", "").replace("  ", " ")))

@@ -56,24 +56,24 @@
             args["lat"] = lat
             args["lon"] = lon
 
-        page = utils.http.request(URL_WEATHER, get_params=args, json=True)
+        page = utils.http.request(URL_WEATHER, get_params=args).json()
         if page:
-            if "weather" in page.data:
+            if "weather" in page:
                 if location_name:
                     location_str = location_name
                 else:
-                    location_parts = [page.data["name"]]
-                    if "country" in page.data["sys"]:
-                        location_parts.append(page.data["sys"]["country"])
+                    location_parts = [page["name"]]
+                    if "country" in page["sys"]:
+                        location_parts.append(page["sys"]["country"])
                     location_str = ", ".join(location_parts)
 
-                celsius = "%dC" % page.data["main"]["temp"]
-                fahrenheit = "%dF" % ((page.data["main"]["temp"]*(9/5))+32)
-                description = page.data["weather"][0]["description"].title()
-                humidity = "%s%%" % page.data["main"]["humidity"]
+                celsius = "%dC" % page["main"]["temp"]
+                fahrenheit = "%dF" % ((page["main"]["temp"]*(9/5))+32)
+                description = page["weather"][0]["description"].title()
+                humidity = "%s%%" % page["main"]["humidity"]
 
                 # wind speed is in metres per second - 3.6* for KMh
-                wind_speed = 3.6*page.data["wind"]["speed"]
+                wind_speed = 3.6*page["wind"]["speed"]
                 wind_speed_k = "%sKMh" % round(wind_speed, 1)
                 wind_speed_m = "%sMPh" % round(0.6214*wind_speed, 1)
 
@@ -20,13 +20,13 @@
             page = utils.http.request(URL_WA,
                 get_params={"i": event["args"],
                 "appid": self.bot.config["wolframalpha-api-key"],
-                "reinterpret": "true", "units": "metric"}, code=True)
+                "reinterpret": "true", "units": "metric"})
         except utils.http.HTTPTimeoutException:
             page = None
 
         if page:
             if page.code == 200:
-                event["stdout"].write("%s: %s" % (event["args"], page.data))
+                event["stdout"].write("%s: %s" % (event["args"], page.decode()))
             else:
                 event["stdout"].write("No results")
         else:

@@ -87,7 +87,7 @@
     def get_playlist_page(self, playlist_id):
         return utils.http.request(URL_YOUTUBEPLAYLIST, get_params={
             "part": "contentDetails,snippet", "id": playlist_id,
-            "key": self.bot.config["google-api-key"]}, json=True)
+            "key": self.bot.config["google-api-key"]}).json()
     def playlist_details(self, playlist_id):
         page = self.get_playlist_page(playlist_id)
         if page["items"]:

@@ -67,12 +67,10 @@ class Request(object):
     cookies: typing.Dict[str, str] = dataclasses.field(
         default_factory=dict)
 
-    json: bool = False
     json_body: bool = False
 
     allow_redirects: bool = True
     check_content_type: bool = True
-    detect_encoding: bool = True
     fallback_encoding: typing.Optional[str] = None
     content_type: typing.Optional[str] = None
     proxy: typing.Optional[str] = None
@@ -116,7 +114,7 @@ class Request(object):
         return None
 
 class Response(object):
-    def __init__(self, code: int, data: typing.Any, encoding: str,
+    def __init__(self, code: int, data: bytes, encoding: str,
             headers: typing.Dict[str, str], cookies: typing.Dict[str, str]):
         self.code = code
         self.data = data
@@ -124,8 +122,8 @@
         self.encoding = encoding
         self.headers = headers
         self.cookies = cookies
-    def decode(self) -> str:
-        return self.data
+    def decode(self, encoding="utf8") -> str:
+        return self.data.decode(encoding)
     def json(self) -> typing.Any:
         return _json.loads(self.data)
     def soup(self, parser: str="lxml") -> bs4.BeautifulSoup:
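Taken together, the Response changes amount to a bytes-first container with on-demand converters. A self-contained sketch of that pattern: the class body mirrors the hunks above, while the sample payload and the assertions at the end are invented purely for illustration.

    import json as _json
    import typing
    import bs4  # beautifulsoup4 + lxml, as used by .soup()

    class Response(object):
        def __init__(self, code: int, data: bytes, encoding: str,
                headers: typing.Dict[str, str], cookies: typing.Dict[str, str]):
            self.code = code
            self.data = data          # always raw bytes
            self.encoding = encoding
            self.headers = headers
            self.cookies = cookies

        def decode(self, encoding: str="utf8") -> str:
            # caller-chosen decoding of the raw body
            return self.data.decode(encoding)

        def json(self) -> typing.Any:
            # json.loads accepts bytes directly on Python 3.6+
            return _json.loads(self.data)

        def soup(self, parser: str="lxml") -> bs4.BeautifulSoup:
            # let BeautifulSoup handle the encoding of the raw bytes
            return bs4.BeautifulSoup(self.data, parser)

    # invented sample payload, just to show the conversions
    r = Response(200, b'{"ok": true}', "utf8", {}, {})
    assert r.json() == {"ok": True}
    assert r.decode() == '{"ok": true}'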
@@ -199,28 +197,12 @@ def _request(request_obj: Request) -> Response:
     else:
         encoding = "iso-8859-1"
 
-    if (request_obj.detect_encoding and
-            response.content_type and
+    if (response.content_type and
             response.content_type in SOUP_CONTENT_TYPES):
         souped = bs4.BeautifulSoup(response.data, "lxml")
-        encoding = _find_encoding(souped) or encoding
+        encoding = _find_encoding(response.data) or encoding
     response.encoding = encoding
-    def _decode_data():
-        return response.data.decode(encoding)
-
-    if request_obj.json and response.data:
-        data = _decode_data()
-        try:
-            response.data = _json.loads(data)
-            return response
-        except _json.decoder.JSONDecodeError as e:
-            raise HTTPParsingException(str(e), data)
-
-    if response.content_type in DECODE_CONTENT_TYPES:
-        response.data = _decode_data()
-        return response
-    else:
-        return response
+    return response
 
 class RequestManyException(Exception):
     pass