#--depends-on commands
#--depends-on config
#--require-config google-api-key
import datetime, re, urllib.parse
from src import EventManager, ModuleManager, utils
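# youtube.com (optionally www. or m.) and youtu.be links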
REGEX_YOUTUBE = re.compile(
    r"https?://(?:www\.|m\.)?(?:youtu\.be/|youtube\.com/)\S+", re.I)
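# ISO 8601 durations as returned by the API, e.g. "PT1H2M3S"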
REGEX_ISO8601 = re.compile(r"PT(\d+H)?(\d+M)?(\d+S)?", re.I)
URL_YOUTUBESEARCH = "https://www.googleapis.com/youtube/v3/search"
URL_YOUTUBEVIDEO = "https://www.googleapis.com/youtube/v3/videos"
URL_YOUTUBEPLAYLIST = "https://www.googleapis.com/youtube/v3/playlists"
URL_YOUTUBESHORT = "https://youtu.be/%s"
URL_VIDEO = "https://www.youtube.com/watch?v=%s"
URL_PLAYLIST = "https://www.youtube.com/playlist?list=%s"
ARROW_UP = "↑"
ARROW_DOWN = "↓"
@utils.export("channelset", utils.BoolSetting("auto-youtube",
    "Disable/Enable automatically getting info from youtube URLs"))
@utils.export("channelset", utils.BoolSetting("youtube-safesearch",
    "Turn safe search off/on"))
class Module(ModuleManager.BaseModule):
    def on_load(self):
        self.exports.add("search-youtube", self._search_youtube)
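    # fetch one video from the Data API; `part` picks which sections
    # (snippet, statistics, contentDetails) are returned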
    def get_video_page(self, video_id, part):
        return utils.http.request(URL_YOUTUBEVIDEO, get_params={"part": part,
            "id": video_id, "key": self.bot.config["google-api-key"]},
            json=True)
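    # format a count with thousand separators; returns None when the
    # statistic is absent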
    def _number(self, n):
        if n:
            return "{:,}".format(int(n))
    def video_details(self, video_id):
        snippet = self.get_video_page(video_id, "snippet")
        if snippet.data["items"]:
            snippet = snippet.data["items"][0]["snippet"]
            statistics = self.get_video_page(video_id, "statistics").data[
                "items"][0]["statistics"]
            content = self.get_video_page(video_id, "contentDetails").data[
                "items"][0]["contentDetails"]
            video_uploaded_at = utils.iso8601_parse(snippet["publishedAt"],
                microseconds=True)
            video_uploaded_at = utils.to_pretty_time(
                (utils.datetime_utcnow()-video_uploaded_at).total_seconds(),
                max_units=2)
            video_uploader = snippet["channelTitle"]
            video_title = utils.irc.bold(snippet["title"])
            video_views = self._number(statistics["viewCount"])
            video_likes = self._number(statistics.get("likeCount"))
            video_dislikes = self._number(statistics.get("dislikeCount"))
            video_duration = content["duration"]
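            # like/dislike counts can be absent (ratings hidden), so only
            # show them when both are present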
video_opinions = ""
|
|
|
|
if video_likes and video_dislikes:
|
2019-05-19 20:40:48 +00:00
|
|
|
likes = utils.irc.color("%s%s" % (video_likes, ARROW_UP),
|
2019-05-19 20:39:35 +00:00
|
|
|
utils.consts.GREEN)
|
2019-05-19 20:40:48 +00:00
|
|
|
dislikes = utils.irc.color("%s%s" %
|
|
|
|
(ARROW_DOWN, video_dislikes), utils.consts.RED)
|
2019-05-19 20:39:35 +00:00
|
|
|
video_opinions = " (%s%s)" % (likes, dislikes)
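            # convert the ISO 8601 duration (PT#H#M#S) to [HH:]MM:SS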
            match = re.match(REGEX_ISO8601, video_duration)
            video_duration = ""
            video_duration += ("%s:" % match.group(1)[:-1].zfill(2)
                ) if match.group(1) else ""
            video_duration += ("%s:" % match.group(2)[:-1].zfill(2)
                ) if match.group(2) else "00:"
            video_duration += ("%s" % match.group(3)[:-1].zfill(2)
                ) if match.group(3) else "00"
            url = URL_YOUTUBESHORT % video_id
            return "%s (%s) uploaded by %s (%s ago), %s views%s" % (
                video_title, video_duration, video_uploader, video_uploaded_at,
                video_views, video_opinions), url
        return None
    def get_playlist_page(self, playlist_id, part):
        return utils.http.request(URL_YOUTUBEPLAYLIST, get_params={
            "part": part, "id": playlist_id,
            "key": self.bot.config["google-api-key"]}, json=True)
    def playlist_details(self, playlist_id):
        snippet = self.get_playlist_page(playlist_id, "snippet")
        if snippet.data["items"]:
            snippet = snippet.data["items"][0]["snippet"]

            content = self.get_playlist_page(playlist_id, "contentDetails")
            count = content.data["items"][0]["contentDetails"]["itemCount"]
return "%s - %s (%s %s)" % (snippet["channelTitle"],
|
2019-10-18 15:40:31 +00:00
|
|
|
snippet["title"], count, "video" if count == 1 else "videos"
|
|
|
|
), URL_PLAYLIST % playlist_id
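    # work out what a YouTube URL points at: youtu.be short links, /watch?v=,
    # /embed/ and /playlist?list= are handled; anything else returns None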
    def _from_url(self, url):
        parsed = urllib.parse.urlparse(url)
        query = urllib.parse.parse_qs(parsed.query)

        if parsed.hostname == "youtu.be" and parsed.path:
            return self.video_details(parsed.path[1:])
        elif parsed.path == "/watch" and "v" in query:
            return self.video_details(query["v"][0])
        elif parsed.path.startswith("/embed/"):
            return self.video_details(parsed.path.split("/embed/", 1)[1])
        elif parsed.path == "/playlist" and "list" in query:
            return self.playlist_details(query["list"][0])
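    # exported as "search-youtube" (see on_load) so other modules can look up
    # videos; returns a short youtu.be URL for the top result, or None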
    def _search_youtube(self, query):
        video_id = ""

        search_page = utils.http.request(URL_YOUTUBESEARCH,
            get_params={"q": query, "part": "snippet",
            "maxResults": "1", "type": "video",
            "key": self.bot.config["google-api-key"]},
            json=True)

        if search_page:
            if search_page.data["pageInfo"]["totalResults"] > 0:
                video_id = search_page.data["items"][0]["id"]["videoId"]
                return URL_YOUTUBESHORT % video_id
@utils.hook("received.command.yt", alias_of="youtube")
|
|
|
|
@utils.hook("received.command.youtube")
|
2016-03-29 11:56:58 +00:00
|
|
|
def yt(self, event):
|
2018-09-26 17:27:17 +00:00
|
|
|
"""
|
2018-09-30 16:29:09 +00:00
|
|
|
:help: Find a video on youtube
|
|
|
|
:usage: [query/URL]
|
2018-09-26 17:27:17 +00:00
|
|
|
"""
        url = None
        search = None
        if event["args"]:
            url_match = re.match(REGEX_YOUTUBE, event["args"])
            if url_match:
                url = event["args"]
            else:
                search = event["args"]
        else:
            url = event["target"].buffer.find(REGEX_YOUTUBE)
            url = utils.http.url_sanitise(url.match) if url else None
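        # remember whether the URL came from the user/backlog - the short
        # link is only appended when the video was found via search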
        from_url = url is not None
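        # no URL given or found: search for the query instead, honouring the
        # channel's safesearch setting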
        if not url and search:
            safe_setting = event["target"].get_setting(
                "youtube-safesearch", True)
            safe = "moderate" if safe_setting else "none"

            search_page = utils.http.request(URL_YOUTUBESEARCH,
                get_params={"q": search, "part": "snippet", "maxResults": "1",
                "type": "video", "key": self.bot.config["google-api-key"],
                "safeSearch": safe}, json=True)
            if search_page:
                if search_page.data["pageInfo"]["totalResults"] > 0:
                    url = URL_VIDEO % search_page.data[
                        "items"][0]["id"]["videoId"]
                else:
                    raise utils.EventError("No videos found")
            else:
                raise utils.EventsResultsError()
        if url:
            out = self._from_url(url)
            if out is not None:
                out, short_url = out
                if not from_url:
                    out = "%s %s" % (out, short_url)
                event["stdout"].write(out)
            else:
                raise utils.EventsResultsError()
        else:
            event["stderr"].write("No search phrase provided")
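    # passively announce video/playlist details for YouTube URLs posted in
    # channels that have auto-youtube enabled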
@utils.hook("command.regex")
|
|
|
|
@utils.kwarg("ignore_action", False)
|
|
|
|
@utils.kwarg("command", "youtube")
|
|
|
|
@utils.kwarg("pattern", REGEX_YOUTUBE)
|
2016-03-29 11:56:58 +00:00
|
|
|
def channel_message(self, event):
|
2019-05-18 17:35:47 +00:00
|
|
|
if event["target"].get_setting("auto-youtube", False):
|
2019-09-03 11:05:22 +00:00
|
|
|
url = utils.http.url_sanitise(event["match"].group(0))
|
|
|
|
out = self._from_url(url)
|
2019-07-26 14:19:24 +00:00
|
|
|
if not out == None:
|
2019-10-08 10:39:14 +00:00
|
|
|
out, short_url = out
|
2019-07-26 14:19:24 +00:00
|
|
|
event.eat()
|
|
|
|
event["stdout"].write(out)
|