# src/invidious/channels.cr
# Row mapping for the `channels` table: one record per channel that is
# tracked by the instance. See get_channel / fetch_channel below for the
# code that inserts and refreshes these rows.
class InvidiousChannel
add_mapping({
id: String,      # channel UCID
author: String,  # channel display name
updated: Time,   # when this row was last refreshed
})
end
# Row mapping for the `channel_videos` table: one record per known upload
# of a tracked channel. `updated` is when this row was last refreshed,
# not when the video itself changed.
class ChannelVideo
  add_mapping({
    id:        String, # video ID
    title:     String,
    published: Time,
    updated:   Time,
    ucid:      String, # owning channel UCID
    author:    String, # owning channel display name
    # Rows written before this column existed have no value, hence the
    # explicit default of 0.
    length_seconds: {
      type:    Int32,
      default: 0,
    },
  })
end
# Returns the InvidiousChannel row for `id`, fetching it from YouTube and
# upserting it into the `channels` table when it is missing or stale.
#
# id              - channel UCID
# db              - database connection
# refresh         - when true, re-fetch channels last updated more than
#                   10 minutes ago
# pull_all_videos - forwarded to fetch_channel (full backfill vs. only the
#                   most recent page of uploads)
def get_channel(id, db, refresh = true, pull_all_videos = true)
  client = make_client(YT_URL)

  if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
    channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)

    if refresh && Time.now - channel.updated > 10.minutes
      channel = fetch_channel(id, client, db, pull_all_videos)
      channel_array = channel.to_a
      args = arg_array(channel_array)

      db.exec("INSERT INTO channels VALUES (#{args}) \
      ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", channel_array)
    end
  else
    # First time we see this channel: fetch and insert unconditionally.
    channel = fetch_channel(id, client, db, pull_all_videos)
    channel_array = channel.to_a
    args = arg_array(channel_array)

    db.exec("INSERT INTO channels VALUES (#{args})", channel_array)
  end

  return channel
end
# Scrapes a channel's uploads from YouTube and syncs them into the
# `channel_videos` table, queueing notifications for subscribed users.
#
# ucid            - channel UCID
# client          - HTTP client pointed at YT_URL
# db              - database connection
# pull_all_videos - when false, only the RSS feed (latest ~15 entries) plus
#                   one scraped page is used; when true, every page of the
#                   channel's uploads is walked and deleted videos are
#                   pruned from the table.
#
# Raises if the channel's RSS feed has no title (deleted/invalid channel).
# Returns a fresh InvidiousChannel (not yet persisted by this function).
def fetch_channel(ucid, client, db, pull_all_videos = true)
  rss = client.get("/feeds/videos.xml?channel_id=#{ucid}").body
  rss = XML.parse_html(rss)

  author = rss.xpath_node(%q(//feed/title))
  if !author
    raise "Deleted or invalid channel"
  end
  author = author.content

  # Auto-generated channels
  # https://support.google.com/youtube/answer/2579942
  if author.ends_with?(" - Topic") ||
     {"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
    auto_generated = true
  end

  if !pull_all_videos
    # One scraped page is fetched only to recover video lengths, which the
    # RSS feed does not provide.
    url = produce_channel_videos_url(ucid, 1, auto_generated: auto_generated)
    response = client.get(url)
    json = JSON.parse(response.body)

    if json["content_html"]? && !json["content_html"].as_s.empty?
      document = XML.parse_html(json["content_html"].as_s)
      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

      if auto_generated
        videos = extract_videos(nodeset)
      else
        videos = extract_videos(nodeset, ucid)
        videos.each { |video| video.ucid = ucid }
        videos.each { |video| video.author = author }
      end
    end

    videos ||= [] of ChannelVideo

    rss.xpath_nodes("//feed/entry").each do |entry|
      video_id = entry.xpath_node("videoid").not_nil!.content
      title = entry.xpath_node("title").not_nil!.content
      published = Time.parse(entry.xpath_node("published").not_nil!.content, "%FT%X%z", Time::Location.local)
      updated = Time.parse(entry.xpath_node("updated").not_nil!.content, "%FT%X%z", Time::Location.local)
      author = entry.xpath_node("author/name").not_nil!.content
      ucid = entry.xpath_node("channelid").not_nil!.content

      # Match the RSS entry against the scraped page to fill in the length.
      length_seconds = videos.select { |video| video.id == video_id }[0]?.try &.length_seconds
      length_seconds ||= 0

      video = ChannelVideo.new(video_id, title, published, Time.now, ucid, author, length_seconds)

      # Notify subscribers that have not yet seen this video.
      db.exec("UPDATE users SET notifications = notifications || $1 \
      WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, ucid)

      video_array = video.to_a
      args = arg_array(video_array)

      db.exec("INSERT INTO channel_videos VALUES (#{args}) \
      ON CONFLICT (id) DO UPDATE SET title = $2, published = $3, \
      updated = $4, ucid = $5, author = $6, length_seconds = $7", video_array)
    end
  else
    # Full backfill: walk every page of uploads, collecting seen IDs so
    # videos removed from the channel can be pruned afterwards.
    page = 1
    ids = [] of String

    loop do
      url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated)
      response = client.get(url)
      json = JSON.parse(response.body)

      if json["content_html"]? && !json["content_html"].as_s.empty?
        document = XML.parse_html(json["content_html"].as_s)
        nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
      else
        break
      end

      if auto_generated
        videos = extract_videos(nodeset)
      else
        videos = extract_videos(nodeset, ucid)
        videos.each { |video| video.ucid = ucid }
        videos.each { |video| video.author = author }
      end

      count = nodeset.size
      videos = videos.map { |video| ChannelVideo.new(video.id, video.title, video.published, Time.now, video.ucid, video.author, video.length_seconds) }

      videos.each do |video|
        ids << video.id

        # FIXME: Red videos don't provide published date, so the best we can do is ignore them
        if Time.now - video.published > 1.minute
          db.exec("UPDATE users SET notifications = notifications || $1 \
          WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, video.ucid)

          video_array = video.to_a
          args = arg_array(video_array)

          db.exec("INSERT INTO channel_videos VALUES (#{args}) ON CONFLICT (id) DO UPDATE SET title = $2, \
          published = $3, updated = $4, ucid = $5, author = $6, length_seconds = $7", video_array)
        end
      end

      # A full page holds 30 items; fewer means we reached the last page.
      if count < 30
        break
      end

      page += 1
    end

    # When a video is deleted from a channel, we find and remove it here
    db.exec("DELETE FROM channel_videos * WHERE NOT id = ANY ('{#{ids.map { |id| %("#{id}") }.join(",")}}') AND ucid = $1", ucid)
  end

  channel = InvidiousChannel.new(ucid, author, Time.now)

  return channel
end
# Builds the /browse_ajax URL that returns one page of a channel's uploads.
#
# ucid           - channel UCID
# page           - 1-based page number
# auto_generated - auto-generated channels are paged by a month-based
#                  timestamp instead of a page number (and use a different
#                  "switch" byte in the token)
# sort_by        - "newest" (default), "popular" or "oldest"
#
# The continuation token is a hand-assembled protobuf message: each string
# chunk is length-prefixed with a single raw byte, then the whole message
# is base64- and URL-encoded. NOTE(review): this caps ucid/meta at 127
# bytes per varint byte — fine for UCIDs, presumably intentional.
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest")
  if auto_generated
    # Auto-generated channels page by month, counting back from a fixed
    # seed date rolled forward past "now".
    seed = Time.unix(1525757349)

    until seed >= Time.now
      seed += 1.month
    end
    timestamp = seed - (page - 1).months

    page = "#{timestamp.to_unix}"
    switch = "\x36"
  else
    page = "#{page}"
    switch = "\x00"
  end

  meta = "\x12\x06videos"
  meta += "\x30\x02"
  meta += "\x38\x01"
  meta += "\x60\x01"
  meta += "\x6a\x00"
  meta += "\xb8\x01\x00"
  meta += "\x20#{switch}"
  meta += "\x7a"
  meta += page.size.to_u8.unsafe_chr
  meta += page

  case sort_by
  when "newest"
    # Empty tags can be omitted
    # meta += "\x18\x00"
  when "popular"
    meta += "\x18\x01"
  when "oldest"
    meta += "\x18\x02"
  end

  meta = Base64.urlsafe_encode(meta)
  meta = URI.escape(meta)

  continuation = "\x12"
  continuation += ucid.size.to_u8.unsafe_chr
  continuation += ucid
  continuation += "\x1a"
  continuation += meta.size.to_u8.unsafe_chr
  continuation += meta

  continuation = continuation.size.to_u8.unsafe_chr + continuation
  continuation = "\xe2\xa9\x85\xb2\x02" + continuation

  continuation = Base64.urlsafe_encode(continuation)
  continuation = URI.escape(continuation)

  url = "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"

  return url
end
# Scrapes a channel's /about page and returns a tuple of
# {author, ucid, auto_generated, sub_count}.
#
# ucid - channel UCID, or a legacy /user/ name (the /user/ form is tried
#        when the /channel/ form 404s)
#
# Raises a String message when the channel does not exist or its info
# cannot be parsed.
def get_about_info(ucid)
  client = make_client(YT_URL)
  about = client.get("/channel/#{ucid}/about?disable_polymer=1&gl=US&hl=en")

  # Fall back to the legacy username route.
  if about.status_code == 404
    about = client.get("/user/#{ucid}/about?disable_polymer=1&gl=US&hl=en")
  end

  about = XML.parse_html(about.body)

  if about.xpath_node(%q(//div[contains(@class, "channel-empty-message")]))
    error_message = "This channel does not exist."
    raise error_message
  end

  if about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).try &.content.empty?
    error_message = about.xpath_node(%q(//div[@class="yt-alert-content"])).try &.content.strip
    error_message ||= "Could not get channel info."
    raise error_message
  end

  sub_count = about.xpath_node(%q(//span[contains(text(), "subscribers")]))
  if sub_count
    # Strip the ", subscribers" characters, leaving only the digits.
    sub_count = sub_count.content.delete(", subscribers").to_i?
  end
  sub_count ||= 0

  author = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!.content

  # The canonical link gives us the real UCID even when a /user/ name was
  # passed in.
  ucid = about.xpath_node(%q(//link[@rel="canonical"])).not_nil!["href"].split("/")[-1]

  # Auto-generated channels
  # https://support.google.com/youtube/answer/2579942
  auto_generated = false
  if about.xpath_node(%q(//ul[@class="about-custom-links"]/li/a[@title="Auto-generated by YouTube"])) ||
     about.xpath_node(%q(//span[@class="qualified-channel-title-badge"]/span[@title="Auto-generated by YouTube"]))
    auto_generated = true
  end

  return {author, ucid, auto_generated, sub_count}
end
# Fetches two consecutive 30-item pages of a channel's uploads and returns
# {videos, count}, where `count` approximates how many items were seen
# (30 per page that still has a "load more" widget).
#
# ucid           - channel UCID
# page           - 1-based page number in 60-item units
# auto_generated - forwarded to produce_channel_videos_url
# sort_by        - forwarded to produce_channel_videos_url
def get_60_videos(ucid, page, auto_generated, sort_by = "newest")
  count = 0
  videos = [] of SearchVideo

  client = make_client(YT_URL)

  2.times do |i|
    # Pages in 60-item units map to raw 30-item pages 2p-1 and 2p.
    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
    response = client.get(url)
    json = JSON.parse(response.body)

    if json["content_html"]? && !json["content_html"].as_s.empty?
      document = XML.parse_html(json["content_html"].as_s)
      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

      # NOTE(review): when "load_more_widget_html" is absent, `try` yields
      # nil and `!nil` is true, so count is still bumped — confirm this is
      # the intended behavior.
      if !json["load_more_widget_html"]?.try &.as_s.empty?
        count += 30
      end

      if auto_generated
        videos += extract_videos(nodeset)
      else
        videos += extract_videos(nodeset, ucid)
      end
    else
      break
    end
  end

  return videos, count
end