2019-03-30 03:00:02 +05:30
|
|
|
struct SearchVideo
  # Serializes this video as an Atom `<entry>` element into the given XML
  # builder, mirroring the shape of YouTube's channel RSS feed entries.
  #
  # host_url       - base URL of this instance, used for all generated links
  # auto_generated - whether the parent channel is auto-generated; both
  #                  branches below currently emit the same author fields
  def to_xml(host_url, auto_generated, xml : XML::Builder)
    xml.element("entry") do
      xml.element("id") { xml.text "yt:video:#{self.id}" }
      xml.element("yt:videoId") { xml.text self.id }
      xml.element("yt:channelId") { xml.text self.ucid }
      xml.element("title") { xml.text self.title }
      xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{self.id}")

      xml.element("author") do
        if auto_generated
          xml.element("name") { xml.text self.author }
          xml.element("uri") { xml.text "#{host_url}/channel/#{self.ucid}" }
        else
          xml.element("name") { xml.text author }
          xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
        end
      end

      # XHTML body shown by feed readers: thumbnail linking to the watch page.
      xml.element("content", type: "xhtml") do
        xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
          xml.element("a", href: "#{host_url}/watch?v=#{self.id}") do
            xml.element("img", src: "#{host_url}/vi/#{self.id}/mqdefault.jpg")
          end
        end
      end

      xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }

      xml.element("media:group") do
        xml.element("media:title") { xml.text self.title }
        xml.element("media:thumbnail", url: "#{host_url}/vi/#{self.id}/mqdefault.jpg",
          width: "320", height: "180")
        xml.element("media:description") { xml.text html_to_content(self.description_html) }
      end

      xml.element("media:community") do
        xml.element("media:statistics", views: self.views)
      end
    end
  end

  # Convenience overload: with a builder, delegates to it; with none, builds
  # and returns a complete XML document string.
  def to_xml(host_url, auto_generated, xml : XML::Builder | Nil = nil)
    if xml
      to_xml(host_url, auto_generated, xml)
    else
      # BUG FIX: the block variable was previously named `json` while the
      # (nil) outer `xml` was passed to the recursive call, so the yielded
      # builder was never used and the nilable overload recursed on nil.
      XML.build do |xml|
        to_xml(host_url, auto_generated, xml)
      end
    end
  end

  # Writes this video as a JSON object (Invidious API `type: "video"` shape)
  # into the given builder.
  def to_json(locale, config, kemal_config, json : JSON::Builder)
    json.object do
      json.field "type", "video"
      json.field "title", self.title
      json.field "videoId", self.id

      json.field "author", self.author
      json.field "authorId", self.ucid
      json.field "authorUrl", "/channel/#{self.ucid}"

      json.field "videoThumbnails" do
        generate_thumbnails(json, self.id, config, kemal_config)
      end

      json.field "description", html_to_content(self.description_html)
      json.field "descriptionHtml", self.description_html

      json.field "viewCount", self.views
      json.field "published", self.published.to_unix
      json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
      json.field "lengthSeconds", self.length_seconds
      json.field "liveNow", self.live_now
      json.field "paid", self.paid
      json.field "premium", self.premium
    end
  end

  # Convenience overload: with a builder, delegates to it; with none, builds
  # and returns a complete JSON document string.
  def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
    if json
      to_json(locale, config, kemal_config, json)
    else
      JSON.build do |json|
        to_json(locale, config, kemal_config, json)
      end
    end
  end

  # Field/column mapping; order and names are significant for DB
  # (de)serialization, so keep them unchanged.
  db_mapping({
    title:              String,
    id:                 String,
    author:             String,
    ucid:               String,
    published:          Time,
    views:              Int64,
    description_html:   String,
    length_seconds:     Int32,
    live_now:           Bool,
    paid:               Bool,
    premium:            Bool,
    premiere_timestamp: Time?,
  })
end
|
|
|
|
|
2019-03-30 03:00:02 +05:30
|
|
|
# Minimal video record embedded in playlist search results (see
# SearchPlaylist#videos); carries only what the playlist preview needs.
struct SearchPlaylistVideo
  # Field/column mapping; order and names are significant for DB
  # (de)serialization, so keep them unchanged.
  db_mapping({
    title: String,
    id: String,
    length_seconds: Int32,
  })
end
|
|
|
|
|
2019-03-30 03:00:02 +05:30
|
|
|
struct SearchPlaylist
  # Writes this playlist (including a preview list of its videos) as a JSON
  # object (Invidious API `type: "playlist"` shape) into the given builder.
  def to_json(locale, config, kemal_config, json : JSON::Builder)
    json.object do
      json.field "type", "playlist"
      json.field "title", self.title
      json.field "playlistId", self.id
      json.field "playlistThumbnail", self.thumbnail

      json.field "author", self.author
      json.field "authorId", self.ucid
      json.field "authorUrl", "/channel/#{self.ucid}"

      json.field "videoCount", self.video_count
      json.field "videos" do
        json.array do
          self.videos.each do |video|
            json.object do
              json.field "title", video.title
              json.field "videoId", video.id
              json.field "lengthSeconds", video.length_seconds

              json.field "videoThumbnails" do
                # CONSISTENCY FIX: use the kemal_config parameter passed to
                # this method (as every sibling struct does) instead of
                # reaching for the global Kemal.config.
                generate_thumbnails(json, video.id, config, kemal_config)
              end
            end
          end
        end
      end
    end
  end

  # Convenience overload: with a builder, delegates to it; with none, builds
  # and returns a complete JSON document string.
  def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
    if json
      to_json(locale, config, kemal_config, json)
    else
      JSON.build do |json|
        to_json(locale, config, kemal_config, json)
      end
    end
  end

  # Field/column mapping; order and names are significant for DB
  # (de)serialization, so keep them unchanged.
  db_mapping({
    title:       String,
    id:          String,
    author:      String,
    ucid:        String,
    video_count: Int32,
    videos:      Array(SearchPlaylistVideo),
    thumbnail:   String?,
  })
end
|
|
|
|
|
2019-03-30 03:00:02 +05:30
|
|
|
struct SearchChannel
  # Writes this channel as a JSON object (Invidious API `type: "channel"`
  # shape) into the given builder.
  def to_json(locale, config, kemal_config, json : JSON::Builder)
    json.object do
      json.field "type", "channel"
      json.field "author", self.author
      json.field "authorId", self.ucid
      json.field "authorUrl", "/channel/#{self.ucid}"

      json.field "authorThumbnails" do
        json.array do
          # Thumbnail sizes (px) offered to API consumers; the upstream
          # thumbnail URL encodes its size as "=sNN", rewritten below.
          qualities = {32, 48, 76, 100, 176, 512}

          qualities.each do |quality|
            json.object do
              # Replace the "=<digits>" size suffix with the requested size.
              json.field "url", self.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
              # Avatars are square, so width == height == quality.
              json.field "width", quality
              json.field "height", quality
            end
          end
        end
      end

      json.field "subCount", self.subscriber_count
      json.field "videoCount", self.video_count
      json.field "description", html_to_content(self.description_html)
      json.field "descriptionHtml", self.description_html
    end
  end

  # Convenience overload: with a builder, delegates to it; with none, builds
  # and returns a complete JSON document string.
  def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
    if json
      to_json(locale, config, kemal_config, json)
    else
      JSON.build do |json|
        to_json(locale, config, kemal_config, json)
      end
    end
  end

  # Field/column mapping; order and names are significant for DB
  # (de)serialization, so keep them unchanged.
  db_mapping({
    author: String,
    ucid: String,
    author_thumbnail: String,
    subscriber_count: Int32,
    video_count: Int32,
    description_html: String,
  })
end
|
|
|
|
|
|
|
|
alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist
|
|
|
|
|
2018-09-14 04:17:31 +05:30
|
|
|
# Searches for videos within a single channel.
#
# `channel` may be a raw UCID, a custom (/c/) name, or a legacy (/user/)
# name; each URL namespace is probed in turn (in the same order as before)
# until a canonical link resolves it to a UCID.
#
# Returns {count, items}: the number of result nodes and the parsed items.
def channel_search(query, page, channel)
  client = make_client(YT_URL)

  # DRY: the canonical-link lookup was previously copy-pasted three times;
  # probe each namespace until one yields a canonical link.
  canonical = nil
  {"channel", "c", "user"}.each do |prefix|
    response = client.get("/#{prefix}/#{channel}?disable_polymer=1&hl=en&gl=US")
    document = XML.parse_html(response.body)
    canonical = document.xpath_node(%q(//link[@rel="canonical"]))
    break if canonical
  end

  if !canonical
    return 0, [] of SearchItem
  end

  # The canonical URL's last path segment is the channel's UCID.
  ucid = canonical["href"].split("/")[-1]

  url = produce_channel_search_url(ucid, query, page)
  response = client.get(url)
  json = JSON.parse(response.body)

  if json["content_html"]? && !json["content_html"].as_s.empty?
    document = XML.parse_html(json["content_html"].as_s)
    nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

    count = nodeset.size
    items = extract_items(nodeset)
  else
    count = 0
    items = [] of SearchItem
  end

  return count, items
end
|
|
|
|
|
2019-06-29 07:47:56 +05:30
|
|
|
# Performs a site-wide search.
#
# search_params is a pre-encoded filter token (see produce_search_params);
# region selects the client's proxy region, if any.
#
# Returns {result_count, items}.
def search(query, page = 1, search_params = produce_search_params(content_type: "all"), region = nil)
  client = make_client(YT_URL, region)
  return {0, [] of SearchItem} if query.empty?

  body = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&hl=en&disable_polymer=1").body
  return {0, [] of SearchItem} if body.empty?

  document = XML.parse_html(body)
  nodeset = document.xpath_nodes(%q(//ol[@class="item-section"]/li))

  return {nodeset.size, extract_items(nodeset)}
end
|
2018-08-05 03:42:58 +05:30
|
|
|
|
2018-09-18 03:08:18 +05:30
|
|
|
# Builds the URL-safe "sp" filter token for /results by hand-encoding a
# protobuf message: `header` holds the sort field (tag 1), `body` holds the
# nested filter message (tag 2) with date, content type, duration, and
# feature flags. Byte values below are protobuf field tags/values observed
# from YouTube; do not reorder.
def produce_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
                          duration : String = "", features : Array(String) = [] of String)
  header = IO::Memory.new
  # Field 1 (sort order); unknown sort names are a caller error.
  header.write Bytes[0x08]
  header.write case sort
  when "relevance"
    Bytes[0x00]
  when "rating"
    Bytes[0x01]
  when "upload_date", "date"
    Bytes[0x02]
  when "view_count", "views"
    Bytes[0x03]
  else
    raise "No sort #{sort}"
  end

  body = IO::Memory.new
  # Filter field 1: upload date window (omitted when unrecognized/empty).
  body.write case date
  when "hour"
    Bytes[0x08, 0x01]
  when "today"
    Bytes[0x08, 0x02]
  when "week"
    Bytes[0x08, 0x03]
  when "month"
    Bytes[0x08, 0x04]
  when "year"
    Bytes[0x08, 0x05]
  else
    Bytes.new(0)
  end

  # Filter field 2: content type; "all" omits the field, anything
  # unrecognized falls back to "video".
  body.write case content_type
  when "video"
    Bytes[0x10, 0x01]
  when "channel"
    Bytes[0x10, 0x02]
  when "playlist"
    Bytes[0x10, 0x03]
  when "movie"
    Bytes[0x10, 0x04]
  when "show"
    Bytes[0x10, 0x05]
  when "all"
    Bytes.new(0)
  else
    Bytes[0x10, 0x01]
  end

  # Filter field 3: duration (omitted when unrecognized/empty).
  body.write case duration
  when "short"
    Bytes[0x18, 0x01]
  when "long"
    Bytes[0x18, 0x12]
  else
    Bytes.new(0)
  end

  # Feature flags: each recognized feature appends its own tagged field;
  # unknown features are silently ignored.
  features.each do |feature|
    body.write case feature
    when "hd"
      Bytes[0x20, 0x01]
    when "subtitles"
      Bytes[0x28, 0x01]
    when "creative_commons", "cc"
      Bytes[0x30, 0x01]
    when "3d"
      Bytes[0x38, 0x01]
    when "live", "livestream"
      Bytes[0x40, 0x01]
    when "purchased"
      Bytes[0x48, 0x01]
    when "4k"
      Bytes[0x70, 0x01]
    when "360"
      Bytes[0x78, 0x01]
    when "location"
      Bytes[0xb8, 0x01, 0x01]
    when "hdr"
      Bytes[0xc8, 0x01, 0x01]
    else
      Bytes.new(0)
    end
  end

  token = header
  if !body.empty?
    # Nest the filter message under tag 2 (0x12) with its length.
    # NOTE(review): the length is written as a single byte, which assumes
    # body.bytesize < 0x80 (true for the fields above) — confirm before
    # adding larger filter fields.
    token.write Bytes[0x12, body.bytesize]
    token.write body.to_slice
  end

  # The token travels inside a URL query parameter, so base64url-encode
  # and then percent-escape it.
  token = Base64.urlsafe_encode(token.to_slice)
  token = URI.escape(token)

  return token
end
|
2018-09-14 04:17:31 +05:30
|
|
|
|
|
|
|
# Builds the /browse_ajax continuation URL used for in-channel search by
# hand-encoding the nested protobuf continuation token. Byte values are
# protobuf field tags observed from YouTube; do not reorder.
def produce_channel_search_url(ucid, query, page)
  # The page number is embedded as a string, so stringify it up front.
  page = "#{page}"

  # Inner message: field 2 = "search" tab, fixed flag fields, then
  # field 15 (0x7a) = page number string.
  data = IO::Memory.new
  data.write_byte 0x12
  data.write_byte 0x06
  data.print "search"

  data.write Bytes[0x30, 0x02]
  data.write Bytes[0x38, 0x01]
  data.write Bytes[0x60, 0x01]
  data.write Bytes[0x6a, 0x00]
  data.write Bytes[0xb8, 0x01, 0x00]

  data.write_byte 0x7a
  VarInt.to_io(data, page.bytesize)
  data.print page

  # The inner message is itself base64url+percent encoded before being
  # embedded in the outer message.
  data.rewind
  data = Base64.urlsafe_encode(data)
  continuation = URI.escape(data)

  # Outer message: field 2 (0x12) = UCID, field 3 (0x1a) = encoded inner
  # continuation, field 11 (0x5a) = the search query.
  data = IO::Memory.new

  data.write_byte 0x12
  VarInt.to_io(data, ucid.bytesize)
  data.print ucid

  data.write_byte 0x1a
  VarInt.to_io(data, continuation.bytesize)
  data.print continuation

  data.write_byte 0x5a
  VarInt.to_io(data, query.bytesize)
  data.print query

  data.rewind

  # Wrap everything in the top-level envelope (fixed 5-byte tag prefix,
  # then the length-prefixed outer message).
  buffer = IO::Memory.new
  buffer.write Bytes[0xe2, 0xa9, 0x85, 0xb2, 0x02]
  VarInt.to_io(buffer, data.bytesize)

  IO.copy data, buffer

  # Final token is base64url + percent-escaped for use in the query string.
  continuation = Base64.urlsafe_encode(buffer)
  continuation = URI.escape(continuation)

  url = "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"

  return url
end
|