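# Fetches YouTube's "Trending" feed for the given region and category and
# returns a {videos, plid} tuple, where plid is the playlist ID of the selected
# trending tab (nil when the default feed is used).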
def fetch_trending(trending_type, region, locale)
  client = make_client(YT_URL)

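  # Identify as a desktop Chrome browser for the request.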
  headers = HTTP::Headers.new
  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"

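  # Default to the US feed and normalize the region to an uppercase country code.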
  region ||= "US"
  region = region.upcase

  trending = ""
  plid = nil

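  # A non-default category (e.g. "Music") selects a separate trending tab that
  # has its own URL and playlist ID.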
  if trending_type && trending_type != "Default"
    trending_type = trending_type.downcase.capitalize

    response = client.get("/feed/trending?gl=#{region}&hl=en", headers).body
    initial_data = extract_initial_data(response)

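    # Walk the initial-data JSON down to the category sub-menu and pick the
    # tab whose title matches the requested category.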
    tabs = initial_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["subMenu"]["channelListSubMenuRenderer"]["contents"].as_a
    url = tabs.select { |tab| tab["channelListSubMenuAvatarRenderer"]["title"]["simpleText"] == trending_type }[0]?

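    # If the tab exists, fetch it and read its playlist ID out of the URL;
    # otherwise fall back to the default trending feed.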
    if url
url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
|
2018-11-20 22:48:12 +05:30
|
|
|
      url = url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
      url += "&disable_polymer=1&gl=#{region}&hl=en"

      trending = client.get(url).body
      plid = extract_plid(url)
    else
      trending = client.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
    end
  else
    trending = client.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
  end

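  # Parse the returned HTML and extract the video entries from the shelf items.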
  trending = XML.parse_html(trending)
  nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
  trending = extract_videos(nodeset)

  return {trending, plid}
end
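
# Extracts the playlist ID from the "bp" query parameter of a trending tab URL.
# The parameter is a URL-encoded, Base64-encoded blob that is walked manually as
# a series of length-prefixed fields.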
def extract_plid(url)
  wrapper = HTTP::Params.parse(URI.parse(url).query.not_nil!)["bp"]

  wrapper = URI.decode_www_form(wrapper)
  wrapper = Base64.decode(wrapper)

  # Skip the three leading bytes (0xe2 0x02 0x2e)
  wrapper += 3

  # Skip the next byte (0x0a)
  wrapper += 1

  # Length-prefixed item that looks like "/m/[a-z0-9]{5}", not sure what it does here
  item_size = wrapper[0]
  wrapper += 1
  item = wrapper[0, item_size]
  wrapper += item.size

  # Skip the next byte (0x12)
  wrapper += 1

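  # The playlist ID is the next length-prefixed field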
  plid_size = wrapper[0]
  wrapper += 1
  plid = wrapper[0, plid_size]
  wrapper += plid.size

  plid = String.new(plid)

  return plid
end