Compare commits

...

27 Commits
0.3.0 ... 0.4.0

Author SHA1 Message Date
044a57ef34 Fix video count for channels 2018-09-04 23:01:46 -05:00
bc49c7d181 Add author info to API endpoints 2018-09-04 21:35:25 -05:00
5632e58636 Add support for genre channels 2018-09-04 21:04:40 -05:00
e1bf7fa6cc Add descriptionHtml to playlists 2018-09-04 19:27:10 -05:00
40028e1462 Update SQL and remove migration points 2018-09-04 09:57:40 -05:00
53732cdcab Add genre URLs 2018-09-04 09:50:19 -05:00
2ac89d5e00 Update project synopsis 2018-09-04 09:22:10 -05:00
98d71ca8e7 Add support for /c/ URLs 2018-09-04 09:13:58 -05:00
0f2f273335 Don't leak referers 2018-09-04 09:01:43 -05:00
000cfd4834 Don't show comments when commentCount is 0 2018-09-04 08:52:39 -05:00
25c3ee034e Minor refactor 2018-09-04 08:52:30 -05:00
89d3587861 Fix typo 2018-09-03 22:20:20 -05:00
0d8f036bf1 Replace YouTube links 2018-09-03 22:15:47 -05:00
81c520e0dd Add info to README 2018-09-03 21:42:49 -05:00
c0bda13965 Fix view_count_text 2018-08-31 22:53:41 -05:00
3b1df75061 Merge pull request #143 from dimqua/patch-1
Change the color of progressBar marker
2018-08-31 18:20:30 -05:00
eda5beaed5 Change the color of progressBar marker 2018-08-31 16:49:02 +03:00
4022670cb1 Fix typo in video params 2018-08-30 21:04:41 -05:00
7b135a6d0c Add commentCount for videos with no comments 2018-08-30 21:03:22 -05:00
bdaa8a06fd Fix typo 2018-08-30 20:25:43 -05:00
b3f9059452 Add comment formatting 2018-08-30 20:06:08 -05:00
917d220623 Fix search filters 2018-08-30 17:42:30 -05:00
ed8ddbc07d Add seperator when notifications > 0 2018-08-30 16:52:29 -05:00
cb01b50fbb Add option to hide related videos 2018-08-30 16:49:38 -05:00
6b3c9d23d0 Fix referer on 404 2018-08-30 08:14:59 -05:00
3839013a37 Use '/video' page for channel endpoint 2018-08-28 20:29:08 -05:00
9d5dddab29 Fix signature extraction 2018-08-28 09:51:59 -05:00
19 changed files with 486 additions and 275 deletions

View File

@ -1,6 +1,25 @@
# Invidious
## Invidious is what YouTube should be
## Invidious is an alternative front-end to YouTube
- Audio-only (and no need to keep window open on mobile)
- [Open-source](https://github.com/omarroth/invidious) (AGPLv3 licensed)
- No ads
- No need to create a Google account to save subscriptions
- Lightweight (homepage is ~4 KB compressed)
- Tools for managing subscriptions:
- Only show unseen videos
- Only show latest (or latest unseen) video from each channel
- Delivers notifications from all subscribed channels
- Automatically redirect homepage to feed
- Import subscriptions from YouTube
- Dark mode
- Embed support
- Set default player options (speed, quality, autoplay, loop)
- Does not require JS to play videos
- Support for Reddit comments in place of YT comments
- Import/Export subscriptions, watch history, preferences
- Does not use any of the official YouTube APIs
Liberapay: https://liberapay.com/omarroth
Patreon: https://patreon.com/omarroth

View File

@ -171,6 +171,11 @@ div {
background-color: rgba(0, 182, 240, 1);
}
/* ProgressBar marker */
.vjs-marker {
background-color: rgba(255, 255, 255, 1);
}
/* Big "Play" Button */
.video-js .vjs-big-play-button {
background-color: rgba(35, 35, 35, 0.5);

View File

@ -20,6 +20,7 @@ CREATE TABLE public.videos
allowed_regions text[] COLLATE pg_catalog."default",
is_family_friendly boolean,
genre text COLLATE pg_catalog."default",
genre_url text COLLATE pg_catalog."default",
CONSTRAINT videos_pkey PRIMARY KEY (id)
)
WITH (

View File

@ -1,4 +1,4 @@
# "Invidious" (which is what YouTube should be)
# "Invidious" (which is an alternative front-end to YouTube)
# Copyright (C) 2018 Omar Roth
#
# This program is free software: you can redistribute it and/or modify
@ -249,7 +249,7 @@ get "/watch" do |env|
aspect_ratio = "16:9"
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = add_alt_links(video.description)
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -349,7 +349,7 @@ get "/embed/:id" do |env|
aspect_ratio = nil
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = add_alt_links(video.description)
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -450,11 +450,11 @@ get "/search" do |env|
end
end
query = (query.split(" ") - operators).join(" ")
search_query = (query.split(" ") - operators).join(" ")
search_params = build_search_params(sort: sort, date: date, content_type: "video",
duration: duration, features: features)
count, videos = search(query, page, search_params).as(Tuple)
count, videos = search(search_query, page, search_params).as(Tuple)
templated "search"
end
@ -799,6 +799,10 @@ post "/preferences" do |env|
captions_2 = env.params.body["captions_2"]?.try &.as(String) || ""
captions = [captions_0, captions_1, captions_2]
related_videos = env.params.body["related_videos"]?.try &.as(String)
related_videos ||= "off"
related_videos = related_videos == "on"
redirect_feed = env.params.body["redirect_feed"]?.try &.as(String)
redirect_feed ||= "off"
redirect_feed = redirect_feed == "on"
@ -837,6 +841,7 @@ post "/preferences" do |env|
"volume" => volume,
"comments" => comments,
"captions" => captions,
"related_videos" => related_videos,
"redirect_feed" => redirect_feed,
"dark_mode" => dark_mode,
"thin_mode" => thin_mode,
@ -1377,24 +1382,44 @@ get "/feed/channel/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/feed/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
url = produce_videos_url(ucid)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
url = produce_channel_videos_url(ucid, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"].as_s.empty?
if response.status_code == 500
error_message = "This channel does not exist."
halt env, status_code: 404, response: error_message
else
next ""
end
end
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
content_html = json["content_html"].as_s
document = XML.parse_html(content_html)
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
channel = get_channel(ucid, client, PG_DB, pull_all_videos: false)
@ -1415,18 +1440,22 @@ get "/feed/channel/:ucid" do |env|
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
extract_videos(nodeset, ucid).each do |video|
videos.each do |video|
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{video.id}" }
xml.element("yt:videoId") { xml.text video.id }
xml.element("yt:channelId") { xml.text ucid }
xml.element("yt:channelId") { xml.text video.ucid }
xml.element("title") { xml.text video.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{video.id}")
xml.element("author") do
xml.element("name") { xml.text channel.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
if auto_generated
xml.element("name") { xml.text video.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{video.ucid}" }
else
xml.element("name") { xml.text author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
end
xml.element("published") { xml.text video.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
@ -1552,6 +1581,23 @@ end
# Channels
# YouTube appears to let users set a "brand" URL that
# is different from their username, so we convert that here
# Resolves a "brand" channel URL (/c/<name>) to its canonical channel page.
# Fetches the /c/ page from YouTube, pulls the branded-page header anchor,
# and redirects the client to its href; falls back to the homepage when the
# anchor is missing (channel not found or page layout changed).
# NOTE(review): relies on YouTube's "branded-page-header-title-link" class —
# fragile against upstream markup changes; verify periodically.
get "/c/:user" do |env|
client = make_client(YT_URL)
user = env.params.url["user"]
response = client.get("/c/#{user}")
document = XML.parse_html(response.body)
anchor = document.xpath_node(%q(//a[contains(@class,"branded-page-header-title-link")]))
if !anchor
next env.redirect "/"
end
env.redirect anchor["href"]
end
get "/user/:user" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}"
@ -1583,24 +1629,43 @@ get "/channel/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
if rss.status_code == 404
error_message = "This channel does not exist."
next templated "error"
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
rss = XML.parse_html(rss.body)
author = rss.xpath_node("//feed/author/name").not_nil!.content
url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
begin
videos = extract_playlist(ucid, page)
videos.each { |a| a.playlists.clear }
rescue ex
error_message = ex.message
next templated "error"
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
templated "channel"
@ -1741,7 +1806,7 @@ get "/api/v1/comments/:id" do |env|
if format == "json"
next {"comments" => [] of String}.to_json
else
next {"contentHtml" => ""}.to_json
next {"contentHtml" => "", "commentCount" => 0}.to_json
end
end
ctoken = ctoken["ctoken"]
@ -1779,7 +1844,7 @@ get "/api/v1/comments/:id" do |env|
if format == "json"
next {"comments" => [] of String}.to_json
else
next {"contentHtml" => ""}.to_json
next {"contentHtml" => "", "commentCount" => 0}.to_json
end
end
@ -1808,9 +1873,38 @@ get "/api/v1/comments/:id" do |env|
node_comment = node["commentRenderer"]
end
content_text = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
content_text ||= node_comment["contentText"]["runs"].as_a.map { |comment| comment["text"] }
.join("").rchop('\ufeff')
contentHtml = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
contentHtml ||= node_comment["contentText"]["runs"].as_a.map do |run|
text = run["text"].as_s
if run["text"] == "\n"
text = "<br>"
end
if run["bold"]?
text = "<b>#{text}</b>"
end
if run["italics"]?
text = "<i>#{text}</i>"
end
if run["navigationEndpoint"]?
url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
if url
url = URI.parse(url)
url = HTTP::Params.parse(url.query.not_nil!)["q"]
else
url = run["navigationEndpoint"]["commandMetadata"]?.try &.["webCommandMetadata"]["url"].as_s
end
text = %(<a href="#{url}">#{text}</a>)
end
text
end.join.rchop('\ufeff')
contentHtml, content = html_to_content(contentHtml)
author = node_comment["authorText"]?.try &.["simpleText"]
author ||= ""
@ -1838,7 +1932,8 @@ get "/api/v1/comments/:id" do |env|
published = decode_date(node_comment["publishedTimeText"]["runs"][0]["text"].as_s.rchop(" (edited)"))
json.field "content", content_text
json.field "content", content
json.field "contentHtml", contentHtml
json.field "published", published.epoch
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
@ -1885,6 +1980,8 @@ get "/api/v1/comments/:id" do |env|
if comments["commentCount"]?
json.field "commentCount", comments["commentCount"]
else
json.field "commentCount", 0
end
end
end
@ -1899,7 +1996,7 @@ get "/api/v1/comments/:id" do |env|
content_html = template_reddit_comments(comments)
content_html = fill_links(content_html, "https", "www.reddit.com")
content_html = add_alt_links(content_html)
content_html = replace_links(content_html)
rescue ex
reddit_thread = nil
content_html = ""
@ -1942,7 +2039,7 @@ get "/api/v1/videos/:id" do |env|
generate_thumbnails(json, video.id)
end
description, video.description = html_to_description(video.description)
video.description, description = html_to_content(video.description)
json.field "description", description
json.field "descriptionHtml", video.description
@ -1960,6 +2057,7 @@ get "/api/v1/videos/:id" do |env|
json.field "isFamilyFriendly", video.is_family_friendly
json.field "allowedRegions", video.allowed_regions
json.field "genre", video.genre
json.field "genreUrl", video.genre_url
json.field "author", video.author
json.field "authorId", video.ucid
@ -2088,7 +2186,7 @@ get "/api/v1/videos/:id" do |env|
end
json.field "author", rv["author"]
json.field "lengthSeconds", rv["length_seconds"].to_i
json.field "viewCountText", rv["short_view_count_text"].rchop(" views")
json.field "viewCountText", rv["short_view_count_text"]
end
end
end
@ -2181,44 +2279,79 @@ get "/api/v1/channels/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/api/v1/channels/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
channel = get_channel(ucid, client, PG_DB, pull_all_videos: false)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
url = produce_channel_videos_url(ucid, 1, auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
# TODO: Integrate this into `get_channel` function
# We can't get everything from RSS feed, so we get it from the channel page
channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
channel_html = XML.parse_html(channel_html)
banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
author = channel_html.xpath_node(%q(//a[contains(@class, "branded-page-header-title-link")])).not_nil!.content
author_url = channel_html.xpath_node(%q(//a[@class="channel-header-profile-image-container spf-link"])).not_nil!["href"]
author_thumbnail = channel_html.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
description = channel_html.xpath_node(%q(//meta[@itemprop="description"])).not_nil!["content"]
description_html = channel_html.xpath_node(%q(//div[contains(@class,"about-description")]))
description_html, description = html_to_content(description_html)
paid = channel_html.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
is_family_friendly = channel_html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
allowed_regions = channel_html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
anchor = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
if anchor[0].content.includes? "views"
sub_count = 0
total_views = anchor[0].content.delete("views •,").to_i64
joined = Time.parse(anchor[1].content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
else
sub_count = anchor[0].content.delete("subscribers").delete(",").to_i64
total_views = anchor[1].content.delete("views •,").to_i64
joined = Time.parse(anchor[2].content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
total_views = 0_i64
sub_count = 0_i64
joined = Time.epoch(0)
metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
when .includes? "views"
total_views = item.content.delete("views •,").to_i64
when .includes? "subscribers"
sub_count = item.content.delete("subscribers").delete(",").to_i64
when .includes? "Joined"
joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
end
end
latest_videos = PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid = $1 ORDER BY published DESC LIMIT 15",
channel.id, as: ChannelVideo)
channel_info = JSON.build do |json|
json.object do
json.field "author", channel.author
json.field "authorId", channel.id
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", author_url
json.field "authorBanners" do
@ -2263,19 +2396,37 @@ get "/api/v1/channels/:ucid" do |env|
json.field "isFamilyFriendly", is_family_friendly
json.field "description", description
json.field "descriptionHtml", description_html
json.field "allowedRegions", allowed_regions
json.field "latestVideos" do
json.array do
latest_videos.each do |video|
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "published", video.published.epoch
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id)
end
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
json.field "published", video.published.epoch
json.field "lengthSeconds", video.length_seconds
end
end
end
@ -2293,6 +2444,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
page ||= 1
client = make_client(YT_URL)
if !ucid.match(/UC[a-zA-Z0-9_-]{22}/)
rss = client.get("/feeds/videos.xml?user=#{ucid}")
rss = XML.parse_html(rss.body)
@ -2304,43 +2456,62 @@ get "/api/v1/channels/:ucid/videos" do |env|
end
ucid = ucid.content
url = "/api/v1/channels/#{ucid}/videos"
if env.params.query
url += "?#{env.params.query}"
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/feed/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
next env.redirect url
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
url = produce_videos_url(ucid, page)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
url = produce_channel_videos_url(ucid, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if !json["content_html"]?
env.response.content_type = "application/json"
if response.status_code == 500
response = {"Error" => "Channel does not exist"}.to_json
halt env, status_code: 404, response: response
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
else
next Array(String).new.to_json
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
content_html = json["content_html"].as_s
if content_html.empty?
env.response.content_type = "application/json"
next Hash(String, String).new.to_json
end
document = XML.parse_html(content_html)
videos = JSON.build do |json|
result = JSON.build do |json|
json.array do
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
extract_videos(nodeset, ucid).each do |video|
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id)
end
@ -2357,7 +2528,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
end
env.response.content_type = "application/json"
videos
result
end
get "/api/v1/search" do |env|
@ -2403,6 +2574,7 @@ get "/api/v1/search" do |env|
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
@ -2449,6 +2621,7 @@ get "/api/v1/playlists/:plid" do |env|
json.field "authorUrl", "/channel/#{playlist.ucid}"
json.field "description", playlist.description
json.field "descriptionHtml", playlist.description_html
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views

View File

@ -73,7 +73,7 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
page = 1
loop do
url = produce_videos_url(ucid, page)
url = produce_channel_videos_url(ucid, page)
response = client.get(url)
json = JSON.parse(response.body)
@ -130,3 +130,45 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
return channel
end
# Builds the "/browse_ajax?continuation=..." URL used to page through a
# channel's uploaded videos.
#
# ucid           - channel ID the continuation token is generated for
# page           - page number; for auto-generated channels it selects a
#                  month offset instead of a numeric page
# auto_generated - truthy for auto-generated ("<name> - Topic") channels,
#                  which use timestamp-based pagination
#
# NOTE(review): the byte strings below hand-roll a protobuf-style message
# that is Base64/URL encoded twice — presumably mirroring YouTube's internal
# continuation format; verify against current upstream responses.
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
# Fixed seed epoch advanced month-by-month up to the present; each page
# then steps back one month from that point.
seed = Time.epoch(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
page = "#{timestamp.epoch}"
# Different "switch" byte selects the timestamp-based paging mode.
switch = "\x36"
else
page = "#{page}"
switch = "\x00"
end
# Inner message: flags plus the length-prefixed page selector, then
# Base64 + URL escaped.
meta = "\x12\x06videos #{switch}\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
# Outer message: length-prefixed ucid and meta, wrapped with a fixed
# header and encoded again.
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end

View File

@ -113,7 +113,7 @@ def template_youtube_comments(comments)
- #{recode_date(Time.epoch(child["published"].as_i64))} ago
</p>
<div>
#{child["content"]}
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
#{replies_html}
</div>
</div>
@ -190,37 +190,21 @@ def template_reddit_comments(root)
return html
end
def add_alt_links(html)
alt_links = [] of {String, String}
def replace_links(html)
html = XML.parse_html(html)
# This is painful but likely the only way to accomplish this in Crystal,
# as Crystigiri and others are not able to insert XML Nodes into a document.
# The goal here is to use as little regex as possible
html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
anchor = XML.parse_html(match[0])
anchor = anchor.xpath_node("//a").not_nil!
html.xpath_nodes(%q(//a)).each do |anchor|
url = URI.parse(anchor["href"])
if ["www.youtube.com", "m.youtube.com"].includes?(url.host)
if url.path == "/redirect"
params = HTTP::Params.parse(url.query.not_nil!)
alt_url = params["q"]?
alt_url ||= "/"
anchor["href"] = params["q"]?
else
alt_url = url.full_path
anchor["href"] = url.full_path
end
alt_link = <<-END_HTML
<a href="#{alt_url}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
elsif url.host == "youtu.be"
alt_link = <<-END_HTML
<a href="/watch?v=#{url.path.try &.lchop("/")}&#{url.query}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
anchor["href"] = "/watch?v=#{url.path.try &.lchop("/")}&#{url.query}"
elsif url.to_s == "#"
begin
length_seconds = decode_length_seconds(anchor.content)
@ -228,23 +212,12 @@ def add_alt_links(html)
length_seconds = decode_time(anchor.content)
end
alt_anchor = <<-END_HTML
<a href="javascript:void(0)" onclick="player.currentTime(#{length_seconds})">#{anchor.content}</a>
END_HTML
html = html.sub(anchor.to_s, alt_anchor)
next
else
alt_link = ""
anchor["href"] = "javascript:void(0)"
anchor["onclick"] = "player.currentTime(#{length_seconds})"
end
alt_links << {anchor.to_s, alt_link}
end
alt_links.each do |original, alternate|
html = html.sub(original, original + alternate)
end
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
return html
end
@ -267,5 +240,5 @@ def fill_links(html, scheme, host)
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
end
html
return html
end

View File

@ -116,81 +116,6 @@ def login_req(login_form, f_req)
return HTTP::Params.encode(data)
end
# (Removed in this diff, superseded by produce_channel_videos_url.)
# Builds the "/browse_ajax?continuation=..." URL for a channel's videos,
# supporting only numeric page-based pagination.
# NOTE(review): byte strings hand-roll a protobuf-style continuation token,
# Base64/URL encoded twice — same scheme as its replacement.
def produce_videos_url(ucid, page = 1)
page = "#{page}"
# Inner message: flags plus the length-prefixed page number.
meta = "\x12\x06videos \x00\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
# Outer message: length-prefixed ucid and meta, wrapped and re-encoded.
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
# Decodes a little-endian base-128 varint from `bytes`, returning the value
# as an integer. Raises once more than 5 continuation bytes are consumed.
# NOTE(review): the single-byte fast path returns bytes[0] unmasked, and the
# multi-byte loop re-reads bytes[numRead] before incrementing — confirm this
# matches the producer's encoding (see write_var_int).
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
# Continuation bit (0x80) set means another byte group follows.
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
# Encodes `value` as a little-endian base-128 varint, returning the bytes
# least-significant group first with the continuation bit (0x80) set on all
# but the final byte. Zero encodes to a single 0x00 byte.
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
# More groups follow: set the continuation bit.
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
def generate_captcha(key)
minute = Random::Secure.rand(12)
minute_angle = minute * 30
@ -240,7 +165,7 @@ def generate_captcha(key)
return {challenge: challenge, token: token}
end
def html_to_description(description_html)
def html_to_content(description_html)
if !description_html
description = ""
description_html = ""
@ -251,7 +176,7 @@ def html_to_description(description_html)
description = XML.parse_html(description).content.strip("\n ")
end
return description, description_html
return description_html, description
end
def extract_videos(nodeset, ucid = nil)
@ -319,7 +244,7 @@ def extract_videos(nodeset, ucid = nil)
view_count ||= 0_i64
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description, description_html = html_to_description(description_html)
description_html, description = html_to_content(description_html)
length_seconds = node.xpath_node(%q(.//span[@class="video-time"]))
if length_seconds

View File

@ -191,3 +191,49 @@ def get_referer(env, fallback = "/")
return referer
end
# Decodes a little-endian base-128 varint from `bytes`, returning the value
# as an integer. Raises once more than 5 continuation bytes are consumed.
# NOTE(review): the single-byte fast path returns bytes[0] unmasked, and the
# multi-byte loop re-reads bytes[numRead] before incrementing — confirm this
# matches the producer's encoding (see write_var_int).
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
# Continuation bit (0x80) set means another byte group follows.
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
# Encodes `value` as a little-endian base-128 varint, returning the bytes
# least-significant group first with the continuation bit (0x80) set on all
# but the final byte. Zero encodes to a single 0x00 byte.
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
# More groups follow: set the continuation bit.
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end

View File

@ -1,13 +1,14 @@
class Playlist
add_mapping({
title: String,
id: String,
author: String,
ucid: String,
description: String,
video_count: Int32,
views: Int64,
updated: Time,
title: String,
id: String,
author: String,
ucid: String,
description: String,
description_html: String,
video_count: Int32,
views: Int64,
updated: Time,
})
end
@ -99,7 +100,7 @@ def produce_playlist_url(id, index)
slice = URI.escape(slice)
# Outer Base64
continuation = [0x1a.to_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = [0x1a_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = ucid.bytes + continuation
continuation = [0x12_u8, ucid.size.to_u8] + continuation
continuation = [0xe2_u8, 0xa9_u8, 0x85_u8, 0xb2_u8, 2_u8, continuation.size.to_u8] + continuation
@ -123,17 +124,8 @@ def fetch_playlist(plid)
title = document.xpath_node(%q(//h1[@class="pl-header-title"])).not_nil!.content
title = title.strip(" \n")
description = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
if description
description = description.to_xml.strip(" \n")
description = description.split("<button ")[0]
description = fill_links(description, "https", "www.youtube.com")
description = add_alt_links(description)
else
description = ""
end
description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description, description_html = html_to_content(description_html)
anchor = document.xpath_node(%q(//ul[@class="pl-header-details"])).not_nil!
author = anchor.xpath_node(%q(.//li[1]/a)).not_nil!.content
@ -151,6 +143,7 @@ def fetch_playlist(plid)
author,
ucid,
description,
description_html,
video_count,
views,
updated

View File

@ -38,9 +38,9 @@ def build_search_params(sort : String = "relevance", date : String = "", content
"\x00"
when "rating"
"\x01"
when "upload_date"
when "upload_date", "date"
"\x02"
when "view_count"
when "view_count", "views"
"\x03"
else
raise "No sort #{sort}"
@ -92,7 +92,7 @@ def build_search_params(sort : String = "relevance", date : String = "", content
"\x20\x01"
when "subtitles"
"\x28\x01"
when "creative_commons"
when "creative_commons", "cc"
"\x30\x01"
when "3d"
"\x38\x01"

View File

@ -4,22 +4,21 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
player = client.get(url).body
function_name = player.match(/"signature",(?<name>[a-zA-Z0-9]{2})\(/).not_nil!["name"]
function_body = player.match(/#{function_name}=function\(a\){(?<body>[^}]+)}/).not_nil!["body"]
function_body = player.match(/^#{function_name}=function\(a\){(?<body>[^}]+)}/m).not_nil!["body"]
function_body = function_body.split(";")[1..-2]
var_name = function_body[0][0, 2]
var_body = player.delete("\n").match(/var #{var_name}={(?<body>(.*?))};/).not_nil!["body"]
operations = {} of String => String
matches = player.delete("\n").match(/var #{var_name}={(?<op1>[a-zA-Z0-9]{2}:[^}]+}),(?<op2>[a-zA-Z0-9]{2}:[^}]+}),(?<op3>[a-zA-Z0-9]{2}:[^}]+})};/).not_nil!
3.times do |i|
operation = matches["op#{i + 1}"]
op_name = operation[0, 2]
var_body.split("},").each do |operation|
op_name = operation.match(/^[^:]+/).not_nil![0]
op_body = operation.match(/\{[^}]+/).not_nil![0]
op_body = operation.match(/\{[^}]+\}/).not_nil![0]
case op_body
when "{a.reverse()}"
when "{a.reverse()"
operations[op_name] = "a"
when "{a.splice(0,b)}"
when "{a.splice(0,b)"
operations[op_name] = "b"
else
operations[op_name] = "c"
@ -28,11 +27,10 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
decrypt_function = [] of {name: String, value: Int32}
function_body.each do |function|
function = function.lchop(var_name + ".")
op_name = function[0, 2]
function = function.lchop(var_name).delete("[].")
function = function.lchop(op_name + "(a,")
value = function.rchop(")").to_i
op_name = function.match(/[^\(]+/).not_nil![0]
value = function.match(/\(a,(?<value>[\d]+)\)/).not_nil!["value"].to_i
decrypt_function << {name: operations[op_name], value: value}
end

View File

@ -27,19 +27,20 @@ class User
end
DEFAULT_USER_PREFERENCES = Preferences.from_json({
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => ["youtube", ""],
"captions" => ["", "", ""],
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => ["youtube", ""],
"captions" => ["", "", ""],
"related_videos" => true,
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
}.to_json)
class Preferences
@ -85,6 +86,10 @@ class Preferences
type: Bool,
default: false,
},
related_videos: {
type: Bool,
default: true,
},
dark_mode: Bool,
thin_mode: {
type: Bool,

View File

@ -345,6 +345,10 @@ class Video
allowed_regions: Array(String),
is_family_friendly: Bool,
genre: String,
genre_url: {
type: String,
default: "/",
},
})
end
@ -371,10 +375,12 @@ def get_video(id, db, refresh = true)
begin
video = fetch_video(id)
video_array = video.to_a
args = arg_array(video_array[1..-1], 2)
db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,\
published,description,language,author,ucid, allowed_regions, is_family_friendly, genre)\
published,description,language,author,ucid, allowed_regions, is_family_friendly,\
genre, genre_url)\
= (#{args}) WHERE id = $1", video_array)
rescue ex
db.exec("DELETE FROM videos * WHERE id = $1", id)
@ -384,6 +390,7 @@ def get_video(id, db, refresh = true)
else
video = fetch_video(id)
video_array = video.to_a
args = arg_array(video_array)
db.exec("INSERT INTO videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
@ -490,10 +497,12 @@ def fetch_video(id)
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
is_family_friendly = html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
genre = html.xpath_node(%q(//meta[@itemprop="genre"])).not_nil!["content"]
genre_url = html.xpath_node(%(//a[text()="#{genre}"])).not_nil!["href"]
video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
nil, author, ucid, allowed_regions, is_family_friendly, genre)
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url)
return video
end
@ -533,8 +542,8 @@ def process_video_params(query, preferences)
video_start = decode_time(query["t"])
end
video_start ||= 0
if query["time_continu"]?
video_start = decode_time(query["t"])
if query["time_continue"]?
video_start = decode_time(query["time_continue"])
end
video_start ||= 0
if query["start"]?

View File

@ -51,7 +51,7 @@
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if videos.size == 100 %>
<% if videos.size == 30 %>
<a href="/channel/<%= ucid %>?page=<%= page + 1 %>">Next page</a>
<% end %>
</div>

View File

@ -75,7 +75,7 @@ function update_value(element) {
</div>
<div class="pure-control-group">
<label for="captions_fallback">Fallback languages: </label>
<label for="captions_fallback">Fallback captions: </label>
<select class="pure-u-1-5" name="captions_1" id="captions_1">
<% CAPTION_LANGUAGES.each do |option| %>
<option <% if user.preferences.captions[1] == option %> selected <% end %>><%= option %></option>
@ -89,7 +89,13 @@ function update_value(element) {
</select>
</div>
<div class="pure-control-group">
<label for="related_videos">Show related videos? </label>
<input name="related_videos" id="related_videos" type="checkbox" <% if user.preferences.related_videos %>checked<% end %>>
</div>
<legend>Visual preferences</legend>
<div class="pure-control-group">
<label for="dark_mode">Dark mode: </label>
<input name="dark_mode" id="dark_mode" type="checkbox" <% if user.preferences.dark_mode %>checked<% end %>>
@ -101,6 +107,7 @@ function update_value(element) {
</div>
<legend>Subscription preferences</legend>
<div class="pure-control-group">
<label for="redirect_feed">Redirect homepage to feed: </label>
<input name="redirect_feed" id="redirect_feed" type="checkbox" <% if user.preferences.redirect_feed %>checked<% end %>>
@ -136,6 +143,7 @@ function update_value(element) {
</div>
<legend>Data preferences</legend>
<div class="pure-control-group">
<a href="/clear_watch_history?referer=<%= referer %>">Clear watch history</a>
</div>

View File

@ -1,5 +1,5 @@
<% content_for "header" do %>
<title><%= query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<title><%= search_query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<% end %>
<% videos.each_slice(4) do |slice| %>

View File

@ -16,6 +16,13 @@
</div>
<center><%= notifications.size %> unseen notifications</center>
<% if !notifications.empty? %>
<div class="h-box">
<hr>
</div>
<% end %>
<% notifications.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>

View File

@ -4,6 +4,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="referrer" content="no-referrer">
<%= yield_content "header" %>
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/pure-min.css">
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/grids-responsive-min.css">
@ -34,7 +35,7 @@
<div class="pure-u-1 pure-u-md-8-24 user-field">
<% if env.get? "user" %>
<div class="pure-u-1-4">
<a href="/toggle_theme?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<% preferences = env.get("user").as(User).preferences %>
<% if preferences.dark_mode %>
<i class="icon ion-ios-sunny"></i>
@ -54,15 +55,15 @@
</a>
</div>
<div class="pure-u-1-4">
<a href="/preferences?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/preferences?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<i class="icon ion-ios-cog"></i>
</a>
</div>
<div class="pure-u-1-4">
<a href="/signout?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Sign out</a>
<a href="/signout?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Sign out</a>
</div>
<% else %>
<a href="/login?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Login</a>
<a href="/login?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Login</a>
<% end %>
</div>
</div>

View File

@ -55,7 +55,7 @@
<p><i class="icon ion-ios-eye"></i> <%= number_with_separator(video.views) %></p>
<p><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
<p><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
<p id="Genre">Genre: <%= video.genre %></p>
<p id="Genre">Genre: <a href="<%= video.genre_url %>"><%= video.genre %></a></p>
<p id="FamilyFriendly">Family Friendly? <%= video.is_family_friendly %></p>
<p id="Wilson">Wilson Score: <%= video.wilson_score.round(4) %></p>
<p id="Rating">Rating: <%= rating.round(4) %> / 5</p>
@ -113,6 +113,7 @@
</div>
</div>
<div class="pure-u-1 pure-u-md-1-5">
<% if preferences && preferences.related_videos %>
<div class="h-box">
<% rvs.each do |rv| %>
<% if rv.has_key?("id") %>
@ -129,6 +130,7 @@
<% end %>
<% end %>
</div>
<% end %>
</div>
</div>
@ -246,18 +248,22 @@ function get_youtube_comments() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
if (xhr.response.commentCount > 0) {
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
} else {
comments.innerHTML = "";
}
} else {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();