Compare commits

...

44 Commits
0.2.0 ... 0.4.0

SHA1 Message Date
044a57ef34 Fix video count for channels 2018-09-04 23:01:46 -05:00
bc49c7d181 Add author info to API endpoints 2018-09-04 21:35:25 -05:00
5632e58636 Add support for genre channels 2018-09-04 21:04:40 -05:00
e1bf7fa6cc Add descriptionHtml to playlists 2018-09-04 19:27:10 -05:00
40028e1462 Update SQL and remove migration points 2018-09-04 09:57:40 -05:00
53732cdcab Add genre URLs 2018-09-04 09:50:19 -05:00
2ac89d5e00 Update project synopsis 2018-09-04 09:22:10 -05:00
98d71ca8e7 Add support for /c/ URLs 2018-09-04 09:13:58 -05:00
0f2f273335 Don't leak referers 2018-09-04 09:01:43 -05:00
000cfd4834 Don't show comments when commentCount is 0 2018-09-04 08:52:39 -05:00
25c3ee034e Minor refactor 2018-09-04 08:52:30 -05:00
89d3587861 Fix typo 2018-09-03 22:20:20 -05:00
0d8f036bf1 Replace YouTube links 2018-09-03 22:15:47 -05:00
81c520e0dd Add info to README 2018-09-03 21:42:49 -05:00
c0bda13965 Fix view_count_text 2018-08-31 22:53:41 -05:00
3b1df75061 Merge pull request #143 from dimqua/patch-1 (Change the color of progressBar marker) 2018-08-31 18:20:30 -05:00
eda5beaed5 Change the color of progressBar marker 2018-08-31 16:49:02 +03:00
4022670cb1 Fix typo in video params 2018-08-30 21:04:41 -05:00
7b135a6d0c Add commentCount for videos with no comments 2018-08-30 21:03:22 -05:00
bdaa8a06fd Fix typo 2018-08-30 20:25:43 -05:00
b3f9059452 Add comment formatting 2018-08-30 20:06:08 -05:00
917d220623 Fix search filters 2018-08-30 17:42:30 -05:00
ed8ddbc07d Add separator when notifications > 0 2018-08-30 16:52:29 -05:00
cb01b50fbb Add option to hide related videos 2018-08-30 16:49:38 -05:00
6b3c9d23d0 Fix referer on 404 2018-08-30 08:14:59 -05:00
3839013a37 Use '/video' page for channel endpoint 2018-08-28 20:29:08 -05:00
9d5dddab29 Fix signature extraction 2018-08-28 09:51:59 -05:00
45fa148380 Don't add playlist id for channel videos 2018-08-27 18:53:34 -05:00
2ba0063dc0 Add search filters 2018-08-27 15:23:25 -05:00
b57176d7ef Fix notification count in subscription feed 2018-08-27 13:46:50 -05:00
0dbef6ab9f Fix typo in preferred_captions 2018-08-26 15:00:19 -05:00
8fc4dcfdea Use username for /data_control 2018-08-25 21:49:18 -05:00
6c98513153 Add referer to /data_control 2018-08-25 21:48:20 -05:00
c3d8ca68b3 Add code to calculate video rating 2018-08-25 21:34:11 -05:00
a37692cce4 Fix 'to_json' for comment array 2018-08-25 21:33:53 -05:00
a1ad561b98 Fix /clear_watch_history 2018-08-25 21:33:33 -05:00
7fd0f93d02 Add support for preferences as query params 2018-08-25 20:05:51 -05:00
23aaf7f1b7 Add comments fallback 2018-08-25 18:33:15 -05:00
41a04e7c67 Clean up /videoplayback 2018-08-25 17:24:07 -05:00
77b12b6249 Only show next page when there are more results 2018-08-25 17:18:43 -05:00
78fcf579a7 Add Liberapay 2018-08-25 15:43:39 -05:00
9ae3bf216e Update signature extraction 2018-08-24 07:17:16 -05:00
0e7c56687b Add error message for comment timeouts 2018-08-23 16:55:26 -05:00
01a80995d3 Add fix for channel endpoint where channel has no subscribers 2018-08-22 11:06:31 -05:00
24 changed files with 865 additions and 521 deletions

View File

@ -1,7 +1,27 @@
# Invidious
## Invidious is what YouTube should be
## Invidious is an alternative front-end to YouTube
- Audio-only (and no need to keep window open on mobile)
- [Open-source](https://github.com/omarroth/invidious) (AGPLv3 licensed)
- No ads
- No need to create a Google account to save subscriptions
- Lightweight (homepage is ~4 KB compressed)
- Tools for managing subscriptions:
  - Only show unseen videos
  - Only show latest (or latest unseen) video from each channel
  - Delivers notifications from all subscribed channels
- Automatically redirect homepage to feed
- Import subscriptions from YouTube
- Dark mode
- Embed support
- Set default player options (speed, quality, autoplay, loop)
- Does not require JS to play videos
- Support for Reddit comments in place of YT comments
- Import/Export subscriptions, watch history, preferences
- Does not use any of the official YouTube APIs
Liberapay: https://liberapay.com/omarroth
Patreon: https://patreon.com/omarroth
BTC: 356DpZyMXu6rYd55Yqzjs29n79kGKWcYrY
BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk

View File

@ -171,6 +171,11 @@ div {
background-color: rgba(0, 182, 240, 1);
}
/* ProgressBar marker */
.vjs-marker {
background-color: rgba(255, 255, 255, 1);
}
/* Big "Play" Button */
.video-js .vjs-big-play-button {
background-color: rgba(35, 35, 35, 0.5);

View File

@ -20,6 +20,7 @@ CREATE TABLE public.videos
allowed_regions text[] COLLATE pg_catalog."default",
is_family_friendly boolean,
genre text COLLATE pg_catalog."default",
genre_url text COLLATE pg_catalog."default",
CONSTRAINT videos_pkey PRIMARY KEY (id)
)
WITH (

View File

@ -1,4 +1,4 @@
# "Invidious" (which is what YouTube should be)
# "Invidious" (which is an alternative front-end to YouTube)
# Copyright (C) 2018 Omar Roth
#
# This program is free software: you can redistribute it and/or modify
@ -215,8 +215,9 @@ get "/watch" do |env|
end
subscriptions ||= [] of String
autoplay, video_loop, video_start, video_end, listen, raw, quality, controls = process_video_params(env.params.query, preferences)
if listen
params = process_video_params(env.params.query, preferences)
if params[:listen]
env.params.query.delete_all("listen")
end
@ -234,17 +235,21 @@ get "/watch" do |env|
audio_streams = video.audio_streams(adaptive_fmts)
captions = video.captions
if preferences
preferred_captions = captions.select { |caption| preferences.captions.includes? caption.name.simpleText }
preferred_captions.sort_by! { |caption| preferences.captions.index(caption.name.simpleText).not_nil! }
captions = captions - preferred_captions
end
preferred_captions ||= [] of Caption
preferred_captions = captions.select { |caption|
params[:preferred_captions].includes?(caption.name.simpleText) ||
params[:preferred_captions].includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params[:preferred_captions].index(caption.name.simpleText) ||
params[:preferred_captions].index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = "16:9"
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = add_alt_links(video.description)
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -259,11 +264,11 @@ get "/watch" do |env|
# TODO: Find highest resolution thumbnail automatically
thumbnail = "https://i.ytimg.com/vi/#{video.id}/mqdefault.jpg"
if raw
if params[:raw]
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == quality
if fmt["label"].split(" - ")[0] == params[:quality]
url = fmt["url"]
end
end
@ -276,7 +281,9 @@ get "/watch" do |env|
rvs << HTTP::Params.parse(rv).to_h
end
# rating = (video.likes.to_f/(video.likes.to_f + video.dislikes.to_f) * 4 + 1)
rating = video.info["avg_rating"].to_f64
engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)
playability_status = video.player_response["playabilityStatus"]?
@ -313,21 +320,7 @@ get "/embed/:id" do |env|
next env.redirect url
end
autoplay, video_loop, video_start, video_end, listen, raw, quality, controls = process_video_params(env.params.query, nil)
preferred_captions = [] of Caption
preferences = Preferences.from_json({
"video_loop" => video_loop,
"autoplay" => autoplay,
"speed" => 1.0,
"quality" => quality,
"volume" => 100,
"max_results" => 0,
"sort" => "",
"latest_only" => false,
"unseen_only" => false,
"dark_mode" => false,
}.to_json)
aspect_ratio = nil
params = process_video_params(env.params.query, nil)
begin
video = get_video(id, PG_DB)
@ -343,8 +336,20 @@ get "/embed/:id" do |env|
captions = video.captions
preferred_captions = captions.select { |caption|
params[:preferred_captions].includes?(caption.name.simpleText) ||
params[:preferred_captions].includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params[:preferred_captions].index(caption.name.simpleText) ||
params[:preferred_captions].index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = nil
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = add_alt_links(video.description)
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -359,11 +364,11 @@ get "/embed/:id" do |env|
# TODO: Find highest resolution thumbnail automatically
thumbnail = "https://i.ytimg.com/vi/#{video.id}/mqdefault.jpg"
if raw
if params[:raw]
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == quality
if fmt["label"].split(" - ")[0] == params[:quality]
url = fmt["url"]
end
end
@ -424,8 +429,32 @@ get "/search" do |env|
page = env.params.query["page"]?.try &.to_i?
page ||= 1
search_params = build_search_params(sort_by: "relevance", content_type: "video")
videos = search(query, page, search_params)
sort = "relevance"
date = ""
duration = ""
features = [] of String
operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
operators.each do |operator|
key, value = operator.split(":")
case key
when "sort"
sort = value
when "date"
date = value
when "duration"
duration = value
when "features"
features = value.split(",")
end
end
search_query = (query.split(" ") - operators).join(" ")
search_params = build_search_params(sort: sort, date: date, content_type: "video",
duration: duration, features: features)
count, videos = search(search_query, page, search_params).as(Tuple)
templated "search"
end
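
Note: the block above pulls key:value tokens (sort, date, duration, features) out of the raw query and searches for whatever remains. A standalone sketch of that split, using a made-up query:

    query = "dogs sort:views duration:short"

    # Tokens shaped like key:value become filter operators
    operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }

    filters = {} of String => String
    operators.each do |operator|
      key, value = operator.split(":")
      filters[key] = value
    end

    # Everything else is the text actually searched for
    search_query = (query.split(" ") - operators).join(" ")
    puts search_query # => "dogs"
    puts filters      # => {"sort" => "views", "duration" => "short"}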
@ -761,14 +790,19 @@ post "/preferences" do |env|
volume = env.params.body["volume"]?.try &.as(String).to_i?
volume ||= 100
comments = env.params.body["comments"]?
comments ||= "youtube"
comments_0 = env.params.body["comments_0"]?.try &.as(String) || "youtube"
comments_1 = env.params.body["comments_1"]?.try &.as(String) || ""
comments = [comments_0, comments_1]
captions_0 = env.params.body["captions_0"]?.try &.as(String) || ""
captions_1 = env.params.body["captions_1"]?.try &.as(String) || ""
captions_2 = env.params.body["captions_2"]?.try &.as(String) || ""
captions = [captions_0, captions_1, captions_2]
related_videos = env.params.body["related_videos"]?.try &.as(String)
related_videos ||= "off"
related_videos = related_videos == "on"
redirect_feed = env.params.body["redirect_feed"]?.try &.as(String)
redirect_feed ||= "off"
redirect_feed = redirect_feed == "on"
@ -807,6 +841,7 @@ post "/preferences" do |env|
"volume" => volume,
"comments" => comments,
"captions" => captions,
"related_videos" => related_videos,
"redirect_feed" => redirect_feed,
"dark_mode" => dark_mode,
"thin_mode" => thin_mode,
@ -1027,18 +1062,18 @@ post "/data_control" do |env|
body["watch_history"].as_a.each do |id|
id = id.as_s
if !user.watched.includes? id
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE id = $2", id, user.id)
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", id, user.email)
end
end
PG_DB.exec("UPDATE users SET preferences = $1 WHERE id = $2", body["preferences"].to_json, user.id)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", body["preferences"].to_json, user.email)
when "import_youtube"
subscriptions = XML.parse(body)
subscriptions.xpath_nodes(%q(//outline[@type="rss"])).each do |channel|
ucid = channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1053,7 +1088,7 @@ post "/data_control" do |env|
ucid = md["channel_id"]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1069,7 +1104,7 @@ post "/data_control" do |env|
ucid = channel["url"].as_s.match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1090,14 +1125,14 @@ post "/data_control" do |env|
db = entry.io.gets_to_end
db.scan(/youtube\.com\/watch\?v\=(?<id>[a-zA-Z0-9_-]{11})/) do |md|
if !user.watched.includes? md["id"]
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE id = $2", md["id"], user.id)
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", md["id"], user.email)
end
end
db.scan(/youtube\.com\/channel\/(?<ucid>[a-zA-Z0-9_-]{22})/) do |md|
ucid = md["ucid"]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1197,7 +1232,7 @@ get "/clear_watch_history" do |env|
if user
user = user.as(User)
PG_DB.exec("UPDATE users SET watched = '{}' WHERE id = $1", user.id)
PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)
end
env.redirect referer
@ -1242,21 +1277,21 @@ get "/feed/subscriptions" do |env|
if preferences.notifications_only && !notifications.empty?
args = arg_array(notifications)
videos = PG_DB.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
notifications = PG_DB.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
ORDER BY published DESC", notifications, as: ChannelVideo)
notifications = [] of ChannelVideo
videos = [] of ChannelVideo
videos.sort_by! { |video| video.published }.reverse!
notifications.sort_by! { |video| video.published }.reverse!
case preferences.sort
when "alphabetically"
videos.sort_by! { |video| video.title }
notifications.sort_by! { |video| video.title }
when "alphabetically - reverse"
videos.sort_by! { |video| video.title }.reverse!
notifications.sort_by! { |video| video.title }.reverse!
when "channel name"
videos.sort_by! { |video| video.author }
notifications.sort_by! { |video| video.author }
when "channel name - reverse"
videos.sort_by! { |video| video.author }.reverse!
notifications.sort_by! { |video| video.author }.reverse!
end
else
if preferences.latest_only
@ -1347,24 +1382,44 @@ get "/feed/channel/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/feed/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
url = produce_videos_url(ucid)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
url = produce_channel_videos_url(ucid, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"].as_s.empty?
if response.status_code == 500
error_message = "This channel does not exist."
halt env, status_code: 404, response: error_message
else
next ""
end
end
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
content_html = json["content_html"].as_s
document = XML.parse_html(content_html)
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
channel = get_channel(ucid, client, PG_DB, pull_all_videos: false)
@ -1385,18 +1440,22 @@ get "/feed/channel/:ucid" do |env|
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
extract_videos(nodeset, ucid).each do |video|
videos.each do |video|
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{video.id}" }
xml.element("yt:videoId") { xml.text video.id }
xml.element("yt:channelId") { xml.text ucid }
xml.element("yt:channelId") { xml.text video.ucid }
xml.element("title") { xml.text video.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{video.id}")
xml.element("author") do
xml.element("name") { xml.text channel.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
if auto_generated
xml.element("name") { xml.text video.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{video.ucid}" }
else
xml.element("name") { xml.text author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
end
xml.element("published") { xml.text video.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
@ -1522,6 +1581,23 @@ end
# Channels
# YouTube appears to let users set a "brand" URL that
# is different from their username, so we convert that here
get "/c/:user" do |env|
client = make_client(YT_URL)
user = env.params.url["user"]
response = client.get("/c/#{user}")
document = XML.parse_html(response.body)
anchor = document.xpath_node(%q(//a[contains(@class,"branded-page-header-title-link")]))
if !anchor
next env.redirect "/"
end
env.redirect anchor["href"]
end
get "/user/:user" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}"
@ -1553,23 +1629,43 @@ get "/channel/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
if rss.status_code == 404
error_message = "This channel does not exist."
next templated "error"
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
rss = XML.parse_html(rss.body)
author = rss.xpath_node("//feed/author/name").not_nil!.content
url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
begin
videos = extract_playlist(ucid, page)
rescue ex
error_message = ex.message
next templated "error"
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
templated "channel"
@ -1710,7 +1806,7 @@ get "/api/v1/comments/:id" do |env|
if format == "json"
next {"comments" => [] of String}.to_json
else
next {"contentHtml" => ""}.to_json
next {"contentHtml" => "", "commentCount" => 0}.to_json
end
end
ctoken = ctoken["ctoken"]
@ -1748,7 +1844,7 @@ get "/api/v1/comments/:id" do |env|
if format == "json"
next {"comments" => [] of String}.to_json
else
next {"contentHtml" => ""}.to_json
next {"contentHtml" => "", "commentCount" => 0}.to_json
end
end
@ -1777,9 +1873,38 @@ get "/api/v1/comments/:id" do |env|
node_comment = node["commentRenderer"]
end
content_text = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
content_text ||= node_comment["contentText"]["runs"].as_a.map { |comment| comment["text"] }
.join("").rchop('\ufeff')
contentHtml = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
contentHtml ||= node_comment["contentText"]["runs"].as_a.map do |run|
text = run["text"].as_s
if run["text"] == "\n"
text = "<br>"
end
if run["bold"]?
text = "<b>#{text}</b>"
end
if run["italics"]?
text = "<i>#{text}</i>"
end
if run["navigationEndpoint"]?
url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
if url
url = URI.parse(url)
url = HTTP::Params.parse(url.query.not_nil!)["q"]
else
url = run["navigationEndpoint"]["commandMetadata"]?.try &.["webCommandMetadata"]["url"].as_s
end
text = %(<a href="#{url}">#{text}</a>)
end
text
end.join.rchop('\ufeff')
contentHtml, content = html_to_content(contentHtml)
author = node_comment["authorText"]?.try &.["simpleText"]
author ||= ""
@ -1807,7 +1932,8 @@ get "/api/v1/comments/:id" do |env|
published = decode_date(node_comment["publishedTimeText"]["runs"][0]["text"].as_s.rchop(" (edited)"))
json.field "content", content_text
json.field "content", content
json.field "contentHtml", contentHtml
json.field "published", published.epoch
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
@ -1854,6 +1980,8 @@ get "/api/v1/comments/:id" do |env|
if comments["commentCount"]?
json.field "commentCount", comments["commentCount"]
else
json.field "commentCount", 0
end
end
end
@ -1868,7 +1996,7 @@ get "/api/v1/comments/:id" do |env|
content_html = template_reddit_comments(comments)
content_html = fill_links(content_html, "https", "www.reddit.com")
content_html = add_alt_links(content_html)
content_html = replace_links(content_html)
rescue ex
reddit_thread = nil
content_html = ""
@ -1911,7 +2039,7 @@ get "/api/v1/videos/:id" do |env|
generate_thumbnails(json, video.id)
end
description, video.description = html_to_description(video.description)
video.description, description = html_to_content(video.description)
json.field "description", description
json.field "descriptionHtml", video.description
@ -1929,6 +2057,7 @@ get "/api/v1/videos/:id" do |env|
json.field "isFamilyFriendly", video.is_family_friendly
json.field "allowedRegions", video.allowed_regions
json.field "genre", video.genre
json.field "genreUrl", video.genre_url
json.field "author", video.author
json.field "authorId", video.ucid
@ -2057,7 +2186,7 @@ get "/api/v1/videos/:id" do |env|
end
json.field "author", rv["author"]
json.field "lengthSeconds", rv["length_seconds"].to_i
json.field "viewCountText", rv["short_view_count_text"].rchop(" views")
json.field "viewCountText", rv["short_view_count_text"]
end
end
end
@ -2150,38 +2279,79 @@ get "/api/v1/channels/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/api/v1/channels/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
channel = get_channel(ucid, client, PG_DB, pull_all_videos: false)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
url = produce_channel_videos_url(ucid, 1, auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
# TODO: Integrate this into `get_channel` function
# We can't get everything from RSS feed, so we get it from the channel page
channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
channel_html = XML.parse_html(channel_html)
banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
author = channel_html.xpath_node(%q(//a[contains(@class, "branded-page-header-title-link")])).not_nil!.content
author_url = channel_html.xpath_node(%q(//a[@class="channel-header-profile-image-container spf-link"])).not_nil!["href"]
author_thumbnail = channel_html.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
description = channel_html.xpath_node(%q(//meta[@itemprop="description"])).not_nil!["content"]
description_html = channel_html.xpath_node(%q(//div[contains(@class,"about-description")]))
description_html, description = html_to_content(description_html)
paid = channel_html.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
is_family_friendly = channel_html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
allowed_regions = channel_html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
sub_count, total_views, joined = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
sub_count = sub_count.content.rchop(" subscribers").delete(",").to_i64
total_views = total_views.content.rchop(" views").lchop("").delete(",").to_i64
joined = Time.parse(joined.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
latest_videos = PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid = $1 ORDER BY published DESC LIMIT 15",
channel.id, as: ChannelVideo)
total_views = 0_i64
sub_count = 0_i64
joined = Time.epoch(0)
metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
when .includes? "views"
total_views = item.content.delete("views •,").to_i64
when .includes? "subscribers"
sub_count = item.content.delete("subscribers").delete(",").to_i64
when .includes? "Joined"
joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
end
end
channel_info = JSON.build do |json|
json.object do
json.field "author", channel.author
json.field "authorId", channel.id
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", author_url
json.field "authorBanners" do
@ -2226,19 +2396,37 @@ get "/api/v1/channels/:ucid" do |env|
json.field "isFamilyFriendly", is_family_friendly
json.field "description", description
json.field "descriptionHtml", description_html
json.field "allowedRegions", allowed_regions
json.field "latestVideos" do
json.array do
latest_videos.each do |video|
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "published", video.published.epoch
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id)
end
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
json.field "published", video.published.epoch
json.field "lengthSeconds", video.length_seconds
end
end
end
@ -2256,6 +2444,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
page ||= 1
client = make_client(YT_URL)
if !ucid.match(/UC[a-zA-Z0-9_-]{22}/)
rss = client.get("/feeds/videos.xml?user=#{ucid}")
rss = XML.parse_html(rss.body)
@ -2267,43 +2456,62 @@ get "/api/v1/channels/:ucid/videos" do |env|
end
ucid = ucid.content
url = "/api/v1/channels/#{ucid}/videos"
if env.params.query
url += "?#{env.params.query}"
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/feed/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
next env.redirect url
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
url = produce_videos_url(ucid, page)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with? " - Topic"
auto_generated = true
end
url = produce_channel_videos_url(ucid, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if !json["content_html"]?
env.response.content_type = "application/json"
if response.status_code == 500
response = {"Error" => "Channel does not exist"}.to_json
halt env, status_code: 404, response: response
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
else
next Array(String).new.to_json
videos = extract_videos(nodeset, ucid)
end
else
videos = [] of SearchVideo
end
content_html = json["content_html"].as_s
if content_html.empty?
env.response.content_type = "application/json"
next Hash(String, String).new.to_json
end
document = XML.parse_html(content_html)
videos = JSON.build do |json|
result = JSON.build do |json|
json.array do
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
extract_videos(nodeset, ucid).each do |video|
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id)
end
@ -2320,7 +2528,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
end
env.response.content_type = "application/json"
videos
result
end
get "/api/v1/search" do |env|
@ -2359,13 +2567,14 @@ get "/api/v1/search" do |env|
response = JSON.build do |json|
json.array do
search_results = search(query, page, search_params)
count, search_results = search(query, page, search_params).as(Tuple)
search_results.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
@ -2412,6 +2621,7 @@ get "/api/v1/playlists/:plid" do |env|
json.field "authorUrl", "/channel/#{playlist.ucid}"
json.field "description", playlist.description
json.field "descriptionHtml", playlist.description_html
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views
@ -2653,6 +2863,12 @@ get "/videoplayback" do |env|
client = make_client(URI.parse(host))
response = client.head(url)
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
env.response.headers["Access-Control-Allow-Origin"] = "*"
next env.redirect url.full_path
end
headers = env.request.headers
headers.delete("Host")
headers.delete("Cookie")
@ -2660,30 +2876,24 @@ get "/videoplayback" do |env|
headers.delete("Referer")
client.get(url, headers) do |response|
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect url.full_path
else
env.response.status_code = response.status_code
env.response.status_code = response.status_code
response.headers.each do |key, value|
env.response.headers[key] = value
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
begin
chunk_size = 4096
size = 1
while size > 0
size = IO.copy(response.body_io, env.response.output, chunk_size)
env.response.flush
Fiber.yield
end
rescue ex
break
response.headers.each do |key, value|
env.response.headers[key] = value
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
begin
chunk_size = 4096
size = 1
while size > 0
size = IO.copy(response.body_io, env.response.output, chunk_size)
env.response.flush
Fiber.yield
end
rescue ex
break
end
end
end
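
Note: redirects are now resolved with a HEAD request before streaming begins, and the body is proxied in 4 KB chunks, flushing and yielding after each chunk so a long download cooperates with Crystal's fiber scheduler. The loop above, condensed:

    chunk_size = 4096
    while IO.copy(response.body_io, env.response.output, chunk_size) > 0
      env.response.flush # push the chunk to the client now
      Fiber.yield        # let other fibers run between chunks
    end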

View File

@ -73,7 +73,7 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
page = 1
loop do
url = produce_videos_url(ucid, page)
url = produce_channel_videos_url(ucid, page)
response = client.get(url)
json = JSON.parse(response.body)
@ -130,3 +130,45 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
return channel
end
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
seed = Time.epoch(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
page = "#{timestamp.epoch}"
switch = "\x36"
else
page = "#{page}"
switch = "\x00"
end
meta = "\x12\x06videos #{switch}\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
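
Note: for auto-generated channels the packed page field carries a month-granularity timestamp rather than a page number, and the switch byte flips from \x00 to \x36; either way the result is Base64- and URL-encoded into a /browse_ajax continuation. Usage mirrors the route handlers above (the channel id here is a placeholder, not from this diff):

    client = make_client(YT_URL)
    url = produce_channel_videos_url("UCxxxxxxxxxxxxxxxxxxxxxx", 2)
    response = client.get(url)
    json = JSON.parse(response.body)
    # "content_html" holds the rendered upload list when the channel exists
    has_videos = json["content_html"]? && !json["content_html"].as_s.empty?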

View File

@ -93,7 +93,7 @@ def template_youtube_comments(comments)
<div class="pure-u-23-24">
<p>
<a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
onclick="load_comments(this)">View #{child["replies"]["replyCount"]} replies</a>
onclick="get_youtube_replies(this)">View #{child["replies"]["replyCount"]} replies</a>
</p>
</div>
</div>
@ -113,7 +113,7 @@ def template_youtube_comments(comments)
- #{recode_date(Time.epoch(child["published"].as_i64))} ago
</p>
<div>
#{child["content"]}
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
#{replies_html}
</div>
</div>
@ -127,7 +127,7 @@ def template_youtube_comments(comments)
<div class="pure-u-1">
<p>
<a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
onclick="load_comments(this)">Load more</a>
onclick="get_youtube_replies(this)">Load more</a>
</p>
</div>
</div>
@ -190,37 +190,21 @@ def template_reddit_comments(root)
return html
end
def add_alt_links(html)
alt_links = [] of {String, String}
def replace_links(html)
html = XML.parse_html(html)
# This is painful but likely the only way to accomplish this in Crystal,
# as Crystigiri and others are not able to insert XML Nodes into a document.
# The goal here is to use as little regex as possible
html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
anchor = XML.parse_html(match[0])
anchor = anchor.xpath_node("//a").not_nil!
html.xpath_nodes(%q(//a)).each do |anchor|
url = URI.parse(anchor["href"])
if ["www.youtube.com", "m.youtube.com"].includes?(url.host)
if url.path == "/redirect"
params = HTTP::Params.parse(url.query.not_nil!)
alt_url = params["q"]?
alt_url ||= "/"
anchor["href"] = params["q"]?
else
alt_url = url.full_path
anchor["href"] = url.full_path
end
alt_link = <<-END_HTML
<a href="#{alt_url}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
elsif url.host == "youtu.be"
alt_link = <<-END_HTML
<a href="/watch?v=#{url.path.try &.lchop("/")}&#{url.query}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
anchor["href"] = "/watch?v=#{url.path.try &.lchop("/")}&#{url.query}"
elsif url.to_s == "#"
begin
length_seconds = decode_length_seconds(anchor.content)
@ -228,23 +212,12 @@ def add_alt_links(html)
length_seconds = decode_time(anchor.content)
end
alt_anchor = <<-END_HTML
<a href="javascript:void(0)" onclick="player.currentTime(#{length_seconds})">#{anchor.content}</a>
END_HTML
html = html.sub(anchor.to_s, alt_anchor)
next
else
alt_link = ""
anchor["href"] = "javascript:void(0)"
anchor["onclick"] = "player.currentTime(#{length_seconds})"
end
alt_links << {anchor.to_s, alt_link}
end
alt_links.each do |original, alternate|
html = html.sub(original, original + alternate)
end
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
return html
end
@ -267,5 +240,5 @@ def fill_links(html, scheme, host)
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
end
html
return html
end

View File

@ -116,81 +116,6 @@ def login_req(login_form, f_req)
return HTTP::Params.encode(data)
end
def produce_videos_url(ucid, page = 1)
page = "#{page}"
meta = "\x12\x06videos \x00\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
def generate_captcha(key)
minute = Random::Secure.rand(12)
minute_angle = minute * 30
@ -240,7 +165,7 @@ def generate_captcha(key)
return {challenge: challenge, token: token}
end
def html_to_description(description_html)
def html_to_content(description_html)
if !description_html
description = ""
description_html = ""
@ -251,7 +176,7 @@ def html_to_description(description_html)
description = XML.parse_html(description).content.strip("\n ")
end
return description, description_html
return description_html, description
end
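
Note: besides the rename, the return order is swapped (HTML first, plain text second), so every caller in this diff flips its destructuring to match:

    description_html, description = html_to_content(description_html)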
def extract_videos(nodeset, ucid = nil)
@ -319,7 +244,7 @@ def extract_videos(nodeset, ucid = nil)
view_count ||= 0_i64
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description, description_html = html_to_description(description_html)
description_html, description = html_to_content(description_html)
length_seconds = node.xpath_node(%q(.//span[@class="video-time"]))
if length_seconds

View File

@ -191,3 +191,49 @@ def get_referer(env, fallback = "/")
return referer
end
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
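
Note: these are standard protobuf-style varints: 7 payload bits per byte, least-significant group first, with the high bit set while more bytes follow. A quick round trip through the two helpers above:

    bytes = write_var_int(300)  # => [172, 2], since 300 = 0b10_0101100
    value = read_var_int(bytes) # => 300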

View File

@ -2,13 +2,13 @@ def crawl_videos(db)
ids = Deque(String).new
random = Random.new
search(random.base64(3)).each do |video|
search(random.base64(3)).as(Tuple)[1].each do |video|
ids << video.id
end
loop do
if ids.empty?
search(random.base64(3)).each do |video|
search(random.base64(3)).as(Tuple)[1].each do |video|
ids << video.id
end
end

View File

@ -1,13 +1,14 @@
class Playlist
add_mapping({
title: String,
id: String,
author: String,
ucid: String,
description: String,
video_count: Int32,
views: Int64,
updated: Time,
title: String,
id: String,
author: String,
ucid: String,
description: String,
description_html: String,
video_count: Int32,
views: Int64,
updated: Time,
})
end
@ -99,7 +100,7 @@ def produce_playlist_url(id, index)
slice = URI.escape(slice)
# Outer Base64
continuation = [0x1a.to_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = [0x1a_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = ucid.bytes + continuation
continuation = [0x12_u8, ucid.size.to_u8] + continuation
continuation = [0xe2_u8, 0xa9_u8, 0x85_u8, 0xb2_u8, 2_u8, continuation.size.to_u8] + continuation
@ -123,17 +124,8 @@ def fetch_playlist(plid)
title = document.xpath_node(%q(//h1[@class="pl-header-title"])).not_nil!.content
title = title.strip(" \n")
description = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
if description
description = description.to_xml.strip(" \n")
description = description.split("<button ")[0]
description = fill_links(description, "https", "www.youtube.com")
description = add_alt_links(description)
else
description = ""
end
description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description, description_html = html_to_content(description_html)
anchor = document.xpath_node(%q(//ul[@class="pl-header-details"])).not_nil!
author = anchor.xpath_node(%q(.//li[1]/a)).not_nil!.content
@ -151,6 +143,7 @@ def fetch_playlist(plid)
author,
ucid,
description,
description_html,
video_count,
views,
updated

View File

@ -14,31 +14,36 @@ end
def search(query, page = 1, search_params = build_search_params(content_type: "video"))
client = make_client(YT_URL)
if query.empty?
return {0, [] of SearchVideo}
end
html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&disable_polymer=1").body
if html.empty?
return [] of SearchVideo
return {0, [] of SearchVideo}
end
html = XML.parse_html(html)
nodeset = html.xpath_nodes(%q(//ol[@class="item-section"]/li))
videos = extract_videos(nodeset)
return videos
return {nodeset.size, videos}
end
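
Note: search now returns {count, results}, where count is the number of result nodes on the page (20 when full); the search template below uses it to decide whether to offer a next page. Call sites destructure it (the query here is a made-up example):

    count, videos = search("crystal lang").as(Tuple)
    puts "#{count} results on this page"
    videos.each { |video| puts video.title }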
def build_search_params(sort_by = "relevance", date : String = "", content_type : String = "", duration : String = "", features : Array(String) = [] of String)
def build_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
duration : String = "", features : Array(String) = [] of String)
head = "\x08"
head += case sort_by
head += case sort
when "relevance"
"\x00"
when "rating"
"\x01"
when "upload_date"
when "upload_date", "date"
"\x02"
when "view_count"
when "view_count", "views"
"\x03"
else
raise "No sort #{sort_by}"
raise "No sort #{sort}"
end
body = ""
@ -87,7 +92,7 @@ def build_search_params(sort_by = "relevance", date : String = "", content_type
"\x20\x01"
when "subtitles"
"\x28\x01"
when "creative_commons"
when "creative_commons", "cc"
"\x30\x01"
when "3d"
"\x38\x01"

View File

@ -3,23 +3,22 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
url = document.match(/src="(?<url>\/yts\/jsbin\/player-.{9}\/en_US\/base.js)"/).not_nil!["url"]
player = client.get(url).body
function_name = player.match(/\(b\|\|\(b="signature"\),d.set\(b,(?<name>[a-zA-Z0-9]{2})\(c\)\)\)/).not_nil!["name"]
function_body = player.match(/#{function_name}=function\(a\){(?<body>[^}]+)}/).not_nil!["body"]
function_name = player.match(/"signature",(?<name>[a-zA-Z0-9]{2})\(/).not_nil!["name"]
function_body = player.match(/^#{function_name}=function\(a\){(?<body>[^}]+)}/m).not_nil!["body"]
function_body = function_body.split(";")[1..-2]
var_name = function_body[0][0, 2]
var_body = player.delete("\n").match(/var #{var_name}={(?<body>(.*?))};/).not_nil!["body"]
operations = {} of String => String
matches = player.delete("\n").match(/var #{var_name}={(?<op1>[a-zA-Z0-9]{2}:[^}]+}),(?<op2>[a-zA-Z0-9]{2}:[^}]+}),(?<op3>[a-zA-Z0-9]{2}:[^}]+})};/).not_nil!
3.times do |i|
operation = matches["op#{i + 1}"]
op_name = operation[0, 2]
var_body.split("},").each do |operation|
op_name = operation.match(/^[^:]+/).not_nil![0]
op_body = operation.match(/\{[^}]+/).not_nil![0]
op_body = operation.match(/\{[^}]+\}/).not_nil![0]
case op_body
when "{a.reverse()}"
when "{a.reverse()"
operations[op_name] = "a"
when "{a.splice(0,b)}"
when "{a.splice(0,b)"
operations[op_name] = "b"
else
operations[op_name] = "c"
@ -28,11 +27,10 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
decrypt_function = [] of {name: String, value: Int32}
function_body.each do |function|
function = function.lchop(var_name + ".")
op_name = function[0, 2]
function = function.lchop(var_name).delete("[].")
function = function.lchop(op_name + "(a,")
value = function.rchop(")").to_i
op_name = function.match(/[^\(]+/).not_nil![0]
value = function.match(/\(a,(?<value>[\d]+)\)/).not_nil!["value"].to_i
decrypt_function << {name: operations[op_name], value: value}
end
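
Note: the parsing above tags each obfuscation step as "a" (reverse), "b" (splice) or "c" (the remaining swap), paired with its integer argument. Applying the list to a raw signature is then a simple fold; a hedged sketch, with the function name and swap semantics assumed here rather than taken from this diff:

    # Hypothetical applier for the list built by fetch_decrypt_function
    def apply_decrypt(signature : String, decrypt_function : Array({name: String, value: Int32}))
      a = signature.chars
      decrypt_function.each do |op|
        case op[:name]
        when "a" # a.reverse()
          a.reverse!
        when "b" # a.splice(0, b): drop the first value chars
          op[:value].times { a.shift }
        else     # assumed swap of a[0] with a[value % a.size]
          i = op[:value] % a.size
          a[0], a[i] = a[i], a[0]
        end
      end
      a.join
    end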

View File

@ -27,22 +27,46 @@ class User
end
DEFAULT_USER_PREFERENCES = Preferences.from_json({
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => "youtube",
"captions" => ["", "", ""],
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => ["youtube", ""],
"captions" => ["", "", ""],
"related_videos" => true,
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
}.to_json)
class Preferences
module StringToArray
def self.to_json(value : Array(String), json : JSON::Builder)
json.array do
value.each do |element|
json.string element
end
end
end
def self.from_json(value : JSON::PullParser) : Array(String)
begin
result = [] of String
value.read_array do
result << value.read_string
end
rescue ex
result = [value.read_string, ""]
end
result
end
end
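
Note: this converter is a backwards-compatibility shim: older saved preferences stored comments as a bare string, and the rescue branch wraps such a value into the new two-slot array. Its behavior in isolation:

    pull = JSON::PullParser.new(%("youtube"))
    Preferences::StringToArray.from_json(pull) # => ["youtube", ""]

    pull = JSON::PullParser.new(%(["reddit", "youtube"]))
    Preferences::StringToArray.from_json(pull) # => ["reddit", "youtube"]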
JSON.mapping({
video_loop: Bool,
autoplay: Bool,
@ -50,8 +74,9 @@ class Preferences
quality: String,
volume: Int32,
comments: {
type: String,
default: "youtube",
type: Array(String),
default: ["youtube", ""],
converter: StringToArray,
},
captions: {
type: Array(String),
@ -61,6 +86,10 @@ class Preferences
type: Bool,
default: false,
},
related_videos: {
type: Bool,
default: true,
},
dark_mode: Bool,
thin_mode: {
type: Bool,

View File

@ -345,6 +345,10 @@ class Video
allowed_regions: Array(String),
is_family_friendly: Bool,
genre: String,
genre_url: {
type: String,
default: "/",
},
})
end
@ -371,10 +375,12 @@ def get_video(id, db, refresh = true)
begin
video = fetch_video(id)
video_array = video.to_a
args = arg_array(video_array[1..-1], 2)
db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,\
published,description,language,author,ucid, allowed_regions, is_family_friendly, genre)\
published,description,language,author,ucid, allowed_regions, is_family_friendly,\
genre, genre_url)\
= (#{args}) WHERE id = $1", video_array)
rescue ex
db.exec("DELETE FROM videos * WHERE id = $1", id)
@ -384,6 +390,7 @@ def get_video(id, db, refresh = true)
else
video = fetch_video(id)
video_array = video.to_a
args = arg_array(video_array)
db.exec("INSERT INTO videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
@ -490,10 +497,12 @@ def fetch_video(id)
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
is_family_friendly = html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
genre = html.xpath_node(%q(//meta[@itemprop="genre"])).not_nil!["content"]
genre_url = html.xpath_node(%(//a[text()="#{genre}"])).not_nil!["href"]
video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
nil, author, ucid, allowed_regions, is_family_friendly, genre)
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url)
return video
end
@ -504,24 +513,37 @@ end
def process_video_params(query, preferences)
autoplay = query["autoplay"]?.try &.to_i?
preferred_captions = query["subtitles"]?.try &.split(",").map { |a| a.downcase }
quality = query["quality"]?
speed = query["speed"]?.try &.to_f?
video_loop = query["loop"]?.try &.to_i?
volume = query["volume"]?.try &.to_i?
if preferences
autoplay ||= preferences.autoplay.to_unsafe
preferred_captions ||= preferences.captions
quality ||= preferences.quality
speed ||= preferences.speed
video_loop ||= preferences.video_loop.to_unsafe
volume ||= preferences.volume
end
autoplay ||= 0
autoplay = autoplay == 1
autoplay ||= 0
preferred_captions ||= [] of String
quality ||= "hd720"
speed ||= 1
video_loop ||= 0
volume ||= 100
autoplay = autoplay == 1
video_loop = video_loop == 1
if query["t"]?
video_start = decode_time(query["t"])
end
video_start ||= 0
if query["time_continu"]?
video_start = decode_time(query["t"])
if query["time_continue"]?
video_start = decode_time(query["time_continue"])
end
video_start ||= 0
if query["start"]?
@ -542,14 +564,25 @@ def process_video_params(query, preferences)
raw ||= 0
raw = raw == 1
quality = query["quality"]?
quality ||= "hd720"
controls = query["controls"]?.try &.to_i?
controls ||= 1
controls = controls == 1
return autoplay, video_loop, video_start, video_end, listen, raw, quality, controls
params = {
autoplay: autoplay,
controls: controls,
listen: listen,
preferred_captions: preferred_captions,
quality: quality,
raw: raw,
speed: speed,
video_end: video_end,
video_loop: video_loop,
video_start: video_start,
volume: volume,
}
return params
end
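
Note: returning a NamedTuple instead of an eight-value tuple means callers pick fields by name rather than destructuring everything in order, as the /watch and /embed handlers above now do:

    params = process_video_params(env.params.query, preferences)
    if params[:listen]
      env.params.query.delete_all("listen")
    end
    volume = params[:volume].to_f / 100 # player volume in 0.0..1.0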
def generate_thumbnails(json, id)

View File

@ -51,7 +51,7 @@
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if videos.size == 100 %>
<% if videos.size == 30 %>
<a href="/channel/<%= ucid %>?page=<%= page + 1 %>">Next page</a>
<% end %>
</div>

View File

@ -1,19 +1,19 @@
<video style="width:100%" playsinline poster="<%= thumbnail %>" title="<%= HTML.escape(video.title) %>"
id="player" class="video-js"
<% if autoplay %>autoplay<% end %>
<% if video_loop %>loop<% end %>
<% if controls %>controls<% end %>>
<% if params[:autoplay] %>autoplay<% end %>
<% if params[:video_loop] %>loop<% end %>
<% if params[:controls] %>controls<% end %>>
<% if hlsvp %>
<source src="<%= hlsvp %>" type="application/x-mpegURL">
<% else %>
<% if listen %>
<% if params[:listen] %>
<% audio_streams.each_with_index do |fmt, i| %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
<% end %>
<% else %>
<% fmt_stream.each_with_index do |fmt, i| %>
<% if preferences %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= preferences.quality == fmt["label"].split(" - ")[0] %>">
<% if params[:quality] %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= params[:quality] == fmt["label"].split(" - ")[0] %>">
<% else %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= i == 0 ? true : false %>">
<% end %>
@ -110,7 +110,7 @@ var player = videojs("player", options, function() {
player.share(shareOptions);
<% if video_start > 0 || video_end > 0 %>
<% if params[:video_start] > 0 || params[:video_end] > 0 %>
player.markers({
onMarkerReached: function(marker) {
if (marker.text === "End") {
@ -122,19 +122,19 @@ player.markers({
}
},
markers: [
{ time: <%= video_start %>, text: "Start" },
<% if video_end < 0 %>
{ time: <%= params[:video_start] %>, text: "Start" },
<% if params[:video_end] < 0 %>
{ time: <%= video.info["length_seconds"].to_f - 0.5 %>, text: "End" }
<% else %>
{ time: <%= video_end %>, text: "End" }
{ time: <%= params[:video_end] %>, text: "End" }
<% end %>
]
});
player.currentTime(<%= video_start %>);
player.currentTime(<%= params[:video_start] %>);
<% end %>
<% if !listen %>
<% if !params[:listen] %>
var currentSources = player.currentSources();
for (var i = 0; i < currentSources.length; i++) {
if (player.canPlayType(currentSources[i]["type"].split(";")[0]) === "") {
@ -146,8 +146,6 @@ for (var i = 0; i < currentSources.length; i++) {
player.src(currentSources);
<% end %>
<% if preferences %>
player.volume(<%= preferences.volume.to_f / 100 %>);
player.playbackRate(<%= preferences.speed %>);
<% end %>
player.volume(<%= params[:volume].to_f / 100 %>);
player.playbackRate(<%= params[:speed] %>);
</script>

View File

@ -1,6 +1,6 @@
<div class="pure-u-1 pure-u-md-1-4">
<div class="h-box">
<% if video.responds_to?(:playlists) %>
<% if video.responds_to?(:playlists) && !video.playlists.empty? %>
<% params = "&list=#{video.playlists[0]}" %>
<% else %>
<% params = nil %>

View File

@ -3,7 +3,7 @@
<% end %>
<div class="h-box">
<form class="pure-form pure-form-aligned" enctype="multipart/form-data" action="/data_control" method="post">
<form class="pure-form pure-form-aligned" enctype="multipart/form-data" action="/data_control?referer=<%= referer %>" method="post">
<fieldset>
<legend>Import</legend>

View File

@ -48,10 +48,19 @@ function update_value(element) {
</div>
<div class="pure-control-group">
<label for="comments">Pull comments from: </label>
<select name="comments" id="comments">
<% {"youtube", "reddit"}.each do |option| %>
<option <% if user.preferences.comments == option %> selected <% end %>><%= option %></option>
<label for="comments_0">Default comments: </label>
<select name="comments_0" id="comments_0">
<% {"", "youtube", "reddit"}.each do |option| %>
<option <% if user.preferences.comments[0] == option %> selected <% end %>><%= option %></option>
<% end %>
</select>
</div>
<div class="pure-control-group">
<label for="comments_1">Fallback comments: </label>
<select name="comments_1" id="comments_1">
<% {"", "youtube", "reddit"}.each do |option| %>
<option <% if user.preferences.comments[1] == option %> selected <% end %>><%= option %></option>
<% end %>
</select>
</div>
@ -66,7 +75,7 @@ function update_value(element) {
</div>
<div class="pure-control-group">
<label for="captions_fallback">Fallback languages: </label>
<label for="captions_fallback">Fallback captions: </label>
<select class="pure-u-1-5" name="captions_1" id="captions_1">
<% CAPTION_LANGUAGES.each do |option| %>
<option <% if user.preferences.captions[1] == option %> selected <% end %>><%= option %></option>
@ -80,7 +89,13 @@ function update_value(element) {
</select>
</div>
<div class="pure-control-group">
<label for="related_videos">Show related videos? </label>
<input name="related_videos" id="related_videos" type="checkbox" <% if user.preferences.related_videos %>checked<% end %>>
</div>
<legend>Visual preferences</legend>
<div class="pure-control-group">
<label for="dark_mode">Dark mode: </label>
<input name="dark_mode" id="dark_mode" type="checkbox" <% if user.preferences.dark_mode %>checked<% end %>>
@ -92,6 +107,7 @@ function update_value(element) {
</div>
<legend>Subscription preferences</legend>
<div class="pure-control-group">
<label for="redirect_feed">Redirect homepage to feed: </label>
<input name="redirect_feed" id="redirect_feed" type="checkbox" <% if user.preferences.redirect_feed %>checked<% end %>>
@ -127,12 +143,13 @@ function update_value(element) {
</div>
<legend>Data preferences</legend>
<div class="pure-control-group">
<a href="/clear_watch_history">Clear watch history</a>
<a href="/clear_watch_history?referer=<%= referer %>">Clear watch history</a>
</div>
<div class="pure-control-group">
<a href="/data_control">Import/Export data</a>
<a href="/data_control?referer=<%= referer %>">Import/Export data</a>
</div>
<div class="pure-control-group">

View File

@ -1,5 +1,5 @@
<% content_for "header" do %>
<title><%= query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<title><%= search_query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<% end %>
<% videos.each_slice(4) do |slice| %>
@ -18,6 +18,8 @@
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if count == 20 %>
<a href="/search?q=<%= query %>&page=<%= page + 1 %>">Next page</a>
<% end %>
</div>
</div>

View File

@ -8,7 +8,7 @@
</div>
<div class="pure-u-1-3" style="text-align:right;">
<h3>
<a href="/data_control">Import/Export</a>
<a href="/data_control?referer=<%= referer %>">Import/Export</a>
</h3>
</div>
</div>

View File

@ -16,6 +16,13 @@
</div>
<center><%= notifications.size %> unseen notifications</center>
<% if !notifications.empty? %>
<div class="h-box">
<hr>
</div>
<% end %>
<% notifications.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>

View File

@ -4,6 +4,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="referrer" content="no-referrer">
<%= yield_content "header" %>
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/pure-min.css">
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/grids-responsive-min.css">
@@ -34,7 +35,7 @@
<div class="pure-u-1 pure-u-md-8-24 user-field">
<% if env.get? "user" %>
<div class="pure-u-1-4">
<a href="/toggle_theme?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<% preferences = env.get("user").as(User).preferences %>
<% if preferences.dark_mode %>
<i class="icon ion-ios-sunny"></i>
@@ -54,15 +55,15 @@
</a>
</div>
<div class="pure-u-1-4">
<a href="/preferences?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/preferences?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<i class="icon ion-ios-cog"></i>
</a>
</div>
<div class="pure-u-1-4">
<a href="/signout?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Sign out</a>
<a href="/signout?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Sign out</a>
</div>
<% else %>
<a href="/login?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Login</a>
<a href="/login?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Login</a>
<% end %>
</div>
</div>
@@ -72,6 +73,11 @@
Roth</a>.
Source available <a
href="https://github.com/omarroth/invidious">here</a>.
<p>Liberapay:
<a href="https://liberapay.com/omarroth">
https://liberapay.com/omarroth
</a>
</p>
<p>Patreon:
<a href="https://patreon.com/omarroth">
https://patreon.com/omarroth


@@ -30,163 +30,10 @@
<%= rendered "components/player" %>
</div>
<script>
function toggle(target) {
body = target.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function toggle_comments(target) {
body = target.parentNode.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function load_comments(target) {
var continuation = target.getAttribute("data-continuation");
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url =
"/api/v1/comments/<%= video.id %>?format=html&continuation=" + continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
body.innerHTML = xhr.response.contentHtml;
} else {
body.innerHTML = fallback;
}
}
};
xhr.ontimeout = function() {
body.innerHTML = fallback;
};
}
function get_reddit_comments() {
var url = "/api/v1/comments/<%= video.id %>?source=reddit";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
{title}
</h3>
<b>
<a target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a>
</b>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
title: xhr.response.title,
permalink: xhr.response.permalink,
contentHtml: xhr.response.contentHtml
});
} else {
get_youtube_comments();
}
};
xhr.ontimeout = function() {
get_reddit_comments();
};
}
function get_youtube_comments() {
var url = "/api/v1/comments/<%= video.id %>?format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
} else {
comments = document.getElementById("comments");
comments.innerHTML = "";
}
};
xhr.ontimeout = function() {
comments = document.getElementById("comments");
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
get_youtube_comments();
};
}
function commaSeparateNumber(val){
while (/(\d+)(\d{3})/.test(val.toString())){
val = val.toString().replace(/(\d+)(\d{3})/, '$1'+','+'$2');
}
return val;
}
String.prototype.supplant = function(o) {
return this.replace(/{([^{}]*)}/g, function(a, b) {
var r = o[b];
return typeof r === "string" || typeof r === "number" ? r : a;
});
};
<% if preferences && preferences.comments == "reddit" %>
get_reddit_comments();
<% else %>
get_youtube_comments();
<% end %>
</script>
<div class="h-box">
<h1>
<%= HTML.escape(video.title) %>
<% if listen %>
<% if params[:listen] %>
<a href="/watch?<%= env.params.query %>">
<i class="icon ion-ios-videocam"></i>
</a>
@@ -208,7 +55,7 @@ get_youtube_comments();
<p><i class="icon ion-ios-eye"></i> <%= number_with_separator(video.views) %></p>
<p><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
<p><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
<p id="Genre">Genre: <%= video.genre %></p>
<p id="Genre">Genre: <a href="<%= video.genre_url %>"><%= video.genre %></a></p>
<p id="FamilyFriendly">Family Friendly? <%= video.is_family_friendly %></p>
<p id="Wilson">Wilson Score: <%= video.wilson_score.round(4) %></p>
<p id="Rating">Rating: <%= rating.round(4) %> / 5</p>
@@ -266,6 +113,7 @@ get_youtube_comments();
</div>
</div>
<div class="pure-u-1 pure-u-md-1-5">
<% if preferences && preferences.related_videos %>
<div class="h-box">
<% rvs.each do |rv| %>
<% if rv.has_key?("id") %>
@@ -282,5 +130,191 @@ get_youtube_comments();
<% end %>
<% end %>
</div>
<% end %>
</div>
</div>
<script>
function toggle(target) {
body = target.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function toggle_comments(target) {
body = target.parentNode.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function get_youtube_replies(target) {
var continuation = target.getAttribute("data-continuation");
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url =
"/api/v1/comments/<%= video.id %>?format=html&continuation=" + continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
body.innerHTML = xhr.response.contentHtml;
} else {
body.innerHTML = fallback;
}
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
body.innerHTML = fallback;
};
}
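// get_youtube_replies is presumably wired to reply toggles rendered by the
// comments API, along the lines of (token value illustrative):
//   <a href="javascript:void(0)" data-continuation="<token>"
//      onclick="get_youtube_replies(this)">View replies</a>
// The data-continuation attribute carries the token for the next slice of
// the thread, which the handler forwards to /api/v1/comments/<id>.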
function get_reddit_comments() {
var url = "/api/v1/comments/<%= video.id %>?source=reddit";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
{title}
</h3>
<b>
<a target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a>
</b>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
title: xhr.response.title,
permalink: xhr.response.permalink,
contentHtml: xhr.response.contentHtml
});
} else {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
get_reddit_comments();
};
}
function get_youtube_comments() {
var url = "/api/v1/comments/<%= video.id %>?format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
if (xhr.response.commentCount > 0) {
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
} else {
comments.innerHTML = "";
}
} else {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
comments = document.getElementById("comments");
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
get_youtube_comments();
};
}
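// Note that both ontimeout handlers above retry unconditionally, so a
// consistently slow comments API is polled forever. A bounded variant could
// cap the attempts, e.g. (sketch only):
//   var retries = 0;
//   xhr.ontimeout = function () {
//     if (retries++ < 3) get_youtube_comments();
//     else document.getElementById("comments").innerHTML = "";
//   };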
function commaSeparateNumber(val){
while (/(\d+)(\d{3})/.test(val.toString())){
val = val.toString().replace(/(\d+)(\d{3})/, '$1'+','+'$2');
}
return val;
}
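// Usage: commaSeparateNumber(1234567) returns "1,234,567". Where supported,
// Number.prototype.toLocaleString produces the same grouping:
//   (1234567).toLocaleString("en-US") === "1,234,567"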
String.prototype.supplant = function(o) {
return this.replace(/{([^{}]*)}/g, function(a, b) {
var r = o[b];
return typeof r === "string" || typeof r === "number" ? r : a;
});
};
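// Usage: "{n} comments".supplant({ n: 42 }) returns "42 comments"; tokens
// without a matching string or number property are left untouched.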
<% if preferences %>
<% if preferences.comments[0] == "youtube" %>
get_youtube_comments();
<% elsif preferences.comments[0] == "reddit" %>
get_reddit_comments();
<% else %>
<% if preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% elsif preferences.comments[1] == "reddit" %>
get_reddit_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
<% end %>
<% else %>
get_youtube_comments();
<% end %>
</script>
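The ECR conditionals above compile down to a single initial call. An equivalent runtime dispatch, as a sketch (assuming a prefs object mirroring the server-side preferences, with comments as the [primary, fallback] pair):

    function pick_comment_source(prefs) {
      var table = { youtube: get_youtube_comments, reddit: get_reddit_comments };
      if (!prefs) return get_youtube_comments;   // no account: default source
      return table[prefs.comments[0]] ||         // primary, if set
             table[prefs.comments[1]] ||         // otherwise fallback
             function () { document.getElementById("comments").innerHTML = ""; };
    }
    pick_comment_source(preferences)();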