broquemonsieur 2024-10-10 14:46:34 +00:00 committed by GitHub
commit 9ac7173fc7
42 changed files with 2349 additions and 24 deletions

.gitignore

@@ -7,3 +7,4 @@
/invidious
/sentry
/config/config.yml
.DS_Store


@@ -814,5 +814,67 @@ h1, h2, h3, h4, h5, p,
}
#download_widget {
width: 100%;
width: 100%;
}
/*
* Compilations
*/
input.compilation-video-timestamp {
width: 50px;
height: 20px;
}
div.compilation-video-panel {
display:flex;
justify-content:flex-start;
width:calc(100% - 20px);
height:100px;
border:2px solid #ccc;
margin: 10px;
/*background: #d9d9d9;*/
}
div.compilation-order-swap-arrows {
display:flex;
flex-direction:column;
justify-content:space-between;
}
svg.compilation-video-swap-arrow {
border: solid black;
width:20px;
height:50%;
background-color: beige;
margin: 10px;
}
div.compilation-video-input-panel {
display:flex;
flex-direction:column;
min-width: 0;
margin: 10px;
}
div.compilation-video-title {
display:flex;
justify-content:flex-start;
}
span.compilation-video-title {
text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
}
div.compilation-video-timestamp-set {
display:flex;
justify-content: flex-start;
align-items: center;
}
div.compilation-video-thumbnail {
position: relative;
box-sizing: border-box;
}


@@ -0,0 +1,63 @@
'use strict';
var compilation_data = JSON.parse(document.getElementById('compilation_data').textContent);
var payload = 'csrf_token=' + compilation_data.csrf_token;
function add_compilation_video(target) {
var select = target.parentNode.children[0].children[1];
var option = select.children[select.selectedIndex];
var url = '/compilation_ajax?action_add_video=1&redirect=false' +
'&video_id=' + target.getAttribute('data-id') +
'&compilation_id=' + option.getAttribute('data-compid');
helpers.xhr('POST', url, {payload: payload}, {
on200: function (response) {
option.textContent = '✓' + option.textContent;
}
});
}
function add_compilation_item(target) {
var tile = target.parentNode.parentNode.parentNode.parentNode.parentNode;
tile.style.display = 'none';
var url = '/compilation_ajax?action_add_video=1&redirect=false' +
'&video_id=' + target.getAttribute('data-id') +
'&compilation_id=' + target.getAttribute('data-compid');
helpers.xhr('POST', url, {payload: payload}, {
onNon200: function (xhr) {
tile.style.display = '';
}
});
}
function remove_compilation_item(target) {
var tile = target.parentNode.parentNode.parentNode.parentNode.parentNode;
tile.style.display = 'none';
var url = '/compilation_ajax?action_remove_video=1&redirect=false' +
'&set_video_id=' + target.getAttribute('data-index') +
'&compilation_id=' + target.getAttribute('data-compid');
helpers.xhr('POST', url, {payload: payload}, {
onNon200: function (xhr) {
tile.style.display = '';
}
});
}
function move_compilation_video_before(target) {
var tile = target.parentNode.parentNode.parentNode.parentNode.parentNode;
tile.style.display = 'none';
var url = '/compilation_ajax?action_move_video_before=1&redirect=false' +
'&set_video_id=' + target.getAttribute('data-index') +
'&compilation_id=' + target.getAttribute('data-compid');
helpers.xhr('POST', url, {payload: payload}, {
onNon200: function (xhr) {
tile.style.display = '';
}
});
}


@@ -1,6 +1,39 @@
'use strict';
var video_data = JSON.parse(document.getElementById('video_data').textContent);
function get_compilation(compid) {
var compid_url;
compid_url = '/api/v1/compilations/' + compid +
'?index=' + video_data.index +
'&continuation=' + video_data.id +
'&format=html&hl=' + video_data.preferences.locale;
helpers.xhr('GET', compid_url, {retries: 5, entity_name: 'compilation'}, {
on200: function (response) {
if (!response.nextVideo)
return;
player.on('ended', function () {
var url = new URL('https://example.com/embed/' + response.nextVideo);
url.searchParams.set('comp', compid);
if (!compid.startsWith('RD'))
url.searchParams.set('index', response.index);
if (video_data.params.autoplay || video_data.params.continue_autoplay)
url.searchParams.set('autoplay', '1');
if (video_data.params.listen !== video_data.preferences.listen)
url.searchParams.set('listen', video_data.params.listen);
if (video_data.params.speed !== video_data.preferences.speed)
url.searchParams.set('speed', video_data.params.speed);
if (video_data.params.local !== video_data.preferences.local)
url.searchParams.set('local', video_data.params.local);
location.assign(url.pathname + url.search);
});
}
});
}
function get_playlist(plid) {
var plid_url;
if (plid.startsWith('RD')) {
@@ -43,6 +76,8 @@ function get_playlist(plid) {
addEventListener('load', function (e) {
if (video_data.plid) {
get_playlist(video_data.plid);
} else if (video_data.compid) {
get_compilation(video_data.compid);
} else if (video_data.video_series) {
player.on('ended', function () {
var url = new URL('https://example.com/embed/' + video_data.video_series.shift());


@@ -60,12 +60,21 @@
document.querySelectorAll('[data-onclick="add_playlist_video"]').forEach(function (el) {
el.onclick = function () { add_playlist_video(el); };
});
document.querySelectorAll('[data-onclick="add_compilation_video"]').forEach(function (el) {
el.onclick = function () { add_compilation_video(el); };
});
document.querySelectorAll('[data-onclick="add_playlist_item"]').forEach(function (el) {
el.onclick = function () { add_playlist_item(el); };
});
document.querySelectorAll('[data-onclick="add_compilation_item"]').forEach(function (el) {
el.onclick = function () { add_compilation_item(el); };
});
document.querySelectorAll('[data-onclick="remove_playlist_item"]').forEach(function (el) {
el.onclick = function () { remove_playlist_item(el); };
});
document.querySelectorAll('[data-onclick="remove_compilation_item"]').forEach(function (el) {
el.onclick = function () { remove_compilation_item(el); };
});
document.querySelectorAll('[data-onclick="revoke_token"]').forEach(function (el) {
el.onclick = function () { revoke_token(el); };
});


@@ -254,8 +254,13 @@ if (video_data.params.video_start > 0 || video_data.params.video_end > 0) {
player.markers({
onMarkerReached: function (marker) {
if (marker.text === 'End')
player.loop() ? player.markers.prev('Start') : player.pause();
if (marker.text === 'End') {
if (video_data.ending_timestamp_seconds) {
player.currentTime(player.duration());
} else {
player.loop() ? player.markers.prev('Start') : player.pause();
}
}
},
markers: markers
});


@@ -50,6 +50,59 @@ function continue_autoplay(event) {
}
}
function get_compilation(compid) {
var compilation = document.getElementById('compilation');
compilation.innerHTML = spinnerHTMLwithHR;
var compid_url;
compid_url = '/api/v1/compilations/' + compid +
'?index=' + video_data.index +
'&continuation=' + video_data.id +
'&format=html&hl=' + video_data.preferences.locale;
helpers.xhr('GET', compid_url, {retries: 5, entity_name: 'compilation'}, {
on200: function (response) {
compilation.innerHTML = response.compilationHtml;
if (!response.nextVideo) return;
var nextVideo = document.getElementById(response.nextVideo);
nextVideo.parentNode.parentNode.scrollTop = nextVideo.offsetTop;
player.on('ended', function () {
var url = new URL('https://example.com/watch?v=' + response.nextVideo);
url.searchParams.set('comp', compid);
if (!compid.startsWith('RD'))
url.searchParams.set('index', response.index);
if (video_data.params.autoplay || video_data.params.continue_autoplay)
url.searchParams.set('autoplay', '1');
if (video_data.params.listen !== video_data.preferences.listen)
url.searchParams.set('listen', video_data.params.listen);
if (video_data.params.speed !== video_data.preferences.speed)
url.searchParams.set('speed', video_data.params.speed);
if (video_data.params.local !== video_data.preferences.local)
url.searchParams.set('local', video_data.params.local);
url.searchParams.set('t', video_data.starting_timestamp_seconds);
url.searchParams.set('end', video_data.ending_timestamp_seconds);
location.assign(url.pathname + url.search);
});
},
onNon200: function (xhr) {
compilation.innerHTML = '';
document.getElementById('continue').style.display = '';
},
onError: function (xhr) {
compilation.innerHTML = spinnerHTMLwithHR;
},
onTimeout: function (xhr) {
compilation.innerHTML = spinnerHTMLwithHR;
}
});
}
function get_playlist(plid) {
var playlist = document.getElementById('playlist');
@@ -177,7 +230,8 @@ if (video_data.play_next) {
addEventListener('load', function (e) {
if (video_data.plid)
get_playlist(video_data.plid);
if (video_data.compid)
get_compilation(video_data.compid);
if (video_data.params.comments[0] === 'youtube') {
get_youtube_comments();
} else if (video_data.params.comments[0] === 'reddit') {


@@ -485,6 +485,14 @@ hmac_key: "CHANGE_ME!!"
##
#playlist_length_limit: 500
##
## Maximum custom compilation length limit.
##
## Accepted values: Integer
## Default: 500
##
#compilation_length_limit: 500
#########################################
#
# Default user preferences


@@ -0,0 +1,21 @@
-- Table: public.compilation_videos
-- DROP TABLE public.compilation_videos;
CREATE TABLE IF NOT EXISTS public.compilation_videos
(
title text,
id text,
author text,
ucid text,
length_seconds integer,
starting_timestamp_seconds integer,
ending_timestamp_seconds integer,
published timestamptz,
compid text references compilations(id),
index int8,
order_index integer,
PRIMARY KEY (index,compid)
);
GRANT ALL ON TABLE public.compilation_videos TO current_user;


@@ -0,0 +1,31 @@
-- Type: public.compilation_privacy
-- DROP TYPE public.compilation_privacy;
CREATE TYPE public.compilation_privacy AS ENUM
(
'Unlisted',
'Private'
);
-- Table: public.compilations
-- DROP TABLE public.compilations;
CREATE TABLE IF NOT EXISTS public.compilations
(
title text,
id text primary key,
author text,
description text,
video_count integer,
created timestamptz,
updated timestamptz,
privacy compilation_privacy,
index int8[],
first_video_id text,
first_video_starting_timestamp_seconds integer,
first_video_ending_timestamp_seconds integer
);
GRANT ALL ON public.compilations TO current_user;


@@ -10,3 +10,5 @@ psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < config/sql/nonces.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < config/sql/annotations.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < config/sql/playlists.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < config/sql/playlist_videos.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < config/sql/compilations.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < config/sql/compilation_videos.sql


@@ -181,10 +181,15 @@
"Delete playlist `x`?": "Delete playlist `x`?",
"Delete playlist": "Delete playlist",
"Create playlist": "Create playlist",
"Create compilation": "Create compilation",
"Title": "Title",
"Playlist privacy": "Playlist privacy",
"Compilation privacy": "Compilation privacy",
"Editing playlist `x`": "Editing playlist `x`",
"Editing compilation `x`": "Editing compilation `x`",
"playlist_button_add_items": "Add videos",
"compilation_button_add_items": "Add videos",
"compilation_button_play": "Play",
"Show more": "Show more",
"Show less": "Show less",
"Watch on YouTube": "Watch on YouTube",
@@ -246,6 +251,7 @@
"Not a playlist.": "Not a playlist.",
"Playlist does not exist.": "Playlist does not exist.",
"Could not pull trending pages.": "Could not pull trending pages.",
"Compilation does not exist.": "Compilation does not exist.",
"Hidden field \"challenge\" is a required field": "Hidden field \"challenge\" is a required field",
"Hidden field \"token\" is a required field": "Hidden field \"token\" is a required field",
"Erroneous challenge": "Erroneous challenge",
@@ -420,6 +426,7 @@
"Audio mode": "Audio mode",
"Video mode": "Video mode",
"Playlists": "Playlists",
"Compilations": "Compilations",
"search_filters_title": "Filters",
"search_filters_date_label": "Upload date",
"search_filters_date_option_none": "Any date",
@@ -476,6 +483,7 @@
"download_subtitles": "Subtitles - `x` (.vtt)",
"user_created_playlists": "`x` created playlists",
"user_saved_playlists": "`x` saved playlists",
"user_created_compilations": "`x` created compilations",
"Video unavailable": "Video unavailable",
"preferences_save_player_pos_label": "Save playback position: ",
"crash_page_you_found_a_bug": "It looks like you found a bug in Invidious!",


@@ -121,7 +121,7 @@ Kemal.config.extra_options do |parser|
puts SOFTWARE.to_pretty_json
exit
end
parser.on("--migrate", "Run any migrations (beta, use at your own risk!!") do
parser.on("--migrate", "Run any migrations (beta, use at your own risk!!)") do
Invidious::Database::Migrator.new(PG_DB).migrate
exit
end


@@ -0,0 +1,487 @@
struct CompilationVideo
include DB::Serializable
property title : String
property id : String
property author : String
property ucid : String
property length_seconds : Int32
property starting_timestamp_seconds : Int32
property ending_timestamp_seconds : Int32
property published : Time
property compid : String
property index : Int64
property order_index : Int32
def to_xml(xml : XML::Builder)
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{self.id}" }
xml.element("yt:videoId") { xml.text self.id }
xml.element("yt:channelId") { xml.text self.ucid }
xml.element("title") { xml.text self.title }
xml.element("orderIndex") { xml.text self.order_index }
xml.element("link", rel: "alternate", href: "#{HOST_URL}/watch?v=#{self.id}")
xml.element("author") do
xml.element("name") { xml.text self.author }
xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
end
xml.element("content", type: "xhtml") do
xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
xml.element("a", href: "#{HOST_URL}/watch?v=#{self.id}") do
xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg")
end
end
end
xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("media:group") do
xml.element("media:title") { xml.text self.title }
xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg",
width: "320", height: "180")
end
end
end
def to_xml(_xml : Nil = nil)
XML.build { |xml| to_xml(xml) }
end
def to_json(json : JSON::Builder, index : Int32? = nil)
json.object do
json.field "title", self.title
json.field "videoId", self.id
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "videoThumbnails" do
Invidious::JSONify::APIv1.thumbnails(json, self.id)
end
if index
json.field "index", index
json.field "indexId", self.index.to_u64.to_s(16).upcase
else
json.field "index", self.index
end
json.field "orderIndex", self.order_index
json.field "lengthSeconds", self.length_seconds
json.field "startingTimestampSeconds", self.starting_timestamp_seconds
json.field "endingTimestampSeconds", self.ending_timestamp_seconds
end
end
def to_json(_json : Nil, index : Int32? = nil)
JSON.build { |json| to_json(json, index: index) }
end
end
struct Compilation
include DB::Serializable
property title : String
property id : String
property author : String
property author_thumbnail : String
property ucid : String
property description : String
property description_html : String
property video_count : Int32
property views : Int64
property updated : Time
property thumbnail : String?
property first_video_id : String
property first_video_starting_timestamp_seconds : Int32
property first_video_ending_timestamp_seconds : Int32
def to_json(offset, json : JSON::Builder, video_id : String? = nil)
json.object do
json.field "type", "compilation"
json.field "title", self.title
json.field "compilationId", self.id
json.field "compilationThumbnail", self.thumbnail
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", self.author_thumbnail.not_nil!.gsub(/=\d+/, "=s#{quality}")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "description", self.description
json.field "descriptionHtml", self.description_html
json.field "videoCount", self.video_count
json.field "viewCount", self.views
json.field "updated", self.updated.to_unix
json.field "videos" do
json.array do
videos = get_compilation_videos(self, offset: offset, video_id: video_id)
videos.each do |video|
video.to_json(json)
end
end
end
end
end
def to_json(offset, _json : Nil = nil, video_id : String? = nil)
JSON.build do |json|
to_json(offset, json, video_id: video_id)
end
end
def privacy
CompilationPrivacy::Unlisted
end
end
enum CompilationPrivacy
Unlisted = 0
Private = 1
end
struct InvidiousCompilation
include DB::Serializable
property title : String
property id : String
property author : String
property description : String = ""
property video_count : Int32
property created : Time
property updated : Time
@[DB::Field(converter: InvidiousCompilation::CompilationPrivacyConverter)]
property privacy : CompilationPrivacy = CompilationPrivacy::Private
property index : Array(Int64)
property first_video_id : String
property first_video_starting_timestamp_seconds : Int32
property first_video_ending_timestamp_seconds : Int32
@[DB::Field(ignore: true)]
property thumbnail_id : String?
module CompilationPrivacyConverter
def self.from_rs(rs)
return CompilationPrivacy.parse(String.new(rs.read(Slice(UInt8))))
end
end
def to_json(offset, json : JSON::Builder, video_id : String? = nil)
json.object do
json.field "type", "invidiousCompilation"
json.field "title", self.title
json.field "compilationId", self.id
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", nil
json.field "authorThumbnails", [] of String
json.field "description", html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html
json.field "videoCount", self.video_count
json.field "viewCount", self.views
json.field "updated", self.updated.to_unix
json.field "videos" do
json.array do
if (!offset || offset == 0) && !video_id.nil?
index = Invidious::Database::CompilationVideos.select_index(self.id, video_id)
offset = self.index.index(index) || 0
end
videos = get_compilation_videos(self, offset: offset, video_id: video_id)
videos.each_with_index do |video, idx|
video.to_json(json, offset + idx)
end
end
end
end
end
def to_json(offset, _json : Nil = nil, video_id : String? = nil)
JSON.build do |json|
to_json(offset, json, video_id: video_id)
end
end
def thumbnail
# TODO: Get compilation thumbnail from compilation data rather than first video
@thumbnail_id ||= Invidious::Database::CompilationVideos.select_one_id(self.id, self.index) || "-----------"
"/vi/#{@thumbnail_id}/mqdefault.jpg"
end
def author_thumbnail
nil
end
def ucid
nil
end
def views
0_i64
end
def description_html
HTML.escape(self.description)
end
end
def create_compilation(title, privacy, user)
compid = "IVCMP#{Random::Secure.urlsafe_base64(24)[0, 31]}"
compilation = InvidiousCompilation.new({
title: title.byte_slice(0, 150),
id: compid,
author: user.email,
description: "", # Max 5000 characters
video_count: 0,
created: Time.utc,
updated: Time.utc,
privacy: privacy,
index: [] of Int64,
first_video_id: "",
first_video_starting_timestamp_seconds: 0,
first_video_ending_timestamp_seconds: 0,
})
Invidious::Database::Compilations.insert(compilation)
return compilation
end
def subscribe_compilation(user, compilation)
compilation = InvidiousCompilation.new({
title: compilation.title.byte_slice(0, 150),
id: compilation.id,
author: user.email,
description: "", # Max 5000 characters
video_count: compilation.video_count,
created: Time.utc,
updated: compilation.updated,
privacy: CompilationPrivacy::Private,
index: [] of Int64,
first_video_id: "",
first_video_starting_timestamp_seconds: 0,
first_video_ending_timestamp_seconds: 0,
})
Invidious::Database::Compilations.insert(compilation)
return compilation
end
def produce_compilation_continuation(id, index)
if id.starts_with? "UC"
id = "UU" + id.lchop("UC")
end
compid = "VL" + id
# Emulate a "request counter" increment, to make perfectly valid
# ctokens, even if at the time of writing, it's ignored by youtube.
request_count = (index / 100).to_i64 || 1_i64
data = {"1:varint" => index.to_i64}
.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i, padding: false) }
object = {
"80226972:embedded" => {
"2:string" => plid,
"3:base64" => {
"1:varint" => request_count,
"15:string" => "PT:#{data}",
"104:embedded" => {"1:0:varint" => 0_i64},
},
"35:string" => id,
},
}
continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }
return continuation
end
def get_compilation(compid : String)
if compilation = Invidious::Database::Compilations.select(id: compid)
update_first_video_params(compid)
return compilation
else
raise NotFoundException.new("Compilation does not exist.")
end
end
def update_first_video_params(compid : String)
if compilation = Invidious::Database::Compilations.select(id: compid)
compilation_index_array = compilation.index
if (compilation_index_array.size > 0)
first_index = compilation_index_array[0]
first_id = Invidious::Database::CompilationVideos.select_id_from_index(first_index)
if !first_id.nil?
timestamps = Invidious::Database::CompilationVideos.select_timestamps(compid, first_id)
if (!timestamps.nil?)
starting_timestamp_seconds = timestamps[0]
ending_timestamp_seconds = timestamps[1]
Invidious::Database::Compilations.update_first_video_params(compid, first_id, starting_timestamp_seconds, ending_timestamp_seconds)
end
end
end
else
raise NotFoundException.new("Compilation does not exist.")
end
end
def get_compilation_videos(compilation : InvidiousCompilation | Compilation, offset : Int32, video_id = nil)
# Show empty compilation if requested page is out of range
# (e.g., when a new compilation has been created, offset will be negative)
if offset >= compilation.video_count || offset < 0
return [] of CompilationVideo
end
if compilation.is_a? InvidiousCompilation
Invidious::Database::CompilationVideos.select(compilation.id, compilation.index, offset, limit: 100)
else
if video_id
initial_data = YoutubeAPI.next({
"videoId" => video_id,
"compilationId" => compilation.id,
})
offset = initial_data.dig?("contents", "twoColumnWatchNextResults", "compilation", "compilation", "currentIndex").try &.as_i || offset
end
videos = [] of CompilationVideo
until videos.size >= 200 || videos.size == compilation.video_count || offset >= compilation.video_count
# 100 videos per request
ctoken = produce_compilation_continuation(compilation.id, offset)
initial_data = YoutubeAPI.browse(ctoken)
videos += extract_compilation_videos(initial_data)
offset += 100
end
return videos
end
end
def extract_compilation_videos(initial_data : Hash(String, JSON::Any))
videos = [] of CompilationVideo
if initial_data["contents"]?
tabs = initial_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"]
tabs_renderer = tabs.as_a.select(&.["tabRenderer"]["selected"]?.try &.as_bool)[0]["tabRenderer"]
# Watch out for the two versions, with and without "s"
if tabs_renderer["contents"]? || tabs_renderer["content"]?
# Initial compilation data
tabs_contents = tabs_renderer.["contents"]? || tabs_renderer.["content"]
list_renderer = tabs_contents.["sectionListRenderer"]["contents"][0]
item_renderer = list_renderer.["itemSectionRenderer"]["contents"][0]
contents = item_renderer.["compilationVideoListRenderer"]["contents"].as_a
else
# Continuation data
contents = initial_data["onResponseReceivedActions"][0]?
.try &.["appendContinuationItemsAction"]["continuationItems"].as_a
end
else
contents = initial_data["response"]?.try &.["continuationContents"]["compilationVideoListContinuation"]["contents"].as_a
end
contents.try &.each do |item|
if i = item["compilationVideoRenderer"]?
video_id = i["navigationEndpoint"]["watchEndpoint"]["videoId"].as_s
compid = i["navigationEndpoint"]["watchEndpoint"]["compilationId"].as_s
index = i["navigationEndpoint"]["watchEndpoint"]["index"].as_i64
title = i["title"].try { |t| t["simpleText"]? || t["runs"]?.try &.[0]["text"]? }.try &.as_s || ""
author = i["shortBylineText"]?.try &.["runs"][0]["text"].as_s || ""
ucid = i["shortBylineText"]?.try &.["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"].as_s || ""
length_seconds = i["lengthSeconds"]?.try &.as_s.to_i
live = false
if !length_seconds
live = true
length_seconds = 0
end
# NOTE (assumption): this response does not expose per-clip timestamps or an
# explicit ordering field, so default to the full video and reuse the
# watch-endpoint index for ordering.
starting_timestamp_seconds = 0
ending_timestamp_seconds = length_seconds
order_index = index.to_i32
videos << CompilationVideo.new({
title: title,
id: video_id,
author: author,
ucid: ucid,
length_seconds: length_seconds,
starting_timestamp_seconds: starting_timestamp_seconds,
ending_timestamp_seconds: ending_timestamp_seconds,
published: Time.utc,
compid: compid,
index: index,
order_index: order_index,
})
end
end
return videos
end
def template_compilation(compilation)
html = <<-END_HTML
<h3>
<a href="/compilation?comp=#{compilation["compilationId"]}">
#{compilation["title"]}
</a>
</h3>
<div class="pure-menu pure-menu-scrollable compilation-restricted">
<ol class="pure-menu-list">
END_HTML
compilation["videos"].as_a.each do |video|
html += <<-END_HTML
<li class="pure-menu-item" id="#{video["videoId"]}">
<a href="/watch?v=#{video["videoId"]}&comp=#{compilation["compilationId"]}&index=#{video["index"]}">
<div class="thumbnail">
<img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg" alt="" />
<p class="length">#{recode_length_seconds(video["lengthSeconds"].as_i)}</p>
</div>
<p style="width:100%">#{video["title"]}</p>
<p>
<b style="width:100%">#{video["author"]}</b>
</p>
</a>
</li>
END_HTML
end
html += <<-END_HTML
</ol>
</div>
<hr>
END_HTML
html
end
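For orientation, a minimal sketch of how the new structs and helpers above fit together. It is illustrative only and assumes an authenticated Invidious `user` record and the usual PG_DB connection used elsewhere in the codebase:
# Create an empty private compilation for a user, then serialize it.
# `user` is an assumed Invidious user record; title and privacy are examples.
compilation = create_compilation("My clips", CompilationPrivacy::Private, user)
videos = get_compilation_videos(compilation, offset: 0) # empty right after creation
json = compilation.to_json(0) # offset 0; includes the "videos" array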


@@ -31,7 +31,7 @@ struct ConfigPreferences
property quality : String = "hd720"
property quality_dash : String = "auto"
property default_home : String? = "Popular"
property feed_menu : Array(String) = ["Popular", "Trending", "Subscriptions", "Playlists"]
property feed_menu : Array(String) = ["Popular", "Trending", "Subscriptions", "Playlists", "Compilations"]
property automatic_instance_redirect : Bool = false
property region : String = "US"
property related_videos : Bool = true
@@ -145,6 +145,9 @@ class Config
# Playlist length limit
property playlist_length_limit : Int32 = 500
# Compilation length limit
property compilation_length_limit : Int32 = 500
def disabled?(option)
case disabled = CONFIG.disable_proxy
when Bool


@@ -10,11 +10,14 @@ module Invidious::Database
def check_integrity(cfg)
return if !cfg.check_tables
Invidious::Database.check_enum("privacy", PlaylistPrivacy)
Invidious::Database.check_enum("compilation_privacy", CompilationPrivacy)
Invidious::Database.check_table("channels", InvidiousChannel)
Invidious::Database.check_table("channel_videos", ChannelVideo)
Invidious::Database.check_table("playlists", InvidiousPlaylist)
Invidious::Database.check_table("playlist_videos", PlaylistVideo)
Invidious::Database.check_table("compilations", InvidiousCompilation)
Invidious::Database.check_table("compilation_videos", CompilationVideo)
Invidious::Database.check_table("nonces", Nonce)
Invidious::Database.check_table("session_ids", SessionId)
Invidious::Database.check_table("users", User)


@@ -0,0 +1,359 @@
require "./base.cr"
#
# This module contains functions related to the "compilations" table.
#
module Invidious::Database::Compilations
extend self
# -------------------
# Insert / delete
# -------------------
def insert(compilation : InvidiousCompilation)
compilation_array = compilation.to_a
request = <<-SQL
INSERT INTO compilations
VALUES (#{arg_array(compilation_array)})
SQL
PG_DB.exec(request, args: compilation_array)
end
# deletes the given compilation and connected compilation videos
def delete(id : String)
CompilationVideos.delete_by_compilation(id)
request = <<-SQL
DELETE FROM compilations *
WHERE id = $1
SQL
PG_DB.exec(request, id)
end
# -------------------
# Update
# -------------------
def update(id : String, title : String, privacy, description, updated)
request = <<-SQL
UPDATE compilations
SET title = $1, privacy = $2, description = $3, updated = $4
WHERE id = $5
SQL
PG_DB.exec(request, title, privacy, description, updated, id)
end
def update_description(id : String, description)
request = <<-SQL
UPDATE compilations
SET description = $1
WHERE id = $2
SQL
PG_DB.exec(request, description, id)
end
def update_video_added(id : String, index : String | Int64)
request = <<-SQL
UPDATE compilations
SET index = array_append(index, $1),
video_count = cardinality(index) + 1,
updated = now()
WHERE id = $2
SQL
PG_DB.exec(request, index, id)
end
def update_video_removed(id : String, index : String | Int64)
request = <<-SQL
UPDATE compilations
SET index = array_remove(index, $1),
video_count = cardinality(index) - 1,
updated = now()
WHERE id = $2
SQL
PG_DB.exec(request, index, id)
end
def move_video_position(id : String, index : Array(Int64))
request = <<-SQL
UPDATE compilations
SET index = $2
WHERE id = $1
SQL
PG_DB.exec(request, id, index)
end
def update_first_video_params(id : String, first_video_id : String, starting_timestamp_seconds : Int32, ending_timestamp_seconds : Int32)
request = <<-SQL
UPDATE compilations
SET first_video_id = $2,
first_video_starting_timestamp_seconds = $3,
first_video_ending_timestamp_seconds = $4
WHERE id = $1
SQL
PG_DB.exec(request, id, first_video_id, starting_timestamp_seconds, ending_timestamp_seconds)
end
# -------------------
# Select
# -------------------
def select(*, id : String) : InvidiousCompilation?
request = <<-SQL
SELECT * FROM compilations
WHERE id = $1
SQL
return PG_DB.query_one?(request, id, as: InvidiousCompilation)
end
def select_all(*, author : String) : Array(InvidiousCompilation)
request = <<-SQL
SELECT * FROM compilations
WHERE author = $1
SQL
return PG_DB.query_all(request, author, as: InvidiousCompilation)
end
def select_index_array(id : String)
request = <<-SQL
SELECT index FROM compilations
WHERE id = $1
LIMIT 1
SQL
PG_DB.query_one?(request, id, as: Array(Int64))
end
# -------------------
# Select (filtered)
# -------------------
def select_like_iv(email : String) : Array(InvidiousCompilation)
request = <<-SQL
SELECT * FROM compilations
WHERE author = $1 AND id LIKE 'IV%'
ORDER BY created
SQL
PG_DB.query_all(request, email, as: InvidiousCompilation)
end
def select_not_like_iv(email : String) : Array(InvidiousCompilation)
request = <<-SQL
SELECT * FROM compilations
WHERE author = $1 AND id NOT LIKE 'IV%'
ORDER BY created
SQL
PG_DB.query_all(request, email, as: InvidiousCompilation)
end
def select_user_created_compilations(email : String) : Array({String, String})
request = <<-SQL
SELECT id,title FROM compilations
WHERE author = $1 AND id LIKE 'IV%'
SQL
PG_DB.query_all(request, email, as: {String, String})
end
# -------------------
# Misc checks
# -------------------
# Check if given compilation ID exists
def exists?(id : String) : Bool
request = <<-SQL
SELECT id FROM compilations
WHERE id = $1
SQL
return PG_DB.query_one?(request, id, as: String).nil?
end
# Count how many compilations a user has created.
def count_owned_by(author : String) : Int64
request = <<-SQL
SELECT count(*) FROM compilations
WHERE author = $1
SQL
return PG_DB.query_one?(request, author, as: Int64) || 0_i64
end
end
#
# This module contains functions related to the "compilation_videos" table.
#
module Invidious::Database::CompilationVideos
extend self
private alias VideoIndex = Int64 | Array(Int64)
# -------------------
# Insert / Delete
# -------------------
def insert(video : CompilationVideo)
video_array = video.to_a
request = <<-SQL
INSERT INTO compilation_videos
VALUES (#{arg_array(video_array)})
SQL
PG_DB.exec(request, args: video_array)
end
def delete(index)
request = <<-SQL
DELETE FROM compilation_videos *
WHERE index = $1
SQL
PG_DB.exec(request, index)
end
def delete_by_compilation(compid : String)
request = <<-SQL
DELETE FROM compilation_videos *
WHERE compid = $1
SQL
PG_DB.exec(request, compid)
end
# -------------------
# Select
# -------------------
def select(compid : String, index : VideoIndex, offset, limit = 100) : Array(CompilationVideo)
request = <<-SQL
SELECT * FROM compilation_videos
WHERE compid = $1
ORDER BY array_position($2, index)
LIMIT $3
OFFSET $4
SQL
return PG_DB.query_all(request, compid, index, limit, offset, as: CompilationVideo)
end
def select_video(compid : String, index : VideoIndex, video_index, offset, limit = 100) : Array(CompilationVideo)
request = <<-SQL
SELECT * FROM compilation_videos
WHERE compid = $1 AND index = $3
ORDER BY array_position($2, index)
LIMIT $5
OFFSET $4
SQL
return PG_DB.query_all(request, compid, index, video_index, offset, limit, as: CompilationVideo)
end
def select_timestamps(compid : String, vid : String)
request = <<-SQL
SELECT starting_timestamp_seconds,ending_timestamp_seconds FROM compilation_videos
WHERE compid = $1 AND id = $2
LIMIT 1
SQL
return PG_DB.query_one?(request, compid, vid, as: {Int32, Int32})
end
def select_id_from_order_index(order_index : Int32)
request = <<-SQL
SELECT id FROM compilation_videos
WHERE order_index = $1
LIMIT 1
SQL
return PG_DB.query_one?(request, order_index, as: String)
end
def select_id_from_index(index : Int64)
request = <<-SQL
SELECT id FROM compilation_videos
WHERE index = $1
LIMIT 1
SQL
return PG_DB.query_one?(request, index, as: String)
end
def select_index_from_order_index(order_index : Int32)
request = <<-SQL
SELECT index FROM compilation_videos
WHERE order_index = $1
LIMIT 1
SQL
return PG_DB.query_one?(request, order_index, as: VideoIndex)
end
def select_index(compid : String, vid : String) : Int64?
request = <<-SQL
SELECT index FROM compilation_videos
WHERE compid = $1 AND id = $2
LIMIT 1
SQL
return PG_DB.query_one?(request, compid, vid, as: Int64)
end
def select_one_id(compid : String, index : VideoIndex) : String?
request = <<-SQL
SELECT id FROM compilation_videos
WHERE compid = $1
ORDER BY array_position($2, index)
LIMIT 1
SQL
return PG_DB.query_one?(request, compid, index, as: String)
end
def select_ids(compid : String, index : VideoIndex, limit = 500) : Array(String)
request = <<-SQL
SELECT id FROM compilation_videos
WHERE compid = $1
ORDER BY array_position($2, index)
LIMIT $3
SQL
return PG_DB.query_all(request, compid, index, limit, as: String)
end
# -------------------
# Update
# -------------------
def update_start_timestamp(id : String, starting_timestamp_seconds : Int32)
request = <<-SQL
UPDATE compilation_videos
SET starting_timestamp_seconds = $2
WHERE id = $1
SQL
PG_DB.exec(request, id, starting_timestamp_seconds)
end
def update_end_timestamp(id : String, ending_timestamp_seconds : Int32)
request = <<-SQL
UPDATE compilation_videos
SET ending_timestamp_seconds = $2
WHERE id = $1
SQL
PG_DB.exec(request, id, ending_timestamp_seconds)
end
end
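A rough sketch of the write path these queries support, adding one video to an existing compilation. It assumes `compilation` is an InvidiousCompilation (for example from create_compilation or Compilations.select); the video values are placeholders, the random `index` mirrors the pattern Invidious uses for playlist videos, and the real request handling lives in the compilation routes, which are not part of this hunk:
video_index = Random::Secure.rand(0_i64..Int64::MAX)
video = CompilationVideo.new({
  title:                      "Example title",            # placeholder metadata
  id:                         "dQw4w9WgXcQ",              # placeholder video ID
  author:                     "Example channel",
  ucid:                       "UCuAXFkgsw1L7xaCfnd5JJOw", # placeholder channel ID
  length_seconds:             212,
  starting_timestamp_seconds: 0,
  ending_timestamp_seconds:   212,
  published:                  Time.utc,
  compid:                     compilation.id,
  index:                      video_index,
  order_index:                compilation.video_count,
})
Invidious::Database::CompilationVideos.insert(video)
Invidious::Database::Compilations.update_video_added(compilation.id, video_index)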


@@ -0,0 +1,52 @@
module Invidious::Database::Migrations
class CreateCompilationsTable < Migration
version 11
def up(conn : DB::Connection)
if !compilation_privacy_type_exists?(conn)
conn.exec <<-SQL
CREATE TYPE public.compilation_privacy AS ENUM
(
'Unlisted',
'Private'
);
SQL
end
conn.exec <<-SQL
CREATE TABLE IF NOT EXISTS public.compilations
(
title text,
id text primary key,
author text,
description text,
video_count integer,
created timestamptz,
updated timestamptz,
privacy compilation_privacy,
index int8[],
first_video_id text,
first_video_starting_timestamp_seconds integer,
first_video_ending_timestamp_seconds integer
);
SQL
conn.exec <<-SQL
GRANT ALL ON public.compilations TO current_user;
SQL
end
private def compilation_privacy_type_exists?(conn : DB::Connection) : Bool
request = <<-SQL
SELECT 1 AS one
FROM pg_type
INNER JOIN pg_namespace ON pg_namespace.oid = pg_type.typnamespace
WHERE pg_namespace.nspname = 'public'
AND pg_type.typname = 'compilation_privacy'
LIMIT 1;
SQL
!conn.query_one?(request, as: Int32).nil?
end
end
end


@@ -0,0 +1,30 @@
module Invidious::Database::Migrations