Compare commits

0.18.0...0.19.0

109 Commits

SHA1 Message Date
9e2a65a5ce Update CHANGELOG and bump version 2019-07-12 23:45:21 -05:00
fea20ea913 Add support for Icelandic translation 2019-07-12 21:07:40 -05:00
5b2480fff2 Merge remote-tracking branch 'weblate/master' 2019-07-12 21:04:20 -05:00
b0dca2a363 Minor refactor 2019-07-12 21:00:50 -05:00
59bbe72798 Update Esperanto translation 2019-07-12 19:05:25 +02:00
f99a30a57e Update Icelandic translation 2019-07-12 19:05:25 +02:00
aa4cb29621 Update Norwegian Bokmål translation 2019-07-12 19:05:25 +02:00
91ad4e396b Update Icelandic translation 2019-07-12 19:05:25 +02:00
351e17aacf Update Icelandic translation 2019-07-12 19:05:25 +02:00
6c8e09acdb Add Icelandic translation 2019-07-12 19:05:25 +02:00
1a7b341745 Update Google login 2019-07-12 12:04:39 -05:00
af592ea8c1 Fix extraction for ytInitialData 2019-07-11 07:27:54 -05:00
bb096a0357 Raise 400 on invalid request to '/feed/webhook' 2019-07-10 11:26:05 -05:00
3c226892c6 Add fix for empty title tag when fetching videos 2019-07-10 10:44:44 -05:00
47f6fe069a Add fix for unsupported attachment types 2019-07-09 23:09:16 -05:00
aa3c1d930b Remove empty representations from dash manifests 2019-07-09 10:08:27 -05:00
99b0b4f5b8 Fix escaping for materialized view SQL 2019-07-09 09:34:19 -05:00
bcd239ac2b Add community page 2019-07-09 09:31:04 -05:00
2cc25b1e6e Add administrator option to disable proxying 2019-07-08 12:15:18 -05:00
5fd3ed782f Add fix for #600 2019-07-08 10:00:08 -05:00
c34a24b633 Attempt to optimize query for subscription feed 2019-07-07 14:00:42 -05:00
775612ec5a Prevent embeds from appearing in watch history 2019-07-05 16:55:28 -05:00
fd43b16213 Add av01 formats to itag list 2019-07-05 13:43:44 -05:00
5a455ec4f7 Fix redirect for livestream segments 2019-07-05 12:08:39 -05:00
1277c3d156 Fix chunk size for livestreams 2019-07-05 11:35:04 -05:00
8033d1ca6d Fix chunking for livestream segments 2019-07-05 11:02:12 -05:00
28df6881a7 Try to prevent redirect for DASH streams 2019-07-04 23:29:28 -05:00
e5fa5df7be Chunk video files to bypass throttling 2019-07-04 23:29:28 -05:00
f7dbf2bdd4 Add 'pipe' for proxying assets 2019-07-04 23:29:28 -05:00
857c57daba Add support for Chinese translation 2019-07-04 23:11:04 -05:00
5515da3c2d Merge pull request #620 from outloudvi/l10n-zh-cn
Add translation of Simplified Chinese
2019-07-04 23:06:26 -05:00
cfc111f855 Add zh-CN translation 2019-07-04 13:08:01 +08:00
3dd4043827 Fix 404 for video thumbnails 2019-07-03 13:54:15 -05:00
351ecfae0f Fix body when proxying assets with status code > 300 2019-07-03 13:13:40 -05:00
b22393092b Fix protocol for video author thumbnails 2019-07-03 13:10:18 -05:00
1485ee8027 Fix channel thumbnail size in FireFox 2019-07-03 10:53:33 -05:00
60826c2d0c Fix author thumbnail for community replies 2019-07-03 10:12:03 -05:00
fb383458d7 Add /api/v1/search/suggestions 2019-07-03 10:11:47 -05:00
196ee1aa8b Add '/api/v1/channels/comments' 2019-07-02 18:53:19 -05:00
2df97cd2f5 Fix provided author for '/videos' endpoint 2019-07-02 07:29:01 -05:00
501b523680 Fit channel link to content 2019-07-01 14:26:27 -05:00
6efa6691b1 Clean up comment templating 2019-07-01 13:38:30 -05:00
c47f1ae236 Add Reddit comment permalink 2019-07-01 12:37:28 -05:00
aac240fe41 Resize comment thumbnails 2019-07-01 12:08:29 -05:00
041debcd93 Revert "Chunk videoplayback response to avoid throttling"
This reverts commit 818cd2454d.
2019-07-01 10:45:09 -05:00
0632a2d3c8 Fix logging for /watch URLs 2019-07-01 10:07:19 -05:00
9f40b3a873 Add missing table to check_tables 2019-07-01 09:29:52 -05:00
8fad0af935 Add caption styling 2019-06-30 22:46:08 -05:00
48ad744ebf Add support for default channel banners 2019-06-30 12:59:38 -05:00
556d5b0ca5 Resize channel thumbnails 2019-06-30 12:39:51 -05:00
e30d70b6d4 Refactor proxy_list into global 2019-06-28 21:17:56 -05:00
a58f5a925a Add banner to "/playlists" page 2019-06-28 21:00:28 -05:00
a3cc3c57fd Add cursor: none to player 2019-06-28 20:55:23 -05:00
0d0d3edeae Add thumbnail and banners to channel page 2019-06-28 20:48:24 -05:00
dd0be7c522 Revert "Push potential fix for #578"
This reverts commit ebfd7d2153.
2019-06-28 11:05:08 -05:00
9d2982fcd7 Fix typo in '/videoplayback' 2019-06-26 15:03:09 -05:00
ebfd7d2153 Push potential fix for #578 2019-06-26 14:44:06 -05:00
818cd2454d Chunk videoplayback response to avoid throttling 2019-06-26 14:43:33 -05:00
b31d1c06f5 Fix typo in StaticFileHandler 2019-06-23 15:41:44 -05:00
6cd884555c Patch StaticFileHandler to serve files from memory 2019-06-23 12:54:46 -05:00
47ef74a1bb Refactor commonly used request and response headers 2019-06-23 08:39:25 -05:00
cc6d6ddd66 Prevent firing _onStreamProgress after aborting 2019-06-22 20:08:37 -05:00
6a6cf015a6 Merge pull request #598 from tleydxdy/patch-1
let docker listen to 127.0.0.1 by default
2019-06-22 19:47:35 -05:00
ca79e81b39 Fix simpleText in comments extractor 2019-06-21 21:53:28 -05:00
a9e86cecf5 Fix comment extractor 2019-06-21 20:25:31 -05:00
5773b1c3e5 Update Ukrainian translation 2019-06-19 02:10:52 +02:00
b562b3410b Update Russian translation 2019-06-19 02:10:52 +02:00
f6440e9830 Update Esperanto translation 2019-06-19 02:10:52 +02:00
e43636e1e9 Update German translation 2019-06-19 02:10:52 +02:00
6783bf9903 Update README 2019-06-17 18:10:04 -05:00
807723c5b2 Fix status codes on error 2019-06-17 14:06:02 -05:00
d3c4936116 let docker listen to 127.0.0.1 by default 2019-06-17 10:46:37 -04:00
bbb40aef51 Fix event listener for notifications.js 2019-06-16 18:11:34 -05:00
485a3e29e7 Optimize get_subscriptions AJAX 2019-06-16 17:33:24 -05:00
1477f99c2c Add target="_blank" to embed titles 2019-06-16 14:49:00 -05:00
2e1f9d5fa9 Fix title URL for embedded videos 2019-06-16 13:14:56 -05:00
9dea251862 Fix typo in notifications.js 2019-06-16 12:57:56 -05:00
17edfd6573 Shorten timeout for AJAX 2019-06-16 12:55:17 -05:00
458e9d6cc7 Update license for sse.js 2019-06-16 09:46:09 -05:00
485459b8b2 Add clickable title for embedded videos 2019-06-16 09:41:33 -05:00
fcf377d26b Fix escaping for login page 2019-06-15 20:42:42 -05:00
3be1c9261f Fix sleep in pull_top_videos 2019-06-15 19:18:36 -05:00
38600b3347 Update list of domains for pulling Reddit comments 2019-06-15 18:58:21 -05:00
62f7f7a689 Update shard.yml 2019-06-15 10:34:31 -05:00
552f616305 Fix retry on timeout for AJAX requests 2019-06-15 10:09:32 -05:00
a3164177f8 Fix SMS for Google login 2019-06-15 10:09:25 -05:00
fa6bf21cd1 Update Google login 2019-06-09 13:48:31 -05:00
eecf76c1fb Fix typo in short_description 2019-06-08 16:34:55 -05:00
d1635cf24e Set max preference size 2019-06-08 16:04:55 -05:00
b43e9ed7e7 Refactor 'description_html' 2019-06-08 15:08:27 -05:00
12b2ab5da8 Add 'to_json' into respective structs 2019-06-08 13:31:41 -05:00
1c9085556c Add support for 'attribution_link' 2019-06-08 11:13:00 -05:00
9122f8acee Add title overlay to embedded videos 2019-06-08 10:52:47 -05:00
ef8c9f093c Add premiere date to watch page 2019-06-08 10:18:45 -05:00
801dffd571 Fix RSS content-type 2019-06-07 21:39:32 -05:00
0b1c57b39f Add notifications to private feed 2019-06-07 21:27:37 -05:00
2febc268f7 Fix warnings in Crystal 0.29 2019-06-07 21:13:50 -05:00
58995bb3a2 Add support for log levels 2019-06-07 21:13:50 -05:00
8c944815bc Minor refactor 2019-06-07 21:13:50 -05:00
f065a21542 Fix 404 handling for endpoints matching short URLs 2019-06-07 21:13:50 -05:00
27e032d10d Add '/api/v1/auth/feeds' 2019-06-07 21:13:50 -05:00
ab3980cd38 Enforce maximum email length 2019-06-07 21:13:50 -05:00
1db648a525 Merge pull request #577 from EsmailELBoBDev2/patch-3
Update ar.json
2019-06-07 10:26:36 -05:00
ce3b5b683d Merge pull request #580 from Vistaus/master
Updated Dutch translation
2019-06-07 10:25:57 -05:00
9d23f1298d Updated Dutch translation 2019-06-07 12:29:03 +02:00
3f791b65b5 Update ar.json 2019-06-07 04:46:46 +00:00
317d8703ca Optimize query for pulling popular videos 2019-06-06 21:33:30 -05:00
fda619f704 Fix 'unique_res' to keep resolutions unique within a representation 2019-06-06 21:32:39 -05:00
e4a0669da8 Fix typo in video param 2019-06-06 21:31:10 -05:00
64 changed files with 3944 additions and 1897 deletions


@ -1,3 +1,65 @@
# 0.19.0 (2019-07-13)
# Version 0.19.0: Communities
Hello again everyone! The focus this month has mainly been on improving playback performance, along with a couple of new features I'd like to announce. There have been [109 commits](https://github.com/omarroth/invidious/compare/0.18.0...0.19.0) this past month from 10 contributors.
This past month has seen the addition of Chinese (`zh-CN`) and Icelandic (`is`) translations. I would like to give a huge thanks to their respective translators, and again an enormous thanks to everyone who helps translate the site.
I'm delighted to mention that [FreeTube 0.6.0](https://github.com/FreeTubeApp/FreeTube) now supports 1080p thanks to the Invidious API. I would very much recommend reading the [relevant post](https://freetube.writeas.com/freetube-release-0-6-0-beta-1080p-and-a-lot-of-qol) for more information on how it works, along with several other major improvements. Folks who are interested in adding similar functionality to their own projects should feel free to get in touch.
There has also been quite a bit of work this past month on reducing memory usage and improving download and playback speeds. As mentioned in the previous release, some extra hardware has been allocated, which should also help with this. I'm still looking for ways to improve performance, and feedback is always appreciated.
Along with performance, a couple of quality-of-life improvements have been added, including author thumbnails and banners, clickable titles for embedded videos, and better styling for captions, among other enhancements.
## Communities
Support for YouTube's [communities tab](https://creatoracademy.youtube.com/page/lesson/community-tab) has been added. It's a very interesting but surprisingly little-known feature: essentially, comments for a channel rather than for a single video, where an author can post updates for their subscribers.
It's commonly used to promote interesting links and foster discussion. I hope this feature helps people find content that would otherwise have been overlooked.
## For Developers
For accessing channel communities, an `/api/v1/channels/comments/:ucid` endpoint has been added, with behavior and schema similar to `/api/v1/comments/:id`, plus an extra `attachment` field on top-level comments. More info on usage and available data can be found in the [wiki](https://github.com/omarroth/invidious/wiki/API#get-apiv1channelscommentsucid-apiv1channelsucidcomments).
An `/api/v1/auth/feeds` endpoint has been added for programmatically accessing a user's subscription feed, with options for displaying notifications and filtering an existing feed.
An `/api/v1/search/suggestions` endpoint has been added for retrieving suggestions for a given query.
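As an illustration only (this sketch is not part of the original release notes), the new endpoints can be queried from plain browser JavaScript in the same XHR style used by the project's own scripts. The channel ID, the `q` parameter, and the response field names below are placeholders or assumptions; the wiki pages linked above are authoritative.

```js
// Hypothetical usage sketch, assuming it runs on a page served by an
// Invidious instance (relative URLs). A tiny helper around XMLHttpRequest,
// then one call per endpoint.
function get_json(url, callback) {
    var xhr = new XMLHttpRequest();
    xhr.responseType = 'json';
    xhr.timeout = 10000;
    xhr.open('GET', url, true);
    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4 && xhr.status === 200) {
            callback(xhr.response);
        }
    };
    xhr.send();
}

// Community posts for a channel; assumed to follow the /api/v1/comments/:id
// schema, with `attachment` on top-level comments. The UCID is a placeholder.
get_json('/api/v1/channels/comments/UCXuqSBlHAE6Xw-yeJA0Tunw', function (data) {
    data.comments.forEach(function (comment) {
        console.log(comment.author + ': ' + comment.content);
    });
});

// Search suggestions for a partial query (`q` and `suggestions` are assumptions)
get_json('/api/v1/search/suggestions?q=invid', function (data) {
    console.log(data.suggestions);
});
```

The same helper would work for `/api/v1/auth/feeds`, although that endpoint additionally requires the request to be authenticated (for example with a user token), so it is omitted from the sketch.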
## For Administrators
It is now possible to disable more resource-intensive features, such as downloads and DASH functionality, by adding `disable_proxy` to your config. See [#453](https://github.com/omarroth/invidious/issues/453) and the [wiki](https://github.com/omarroth/invidious/wiki/Configuration) for more information and example usage. I expect this to be a big help for folks with limited bandwidth who host their own instances.
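As a sketch only, the relevant part of the configuration might look like the excerpt below. The option name comes from this release, but the values shown are assumptions on my part; the configuration wiki linked above has the exact accepted values.

```yaml
# Hypothetical excerpt from an instance's config.yml (values are assumptions)
disable_proxy:
  - dash
  - downloads

# or, to disable all proxied functionality (assumed form):
# disable_proxy: true
```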
## Finances
### Donations
- [Patreon](https://www.patreon.com/omarroth) : \$38.39
- [Liberapay](https://liberapay.com/omarroth) : \$84.85
- Crypto : ~\$0.00 (converted from BCH, BTC)
- Total : \$123.24
### Expenses
- invidious-load1 (nyc1) : \$10.00 (load balancer)
- invidious-update1 (s-1vcpu-1gb) : \$5.00 (updates feeds)
- invidious-node1 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node2 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node3 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node4 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node5 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node6 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node7 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node8 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node9 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-node10 (s-1vcpu-1gb) : \$5.00 (web server)
- invidious-db1 (s-4vcpu-8gb) : \$40.00 (database)
- Total : \$105.00
The goal on Patreon has been updated to reflect the above expenses. As mentioned earlier, the main reason for the extra hardware is to improve playback and download speeds, although I'm still looking into ways to improve performance without allocating more hardware.
As always, I'm grateful for everyone's support and feedback. I'll see you all next month.
# 0.18.0 (2019-06-06)
# Version 0.18.0: Native Notifications and Optimizations


@ -27,12 +27,16 @@ Patreon: https://patreon.com/omarroth
BTC: 356DpZyMXu6rYd55Yqzjs29n79kGKWcYrY
BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk
Onion links:
## Invidious Instances
- kgg2m7yk5aybusll.onion
- axqzx4s6s54s32yentfqojs3x5i7faxza6xo3ehd4bzzsg2ii4fv2iid.onion
See [Invidious Instances](https://github.com/omarroth/invidious/wiki/Invidious-Instances) for a full list of publicly available instances.
[Alternative Invidious instances](https://github.com/omarroth/invidious/wiki/Invidious-Instances)
### Official Instances
- [invidio.us](https://invidio.us) 🇺🇸
Issuer: Let's Encrypt, [SSLLabs Verification](https://www.ssllabs.com/ssltest/analyze.html?d=invidio.us)
- [kgg2m7yk5aybusll.onion](http://kgg2m7yk5aybusll.onion)
- [axqzx4s6s54s32yentfqojs3x5i7faxza6xo3ehd4bzzsg2ii4fv2iid.onion](http://axqzx4s6s54s32yentfqojs3x5i7faxza6xo3ehd4bzzsg2ii4fv2iid.onion)
## Screenshots


@ -2,6 +2,17 @@
background-color: rgb(255, 0, 0, 0.5);
}
.channel-profile > * {
font-size: 1.17em;
font-weight: bold;
vertical-align: middle;
}
.channel-profile > img {
width: 48px;
height: auto;
}
.channel-owner {
background-color: #008bec;
color: #fff;
@ -270,6 +281,16 @@ input[type="search"]::-webkit-search-cancel-button {
}
}
.vjs-user-inactive {
cursor: none;
}
.video-js .vjs-text-track-display > div > div > div {
background-color: rgba(0, 0, 0, 0.75) !important;
border-radius: 9px !important;
padding: 5px !important;
}
.vjs-play-control,
.vjs-volume-panel,
.vjs-current-time,
@ -351,6 +372,12 @@ span > select {
background-color: rgba(0, 182, 240, 1);
}
/* Overlay */
.video-js .vjs-overlay {
background-color: rgba(35, 35, 35, 0.75);
color: rgba(255, 255, 255, 1);
}
/* ProgressBar marker */
.vjs-marker {
background-color: rgba(255, 255, 255, 1);


@ -0,0 +1 @@
.video-js .vjs-overlay{color:#fff;position:absolute;text-align:center}.video-js .vjs-overlay-no-background{max-width:33%}.video-js .vjs-overlay-background{background-color:#646464;background-color:rgba(255,255,255,0.4);border-radius:3px;padding:10px;width:33%}.video-js .vjs-overlay-top-left{top:5px;left:5px}.video-js .vjs-overlay-top{left:50%;margin-left:-16.5%;top:5px}.video-js .vjs-overlay-top-right{right:5px;top:5px}.video-js .vjs-overlay-right{right:5px;top:50%;transform:translateY(-50%)}.video-js .vjs-overlay-bottom-right{bottom:3.5em;right:5px}.video-js .vjs-overlay-bottom{bottom:3.5em;left:50%;margin-left:-16.5%}.video-js .vjs-overlay-bottom-left{bottom:3.5em;left:5px}.video-js .vjs-overlay-left{left:5px;top:50%;transform:translateY(-50%)}.video-js .vjs-overlay-center{left:50%;margin-left:-16.5%;top:50%;transform:translateY(-50%)}.video-js .vjs-no-flex .vjs-overlay-left,.video-js .vjs-no-flex .vjs-overlay-center,.video-js .vjs-no-flex .vjs-overlay-right{margin-top:-15px}

assets/js/community.js Normal file

@ -0,0 +1,101 @@
String.prototype.supplant = function (o) {
return this.replace(/{([^{}]*)}/g, function (a, b) {
var r = o[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
}
function hide_youtube_replies(event) {
var target = event.target;
sub_text = target.getAttribute('data-inner-text');
inner_text = target.getAttribute('data-sub-text');
body = target.parentNode.parentNode.children[1];
body.style.display = 'none';
target.innerHTML = sub_text;
target.onclick = show_youtube_replies;
target.setAttribute('data-inner-text', inner_text);
target.setAttribute('data-sub-text', sub_text);
}
function show_youtube_replies(event) {
var target = event.target;
sub_text = target.getAttribute('data-inner-text');
inner_text = target.getAttribute('data-sub-text');
body = target.parentNode.parentNode.children[1];
body.style.display = '';
target.innerHTML = sub_text;
target.onclick = hide_youtube_replies;
target.setAttribute('data-inner-text', inner_text);
target.setAttribute('data-sub-text', sub_text);
}
function number_with_separator(val) {
while (/(\d+)(\d{3})/.test(val.toString())) {
val = val.toString().replace(/(\d+)(\d{3})/, '$1' + ',' + '$2');
}
return val;
}
function get_youtube_replies(target, load_more) {
var continuation = target.getAttribute('data-continuation');
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3 style="text-align:center"><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3>';
var url = '/api/v1/channels/comments/' + community_data.ucid +
'?format=html' +
'&hl=' + community_data.preferences.locale +
'&thin_mode=' + community_data.preferences.thin_mode +
'&continuation=' + continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 10000;
xhr.open('GET', url, true);
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
if (load_more) {
body = body.parentNode.parentNode;
body.removeChild(body.lastElementChild);
body.innerHTML += xhr.response.contentHtml;
} else {
body.removeChild(body.lastElementChild);
var p = document.createElement('p');
var a = document.createElement('a');
p.appendChild(a);
a.href = 'javascript:void(0)';
a.onclick = hide_youtube_replies;
a.setAttribute('data-sub-text', community_data.hide_replies_text);
a.setAttribute('data-inner-text', community_data.show_replies_text);
a.innerText = community_data.hide_replies_text;
var div = document.createElement('div');
div.innerHTML = xhr.response.contentHtml;
body.appendChild(p);
body.appendChild(div);
}
} else {
body.innerHTML = fallback;
}
}
}
xhr.ontimeout = function () {
console.log('Pulling comments failed.');
body.innerHTML = fallback;
}
xhr.send();
}


@ -1,5 +1,5 @@
function get_playlist(plid, timeouts = 0) {
if (timeouts > 10) {
function get_playlist(plid, retries = 5) {
if (retries <= 0) {
console.log('Failed to pull playlist');
return;
}
@ -16,9 +16,8 @@ function get_playlist(plid, timeouts = 0) {
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('GET', plid_url, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
@ -51,10 +50,17 @@ function get_playlist(plid, timeouts = 0) {
}
}
xhr.ontimeout = function () {
console.log('Pulling playlist timed out.');
get_playlist(plid, timeouts++);
xhr.onerror = function () {
console.log('Pulling playlist failed... ' + retries + '/5');
setTimeout(function () { get_playlist(plid, retries - 1) }, 1000);
}
xhr.ontimeout = function () {
console.log('Pulling playlist failed... ' + retries + '/5');
get_playlist(plid, retries - 1);
}
xhr.send();
}
if (video_data.plid) {


@ -1,32 +1,35 @@
var notifications, delivered;
function get_subscriptions(callback, failures = 1) {
if (failures >= 10) {
return
function get_subscriptions(callback, retries = 5) {
if (retries <= 0) {
return;
}
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.open('GET', '/api/v1/auth/subscriptions', true);
xhr.send(null);
xhr.timeout = 10000;
xhr.open('GET', '/api/v1/auth/subscriptions?fields=authorId', true);
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
subscriptions = xhr.response;
callback(subscriptions);
} else {
console.log('Pulling subscriptions failed... ' + failures + '/10');
get_subscriptions(callback, failures++)
}
}
}
xhr.ontimeout = function () {
console.log('Pulling subscriptions failed... ' + failures + '/10');
get_subscriptions(callback, failures++);
xhr.onerror = function () {
console.log('Pulling subscriptions failed... ' + retries + '/5');
setTimeout(function () { get_subscriptions(callback, retries - 1) }, 1000);
}
xhr.ontimeout = function () {
console.log('Pulling subscriptions failed... ' + retries + '/5');
get_subscriptions(callback, retries - 1);
}
xhr.send();
}
function create_notification_stream(subscriptions) {
@ -42,7 +45,7 @@ function create_notification_stream(subscriptions) {
notifications.onmessage = function (event) {
if (!event.id) {
return
return;
}
var notification = JSON.parse(event.data);
@ -77,21 +80,16 @@ function create_notification_stream(subscriptions) {
}
}
notifications.onerror = function (event) {
console.log('Something went wrong with notifications, trying to reconnect...');
notifications.close();
get_subscriptions(create_notification_stream);
}
notifications.ontimeout = function (event) {
console.log('Something went wrong with notifications, trying to reconnect...');
notifications.close();
get_subscriptions(create_notification_stream);
}
notifications.addEventListener('error', handle_notification_error);
notifications.stream();
}
function handle_notification_error(event) {
console.log('Something went wrong with notifications, trying to reconnect...');
notifications = { close: function () { } };
setTimeout(function () { get_subscriptions(create_notification_stream) }, 1000);
}
window.addEventListener('load', function (e) {
localStorage.setItem('notification_count', document.getElementById('notification_count') ? document.getElementById('notification_count').innerText : '0');
@ -100,10 +98,11 @@ window.addEventListener('load', function (e) {
} else {
setTimeout(function () {
if (!localStorage.getItem('stream')) {
get_subscriptions(create_notification_stream);
notifications = { close: function () { } };
localStorage.setItem('stream', true);
get_subscriptions(create_notification_stream);
}
}, Math.random() * 1000 + 10);
}, Math.random() * 1000 + 50);
}
window.addEventListener('storage', function (e) {
@ -113,10 +112,11 @@ window.addEventListener('load', function (e) {
} else {
setTimeout(function () {
if (!localStorage.getItem('stream')) {
get_subscriptions(create_notification_stream);
notifications = { close: function () { } };
localStorage.setItem('stream', true);
get_subscriptions(create_notification_stream);
}
}, Math.random() * 1000 + 10);
}, Math.random() * 1000 + 50);
}
} else if (e.key === 'notification_count') {
var notification_ticker = document.getElementById('notification_ticker');


@ -1,20 +1,20 @@
var options = {
preload: "auto",
preload: 'auto',
liveui: true,
playbackRates: [0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 2.0],
controlBar: {
children: [
"playToggle",
"volumePanel",
"currentTimeDisplay",
"timeDivider",
"durationDisplay",
"progressControl",
"remainingTimeDisplay",
"captionsButton",
"qualitySelector",
"playbackRateMenuButton",
"fullscreenToggle"
'playToggle',
'volumePanel',
'currentTimeDisplay',
'timeDivider',
'durationDisplay',
'progressControl',
'remainingTimeDisplay',
'captionsButton',
'qualitySelector',
'playbackRateMenuButton',
'fullscreenToggle'
]
}
}
@ -29,7 +29,7 @@ short_url = location.origin + '/' + video_data.id + embed_url.search;
embed_url = location.origin + '/embed/' + video_data.id + embed_url.search;
var shareOptions = {
socials: ["fbFeed", "tw", "reddit", "email"],
socials: ['fbFeed', 'tw', 'reddit', 'email'],
url: short_url,
title: player_data.title,
@ -38,7 +38,7 @@ var shareOptions = {
embedCode: "<iframe id='ivplayer' type='text/html' width='640' height='360' src='" + embed_url + "' frameborder='0'></iframe>"
}
var player = videojs("player", options, function () {
var player = videojs('player', options, function () {
this.hotkeys({
volumeStep: 0.1,
seekStep: 5,
@ -102,6 +102,22 @@ var player = videojs("player", options, function () {
});
});
if (location.pathname.startsWith('/embed/')) {
player.overlay({
overlays: [{
start: 'loadstart',
content: '<h1><a rel="noopener" target="_blank" href="' + location.origin + '/watch?v=' + video_data.id + '">' + player_data.title + '</a></h1>',
end: 'playing',
align: 'top'
}, {
start: 'pause',
content: '<h1><a rel="noopener" target="_blank" href="' + location.origin + '/watch?v=' + video_data.id + '">' + player_data.title + '</a></h1>',
end: 'playing',
align: 'top'
}]
});
}
player.on('error', function (event) {
if (player.error().code === 2 || player.error().code === 4) {
setInterval(setTimeout(function (event) {
@ -163,27 +179,28 @@ player.on('waiting', function () {
}
});
if (video_data.premiere_timestamp && Math.round(new Date() / 1000) < video_data.premiere_timestamp) {
player.getChild('bigPlayButton').hide();
}
if (video_data.params.autoplay) {
var bpb = player.getChild('bigPlayButton');
bpb.hide();
if (bpb) {
bpb.hide();
player.ready(function () {
new Promise(function (resolve, reject) {
setTimeout(() => resolve(1), 1);
}).then(function (result) {
var promise = player.play();
player.ready(function () {
new Promise(function (resolve, reject) {
setTimeout(() => resolve(1), 1);
}).then(function (result) {
var promise = player.play();
if (promise !== undefined) {
promise.then(_ => {
}).catch(error => {
bpb.show();
});
}
});
if (promise !== undefined) {
promise.then(_ => {
}).catch(error => {
bpb.show();
});
}
});
}
});
}
if (!video_data.params.listen && video_data.params.quality === 'dash') {
@ -201,7 +218,6 @@ if (!video_data.params.listen && video_data.params.annotations) {
xhr.responseType = 'text';
xhr.timeout = 60000;
xhr.open('GET', '/api/v1/annotations/' + video_data.id, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
@ -234,6 +250,8 @@ if (!video_data.params.listen && video_data.params.annotations) {
window.open(path, '_blank');
}
});
xhr.send();
}
// Since videojs-share can sometimes be blocked, we defer it until last


@ -94,7 +94,7 @@ var SSE = function (url, options) {
}
this._onStreamProgress = function(e) {
if (this.xhr.status !== 200) {
if (this.xhr.status !== 200 && this.readyState !== this.CLOSED) {
this._onStreamFailure(e);
return;
}


@ -7,8 +7,8 @@ if (subscribe_button.getAttribute('data-type') === 'subscribe') {
subscribe_button.onclick = unsubscribe;
}
function subscribe(timeouts = 0) {
if (timeouts >= 10) {
function subscribe(retries = 5) {
if (retries <= 0) {
console.log('Failed to subscribe.');
return;
}
@ -17,10 +17,9 @@ function subscribe(timeouts = 0) {
'&c=' + subscribe_data.ucid;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('POST', url, true);
xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
xhr.send('csrf_token=' + subscribe_data.csrf_token);
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
var fallback = subscribe_button.innerHTML;
subscribe_button.onclick = unsubscribe;
@ -35,14 +34,21 @@ function subscribe(timeouts = 0) {
}
}
xhr.ontimeout = function () {
console.log('Subscribing timed out.');
subscribe(timeouts++);
xhr.onerror = function () {
console.log('Subscribing failed... ' + retries + '/5');
setTimeout(function () { subscribe(retries - 1) }, 1000);
}
xhr.ontimeout = function () {
console.log('Subscribing failed... ' + retries + '/5');
subscribe(retries - 1);
}
xhr.send('csrf_token=' + subscribe_data.csrf_token);
}
function unsubscribe(timeouts = 0) {
if (timeouts >= 10) {
function unsubscribe(retries = 5) {
if (retries <= 0) {
console.log('Failed to subscribe');
return;
}
@ -51,10 +57,9 @@ function unsubscribe(timeouts = 0) {
'&c=' + subscribe_data.ucid;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('csrf_token=' + subscribe_data.csrf_token);
var fallback = subscribe_button.innerHTML;
subscribe_button.onclick = subscribe;
@ -69,8 +74,15 @@ function unsubscribe(timeouts = 0) {
}
}
xhr.ontimeout = function () {
console.log('Unsubscribing timed out.');
unsubscribe(timeouts++);
xhr.onerror = function () {
console.log('Unsubscribing failed... ' + retries + '/5');
setTimeout(function () { unsubscribe(retries - 1) }, 1000);
}
xhr.ontimeout = function () {
console.log('Unsubscribing failed... ' + retries + '/5');
unsubscribe(retries - 1);
}
xhr.send('csrf_token=' + subscribe_data.csrf_token);
}


@ -7,12 +7,13 @@ toggle_theme.addEventListener('click', function () {
var url = '/toggle_theme?redirect=false';
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('GET', url, true);
xhr.send();
set_mode(dark_mode);
localStorage.setItem('dark_mode', dark_mode);
xhr.send();
});
window.addEventListener('storage', function (e) {

assets/js/videojs-overlay.min.js vendored Normal file

@ -0,0 +1,2 @@
/*! @name videojs-overlay @version 2.1.4 @license Apache-2.0 */
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e(require("video.js"),require("global/window")):"function"==typeof define&&define.amd?define(["video.js","global/window"],e):t.videojsOverlay=e(t.videojs,t.window)}(this,function(t,e){"use strict";function n(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}t=t&&t.hasOwnProperty("default")?t.default:t,e=e&&e.hasOwnProperty("default")?e.default:e;var r={align:"top-left",class:"",content:"This overlay will show up while the video is playing",debug:!1,showBackground:!0,attachToControlBar:!1,overlays:[{start:"playing",end:"paused"}]},i=t.getComponent("Component"),o=t.dom||t,s=t.registerPlugin||t.plugin,a=function(t){return"number"==typeof t&&t==t},h=function(t){return"string"==typeof t&&/^\S+$/.test(t)},d=function(r){var i,s;function d(t,e){var i;return i=r.call(this,t,e)||this,["start","end"].forEach(function(t){var e=i.options_[t];if(a(e))i[t+"Event_"]="timeupdate";else if(h(e))i[t+"Event_"]=e;else if("start"===t)throw new Error('invalid "start" option; expected number or string')}),["endListener_","rewindListener_","startListener_"].forEach(function(t){i[t]=function(e){return d.prototype[t].call(n(n(i)),e)}}),"timeupdate"===i.startEvent_&&i.on(t,"timeupdate",i.rewindListener_),i.debug('created, listening to "'+i.startEvent_+'" for "start" and "'+(i.endEvent_||"nothing")+'" for "end"'),i.hide(),i}s=r,(i=d).prototype=Object.create(s.prototype),i.prototype.constructor=i,i.__proto__=s;var l=d.prototype;return l.createEl=function(){var t=this.options_,n=t.content,r=t.showBackground?"vjs-overlay-background":"vjs-overlay-no-background",i=o.createEl("div",{className:"\n vjs-overlay\n vjs-overlay-"+t.align+"\n "+t.class+"\n "+r+"\n vjs-hidden\n "});return"string"==typeof n?i.innerHTML=n:n instanceof e.DocumentFragment?i.appendChild(n):o.appendContent(i,n),i},l.debug=function(){if(this.options_.debug){for(var e=t.log,n=e,r=arguments.length,i=new Array(r),o=0;o<r;o++)i[o]=arguments[o];e.hasOwnProperty(i[0])&&"function"==typeof e[i[0]]&&(n=e[i.shift()]),n.apply(void 0,["overlay#"+this.id()+": "].concat(i))}},l.hide=function(){return r.prototype.hide.call(this),this.debug("hidden"),this.debug('bound `startListener_` to "'+this.startEvent_+'"'),this.endEvent_&&(this.debug('unbound `endListener_` from "'+this.endEvent_+'"'),this.off(this.player(),this.endEvent_,this.endListener_)),this.on(this.player(),this.startEvent_,this.startListener_),this},l.shouldHide_=function(t,e){var n=this.options_.end;return a(n)?t>=n:n===e},l.show=function(){return r.prototype.show.call(this),this.off(this.player(),this.startEvent_,this.startListener_),this.debug("shown"),this.debug('unbound `startListener_` from "'+this.startEvent_+'"'),this.endEvent_&&(this.debug('bound `endListener_` to "'+this.endEvent_+'"'),this.on(this.player(),this.endEvent_,this.endListener_)),this},l.shouldShow_=function(t,e){var n=this.options_.start,r=this.options_.end;return a(n)?a(r)?t>=n&&t<r:this.hasShownSinceSeek_?Math.floor(t)===n:(this.hasShownSinceSeek_=!0,t>=n):n===e},l.startListener_=function(t){var e=this.player().currentTime();this.shouldShow_(e,t.type)&&this.show()},l.endListener_=function(t){var e=this.player().currentTime();this.shouldHide_(e,t.type)&&this.hide()},l.rewindListener_=function(t){var e=this.player().currentTime(),n=this.previousTime_,r=this.options_.start,i=this.options_.end;e<n&&(this.debug("rewind detected"),a(i)&&!this.shouldShow_(e)?(this.debug("hiding; "+i+" is an integer 
and overlay should not show at this time"),this.hasShownSinceSeek_=!1,this.hide()):h(i)&&e<r&&(this.debug("hiding; show point ("+r+") is before now ("+e+") and end point ("+i+") is an event"),this.hasShownSinceSeek_=!1,this.hide())),this.previousTime_=e},d}(i);t.registerComponent("Overlay",d);var l=function(e){var n=this,i=t.mergeOptions(r,e);Array.isArray(this.overlays_)&&this.overlays_.forEach(function(t){n.removeChild(t),n.controlBar&&n.controlBar.removeChild(t),t.dispose()});var o=i.overlays;delete i.overlays,this.overlays_=o.map(function(e){var r=t.mergeOptions(i,e),o="string"==typeof r.attachToControlBar||!0===r.attachToControlBar;if(!n.controls()||!n.controlBar)return n.addChild("overlay",r);if(o&&-1!==r.align.indexOf("bottom")){var s=n.controlBar.children()[0];if(void 0!==n.controlBar.getChild(r.attachToControlBar)&&(s=n.controlBar.getChild(r.attachToControlBar)),s){var a=n.controlBar.addChild("overlay",r);return n.controlBar.el().insertBefore(a.el(),s.el()),a}}var h=n.addChild("overlay",r);return n.el().insertBefore(h.el(),n.controlBar.el()),h})};return l.VERSION="2.1.4",s("overlay",l),l});


@ -109,10 +109,10 @@ function number_with_separator(val) {
return val;
}
function get_playlist(plid, timeouts = 0) {
function get_playlist(plid, retries = 5) {
playlist = document.getElementById('playlist');
if (timeouts > 10) {
if (retries <= 0) {
console.log('Failed to pull playlist');
playlist.innerHTML = '';
return;
@ -134,9 +134,8 @@ function get_playlist(plid, timeouts = 0) {
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('GET', plid_url, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
@ -174,19 +173,31 @@ function get_playlist(plid, timeouts = 0) {
}
}
xhr.ontimeout = function () {
console.log('Pulling playlist timed out.');
xhr.onerror = function () {
playlist = document.getElementById('playlist');
playlist.innerHTML =
'<h3 style="text-align:center"><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3><hr>';
get_playlist(plid, timeouts + 1);
console.log('Pulling playlist timed out... ' + retries + '/5');
setTimeout(function () { get_playlist(plid, retries - 1) }, 1000);
}
xhr.ontimeout = function () {
playlist = document.getElementById('playlist');
playlist.innerHTML =
'<h3 style="text-align:center"><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3><hr>';
console.log('Pulling playlist timed out... ' + retries + '/5');
get_playlist(plid, retries - 1);
}
xhr.send();
}
function get_reddit_comments(timeouts = 0) {
function get_reddit_comments(retries = 5) {
comments = document.getElementById('comments');
if (timeouts > 10) {
if (retries <= 0) {
console.log('Failed to pull comments');
comments.innerHTML = '';
return;
@ -201,9 +212,8 @@ function get_reddit_comments(timeouts = 0) {
'&hl=' + video_data.preferences.locale;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('GET', url, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
@ -238,7 +248,8 @@ function get_reddit_comments(timeouts = 0) {
comments.children[0].children[1].children[0].onclick = swap_comments;
} else {
if (video_data.params.comments[1] === 'youtube') {
get_youtube_comments(timeouts + 1);
console.log('Pulling comments failed... ' + retries + '/5');
setTimeout(function () { get_youtube_comments(retries - 1) }, 1000);
} else {
comments.innerHTML = fallback;
}
@ -246,16 +257,23 @@ function get_reddit_comments(timeouts = 0) {
}
}
xhr.ontimeout = function () {
console.log('Pulling comments timed out.');
get_reddit_comments(timeouts + 1);
xhr.onerror = function () {
console.log('Pulling comments failed... ' + retries + '/5');
setInterval(function () { get_reddit_comments(retries - 1) }, 1000);
}
xhr.ontimeout = function () {
console.log('Pulling comments failed... ' + retries + '/5');
get_reddit_comments(retries - 1);
}
xhr.send();
}
function get_youtube_comments(timeouts = 0) {
function get_youtube_comments(retries = 5) {
comments = document.getElementById('comments');
if (timeouts > 10) {
if (retries <= 0) {
console.log('Failed to pull comments');
comments.innerHTML = '';
return;
@ -271,9 +289,8 @@ function get_youtube_comments(timeouts = 0) {
'&thin_mode=' + video_data.preferences.thin_mode;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('GET', url, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
@ -303,7 +320,7 @@ function get_youtube_comments(timeouts = 0) {
comments.children[0].children[1].children[0].onclick = swap_comments;
} else {
if (video_data.params.comments[1] === 'youtube') {
get_youtube_comments(timeouts + 1);
setTimeout(function () { get_youtube_comments(retries - 1) }, 1000);
} else {
comments.innerHTML = '';
}
@ -311,12 +328,21 @@ function get_youtube_comments(timeouts = 0) {
}
}
xhr.ontimeout = function () {
console.log('Pulling comments timed out.');
xhr.onerror = function () {
comments.innerHTML =
'<h3 style="text-align:center"><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3>';
get_youtube_comments(timeouts + 1);
console.log('Pulling comments failed... ' + retries + '/5');
setInterval(function () { get_youtube_comments(retries - 1) }, 1000);
}
xhr.ontimeout = function () {
comments.innerHTML =
'<h3 style="text-align:center"><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3>';
console.log('Pulling comments failed... ' + retries + '/5');
get_youtube_comments(retries - 1);
}
xhr.send();
}
function get_youtube_replies(target, load_more) {
@ -334,9 +360,8 @@ function get_youtube_replies(target, load_more) {
'&continuation=' + continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('GET', url, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
@ -371,9 +396,11 @@ function get_youtube_replies(target, load_more) {
}
xhr.ontimeout = function () {
console.log('Pulling comments timed out.');
console.log('Pulling comments failed.');
body.innerHTML = fallback;
}
xhr.send();
}
if (video_data.play_next) {


@ -6,10 +6,9 @@ function mark_watched(target) {
'&id=' + target.getAttribute('data-id');
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('csrf_token=' + watched_data.csrf_token);
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
@ -18,11 +17,13 @@ function mark_watched(target) {
}
}
}
xhr.send('csrf_token=' + watched_data.csrf_token);
}
function mark_unwatched(target) {
var tile = target.parentNode.parentNode.parentNode.parentNode.parentNode;
tile.style.display = "none";
tile.style.display = 'none';
var count = document.getElementById('count')
count.innerText = count.innerText - 1;
@ -30,10 +31,9 @@ function mark_unwatched(target) {
'&id=' + target.getAttribute('data-id');
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('csrf_token=' + watched_data.csrf_token);
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
@ -43,4 +43,6 @@ function mark_unwatched(target) {
}
}
}
xhr.send('csrf_token=' + watched_data.csrf_token);
}


@ -13,7 +13,7 @@ services:
dockerfile: docker/Dockerfile
restart: unless-stopped
ports:
- "3000:3000"
- "127.0.0.1:3000:3000"
depends_on:
- postgres


@ -6,7 +6,7 @@
"Unsubscribe": "إلغاء الإشتراك",
"Subscribe": "إشتراك",
"View channel on YouTube": "زيارة القناة على موقع يوتيوب",
"View playlist on YouTube": "",
"View playlist on YouTube": "عرض قائمة التشغيل على اليوتيوب",
"newest": "الأجدد",
"oldest": "الأقدم",
"popular": "الاكثر شعبية",
@ -85,9 +85,9 @@
"Only show latest unwatched video from channel: ": "فقط اظهر اخر فيديو لم يتم رؤيتة من القناة: ",
"Only show unwatched: ": "فقط اظهر الذى لم يتم رؤيتة: ",
"Only show notifications (if there are any): ": "إظهار الإشعارات فقط (إذا كان هناك أي): ",
"Enable web notifications": "",
"`x` uploaded a video": "",
"`x` is live": "",
"Enable web notifications": "تفعيل إشعارات المتصفح",
"`x` uploaded a video": "`x` رفع فيديو",
"`x` is live": "`x` فى بث مباشر",
"Data preferences": "إعدادات التفضيلات",
"Clear watch history": "حذف سجل المشاهدة",
"Import/export data": "إضافة\\إستخراج البيانات",
@ -136,6 +136,7 @@
"Shared `x`": "شارك منذ `x`",
"`x` views": "`x` مشاهدون",
"Premieres in `x`": "يعرض فى `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "اهلا! يبدو ان الجافاسكريبت معطلة. اضغط هنا لعرض التعليقات, ضع فى إعتبارك انها ستأخذ وقت اطول للعرض.",
"View YouTube comments": "عرض تعليقات اليوتيوب",
"View more comments on Reddit": "عرض المزيد من التعليقات على\\من موقع Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "",
"(edited)": "(تم تعديلة)",
"YouTube comment permalink": "رابط التعليق على اليوتيوب",
"permalink": "",
"`x` marked it with a ❤": "`x` اعجب بهذا",
"Audio mode": "الوضع الصوتى",
"Video mode": "وضع الفيديو",
"Videos": "الفيديوهات",
"Playlists": "قوائم التشغيل",
"Community": "",
"Current version: ": "الإصدار الحالى"
}


@ -85,9 +85,9 @@
"Only show latest unwatched video from channel: ": "Nur neueste ungesehene Videos des Kanals anzeigen: ",
"Only show unwatched: ": "Nur ungesehene anzeigen: ",
"Only show notifications (if there are any): ": "Nur Benachrichtigungen anzeigen (wenn es welche gibt): ",
"Enable web notifications": "",
"`x` uploaded a video": "",
"`x` is live": "",
"Enable web notifications": "Webbenachrichtigungen aktivieren",
"`x` uploaded a video": "`x` hat ein Video hochgeladen",
"`x` is live": "`x` ist live",
"Data preferences": "Dateneinstellungen",
"Clear watch history": "Verlauf löschen",
"Import/export data": "Daten im- exportieren",
@ -136,6 +136,7 @@
"Shared `x`": "Geteilt `x`",
"`x` views": "`x` Ansichten",
"Premieres in `x`": "Premieren in `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Hallo! Anscheinend haben Sie JavaScript deaktiviert. Klicken Sie hier um Kommentare anzuzeigen, beachten sie dass es etwas länger dauern kann um sie zu laden.",
"View YouTube comments": "YouTube Kommentare anzeigen",
"View more comments on Reddit": "Mehr Kommentare auf Reddit anzeigen",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(editiert)",
"YouTube comment permalink": "YouTube-Kommentar Permalink",
"permalink": "",
"`x` marked it with a ❤": "`x` markierte es mit einem ❤",
"Audio mode": "Audiomodus",
"Video mode": "Videomodus",
"Videos": "Videos",
"Playlists": "Wiedergabelisten",
"Community": "",
"Current version: ": "Aktuelle Version: "
}
}


@ -154,6 +154,7 @@
"": "`x` προβολές"
},
"Premieres in `x`": "Πρώτη προβολή σε `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Γεια! Φαίνεται πως έχετε απενεργοποιήσει το JavaScript. Πατήστε εδώ για προβολή σχολίων, αλλά έχετε υπ'όψιν σας πως ίσως φορτώσουν πιο αργά. ",
"View YouTube comments": "Προβολή σχολίων από το YouTube",
"View more comments on Reddit": "Προβολή περισσότερων σχολίων στο Reddit",
@ -354,10 +355,12 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(τροποποιημένο)",
"YouTube comment permalink": "Σύνδεσμος YouTube σχολίου",
"permalink": "",
"`x` marked it with a ❤": "Ο χρηστης `x` έβαλε ❤",
"Audio mode": "Λειτουργία ήχου",
"Video mode": "Λειτουργία βίντεο",
"Videos": "Βίντεο",
"Playlists": "Λίστες Αναπαραγωγής",
"Community": "",
"Current version: ": "Τρέχουσα έκδοση: "
}


@ -154,6 +154,7 @@
"": "`x` views"
},
"Premieres in `x`": "Premieres in `x`",
"Premieres `x`": "Premieres `x`",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.",
"View YouTube comments": "View YouTube comments",
"View more comments on Reddit": "View more comments on Reddit",
@ -354,10 +355,12 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(edited)",
"YouTube comment permalink": "YouTube comment permalink",
"permalink": "",
"`x` marked it with a ❤": "`x` marked it with a ❤",
"Audio mode": "Audio mode",
"Video mode": "Video mode",
"Videos": "Videos",
"Playlists": "Playlists",
"Community": "Community",
"Current version: ": "Current version: "
}


@ -136,6 +136,7 @@
"Shared `x`": "Konigita `x`",
"`x` views": "`x` spektaĵoj",
"Premieres in `x`": "Premieras en `x`",
"Premieres `x`": "Premieras `x`",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Saluton! Ŝajnas, ke vi havas Ĝavoskripton malebligitan. Klaku ĉi tie por vidi komentojn, memoru, ke la ŝargado povus daŭri iom pli.",
"View YouTube comments": "Vidi komentojn de YouTube",
"View more comments on Reddit": "Vidi pli komentoj en Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%A %-d de %B %Y",
"(edited)": "(redaktita)",
"YouTube comment permalink": "Fiksligilo de la komento en YouTube",
"permalink": "konstanta ligilo",
"`x` marked it with a ❤": "`x` markis ĝin per ❤",
"Audio mode": "Aŭda reĝimo",
"Video mode": "Videa reĝimo",
"Videos": "Videoj",
"Playlists": "Ludlistoj",
"Community": "Komunumo",
"Current version: ": "Nuna versio: "
}


@ -136,6 +136,7 @@
"Shared `x`": "Compartido `x`",
"`x` views": "`x` visualizaciones",
"Premieres in `x`": "Se estrena en `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "¡Hola! Parece que tiene JavaScript desactivado. Haga clic aquí para ver los comentarios, pero tenga en cuenta que pueden tardar un poco más en cargarse.",
"View YouTube comments": "Ver los comentarios de YouTube",
"View more comments on Reddit": "Ver más comentarios en Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(editado)",
"YouTube comment permalink": "Enlace permanente de YouTube del comentario",
"permalink": "",
"`x` marked it with a ❤": "`x` lo ha marcado con un ❤",
"Audio mode": "Modo de audio",
"Video mode": "Modo de vídeo",
"Videos": "Vídeos",
"Playlists": "Listas de reproducción",
"Community": "",
"Current version: ": "Versión actual: "
}


@ -309,6 +309,7 @@
"%A %B %-d, %Y": "",
"(edited)": "",
"YouTube comment permalink": "",
"permalink": "",
"`x` marked it with a ❤": "",
"Audio mode": "",
"Video mode": "",


@ -136,6 +136,7 @@
"Shared `x`": "Ajoutée le `x`",
"`x` views": "`x` vues",
"Premieres in `x`": "Première dans `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Il semblerait que JavaScript soit désactivé. Cliquez ici pour voir les commentaires sans. Gardez à l'esprit que le chargement peut prendre plus de temps.",
"View YouTube comments": "Voir les commentaires YouTube",
"View more comments on Reddit": "Voir plus de commentaires sur Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%A %-d %B %Y",
"(edited)": "(modifié)",
"YouTube comment permalink": "Lien YouTube permanent vers le commentaire",
"permalink": "",
"`x` marked it with a ❤": "`x` l'a marqué d'un ❤",
"Audio mode": "Mode Audio",
"Video mode": "Mode Vidéo",
"Videos": "Vidéos",
"Playlists": "Liste de lecture",
"Community": "",
"Current version: ": "Version actuelle : "
}

locales/is.json Normal file

@ -0,0 +1,319 @@
{
"`x` subscribers.": "`x` áskrifandar.",
"`x` videos.": "`x` myndbönd.",
"LIVE": "BEINT",
"Shared `x` ago": "Deilt `x` síðan",
"Unsubscribe": "Afskrá",
"Subscribe": "Gerast áskrifandi",
"View channel on YouTube": "Skoða rás á YouTube",
"View playlist on YouTube": "Skoða spilunarlisti á YouTube",
"newest": "nýjasta",
"oldest": "elsta",
"popular": "vinsællt",
"last": "síðast",
"Next page": "Næsta síða",
"Previous page": "Fyrri síða",
"Clear watch history?": "Hreinsa áhorfssögu?",
"New password": "Nýtt lykilorð",
"New passwords must match": "Nýtt lykilorð verður að passa",
"Cannot change password for Google accounts": "Ekki er hægt að breyta lykilorði fyrir Google reikninga",
"Authorize token?": "Leyfa tákn?",
"Authorize token for `x`?": "Leyfa tákn fyrir `x`?",
"Yes": "Já",
"No": "Nei",
"Import and Export Data": "Innflutningur og Útflutningur Gagna",
"Import": "Flytja inn",
"Import Invidious data": "Flytja inn Invidious gögn",
"Import YouTube subscriptions": "Flytja inn YouTube áskriftir",
"Import FreeTube subscriptions (.db)": "Flytja inn FreeTube áskriftir (.db)",
"Import NewPipe subscriptions (.json)": "Flytja inn NewPipe áskriftir (.json)",
"Import NewPipe data (.zip)": "Flytja inn NewPipe gögn (.zip)",
"Export": "Flytja út",
"Export subscriptions as OPML": "Flytja út áskriftir sem OPML",
"Export subscriptions as OPML (for NewPipe & FreeTube)": "Flytja út áskriftir sem OPML (fyrir NewPipe & FreeTube)",
"Export data as JSON": "Flytja út gögn sem JSON",
"Delete account?": "Eyða reikningi?",
"History": "Saga",
"An alternative front-end to YouTube": "Önnur framhlið fyrir YouTube",
"JavaScript license information": "JavaScript leyfi upplýsingar",
"source": "uppspretta",
"Log in": "Skrá inn",
"Log in/register": "Innskráning/nýskráning",
"Log in with Google": "Skrá inn með Google",
"User ID": "Notandakenni",
"Password": "Lykilorð",
"Time (h:mm:ss):": "Tími (h:mm: ss):",
"Text CAPTCHA": "Texta CAPTCHA",
"Image CAPTCHA": "Mynd CAPTCHA",
"Sign In": "Skrá inn",
"Register": "Nýskrá",
"E-mail": "Tölvupóstur",
"Google verification code": "Google staðfestingarkóði",
"Preferences": "Kjörstillingar",
"Player preferences": "Kjörstillingar spilara",
"Always loop: ": "Alltaf lykkja: ",
"Autoplay: ": "Spila sjálfkrafa: ",
"Play next by default: ": "Spila næst sjálfgefið: ",
"Autoplay next video: ": "Spila næst sjálfkrafa: ",
"Listen by default: ": "Hlusta sjálfgefið: ",
"Proxy videos? ": "",
"Default speed: ": "Sjálfgefinn hraði: ",
"Preferred video quality: ": "Æskilegt myndbands gæði: ",
"Player volume: ": "Spilara bindi: ",
"Default comments: ": "Sjálfgefin ummæli: ",
"youtube": "youtube",
"reddit": "reddit",
"Default captions: ": "Sjálfgefin texti: ",
"Fallback captions: ": "Varatextar: ",
"Show related videos? ": "Sýna tengd myndbönd? ",
"Show annotations by default? ": "",
"Visual preferences": "Sjónrænar stillingar",
"Dark mode: ": "Myrkur ham: ",
"Thin mode: ": "Þunnt ham: ",
"Subscription preferences": "Áskriftarstillingar",
"Show annotations by default for subscribed channels? ": "",
"Redirect homepage to feed: ": "",
"Number of videos shown in feed: ": "",
"Sort videos by: ": "Raða myndbönd eftir: ",
"published": "birt",
"published - reverse": "birt - afturábak",
"alphabetically": "í stafrófsröð",
"alphabetically - reverse": "stafrófsröð - afturábak",
"channel name": "heiti rásar",
"channel name - reverse": "heiti rásar - afturábak",
"Only show latest video from channel: ": "Sýna aðeins nýjasta myndband frá rás: ",
"Only show latest unwatched video from channel: ": "Sýna aðeins nýjasta óséð myndband frá rás: ",
"Only show unwatched: ": "Sýna aðeins óséð: ",
"Only show notifications (if there are any): ": "Sýna aðeins tilkynningar (ef einhverjar eru): ",
"Enable web notifications": "Virkja veftilkynningar",
"`x` uploaded a video": "' x ' hlóð upp myndband",
"`x` is live": "' x ' er í beinni",
"Data preferences": "Gagnastillingar",
"Clear watch history": "Hreinsa áhorfssögu",
"Import/export data": "Flytja inn/út gögn",
"Change password": "Breyta lykilorði",
"Manage subscriptions": "Stjórna áskriftum",
"Manage tokens": "",
"Watch history": "",
"Delete account": "",
"Administrator preferences": "",
"Default homepage: ": "",
"Feed menu: ": "",
"Top enabled? ": "",
"CAPTCHA enabled? ": "",
"Login enabled? ": "",
"Registration enabled? ": "",
"Report statistics? ": "",
"Save preferences": "",
"Subscription manager": "",
"Token manager": "",
"Token": "",
"`x` subscriptions.": "",
"`x` tokens.": "",
"Import/export": "",
"unsubscribe": "",
"revoke": "",
"Subscriptions": "",
"`x` unseen notifications.": "",
"search": "",
"Log out": "",
"Released under the AGPLv3 by Omar Roth.": "",
"Source available here.": "",
"View JavaScript license information.": "",
"View privacy policy.": "",
"Trending": "",
"Unlisted": "",
"Watch on YouTube": "",
"Hide annotations": "",
"Show annotations": "",
"Genre: ": "",
"License: ": "",
"Family friendly? ": "",
"Wilson score: ": "",
"Engagement: ": "",
"Whitelisted regions: ": "",
"Blacklisted regions: ": "",
"Shared `x`": "",
"`x` views.": "",
"Premieres in `x`": "",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "",
"View YouTube comments": "",
"View more comments on Reddit": "",
"View `x` comments": "",
"View Reddit comments": "",
"Hide replies": "",
"Show replies": "",
"Incorrect password": "",
"Quota exceeded, try again in a few hours": "",
"Unable to log in, make sure two-factor authentication (Authenticator or SMS) is turned on.": "",
"Invalid TFA code": "",
"Login failed. This may be because two-factor authentication is not turned on for your account.": "",
"Wrong answer": "",
"Erroneous CAPTCHA": "",
"CAPTCHA is a required field": "",
"User ID is a required field": "",
"Password is a required field": "",
"Wrong username or password": "",
"Please sign in using 'Log in with Google'": "",
"Password cannot be empty": "",
"Password cannot be longer than 55 characters": "",
"Please log in": "",
"Invidious Private Feed for `x`": "",
"channel:`x`": "",
"Deleted or invalid channel": "",
"This channel does not exist.": "",
"Could not get channel info.": "",
"Could not fetch comments": "",
"View `x` replies.": "",
"`x` ago": "",
"Load more": "",
"`x` points.": "",
"Could not create mix.": "",
"Empty playlist": "",
"Not a playlist.": "",
"Playlist does not exist.": "",
"Could not pull trending pages.": "",
"Hidden field \"challenge\" is a required field": "",
"Hidden field \"token\" is a required field": "",
"Erroneous challenge": "",
"Erroneous token": "",
"No such user": "",
"Token is expired, please try again": "",
"English": "",
"English (auto-generated)": "",
"Afrikaans": "",
"Albanian": "",
"Amharic": "",
"Arabic": "",
"Armenian": "",
"Azerbaijani": "",
"Bangla": "",
"Basque": "",
"Belarusian": "",
"Bosnian": "",
"Bulgarian": "",
"Burmese": "",
"Catalan": "",
"Cebuano": "",
"Chinese (Simplified)": "",
"Chinese (Traditional)": "",
"Corsican": "",
"Croatian": "",
"Czech": "",
"Danish": "",
"Dutch": "",
"Esperanto": "",
"Estonian": "",
"Filipino": "",
"Finnish": "",
"French": "",
"Galician": "",
"Georgian": "",
"German": "",
"Greek": "",
"Gujarati": "",
"Haitian Creole": "",
"Hausa": "",
"Hawaiian": "",
"Hebrew": "",
"Hindi": "",
"Hmong": "",
"Hungarian": "",
"Icelandic": "",
"Igbo": "",
"Indonesian": "",
"Irish": "",
"Italian": "",
"Japanese": "",
"Javanese": "",
"Kannada": "",
"Kazakh": "",
"Khmer": "",
"Korean": "",
"Kurdish": "",
"Kyrgyz": "",
"Lao": "",
"Latin": "",
"Latvian": "",
"Lithuanian": "",
"Luxembourgish": "",
"Macedonian": "",
"Malagasy": "",
"Malay": "",
"Malayalam": "",
"Maltese": "",
"Maori": "",
"Marathi": "",
"Mongolian": "",
"Nepali": "",
"Norwegian Bokmål": "",
"Nyanja": "",
"Pashto": "",
"Persian": "",
"Polish": "",
"Portuguese": "",
"Punjabi": "",
"Romanian": "",
"Russian": "",
"Samoan": "",
"Scottish Gaelic": "",
"Serbian": "",
"Shona": "",
"Sindhi": "",
"Sinhala": "",
"Slovak": "",
"Slovenian": "",
"Somali": "",
"Southern Sotho": "",
"Spanish": "",
"Spanish (Latin America)": "",
"Sundanese": "",
"Swahili": "",
"Swedish": "",
"Tajik": "",
"Tamil": "",
"Telugu": "",
"Thai": "",
"Turkish": "",
"Ukrainian": "",
"Urdu": "",
"Uzbek": "",
"Vietnamese": "",
"Welsh": "",
"Western Frisian": "",
"Xhosa": "",
"Yiddish": "",
"Yoruba": "",
"Zulu": "",
"`x` years.": "",
"`x` months.": "",
"`x` weeks.": "",
"`x` days.": "",
"`x` hours.": "",
"`x` minutes.": "",
"`x` seconds.": "",
"Fallback comments: ": "",
"Popular": "",
"Top": "",
"About": "",
"Rating: ": "",
"Language: ": "",
"View as playlist": "",
"Default": "",
"Music": "",
"Gaming": "",
"News": "",
"Movies": "",
"Download": "",
"Download as: ": "",
"%A %B %-d, %Y": "",
"(edited)": "",
"YouTube comment permalink": "",
"`x` marked it with a ❤": "",
"Audio mode": "",
"Video mode": "",
"Videos": "",
"Playlists": "",
"Current version: ": ""
}

View File

@ -309,10 +309,12 @@
"%A %B %-d, %Y": "%A %-d %B %Y",
"(edited)": "(modificato)",
"YouTube comment permalink": "Link permanente al commento di YouTube",
"permalink": "",
"`x` marked it with a ❤": "`x` l'ha contrassegnato con un ❤",
"Audio mode": "Modalità audio",
"Video mode": "Modalità video",
"Videos": "",
"Playlists": "",
"Community": "",
"Current version: ": ""
}

View File

@ -136,6 +136,7 @@
"Shared `x`": "Delt `x`",
"`x` views": "`x` visninger",
"Premieres in `x`": "Premiere om `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Hei. Det ser ut til at du har JavaScript avslått. Klikk her for å vise kommentarer, ha i minnet at innlasting tar lengre tid.",
"View YouTube comments": "Vis YouTube-kommentarer",
"View more comments on Reddit": "Vis flere kommenterer på Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "",
"(edited)": "(redigert)",
"YouTube comment permalink": "Permanent YouTube-lenke til innholdet",
"permalink": "permanent lenke",
"`x` marked it with a ❤": "`x` levnet et ❤",
"Audio mode": "Lydmodus",
"Video mode": "Video-modus",
"Videos": "Videoer",
"Playlists": "Spillelister",
"Community": "",
"Current version: ": "Nåværende versjon: "
}

View File

@ -85,9 +85,9 @@
"Only show latest unwatched video from channel: ": "Alleen nieuwste niet-bekeken video van kanaal tonen: ",
"Only show unwatched: ": "Alleen niet-bekeken videos tonen: ",
"Only show notifications (if there are any): ": "Alleen meldingen tonen (als die er zijn): ",
"Enable web notifications": "",
"`x` uploaded a video": "",
"`x` is live": "",
"Enable web notifications": "Systemmeldingen inschakelen",
"`x` uploaded a video": "`x` heeft een video geüpload",
"`x` is live": "`x` zendt nu live uit",
"Data preferences": "Gegevensinstellingen",
"Clear watch history": "Kijkgeschiedenis wissen",
"Import/export data": "Gegevens im-/exporteren",
@ -117,13 +117,13 @@
"`x` unseen notifications": "`x` ongelezen meldingen",
"search": "zoeken",
"Log out": "Uitloggen",
"Released under the AGPLv3 by Omar Roth.": "Uitgegeven onder de AGPLv3-licentie door Omar Roth.",
"Released under the AGPLv3 by Omar Roth.": "Uitgebracht onder de AGPLv3-licentie, door Omar Roth.",
"Source available here.": "De broncode is hier beschikbaar.",
"View JavaScript license information.": "JavaScript-licentieinformatie tonen.",
"View privacy policy.": "Privacybeleid tonen",
"Trending": "Uitgelicht",
"Unlisted": "Verborgen",
"Watch on YouTube": "Bekijk video op YouTube",
"Watch on YouTube": "Video bekijken op YouTube",
"Hide annotations": "Annotaties verbergen",
"Show annotations": "Annotaties tonen",
"Genre: ": "Genre: ",
@ -136,6 +136,7 @@
"Shared `x`": "`x` gedeeld",
"`x` views": "`x` weergaven",
"Premieres in `x`": "Verschijnt over `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Hoi! Het lijkt erop dat je JavaScript hebt uitgeschakeld. Klik hier om de reacties te bekijken. Let op: het laden duurt wat langer.",
"View YouTube comments": "YouTube-reacties tonen",
"View more comments on Reddit": "Meer reacties bekijken op Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(bewerkt)",
"YouTube comment permalink": "Link naar YouTube-reactie",
"permalink": "",
"`x` marked it with a ❤": "`x` heeft dit gemarkeerd met ❤",
"Audio mode": "Audiomodus",
"Video mode": "Videomodus",
"Videos": "Video's",
"Playlists": "Afspeellijsten",
"Community": "",
"Current version: ": "Huidige versie: "
}

View File

@ -136,6 +136,7 @@
"Shared `x`": "Udostępniono `x`",
"`x` views": "`x` wyświetleń",
"Premieres in `x`": "Publikacja za `x`",
"Premieres `x`": "",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Cześć! Wygląda na to, że masz wyłączoną obsługę JavaScriptu. Kliknij tutaj, żeby zobaczyć komentarze. Pamiętaj, że wczytywanie może potrwać dłużej.",
"View YouTube comments": "Wyświetl komentarze z YouTube",
"View more comments on Reddit": "Wyświetl więcej komentarzy na Reddicie",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "",
"(edited)": "(edytowany)",
"YouTube comment permalink": "Odnośnik bezpośredni do komentarza na YouTube",
"permalink": "",
"`x` marked it with a ❤": "`x` oznaczonych ❤",
"Audio mode": "Tryb audio",
"Video mode": "Tryb wideo",
"Videos": "Filmy",
"Playlists": "Playlisty",
"Community": "",
"Current version: ": "Aktualna wersja: "
}

View File

@ -6,7 +6,7 @@
"Unsubscribe": "Отписаться",
"Subscribe": "Подписаться",
"View channel on YouTube": "Смотреть канал на YouTube",
"View playlist on YouTube": "",
"View playlist on YouTube": "Посмотреть плейлист на YouTube",
"newest": "самые свежие",
"oldest": "самые старые",
"popular": "популярные",
@ -85,9 +85,9 @@
"Only show latest unwatched video from channel: ": "Показывать только непросмотренные видео с каналов: ",
"Only show unwatched: ": "Показывать только непросмотренные видео: ",
"Only show notifications (if there are any): ": "Показывать только оповещения, если они есть: ",
"Enable web notifications": "",
"`x` uploaded a video": "",
"`x` is live": "",
"Enable web notifications": "Включить уведомления в браузере",
"`x` uploaded a video": "`x` разместил видео",
"`x` is live": "`x` в прямом эфире",
"Data preferences": "Настройки данных",
"Clear watch history": "Очистить историю просмотров",
"Import/export data": "Импорт/Экспорт данных",
@ -136,6 +136,7 @@
"Shared `x`": "Опубликовано `x`",
"`x` views": "`x` просмотров",
"Premieres in `x`": "Премьера через `x`",
"Premieres `x`": "Премьера `x`",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Похоже, у вас отключён JavaScript. Чтобы увидить комментарии, нажмите сюда, но учтите: они могут загружаться немного медленнее.",
"View YouTube comments": "Смотреть комментарии с YouTube",
"View more comments on Reddit": "Посмотреть больше комментариев на Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%-d %B %Y, %A",
"(edited)": "(изменено)",
"YouTube comment permalink": "Прямая ссылка на YouTube",
"permalink": "",
"`x` marked it with a ❤": "❤ от автора канала \"`x`\"",
"Audio mode": "Аудио режим",
"Video mode": "Видео режим",
"Videos": "Видео",
"Playlists": "Плейлисты",
"Community": "",
"Current version: ": "Текущая версия: "
}

View File

@ -6,7 +6,7 @@
"Unsubscribe": "Відписатися",
"Subscribe": "Підписатися",
"View channel on YouTube": "Подивитися канал на YouTube",
"View playlist on YouTube": "",
"View playlist on YouTube": "Подивитися плейлист на YouTube",
"newest": "найновіше",
"oldest": "найстаріше",
"popular": "популярне",
@ -85,9 +85,9 @@
"Only show latest unwatched video from channel: ": "Показувати тільки непереглянуті відео з каналів: ",
"Only show unwatched: ": "Показувати тільки непереглянуті відео: ",
"Only show notifications (if there are any): ": "Показувати лише сповіщення, якщо вони є: ",
"Enable web notifications": "",
"`x` uploaded a video": "",
"`x` is live": "",
"Enable web notifications": "Ввімкнути сповіщення в браузері",
"`x` uploaded a video": "`x` розмістив відео",
"`x` is live": "`x` у прямому ефірі",
"Data preferences": "Налаштування даних",
"Clear watch history": "Очистити історію переглядів",
"Import/export data": "Імпорт і експорт даних",
@ -136,6 +136,7 @@
"Shared `x`": "Розміщено `x`",
"`x` views": "`x` переглядів",
"Premieres in `x`": "Прем’єра через `x`",
"Premieres `x`": "Прем’єра `x`",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "Схоже, у вас відключений JavaScript. Щоб побачити коментарі, натисніть сюда, але майте на увазі, що вони можуть завантажуватися трохи довше.",
"View YouTube comments": "Переглянути коментарі з YouTube",
"View more comments on Reddit": "Переглянути більше коментарів на Reddit",
@ -309,10 +310,12 @@
"%A %B %-d, %Y": "%-d %B %Y, %A",
"(edited)": "(змінено)",
"YouTube comment permalink": "Пряме посилання на коментар в YouTube",
"permalink": "",
"`x` marked it with a ❤": "❤ цьому від каналу `x`",
"Audio mode": "Аудіорежим",
"Video mode": "Відеорежим",
"Videos": "Відео",
"Playlists": "Плейлисти",
"Community": "",
"Current version: ": "Поточна версія: "
}

locales/zh-CN.json (new file, 321 lines)
View File

@ -0,0 +1,321 @@
{
"`x` subscribers": "`x` 订阅者",
"`x` videos": "`x` 视频",
"LIVE": "直播",
"Shared `x` ago": "`x` 前分享",
"Unsubscribe": "取消订阅",
"Subscribe": "订阅",
"View channel on YouTube": "在 YouTube 查看频道",
"View playlist on YouTube": "在 YouTube 查看播放列表",
"newest": "最新",
"oldest": "最老",
"popular": "时下流行",
"last": "last",
"Next page": "下一页",
"Previous page": "上一页",
"Clear watch history?": "清除观看历史?",
"New password": "新密码",
"New passwords must match": "新密码必须匹配",
"Cannot change password for Google accounts": "无法为 Google 账户更改密码",
"Authorize token?": "授权令牌?",
"Authorize token for `x`?": "`x` 的授权令牌?",
"Yes": "是",
"No": "否",
"Import and Export Data": "导入与导出数据",
"Import": "导入",
"Import Invidious data": "导入 Invidious 数据",
"Import YouTube subscriptions": "导入 YouTube 订阅",
"Import FreeTube subscriptions (.db)": "导入 FreeTube 订阅 (.db)",
"Import NewPipe subscriptions (.json)": "导入 NewPipe 订阅 (.json)",
"Import NewPipe data (.zip)": "导入 NewPipe 数据 (.zip)",
"Export": "导出",
"Export subscriptions as OPML": "导出订阅到 OPML 格式",
"Export subscriptions as OPML (for NewPipe & FreeTube)": "导出订阅到 OPML 格式(用于 NewPipe 及 FreeTube",
"Export data as JSON": "导出数据为 JSON 格式",
"Delete account?": "删除账户?",
"History": "历史",
"An alternative front-end to YouTube": "另一个 YouTube 前端",
"JavaScript license information": "JavaScript 授权信息",
"source": "source",
"Log in": "登录",
"Log in/register": "登录/注册",
"Log in with Google": "使用 Google 账户登录",
"User ID": "用户 ID",
"Password": "密码",
"Time (h:mm:ss):": "时间 (h:mm:ss):",
"Text CAPTCHA": "文本验证码",
"Image CAPTCHA": "图片验证码",
"Sign In": "登录",
"Register": "注册",
"E-mail": "E-mail",
"Google verification code": "Google 验证代码",
"Preferences": "偏好设置",
"Player preferences": "播放器偏好设置",
"Always loop: ": "循环:",
"Autoplay: ": "自动播放:",
"Play next by default: ": "默认自动播放下一个视频:",
"Autoplay next video: ": "自动播放下一个视频:",
"Listen by default: ": "默认只聆听声音:",
"Proxy videos? ": "代理视频?",
"Default speed: ": "默认速度:",
"Preferred video quality: ": "视频质量偏好:",
"Player volume: ": "播放器音量:",
"Default comments: ": "默认评论源:",
"youtube": "YouTube",
"reddit": "Reddit",
"Default captions: ": "默认字幕语言:",
"Fallback captions: ": "后备字幕语言:",
"Show related videos? ": "显示相关视频?",
"Show annotations by default? ": "默认显示视频注释?",
"Visual preferences": "视觉选项",
"Dark mode: ": "暗色模式:",
"Thin mode: ": "窄页模式:",
"Subscription preferences": "订阅设置",
"Show annotations by default for subscribed channels? ": "在订阅频道的视频默认显示注释?",
"Redirect homepage to feed: ": "跳转主页到 feed: ",
"Number of videos shown in feed: ": "Feed 中显示的视频数量:",
"Sort videos by: ": "视频排序方式:",
"published": "发布时间",
"published - reverse": "发布时间(反向)",
"alphabetically": "字母序",
"alphabetically - reverse": "字母序(反向)",
"channel name": "频道名称",
"channel name - reverse": "频道名称(反向)",
"Only show latest video from channel: ": "只显示订阅频道的最新一条视频:",
"Only show latest unwatched video from channel: ": "只显示订阅频道的最新未看过视频:",
"Only show unwatched: ": "只显示未看过的视频:",
"Only show notifications (if there are any): ": "只显示通知(如有):",
"Enable web notifications": "启用浏览器通知",
"`x` uploaded a video": "`x` 上传了视频",
"`x` is live": "`x` 正在直播",
"Data preferences": "数据选项",
"Clear watch history": "清除观看历史",
"Import/export data": "导入/导出数据",
"Change password": "更改密码",
"Manage subscriptions": "管理订阅",
"Manage tokens": "管理令牌",
"Watch history": "观看历史",
"Delete account": "删除账户",
"Administrator preferences": "管理员选项",
"Default homepage: ": "默认主页:",
"Feed menu: ": "Feed 菜单:",
"Top enabled? ": "启用“热门视频”页?",
"CAPTCHA enabled? ": "启用验证码?",
"Login enabled? ": "启用登录?",
"Registration enabled? ": "启用注册?",
"Report statistics? ": "报告统计信息?",
"Save preferences": "保存选项",
"Subscription manager": "订阅管理器",
"Token manager": "令牌管理器",
"Token": "令牌",
"`x` subscriptions": "`x` 个订阅",
"`x` tokens": "`x` 个令牌",
"Import/export": "导入/导出",
"unsubscribe": "取消订阅",
"revoke": "吊销",
"Subscriptions": "订阅",
"`x` unseen notifications": "`x` 条未读通知",
"search": "搜索",
"Log out": "登出",
"Released under the AGPLv3 by Omar Roth.": "由 Omar Roth 开发,以 AGPLv3 授权。",
"Source available here.": "源码可在此查看。",
"View JavaScript license information.": "查看 JavaScript 协议信息。",
"View privacy policy.": "查看隐私政策。",
"Trending": "时下流行",
"Unlisted": "不公开",
"Watch on YouTube": "在 YouTube 观看",
"Hide annotations": "隐藏注释",
"Show annotations": "显示注释",
"Genre: ": "风格:",
"License: ": "协议:",
"Family friendly? ": "家庭友好?",
"Wilson score: ": "威尔逊得分:",
"Engagement: ": "参与度:",
"Whitelisted regions: ": "白名单区域:",
"Blacklisted regions: ": "黑名单区域:",
"Shared `x`": "`x`发布",
"`x` views": "`x` 播放",
"Premieres in `x`": "首映于 `x` 后",
"Premieres `x`": "首映于 `x`",
"Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.": "你好!看起来你关闭了 JavaScript。点击这里阅读评论。注意它们加载的时间可能会稍长。",
"View YouTube comments": "查看 YouTube 评论",
"View more comments on Reddit": "在 Reddit 查看更多评论",
"View `x` comments": "查看 `x` 条评论",
"View Reddit comments": "查看 Reddit 评论",
"Hide replies": "隐藏回复",
"Show replies": "显示回复",
"Incorrect password": "密码错误",
"Quota exceeded, try again in a few hours": "已超出限额,请于几小时后重试",
"Unable to log in, make sure two-factor authentication (Authenticator or SMS) is turned on.": "无法登录。请确认你的短信或验证器的二步验证已打开。",
"Invalid TFA code": "无效的二步验证码",
"Login failed. This may be because two-factor authentication is not turned on for your account.": "登录失败。可能是因为二步验证未打开。",
"Wrong answer": "错误的回复",
"Erroneous CAPTCHA": "验证码错误",
"CAPTCHA is a required field": "验证码必填",
"User ID is a required field": "用户名必填",
"Password is a required field": "密码必填",
"Wrong username or password": "用户名或密码错误",
"Please sign in using 'Log in with Google'": "请通过谷歌账户登录",
"Password cannot be empty": "密码不能为空",
"Password cannot be longer than 55 characters": "密码长度不能大于 55",
"Please log in": "请登录",
"Invidious Private Feed for `x`": "`x` 的 Invidious 私人 feed",
"channel:`x`": "频道:`x`",
"Deleted or invalid channel": "已删除或无效频道",
"This channel does not exist.": "频道不存在。",
"Could not get channel info.": "无法获取频道信息。",
"Could not fetch comments": "无法获取评论",
"View `x` replies": "查看 `x` 条回复",
"`x` ago": "`x` 前",
"Load more": "加载更多",
"`x` points": "`x` 分",
"Could not create mix.": "无法创建合集。",
"Empty playlist": "空播放列表",
"Not a playlist.": "非播放列表。",
"Playlist does not exist.": "播放列表不存在。",
"Could not pull trending pages.": "无法获取“时下流行”页面。",
"Hidden field \"challenge\" is a required field": "隐藏表单项 \"challenge\" 为必填",
"Hidden field \"token\" is a required field": "隐藏表单项 \"token\" 为必填",
"Erroneous challenge": "错误的验证回复(challenge)",
"Erroneous token": "错误的令牌",
"No such user": "用户不存在",
"Token is expired, please try again": "令牌过期,请重试",
"English": "英语",
"English (auto-generated)": "英语(自动生成)",
"Afrikaans": "南非荷兰语",
"Albanian": "阿尔巴尼亚语",
"Amharic": "阿姆哈拉语",
"Arabic": "阿拉伯语",
"Armenian": "亚美尼亚语",
"Azerbaijani": "阿塞拜疆语",
"Bangla": "孟加拉语",
"Basque": "巴斯克语",
"Belarusian": "白俄罗斯语",
"Bosnian": "波黑语",
"Bulgarian": "保加利亚语",
"Burmese": "缅甸语",
"Catalan": "加泰罗尼亚语",
"Cebuano": "宿雾语",
"Chinese (Simplified)": "中文(简体)",
"Chinese (Traditional)": "中文(繁体)",
"Corsican": "科西嘉语",
"Croatian": "克罗地亚语",
"Czech": "捷克语",
"Danish": "丹麦语",
"Dutch": "荷兰语",
"Esperanto": "世界语",
"Estonian": "爱沙尼亚语",
"Filipino": "菲律宾语",
"Finnish": "芬兰语",
"French": "法语",
"Galician": "加利西亚语",
"Georgian": "格鲁吉亚语",
"German": "德语",
"Greek": "希腊语",
"Gujarati": "古吉拉特语",
"Haitian Creole": "海地克里奥尔语",
"Hausa": "豪萨语",
"Hawaiian": "夏威夷语",
"Hebrew": "希伯来语",
"Hindi": "印地语",
"Hmong": "苗语",
"Hungarian": "匈牙利语",
"Icelandic": "冰岛语",
"Igbo": "伊博语",
"Indonesian": "印度尼西亚语",
"Irish": "爱尔兰语",
"Italian": "意大利语",
"Japanese": "日语",
"Javanese": "爪哇语",
"Kannada": "卡纳达语",
"Kazakh": "哈萨克语",
"Khmer": "高棉语",
"Korean": "韩语",
"Kurdish": "库尔德语",
"Kyrgyz": "柯尔克孜语",
"Lao": "老挝语",
"Latin": "拉丁语",
"Latvian": "拉脱维亚语",
"Lithuanian": "立陶宛语",
"Luxembourgish": "卢森堡语",
"Macedonian": "马其顿语",
"Malagasy": "马尔加什语",
"Malay": "马来语",
"Malayalam": "马拉雅拉姆语",
"Maltese": "马耳他语",
"Maori": "毛利语",
"Marathi": "马拉语",
"Mongolian": "蒙古语",
"Nepali": "尼泊尔语",
"Norwegian Bokmål": "书面挪威语",
"Nyanja": "尼昂加语",
"Pashto": "普什图语",
"Persian": "波斯语",
"Polish": "抛光",
"Portuguese": "葡萄牙语",
"Punjabi": "旁遮普语",
"Romanian": "罗马尼亚语",
"Russian": "俄语",
"Samoan": "萨摩亚语",
"Scottish Gaelic": "苏格兰盖尔语",
"Serbian": "塞尔维亚语",
"Shona": "绍纳语",
"Sindhi": "信德语",
"Sinhala": "僧伽罗语",
"Slovak": "斯洛伐克语",
"Slovenian": "斯洛文尼亚语",
"Somali": "索马里语",
"Southern Sotho": "南索托语",
"Spanish": "西班牙语",
"Spanish (Latin America)": "西班牙语(拉丁美洲)",
"Sundanese": "巽丹语",
"Swahili": "斯瓦希里语",
"Swedish": "瑞典语",
"Tajik": "塔吉克语",
"Tamil": "泰米尔语",
"Telugu": "泰卢固语",
"Thai": "泰语",
"Turkish": "土耳其语",
"Ukrainian": "乌克兰语",
"Urdu": "乌尔都语",
"Uzbek": "乌兹别克",
"Vietnamese": "越南语",
"Welsh": "威尔士语",
"Western Frisian": "西弗里西亚语",
"Xhosa": "科萨语",
"Yiddish": "意第绪语",
"Yoruba": "约鲁巴语",
"Zulu": "祖鲁语",
"`x` years": "`x` 年",
"`x` months": "`x` 月",
"`x` weeks": "`x` 周",
"`x` days": "`x` 天",
"`x` hours": "`x` 小时",
"`x` minutes": "`x` 分钟",
"`x` seconds": "`x` 秒",
"Fallback comments: ": "后备评论:",
"Popular": "热门频道",
"Top": "热门视频",
"About": "关于",
"Rating: ": "评分:",
"Language: ": "语言:",
"View as playlist": "作为播放列表查看",
"Default": "默认",
"Music": "音乐",
"Gaming": "游戏",
"News": "新闻",
"Movies": "电影",
"Download": "下载",
"Download as: ": "下载为:",
"%A %B %-d, %Y": "%Y年%-m月%-d日 %a",
"(edited)": "(已编辑)",
"YouTube comment permalink": "YouTube 评论永久链接",
"permalink": "",
"`x` marked it with a ❤": "`x` 为此加 ❤",
"Audio mode": "音频模式",
"Video mode": "视频模式",
"Videos": "视频",
"Playlists": "播放列表",
"Community": "",
"Current version: ": "当前版本:"
}

View File

@ -1,5 +1,5 @@
name: invidious
version: 0.18.0
version: 0.19.0
authors:
- Omar Roth <omarroth@protonmail.com>
@ -9,13 +9,13 @@ targets:
main: src/invidious.cr
dependencies:
kemal:
github: kemalcr/kemal
pg:
github: will/crystal-pg
sqlite3:
github: crystal-lang/crystal-sqlite3
kemal:
github: kemalcr/kemal
crystal: 0.28.0
crystal: 0.29.0
license: AGPLv3

File diff suppressed because it is too large

View File

@ -41,6 +41,48 @@ struct ChannelVideo
end
end
def to_xml(locale, host_url, xml : XML::Builder)
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{self.id}" }
xml.element("yt:videoId") { xml.text self.id }
xml.element("yt:channelId") { xml.text self.ucid }
xml.element("title") { xml.text self.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{self.id}")
xml.element("author") do
xml.element("name") { xml.text self.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{self.ucid}" }
end
xml.element("content", type: "xhtml") do
xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
xml.element("a", href: "#{host_url}/watch?v=#{self.id}") do
xml.element("img", src: "#{host_url}/vi/#{self.id}/mqdefault.jpg")
end
end
end
xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("updated") { xml.text self.updated.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("media:group") do
xml.element("media:title") { xml.text self.title }
xml.element("media:thumbnail", url: "#{host_url}/vi/#{self.id}/mqdefault.jpg",
width: "320", height: "180")
end
end
end
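# Illustrative shape of the entry emitted by the builder above; VIDEO_ID, UCID and
# HOST_URL are placeholders, not values from the diff:
#
#   <entry>
#     <id>yt:video:VIDEO_ID</id>
#     <yt:videoId>VIDEO_ID</yt:videoId>
#     <yt:channelId>UCID</yt:channelId>
#     <title>...</title>
#     <link rel="alternate" href="HOST_URL/watch?v=VIDEO_ID"/>
#     <author>
#       <name>...</name>
#       <uri>HOST_URL/channel/UCID</uri>
#     </author>
#     <content type="xhtml">... thumbnail link ...</content>
#     <published>...</published>
#     <updated>...</updated>
#     <media:group>
#       <media:title>...</media:title>
#       <media:thumbnail url="HOST_URL/vi/VIDEO_ID/mqdefault.jpg" width="320" height="180"/>
#     </media:group>
#   </entry>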
def to_xml(locale, config, kemal_config, xml : XML::Builder | Nil = nil)
if xml
to_xml(locale, config, kemal_config, xml)
else
XML.build do |xml|
to_xml(locale, config, kemal_config, xml)
end
end
end
db_mapping({
id: String,
title: String,
@ -55,6 +97,36 @@ struct ChannelVideo
})
end
struct AboutRelatedChannel
db_mapping({
ucid: String,
author: String,
author_url: String,
author_thumbnail: String,
})
end
# TODO: Refactor into either SearchChannel or InvidiousChannel
struct AboutChannel
db_mapping({
ucid: String,
author: String,
auto_generated: Bool,
author_url: String,
author_thumbnail: String,
banner: String?,
description_html: String,
paid: Bool,
total_views: Int64,
sub_count: Int64,
joined: Time,
is_family_friendly: Bool,
allowed_regions: Array(String),
related_channels: Array(AboutRelatedChannel),
tabs: Array(String),
})
end
def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
finished_channel = Channel(String | Nil).new
@ -93,10 +165,8 @@ def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, ma
end
def get_channel(id, db, refresh = true, pull_all_videos = true)
if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
if refresh && Time.now - channel.updated > 10.minutes
if channel = db.query_one?("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
if refresh && Time.utc - channel.updated > 10.minutes
channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
channel_array = channel.to_a
args = arg_array(channel_array)
@ -177,7 +247,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
id: video_id,
title: title,
published: published,
updated: Time.now,
updated: Time.utc,
ucid: ucid,
author: author,
length_seconds: length_seconds,
@ -240,7 +310,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
id: video.id,
title: video.title,
published: video.published,
updated: Time.now,
updated: Time.utc,
ucid: video.ucid,
author: video.author,
length_seconds: video.length_seconds,
@ -254,7 +324,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
# We are notified of Red videos elsewhere (PubSub), which includes a correct published date,
# so since they don't provide a published date here we can safely ignore them.
if Time.now - video.published > 1.minute
if Time.utc - video.published > 1.minute
emails = db.query_all("UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications) RETURNING email",
video.id, video.published, video.ucid, as: String)
@ -290,31 +360,11 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
db.exec("DELETE FROM channel_videos * WHERE NOT id = ANY ('{#{ids.map { |id| %("#{id}") }.join(",")}}') AND ucid = $1", ucid)
end
channel = InvidiousChannel.new(ucid, author, Time.now, false, nil)
channel = InvidiousChannel.new(ucid, author, Time.utc, false, nil)
return channel
end
def subscribe_pubsub(ucid, key, config)
client = make_client(PUBSUB_URL)
time = Time.now.to_unix.to_s
nonce = Random::Secure.hex(4)
signature = "#{time}:#{nonce}"
host_url = make_host_url(config, Kemal.config)
body = {
"hub.callback" => "#{host_url}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
"hub.topic" => "https://www.youtube.com/xml/feeds/videos.xml?channel_id=#{ucid}",
"hub.verify" => "async",
"hub.mode" => "subscribe",
"hub.lease_seconds" => "432000",
"hub.secret" => key.to_s,
}
return client.post("/subscribe", form: body)
end
def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
client = make_client(YT_URL)
@ -378,7 +428,7 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
if auto_generated
seed = Time.unix(1525757349)
until seed >= Time.now
until seed >= Time.utc
seed += 1.month
end
timestamp = seed - (page - 1).months
@ -567,6 +617,307 @@ def extract_channel_playlists_cursor(url, auto_generated)
return cursor
end
# TODO: Add "sort_by"
def fetch_channel_community(ucid, continuation, locale, config, kemal_config, format, thin_mode)
client = make_client(YT_URL)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
response = client.get("/channel/#{ucid}/community?gl=US&hl=en", headers)
if response.status_code == 404
response = client.get("/user/#{ucid}/community?gl=US&hl=en", headers)
end
if response.status_code == 404
error_message = translate(locale, "This channel does not exist.")
raise error_message
end
ucid = response.body.match(/https:\/\/www.youtube.com\/channel\/(?<ucid>UC[a-zA-Z0-9_-]{22})/).not_nil!["ucid"]
if !continuation || continuation.empty?
initial_data = extract_initial_data(response.body)
body = initial_data["contents"]?.try &.["twoColumnBrowseResultsRenderer"]["tabs"].as_a.select { |tab| tab["tabRenderer"]?.try &.["selected"].as_bool.== true }[0]?
if !body
raise "Could not extract community tab."
end
body = body["tabRenderer"]["content"]["sectionListRenderer"]["contents"][0]["itemSectionRenderer"]
else
continuation = produce_channel_community_continuation(ucid, continuation)
headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = ""
headers["x-spf-referer"] = ""
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
post_req = {
session_token: session_token,
}
response = client.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, form: post_req)
body = JSON.parse(response.body)
body = body["response"]["continuationContents"]["itemSectionContinuation"]? ||
body["response"]["continuationContents"]["backstageCommentsContinuation"]?
if !body
raise "Could not extract continuation."
end
end
continuation = body["continuations"]?.try &.[0]["nextContinuationData"]["continuation"].as_s
posts = body["contents"].as_a
if message = posts[0]["messageRenderer"]?
error_message = (message["text"]["simpleText"]? ||
message["text"]["runs"]?.try &.[0]?.try &.["text"]?)
.try &.as_s || ""
raise error_message
end
response = JSON.build do |json|
json.object do
json.field "authorId", ucid
json.field "comments" do
json.array do
posts.each do |post|
comments = post["backstagePostThreadRenderer"]?.try &.["comments"]? ||
post["backstageCommentsContinuation"]?
post = post["backstagePostThreadRenderer"]?.try &.["post"]["backstagePostRenderer"]? ||
post["commentThreadRenderer"]?.try &.["comment"]["commentRenderer"]?
if !post
next
end
if !post["contentText"]?
content_html = ""
else
content_html = post["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |block| HTML.escape(block) }.to_s ||
content_to_comment_html(post["contentText"]["runs"].as_a).try &.to_s || ""
end
author = post["authorText"]?.try &.["simpleText"]? || ""
json.object do
json.field "author", author
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
author_thumbnail = post["authorThumbnail"]["thumbnails"].as_a[0]["url"].as_s
qualities.each do |quality|
json.object do
json.field "url", author_thumbnail.gsub(/s\d+-/, "s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
if post["authorEndpoint"]?
json.field "authorId", post["authorEndpoint"]["browseEndpoint"]["browseId"]
json.field "authorUrl", post["authorEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
else
json.field "authorId", ""
json.field "authorUrl", ""
end
published_text = post["publishedTimeText"]["runs"][0]["text"].as_s
published = decode_date(published_text.rchop(" (edited)"))
if published_text.includes?(" (edited)")
json.field "isEdited", true
else
json.field "isEdited", false
end
like_count = post["actionButtons"]["commentActionButtonsRenderer"]["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"]
.try &.as_s.gsub(/\D/, "").to_i? || 0
json.field "content", html_to_content(content_html)
json.field "contentHtml", content_html
json.field "published", published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
json.field "likeCount", like_count
json.field "commentId", post["postId"]? || post["commentId"]? || ""
json.field "authorIsChannelOwner", post["authorEndpoint"]["browseEndpoint"]["browseId"] == ucid
if attachment = post["backstageAttachment"]?
json.field "attachment" do
json.object do
case attachment.as_h
when .has_key?("videoRenderer")
attachment = attachment["videoRenderer"]
json.field "type", "video"
if !attachment["videoId"]?
error_message = (attachment["title"]["simpleText"]? ||
attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?)
json.field "error", error_message
else
video_id = attachment["videoId"].as_s
json.field "title", attachment["title"]["simpleText"].as_s
json.field "videoId", video_id
json.field "videoThumbnails" do
generate_thumbnails(json, video_id, config, kemal_config)
end
json.field "lengthSeconds", decode_length_seconds(attachment["lengthText"]["simpleText"].as_s)
author_info = attachment["ownerText"]["runs"][0].as_h
json.field "author", author_info["text"].as_s
json.field "authorId", author_info["navigationEndpoint"]["browseEndpoint"]["browseId"]
json.field "authorUrl", author_info["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
# TODO: json.field "authorThumbnails", "channelThumbnailSupportedRenderers"
# TODO: json.field "authorVerified", "ownerBadges"
published = decode_date(attachment["publishedTimeText"]["simpleText"].as_s)
json.field "published", published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
view_count = attachment["viewCountText"]["simpleText"].as_s.gsub(/\D/, "").to_i64? || 0_i64
json.field "viewCount", view_count
json.field "viewCountText", translate(locale, "`x` views", number_to_short_text(view_count))
end
when .has_key?("backstageImageRenderer")
attachment = attachment["backstageImageRenderer"]
json.field "type", "image"
json.field "imageThumbnails" do
json.array do
thumbnail = attachment["image"]["thumbnails"][0].as_h
width = thumbnail["width"].as_i
height = thumbnail["height"].as_i
aspect_ratio = (width.to_f / height.to_f)
qualities = {320, 560, 640, 1280, 2000}
qualities.each do |quality|
json.object do
json.field "url", thumbnail["url"].as_s.gsub("=s640-", "=s#{quality}-")
json.field "width", quality
json.field "height", (quality / aspect_ratio).ceil.to_i
end
end
end
end
# TODO
# when .has_key?("pollRenderer")
# attachment = attachment["pollRenderer"]
# json.field "type", "poll"
else
json.field "type", "unknown"
json.field "error", "Unrecognized attachment type."
end
end
end
end
if comments && (reply_count = (comments["backstageCommentsRenderer"]["moreText"]["simpleText"]? ||
comments["backstageCommentsRenderer"]["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
.try &.as_s.gsub(/\D/, "").to_i?)
continuation = comments["backstageCommentsRenderer"]["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
continuation ||= ""
json.field "replies" do
json.object do
json.field "replyCount", reply_count
json.field "continuation", extract_channel_community_cursor(continuation)
end
end
end
end
end
end
end
if body["continuations"]?
continuation = body["continuations"][0]["nextContinuationData"]["continuation"].as_s
json.field "continuation", extract_channel_community_cursor(continuation)
end
end
end
if format == "html"
response = JSON.parse(response)
content_html = template_youtube_comments(response, locale, thin_mode)
response = JSON.build do |json|
json.object do
json.field "contentHtml", content_html
end
end
end
return response
end
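# Rough shape of the JSON assembled above (illustrative; field names are the ones
# passed to json.field, values are placeholders):
#
#   {
#     "authorId": "UC...",
#     "comments": [
#       {
#         "author": "...", "authorThumbnails": [...],
#         "authorId": "...", "authorUrl": "...",
#         "isEdited": false,
#         "content": "...", "contentHtml": "...",
#         "published": 1562000000, "publishedText": "`x` ago",
#         "likeCount": 0, "commentId": "...",
#         "authorIsChannelOwner": false,
#         "attachment": { "type": "video" },        (or "image" / "unknown")
#         "replies": { "replyCount": 3, "continuation": "..." }
#       }
#     ],
#     "continuation": "..."
#   }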
def produce_channel_community_continuation(ucid, cursor)
cursor = URI.escape(cursor)
continuation = IO::Memory.new
continuation.write(Bytes[0xe2, 0xa9, 0x85, 0xb2, 0x02])
continuation.write(write_var_int(3 + ucid.size + write_var_int(cursor.size).size + cursor.size))
continuation.write(Bytes[0x12, ucid.size])
continuation.print(ucid)
continuation.write(Bytes[0x1a])
continuation.write(write_var_int(cursor.size))
continuation.print(cursor)
continuation.rewind
continuation = Base64.urlsafe_encode(continuation.to_slice)
continuation = URI.escape(continuation)
return continuation
end
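# Byte layout of the continuation assembled above (sketch; sizes written as |x|):
#
#   e2 a9 85 b2 02                                       fixed header
#   varint(3 + |ucid| + |varint(|cursor|)| + |cursor|)   payload length
#   12 |ucid|   followed by the ucid bytes               channel ID field
#   1a varint(|cursor|) followed by the cursor bytes     URI-escaped cursor field
#
# The buffer is then Base64-urlsafe encoded and URI-escaped to form the ctoken.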
def extract_channel_community_cursor(continuation)
continuation = URI.unescape(continuation)
continuation = Base64.decode(continuation)
# 0xe2 0xa9 0x85 0xb2 0x02
continuation += 5
total_size = read_var_int(continuation[0, 4])
continuation += write_var_int(total_size).size
# 0x12
continuation += 1
ucid_size = continuation[0]
continuation += 1
ucid = continuation[0, ucid_size]
continuation += ucid_size
# 0x1a
continuation += 1
until continuation[0] == 'E'.ord
continuation += 1
end
return String.new(continuation)
end
def get_about_info(ucid, locale)
client = make_client(YT_URL)
@ -579,14 +930,12 @@ def get_about_info(ucid, locale)
if about.xpath_node(%q(//div[contains(@class, "channel-empty-message")]))
error_message = translate(locale, "This channel does not exist.")
raise error_message
end
if about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).try &.content.empty?
error_message = about.xpath_node(%q(//div[@class="yt-alert-content"])).try &.content.strip
error_message ||= translate(locale, "Could not get channel info.")
raise error_message
end
@ -597,8 +946,63 @@ def get_about_info(ucid, locale)
sub_count ||= 0
author = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!.content
author_url = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!["href"]
author_thumbnail = about.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
ucid = about.xpath_node(%q(//meta[@itemprop="channelId"])).not_nil!["content"]
banner = about.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
if banner.includes? "channels/c4/default_banner"
banner = nil
end
description_html = about.xpath_node(%q(//div[contains(@class,"about-description")])).try &.to_s || ""
paid = about.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
is_family_friendly = about.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
allowed_regions = about.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
related_channels = about.xpath_nodes(%q(//div[contains(@class, "branded-page-related-channels")]/ul/li))
related_channels = related_channels.map do |node|
related_id = node["data-external-id"]?
related_id ||= ""
anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
related_title = anchor.try &.["title"]
related_title ||= ""
related_author_url = anchor.try &.["href"]
related_author_url ||= ""
related_author_thumbnail = node.xpath_node(%q(.//img)).try &.["data-thumb"]
related_author_thumbnail ||= ""
AboutRelatedChannel.new(
ucid: related_id,
author: related_title,
author_url: related_author_url,
author_thumbnail: related_author_thumbnail,
)
end
total_views = 0_i64
sub_count = 0_i64
joined = Time.unix(0)
metadata = about.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
when .includes? "views"
total_views = item.content.gsub(/\D/, "").to_i64
when .includes? "subscribers"
sub_count = item.content.delete("subscribers").gsub(/\D/, "").to_i64
when .includes? "Joined"
joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
end
end
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
auto_generated = false
@ -607,10 +1011,28 @@ def get_about_info(ucid, locale)
auto_generated = true
end
return {author, ucid, auto_generated, sub_count}
tabs = about.xpath_nodes(%q(//ul[@id="channel-navigation-menu"]/li/a/span)).map { |node| node.content.downcase }
return AboutChannel.new(
ucid: ucid,
author: author,
auto_generated: auto_generated,
author_url: author_url,
author_thumbnail: author_thumbnail,
banner: banner,
description_html: description_html,
paid: paid,
total_views: total_views,
sub_count: sub_count,
joined: joined,
is_family_friendly: is_family_friendly,
allowed_regions: allowed_regions,
related_channels: related_channels,
tabs: tabs
)
end
def get_60_videos(ucid, page, auto_generated, sort_by = "newest")
def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
count = 0
videos = [] of SearchVideo
@ -632,7 +1054,7 @@ def get_60_videos(ucid, page, auto_generated, sort_by = "newest")
if auto_generated
videos += extract_videos(nodeset)
else
videos += extract_videos(nodeset, ucid)
videos += extract_videos(nodeset, ucid, author)
end
else
break

View File

@ -22,6 +22,7 @@ class RedditComment
replies: RedditThing | String,
score: Int32,
depth: Int32,
permalink: String,
created_utc: {
type: Time,
converter: RedditComment::TimeConverter,
@ -56,14 +57,14 @@ class RedditListing
})
end
def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_mode, region, sort_by = "top")
video = get_video(id, db, proxies, region: region)
def fetch_youtube_comments(id, db, continuation, format, locale, thin_mode, region, sort_by = "top")
video = get_video(id, db, region: region)
session_token = video.info["session_token"]?
ctoken = produce_comment_continuation(id, cursor: "", sort_by: sort_by)
continuation ||= ctoken
if !continuation || !session_token
if !continuation || continuation.empty? || !session_token
if format == "json"
return {"comments" => [] of String}.to_json
else
@ -72,11 +73,10 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
end
post_req = {
"session_token" => session_token,
session_token: session_token,
}
post_req = HTTP::Params.encode(post_req)
client = make_client(YT_URL, proxies, video.info["region"]?)
client = make_client(YT_URL, video.info["region"]?)
headers = HTTP::Headers.new
headers["content-type"] = "application/x-www-form-urlencoded"
@ -89,7 +89,7 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
response = client.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, post_req)
response = client.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, form: post_req)
response = JSON.parse(response.body)
if !response["response"]["continuationContents"]?
@ -112,10 +112,13 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
end
end
comments = JSON.build do |json|
response = JSON.build do |json|
json.object do
if body["header"]?
comment_count = body["header"]["commentsHeaderRenderer"]["countText"]["simpleText"].as_s.delete("Comments,").to_i
count_text = body["header"]["commentsHeaderRenderer"]["countText"]
comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
.try &.as_s.gsub(/\D/, "").to_i? || 0
json.field "commentCount", comment_count
end
@ -139,16 +142,9 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
node_comment = node["commentRenderer"]
end
content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
if content_html
content_html = HTML.escape(content_html)
end
content_html ||= content_to_comment_html(node_comment["contentText"]["runs"].as_a)
content_html, content = html_to_content(content_html)
author = node_comment["authorText"]?.try &.["simpleText"]
author ||= ""
content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |block| HTML.escape(block) }.to_s ||
content_to_comment_html(node_comment["contentText"]["runs"].as_a).try &.to_s || ""
author = node_comment["authorText"]?.try &.["simpleText"]? || ""
json.field "author", author
json.field "authorThumbnails" do
@ -180,10 +176,12 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
json.field "isEdited", false
end
json.field "content", content
json.field "content", html_to_content(content_html)
json.field "contentHtml", content_html
json.field "published", published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]
@ -199,13 +197,8 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
end
if node_replies && !response["commentRepliesContinuation"]?
reply_count = node_replies["moreText"]["simpleText"].as_s.delete("View all reply replies,")
if reply_count.empty?
reply_count = 1
else
reply_count = reply_count.try &.to_i?
reply_count ||= 1
end
reply_count = (node_replies["moreText"]["simpleText"]? || node_replies["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
.try &.as_s.gsub(/\D/, "").to_i? || 1
continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
continuation ||= ""
@ -230,15 +223,15 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
end
if format == "html"
comments = JSON.parse(comments)
content_html = template_youtube_comments(comments, locale, thin_mode)
response = JSON.parse(response)
content_html = template_youtube_comments(response, locale, thin_mode)
comments = JSON.build do |json|
response = JSON.build do |json|
json.object do
json.field "contentHtml", content_html
if comments["commentCount"]?
json.field "commentCount", comments["commentCount"]
if response["commentCount"]?
json.field "commentCount", response["commentCount"]
else
json.field "commentCount", 0
end
@ -246,14 +239,15 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
end
end
return comments
return response
end
def fetch_reddit_comments(id, sort_by = "confidence")
client = make_client(REDDIT_URL)
headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by /u/omarroth)"}
query = "(url:3D#{id}%20OR%20url:#{id})%20(site:youtube.com%20OR%20site:youtu.be)"
# TODO: Use something like #479 for a static list of instances to use here
query = "(url:3D#{id}%20OR%20url:#{id})%20(site:invidio.us%20OR%20site:youtube.com%20OR%20site:youtu.be)"
search_results = client.get("/search.json?q=#{query}", headers)
if search_results.status_code == 200
@ -282,56 +276,110 @@ def fetch_reddit_comments(id, sort_by = "confidence")
end
def template_youtube_comments(comments, locale, thin_mode)
html = ""
root = comments["comments"].as_a
root.each do |child|
if child["replies"]?
replies_html = <<-END_HTML
<div id="replies" class="pure-g">
<div class="pure-u-1-24"></div>
<div class="pure-u-23-24">
<p>
<a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
onclick="get_youtube_replies(this)">#{translate(locale, "View `x` replies", child["replies"]["replyCount"].to_s)}</a>
</p>
String.build do |html|
root = comments["comments"].as_a
root.each do |child|
if child["replies"]?
replies_html = <<-END_HTML
<div id="replies" class="pure-g">
<div class="pure-u-1-24"></div>
<div class="pure-u-23-24">
<p>
<a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
onclick="get_youtube_replies(this)">#{translate(locale, "View `x` replies", number_with_separator(child["replies"]["replyCount"]))}</a>
</p>
</div>
</div>
</div>
END_HTML
end
if !thin_mode
author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).full_path}"
else
author_thumbnail = ""
end
html += <<-END_HTML
<div class="pure-g">
<div class="pure-u-4-24 pure-u-md-2-24">
<img style="width:90%;padding-right:1em;padding-top:1em" src="#{author_thumbnail}">
</div>
<div class="pure-u-20-24 pure-u-md-22-24">
<p>
<b>
<a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{child["author"]}</a>
</b>
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
<span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
|
<a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
|
<i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
END_HTML
if child["creatorHeart"]?
if !thin_mode
creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).full_path}"
else
creator_thumbnail = ""
END_HTML
end
html += <<-END_HTML
if !thin_mode
author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).full_path}"
else
author_thumbnail = ""
end
html << <<-END_HTML
<div class="pure-g" style="width:100%">
<div class="channel-profile pure-u-4-24 pure-u-md-2-24">
<img style="padding-right:1em;padding-top:1em;width:90%" src="#{author_thumbnail}">
</div>
<div class="pure-u-20-24 pure-u-md-22-24">
<p>
<b>
<a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{child["author"]}</a>
</b>
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
END_HTML
if child["attachment"]?
attachment = child["attachment"]
case attachment["type"]
when "image"
attachment = attachment["imageThumbnails"][1]
html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1 pure-u-md-1-2">
<img style="width:100%" src="/ggpht#{URI.parse(attachment["url"].as_s).full_path}">
</div>
</div>
END_HTML
when "video"
html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1 pure-u-md-1-2">
<div style="position:relative;width:100%;height:0;padding-bottom:56.25%;margin-bottom:5px">
END_HTML
if attachment["error"]?
html << <<-END_HTML
<p>#{attachment["error"]}</p>
END_HTML
else
html << <<-END_HTML
<iframe id='ivplayer' type='text/html' style='position:absolute;width:100%;height:100%;left:0;top:0' src='/embed/#{attachment["videoId"]?}' frameborder='0'></iframe>
END_HTML
end
html << <<-END_HTML
</div>
</div>
</div>
END_HTML
end
end
html << <<-END_HTML
<span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
|
END_HTML
if comments["videoId"]?
html << <<-END_HTML
<a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
|
END_HTML
elsif comments["authorId"]?
html << <<-END_HTML
<a href="https://www.youtube.com/channel/#{comments["authorId"]}/community?lb=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
|
END_HTML
end
html << <<-END_HTML
<i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
END_HTML
if child["creatorHeart"]?
if !thin_mode
creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).full_path}"
else
creator_thumbnail = ""
end
html << <<-END_HTML
<span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a ❤", child["creatorHeart"]["creatorName"].as_s)}">
<div class="creator-heart">
<img class="creator-heart-background-hearted" src="#{creator_thumbnail}"></img>
@ -340,84 +388,77 @@ def template_youtube_comments(comments, locale, thin_mode)
</div>
</div>
</span>
END_HTML
end
html << <<-END_HTML
</p>
#{replies_html}
</div>
</div>
END_HTML
end
html += <<-END_HTML
</p>
#{replies_html}
if comments["continuation"]?
html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1">
<p>
<a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
onclick="get_youtube_replies(this, true)">#{translate(locale, "Load more")}</a>
</p>
</div>
</div>
</div>
END_HTML
END_HTML
end
end
if comments["continuation"]?
html += <<-END_HTML
<div class="pure-g">
<div class="pure-u-1">
<p>
<a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
onclick="get_youtube_replies(this, true)">#{translate(locale, "Load more")}</a>
</p>
</div>
</div>
END_HTML
end
return html
end
def template_reddit_comments(root, locale)
html = ""
root.each do |child|
if child.data.is_a?(RedditComment)
child = child.data.as(RedditComment)
author = child.author
score = child.score
body_html = HTML.unescape(child.body_html)
String.build do |html|
root.each do |child|
if child.data.is_a?(RedditComment)
child = child.data.as(RedditComment)
body_html = HTML.unescape(child.body_html)
replies_html = ""
if child.replies.is_a?(RedditThing)
replies = child.replies.as(RedditThing)
replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
end
replies_html = ""
if child.replies.is_a?(RedditThing)
replies = child.replies.as(RedditThing)
replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
end
content = <<-END_HTML
<p>
<a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
<b><a href="https://www.reddit.com/user/#{author}">#{author}</a></b>
#{translate(locale, "`x` points", number_with_separator(score))}
#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}
</p>
<div>
#{body_html}
#{replies_html}
</div>
END_HTML
if child.depth > 0
html += <<-END_HTML
if child.depth > 0
html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1-24">
</div>
<div class="pure-u-23-24">
#{content}
</div>
</div>
END_HTML
else
html += <<-END_HTML
END_HTML
else
html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1">
#{content}
</div>
</div>
END_HTML
end
html << <<-END_HTML
<p>
<a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
<b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
#{translate(locale, "`x` points", number_with_separator(child.score))}
<span title="#{child.created_utc.to_s(translate(locale, "%a %B %-d %T %Y UTC"))}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
<a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
</p>
<div>
#{body_html}
#{replies_html}
</div>
</div>
</div>
END_HTML
end
end
end
return html
end
def replace_links(html)
@ -517,7 +558,7 @@ def content_to_comment_html(content)
end
text
end.join.rchop('\ufeff')
end.join("").delete('\ufeff')
return comment_html
end

View File

@ -225,3 +225,41 @@ class HTTP::Client
response
end
end
# https://github.com/will/crystal-pg/pull/171
class PG::Statement < ::DB::Statement
protected def perform_query(args : Enumerable) : ResultSet
params = args.map { |arg| PQ::Param.encode(arg) }
conn = self.conn
conn.send_parse_message(@sql)
conn.send_bind_message params
conn.send_describe_portal_message
conn.send_execute_message
conn.send_sync_message
conn.expect_frame PQ::Frame::ParseComplete
conn.expect_frame PQ::Frame::BindComplete
frame = conn.read
case frame
when PQ::Frame::RowDescription
fields = frame.fields
when PQ::Frame::NoData
fields = nil
else
raise "expected RowDescription or NoData, got #{frame}"
end
ResultSet.new(self, fields)
rescue IO::Error
raise DB::ConnectionLost.new(connection)
end
protected def perform_exec(args : Enumerable) : ::DB::ExecResult
result = perform_query(args)
result.each { }
::DB::ExecResult.new(
rows_affected: result.rows_affected,
last_insert_id: 0_i64 # postgres doesn't support this
)
rescue IO::Error
raise DB::ConnectionLost.new(connection)
end
end

View File

@ -96,6 +96,19 @@ struct Config
end
end
def disabled?(option)
case disabled = CONFIG.disable_proxy
when Bool
return disabled
when Array
if disabled.includes? option
return true
else
return false
end
end
end
YAML.mapping({
channel_threads: Int32, # Number of threads to use for crawling videos from channels (for updating subscriptions)
feed_threads: Int32, # Number of threads to use for updating feeds
@ -118,11 +131,12 @@ struct Config
default: Preferences.new(*ConfigPreferences.from_yaml("").to_tuple),
converter: ConfigPreferencesConverter,
},
dmca_content: {type: Array(String), default: [] of String}, # For compliance with DMCA, disables download widget using list of video IDs
check_tables: {type: Bool, default: false}, # Check table integrity, automatically try to add any missing columns, create tables, etc.
cache_annotations: {type: Bool, default: false}, # Cache annotations requested from IA, will not cache empty annotations or annotations that only contain cards
banner: {type: String?, default: nil}, # Optional banner to be displayed along top of page for announcements, etc.
hsts: {type: Bool?, default: true}, # Enables 'Strict-Transport-Security'. Ensure that `domain` and all subdomains are served securely
dmca_content: {type: Array(String), default: [] of String}, # For compliance with DMCA, disables download widget using list of video IDs
check_tables: {type: Bool, default: false}, # Check table integrity, automatically try to add any missing columns, create tables, etc.
cache_annotations: {type: Bool, default: false}, # Cache annotations requested from IA, will not cache empty annotations or annotations that only contain cards
banner: {type: String?, default: nil}, # Optional banner to be displayed along top of page for announcements, etc.
hsts: {type: Bool?, default: true}, # Enables 'Strict-Transport-Security'. Ensure that `domain` and all subdomains are served securely
disable_proxy: {type: Bool? | Array(String)?, default: false}, # Disable proxying server-wide: options: 'dash', 'livestreams', 'downloads', 'local'
})
end
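# Illustrative behaviour of the new option (example values only): with
#
#   disable_proxy: [dash, downloads]
#
# in the configuration, the helper above gives
#
#   CONFIG.disabled?("dash")   # => true
#   CONFIG.disabled?("local")  # => false
#
# while `disable_proxy: true` reports every proxied feature as disabled.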
@ -146,7 +160,7 @@ def rank_videos(db, n)
published = rs.read(Time)
# Exponential decay, older videos tend to rank lower
temperature = wilson_score * Math.exp(-0.000005*((Time.now - published).total_minutes))
temperature = wilson_score * Math.exp(-0.000005*((Time.utc - published).total_minutes))
top << {temperature, id}
end
end
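# For scale (worked from the constant above): the temperature halves every
# ln(2) / 0.000005 ≈ 138_629 minutes, i.e. roughly every 96 days after publication.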
@ -160,40 +174,42 @@ def rank_videos(db, n)
return top[0..n - 1]
end
def login_req(login_form, f_req)
def login_req(f_req)
data = {
# Unfortunately there's not much information available on `bgRequest`; part of Google's BotGuard
# Generally this is much longer (>1250 characters), see also
# https://github.com/ytdl-org/youtube-dl/commit/baf67a604d912722b0fe03a40e9dc5349a2208cb .
# For now this can be empty.
"bgRequest" => %|["identifier",""]|,
"pstMsg" => "1",
"checkConnection" => "youtube",
"checkedDomains" => "youtube",
"hl" => "en",
"deviceinfo" => %q([null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]),
"deviceinfo" => %|[null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]|,
"f.req" => f_req,
"flowName" => "GlifWebSignIn",
"flowEntry" => "ServiceLogin",
# "cookiesDisabled" => "false",
# "gmscoreversion" => "undefined",
# "continue" => "https://accounts.google.com/ManageAccount",
# "azt" => "",
# "bgHash" => "",
}
data = login_form.merge(data)
return HTTP::Params.encode(data)
end
def html_to_content(description_html)
if !description_html
description = ""
description_html = ""
else
description_html = description_html.to_s
description = description_html.gsub("<br>", "\n")
description = description.gsub("<br/>", "\n")
def html_to_content(description_html : String)
description = description_html.gsub(/(<br>)|(<br\/>)/, {
"<br>": "\n",
"<br/>": "\n",
})
if description.empty?
description = ""
else
description = XML.parse_html(description).content.strip("\n ")
end
if !description.empty?
description = XML.parse_html(description).content.strip("\n ")
end
return description_html, description
return description
end
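# Illustrative behaviour of the rewritten helper (example inputs, not from the diff):
#
#   html_to_content("Hello <b>world</b><br>goodbye")  # => "Hello world\ngoodbye"
#   html_to_content("")                               # => ""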
def extract_videos(nodeset, ucid = nil, author_name = nil)
@ -230,8 +246,7 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
author ||= ""
author_id ||= ""
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description_html, description = html_to_content(description_html)
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")])).try &.to_s || ""
tile = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-tile")]))
if !tile
@ -330,7 +345,6 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
author_thumbnail: author_thumbnail,
subscriber_count: subscriber_count,
video_count: video_count,
description: description,
description_html: description_html
)
else
@ -346,7 +360,7 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
published ||= Time.unix(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
rescue ex
end
published ||= Time.now
published ||= Time.utc
begin
view_count = metadata[0].content.rchop(" watching").delete(",").try &.to_i64?
@ -396,7 +410,6 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
ucid: author_id,
published: published,
views: view_count,
description: description,
description_html: description_html,
length_seconds: length_seconds,
live_now: live_now,
@ -522,7 +535,7 @@ def analyze_table(db, logger, table_name, struct_type = nil)
begin
db.exec("SELECT * FROM #{table_name} LIMIT 0")
rescue ex
logger.write("CREATE TABLE #{table_name}\n")
logger.puts("CREATE TABLE #{table_name}")
db.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@ -546,7 +559,7 @@ def analyze_table(db, logger, table_name, struct_type = nil)
if name != column_array[i]?
if !column_array[i]?
new_column = column_types.select { |line| line.starts_with? name }[0]
logger.write("ALTER TABLE #{table_name} ADD COLUMN #{new_column}\n")
logger.puts("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
next
end
@ -564,26 +577,29 @@ def analyze_table(db, logger, table_name, struct_type = nil)
# There's a column we didn't expect
if !new_column
logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}\n")
logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
column_array = get_column_array(db, table_name)
next
end
logger.write("ALTER TABLE #{table_name} ADD COLUMN #{new_column}\n")
logger.puts("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
logger.write("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}\n")
logger.puts("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE\n")
logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
logger.write("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}\n")
logger.puts("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
column_array = get_column_array(db, table_name)
end
else
logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE\n")
logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
end
end
@ -635,33 +651,48 @@ def cache_annotation(db, id, annotations)
end
def proxy_file(response, env)
if !response.body_io?
return
end
if response.headers.includes_word?("Content-Encoding", "gzip")
Gzip::Writer.open(env.response) do |deflate|
copy_in_chunks(response.body_io, deflate)
response.pipe(deflate)
end
elsif response.headers.includes_word?("Content-Encoding", "deflate")
Flate::Writer.open(env.response) do |deflate|
copy_in_chunks(response.body_io, deflate)
response.pipe(deflate)
end
else
copy_in_chunks(response.body_io, env.response)
response.pipe(env.response)
end
end
# https://stackoverflow.com/a/44802810 <3
def copy_in_chunks(input, output, chunk_size = 4096)
size = 1
while size > 0
size = IO.copy(input, output, chunk_size)
Fiber.yield
class HTTP::Client::Response
def pipe(io)
HTTP.serialize_body(io, headers, @body, @body_io, @version)
end
end
def create_notification_stream(env, proxies, config, kemal_config, decrypt_function, topics, connection_channel)
# Supports serialize_body without first writing headers
module HTTP
def self.serialize_body(io, headers, body, body_io, version)
if body
io << body
elsif body_io
content_length = content_length(headers)
if content_length
copied = IO.copy(body_io, io)
if copied != content_length
raise ArgumentError.new("Content-Length header is #{content_length} but body had #{copied} bytes")
end
elsif Client::Response.supports_chunked?(version)
headers["Transfer-Encoding"] = "chunked"
serialize_chunked_body(io, body_io)
else
io << body
end
end
end
end
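A hedged sketch of how the proxying path above might be used from a route; the route, URL and error handling are placeholders, not the project's actual handlers:
get "/videoplayback" do |env|
  client = make_client(URI.parse("https://redirector.example.googlevideo.com"))
  client.get(env.request.resource, env.request.headers) do |response|
    env.response.status_code = response.status_code
    # `pipe` re-serializes the upstream body onto the client connection,
    # preserving Content-Length or falling back to chunked transfer
    proxy_file(response, env)
  end
end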
def create_notification_stream(env, config, kemal_config, decrypt_function, topics, connection_channel)
connection = Channel(PQ::Notification).new(8)
connection_channel.send({true, connection})
@ -676,10 +707,10 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
loop do
time_span = [0, 0, 0, 0]
time_span[rand(4)] = rand(30) + 5
published = Time.now - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
published = Time.utc - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
video_id = TEST_IDS[rand(TEST_IDS.size)]
video = get_video(video_id, PG_DB, proxies)
video = get_video(video_id, PG_DB)
video.published = published
response = JSON.parse(video.to_json(locale, config, kemal_config, decrypt_function))
@ -700,6 +731,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
id += 1
sleep 1.minute
Fiber.yield
end
rescue ex
end
@ -754,7 +786,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
next
end
video = get_video(video_id, PG_DB, proxies)
video = get_video(video_id, PG_DB)
video.published = Time.unix(published)
response = JSON.parse(video.to_json(locale, config, Kemal.config, decrypt_function))
@ -783,7 +815,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
begin
# Send heartbeat
loop do
env.response.puts ":keepalive #{Time.now.to_unix}"
env.response.puts ":keepalive #{Time.utc.to_unix}"
env.response.puts
env.response.flush
sleep (20 + rand(11)).seconds
@ -793,3 +825,12 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
connection_channel.send({false, connection})
end
end
def extract_initial_data(body)
initial_data = body.match(/window\["ytInitialData"\] = (?<info>.*?);\n/).try &.["info"] || "{}"
if initial_data.starts_with?("JSON.parse(\"")
return JSON.parse(JSON.parse(%({"initial_data":"#{initial_data[12..-3]}"}))["initial_data"].as_s)
else
return JSON.parse(initial_data)
end
end
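Two illustrative inputs (hypothetical, trimmed-down page bodies) showing the branches above:
plain = %(window["ytInitialData"] = {"contents":{}};\n)
extract_initial_data(plain)["contents"] # parsed directly as JSON

wrapped = %q(window["ytInitialData"] = JSON.parse("{\"contents\":{}}");) + "\n"
extract_initial_data(wrapped)["contents"] # inner string is unescaped first, then parsed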

View File

@ -22,12 +22,12 @@ def refresh_channels(db, logger, config)
begin
channel = fetch_channel(id, db, config.full_refresh)
db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.now, channel.author, id)
db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
rescue ex
if ex.message == "Deleted or invalid channel"
db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.now, id)
db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
end
logger.write("#{id} : #{ex.message}\n")
logger.puts("#{id} : #{ex.message}")
end
active_channel.send(true)
@ -36,6 +36,7 @@ def refresh_channels(db, logger, config)
end
sleep 1.minute
Fiber.yield
end
end
@ -68,14 +69,14 @@ def refresh_feeds(db, logger, config)
column_array = get_column_array(db, view_name)
ChannelVideo.to_type_tuple.each_with_index do |name, i|
if name != column_array[i]?
logger.write("DROP MATERIALIZED VIEW #{view_name}\n")
logger.puts("DROP MATERIALIZED VIEW #{view_name}")
db.exec("DROP MATERIALIZED VIEW #{view_name}")
raise "view does not exist"
end
end
if db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "ucid = ANY"
logger.write("Materialized view #{view_name} is out-of-date, recreating...\n")
if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
logger.puts("Materialized view #{view_name} is out-of-date, recreating...")
db.exec("DROP MATERIALIZED VIEW #{view_name}")
end
@ -87,21 +88,18 @@ def refresh_feeds(db, logger, config)
legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
logger.write("RENAME MATERIALIZED VIEW #{legacy_view_name}\n")
logger.puts("RENAME MATERIALIZED VIEW #{legacy_view_name}")
db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
rescue ex
begin
# While iterating through, we may have an email stored from a deleted account
if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
logger.write("CREATE #{view_name}\n")
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE
ucid IN (SELECT unnest(subscriptions) FROM users WHERE email = E'#{email.gsub("'", "\\'")}')
ORDER BY published DESC")
logger.puts("CREATE #{view_name}")
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
end
rescue ex
logger.write("REFRESH #{email} : #{ex.message}\n")
logger.puts("REFRESH #{email} : #{ex.message}")
end
end
end
@ -112,6 +110,7 @@ def refresh_feeds(db, logger, config)
end
sleep 5.seconds
Fiber.yield
end
end
@ -151,7 +150,7 @@ def subscribe_to_feeds(db, logger, key, config)
response = subscribe_pubsub(ucid, key, config)
if response.status_code >= 400
logger.write("#{ucid} : #{response.body}\n")
logger.puts("#{ucid} : #{response.body}")
end
rescue ex
end
@ -162,6 +161,7 @@ def subscribe_to_feeds(db, logger, key, config)
end
sleep 1.minute
Fiber.yield
end
end
@ -174,12 +174,16 @@ def pull_top_videos(config, db)
begin
top = rank_videos(db, 40)
rescue ex
sleep 1.minute
Fiber.yield
next
end
if top.size > 0
args = arg_array(top)
else
if top.size == 0
sleep 1.minute
Fiber.yield
next
end
@ -194,22 +198,23 @@ def pull_top_videos(config, db)
end
yield videos
sleep 1.minute
Fiber.yield
end
end
def pull_popular_videos(db)
loop do
subscriptions = db.query_all("SELECT channel FROM \
(SELECT UNNEST(subscriptions) AS channel FROM users) AS d \
GROUP BY channel ORDER BY COUNT(channel) DESC LIMIT 40", as: String)
videos = db.query_all("SELECT DISTINCT ON (ucid) * FROM \
channel_videos WHERE ucid IN (#{arg_array(subscriptions)}) \
ORDER BY ucid, published DESC", subscriptions, as: ChannelVideo).sort_by { |video| video.published }.reverse
videos = db.query_all("SELECT DISTINCT ON (ucid) * FROM channel_videos WHERE ucid IN \
(SELECT channel FROM (SELECT UNNEST(subscriptions) AS channel FROM users) AS d \
GROUP BY channel ORDER BY COUNT(channel) DESC LIMIT 40) \
ORDER BY ucid, published DESC", as: ChannelVideo).sort_by { |video| video.published }.reverse
yield videos
sleep 1.minute
Fiber.yield
end
end
@ -217,12 +222,13 @@ def update_decrypt_function
loop do
begin
decrypt_function = fetch_decrypt_function
yield decrypt_function
rescue ex
next
end
yield decrypt_function
sleep 1.minute
Fiber.yield
end
end
@ -237,5 +243,6 @@ def find_working_proxies(regions)
end
sleep 1.minute
Fiber.yield
end
end

View File

@ -1,13 +1,20 @@
require "logger"
enum LogLevel
Debug
Info
Warn
Error
end
class Invidious::LogHandler < Kemal::BaseLogHandler
def initialize(@io : IO = STDOUT)
def initialize(@io : IO = STDOUT, @level = LogLevel::Warn)
end
def call(context : HTTP::Server::Context)
time = Time.now
time = Time.utc
call_next(context)
elapsed_text = elapsed_text(Time.now - time)
elapsed_text = elapsed_text(Time.utc - time)
@io << time << ' ' << context.response.status_code << ' ' << context.request.method << ' ' << context.request.resource << ' ' << elapsed_text << '\n'
@ -18,7 +25,15 @@ class Invidious::LogHandler < Kemal::BaseLogHandler
context
end
def write(message : String)
def puts(message : String)
@io << message << '\n'
if @io.is_a? File
@io.flush
end
end
def write(message : String, level = @level)
@io << message
if @io.is_a? File
@ -26,6 +41,29 @@ class Invidious::LogHandler < Kemal::BaseLogHandler
end
end
def set_log_level(level : String)
case level.downcase
when "debug"
set_log_level(LogLevel::Debug)
when "info"
set_log_level(LogLevel::Info)
when "warn"
set_log_level(LogLevel::Warn)
when "error"
set_log_level(LogLevel::Error)
end
end
def set_log_level(level : LogLevel)
@level = level
end
{% for level in %w(debug info warn error) %}
def {{level.id}}(message : String)
puts(message, LogLevel::{{level.id.capitalize}})
end
{% end %}
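A minimal usage sketch of the handler above, assuming it is constructed directly:
logger = Invidious::LogHandler.new(STDOUT, LogLevel::Info)
logger.set_log_level("debug") # accepts a level name or a LogLevel member
logger.info("pulling feed")   # generated by the macro; forwards to puts with LogLevel::Info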
private def elapsed_text(elapsed)
millis = elapsed.total_milliseconds
return "#{millis.round(2)}ms" if millis >= 1

View File

@ -0,0 +1,194 @@
# Since systems have a limit on the number of open files (`ulimit -a`),
# we serve static assets from memory to avoid 'Too many open files' without needing
# to modify ulimit.
#
# Very heavily based on:
# https://github.com/kemalcr/kemal/blob/master/src/kemal/helpers/helpers.cr
# https://github.com/kemalcr/kemal/blob/master/src/kemal/static_file_handler.cr
#
# Changes:
# - A `send_file` overload is added which supports sending a Slice, file_path, filestat
# - `StaticFileHandler` is patched to cache to and serve from @cached_files
private def multipart(file, env : HTTP::Server::Context)
# See http://httpwg.org/specs/rfc7233.html
fileb = file.size
startb = endb = 0
if match = env.request.headers["Range"].match /bytes=(\d{1,})-(\d{0,})/
startb = match[1].to_i { 0 } if match.size >= 2
endb = match[2].to_i { 0 } if match.size >= 3
end
endb = fileb - 1 if endb == 0
if startb < endb < fileb
content_length = 1 + endb - startb
env.response.status_code = 206
env.response.content_length = content_length
env.response.headers["Accept-Ranges"] = "bytes"
env.response.headers["Content-Range"] = "bytes #{startb}-#{endb}/#{fileb}" # MUST
if startb > 1024
skipped = 0
# file.skip only accepts values less than or equal to 1024 (the buffer size, undocumented)
until (increase_skipped = skipped + 1024) > startb
file.skip(1024)
skipped = increase_skipped
end
if (skipped_minus_startb = skipped - startb) > 0
file.skip skipped_minus_startb
end
else
file.skip(startb)
end
IO.copy(file, env.response, content_length)
else
env.response.content_length = fileb
env.response.status_code = 200 # Range not satisfiable, see RFC 7233 4.4 Note
IO.copy(file, env.response)
end
end
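A worked example of the arithmetic above, assuming a 4096-byte file and the header "Range: bytes=0-1023":
startb, endb, fileb = 0, 1023, 4096
content_length = 1 + endb - startb # => 1024
# Response: 206 Partial Content, Content-Range: "bytes 0-1023/4096"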
# Set the Content-Disposition to "attachment" with the specified filename,
# instructing user agents to prompt the user to save the file.
private def attachment(env : HTTP::Server::Context, filename : String? = nil, disposition : String? = nil)
disposition = "attachment" if disposition.nil? && filename
if disposition && filename
env.response.headers["Content-Disposition"] = "#{disposition}; filename=\"#{File.basename(filename)}\""
end
end
def send_file(env : HTTP::Server::Context, file_path : String, data : Slice(UInt8), filestat : File::Info, filename : String? = nil, disposition : String? = nil)
config = Kemal.config.serve_static
mime_type = MIME.from_filename(file_path, "application/octet-stream")
env.response.content_type = mime_type
env.response.headers["Accept-Ranges"] = "bytes"
env.response.headers["X-Content-Type-Options"] = "nosniff"
minsize = 860 # http://webmasters.stackexchange.com/questions/31750/what-is-recommended-minimum-object-size-for-gzip-performance-benefits ??
request_headers = env.request.headers
filesize = data.bytesize
attachment(env, filename, disposition)
Kemal.config.static_headers.try(&.call(env.response, file_path, filestat))
file = IO::Memory.new(data)
if env.request.method == "GET" && env.request.headers.has_key?("Range")
return multipart(file, env)
end
condition = config.is_a?(Hash) && config["gzip"]? == true && filesize > minsize && Kemal::Utils.zip_types(file_path)
if condition && request_headers.includes_word?("Accept-Encoding", "gzip")
env.response.headers["Content-Encoding"] = "gzip"
Gzip::Writer.open(env.response) do |deflate|
IO.copy(file, deflate)
end
elsif condition && request_headers.includes_word?("Accept-Encoding", "deflate")
env.response.headers["Content-Encoding"] = "deflate"
Flate::Writer.open(env.response) do |deflate|
IO.copy(file, deflate)
end
else
env.response.content_length = filesize
IO.copy(file, env.response)
end
return
end
module Kemal
class StaticFileHandler < HTTP::StaticFileHandler
CACHE_LIMIT = 5_000_000 # 5MB
@cached_files = {} of String => {data: Bytes, filestat: File::Info}
def call(context : HTTP::Server::Context)
return call_next(context) if context.request.path.not_nil! == "/"
case context.request.method
when "GET", "HEAD"
else
if @fallthrough
call_next(context)
else
context.response.status_code = 405
context.response.headers.add("Allow", "GET, HEAD")
end
return
end
config = Kemal.config.serve_static
original_path = context.request.path.not_nil!
request_path = URI.unescape(original_path)
# The file path cannot contain '\0' (NUL), since no filesystem I know of
# accepts '\0' in a file name.
if request_path.includes? '\0'
context.response.status_code = 400
return
end
expanded_path = File.expand_path(request_path, "/")
is_dir_path = if original_path.ends_with?('/') && !expanded_path.ends_with? '/'
expanded_path = expanded_path + '/'
true
else
expanded_path.ends_with? '/'
end
file_path = File.join(@public_dir, expanded_path)
if file = @cached_files[file_path]?
last_modified = file[:filestat].modification_time
add_cache_headers(context.response.headers, last_modified)
if cache_request?(context, last_modified)
context.response.status_code = 304
return
end
send_file(context, file_path, file[:data], file[:filestat])
else
is_dir = Dir.exists? file_path
if request_path != expanded_path
redirect_to context, expanded_path
elsif is_dir && !is_dir_path
redirect_to context, expanded_path + '/'
end
if Dir.exists?(file_path)
if config.is_a?(Hash) && config["dir_listing"] == true
context.response.content_type = "text/html"
directory_listing(context.response, request_path, file_path)
else
call_next(context)
end
elsif File.exists?(file_path)
last_modified = modification_time(file_path)
add_cache_headers(context.response.headers, last_modified)
if cache_request?(context, last_modified)
context.response.status_code = 304
return
end
if @cached_files.sum { |element| element[1][:data].bytesize } + (size = File.size(file_path)) < CACHE_LIMIT
data = Bytes.new(size)
File.open(file_path) do |file|
file.read(data)
end
filestat = File.info(file_path)
@cached_files[file_path] = {data: data, filestat: filestat}
send_file(context, file_path, data, filestat)
else
send_file(context, file_path)
end
else
call_next(context)
end
end
end
end
end

View File

@ -1,6 +1,6 @@
def generate_token(email, scopes, expire, key, db)
session = "v1:#{Base64.urlsafe_encode(Random::Secure.random_bytes(32))}"
PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", session, email, Time.now)
PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", session, email, Time.utc)
token = {
"session" => session,
@ -18,7 +18,7 @@ def generate_token(email, scopes, expire, key, db)
end
def generate_response(session, scopes, key, db, expire = 6.hours, use_nonce = false)
expire = Time.now + expire
expire = Time.utc + expire
token = {
"session" => session,
@ -85,8 +85,8 @@ def validate_request(token, session, request, key, db, locale = nil)
end
if token["nonce"]? && (nonce = db.query_one?("SELECT * FROM nonces WHERE nonce = $1", token["nonce"], as: {String, Time}))
if nonce[1] > Time.now
db.exec("UPDATE nonces SET expire = $1 WHERE nonce = $2", Time.new(1990, 1, 1), nonce[0])
if nonce[1] > Time.utc
db.exec("UPDATE nonces SET expire = $1 WHERE nonce = $2", Time.utc(1990, 1, 1), nonce[0])
else
raise translate(locale, "Erroneous token")
end
@ -100,7 +100,7 @@ def validate_request(token, session, request, key, db, locale = nil)
end
expire = token["expire"]?.try &.as_i
if expire.try &.< Time.now.to_unix
if expire.try &.< Time.utc.to_unix
raise translate(locale, "Token is expired, please try again")
end

View File

@ -18,24 +18,13 @@ def elapsed_text(elapsed)
"#{(millis * 1000).round(2)}µs"
end
def make_client(url : URI, proxies = {} of String => Array({ip: String, port: Int32}), region = nil)
context = nil
if url.scheme == "https"
context = OpenSSL::SSL::Context::Client.new
context.add_options(
OpenSSL::SSL::Options::ALL |
OpenSSL::SSL::Options::NO_SSL_V2 |
OpenSSL::SSL::Options::NO_SSL_V3
)
end
client = HTTPClient.new(url, context)
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
def make_client(url : URI, region = nil)
client = HTTPClient.new(url)
client.read_timeout = 15.seconds
client.connect_timeout = 15.seconds
if region
proxies[region]?.try &.sample(40).each do |proxy|
PROXY_LIST[region]?.try &.sample(40).each do |proxy|
begin
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
client.set_proxy(proxy)
@ -90,7 +79,7 @@ def decode_time(string)
millis = /(?<millis>\d+)ms/.match(string).try &.["millis"].try &.to_f
millis ||= 0
time = hours * 3600 + minutes * 60 + seconds + millis / 1000
time = hours * 3600 + minutes * 60 + seconds + millis // 1000
end
return time
@ -99,7 +88,7 @@ end
def decode_date(string : String)
# String matches 'YYYY'
if string.match(/^\d{4}/)
return Time.new(string.to_i, 1, 1)
return Time.utc(string.to_i, 1, 1)
end
# Try to parse as format Jul 10, 2000
@ -110,9 +99,9 @@ def decode_date(string : String)
case string
when "today"
return Time.now
return Time.utc
when "yesterday"
return Time.now - 1.day
return Time.utc - 1.day
end
# String matches format "20 hours ago", "4 months ago"...
@ -138,18 +127,18 @@ def decode_date(string : String)
raise "Could not parse #{string}"
end
return Time.now - delta
return Time.utc - delta
end
def recode_date(time : Time, locale)
span = Time.now - time
span = Time.utc - time
if span.total_days > 365.0
span = translate(locale, "`x` years", (span.total_days.to_i / 365).to_s)
span = translate(locale, "`x` years", (span.total_days.to_i // 365).to_s)
elsif span.total_days > 30.0
span = translate(locale, "`x` months", (span.total_days.to_i / 30).to_s)
span = translate(locale, "`x` months", (span.total_days.to_i // 30).to_s)
elsif span.total_days > 7.0
span = translate(locale, "`x` weeks", (span.total_days.to_i / 7).to_s)
span = translate(locale, "`x` weeks", (span.total_days.to_i // 7).to_s)
elsif span.total_hours > 24.0
span = translate(locale, "`x` days", (span.total_days.to_i).to_s)
elsif span.total_minutes > 60.0
@ -194,11 +183,11 @@ def number_to_short_text(number)
text = text.rchop(".0")
if number / 1_000_000_000 != 0
if number // 1_000_000_000 != 0
text += "B"
elsif number / 1_000_000 != 0
elsif number // 1_000_000 != 0
text += "M"
elsif number / 1000 != 0
elsif number // 1000 != 0
text += "K"
end
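The switch from `/` to `//` matters because newer Crystal releases make `Int#/` return a Float, so the old comparisons would almost never equal zero; floor division restores the intended thresholds:
1_500_000 // 1_000_000 # => 1, non-zero, so the "M" suffix is chosen
999 // 1_000           # => 0, stays below the "K" threshold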
@ -327,3 +316,52 @@ def sha256(text)
digest << text
return digest.hexdigest
end
def subscribe_pubsub(topic, key, config)
case topic
when .match(/^UC[A-Za-z0-9_-]{22}$/)
topic = "channel_id=#{topic}"
when .match(/^(PL|LL|EC|UU|FL|UL|OLAK5uy_)[0-9A-Za-z-_]{10,}$/)
# A couple of prefixes are missing from the above regex, namely TL and RD,
# which don't have feeds
topic = "playlist_id=#{topic}"
else
# TODO
end
client = make_client(PUBSUB_URL)
time = Time.utc.to_unix.to_s
nonce = Random::Secure.hex(4)
signature = "#{time}:#{nonce}"
host_url = make_host_url(config, Kemal.config)
body = {
"hub.callback" => "#{host_url}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
"hub.topic" => "https://www.youtube.com/xml/feeds/videos.xml?#{topic}",
"hub.verify" => "async",
"hub.mode" => "subscribe",
"hub.lease_seconds" => "432000",
"hub.secret" => key.to_s,
}
return client.post("/subscribe", form: body)
end
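A hedged sketch of the signed callback path built above, with placeholder values; the webhook route can recompute the same HMAC-SHA1 over "time:nonce" to verify incoming requests:
require "openssl/hmac"

key = "example-hmac-key" # placeholder; the real key comes from the server config
time = "1562970000"
nonce = "deadbeef"
digest = OpenSSL::HMAC.hexdigest(:sha1, key, "#{time}:#{nonce}")
callback_path = "/feed/webhook/v1:#{time}:#{nonce}:#{digest}"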
def parse_range(range)
if !range
return 0_i64, nil
end
ranges = range.lchop("bytes=").split(',')
ranges.each do |range|
start_range, end_range = range.split('-')
start_range = start_range.to_i64? || 0_i64
end_range = end_range.to_i64?
return start_range, end_range
end
return 0_i64, nil
end
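Illustrative calls; note that only the first range in a multi-range header is honored:
parse_range("bytes=0-1023") # => {0_i64, 1023_i64}
parse_range("bytes=512-")   # => {512_i64, nil}
parse_range(nil)            # => {0_i64, nil}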

View File

@ -6,7 +6,7 @@ struct MixVideo
ucid: String,
length_seconds: Int32,
index: Int32,
mixes: Array(String),
rdid: String,
})
end
@ -28,18 +28,13 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
end
response = client.get("/watch?v=#{video_id}&list=#{rdid}&gl=US&hl=en&has_verified=1&bpctr=9999999999", headers)
yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
if yt_data
yt_data = JSON.parse(yt_data["data"].rchop(";"))
else
initial_data = extract_initial_data(response.body)
if !initial_data["contents"]["twoColumnWatchNextResults"]["playlist"]?
raise translate(locale, "Could not create mix.")
end
if !yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]?
raise translate(locale, "Could not create mix.")
end
playlist = yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]["playlist"]
playlist = initial_data["contents"]["twoColumnWatchNextResults"]["playlist"]["playlist"]
mix_title = playlist["title"].as_s
contents = playlist["contents"].as_a
@ -70,7 +65,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
ucid,
length_seconds,
index,
[rdid]
rdid
)
end

View File

@ -1,4 +1,32 @@
struct PlaylistVideo
def to_json(locale, config, kemal_config, json : JSON::Builder)
json.object do
json.field "title", self.title
json.field "videoId", self.id
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "videoThumbnails" do
generate_thumbnails(json, self.id, config, kemal_config)
end
json.field "index", self.index
json.field "lengthSeconds", self.length_seconds
end
end
def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
if json
to_json(locale, config, kemal_config, json)
else
JSON.build do |json|
to_json(locale, config, kemal_config, json)
end
end
end
db_mapping({
title: String,
id: String,
@ -6,7 +34,7 @@ struct PlaylistVideo
ucid: String,
length_seconds: Int32,
published: Time,
playlists: Array(String),
plid: String,
index: Int32,
live_now: Bool,
})
@ -19,7 +47,6 @@ struct Playlist
author: String,
author_thumbnail: String,
ucid: String,
description: String,
description_html: String,
video_count: Int32,
views: Int64,
@ -114,8 +141,8 @@ def extract_playlist(plid, nodeset, index)
author: author,
ucid: ucid,
length_seconds: length_seconds,
published: Time.now,
playlists: [plid],
published: Time.utc,
plid: plid,
index: index + offset,
live_now: live_now
)
@ -186,9 +213,8 @@ def fetch_playlist(plid, locale)
end
title = title.content.strip(" \n")
description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description_html ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
description_html, description = html_to_content(description_html)
description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1])).try &.to_s ||
document.xpath_node(%q(//span[@class="pl-header-description-text"])).try &.to_s || ""
# YouTube allows anonymous playlists, so most of this can be empty or optional
anchor = document.xpath_node(%q(//ul[@class="pl-header-details"]))
@ -208,7 +234,7 @@ def fetch_playlist(plid, locale)
if updated
updated = decode_date(updated)
else
updated = Time.now
updated = Time.utc
end
playlist = Playlist.new(
@ -217,7 +243,6 @@ def fetch_playlist(plid, locale)
author: author,
author_thumbnail: author_thumbnail,
ucid: ucid,
description: description,
description_html: description_html,
video_count: video_count,
views: views,

View File

@ -1,4 +1,92 @@
struct SearchVideo
def to_xml(host_url, auto_generated, xml : XML::Builder)
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{self.id}" }
xml.element("yt:videoId") { xml.text self.id }
xml.element("yt:channelId") { xml.text self.ucid }
xml.element("title") { xml.text self.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{self.id}")
xml.element("author") do
if auto_generated
xml.element("name") { xml.text self.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{self.ucid}" }
else
xml.element("name") { xml.text author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
end
xml.element("content", type: "xhtml") do
xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
xml.element("a", href: "#{host_url}/watch?v=#{self.id}") do
xml.element("img", src: "#{host_url}/vi/#{self.id}/mqdefault.jpg")
end
end
end
xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("media:group") do
xml.element("media:title") { xml.text self.title }
xml.element("media:thumbnail", url: "#{host_url}/vi/#{self.id}/mqdefault.jpg",
width: "320", height: "180")
xml.element("media:description") { xml.text html_to_content(self.description_html) }
end
xml.element("media:community") do
xml.element("media:statistics", views: self.views)
end
end
end
def to_xml(host_url, auto_generated, xml : XML::Builder | Nil = nil)
if xml
to_xml(host_url, auto_generated, xml)
else
XML.build do |json|
to_xml(host_url, auto_generated, xml)
end
end
end
def to_json(locale, config, kemal_config, json : JSON::Builder)
json.object do
json.field "type", "video"
json.field "title", self.title
json.field "videoId", self.id
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "videoThumbnails" do
generate_thumbnails(json, self.id, config, kemal_config)
end
json.field "description", html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html
json.field "viewCount", self.views
json.field "published", self.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
json.field "lengthSeconds", self.length_seconds
json.field "liveNow", self.live_now
json.field "paid", self.paid
json.field "premium", self.premium
end
end
def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
if json
to_json(locale, config, kemal_config, json)
else
JSON.build do |json|
to_json(locale, config, kemal_config, json)
end
end
end
db_mapping({
title: String,
id: String,
@ -6,7 +94,6 @@ struct SearchVideo
ucid: String,
published: Time,
views: Int64,
description: String,
description_html: String,
length_seconds: Int32,
live_now: Bool,
@ -25,6 +112,45 @@ struct SearchPlaylistVideo
end
struct SearchPlaylist
def to_json(locale, config, kemal_config, json : JSON::Builder)
json.object do
json.field "type", "playlist"
json.field "title", self.title
json.field "playlistId", self.id
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "videoCount", self.video_count
json.field "videos" do
json.array do
self.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "lengthSeconds", video.length_seconds
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
end
end
end
end
end
end
def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
if json
to_json(locale, config, kemal_config, json)
else
JSON.build do |json|
to_json(locale, config, kemal_config, json)
end
end
end
db_mapping({
title: String,
id: String,
@ -37,13 +163,50 @@ struct SearchPlaylist
end
struct SearchChannel
def to_json(locale, config, kemal_config, json : JSON::Builder)
json.object do
json.field "type", "channel"
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", self.author_thumbnail.gsub("=s176-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCount", self.subscriber_count
json.field "videoCount", self.video_count
json.field "description", html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html
end
end
def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
if json
to_json(locale, config, kemal_config, json)
else
JSON.build do |json|
to_json(locale, config, kemal_config, json)
end
end
end
db_mapping({
author: String,
ucid: String,
author_thumbnail: String,
subscriber_count: Int32,
video_count: Int32,
description: String,
description_html: String,
})
end
@ -93,8 +256,8 @@ def channel_search(query, page, channel)
return count, items
end
def search(query, page = 1, search_params = produce_search_params(content_type: "all"), proxies = nil, region = nil)
client = make_client(YT_URL, proxies, region)
def search(query, page = 1, search_params = produce_search_params(content_type: "all"), region = nil)
client = make_client(YT_URL, region)
if query.empty?
return {0, [] of SearchItem}
end

View File

@ -1,4 +1,4 @@
def fetch_trending(trending_type, proxies, region, locale)
def fetch_trending(trending_type, region, locale)
client = make_client(YT_URL)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
@ -14,14 +14,9 @@ def fetch_trending(trending_type, proxies, region, locale)
response = client.get("/feed/trending?gl=#{region}&hl=en", headers).body
yt_data = response.match(/window\["ytInitialData"\] = (?<data>.*);/)
if yt_data
yt_data = JSON.parse(yt_data["data"].rchop(";"))
else
raise translate(locale, "Could not pull trending pages.")
end
initial_data = extract_initial_data(response)
tabs = yt_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["subMenu"]["channelListSubMenuRenderer"]["contents"].as_a
tabs = initial_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["subMenu"]["channelListSubMenuRenderer"]["contents"].as_a
url = tabs.select { |tab| tab["channelListSubMenuAvatarRenderer"]["title"]["simpleText"] == trending_type }[0]?
if url

View File

@ -1,5 +1,8 @@
require "crypto/bcrypt/password"
# Materialized views may not be defined using bound parameters (`$1` as used elsewhere)
MATERIALIZED_VIEW_SQL = ->(email : String) { "SELECT cv.* FROM channel_videos cv WHERE EXISTS (SELECT subscriptions FROM users u WHERE cv.ucid = ANY (u.subscriptions) AND u.email = E'#{email.gsub({'\'' => "\\'", '\\' => "\\\\"})}') ORDER BY published DESC" }
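Because the email is interpolated rather than bound, it is escaped for the E'' literal; a sketch with a hypothetical address:
MATERIALIZED_VIEW_SQL.call("o'brien@example.com")
# => "SELECT cv.* FROM channel_videos cv WHERE EXISTS (... u.email = E'o\'brien@example.com') ORDER BY published DESC"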
struct User
module PreferencesConverter
def self.from_rs(rs)
@ -41,10 +44,10 @@ struct Preferences
begin
result = [] of String
value.read_array do
result << HTML.escape(value.read_string)
result << HTML.escape(value.read_string[0, 100])
end
rescue ex
result = [HTML.escape(value.read_string), ""]
result = [HTML.escape(value.read_string[0, 100]), ""]
end
result
@ -70,11 +73,11 @@ struct Preferences
node.raise "Expected scalar, not #{item.class}"
end
result << HTML.escape(item.value)
result << HTML.escape(item.value[0, 100])
end
rescue ex
if node.is_a?(YAML::Nodes::Scalar)
result = [HTML.escape(node.value), ""]
result = [HTML.escape(node.value[0, 100]), ""]
else
result = ["", ""]
end
@ -84,13 +87,13 @@ struct Preferences
end
end
module EscapeString
module ProcessString
def self.to_json(value : String, json : JSON::Builder)
json.string value
end
def self.from_json(value : JSON::PullParser) : String
HTML.escape(value.read_string)
HTML.escape(value.read_string[0, 100])
end
def self.to_yaml(value : String, yaml : YAML::Nodes::Builder)
@ -98,7 +101,25 @@ struct Preferences
end
def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : String
HTML.escape(node.value)
HTML.escape(node.value[0, 100])
end
end
module ClampInt
def self.to_json(value : Int32, json : JSON::Builder)
json.number value
end
def self.from_json(value : JSON::PullParser) : Int32
value.read_int.clamp(0, MAX_ITEMS_PER_PAGE).to_i32
end
def self.to_yaml(value : Int32, yaml : YAML::Nodes::Builder)
yaml.scalar value
end
def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Int32
node.value.clamp(0, MAX_ITEMS_PER_PAGE)
end
end
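A minimal sketch of the converter in isolation; MAX_ITEMS_PER_PAGE is defined elsewhere in the codebase:
pull = JSON::PullParser.new("1000000")
Preferences::ClampInt.from_json(pull) # => MAX_ITEMS_PER_PAGE when 1_000_000 exceeds it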
@ -114,13 +135,13 @@ struct Preferences
latest_only: {type: Bool, default: CONFIG.default_user_preferences.latest_only},
listen: {type: Bool, default: CONFIG.default_user_preferences.listen},
local: {type: Bool, default: CONFIG.default_user_preferences.local},
locale: {type: String, default: CONFIG.default_user_preferences.locale, converter: EscapeString},
max_results: {type: Int32, default: CONFIG.default_user_preferences.max_results},
locale: {type: String, default: CONFIG.default_user_preferences.locale, converter: ProcessString},
max_results: {type: Int32, default: CONFIG.default_user_preferences.max_results, converter: ClampInt},
notifications_only: {type: Bool, default: CONFIG.default_user_preferences.notifications_only},
quality: {type: String, default: CONFIG.default_user_preferences.quality, converter: EscapeString},
quality: {type: String, default: CONFIG.default_user_preferences.quality, converter: ProcessString},
redirect_feed: {type: Bool, default: CONFIG.default_user_preferences.redirect_feed},
related_videos: {type: Bool, default: CONFIG.default_user_preferences.related_videos},
sort: {type: String, default: CONFIG.default_user_preferences.sort, converter: EscapeString},
sort: {type: String, default: CONFIG.default_user_preferences.sort, converter: ProcessString},
speed: {type: Float32, default: CONFIG.default_user_preferences.speed},
thin_mode: {type: Bool, default: CONFIG.default_user_preferences.thin_mode},
unseen_only: {type: Bool, default: CONFIG.default_user_preferences.unseen_only},
@ -133,7 +154,7 @@ def get_user(sid, headers, db, refresh = true)
if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
if refresh && Time.now - user.updated > 1.minute
if refresh && Time.utc - user.updated > 1.minute
user, sid = fetch_user(sid, headers, db)
user_array = user.to_a
@ -144,14 +165,11 @@ def get_user(sid, headers, db, refresh = true)
ON CONFLICT (email) DO UPDATE SET updated = $1, subscriptions = $3", user_array)
db.exec("INSERT INTO session_ids VALUES ($1,$2,$3) \
ON CONFLICT (id) DO NOTHING", sid, user.email, Time.now)
ON CONFLICT (id) DO NOTHING", sid, user.email, Time.utc)
begin
view_name = "subscriptions_#{sha256(user.email)}"
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE
ucid IN (SELECT unnest(subscriptions) FROM users WHERE email = E'#{user.email.gsub("'", "\\'")}')
ORDER BY published DESC")
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(user.email)}")
rescue ex
end
end
@ -166,14 +184,11 @@ def get_user(sid, headers, db, refresh = true)
ON CONFLICT (email) DO UPDATE SET updated = $1, subscriptions = $3", user_array)
db.exec("INSERT INTO session_ids VALUES ($1,$2,$3) \
ON CONFLICT (id) DO NOTHING", sid, user.email, Time.now)
ON CONFLICT (id) DO NOTHING", sid, user.email, Time.utc)
begin
view_name = "subscriptions_#{sha256(user.email)}"
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE
ucid IN (SELECT unnest(subscriptions) FROM users WHERE email = E'#{user.email.gsub("'", "\\'")}')
ORDER BY published DESC")
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(user.email)}")
rescue ex
end
end
@ -206,7 +221,7 @@ def fetch_user(sid, headers, db)
token = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
user = User.new(Time.now, [] of String, channels, email, CONFIG.default_user_preferences, nil, token, [] of String, true)
user = User.new(Time.utc, [] of String, channels, email, CONFIG.default_user_preferences, nil, token, [] of String, true)
return user, sid
end
@ -214,7 +229,7 @@ def create_user(sid, email, password)
password = Crypto::Bcrypt::Password.create(password, cost: 10)
token = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
user = User.new(Time.now, [] of String, [] of String, email, CONFIG.default_user_preferences, password.to_s, token, [] of String, true)
user = User.new(Time.utc, [] of String, [] of String, email, CONFIG.default_user_preferences, password.to_s, token, [] of String, true)
return user, sid
end
@ -314,10 +329,108 @@ def subscribe_ajax(channel_id, action, env_headers)
headers["content-type"] = "application/x-www-form-urlencoded"
post_req = {
"session_token" => session_token,
session_token: session_token,
}
post_url = "/subscription_ajax?#{action}=1&c=#{channel_id}"
client.post(post_url, headers, form: post_req)
end
end
def get_subscription_feed(db, user, max_results = 40, page = 1)
limit = max_results.clamp(0, MAX_ITEMS_PER_PAGE)
offset = (page - 1) * limit
notifications = db.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
as: Array(String))
view_name = "subscriptions_#{sha256(user.email)}"
if user.preferences.notifications_only && !notifications.empty?
# Only show notifications
args = arg_array(notifications)
notifications = db.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
ORDER BY published DESC", notifications, as: ChannelVideo)
videos = [] of ChannelVideo
notifications.sort_by! { |video| video.published }.reverse!
case user.preferences.sort
when "alphabetically"
notifications.sort_by! { |video| video.title }
when "alphabetically - reverse"
notifications.sort_by! { |video| video.title }.reverse!
when "channel name"
notifications.sort_by! { |video| video.author }
when "channel name - reverse"
notifications.sort_by! { |video| video.author }.reverse!
end
else
if user.preferences.latest_only
if user.preferences.unseen_only
# Show the latest video from each channel, excluding videos the user has already watched.
# "unseen_only" isn't really accurate here; a better name would be "unwatched_only".
if user.watched.empty?
values = "'{}'"
else
values = "VALUES #{user.watched.map { |id| %(('#{id}')) }.join(",")}"
end
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} WHERE \
NOT id = ANY (#{values}) \
ORDER BY ucid, published DESC", as: ChannelVideo)
else
# Show latest video from each channel
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} \
ORDER BY ucid, published DESC", as: ChannelVideo)
end
videos.sort_by! { |video| video.published }.reverse!
else
if user.preferences.unseen_only
# Only show unwatched
if user.watched.empty?
values = "'{}'"
else
values = "VALUES #{user.watched.map { |id| %(('#{id}')) }.join(",")}"
end
videos = PG_DB.query_all("SELECT * FROM #{view_name} WHERE \
NOT id = ANY (#{values}) \
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
else
# Sort subscriptions as normal
videos = PG_DB.query_all("SELECT * FROM #{view_name} \
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
end
end
case user.preferences.sort
when "published - reverse"
videos.sort_by! { |video| video.published }
when "alphabetically"
videos.sort_by! { |video| video.title }
when "alphabetically - reverse"
videos.sort_by! { |video| video.title }.reverse!
when "channel name"
videos.sort_by! { |video| video.author }
when "channel name - reverse"
videos.sort_by! { |video| video.author }.reverse!
end
notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
as: Array(String))
notifications = videos.select { |v| notifications.includes? v.id }
videos = videos - notifications
end
if !limit
videos = videos[0..max_results]
end
return videos, notifications
end

View File

@ -182,7 +182,7 @@ VIDEO_FORMATS = {
"135" => {"ext" => "mp4", "height" => 480, "format" => "DASH video", "vcodec" => "h264"},
"136" => {"ext" => "mp4", "height" => 720, "format" => "DASH video", "vcodec" => "h264"},
"137" => {"ext" => "mp4", "height" => 1080, "format" => "DASH video", "vcodec" => "h264"},
"138" => {"ext" => "mp4", "format" => "DASH video", "vcodec" => "h264"}, # Height can vary (https=>//github.com/rg3/youtube-dl/issues/4559)
"138" => {"ext" => "mp4", "format" => "DASH video", "vcodec" => "h264"}, # Height can vary (https://github.com/ytdl-org/youtube-dl/issues/4559)
"160" => {"ext" => "mp4", "height" => 144, "format" => "DASH video", "vcodec" => "h264"},
"212" => {"ext" => "mp4", "height" => 480, "format" => "DASH video", "vcodec" => "h264"},
"264" => {"ext" => "mp4", "height" => 1440, "format" => "DASH video", "vcodec" => "h264"},
@ -239,6 +239,12 @@ VIDEO_FORMATS = {
"249" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 50},
"250" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 70},
"251" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 160},
# av01 video-only formats, sometimes served with "unknown" codecs
"394" => {"ext" => "mp4", "height" => 144, "vcodec" => "av01.0.05M.08"},
"395" => {"ext" => "mp4", "height" => 240, "vcodec" => "av01.0.05M.08"},
"396" => {"ext" => "mp4", "height" => 360, "vcodec" => "av01.0.05M.08"},
"397" => {"ext" => "mp4", "height" => 480, "vcodec" => "av01.0.05M.08"},
}
struct VideoPreferences
@ -273,192 +279,209 @@ struct Video
end
end
def to_json(locale, config, kemal_config, decrypt_function)
JSON.build do |json|
json.object do
json.field "type", "video"
def to_json(locale, config, kemal_config, decrypt_function, json : JSON::Builder)
json.object do
json.field "type", "video"
json.field "title", self.title
json.field "videoId", self.id
json.field "videoThumbnails" do
generate_thumbnails(json, self.id, config, kemal_config)
end
json.field "storyboards" do
generate_storyboards(json, self.id, self.storyboards, config, kemal_config)
end
json.field "title", self.title
json.field "videoId", self.id
json.field "videoThumbnails" do
generate_thumbnails(json, self.id, config, kemal_config)
end
json.field "storyboards" do
generate_storyboards(json, self.id, self.storyboards, config, kemal_config)
end
description_html, description = html_to_content(self.description)
json.field "description", html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html
json.field "published", self.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
json.field "keywords", self.keywords
json.field "description", description
json.field "descriptionHtml", description_html
json.field "published", self.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
json.field "keywords", self.keywords
json.field "viewCount", self.views
json.field "likeCount", self.likes
json.field "dislikeCount", self.dislikes
json.field "viewCount", self.views
json.field "likeCount", self.likes
json.field "dislikeCount", self.dislikes
json.field "paid", self.paid
json.field "premium", self.premium
json.field "isFamilyFriendly", self.is_family_friendly
json.field "allowedRegions", self.allowed_regions
json.field "genre", self.genre
json.field "genreUrl", self.genre_url
json.field "paid", self.paid
json.field "premium", self.premium
json.field "isFamilyFriendly", self.is_family_friendly
json.field "allowedRegions", self.allowed_regions
json.field "genre", self.genre
json.field "genreUrl", self.genre_url
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", self.author_thumbnail.gsub("=s48-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
qualities.each do |quality|
json.object do
json.field "url", self.author_thumbnail.gsub("=s48-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCountText", self.sub_count_text
json.field "subCountText", self.sub_count_text
json.field "lengthSeconds", self.info["length_seconds"].to_i
json.field "allowRatings", self.allow_ratings
json.field "rating", self.info["avg_rating"].to_f32
json.field "isListed", self.is_listed
json.field "liveNow", self.live_now
json.field "isUpcoming", self.is_upcoming
json.field "lengthSeconds", self.info["length_seconds"].to_i
json.field "allowRatings", self.allow_ratings
json.field "rating", self.info["avg_rating"].to_f32
json.field "isListed", self.is_listed
json.field "liveNow", self.live_now
json.field "isUpcoming", self.is_upcoming
if self.premiere_timestamp
json.field "premiereTimestamp", self.premiere_timestamp.not_nil!.to_unix
end
if self.premiere_timestamp
json.field "premiereTimestamp", self.premiere_timestamp.not_nil!.to_unix
end
if self.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
host_url = make_host_url(config, kemal_config)
if self.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
host_url = make_host_url(config, kemal_config)
hlsvp = self.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
hlsvp = self.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
json.field "hlsUrl", hlsvp
end
json.field "hlsUrl", hlsvp
end
json.field "dashUrl", "#{make_host_url(config, kemal_config)}/api/manifest/dash/id/#{id}"
json.field "dashUrl", "#{make_host_url(config, kemal_config)}/api/manifest/dash/id/#{id}"
json.field "adaptiveFormats" do
json.array do
self.adaptive_fmts(decrypt_function).each do |fmt|
json.object do
json.field "index", fmt["index"]
json.field "bitrate", fmt["bitrate"]
json.field "init", fmt["init"]
json.field "url", fmt["url"]
json.field "itag", fmt["itag"]
json.field "type", fmt["type"]
json.field "clen", fmt["clen"]
json.field "lmt", fmt["lmt"]
json.field "projectionType", fmt["projection_type"]
json.field "adaptiveFormats" do
json.array do
self.adaptive_fmts(decrypt_function).each do |fmt|
json.object do
json.field "index", fmt["index"]
json.field "bitrate", fmt["bitrate"]
json.field "init", fmt["init"]
json.field "url", fmt["url"]
json.field "itag", fmt["itag"]
json.field "type", fmt["type"]
json.field "clen", fmt["clen"]
json.field "lmt", fmt["lmt"]
json.field "projectionType", fmt["projection_type"]
fmt_info = itag_to_metadata?(fmt["itag"])
if fmt_info
fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.to_i || 30
json.field "fps", fps
json.field "container", fmt_info["ext"]
json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
fmt_info = itag_to_metadata?(fmt["itag"])
if fmt_info
fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.to_i || 30
json.field "fps", fps
json.field "container", fmt_info["ext"]
json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
if fmt_info["height"]?
json.field "resolution", "#{fmt_info["height"]}p"
if fmt_info["height"]?
json.field "resolution", "#{fmt_info["height"]}p"
quality_label = "#{fmt_info["height"]}p"
if fps > 30
quality_label += "60"
end
json.field "qualityLabel", quality_label
if fmt_info["width"]?
json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
end
quality_label = "#{fmt_info["height"]}p"
if fps > 30
quality_label += "60"
end
end
end
end
end
end
json.field "qualityLabel", quality_label
json.field "formatStreams" do
json.array do
self.fmt_stream(decrypt_function).each do |fmt|
json.object do
json.field "url", fmt["url"]
json.field "itag", fmt["itag"]
json.field "type", fmt["type"]
json.field "quality", fmt["quality"]
fmt_info = itag_to_metadata?(fmt["itag"])
if fmt_info
fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.to_i || 30
json.field "fps", fps
json.field "container", fmt_info["ext"]
json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
if fmt_info["height"]?
json.field "resolution", "#{fmt_info["height"]}p"
quality_label = "#{fmt_info["height"]}p"
if fps > 30
quality_label += "60"
end
json.field "qualityLabel", quality_label
if fmt_info["width"]?
json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
end
if fmt_info["width"]?
json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
end
end
end
end
end
end
json.field "captions" do
json.array do
self.captions.each do |caption|
json.object do
json.field "label", caption.name.simpleText
json.field "languageCode", caption.languageCode
json.field "url", "/api/v1/captions/#{id}?label=#{URI.escape(caption.name.simpleText)}"
end
end
end
end
json.field "recommendedVideos" do
json.array do
self.info["rvs"]?.try &.split(",").each do |rv|
rv = HTTP::Params.parse(rv)
if rv["id"]?
json.object do
json.field "videoId", rv["id"]
json.field "title", rv["title"]
json.field "videoThumbnails" do
generate_thumbnails(json, rv["id"], config, kemal_config)
end
json.field "author", rv["author"]
json.field "lengthSeconds", rv["length_seconds"].to_i
json.field "viewCountText", rv["short_view_count_text"]
end
end
end
end
end
end
json.field "formatStreams" do
json.array do
self.fmt_stream(decrypt_function).each do |fmt|
json.object do
json.field "url", fmt["url"]
json.field "itag", fmt["itag"]
json.field "type", fmt["type"]
json.field "quality", fmt["quality"]
fmt_info = itag_to_metadata?(fmt["itag"])
if fmt_info
fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.to_i || 30
json.field "fps", fps
json.field "container", fmt_info["ext"]
json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
if fmt_info["height"]?
json.field "resolution", "#{fmt_info["height"]}p"
quality_label = "#{fmt_info["height"]}p"
if fps > 30
quality_label += "60"
end
json.field "qualityLabel", quality_label
if fmt_info["width"]?
json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
end
end
end
end
end
end
end
json.field "captions" do
json.array do
self.captions.each do |caption|
json.object do
json.field "label", caption.name.simpleText
json.field "languageCode", caption.languageCode
json.field "url", "/api/v1/captions/#{id}?label=#{URI.escape(caption.name.simpleText)}"
end
end
end
end
json.field "recommendedVideos" do
json.array do
self.info["rvs"]?.try &.split(",").each do |rv|
rv = HTTP::Params.parse(rv)
if rv["id"]?
json.object do
json.field "videoId", rv["id"]
json.field "title", rv["title"]
json.field "videoThumbnails" do
generate_thumbnails(json, rv["id"], config, kemal_config)
end
json.field "author", rv["author"]
json.field "lengthSeconds", rv["length_seconds"].to_i
json.field "viewCountText", rv["short_view_count_text"]
end
end
end
end
end
end
end
def to_json(locale, config, kemal_config, decrypt_function, json : JSON::Builder | Nil = nil)
if json
to_json(locale, config, kemal_config, decrypt_function, json)
else
JSON.build do |json|
to_json(locale, config, kemal_config, decrypt_function, json)
end
end
end
# `description_html` is stored in DB as `description`, which can be
# quite confusing. Since it currently isn't very practical to rename
# it, we instead define a getter and setter here.
def description_html
self.description
end
def description_html=(other : String)
self.description = other
end
def allow_ratings
allow_ratings = player_response["videoDetails"]?.try &.["allowRatings"]?.try &.as_bool
@ -788,14 +811,19 @@ struct Video
end
def short_description
description = self.description.gsub("<br>", " ")
description = description.gsub("<br/>", " ")
description = XML.parse_html(description).content[0..200].gsub('"', "&quot;").gsub("\n", " ").strip(" ")
if description.empty?
description = " "
short_description = self.description_html.gsub(/(<br>)|(<br\/>|"|\n)/, {
"<br>": " ",
"<br/>": " ",
"\"": "&quot;",
"\n": " ",
})
short_description = XML.parse_html(short_description).content[0..200].strip(" ")
if short_description.empty?
short_description = " "
end
return description
return short_description
end
def length_seconds
@ -847,14 +875,16 @@ end
class VideoRedirect < Exception
end
def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}), refresh = true, region = nil, force_refresh = false)
if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool) && !region
video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)
# If record was last updated over 10 minutes ago, refresh (expire param in response lasts for 6 hours)
if (refresh && Time.now - video.updated > 10.minutes) || force_refresh
def get_video(id, db, refresh = true, region = nil, force_refresh = false)
if (video = db.query_one?("SELECT * FROM videos WHERE id = $1", id, as: Video)) && !region
# If record was last updated over 10 minutes ago, or video has since premiered,
# refresh (expire param in response lasts for 6 hours)
if (refresh &&
(Time.utc - video.updated > 10.minutes) ||
(video.premiere_timestamp && video.premiere_timestamp.as(Time) < Time.utc)) ||
force_refresh
begin
video = fetch_video(id, proxies, region)
video = fetch_video(id, region)
video_array = video.to_a
args = arg_array(video_array[1..-1], 2)
@ -869,7 +899,7 @@ def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32})
end
end
else
video = fetch_video(id, proxies, region)
video = fetch_video(id, region)
video_array = video.to_a
args = arg_array(video_array)
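Illustrative sketch (not part of the diff): with the new condition, a cached record is re-fetched when it is stale and refreshing is allowed, when its premiere time has passed, or when a refresh is forced. The predicate in isolation, with hypothetical helper and argument names:
# Hypothetical helper mirroring the refresh condition in get_video above.
def needs_refresh?(updated : Time, premiere_timestamp : Time?,
                   refresh : Bool, force_refresh : Bool) : Bool
  stale     = Time.utc - updated > 10.minutes
  premiered = premiere_timestamp && premiere_timestamp < Time.utc
  (refresh && stale) || !!premiered || force_refresh
end

puts needs_refresh?(Time.utc - 15.minutes, nil, true, false)               # => true  (stale)
puts needs_refresh?(Time.utc - 1.minute, Time.utc - 1.hour, false, false)  # => true  (premiered)
puts needs_refresh?(Time.utc - 1.minute, nil, true, false)                 # => false (still fresh)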
@ -895,7 +925,7 @@ def extract_polymer_config(body, html)
end
end
initial_data = JSON.parse(body.match(/window\["ytInitialData"\] = (?<info>.*?);\n/).try &.["info"] || "{}")
initial_data = extract_initial_data(body)
primary_results = initial_data["contents"]?
.try &.["twoColumnWatchNextResults"]?
@ -973,7 +1003,7 @@ def extract_polymer_config(body, html)
if published
params["published"] = Time.parse(published, "%b %-d, %Y", Time::Location.local).to_unix.to_s
else
params["published"] = Time.new(1990, 1, 1).to_unix.to_s
params["published"] = Time.utc(1990, 1, 1).to_unix.to_s
end
params["description_html"] = "<p></p>"
@ -1073,8 +1103,8 @@ def extract_player_config(body, html)
return params
end
def fetch_video(id, proxies, region)
client = make_client(YT_URL, proxies, region)
def fetch_video(id, region)
client = make_client(YT_URL, region)
response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
if md = response.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
@ -1089,9 +1119,9 @@ def fetch_video(id, proxies, region)
if info["reason"]? && info["reason"].includes? "your country"
bypass_channel = Channel({XML::Node, HTTP::Params} | Nil).new
proxies.each do |proxy_region, list|
PROXY_LIST.each do |proxy_region, list|
spawn do
client = make_client(YT_URL, proxies, proxy_region)
client = make_client(YT_URL, proxy_region)
proxy_response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
proxy_html = XML.parse_html(proxy_response.body)
@ -1107,7 +1137,7 @@ def fetch_video(id, proxies, region)
end
end
proxies.size.times do
PROXY_LIST.size.times do
response = bypass_channel.receive
if response
html, info = response
@ -1136,37 +1166,32 @@ def fetch_video(id, proxies, region)
raise "Video unavailable."
end
if !info["title"]?
if !info["title"]? || info["title"].empty?
raise "Video unavailable."
end
title = info["title"]
author = info["author"]
ucid = info["ucid"]
author = info["author"]? || ""
ucid = info["ucid"]? || ""
views = html.xpath_node(%q(//meta[@itemprop="interactionCount"]))
views = views.try &.["content"].to_i64?
views ||= 0_i64
.try &.["content"].to_i64? || 0_i64
likes = html.xpath_node(%q(//button[@title="I like this"]/span))
likes = likes.try &.content.delete(",").try &.to_i?
likes ||= 0
.try &.content.delete(",").try &.to_i? || 0
dislikes = html.xpath_node(%q(//button[@title="I dislike this"]/span))
dislikes = dislikes.try &.content.delete(",").try &.to_i?
dislikes ||= 0
.try &.content.delete(",").try &.to_i? || 0
avg_rating = (likes.to_f/(likes.to_f + dislikes.to_f) * 4 + 1)
avg_rating = avg_rating.nan? ? 0.0 : avg_rating
info["avg_rating"] = "#{avg_rating}"
description = html.xpath_node(%q(//p[@id="eow-description"]))
description = description ? description.to_xml(options: XML::SaveOptions::NO_DECL) : %q(<p id="eow-description"></p>)
description_html = html.xpath_node(%q(//p[@id="eow-description"])).try &.to_xml(options: XML::SaveOptions::NO_DECL) || ""
wilson_score = ci_lower_bound(likes, likes + dislikes)
published = html.xpath_node(%q(//meta[@itemprop="datePublished"])).try &.["content"]
published ||= Time.now.to_s("%Y-%m-%d")
published ||= Time.utc.to_s("%Y-%m-%d")
published = Time.parse(published, "%Y-%m-%d", Time::Location.local)
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).try &.["content"].split(",")
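Illustrative sketch (not part of the diff): the two scores computed above, worked through for sample counts. avg_rating maps the like ratio onto a 1-5 scale (the nan? guard covers the 0-like, 0-dislike case); ci_lower_bound is not shown in this hunk, but a typical implementation is the Wilson score interval lower bound, sketched here under a hypothetical name.
likes    = 750
dislikes = 250

# Like ratio mapped onto a 1-5 scale, as in fetch_video above.
avg_rating = (likes.to_f / (likes.to_f + dislikes.to_f) * 4 + 1)
puts avg_rating # => 4.0

# Hypothetical stand-in for ci_lower_bound: Wilson score lower bound (z = 1.96 for ~95%).
def wilson_lower_bound(pos : Int32, n : Int32, z : Float64 = 1.96) : Float64
  return 0.0 if n == 0
  phat = pos.to_f / n
  (phat + z * z / (2 * n) -
    z * Math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)) / (1 + z * z / n)
end

puts wilson_lower_bound(likes, likes + dislikes) # => ~0.72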
@ -1178,7 +1203,8 @@ def fetch_video(id, proxies, region)
genre = html.xpath_node(%q(//meta[@itemprop="genre"])).try &.["content"]
genre ||= ""
genre_url = html.xpath_node(%(//ul[contains(@class, "watch-info-tag-list")]/li/a[text()="#{genre}"])).try &.["href"]
genre_url = html.xpath_node(%(//ul[contains(@class, "watch-info-tag-list")]/li/a[text()="#{genre}"])).try &.["href"]?
genre_url ||= ""
# YouTube provides invalid URLs for some genres, so we fix that here
case genre
@ -1195,30 +1221,12 @@ def fetch_video(id, proxies, region)
when "Trailers"
genre_url = "/channel/UClgRkhTL3_hImCAmdLfDE4g"
end
genre_url ||= ""
license = html.xpath_node(%q(//h4[contains(text(),"License")]/parent::*/ul/li))
if license
license = license.content
else
license = ""
end
license = html.xpath_node(%q(//h4[contains(text(),"License")]/parent::*/ul/li)).try &.content || ""
sub_count_text = html.xpath_node(%q(//span[contains(@class, "yt-subscriber-count")])).try &.["title"]? || "0"
author_thumbnail = html.xpath_node(%(//span[@class="yt-thumb-clip"]/img)).try &.["data-thumb"]?.try &.gsub(/^\/\//, "https://") || ""
sub_count_text = html.xpath_node(%q(//span[contains(@class, "yt-subscriber-count")]))
if sub_count_text
sub_count_text = sub_count_text["title"]
else
sub_count_text = "0"
end
author_thumbnail = html.xpath_node(%(//span[@class="yt-thumb-clip"]/img))
if author_thumbnail
author_thumbnail = author_thumbnail["data-thumb"]
else
author_thumbnail = ""
end
video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
video = Video.new(id, info, Time.utc, title, views, likes, dislikes, wilson_score, published, description_html,
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license, sub_count_text, author_thumbnail)
return video
@ -1235,7 +1243,7 @@ def process_video_params(query, preferences)
continue = query["continue"]?.try &.to_i?
continue_autoplay = query["continue_autoplay"]?.try &.to_i?
listen = query["listen"]? && (query["listen"] == "true" || query["listen"] == "1").to_unsafe
local = query["local"]? && (query["local"] == "true" || query["listen"] == "1").to_unsafe
local = query["local"]? && (query["local"] == "true" || query["local"] == "1").to_unsafe
preferred_captions = query["subtitles"]?.try &.split(",").map { |a| a.downcase }
quality = query["quality"]?
region = query["region"]?
@ -1284,6 +1292,14 @@ def process_video_params(query, preferences)
related_videos = related_videos == 1
video_loop = video_loop == 1
if CONFIG.disabled?("dash") && quality == "dash"
quality = "high"
end
if CONFIG.disabled?("local") && local
local = false
end
if query["t"]?
video_start = decode_time(query["t"])
end
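Illustrative sketch (not part of the diff): the two new guards fall back to safe values when the administrator has disabled a feature. The real Config type is not shown in this hunk; a minimal stand-in that keeps disabled feature names in a set:
# Hypothetical stand-in for the config object; only the disabled? check is sketched.
class StubConfig
  def initialize(@disabled : Set(String) = Set(String).new)
  end

  def disabled?(feature : String) : Bool
    @disabled.includes?(feature)
  end
end

config  = StubConfig.new(Set{"dash", "local"})
quality = "dash"
local   = true

quality = "high" if config.disabled?("dash") && quality == "dash"
local   = false  if config.disabled?("local") && local

puts quality # => high
puts local   # => false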

View File

@ -1,37 +1,57 @@
<% content_for "header" do %>
<title><%= author %> - Invidious</title>
<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/channel/<%= ucid %>" />
<title><%= channel.author %> - Invidious</title>
<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/channel/<%= channel.ucid %>" />
<% end %>
<% if channel.banner %>
<div class="h-box">
<img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).full_path %>">
</div>
<div class="h-box">
<hr>
</div>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
<h3><%= author %></h3>
<div class="channel-profile">
<img src="/ggpht<%= URI.parse(channel.author_thumbnail).full_path %>">
<span><%= channel.author %></span>
</div>
</div>
<div class="pure-u-1-3" style="text-align:right">
<h3>
<a href="/feed/channel/<%= ucid %>"><i class="icon ion-logo-rss"></i></a>
<a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="h-box">
<% sub_count_text = number_to_short_text(sub_count) %>
<% ucid = channel.ucid %>
<% author = channel.author %>
<% sub_count_text = number_to_short_text(channel.sub_count) %>
<%= rendered "components/subscribe_widget" %>
</div>
<div class="pure-g h-box">
<div class="pure-u-1-3">
<a href="https://www.youtube.com/channel/<%= ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
<% if !auto_generated %>
<a href="https://www.youtube.com/channel/<%= channel.ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
<% if !channel.auto_generated %>
<div class="pure-u-1 pure-md-1-3">
<b><%= translate(locale, "Videos") %></b>
</div>
<% end %>
<div class="pure-u-1 pure-md-1-3">
<% if auto_generated %>
<% if channel.auto_generated %>
<b><%= translate(locale, "Playlists") %></b>
<% else %>
<a href="/channel/<%= ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
<a href="/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
<% end %>
</div>
<div class="pure-u-1 pure-md-1-3">
<% if channel.tabs.includes? "community" %>
<a href="/channel/<%= channel.ucid %>/community"><%= translate(locale, "Community") %></a>
<% end %>
</div>
</div>
@ -43,7 +63,7 @@
<% if sort_by == sort %>
<b><%= translate(locale, sort) %></b>
<% else %>
<a href="/channel/<%= ucid %>?page=<%= page %>&sort_by=<%= sort %>">
<a href="/channel/<%= channel.ucid %>?page=<%= page %>&sort_by=<%= sort %>">
<%= translate(locale, sort) %>
</a>
<% end %>
@ -67,8 +87,8 @@
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-lg-1-5">
<% if page >= 2 %>
<a href="/channel/<%= ucid %>?page=<%= page - 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
<% if page > 1 %>
<a href="/channel/<%= channel.ucid %>?page=<%= page - 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
<%= translate(locale, "Previous page") %>
</a>
<% end %>
@ -76,7 +96,7 @@
<div class="pure-u-1 pure-u-lg-3-5"></div>
<div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
<% if count == 60 %>
<a href="/channel/<%= ucid %>?page=<%= page + 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
<a href="/channel/<%= channel.ucid %>?page=<%= page + 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
<%= translate(locale, "Next page") %>
</a>
<% end %>

View File

@ -0,0 +1,80 @@
<% content_for "header" do %>
<title><%= channel.author %> - Invidious</title>
<% end %>
<% if channel.banner %>
<div class="h-box">
<img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).full_path %>">
</div>
<div class="h-box">
<hr>
</div>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
<div class="channel-profile">
<img src="/ggpht<%= URI.parse(channel.author_thumbnail).full_path %>">
<span><%= channel.author %></span>
</div>
</div>
<div class="pure-u-1-3" style="text-align:right">
<h3>
<a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="h-box">
<% ucid = channel.ucid %>
<% author = channel.author %>
<% sub_count_text = number_to_short_text(channel.sub_count) %>
<%= rendered "components/subscribe_widget" %>
</div>
<div class="pure-g h-box">
<div class="pure-u-1-3">
<a href="https://www.youtube.com/channel/<%= channel.ucid %>/community"><%= translate(locale, "View channel on YouTube") %></a>
<% if !channel.auto_generated %>
<div class="pure-u-1 pure-md-1-3">
<a href="/channel/<%= channel.ucid %>"><%= translate(locale, "Videos") %></a>
</div>
<% end %>
<div class="pure-u-1 pure-md-1-3">
<a href="/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
</div>
<div class="pure-u-1 pure-md-1-3">
<% if channel.tabs.includes? "community" %>
<b><%= translate(locale, "Community") %></b>
<% end %>
</div>
</div>
<div class="pure-u-2-3"></div>
</div>
<div class="h-box">
<hr>
</div>
<% if error_message %>
<div class="h-box">
<p><%= error_message %></p>
</div>
<% else %>
<div class="h-box pure-g" id="comments">
<%= template_youtube_comments(items.not_nil!, locale, thin_mode) %>
</div>
<% end %>
<script>
var community_data = {
ucid: '<%= channel.ucid %>',
youtube_comments_text: '<%= HTML.escape(translate(locale, "View YouTube comments")) %>',
comments_text: '<%= HTML.escape(translate(locale, "View `x` comments", "{commentCount}")) %>',
hide_replies_text: '<%= HTML.escape(translate(locale, "Hide replies")) %>',
show_replies_text: '<%= HTML.escape(translate(locale, "Show replies")) %>',
preferences: <%= env.get("preferences").as(Preferences).to_json %>,
}
</script>
<script src="/js/community.js?v=<%= ASSET_COMMIT %>"></script>

View File

@ -35,7 +35,7 @@
</b>
</p>
<% when MixVideo %>
<a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.mixes[0] %>">
<a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.rdid %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
@ -52,7 +52,7 @@
</b>
</p>
<% when PlaylistVideo %>
<a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.playlists[0] %>">
<a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.plid %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
@ -72,9 +72,9 @@
</p>
<h5 class="pure-g">
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.now %>
<div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.now).ago, locale)) %></div>
<% elsif Time.now - item.published > 1.minute %>
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
<div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
<% elsif Time.utc - item.published > 1.minute %>
<div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>
<% else %>
<div class="pure-u-2-3"></div>
@ -121,9 +121,9 @@
</p>
<h5 class="pure-g">
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.now %>
<div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.now).ago, locale)) %></div>
<% elsif Time.now - item.published > 1.minute %>
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
<div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
<% elsif Time.utc - item.published > 1.minute %>
<div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>
<% else %>
<div class="pure-u-2-3"></div>

View File

@ -6,7 +6,7 @@
<% if params.autoplay %>autoplay<% end %>
<% if params.video_loop %>loop<% end %>
<% if params.controls %>controls<% end %>>
<% if hlsvp %>
<% if hlsvp && !CONFIG.disabled?("livestreams") %>
<source src="<%= hlsvp %>?local=true" type="application/x-mpegURL" label="livestream">
<% else %>
<% if params.listen %>
@ -43,7 +43,7 @@
var player_data = {
aspect_ratio: '<%= aspect_ratio %>',
title: "<%= video.title.dump_unquoted %>",
description: "<%= HTML.escape(description) %>",
description: "<%= HTML.escape(video.short_description) %>",
thumbnail: "<%= thumbnail %>"
}
</script>

View File

@ -6,7 +6,10 @@
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="thumbnail" content="<%= thumbnail %>">
<%= rendered "components/player_sources" %>
<link rel="stylesheet" href="/css/videojs-overlay.css?v=<%= ASSET_COMMIT %>">
<script src="/js/videojs-overlay.min.js?v=<%= ASSET_COMMIT %>"></script>
<link rel="stylesheet" href="/css/default.css?v=<%= ASSET_COMMIT %>">
<link rel="stylesheet" href="/css/darktheme.css?v=<%= ASSET_COMMIT %>">
<title><%= HTML.escape(video.title) %> - Invidious</title>
<style>
#player {
@ -30,7 +33,8 @@ var video_data = {
length_seconds: '<%= video.info["length_seconds"].to_f %>',
video_series: <%= video_series.to_json %>,
params: <%= params.to_json %>,
preferences: <%= preferences.to_json %>
preferences: <%= preferences.to_json %>,
premiere_timestamp: <%= video.premiere_timestamp.try &.to_unix || "null" %>
}
</script>

View File

@ -56,16 +56,16 @@ var watched_data = {
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-lg-1-5">
<% if page >= 2 %>
<a href="/feed/history?page=<%= page - 1 %>">
<% if page > 1 %>
<a href="/feed/history?page=<%= page - 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
<%= translate(locale, "Previous page") %>
</a>
<% end %>
</div>
<div class="pure-u-1 pure-u-lg-3-5"></div>
<div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
<% if watched.size >= limit %>
<a href="/feed/history?page=<%= page + 1 %>">
<% if watched.size >= max_results %>
<a href="/feed/history?page=<%= page + 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
<%= translate(locale, "Next page") %>
</a>
<% end %>

View File

@ -9,6 +9,20 @@
<body>
<h1><%= translate(locale, "JavaScript license information") %></h1>
<table id="jslicense-labels1">
<tr>
<td>
<a href="/js/community.js?v=<%= ASSET_COMMIT %>">community.js</a>
</td>
<td>
<a href="https://www.gnu.org/licenses/agpl-3.0.html">AGPL-3.0</a>
</td>
<td>
<a href="/js/community.js?v=<%= ASSET_COMMIT %>"><%= translate(locale, "source") %></a>
</td>
</tr>
<tr>
<td>
<a href="/js/embed.js?v=<%= ASSET_COMMIT %>">embed.js</a>
@ -71,7 +85,7 @@
</td>
<td>
<a href="http://www.jclark.com/xml/copying.txt">Expat</a>
<a href="http://www.apache.org/licenses/LICENSE-2.0">Apache-2.0-only</a>
</td>
<td>
@ -163,6 +177,20 @@
</td>
</tr>
<tr>
<td>
<a href="/js/videojs-overlay.min.js?v=<%= ASSET_COMMIT %>">videojs-overlay.min.js</a>
</td>
<td>
<a href="http://www.apache.org/licenses/LICENSE-2.0">Apache-2.0-only</a>
</td>
<td>
<a href="https://github.com/brightcove/videojs-overlay"><%= translate(locale, "source") %></a>
</td>
</tr>
<tr>
<td>
<a href="/js/videojs-share.min.js?v=<%= ASSET_COMMIT %>">videojs-share.min.js</a>

View File

@ -32,7 +32,7 @@
<% end %>
<% if password %>
<input name="password" type="hidden" value="<%= password %>">
<input name="password" type="hidden" value="<%= HTML.escape(password) %>">
<% else %>
<label for="password"><%= translate(locale, "Password") %> :</label>
<input required class="pure-input-1" name="password" type="password" placeholder="<%= translate(locale, "Password") %>">
@ -95,7 +95,7 @@
<% end %>
<% if password %>
<input name="password" type="hidden" value="<%= password %>">
<input name="password" type="hidden" value="<%= HTML.escape(password) %>">
<% else %>
<label for="password"><%= translate(locale, "Password") %> :</label>
<input required class="pure-input-1" name="password" type="password" placeholder="<%= translate(locale, "Password") %>">
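Illustrative sketch (not part of the diff): escaping the hidden value keeps a crafted password from breaking out of the value="..." attribute when the login form is re-rendered.
require "html"

# A password containing a double quote would otherwise terminate the attribute.
password = "xyz\" onfocus=\"alert(1)"
puts HTML.escape(password)
# => xyz&quot; onfocus=&quot;alert(1)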

View File

@ -46,7 +46,7 @@
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-lg-1-5">
<% if page >= 2 %>
<% if page > 1 %>
<a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">
<%= translate(locale, "Previous page") %>
</a>

View File

@ -1,36 +1,56 @@
<% content_for "header" do %>
<title><%= author %> - Invidious</title>
<title><%= channel.author %> - Invidious</title>
<% end %>
<% if channel.banner %>
<div class="h-box">
<img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).full_path %>">
</div>
<div class="h-box">
<hr>
</div>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
<h3><%= author %></h3>
<div class="channel-profile">
<img src="/ggpht<%= URI.parse(channel.author_thumbnail).full_path %>">
<span><%= channel.author %></span>
</div>
</div>
<div class="pure-u-1-3" style="text-align:right">
<h3>
<a href="/feed/channel/<%= ucid %>"><i class="icon ion-logo-rss"></i></a>
<a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="h-box">
<% sub_count_text = number_to_short_text(sub_count) %>
<% ucid = channel.ucid %>
<% author = channel.author %>
<% sub_count_text = number_to_short_text(channel.sub_count) %>
<%= rendered "components/subscribe_widget" %>
</div>
<div class="pure-g h-box">
<div class="pure-g pure-u-1-3">
<div class="pure-u-1 pure-md-1-3">
<a href="https://www.youtube.com/channel/<%= ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
<a href="https://www.youtube.com/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "View channel on YouTube") %></a>
</div>
<div class="pure-u-1 pure-md-1-3">
<a href="/channel/<%= ucid %>"><%= translate(locale, "Videos") %></a>
<a href="/channel/<%= channel.ucid %>"><%= translate(locale, "Videos") %></a>
</div>
<div class="pure-u-1 pure-md-1-3">
<% if !auto_generated %>
<% if !channel.auto_generated %>
<b><%= translate(locale, "Playlists") %></b>
<% end %>
</div>
<div class="pure-u-1 pure-md-1-3">
<% if channel.tabs.includes? "community" %>
<a href="/channel/<%= channel.ucid %>/community"><%= translate(locale, "Community") %></a>
<% end %>
</div>
</div>
<div class="pure-u-1-3"></div>
<div class="pure-u-1-3">
@ -40,7 +60,7 @@
<% if sort_by == sort %>
<b><%= translate(locale, sort) %></b>
<% else %>
<a href="/channel/<%= ucid %>/playlists?sort_by=<%= sort %>">
<a href="/channel/<%= channel.ucid %>/playlists?sort_by=<%= sort %>">
<%= translate(locale, sort) %>
</a>
<% end %>
@ -66,7 +86,7 @@
<div class="pure-u-1 pure-u-md-4-5"></div>
<div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
<% if items.size >= 28 %>
<a href="/channel/<%= ucid %>/playlists?continuation=<%= continuation %><% if sort_by != "last" %>&sort_by=<%= sort_by %><% end %>">
<a href="/channel/<%= channel.ucid %>/playlists?continuation=<%= continuation %><% if sort_by != "last" %>&sort_by=<%= sort_by %><% end %>">
<%= translate(locale, "Next page") %>
</a>
<% end %>

View File

@ -35,7 +35,7 @@ function update_value(element) {
<div class="pure-control-group">
<label for="local"><%= translate(locale, "Proxy videos? ") %></label>
<input name="local" id="local" type="checkbox" <% if preferences.local %>checked<% end %>>
<input name="local" id="local" type="checkbox" <% if preferences.local && !CONFIG.disabled?("local") %>checked<% end %> <% if CONFIG.disabled?("local") %>disabled<% end %>>
</div>
<div class="pure-control-group">
@ -56,7 +56,9 @@ function update_value(element) {
<label for="quality"><%= translate(locale, "Preferred video quality: ") %></label>
<select name="quality" id="quality">
<% {"dash", "hd720", "medium", "small"}.each do |option| %>
<option value="<%= option %>" <% if preferences.quality == option %> selected <% end %>><%= translate(locale, option) %></option>
<% if !(option == "dash" && CONFIG.disabled?("dash")) %>
<option value="<%= option %>" <% if preferences.quality == option %> selected <% end %>><%= translate(locale, option) %></option>
<% end %>
<% end %>
</select>
</div>

View File

@ -12,7 +12,7 @@
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-lg-1-5">
<% if page >= 2 %>
<% if page > 1 %>
<a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page - 1 %>">
<%= translate(locale, "Previous page") %>
</a>

View File

@ -65,10 +65,9 @@ function remove_subscription(target) {
'&c=' + target.getAttribute('data-ucid');
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('csrf_token=<%= URI.escape(env.get?("csrf_token").try &.as(String) || "") %>');
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
@ -78,5 +77,7 @@ function remove_subscription(target) {
}
}
}
xhr.send('csrf_token=<%= URI.escape(env.get?("csrf_token").try &.as(String) || "") %>');
}
</script>

View File

@ -62,8 +62,8 @@ var watched_data = {
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-lg-1-5">
<% if page >= 2 %>
<a href="/feed/subscriptions?max_results=<%= max_results %>&page=<%= page - 1 %>">
<% if page > 1 %>
<a href="/feed/subscriptions?page=<%= page - 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
<%= translate(locale, "Previous page") %>
</a>
<% end %>
@ -71,7 +71,7 @@ var watched_data = {
<div class="pure-u-1 pure-u-lg-3-5"></div>
<div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
<% if (videos.size + notifications.size) == max_results %>
<a href="/feed/subscriptions?max_results=<%= max_results %>&page=<%= page + 1 %>">
<a href="/feed/subscriptions?page=<%= page + 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
<%= translate(locale, "Next page") %>
</a>
<% end %>

View File

@ -57,10 +57,9 @@ function revoke_token(target) {
'&session=' + target.getAttribute('data-session');
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.timeout = 10000;
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('csrf_token=<%= URI.escape(env.get?("csrf_token").try &.as(String) || "") %>');
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
@ -70,5 +69,7 @@ function revoke_token(target) {
}
}
}
xhr.send('csrf_token=<%= URI.escape(env.get?("csrf_token").try &.as(String) || "") %>');
}
</script>

View File

@ -1,12 +1,12 @@
<% content_for "header" do %>
<meta name="thumbnail" content="<%= thumbnail %>">
<meta name="description" content="<%= description %>">
<meta name="description" content="<%= video.short_description %>">
<meta name="keywords" content="<%= video.keywords.join(",") %>">
<meta property="og:site_name" content="Invidious">
<meta property="og:url" content="<%= host_url %>/watch?v=<%= video.id %>">
<meta property="og:title" content="<%= HTML.escape(video.title) %>">
<meta property="og:image" content="/vi/<%= video.id %>/maxres.jpg">
<meta property="og:description" content="<%= description %>">
<meta property="og:description" content="<%= video.short_description %>">
<meta property="og:type" content="video.other">
<meta property="og:video:url" content="<%= host_url %>/embed/<%= video.id %>">
<meta property="og:video:secure_url" content="<%= host_url %>/embed/<%= video.id %>">
@ -17,7 +17,7 @@
<meta name="twitter:site" content="@omarroth1">
<meta name="twitter:url" content="<%= host_url %>/watch?v=<%= video.id %>">
<meta name="twitter:title" content="<%= HTML.escape(video.title) %>">
<meta name="twitter:description" content="<%= description %>">
<meta name="twitter:description" content="<%= video.short_description %>">
<meta name="twitter:image" content="<%= host_url %>/vi/<%= video.id %>/maxres.jpg">
<meta name="twitter:player" content="<%= host_url %>/embed/<%= video.id %>">
<meta name="twitter:player:width" content="1280">
@ -40,7 +40,8 @@ var video_data = {
hide_replies_text: '<%= HTML.escape(translate(locale, "Hide replies")) %>',
show_replies_text: '<%= HTML.escape(translate(locale, "Show replies")) %>',
params: <%= params.to_json %>,
preferences: <%= preferences.to_json %>
preferences: <%= preferences.to_json %>,
premiere_timestamp: <%= video.premiere_timestamp.try &.to_unix || "null" %>
}
</script>
@ -72,15 +73,19 @@ var video_data = {
<h3>
<%= reason %>
</h3>
<% elsif video.premiere_timestamp %>
<h3>
<%= translate(locale, "Premieres in `x`", recode_date((video.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %>
</h3>
<% end %>
</div>
<div class="pure-g">
<div class="pure-u-1 pure-u-lg-1-5">
<div class="h-box">
<p>
<span>
<a href="https://www.youtube.com/watch?v=<%= video.id %>"><%= translate(locale, "Watch on YouTube") %></a>
</p>
</span>
<p>
<% if params.annotations %>
<a href="/watch?<%= env.params.query %>&iv_load_policy=3">
@ -93,7 +98,7 @@ var video_data = {
<% end %>
</p>
<% if CONFIG.dmca_content.includes? video.id %>
<% if CONFIG.dmca_content.includes?(video.id) || CONFIG.disabled?("downloads") %>
<p><%= translate(locale, "Download is disabled.") %></p>
<% else %>
<form class="pure-form pure-form-stacked" action="/latest_version" method="get" rel="noopener" target="_blank">
@ -148,7 +153,7 @@ var video_data = {
<p id="engagement"><%= translate(locale, "Engagement: ") %><%= engagement.round(2) %>%</p>
<% if video.allowed_regions.size != REGIONS.size %>
<p id="allowed_regions">
<% if video.allowed_regions.size < REGIONS.size / 2 %>
<% if video.allowed_regions.size < REGIONS.size // 2 %>
<%= translate(locale, "Whitelisted regions: ") %><%= video.allowed_regions.join(", ") %>
<% else %>
<%= translate(locale, "Blacklisted regions: ") %><%= (REGIONS.to_a - video.allowed_regions).join(", ") %>
@ -160,11 +165,12 @@ var video_data = {
<div class="pure-u-1 <% if params.related_videos || plid %>pure-u-lg-3-5<% else %>pure-u-md-4-5<% end %>">
<div class="h-box">
<p>
<a href="/channel/<%= video.ucid %>">
<h3><%= video.author %></h3>
</a>
</p>
<a href="/channel/<%= video.ucid %>" style="display:block;width:fit-content;width:-moz-fit-content">
<div class="channel-profile">
<img src="/ggpht<%= URI.parse(video.author_thumbnail).full_path %>">
<span><%= video.author %></span>
</div>
</a>
<% ucid = video.ucid %>
<% author = video.author %>
@ -172,11 +178,15 @@ var video_data = {
<%= rendered "components/subscribe_widget" %>
<p>
<b><%= translate(locale, "Shared `x`", video.published.to_s("%B %-d, %Y")) %></b>
<% if video.premiere_timestamp %>
<b><%= translate(locale, "Premieres `x`", video.premiere_timestamp.not_nil!.to_s("%B %-d, %R UTC")) %></b>
<% else %>
<b><%= translate(locale, "Shared `x`", video.published.to_s("%B %-d, %Y")) %></b>
<% end %>
</p>
<div>
<%= video.description %>
<%= video.description_html %>
</div>
<hr>