forked from midou/invidious

Compare commits


196 Commits

Author SHA1 Message Date
Omar Roth
81ea2bf799 Don't nest YouTube replies 2018-10-22 17:15:36 -05:00
Omar Roth
ed3d9ce540 Make channel extractor more robust 2018-10-21 21:44:20 -05:00
Omar Roth
ef95dc2380 Add fix for show playlists 2018-10-21 19:54:41 -05:00
Omar Roth
4875aa1d7e Add partial support for video duration in thumbnails 2018-10-20 20:37:55 -05:00
Omar Roth
3ee7201f5d Comma seperate comment scores 2018-10-20 13:52:06 -05:00
Omar Roth
3c634d9f66 Update styling for subscribe buttons 2018-10-20 13:51:52 -05:00
Omar Roth
94d116974b Add break between text and sub count 2018-10-19 16:20:35 -05:00
Omar Roth
5c87cf1547 Update subscribe buttons 2018-10-19 11:14:26 -05:00
Omar Roth
1cfa1f6559 Add 'paid' and 'premium' flags to API 2018-10-16 11:15:14 -05:00
Omar Roth
8b69e23471 Update CHANGELOG and bump version 2018-10-15 21:22:22 -05:00
Omar Roth
57d88ffcc8 Fix fallback for comments 2018-10-15 11:15:23 -05:00
Omar Roth
e46e6183ae Fix proxying for videos 2018-10-14 11:29:20 -05:00
Omar Roth
b49623f90f Revert "Attempt to bypass channel region locks"
This reverts commit 95c6747a3e.
2018-10-14 11:14:27 -05:00
Omar Roth
95c6747a3e Attempt to bypass channel region locks 2018-10-14 09:53:40 -05:00
Omar Roth
245d0b571f Add next page for channels with geo-blocked videos 2018-10-14 09:06:04 -05:00
Omar Roth
6e0df50a03 Remove migration points 2018-10-13 20:03:48 -05:00
Omar Roth
f88697541c Add author_thumbnail to '/api/v1/videos' 2018-10-13 20:01:58 -05:00
Omar Roth
5eefab62fd Add "show replies" and "hide replies" 2018-10-13 19:40:42 -05:00
Omar Roth
13b0526c7a Fix subscribe button when logged out 2018-10-13 19:40:24 -05:00
Omar Roth
1568a35cfb Add column to video update 2018-10-12 22:37:12 -05:00
Omar Roth
93082c0a45 Remove migration points 2018-10-12 21:28:15 -05:00
Omar Roth
1a39faee75 Add subCountText and add XHR alternative for subscribing to channels 2018-10-12 21:17:37 -05:00
Omar Roth
81b447782a Fix speed param for playlist preferences 2018-10-10 19:55:28 -05:00
Omar Roth
c87aa8671c Add fix for continuation on playlists smaller than 100 videos 2018-10-10 19:47:51 -05:00
Omar Roth
921c34aa65 Create materialized views for Google accounts 2018-10-10 16:10:58 -05:00
Omar Roth
ccc423f682 Fix 'latest only' feed 2018-10-09 18:39:19 -05:00
Omar Roth
02335f3390 Fix typo 2018-10-09 18:10:27 -05:00
Omar Roth
bcc8ba73bf Fix update_feeds job 2018-10-09 17:24:29 -05:00
Omar Roth
35e63fa3f5 Use materialized views for subscription feeds 2018-10-09 08:40:29 -05:00
Omar Roth
3fe4547f8e Update CHANGELOG and bump version 2018-10-09 08:09:04 -05:00
Omar Roth
2dbe151ceb Add speed param to playlist redirect 2018-10-09 08:08:52 -05:00
Omar Roth
e2c15468e0 Make usernames case-insensitive 2018-10-08 20:09:06 -05:00
Omar Roth
022427e20e Fix typo 2018-10-08 17:52:55 -05:00
Omar Roth
88430a6fc0 Add playlist playback support 2018-10-07 21:11:33 -05:00
Omar Roth
c72b9bea64 Add '&list' to videos shown on mix page 2018-10-06 22:22:50 -05:00
Omar Roth
80bc29f3cd Add basic handling for (almost) valid video URLs 2018-10-06 22:22:22 -05:00
Omar Roth
f7125c1204 Move watch page JS into seperate file 2018-10-06 22:20:40 -05:00
Omar Roth
6f9056fd84 Add extra handling for shortened video URLs 2018-10-06 22:19:36 -05:00
Omar Roth
3733fe8272 Redirect mixes 2018-10-06 22:18:50 -05:00
Omar Roth
98bb20abcd Add option to switch between YouTube and Reddit comments 2018-10-06 18:54:05 -05:00
Omar Roth
a4d44d3286 Fix position of [ + ] button for YouTube comments 2018-10-06 18:53:27 -05:00
Omar Roth
dc358fc7e5 Don't add channels if they've been deleted 2018-10-06 18:36:06 -05:00
Omar Roth
e14f2f2750 Prevent duplicate subscriptions when importing user data 2018-10-06 18:19:47 -05:00
Omar Roth
650b44ade2 Improve comment templating 2018-10-05 10:08:24 -05:00
Omar Roth
3830604e42 Try to speed up find_working_proxies 2018-10-03 10:38:07 -05:00
Omar Roth
f83e9e6eb9 Add config option for geo-bypass 2018-10-03 10:36:30 -05:00
Omar Roth
236358d3ad Escape search query in "next page" and "previous page" links 2018-10-02 09:08:18 -05:00
Omar Roth
43d6b65b4f Update CHANGELOG and bump version 2018-10-01 22:53:27 -05:00
Omar Roth
f8eb5ab416 Break after successful response 2018-10-01 20:02:14 -05:00
Omar Roth
ae2850215f Fix method for detecting valid info resposne 2018-10-01 19:55:47 -05:00
Omar Roth
d418f50576 Make geo-bypass more robust 2018-10-01 19:01:44 -05:00
Omar Roth
8c04768ef8 Add support for geo-bypass in '/videoplayback' 2018-09-30 20:26:28 -05:00
Omar Roth
a718d5543d Add 'lang' and 'tlang' to '/api/v1/captions' 2018-09-30 10:13:07 -05:00
Omar Roth
20130db556 Add mixes 2018-09-29 10:59:11 -05:00
Omar Roth
66f3ab0663 Update README 2018-09-29 10:11:21 -05:00
Omar Roth
1de7c0caf9 Merge pull request from flourgaz/feature/docker-compose
Add basic docker-compose cluster
2018-09-29 10:04:31 -05:00
Omar Roth
7d35b6e44f Add rel="noopener" to target="_blank" links 2018-09-29 09:56:37 -05:00
flourgaz
71a99542fe basic docker-compose cluster 2018-09-29 13:30:56 +02:00
Omar Roth
8530c1f4ec Fix typo 2018-09-28 19:44:16 -05:00
Omar Roth
29a6291957 Show info instead of empty playlist when possible 2018-09-28 09:54:45 -05:00
Omar Roth
25ba5bda62 Fix encoding of playlist index 2018-09-28 09:54:01 -05:00
Omar Roth
477c84deb1 Don't deliver new notifications for YouTube Red videos 2018-09-28 09:23:28 -05:00
Omar Roth
c2f7d3d41c Add handling for specific genre channels 2018-09-27 17:11:19 -05:00
Omar Roth
b0b5e3e982 Escape search queries 2018-09-27 17:02:59 -05:00
Omar Roth
4fb275ec6e Get more video information when possible 2018-09-26 19:47:06 -05:00
Omar Roth
f99b2cdf01 Add support for proxying comments 2018-09-26 18:44:37 -05:00
Omar Roth
5d7bd9af0f Add host language for comments 2018-09-26 10:33:08 -05:00
Omar Roth
aa819a189e Use alternate source for proxies 2018-09-25 21:07:18 -05:00
Omar Roth
2e65997447 Fix geo-bypass threads 2018-09-25 18:16:07 -05:00
Omar Roth
3e3de1890a Overhaul geo-bypass 2018-09-25 17:56:59 -05:00
Omar Roth
5b5d69a33b Add host language to YouTube requests 2018-09-25 17:55:32 -05:00
Omar Roth
1289065151 Add host language to fetch_video 2018-09-25 17:42:17 -05:00
Omar Roth
21a8df42dd Add fix for short playlist descriptions 2018-09-25 10:28:57 -05:00
Omar Roth
74b285d0f7 Add author thumbnails to playlist endpoint 2018-09-25 10:28:40 -05:00
Omar Roth
c2e72439f5 Don't add anchor for empty genre URL 2018-09-25 10:10:25 -05:00
Omar Roth
87498ae777 Update CHANGELOG 2018-09-25 09:55:14 -05:00
Omar Roth
e122d68acc 0.7.0 2018-09-24 19:48:06 -05:00
Omar Roth
b3ca3922a9 Add experimental dash support 2018-09-24 19:28:36 -05:00
Omar Roth
9a7852341d Use custom ranking for regions 2018-09-24 14:24:33 -05:00
Omar Roth
1922f6c69a Add more regions to geo-bypass 2018-09-23 19:29:47 -05:00
Omar Roth
a5e8f87a26 Add error for non-existent playlists 2018-09-23 12:32:32 -05:00
Omar Roth
1d9c6f63e1 Add better error for invalid playlists 2018-09-23 12:26:12 -05:00
Omar Roth
5bdd8327d4 Add better support for movies in search and watch page 2018-09-23 12:13:08 -05:00
Omar Roth
35ac887133 Add fix for playlists with less than 100 videos 2018-09-22 14:13:10 -05:00
Omar Roth
d886f8d1e3 Add fix for playlists with no videos 2018-09-22 14:11:01 -05:00
Omar Roth
d8b7c0750a Fix name of playlist resource 2018-09-22 11:34:29 -05:00
Omar Roth
1ab04638e3 Add 'videoCount' to playlists in search 2018-09-22 11:14:57 -05:00
Omar Roth
f80f4f2521 Add '/api/v1/channels/search' 2018-09-22 10:49:42 -05:00
Omar Roth
60038b29f1 Fix typo in user defaults 2018-09-21 11:06:35 -05:00
Omar Roth
099b711400 Add 'host' to API thumbnails 2018-09-21 10:11:04 -05:00
Omar Roth
b56d07556e Remove break point 2018-09-21 09:49:08 -05:00
Omar Roth
a68924f0ce Refactor name to ucid conversion 2018-09-21 09:40:04 -05:00
Omar Roth
51d00b16c3 Use hash instead of btree for channel_videos_ucid_idx 2018-09-21 08:52:27 -05:00
Omar Roth
bead12d6d0 Add styling for commits to CHANGELOG 2018-09-20 17:50:47 -05:00
Omar Roth
1703788f4b Revert "Replace ytimg with local thumbnails"
This reverts commit 95e2d8f1b7.
2018-09-20 11:05:25 -05:00
Omar Roth
6e092026d2 Fix for Reddit timezone 2018-09-20 11:04:36 -05:00
Omar Roth
95e2d8f1b7 Replace ytimg with local thumbnails 2018-09-20 10:35:49 -05:00
Omar Roth
abb15b82e6 Don't discard search results with no metadata 2018-09-20 10:24:27 -05:00
Omar Roth
687013c1b9 Add minor convenience features to search 2018-09-20 10:16:10 -05:00
Omar Roth
cefb5d679f Add label to HLS 2018-09-20 09:45:49 -05:00
Omar Roth
62380933b2 Add support for playlists and channels in search 2018-09-20 09:36:09 -05:00
Omar Roth
1627cfc2fa Update changelog 2018-09-19 19:27:08 -05:00
Omar Roth
82cc407629 Add CHANGELOG.md 2018-09-19 19:25:31 -05:00
Omar Roth
bac66c798f Remove debug info 2018-09-19 15:58:00 -05:00
Omar Roth
2c6f8022e6 Fix comments where link has no host 2018-09-19 15:25:49 -05:00
Omar Roth
fe5286a210 Add 'maxres' to video thumbnails 2018-09-19 15:24:41 -05:00
Omar Roth
bb604c8925 Add movies to search results 2018-09-19 15:24:19 -05:00
Omar Roth
c166f46b7c Add caption URL to video info 2018-09-19 14:08:59 -05:00
Omar Roth
4c8bb3b293 Fix typo 2018-09-19 10:39:07 -05:00
Omar Roth
0f5b93e394 Add 'authorId' to /api/v1/trending and /api/v1/top 2018-09-19 10:37:00 -05:00
Omar Roth
54f9941c7b Add alias '/api/v1/channels/videos/:ucid' 2018-09-18 10:47:22 -05:00
Omar Roth
8500a62462 Add /api/v1/insights 2018-09-17 20:08:26 -05:00
Omar Roth
53c8b5ff2e Minor refactor 2018-09-17 20:07:32 -05:00
Omar Roth
64cc0362fb Add fix for playlists with no views 2018-09-17 19:21:13 -05:00
Omar Roth
35bee987f6 Proxy profile pictures 2018-09-17 18:39:28 -05:00
Omar Roth
bd5ec2f2f3 Add playlist RSS 2018-09-17 18:13:24 -05:00
Omar Roth
296771809a Refactor protocol buffers 2018-09-17 16:56:28 -05:00
Omar Roth
83ba4e2a4c Fix truncated thumbnails 2018-09-17 14:48:02 -05:00
Omar Roth
6cb834a18d Add support for 304 in thumbnails 2018-09-17 09:38:52 -05:00
Omar Roth
0a4e9e6252 Properly filter user's subscriptions in search 2018-09-16 22:14:51 -05:00
Omar Roth
9619d3f1bc Fix channel refresh 2018-09-16 21:44:24 -05:00
Omar Roth
f39ed3d145 Add subscriptions search filter 2018-09-16 21:28:00 -05:00
Omar Roth
f38aac851e Fix full channel refresh 2018-09-16 20:32:39 -05:00
Omar Roth
b6adeb80e6 Fix player margin 2018-09-15 13:04:13 -05:00
Omar Roth
c74cc1123f Maintain aspect ratio even when JS is disabled 2018-09-15 12:15:39 -05:00
Omar Roth
0e1b5d7cdd Add fix for dash sequences 2018-09-15 10:25:43 -05:00
Omar Roth
d2bbf9d33c Fix dash parsing for video info 2018-09-15 08:56:47 -05:00
Omar Roth
3ccee120d3 Proxy thumbnails for related videos 2018-09-15 08:20:43 -05:00
Omar Roth
6753294ee1 Fix poster resize 2018-09-14 22:38:53 -05:00
Omar Roth
f9881ebaab Update videojs-share.css 2018-09-14 21:49:05 -05:00
Omar Roth
429a4b2dec Proxy thumbnails 2018-09-14 21:24:28 -05:00
Omar Roth
4287c0d96a Fix related video bar for users that aren't logged in 2018-09-14 20:10:13 -05:00
Omar Roth
5cd137d808 Refactor signature extractor 2018-09-14 19:50:11 -05:00
Omar Roth
62ae836565 Remove 'less' button in playlist descriptions 2018-09-13 21:00:39 -05:00
Omar Roth
b7acdfad24 Fix typo 2018-09-13 20:27:50 -05:00
Omar Roth
d3eadccd51 Add 'publishedText' to API endpoints 2018-09-13 20:26:05 -05:00
Omar Roth
2232bc0495 Use escaped newlines instead of graves 2018-09-13 18:12:19 -05:00
Omar Roth
f7ca81c384 Add support for channel search 2018-09-13 17:47:31 -05:00
Omar Roth
d4ee786cab Add support for comments under controversial videos 2018-09-13 16:09:14 -05:00
Omar Roth
a54668688b Add support for dashmpd within video info 2018-09-12 22:31:47 -05:00
Omar Roth
89bda1d3db Remove migration points 2018-09-09 21:58:22 -05:00
Omar Roth
e0ee1c3d79 Shrink size of template gutters 2018-09-09 14:50:24 -05:00
Omar Roth
5b2c228bb6 Add 'license' 2018-09-09 14:47:26 -05:00
Omar Roth
ffab3ee79f Shrink size between video requests 2018-09-09 14:41:29 -05:00
Omar Roth
dc6cc028c5 Remove migration point 2018-09-09 14:34:16 -05:00
Omar Roth
c1f17f2f82 Show quality selector even if only one source 2018-09-09 14:23:37 -05:00
Omar Roth
1c8bd671d8 Fix link redirect for YouTube comments 2018-09-09 09:18:31 -05:00
Omar Roth
133b72f9cf Add support for genre channels that don't end with " - Topic" 2018-09-09 08:53:04 -05:00
Omar Roth
8c45694ce5 Escape comment text 2018-09-09 07:40:12 -05:00
Omar Roth
bd820b9b48 Update videojs-share.js 2018-09-07 15:55:11 -05:00
Omar Roth
47e94fedc6 Fix signature extraction 2018-09-07 15:52:46 -05:00
Omar Roth
aff2083529 Fix missing 'end' 2018-09-06 18:18:36 -05:00
Omar Roth
1eae76fc15 Add fix for empty descriptions 2018-09-06 16:50:12 -05:00
Omar Roth
cf63c825d4 Add fix for shortened youtu.be links in comments 2018-09-06 16:45:15 -05:00
Omar Roth
446d8569a4 Bump version to match tag 2018-09-06 10:54:12 -05:00
Omar Roth
454b1662b7 Add format=json for reddit comments 2018-09-06 10:19:28 -05:00
Omar Roth
3ec684ae71 Host assets locally 2018-09-06 09:59:17 -05:00
Omar Roth
b17d3d1e51 Bump number of videos in channel resources to 60 2018-09-06 08:43:22 -05:00
Omar Roth
d81a803618 Add /user/:user/videos 2018-09-05 23:12:11 -05:00
Omar Roth
e6d2166bac Add X-XSS-Protection and X-Content-Type-Options 2018-09-05 21:51:40 -05:00
Omar Roth
e590d39aa9 Revert "Add header check for CSRF"
This reverts commit a749ac73ac.
2018-09-05 21:45:14 -05:00
Omar Roth
4f91854bd3 Fix typo 2018-09-05 21:10:32 -05:00
Omar Roth
29a21860ae Strip leading slashes from referers 2018-09-05 21:07:19 -05:00
Omar Roth
96234e509f Add X-Frame-Options, X-XSS-Protection, and X-Content-Type-Options 2018-09-05 21:06:30 -05:00
Omar Roth
a749ac73ac Add header check for CSRF 2018-09-05 20:32:01 -05:00
Omar Roth
62f023c50f Add 'https_only' default 2018-09-05 20:31:08 -05:00
Omar Roth
29dc114f7a Bump supported Crystal version 2018-09-05 20:30:44 -05:00
Omar Roth
023066b452 Revert "Remove 'codecs' from source types"
This reverts commit 93e12d94fc.
2018-09-05 10:49:40 -05:00
Omar Roth
93e12d94fc Remove 'codecs' from source types 2018-09-05 10:38:01 -05:00
Omar Roth
044a57ef34 Fix video count for channels 2018-09-04 23:01:46 -05:00
Omar Roth
bc49c7d181 Add author info to API endpoints 2018-09-04 21:35:25 -05:00
Omar Roth
5632e58636 Add support for genre channels 2018-09-04 21:04:40 -05:00
Omar Roth
e1bf7fa6cc Add descriptionHtml to playlists 2018-09-04 19:27:10 -05:00
Omar Roth
40028e1462 Update SQL and remove migration points 2018-09-04 09:57:40 -05:00
Omar Roth
53732cdcab Add genre URLs 2018-09-04 09:50:19 -05:00
Omar Roth
2ac89d5e00 Update project synopsis 2018-09-04 09:22:10 -05:00
Omar Roth
98d71ca8e7 Add support for /c/ URLs 2018-09-04 09:13:58 -05:00
Omar Roth
0f2f273335 Don't leak referers 2018-09-04 09:01:43 -05:00
Omar Roth
000cfd4834 Don't show comments when commentCount is 0 2018-09-04 08:52:39 -05:00
Omar Roth
25c3ee034e Minor refactor 2018-09-04 08:52:30 -05:00
Omar Roth
89d3587861 Fix typo 2018-09-03 22:20:20 -05:00
Omar Roth
0d8f036bf1 Replace YouTube links 2018-09-03 22:15:47 -05:00
Omar Roth
81c520e0dd Add info to README 2018-09-03 21:42:49 -05:00
Omar Roth
c0bda13965 Fix view_count_text 2018-08-31 22:53:41 -05:00
Omar Roth
3b1df75061 Merge pull request from dimqua/patch-1
Change the color of progressBar marker
2018-08-31 18:20:30 -05:00
dimqua
eda5beaed5 Change the color of progressBar marker 2018-08-31 16:49:02 +03:00
Omar Roth
4022670cb1 Fix typo in video params 2018-08-30 21:04:41 -05:00
Omar Roth
7b135a6d0c Add commentCount for videos with no comments 2018-08-30 21:03:22 -05:00
Omar Roth
bdaa8a06fd Fix typo 2018-08-30 20:25:43 -05:00
Omar Roth
b3f9059452 Add comment formatting 2018-08-30 20:06:08 -05:00
Omar Roth
917d220623 Fix search filters 2018-08-30 17:42:30 -05:00
Omar Roth
ed8ddbc07d Add seperator when notifications > 0 2018-08-30 16:52:29 -05:00
Omar Roth
cb01b50fbb Add option to hide related videos 2018-08-30 16:49:38 -05:00
Omar Roth
6b3c9d23d0 Fix referer on 404 2018-08-30 08:14:59 -05:00
Omar Roth
3839013a37 Use '/video' page for channel endpoint 2018-08-28 20:29:08 -05:00
Omar Roth
9d5dddab29 Fix signature extraction 2018-08-28 09:51:59 -05:00
61 changed files with 5543 additions and 935 deletions

187
CHANGELOG.md Normal file

@@ -0,0 +1,187 @@
# 0.10.0 (2018-10-16)
## Week 10: Subscriptions
This week I'm happy to announce that subscriptions have been drastically sped up with 35e63fa. As I mentioned last week, this essentially "caches" a user's feed, meaning that operations that previously took 20 seconds or timed out can now load in under a second. I'd take a look at [#173](https://github.com/omarroth/invidious/issues/173) for a sample benchmark. Previously, the features that made Invidious's feed so useful, such as filtering by unseen videos and by author, would take too long to load and so would time out instead. I'm very happy that this has been fixed, and folks can get back to using these features.
Among the smaller features added this week is [#118](https://github.com/omarroth/invidious/issues/118), which adds, in my opinion, some very attractive subscribe and unsubscribe buttons. I think it's also a bit of a functional improvement, since it no longer requires a user to reload the page in order to subscribe to or unsubscribe from a channel, and it also gives the opportunity to put the channel's sub count on display.
An option to swap between Reddit and YouTube comments without a page reload has been added with 5eefab6, bringing the site somewhat closer in functionality to the popular [AlienTube](https://github.com/xlexi/alientube) extension, on which it is based (although the extension unfortunately now appears to be fragmented).
As always, there are a couple of smaller improvements this week, including some minor fixes for geo-bypass with e46e618 and [`245d0b5`](https://github.com/omarroth/invidious/245d0b5), playlist preferences with [`81b4477`](https://github.com/omarroth/invidious/81b4477), and YouTube comments with [`02335f3`](https://github.com/omarroth/invidious/02335f3).
This coming week I'd also recommend keeping an eye on the excellent [FreeTube](https://github.com/FreeTubeApp/FreeTube), which has a new release on the way. I've been very lucky to work with [**@PrestonN**](https://github.com/PrestonN) for the past few weeks to improve the Invidious API, and I'm quite looking forward to the new release.
That's all for this week folks, thank you all again for your continued interest and support.
# 0.9.0 (2018-10-08)
## Week 9: Playlists
Not as much to announce this week, but I'm still quite happy to share a couple of things, namely:
Playback support for playlists has finally been added with [`88430a6`](https://github.com/omarroth/invidious/88430a6). You can now view playlists with the `&list=` query param, as you would on YouTube. You can also view mixes with the mentioned `&list=`, although they require some extra handling that I would like to add in the coming week, along with playlist looping and shuffle. I think playback support has been a roadblock for more exciting features such as [#114](https://github.com/omarroth/invidious/issues/114), and I look forward to improving the experience.
Comments have had a bit of a cosmetic upgrade with [#132](https://github.com/omarroth/invidious/issues/132), which I think helps better distinguish between Reddit and YouTube comments, as it makes them appear similarly to their respective sites. You can also now switch between YouTube and Reddit comments with a push of a button, which I think is quite an improvement, especially for newer or less popular videos with fewer comments.
I've had a small breakthrough in speeding up users' subscription feeds with PostgreSQL's [materialized views](https://www.postgresql.org/docs/current/static/rules-materializedviews.html). Without going into too much detail, materialized views essentially cache the result of a query, making it possible to run resource-intensive queries once, rather than every time a user visits their feed. In the coming week I hope to push this out to users, and hopefully close [#173](https://github.com/omarroth/invidious/issues/173).
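For folks curious what this looks like in practice, below is a minimal sketch of the idea, assuming a PostgreSQL database named `invidious`; the view name and the feed query are hypothetical and not Invidious's real schema (only `channel_videos` and `ucid` appear elsewhere in this changeset).
```bash
# Minimal sketch of caching a feed with a materialized view; the names below are assumptions.
psql invidious <<'SQL'
-- Run the expensive feed query once and store the result...
CREATE MATERIALIZED VIEW example_subscription_feed AS
  SELECT cv.*
  FROM channel_videos cv
  WHERE cv.ucid = ANY ('{UCexample1,UCexample2}');  -- placeholder channel IDs

-- ...then serve reads from the cached result and refresh it on a schedule.
REFRESH MATERIALIZED VIEW example_subscription_feed;
SQL
```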
I haven't had as much time to work on the project this week, but I'm quite happy to have added some new features. Have a great week everyone.
# 0.8.0 (2018-10-02)
## Week 8: Mixes
Hello again!
Mixes have been added with [`20130db`](https://github.com/omarroth/invidious/20130db), which makes it easy to create a playlist of related content. See [#188](https://github.com/omarroth/invidious/issues/188) for more info on how they work. Currently, they return the first 50 videos rather than a continuous feed to avoid tracking by Google/YouTube, which I think is a good trade-off between usability and privacy, and I hope other folks agree. You can create mixes by adding `RD` to the beginning of a video ID; an example is provided [here](https://www.invidio.us/mix?list=RDYE7VzlLtp-4) based on Big Buck Bunny. I've been quite happy with the results returned for the mixes I've tried, and they are not limited to music, which I think is a big plus. To emulate the continuous feed many are used to, using the last video of each mix as a new 'seed' has worked well for me. In the coming week I'd like to add playback support in the player to make listening to these easier.
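As a concrete illustration of the `RD` convention, this tiny sketch builds the example mix URL above from its video ID (the `/mix?list=` form is taken directly from that link):
```bash
# Build a mix URL by prefixing a video ID with "RD" (Big Buck Bunny example from above).
VIDEO_ID="YE7VzlLtp-4"
echo "https://www.invidio.us/mix?list=RD${VIDEO_ID}"
# -> https://www.invidio.us/mix?list=RDYE7VzlLtp-4
```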
A very big thanks to [**@flourgaz**](https://github.com/flourgaz) for Docker support with [#186](https://github.com/omarroth/invidious/pull/186). This is an enormous improvement in portability for the project, and opens the door for Heroku support (see [#162](https://github.com/omarroth/invidious/issues/162)), and seamless support on Windows. For most users, it should be as easy as running `docker-compose up`.
I've spent quite a bit of time this past week improving support for geo-bypass (see [#92](https://github.com/omarroth/invidious/issues/92)), and am happy to note that Invidious has been able to proxy ~50% of the geo-restricted videos I've tried. In addition, you can now watch geo-restricted videos if you have `dash` enabled as your `preferred quality`, for more details see [#34](https://github.com/omarroth/invidious/issues/34) and [#185](https://github.com/omarroth/invidious/issues/185), or last week's update. For folks interested in replicating these results for themselves, I'd take a look [here](https://gist.github.com/omarroth/3ce0f276c43e0c4b13e7d9cd35524688) for the script used, and [here](https://gist.github.com/omarroth/beffc4a76a7b82a422e1b36a571878ef) for a list of videos restricted in the US.
1080p has seen a fairly smooth roll-out, although there have been a couple issues reported, mainly [#193](https://github.com/omarroth/invidious/issues/193), which is likely an issue in the player. I've also encountered a couple other issues myself that I would like to investigate. Although none are major, I'd like to keep 1080p opt-in for registered users another week to better address these issues.
Have an excellent week everyone.
# 0.7.0 (2018-09-25)
## Week 7: 1080p and Search Types
Hello again everyone! I've got quite a few announcements this week:
Experimental 1080p support has been added with [`b3ca392`](https://github.com/omarroth/invidious/b3ca392), and can be enabled by going to preferences and changing `preferred video quality` to `dash`. You can find more details [here](https://github.com/omarroth/invidious/issues/34#issuecomment-424171888). Currently quality and speed controls have not yet been integrated into the player, but I'd still appreciate feedback, mainly on any issues with buffering or DASH playback. I hope to integrate 1080p support into the player and push support site-wide in the coming weeks.
You can now filter content types in search with the `type:TYPE` filter. Supported content types are `playlist`, `channel`, and `video`. More info is available [here](https://github.com/omarroth/invidious/issues/126#issuecomment-423823148). I think this is quite an improvement in usability and I hope others find the same.
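For illustration, a search using the new filter might look like the sketch below; the `/search?q=` URL shape is an assumption here, only the `type:` syntax itself is described above.
```bash
# Hypothetical examples of the type: filter (the /search?q= URL shape is assumed).
curl "https://www.invidio.us/search?q=type:channel+big+buck+bunny"
curl "https://www.invidio.us/search?q=type:playlist+big+buck+bunny"
```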
A [CHANGELOG](https://github.com/omarroth/invidious/blob/master/CHANGELOG.md) has been added to the repository, so folks will now receive a copy of all these updates when cloning. I think this is an improvement in hosting the project, as it is no longer tied to the `/releases` tab on Github or the posts on Patreon.
Recently, users have been reporting 504s when attempting to access their subscriptions, which is tracked in [#173](https://github.com/omarroth/invidious/issues/173). This is most likely caused by an uptick in usage, which I am absolutely grateful for, but unfortunately has resulted in an increase in costs for hosting the site, which is why I will be bumping my goal on Patreon from $60 to $80. I would appreciate any feedback on how subscriptions could be improved.
Other minor improvements include:
- Additional regions added to bypass geo-block with [`9a78523`](https://github.com/omarroth/invidious/9a78523)
- Fix for playlists containing less than 100 videos (previously shown as empty) with [`35ac887`](https://github.com/omarroth/invidious/35ac887)
- Fix for `published` date for Reddit comments (previously showing negative seconds) with [`6e09202`](https://github.com/omarroth/invidious/6e09202)
Thank you everyone for your support!
# 0.6.0 (2018-09-18)
## Week 6: Filters and Thumbnails
Hello again! This week I'm happy to mention a couple new features to search as well as some miscellaneous usability improvements.
You can now constrain your search query to a specific channel with the `channel:CHANNEL` filter (see [#165](https://github.com/omarroth/invidious/issues/165) for more details). Unfortunately, other search filters combined with channel search are not yet supported. I hope to add support for them in the coming weeks.
You can also now search only your subscriptions by adding `subscriptions:true` to your query (see [#30](https://github.com/omarroth/invidious/issues/30) for more details). It's not quite ready for widespread use, but I would appreciate feedback as the site is updated to fully support it. Other search filters are not yet supported with `subscriptions:true`, but I hope to add more functionality to this as well.
With [#153](https://github.com/omarroth/invidious/issues/153) and [#168](https://github.com/omarroth/invidious/issues/168) all images on the site are now proxied through Invidious. In addition to offering the user more protection from Google's eyes, it also allows the site to automatically pick out the highest resolution thumbnail for videos. I think this is quite a large aesthetic improvement and I hope others will find the same.
As a smaller improvement to the site, you can also now view RSS feeds for playlists with [#113](https://github.com/omarroth/invidious/issues/113).
These updates are also now listed under Github's [releases](https://github.com/omarroth/invidious/releases). I'm also planning on adding them as a `CHANGELOG.md` in the repository itself so people can receive a copy with the project's source.
That's all for this week. Thank you everyone for your support!
# 0.5.0 (2018-09-11)
## Week 5: Privacy and Security
I hope everyone had a good weekend! This past week I've been fixing some issues that have been brought to my attention to help better protect users and help them keep their anonymity.
An issue with open referers has been fixed with [`29a2186`](https://github.com/omarroth/invidious/29a2186), which prevents potential redirects to external sites on actions such as login or modifying preferences.
Additionally, X-XSS-Protection, X-Content-Type-Options, and X-Frame-Options headers have been added with [`96234e5`](https://github.com/omarroth/invidious/96234e5), which should keep users safer while using the site.
A potential XSS vector has also been fixed in YouTube comments with [`8c45694`](https://github.com/omarroth/invidious/8c45694).
All the above vulnerabilities were brought to my attention by someone who wishes to remain anonymous, but I would like to say again here how thankful I am. If anyone else would like to get in touch please feel free to email me at omarroth@hotmail.com or omarroth@protonmail.com.
This week a couple of changes have also been made to better protect users' privacy.
All CSS and JS assets are now served locally with [`3ec684a`](https://github.com/omarroth/invidious/3ec684a), which means users no longer need to whitelist unpkg.com. Although I personally have encountered few issues, I understand that many folks would like to keep their browsing activity contained to as few parties as possible. In the coming week I also hope to proxy YouTube images, so that no user data is sent to Google.
YouTube links in comments should now redirect properly to the Invidious equivalent with [`1c8bd67`](https://github.com/omarroth/invidious/1c8bd67) and [`cf63c82`](https://github.com/omarroth/invidious/cf63c82), so users can more easily evade Google tracking.
I'm also happy to mention a couple quality of life features this week:
Invidious now shows a video's "license" if provided; see [#159](https://github.com/omarroth/invidious/issues/159) for more details. You can also search for videos licensed under Creative Commons with "QUERY features:creative_commons".
Videos with only one source will always display the cog for changing quality, so that users can see what quality is currently playing. See [#158](https://github.com/omarroth/invidious/issues/158) for more details.
Folks have also probably noticed that the gutters on either side of the screen have been shrunk down quite significantly, so that more of the screen is filled with content. Hopefully this can be improved even more in the coming weeks.
"Music", "Sports", and "Popular on YouTube" channels now properly display their videos. You can subscribe to these channels just as you would normally.
This coming week I'm planning on spending time with my family, so I unfortunately may not be as responsive. I do still hope to add some smaller features next week, however, and to continue development soon after.
Thank you everyone again for your support.
# 0.4.0 (2018-09-06)
## Week 4: Genre Channels
Hello! I hope everyone enjoyed their weekend. Without further ado:
Just today genre channels have been added with [#119](https://github.com/omarroth/invidious/issues/119). More information on genre channels is available [here](https://support.google.com/youtube/answer/2579942). You can subscribe to them as you would normally, and view them as RSS feeds. I think they offer an interesting alternative way to find new content and I hope people find them useful.
This past week folks have started reporting 504s on their subscription page (see [#144](https://github.com/omarroth/invidious/issues/144) for more details). Upgrading the database server appeared to fix the issue, as well as providing a smoother experience across the site. Unfortunately, that means I will be increasing the goal from $50 to $60 in order to meet the increased hosting costs.
With [#134](https://github.com/omarroth/invidious/issues/134), comments are now formatted correctly, providing support for bold, italics, and links in comments. I think this improvement makes them much easier to read, and I hope others find the same. Also to note is that links in both comments and the video description now no longer contain any of Google's tracking with [#115](https://github.com/omarroth/invidious/issues/115).
One of the major use cases for Invidious is as a stripped-down version of YouTube. In line with that, I'm happy to announce that you can now hide related videos if you're logged in, for users that prefer an even more lightweight experience.
Finally, I'm pleased to announce that Invidious has hit 100 stars on GitHub. I am very happy that Invidious has proven to be useful to so many people, and I can't say how grateful I am to everyone for their continued support.
Enjoy the rest of your week everyone!
# 0.3.0 (2018-09-06)
## Week 3: Quality of Life
Hello everyone! This week I've been working on some smaller features that will hopefully make the site more functional.
Search filters have been added with [#126](https://github.com/omarroth/invidious/issues/126). You can now specify 'sort', 'date', 'duration', and 'features' within your query using the 'operator:value' syntax. I'd recommend taking a look [here](https://github.com/omarroth/invidious/blob/master/src/invidious/search.cr#L33-L114) for a list of supported options and at [#126](https://github.com/omarroth/invidious/issues/126) for some examples. This also opens the door for features such as [#30](https://github.com/omarroth/invidious/issues/30), which can be implemented as filters. I think advanced search is a major area where Invidious can improve on YouTube, and I hope to add more features soon!
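As a rough illustration of the syntax, a combined query might look like the sketch below; the operator names come from the text above, but the specific values and the `/search?q=` URL shape are assumptions, so check the linked source for the real list.
```bash
# Illustrative only: operator:value filters combined in one query (values are assumptions).
curl "https://www.invidio.us/search?q=big+buck+bunny+duration:short+features:hd"
```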
This week a more advanced system for viewing fallback comments has been added (see [#84](https://github.com/omarroth/invidious/issues/84) for more details). You can now specify a comment fallback in your preferences, which Invidious will use. If, for example, no Reddit comments are available for a given video, it can choose to fall back on YouTube comments. This also makes it possible to turn comments off completely for users that prefer a more streamlined experience.
With [#98](https://github.com/omarroth/invidious/issues/98), it is now possible for users to specify preferences without creating an account. You can now change speed, volume, subtitles, autoplay, loop, and quality using query parameters. See the issue above for more details and several examples.
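A hypothetical example of what such a URL might look like is sketched below; `speed` is the only parameter name confirmed by the commit log in this changeset, and the rest are guesses for illustration (the linked issue has the real examples).
```bash
# Hypothetical: ad-hoc player preferences via query parameters.
# Only "speed" is confirmed above; "autoplay" and "quality" are assumed names.
curl "https://www.invidio.us/watch?v=YE7VzlLtp-4&speed=1.5&autoplay=1&quality=dash"
```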
I'd also like to announce that I've set up an account on [Liberapay](https://liberapay.com/omarroth), for patrons that prefer a privacy-friendly alternative to Patreon. Liberapay also does not take any percentage of donations, so I'd recommend donating something to Liberapay itself for their hard work. Go check it out!
[Two weeks ago](https://github.com/omarroth/invidious/releases/tag/0.1.0) I mentioned adding 1080p support into the player. Currently, the only thing blocking is [#207](https://github.com/videojs/http-streaming/pull/207) in the excellent [http-streaming](https://github.com/videojs/http-streaming) library. I hope to work with the videojs team to merge it soon and finally implement 1080p support!
That's all for this week, thank you again everyone for your support!
# 0.2.0 (2018-09-06)
## Week 2: Toward Playlists
Sorry for the late update! Not as much to announce this week, but still a couple things of note:
I'm happy to announce that a playlists page and API endpoint have been added, so you can now view playlists. Currently, you cannot watch playlists through the player, but I hope to add that in the coming week, as well as functionality to add and modify playlists. There is a good conversation on [#114](https://github.com/omarroth/invidious/issues/114) about giving playlists even more functionality, which I think is interesting and would appreciate feedback on.
As an update to the Invidious API announcement last week, I've been working with [**@PrestonN**](https://github.com/PrestonN), the developer of [FreeTube](https://github.com/FreeTubeApp/FreeTube), to help migrate his project to the Invidious API. Because of its increasing popularity, he has had trouble staying under the quota set by YouTube's API. I hope to improve the API to meet his and others' needs, and I'd recommend folks keep an eye on his excellent project! There is a good discussion with his thoughts [here](https://github.com/FreeTubeApp/FreeTube/issues/100).
A couple of miscellaneous features and bugfixes:
- You can now login to Invidious simultaneously from multiple devices - [#109](https://github.com/omarroth/invidious/issues/109)
- Added a note for scheduled livestreams - [#124](https://github.com/omarroth/invidious/issues/124)
- Changed YouTube comment header to "View x comments" - [#120](https://github.com/omarroth/invidious/issues/120)
Enjoy your week everyone!
# 0.1.0 (2018-09-06)
## Week 1: Invidious API and Geo-Bypass
Hello everyone! This past week there have been quite a few things worthy of mention:
I'm happy to announce the [Invidious Developer API](https://github.com/omarroth/invidious/wiki/API). The Invidious API does not use any of the official YouTube APIs, and instead crawls the site to provide a JSON interface for other developers to use. It's still under development but is already powering [CloudTube](https://github.com/cloudrac3r/cadencegq). The API currently does not have a quota (unlike YouTube's), which I hope to maintain thanks to continued support from my patrons. Hopefully other developers find it useful, and I hope to continue to improve it so it can better serve the community.
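As a quick illustration of the JSON interface, the sketch below hits two endpoints that appear elsewhere in this changeset (`/api/v1/trending` and `/api/v1/videos`); the per-video path shape and the public invidio.us instance are assumptions here.
```bash
# The endpoints below appear in the commit log of this changeset; field names are not guaranteed.
curl "https://www.invidio.us/api/v1/trending" | head -c 300; echo
curl "https://www.invidio.us/api/v1/videos/YE7VzlLtp-4"   # video ID reused from the mix example above
```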
Just today partial support for bypassing geo-restrictions has been added with [fada57a](https://github.com/omarroth/invidious/commit/fada57a307d66d696d9286fc943c579a3fd22de6). If a video is unblocked in one of: United States, Canada, Germany, France, Japan, Russia, or United Kingdom, then Invidious will be able to serve video info. Currently you will not yet be able to access the video files themselves, but in the coming week I hope to proxy videos so that users can enjoy content across borders.
Support for generating DASH manifests has been fixed; in the coming week I hope to integrate this functionality into the watch page, so users can view videos in 1080p and above.
Thank you everyone for your continued interest and support!

@@ -1,6 +1,25 @@
# Invidious
## Invidious is what YouTube should be
## Invidious is an alternative front-end to YouTube
- Audio-only (and no need to keep window open on mobile)
- [Open-source](https://github.com/omarroth/invidious) (AGPLv3 licensed)
- No ads
- No need to create a Google account to save subscriptions
- Lightweight (homepage is ~4 KB compressed)
- Tools for managing subscriptions:
- Only show unseen videos
- Only show latest (or latest unseen) video from each channel
- Delivers notifications from all subscribed channels
- Automatically redirect homepage to feed
- Import subscriptions from YouTube
- Dark mode
- Embed support
- Set default player options (speed, quality, autoplay, loop)
- Does not require JS to play videos
- Support for Reddit comments in place of YT comments
- Import/Export subscriptions, watch history, preference
- Does not use any of the official YouTube APIs
Liberapay: https://liberapay.com/omarroth
Patreon: https://patreon.com/omarroth
@@ -9,6 +28,29 @@ BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk
## Installation
### Docker:
#### Build and start cluster:
```bash
$ docker-compose up
```
And visit `localhost:3000` in your browser.
#### Rebuild cluster:
```bash
$ docker-compose build
```
#### Delete data and rebuild:
```bash
$ docker volume rm invidious_postgresdata
$ docker-compose build
```
### Installing [Crystal](https://github.com/crystal-lang/crystal):
#### On Arch:
@@ -55,8 +97,21 @@ $ sudo pacman -S imagemagick librsvg
## Usage:
```bash
$ crystal build src/invidious.cr
$ ./invidious
$ crystal build src/invidious.cr --release
$ ./invidious -h
Usage: invidious [arguments]
-b HOST, --bind HOST Host to bind (defaults to 0.0.0.0)
-p PORT, --port PORT Port to listen for connections (defaults to 3000)
-s, --ssl Enables SSL
--ssl-key-file FILE SSL key file
--ssl-cert-file FILE SSL certificate file
-h, --help Shows this help
-t THREADS, --crawl-threads=THREADS
Number of threads for crawling (default: 1)
-c THREADS, --channel-threads=THREADS
Number of threads for refreshing channels (default: 1)
-v THREADS, --video-threads=THREADS
Number of threads for refreshing videos (default: 1)
```
Or for development:

@@ -17,6 +17,44 @@ div {
animation: spin 2s linear infinite;
}
.playlist-restricted {
height: 20em;
padding-right: 10px;
}
a.pure-button-primary {
background-color: #a0a0a0;
color: rgba(35, 35, 35, 1);
}
a.pure-button-primary:hover {
background-color: rgba(0, 182, 240, 1);
color: #fff;
}
div.thumbnail {
position: relative;
}
img.thumbnail {
width: 100%;
left: 0;
top: 0;
}
.length {
z-index: 100;
position: absolute;
background-color: rgba(35, 35, 35, 0.75);
color: #fff;
border-radius: 2px;
padding: 2px;
font-size: 16px;
font-family: sans-serif;
right: 0.5em;
bottom: -0.5em;
}
/*
* Navbar
*/
@@ -171,6 +209,11 @@ div {
background-color: rgba(0, 182, 240, 1);
}
/* ProgressBar marker */
.vjs-marker {
background-color: rgba(255, 255, 255, 1);
}
/* Big "Play" Button */
.video-js .vjs-big-play-button {
background-color: rgba(35, 35, 35, 0.5);
@@ -191,3 +234,23 @@ div {
padding-left: 0px;
padding-right: 0px;
}
.video-js .vjs-poster {
background-size: cover;
object-fit: cover;
}
#player {
position: absolute;
left: 0;
top: 0;
height: 100%;
}
#player-container {
position: relative;
padding-bottom: 56.25%;
margin-left: 1em;
margin-right: 1em;
height: 0;
}

7
assets/css/grids-responsive-min.css vendored Normal file

File diff suppressed because one or more lines are too long

11
assets/css/ionicons.min.css vendored Normal file

File diff suppressed because one or more lines are too long

11
assets/css/pure-min.css vendored Normal file

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
.vjs-quality-selector .vjs-menu-button{margin:0;padding:0;height:100%;width:100%}.vjs-quality-selector .vjs-icon-placeholder{font-family:'VideoJS';font-weight:normal;font-style:normal}.vjs-quality-selector .vjs-icon-placeholder:before{content:'\f110'}.vjs-quality-changing .vjs-big-play-button{display:none}.vjs-quality-changing .vjs-control-bar{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;visibility:visible;opacity:1}

1
assets/css/video-js.min.css vendored Normal file

File diff suppressed because one or more lines are too long

@@ -0,0 +1,7 @@
/**
* videojs-share
* @version 2.0.1
* @copyright 2018 Mikhail Khazov <mkhazov.work@gmail.com>
* @license MIT
*/
.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-modal-dialog-content{display:flex;align-items:center;padding:0;background-image:linear-gradient(to bottom, rgba(0,0,0,0.77), rgba(0,0,0,0.75))}.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button{position:absolute;right:0;top:5px;width:30px;height:30px;color:#fff;cursor:pointer;opacity:0.9;transition:opacity 0.25s ease-out}.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button:before{content:'×';font-size:20px;line-height:15px}.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button:hover{opacity:1}.video-js .vjs-share{display:flex;flex-direction:column;justify-content:space-around;align-items:center;width:100%;height:100%;max-height:400px}.video-js .vjs-share__top,.video-js .vjs-share__middle,.video-js .vjs-share__bottom{display:flex}.video-js .vjs-share__top,.video-js .vjs-share__middle{flex-direction:column;justify-content:space-between}.video-js .vjs-share__middle{padding:0 25px}.video-js .vjs-share__title{align-self:center;font-size:22px;color:#fff}.video-js .vjs-share__subtitle{width:100%;margin:0 auto 12px;font-size:16px;color:#fff;opacity:0.7}.video-js .vjs-share__short-link-wrapper{position:relative;display:block;width:100%;height:40px;margin:0 auto;margin-bottom:15px;border:0;color:rgba(255,255,255,0.65);background-color:#363636;outline:none;overflow:hidden;flex-shrink:0}.video-js .vjs-share__short-link{display:block;width:100%;height:100%;padding:0 40px 0 15px;border:0;color:rgba(255,255,255,0.65);background-color:#363636;outline:none}.video-js .vjs-share__btn{position:absolute;right:0;bottom:0;height:40px;width:40px;display:flex;align-items:center;padding:0 11px;border:0;color:#fff;background-color:#2e2e2e;background-size:18px 19px;background-position:center;background-repeat:no-repeat;cursor:pointer;outline:none;transition:width 0.3s ease-out, padding 0.3s ease-out}.video-js .vjs-share__btn svg{flex-shrink:0}.video-js .vjs-share__btn span{position:relative;padding-left:10px;opacity:0;transition:opacity 0.3s ease-out}.video-js .vjs-share__btn:hover{justify-content:center;width:100%;padding:0 40px;background-image:none}.video-js .vjs-share__btn:hover span{opacity:1}.video-js .vjs-share__socials{display:flex;flex-wrap:wrap;justify-content:center;align-content:flex-start;transition:width 0.3s ease-out, height 0.3s ease-out}.video-js .vjs-share__social{display:flex;justify-content:center;align-items:center;flex-shrink:0;width:32px;height:32px;margin-right:6px;margin-bottom:6px;cursor:pointer;font-size:8px;transition:transform 0.3s ease-out, filter 0.2s ease-out;border:none;outline:none}.video-js .vjs-share__social:hover{filter:brightness(115%)}.video-js .vjs-share__social svg{width:100%;max-height:24px}.video-js .vjs-share__social_vk{background-color:#5d7294}.video-js .vjs-share__social_ok{background-color:#ed7c20}.video-js .vjs-share__social_mail{background-color:#134785}.video-js .vjs-share__social_tw{background-color:#76aaeb}.video-js .vjs-share__social_reddit{background-color:#ff4500}.video-js .vjs-share__social_fbFeed{background-color:#475995}.video-js .vjs-share__social_messenger{background-color:#0084ff}.video-js .vjs-share__social_gp{background-color:#d53f35}.video-js .vjs-share__social_linkedin{background-color:#0077b5}.video-js .vjs-share__social_viber{background-color:#766db5}.video-js .vjs-share__social_telegram{background-color:#4bb0e2}.video-js .vjs-share__social_whatsapp{background-color:#78c870}.video-js .vjs-share__bottom{justify-content:center}@media (max-height: 
220px){.video-js .vjs-share .hidden-xs{display:none}}@media (max-height: 350px){.video-js .vjs-share .hidden-sm{display:none}}@media (min-height: 400px){.video-js .vjs-share__title{margin-bottom:15px}.video-js .vjs-share__short-link-wrapper{margin-bottom:30px}}@media (min-width: 320px){.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button{right:5px;top:10px}}@media (min-width: 660px){.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button{right:20px;top:20px}.video-js .vjs-share__social{width:40px;height:40px}}

1
assets/css/videojs.markers.min.css vendored Normal file

@@ -0,0 +1 @@
.vjs-marker{position:absolute;left:0;bottom:0;opacity:1;height:100%;transition:opacity .2s ease;-webkit-transition:opacity .2s ease;-moz-transition:opacity .2s ease;z-index:100}.vjs-marker:hover{cursor:pointer;-webkit-transform:scale(1.3,1.3);-moz-transform:scale(1.3,1.3);-o-transform:scale(1.3,1.3);-ms-transform:scale(1.3,1.3);transform:scale(1.3,1.3)}.vjs-tip{visibility:hidden;display:block;opacity:.8;padding:5px;font-size:10px;position:absolute;bottom:14px;z-index:100000}.vjs-tip .vjs-tip-arrow{background:url(data:image/gif;base64,R0lGODlhCQAJAIABAAAAAAAAACH5BAEAAAEALAAAAAAJAAkAAAIRjAOnwIrcDJxvwkplPtchVQAAOw==) no-repeat top left;bottom:0;left:50%;margin-left:-4px;background-position:bottom left;position:absolute;width:9px;height:5px}.vjs-tip .vjs-tip-inner{border-radius:3px;-moz-border-radius:3px;-webkit-border-radius:3px;padding:5px 8px 4px 8px;background-color:#000;color:#fff;max-width:200px;text-align:center}.vjs-break-overlay{visibility:hidden;position:absolute;z-index:100000;top:0}.vjs-break-overlay .vjs-break-overlay-text{padding:9px;text-align:center}

BIN
assets/fonts/ionicons.eot Normal file

Binary file not shown.

2090
assets/fonts/ionicons.svg Normal file

File diff suppressed because it is too large

Image preview not shown. Size: 305 KiB

BIN
assets/fonts/ionicons.ttf Normal file

Binary file not shown.

BIN
assets/fonts/ionicons.woff Normal file

Binary file not shown.

BIN
assets/fonts/ionicons.woff2 Normal file

Binary file not shown.

29
assets/js/dash.mediaplayer.min.js vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

7
assets/js/video.min.js vendored Normal file

File diff suppressed because one or more lines are too long

@@ -0,0 +1,2 @@
/*! @name videojs-contrib-quality-levels @version 2.0.7 @license Apache-2.0 */
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t(require("video.js"),require("global/document")):"function"==typeof define&&define.amd?define(["video.js","global/document"],t):e.videojsContribQualityLevels=t(e.videojs,e.document)}(this,function(e,t){"use strict";e=e&&e.hasOwnProperty("default")?e.default:e,t=t&&t.hasOwnProperty("default")?t.default:t;var n=function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")},r=function(e,t){if(!e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!t||"object"!=typeof t&&"function"!=typeof t?e:t},i=function(i){function o(){n(this,o);var l=r(this,i.call(this)),s=l;if(e.browser.IS_IE8)for(var u in s=t.createElement("custom"),o.prototype)"constructor"!==u&&(s[u]=o.prototype[u]);return s.levels_=[],s.selectedIndex_=-1,Object.defineProperty(s,"selectedIndex",{get:function(){return s.selectedIndex_}}),Object.defineProperty(s,"length",{get:function(){return s.levels_.length}}),r(l,s)}return function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function, not "+typeof t);e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),t&&(Object.setPrototypeOf?Object.setPrototypeOf(e,t):e.__proto__=t)}(o,i),o.prototype.addQualityLevel=function(r){var i=this.getQualityLevelById(r.id);if(i)return i;var o=this.levels_.length;return i=new function r(i){n(this,r);var o=this;if(e.browser.IS_IE8)for(var l in o=t.createElement("custom"),r.prototype)"constructor"!==l&&(o[l]=r.prototype[l]);return o.id=i.id,o.label=o.id,o.width=i.width,o.height=i.height,o.bitrate=i.bandwidth,o.enabled_=i.enabled,Object.defineProperty(o,"enabled",{get:function(){return o.enabled_()},set:function(e){o.enabled_(e)}}),o}(r),""+o in this||Object.defineProperty(this,o,{get:function(){return this.levels_[o]}}),this.levels_.push(i),this.trigger({qualityLevel:i,type:"addqualitylevel"}),i},o.prototype.removeQualityLevel=function(e){for(var t=null,n=0,r=this.length;n<r;n++)if(this[n]===e){t=this.levels_.splice(n,1)[0],this.selectedIndex_===n?this.selectedIndex_=-1:this.selectedIndex_>n&&this.selectedIndex_--;break}return t&&this.trigger({qualityLevel:e,type:"removequalitylevel"}),t},o.prototype.getQualityLevelById=function(e){for(var t=0,n=this.length;t<n;t++){var r=this[t];if(r.id===e)return r}return null},o.prototype.dispose=function(){this.selectedIndex_=-1,this.levels_.length=0},o}(e.EventTarget);for(var o in i.prototype.allowedEvents_={change:"change",addqualitylevel:"addqualitylevel",removequalitylevel:"removequalitylevel"},i.prototype.allowedEvents_)i.prototype["on"+o]=null;var l=function(t){return n=this,e.mergeOptions({},t),r=n.qualityLevels,o=new i,n.on("dispose",function e(){o.dispose(),n.qualityLevels=r,n.off("dispose",e)}),n.qualityLevels=function(){return o},n.qualityLevels.VERSION="__VERSION__",o;var n,r,o};return(e.registerPlugin||e.plugin)("qualityLevels",l),l.VERSION="__VERSION__",l});

3
assets/js/videojs-dash.min.js vendored Normal file

File diff suppressed because one or more lines are too long

14
assets/js/videojs-http-streaming.min.js vendored Normal file

File diff suppressed because one or more lines are too long

4
assets/js/videojs-markers.min.js vendored Normal file

File diff suppressed because one or more lines are too long

7
assets/js/videojs-share.min.js vendored Normal file

File diff suppressed because one or more lines are too long

2
assets/js/videojs.hotkeys.min.js vendored Normal file

@@ -0,0 +1,2 @@
/* videojs-hotkeys v0.2.22 - https://github.com/ctd1500/videojs-hotkeys */
!function(e,t){"undefined"!=typeof window&&window.videojs?t(window.videojs):"function"==typeof define&&define.amd?define("videojs-hotkeys",["video.js"],function(e){return t(e.default||e)}):"undefined"!=typeof module&&module.exports&&(module.exports=t(require("video.js")))}(0,function(s){"use strict";"undefined"!=typeof window&&(window.videojs_hotkeys={version:"0.2.22"});(s.registerPlugin||s.plugin)("hotkeys",function(m){var y=this,v=y.el(),f=document,e={volumeStep:.1,seekStep:5,enableMute:!0,enableVolumeScroll:!0,enableHoverScroll:!0,enableFullscreen:!0,enableNumbers:!0,enableJogStyle:!1,alwaysCaptureHotkeys:!1,enableModifiersForNumbers:!0,enableInactiveFocus:!0,skipInitialFocus:!1,playPauseKey:function(e){return 32===e.which||179===e.which},rewindKey:function(e){return 37===e.which||177===e.which},forwardKey:function(e){return 39===e.which||176===e.which},volumeUpKey:function(e){return 38===e.which},volumeDownKey:function(e){return 40===e.which},muteKey:function(e){return 77===e.which},fullscreenKey:function(e){return 70===e.which},customKeys:{}},t=s.mergeOptions||s.util.mergeOptions,d=(m=t(e,m||{})).volumeStep,n=m.seekStep,p=m.enableMute,r=m.enableVolumeScroll,o=m.enableHoverScroll,b=m.enableFullscreen,h=m.enableNumbers,w=m.enableJogStyle,k=m.alwaysCaptureHotkeys,S=m.enableModifiersForNumbers,u=m.enableInactiveFocus,l=m.skipInitialFocus;v.hasAttribute("tabIndex")||v.setAttribute("tabIndex","-1"),v.style.outline="none",!k&&y.autoplay()||l||y.one("play",function(){v.focus()}),u&&y.on("userinactive",function(){var n=function(){clearTimeout(e)},e=setTimeout(function(){y.off("useractive",n);var e=f.activeElement,t=v.querySelector(".vjs-control-bar");e&&e.parentElement==t&&v.focus()},10);y.one("useractive",n)}),y.on("play",function(){var e=v.querySelector(".iframeblocker");e&&""===e.style.display&&(e.style.display="block",e.style.bottom="39px")});var i=!1,c=v.querySelector(".vjs-volume-menu-button")||v.querySelector(".vjs-volume-panel");c.onmouseover=function(){i=!0},c.onmouseout=function(){i=!1};var a=function(e){if(o)var t=0;else t=f.activeElement;if(y.controls()&&(k||t==v||t==v.querySelector(".vjs-tech")||t==v.querySelector(".iframeblocker")||t==v.querySelector(".vjs-control-bar")||i)&&r){e=window.event||e;var n=Math.max(-1,Math.min(1,e.wheelDelta||-e.detail));e.preventDefault(),1==n?y.volume(y.volume()+d):-1==n&&y.volume(y.volume()-d)}},K=function(e,t){return m.playPauseKey(e,t)?1:m.rewindKey(e,t)?2:m.forwardKey(e,t)?3:m.volumeUpKey(e,t)?4:m.volumeDownKey(e,t)?5:m.muteKey(e,t)?6:m.fullscreenKey(e,t)?7:void 0};function q(e){return"function"==typeof n?n(e):n}return y.on("keydown",function(e){var t,n,r=e.which,o=e.preventDefault,u=y.duration();if(y.controls()){var l=f.activeElement;if(k||l==v||l==v.querySelector(".vjs-tech")||l==v.querySelector(".vjs-control-bar")||l==v.querySelector(".iframeblocker"))switch(K(e,y)){case 1:o(),k&&e.stopPropagation(),y.paused()?y.play():y.pause();break;case 2:t=!y.paused(),o(),t&&y.pause(),(n=y.currentTime()-q(e))<=0&&(n=0),y.currentTime(n),t&&y.play();break;case 3:t=!y.paused(),o(),t&&y.pause(),u<=(n=y.currentTime()+q(e))&&(n=t?u-.001:u),y.currentTime(n),t&&y.play();break;case 5:o(),w?(n=y.currentTime()-1,y.currentTime()<=1&&(n=0),y.currentTime(n)):y.volume(y.volume()-d);break;case 4:o(),w?(u<=(n=y.currentTime()+1)&&(n=u),y.currentTime(n)):y.volume(y.volume()+d);break;case 6:p&&y.muted(!y.muted());break;case 
7:b&&(y.isFullscreen()?y.exitFullscreen():y.requestFullscreen());break;default:if((47<r&&r<59||95<r&&r<106)&&(S||!(e.metaKey||e.ctrlKey||e.altKey))&&h){var i=48;95<r&&(i=96);var c=r-i;o(),y.currentTime(y.duration()*c*.1)}for(var a in m.customKeys){var s=m.customKeys[a];s&&s.key&&s.handler&&s.key(e)&&(o(),s.handler(y,m,e))}}}}),y.on("dblclick",function(e){if(y.controls()){var t=e.relatedTarget||e.toElement||f.activeElement;t!=v&&t!=v.querySelector(".vjs-tech")&&t!=v.querySelector(".iframeblocker")||b&&(y.isFullscreen()?y.exitFullscreen():y.requestFullscreen())}}),y.on("mousewheel",a),y.on("DOMMouseScroll",a),this})});

52
assets/js/watch.js Normal file

@@ -0,0 +1,52 @@
function toggle_parent(target) {
body = target.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function toggle_comments(target) {
body = target.parentNode.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function swap_comments(source) {
if (source == "youtube") {
get_youtube_comments();
} else if (source == "reddit") {
get_reddit_comments();
}
}
String.prototype.supplant = function(o) {
return this.replace(/{([^{}]*)}/g, function(a, b) {
var r = o[b];
return typeof r === "string" || typeof r === "number" ? r : a;
});
};
function show_youtube_replies(target) {
body = target.parentNode.parentNode.children[1];
body.style.display = "";
target.innerHTML = "Hide replies";
target.setAttribute("onclick", "hide_youtube_replies(this)");
}
function hide_youtube_replies(target) {
body = target.parentNode.parentNode.children[1];
body.style.display = "none";
target.innerHTML = "Show replies";
target.setAttribute("onclick", "show_youtube_replies(this)");
}

@@ -1,5 +1,6 @@
crawl_threads: 1
channel_threads: 1
feed_threads: 1
video_threads: 1
db:
user: kemal
@@ -7,4 +8,6 @@ db:
host: localhost
port: 5432
dbname: invidious
full_refresh: false
full_refresh: false
https_only: false
geo_bypass: true

@@ -33,6 +33,6 @@ CREATE INDEX channel_videos_published_idx
-- DROP INDEX public.channel_videos_ucid_idx;
CREATE INDEX channel_videos_ucid_idx
ON public.channel_videos USING btree
ON public.channel_videos USING hash
(ucid COLLATE pg_catalog."default")
TABLESPACE pg_default;

@@ -20,6 +20,10 @@ CREATE TABLE public.videos
allowed_regions text[] COLLATE pg_catalog."default",
is_family_friendly boolean,
genre text COLLATE pg_catalog."default",
genre_url text COLLATE pg_catalog."default",
license text COLLATE pg_catalog."default",
sub_count_text text COLLATE pg_catalog."default",
author_thumbnail text COLLATE pg_catalog."default",
CONSTRAINT videos_pkey PRIMARY KEY (id)
)
WITH (

21
docker-compose.yml Normal file

@@ -0,0 +1,21 @@
version: '3'
services:
postgres:
build:
context: .
dockerfile: docker/Dockerfile.postgres
restart: unless-stopped
volumes:
- postgresdata:/var/lib/postgresql/data
invidious:
build:
context: .
dockerfile: docker/Dockerfile
restart: unless-stopped
ports:
- "3000:3000"
depends_on:
- postgres
volumes:
postgresdata:

15
docker/Dockerfile Normal file

@@ -0,0 +1,15 @@
FROM archlinux/base
RUN pacman -Sy --noconfirm shards crystal imagemagick librsvg \
which pkgconf gcc ttf-liberation
# base-devel contains many other basic packages that are normally assumed to already exist on a clean Arch system
ADD . /invidious
WORKDIR /invidious
RUN sed -i 's/host: localhost/host: postgres/' config/config.yml && \
shards && \
crystal build src/invidious.cr
CMD [ "/invidious/invidious" ]

@@ -0,0 +1,10 @@
FROM postgres:10
ENV POSTGRES_USER postgres
ADD ./setup.sh /setup.sh
ADD ./config/sql /config/sql
ADD ./docker/entrypoint.postgres.sh /entrypoint.sh
ENTRYPOINT [ "/entrypoint.sh" ]
CMD [ "postgres" ]

19
docker/entrypoint.postgres.sh Executable file

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
CMD="$@"
if [ ! -f /var/lib/postgresql/data/setupFinished ]; then
echo "### first run - setting up invidious database"
/usr/local/bin/docker-entrypoint.sh postgres &
sleep 10
until runuser -l postgres -c 'pg_isready' 2>/dev/null; do
>&2 echo "### Postgres is unavailable - waiting"
sleep 5
done
>&2 echo "### importing table schemas"
su postgres -c "/setup.sh" && touch /var/lib/postgresql/data/setupFinished
echo "### invidious database setup finished"
exit
fi
echo "running postgres /usr/local/bin/docker-entrypoint.sh $CMD"
exec /usr/local/bin/docker-entrypoint.sh $CMD

@@ -1,7 +1,8 @@
#!/bin/bash
createdb invidious
createuser kemal
#createuser kemal
psql -c "CREATE USER kemal WITH PASSWORD 'kemal';"
psql invidious < config/sql/channels.sql
psql invidious < config/sql/videos.sql
psql invidious < config/sql/channel_videos.sql

@@ -1,5 +1,5 @@
name: invidious
version: 0.2.0
version: 0.10.0
authors:
- Omar Roth <omarroth@hotmail.com>
@@ -9,13 +9,13 @@ targets:
main: src/invidious.cr
dependencies:
kemal:
github: kemalcr/kemal
pg:
github: will/crystal-pg
detect_language:
github: detectlanguage/detectlanguage-crystal
kemal:
github: kemalcr/kemal
pg:
github: will/crystal-pg
crystal: 0.26.0
crystal: 0.26.1
license: AGPLv3

File diff suppressed because it is too large

@@ -15,6 +15,11 @@ class ChannelVideo
ucid: String,
author: String,
})
# TODO: Add length_seconds to channel_video
def length_seconds
return 0
end
end
def get_channel(id, client, db, refresh = true, pull_all_videos = true)
@@ -48,6 +53,13 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
end
author = author.content
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with?(" - Topic") ||
{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
auto_generated = true
end
if !pull_all_videos
rss.xpath_nodes("//feed/entry").each do |entry|
video_id = entry.xpath_node("videoid").not_nil!.content
@@ -69,64 +81,174 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
updated = $4, ucid = $5, author = $6", video_array)
end
else
videos = [] of ChannelVideo
page = 1
ids = [] of String
loop do
url = produce_videos_url(ucid, page)
url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
content_html = json["content_html"].as_s
if content_html.empty?
# If we don't get anything, move on
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
else
break
end
document = XML.parse_html(content_html)
document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")])).each do |item|
anchor = item.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a))
if !anchor
raise "could not find anchor"
end
title = anchor.content.strip
video_id = anchor["href"].lchop("/watch?v=")
published = item.xpath_node(%q(.//div[@class="yt-lockup-meta"]/ul/li[1]))
if !published
# This happens on Youtube red videos, here we just skip them
next
end
published = published.content
published = decode_date(published)
videos << ChannelVideo.new(video_id, title, published, Time.now, ucid, author)
if auto_generated
videos = extract_videos(nodeset)
else
videos = extract_videos(nodeset, ucid)
videos.each { |video| video.ucid = ucid }
videos.each { |video| video.author = author }
end
if document.xpath_nodes(%q(//li[contains(@class, "channels-content-item")])).size < 30
count = nodeset.size
videos = videos.map { |video| ChannelVideo.new(video.id, video.title, video.published, Time.now, video.ucid, video.author) }
videos.each do |video|
ids << video.id
# FIXME: Red videos don't provide published date, so the best we can do is ignore them
if Time.now - video.published > 1.minute
db.exec("UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, video.ucid)
video_array = video.to_a
args = arg_array(video_array)
db.exec("INSERT INTO channel_videos VALUES (#{args}) ON CONFLICT (id) DO UPDATE SET title = $2, \
published = $3, updated = $4, ucid = $5, author = $6", video_array)
end
end
if count < 30
break
end
page += 1
end
video_ids = [] of String
videos.each do |video|
db.exec("UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, ucid)
video_ids << video.id
video_array = video.to_a
args = arg_array(video_array)
db.exec("INSERT INTO channel_videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
end
# When a video is deleted from a channel, we find and remove it here
db.exec("DELETE FROM channel_videos * WHERE NOT id = ANY ('{#{video_ids.map { |a| %("#{a}") }.join(",")}}') AND ucid = $1", ucid)
db.exec("DELETE FROM channel_videos * WHERE NOT id = ANY ('{#{ids.map { |id| %("#{id}") }.join(",")}}') AND ucid = $1", ucid)
end
channel = InvidiousChannel.new(ucid, author, Time.now)
return channel
end
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
seed = Time.epoch(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
page = "#{timestamp.epoch}"
switch = "\x36"
else
page = "#{page}"
switch = "\x00"
end
meta = "\x12\x06videos"
meta += "\x30\x02"
meta += "\x38\x01"
meta += "\x60\x01"
meta += "\x6a\x00"
meta += "\xb8\x01\x00"
meta += "\x20#{switch}"
meta += "\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
return url
end
def get_about_info(ucid)
client = make_client(YT_URL)
about = client.get("/user/#{ucid}/about?disable_polymer=1&gl=US&hl=en")
about = XML.parse_html(about.body)
if !about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a))
about = client.get("/channel/#{ucid}/about?disable_polymer=1&gl=US&hl=en")
about = XML.parse_html(about.body)
end
if !about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a))
raise "User does not exist."
end
sub_count = about.xpath_node(%q(//span[contains(text(), "subscribers")]))
if sub_count
sub_count = sub_count.content.delete(", subscribers").to_i?
end
sub_count ||= 0
author = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!.content
ucid = about.xpath_node(%q(//link[@rel="canonical"])).not_nil!["href"].split("/")[-1]
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
auto_generated = false
if about.xpath_node(%q(//ul[@class="about-custom-links"]/li/a[@title="Auto-generated by YouTube"])) ||
about.xpath_node(%q(//span[@class="qualified-channel-title-badge"]/span[@title="Auto-generated by YouTube"]))
auto_generated = true
end
return {author, ucid, auto_generated, sub_count}
end
def get_60_videos(ucid, page, auto_generated)
count = 0
videos = [] of SearchVideo
client = make_client(YT_URL)
2.times do |i|
url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if !json["load_more_widget_html"]?.try &.as_s.empty?
count += 30
end
if auto_generated
videos += extract_videos(nodeset)
else
videos += extract_videos(nodeset, ucid)
end
else
break
end
end
return videos, count
end
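
For reference, the continuation token assembled by produce_channel_videos_url above is a nested, length-prefixed byte string: an inner "meta" blob (tab name, flag bytes, switch byte, page number) that is Base64url-encoded and URL-escaped, then wrapped with the channel ID and the fixed 0xe2 0xa9 0x85 0xb2 0x02 prefix and encoded again. Below is a rough Python sketch of the same layout for a plain (non-auto-generated) channel; the channel ID is a placeholder and this is an illustration, not the project's Crystal code. The playlist and channel-search continuations later in this diff follow the same pattern.

```python
import base64
import urllib.parse

def produce_channel_videos_url(ucid: str, page: int = 1) -> str:
    # Inner metadata blob: tab name, flag bytes and page number as they appear
    # in the Crystal code above (switch byte \x00 for a plain channel).
    page_str = str(page)
    meta = b"\x12\x06videos"
    meta += b"\x30\x02" + b"\x38\x01" + b"\x60\x01" + b"\x6a\x00" + b"\xb8\x01\x00"
    meta += b"\x20\x00"
    meta += b"\x7a" + bytes([len(page_str)]) + page_str.encode()
    meta_enc = urllib.parse.quote(base64.urlsafe_b64encode(meta)).encode()

    # Outer wrapper: length-prefixed channel ID and meta, one overall length byte,
    # and the fixed prefix, Base64url-encoded and URL-escaped once more.
    cont = b"\x12" + bytes([len(ucid)]) + ucid.encode()
    cont += b"\x1a" + bytes([len(meta_enc)]) + meta_enc
    cont = bytes([len(cont)]) + cont
    cont = b"\xe2\xa9\x85\xb2\x02" + cont
    token = urllib.parse.quote(base64.urlsafe_b64encode(cont))
    return f"/browse_ajax?continuation={token}&gl=US&hl=en"

# Placeholder channel ID, purely for illustration.
print(produce_channel_videos_url("UCxxxxxxxxxxxxxxxxxxxxxx", page=2))
```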

@@ -17,12 +17,12 @@ class RedditComment
end
JSON.mapping({
author: String,
body_html: String,
replies: RedditThing | String,
score: Int32,
depth: Int32,
created: {
author: String,
body_html: String,
replies: RedditThing | String,
score: Int32,
depth: Int32,
created_utc: {
type: Time,
converter: RedditComment::TimeConverter,
},
@@ -100,23 +100,25 @@ def template_youtube_comments(comments)
END_HTML
end
author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).full_path}"
html += <<-END_HTML
<div class="pure-g">
<div class="pure-u-2-24">
<img style="width:90%; padding-right:1em; padding-top:1em;" src="#{child["authorThumbnails"][-1]["url"]}">
<div class="pure-u-4-24 pure-u-md-2-24">
<img style="width:90%; padding-right:1em; padding-top:1em;" src="#{author_thumbnail}">
</div>
<div class="pure-u-22-24">
<div class="pure-u-20-24 pure-u-md-22-24">
<p>
<a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a>
<i class="icon ion-ios-thumbs-up"></i> #{child["likeCount"]}
<b><a href="#{child["authorUrl"]}">#{child["author"]}</a></b>
- #{recode_date(Time.epoch(child["published"].as_i64))} ago
</p>
<div>
#{child["content"]}
#{replies_html}
</div>
</div>
<b>
<a href="#{child["authorUrl"]}">#{child["author"]}</a>
</b>
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
#{recode_date(Time.epoch(child["published"].as_i64))} ago
|
<i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
</p>
#{replies_html}
</div>
</div>
END_HTML
end
@@ -127,7 +129,7 @@ def template_youtube_comments(comments)
<div class="pure-u-1">
<p>
<a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
onclick="get_youtube_replies(this)">Load more</a>
onclick="get_youtube_replies(this, true)">Load more</a>
</p>
</div>
</div>
@@ -154,10 +156,10 @@ def template_reddit_comments(root)
content = <<-END_HTML
<p>
<a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a>
<i class="icon ion-ios-thumbs-up"></i> #{score}
<a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
<b><a href="https://www.reddit.com/user/#{author}">#{author}</a></b>
- #{recode_date(child.created)} ago
#{number_with_separator(score)} points
#{recode_date(child.created_utc)} ago
</p>
<div>
#{body_html}
@@ -190,37 +192,19 @@ def template_reddit_comments(root)
return html
end
def add_alt_links(html)
alt_links = [] of {String, String}
def replace_links(html)
html = XML.parse_html(html)
# This is painful but likely the only way to accomplish this in Crystal,
# as Crystigiri and others are not able to insert XML Nodes into a document.
# The goal here is to use as little regex as possible
html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
anchor = XML.parse_html(match[0])
anchor = anchor.xpath_node("//a").not_nil!
html.xpath_nodes(%q(//a)).each do |anchor|
url = URI.parse(anchor["href"])
if ["www.youtube.com", "m.youtube.com"].includes?(url.host)
if {"www.youtube.com", "m.youtube.com", "youtu.be"}.includes?(url.host)
if url.path == "/redirect"
params = HTTP::Params.parse(url.query.not_nil!)
alt_url = params["q"]?
alt_url ||= "/"
anchor["href"] = params["q"]?
else
alt_url = url.full_path
anchor["href"] = url.full_path
end
alt_link = <<-END_HTML
<a href="#{alt_url}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
elsif url.host == "youtu.be"
alt_link = <<-END_HTML
<a href="/watch?v=#{url.path.try &.lchop("/")}&#{url.query}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
elsif url.to_s == "#"
begin
length_seconds = decode_length_seconds(anchor.content)
@@ -228,23 +212,12 @@ def add_alt_links(html)
length_seconds = decode_time(anchor.content)
end
alt_anchor = <<-END_HTML
<a href="javascript:void(0)" onclick="player.currentTime(#{length_seconds})">#{anchor.content}</a>
END_HTML
html = html.sub(anchor.to_s, alt_anchor)
next
else
alt_link = ""
anchor["href"] = "javascript:void(0)"
anchor["onclick"] = "player.currentTime(#{length_seconds})"
end
alt_links << {anchor.to_s, alt_link}
end
alt_links.each do |original, alternate|
html = html.sub(original, original + alternate)
end
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
return html
end
@@ -267,5 +240,46 @@ def fill_links(html, scheme, host)
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
end
html
return html
end
def content_to_comment_html(content)
comment_html = content.map do |run|
text = HTML.escape(run["text"].as_s)
if run["text"] == "\n"
text = "<br>"
end
if run["bold"]?
text = "<b>#{text}</b>"
end
if run["italics"]?
text = "<i>#{text}</i>"
end
if run["navigationEndpoint"]?
url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
if url
url = URI.parse(url)
if !url.host || {"m.youtube.com", "www.youtube.com", "youtu.be"}.includes? url.host
if url.path == "/redirect"
url = HTTP::Params.parse(url.query.not_nil!)["q"]
else
url = url.full_path
end
end
else
url = run["navigationEndpoint"]["commandMetadata"]?.try &.["webCommandMetadata"]["url"].as_s
end
text = %(<a href="#{url}">#{text}</a>)
end
text
end.join.rchop('\ufeff')
return comment_html
end

@@ -2,6 +2,7 @@ class Config
YAML.mapping({
crawl_threads: Int32,
channel_threads: Int32,
feed_threads: Int32,
video_threads: Int32,
db: NamedTuple(
user: String,
@@ -14,11 +15,12 @@ class Config
https_only: Bool?,
hmac_key: String?,
full_refresh: Bool,
geo_bypass: Bool,
})
end
class FilteredCompressHandler < Kemal::Handler
exclude ["/videoplayback", "/videoplayback/*", "/api/*"]
exclude ["/videoplayback", "/videoplayback/*", "/vi/*", "/api/*", "/ggpht/*"]
def call(env)
return call_next env if exclude_match? env
@@ -41,6 +43,17 @@ class FilteredCompressHandler < Kemal::Handler
end
end
class DenyFrame < Kemal::Handler
exclude ["/embed/*"]
def call(env)
return call_next env if exclude_match? env
env.response.headers["X-Frame-Options"] = "sameorigin"
call_next env
end
end
def rank_videos(db, n, filter, url)
top = [] of {Float64, String}
@@ -116,81 +129,6 @@ def login_req(login_form, f_req)
return HTTP::Params.encode(data)
end
def produce_videos_url(ucid, page = 1)
page = "#{page}"
meta = "\x12\x06videos \x00\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
def generate_captcha(key)
minute = Random::Secure.rand(12)
minute_angle = minute * 30
@@ -240,7 +178,7 @@ def generate_captcha(key)
return {challenge: challenge, token: token}
end
def html_to_description(description_html)
def html_to_content(description_html)
if !description_html
description = ""
description_html = ""
@@ -248,15 +186,26 @@ def html_to_description(description_html)
description_html = description_html.to_s
description = description_html.gsub("<br>", "\n")
description = description.gsub("<br/>", "\n")
description = XML.parse_html(description).content.strip("\n ")
if description.empty?
description = ""
else
description = XML.parse_html(description).content.strip("\n ")
end
end
return description, description_html
return description_html, description
end
def extract_videos(nodeset, ucid = nil)
videos = extract_items(nodeset, ucid)
videos.select! { |item| !item.is_a?(SearchChannel | SearchPlaylist) }
videos.map { |video| video.as(SearchVideo) }
end
def extract_items(nodeset, ucid = nil)
# TODO: Make this a 'common', so it makes more sense to be used here
videos = [] of SearchVideo
items = [] of SearchItem
nodeset.each do |node|
anchor = node.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a))
@@ -268,78 +217,175 @@ def extract_videos(nodeset, ucid = nil)
next
end
case node.xpath_node(%q(.//div)).not_nil!["class"]
when .includes? "yt-lockup-movie-vertical-poster"
next
when .includes? "yt-lockup-playlist"
next
when .includes? "yt-lockup-channel"
next
end
title = anchor.content.strip
id = anchor["href"].lchop("/watch?v=")
if ucid
anchor = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-byline")]/a))
if !anchor
author = ""
author_id = ""
else
anchor = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-byline")]/a))
if !anchor
next
end
author = anchor.content
author = anchor.content.strip
author_id = anchor["href"].split("/")[-1]
end
metadata = node.xpath_nodes(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li))
if metadata.empty?
anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
if !anchor
next
end
title = anchor.content.strip
id = anchor["href"]
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description_html, description = html_to_content(description_html)
tile = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-tile")]))
if !tile
next
end
begin
published = decode_date(metadata[0].content.lchop("Streamed ").lchop("Starts "))
rescue ex
end
begin
published ||= Time.epoch(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
rescue ex
end
published ||= Time.now
case tile["class"]
when .includes? "yt-lockup-playlist"
plid = HTTP::Params.parse(URI.parse(id).query.not_nil!)["list"]
begin
view_count = metadata[0].content.rchop(" watching").delete(",").try &.to_i64?
rescue ex
end
begin
view_count ||= metadata.try &.[1].content.delete("No views,").try &.to_i64?
rescue ex
end
view_count ||= 0_i64
anchor = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-meta")]/a))
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description, description_html = html_to_description(description_html)
if !anchor
anchor = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li/a))
end
length_seconds = node.xpath_node(%q(.//span[@class="video-time"]))
if length_seconds
length_seconds = decode_length_seconds(length_seconds.content)
video_count = node.xpath_node(%q(.//span[@class="formatted-video-count-label"]/b))
if video_count
video_count = video_count.content
if video_count == "50+"
author = "YouTube"
author_id = "UC-9-kyTW8ZkZNDHQJ6FgpwQ"
video_count = video_count.rchop("+")
end
video_count = video_count.to_i?
end
video_count ||= 0
videos = [] of SearchPlaylistVideo
node.xpath_nodes(%q(.//*[contains(@class, "yt-lockup-playlist-items")]/li)).each do |video|
anchor = video.xpath_node(%q(.//a))
if anchor
video_title = anchor.content.strip
id = HTTP::Params.parse(URI.parse(anchor["href"]).query.not_nil!)["v"]
end
video_title ||= ""
id ||= ""
anchor = video.xpath_node(%q(.//span/span))
if anchor
length_seconds = decode_length_seconds(anchor.content)
end
length_seconds ||= 0
videos << SearchPlaylistVideo.new(
video_title,
id,
length_seconds
)
end
items << SearchPlaylist.new(
title,
plid,
author,
author_id,
video_count,
videos
)
when .includes? "yt-lockup-channel"
author = title.strip
ucid = id.split("/")[-1]
author_thumbnail = node.xpath_node(%q(.//div/span/img)).try &.["data-thumb"]?
author_thumbnail ||= node.xpath_node(%q(.//div/span/img)).try &.["src"]
author_thumbnail ||= ""
subscriber_count = node.xpath_node(%q(.//span[contains(@class, "yt-subscriber-count")])).try &.["title"].delete(",").to_i?
subscriber_count ||= 0
video_count = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li)).try &.content.split(" ")[0].delete(",").to_i?
video_count ||= 0
items << SearchChannel.new(
author,
ucid,
author_thumbnail,
subscriber_count,
video_count,
description,
description_html
)
else
length_seconds = -1
end
id = id.lchop("/watch?v=")
videos << SearchVideo.new(
title,
id,
author,
author_id,
published,
view_count,
description,
description_html,
length_seconds,
)
metadata = node.xpath_nodes(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li))
begin
published = decode_date(metadata[0].content.lchop("Streamed ").lchop("Starts "))
rescue ex
end
begin
published ||= Time.epoch(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
rescue ex
end
published ||= Time.now
begin
view_count = metadata[0].content.rchop(" watching").delete(",").try &.to_i64?
rescue ex
end
begin
view_count ||= metadata.try &.[1].content.delete("No views,").try &.to_i64?
rescue ex
end
view_count ||= 0_i64
length_seconds = node.xpath_node(%q(.//span[@class="video-time"]))
if length_seconds
length_seconds = decode_length_seconds(length_seconds.content)
else
length_seconds = -1
end
live_now = node.xpath_node(%q(.//span[contains(@class, "yt-badge-live")]))
if live_now
live_now = true
else
live_now = false
end
if node.xpath_node(%q(.//span[text()="Premium"]))
premium = true
else
premium = false
end
if node.xpath_node(%q(.//span[contains(text(), "Get YouTube Premium")]))
paid = true
else
paid = false
end
items << SearchVideo.new(
title,
id,
author,
author_id,
published,
view_count,
description,
description_html,
length_seconds,
live_now,
paid,
premium
)
end
end
return videos
return items
end

@@ -89,6 +89,68 @@ class HTTPClient < HTTP::Client
end
def get_proxies(country_code = "US")
# return get_spys_proxies(country_code)
return get_nova_proxies(country_code)
end
def filter_proxies(proxies)
proxies.select! do |proxy|
begin
client = HTTPClient.new(YT_URL)
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
client.set_proxy(proxy)
client.head("/").status_code == 200
rescue ex
false
end
end
return proxies
end
def get_nova_proxies(country_code = "US")
country_code = country_code.downcase
client = HTTP::Client.new(URI.parse("https://www.proxynova.com"))
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
headers["Accept-Language"] = "Accept-Language: en-US,en;q=0.9"
headers["Host"] = "www.proxynova.com"
headers["Origin"] = "https://www.proxynova.com"
headers["Referer"] = "https://www.proxynova.com/proxy-server-list/country-#{country_code}/"
response = client.get("/proxy-server-list/country-#{country_code}/", headers)
document = XML.parse_html(response.body)
proxies = [] of {ip: String, port: Int32, score: Float64}
document.xpath_nodes(%q(//tr[@data-proxy-id])).each do |node|
ip = node.xpath_node(%q(.//td/abbr/script)).not_nil!.content
ip = ip.match(/document\.write\('(?<sub1>[^']+)'.substr\(8\) \+ '(?<sub2>[^']+)'/).not_nil!
ip = "#{ip["sub1"][8..-1]}#{ip["sub2"]}"
port = node.xpath_node(%q(.//td[2])).not_nil!.content.strip.to_i
anchor = node.xpath_node(%q(.//td[4]/div)).not_nil!
speed = anchor["data-value"].to_f
latency = anchor["title"].to_f
uptime = node.xpath_node(%q(.//td[5]/span)).not_nil!.content.rchop("%").to_f
# TODO: Tweak me
score = (uptime*4 + speed*2 + latency)/7
proxies << {ip: ip, port: port, score: score}
end
# proxies = proxies.sort_by { |proxy| proxy[:score] }.reverse
return proxies
end
def get_spys_proxies(country_code = "US")
client = HTTP::Client.new(URI.parse("http://spys.one"))
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
@@ -108,7 +170,15 @@ def get_proxies(country_code = "US")
"xf4" => "0",
"xf5" => "1",
}
response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
20.times do
if response.status_code == 200
break
end
response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
end
response = XML.parse_html(response.body)
mapping = response.xpath_node(%q(.//body/script)).not_nil!.content
@@ -157,6 +227,7 @@ def get_proxies(country_code = "US")
proxies << {ip: ip, port: port, score: score}
end
proxies = proxies.sort_by { |proxy| proxy[:score] }.reverse
return proxies
end
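
A note on the ProxyNova scraper above: the site hides each IP behind a small document.write('…'.substr(8) + '…') snippet, and get_nova_proxies reassembles the address by dropping the first eight characters of the first string literal and appending the second. A minimal Python illustration of that reassembly, using a made-up cell value:

```python
import re

# Hypothetical obfuscated cell content, shaped like ProxyNova's table entries.
script = "document.write('abcdefgh192.0.2.'.substr(8) + '10');"

m = re.search(
    r"document\.write\('(?P<sub1>[^']+)'.substr\(8\) \+ '(?P<sub2>[^']+)'", script
)
# Drop the 8-character junk prefix, then append the second fragment.
ip = m.group("sub1")[8:] + m.group("sub2")
print(ip)  # -> 192.0.2.10
```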

@@ -40,6 +40,23 @@ def decode_length_seconds(string)
return length_seconds
end
def recode_length_seconds(time)
if time <= 0
return ""
else
time = time.seconds
text = "#{time.minutes.to_s.rjust(2, '0')}:#{time.seconds.to_s.rjust(2, '0')}"
if time.hours > 0
text = "#{time.hours.to_s.rjust(2, '0')}:#{text}"
end
text = text.lchop('0')
return text
end
end
def decode_time(string)
time = string.try &.to_f?
@@ -138,6 +155,25 @@ def number_with_separator(number)
number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
def number_to_short_text(number)
seperated = number_with_separator(number).gsub(",", ".").split("")
text = seperated.first(2).join
if seperated[2]? && seperated[2] != "."
text += seperated[2]
end
text = text.rchop(".0")
if number / 1000000 != 0
text += "M"
elsif number / 1000 != 0
text += "K"
end
text
end
def arg_array(array, start = 1)
if array.size == 0
args = "NULL"
@@ -169,7 +205,7 @@ def get_referer(env, fallback = "/")
referer = URI.parse(referer)
# "Unroll" nested referers
# "Unroll" nested referrers
loop do
if referer.query
params = HTTP::Params.parse(referer.query.not_nil!)
@@ -184,6 +220,7 @@ def get_referer(env, fallback = "/")
end
referer = referer.full_path
referer = "/" + referer.lstrip("\/\\")
if referer == env.request.path
referer = fallback
@@ -191,3 +228,55 @@ def get_referer(env, fallback = "/")
return referer
end
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
def sha256(text)
digest = OpenSSL::Digest.new("SHA256")
digest << text
return digest.hexdigest
end
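
The read_var_int/write_var_int helpers above implement the usual protobuf-style varint: seven payload bits per byte, with the high bit set on every byte except the last. A small Python sketch of the same encoding, with a worked round trip (300 encodes to 0xAC 0x02):

```python
def write_var_int(value: int) -> bytes:
    # 7 data bits per byte; the MSB marks "more bytes follow".
    if value == 0:
        return b"\x00"
    out = bytearray()
    while value:
        byte = value & 0x7F
        value >>= 7
        if value:
            byte |= 0x80
        out.append(byte)
    return bytes(out)

def read_var_int(data: bytes) -> int:
    result, shift = 0, 0
    for byte in data:
        result |= (byte & 0x7F) << shift
        if not byte & 0x80:
            break
        shift += 7
    return result

assert write_var_int(300) == b"\xac\x02"
assert read_var_int(b"\xac\x02") == 300
```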

@@ -3,13 +3,17 @@ def crawl_videos(db)
random = Random.new
search(random.base64(3)).as(Tuple)[1].each do |video|
ids << video.id
if video.is_a?(SearchVideo)
ids << video.id
end
end
loop do
if ids.empty?
search(random.base64(3)).as(Tuple)[1].each do |video|
ids << video.id
if video.is_a?(SearchVideo)
ids << video.id
end
end
end
@@ -100,6 +104,44 @@ def refresh_videos(db)
end
end
def refresh_feeds(db, max_threads = 1)
max_channel = Channel(Int32).new
spawn do
max_threads = max_channel.receive
active_threads = 0
active_channel = Channel(Bool).new
loop do
db.query("SELECT email FROM users") do |rs|
rs.each do
email = rs.read(String)
view_name = "subscriptions_#{sha256(email)[0..7]}"
if active_threads >= max_threads
if active_channel.receive
active_threads -= 1
end
end
active_threads += 1
spawn do
begin
db.exec("REFRESH MATERIALIZED VIEW #{view_name}")
rescue ex
STDOUT << "REFRESH " << email << " : " << ex.message << "\n"
end
active_channel.send(true)
end
end
end
end
end
max_channel.send(max_threads)
end
def pull_top_videos(config, db)
if config.dl_api_key
DetectLanguage.configure do |dl_config|
@@ -141,8 +183,7 @@ end
def update_decrypt_function
loop do
begin
client = make_client(YT_URL)
decrypt_function = fetch_decrypt_function(client)
decrypt_function = fetch_decrypt_function
rescue ex
next
end
@@ -151,3 +192,16 @@ def update_decrypt_function
Fiber.yield
end
end
def find_working_proxies(regions)
loop do
regions.each do |region|
proxies = get_proxies(region).first(20)
proxies = proxies.map { |proxy| {ip: proxy[:ip], port: proxy[:port]} }
# proxies = filter_proxies(proxies)
yield region, proxies
Fiber.yield
end
end
end
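
refresh_feeds above keys each user's materialized view off the first eight hex characters of sha256(email), matching the views created in get_user later in this diff. A quick Python sketch of the naming scheme (the email is a placeholder):

```python
import hashlib

def feed_view_name(email: str) -> str:
    # "subscriptions_" plus the first 8 hex chars of sha256(email),
    # mirroring sha256(email)[0..7] in the Crystal code above.
    return "subscriptions_" + hashlib.sha256(email.encode()).hexdigest()[:8]

print(feed_view_name("user@example.com"))  # "subscriptions_" followed by 8 hex digits
```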

114
src/invidious/mixes.cr Normal file

@@ -0,0 +1,114 @@
class MixVideo
add_mapping({
title: String,
id: String,
author: String,
ucid: String,
length_seconds: Int32,
index: Int32,
mixes: Array(String),
})
end
class Mix
add_mapping({
title: String,
id: String,
videos: Array(MixVideo),
})
end
def fetch_mix(rdid, video_id, cookies = nil)
client = make_client(YT_URL)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"
if cookies
headers = cookies.add_request_headers(headers)
end
response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en", headers)
yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
if yt_data
yt_data = JSON.parse(yt_data["data"].rchop(";"))
else
raise "Could not create mix."
end
if !yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]?
raise "Could not create mix."
end
playlist = yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]["playlist"]
mix_title = playlist["title"].as_s
contents = playlist["contents"].as_a
until contents[0]["playlistPanelVideoRenderer"]["videoId"].as_s == video_id
contents.shift
end
videos = [] of MixVideo
contents.each do |item|
item = item["playlistPanelVideoRenderer"]
id = item["videoId"].as_s
title = item["title"]["simpleText"].as_s
author = item["longBylineText"]["runs"][0]["text"].as_s
ucid = item["longBylineText"]["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"].as_s
length_seconds = decode_length_seconds(item["lengthText"]["simpleText"].as_s)
index = item["navigationEndpoint"]["watchEndpoint"]["index"].as_i
videos << MixVideo.new(
title,
id,
author,
ucid,
length_seconds,
index,
[rdid]
)
end
if !cookies
next_page = fetch_mix(rdid, videos[-1].id, response.cookies)
videos += next_page.videos
end
videos.uniq! { |video| video.id }
videos = videos.first(50)
return Mix.new(mix_title, rdid, videos)
end
def template_mix(mix)
html = <<-END_HTML
<h3>
<a href="/mix?list=#{mix["mixId"]}">
#{mix["title"]}
</a>
</h3>
<div class="pure-menu pure-menu-scrollable playlist-restricted">
<ol class="pure-menu-list">
END_HTML
mix["videos"].as_a.each do |video|
html += <<-END_HTML
<li class="pure-menu-item">
<a href="/watch?v=#{video["videoId"]}&list=#{mix["mixId"]}">
<img style="width:100%;" src="/vi/#{video["videoId"]}/mqdefault.jpg">
<p style="width:100%">#{video["title"]}</p>
<p>
<b style="width: 100%">#{video["author"]}</b>
</p>
</a>
</li>
END_HTML
end
html += <<-END_HTML
</ol>
</div>
<hr>
END_HTML
html
end
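
fetch_mix above pulls the mix out of the watch page's ytInitialData blob, drops entries before the requested video, then calls itself once with the response cookies to pick up the next batch before de-duplicating down to 50 entries. A rough Python sketch of the extraction step (the regex and error text mirror the Crystal code; this is illustrative only, not the project's implementation):

```python
import json
import re

def extract_mix(html: str, video_id: str):
    # Locate the ytInitialData assignment in the watch-page HTML.
    m = re.search(r'window\["ytInitialData"\] = (?P<data>.*);', html)
    if not m:
        raise ValueError("Could not create mix.")
    data = json.loads(m.group("data").rstrip(";"))

    playlist = data["contents"]["twoColumnWatchNextResults"]["playlist"]["playlist"]
    videos = [item["playlistPanelVideoRenderer"] for item in playlist["contents"]]

    # Drop entries before the requested video, as the Crystal code does.
    while videos and videos[0]["videoId"] != video_id:
        videos.pop(0)
    return playlist["title"], videos
```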

@@ -1,16 +1,3 @@
class Playlist
add_mapping({
title: String,
id: String,
author: String,
ucid: String,
description: String,
video_count: Int32,
views: Int64,
updated: Time,
})
end
class PlaylistVideo
add_mapping({
title: String,
@@ -24,58 +11,109 @@ class PlaylistVideo
})
end
def extract_playlist(plid, page)
index = (page - 1) * 100
url = produce_playlist_url(plid, index)
class Playlist
add_mapping({
title: String,
id: String,
author: String,
author_thumbnail: String,
ucid: String,
description: String,
description_html: String,
video_count: Int32,
views: Int64,
updated: Time,
})
end
def fetch_playlist_videos(plid, page, video_count, continuation = nil)
client = make_client(YT_URL)
response = client.get(url)
response = JSON.parse(response.body)
if !response["content_html"]? || response["content_html"].as_s.empty?
raise "Playlist does not exist"
if continuation
html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
html = XML.parse_html(html.body)
index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
if index
index -= 1
end
index ||= 0
else
index = (page - 1) * 100
end
if video_count > 100
url = produce_playlist_url(plid, index)
response = client.get(url)
response = JSON.parse(response.body)
if !response["content_html"]? || response["content_html"].as_s.empty?
raise "Playlist is empty"
end
document = XML.parse_html(response["content_html"].as_s)
nodeset = document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")]))
videos = extract_playlist(plid, nodeset, index)
else
# Playlist has less than one page of videos, so subsequent pages will be empty
if page > 1
videos = [] of PlaylistVideo
else
# Extract first page of videos
response = client.get("/playlist?list=#{plid}&gl=US&hl=en&disable_polymer=1")
document = XML.parse_html(response.body)
nodeset = document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")]))
videos = extract_playlist(plid, nodeset, 0)
if continuation
until videos[0].id == continuation
videos.shift
end
end
end
end
return videos
end
def extract_playlist(plid, nodeset, index)
videos = [] of PlaylistVideo
document = XML.parse_html(response["content_html"].as_s)
anchor = document.xpath_node(%q(//div[@class="pl-video-owner"]/a))
if anchor
document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")])).each_with_index do |video, offset|
anchor = video.xpath_node(%q(.//td[@class="pl-video-title"]))
if !anchor
next
end
title = anchor.xpath_node(%q(.//a)).not_nil!.content.strip(" \n")
id = anchor.xpath_node(%q(.//a)).not_nil!["href"].lchop("/watch?v=")[0, 11]
anchor = anchor.xpath_node(%q(.//div[@class="pl-video-owner"]/a))
if anchor
author = anchor.content
ucid = anchor["href"].split("/")[2]
else
author = ""
ucid = ""
end
anchor = video.xpath_node(%q(.//td[@class="pl-video-time"]/div/div[1]))
if anchor && !anchor.content.empty?
length_seconds = decode_length_seconds(anchor.content)
else
length_seconds = 0
end
videos << PlaylistVideo.new(
title,
id,
author,
ucid,
length_seconds,
Time.now,
[plid],
index + offset,
)
nodeset.each_with_index do |video, offset|
anchor = video.xpath_node(%q(.//td[@class="pl-video-title"]))
if !anchor
next
end
title = anchor.xpath_node(%q(.//a)).not_nil!.content.strip(" \n")
id = anchor.xpath_node(%q(.//a)).not_nil!["href"].lchop("/watch?v=")[0, 11]
anchor = anchor.xpath_node(%q(.//div[@class="pl-video-owner"]/a))
if anchor
author = anchor.content
ucid = anchor["href"].split("/")[2]
else
author = ""
ucid = ""
end
anchor = video.xpath_node(%q(.//td[@class="pl-video-time"]/div/div[1]))
if anchor && !anchor.content.empty?
length_seconds = decode_length_seconds(anchor.content)
else
length_seconds = 0
end
videos << PlaylistVideo.new(
title,
id,
author,
ucid,
length_seconds,
Time.now,
[plid],
index + offset,
)
end
return videos
@@ -87,60 +125,78 @@ def produce_playlist_url(id, index)
end
ucid = "VL" + id
continuation = [0x08_u8] + write_var_int(index)
slice = continuation.to_unsafe.to_slice(continuation.size)
slice = Base64.urlsafe_encode(slice, false)
meta = [0x08_u8] + write_var_int(index)
meta = Slice.new(meta.to_unsafe, meta.size)
meta = Base64.urlsafe_encode(meta, false)
meta = "PT:#{meta}"
# Inner Base64
continuation = "PT:" + slice
continuation = [0x7a_u8, continuation.bytes.size.to_u8] + continuation.bytes
slice = continuation.to_unsafe.to_slice(continuation.size)
slice = Base64.urlsafe_encode(slice)
slice = URI.escape(slice)
wrapped = "\x7a"
wrapped += meta.bytes.size.unsafe_chr
wrapped += meta
# Outer Base64
continuation = [0x1a.to_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = ucid.bytes + continuation
continuation = [0x12_u8, ucid.size.to_u8] + continuation
continuation = [0xe2_u8, 0xa9_u8, 0x85_u8, 0xb2_u8, 2_u8, continuation.size.to_u8] + continuation
wrapped = Base64.urlsafe_encode(wrapped)
meta = URI.escape(wrapped)
# Wrap bytes
slice = continuation.to_unsafe.to_slice(continuation.size)
slice = Base64.urlsafe_encode(slice)
slice = URI.escape(slice)
continuation = slice
continuation = "\x12"
continuation += ucid.size.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.bytes.size.unsafe_chr
continuation += meta
url = "/browse_ajax?action_continuation=1&continuation=#{continuation}"
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
def fetch_playlist(plid)
client = make_client(YT_URL)
response = client.get("/playlist?list=#{plid}&disable_polymer=1")
document = XML.parse_html(response.body)
title = document.xpath_node(%q(//h1[@class="pl-header-title"])).not_nil!.content
title = title.strip(" \n")
description = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
if description
description = description.to_xml.strip(" \n")
description = description.split("<button ")[0]
description = fill_links(description, "https", "www.youtube.com")
description = add_alt_links(description)
else
description = ""
if plid.starts_with? "UC"
plid = "UU#{plid.lchop("UC")}"
end
response = client.get("/playlist?list=#{plid}&hl=en&disable_polymer=1")
if response.status_code != 200
raise "Invalid playlist."
end
body = response.body.gsub(<<-END_BUTTON
<button class="yt-uix-button yt-uix-button-size-default yt-uix-button-link yt-uix-expander-head playlist-description-expander yt-uix-inlineedit-ignore-edit" type="button" onclick=";return false;"><span class="yt-uix-button-content"> less <img alt="" src="/yts/img/pixel-vfl3z5WfW.gif">
</span></button>
END_BUTTON
, "")
document = XML.parse_html(body)
title = document.xpath_node(%q(//h1[@class="pl-header-title"]))
if !title
raise "Playlist does not exist."
end
title = title.content.strip(" \n")
description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description_html ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
description_html, description = html_to_content(description_html)
anchor = document.xpath_node(%q(//ul[@class="pl-header-details"])).not_nil!
author = anchor.xpath_node(%q(.//li[1]/a)).not_nil!.content
author_thumbnail = document.xpath_node(%q(//img[@class="channel-header-profile-image"])).try &.["src"]
author_thumbnail ||= ""
ucid = anchor.xpath_node(%q(.//li[1]/a)).not_nil!["href"].split("/")[2]
video_count = anchor.xpath_node(%q(.//li[2])).not_nil!.content.delete("videos, ").to_i
views = anchor.xpath_node(%q(.//li[3])).not_nil!.content.delete("views, ").to_i64
views = anchor.xpath_node(%q(.//li[3])).not_nil!.content.delete("No views, ")
if views.empty?
views = 0_i64
else
views = views.to_i64
end
updated = anchor.xpath_node(%q(.//li[4])).not_nil!.content.lchop("Last updated on ").lchop("Updated ")
updated = decode_date(updated)
@@ -149,8 +205,10 @@ def fetch_playlist(plid)
title,
plid,
author,
author_thumbnail,
ucid,
description,
description_html,
video_count,
views,
updated
@@ -158,3 +216,37 @@ def fetch_playlist(plid)
return playlist
end
def template_playlist(playlist)
html = <<-END_HTML
<h3>
<a href="/playlist?list=#{playlist["playlistId"]}">
#{playlist["title"]}
</a>
</h3>
<div class="pure-menu pure-menu-scrollable playlist-restricted">
<ol class="pure-menu-list">
END_HTML
playlist["videos"].as_a.each do |video|
html += <<-END_HTML
<li class="pure-menu-item">
<a href="/watch?v=#{video["videoId"]}&list=#{playlist["playlistId"]}">
<img style="width:100%;" src="/vi/#{video["videoId"]}/mqdefault.jpg">
<p style="width:100%">#{video["title"]}</p>
<p>
<b style="width: 100%">#{video["author"]}</b>
</p>
</a>
</li>
END_HTML
end
html += <<-END_HTML
</ol>
</div>
<hr>
END_HTML
html
end
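
One detail worth calling out in fetch_playlist above: a channel ID passed in as a playlist is mapped to that channel's uploads playlist by swapping the leading "UC" for "UU". A one-line illustration in Python, with a placeholder ID:

```python
def uploads_playlist(plid: str) -> str:
    # A channel's uploads playlist shares its ID, with "UC" swapped for "UU".
    return "UU" + plid[2:] if plid.startswith("UC") else plid

print(uploads_playlist("UCxxxxxxxxxxxxxxxxxxxxxx"))  # -> UUxxxxxxxxxxxxxxxxxxxxxx
```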

@@ -9,38 +9,111 @@ class SearchVideo
description: String,
description_html: String,
length_seconds: Int32,
live_now: Bool,
paid: Bool,
premium: Bool,
})
end
def search(query, page = 1, search_params = build_search_params(content_type: "video"))
class SearchPlaylistVideo
add_mapping({
title: String,
id: String,
length_seconds: Int32,
})
end
class SearchPlaylist
add_mapping({
title: String,
id: String,
author: String,
ucid: String,
video_count: Int32,
videos: Array(SearchPlaylistVideo),
})
end
class SearchChannel
add_mapping({
author: String,
ucid: String,
author_thumbnail: String,
subscriber_count: Int32,
video_count: Int32,
description: String,
description_html: String,
})
end
alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist
def channel_search(query, page, channel)
client = make_client(YT_URL)
if query.empty?
return {0, [] of SearchVideo}
response = client.get("/user/#{channel}")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
if !canonical
response = client.get("/channel/#{channel}")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
end
html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&disable_polymer=1").body
if !canonical
return 0, [] of SearchItem
end
ucid = canonical["href"].split("/")[-1]
url = produce_channel_search_url(ucid, query, page)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
count = nodeset.size
items = extract_items(nodeset)
else
count = 0
items = [] of SearchItem
end
return count, items
end
def search(query, page = 1, search_params = produce_search_params(content_type: "all"))
client = make_client(YT_URL)
if query.empty?
return {0, [] of SearchItem}
end
html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&hl=en&disable_polymer=1").body
if html.empty?
return {0, [] of SearchVideo}
return {0, [] of SearchItem}
end
html = XML.parse_html(html)
nodeset = html.xpath_nodes(%q(//ol[@class="item-section"]/li))
videos = extract_videos(nodeset)
items = extract_items(nodeset)
return {nodeset.size, videos}
return {nodeset.size, items}
end
def build_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
duration : String = "", features : Array(String) = [] of String)
def produce_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
duration : String = "", features : Array(String) = [] of String)
head = "\x08"
head += case sort
when "relevance"
"\x00"
when "rating"
"\x01"
when "upload_date"
when "upload_date", "date"
"\x02"
when "view_count"
when "view_count", "views"
"\x03"
else
raise "No sort #{sort}"
@@ -73,8 +146,10 @@ def build_search_params(sort : String = "relevance", date : String = "", content
"\x10\x04"
when "show"
"\x10\x05"
else
when "all"
""
else
"\x10\x01"
end
body += case duration
@@ -92,11 +167,11 @@ def build_search_params(sort : String = "relevance", date : String = "", content
"\x20\x01"
when "subtitles"
"\x28\x01"
when "creative_commons"
when "creative_commons", "cc"
"\x30\x01"
when "3d"
"\x38\x01"
when "live"
when "live", "livestream"
"\x40\x01"
when "purchased"
"\x48\x01"
@@ -114,7 +189,7 @@ def build_search_params(sort : String = "relevance", date : String = "", content
end
if body.size > 0
token = head + "\x12" + body.size.to_u8.unsafe_chr + body
token = head + "\x12" + body.size.unsafe_chr + body
else
token = head
end
@@ -124,3 +199,40 @@ def build_search_params(sort : String = "relevance", date : String = "", content
return token
end
def produce_channel_search_url(ucid, query, page)
page = "#{page}"
meta = "\x12\x06search"
meta += "\x30\x02"
meta += "\x38\x01"
meta += "\x60\x01"
meta += "\x6a\x00"
meta += "\xb8\x01\x00"
meta += "\x7a"
meta += page.size.unsafe_chr
meta += page
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.unsafe_chr
continuation += meta
continuation += "\x5a"
continuation += query.size.unsafe_chr
continuation += query
continuation = continuation.size.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
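
produce_search_params above builds the search filter token the same way as the other continuations: a sort byte after 0x08, then a length-prefixed body of filter bytes. Below is a condensed Python sketch covering only the sort and content-type fields; the final Base64url/URL-escape step is assumed from the other token builders in this diff rather than shown in the hunk above.

```python
import base64
import urllib.parse

SORT = {"relevance": b"\x00", "rating": b"\x01", "upload_date": b"\x02", "view_count": b"\x03"}
CONTENT_TYPE = {"video": b"\x10\x01", "channel": b"\x10\x02", "playlist": b"\x10\x03",
                "movie": b"\x10\x04", "show": b"\x10\x05", "all": b""}

def produce_search_params(sort: str = "relevance", content_type: str = "all") -> str:
    head = b"\x08" + SORT[sort]
    body = CONTENT_TYPE[content_type]
    if body:
        token = head + b"\x12" + bytes([len(body)]) + body
    else:
        token = head
    # Assumed final step, by analogy with the other continuation builders.
    return urllib.parse.quote(base64.urlsafe_b64encode(token))

print(produce_search_params("upload_date", "video"))
```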

@@ -1,25 +1,25 @@
def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
def fetch_decrypt_function(id = "CvFH_6DNRCY")
client = make_client(YT_URL)
document = client.get("/watch?v=#{id}").body
url = document.match(/src="(?<url>\/yts\/jsbin\/player-.{9}\/en_US\/base.js)"/).not_nil!["url"]
player = client.get(url).body
function_name = player.match(/"signature",(?<name>[a-zA-Z0-9]{2})\(/).not_nil!["name"]
function_body = player.match(/#{function_name}=function\(a\){(?<body>[^}]+)}/).not_nil!["body"]
function_name = player.match(/^(?<name>[^=]+)=function\(a\){a=a\.split\(""\)/m).not_nil!["name"]
function_body = player.match(/^#{function_name}=function\(a\){(?<body>[^}]+)}/m).not_nil!["body"]
function_body = function_body.split(";")[1..-2]
var_name = function_body[0][0, 2]
var_body = player.delete("\n").match(/var #{var_name}={(?<body>(.*?))};/).not_nil!["body"]
operations = {} of String => String
matches = player.delete("\n").match(/var #{var_name}={(?<op1>[a-zA-Z0-9]{2}:[^}]+}),(?<op2>[a-zA-Z0-9]{2}:[^}]+}),(?<op3>[a-zA-Z0-9]{2}:[^}]+})};/).not_nil!
3.times do |i|
operation = matches["op#{i + 1}"]
op_name = operation[0, 2]
var_body.split("},").each do |operation|
op_name = operation.match(/^[^:]+/).not_nil![0]
op_body = operation.match(/\{[^}]+/).not_nil![0]
op_body = operation.match(/\{[^}]+\}/).not_nil![0]
case op_body
when "{a.reverse()}"
when "{a.reverse()"
operations[op_name] = "a"
when "{a.splice(0,b)}"
when "{a.splice(0,b)"
operations[op_name] = "b"
else
operations[op_name] = "c"
@@ -28,11 +28,10 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
decrypt_function = [] of {name: String, value: Int32}
function_body.each do |function|
function = function.lchop(var_name + ".")
op_name = function[0, 2]
function = function.lchop(var_name).delete("[].")
function = function.lchop(op_name + "(a,")
value = function.rchop(")").to_i
op_name = function.match(/[^\(]+/).not_nil![0]
value = function.match(/\(a,(?<value>[\d]+)\)/).not_nil!["value"].to_i
decrypt_function << {name: operations[op_name], value: value}
end
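
The rewritten fetch_decrypt_function above reduces the player's cipher to a list of named operations: "a" for a reverse, "b" for a splice that drops the first b characters, and "c" for everything else (in practice a swap of position 0 with position b). How that list is applied is not shown in this hunk, so the sketch below is an assumption about how decrypt_signature consumes it, written in Python with a made-up operation list for illustration:

```python
def decrypt_signature(s: str, operations: list) -> str:
    a = list(s)
    for name, value in operations:
        if name == "a":            # a.reverse()
            a.reverse()
        elif name == "b":          # a.splice(0, b): drop the first `value` characters
            a = a[value:]
        else:                      # assumed swap: exchange a[0] with a[value % len(a)]
            i = value % len(a)
            a[0], a[i] = a[i], a[0]
    return "".join(a)

# Hypothetical extracted function: reverse, drop 3, swap with index 19.
print(decrypt_signature("ABCDEFGHIJKLMNOPQRSTUVWXYZ", [("a", 0), ("b", 3), ("c", 19)]))
```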

@@ -27,19 +27,20 @@ class User
end
DEFAULT_USER_PREFERENCES = Preferences.from_json({
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => ["youtube", ""],
"captions" => ["", "", ""],
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => ["youtube", ""],
"captions" => ["", "", ""],
"related_videos" => true,
"dark_mode" => false,
"thin_mode" => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
}.to_json)
class Preferences
@@ -85,6 +86,10 @@ class Preferences
type: Bool,
default: false,
},
related_videos: {
type: Bool,
default: true,
},
dark_mode: Bool,
thin_mode: {
type: Bool,
@@ -114,6 +119,15 @@ def get_user(sid, client, headers, db, refresh = true)
db.exec("INSERT INTO users VALUES (#{args}) \
ON CONFLICT (email) DO UPDATE SET id = users.id || $1, updated = $2, subscriptions = $4", user_array)
begin
view_name = "subscriptions_#{sha256(user.email)[0..7]}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE \
ucid = ANY ((SELECT subscriptions FROM users WHERE email = '#{user.email}')::text[]) \
ORDER BY published DESC;")
rescue ex
end
end
else
user = fetch_user(sid, client, headers, db)
@@ -124,6 +138,15 @@ def get_user(sid, client, headers, db, refresh = true)
db.exec("INSERT INTO users VALUES (#{args}) \
ON CONFLICT (email) DO UPDATE SET id = users.id || $1, updated = $2, subscriptions = $4", user_array)
begin
view_name = "subscriptions_#{sha256(user.email)[0..7]}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE \
ucid = ANY ((SELECT subscriptions FROM users WHERE email = '#{user.email}')::text[]) \
ORDER BY published DESC;")
rescue ex
end
end
return user
@@ -135,7 +158,7 @@ def fetch_user(sid, client, headers, db)
channels = [] of String
feed.xpath_nodes(%q(//ul[@id="guide-channels"]/li/a)).each do |channel|
if !["Popular on YouTube", "Music", "Sports", "Gaming"].includes? channel["title"]
if !{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? channel["title"]
channel_id = channel["href"].lstrip("/channel/")
begin

@@ -109,17 +109,43 @@ CAPTION_LANGUAGES = {
}
REGIONS = {"AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR", "AS", "AT", "AU", "AW", "AX", "AZ", "BA", "BB", "BD", "BE", "BF", "BG", "BH", "BI", "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV", "BW", "BY", "BZ", "CA", "CC", "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN", "CO", "CR", "CU", "CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE", "EG", "EH", "ER", "ES", "ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL", "GM", "GN", "GP", "GQ", "GR", "GS", "GT", "GU", "GW", "GY", "HK", "HM", "HN", "HR", "HT", "HU", "ID", "IE", "IL", "IM", "IN", "IO", "IQ", "IR", "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM", "KN", "KP", "KR", "KW", "KY", "KZ", "LA", "LB", "LC", "LI", "LK", "LR", "LS", "LT", "LU", "LV", "LY", "MA", "MC", "MD", "ME", "MF", "MG", "MH", "MK", "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT", "MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP", "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PG", "PH", "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO", "RS", "RU", "RW", "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM", "SN", "SO", "SR", "SS", "ST", "SV", "SX", "SY", "SZ", "TC", "TD", "TF", "TG", "TH", "TJ", "TK", "TL", "TM", "TN", "TO", "TR", "TT", "TV", "TW", "TZ", "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI", "VN", "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW"}
BYPASS_REGIONS = {"CA", "DE", "FR", "JP", "RU", "UK"}
BYPASS_REGIONS = {
"GB",
"DE",
"FR",
"IN",
"CN",
"RU",
"CA",
"JP",
"IT",
"TH",
"ES",
"AE",
"KR",
"IR",
"BR",
"PK",
"ID",
"BD",
"MX",
"PH",
"EG",
"VN",
"CD",
"TR",
}
VIDEO_THUMBNAILS = {
{name: "default", url: "default", height: 90, width: 120},
{name: "medium", url: "mqdefault", height: 180, width: 320},
{name: "high", url: "hqdefault", height: 360, width: 480},
{name: "sddefault", url: "sddefault", height: 480, width: 640},
{name: "maxresdefault", url: "maxresdefault", height: 1280, width: 720},
{name: "start", url: "1", height: 90, width: 120},
{name: "middle", url: "2", height: 90, width: 120},
{name: "end", url: "3", height: 90, width: 120},
{name: "maxres", host: "invidio.us", url: "maxres", height: 720, width: 1280},
{name: "maxresdefault", host: "i.ytimg.com", url: "maxresdefault", height: 720, width: 1280},
{name: "sddefault", host: "i.ytimg.com", url: "sddefault", height: 480, width: 640},
{name: "high", host: "i.ytimg.com", url: "hqdefault", height: 360, width: 480},
{name: "medium", host: "i.ytimg.com", url: "mqdefault", height: 180, width: 320},
{name: "default", host: "i.ytimg.com", url: "default", height: 90, width: 120},
{name: "start", host: "i.ytimg.com", url: "1", height: 90, width: 120},
{name: "middle", host: "i.ytimg.com", url: "2", height: 90, width: 120},
{name: "end", host: "i.ytimg.com", url: "3", height: 90, width: 120},
}
# See https://github.com/rg3/youtube-dl/blob/master/youtube_dl/extractor/youtube.py#L380-#L476
@@ -247,6 +273,12 @@ class Video
streams.each { |s| s.add("label", "#{s["quality"]} - #{s["type"].split(";")[0].split("/")[1]}") }
streams = streams.uniq { |s| s["label"] }
if self.info["region"]?
streams.each do |fmt|
fmt["url"] += "&region=" + self.info["region"]
end
end
if streams[0]? && streams[0]["s"]?
streams.each do |fmt|
fmt["url"] += "&signature=" + decrypt_signature(fmt["s"], decrypt_function)
@@ -258,10 +290,88 @@ class Video
def adaptive_fmts(decrypt_function)
adaptive_fmts = [] of HTTP::Params
if self.info.has_key?("adaptive_fmts")
self.info["adaptive_fmts"].split(",") do |string|
adaptive_fmts << HTTP::Params.parse(string)
end
elsif self.info.has_key?("dashmpd")
client = make_client(YT_URL)
response = client.get(self.info["dashmpd"])
document = XML.parse_html(response.body)
document.xpath_nodes(%q(//adaptationset)).each do |adaptation_set|
mime_type = adaptation_set["mimetype"]
document.xpath_nodes(%q(.//representation)).each do |representation|
codecs = representation["codecs"]
itag = representation["id"]
bandwidth = representation["bandwidth"]
url = representation.xpath_node(%q(.//baseurl)).not_nil!.content
clen = url.match(/clen\/(?<clen>\d+)/).try &.["clen"]
clen ||= "0"
lmt = url.match(/lmt\/(?<lmt>\d+)/).try &.["lmt"]
lmt ||= "#{((Time.now + 1.hour).epoch_f.to_f64 * 1000000).to_i64}"
segment_list = representation.xpath_node(%q(.//segmentlist)).not_nil!
init = segment_list.xpath_node(%q(.//initialization))
# TODO: Replace with sane defaults when byteranges are absent
if init && !init["sourceurl"].starts_with? "sq"
init = init["sourceurl"].lchop("range/")
index = segment_list.xpath_node(%q(.//segmenturl)).not_nil!["media"]
index = index.lchop("range/")
index = "#{init.split("-")[1].to_i + 1}-#{index.split("-")[0].to_i}"
else
init = "0-0"
index = "1-1"
end
params = {
"type" => ["#{mime_type}; codecs=\"#{codecs}\""],
"url" => [url],
"projection_type" => ["1"],
"index" => [index],
"init" => [init],
"xtags" => [] of String,
"lmt" => [lmt],
"clen" => [clen],
"bitrate" => [bandwidth],
"itag" => [itag],
}
if mime_type == "video/mp4"
width = representation["width"]?
height = representation["height"]?
fps = representation["framerate"]?
metadata = itag_to_metadata?(itag)
if metadata
width ||= metadata["width"]?
height ||= metadata["height"]?
fps ||= metadata["fps"]?
end
if width && height
params["size"] = ["#{width}x#{height}"]
end
if width
params["quality_label"] = ["#{height}p"]
end
end
adaptive_fmts << HTTP::Params.new(params)
end
end
end
if self.info["region"]?
adaptive_fmts.each do |fmt|
fmt["url"] += "&region=" + self.info["region"]
end
end
if adaptive_fmts[0]? && adaptive_fmts[0]["s"]?
@@ -297,6 +407,23 @@ class Video
return @player_json.not_nil!
end
def paid
reason = self.player_response["playabilityStatus"]?.try &.["reason"]?
if reason == "This video requires payment to watch."
paid = true
else
paid = false
end
return paid
end
def premium
premium = self.player_response.to_s.includes? "Get YouTube without the ads."
return premium
end
def captions
captions = [] of Caption
if player_response["captions"]?
@@ -324,6 +451,10 @@ class Video
return description
end
def length_seconds
return self.info["length_seconds"].to_i
end
add_mapping({
id: String,
info: {
@@ -345,6 +476,13 @@ class Video
allowed_regions: Array(String),
is_family_friendly: Bool,
genre: String,
genre_url: String,
license: String,
sub_count_text: String,
author_thumbnail: {
type: String,
default: "",
},
})
end
@@ -362,19 +500,24 @@ class CaptionName
)
end
def get_video(id, db, refresh = true)
class VideoRedirect < Exception
end
def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}), refresh = true)
if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)
video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)
# If the record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
if refresh && Time.now - video.updated > 1.hour
# If the record was last updated over 10 minutes ago, refresh (expire param in response lasts for 6 hours)
if refresh && Time.now - video.updated > 10.minutes
begin
video = fetch_video(id)
video = fetch_video(id, proxies)
video_array = video.to_a
args = arg_array(video_array[1..-1], 2)
db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,\
published,description,language,author,ucid, allowed_regions, is_family_friendly, genre)\
published,description,language,author,ucid,allowed_regions,is_family_friendly,\
genre,genre_url,license,sub_count_text,author_thumbnail)\
= (#{args}) WHERE id = $1", video_array)
rescue ex
db.exec("DELETE FROM videos * WHERE id = $1", id)
@@ -382,8 +525,9 @@ def get_video(id, db, refresh = true)
end
end
else
video = fetch_video(id)
video = fetch_video(id, proxies)
video_array = video.to_a
args = arg_array(video_array)
db.exec("INSERT INTO videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
@@ -392,15 +536,19 @@ def get_video(id, db, refresh = true)
return video
end
def fetch_video(id)
html_channel = Channel(XML::Node).new
def fetch_video(id, proxies)
html_channel = Channel(XML::Node | String).new
info_channel = Channel(HTTP::Params).new
spawn do
client = make_client(YT_URL)
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&disable_polymer=1")
html = XML.parse_html(html.body)
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
if md = html.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
next html_channel.send(md["id"])
end
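# If YouTube answers with a Location header pointing at a different video id, that id is sent
# down the channel as a String and surfaced as a VideoRedirect by the caller below.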
html = XML.parse_html(html.body)
html_channel.send(html)
end
@@ -418,47 +566,72 @@ def fetch_video(id)
end
html = html_channel.receive
if html.as?(String)
raise VideoRedirect.new("#{html.as(String)}")
end
html = html.as(XML::Node)
info = info_channel.receive
if info["reason"]? && info["reason"].includes? "your country"
bypass_channel = Channel({HTTP::Params | Nil, XML::Node | Nil}).new
bypass_channel = Channel(HTTPProxy | Nil).new
BYPASS_REGIONS.each do |country_code|
proxies.each do |region, list|
spawn do
begin
proxies = get_proxies(country_code)
info = HTTP::Params.new({
"reason" => [info["reason"]],
})
# Try not to overload a single proxy
proxy = proxies[0, 5].sample(1)[0]
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
list.each do |proxy|
begin
client = HTTPClient.new(YT_URL)
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
client = HTTPClient.new(URI.parse("https://www.youtube.com"))
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
client.set_proxy(proxy)
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
client.set_proxy(proxy)
proxy_info = client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1")
proxy_info = HTTP::Params.parse(proxy_info.body)
if !proxy_info["reason"]?
proxy_html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
proxy_html = XML.parse_html(proxy_html.body)
bypass_channel.send({proxy_info, proxy_html})
else
bypass_channel.send({nil, nil})
info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
if !info["reason"]?
bypass_channel.send(proxy)
break
end
rescue ex
end
rescue ex
bypass_channel.send({nil, nil})
end
# If none of the proxies we tried returned a valid response
if info["reason"]?
bypass_channel.send(nil)
end
end
end
BYPASS_REGIONS.size.times do
response = bypass_channel.receive
if response[0] || response[1]
info = response[0].not_nil!
html = response[1].not_nil!
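# Wait on bypass_channel for up to one response per proxy region; the first proxy that worked is
# reused to fetch the watch page and video info, and its region is stored in info["region"] so
# stream URLs can later be routed through the same region.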
proxies.size.times do
proxy = bypass_channel.receive
if proxy
begin
client = HTTPClient.new(YT_URL)
client.read_timeout = 10.seconds
client.connect_timeout = 10.seconds
client.set_proxy(proxy)
html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1").body)
info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
if info["reason"]?
info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
end
proxy = {ip: proxy.proxy_host, port: proxy.proxy_port}
region = proxies.select { |region, list| list.includes? proxy }
if !region.empty?
info["region"] = region.keys[0]
end
break
rescue ex
end
end
end
end
@@ -488,12 +661,46 @@ def fetch_video(id)
published = html.xpath_node(%q(//meta[@itemprop="datePublished"])).not_nil!["content"]
published = Time.parse(published, "%Y-%m-%d", Time::Location.local)
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
is_family_friendly = html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).try &.["content"].split(",")
allowed_regions ||= [] of String
is_family_friendly = html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).try &.["content"] == "True"
is_family_friendly ||= true
genre = html.xpath_node(%q(//meta[@itemprop="genre"])).not_nil!["content"]
genre_url = html.xpath_node(%(//a[text()="#{genre}"])).try &.["href"]
case genre
when "Movies"
genre_url = "/channel/UClgRkhTL3_hImCAmdLfDE4g"
when "Education"
# Education channel is linked but does not exist
# genre_url = "/channel/UC3yA8nDwraeOfnYfBWun83g"
genre_url = ""
end
genre_url ||= ""
license = html.xpath_node(%q(//h4[contains(text(),"License")]/parent::*/ul/li))
if license
license = license.content
else
license = ""
end
sub_count_text = html.xpath_node(%q(//span[contains(@class, "yt-subscriber-count")]))
if sub_count_text
sub_count_text = sub_count_text["title"]
else
sub_count_text = "0"
end
author_thumbnail = html.xpath_node(%(//img[@alt="#{author}"]))
if author_thumbnail
author_thumbnail = author_thumbnail["data-thumb"]
else
author_thumbnail = ""
end
video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
nil, author, ucid, allowed_regions, is_family_friendly, genre)
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license, sub_count_text, author_thumbnail)
return video
end
@@ -533,8 +740,8 @@ def process_video_params(query, preferences)
video_start = decode_time(query["t"])
end
video_start ||= 0
if query["time_continu"]?
video_start = decode_time(query["t"])
if query["time_continue"]?
video_start = decode_time(query["time_continue"])
end
video_start ||= 0
if query["start"]?
@@ -581,7 +788,7 @@ def generate_thumbnails(json, id)
VIDEO_THUMBNAILS.each do |thumbnail|
json.object do
json.field "quality", thumbnail[:name]
json.field "url", "https://i.ytimg.com/vi/#{id}/#{thumbnail["url"]}.jpg"
json.field "url", "https://#{thumbnail[:host]}/vi/#{id}/#{thumbnail["url"]}.jpg"
json.field "width", thumbnail[:width]
json.field "height", thumbnail[:height]
end

@@ -13,23 +13,32 @@
</div>
</div>
<p class="h-box">
<div class="h-box">
<% if user %>
<% if subscriptions.includes? ucid %>
<a href="/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
<b>Unsubscribe from <%= author %></b>
</a>
<p>
<a id="subscribe" onclick="unsubscribe()" class="pure-button pure-button-primary"
href="/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
<b>Unsubscribe | <%= number_to_short_text(sub_count) %></b>
</a>
</p>
<% else %>
<a href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
<b>Subscribe to <%= author %></b>
</a>
<p>
<a id="subscribe" onclick="subscribe()" class="pure-button pure-button-primary"
href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
<b>Subscribe | <%= number_to_short_text(sub_count) %></b>
</a>
</p>
<% end %>
<% else %>
<a href="/login?referer=<%= env.get("current_page") %>">
<b>Login to subscribe to <%= author %></b>
</a>
<p>
<a id="subscribe" class="pure-button pure-button-primary"
href="/login?referer=<%= env.get("current_page") %>">
<b>Login to subscribe to <%= author %></b>
</a>
</p>
<% end %>
</p>
</div>
<p class="h-box">
<a href="https://www.youtube.com/channel/<%= ucid %>">View channel on YouTube</a>
@@ -37,8 +46,8 @@
<% videos.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>
<%= rendered "components/video" %>
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>
@@ -51,8 +60,50 @@
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if videos.size == 100 %>
<% if count == 60 %>
<a href="/channel/<%= ucid %>?page=<%= page + 1 %>">Next page</a>
<% end %>
</div>
</div>
<script>
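// When JavaScript is available the button's href is replaced with javascript:void(0) so clicks
// go through the XHR handlers below; with scripts disabled the plain /subscription_ajax link
// still performs the action.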
document.getElementById("subscribe")["href"] = "javascript:void(0);"
function subscribe() {
var url = "/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
subscribe_button = document.getElementById("subscribe");
subscribe_button.onclick = unsubscribe;
subscribe_button.innerHTML = '<b>Unsubscribe | <%= number_to_short_text(sub_count) %></b>'
}
}
}
}
function unsubscribe() {
var url = "/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
subscribe_button = document.getElementById("subscribe");
subscribe_button.onclick = subscribe;
subscribe_button.innerHTML = '<b>Subscribe | <%= number_to_short_text(sub_count) %></b>'
}
}
}
}
</script>

@@ -0,0 +1,95 @@
<div class="pure-u-1 pure-u-md-1-4">
<div class="h-box">
<% case item when %>
<% when SearchChannel %>
<a style="width:100%;" href="/channel/<%= item.ucid %>">
<% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
<% else %>
<center>
<img style="width:56.25%;" src="/ggpht<%= URI.parse(item.author_thumbnail).full_path %>"/>
</center>
<% end %>
<p><%= item.author %></p>
</a>
<p><%= number_with_separator(item.subscriber_count) %> subscribers</p>
<h5><%= item.description_html %></h5>
<% when SearchPlaylist %>
<% if item.id.starts_with? "RD" %>
<% url = "/mix?list=#{item.id}&continuation=#{item.videos[0]?.try &.id}" %>
<% else %>
<% url = "/playlist?list=#{item.id}" %>
<% end %>
<a style="width:100%;" href="<%= url %>">
<% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
<% else %>
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= item.videos[0]?.try &.id %>/mqdefault.jpg"/>
<p class="length"><%= recode_length_seconds(item.videos[0]?.try &.length_seconds || 0) %></p>
</div>
<% end %>
<p><%= item.title %></p>
</a>
<p>
<b><a style="width:100%;" href="/channel/<%= item.ucid %>"><%= item.author %></a></b>
</p>
<p><%= number_with_separator(item.video_count) %> videos</p>
<p>PLAYLIST</p>
<% when MixVideo %>
<a style="width:100%;" href="/watch?v=<%= item.id %>&list=<%= item.mixes[0] %>">
<% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
<% else %>
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
<p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
</div>
<% end %>
<p><%= item.title %></p>
</a>
<p>
<b><a style="width:100%;" href="/channel/<%= item.ucid %>"><%= item.author %></a></b>
</p>
<% when PlaylistVideo %>
<a style="width:100%;" href="/watch?v=<%= item.id %>&list=<%= item.playlists[0] %>">
<% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
<% else %>
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
<p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
</div>
<% end %>
<p><%= item.title %></p>
</a>
<% if item.responds_to?(:live_now) && item.live_now %>
<p>LIVE</p>
<% end %>
<p>
<b><a style="width:100%;" href="/channel/<%= item.ucid %>"><%= item.author %></a></b>
</p>
<% if Time.now - item.published > 1.minute %>
<h5>Shared <%= recode_date(item.published) %> ago</h5>
<% end %>
<% else %>
<a style="width:100%;" href="/watch?v=<%= item.id %>">
<% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
<% else %>
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
<p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
</div>
<% end %>
<p><%= item.title %></p>
</a>
<% if item.responds_to?(:live_now) && item.live_now %>
<p>LIVE</p>
<% end %>
<p>
<b><a style="width:100%;" href="/channel/<%= item.ucid %>"><%= item.author %></a></b>
</p>
<% if Time.now - item.published > 1.minute %>
<h5>Shared <%= recode_date(item.published) %> ago</h5>
<% end %>
<% end %>
</div>
</div>

@@ -4,13 +4,16 @@
<% if params[:video_loop] %>loop<% end %>
<% if params[:controls] %>controls<% end %>>
<% if hlsvp %>
<source src="<%= hlsvp %>" type="application/x-mpegURL">
<source src="<%= hlsvp %>" type="application/x-mpegURL" label="livestream">
<% else %>
<% if params[:listen] %>
<% audio_streams.each_with_index do |fmt, i| %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
<% end %>
<% else %>
<% if params[:quality] == "dash" %>
<source src="/api/manifest/dash/id/<%= video.id %>?local=true" type='application/dash+xml' label="dash">
<% end %>
<% fmt_stream.each_with_index do |fmt, i| %>
<% if params[:quality] %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= params[:quality] == fmt["label"].split(" - ")[0] %>">
@@ -63,8 +66,8 @@ var shareOptions = {
title: "<%= video.title.dump_unquoted %>",
description: "<%= description %>",
image: "<%= thumbnail %>",
embedCode: `<iframe id='ivplayer' type='text/html' width='640' height='360'
src='<%= host_url %>/embed/<%= video.id %>?<%= host_params %>' frameborder='0'></iframe>`
embedCode: "<iframe id='ivplayer' type='text/html' width='640' height='360' \
src='<%= host_url %>/embed/<%= video.id %>?<%= host_params %>' frameborder='0'></iframe>"
};
var player = videojs("player", options, function() {

@@ -1,12 +1,15 @@
<link rel="stylesheet" href="https://unpkg.com/video.js@6.12.0/dist/video-js.min.css">
<link rel="stylesheet" href="https://unpkg.com/silvermine-videojs-quality-selector@1.1.2/dist/css/quality-selector.css">
<link rel="stylesheet" href="https://unpkg.com/videojs-markers@1.0.1/dist/videojs.markers.min.css">
<link rel="stylesheet" href="https://unpkg.com/videojs-share@1.1.0/dist/videojs-share.css">
<script src="https://unpkg.com/video.js@6.12.0/dist/video.min.js"></script>
<script src="https://unpkg.com/videojs-hotkeys@0.2.22/build/videojs.hotkeys.min.js"></script>
<script src="https://unpkg.com/silvermine-videojs-quality-selector@1.1.2/dist/js/silvermine-videojs-quality-selector.min.js"></script>
<script src="https://unpkg.com/videojs-markers@1.0.1/dist/videojs-markers.min.js"></script>
<script src="https://unpkg.com/videojs-share@1.1.0/dist/videojs-share.min.js"></script>
<% if hlsvp %>
<script src="https://unpkg.com/@videojs/http-streaming@1.2.2/dist/videojs-http-streaming.min.js"></script>
<% end %>
<link rel="stylesheet" href="/css/video-js.min.css">
<link rel="stylesheet" href="/css/quality-selector.css">
<link rel="stylesheet" href="/css/videojs.markers.min.css">
<link rel="stylesheet" href="/css/videojs-share.css">
<script src="/js/video.min.js"></script>
<script src="/js/videojs.hotkeys.min.js"></script>
<script src="/js/silvermine-videojs-quality-selector.min.js"></script>
<script src="/js/videojs-markers.min.js"></script>
<script src="/js/videojs-share.min.js"></script>
<script src="/js/videojs-http-streaming.min.js"></script>
<% if env.get?("user") && env.get("user").as(User).preferences.quality == "dash" %>
<script src="/js/dash.mediaplayer.min.js"></script>
<script src="/js/videojs-dash.min.js"></script>
<script src="/js/videojs-contrib-quality-levels.min.js"></script>
<% end %>

@@ -1,23 +0,0 @@
<div class="pure-u-1 pure-u-md-1-4">
<div class="h-box">
<% if video.responds_to?(:playlists) && !video.playlists.empty? %>
<% params = "&list=#{video.playlists[0]}" %>
<% else %>
<% params = nil %>
<% end %>
<a style="width:100%;" href="/watch?v=<%= video.id %><%= params %>">
<% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
<% else %>
<img style="width:100%;" src="https://i.ytimg.com/vi/<%= video.id %>/mqdefault.jpg"/>
<% end %>
<p><%= video.title %></p>
</a>
<p>
<b><a style="width:100%;" href="/channel/<%= video.ucid %>"><%= video.author %></a></b>
</p>
<% if Time.now - video.published > 1.minute %>
<h5>Shared <%= recode_date(video.published) %> ago</h5>
<% end %>
</div>
</div>

@@ -13,7 +13,7 @@
</div>
<div class="pure-control-group">
<label for="import_youtube">Import <a target="_blank"
<label for="import_youtube">Import <a rel="noopener" target="_blank"
href="https://support.google.com/youtube/answer/6224202?hl=en-GB">YouTube subscriptions</a></label>
<input type="file" id="import_youtube" name="import_youtube">
</div>

@@ -4,8 +4,8 @@
<% top_videos.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>
<%= rendered "components/video" %>
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>

@@ -0,0 +1,22 @@
<% content_for "header" do %>
<title><%= mix.title %> - Invidious</title>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
<h3><%= mix.title %></h3>
</div>
<div class="pure-u-1-3" style="text-align:right;">
<h3>
<a href="/feed/playlist/<%= mix.id %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<% mix.videos.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>

@@ -6,6 +6,11 @@
<div class="pure-u-2-3">
<h3><%= playlist.title %></h3>
</div>
<div class="pure-u-1-3" style="text-align:right;">
<h3>
<a href="/feed/playlist/<%= plid %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-md-1-4">
@@ -16,13 +21,13 @@
</div>
<div class="h-box">
<p><%= playlist.description %></p>
<p><%= playlist.description_html %></p>
</div>
<% videos.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>
<%= rendered "components/video" %>
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>
@@ -30,7 +35,7 @@
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-md-1-5">
<% if page >= 2 %>
<a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">Next page</a>
<a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">Previous page</a>
<% end %>
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>

@@ -35,7 +35,7 @@ function update_value(element) {
<div class="pure-control-group">
<label for="quality">Preferred video quality: </label>
<select name="quality" id="quality">
<% {"hd720", "medium", "small"}.each do |option| %>
<% {"dash", "hd720", "medium", "small"}.each do |option| %>
<option <% if user.preferences.quality == option %> selected <% end %>><%= option %></option>
<% end %>
</select>
@@ -75,7 +75,7 @@ function update_value(element) {
</div>
<div class="pure-control-group">
<label for="captions_fallback">Fallback languages: </label>
<label for="captions_fallback">Fallback captions: </label>
<select class="pure-u-1-5" name="captions_1" id="captions_1">
<% CAPTION_LANGUAGES.each do |option| %>
<option <% if user.preferences.captions[1] == option %> selected <% end %>><%= option %></option>
@@ -89,7 +89,13 @@ function update_value(element) {
</select>
</div>
<div class="pure-control-group">
<label for="related_videos">Show related videos? </label>
<input name="related_videos" id="related_videos" type="checkbox" <% if user.preferences.related_videos %>checked<% end %>>
</div>
<legend>Visual preferences</legend>
<div class="pure-control-group">
<label for="dark_mode">Dark mode: </label>
<input name="dark_mode" id="dark_mode" type="checkbox" <% if user.preferences.dark_mode %>checked<% end %>>
@@ -101,6 +107,7 @@ function update_value(element) {
</div>
<legend>Subscription preferences</legend>
<div class="pure-control-group">
<label for="redirect_feed">Redirect homepage to feed: </label>
<input name="redirect_feed" id="redirect_feed" type="checkbox" <% if user.preferences.redirect_feed %>checked<% end %>>
@@ -136,6 +143,7 @@ function update_value(element) {
</div>
<legend>Data preferences</legend>
<div class="pure-control-group">
<a href="/clear_watch_history?referer=<%= referer %>">Clear watch history</a>
</div>

@@ -1,11 +1,11 @@
<% content_for "header" do %>
<title><%= query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<title><%= search_query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<% end %>
<% videos.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>
<%= rendered "components/video" %>
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>
@@ -13,13 +13,13 @@
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-md-1-5">
<% if page >= 2 %>
<a href="/search?q=<%= query %>&page=<%= page - 1 %>">Previous page</a>
<a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page - 1 %>">Previous page</a>
<% end %>
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if count == 20 %>
<a href="/search?q=<%= query %>&page=<%= page + 1 %>">Next page</a>
<% if count >= 20 %>
<a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page + 1 %>">Next page</a>
<% end %>
</div>
</div>

@@ -16,10 +16,17 @@
</div>
<center><%= notifications.size %> unseen notifications</center>
<% if !notifications.empty? %>
<div class="h-box">
<hr>
</div>
<% end %>
<% notifications.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>
<%= rendered "components/video" %>
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>
@@ -30,8 +37,8 @@
<% videos.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>
<%= rendered "components/video" %>
<% slice.each do |item| %>
<%= rendered "components/item" %>
<% end %>
</div>
<% end %>

@@ -4,10 +4,11 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="referrer" content="no-referrer">
<%= yield_content "header" %>
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/pure-min.css">
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/grids-responsive-min.css">
<link rel="stylesheet" href="https://unpkg.com/ionicons@4.2.6/dist/css/ionicons.min.css">
<link rel="stylesheet" href="/css/pure-min.css">
<link rel="stylesheet" href="/css/grids-responsive-min.css">
<link rel="stylesheet" href="/css/ionicons.min.css">
<link rel="stylesheet" href="/css/default.css">
<% if env.get?("user") && env.get("user").as(User).preferences.dark_mode %>
<link rel="stylesheet" href="/css/darktheme.css">
@@ -18,8 +19,8 @@
<body>
<div class="pure-g">
<div class="pure-u-1 pure-u-md-4-24"></div>
<div class="pure-u-1 pure-u-md-16-24">
<div class="pure-u-1 pure-u-md-2-24"></div>
<div class="pure-u-1 pure-u-md-20-24">
<div class="pure-g navbar h-box">
<div class="pure-u-1 pure-u-md-4-24">
<a href="/" class="index-link pure-menu-heading">Invidious</a>
@@ -27,14 +28,14 @@
<div class="pure-u-1 pure-u-md-12-24 searchbar">
<form class="pure-form" action="/search" method="get">
<fieldset>
<input type="search" style="width:100%;" name="q" placeholder="search" value="<%= env.params.query["q"]? %>">
<input type="search" style="width:100%;" name="q" placeholder="search" value="<%= env.params.query["q"]?.try {|x| HTML.escape(x)} || env.get?("search").try {|x| HTML.escape(x.as(String)) } %>">
</fieldset>
</form>
</div>
<div class="pure-u-1 pure-u-md-8-24 user-field">
<% if env.get? "user" %>
<div class="pure-u-1-4">
<a href="/toggle_theme?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<% preferences = env.get("user").as(User).preferences %>
<% if preferences.dark_mode %>
<i class="icon ion-ios-sunny"></i>
@@ -54,15 +55,15 @@
</a>
</div>
<div class="pure-u-1-4">
<a href="/preferences?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/preferences?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<i class="icon ion-ios-cog"></i>
</a>
</div>
<div class="pure-u-1-4">
<a href="/signout?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Sign out</a>
<a href="/signout?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Sign out</a>
</div>
<% else %>
<a href="/login?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Login</a>
<a href="/login?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Login</a>
<% end %>
</div>
</div>
@@ -86,8 +87,7 @@
<p>BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk</p>
</div>
</div>
<div class="pure-u-1 pure-u-md-4-24"></div>
</div>
<div class="pure-u-1 pure-u-md-2-24"></div>
</body>
</html>

@@ -5,7 +5,7 @@
<meta property="og:site_name" content="Invidious">
<meta property="og:url" content="<%= host_url %>/watch?v=<%= video.id %>">
<meta property="og:title" content="<%= HTML.escape(video.title) %>">
<meta property="og:image" content="https://i.ytimg.com/vi/<%= video.id %>/hqdefault.jpg">
<meta property="og:image" content="/vi/<%= video.id %>/maxres.jpg">
<meta property="og:description" content="<%= description %>">
<meta property="og:type" content="video.other">
<meta property="og:video:url" content="<%= host_url %>/embed/<%= video.id %>">
@@ -22,11 +22,12 @@
<meta name="twitter:player" content="<%= host_url %>/embed/<%= video.id %>">
<meta name="twitter:player:width" content="1280">
<meta name="twitter:player:height" content="720">
<script src="/js/watch.js"></script>
<%= rendered "components/player_sources" %>
<title><%= HTML.escape(video.title) %> - Invidious</title>
<% end %>
<div class="h-box">
<div id="player-container" class="h-box">
<%= rendered "components/player" %>
</div>
@@ -55,7 +56,16 @@
<p><i class="icon ion-ios-eye"></i> <%= number_with_separator(video.views) %></p>
<p><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
<p><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
<p id="Genre">Genre: <%= video.genre %></p>
<p id="Genre">Genre:
<% if video.genre_url.empty? %>
<%= video.genre %>
<% else %>
<a href="<%= video.genre_url %>"><%= video.genre %></a>
<% end %>
</p>
<% if !video.license.empty? %>
<p id="License">License: <%= video.license %></p>
<% end %>
<p id="FamilyFriendly">Family Friendly? <%= video.is_family_friendly %></p>
<p id="Wilson">Wilson Score: <%= video.wilson_score.round(4) %></p>
<p id="Rating">Rating: <%= rating.round(4) %> / 5</p>
@@ -82,20 +92,23 @@
<% if user %>
<% if subscriptions.includes? video.ucid %>
<p>
<a href="/subscription_ajax?action_remove_subscriptions=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
<b>Unsubscribe from <%= video.author %></b>
<a id="subscribe" onclick="unsubscribe()" class="pure-button pure-button-primary"
href="/subscription_ajax?action_remove_subscriptions=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
<b>Unsubscribe | <%= video.sub_count_text %></b>
</a>
</p>
<% else %>
<p>
<a href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
<b>Subscribe to <%= video.author %></b>
<a id="subscribe" onclick="subscribe()" class="pure-button pure-button-primary"
href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
<b>Subscribe | <%= video.sub_count_text %></b>
</a>
</p>
<% end %>
<% else %>
<p>
<a href="/login?referer=<%= env.get("current_page") %>">
<a id="subscribe" class="pure-button pure-button-primary"
href="/login?referer=<%= env.get("current_page") %>">
<b>Login to subscribe to <%= video.author %></b>
</a>
</p>
@@ -108,18 +121,26 @@
</div>
<hr>
<div id="comments">
<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>
</div>
</div>
</div>
<div class="pure-u-1 pure-u-md-1-5">
<% if plid %>
<div id="playlist" class="h-box">
</div>
<% end %>
<% if !preferences || preferences && preferences.related_videos %>
<div class="h-box">
<% rvs.each do |rv| %>
<% if rv.has_key?("id") %>
<a href="/watch?v=<%= rv["id"] %>">
<% if preferences && preferences.thin_mode %>
<% else %>
<img style="width:100%;" src="<%= rv["iurlmq"] %>">
<div class="thumbnail">
<img class="thumbnail" src="/vi/<%= rv["id"] %>/mqdefault.jpg">
<p class="length"><%= recode_length_seconds(rv["length_seconds"]?.try &.to_i? || 0) %></p>
</div>
<% end %>
<p style="width:100%"><%= rv["title"] %></p>
<p>
@@ -129,42 +150,130 @@
<% end %>
<% end %>
</div>
<% end %>
</div>
</div>
<script>
function toggle(target) {
body = target.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
function number_with_separator(val) {
while (/(\d+)(\d{3})/.test(val.toString())) {
val = val.toString().replace(/(\d+)(\d{3})/, "$1" + "," + "$2");
}
return val;
}
function toggle_comments(target) {
body = target.parentNode.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
subscribe_button = document.getElementById("subscribe");
if (subscribe_button.getAttribute('onclick')) {
subscribe_button["href"] = "javascript:void(0);";
}
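// The logged-out variant of the button has no onclick attribute, so its href keeps pointing at
// /login; for logged-in users the href is neutralized and the XHR handlers below take over.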
function get_youtube_replies(target) {
var continuation = target.getAttribute("data-continuation");
function subscribe() {
var url = "/subscription_ajax?action_create_subscription_to_channel=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
subscribe_button = document.getElementById("subscribe");
subscribe_button.onclick = unsubscribe;
subscribe_button.innerHTML = '<b>Unsubscribe | <%= video.sub_count_text %></b>'
}
}
}
}
function unsubscribe() {
var url = "/subscription_ajax?action_remove_subscriptions=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
subscribe_button = document.getElementById("subscribe");
subscribe_button.onclick = subscribe;
subscribe_button.innerHTML = '<b>Subscribe | <%= video.sub_count_text %></b>'
}
}
}
}
<% if plid %>
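// get_playlist() fills the #playlist box from /api/v1/playlists (or /api/v1/mixes for "RD" lists)
// and, when the response includes nextVideo, hooks the player's "ended" event to advance to it
// while preserving the current listen/autoplay/speed parameters.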
function get_playlist() {
playlist = document.getElementById("playlist");
playlist.innerHTML = ' \
<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3> \
<hr>'
var plid = "<%= plid %>"
if (plid.startsWith("RD")) {
var plid_url = "/api/v1/mixes/<%= plid %>?continuation=<%= video.id %>&format=html";
} else {
var plid_url = "/api/v1/playlists/<%= plid %>?continuation=<%= video.id %>&format=html";
}
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", plid_url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
playlist.innerHTML = xhr.response.playlistHtml;
if (xhr.response.nextVideo) {
player.on('ended', function() {
window.location.replace("/watch?v="
+ xhr.response.nextVideo
+ "&list=<%= plid %>"
<% if params[:listen] %>
+ "&listen=1"
<% end %>
<% if params[:autoplay] %>
+ "&autoplay=1"
<% end %>
<% if params[:speed] %>
+ "&speed=<%= params[:speed] %>"
<% end %>
);
});
}
} else {
playlist.innerHTML = "";
}
}
};
xhr.ontimeout = function() {
console.log("Pulling playlist timed out.");
comments = document.getElementById("playlist");
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3><hr>';
get_playlist();
};
}
get_playlist();
<% end %>
function get_reddit_comments() {
comments = document.getElementById("comments");
var fallback = comments.innerHTML;
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url =
"/api/v1/comments/<%= video.id %>?format=html&continuation=" + continuation;
var url = "/api/v1/comments/<%= video.id %>?source=reddit&format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
@@ -174,45 +283,25 @@ function get_youtube_replies(target) {
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
body.innerHTML = xhr.response.contentHtml;
} else {
body.innerHTML = fallback;
}
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
body.innerHTML = fallback;
};
}
function get_reddit_comments() {
var url = "/api/v1/comments/<%= video.id %>?source=reddit";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
{title}
</h3>
<b>
<a target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a>
</b>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
comments.innerHTML = ' \
<div> \
<h3> \
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a> \
{title} \
</h3> \
<p> \
<b> \
<a href="javascript:void(0)" onclick="swap_comments(\'youtube\')"> \
View YouTube comments \
</a> \
</b> \
</p> \
<b> \
<a rel="noopener" target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a> \
</b> \
</div> \
<div>{contentHtml}</div> \
<hr>'.supplant({
title: xhr.response.title,
permalink: xhr.response.permalink,
contentHtml: xhr.response.contentHtml
@@ -221,10 +310,10 @@ function get_reddit_comments() {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
comments.innerHTML = fallback;
<% end %>
}
}
};
xhr.ontimeout = function() {
@@ -235,6 +324,11 @@ function get_reddit_comments() {
}
function get_youtube_comments() {
comments = document.getElementById("comments");
var fallback = comments.innerHTML;
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url = "/api/v1/comments/<%= video.id %>?format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
@@ -243,54 +337,92 @@ function get_youtube_comments() {
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.readyState == 4) {
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
if (xhr.response.commentCount > 0) {
comments.innerHTML = ' \
<div> \
<h3> \
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a> \
View {commentCount} comments \
</h3> \
<b> \
<a href="javascript:void(0)" onclick="swap_comments(\'reddit\')"> \
View Reddit comments \
</a> \
</b> \
</div> \
<div>{contentHtml}</div> \
<hr>'.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: number_with_separator(xhr.response.commentCount)
});
} else {
comments.innerHTML = "";
}
} else {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
}
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
comments = document.getElementById("comments");
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
get_youtube_comments();
};
}
function commaSeparateNumber(val){
while (/(\d+)(\d{3})/.test(val.toString())){
val = val.toString().replace(/(\d+)(\d{3})/, '$1'+','+'$2');
}
return val;
}
function get_youtube_replies(target, load_more) {
var continuation = target.getAttribute('data-continuation');
String.prototype.supplant = function(o) {
return this.replace(/{([^{}]*)}/g, function(a, b) {
var r = o[b];
return typeof r === "string" || typeof r === "number" ? r : a;
});
};
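// supplant() is a small template helper: "{name}" placeholders are replaced with matching string
// or number properties from the supplied object, and anything else is left untouched.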
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url = '/api/v1/comments/<%= video.id %>?format=html&continuation=' +
continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.open('GET', url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
if (load_more) {
body = body.parentNode.parentNode;
body.removeChild(body.lastElementChild);
body.innerHTML += xhr.response.contentHtml;
} else {
body.innerHTML = ' \
<p><a href="javascript:void(0)" \
onclick="hide_youtube_replies(this)">Hide replies \
</a></p> \
<div>{contentHtml}</div>'.supplant({
contentHtml: xhr.response.contentHtml,
});
}
} else {
body.innerHTML = fallback;
}
}
};
xhr.ontimeout = function() {
console.log('Pulling comments timed out.');
body.innerHTML = fallback;
};
}
<% if preferences %>
<% if preferences.comments[0] == "youtube" %>