Compare commits


76 Commits

Author SHA1 Message Date
81ea2bf799 Don't nest YouTube replies 2018-10-22 17:15:36 -05:00
ed3d9ce540 Make channel extractor more robust 2018-10-21 21:44:20 -05:00
ef95dc2380 Add fix for show playlists 2018-10-21 19:54:41 -05:00
4875aa1d7e Add partial support for video duration in thumbnails 2018-10-20 20:37:55 -05:00
3ee7201f5d Comma seperate comment scores 2018-10-20 13:52:06 -05:00
3c634d9f66 Update styling for subscribe buttons 2018-10-20 13:51:52 -05:00
94d116974b Add break between text and sub count 2018-10-19 16:20:35 -05:00
5c87cf1547 Update subscribe buttons 2018-10-19 11:14:26 -05:00
1cfa1f6559 Add 'paid' and 'premium' flags to API 2018-10-16 11:15:14 -05:00
8b69e23471 Update CHANGELOG and bump version 2018-10-15 21:22:22 -05:00
57d88ffcc8 Fix fallback for comments 2018-10-15 11:15:23 -05:00
e46e6183ae Fix proxying for videos 2018-10-14 11:29:20 -05:00
b49623f90f Revert "Attempt to bypass channel region locks"
This reverts commit 95c6747a3e.
2018-10-14 11:14:27 -05:00
95c6747a3e Attempt to bypass channel region locks 2018-10-14 09:53:40 -05:00
245d0b571f Add next page for channels with geo-blocked videos 2018-10-14 09:06:04 -05:00
6e0df50a03 Remove migration points 2018-10-13 20:03:48 -05:00
f88697541c Add author_thumbnail to '/api/v1/videos' 2018-10-13 20:01:58 -05:00
5eefab62fd Add "show replies" and "hide replies" 2018-10-13 19:40:42 -05:00
13b0526c7a Fix subscribe button when logged out 2018-10-13 19:40:24 -05:00
1568a35cfb Add column to video update 2018-10-12 22:37:12 -05:00
93082c0a45 Remove migration points 2018-10-12 21:28:15 -05:00
1a39faee75 Add subCountText and add XHR alternative for subscribing to channels 2018-10-12 21:17:37 -05:00
81b447782a Fix speed param for playlist preferences 2018-10-10 19:55:28 -05:00
c87aa8671c Add fix for continuation on playlists smaller than 100 videos 2018-10-10 19:47:51 -05:00
921c34aa65 Create materialized views for Google accounts 2018-10-10 16:10:58 -05:00
ccc423f682 Fix 'latest only' feed 2018-10-09 18:39:19 -05:00
02335f3390 Fix typo 2018-10-09 18:10:27 -05:00
bcc8ba73bf Fix update_feeds job 2018-10-09 17:24:29 -05:00
35e63fa3f5 Use materialized views for subscription feeds 2018-10-09 08:40:29 -05:00
3fe4547f8e Update CHANGELOG and bump version 2018-10-09 08:09:04 -05:00
2dbe151ceb Add speed param to playlist redirect 2018-10-09 08:08:52 -05:00
e2c15468e0 Make usernames case-insensitive 2018-10-08 20:09:06 -05:00
022427e20e Fix typo 2018-10-08 17:52:55 -05:00
88430a6fc0 Add playlist playback support 2018-10-07 21:11:33 -05:00
c72b9bea64 Add '&list' to videos shown on mix page 2018-10-06 22:22:50 -05:00
80bc29f3cd Add basic handling for (almost) valid video URLs 2018-10-06 22:22:22 -05:00
f7125c1204 Move watch page JS into seperate file 2018-10-06 22:20:40 -05:00
6f9056fd84 Add extra handling for shortened video URLs 2018-10-06 22:19:36 -05:00
3733fe8272 Redirect mixes 2018-10-06 22:18:50 -05:00
98bb20abcd Add option to switch between YouTube and Reddit comments 2018-10-06 18:54:05 -05:00
a4d44d3286 Fix position of [ + ] button for YouTube comments 2018-10-06 18:53:27 -05:00
dc358fc7e5 Don't add channels if they've been deleted 2018-10-06 18:36:06 -05:00
e14f2f2750 Prevent duplicate subscriptions when importing user data 2018-10-06 18:19:47 -05:00
650b44ade2 Improve comment templating 2018-10-05 10:08:24 -05:00
3830604e42 Try to speed up find_working_proxies 2018-10-03 10:38:07 -05:00
f83e9e6eb9 Add config option for geo-bypass 2018-10-03 10:36:30 -05:00
236358d3ad Escape search query in "next page" and "previous page" links 2018-10-02 09:08:18 -05:00
43d6b65b4f Update CHANGELOG and bump version 2018-10-01 22:53:27 -05:00
f8eb5ab416 Break after successful response 2018-10-01 20:02:14 -05:00
ae2850215f Fix method for detecting valid info resposne 2018-10-01 19:55:47 -05:00
d418f50576 Make geo-bypass more robust 2018-10-01 19:01:44 -05:00
8c04768ef8 Add support for geo-bypass in '/videoplayback' 2018-09-30 20:26:28 -05:00
a718d5543d Add 'lang' and 'tlang' to '/api/v1/captions' 2018-09-30 10:13:07 -05:00
20130db556 Add mixes 2018-09-29 10:59:11 -05:00
66f3ab0663 Update README 2018-09-29 10:11:21 -05:00
1de7c0caf9 Merge pull request #186 from flourgaz/feature/docker-compose
Add basic docker-compose cluster
2018-09-29 10:04:31 -05:00
7d35b6e44f Add rel="noopener" to target="_blank" links 2018-09-29 09:56:37 -05:00
71a99542fe basic docker-compose cluster 2018-09-29 13:30:56 +02:00
8530c1f4ec Fix typo 2018-09-28 19:44:16 -05:00
29a6291957 Show info instead of empty playlist when possible 2018-09-28 09:54:45 -05:00
25ba5bda62 Fix encoding of playlist index 2018-09-28 09:54:01 -05:00
477c84deb1 Don't deliver new notifications for YouTube Red videos 2018-09-28 09:23:28 -05:00
c2f7d3d41c Add handling for specific genre channels 2018-09-27 17:11:19 -05:00
b0b5e3e982 Escape search queries 2018-09-27 17:02:59 -05:00
4fb275ec6e Get more video information when possible 2018-09-26 19:47:06 -05:00
f99b2cdf01 Add support for proxying comments 2018-09-26 18:44:37 -05:00
5d7bd9af0f Add host language for comments 2018-09-26 10:33:08 -05:00
aa819a189e Use alternate source for proxies 2018-09-25 21:07:18 -05:00
2e65997447 Fix geo-bypass threads 2018-09-25 18:16:07 -05:00
3e3de1890a Overhaul geo-bypass 2018-09-25 17:56:59 -05:00
5b5d69a33b Add host language to YouTube requests 2018-09-25 17:55:32 -05:00
1289065151 Add host language to fetch_video 2018-09-25 17:42:17 -05:00
21a8df42dd Add fix for short playlist descriptions 2018-09-25 10:28:57 -05:00
74b285d0f7 Add author thumbnails to playlist endpoint 2018-09-25 10:28:40 -05:00
c2e72439f5 Don't add anchor for empty genre URL 2018-09-25 10:10:25 -05:00
87498ae777 Update CHANGELOG 2018-09-25 09:55:14 -05:00
32 changed files with 1664 additions and 413 deletions

View File

@ -1,3 +1,74 @@
# 0.10.0 (2018-10-16)
## Week 10: Subscriptions
This week I'm happy to announce that subscriptions have been drastically sped up with [`35e63fa`](https://github.com/omarroth/invidious/35e63fa). As I mentioned last week, this essentially "caches" a user's feed, meaning that operations that previously took 20 seconds or timed out can now load in under a second. I'd take a look at [#173](https://github.com/omarroth/invidious/issues/173) for a sample benchmark. Previously, the features that make Invidious's feed so useful, such as filtering by unseen videos and by author, would take too long to load and would instead time out. I'm very happy that this has been fixed, and folks can get back to using these features.
Among the smaller features added this week is [#118](https://github.com/omarroth/invidious/issues/118), which adds, in my opinion, some very attractive subscribe and unsubscribe buttons. I think it's a bit of a functional improvement as well, since it no longer requires a user to reload the page in order to subscribe or unsubscribe to a channel, and it also gives the opportunity to put the channel's sub count on display.
An option to swap between Reddit and YouTube comments without a page reload has been added with [`5eefab6`](https://github.com/omarroth/invidious/5eefab6), bringing it somewhat closer in functionality to the popular [AlienTube](https://github.com/xlexi/alientube) extension, on which it is based (although the extension unfortunately now appears to be fragmented).
As always, there are a couple of smaller improvements this week, including some minor fixes for geo-bypass with [`e46e618`](https://github.com/omarroth/invidious/e46e618) and [`245d0b5`](https://github.com/omarroth/invidious/245d0b5), playlist preferences with [`81b4477`](https://github.com/omarroth/invidious/81b4477), and YouTube comments with [`02335f3`](https://github.com/omarroth/invidious/02335f3).
This coming week I'd also recommend keeping an eye on the excellent [FreeTube](https://github.com/FreeTubeApp/FreeTube), which is looking forward to a new release. I've been very lucky to work with [**@PrestonN**](https://github.com/PrestonN) for the past few weeks to improve the Invidious API, and I'm quite looking forward to the new release.
That's all for this week folks, thank you all again for your continued interest and support.
# 0.9.0 (2018-10-08)
## Week 9: Playlists
Not as much to announce this week, but I'm still quite happy to share a couple of things, namely:
Playback support for playlists has finally been added with [`88430a6`](https://github.com/omarroth/invidious/88430a6). You can now view playlists with the `&list=` query param, as you would on YouTube. You can also view mixes with the same `&list=` param, although they require some extra handling that I would like to add in the coming week, along with playlist looping and shuffle. I think playback support has been a roadblock for more exciting features such as [#114](https://github.com/omarroth/invidious/issues/114), and I look forward to improving the experience.
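For anyone who wants to try it out, the URL shape is the same as on YouTube; a quick sketch (the playlist ID below is just a placeholder, and `curl` is only used here to show the request):

```bash
# Play a video in the context of a playlist; PLxxxxxxxxxxx is a placeholder ID:
$ curl -s "https://www.invidio.us/watch?v=YE7VzlLtp-4&list=PLxxxxxxxxxxx"
```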
Comments have had a bit of a cosmetic upgrade with [#132](https://github.com/omarroth/invidious/issues/132), which I think helps better distinguish between Reddit and YouTube comments, as it makes them appear similarly to their respective sites. You can also now switch between YouTube and Reddit comments with a push of a button, which I think is quite an improvement, especially for newer or less popular videos with fewer comments.
I've had a small breakthrough in speeding up users' subscription feeds with PostgreSQL's [materialized views](https://www.postgresql.org/docs/current/static/rules-materializedviews.html). Without going into too much detail, materialized views essentially cache the result of a query, making it possible to run resource-intensive queries once, rather than every time a user visits their feed. In the coming week I hope to push this out to users, and hopefully close [#173](https://github.com/omarroth/invidious/issues/173).
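For the curious, here's a rough sketch of the idea in `psql`. The `CREATE MATERIALIZED VIEW` statement mirrors the one Invidious now runs when a user registers; the view name suffix and email are placeholders (Invidious derives the suffix from a hash of the user's email), and the refresh normally happens in a background job rather than by hand:

```bash
# Cache a user's subscription feed as a materialized view (placeholder names):
$ psql invidious -c "CREATE MATERIALIZED VIEW subscriptions_abcd1234 AS \
    SELECT * FROM channel_videos WHERE \
    ucid = ANY ((SELECT subscriptions FROM users WHERE email = 'user@example.com')::text[]) \
    ORDER BY published DESC;"

# Reading the feed is now a cheap SELECT against the view; the expensive
# query only re-runs when the view is refreshed:
$ psql invidious -c "REFRESH MATERIALIZED VIEW subscriptions_abcd1234;"
```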
I haven't had as much time to work on the project this week, but I'm quite happy to have added some new features. Have a great week everyone.
# 0.8.0 (2018-10-02)
## Week 8: Mixes
Hello again!
Mixes have been added with [`20130db`](https://github.com/omarroth/invidious/20130db), which makes it easy to create a playlist of related content. See [#188](https://github.com/omarroth/invidious/issues/188) for more info on how they work. Currently, they return the first 50 videos rather than a continuous feed in order to avoid tracking by Google/YouTube, which I think is a good trade-off between usability and privacy, and I hope other folks agree. You can create mixes by adding `RD` to the beginning of a video ID; an example is provided [here](https://www.invidio.us/mix?list=RDYE7VzlLtp-4) based on Big Buck Bunny. I've been quite happy with the results returned for the mixes I've tried, and they are not limited to music, which I think is a big plus. To emulate the continuous feed many are used to, using the last video of each mix as a new 'seed' has worked well for me. In the coming week I'd like to add playback support in the player so these can be listened to more easily.
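If you'd like to poke at mixes from the command line, this release also exposes them through the API; a quick sketch against the public instance, using the Big Buck Bunny seed from the example above:

```bash
# Fetch the first 50 videos of a mix seeded from Big Buck Bunny:
$ curl -s "https://www.invidio.us/api/v1/mixes/RDYE7VzlLtp-4"

# Emulate a continuous feed by using the last video ID of the
# previous response as the continuation:
$ curl -s "https://www.invidio.us/api/v1/mixes/RDYE7VzlLtp-4?continuation=<last video ID>"
```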
A very big thanks to [**@flourgaz**](https://github.com/flourgaz) for Docker support with [#186](https://github.com/omarroth/invidious/pull/186). This is an enormous improvement in portability for the project, and opens the door for Heroku support (see [#162](https://github.com/omarroth/invidious/issues/162)) as well as seamless support on Windows. For most users, it should be as easy as running `docker-compose up`.
I've spent quite a bit of time this past week improving support for geo-bypass (see [#92](https://github.com/omarroth/invidious/issues/92)), and am happy to note that Invidious has been able to proxy ~50% of the geo-restricted videos I've tried. In addition, you can now watch geo-restricted videos if you have `dash` enabled as your `preferred quality`, for more details see [#34](https://github.com/omarroth/invidious/issues/34) and [#185](https://github.com/omarroth/invidious/issues/185), or last week's update. For folks interested in replicating these results for themselves, I'd take a look [here](https://gist.github.com/omarroth/3ce0f276c43e0c4b13e7d9cd35524688) for the script used, and [here](https://gist.github.com/omarroth/beffc4a76a7b82a422e1b36a571878ef) for a list of videos restricted in the US.
1080p has seen a fairly smooth roll-out, although there have been a couple issues reported, mainly [#193](https://github.com/omarroth/invidious/issues/193), which is likely an issue in the player. I've also encountered a couple other issues myself that I would like to investigate. Although none are major, I'd like to keep 1080p opt-in for registered users another week to better address these issues.
Have an excellent week everyone.
# 0.7.0 (2018-09-25)
## Week 7: 1080p and Search Types
Hello again everyone! I've got quite a few announcements this week:
Experimental 1080p support has been added with [`b3ca392`](https://github.com/omarroth/invidious/b3ca392), and can be enabled by going to preferences and changing `preferred video quality` to `dash`. You can find more details [here](https://github.com/omarroth/invidious/issues/34#issuecomment-424171888). Currently quality and speed controls have not yet been integrated into the player, but I'd still appreciate feedback, mainly on any issues with buffering or DASH playback. I hope to integrate 1080p support into the player and push support site-wide in the coming weeks.
You can now filter content types in search with the `type:TYPE` filter. Supported content types are `playlist`, `channel`, and `video`. More info is available [here](https://github.com/omarroth/invidious/issues/126#issuecomment-423823148). I think this is quite an improvement in usability and I hope others find the same.
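For example, the filter is part of the regular query string, so it works the same from the search box or a script; a quick sketch against the public instance (the queries themselves are just illustrations):

```bash
# Return only channels matching "computerphile":
$ curl -s "https://www.invidio.us/search?q=computerphile+type:channel"

# Return only playlists matching "crystal lang":
$ curl -s "https://www.invidio.us/search?q=crystal+lang+type:playlist"
```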
A [CHANGELOG](https://github.com/omarroth/invidious/blob/master/CHANGELOG.md) has been added to the repository, so folks will now receive a copy of all these updates when cloning. I think this is an improvement in how the project's updates are distributed, as they are no longer tied to the `/releases` tab on GitHub or the posts on Patreon.
Recently, users have been reporting 504s when attempting to access their subscriptions, which is tracked in [#173](https://github.com/omarroth/invidious/issues/173). This is most likely caused by an uptick in usage, which I am absolutely grateful for, but unfortunately has resulted in an increase in costs for hosting the site, which is why I will be bumping my goal on Patreon from $60 to $80. I would appreciate any feedback on how subscriptions could be improved.
Other minor improvements include:
- Additional regions added to bypass geo-block with [`9a78523`](https://github.com/omarroth/invidious/9a78523)
- Fix for playlists containing less than 100 videos (previously shown as empty) with [`35ac887`](https://github.com/omarroth/invidious/35ac887)
- Fix for `published` date for Reddit comments (previously showing negative seconds) with [`6e09202`](https://github.com/omarroth/invidious/6e09202)
Thank you everyone for your support!
# 0.6.0 (2018-09-18)
## Week 6: Filters and Thumbnails

View File

@ -28,6 +28,29 @@ BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk
## Installation
### Docker:
#### Build and start cluster:
```bash
$ docker-compose up
```
And visit `localhost:3000` in your browser.
#### Rebuild cluster:
```bash
$ docker-compose build
```
#### Delete data and rebuild:
```bash
$ docker volume rm invidious_postgresdata
$ docker-compose build
```
### Installing [Crystal](https://github.com/crystal-lang/crystal):
#### On Arch:
@ -74,8 +97,21 @@ $ sudo pacman -S imagemagick librsvg
## Usage:

```bash
$ crystal build src/invidious.cr --release
$ ./invidious -h
Usage: invidious [arguments]

    -b HOST, --bind HOST             Host to bind (defaults to 0.0.0.0)
    -p PORT, --port PORT             Port to listen for connections (defaults to 3000)
    -s, --ssl                        Enables SSL
    --ssl-key-file FILE              SSL key file
    --ssl-cert-file FILE             SSL certificate file
    -h, --help                       Shows this help
    -t THREADS, --crawl-threads=THREADS
                                     Number of threads for crawling (default: 1)
    -c THREADS, --channel-threads=THREADS
                                     Number of threads for refreshing channels (default: 1)
    -v THREADS, --video-threads=THREADS
                                     Number of threads for refreshing videos (default: 1)
```

Or for development:

View File

@ -17,6 +17,44 @@ div {
animation: spin 2s linear infinite;
}
.playlist-restricted {
height: 20em;
padding-right: 10px;
}
a.pure-button-primary {
background-color: #a0a0a0;
color: rgba(35, 35, 35, 1);
}
a.pure-button-primary:hover {
background-color: rgba(0, 182, 240, 1);
color: #fff;
}
div.thumbnail {
position: relative;
}
img.thumbnail {
width: 100%;
left: 0;
top: 0;
}
.length {
z-index: 100;
position: absolute;
background-color: rgba(35, 35, 35, 0.75);
color: #fff;
border-radius: 2px;
padding: 2px;
font-size: 16px;
font-family: sans-serif;
right: 0.5em;
bottom: -0.5em;
}
/*
 * Navbar
 */

52
assets/js/watch.js Normal file
View File

@ -0,0 +1,52 @@
function toggle_parent(target) {
    // Collapse or expand a comment's replies; `target` is the [ - ]/[ + ] link
    var body = target.parentNode.parentNode.children[1];
    if (body.style.display === null || body.style.display === "") {
        target.innerHTML = "[ + ]";
        body.style.display = "none";
    } else {
        target.innerHTML = "[ - ]";
        body.style.display = "";
    }
}

function toggle_comments(target) {
    // Same toggle, but for the comment section as a whole (one level higher)
    var body = target.parentNode.parentNode.parentNode.children[1];
    if (body.style.display === null || body.style.display === "") {
        target.innerHTML = "[ + ]";
        body.style.display = "none";
    } else {
        target.innerHTML = "[ - ]";
        body.style.display = "";
    }
}

function swap_comments(source) {
    // Switch between YouTube and Reddit comments without a page reload
    if (source == "youtube") {
        get_youtube_comments();
    } else if (source == "reddit") {
        get_reddit_comments();
    }
}

String.prototype.supplant = function(o) {
    // Minimal templating: replace each {key} with o[key] when present
    return this.replace(/{([^{}]*)}/g, function(a, b) {
        var r = o[b];
        return typeof r === "string" || typeof r === "number" ? r : a;
    });
};

function show_youtube_replies(target) {
    var body = target.parentNode.parentNode.children[1];
    body.style.display = "";

    target.innerHTML = "Hide replies";
    target.setAttribute("onclick", "hide_youtube_replies(this)");
}

function hide_youtube_replies(target) {
    var body = target.parentNode.parentNode.children[1];
    body.style.display = "none";

    target.innerHTML = "Show replies";
    target.setAttribute("onclick", "show_youtube_replies(this)");
}

View File

@ -1,5 +1,6 @@
crawl_threads: 1
channel_threads: 1
feed_threads: 1
video_threads: 1
db:
  user: kemal
@ -9,3 +10,4 @@ db:
  dbname: invidious
full_refresh: false
https_only: false
geo_bypass: true

View File

@ -22,6 +22,8 @@ CREATE TABLE public.videos
genre text COLLATE pg_catalog."default",
genre_url text COLLATE pg_catalog."default",
license text COLLATE pg_catalog."default",
sub_count_text text COLLATE pg_catalog."default",
author_thumbnail text COLLATE pg_catalog."default",
CONSTRAINT videos_pkey PRIMARY KEY (id)
)
WITH (

21
docker-compose.yml Normal file
View File

@ -0,0 +1,21 @@
version: '3'

services:

  postgres:
    build:
      context: .
      dockerfile: docker/Dockerfile.postgres
    restart: unless-stopped
    volumes:
      - postgresdata:/var/lib/postgresql/data

  invidious:
    build:
      context: .
      dockerfile: docker/Dockerfile
    restart: unless-stopped
    ports:
      - "3000:3000"
    depends_on:
      - postgres

volumes:
  postgresdata:

15
docker/Dockerfile Normal file
View File

@ -0,0 +1,15 @@
FROM archlinux/base
RUN pacman -Sy --noconfirm shards crystal imagemagick librsvg \
which pkgconf gcc ttf-liberation
# base-devel contains many other basic packages that are normally assumed to already exist on a clean Arch system
ADD . /invidious
WORKDIR /invidious
RUN sed -i 's/host: localhost/host: postgres/' config/config.yml && \
shards && \
crystal build src/invidious.cr
CMD [ "/invidious/invidious" ]

View File

@ -0,0 +1,10 @@
FROM postgres:10
ENV POSTGRES_USER postgres
ADD ./setup.sh /setup.sh
ADD ./config/sql /config/sql
ADD ./docker/entrypoint.postgres.sh /entrypoint.sh
ENTRYPOINT [ "/entrypoint.sh" ]
CMD [ "postgres" ]

19
docker/entrypoint.postgres.sh Executable file
View File

@ -0,0 +1,19 @@
#!/usr/bin/env bash
CMD="$@"
if [ ! -f /var/lib/postgresql/data/setupFinished ]; then
echo "### first run - setting up invidious database"
/usr/local/bin/docker-entrypoint.sh postgres &
sleep 10
until runuser -l postgres -c 'pg_isready' 2>/dev/null; do
>&2 echo "### Postgres is unavailable - waiting"
sleep 5
done
>&2 echo "### importing table schemas"
su postgres -c "/setup.sh" && touch /var/lib/postgresql/data/setupFinished
echo "### invidious database setup finished"
exit
fi
echo "running postgres /usr/local/bin/docker-entrypoint.sh $CMD"
exec /usr/local/bin/docker-entrypoint.sh $CMD

View File

@ -1,7 +1,8 @@
#!/bin/bash
createdb invidious
#createuser kemal
psql -c "CREATE USER kemal WITH PASSWORD 'kemal';"
psql invidious < config/sql/channels.sql
psql invidious < config/sql/videos.sql
psql invidious < config/sql/channel_videos.sql

View File

@ -1,5 +1,5 @@
name: invidious
version: 0.10.0

authors:
  - Omar Roth <omarroth@hotmail.com>

View File

@ -31,6 +31,7 @@ HMAC_KEY = CONFIG.hmac_key || Random::Secure.random_bytes(32)
crawl_threads = CONFIG.crawl_threads
channel_threads = CONFIG.channel_threads
feed_threads = CONFIG.feed_threads
video_threads = CONFIG.video_threads

Kemal.config.extra_options do |parser|
@ -51,6 +52,14 @@ Kemal.config.extra_options do |parser|
exit
end
end
parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{feed_threads})") do |number|
begin
feed_threads = number.to_i
rescue ex
puts "THREADS must be integer"
exit
end
end
parser.on("-v THREADS", "--video-threads=THREADS", "Number of threads for refreshing videos (default: #{video_threads})") do |number| parser.on("-v THREADS", "--video-threads=THREADS", "Number of threads for refreshing videos (default: #{video_threads})") do |number|
begin begin
video_threads = number.to_i video_threads = number.to_i
@ -85,6 +94,8 @@ end
refresh_channels(PG_DB, channel_threads, CONFIG.full_refresh)

refresh_feeds(PG_DB, feed_threads)

video_threads.times do |i|
spawn do
refresh_videos(PG_DB)
@ -105,6 +116,17 @@ spawn do
end
end
proxies = {} of String => Array({ip: String, port: Int32})
if CONFIG.geo_bypass
spawn do
find_working_proxies(BYPASS_REGIONS) do |region, list|
if !list.empty?
proxies[region] = list
end
end
end
end
before_all do |env|
env.response.headers["X-XSS-Protection"] = "1; mode=block;"
env.response.headers["X-Content-Type-Options"] = "nosniff"
@ -206,6 +228,8 @@ get "/watch" do |env|
next env.redirect "/"
end
plid = env.params.query["list"]?
user = env.get? "user"
if user
user = user.as(User)
@ -225,7 +249,9 @@ get "/watch" do |env|
end

begin
video = get_video(id, PG_DB, proxies)
rescue ex : VideoRedirect
next env.redirect "/watch?v=#{ex.message}"
rescue ex
error_message = ex.message
STDOUT << id << " : " << ex.message << "\n"
@ -325,7 +351,9 @@ get "/embed/:id" do |env|
params = process_video_params(env.params.query, nil)

begin
video = get_video(id, PG_DB, proxies)
rescue ex : VideoRedirect
next env.redirect "/embed/#{ex.message}"
rescue ex
error_message = ex.message
next templated "error"
@ -381,6 +409,7 @@ get "/embed/:id" do |env|
end

# Playlists
get "/playlist" do |env|
plid = env.params.query["list"]?
if !plid
@ -390,17 +419,45 @@ get "/playlist" do |env|
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if plid.starts_with? "RD"
next env.redirect "/mix?list=#{plid}"
end
begin
playlist = fetch_playlist(plid)
rescue ex
error_message = ex.message
next templated "error"
end
begin
videos = fetch_playlist_videos(plid, page, playlist.video_count)
rescue ex
videos = [] of PlaylistVideo
end
templated "playlist"
end
get "/mix" do |env|
rdid = env.params.query["list"]?
if !rdid
next env.redirect "/"
end
continuation = env.params.query["continuation"]?
continuation ||= rdid.lchop("RD")
begin
mix = fetch_mix(rdid, continuation)
rescue ex
error_message = ex.message
next templated "error"
end
templated "mix"
end
# Search
get "/results" do |env|
@ -429,9 +486,8 @@ get "/search" do |env|
user = env.get? "user"
if user
user = user.as(User)
view_name = "subscriptions_#{sha256(user.email)[0..7]}"
end

channel = nil
content_type = "all"
@ -468,14 +524,19 @@ get "/search" do |env|
if channel
count, videos = channel_search(search_query, page, channel)
elsif subscriptions
if view_name
videos = PG_DB.query_all("SELECT id,title,published,updated,ucid,author FROM (
SELECT *,
to_tsvector(#{view_name}.title) ||
to_tsvector(#{view_name}.author)
as document
FROM #{view_name}
) v_search WHERE v_search.document @@ plainto_tsquery($1) LIMIT 20 OFFSET $2;", search_query, (page - 1) * 20, as: ChannelVideo)
count = videos.size
else
videos = [] of ChannelVideo
count = 0
end
else
begin
search_params = produce_search_params(sort: sort, date: date, content_type: content_type,
@ -709,7 +770,7 @@ post "/login" do |env|
end

if action == "signin"
user = PG_DB.query_one?("SELECT * FROM users WHERE LOWER(email) = LOWER($1) AND password IS NOT NULL", email, as: User)
if !user
error_message = "Invalid username or password"
@ -723,7 +784,7 @@ post "/login" do |env|
if Crypto::Bcrypt::Password.new(user.password.not_nil!) == password
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
PG_DB.exec("UPDATE users SET id = id || $1 WHERE LOWER(email) = LOWER($2)", [sid], email)

if Kemal.config.ssl || CONFIG.https_only
secure = true
@ -738,7 +799,7 @@ post "/login" do |env|
next templated "error"
end
elsif action == "register"
user = PG_DB.query_one?("SELECT * FROM users WHERE LOWER(email) = LOWER($1) AND password IS NOT NULL", email, as: User)
if user
error_message = "Please sign in"
next templated "error"
@ -753,6 +814,12 @@ post "/login" do |env|
PG_DB.exec("INSERT INTO users VALUES (#{args})", user_array) PG_DB.exec("INSERT INTO users VALUES (#{args})", user_array)
view_name = "subscriptions_#{sha256(user.email)[0..7]}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE \
ucid = ANY ((SELECT subscriptions FROM users WHERE email = '#{user.email}')::text[]) \
ORDER BY published DESC;")
if Kemal.config.ssl || CONFIG.https_only
secure = true
else
@ -1079,12 +1146,14 @@ post "/data_control" do |env|
body = JSON.parse(body)
body["subscriptions"].as_a.each do |ucid|
ucid = ucid.as_s

if !user.subscriptions.includes? ucid
begin
client = make_client(YT_URL)
get_channel(ucid, client, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
user.subscriptions << ucid
rescue ex
next
end
@ -1093,8 +1162,10 @@ post "/data_control" do |env|
body["watch_history"].as_a.each do |id| body["watch_history"].as_a.each do |id|
id = id.as_s id = id.as_s
if !user.watched.includes? id if !user.watched.includes? id
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", id, user.email) PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", id, user.email)
user.watched << id
end end
end end
@ -1105,11 +1176,12 @@ post "/data_control" do |env|
ucid = channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0] ucid = channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
if !user.subscriptions.includes? ucid if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin begin
client = make_client(YT_URL) client = make_client(YT_URL)
get_channel(ucid, client, PG_DB, false, false) get_channel(ucid, client, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
user.subscriptions << ucid
rescue ex rescue ex
next next
end end
@ -1120,11 +1192,12 @@ post "/data_control" do |env|
ucid = md["channel_id"] ucid = md["channel_id"]
if !user.subscriptions.includes? ucid if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin begin
client = make_client(YT_URL) client = make_client(YT_URL)
get_channel(ucid, client, PG_DB, false, false) get_channel(ucid, client, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
user.subscriptions << ucid
rescue ex rescue ex
next next
end end
@ -1136,11 +1209,12 @@ post "/data_control" do |env|
ucid = channel["url"].as_s.match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0] ucid = channel["url"].as_s.match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
if !user.subscriptions.includes? ucid if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin begin
client = make_client(YT_URL) client = make_client(YT_URL)
get_channel(ucid, client, PG_DB, false, false) get_channel(ucid, client, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
user.subscriptions << ucid
rescue ex rescue ex
next next
end end
@ -1156,19 +1230,24 @@ post "/data_control" do |env|
db = entry.io.gets_to_end
db.scan(/youtube\.com\/watch\?v\=(?<id>[a-zA-Z0-9_-]{11})/) do |md|
id = md["id"]
if !user.watched.includes? id
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", id, user.email)
user.watched << id
end
end

db.scan(/youtube\.com\/channel\/(?<ucid>[a-zA-Z0-9_-]{22})/) do |md|
ucid = md["ucid"]
if !user.subscriptions.includes? ucid
begin
client = make_client(YT_URL)
get_channel(ucid, client, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
user.subscriptions << ucid
rescue ex
next
end
@ -1306,6 +1385,8 @@ get "/feed/subscriptions" do |env|
notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
as: Array(String))

view_name = "subscriptions_#{sha256(user.email)[0..7]}"

if preferences.notifications_only && !notifications.empty?
args = arg_array(notifications)
@ -1328,39 +1409,35 @@ get "/feed/subscriptions" do |env|
else
if preferences.latest_only
if preferences.unseen_only
if user.watched.empty?
watched = "'{}'"
else
watched = arg_array(user.watched)
end

videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} WHERE \
id NOT IN (#{watched}) ORDER BY ucid, published DESC",
user.watched, as: ChannelVideo)
else
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} \
ORDER BY ucid, published DESC", as: ChannelVideo)
end

videos.sort_by! { |video| video.published }.reverse!
else
if preferences.unseen_only
if user.watched.empty?
watched = "'{}'"
else
watched = arg_array(user.watched, 3)
end

videos = PG_DB.query_all("SELECT * FROM #{view_name} WHERE \
id NOT IN (#{watched}) LIMIT $1 OFFSET $2",
[limit, offset] + user.watched, as: ChannelVideo)
else
videos = PG_DB.query_all("SELECT * FROM #{view_name} \
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
end
end
@ -1409,29 +1486,8 @@ get "/feed/channel/:ucid" do |env|
halt env, status_code: 404, response: error_message
end

page = 1
videos, count = get_60_videos(ucid, page, auto_generated)

host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
path = env.request.path
@ -1518,15 +1574,14 @@ get "/feed/private" do |env|
latest_only ||= 0
latest_only = latest_only == 1

view_name = "subscriptions_#{sha256(user.email)[0..7]}"

if latest_only
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} ORDER BY ucid, published DESC", as: ChannelVideo)
videos.sort_by! { |video| video.published }.reverse!
else
videos = PG_DB.query_all("SELECT * FROM #{view_name} \
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
end

sort = env.params.query["sort"]?
@ -1663,7 +1718,7 @@ get "/channel/:ucid" do |env|
page ||= 1

begin
author, ucid, auto_generated, sub_count = get_about_info(ucid)
rescue ex
error_message = "User does not exist"
next templated "error"
@ -1677,27 +1732,7 @@ get "/channel/:ucid" do |env|
end
end

videos, count = get_60_videos(ucid, page, auto_generated)

templated "channel"
end
@ -1718,11 +1753,15 @@ end
# API Endpoints
get "/api/v1/captions/:id" do |env|
env.response.content_type = "application/json"

id = env.params.url["id"]

client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, proxies)
rescue ex : VideoRedirect
next env.redirect "/api/v1/captions/#{ex.message}"
rescue ex
halt env, status_code: 403
end
@ -1730,9 +1769,10 @@ get "/api/v1/captions/:id" do |env|
captions = video.captions

label = env.params.query["label"]?
lang = env.params.query["lang"]?
tlang = env.params.query["tlang"]?

if !label && !lang
response = JSON.build do |json|
json.object do
json.field "captions" do
@ -1752,22 +1792,27 @@ get "/api/v1/captions/:id" do |env|
next response
end

env.response.content_type = "text/vtt"

caption = captions.select { |caption| caption.name.simpleText == label }
if lang
caption = captions.select { |caption| caption.languageCode == lang }
end
if caption.empty?
halt env, status_code: 404
else
caption = caption[0]
end

caption_xml = client.get(caption.baseUrl + "&tlang=#{tlang}").body
caption_xml = XML.parse(caption_xml)

webvtt = <<-END_VTT
WEBVTT
Kind: captions
Language: #{tlang || caption.languageCode}
END_VTT
@ -1806,6 +1851,8 @@ get "/api/v1/captions/:id" do |env|
end

get "/api/v1/comments/:id" do |env|
env.response.content_type = "application/json"

id = env.params.url["id"]

source = env.params.query["source"]?
@ -1816,26 +1863,66 @@ get "/api/v1/comments/:id" do |env|
if source == "youtube" if source == "youtube"
client = make_client(YT_URL) client = make_client(YT_URL)
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
headers = HTTP::Headers.new headers = HTTP::Headers.new
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&disable_polymer=1")
headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"] headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
body = html.body body = html.body
session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"] session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
itct = body.match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
ctoken = body.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/) ctoken = body.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
if body.match(/<meta itemprop="regionsAllowed" content="">/)
bypass_channel = Channel({String, HTTPClient, HTTP::Headers} | Nil).new
proxies.each do |region, list|
spawn do
proxy_html = %(<meta itemprop="regionsAllowed" content="">)
list.each do |proxy|
begin
proxy_client = HTTPClient.new(YT_URL)
proxy_client.read_timeout = 10.seconds
proxy_client.connect_timeout = 10.seconds
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
proxy_client.set_proxy(proxy)
response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
proxy_headers = HTTP::Headers.new
proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
proxy_html = response.body
if !proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
bypass_channel.send({proxy_html, proxy_client, proxy_headers})
break
end
rescue ex
end
end
# If none of the proxies we tried returned a valid response
if proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
bypass_channel.send(nil)
end
end
end
proxies.size.times do
response = bypass_channel.receive
if response
session_token = response[0].match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
itct = response[0].match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
ctoken = response[0].match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
client = response[1]
headers = response[2]
break
end
end
end
if !ctoken
if format == "json" if format == "json"
next {"comments" => [] of String}.to_json next {"comments" => [] of String}.to_json
else else
@ -1843,7 +1930,6 @@ get "/api/v1/comments/:id" do |env|
end
end

ctoken = ctoken["ctoken"]
if env.params.query["continuation"]? && !env.params.query["continuation"].empty? if env.params.query["continuation"]? && !env.params.query["continuation"].empty?
continuation = env.params.query["continuation"] continuation = env.params.query["continuation"]
@ -1857,10 +1943,16 @@ get "/api/v1/comments/:id" do |env|
}
post_req = HTTP::Params.encode(post_req)

headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
response = client.post("/comment_service_ajax?action_get_comments=1&pbj=1&ctoken=#{ctoken}&continuation=#{continuation}&itct=#{itct}&hl=en&gl=US", headers, post_req)
response = JSON.parse(response.body)
if !response["response"]["continuationContents"]? if !response["response"]["continuationContents"]?
halt env, status_code: 403 halt env, status_code: 403
@ -2016,8 +2108,6 @@ get "/api/v1/comments/:id" do |env|
halt env, status_code: 404
end

if format == "json"
reddit_thread = JSON.parse(reddit_thread.to_json).as_h
reddit_thread["comments"] = JSON.parse(comments.to_json)
@ -2038,7 +2128,7 @@ get "/api/v1/insights/:id" do |env|
client = make_client(YT_URL)
headers = HTTP::Headers.new
html = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1")

headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
@ -2113,12 +2203,15 @@ get "/api/v1/insights/:id" do |env|
end

get "/api/v1/videos/:id" do |env|
env.response.content_type = "application/json"

id = env.params.url["id"]

begin
video = get_video(id, PG_DB, proxies)
rescue ex : VideoRedirect
next env.redirect "/api/v1/videos/#{ex.message}"
rescue ex
error_message = {"error" => ex.message}.to_json
halt env, status_code: 500, response: error_message
end
@ -2128,7 +2221,6 @@ get "/api/v1/videos/:id" do |env|
captions = video.captions

video_info = JSON.build do |json|
json.object do
json.field "title", video.title
@ -2153,6 +2245,8 @@ get "/api/v1/videos/:id" do |env|
json.field "likeCount", video.likes json.field "likeCount", video.likes
json.field "dislikeCount", video.dislikes json.field "dislikeCount", video.dislikes
json.field "paid", video.paid
json.field "premium", video.premium
json.field "isFamilyFriendly", video.is_family_friendly json.field "isFamilyFriendly", video.is_family_friendly
json.field "allowedRegions", video.allowed_regions json.field "allowedRegions", video.allowed_regions
json.field "genre", video.genre json.field "genre", video.genre
@ -2162,6 +2256,22 @@ get "/api/v1/videos/:id" do |env|
json.field "authorId", video.ucid json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}" json.field "authorUrl", "/channel/#{video.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = [32, 48, 76, 100, 176, 512]
qualities.each do |quality|
json.object do
json.field "url", video.author_thumbnail.gsub("=s48-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCountText", video.sub_count_text
json.field "lengthSeconds", video.info["length_seconds"].to_i json.field "lengthSeconds", video.info["length_seconds"].to_i
if video.info["allow_ratings"]? if video.info["allow_ratings"]?
json.field "allowRatings", video.info["allow_ratings"] == "1" json.field "allowRatings", video.info["allow_ratings"] == "1"
@ -2380,30 +2490,10 @@ get "/api/v1/channels/:ucid" do |env|
halt env, status_code: 404, response: error_message
end

page = 1
videos, count = get_60_videos(ucid, page, auto_generated)

client = make_client(YT_URL)
channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
channel_html = XML.parse_html(channel_html)

banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
@ -2514,6 +2604,8 @@ get "/api/v1/channels/:ucid" do |env|
json.field "published", video.published.epoch json.field "published", video.published.epoch
json.field "publishedText", "#{recode_date(video.published)} ago" json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "lengthSeconds", video.length_seconds json.field "lengthSeconds", video.length_seconds
json.field "paid", video.paid
json.field "premium", video.premium
end
end
end
@ -2539,27 +2631,7 @@ end
halt env, status_code: 404, response: error_message
end

videos, count = get_60_videos(ucid, page, auto_generated)
result = JSON.build do |json|
json.array do
@ -2589,6 +2661,8 @@ end
json.field "published", video.published.epoch json.field "published", video.published.epoch
json.field "publishedText", "#{recode_date(video.published)} ago" json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "lengthSeconds", video.length_seconds json.field "lengthSeconds", video.length_seconds
json.field "paid", video.paid
json.field "premium", video.premium
end
end
end
@ -2636,6 +2710,8 @@ get "/api/v1/channels/search/:ucid" do |env|
json.field "publishedText", "#{recode_date(item.published)} ago" json.field "publishedText", "#{recode_date(item.published)} ago"
json.field "lengthSeconds", item.length_seconds json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now json.field "liveNow", item.live_now
json.field "paid", item.paid
json.field "premium", item.premium
when SearchPlaylist
json.field "type", "playlist"
json.field "title", item.title
@ -2757,6 +2833,8 @@ get "/api/v1/search" do |env|
json.field "publishedText", "#{recode_date(item.published)} ago" json.field "publishedText", "#{recode_date(item.published)} ago"
json.field "lengthSeconds", item.length_seconds json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now json.field "liveNow", item.live_now
json.field "paid", item.paid
json.field "premium", item.premium
when SearchPlaylist
json.field "type", "playlist"
json.field "title", item.title
@ -2822,14 +2900,28 @@ get "/api/v1/playlists/:plid" do |env|
page = env.params.query["page"]?.try &.to_i?
page ||= 1
format = env.params.query["format"]?
format ||= "json"
continuation = env.params.query["continuation"]?
if plid.starts_with? "RD"
next env.redirect "/api/v1/mixes/#{plid}"
end
begin
playlist = fetch_playlist(plid)
rescue ex
error_message = {"error" => "Playlist is empty"}.to_json
halt env, status_code: 404, response: error_message
end
begin
videos = fetch_playlist_videos(plid, page, playlist.video_count, continuation)
rescue ex
videos = [] of PlaylistVideo
end
response = JSON.build do |json|
json.object do
json.field "title", playlist.title
@ -2839,6 +2931,20 @@ get "/api/v1/playlists/:plid" do |env|
json.field "authorId", playlist.ucid json.field "authorId", playlist.ucid
json.field "authorUrl", "/channel/#{playlist.ucid}" json.field "authorUrl", "/channel/#{playlist.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = [32, 48, 76, 100, 176, 512]
qualities.each do |quality|
json.object do
json.field "url", playlist.author_thumbnail.gsub("=s100-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "description", playlist.description json.field "description", playlist.description
json.field "descriptionHtml", playlist.description_html json.field "descriptionHtml", playlist.description_html
json.field "videoCount", playlist.video_count json.field "videoCount", playlist.video_count
@ -2870,6 +2976,80 @@ get "/api/v1/playlists/:plid" do |env|
end
end
if format == "html"
response = JSON.parse(response)
playlist_html = template_playlist(response)
next_video = response["videos"].as_a[1]?.try &.["videoId"]
response = {
"playlistHtml" => playlist_html,
"nextVideo" => next_video,
}.to_json
end
response
end
get "/api/v1/mixes/:rdid" do |env|
env.response.content_type = "application/json"
rdid = env.params.url["rdid"]
continuation = env.params.query["continuation"]?
continuation ||= rdid.lchop("RD")
format = env.params.query["format"]?
format ||= "json"
begin
mix = fetch_mix(rdid, continuation)
rescue ex
error_message = {"error" => ex.message}.to_json
halt env, status_code: 500, response: error_message
end
response = JSON.build do |json|
json.object do
json.field "title", mix.title
json.field "mixId", mix.id
json.field "videos" do
json.array do
mix.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
json.array do
generate_thumbnails(json, video.id)
end
end
json.field "index", video.index
json.field "lengthSeconds", video.length_seconds
end
end
end
end
end
end
if format == "html"
response = JSON.parse(response)
playlist_html = template_mix(response)
next_video = response["videos"].as_a[1]?.try &.["videoId"]
response = {
"playlistHtml" => playlist_html,
"nextVideo" => next_video,
}.to_json
end
response response
end end
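As a rough usage sketch of the two endpoints above (the instance host and playlist ID are placeholders; the response shape is the one built by the handlers in this diff):

require "http/client"
require "json"

# Hypothetical instance; substitute your own deployment.
client = HTTP::Client.new(URI.parse("https://invidio.us"))

# Page through a playlist as JSON.
response = client.get("/api/v1/playlists/PLBCF2DAC6FFB574DE?page=2")
playlist = JSON.parse(response.body)
puts playlist["title"]
puts playlist["videoCount"]

# format=html returns a rendered sidebar plus the next video to autoplay.
response = client.get("/api/v1/playlists/PLBCF2DAC6FFB574DE?format=html")
puts JSON.parse(response.body)["nextVideo"]?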
@@ -2892,7 +3072,9 @@ get "/api/manifest/dash/id/:id" do |env|
  client = make_client(YT_URL)
  begin
-    video = get_video(id, PG_DB)
+    video = get_video(id, PG_DB, proxies)
+  rescue ex : VideoRedirect
+    next env.redirect "/api/manifest/dash/id/#{ex.message}"
  rescue ex
    halt env, status_code: 403
  end
@@ -3078,8 +3260,40 @@ get "/videoplayback" do |env|
  host = "https://r#{fvip}---#{mn}.googlevideo.com"
  url = "/videoplayback?#{query_params.to_s}"

+  if query_params["region"]?
+    client = make_client(URI.parse(host))
+    response = HTTP::Client::Response.new(status_code: 403)
+
+    if !proxies[query_params["region"]]?
+      halt env, status_code: 403
+    end
+
+    proxies[query_params["region"]].each do |proxy|
+      begin
+        client = HTTPClient.new(URI.parse(host))
+        client.read_timeout = 10.seconds
+        client.connect_timeout = 10.seconds
+
+        proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
+        client.set_proxy(proxy)
+
+        response = client.head(url)
+        if response.status_code == 200
+          # For whatever reason the proxy needs to be set again
+          client.set_proxy(proxy)
+          break
+        end
+      rescue ex
+      end
+    end
+  else
    client = make_client(URI.parse(host))
    response = client.head(url)
+  end
+
+  if response.status_code != 200
+    halt env, status_code: 403
+  end

  if response.headers["Location"]?
    url = URI.parse(response.headers["Location"])
@@ -3224,6 +3438,24 @@ get "/vi/:id/:name" do |env|
end

error 404 do |env|
+  if md = env.request.path.match(/^\/(?<id>[a-zA-Z0-9_-]{11})/)
+    id = md["id"]
+
+    params = [] of String
+    env.params.query.each do |k, v|
+      params << "#{k}=#{v}"
+    end
+    params = params.join("&")
+
+    url = "/watch?v=#{id}"
+    if !params.empty?
+      url += "&#{params}"
+    end
+
+    env.response.headers["Location"] = url
+    halt env, status_code: 302
+  end
+
  error_message = "404 Page not found"
  templated "error"
end
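The handler above only fires when the first path segment looks like an 11-character video ID. A self-contained sketch of that match-and-rebuild logic, with a hypothetical helper name:

# Rebuild "/dQw4w9WgXcQ" plus query params into "/watch?v=dQw4w9WgXcQ&t=42",
# mirroring the 404 handler above (query handling simplified).
def watch_url_for(path : String, query : Hash(String, String)) : String?
  md = path.match(/^\/(?<id>[a-zA-Z0-9_-]{11})/)
  return nil if !md

  url = "/watch?v=#{md["id"]}"
  params = query.map { |k, v| "#{k}=#{v}" }.join("&")
  url += "&#{params}" if !params.empty?
  url
end

puts watch_url_for("/dQw4w9WgXcQ", {"t" => "42"})        # => /watch?v=dQw4w9WgXcQ&t=42
puts watch_url_for("/short", {} of String => String)      # => nil (not an 11-char ID)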
@@ -15,6 +15,11 @@ class ChannelVideo
    ucid: String,
    author: String,
  })
+
+  # TODO: Add length_seconds to channel_video
+  def length_seconds
+    return 0
+  end
end

def get_channel(id, client, db, refresh = true, pull_all_videos = true)
@@ -104,6 +109,9 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
    videos.each do |video|
      ids << video.id

+      # FIXME: Red videos don't provide published date, so the best we can do is ignore them
+      if Time.now - video.published > 1.minute
        db.exec("UPDATE users SET notifications = notifications || $1 \
          WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, video.ucid)
@@ -112,6 +120,7 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
        db.exec("INSERT INTO channel_videos VALUES (#{args}) ON CONFLICT (id) DO UPDATE SET title = $2, \
          published = $3, updated = $4, ucid = $5, author = $6", video_array)
      end
+    end

    if count < 30
      break
@@ -172,7 +181,7 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
  continuation = Base64.urlsafe_encode(continuation)
  continuation = URI.escape(continuation)

-  url = "/browse_ajax?continuation=#{continuation}"
+  url = "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"

  return url
end
@@ -180,19 +189,25 @@ end
def get_about_info(ucid)
  client = make_client(YT_URL)

-  about = client.get("/user/#{ucid}/about?disable_polymer=1")
+  about = client.get("/user/#{ucid}/about?disable_polymer=1&gl=US&hl=en")
  about = XML.parse_html(about.body)

-  if !about.xpath_node(%q(//span[@class="qualified-channel-title-text"]/a))
-    about = client.get("/channel/#{ucid}/about?disable_polymer=1")
+  if !about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a))
+    about = client.get("/channel/#{ucid}/about?disable_polymer=1&gl=US&hl=en")
    about = XML.parse_html(about.body)
  end

-  if !about.xpath_node(%q(//span[@class="qualified-channel-title-text"]/a))
+  if !about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a))
    raise "User does not exist."
  end

-  author = about.xpath_node(%q(//span[@class="qualified-channel-title-text"]/a)).not_nil!.content
+  sub_count = about.xpath_node(%q(//span[contains(text(), "subscribers")]))
+  if sub_count
+    sub_count = sub_count.content.delete(", subscribers").to_i?
+  end
+  sub_count ||= 0
+
+  author = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!.content
  ucid = about.xpath_node(%q(//link[@rel="canonical"])).not_nil!["href"].split("/")[-1]

  # Auto-generated channels
@@ -203,5 +218,37 @@ def get_about_info(ucid)
    auto_generated = true
  end

-  return {author, ucid, auto_generated}
+  return {author, ucid, auto_generated, sub_count}
end

+def get_60_videos(ucid, page, auto_generated)
+  count = 0
+  videos = [] of SearchVideo
+
+  client = make_client(YT_URL)
+
+  2.times do |i|
+    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
+    response = client.get(url)
+    json = JSON.parse(response.body)
+
+    if json["content_html"]? && !json["content_html"].as_s.empty?
+      document = XML.parse_html(json["content_html"].as_s)
+      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
+
+      if !json["load_more_widget_html"]?.try &.as_s.empty?
+        count += 30
+      end
+
+      if auto_generated
+        videos += extract_videos(nodeset)
+      else
+        videos += extract_videos(nodeset, ucid)
+      end
+    else
+      break
+    end
+  end
+
+  return videos, count
+end
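Each user-facing channel page is stitched together from two 30-item browse_ajax pages, with the inner page number coming from page * 2 + (i - 1). A quick check of that arithmetic:

# For user page N, the loop requests internal pages 2N-1 and 2N.
(1..3).each do |page|
  2.times do |i|
    puts "user page #{page} -> browse_ajax page #{page * 2 + (i - 1)}"
  end
end
# user page 1 -> browse_ajax pages 1, 2
# user page 2 -> browse_ajax pages 3, 4
# user page 3 -> browse_ajax pages 5, 6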
@@ -104,22 +104,22 @@ def template_youtube_comments(comments)
      html += <<-END_HTML
      <div class="pure-g">
-        <div class="pure-u-2-24">
+        <div class="pure-u-4-24 pure-u-md-2-24">
          <img style="width:90%; padding-right:1em; padding-top:1em;" src="#{author_thumbnail}">
        </div>
-        <div class="pure-u-22-24">
+        <div class="pure-u-20-24 pure-u-md-22-24">
          <p>
-            <a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a>
-            <i class="icon ion-ios-thumbs-up"></i> #{child["likeCount"]}
-            <b><a href="#{child["authorUrl"]}">#{child["author"]}</a></b>
-            - #{recode_date(Time.epoch(child["published"].as_i64))} ago
-          </p>
-          <div>
+            <b>
+              <a href="#{child["authorUrl"]}">#{child["author"]}</a>
+            </b>
            <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
+            #{recode_date(Time.epoch(child["published"].as_i64))} ago
+            |
+            <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
+          </p>
          #{replies_html}
        </div>
      </div>
+      </div>
      END_HTML
    end
@@ -129,7 +129,7 @@ def template_youtube_comments(comments)
      <div class="pure-u-1">
        <p>
          <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
-            onclick="get_youtube_replies(this)">Load more</a>
+            onclick="get_youtube_replies(this, true)">Load more</a>
        </p>
      </div>
    </div>
@@ -156,10 +156,10 @@ def template_reddit_comments(root)
    content = <<-END_HTML
    <p>
-      <a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a>
-      <i class="icon ion-ios-thumbs-up"></i> #{score}
+      <a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
      <b><a href="https://www.reddit.com/user/#{author}">#{author}</a></b>
-      - #{recode_date(child.created_utc)} ago
+      #{number_with_separator(score)} points
+      #{recode_date(child.created_utc)} ago
    </p>
    <div>
    #{body_html}
@@ -2,6 +2,7 @@ class Config
  YAML.mapping({
    crawl_threads: Int32,
    channel_threads: Int32,
+    feed_threads: Int32,
    video_threads: Int32,
    db: NamedTuple(
      user: String,
@@ -14,6 +15,7 @@ class Config
    https_only: Bool?,
    hmac_key: String?,
    full_refresh: Bool,
+    geo_bypass: Bool,
  })
end
@@ -244,11 +246,22 @@ def extract_items(nodeset, ucid = nil)
        plid = HTTP::Params.parse(URI.parse(id).query.not_nil!)["list"]

        anchor = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-meta")]/a))
        if !anchor
          anchor = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li/a))
        end

-        if anchor
-          video_count = anchor.content.match(/View full playlist \((?<count>\d+)/).try &.["count"].to_i?
+        video_count = node.xpath_node(%q(.//span[@class="formatted-video-count-label"]/b))
+        if video_count
+          video_count = video_count.content
+
+          if video_count == "50+"
+            author = "YouTube"
+            author_id = "UC-9-kyTW8ZkZNDHQJ6FgpwQ"
+            video_count = video_count.rchop("+")
+          end
+
+          video_count = video_count.to_i?
        end
        video_count ||= 0
@@ -345,6 +358,18 @@ def extract_items(nodeset, ucid = nil)
          live_now = false
        end

+        if node.xpath_node(%q(.//span[text()="Premium"]))
+          premium = true
+        else
+          premium = false
+        end
+
+        if node.xpath_node(%q(.//span[contains(text(), "Get YouTube Premium")]))
+          paid = true
+        else
+          paid = false
+        end
+
        items << SearchVideo.new(
          title,
          id,
@@ -355,7 +380,9 @@ def extract_items(nodeset, ucid = nil)
          description,
          description_html,
          length_seconds,
-          live_now
+          live_now,
+          paid,
+          premium
        )
      end
    end
@@ -89,6 +89,68 @@ class HTTPClient < HTTP::Client
end

def get_proxies(country_code = "US")
+  # return get_spys_proxies(country_code)
+  return get_nova_proxies(country_code)
+end
+
+def filter_proxies(proxies)
+  proxies.select! do |proxy|
+    begin
+      client = HTTPClient.new(YT_URL)
+      client.read_timeout = 10.seconds
+      client.connect_timeout = 10.seconds
+
+      proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
+      client.set_proxy(proxy)
+
+      client.head("/").status_code == 200
+    rescue ex
+      false
+    end
+  end
+
+  return proxies
+end
+
+def get_nova_proxies(country_code = "US")
+  country_code = country_code.downcase
+
+  client = HTTP::Client.new(URI.parse("https://www.proxynova.com"))
+  client.read_timeout = 10.seconds
+  client.connect_timeout = 10.seconds
+
+  headers = HTTP::Headers.new
+  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
+  headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
+  headers["Accept-Language"] = "Accept-Language: en-US,en;q=0.9"
+  headers["Host"] = "www.proxynova.com"
+  headers["Origin"] = "https://www.proxynova.com"
+  headers["Referer"] = "https://www.proxynova.com/proxy-server-list/country-#{country_code}/"
+
+  response = client.get("/proxy-server-list/country-#{country_code}/", headers)
+  document = XML.parse_html(response.body)
+
+  proxies = [] of {ip: String, port: Int32, score: Float64}
+  document.xpath_nodes(%q(//tr[@data-proxy-id])).each do |node|
+    ip = node.xpath_node(%q(.//td/abbr/script)).not_nil!.content
+    ip = ip.match(/document\.write\('(?<sub1>[^']+)'.substr\(8\) \+ '(?<sub2>[^']+)'/).not_nil!
+    ip = "#{ip["sub1"][8..-1]}#{ip["sub2"]}"
+    port = node.xpath_node(%q(.//td[2])).not_nil!.content.strip.to_i
+
+    anchor = node.xpath_node(%q(.//td[4]/div)).not_nil!
+    speed = anchor["data-value"].to_f
+    latency = anchor["title"].to_f
+    uptime = node.xpath_node(%q(.//td[5]/span)).not_nil!.content.rchop("%").to_f
+
+    # TODO: Tweak me
+    score = (uptime*4 + speed*2 + latency)/7
+    proxies << {ip: ip, port: port, score: score}
+  end
+
+  # proxies = proxies.sort_by { |proxy| proxy[:score] }.reverse
+  return proxies
+end
+
+def get_spys_proxies(country_code = "US")
  client = HTTP::Client.new(URI.parse("http://spys.one"))
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds
@@ -108,7 +170,15 @@ def get_proxies(country_code = "US")
    "xf4" => "0",
    "xf5" => "1",
  }

  response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
+  20.times do
+    if response.status_code == 200
+      break
+    end
+
+    response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
+  end
+
  response = XML.parse_html(response.body)

  mapping = response.xpath_node(%q(.//body/script)).not_nil!.content
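The ad-hoc score in get_nova_proxies above weights uptime most heavily, then speed, then latency, normalized by 7; the TODO suggests the weights are provisional. A sanity check with made-up figures:

# Same formula as above: (uptime*4 + speed*2 + latency)/7.
# Sample inputs below are illustrative, not real measurements.
def proxy_score(uptime : Float64, speed : Float64, latency : Float64) : Float64
  (uptime*4 + speed*2 + latency)/7
end

puts proxy_score(99.0, 80.0, 10.0) # => 80.857...
puts proxy_score(60.0, 95.0, 90.0) # => 74.285...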
@@ -40,6 +40,23 @@ def decode_length_seconds(string)
  return length_seconds
end

+def recode_length_seconds(time)
+  if time <= 0
+    return ""
+  else
+    time = time.seconds
+    text = "#{time.minutes.to_s.rjust(2, '0')}:#{time.seconds.to_s.rjust(2, '0')}"
+
+    if time.hours > 0
+      text = "#{time.hours.to_s.rjust(2, '0')}:#{text}"
+    end
+
+    text = text.lchop('0')
+    return text
+  end
+end
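Assuming the helper above is in scope, a few sample conversions (the empty string is what suppresses the duration overlay for zero or unknown lengths):

puts recode_length_seconds(83)   # => "1:23"
puts recode_length_seconds(3723) # => "1:02:03"
puts recode_length_seconds(0)    # => "" (no overlay)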
def decode_time(string)
  time = string.try &.to_f?
@@ -138,6 +155,25 @@ def number_with_separator(number)
  number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end

+def number_to_short_text(number)
+  seperated = number_with_separator(number).gsub(",", ".").split("")
+  text = seperated.first(2).join
+
+  if seperated[2]? && seperated[2] != "."
+    text += seperated[2]
+  end
+
+  text = text.rchop(".0")
+
+  if number / 1000000 != 0
+    text += "M"
+  elsif number / 1000 != 0
+    text += "K"
+  end
+
+  text
+end
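Assuming the helper above is in scope (and Crystal's integer division for / on Int, as at the time), some sample outputs:

puts number_to_short_text(1_523_000) # => "1.5M"
puts number_to_short_text(48_200)    # => "48K"
puts number_to_short_text(912)       # => "912"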
def arg_array(array, start = 1)
  if array.size == 0
    args = "NULL"
@@ -238,3 +274,9 @@ def write_var_int(value : Int)
  return bytes
end

+def sha256(text)
+  digest = OpenSSL::Digest.new("SHA256")
+  digest << text
+  return digest.hexdigest
+end
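The digest helper exists to derive the per-user materialized view names used elsewhere in this changeset; a sketch with a placeholder address:

require "openssl"

email = "user@example.com" # placeholder
view_name = "subscriptions_#{sha256(email)[0..7]}"
# => "subscriptions_" plus the first 8 hex characters of the SHA-256 digest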
@@ -104,6 +104,44 @@ def refresh_videos(db)
    end
  end
end

+def refresh_feeds(db, max_threads = 1)
+  max_channel = Channel(Int32).new
+
+  spawn do
+    max_threads = max_channel.receive
+    active_threads = 0
+    active_channel = Channel(Bool).new
+
+    loop do
+      db.query("SELECT email FROM users") do |rs|
+        rs.each do
+          email = rs.read(String)
+          view_name = "subscriptions_#{sha256(email)[0..7]}"
+
+          if active_threads >= max_threads
+            if active_channel.receive
+              active_threads -= 1
+            end
+          end
+
+          active_threads += 1
+          spawn do
+            begin
+              db.exec("REFRESH MATERIALIZED VIEW #{view_name}")
+            rescue ex
+              STDOUT << "REFRESH " << email << " : " << ex.message << "\n"
+            end
+
+            active_channel.send(true)
+          end
+        end
+      end
+    end
+  end
+
+  max_channel.send(max_threads)
+end
def pull_top_videos(config, db)
  if config.dl_api_key
    DetectLanguage.configure do |dl_config|
@@ -154,3 +192,16 @@ def update_decrypt_function
    Fiber.yield
  end
end

+def find_working_proxies(regions)
+  loop do
+    regions.each do |region|
+      proxies = get_proxies(region).first(20)
+      proxies = proxies.map { |proxy| {ip: proxy[:ip], port: proxy[:port]} }
+      # proxies = filter_proxies(proxies)
+
+      yield region, proxies
+      Fiber.yield
+    end
+  end
+end
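find_working_proxies is meant to be driven as a block-taking background job; a sketch of the intended call shape, assuming a shared region-indexed hash like the one the handlers above consult:

proxies = {} of String => Array({ip: String, port: Int32})

spawn do
  find_working_proxies(BYPASS_REGIONS) do |region, list|
    # Only publish regions that actually returned candidates.
    proxies[region] = list if !list.empty?
  end
end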
src/invidious/mixes.cr (new file)
@@ -0,0 +1,114 @@
+class MixVideo
+  add_mapping({
+    title: String,
+    id: String,
+    author: String,
+    ucid: String,
+    length_seconds: Int32,
+    index: Int32,
+    mixes: Array(String),
+  })
+end
+
+class Mix
+  add_mapping({
+    title: String,
+    id: String,
+    videos: Array(MixVideo),
+  })
+end
+
+def fetch_mix(rdid, video_id, cookies = nil)
+  client = make_client(YT_URL)
+  headers = HTTP::Headers.new
+  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"
+  if cookies
+    headers = cookies.add_request_headers(headers)
+  end
+  response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en", headers)
+
+  yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
+  if yt_data
+    yt_data = JSON.parse(yt_data["data"].rchop(";"))
+  else
+    raise "Could not create mix."
+  end
+
+  if !yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]?
+    raise "Could not create mix."
+  end
+
+  playlist = yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]["playlist"]
+  mix_title = playlist["title"].as_s
+
+  contents = playlist["contents"].as_a
+  until contents[0]["playlistPanelVideoRenderer"]["videoId"].as_s == video_id
+    contents.shift
+  end
+
+  videos = [] of MixVideo
+  contents.each do |item|
+    item = item["playlistPanelVideoRenderer"]
+
+    id = item["videoId"].as_s
+    title = item["title"]["simpleText"].as_s
+    author = item["longBylineText"]["runs"][0]["text"].as_s
+    ucid = item["longBylineText"]["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"].as_s
+    length_seconds = decode_length_seconds(item["lengthText"]["simpleText"].as_s)
+    index = item["navigationEndpoint"]["watchEndpoint"]["index"].as_i
+
+    videos << MixVideo.new(
+      title,
+      id,
+      author,
+      ucid,
+      length_seconds,
+      index,
+      [rdid]
+    )
+  end
+
+  if !cookies
+    next_page = fetch_mix(rdid, videos[-1].id, response.cookies)
+    videos += next_page.videos
+  end
+
+  videos.uniq! { |video| video.id }
+  videos = videos.first(50)
+  return Mix.new(mix_title, rdid, videos)
+end
+
+def template_mix(mix)
+  html = <<-END_HTML
+  <h3>
+    <a href="/mix?list=#{mix["mixId"]}">
+      #{mix["title"]}
+    </a>
+  </h3>
+  <div class="pure-menu pure-menu-scrollable playlist-restricted">
+    <ol class="pure-menu-list">
+  END_HTML
+
+  mix["videos"].as_a.each do |video|
+    html += <<-END_HTML
+    <li class="pure-menu-item">
+      <a href="/watch?v=#{video["videoId"]}&list=#{mix["mixId"]}">
+        <img style="width:100%;" src="/vi/#{video["videoId"]}/mqdefault.jpg">
+        <p style="width:100%">#{video["title"]}</p>
+        <p>
+          <b style="width: 100%">#{video["author"]}</b>
+        </p>
+      </a>
+    </li>
+    END_HTML
+  end
+
+  html += <<-END_HTML
+    </ol>
+  </div>
+  <hr>
+  END_HTML
+
+  html
+end
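A sketch of calling fetch_mix directly; the RD-prefixed mix ID and seed video ID are placeholders:

# Mix IDs are "RD" + a seed video ID; fetch_mix pages once more with the
# response cookies, dedupes, and caps the result at 50 entries.
mix = fetch_mix("RDdQw4w9WgXcQ", "dQw4w9WgXcQ")
puts mix.title
mix.videos.each do |video|
  puts "#{video.index}: #{video.title} (#{video.length_seconds}s)"
end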
@@ -1,17 +1,3 @@
-class Playlist
-  add_mapping({
-    title: String,
-    id: String,
-    author: String,
-    ucid: String,
-    description: String,
-    description_html: String,
-    video_count: Int32,
-    views: Int64,
-    updated: Time,
-  })
-end
-
class PlaylistVideo
  add_mapping({
    title: String,
@@ -25,11 +11,38 @@ class PlaylistVideo
  })
end

-def fetch_playlist_videos(plid, page, video_count)
+class Playlist
+  add_mapping({
+    title: String,
+    id: String,
+    author: String,
+    author_thumbnail: String,
+    ucid: String,
+    description: String,
+    description_html: String,
+    video_count: Int32,
+    views: Int64,
+    updated: Time,
+  })
+end
+
+def fetch_playlist_videos(plid, page, video_count, continuation = nil)
  client = make_client(YT_URL)

-  if video_count > 100
+  if continuation
+    html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+    html = XML.parse_html(html.body)
+
+    index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
+    if index
+      index -= 1
+    end
+    index ||= 0
+  else
    index = (page - 1) * 100
+  end
+
+  if video_count > 100
    url = produce_playlist_url(plid, index)
    response = client.get(url)
@@ -42,14 +55,21 @@ def fetch_playlist_videos(plid, page, video_count)
    nodeset = document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")]))
    videos = extract_playlist(plid, nodeset, index)
  else
+    # Playlist has less than one page of videos, so subsequent pages will be empty
    if page > 1
      videos = [] of PlaylistVideo
    else
-      response = client.get("/playlist?list=#{plid}&disable_polymer=1")
+      # Extract first page of videos
+      response = client.get("/playlist?list=#{plid}&gl=US&hl=en&disable_polymer=1")
      document = XML.parse_html(response.body)
      nodeset = document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")]))
      videos = extract_playlist(plid, nodeset, 0)
+
+      if continuation
+        until videos[0].id == continuation
+          videos.shift
+        end
+      end
    end
  end
@@ -105,7 +125,8 @@ def produce_playlist_url(id, index)
  end
  ucid = "VL" + id

-  meta = "\x08#{write_var_int(index).join}"
+  meta = [0x08_u8] + write_var_int(index)
+  meta = Slice.new(meta.to_unsafe, meta.size)
  meta = Base64.urlsafe_encode(meta, false)
  meta = "PT:#{meta}"
@@ -141,7 +162,7 @@ def fetch_playlist(plid)
    plid = "UU#{plid.lchop("UC")}"
  end

-  response = client.get("/playlist?list=#{plid}&disable_polymer=1")
+  response = client.get("/playlist?list=#{plid}&hl=en&disable_polymer=1")
  if response.status_code != 200
    raise "Invalid playlist."
  end
@@ -160,10 +181,13 @@ def fetch_playlist(plid)
  title = title.content.strip(" \n")

  description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
+  description_html ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
  description_html, description = html_to_content(description_html)

  anchor = document.xpath_node(%q(//ul[@class="pl-header-details"])).not_nil!
  author = anchor.xpath_node(%q(.//li[1]/a)).not_nil!.content
+  author_thumbnail = document.xpath_node(%q(//img[@class="channel-header-profile-image"])).try &.["src"]
+  author_thumbnail ||= ""
  ucid = anchor.xpath_node(%q(.//li[1]/a)).not_nil!["href"].split("/")[2]
  video_count = anchor.xpath_node(%q(.//li[2])).not_nil!.content.delete("videos, ").to_i
@@ -181,6 +205,7 @@ def fetch_playlist(plid)
    title,
    plid,
    author,
+    author_thumbnail,
    ucid,
    description,
    description_html,
@@ -191,3 +216,37 @@ def fetch_playlist(plid)
  return playlist
end

+def template_playlist(playlist)
+  html = <<-END_HTML
+  <h3>
+    <a href="/playlist?list=#{playlist["playlistId"]}">
+      #{playlist["title"]}
+    </a>
+  </h3>
+  <div class="pure-menu pure-menu-scrollable playlist-restricted">
+    <ol class="pure-menu-list">
+  END_HTML
+
+  playlist["videos"].as_a.each do |video|
+    html += <<-END_HTML
+    <li class="pure-menu-item">
+      <a href="/watch?v=#{video["videoId"]}&list=#{playlist["playlistId"]}">
+        <img style="width:100%;" src="/vi/#{video["videoId"]}/mqdefault.jpg">
+        <p style="width:100%">#{video["title"]}</p>
+        <p>
+          <b style="width: 100%">#{video["author"]}</b>
+        </p>
+      </a>
+    </li>
+    END_HTML
+  end
+
+  html += <<-END_HTML
+    </ol>
+  </div>
+  <hr>
+  END_HTML
+
+  html
+end
@@ -10,6 +10,8 @@ class SearchVideo
    description_html: String,
    length_seconds: Int32,
    live_now: Bool,
+    paid: Bool,
+    premium: Bool,
  })
end
@@ -89,7 +91,7 @@ def search(query, page = 1, search_params = produce_search_params(content_type:
    return {0, [] of SearchItem}
  end

-  html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&disable_polymer=1").body
+  html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&hl=en&disable_polymer=1").body
  if html.empty?
    return {0, [] of SearchItem}
  end
@@ -119,6 +119,15 @@ def get_user(sid, client, headers, db, refresh = true)
      db.exec("INSERT INTO users VALUES (#{args}) \
        ON CONFLICT (email) DO UPDATE SET id = users.id || $1, updated = $2, subscriptions = $4", user_array)

+      begin
+        view_name = "subscriptions_#{sha256(user.email)[0..7]}"
+        PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
+          SELECT * FROM channel_videos WHERE \
+          ucid = ANY ((SELECT subscriptions FROM users WHERE email = '#{user.email}')::text[]) \
+          ORDER BY published DESC;")
+      rescue ex
+      end
    end
  else
    user = fetch_user(sid, client, headers, db)
@@ -129,6 +138,15 @@ def get_user(sid, client, headers, db, refresh = true)
    db.exec("INSERT INTO users VALUES (#{args}) \
      ON CONFLICT (email) DO UPDATE SET id = users.id || $1, updated = $2, subscriptions = $4", user_array)

+    begin
+      view_name = "subscriptions_#{sha256(user.email)[0..7]}"
+      PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
+        SELECT * FROM channel_videos WHERE \
+        ucid = ANY ((SELECT subscriptions FROM users WHERE email = '#{user.email}')::text[]) \
+        ORDER BY published DESC;")
+    rescue ex
+    end
  end

  return user
@@ -110,7 +110,7 @@ CAPTION_LANGUAGES = {
REGIONS = {"AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR", "AS", "AT", "AU", "AW", "AX", "AZ", "BA", "BB", "BD", "BE", "BF", "BG", "BH", "BI", "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV", "BW", "BY", "BZ", "CA", "CC", "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN", "CO", "CR", "CU", "CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE", "EG", "EH", "ER", "ES", "ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL", "GM", "GN", "GP", "GQ", "GR", "GS", "GT", "GU", "GW", "GY", "HK", "HM", "HN", "HR", "HT", "HU", "ID", "IE", "IL", "IM", "IN", "IO", "IQ", "IR", "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM", "KN", "KP", "KR", "KW", "KY", "KZ", "LA", "LB", "LC", "LI", "LK", "LR", "LS", "LT", "LU", "LV", "LY", "MA", "MC", "MD", "ME", "MF", "MG", "MH", "MK", "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT", "MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP", "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PG", "PH", "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO", "RS", "RU", "RW", "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM", "SN", "SO", "SR", "SS", "ST", "SV", "SX", "SY", "SZ", "TC", "TD", "TF", "TG", "TH", "TJ", "TK", "TL", "TM", "TN", "TO", "TR", "TT", "TV", "TW", "TZ", "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI", "VN", "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW"}
BYPASS_REGIONS = {
-  "UK",
+  "GB",
  "DE",
  "FR",
  "IN",
@@ -129,7 +129,6 @@ BYPASS_REGIONS = {
  "ID",
  "BD",
  "MX",
-  "ET",
  "PH",
  "EG",
  "VN",
@@ -274,6 +273,12 @@ class Video
    streams.each { |s| s.add("label", "#{s["quality"]} - #{s["type"].split(";")[0].split("/")[1]}") }
    streams = streams.uniq { |s| s["label"] }

+    if self.info["region"]?
+      streams.each do |fmt|
+        fmt["url"] += "&region=" + self.info["region"]
+      end
+    end
+
    if streams[0]? && streams[0]["s"]?
      streams.each do |fmt|
        fmt["url"] += "&signature=" + decrypt_signature(fmt["s"], decrypt_function)
@@ -363,6 +368,12 @@ class Video
      end
    end

+    if self.info["region"]?
+      adaptive_fmts.each do |fmt|
+        fmt["url"] += "&region=" + self.info["region"]
+      end
+    end
+
    if adaptive_fmts[0]? && adaptive_fmts[0]["s"]?
      adaptive_fmts.each do |fmt|
        fmt["url"] += "&signature=" + decrypt_signature(fmt["s"], decrypt_function)
@@ -396,6 +407,23 @@ class Video
    return @player_json.not_nil!
  end

+  def paid
+    reason = self.player_response["playabilityStatus"]?.try &.["reason"]?
+
+    if reason == "This video requires payment to watch."
+      paid = true
+    else
+      paid = false
+    end
+
+    return paid
+  end
+
+  def premium
+    premium = self.player_response.to_s.includes? "Get YouTube without the ads."
+    return premium
+  end
+
  def captions
    captions = [] of Caption

    if player_response["captions"]?
@@ -423,6 +451,10 @@ class Video
    return description
  end

+  def length_seconds
+    return self.info["length_seconds"].to_i
+  end
+
  add_mapping({
    id: String,
    info: {
@@ -445,7 +477,9 @@ class Video
    is_family_friendly: Bool,
    genre: String,
    genre_url: String,
-    license: {
+    license: String,
+    sub_count_text: String,
+    author_thumbnail: {
      type: String,
      default: "",
    },
@@ -466,21 +500,24 @@ class CaptionName
  )
end

-def get_video(id, db, refresh = true)
+class VideoRedirect < Exception
+end
+
+def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}), refresh = true)
  if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)
    video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)

    # If record was last updated over 10 minutes ago, refresh (expire param in response lasts for 6 hours)
    if refresh && Time.now - video.updated > 10.minutes
      begin
-        video = fetch_video(id)
+        video = fetch_video(id, proxies)
        video_array = video.to_a
        args = arg_array(video_array[1..-1], 2)

        db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,\
          published,description,language,author,ucid,allowed_regions,is_family_friendly,\
-          genre, genre_url, license)\
+          genre,genre_url,license,sub_count_text,author_thumbnail)\
          = (#{args}) WHERE id = $1", video_array)
      rescue ex
        db.exec("DELETE FROM videos * WHERE id = $1", id)
@@ -488,7 +525,7 @@ def get_video(id, db, refresh = true)
      end
    end
  else
-    video = fetch_video(id)
+    video = fetch_video(id, proxies)
    video_array = video.to_a
    args = arg_array(video_array)
@@ -499,15 +536,19 @@ def get_video(id, db, refresh = true)
  return video
end
-def fetch_video(id)
-  html_channel = Channel(XML::Node).new
+def fetch_video(id, proxies)
+  html_channel = Channel(XML::Node | String).new
  info_channel = Channel(HTTP::Params).new

  spawn do
    client = make_client(YT_URL)
-    html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&disable_polymer=1")
-    html = XML.parse_html(html.body)
+    html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+
+    if md = html.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
+      next html_channel.send(md["id"])
+    end
+
+    html = XML.parse_html(html.body)
    html_channel.send(html)
  end
@@ -525,48 +566,72 @@ def fetch_video(id)
  end

  html = html_channel.receive
+  if html.as?(String)
+    raise VideoRedirect.new("#{html.as(String)}")
+  end
+  html = html.as(XML::Node)
+
  info = info_channel.receive

  if info["reason"]? && info["reason"].includes? "your country"
-    bypass_channel = Channel({HTTP::Params | Nil, XML::Node | Nil}).new
+    bypass_channel = Channel(HTTPProxy | Nil).new

-    BYPASS_REGIONS.each do |country_code|
+    proxies.each do |region, list|
      spawn do
+        info = HTTP::Params.new({
+          "reason" => [info["reason"]],
+        })
+
+        list.each do |proxy|
          begin
-            proxies = get_proxies(country_code)
-
-            # Try not to overload single proxy
-            proxy = proxies[0, 5].sample(1)[0]
-            proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
-
-            client = HTTPClient.new(URI.parse("https://www.youtube.com"))
+            client = HTTPClient.new(YT_URL)
            client.read_timeout = 10.seconds
            client.connect_timeout = 10.seconds
-            client.set_proxy(proxy)

-            proxy_info = client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1")
-            proxy_info = HTTP::Params.parse(proxy_info.body)
-
-            if !proxy_info["reason"]?
-              proxy_html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
-              proxy_html = XML.parse_html(proxy_html.body)
-
-              bypass_channel.send({proxy_info, proxy_html})
-            else
-              bypass_channel.send({nil, nil})
-            end
+            proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
+            client.set_proxy(proxy)
+
+            info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
+            if !info["reason"]?
+              bypass_channel.send(proxy)
+              break
+            end
          rescue ex
-            bypass_channel.send({nil, nil})
          end
        end
+
+        # If none of the proxies we tried returned a valid response
+        if info["reason"]?
+          bypass_channel.send(nil)
+        end
      end
    end

-    BYPASS_REGIONS.size.times do
-      response = bypass_channel.receive
-      if response[0] || response[1]
-        info = response[0].not_nil!
-        html = response[1].not_nil!
+    proxies.size.times do
+      proxy = bypass_channel.receive
+      if proxy
+        begin
+          client = HTTPClient.new(YT_URL)
+          client.read_timeout = 10.seconds
+          client.connect_timeout = 10.seconds
+          client.set_proxy(proxy)
+
+          html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1").body)
+          info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
+
+          if info["reason"]?
+            info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
+          end
+
+          proxy = {ip: proxy.proxy_host, port: proxy.proxy_port}
+          region = proxies.select { |region, list| list.includes? proxy }
+          if !region.empty?
+            info["region"] = region.keys[0]
+          end

          break
+        rescue ex
        end
      end
    end
  end
@@ -603,20 +668,39 @@ def fetch_video(id)
  genre = html.xpath_node(%q(//meta[@itemprop="genre"])).not_nil!["content"]
  genre_url = html.xpath_node(%(//a[text()="#{genre}"])).try &.["href"]

-  if genre == "Movies"
-    genre_url ||= "/channel/UClgRkhTL3_hImCAmdLfDE4g"
-  end
+  case genre
+  when "Movies"
+    genre_url = "/channel/UClgRkhTL3_hImCAmdLfDE4g"
+  when "Education"
+    # Education channel is linked but does not exist
+    # genre_url = "/channel/UC3yA8nDwraeOfnYfBWun83g"
+    genre_url = ""
+  end
+  genre_url ||= ""

  license = html.xpath_node(%q(//h4[contains(text(),"License")]/parent::*/ul/li))
  if license
    license = license.content
  else
-    license ||= ""
+    license = ""
  end

+  sub_count_text = html.xpath_node(%q(//span[contains(@class, "yt-subscriber-count")]))
+  if sub_count_text
+    sub_count_text = sub_count_text["title"]
+  else
+    sub_count_text = "0"
+  end
+
+  author_thumbnail = html.xpath_node(%(//img[@alt="#{author}"]))
+  if author_thumbnail
+    author_thumbnail = author_thumbnail["data-thumb"]
+  else
+    author_thumbnail = ""
+  end

  video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
-    nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license)
+    nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license, sub_count_text, author_thumbnail)

  return video
end
@@ -13,23 +13,32 @@
    </div>
  </div>

-<p class="h-box">
+<div class="h-box">
  <% if user %>
    <% if subscriptions.includes? ucid %>
-      <a href="/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
-        <b>Unsubscribe from <%= author %></b>
-      </a>
+      <p>
+        <a id="subscribe" onclick="unsubscribe()" class="pure-button pure-button-primary"
+          href="/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
+          <b>Unsubscribe | <%= number_to_short_text(sub_count) %></b>
+        </a>
+      </p>
    <% else %>
-      <a href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
-        <b>Subscribe to <%= author %></b>
-      </a>
+      <p>
+        <a id="subscribe" onclick="subscribe()" class="pure-button pure-button-primary"
+          href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>">
+          <b>Subscribe | <%= number_to_short_text(sub_count) %></b>
+        </a>
+      </p>
    <% end %>
  <% else %>
-    <a href="/login?referer=<%= env.get("current_page") %>">
-      <b>Login to subscribe to <%= author %></b>
-    </a>
-  <% end %>
-</p>
+    <p>
+      <a id="subscribe" class="pure-button pure-button-primary"
+        href="/login?referer=<%= env.get("current_page") %>">
+        <b>Login to subscribe to <%= author %></b>
+      </a>
+    </p>
+  <% end %>
+</div>

<p class="h-box">
  <a href="https://www.youtube.com/channel/<%= ucid %>">View channel on YouTube</a>
@@ -51,8 +60,50 @@
  </div>
  <div class="pure-u-1 pure-u-md-3-5"></div>
  <div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
-    <% if videos.size == 60 %>
+    <% if count == 60 %>
      <a href="/channel/<%= ucid %>?page=<%= page + 1 %>">Next page</a>
    <% end %>
  </div>
</div>

+<script>
+document.getElementById("subscribe")["href"] = "javascript:void(0);"
+
+function subscribe() {
+  var url = "/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>";
+  var xhr = new XMLHttpRequest();
+  xhr.responseType = "json";
+  xhr.timeout = 20000;
+  xhr.open("GET", url, true);
+  xhr.send();
+
+  xhr.onreadystatechange = function() {
+    if (xhr.readyState == 4) {
+      if (xhr.status == 200) {
+        subscribe_button = document.getElementById("subscribe");
+        subscribe_button.onclick = unsubscribe;
+        subscribe_button.innerHTML = '<b>Unsubscribe | <%= number_to_short_text(sub_count) %></b>'
+      }
+    }
+  }
+}
+
+function unsubscribe() {
+  var url = "/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>";
+  var xhr = new XMLHttpRequest();
+  xhr.responseType = "json";
+  xhr.timeout = 20000;
+  xhr.open("GET", url, true);
+  xhr.send();
+
+  xhr.onreadystatechange = function() {
+    if (xhr.readyState == 4) {
+      if (xhr.status == 200) {
+        subscribe_button = document.getElementById("subscribe");
+        subscribe_button.onclick = subscribe;
+        subscribe_button.innerHTML = '<b>Subscribe | <%= number_to_short_text(sub_count) %></b>'
+      }
+    }
+  }
+}
+</script>
@@ -14,10 +14,18 @@
      <p><%= number_with_separator(item.subscriber_count) %> subscribers</p>
      <h5><%= item.description_html %></h5>
    <% when SearchPlaylist %>
-      <a style="width:100%;" href="/playlist?list=<%= item.id %>">
+      <% if item.id.starts_with? "RD" %>
+        <% url = "/mix?list=#{item.id}&continuation=#{item.videos[0]?.try &.id}" %>
+      <% else %>
+        <% url = "/playlist?list=#{item.id}" %>
+      <% end %>
+
+      <a style="width:100%;" href="<%= url %>">
        <% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
        <% else %>
-          <img style="width:100%;" src="/vi/<%= item.videos[0]?.try &.id %>/mqdefault.jpg"/>
+          <div class="thumbnail">
+            <img class="thumbnail" src="/vi/<%= item.videos[0]?.try &.id %>/mqdefault.jpg"/>
+            <p class="length"><%= recode_length_seconds(item.videos[0]?.try &.length_seconds || 0) %></p>
+          </div>
        <% end %>
        <p><%= item.title %></p>
      </a>
@@ -26,16 +34,49 @@
      </p>
      <p><%= number_with_separator(item.video_count) %> videos</p>
      <p>PLAYLIST</p>
-    <% else %>
-      <% if item.responds_to?(:playlists) && !item.playlists.empty? %>
-        <% params = "&list=#{item.playlists[0]}" %>
-      <% else %>
-        <% params = nil %>
-      <% end %>
-
-      <a style="width:100%;" href="/watch?v=<%= item.id %><%= params %>">
+    <% when MixVideo %>
+      <a style="width:100%;" href="/watch?v=<%= item.id %>&list=<%= item.mixes[0] %>">
        <% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
        <% else %>
-          <img style="width:100%;" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+          <div class="thumbnail">
+            <img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+            <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
+          </div>
+        <% end %>
+        <p><%= item.title %></p>
+      </a>
+      <p>
+        <b><a style="width:100%;" href="/channel/<%= item.ucid %>"><%= item.author %></a></b>
+      </p>
+    <% when PlaylistVideo %>
+      <a style="width:100%;" href="/watch?v=<%= item.id %>&list=<%= item.playlists[0] %>">
+        <% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
+        <% else %>
+          <div class="thumbnail">
+            <img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+            <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
+          </div>
+        <% end %>
+        <p><%= item.title %></p>
+      </a>
+
+      <% if item.responds_to?(:live_now) && item.live_now %>
+        <p>LIVE</p>
+      <% end %>
+
+      <p>
+        <b><a style="width:100%;" href="/channel/<%= item.ucid %>"><%= item.author %></a></b>
+      </p>
+
+      <% if Time.now - item.published > 1.minute %>
+        <h5>Shared <%= recode_date(item.published) %> ago</h5>
+      <% end %>
+    <% else %>
+      <a style="width:100%;" href="/watch?v=<%= item.id %>">
+        <% if env.get?("user") && env.get("user").as(User).preferences.thin_mode %>
+        <% else %>
+          <div class="thumbnail">
+            <img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+            <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
+          </div>
        <% end %>
        <p><%= item.title %></p>
      </a>
@@ -13,7 +13,7 @@
  </div>

  <div class="pure-control-group">
-    <label for="import_youtube">Import <a target="_blank"
+    <label for="import_youtube">Import <a rel="noopener" target="_blank"
      href="https://support.google.com/youtube/answer/6224202?hl=en-GB">YouTube subscriptions</a></label>
    <input type="file" id="import_youtube" name="import_youtube">
  </div>
@@ -0,0 +1,22 @@
+<% content_for "header" do %>
+<title><%= mix.title %> - Invidious</title>
+<% end %>
+
+<div class="pure-g h-box">
+  <div class="pure-u-2-3">
+    <h3><%= mix.title %></h3>
+  </div>
+  <div class="pure-u-1-3" style="text-align:right;">
+    <h3>
+      <a href="/feed/playlist/<%= mix.id %>"><i class="icon ion-logo-rss"></i></a>
+    </h3>
+  </div>
+</div>
+
+<% mix.videos.each_slice(4) do |slice| %>
+  <div class="pure-g">
+    <% slice.each do |item| %>
+      <%= rendered "components/item" %>
+    <% end %>
+  </div>
+<% end %>
@@ -35,7 +35,7 @@
<div class="pure-g h-box">
  <div class="pure-u-1 pure-u-md-1-5">
    <% if page >= 2 %>
-      <a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">Next page</a>
+      <a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">Previous page</a>
    <% end %>
  </div>
  <div class="pure-u-1 pure-u-md-3-5"></div>
@@ -13,13 +13,13 @@
<div class="pure-g h-box">
  <div class="pure-u-1 pure-u-md-1-5">
    <% if page >= 2 %>
-      <a href="/search?q=<%= query %>&page=<%= page - 1 %>">Previous page</a>
+      <a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page - 1 %>">Previous page</a>
    <% end %>
  </div>
  <div class="pure-u-1 pure-u-md-3-5"></div>
  <div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
    <% if count >= 20 %>
-      <a href="/search?q=<%= query %>&page=<%= page + 1 %>">Next page</a>
+      <a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page + 1 %>">Next page</a>
    <% end %>
  </div>
</div>
@@ -28,7 +28,7 @@
<div class="pure-u-1 pure-u-md-12-24 searchbar">
  <form class="pure-form" action="/search" method="get">
    <fieldset>
-      <input type="search" style="width:100%;" name="q" placeholder="search" value="<%= env.params.query["q"]? || env.get? "search" %>">
+      <input type="search" style="width:100%;" name="q" placeholder="search" value="<%= env.params.query["q"]?.try {|x| HTML.escape(x)} || env.get?("search").try {|x| HTML.escape(x.as(String)) } %>">
    </fieldset>
  </form>
</div>
@@ -22,6 +22,7 @@
<meta name="twitter:player" content="<%= host_url %>/embed/<%= video.id %>">
<meta name="twitter:player:width" content="1280">
<meta name="twitter:player:height" content="720">
+<script src="/js/watch.js"></script>
<%= rendered "components/player_sources" %>
<title><%= HTML.escape(video.title) %> - Invidious</title>
<% end %>
@@ -55,7 +56,13 @@
<p><i class="icon ion-ios-eye"></i> <%= number_with_separator(video.views) %></p>
<p><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
<p><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
-<p id="Genre">Genre: <a href="<%= video.genre_url %>"><%= video.genre %></a></p>
+<p id="Genre">Genre:
+  <% if video.genre_url.empty? %>
+    <%= video.genre %>
+  <% else %>
+    <a href="<%= video.genre_url %>"><%= video.genre %></a>
+  <% end %>
+</p>
<% if !video.license.empty? %>
  <p id="License">License: <%= video.license %></p>
<% end %>
@@ -85,20 +92,23 @@
<% if user %>
  <% if subscriptions.includes? video.ucid %>
    <p>
-      <a href="/subscription_ajax?action_remove_subscriptions=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
-        <b>Unsubscribe from <%= video.author %></b>
+      <a id="subscribe" onclick="unsubscribe()" class="pure-button pure-button-primary"
+        href="/subscription_ajax?action_remove_subscriptions=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
+        <b>Unsubscribe | <%= video.sub_count_text %></b>
      </a>
    </p>
  <% else %>
    <p>
-      <a href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
-        <b>Subscribe to <%= video.author %></b>
+      <a id="subscribe" onclick="subscribe()" class="pure-button pure-button-primary"
+        href="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>">
+        <b>Subscribe | <%= video.sub_count_text %></b>
      </a>
    </p>
  <% end %>
<% else %>
  <p>
-    <a href="/login?referer=<%= env.get("current_page") %>">
+    <a id="subscribe" class="pure-button pure-button-primary"
+      href="/login?referer=<%= env.get("current_page") %>">
      <b>Login to subscribe to <%= video.author %></b>
    </a>
  </p>
@@ -111,11 +121,15 @@
    </div>
    <hr>
    <div id="comments">
+      <h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>
    </div>
  </div>
</div>

<div class="pure-u-1 pure-u-md-1-5">
+  <% if plid %>
+    <div id="playlist" class="h-box">
+    </div>
+  <% end %>
+
  <% if !preferences || preferences && preferences.related_videos %>
    <div class="h-box">
      <% rvs.each do |rv| %>
@@ -123,7 +137,10 @@
        <a href="/watch?v=<%= rv["id"] %>">
          <% if preferences && preferences.thin_mode %>
          <% else %>
-            <img style="width:100%;" src="/vi/<%= rv["id"] %>/mqdefault.jpg">
+            <div class="thumbnail">
+              <img class="thumbnail" src="/vi/<%= rv["id"] %>/mqdefault.jpg">
+              <p class="length"><%= recode_length_seconds(rv["length_seconds"]?.try &.to_i? || 0) %></p>
+            </div>
          <% end %>
          <p style="width:100%"><%= rv["title"] %></p>
          <p>
@ -138,38 +155,20 @@
</div> </div>
<script> <script>
function toggle(target) { function number_with_separator(val) {
body = target.parentNode.parentNode.children[1]; while (/(\d+)(\d{3})/.test(val.toString())) {
if (body.style.display === null || body.style.display === "") { val = val.toString().replace(/(\d+)(\d{3})/, "$1" + "," + "$2");
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
} }
return val;
} }
function toggle_comments(target) { subscribe_button = document.getElementById("subscribe");
body = target.parentNode.parentNode.parentNode.children[1]; if (subscribe_button.getAttribute('onclick')) {
if (body.style.display === null || body.style.display === "") { subscribe_button["href"] = "javascript:void(0);";
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
} }
function get_youtube_replies(target) { function subscribe() {
var continuation = target.getAttribute("data-continuation"); var url = "/subscription_ajax?action_create_subscription_to_channel=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>";
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url =
"/api/v1/comments/<%= video.id %>?format=html&continuation=" + continuation;
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.responseType = "json"; xhr.responseType = "json";
xhr.timeout = 20000; xhr.timeout = 20000;
@ -179,21 +178,101 @@ function get_youtube_replies(target) {
xhr.onreadystatechange = function() { xhr.onreadystatechange = function() {
if (xhr.readyState == 4) { if (xhr.readyState == 4) {
if (xhr.status == 200) { if (xhr.status == 200) {
body.innerHTML = xhr.response.contentHtml; subscribe_button = document.getElementById("subscribe");
subscribe_button.onclick = unsubscribe;
subscribe_button.innerHTML = '<b>Unsubscribe | <%= video.sub_count_text %></b>'
}
}
}
}
function unsubscribe() {
var url = "/subscription_ajax?action_remove_subscriptions=1&c=<%= video.ucid %>&referer=<%= env.get("current_page") %>";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
subscribe_button = document.getElementById("subscribe");
subscribe_button.onclick = subscribe;
subscribe_button.innerHTML = '<b>Subscribe | <%= video.sub_count_text %></b>'
}
}
}
}
+
+<% if plid %>
+function get_playlist() {
+  playlist = document.getElementById("playlist");
+  playlist.innerHTML = ' \
+    <h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3> \
+    <hr>';
+
+  var plid = "<%= plid %>";
+  if (plid.startsWith("RD")) {
+    var plid_url = "/api/v1/mixes/<%= plid %>?continuation=<%= video.id %>&format=html";
   } else {
-    body.innerHTML = fallback;
+    var plid_url = "/api/v1/playlists/<%= plid %>?continuation=<%= video.id %>&format=html";
+  }
+
+  var xhr = new XMLHttpRequest();
+  xhr.responseType = "json";
+  xhr.timeout = 20000;
+  xhr.open("GET", plid_url, true);
+  xhr.send();
+
+  xhr.onreadystatechange = function() {
+    if (xhr.readyState == 4) {
+      if (xhr.status == 200) {
+        playlist.innerHTML = xhr.response.playlistHtml;
+
+        if (xhr.response.nextVideo) {
+          player.on('ended', function() {
+            window.location.replace("/watch?v="
+              + xhr.response.nextVideo
+              + "&list=<%= plid %>"
+              <% if params[:listen] %>
+              + "&listen=1"
+              <% end %>
+              <% if params[:autoplay] %>
+              + "&autoplay=1"
+              <% end %>
+              <% if params[:speed] %>
+              + "&speed=<%= params[:speed] %>"
+              <% end %>
+            );
+          });
+        }
+      } else {
+        playlist.innerHTML = "";
       }
     }
   };

   xhr.ontimeout = function() {
-    console.log("Pulling comments timed out.");
-    body.innerHTML = fallback;
+    console.log("Pulling playlist timed out.");
+    comments = document.getElementById("playlist");
+    comments.innerHTML =
+      '<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3><hr>';
+    get_playlist();
   };
 }
+
+get_playlist();
+<% end %>
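
Note: get_playlist() fetches pre-rendered playlist HTML from /api/v1/mixes or /api/v1/playlists (mix IDs start with "RD"), and if the response names a nextVideo it hooks the player's 'ended' event (player is presumably the video.js instance created elsewhere on the page) to chain into the next entry, with the listen/autoplay/speed flags spliced in server-side by the ECR conditionals. Incidentally, the timeout handler reuses the variable name "comments" for the playlist element before retrying. A client-side sketch of the same URL assembly (URLSearchParams is an editorial substitution, not what the patch uses; listen/autoplay/speed stand in for the server-rendered params):

    var query = new URLSearchParams({ v: xhr.response.nextVideo, list: "<%= plid %>" });
    if (listen)   query.set("listen", "1");
    if (autoplay) query.set("autoplay", "1");
    if (speed)    query.set("speed", speed);
    window.location.replace("/watch?" + query.toString());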
 function get_reddit_comments() {
+  comments = document.getElementById("comments");
+  var fallback = comments.innerHTML;
+  comments.innerHTML =
+    '<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
+
   var url = "/api/v1/comments/<%= video.id %>?source=reddit&format=html";
   var xhr = new XMLHttpRequest();
   xhr.responseType = "json";
@@ -202,17 +281,23 @@ function get_reddit_comments() {
   xhr.send();

   xhr.onreadystatechange = function() {
-    if (xhr.readyState == 4)
+    if (xhr.readyState == 4) {
       if (xhr.status == 200) {
-        comments = document.getElementById("comments");
         comments.innerHTML = ' \
         <div> \
         <h3> \
         <a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a> \
         {title} \
         </h3> \
+        <p> \
         <b> \
-        <a target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a> \
+        <a href="javascript:void(0)" onclick="swap_comments(\'youtube\')"> \
+        View YouTube comments \
+        </a> \
+        </b> \
+        </p> \
+        <b> \
+        <a rel="noopener" target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a> \
         </b> \
         </div> \
         <div>{contentHtml}</div> \
@@ -225,10 +310,10 @@ function get_reddit_comments() {
         <% if preferences && preferences.comments[1] == "youtube" %>
         get_youtube_comments();
         <% else %>
-        comments = document.getElementById("comments");
-        comments.innerHTML = "";
+        comments.innerHTML = fallback;
         <% end %>
       }
+    }
   };

   xhr.ontimeout = function() {
@@ -239,6 +324,11 @@ function get_reddit_comments() {
 }
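
Note: both comment renderers now cross-link via swap_comments, which this diff references but never defines (it presumably lives in the shared watch-page script). A minimal sketch, assuming it only needs to dispatch to the two loaders:

    // Hypothetical definition; the real one may also persist the choice.
    function swap_comments(source) {
      if (source === "reddit") {
        get_reddit_comments();
      } else if (source === "youtube") {
        get_youtube_comments();
      }
    }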
 function get_youtube_comments() {
+  comments = document.getElementById("comments");
+  var fallback = comments.innerHTML;
+  comments.innerHTML =
+    '<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
+
   var url = "/api/v1/comments/<%= video.id %>?format=html";
   var xhr = new XMLHttpRequest();
   xhr.responseType = "json";
@@ -247,9 +337,8 @@ function get_youtube_comments() {
   xhr.send();

   xhr.onreadystatechange = function() {
-    if (xhr.readyState == 4)
+    if (xhr.readyState == 4) {
       if (xhr.status == 200) {
-        comments = document.getElementById("comments");
         if (xhr.response.commentCount > 0) {
           comments.innerHTML = ' \
           <div> \
@@ -257,11 +346,16 @@ function get_youtube_comments() {
           <a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a> \
           View {commentCount} comments \
           </h3> \
+          <b> \
+          <a href="javascript:void(0)" onclick="swap_comments(\'reddit\')"> \
+          View Reddit comments \
+          </a> \
+          </b> \
           </div> \
           <div>{contentHtml}</div> \
           <hr>'.supplant({
             contentHtml: xhr.response.contentHtml,
-            commentCount: commaSeparateNumber(xhr.response.commentCount)
+            commentCount: number_with_separator(xhr.response.commentCount)
           });
         } else {
           comments.innerHTML = "";
@@ -270,36 +364,66 @@ function get_youtube_comments() {
         <% if preferences && preferences.comments[1] == "youtube" %>
         get_youtube_comments();
         <% else %>
-        comments = document.getElementById("comments");
         comments.innerHTML = "";
         <% end %>
       }
+    }
   };

   xhr.ontimeout = function() {
     console.log("Pulling comments timed out.");
-    comments = document.getElementById("comments");
     comments.innerHTML =
       '<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
     get_youtube_comments();
   };
 }
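
Note: both comment templates rely on String.prototype.supplant, whose definition is removed from this file just below (it must survive elsewhere, since the new code still calls it). As the removed implementation shows, it replaces each {key} placeholder with o[key] when that value is a string or number, and leaves the placeholder intact otherwise:

    '<div>{contentHtml}</div>'.supplant({ contentHtml: "<p>hi</p>" });
    // => '<div><p>hi</p></div>'
    'count: {n}'.supplant({}); // => 'count: {n}' — unknown keys are left alone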
-function commaSeparateNumber(val){
-  while (/(\d+)(\d{3})/.test(val.toString())){
-    val = val.toString().replace(/(\d+)(\d{3})/, '$1'+','+'$2');
-  }
-  return val;
-}
-
-String.prototype.supplant = function(o) {
-  return this.replace(/{([^{}]*)}/g, function(a, b) {
-    var r = o[b];
-    return typeof r === "string" || typeof r === "number" ? r : a;
-  });
-};
+function get_youtube_replies(target, load_more) {
+  var continuation = target.getAttribute('data-continuation');
+
+  var body = target.parentNode.parentNode;
+  var fallback = body.innerHTML;
+  body.innerHTML =
+    '<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
+
+  var url = '/api/v1/comments/<%= video.id %>?format=html&continuation=' +
+    continuation;
+  var xhr = new XMLHttpRequest();
+  xhr.responseType = 'json';
+  xhr.timeout = 20000;
+  xhr.open('GET', url, true);
+  xhr.send();
+
+  xhr.onreadystatechange = function() {
+    if (xhr.readyState == 4) {
+      if (xhr.status == 200) {
+        if (load_more) {
+          body = body.parentNode.parentNode;
+          body.removeChild(body.lastElementChild);
+          body.innerHTML += xhr.response.contentHtml;
+        } else {
+          body.innerHTML = ' \
+          <p><a href="javascript:void(0)" \
+          onclick="hide_youtube_replies(this)">Hide replies \
+          </a></p> \
+          <div>{contentHtml}</div>'.supplant({
+            contentHtml: xhr.response.contentHtml,
+          });
+        }
+      } else {
+        body.innerHTML = fallback;
+      }
+    }
+  };
+
+  xhr.ontimeout = function() {
+    console.log('Pulling comments timed out.');
+    body.innerHTML = fallback;
+  };
+}
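
Note: the new load_more flag makes a fetched continuation replace the "Load more" link's enclosing block (body.parentNode.parentNode, then removeChild of its last element) instead of nesting under it, which keeps reply threads flat. hide_youtube_replies is referenced in the template but not defined in this diff; a minimal sketch, assuming it just collapses the replies <div> rendered after the link:

    // Hypothetical counterpart to the "Hide replies" link above.
    function hide_youtube_replies(target) {
      var replies = target.parentNode.parentNode.children[1];
      var hidden = replies.style.display === "none";
      replies.style.display = hidden ? "" : "none";
      target.innerHTML = hidden ? "Hide replies" : "Show replies";
    }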
 <% if preferences %>
   <% if preferences.comments[0] == "youtube" %>
     get_youtube_comments();