Compare commits

...

73 Commits
0.2.0 ... 0.5.0

Author SHA1 Message Date
89bda1d3db Remove migration points 2018-09-09 21:58:22 -05:00
e0ee1c3d79 Shrink size of template gutters 2018-09-09 14:50:24 -05:00
5b2c228bb6 Add 'license' 2018-09-09 14:47:26 -05:00
ffab3ee79f Shrink size between video requests 2018-09-09 14:41:29 -05:00
dc6cc028c5 Remove migration point 2018-09-09 14:34:16 -05:00
c1f17f2f82 Show quality selector even if only one source 2018-09-09 14:23:37 -05:00
1c8bd671d8 Fix link redirect for YouTube comments 2018-09-09 09:18:31 -05:00
133b72f9cf Add support for genre channels that don't end with " - Topic" 2018-09-09 08:53:04 -05:00
8c45694ce5 Escape comment text 2018-09-09 07:40:12 -05:00
bd820b9b48 Update videojs-share.js 2018-09-07 15:55:11 -05:00
47e94fedc6 Fix signature extraction 2018-09-07 15:52:46 -05:00
aff2083529 Fix missing 'end' 2018-09-06 18:18:36 -05:00
1eae76fc15 Add fix for empty descriptions 2018-09-06 16:50:12 -05:00
cf63c825d4 Add fix for shortened youtu.be links in comments 2018-09-06 16:45:15 -05:00
446d8569a4 Bump version to match tag 2018-09-06 10:54:12 -05:00
454b1662b7 Add format=json for reddit comments 2018-09-06 10:19:28 -05:00
3ec684ae71 Host assets locally 2018-09-06 09:59:17 -05:00
b17d3d1e51 Bump number of videos in channel resources to 60 2018-09-06 08:43:22 -05:00
d81a803618 Add /user/:user/videos 2018-09-05 23:12:11 -05:00
e6d2166bac Add X-XSS-Protection and X-Content-Type-Options 2018-09-05 21:51:40 -05:00
e590d39aa9 Revert "Add header check for CSRF"
This reverts commit a749ac73ac.
2018-09-05 21:45:14 -05:00
4f91854bd3 Fix typo 2018-09-05 21:10:32 -05:00
29a21860ae Strip leading slashes from referers 2018-09-05 21:07:19 -05:00
96234e509f Add X-Frame-Options, X-XSS-Protection, and X-Content-Type-Options 2018-09-05 21:06:30 -05:00
a749ac73ac Add header check for CSRF 2018-09-05 20:32:01 -05:00
62f023c50f Add 'https_only' default 2018-09-05 20:31:08 -05:00
29dc114f7a Bump supported Crystal version 2018-09-05 20:30:44 -05:00
023066b452 Revert "Remove 'codecs' from source types"
This reverts commit 93e12d94fc.
2018-09-05 10:49:40 -05:00
93e12d94fc Remove 'codecs' from source types 2018-09-05 10:38:01 -05:00
044a57ef34 Fix video count for channels 2018-09-04 23:01:46 -05:00
bc49c7d181 Add author info to API endpoints 2018-09-04 21:35:25 -05:00
5632e58636 Add support for genre channels 2018-09-04 21:04:40 -05:00
e1bf7fa6cc Add descriptionHtml to playlists 2018-09-04 19:27:10 -05:00
40028e1462 Update SQL and remove migration points 2018-09-04 09:57:40 -05:00
53732cdcab Add genre URLs 2018-09-04 09:50:19 -05:00
2ac89d5e00 Update project synopsis 2018-09-04 09:22:10 -05:00
98d71ca8e7 Add support for /c/ URLs 2018-09-04 09:13:58 -05:00
0f2f273335 Don't leak referers 2018-09-04 09:01:43 -05:00
000cfd4834 Don't show comments when commentCount is 0 2018-09-04 08:52:39 -05:00
25c3ee034e Minor refactor 2018-09-04 08:52:30 -05:00
89d3587861 Fix typo 2018-09-03 22:20:20 -05:00
0d8f036bf1 Replace YouTube links 2018-09-03 22:15:47 -05:00
81c520e0dd Add info to README 2018-09-03 21:42:49 -05:00
c0bda13965 Fix view_count_text 2018-08-31 22:53:41 -05:00
3b1df75061 Merge pull request #143 from dimqua/patch-1
Change the color of progressBar marker
2018-08-31 18:20:30 -05:00
eda5beaed5 Change the color of progressBar marker 2018-08-31 16:49:02 +03:00
4022670cb1 Fix typo in video params 2018-08-30 21:04:41 -05:00
7b135a6d0c Add commentCount for videos with no comments 2018-08-30 21:03:22 -05:00
bdaa8a06fd Fix typo 2018-08-30 20:25:43 -05:00
b3f9059452 Add comment formatting 2018-08-30 20:06:08 -05:00
917d220623 Fix search filters 2018-08-30 17:42:30 -05:00
ed8ddbc07d Add seperator when notifications > 0 2018-08-30 16:52:29 -05:00
cb01b50fbb Add option to hide related videos 2018-08-30 16:49:38 -05:00
6b3c9d23d0 Fix referer on 404 2018-08-30 08:14:59 -05:00
3839013a37 Use '/video' page for channel endpoint 2018-08-28 20:29:08 -05:00
9d5dddab29 Fix signature extraction 2018-08-28 09:51:59 -05:00
45fa148380 Don't add playlist id for channel videos 2018-08-27 18:53:34 -05:00
2ba0063dc0 Add search filters 2018-08-27 15:23:25 -05:00
b57176d7ef Fix notification count in subscription feed 2018-08-27 13:46:50 -05:00
0dbef6ab9f Fix typo in preferred_captions 2018-08-26 15:00:19 -05:00
8fc4dcfdea Use username for /data_control 2018-08-25 21:49:18 -05:00
6c98513153 Add referer to /data_control 2018-08-25 21:48:20 -05:00
c3d8ca68b3 Add code to calculate video rating 2018-08-25 21:34:11 -05:00
a37692cce4 Fix 'to_json' for comment array 2018-08-25 21:33:53 -05:00
a1ad561b98 Fix /clear_watch_history 2018-08-25 21:33:33 -05:00
7fd0f93d02 Add support for preferences as query params 2018-08-25 20:05:51 -05:00
23aaf7f1b7 Add comments fallback 2018-08-25 18:33:15 -05:00
41a04e7c67 Clean up /videoplayback 2018-08-25 17:24:07 -05:00
77b12b6249 Only show next page when there are more results 2018-08-25 17:18:43 -05:00
78fcf579a7 Add Liberapay 2018-08-25 15:43:39 -05:00
9ae3bf216e Update signature extraction 2018-08-24 07:17:16 -05:00
0e7c56687b Add error message for comment timeouts 2018-08-23 16:55:26 -05:00
01a80995d3 Add fix for channel endpoint where channel has no subscribers 2018-08-22 11:06:31 -05:00
46 changed files with 3152 additions and 563 deletions

View File

@ -1,7 +1,27 @@
# Invidious
## Invidious is what YouTube should be
## Invidious is an alternative front-end to YouTube
- Audio-only (and no need to keep window open on mobile)
- [Open-source](https://github.com/omarroth/invidious) (AGPLv3 licensed)
- No ads
- No need to create a Google account to save subscriptions
- Lightweight (homepage is ~4 KB compressed)
- Tools for managing subscriptions:
- Only show unseen videos
- Only show latest (or latest unseen) video from each channel
- Delivers notifications from all subscribed channels
- Automatically redirect homepage to feed
- Import subscriptions from YouTube
- Dark mode
- Embed support
- Set default player options (speed, quality, autoplay, loop)
- Does not require JS to play videos
- Support for Reddit comments in place of YT comments
- Import/Export subscriptions, watch history, preferences
- Does not use any of the official YouTube APIs
Liberapay: https://liberapay.com/omarroth
Patreon: https://patreon.com/omarroth
BTC: 356DpZyMXu6rYd55Yqzjs29n79kGKWcYrY
BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk

View File

@ -171,6 +171,11 @@ div {
background-color: rgba(0, 182, 240, 1);
}
/* ProgressBar marker */
.vjs-marker {
background-color: rgba(255, 255, 255, 1);
}
/* Big "Play" Button */
.video-js .vjs-big-play-button {
background-color: rgba(35, 35, 35, 0.5);

7
assets/css/grids-responsive-min.css vendored Normal file

File diff suppressed because one or more lines are too long

11
assets/css/ionicons.min.css vendored Normal file

File diff suppressed because one or more lines are too long

11
assets/css/pure-min.css vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
.vjs-quality-selector .vjs-menu-button{margin:0;padding:0;height:100%;width:100%}.vjs-quality-selector .vjs-icon-placeholder{font-family:'VideoJS';font-weight:normal;font-style:normal}.vjs-quality-selector .vjs-icon-placeholder:before{content:'\f110'}.vjs-quality-changing .vjs-big-play-button{display:none}.vjs-quality-changing .vjs-control-bar{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;visibility:visible;opacity:1}

1
assets/css/video-js.min.css vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,7 @@
/**
* videojs-share
* @version 1.1.0
* @copyright 2018 Mikhail Khazov <mkhazov.work@gmail.com>
* @license MIT
*/
.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-modal-dialog-content{display:flex;align-items:center;padding:0;background-image:linear-gradient(to bottom, rgba(0,0,0,0.77), rgba(0,0,0,0.75))}.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button{position:absolute;right:0;top:5px;width:30px;height:30px;color:#fff;cursor:pointer;opacity:0.9;transition:opacity 0.25s ease-out}.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button:before{content:'×';font-size:20px;line-height:15px}.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button:hover{opacity:1}.video-js .vjs-share{display:flex;flex-direction:column;justify-content:space-around;align-items:center;width:100%;height:100%;max-height:400px}.video-js .vjs-share__top,.video-js .vjs-share__middle,.video-js .vjs-share__bottom{display:flex}.video-js .vjs-share__top,.video-js .vjs-share__middle{flex-direction:column;justify-content:space-between}.video-js .vjs-share__middle{padding:0 25px}.video-js .vjs-share__title{align-self:center;font-size:22px;color:#fff}.video-js .vjs-share__subtitle{width:100%;margin:0 auto 12px;font-size:16px;color:#fff;opacity:0.7}.video-js .vjs-share__short-link-wrapper{position:relative;display:block;width:100%;height:40px;margin:0 auto;margin-bottom:15px;border:0;color:rgba(255,255,255,0.65);background-color:#363636;outline:none;overflow:hidden;flex-shrink:0}.video-js .vjs-share__short-link{display:block;width:100%;height:100%;padding:0 40px 0 15px;border:0;color:rgba(255,255,255,0.65);background-color:#363636;outline:none}.video-js .vjs-share__btn{position:absolute;right:0;bottom:0;height:40px;width:40px;display:flex;align-items:center;padding:0 11px;border:0;color:#fff;background-color:#2e2e2e;background-size:18px 19px;background-position:center;background-repeat:no-repeat;cursor:pointer;outline:none;transition:width 0.3s ease-out, padding 0.3s ease-out}.video-js .vjs-share__btn svg{flex-shrink:0}.video-js .vjs-share__btn span{position:relative;padding-left:10px;opacity:0;transition:opacity 0.3s ease-out}.video-js .vjs-share__btn:hover{justify-content:center;width:100%;padding:0 40px;background-image:none}.video-js .vjs-share__btn:hover span{opacity:1}.video-js .vjs-share__socials{display:flex;flex-wrap:wrap;justify-content:center;align-content:flex-start;transition:width 0.3s ease-out, height 0.3s ease-out}.video-js .vjs-share__social{display:flex;justify-content:center;align-items:center;flex-shrink:0;width:32px;height:32px;margin-right:6px;margin-bottom:6px;cursor:pointer;font-size:8px;transition:transform 0.3s ease-out, filter 0.2s ease-out;border:none;outline:none}.video-js .vjs-share__social:hover{filter:brightness(115%)}.video-js .vjs-share__social svg{width:100%;max-height:24px}.video-js .vjs-share__social_vk{background-color:#5d7294}.video-js .vjs-share__social_ok{background-color:#ed7c20}.video-js .vjs-share__social_mail{background-color:#134785}.video-js .vjs-share__social_tw{background-color:#76aaeb}.video-js .vjs-share__social_reddit{background-color:#ff4500}.video-js .vjs-share__social_fbFeed{background-color:#475995}.video-js .vjs-share__social_messenger{background-color:#0084ff}.video-js .vjs-share__social_gp{background-color:#d53f35}.video-js .vjs-share__social_linkedin{background-color:#0077b5}.video-js .vjs-share__social_viber{background-color:#766db5}.video-js .vjs-share__social_telegram{background-color:#4bb0e2}.video-js .vjs-share__social_whatsapp{background-color:#78c870}.video-js .vjs-share__bottom{justify-content:center}@media (max-height: 
220px){.video-js .vjs-share .hidden-xs{display:none}}@media (max-height: 350px){.video-js .vjs-share .hidden-sm{display:none}}@media (min-height: 400px){.video-js .vjs-share__title{margin-bottom:15px}.video-js .vjs-share__short-link-wrapper{margin-bottom:30px}}@media (min-width: 320px){.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button{right:5px;top:10px}}@media (min-width: 660px){.video-js.vjs-videojs-share_open .vjs-modal-dialog .vjs-close-button{right:20px;top:20px}.video-js .vjs-share__social{width:40px;height:40px}}

1
assets/css/videojs.markers.min.css vendored Normal file
View File

@ -0,0 +1 @@
.vjs-marker{position:absolute;left:0;bottom:0;opacity:1;height:100%;transition:opacity .2s ease;-webkit-transition:opacity .2s ease;-moz-transition:opacity .2s ease;z-index:100}.vjs-marker:hover{cursor:pointer;-webkit-transform:scale(1.3,1.3);-moz-transform:scale(1.3,1.3);-o-transform:scale(1.3,1.3);-ms-transform:scale(1.3,1.3);transform:scale(1.3,1.3)}.vjs-tip{visibility:hidden;display:block;opacity:.8;padding:5px;font-size:10px;position:absolute;bottom:14px;z-index:100000}.vjs-tip .vjs-tip-arrow{background:url(data:image/gif;base64,R0lGODlhCQAJAIABAAAAAAAAACH5BAEAAAEALAAAAAAJAAkAAAIRjAOnwIrcDJxvwkplPtchVQAAOw==) no-repeat top left;bottom:0;left:50%;margin-left:-4px;background-position:bottom left;position:absolute;width:9px;height:5px}.vjs-tip .vjs-tip-inner{border-radius:3px;-moz-border-radius:3px;-webkit-border-radius:3px;padding:5px 8px 4px 8px;background-color:#000;color:#fff;max-width:200px;text-align:center}.vjs-break-overlay{visibility:hidden;position:absolute;z-index:100000;top:0}.vjs-break-overlay .vjs-break-overlay-text{padding:9px;text-align:center}

BIN
assets/fonts/ionicons.eot Normal file

Binary file not shown.

2090
assets/fonts/ionicons.svg Normal file

File diff suppressed because it is too large Load Diff


BIN
assets/fonts/ionicons.ttf Normal file

Binary file not shown.

BIN
assets/fonts/ionicons.woff Normal file

Binary file not shown.

BIN
assets/fonts/ionicons.woff2 Normal file

Binary file not shown.

File diff suppressed because one or more lines are too long

7
assets/js/video.min.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
/*! @name videojs-contrib-quality-levels @version 2.0.7 @license Apache-2.0 */
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t(require("video.js"),require("global/document")):"function"==typeof define&&define.amd?define(["video.js","global/document"],t):e.videojsContribQualityLevels=t(e.videojs,e.document)}(this,function(e,t){"use strict";e=e&&e.hasOwnProperty("default")?e.default:e,t=t&&t.hasOwnProperty("default")?t.default:t;var n=function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")},r=function(e,t){if(!e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!t||"object"!=typeof t&&"function"!=typeof t?e:t},i=function(i){function o(){n(this,o);var l=r(this,i.call(this)),s=l;if(e.browser.IS_IE8)for(var u in s=t.createElement("custom"),o.prototype)"constructor"!==u&&(s[u]=o.prototype[u]);return s.levels_=[],s.selectedIndex_=-1,Object.defineProperty(s,"selectedIndex",{get:function(){return s.selectedIndex_}}),Object.defineProperty(s,"length",{get:function(){return s.levels_.length}}),r(l,s)}return function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function, not "+typeof t);e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),t&&(Object.setPrototypeOf?Object.setPrototypeOf(e,t):e.__proto__=t)}(o,i),o.prototype.addQualityLevel=function(r){var i=this.getQualityLevelById(r.id);if(i)return i;var o=this.levels_.length;return i=new function r(i){n(this,r);var o=this;if(e.browser.IS_IE8)for(var l in o=t.createElement("custom"),r.prototype)"constructor"!==l&&(o[l]=r.prototype[l]);return o.id=i.id,o.label=o.id,o.width=i.width,o.height=i.height,o.bitrate=i.bandwidth,o.enabled_=i.enabled,Object.defineProperty(o,"enabled",{get:function(){return o.enabled_()},set:function(e){o.enabled_(e)}}),o}(r),""+o in this||Object.defineProperty(this,o,{get:function(){return this.levels_[o]}}),this.levels_.push(i),this.trigger({qualityLevel:i,type:"addqualitylevel"}),i},o.prototype.removeQualityLevel=function(e){for(var t=null,n=0,r=this.length;n<r;n++)if(this[n]===e){t=this.levels_.splice(n,1)[0],this.selectedIndex_===n?this.selectedIndex_=-1:this.selectedIndex_>n&&this.selectedIndex_--;break}return t&&this.trigger({qualityLevel:e,type:"removequalitylevel"}),t},o.prototype.getQualityLevelById=function(e){for(var t=0,n=this.length;t<n;t++){var r=this[t];if(r.id===e)return r}return null},o.prototype.dispose=function(){this.selectedIndex_=-1,this.levels_.length=0},o}(e.EventTarget);for(var o in i.prototype.allowedEvents_={change:"change",addqualitylevel:"addqualitylevel",removequalitylevel:"removequalitylevel"},i.prototype.allowedEvents_)i.prototype["on"+o]=null;var l=function(t){return n=this,e.mergeOptions({},t),r=n.qualityLevels,o=new i,n.on("dispose",function e(){o.dispose(),n.qualityLevels=r,n.off("dispose",e)}),n.qualityLevels=function(){return o},n.qualityLevels.VERSION="__VERSION__",o;var n,r,o};return(e.registerPlugin||e.plugin)("qualityLevels",l),l.VERSION="__VERSION__",l});

14
assets/js/videojs-http-streaming.min.js vendored Normal file

File diff suppressed because one or more lines are too long

4
assets/js/videojs-markers.min.js vendored Normal file

File diff suppressed because one or more lines are too long

7
assets/js/videojs-share.min.js vendored Normal file

File diff suppressed because one or more lines are too long

2
assets/js/videojs.hotkeys.min.js vendored Normal file
View File

@ -0,0 +1,2 @@
/* videojs-hotkeys v0.2.22 - https://github.com/ctd1500/videojs-hotkeys */
!function(e,t){"undefined"!=typeof window&&window.videojs?t(window.videojs):"function"==typeof define&&define.amd?define("videojs-hotkeys",["video.js"],function(e){return t(e.default||e)}):"undefined"!=typeof module&&module.exports&&(module.exports=t(require("video.js")))}(0,function(s){"use strict";"undefined"!=typeof window&&(window.videojs_hotkeys={version:"0.2.22"});(s.registerPlugin||s.plugin)("hotkeys",function(m){var y=this,v=y.el(),f=document,e={volumeStep:.1,seekStep:5,enableMute:!0,enableVolumeScroll:!0,enableHoverScroll:!0,enableFullscreen:!0,enableNumbers:!0,enableJogStyle:!1,alwaysCaptureHotkeys:!1,enableModifiersForNumbers:!0,enableInactiveFocus:!0,skipInitialFocus:!1,playPauseKey:function(e){return 32===e.which||179===e.which},rewindKey:function(e){return 37===e.which||177===e.which},forwardKey:function(e){return 39===e.which||176===e.which},volumeUpKey:function(e){return 38===e.which},volumeDownKey:function(e){return 40===e.which},muteKey:function(e){return 77===e.which},fullscreenKey:function(e){return 70===e.which},customKeys:{}},t=s.mergeOptions||s.util.mergeOptions,d=(m=t(e,m||{})).volumeStep,n=m.seekStep,p=m.enableMute,r=m.enableVolumeScroll,o=m.enableHoverScroll,b=m.enableFullscreen,h=m.enableNumbers,w=m.enableJogStyle,k=m.alwaysCaptureHotkeys,S=m.enableModifiersForNumbers,u=m.enableInactiveFocus,l=m.skipInitialFocus;v.hasAttribute("tabIndex")||v.setAttribute("tabIndex","-1"),v.style.outline="none",!k&&y.autoplay()||l||y.one("play",function(){v.focus()}),u&&y.on("userinactive",function(){var n=function(){clearTimeout(e)},e=setTimeout(function(){y.off("useractive",n);var e=f.activeElement,t=v.querySelector(".vjs-control-bar");e&&e.parentElement==t&&v.focus()},10);y.one("useractive",n)}),y.on("play",function(){var e=v.querySelector(".iframeblocker");e&&""===e.style.display&&(e.style.display="block",e.style.bottom="39px")});var i=!1,c=v.querySelector(".vjs-volume-menu-button")||v.querySelector(".vjs-volume-panel");c.onmouseover=function(){i=!0},c.onmouseout=function(){i=!1};var a=function(e){if(o)var t=0;else t=f.activeElement;if(y.controls()&&(k||t==v||t==v.querySelector(".vjs-tech")||t==v.querySelector(".iframeblocker")||t==v.querySelector(".vjs-control-bar")||i)&&r){e=window.event||e;var n=Math.max(-1,Math.min(1,e.wheelDelta||-e.detail));e.preventDefault(),1==n?y.volume(y.volume()+d):-1==n&&y.volume(y.volume()-d)}},K=function(e,t){return m.playPauseKey(e,t)?1:m.rewindKey(e,t)?2:m.forwardKey(e,t)?3:m.volumeUpKey(e,t)?4:m.volumeDownKey(e,t)?5:m.muteKey(e,t)?6:m.fullscreenKey(e,t)?7:void 0};function q(e){return"function"==typeof n?n(e):n}return y.on("keydown",function(e){var t,n,r=e.which,o=e.preventDefault,u=y.duration();if(y.controls()){var l=f.activeElement;if(k||l==v||l==v.querySelector(".vjs-tech")||l==v.querySelector(".vjs-control-bar")||l==v.querySelector(".iframeblocker"))switch(K(e,y)){case 1:o(),k&&e.stopPropagation(),y.paused()?y.play():y.pause();break;case 2:t=!y.paused(),o(),t&&y.pause(),(n=y.currentTime()-q(e))<=0&&(n=0),y.currentTime(n),t&&y.play();break;case 3:t=!y.paused(),o(),t&&y.pause(),u<=(n=y.currentTime()+q(e))&&(n=t?u-.001:u),y.currentTime(n),t&&y.play();break;case 5:o(),w?(n=y.currentTime()-1,y.currentTime()<=1&&(n=0),y.currentTime(n)):y.volume(y.volume()-d);break;case 4:o(),w?(u<=(n=y.currentTime()+1)&&(n=u),y.currentTime(n)):y.volume(y.volume()+d);break;case 6:p&&y.muted(!y.muted());break;case 
7:b&&(y.isFullscreen()?y.exitFullscreen():y.requestFullscreen());break;default:if((47<r&&r<59||95<r&&r<106)&&(S||!(e.metaKey||e.ctrlKey||e.altKey))&&h){var i=48;95<r&&(i=96);var c=r-i;o(),y.currentTime(y.duration()*c*.1)}for(var a in m.customKeys){var s=m.customKeys[a];s&&s.key&&s.handler&&s.key(e)&&(o(),s.handler(y,m,e))}}}}),y.on("dblclick",function(e){if(y.controls()){var t=e.relatedTarget||e.toElement||f.activeElement;t!=v&&t!=v.querySelector(".vjs-tech")&&t!=v.querySelector(".iframeblocker")||b&&(y.isFullscreen()?y.exitFullscreen():y.requestFullscreen())}}),y.on("mousewheel",a),y.on("DOMMouseScroll",a),this})});

View File

@ -7,4 +7,5 @@ db:
host: localhost
port: 5432
dbname: invidious
full_refresh: false
full_refresh: false
https_only: false

View File

@ -20,6 +20,8 @@ CREATE TABLE public.videos
allowed_regions text[] COLLATE pg_catalog."default",
is_family_friendly boolean,
genre text COLLATE pg_catalog."default",
genre_url text COLLATE pg_catalog."default",
license text COLLATE pg_catalog."default",
CONSTRAINT videos_pkey PRIMARY KEY (id)
)
WITH (
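
The two new columns above extend the videos table schema. A database created before this change would be missing them; a minimal sketch of the equivalent manual step (an assumption, not part of this diff — run against the same PostgreSQL database Invidious uses, e.g. through the PG_DB handle from src/invidious.cr; column names and types are taken from the hunk above):

PG_DB.exec("ALTER TABLE videos ADD COLUMN IF NOT EXISTS genre_url text") # assumed manual migration step
PG_DB.exec("ALTER TABLE videos ADD COLUMN IF NOT EXISTS license text")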

View File

@ -1,5 +1,5 @@
name: invidious
version: 0.2.0
version: 0.4.0
authors:
- Omar Roth <omarroth@hotmail.com>
@ -9,13 +9,13 @@ targets:
main: src/invidious.cr
dependencies:
kemal:
github: kemalcr/kemal
pg:
github: will/crystal-pg
detect_language:
github: detectlanguage/detectlanguage-crystal
kemal:
github: kemalcr/kemal
pg:
github: will/crystal-pg
crystal: 0.26.0
crystal: 0.26.1
license: AGPLv3

View File

@ -1,4 +1,4 @@
# "Invidious" (which is what YouTube should be)
# "Invidious" (which is an alternative front-end to YouTube)
# Copyright (C) 2018 Omar Roth
#
# This program is free software: you can redistribute it and/or modify
@ -106,6 +106,9 @@ spawn do
end
before_all do |env|
env.response.headers["X-XSS-Protection"] = "1; mode=block;"
env.response.headers["X-Content-Type-Options"] = "nosniff"
if env.request.cookies.has_key? "SID"
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
@ -215,8 +218,9 @@ get "/watch" do |env|
end
subscriptions ||= [] of String
autoplay, video_loop, video_start, video_end, listen, raw, quality, controls = process_video_params(env.params.query, preferences)
if listen
params = process_video_params(env.params.query, preferences)
if params[:listen]
env.params.query.delete_all("listen")
end
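
process_video_params itself is not part of this diff, but the call sites in this file show the refactor: instead of unpacking eight positional values, the handler now receives a single named tuple and reads fields such as params[:listen], params[:raw], params[:quality] and params[:preferred_captions]. A rough sketch of the implied shape (field names are inferred from the diff, values are placeholders, and the real definition may differ):

# Inferred shape only; the actual return value of process_video_params is not shown in this diff.
params = {
  autoplay:           false,
  video_loop:         false,
  video_start:        0,
  video_end:          -1,
  listen:             false,
  raw:                false,
  quality:            "hd720",
  controls:           true,
  preferred_captions: [] of String,
}
params[:listen] # => false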
@ -234,17 +238,21 @@ get "/watch" do |env|
audio_streams = video.audio_streams(adaptive_fmts)
captions = video.captions
if preferences
preferred_captions = captions.select { |caption| preferences.captions.includes? caption.name.simpleText }
preferred_captions.sort_by! { |caption| preferences.captions.index(caption.name.simpleText).not_nil! }
captions = captions - preferred_captions
end
preferred_captions ||= [] of Caption
preferred_captions = captions.select { |caption|
params[:preferred_captions].includes?(caption.name.simpleText) ||
params[:preferred_captions].includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params[:preferred_captions].index(caption.name.simpleText) ||
params[:preferred_captions].index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = "16:9"
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = add_alt_links(video.description)
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -259,11 +267,11 @@ get "/watch" do |env|
# TODO: Find highest resolution thumbnail automatically
thumbnail = "https://i.ytimg.com/vi/#{video.id}/mqdefault.jpg"
if raw
if params[:raw]
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == quality
if fmt["label"].split(" - ")[0] == params[:quality]
url = fmt["url"]
end
end
@ -276,7 +284,9 @@ get "/watch" do |env|
rvs << HTTP::Params.parse(rv).to_h
end
# rating = (video.likes.to_f/(video.likes.to_f + video.dislikes.to_f) * 4 + 1)
rating = video.info["avg_rating"].to_f64
engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)
playability_status = video.player_response["playabilityStatus"]?
@ -313,21 +323,7 @@ get "/embed/:id" do |env|
next env.redirect url
end
autoplay, video_loop, video_start, video_end, listen, raw, quality, controls = process_video_params(env.params.query, nil)
preferred_captions = [] of Caption
preferences = Preferences.from_json({
"video_loop" => video_loop,
"autoplay" => autoplay,
"speed" => 1.0,
"quality" => quality,
"volume" => 100,
"max_results" => 0,
"sort" => "",
"latest_only" => false,
"unseen_only" => false,
"dark_mode" => false,
}.to_json)
aspect_ratio = nil
params = process_video_params(env.params.query, nil)
begin
video = get_video(id, PG_DB)
@ -343,8 +339,20 @@ get "/embed/:id" do |env|
captions = video.captions
preferred_captions = captions.select { |caption|
params[:preferred_captions].includes?(caption.name.simpleText) ||
params[:preferred_captions].includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params[:preferred_captions].index(caption.name.simpleText) ||
params[:preferred_captions].index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = nil
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = add_alt_links(video.description)
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -359,11 +367,11 @@ get "/embed/:id" do |env|
# TODO: Find highest resolution thumbnail automatically
thumbnail = "https://i.ytimg.com/vi/#{video.id}/mqdefault.jpg"
if raw
if params[:raw]
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == quality
if fmt["label"].split(" - ")[0] == params[:quality]
url = fmt["url"]
end
end
@ -424,8 +432,32 @@ get "/search" do |env|
page = env.params.query["page"]?.try &.to_i?
page ||= 1
search_params = build_search_params(sort_by: "relevance", content_type: "video")
videos = search(query, page, search_params)
sort = "relevance"
date = ""
duration = ""
features = [] of String
operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
operators.each do |operator|
key, value = operator.split(":")
case key
when "sort"
sort = value
when "date"
date = value
when "duration"
duration = value
when "features"
features = value.split(",")
end
end
search_query = (query.split(" ") - operators).join(" ")
search_params = build_search_params(sort: sort, date: date, content_type: "video",
duration: duration, features: features)
count, videos = search(search_query, page, search_params).as(Tuple)
templated "search"
end
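
The rewritten /search handler above recognizes filter operators embedded directly in the query string (sort:, date:, duration:, features:) and strips them out before running the actual search. A small sketch of that split, using a made-up query:

query = "crystal lang sort:upload_date duration:long features:hd,subtitles"
operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
# operators    => ["sort:upload_date", "duration:long", "features:hd,subtitles"]
search_query = (query.split(" ") - operators).join(" ")
# search_query => "crystal lang"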
@ -761,14 +793,19 @@ post "/preferences" do |env|
volume = env.params.body["volume"]?.try &.as(String).to_i?
volume ||= 100
comments = env.params.body["comments"]?
comments ||= "youtube"
comments_0 = env.params.body["comments_0"]?.try &.as(String) || "youtube"
comments_1 = env.params.body["comments_1"]?.try &.as(String) || ""
comments = [comments_0, comments_1]
captions_0 = env.params.body["captions_0"]?.try &.as(String) || ""
captions_1 = env.params.body["captions_1"]?.try &.as(String) || ""
captions_2 = env.params.body["captions_2"]?.try &.as(String) || ""
captions = [captions_0, captions_1, captions_2]
related_videos = env.params.body["related_videos"]?.try &.as(String)
related_videos ||= "off"
related_videos = related_videos == "on"
redirect_feed = env.params.body["redirect_feed"]?.try &.as(String)
redirect_feed ||= "off"
redirect_feed = redirect_feed == "on"
@ -807,6 +844,7 @@ post "/preferences" do |env|
"volume" => volume,
"comments" => comments,
"captions" => captions,
"related_videos" => related_videos,
"redirect_feed" => redirect_feed,
"dark_mode" => dark_mode,
"thin_mode" => thin_mode,
@ -1027,18 +1065,18 @@ post "/data_control" do |env|
body["watch_history"].as_a.each do |id|
id = id.as_s
if !user.watched.includes? id
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE id = $2", id, user.id)
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", id, user.email)
end
end
PG_DB.exec("UPDATE users SET preferences = $1 WHERE id = $2", body["preferences"].to_json, user.id)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", body["preferences"].to_json, user.email)
when "import_youtube"
subscriptions = XML.parse(body)
subscriptions.xpath_nodes(%q(//outline[@type="rss"])).each do |channel|
ucid = channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1053,7 +1091,7 @@ post "/data_control" do |env|
ucid = md["channel_id"]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1069,7 +1107,7 @@ post "/data_control" do |env|
ucid = channel["url"].as_s.match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1090,14 +1128,14 @@ post "/data_control" do |env|
db = entry.io.gets_to_end
db.scan(/youtube\.com\/watch\?v\=(?<id>[a-zA-Z0-9_-]{11})/) do |md|
if !user.watched.includes? md["id"]
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE id = $2", md["id"], user.id)
PG_DB.exec("UPDATE users SET watched = array_append(watched,$1) WHERE email = $2", md["id"], user.email)
end
end
db.scan(/youtube\.com\/channel\/(?<ucid>[a-zA-Z0-9_-]{22})/) do |md|
ucid = md["ucid"]
if !user.subscriptions.includes? ucid
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", ucid, user.id)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
begin
client = make_client(YT_URL)
@ -1197,7 +1235,7 @@ get "/clear_watch_history" do |env|
if user
user = user.as(User)
PG_DB.exec("UPDATE users SET watched = '{}' WHERE id = $1", user.id)
PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)
end
env.redirect referer
@ -1242,21 +1280,21 @@ get "/feed/subscriptions" do |env|
if preferences.notifications_only && !notifications.empty?
args = arg_array(notifications)
videos = PG_DB.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
notifications = PG_DB.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
ORDER BY published DESC", notifications, as: ChannelVideo)
notifications = [] of ChannelVideo
videos = [] of ChannelVideo
videos.sort_by! { |video| video.published }.reverse!
notifications.sort_by! { |video| video.published }.reverse!
case preferences.sort
when "alphabetically"
videos.sort_by! { |video| video.title }
notifications.sort_by! { |video| video.title }
when "alphabetically - reverse"
videos.sort_by! { |video| video.title }.reverse!
notifications.sort_by! { |video| video.title }.reverse!
when "channel name"
videos.sort_by! { |video| video.author }
notifications.sort_by! { |video| video.author }
when "channel name - reverse"
videos.sort_by! { |video| video.author }.reverse!
notifications.sort_by! { |video| video.author }.reverse!
end
else
if preferences.latest_only
@ -1347,25 +1385,51 @@ get "/feed/channel/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/feed/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
url = produce_videos_url(ucid)
response = client.get(url)
json = JSON.parse(response.body)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with?(" - Topic") ||
{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
auto_generated = true
end
if json["content_html"].as_s.empty?
if response.status_code == 500
error_message = "This channel does not exist."
halt env, status_code: 404, response: error_message
page = 1
videos = [] of SearchVideo
2.times do |i|
url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos += extract_videos(nodeset)
else
videos += extract_videos(nodeset, ucid)
end
else
next ""
break
end
end
content_html = json["content_html"].as_s
document = XML.parse_html(content_html)
channel = get_channel(ucid, client, PG_DB, pull_all_videos: false)
host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
@ -1385,18 +1449,22 @@ get "/feed/channel/:ucid" do |env|
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
extract_videos(nodeset, ucid).each do |video|
videos.each do |video|
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{video.id}" }
xml.element("yt:videoId") { xml.text video.id }
xml.element("yt:channelId") { xml.text ucid }
xml.element("yt:channelId") { xml.text video.ucid }
xml.element("title") { xml.text video.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{video.id}")
xml.element("author") do
xml.element("name") { xml.text channel.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
if auto_generated
xml.element("name") { xml.text video.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{video.ucid}" }
else
xml.element("name") { xml.text author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
end
xml.element("published") { xml.text video.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
@ -1522,11 +1590,33 @@ end
# Channels
# YouTube appears to let users set a "brand" URL that
# is different from their username, so we convert that here
get "/c/:user" do |env|
client = make_client(YT_URL)
user = env.params.url["user"]
response = client.get("/c/#{user}")
document = XML.parse_html(response.body)
anchor = document.xpath_node(%q(//a[contains(@class,"branded-page-header-title-link")]))
if !anchor
next env.redirect "/"
end
env.redirect anchor["href"]
end
get "/user/:user" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}"
end
get "/user/:user/videos" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}/videos"
end
get "/channel/:ucid" do |env|
user = env.get? "user"
if user
@ -1553,23 +1643,47 @@ get "/channel/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
if rss.status_code == 404
error_message = "This channel does not exist."
next templated "error"
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with?(" - Topic") ||
{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
auto_generated = true
end
rss = XML.parse_html(rss.body)
author = rss.xpath_node("//feed/author/name").not_nil!.content
videos = [] of SearchVideo
2.times do |i|
url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
begin
videos = extract_playlist(ucid, page)
rescue ex
error_message = ex.message
next templated "error"
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos += extract_videos(nodeset)
else
videos += extract_videos(nodeset, ucid)
end
else
break
end
end
templated "channel"
@ -1704,13 +1818,14 @@ get "/api/v1/comments/:id" do |env|
body = html.body
session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
ctoken = body.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
if !ctoken
env.response.content_type = "application/json"
if format == "json"
next {"comments" => [] of String}.to_json
else
next {"contentHtml" => ""}.to_json
next {"contentHtml" => "", "commentCount" => 0}.to_json
end
end
ctoken = ctoken["ctoken"]
@ -1748,7 +1863,7 @@ get "/api/v1/comments/:id" do |env|
if format == "json"
next {"comments" => [] of String}.to_json
else
next {"contentHtml" => ""}.to_json
next {"contentHtml" => "", "commentCount" => 0}.to_json
end
end
@ -1777,9 +1892,49 @@ get "/api/v1/comments/:id" do |env|
node_comment = node["commentRenderer"]
end
content_text = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
content_text ||= node_comment["contentText"]["runs"].as_a.map { |comment| comment["text"] }
.join("").rchop('\ufeff')
content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
if content_html
content_html = HTML.escape(content_html)
end
content_html ||= node_comment["contentText"]["runs"].as_a.map do |run|
text = HTML.escape(run["text"].as_s)
if run["text"] == "\n"
text = "<br>"
end
if run["bold"]?
text = "<b>#{text}</b>"
end
if run["italics"]?
text = "<i>#{text}</i>"
end
if run["navigationEndpoint"]?
url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
if url
url = URI.parse(url)
if {"m.youtube.com", "www.youtube.com", "youtu.be"}.includes? url.host
if url.path == "/redirect"
url = HTTP::Params.parse(url.query.not_nil!)["q"]
else
url = url.full_path
end
end
else
url = run["navigationEndpoint"]["commandMetadata"]?.try &.["webCommandMetadata"]["url"].as_s
end
text = %(<a href="#{url}">#{text}</a>)
end
text
end.join.rchop('\ufeff')
content_html, content = html_to_content(content_html)
author = node_comment["authorText"]?.try &.["simpleText"]
author ||= ""
@ -1807,7 +1962,8 @@ get "/api/v1/comments/:id" do |env|
published = decode_date(node_comment["publishedTimeText"]["runs"][0]["text"].as_s.rchop(" (edited)"))
json.field "content", content_text
json.field "content", content
json.field "contentHtml", content_html
json.field "published", published.epoch
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
@ -1854,6 +2010,8 @@ get "/api/v1/comments/:id" do |env|
if comments["commentCount"]?
json.field "commentCount", comments["commentCount"]
else
json.field "commentCount", 0
end
end
end
@ -1868,21 +2026,30 @@ get "/api/v1/comments/:id" do |env|
content_html = template_reddit_comments(comments)
content_html = fill_links(content_html, "https", "www.reddit.com")
content_html = add_alt_links(content_html)
content_html = replace_links(content_html)
rescue ex
comments = nil
reddit_thread = nil
content_html = ""
end
if !reddit_thread
if !reddit_thread || !comments
halt env, status_code: 404
end
env.response.content_type = "application/json"
next {"title" => reddit_thread.title,
"permalink" => reddit_thread.permalink,
"contentHtml" => content_html,
}.to_json
if format == "json"
reddit_thread = JSON.parse(reddit_thread.to_json).as_h
reddit_thread["comments"] = JSON.parse(comments.to_json)
next reddit_thread.to_json
else
next {
"title" => reddit_thread.title,
"permalink" => reddit_thread.permalink,
"contentHtml" => content_html,
}.to_json
end
end
end
@ -1911,7 +2078,7 @@ get "/api/v1/videos/:id" do |env|
generate_thumbnails(json, video.id)
end
description, video.description = html_to_description(video.description)
video.description, description = html_to_content(video.description)
json.field "description", description
json.field "descriptionHtml", video.description
@ -1929,6 +2096,7 @@ get "/api/v1/videos/:id" do |env|
json.field "isFamilyFriendly", video.is_family_friendly
json.field "allowedRegions", video.allowed_regions
json.field "genre", video.genre
json.field "genreUrl", video.genre_url
json.field "author", video.author
json.field "authorId", video.ucid
@ -2057,7 +2225,7 @@ get "/api/v1/videos/:id" do |env|
end
json.field "author", rv["author"]
json.field "lengthSeconds", rv["length_seconds"].to_i
json.field "viewCountText", rv["short_view_count_text"].rchop(" views")
json.field "viewCountText", rv["short_view_count_text"]
end
end
end
@ -2150,38 +2318,85 @@ get "/api/v1/channels/:ucid" do |env|
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/api/v1/channels/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
channel = get_channel(ucid, client, PG_DB, pull_all_videos: false)
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with?(" - Topic") ||
{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
auto_generated = true
end
page = 1
videos = [] of SearchVideo
2.times do |i|
url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos += extract_videos(nodeset)
else
videos += extract_videos(nodeset, ucid)
end
else
break
end
end
# TODO: Integrate this into `get_channel` function
# We can't get everything from RSS feed, so we get it from the channel page
channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
channel_html = XML.parse_html(channel_html)
banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
author = channel_html.xpath_node(%q(//a[contains(@class, "branded-page-header-title-link")])).not_nil!.content
author_url = channel_html.xpath_node(%q(//a[@class="channel-header-profile-image-container spf-link"])).not_nil!["href"]
author_thumbnail = channel_html.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
description = channel_html.xpath_node(%q(//meta[@itemprop="description"])).not_nil!["content"]
description_html = channel_html.xpath_node(%q(//div[contains(@class,"about-description")]))
description_html, description = html_to_content(description_html)
paid = channel_html.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
is_family_friendly = channel_html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
allowed_regions = channel_html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
sub_count, total_views, joined = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
sub_count = sub_count.content.rchop(" subscribers").delete(",").to_i64
total_views = total_views.content.rchop(" views").lchop("").delete(",").to_i64
joined = Time.parse(joined.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
latest_videos = PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid = $1 ORDER BY published DESC LIMIT 15",
channel.id, as: ChannelVideo)
total_views = 0_i64
sub_count = 0_i64
joined = Time.epoch(0)
metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
when .includes? "views"
total_views = item.content.delete("views •,").to_i64
when .includes? "subscribers"
sub_count = item.content.delete("subscribers").delete(",").to_i64
when .includes? "Joined"
joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
end
end
channel_info = JSON.build do |json|
json.object do
json.field "author", channel.author
json.field "authorId", channel.id
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", author_url
json.field "authorBanners" do
@ -2226,19 +2441,37 @@ get "/api/v1/channels/:ucid" do |env|
json.field "isFamilyFriendly", is_family_friendly
json.field "description", description
json.field "descriptionHtml", description_html
json.field "allowedRegions", allowed_regions
json.field "latestVideos" do
json.array do
latest_videos.each do |video|
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "published", video.published.epoch
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id)
end
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
json.field "published", video.published.epoch
json.field "lengthSeconds", video.length_seconds
end
end
end
@ -2256,6 +2489,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
page ||= 1
client = make_client(YT_URL)
if !ucid.match(/UC[a-zA-Z0-9_-]{22}/)
rss = client.get("/feeds/videos.xml?user=#{ucid}")
rss = XML.parse_html(rss.body)
@ -2267,43 +2501,66 @@ get "/api/v1/channels/:ucid/videos" do |env|
end
ucid = ucid.content
url = "/api/v1/channels/#{ucid}/videos"
if env.params.query
url += "?#{env.params.query}"
author = rss.xpath_node("//author/name").not_nil!.content
next env.redirect "/feed/channel/#{ucid}"
else
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}")
rss = XML.parse_html(rss.body)
ucid = rss.xpath_node("//feed/channelid")
if !ucid
error_message = "User does not exist."
next templated "error"
end
next env.redirect url
ucid = ucid.content
author = rss.xpath_node("//author/name").not_nil!.content
end
url = produce_videos_url(ucid, page)
response = client.get(url)
json = JSON.parse(response.body)
if !json["content_html"]?
env.response.content_type = "application/json"
if response.status_code == 500
response = {"Error" => "Channel does not exist"}.to_json
halt env, status_code: 404, response: response
else
next Array(String).new.to_json
end
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
if author.ends_with?(" - Topic") ||
{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
auto_generated = true
end
content_html = json["content_html"].as_s
if content_html.empty?
env.response.content_type = "application/json"
next Hash(String, String).new.to_json
end
document = XML.parse_html(content_html)
videos = [] of SearchVideo
2.times do |i|
url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
response = client.get(url)
json = JSON.parse(response.body)
videos = JSON.build do |json|
json.array do
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
extract_videos(nodeset, ucid).each do |video|
if auto_generated
videos += extract_videos(nodeset)
else
videos += extract_videos(nodeset, ucid)
end
else
break
end
end
result = JSON.build do |json|
json.array do
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id)
end
@ -2320,7 +2577,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
end
env.response.content_type = "application/json"
videos
result
end
get "/api/v1/search" do |env|
@ -2359,13 +2616,14 @@ get "/api/v1/search" do |env|
response = JSON.build do |json|
json.array do
search_results = search(query, page, search_params)
count, search_results = search(query, page, search_params).as(Tuple)
search_results.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
@ -2412,6 +2670,7 @@ get "/api/v1/playlists/:plid" do |env|
json.field "authorUrl", "/channel/#{playlist.ucid}"
json.field "description", playlist.description
json.field "descriptionHtml", playlist.description_html
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views
@ -2653,6 +2912,12 @@ get "/videoplayback" do |env|
client = make_client(URI.parse(host))
response = client.head(url)
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
env.response.headers["Access-Control-Allow-Origin"] = "*"
next env.redirect url.full_path
end
headers = env.request.headers
headers.delete("Host")
headers.delete("Cookie")
@ -2660,30 +2925,24 @@ get "/videoplayback" do |env|
headers.delete("Referer")
client.get(url, headers) do |response|
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect url.full_path
else
env.response.status_code = response.status_code
env.response.status_code = response.status_code
response.headers.each do |key, value|
env.response.headers[key] = value
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
begin
chunk_size = 4096
size = 1
while size > 0
size = IO.copy(response.body_io, env.response.output, chunk_size)
env.response.flush
Fiber.yield
end
rescue ex
break
response.headers.each do |key, value|
env.response.headers[key] = value
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
begin
chunk_size = 4096
size = 1
while size > 0
size = IO.copy(response.body_io, env.response.output, chunk_size)
env.response.flush
Fiber.yield
end
rescue ex
break
end
end
end
@ -2723,6 +2982,7 @@ public_folder "assets"
Kemal.config.powered_by_header = false
add_handler FilteredCompressHandler.new
add_handler DenyFrame.new
add_context_storage_type(User)
Kemal.run

View File

@ -73,7 +73,7 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
page = 1
loop do
url = produce_videos_url(ucid, page)
url = produce_channel_videos_url(ucid, page)
response = client.get(url)
json = JSON.parse(response.body)
@ -130,3 +130,45 @@ def fetch_channel(ucid, client, db, pull_all_videos = true)
return channel
end
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
seed = Time.epoch(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
page = "#{timestamp.epoch}"
switch = "\x36"
else
page = "#{page}"
switch = "\x00"
end
meta = "\x12\x06videos #{switch}\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
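
produce_channel_videos_url builds the length-prefixed, protobuf-style continuation token that YouTube's /browse_ajax endpoint expects, then base64- and URL-encodes it. A usage sketch, assuming the function above plus the make_client/YT_URL helpers from the rest of the codebase (the channel ID is only an example):

ucid = "UCXuqSBlHAE6Xw-yeJA0Tunw"                 # example channel ID
url = produce_channel_videos_url(ucid, 1)
# => "/browse_ajax?continuation=4qmFs..."          # exact value depends on channel, page and auto_generated
client = make_client(YT_URL)
json = JSON.parse(client.get(url).body)
json["content_html"]?                              # HTML fragment with the channel's video list, fed to extract_videos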

View File

@ -93,7 +93,7 @@ def template_youtube_comments(comments)
<div class="pure-u-23-24">
<p>
<a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
onclick="load_comments(this)">View #{child["replies"]["replyCount"]} replies</a>
onclick="get_youtube_replies(this)">View #{child["replies"]["replyCount"]} replies</a>
</p>
</div>
</div>
@ -113,7 +113,7 @@ def template_youtube_comments(comments)
- #{recode_date(Time.epoch(child["published"].as_i64))} ago
</p>
<div>
#{child["content"]}
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
#{replies_html}
</div>
</div>
@ -127,7 +127,7 @@ def template_youtube_comments(comments)
<div class="pure-u-1">
<p>
<a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
onclick="load_comments(this)">Load more</a>
onclick="get_youtube_replies(this)">Load more</a>
</p>
</div>
</div>
@ -190,37 +190,19 @@ def template_reddit_comments(root)
return html
end
def add_alt_links(html)
alt_links = [] of {String, String}
def replace_links(html)
html = XML.parse_html(html)
# This is painful but likely the only way to accomplish this in Crystal,
# as Crystigiri and others are not able to insert XML Nodes into a document.
# The goal here is to use as little regex as possible
html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
anchor = XML.parse_html(match[0])
anchor = anchor.xpath_node("//a").not_nil!
html.xpath_nodes(%q(//a)).each do |anchor|
url = URI.parse(anchor["href"])
if ["www.youtube.com", "m.youtube.com"].includes?(url.host)
if {"www.youtube.com", "m.youtube.com", "youtu.be"}.includes?(url.host)
if url.path == "/redirect"
params = HTTP::Params.parse(url.query.not_nil!)
alt_url = params["q"]?
alt_url ||= "/"
anchor["href"] = params["q"]?
else
alt_url = url.full_path
anchor["href"] = url.full_path
end
alt_link = <<-END_HTML
<a href="#{alt_url}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
elsif url.host == "youtu.be"
alt_link = <<-END_HTML
<a href="/watch?v=#{url.path.try &.lchop("/")}&#{url.query}">
<i class="icon ion-ios-link"></i>
</a>
END_HTML
elsif url.to_s == "#"
begin
length_seconds = decode_length_seconds(anchor.content)
@ -228,23 +210,12 @@ def add_alt_links(html)
length_seconds = decode_time(anchor.content)
end
alt_anchor = <<-END_HTML
<a href="javascript:void(0)" onclick="player.currentTime(#{length_seconds})">#{anchor.content}</a>
END_HTML
html = html.sub(anchor.to_s, alt_anchor)
next
else
alt_link = ""
anchor["href"] = "javascript:void(0)"
anchor["onclick"] = "player.currentTime(#{length_seconds})"
end
alt_links << {anchor.to_s, alt_link}
end
alt_links.each do |original, alternate|
html = html.sub(original, original + alternate)
end
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
return html
end
@ -267,5 +238,5 @@ def fill_links(html, scheme, host)
html = html.to_xml(options: XML::SaveOptions::NO_DECL)
end
html
return html
end
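
One step of the new replace_links deserves a closer look: YouTube wraps outbound links in /redirect?q=..., and the code above unwraps them back to the real target URL. A self-contained sketch of just that step (the URL is a made-up example):

require "uri"
require "http/params"

url = URI.parse("https://www.youtube.com/redirect?q=https%3A%2F%2Fexample.com%2F")
if url.path == "/redirect"
  # The q parameter holds the real destination; HTTP::Params.parse URL-decodes it.
  target = HTTP::Params.parse(url.query.not_nil!)["q"]
  puts target # => https://example.com/
end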

View File

@ -41,6 +41,17 @@ class FilteredCompressHandler < Kemal::Handler
end
end
class DenyFrame < Kemal::Handler
exclude ["/embed/*"]
def call(env)
return call_next env if exclude_match? env
env.response.headers["X-Frame-Options"] = "sameorigin"
call_next env
end
end
def rank_videos(db, n, filter, url)
top = [] of {Float64, String}
@ -116,81 +127,6 @@ def login_req(login_form, f_req)
return HTTP::Params.encode(data)
end
def produce_videos_url(ucid, page = 1)
page = "#{page}"
meta = "\x12\x06videos \x00\x30\x02\x38\x01\x60\x01\x6a\x00\x7a"
meta += page.size.to_u8.unsafe_chr
meta += page
meta += "\xb8\x01\x00"
meta = Base64.urlsafe_encode(meta)
meta = URI.escape(meta)
continuation = "\x12"
continuation += ucid.size.to_u8.unsafe_chr
continuation += ucid
continuation += "\x1a"
continuation += meta.size.to_u8.unsafe_chr
continuation += meta
continuation = continuation.size.to_u8.unsafe_chr + continuation
continuation = "\xe2\xa9\x85\xb2\x02" + continuation
continuation = Base64.urlsafe_encode(continuation)
continuation = URI.escape(continuation)
url = "/browse_ajax?continuation=#{continuation}"
return url
end
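# Usage sketch (illustrative, hypothetical channel ID): the continuation token
# selects a page of a channel's uploads for the /browse_ajax endpoint.
produce_videos_url("UCabcdefghijklmnopqrstuv", 2)
# => "/browse_ajax?continuation=<URI-escaped, URL-safe Base64 token>"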
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
def generate_captcha(key)
minute = Random::Secure.rand(12)
minute_angle = minute * 30
@ -240,7 +176,7 @@ def generate_captcha(key)
return {challenge: challenge, token: token}
end
def html_to_description(description_html)
def html_to_content(description_html)
if !description_html
description = ""
description_html = ""
@ -248,10 +184,15 @@ def html_to_description(description_html)
description_html = description_html.to_s
description = description_html.gsub("<br>", "\n")
description = description.gsub("<br/>", "\n")
description = XML.parse_html(description).content.strip("\n ")
if description.empty?
description = ""
else
description = XML.parse_html(description).content.strip("\n ")
end
end
return description, description_html
return description_html, description
end
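# Usage sketch (illustrative input): the helper now returns the raw HTML first,
# followed by a plain-text rendering with <br> tags converted to newlines.
description_html, description = html_to_content("Line one<br>Line two")
# description_html => "Line one<br>Line two", description => "Line one\nLine two"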
def extract_videos(nodeset, ucid = nil)
@ -319,7 +260,7 @@ def extract_videos(nodeset, ucid = nil)
view_count ||= 0_i64
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description, description_html = html_to_description(description_html)
description_html, description = html_to_content(description_html)
length_seconds = node.xpath_node(%q(.//span[@class="video-time"]))
if length_seconds

View File

@ -169,7 +169,7 @@ def get_referer(env, fallback = "/")
referer = URI.parse(referer)
# "Unroll" nested referers
# "Unroll" nested referrers
loop do
if referer.query
params = HTTP::Params.parse(referer.query.not_nil!)
@ -184,6 +184,7 @@ def get_referer(env, fallback = "/")
end
referer = referer.full_path
referer = "/" + referer.lstrip("\/\\")
if referer == env.request.path
referer = fallback
@ -191,3 +192,49 @@ def get_referer(env, fallback = "/")
return referer
end
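# Illustrative: stripping leading slashes keeps protocol-relative referers on-site,
# e.g. a hypothetical "//evil.example/phish" becomes a local path.
"/" + "//evil.example/phish".lstrip("/\\") # => "/evil.example/phish"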
def read_var_int(bytes)
numRead = 0
result = 0
read = bytes[numRead]
if bytes.size == 1
result = bytes[0].to_i32
else
while ((read & 0b10000000) != 0)
read = bytes[numRead].to_u64
value = (read & 0b01111111)
result |= (value << (7 * numRead))
numRead += 1
if numRead > 5
raise "VarInt is too big"
end
end
end
return result
end
def write_var_int(value : Int)
bytes = [] of UInt8
value = value.to_u32
if value == 0
bytes = [0_u8]
else
while value != 0
temp = (value & 0b01111111).to_u8
value = value >> 7
if value != 0
temp |= 0b10000000
end
bytes << temp
end
end
return bytes
end
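# Round-trip sketch (illustrative value): 300 encodes to the two-byte varint
# 0xAC 0x02 and decodes back.
bytes = write_var_int(300) # => [172_u8, 2_u8]
read_var_int(bytes)        # => 300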

View File

@ -2,13 +2,13 @@ def crawl_videos(db)
ids = Deque(String).new
random = Random.new
search(random.base64(3)).each do |video|
search(random.base64(3)).as(Tuple)[1].each do |video|
ids << video.id
end
loop do
if ids.empty?
search(random.base64(3)).each do |video|
search(random.base64(3)).as(Tuple)[1].each do |video|
ids << video.id
end
end

View File

@ -1,13 +1,14 @@
class Playlist
add_mapping({
title: String,
id: String,
author: String,
ucid: String,
description: String,
video_count: Int32,
views: Int64,
updated: Time,
title: String,
id: String,
author: String,
ucid: String,
description: String,
description_html: String,
video_count: Int32,
views: Int64,
updated: Time,
})
end
@ -99,7 +100,7 @@ def produce_playlist_url(id, index)
slice = URI.escape(slice)
# Outer Base64
continuation = [0x1a.to_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = [0x1a_u8, slice.bytes.size.to_u8] + slice.bytes
continuation = ucid.bytes + continuation
continuation = [0x12_u8, ucid.size.to_u8] + continuation
continuation = [0xe2_u8, 0xa9_u8, 0x85_u8, 0xb2_u8, 2_u8, continuation.size.to_u8] + continuation
@ -123,17 +124,8 @@ def fetch_playlist(plid)
title = document.xpath_node(%q(//h1[@class="pl-header-title"])).not_nil!.content
title = title.strip(" \n")
description = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description ||= document.xpath_node(%q(//span[@class="pl-header-description-text"]))
if description
description = description.to_xml.strip(" \n")
description = description.split("<button ")[0]
description = fill_links(description, "https", "www.youtube.com")
description = add_alt_links(description)
else
description = ""
end
description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1]))
description, description_html = html_to_content(description_html)
anchor = document.xpath_node(%q(//ul[@class="pl-header-details"])).not_nil!
author = anchor.xpath_node(%q(.//li[1]/a)).not_nil!.content
@ -151,6 +143,7 @@ def fetch_playlist(plid)
author,
ucid,
description,
description_html,
video_count,
views,
updated

View File

@ -14,31 +14,36 @@ end
def search(query, page = 1, search_params = build_search_params(content_type: "video"))
client = make_client(YT_URL)
if query.empty?
return {0, [] of SearchVideo}
end
html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&disable_polymer=1").body
if html.empty?
return [] of SearchVideo
return {0, [] of SearchVideo}
end
html = XML.parse_html(html)
nodeset = html.xpath_nodes(%q(//ol[@class="item-section"]/li))
videos = extract_videos(nodeset)
return videos
return {nodeset.size, videos}
end
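# Usage sketch (illustrative query): callers now destructure the {count, videos} tuple.
count, videos = search("crystal language")
videos.each { |video| puts video.id }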
def build_search_params(sort_by = "relevance", date : String = "", content_type : String = "", duration : String = "", features : Array(String) = [] of String)
def build_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
duration : String = "", features : Array(String) = [] of String)
head = "\x08"
head += case sort_by
head += case sort
when "relevance"
"\x00"
when "rating"
"\x01"
when "upload_date"
when "upload_date", "date"
"\x02"
when "view_count"
when "view_count", "views"
"\x03"
else
raise "No sort #{sort_by}"
raise "No sort #{sort}"
end
body = ""
@ -87,7 +92,7 @@ def build_search_params(sort_by = "relevance", date : String = "", content_type
"\x20\x01"
when "subtitles"
"\x28\x01"
when "creative_commons"
when "creative_commons", "cc"
"\x30\x01"
when "3d"
"\x38\x01"

View File

@ -3,23 +3,22 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
url = document.match(/src="(?<url>\/yts\/jsbin\/player-.{9}\/en_US\/base.js)"/).not_nil!["url"]
player = client.get(url).body
function_name = player.match(/\(b\|\|\(b="signature"\),d.set\(b,(?<name>[a-zA-Z0-9]{2})\(c\)\)\)/).not_nil!["name"]
function_body = player.match(/#{function_name}=function\(a\){(?<body>[^}]+)}/).not_nil!["body"]
function_name = player.match(/^(?<name>[^=]+)=function\(a\){a=a\.split\(""\)/m).not_nil!["name"]
function_body = player.match(/^#{function_name}=function\(a\){(?<body>[^}]+)}/m).not_nil!["body"]
function_body = function_body.split(";")[1..-2]
var_name = function_body[0][0, 2]
var_body = player.delete("\n").match(/var #{var_name}={(?<body>(.*?))};/).not_nil!["body"]
operations = {} of String => String
matches = player.delete("\n").match(/var #{var_name}={(?<op1>[a-zA-Z0-9]{2}:[^}]+}),(?<op2>[a-zA-Z0-9]{2}:[^}]+}),(?<op3>[a-zA-Z0-9]{2}:[^}]+})};/).not_nil!
3.times do |i|
operation = matches["op#{i + 1}"]
op_name = operation[0, 2]
var_body.split("},").each do |operation|
op_name = operation.match(/^[^:]+/).not_nil![0]
op_body = operation.match(/\{[^}]+/).not_nil![0]
op_body = operation.match(/\{[^}]+\}/).not_nil![0]
case op_body
when "{a.reverse()}"
when "{a.reverse()"
operations[op_name] = "a"
when "{a.splice(0,b)}"
when "{a.splice(0,b)"
operations[op_name] = "b"
else
operations[op_name] = "c"
@ -28,11 +27,10 @@ def fetch_decrypt_function(client, id = "CvFH_6DNRCY")
decrypt_function = [] of {name: String, value: Int32}
function_body.each do |function|
function = function.lchop(var_name + ".")
op_name = function[0, 2]
function = function.lchop(var_name).delete("[].")
function = function.lchop(op_name + "(a,")
value = function.rchop(")").to_i
op_name = function.match(/[^\(]+/).not_nil![0]
value = function.match(/\(a,(?<value>[\d]+)\)/).not_nil!["value"].to_i
decrypt_function << {name: operations[op_name], value: value}
end

View File

@ -27,22 +27,46 @@ class User
end
DEFAULT_USER_PREFERENCES = Preferences.from_json({
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => "youtube",
"captions" => ["", "", ""],
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
"video_loop" => false,
"autoplay" => false,
"speed" => 1.0,
"quality" => "hd720",
"volume" => 100,
"comments" => ["youtube", ""],
"captions" => ["", "", ""],
"related_videos" => true,
"dark_mode" => false,
"thin_mode " => false,
"max_results" => 40,
"sort" => "published",
"latest_only" => false,
"unseen_only" => false,
}.to_json)
class Preferences
module StringToArray
def self.to_json(value : Array(String), json : JSON::Builder)
json.array do
value.each do |element|
json.string element
end
end
end
def self.from_json(value : JSON::PullParser) : Array(String)
begin
result = [] of String
value.read_array do
result << value.read_string
end
rescue ex
result = [value.read_string, ""]
end
result
end
end
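# Conversion sketch (illustrative values): the converter reads the new array form,
# while the rescue branch upgrades a legacy single-string preference to [value, ""]:
#
#   StringToArray.from_json(JSON::PullParser.new(%(["youtube", "reddit"]))) # => ["youtube", "reddit"]
#   StringToArray.from_json(JSON::PullParser.new(%("reddit")))              # => ["reddit", ""]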
JSON.mapping({
video_loop: Bool,
autoplay: Bool,
@ -50,8 +74,9 @@ class Preferences
quality: String,
volume: Int32,
comments: {
type: String,
default: "youtube",
type: Array(String),
default: ["youtube", ""],
converter: StringToArray,
},
captions: {
type: Array(String),
@ -61,6 +86,10 @@ class Preferences
type: Bool,
default: false,
},
related_videos: {
type: Bool,
default: true,
},
dark_mode: Bool,
thin_mode: {
type: Bool,
@ -111,7 +140,7 @@ def fetch_user(sid, client, headers, db)
channels = [] of String
feed.xpath_nodes(%q(//ul[@id="guide-channels"]/li/a)).each do |channel|
if !["Popular on YouTube", "Music", "Sports", "Gaming"].includes? channel["title"]
if !{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? channel["title"]
channel_id = channel["href"].lstrip("/channel/")
begin

View File

@ -345,6 +345,11 @@ class Video
allowed_regions: Array(String),
is_family_friendly: Bool,
genre: String,
genre_url: String,
license: {
type: String,
default: "",
},
})
end
@ -366,15 +371,17 @@ def get_video(id, db, refresh = true)
if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)
video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)
# If record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
if refresh && Time.now - video.updated > 1.hour
# If record was last updated over 10 minutes ago, refresh (expire param in response lasts for 6 hours)
if refresh && Time.now - video.updated > 10.minutes
begin
video = fetch_video(id)
video_array = video.to_a
args = arg_array(video_array[1..-1], 2)
db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,\
published,description,language,author,ucid, allowed_regions, is_family_friendly, genre)\
published,description,language,author,ucid, allowed_regions, is_family_friendly,\
genre, genre_url, license)\
= (#{args}) WHERE id = $1", video_array)
rescue ex
db.exec("DELETE FROM videos * WHERE id = $1", id)
@ -384,6 +391,7 @@ def get_video(id, db, refresh = true)
else
video = fetch_video(id)
video_array = video.to_a
args = arg_array(video_array)
db.exec("INSERT INTO videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
@ -490,10 +498,19 @@ def fetch_video(id)
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
is_family_friendly = html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
genre = html.xpath_node(%q(//meta[@itemprop="genre"])).not_nil!["content"]
genre_url = html.xpath_node(%(//a[text()="#{genre}"])).not_nil!["href"]
license = html.xpath_node(%q(//h4[contains(text(),"License")]/parent::*/ul/li))
if license
license = license.content
else
license ||= ""
end
video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
nil, author, ucid, allowed_regions, is_family_friendly, genre)
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license)
return video
end
@ -504,24 +521,37 @@ end
def process_video_params(query, preferences)
autoplay = query["autoplay"]?.try &.to_i?
preferred_captions = query["subtitles"]?.try &.split(",").map { |a| a.downcase }
quality = query["quality"]?
speed = query["speed"]?.try &.to_f?
video_loop = query["loop"]?.try &.to_i?
volume = query["volume"]?.try &.to_i?
if preferences
autoplay ||= preferences.autoplay.to_unsafe
preferred_captions ||= preferences.captions
quality ||= preferences.quality
speed ||= preferences.speed
video_loop ||= preferences.video_loop.to_unsafe
volume ||= preferences.volume
end
autoplay ||= 0
autoplay = autoplay == 1
autoplay ||= 0
preferred_captions ||= [] of String
quality ||= "hd720"
speed ||= 1
video_loop ||= 0
volume ||= 100
autoplay = autoplay == 1
video_loop = video_loop == 1
if query["t"]?
video_start = decode_time(query["t"])
end
video_start ||= 0
if query["time_continu"]?
video_start = decode_time(query["t"])
if query["time_continue"]?
video_start = decode_time(query["time_continue"])
end
video_start ||= 0
if query["start"]?
@ -542,14 +572,25 @@ def process_video_params(query, preferences)
raw ||= 0
raw = raw == 1
quality = query["quality"]?
quality ||= "hd720"
controls = query["controls"]?.try &.to_i?
controls ||= 1
controls = controls == 1
return autoplay, video_loop, video_start, video_end, listen, raw, quality, controls
params = {
autoplay: autoplay,
controls: controls,
listen: listen,
preferred_captions: preferred_captions,
quality: quality,
raw: raw,
speed: speed,
video_end: video_end,
video_loop: video_loop,
video_start: video_start,
volume: volume,
}
return params
end
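# Usage sketch (illustrative query, no stored preferences): playback settings are
# now read from the returned NamedTuple rather than from positional values.
params = process_video_params(HTTP::Params.parse("quality=hd720&autoplay=1"), nil)
params[:autoplay] # => true
params[:quality]  # => "hd720"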
def generate_thumbnails(json, id)

View File

@ -51,7 +51,7 @@
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if videos.size == 100 %>
<% if videos.size == 60 %>
<a href="/channel/<%= ucid %>?page=<%= page + 1 %>">Next page</a>
<% end %>
</div>

View File

@ -1,19 +1,19 @@
<video style="width:100%" playsinline poster="<%= thumbnail %>" title="<%= HTML.escape(video.title) %>"
id="player" class="video-js"
<% if autoplay %>autoplay<% end %>
<% if video_loop %>loop<% end %>
<% if controls %>controls<% end %>>
<% if params[:autoplay] %>autoplay<% end %>
<% if params[:video_loop] %>loop<% end %>
<% if params[:controls] %>controls<% end %>>
<% if hlsvp %>
<source src="<%= hlsvp %>" type="application/x-mpegURL">
<% else %>
<% if listen %>
<% if params[:listen] %>
<% audio_streams.each_with_index do |fmt, i| %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
<% end %>
<% else %>
<% fmt_stream.each_with_index do |fmt, i| %>
<% if preferences %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= preferences.quality == fmt["label"].split(" - ")[0] %>">
<% if params[:quality] %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= params[:quality] == fmt["label"].split(" - ")[0] %>">
<% else %>
<source src="<%= fmt["url"] %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= i == 0 ? true : false %>">
<% end %>
@ -110,7 +110,7 @@ var player = videojs("player", options, function() {
player.share(shareOptions);
<% if video_start > 0 || video_end > 0 %>
<% if params[:video_start] > 0 || params[:video_end] > 0 %>
player.markers({
onMarkerReached: function(marker) {
if (marker.text === "End") {
@ -122,19 +122,19 @@ player.markers({
}
},
markers: [
{ time: <%= video_start %>, text: "Start" },
<% if video_end < 0 %>
{ time: <%= params[:video_start] %>, text: "Start" },
<% if params[:video_end] < 0 %>
{ time: <%= video.info["length_seconds"].to_f - 0.5 %>, text: "End" }
<% else %>
{ time: <%= video_end %>, text: "End" }
{ time: <%= params[:video_end] %>, text: "End" }
<% end %>
]
});
player.currentTime(<%= video_start %>);
player.currentTime(<%= params[:video_start] %>);
<% end %>
<% if !listen %>
<% if !params[:listen] %>
var currentSources = player.currentSources();
for (var i = 0; i < currentSources.length; i++) {
if (player.canPlayType(currentSources[i]["type"].split(";")[0]) === "") {
@ -146,8 +146,6 @@ for (var i = 0; i < currentSources.length; i++) {
player.src(currentSources);
<% end %>
<% if preferences %>
player.volume(<%= preferences.volume.to_f / 100 %>);
player.playbackRate(<%= preferences.speed %>);
<% end %>
player.volume(<%= params[:volume].to_f / 100 %>);
player.playbackRate(<%= params[:speed] %>);
</script>

View File

@ -1,12 +1,11 @@
<link rel="stylesheet" href="https://unpkg.com/video.js@6.12.0/dist/video-js.min.css">
<link rel="stylesheet" href="https://unpkg.com/silvermine-videojs-quality-selector@1.1.2/dist/css/quality-selector.css">
<link rel="stylesheet" href="https://unpkg.com/videojs-markers@1.0.1/dist/videojs.markers.min.css">
<link rel="stylesheet" href="https://unpkg.com/videojs-share@1.1.0/dist/videojs-share.css">
<script src="https://unpkg.com/video.js@6.12.0/dist/video.min.js"></script>
<script src="https://unpkg.com/videojs-hotkeys@0.2.22/build/videojs.hotkeys.min.js"></script>
<script src="https://unpkg.com/silvermine-videojs-quality-selector@1.1.2/dist/js/silvermine-videojs-quality-selector.min.js"></script>
<script src="https://unpkg.com/videojs-markers@1.0.1/dist/videojs-markers.min.js"></script>
<script src="https://unpkg.com/videojs-share@1.1.0/dist/videojs-share.min.js"></script>
<% if hlsvp %>
<script src="https://unpkg.com/@videojs/http-streaming@1.2.2/dist/videojs-http-streaming.min.js"></script>
<% end %>
<link rel="stylesheet" href="/css/video-js.min.css">
<link rel="stylesheet" href="/css/quality-selector.css">
<link rel="stylesheet" href="/css/videojs.markers.min.css">
<link rel="stylesheet" href="/css/videojs-share.css">
<script src="/js/video.min.js"></script>
<script src="/js/videojs.hotkeys.min.js"></script>
<script src="/js/silvermine-videojs-quality-selector.min.js"></script>
<script src="/js/videojs-markers.min.js"></script>
<script src="/js/videojs-share.min.js"></script>
<script src="/js/videojs-http-streaming.min.js"></script>
<script src="/js/videojs-contrib-quality-levels.min.js"></script>

View File

@ -1,6 +1,6 @@
<div class="pure-u-1 pure-u-md-1-4">
<div class="h-box">
<% if video.responds_to?(:playlists) %>
<% if video.responds_to?(:playlists) && !video.playlists.empty? %>
<% params = "&list=#{video.playlists[0]}" %>
<% else %>
<% params = nil %>

View File

@ -3,7 +3,7 @@
<% end %>
<div class="h-box">
<form class="pure-form pure-form-aligned" enctype="multipart/form-data" action="/data_control" method="post">
<form class="pure-form pure-form-aligned" enctype="multipart/form-data" action="/data_control?referer=<%= referer %>" method="post">
<fieldset>
<legend>Import</legend>

View File

@ -48,10 +48,19 @@ function update_value(element) {
</div>
<div class="pure-control-group">
<label for="comments">Pull comments from: </label>
<select name="comments" id="comments">
<% {"youtube", "reddit"}.each do |option| %>
<option <% if user.preferences.comments == option %> selected <% end %>><%= option %></option>
<label for="comments_0">Default comments: </label>
<select name="comments_0" id="comments_0">
<% {"", "youtube", "reddit"}.each do |option| %>
<option <% if user.preferences.comments[0] == option %> selected <% end %>><%= option %></option>
<% end %>
</select>
</div>
<div class="pure-control-group">
<label for="comments_1">Fallback comments: </label>
<select name="comments_1" id="comments_1">
<% {"", "youtube", "reddit"}.each do |option| %>
<option <% if user.preferences.comments[1] == option %> selected <% end %>><%= option %></option>
<% end %>
</select>
</div>
@ -66,7 +75,7 @@ function update_value(element) {
</div>
<div class="pure-control-group">
<label for="captions_fallback">Fallback languages: </label>
<label for="captions_fallback">Fallback captions: </label>
<select class="pure-u-1-5" name="captions_1" id="captions_1">
<% CAPTION_LANGUAGES.each do |option| %>
<option <% if user.preferences.captions[1] == option %> selected <% end %>><%= option %></option>
@ -80,7 +89,13 @@ function update_value(element) {
</select>
</div>
<div class="pure-control-group">
<label for="related_videos">Show related videos? </label>
<input name="related_videos" id="related_videos" type="checkbox" <% if user.preferences.related_videos %>checked<% end %>>
</div>
<legend>Visual preferences</legend>
<div class="pure-control-group">
<label for="dark_mode">Dark mode: </label>
<input name="dark_mode" id="dark_mode" type="checkbox" <% if user.preferences.dark_mode %>checked<% end %>>
@ -92,6 +107,7 @@ function update_value(element) {
</div>
<legend>Subscription preferences</legend>
<div class="pure-control-group">
<label for="redirect_feed">Redirect homepage to feed: </label>
<input name="redirect_feed" id="redirect_feed" type="checkbox" <% if user.preferences.redirect_feed %>checked<% end %>>
@ -127,12 +143,13 @@ function update_value(element) {
</div>
<legend>Data preferences</legend>
<div class="pure-control-group">
<a href="/clear_watch_history">Clear watch history</a>
<a href="/clear_watch_history?referer=<%= referer %>">Clear watch history</a>
</div>
<div class="pure-control-group">
<a href="/data_control">Import/Export data</a>
<a href="/data_control?referer=<%= referer %>">Import/Export data</a>
</div>
<div class="pure-control-group">

View File

@ -1,5 +1,5 @@
<% content_for "header" do %>
<title><%= query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<title><%= search_query.not_nil!.size > 30 ? query.not_nil![0,30].rstrip(".") + "..." : query.not_nil! %> - Invidious</title>
<% end %>
<% videos.each_slice(4) do |slice| %>
@ -18,6 +18,8 @@
</div>
<div class="pure-u-1 pure-u-md-3-5"></div>
<div style="text-align:right;" class="pure-u-1 pure-u-md-1-5">
<% if count == 20 %>
<a href="/search?q=<%= query %>&page=<%= page + 1 %>">Next page</a>
<% end %>
</div>
</div>

View File

@ -8,7 +8,7 @@
</div>
<div class="pure-u-1-3" style="text-align:right;">
<h3>
<a href="/data_control">Import/Export</a>
<a href="/data_control?referer=<%= referer %>">Import/Export</a>
</h3>
</div>
</div>

View File

@ -16,6 +16,13 @@
</div>
<center><%= notifications.size %> unseen notifications</center>
<% if !notifications.empty? %>
<div class="h-box">
<hr>
</div>
<% end %>
<% notifications.each_slice(4) do |slice| %>
<div class="pure-g">
<% slice.each do |video| %>

View File

@ -4,10 +4,11 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="referrer" content="no-referrer">
<%= yield_content "header" %>
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/pure-min.css">
<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/grids-responsive-min.css">
<link rel="stylesheet" href="https://unpkg.com/ionicons@4.2.6/dist/css/ionicons.min.css">
<link rel="stylesheet" href="/css/pure-min.css">
<link rel="stylesheet" href="/css/grids-responsive-min.css">
<link rel="stylesheet" href="/css/ionicons.min.css">
<link rel="stylesheet" href="/css/default.css">
<% if env.get?("user") && env.get("user").as(User).preferences.dark_mode %>
<link rel="stylesheet" href="/css/darktheme.css">
@ -18,8 +19,8 @@
<body>
<div class="pure-g">
<div class="pure-u-1 pure-u-md-4-24"></div>
<div class="pure-u-1 pure-u-md-16-24">
<div class="pure-u-1 pure-u-md-2-24"></div>
<div class="pure-u-1 pure-u-md-20-24">
<div class="pure-g navbar h-box">
<div class="pure-u-1 pure-u-md-4-24">
<a href="/" class="index-link pure-menu-heading">Invidious</a>
@ -34,7 +35,7 @@
<div class="pure-u-1 pure-u-md-8-24 user-field">
<% if env.get? "user" %>
<div class="pure-u-1-4">
<a href="/toggle_theme?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<% preferences = env.get("user").as(User).preferences %>
<% if preferences.dark_mode %>
<i class="icon ion-ios-sunny"></i>
@ -54,15 +55,15 @@
</a>
</div>
<div class="pure-u-1-4">
<a href="/preferences?referer=<%= env.get("current_page") %>" class="pure-menu-heading">
<a href="/preferences?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
<i class="icon ion-ios-cog"></i>
</a>
</div>
<div class="pure-u-1-4">
<a href="/signout?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Sign out</a>
<a href="/signout?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Sign out</a>
</div>
<% else %>
<a href="/login?referer=<%= env.get("current_page") %>" class="pure-menu-heading">Login</a>
<a href="/login?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">Login</a>
<% end %>
</div>
</div>
@ -72,6 +73,11 @@
Roth</a>.
Source available <a
href="https://github.com/omarroth/invidious">here</a>.
<p>Liberapay:
<a href="https://liberapay.com/omarroth">
https://liberapay.com/omarroth
</a>
</p>
<p>Patreon:
<a href="https://patreon.com/omarroth">
https://patreon.com/omarroth
@ -81,8 +87,7 @@
<p>BCH: qq4ptclkzej5eza6a50et5ggc58hxsq5aylqut2npk</p>
</div>
</div>
<div class="pure-u-1 pure-u-md-4-24"></div>
</div>
<div class="pure-u-1 pure-u-md-2-24"></div>
</body>
</html>

View File

@ -30,163 +30,10 @@
<%= rendered "components/player" %>
</div>
<script>
function toggle(target) {
body = target.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function toggle_comments(target) {
body = target.parentNode.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function load_comments(target) {
var continuation = target.getAttribute("data-continuation");
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url =
"/api/v1/comments/<%= video.id %>?format=html&continuation=" + continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
body.innerHTML = xhr.response.contentHtml;
} else {
body.innerHTML = fallback;
}
}
};
xhr.ontimeout = function() {
body.innerHTML = fallback;
};
}
function get_reddit_comments() {
var url = "/api/v1/comments/<%= video.id %>?source=reddit";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
{title}
</h3>
<b>
<a target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a>
</b>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
title: xhr.response.title,
permalink: xhr.response.permalink,
contentHtml: xhr.response.contentHtml
});
} else {
get_youtube_comments();
}
};
xhr.ontimeout = function() {
get_reddit_comments();
};
}
function get_youtube_comments() {
var url = "/api/v1/comments/<%= video.id %>?format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
} else {
comments = document.getElementById("comments");
comments.innerHTML = "";
}
};
xhr.ontimeout = function() {
comments = document.getElementById("comments");
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
get_youtube_comments();
};
}
function commaSeparateNumber(val){
while (/(\d+)(\d{3})/.test(val.toString())){
val = val.toString().replace(/(\d+)(\d{3})/, '$1'+','+'$2');
}
return val;
}
String.prototype.supplant = function(o) {
return this.replace(/{([^{}]*)}/g, function(a, b) {
var r = o[b];
return typeof r === "string" || typeof r === "number" ? r : a;
});
};
<% if preferences && preferences.comments == "reddit" %>
get_reddit_comments();
<% else %>
get_youtube_comments();
<% end %>
</script>
<div class="h-box">
<h1>
<%= HTML.escape(video.title) %>
<% if listen %>
<% if params[:listen] %>
<a href="/watch?<%= env.params.query %>">
<i class="icon ion-ios-videocam"></i>
</a>
@ -208,7 +55,10 @@ get_youtube_comments();
<p><i class="icon ion-ios-eye"></i> <%= number_with_separator(video.views) %></p>
<p><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
<p><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
<p id="Genre">Genre: <%= video.genre %></p>
<p id="Genre">Genre: <a href="<%= video.genre_url %>"><%= video.genre %></a></p>
<% if !video.license.empty? %>
<p id="License">License: <%= video.license %></p>
<% end %>
<p id="FamilyFriendly">Family Friendly? <%= video.is_family_friendly %></p>
<p id="Wilson">Wilson Score: <%= video.wilson_score.round(4) %></p>
<p id="Rating">Rating: <%= rating.round(4) %> / 5</p>
@ -266,6 +116,7 @@ get_youtube_comments();
</div>
</div>
<div class="pure-u-1 pure-u-md-1-5">
<% if preferences && preferences.related_videos %>
<div class="h-box">
<% rvs.each do |rv| %>
<% if rv.has_key?("id") %>
@ -282,5 +133,191 @@ get_youtube_comments();
<% end %>
<% end %>
</div>
<% end %>
</div>
</div>
<script>
function toggle(target) {
body = target.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function toggle_comments(target) {
body = target.parentNode.parentNode.parentNode.children[1];
if (body.style.display === null || body.style.display === "") {
target.innerHTML = "[ + ]";
body.style.display = "none";
} else {
target.innerHTML = "[ - ]";
body.style.display = "";
}
}
function get_youtube_replies(target) {
var continuation = target.getAttribute("data-continuation");
var body = target.parentNode.parentNode;
var fallback = body.innerHTML;
body.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
var url =
"/api/v1/comments/<%= video.id %>?format=html&continuation=" + continuation;
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
body.innerHTML = xhr.response.contentHtml;
} else {
body.innerHTML = fallback;
}
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
body.innerHTML = fallback;
};
}
function get_reddit_comments() {
var url = "/api/v1/comments/<%= video.id %>?source=reddit&format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
{title}
</h3>
<b>
<a target="_blank" href="https://reddit.com{permalink}">View more comments on Reddit</a>
</b>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
title: xhr.response.title,
permalink: xhr.response.permalink,
contentHtml: xhr.response.contentHtml
});
} else {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
get_reddit_comments();
};
}
function get_youtube_comments() {
var url = "/api/v1/comments/<%= video.id %>?format=html";
var xhr = new XMLHttpRequest();
xhr.responseType = "json";
xhr.timeout = 20000;
xhr.open("GET", url, true);
xhr.send();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4)
if (xhr.status == 200) {
comments = document.getElementById("comments");
if (xhr.response.commentCount > 0) {
comments.innerHTML = `
<div>
<h3>
<a href="javascript:void(0)" onclick="toggle_comments(this)">[ - ]</a>
View {commentCount} comments
</h3>
</div>
<div>{contentHtml}</div>
<hr>`.supplant({
contentHtml: xhr.response.contentHtml,
commentCount: commaSeparateNumber(xhr.response.commentCount)
});
} else {
comments.innerHTML = "";
}
} else {
<% if preferences && preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
}
};
xhr.ontimeout = function() {
console.log("Pulling comments timed out.");
comments = document.getElementById("comments");
comments.innerHTML =
'<h3><center class="loading"><i class="icon ion-ios-refresh"></i></center></h3>';
get_youtube_comments();
};
}
function commaSeparateNumber(val){
while (/(\d+)(\d{3})/.test(val.toString())){
val = val.toString().replace(/(\d+)(\d{3})/, '$1'+','+'$2');
}
return val;
}
String.prototype.supplant = function(o) {
return this.replace(/{([^{}]*)}/g, function(a, b) {
var r = o[b];
return typeof r === "string" || typeof r === "number" ? r : a;
});
};
<% if preferences %>
<% if preferences.comments[0] == "youtube" %>
get_youtube_comments();
<% elsif preferences.comments[0] == "reddit" %>
get_reddit_comments();
<% else %>
<% if preferences.comments[1] == "youtube" %>
get_youtube_comments();
<% elsif preferences.comments[1] == "reddit" %>
get_reddit_comments();
<% else %>
comments = document.getElementById("comments");
comments.innerHTML = "";
<% end %>
<% end %>
<% else %>
get_youtube_comments();
<% end %>
</script>