# 2018-03-08 05:28:33 +05:30
# Generates the boilerplate for a DB-backed record type from a NamedTuple
# literal: a positional `initialize` over the mapping's keys, a `to_a` that
# returns the fields in declaration order, and the `DB.mapping` itself.
macro add_mapping(mapping)
  def initialize({{*mapping.keys.map { |id| "@#{id}".id }}})
  end

  def to_a
    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
  end

  DB.mapping({{mapping}})
end
# Renders the named ECR view inside the shared site layout.
macro templated(filename)
  render "src/views/#{{{filename}}}.ecr", "src/views/layout.ecr"
end
# 2018-03-10 00:12:23 +05:30
# Application configuration loaded from YAML (connection pool size, worker
# thread count, Postgres connection details, optional DetectLanguage API key).
class Config
  YAML.mapping({
    pool_size: Int32,
    threads:   Int32,
    db: NamedTuple(
      user: String,
      password: String,
      host: String,
      port: Int32,
      dbname: String,
    ),
    dl_api_key: String?,
  })
end
# 2018-01-28 07:39:27 +05:30
# A cached YouTube video record, persisted via `add_mapping`.
class Video
  # Converts the `info` column (a serialized query string) back into
  # HTTP::Params when reading from a result set.
  module HTTPParamConverter
    def self.from_rs(rs)
      HTTP::Params.parse(rs.read(String))
    end
  end

  add_mapping({
    id: String,
    info: {
      type:      HTTP::Params,
      default:   HTTP::Params.parse(""),
      converter: Video::HTTPParamConverter,
    },
    updated:      Time,
    title:        String,
    views:        Int64,
    likes:        Int32,
    dislikes:     Int32,
    wilson_score: Float64,
    published:    Time,
    description:  String,
    language:     String?,
  })
end
# 2018-03-25 09:08:35 +05:30
# A subscribed channel record, persisted via `add_mapping`.
class InvidiousChannel
  # Parses an XML/HTML column back into an XML node when reading from a
  # result set.
  module XMLConverter
    def self.from_rs(rs)
      XML.parse_html(rs.read(String))
    end
  end

  add_mapping({
    id:      String,
    author:  String,
    updated: Time,
  })
end
# A single video entry from a channel's RSS feed, persisted via `add_mapping`.
class ChannelVideo
  add_mapping({
    id:        String,
    title:     String,
    published: Time,
    updated:   Time,
    ucid:      String,
    author:    String,
  })
end
# 2018-03-30 08:11:05 +05:30
# A signed-in user: pending notifications (video ids), channel subscriptions,
# and the account email. Persisted via `add_mapping`.
class User
  add_mapping({
    id:            String,
    updated:       Time,
    notifications: Array(String),
    subscriptions: Array(String),
    email:         String,
  })
end
# 2018-03-04 02:36:14 +05:30
# Top-level envelope of Reddit's /search.json response.
class RedditSubmit
  JSON.mapping({
    data: RedditSubmitData,
  })
end
# `data` payload of a Reddit search response: the list of matching threads.
class RedditSubmitData
  JSON.mapping({
    children: Array(RedditThread),
  })
end
# Envelope around a single Reddit submission.
class RedditThread
  JSON.mapping({
    data: RedditThreadData,
  })
end
# Fields of a Reddit submission used to pick and link the comment thread.
class RedditThreadData
  JSON.mapping({
    subreddit:    String,
    id:           String,
    num_comments: Int32,
    score:        Int32,
    author:       String,
    permalink:    String,
    title:        String,
  })
end
# 2018-01-21 05:49:12 +05:30
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
#
# Lower bound of the Wilson score confidence interval for a Bernoulli
# parameter: `pos` positive ratings out of `n` total. Returns 0.0 when there
# are no ratings.
def ci_lower_bound(pos, n)
  return 0.0 if n == 0

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0 * pos / n

  return (phat + z * z / (2 * n) - z * Math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)) / (1 + z * z / n)
end
# Formats a time span as "X.XXms" when at least one millisecond elapsed,
# otherwise as microseconds ("X.XXµs").
def elapsed_text(elapsed)
  millis = elapsed.total_milliseconds
  return "#{millis.round(2)}ms" if millis >= 1

  "#{(millis * 1000).round(2)}µs"
end
# 2018-01-28 07:39:27 +05:30
# Checks a client out of the shared pool, busy-waiting (with a small random
# sleep) while the pool is empty. Callers must return the client afterwards
# with `pool << client`.
def get_client(pool)
  while pool.empty?
    sleep rand(0..10).milliseconds
  end

  return pool.shift
end
# 2018-01-28 07:39:27 +05:30
# Scrapes a single video's metadata from YouTube's get_video_info endpoint
# and watch page, returning a new Video (language is left nil here).
# Raises if YouTube reports a failure reason or the publish date is missing.
def fetch_video(id, client)
  info = client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en").body
  # A bpctr timestamp in the future bypasses the content-warning interstitial
  html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}").body

  html = XML.parse_html(html)
  info = HTTP::Params.parse(info)

  if info["reason"]?
    # Retry without el=detailpage, which fails for some videos
    info = client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en").body
    info = HTTP::Params.parse(info)
    if info["reason"]?
      raise info["reason"]
    end
  end

  title = info["title"]
  views = info["view_count"].to_i64

  likes = html.xpath_node(%q(//button[@title="I like this"]/span))
  likes = likes.try &.content.delete(",").try &.to_i
  likes ||= 0

  dislikes = html.xpath_node(%q(//button[@title="I dislike this"]/span))
  dislikes = dislikes.try &.content.delete(",").try &.to_i
  dislikes ||= 0

  description = html.xpath_node(%q(//p[@id="eow-description"]))
  description = description ? description.to_xml : ""

  wilson_score = ci_lower_bound(likes, likes + dislikes)

  published = html.xpath_node(%q(//strong[contains(@class,"watch-time-text")]))
  if published
    published = published.content
  else
    raise "Could not find date published"
  end

  # Strip the assorted prefixes YouTube places before the date
  published = published.lchop("Published ")
  published = published.lchop("Started streaming ")
  published = published.lchop("Streamed live ")
  published = published.lchop("Uploaded ")
  published = published.lchop("on ")
  published = published.lchop("Scheduled for ")

  if !published.includes?("ago")
    published = Time.parse(published, "%b %-d, %Y")
  else
    # Time matches format "20 hours ago", "40 minutes ago"...
    delta = published.split(" ")[0].to_i
    case published
    when .includes? "minute"
      published = Time.now - delta.minutes
    when .includes? "hour"
      published = Time.now - delta.hours
    else
      raise "Could not parse #{published}"
    end
  end

  video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description, nil)

  return video
end
# 2018-01-28 07:39:27 +05:30
# Returns the Video for `id`, reading from the cache table when present and
# (optionally) refreshing stale rows; otherwise fetches and inserts it.
# On a failed refresh the cached row is dropped so the next request re-fetches.
def get_video(id, client, db, refresh = true)
  if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)
    video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)

    # If record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
    if refresh && Time.now - video.updated > 1.hour
      begin
        video = fetch_video(id, client)
        video_array = video.to_a
        # $1 is the id; the updated columns bind to $2 onwards
        args = arg_array(video_array[1..-1], 2)

        db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,published,description,language) = (#{args}) WHERE id = $1", video_array)
      rescue ex
        db.exec("DELETE FROM videos * WHERE id = $1", id)
      end
    end
  else
    video = fetch_video(id, client)
    args = arg_array(video.to_a)
    db.exec("INSERT INTO videos VALUES (#{args})", video.to_a)
  end

  return video
end
# 2018-01-22 05:19:27 +05:30
# 2018-01-28 07:39:27 +05:30
# Scrapes YouTube search results for `query` and yields each result's video
# id (the value after "=" in the thumbnail link's href).
def search(query, client)
  html = client.get("https://www.youtube.com/results?q=#{query}&sp=EgIQAVAU").body
  html = XML.parse_html(html)

  html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |item|
    root = item.xpath_node(%q(div[contains(@class,"yt-lockup-video")]/div))
    if root
      link = root.xpath_node(%q(div[contains(@class,"yt-lockup-thumbnail")]/a/@href))
      if link
        yield link.content.split("=")[1]
      end
    end
  end
end
# 2018-02-04 02:11:59 +05:30
# 2018-02-23 00:31:37 +05:30
# Swaps element 0 with element (b % a.size) in place and returns the array.
# One of the primitives of YouTube's signature scramble.
def splice(a, b)
  c = a[0]
  a[0] = a[b % a.size]
  a[b % a.size] = c

  return a
end
# 2018-02-23 00:31:37 +05:30
# Descrambles a YouTube stream signature. The exact sequence of reverse /
# delete / splice steps mirrors the cipher in YouTube's player at the time
# this was written and must be updated whenever the player changes.
def decrypt_signature(a)
  a = a.split("")

  a.reverse!
  a.delete_at(0..2)
  a.reverse!
  a.delete_at(0..2)
  a = splice(a, 38)
  a.delete_at(0..0)
  a = splice(a, 64)
  a.reverse!
  a.delete_at(0..1)

  return a.join("")
end
# 2018-03-17 06:06:49 +05:30
# Returns up to `n` video ids ranked by a time-decayed Wilson score.
# When `filter` is truthy, only English-language videos are returned,
# detecting (and caching) the language via DetectLanguage when the record
# has none.
def rank_videos(db, n, pool, filter)
  top = [] of {Float64, String}

  db.query("SELECT id, wilson_score, published FROM videos WHERE views > 5000 ORDER BY published DESC LIMIT 1000") do |rs|
    rs.each do
      id = rs.read(String)
      wilson_score = rs.read(Float64)
      published = rs.read(Time)

      # Exponential decay, older videos tend to rank lower
      temperature = wilson_score * Math.exp(-0.000005 * ((Time.now - published).total_minutes))
      top << {temperature, id}
    end
  end

  top.sort!

  # Make hottest come first
  top.reverse!
  top = top.map { |a, b| b }

  if filter
    language_list = [] of String
    top.each do |id|
      if language_list.size == n
        break
      else
        client = get_client(pool)

        begin
          video = get_video(id, client, db)
        rescue ex
          next
        end

        pool << client

        if video.language
          language = video.language
        else
          description = XML.parse(video.description)
          content = [video.title, description.content].join(" ")
          # DetectLanguage limits request size; truncate the sample
          content = content[0, 10000]

          results = DetectLanguage.detect(content)
          language = results[0].language

          db.exec("UPDATE videos SET language = $1 WHERE id = $2", language, id)
        end

        if language == "en"
          language_list << id
        end
      end
    end
    return language_list
  else
    return top[0..n - 1]
  end
end
# 2018-02-06 06:37:49 +05:30
# 2018-03-05 09:55:03 +05:30
# Builds an HTTP::Client for `url` with SSLv2/v3 disabled and 10-second
# read/connect timeouts.
def make_client(url)
  context = OpenSSL::SSL::Context::Client.new
  context.add_options(
    OpenSSL::SSL::Options::ALL |
    OpenSSL::SSL::Options::NO_SSL_V2 |
    OpenSSL::SSL::Options::NO_SSL_V3
  )

  client = HTTP::Client.new(url, context)
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds

  return client
end
# 2018-03-04 02:36:14 +05:30
# 2018-03-04 22:29:03 +05:30
# Finds the highest-scoring Reddit submission linking to video `id` and
# returns {comments, thread}: the thread's comment listing (parsed JSON)
# and the submission itself. Follows one 302 redirect; raises on other
# status codes.
def get_reddit_comments(id, client, headers)
  query = "(url:3D#{id}%20OR%20url:#{id})%20(site:youtube.com%20OR%20site:youtu.be)"
  search_results = client.get("/search.json?q=#{query}", headers)

  if search_results.status_code == 200
    search_results = RedditSubmit.from_json(search_results.body)

    # Pick the submission with the highest score
    thread = search_results.data.children.sort_by { |child| child.data.score }[-1]
    result = client.get("/r/#{thread.data.subreddit}/comments/#{thread.data.id}?limit=100&sort=top", headers).body
    result = JSON.parse(result)
  elsif search_results.status_code == 302
    search_results = client.get(search_results.headers["Location"], headers).body
    result = JSON.parse(search_results)
    thread = RedditThread.from_json(result[0]["data"]["children"][0].to_json)
  else
    raise "Got error code #{search_results.status_code}"
  end

  comments = result[1]["data"]["children"]

  return comments, thread
end
# Recursively renders a Reddit comment listing (`root` is the parsed
# "children" array) into the HTML shown on the watch page. Nested replies
# are indented with Pure CSS grid classes.
def template_comments(root)
  html = ""
  root.each do |child|
    if child["data"]["body_html"]?
      author = child["data"]["author"]
      score = child["data"]["score"]
      body_html = HTML.unescape(child["data"]["body_html"].as_s)
      # Replace local links with links back to Reddit
      body_html = fill_links(body_html, "https", "www.reddit.com")

      replies_html = ""
      if child["data"]["replies"] != ""
        replies_html = template_comments(child["data"]["replies"]["data"]["children"])
      end

      content = <<-END_HTML
      <p>
        <a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a> #{score} <b>#{author}</b>
      </p>
      <div>
      #{body_html}
      #{replies_html}
      </div>
      END_HTML

      if child["data"]["depth"].as_i > 0
        html += <<-END_HTML
        <div class="pure-g">
          <div class="pure-u-1-24"></div>
          <div class="pure-u-23-24">
          #{content}
          </div>
        </div>
        END_HTML
      else
        html += <<-END_HTML
        <div class="pure-g">
          <div class="pure-u-1">
          #{content}
          </div>
        </div>
        END_HTML
      end
    end
  end

  return html
end
# 2018-03-04 02:40:56 +05:30
# Formats a number with thousands separators, e.g. 1234567 -> "1,234,567".
def number_with_separator(number)
  number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
# 2018-03-04 20:24:19 +05:30
# 2018-03-30 05:33:00 +05:30
# Builds a Postgres placeholder list for `array.size` parameters starting at
# `$#{start}`, e.g. arg_array([a, b, c]) -> "($1),($2),($3)".
def arg_array(array, start = 1)
  args = (start..array.size + start - 1).map { |i| "($#{i})" }

  return args.join(",")
end
# 2018-03-07 09:30:35 +05:30
# Appends an Invidious "alt link" icon after every anchor in `html` that
# points at a YouTube watch URL (youtube.com/watch or youtu.be), returning
# the augmented HTML string.
def add_alt_links(html)
  alt_links = [] of {Int32, String}

  # This is painful but is likely the only way to accomplish this in Crystal,
  # as Crystigiri and others are not able to insert XML Nodes into a document.
  # The goal here is to use as little regex as possible
  html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
    anchor = XML.parse_html(match[0])
    anchor = anchor.xpath_node("//a").not_nil!
    url = URI.parse(HTML.unescape(anchor["href"]))

    if ["www.youtube.com", "m.youtube.com"].includes?(url.host) && url.path == "/watch"
      alt_link = <<-END_HTML
      <a href="#{url.full_path}">
        <i class="fa fa-link" aria-hidden="true"></i>
      </a>
      END_HTML
    elsif url.host == "youtu.be"
      alt_link = <<-END_HTML
      <a href="/watch?v=#{url.full_path.lchop("/")}">
        <i class="fa fa-link" aria-hidden="true"></i>
      </a>
      END_HTML
    else
      alt_link = ""
    end

    alt_links << {match.end.not_nil!, alt_link}
  end

  # Insert from the end backwards so earlier positions stay valid
  alt_links.reverse!
  alt_links.each do |position, alt_link|
    html = html.insert(position, alt_link)
  end

  return html
end
# Rewrites every relative anchor in `html` to an absolute URL on
# `scheme`://`host` and returns the re-serialized markup.
def fill_links(html, scheme, host)
  html = XML.parse_html(html)

  html.xpath_nodes("//a").each do |match|
    url = URI.parse(match["href"])
    # Reddit links don't have host
    if !url.host
      url.scheme = scheme
      url.host = host
      match["href"] = url
    end
  end

  # Return value is the serialized document
  html = html.to_xml
end
# 2018-03-16 22:10:29 +05:30
# Builds the url-encoded body for Google's GlifWebSignIn login request,
# merging the scraped `login_form` fields over the static defaults.
def login_req(login_form, f_req)
  data = {
    "pstMsg"          => "1",
    "checkConnection" => "youtube",
    "checkedDomains"  => "youtube",
    "hl"              => "en",
    "deviceinfo"      => %q([null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]),
    "f.req"           => f_req,
    "flowName"        => "GlifWebSignIn",
    "flowEntry"       => "ServiceLogin",
  }

  data = data.merge(login_form)

  return HTTP::Params.encode(data)
end
# 2018-03-25 09:08:35 +05:30
# Returns the InvidiousChannel for `id`, reading from the channels table and
# re-fetching when the cached row is older than a minute; inserts new rows
# for unknown channels.
def get_channel(id, client, db)
  if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
    channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)

    if Time.now - channel.updated > 1.minutes
      channel = fetch_channel(id, client, db)
      channel_array = channel.to_a
      args = arg_array(channel_array)

      # $3 is the `updated` column of the mapping
      db.exec("INSERT INTO channels VALUES (#{args}) ON CONFLICT (id) DO UPDATE SET updated = $3", channel_array)
    end
  else
    channel = fetch_channel(id, client, db)
    args = arg_array(channel.to_a)
    db.exec("INSERT INTO channels VALUES (#{args})", channel.to_a)
  end

  return channel
end
# 2018-03-29 08:59:54 +05:30
# Fetches a channel's RSS feed, replaces its rows in channel_videos, queues
# notifications for subscribed users, and returns a fresh InvidiousChannel.
def fetch_channel(id, client, db)
  rss = client.get("/feeds/videos.xml?channel_id=#{id}").body
  rss = XML.parse_html(rss)

  # Rebuild this channel's video rows from the feed
  db.exec("DELETE FROM channel_videos * WHERE ucid = $1", id)
  rss.xpath_nodes("//feed/entry").each do |entry|
    video_id = entry.xpath_node("videoid").not_nil!.content
    title = entry.xpath_node("title").not_nil!.content
    published = Time.parse(entry.xpath_node("published").not_nil!.content, "%FT%X%z")
    updated = Time.parse(entry.xpath_node("updated").not_nil!.content, "%FT%X%z")
    author = entry.xpath_node("author/name").not_nil!.content
    ucid = entry.xpath_node("channelid").not_nil!.content

    video = ChannelVideo.new(video_id, title, published, updated, ucid, author)
    video_array = video.to_a
    args = arg_array(video_array)

    # Append the video id to subscribers' notifications unless already present
    db.exec("UPDATE users SET notifications = notifications || $1 \
    WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video_id, published, ucid)
    # UPDATE users SET notifications = notifications || ARRAY['Os9Rypn2rEQ'] WHERE updated < '2018-03-24 20:48:46' AND 'UCSc16oMxxlcJSb9SXkjwMjA' = ANY(subscriptions) AND 'Os9Rypn2rEQ' <> ALL (notifications);

    # TODO: Update record on conflict
    db.exec("INSERT INTO channel_videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
  end

  author = rss.xpath_node("//feed/author/name").not_nil!.content

  channel = InvidiousChannel.new(id, author, Time.now)

  return channel
end
# 2018-03-30 08:11:05 +05:30
# Returns the User for session id `sid`, re-scraping their subscriptions
# when the cached row is older than a minute; inserts new users. On refresh,
# notifications are reset alongside the updated subscription list.
def get_user(sid, client, headers, db)
  if db.query_one?("SELECT EXISTS (SELECT true FROM users WHERE id = $1)", sid, as: Bool)
    user = db.query_one("SELECT * FROM users WHERE id = $1", sid, as: User)

    if Time.now - user.updated > 1.minutes
      user = fetch_user(sid, client, headers)
      user_array = user.to_a
      args = arg_array(user_array)

      db.exec("INSERT INTO users VALUES (#{args}) \
      ON CONFLICT (email) DO UPDATE SET id = $1, updated = $2, notifications = ARRAY[]::text[], subscriptions = $4", user_array)
    end
  else
    user = fetch_user(sid, client, headers)
    user_array = user.to_a
    args = arg_array(user.to_a)

    db.exec("INSERT INTO users VALUES (#{args}) \
    ON CONFLICT (email) DO UPDATE SET id = $1, updated = $2, subscriptions = $4", user_array)
  end

  return user
end
# Scrapes the signed-in user's subscription manager page, ensures each
# subscribed channel exists in the channels table, and returns a new User
# (with empty notifications).
def fetch_user(sid, client, headers)
  feed = client.get("/subscription_manager?disable_polymer=1", headers).body

  channels = [] of String
  feed = XML.parse_html(feed)
  feed.xpath_nodes(%q(//a[@class="subscription-title yt-uix-sessionlink"]/@href)).each do |channel|
    # NOTE(review): lstrip strips the *character set* "/channel/", not the
    # literal prefix; it happens to work for UC… ids — confirm before changing
    channel_id = channel.content.lstrip("/channel/").not_nil!
    get_channel(channel_id, client, PG_DB)
    channels << channel_id
  end

  email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
  if email
    email = email.content.lstrip.rstrip
  else
    email = ""
  end

  user = User.new(sid, Time.now, [] of String, channels, email)

  return user
end