# invidious-experimenting/src/helpers.cr
# Cached representation of a YouTube video, persisted via crystal-db.
class Video
  # Deserializes the `info` column (a raw query string) back into HTTP::Params.
  module HTTPParamConverter
    def self.from_rs(rs)
      HTTP::Params.parse(rs.read(String))
    end
  end

  # Deserializes the `html` column (the raw watch page) back into an XML node tree.
  module XMLConverter
    def self.from_rs(rs)
      XML.parse_html(rs.read(String))
    end
  end

  # Shorthand initializer: assigns each argument to the ivar of the same name.
  def initialize(@id, @info, @html, @updated, @title, @views, @likes, @dislikes, @wilson_score, @published)
  end

  # Column values in table order, for use as bind parameters in INSERT/UPDATE.
  def to_a
    [@id, @info, @html, @updated, @title, @views, @likes, @dislikes, @wilson_score, @published]
  end

  DB.mapping({
    id:   String,
    info: {
      type:      HTTP::Params,
      default:   HTTP::Params.parse(""),
      converter: Video::HTTPParamConverter,
    },
    html: {
      type:      XML::Node,
      default:   XML.parse_html(""),
      converter: Video::XMLConverter,
    },
    updated:      Time,
    title:        String,
    views:        Int64,
    likes:        Int32,
    dislikes:     Int32,
    wilson_score: Float64,
    published:    Time,
  })
end
# Lower bound of the Wilson score confidence interval for a Bernoulli
# parameter, given `pos` positive ratings out of `n` total ratings.
# Returns 0.0 when there are no ratings.
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
def ci_lower_bound(pos, n)
  return 0.0 if n == 0

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0 * pos / n
  numerator = phat + z * z / (2 * n) - z * Math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)
  numerator / (1 + z * z / n)
end
# Formats a Time::Span as a short human-readable duration:
# milliseconds when >= 1 ms, microseconds below that.
def elapsed_text(elapsed)
  ms = elapsed.total_milliseconds
  if ms >= 1
    "#{ms.round(2)}ms"
  else
    "#{(ms * 1000).round(2)}µs"
  end
end
# Takes the next available client from the pool, busy-waiting with a short
# random sleep while the pool is empty.
def get_client(pool)
  sleep rand(0..10).milliseconds while pool.empty?
  pool.shift
end
# Fetches a video's metadata and watch page from YouTube and builds a Video.
# Raises when YouTube reports a failure reason or the published date is
# missing/unparseable.
def fetch_video(id, client)
  info = client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en").body
  html = client.get("/watch?v=#{id}").body

  html = XML.parse_html(html)
  info = HTTP::Params.parse(info)

  # "el=detailpage" fails for some videos; retry once without it before giving up.
  if info["reason"]?
    info = client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en").body
    info = HTTP::Params.parse(info)
    if info["reason"]?
      raise info["reason"]
    end
  end

  title = info["title"]
  views = info["view_count"].to_i64

  # Rating counts are rendered with thousands separators ("1,234"); missing
  # nodes (ratings disabled) count as 0.
  likes = html.xpath_node(%q(//button[@title="I like this"]/span))
  likes = likes ? likes.content.delete(",").to_i : 0

  dislikes = html.xpath_node(%q(//button[@title="I dislike this"]/span))
  dislikes = dislikes ? dislikes.content.delete(",").to_i : 0

  wilson_score = ci_lower_bound(likes, likes + dislikes)

  published = html.xpath_node(%q(//strong[contains(@class,"watch-time-text")]))
  if published
    published = published.content
  else
    raise "Could not find date published"
  end

  # Strip the assorted prefixes YouTube puts in front of the date text.
  published = published.lchop("Published ")
  published = published.lchop("Streamed live ")
  published = published.lchop("Started streaming ")
  published = published.lchop("on ")
  published = published.lchop("Scheduled for ")

  if !published.includes?("ago")
    published = Time.parse(published, "%b %-d, %Y")
  else
    # Relative timestamps: "40 minutes ago", "20 hours ago", "1 day ago"...
    # BUG FIX: previously only "minute" and "hour" were handled, so recent
    # uploads showing "X seconds/days/weeks ago" raised instead of parsing.
    delta = published.split(" ")[0].to_i
    case published
    when .includes? "second"
      published = Time.now - delta.seconds
    when .includes? "minute"
      published = Time.now - delta.minutes
    when .includes? "hour"
      published = Time.now - delta.hours
    when .includes? "day"
      published = Time.now - delta.days
    when .includes? "week"
      published = Time.now - delta.weeks
    else
      raise "Could not parse #{published}"
    end
  end

  video = Video.new(id, info, html, Time.now, title, views, likes, dislikes, wilson_score, published)

  return video
end
# Looks a video up in the cache table, fetching (and caching) it from YouTube
# when absent, and re-fetching when the cached row is stale and `refresh` is set.
def get_video(id, client, db, refresh = true)
  exists = db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)

  if exists
    video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)

    # If record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
    stale = Time.now - video.updated > 1.hours
    if refresh && stale
      video = fetch_video(id, client)
      db.exec("UPDATE videos SET info = $2, html = $3, updated = $4, title = $5, views = $6, likes = $7, dislikes = $8, wilson_score = $9, published = $10 WHERE id = $1", video.to_a)
    end
  else
    video = fetch_video(id, client)
    db.exec("INSERT INTO videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", video.to_a)
  end

  return video
end
# Runs a YouTube search for `query` and yields each result's video ID.
# sp=EgIQAVAU restricts results to videos.
def search(query, client)
  # BUG FIX: escape the query — raw interpolation broke the URL for queries
  # containing spaces, '&', '#', etc.
  html = client.get("https://www.youtube.com/results?q=#{URI.escape(query)}&sp=EgIQAVAU").body

  html = XML.parse_html(html)

  html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |item|
    root = item.xpath_node(%q(div[contains(@class,"yt-lockup-video")]/div))
    if root
      link = root.xpath_node(%q(div[contains(@class,"yt-lockup-thumbnail")]/a/@href))
      if link
        # Thumbnail links look like "/watch?v=<id>"; take the id.
        yield link.content.split("=")[1]
      end
    end
  end
end
# Undoes YouTube's signature scrambling: drop the first three characters,
# reverse the remainder, then swap the first character with the one at
# index 49. NOTE(review): this transform is hard-coded to the player build
# current at the time; YouTube rotates it regularly.
def decrypt_signature(a)
  chars = a.split("")[3..-1].reverse
  head = chars[0]
  chars[0] = chars[49 % chars.size]
  chars[49] = head
  chars.join("")
end
# Ranks recently-published, well-viewed videos by Wilson score with an
# exponential time decay, returning the IDs of the top `n`, hottest first.
def rank_videos(db, n)
  top = [] of {Float64, String}

  db.query("SELECT id, wilson_score, published FROM videos WHERE views > 5000 ORDER BY published DESC LIMIT 10000") do |rs|
    rs.each do
      id = rs.read(String)
      wilson_score = rs.read(Float64)
      published = rs.read(Time)

      # Exponential decay, older videos tend to rank lower.
      # BUG FIX: use total_hours — Time::Span#hours is only the hour
      # component (0..23), which made the decay wrap around every day.
      temperature = wilson_score * Math.exp(-0.02*((Time.now - published).total_hours))
      top << {temperature, id}
    end
  end

  top.sort!

  # Make hottest come first
  top.reverse!

  # BUG FIX: return the first n IDs — `top[1..n]` skipped index 0, silently
  # dropping the hottest video. `first(n)` also tolerates fewer than n rows.
  return top.first(n).map { |a, b| b }
end