require "./macros"
|
|
|
|
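# A single-use nonce and its expiry time, as stored in the database.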
struct Nonce
  include DB::Serializable

  property nonce : String
  property expire : Time
end

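# An authenticated user session: opaque session ID, account email, and
# the time the session was issued.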
struct SessionId
  include DB::Serializable

  property id : String
  property email : String
  property issued : String
end

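# Cached annotation XML for a video, keyed by video ID.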
struct Annotation
  include DB::Serializable

  property id : String
  property annotations : String
end

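# Converts a video description from HTML to plain text: `<br>` variants
# become newlines, all other markup is stripped, and surrounding
# newlines/spaces are trimmed.
#
# Illustrative example:
#   html_to_content("Hello<br>world") # => "Hello\nworld"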
def html_to_content(description_html : String)
  description = description_html.gsub(/(<br>)|(<br\/>)/, {
    "<br>":  "\n",
    "<br/>": "\n",
  })

  if !description.empty?
    description = XML.parse_html(description).content.strip("\n ")
  end

  return description
end

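# Stores a video's annotation XML in the database, but only when
# annotation caching is enabled and the document contains at least one
# legacy annotation (any type other than "branding", "card" or "drawer").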
def cache_annotation(id, annotations)
  if !CONFIG.cache_annotations
    return
  end

  body = XML.parse(annotations)
  nodeset = body.xpath_nodes(%q(/document/annotations/annotation))

  return if nodeset.empty?

  has_legacy_annotations = false
  nodeset.each do |node|
    if !{"branding", "card", "drawer"}.includes? node["type"]?
      has_legacy_annotations = true
      break
    end
  end

  Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations
end

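# Serves an endless server-sent events (SSE) stream of new-video
# notifications for the given topics. The connection is registered with
# `connection_channel` so PostgreSQL NOTIFY payloads get relayed to it;
# `?since=<unix>` backfills missed events, and the special "debug" topic
# emits synthetic events built from TEST_IDS.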
def create_notification_stream(env, topics, connection_channel)
  connection = Channel(PQ::Notification).new(8)
  connection_channel.send({true, connection})

  locale = env.get("preferences").as(Preferences).locale

  since = env.params.query["since"]?.try &.to_i?
  id = 0

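  # Debug stream: emit a random TEST_IDS video every minute, with a
  # randomized published time (5..34 of one unit among days, hours,
  # minutes, or seconds in the past).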
  if topics.includes? "debug"
    spawn do
      begin
        loop do
          time_span = [0, 0, 0, 0]
          time_span[rand(4)] = rand(30) + 5
          published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3])
          video_id = TEST_IDS[rand(TEST_IDS.size)]

          video = get_video(video_id)
          video.published = published
          response = JSON.parse(video.to_json(locale, nil))

          if fields_text = env.params.query["fields"]?
            begin
              JSONFilter.filter(response, fields_text)
            rescue ex
              env.response.status_code = 400
              response = {"error" => ex.message}
            end
          end

          env.response.puts "id: #{id}"
          env.response.puts "data: #{response.to_json}"
          env.response.puts
          env.response.flush

          id += 1

          sleep 1.minute
          Fiber.yield
        end
      rescue ex
      end
    end
  end

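  # Backfill: when ?since=<unix> is given, replay the notifications
  # already stored for each subscribed channel topic (UC... channel IDs).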
  spawn do
    begin
      if since
        since_unix = Time.unix(since.not_nil!)

        topics.try &.each do |topic|
          case topic
          when .match(/UC[A-Za-z0-9_-]{22}/)
            Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
              response = JSON.parse(video.to_json(locale))

              if fields_text = env.params.query["fields"]?
                begin
                  JSONFilter.filter(response, fields_text)
                rescue ex
                  env.response.status_code = 400
                  response = {"error" => ex.message}
                end
              end

              env.response.puts "id: #{id}"
              env.response.puts "data: #{response.to_json}"
              env.response.puts
              env.response.flush

              id += 1
            end
          else
            # TODO
          end
        end
      end
    end
  end

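  # Live relay: forward matching PostgreSQL NOTIFY payloads to this
  # client until the connection is closed.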
  spawn do
    begin
      loop do
        event = connection.receive

        notification = JSON.parse(event.payload)
        topic = notification["topic"].as_s
        video_id = notification["videoId"].as_s
        published = notification["published"].as_i64

        if !topics.try &.includes? topic
          next
        end

        video = get_video(video_id)
        video.published = Time.unix(published)
        response = JSON.parse(video.to_json(locale, nil))

        if fields_text = env.params.query["fields"]?
          begin
            JSONFilter.filter(response, fields_text)
          rescue ex
            env.response.status_code = 400
            response = {"error" => ex.message}
          end
        end

        env.response.puts "id: #{id}"
        env.response.puts "data: #{response.to_json}"
        env.response.puts
        env.response.flush

        id += 1
      end
    rescue ex
    ensure
      connection_channel.send({false, connection})
    end
  end

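  # SSE lines starting with ':' are comments that clients ignore, so the
  # keepalive below keeps intermediaries from timing out the idle stream.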
  begin
    # Send heartbeat
    loop do
      env.response.puts ":keepalive #{Time.utc.to_unix}"
      env.response.puts
      env.response.flush
      sleep (20 + rand(11)).seconds
    end
  rescue ex
  ensure
    connection_channel.send({false, connection})
  end
end

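# Extracts the `ytInitialData` JSON object embedded in a YouTube HTML
# page. Returns an empty hash when the marker cannot be found.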
def extract_initial_data(body) : Hash(String, JSON::Any)
  return JSON.parse(body.match(/(window\["ytInitialData"\]|var\s*ytInitialData)\s*=\s*(?<info>{.*?});<\/script>/mx).try &.["info"] || "{}").as_h
end

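# Streams a proxied upstream response body to the client, re-encoding
# the output to match the upstream Content-Encoding header (gzip,
# deflate, or raw copy otherwise).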
def proxy_file(response, env)
  if response.headers.includes_word?("Content-Encoding", "gzip")
    Compress::Gzip::Writer.open(env.response) do |gzip|
      IO.copy response.body_io, gzip
    end
  elsif response.headers.includes_word?("Content-Encoding", "deflate")
    Compress::Deflate::Writer.open(env.response) do |deflate|
      IO.copy response.body_io, deflate
    end
  else
    IO.copy response.body_io, env.response
  end
end

# Fetches the playback-request tracker from the statistics job's state.
#
# Creates and stores a fresh tracker when the current one is empty.
def get_playback_statistic
  if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]) && tracker.as(Hash).empty?
    tracker = {
      "totalRequests"      => 0_i64,
      "successfulRequests" => 0_i64,
      "ratio"              => 0_f64,
    }

    Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"] = tracker
  end

  return tracker.as(Hash(String, Int64 | Float64))
end