Refactor connection channel for delivering notifications
parent 84b2583973
commit d892ba6aa5
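
The refactor replaces the shared notification_channels array, which every request handler appended to and deleted from directly, with a single connection_channel. One background fiber now owns the list of per-request channels and fans PostgreSQL "notifications" events out to it; handlers register and deregister themselves by sending {true, channel} or {false, channel} over connection_channel. Below is a minimal, self-contained sketch of that pattern, not the project's code: Channel(String) stands in for Channel(PQ::Notification), and a plain `events` channel stands in for the PG.connect_listen callback.

# Sketch only: String payloads and an `events` channel replace
# PQ::Notification and PG.connect_listen from the real code.
connection_channel = Channel({Bool, Channel(String)}).new
events = Channel(String).new

spawn do
  # `connections` lives in this fiber's closure; only the loop below mutates it.
  connections = [] of Channel(String)

  # Delivery: fan each incoming event out to every registered subscriber.
  spawn do
    loop do
      event = events.receive
      connections.each { |connection| connection.send(event) }
    end
  end

  # Registration: {true, channel} adds a subscriber, {false, channel} removes it.
  loop do
    action, connection = connection_channel.receive

    case action
    when true
      connections << connection
    when false
      connections.delete(connection)
    end
  end
end

# A "request handler": register, consume one event, always deregister.
subscriber = Channel(String).new
connection_channel.send({true, subscriber})
begin
  spawn { events.send("video published") }
  puts subscriber.receive
ensure
  connection_channel.send({false, subscriber})
end

Mutation of the connection list is confined to the owner fiber and the delivery path only reads it, which is safe under the cooperative, single-threaded Crystal scheduler this code relies on.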
@@ -186,10 +186,21 @@ spawn do
   end
 end

-notification_channels = [] of Channel(PQ::Notification)
-PG.connect_listen(PG_URL, "notifications") do |event|
-  notification_channels.each do |channel|
-    channel.send(event)
+connection_channel = Channel({Bool, Channel(PQ::Notification)}).new
+spawn do
+  connections = [] of Channel(PQ::Notification)
+
+  PG.connect_listen(PG_URL, "notifications") { |event| connections.each { |connection| connection.send(event) } }
+
+  loop do
+    action, connection = connection_channel.receive
+
+    case action
+    when true
+      connections << connection
+    when false
+      connections.delete(connection)
+    end
   end
 end

@@ -4469,15 +4480,7 @@ get "/api/v1/auth/notifications" do |env|
   topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000)
   topics ||= [] of String

-  notification_channel = Channel(PQ::Notification).new
-  notification_channels << notification_channel
-
-  begin
-    create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, notification_channel)
-  rescue ex
-  ensure
-    notification_channels.delete(notification_channel)
-  end
+  create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, connection_channel)
 end

 post "/api/v1/auth/notifications" do |env|
@@ -4486,15 +4489,7 @@ post "/api/v1/auth/notifications" do |env|
   topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
   topics ||= [] of String

-  notification_channel = Channel(PQ::Notification).new
-  notification_channels << notification_channel
-
-  begin
-    create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, notification_channel)
-  rescue ex
-  ensure
-    notification_channels.delete(notification_channel)
-  end
+  create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, connection_channel)
 end

 get "/api/v1/auth/preferences" do |env|
@@ -661,7 +661,10 @@ def copy_in_chunks(input, output, chunk_size = 4096)
   end
 end

-def create_notification_stream(env, proxies, config, kemal_config, decrypt_function, topics, notification_channel)
+def create_notification_stream(env, proxies, config, kemal_config, decrypt_function, topics, connection_channel)
+  connection = Channel(PQ::Notification).new
+  connection_channel.send({true, connection})
+
   locale = LOCALES[env.get("preferences").as(Preferences).locale]?

   since = env.params.query["since"]?.try &.to_i?
@@ -669,15 +672,87 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct

   if topics.includes? "debug"
     spawn do
+      begin
+        loop do
+          time_span = [0, 0, 0, 0]
+          time_span[rand(4)] = rand(30) + 5
+          published = Time.now - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
+          video_id = TEST_IDS[rand(TEST_IDS.size)]
+
+          video = get_video(video_id, PG_DB, proxies)
+          video.published = published
+          response = JSON.parse(video.to_json(locale, config, kemal_config, decrypt_function))
+
+          if fields_text = env.params.query["fields"]?
+            begin
+              JSONFilter.filter(response, fields_text)
+            rescue ex
+              env.response.status_code = 400
+              response = {"error" => ex.message}
+            end
+          end
+
+          env.response.puts "id: #{id}"
+          env.response.puts "data: #{response.to_json}"
+          env.response.puts
+          env.response.flush
+
+          id += 1
+
+          sleep 1.minute
+        end
+      rescue ex
+      end
+    end
+  end
+
+  spawn do
+    begin
+      if since
+        topics.try &.each do |topic|
+          case topic
+          when .match(/UC[A-Za-z0-9_-]{22}/)
+            PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid = $1 AND published > $2 ORDER BY published DESC LIMIT 15",
+              topic, Time.unix(since.not_nil!), as: ChannelVideo).each do |video|
+              response = JSON.parse(video.to_json(locale, config, Kemal.config))
+
+              if fields_text = env.params.query["fields"]?
+                begin
+                  JSONFilter.filter(response, fields_text)
+                rescue ex
+                  env.response.status_code = 400
+                  response = {"error" => ex.message}
+                end
+              end
+
+              env.response.puts "id: #{id}"
+              env.response.puts "data: #{response.to_json}"
+              env.response.puts
+              env.response.flush
+
+              id += 1
+            end
+          else
+            # TODO
+          end
+        end
+      end
+    end
+  end
+
+  spawn do
+    begin
       loop do
-        time_span = [0, 0, 0, 0]
-        time_span[rand(4)] = rand(30) + 5
-        published = Time.now - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
-        video_id = TEST_IDS[rand(TEST_IDS.size)]
+        event = connection.receive
+
+        notification = JSON.parse(event.payload)
+        topic = notification["topic"].as_s
+        video_id = notification["videoId"].as_s
+        published = notification["published"].as_i64

         video = get_video(video_id, PG_DB, proxies)
-        video.published = published
-        response = JSON.parse(video.to_json(locale, config, kemal_config, decrypt_function))
+        video.published = Time.unix(published)
+        response = JSON.parse(video.to_json(locale, config, Kemal.config, decrypt_function))

         if fields_text = env.params.query["fields"]?
           begin
@@ -688,88 +763,31 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
           end
         end

-        env.response.puts "id: #{id}"
-        env.response.puts "data: #{response.to_json}"
-        env.response.puts
-        env.response.flush
-
-        id += 1
-
-        sleep 1.minute
-      end
-    end
-  end
-
-  spawn do
-    if since
-      topics.try &.each do |topic|
-        case topic
-        when .match(/UC[A-Za-z0-9_-]{22}/)
-          PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid = $1 AND published > $2 ORDER BY published DESC LIMIT 15",
-            topic, Time.unix(since.not_nil!), as: ChannelVideo).each do |video|
-            response = JSON.parse(video.to_json(locale, config, Kemal.config))
-
-            if fields_text = env.params.query["fields"]?
-              begin
-                JSONFilter.filter(response, fields_text)
-              rescue ex
-                env.response.status_code = 400
-                response = {"error" => ex.message}
-              end
-            end
-
-            env.response.puts "id: #{id}"
-            env.response.puts "data: #{response.to_json}"
-            env.response.puts
-            env.response.flush
-
-            id += 1
-          end
-        else
-          # TODO
-        end
-      end
-    end
-  end
-
-  spawn do
-    loop do
-      event = notification_channel.receive
-
-      notification = JSON.parse(event.payload)
-      topic = notification["topic"].as_s
-      video_id = notification["videoId"].as_s
-      published = notification["published"].as_i64
-
-      video = get_video(video_id, PG_DB, proxies)
-      video.published = Time.unix(published)
-      response = JSON.parse(video.to_json(locale, config, Kemal.config, decrypt_function))
-
-      if fields_text = env.params.query["fields"]?
-        begin
-          JSONFilter.filter(response, fields_text)
-        rescue ex
-          env.response.status_code = 400
-          response = {"error" => ex.message}
-        end
-      end
-
-      if topics.try &.includes? topic
-        env.response.puts "id: #{id}"
-        env.response.puts "data: #{response.to_json}"
-        env.response.puts
-        env.response.flush
-
-        id += 1
-      end
-    end
+        if topics.try &.includes? topic
+          env.response.puts "id: #{id}"
+          env.response.puts "data: #{response.to_json}"
+          env.response.puts
+          env.response.flush
+
+          id += 1
+        end
+      end
+    rescue ex
+    ensure
+      connection_channel.send({false, connection})
+    end
   end

-  # Send heartbeat
-  loop do
-    env.response.puts ":keepalive #{Time.now.to_unix}"
-    env.response.puts
-    env.response.flush
-    sleep (20 + rand(11)).seconds
+  begin
+    # Send heartbeat
+    loop do
+      env.response.puts ":keepalive #{Time.now.to_unix}"
+      env.response.puts
+      env.response.flush
+      sleep (20 + rand(11)).seconds
+    end
+  rescue ex
+  ensure
+    connection_channel.send({false, connection})
   end
 end
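
On the stream side, create_notification_stream now allocates its own Channel(PQ::Notification), announces it with connection_channel.send({true, connection}), and sends {false, connection} from ensure blocks, so a client that disconnects (making the response writes raise) is still removed from the fan-out list. A generic sketch of that register/ensure-deregister lifecycle, using illustrative names and a String payload rather than the project's actual signatures:

# Hypothetical stream handler showing the lifecycle used in this commit;
# `stream_events`, the String payload, and the bare IO are illustrative.
def stream_events(connection_channel : Channel({Bool, Channel(String)}), io : IO)
  connection = Channel(String).new
  connection_channel.send({true, connection}) # register with the fan-out fiber

  begin
    loop do
      event = connection.receive
      io.puts "data: #{event}" # raises once the client has gone away
      io.flush
    end
  rescue ex
    # a broken pipe or closed socket simply ends the stream
  ensure
    connection_channel.send({false, connection}) # always deregister
  end
end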