2018-09-04 16:22:10 +02:00
# "Invidious" (which is an alternative front-end to YouTube)
2019-03-15 17:44:53 +01:00
# Copyright (C) 2019 Omar Roth
2018-01-28 18:32:40 +01:00
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
2018-11-22 20:26:08 +01:00
require " digest/md5 "
2019-01-23 21:15:19 +01:00
require " file_utils "
2017-11-23 08:48:55 +01:00
require " kemal "
2018-07-18 21:26:02 +02:00
require " openssl/hmac "
2018-02-03 23:13:14 +01:00
require " option_parser "
2017-11-24 05:06:43 +01:00
require " pg "
2018-11-22 00:12:13 +01:00
require " sqlite3 "
2018-01-16 21:02:35 +01:00
require " xml "
2018-03-09 19:42:23 +01:00
require " yaml "
2020-06-16 00:57:20 +02:00
require " compress/zip "
2019-10-27 18:50:42 +01:00
require " protodec/utils "
2018-08-04 22:30:44 +02:00
require " ./invidious/helpers/* "
2018-07-06 14:59:56 +02:00
require " ./invidious/* "
2021-07-14 17:46:12 +02:00
require " ./invidious/channels/* "
2020-10-06 06:41:18 +02:00
require " ./invidious/routes/** "
require " ./invidious/jobs/** "
2017-11-29 22:33:46 +01:00
2021-01-23 18:58:13 +01:00
CONFIG = Config . load
HMAC_KEY = CONFIG . hmac_key || Random :: Secure . hex ( 32 )
2018-03-09 19:42:23 +01:00
2021-01-30 15:52:48 +01:00
PG_DB = DB . open CONFIG . database_url
2019-06-23 15:39:14 +02:00
ARCHIVE_URL = URI . parse ( " https://archive.org " )
LOGIN_URL = URI . parse ( " https://accounts.google.com " )
PUBSUB_URL = URI . parse ( " https://pubsubhubbub.appspot.com " )
REDDIT_URL = URI . parse ( " https://www.reddit.com " )
2020-03-10 16:12:11 +01:00
TEXTCAPTCHA_URL = URI . parse ( " https://textcaptcha.com " )
2019-06-23 15:39:14 +02:00
YT_URL = URI . parse ( " https://www.youtube.com " )
2021-01-23 19:39:04 +01:00
HOST_URL = make_host_url ( Kemal . config )
2019-06-23 15:39:14 +02:00
2019-06-07 19:39:12 +02:00
CHARS_SAFE = " ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_ "
TEST_IDS = { " AgbeGFYluEA " , " BaW_jenozKc " , " a9LDPn-MO4I " , " ddFvjfvPnqk " , " iqKdEhx-dD4 " }
2019-06-08 23:04:55 +02:00
MAX_ITEMS_PER_PAGE = 1500
2018-03-05 05:25:03 +01:00
2019-11-24 19:41:47 +01:00
REQUEST_HEADERS_WHITELIST = { " accept " , " accept-encoding " , " cache-control " , " content-length " , " if-none-match " , " range " }
RESPONSE_HEADERS_BLACKLIST = { " access-control-allow-origin " , " alt-svc " , " server " }
2019-07-04 22:30:00 +02:00
HTTP_CHUNK_SIZE = 10485760 # ~10MB
2019-06-23 15:39:14 +02:00
2020-02-15 19:52:28 +01:00
CURRENT_BRANCH = {{ " #{ ` git branch | sed -n '/* /s///p' ` . strip } " }}
2019-06-23 15:39:14 +02:00
CURRENT_COMMIT = {{ " #{ ` git rev-list HEAD --max-count=1 --abbrev-commit ` . strip } " }}
2020-12-05 20:06:24 +01:00
CURRENT_VERSION = {{ " #{ ` git log -1 --format=%ci | awk '{print $1}' | sed s/-/./g ` . strip } " }}
2019-06-23 15:39:14 +02:00
2019-05-09 18:52:37 +02:00
# This is used to determine the `?v=` on the end of file URLs (for cache busting). We
# only need to expire modified assets, so we can use this to find the last commit that changes
# any assets
ASSET_COMMIT = {{ " #{ ` git rev-list HEAD --max-count=1 --abbrev-commit -- assets ` . strip } " }}
2019-04-06 15:28:53 +02:00
SOFTWARE = {
" name " = > " invidious " ,
" version " = > " #{ CURRENT_VERSION } - #{ CURRENT_COMMIT } " ,
" branch " = > " #{ CURRENT_BRANCH } " ,
}
2021-04-03 22:11:35 +02:00
YT_POOL = YoutubeConnectionPool . new ( YT_URL , capacity : CONFIG . pool_size , timeout : 2.0 , use_quic : CONFIG . use_quic )
2019-10-25 18:58:16 +02:00
2021-01-04 16:51:06 +01:00
# CLI
2019-04-06 15:28:53 +02:00
Kemal . config . extra_options do | parser |
parser . banner = " Usage: invidious [arguments] "
2021-01-04 16:51:06 +01:00
parser . on ( " -c THREADS " , " --channel-threads=THREADS " , " Number of threads for refreshing channels (default: #{ CONFIG . channel_threads } ) " ) do | number |
2019-04-06 15:28:53 +02:00
begin
2021-01-04 16:51:06 +01:00
CONFIG . channel_threads = number . to_i
2019-04-06 15:28:53 +02:00
rescue ex
puts " THREADS must be integer "
exit
end
end
2021-01-04 16:51:06 +01:00
parser . on ( " -f THREADS " , " --feed-threads=THREADS " , " Number of threads for refreshing feeds (default: #{ CONFIG . feed_threads } ) " ) do | number |
2019-04-06 15:28:53 +02:00
begin
2021-01-04 16:51:06 +01:00
CONFIG . feed_threads = number . to_i
2019-04-06 15:28:53 +02:00
rescue ex
puts " THREADS must be integer "
exit
end
end
2021-01-04 16:51:06 +01:00
parser . on ( " -o OUTPUT " , " --output=OUTPUT " , " Redirect output (default: #{ CONFIG . output } ) " ) do | output |
CONFIG . output = output
2019-04-06 15:28:53 +02:00
end
2021-01-04 16:51:06 +01:00
parser . on ( " -l LEVEL " , " --log-level=LEVEL " , " Log level, one of #{ LogLevel . values } (default: #{ CONFIG . log_level } ) " ) do | log_level |
CONFIG . log_level = LogLevel . parse ( log_level )
2020-12-21 16:05:35 +01:00
end
parser . on ( " -v " , " --version " , " Print version " ) do
2019-04-06 15:28:53 +02:00
puts SOFTWARE . to_pretty_json
exit
end
end
Kemal :: CLI . new ARGV
2021-01-04 16:51:06 +01:00
if CONFIG . output . upcase != " STDOUT "
FileUtils . mkdir_p ( File . dirname ( CONFIG . output ) )
2021-01-04 16:05:15 +01:00
end
2021-01-04 16:51:06 +01:00
OUTPUT = CONFIG . output . upcase == " STDOUT " ? STDOUT : File . open ( CONFIG . output , mode : " a " )
LOGGER = Invidious :: LogHandler . new ( OUTPUT , CONFIG . log_level )
2021-01-04 16:05:15 +01:00
2019-04-15 18:13:09 +02:00
# Check table integrity
2019-04-11 19:13:25 +02:00
if CONFIG . check_tables
2021-01-04 16:51:06 +01:00
check_enum ( PG_DB , " privacy " , PlaylistPrivacy )
2019-08-06 01:49:13 +02:00
2021-01-04 16:51:06 +01:00
check_table ( PG_DB , " channels " , InvidiousChannel )
check_table ( PG_DB , " channel_videos " , ChannelVideo )
check_table ( PG_DB , " playlists " , InvidiousPlaylist )
check_table ( PG_DB , " playlist_videos " , PlaylistVideo )
check_table ( PG_DB , " nonces " , Nonce )
check_table ( PG_DB , " session_ids " , SessionId )
check_table ( PG_DB , " users " , User )
check_table ( PG_DB , " videos " , Video )
2019-04-15 18:13:09 +02:00
if CONFIG . cache_annotations
2021-01-04 16:51:06 +01:00
check_table ( PG_DB , " annotations " , Annotation )
2019-04-15 18:13:09 +02:00
end
2019-04-11 19:13:25 +02:00
end
2018-03-26 05:18:29 +02:00
2019-04-10 23:23:37 +02:00
# Start jobs
2019-05-15 19:26:29 +02:00
2021-01-23 19:41:50 +01:00
if CONFIG . channel_threads > 0
Invidious :: Jobs . register Invidious :: Jobs :: RefreshChannelsJob . new ( PG_DB )
end
if CONFIG . feed_threads > 0
Invidious :: Jobs . register Invidious :: Jobs :: RefreshFeedsJob . new ( PG_DB )
end
2020-09-27 19:19:44 +02:00
DECRYPT_FUNCTION = DecryptFunction . new ( CONFIG . decrypt_polling )
2021-01-23 19:39:04 +01:00
if CONFIG . decrypt_polling
2021-01-04 16:51:06 +01:00
Invidious :: Jobs . register Invidious :: Jobs :: UpdateDecryptFunctionJob . new
2020-09-27 19:19:44 +02:00
end
2019-03-04 02:18:23 +01:00
2021-01-23 19:39:04 +01:00
if CONFIG . statistics_enabled
Invidious :: Jobs . register Invidious :: Jobs :: StatisticsRefreshJob . new ( PG_DB , SOFTWARE )
2020-10-17 14:25:57 +02:00
end
2019-08-27 15:08:26 +02:00
2021-01-23 19:39:04 +01:00
if ( CONFIG . use_pubsub_feeds . is_a? ( Bool ) && CONFIG . use_pubsub_feeds . as ( Bool ) ) || ( CONFIG . use_pubsub_feeds . is_a? ( Int32 ) && CONFIG . use_pubsub_feeds . as ( Int32 ) > 0 )
Invidious :: Jobs . register Invidious :: Jobs :: SubscribeToFeedsJob . new ( PG_DB , HMAC_KEY )
2021-01-07 20:15:26 +01:00
end
2021-01-23 19:39:04 +01:00
if CONFIG . popular_enabled
2020-12-27 06:12:43 +01:00
Invidious :: Jobs . register Invidious :: Jobs :: PullPopularVideosJob . new ( PG_DB )
end
2021-01-23 19:39:04 +01:00
if CONFIG . captcha_key
Invidious :: Jobs . register Invidious :: Jobs :: BypassCaptchaJob . new
2019-03-02 02:25:16 +01:00
end
2019-03-01 23:47:06 +01:00
2020-10-17 14:25:57 +02:00
connection_channel = Channel ( { Bool , Channel ( PQ :: Notification ) } ) . new ( 32 )
2021-01-30 15:52:48 +01:00
Invidious :: Jobs . register Invidious :: Jobs :: NotificationJob . new ( connection_channel , CONFIG . database_url )
2020-10-17 14:25:57 +02:00
2020-10-06 06:41:18 +02:00
Invidious :: Jobs . start_all
def popular_videos
Invidious :: Jobs :: PullPopularVideosJob :: POPULAR_VIDEOS . get
2018-11-09 03:08:03 +01:00
end
2018-03-25 05:56:41 +02:00
before_all do | env |
2020-10-16 12:23:18 +02:00
preferences = begin
2021-05-24 15:52:55 +02:00
Preferences . from_json ( URI . decode_www_form ( env . request . cookies [ " PREFS " ]? . try & . value || " {} " ) )
2020-03-15 22:46:08 +01:00
rescue
2020-10-16 12:23:18 +02:00
Preferences . from_json ( " {} " )
2020-03-15 22:46:08 +01:00
end
2020-10-16 12:23:18 +02:00
env . set " preferences " , preferences
2019-05-10 23:48:38 +02:00
env . response . headers [ " X-XSS-Protection " ] = " 1; mode=block "
2018-09-06 04:51:40 +02:00
env . response . headers [ " X-Content-Type-Options " ] = " nosniff "
2021-06-17 19:45:20 +02:00
2021-06-19 09:40:33 +02:00
# Allow media resources to be loaded from google servers
2021-06-17 19:45:20 +02:00
# TODO: check if *.youtube.com can be removed
2020-03-15 22:46:08 +01:00
if CONFIG . disabled? ( " local " ) || ! preferences . local
2021-06-17 19:45:20 +02:00
extra_media_csp = " https://*.googlevideo.com:443 https://*.youtube.com:443 "
else
extra_media_csp = " "
end
# Only allow the pages at /embed/* to be embedded
if env . request . resource . starts_with? ( " /embed " )
frame_ancestors = " 'self' http: https: "
else
2021-06-20 18:43:00 +02:00
frame_ancestors = " 'none' "
2020-03-15 22:46:08 +01:00
end
2021-06-17 19:45:20 +02:00
# TODO: Remove style-src's 'unsafe-inline', requires to remove all
# inline styles (<style> [..] </style>, style=" [..] ")
env . response . headers [ " Content-Security-Policy " ] = {
" default-src 'none' " ,
" script-src 'self' " ,
" style-src 'self' 'unsafe-inline' " ,
" img-src 'self' data: " ,
" font-src 'self' data: " ,
" connect-src 'self' " ,
" manifest-src 'self' " ,
" media-src 'self' blob: " + extra_media_csp ,
" child-src 'self' blob: " ,
" frame-src 'self' " ,
" frame-ancestors " + frame_ancestors ,
} . join ( " ; " )
2019-04-07 21:01:08 +02:00
env . response . headers [ " Referrer-Policy " ] = " same-origin "
2021-06-17 19:45:20 +02:00
# Ask the chrom*-based browsers to disable FLoC
# See: https://blog.runcloud.io/google-floc/
env . response . headers [ " Permissions-Policy " ] = " interest-cohort=() "
2021-01-23 19:39:04 +01:00
if ( Kemal . config . ssl || CONFIG . https_only ) && CONFIG . hsts
2019-05-01 03:53:56 +02:00
env . response . headers [ " Strict-Transport-Security " ] = " max-age=31536000; includeSubDomains; preload "
2019-04-07 21:01:08 +02:00
end
2019-03-28 19:43:40 +01:00
2019-11-20 18:03:52 +01:00
next if {
" /sb/ " ,
" /vi/ " ,
" /s_p/ " ,
" /yts/ " ,
" /ggpht/ " ,
" /api/manifest/ " ,
" /videoplayback " ,
" /latest_version " ,
} . any? { | r | env . request . resource . starts_with? r }
2018-07-16 18:24:24 +02:00
if env . request . cookies . has_key? " SID "
2018-04-01 02:09:27 +02:00
sid = env . request . cookies [ " SID " ] . value
2018-07-06 01:43:26 +02:00
2019-04-18 23:23:50 +02:00
if sid . starts_with? " v1: "
raise " Cannot use token as SID "
end
2018-07-18 21:26:02 +02:00
# Invidious users only have SID
if ! env . request . cookies . has_key? " SSID "
2019-04-16 06:23:40 +02:00
if email = PG_DB . query_one? ( " SELECT email FROM session_ids WHERE id = $1 " , sid , as : String )
2019-02-12 03:52:47 +01:00
user = PG_DB . query_one ( " SELECT * FROM users WHERE email = $1 " , email , as : User )
2019-08-06 01:49:13 +02:00
csrf_token = generate_response ( sid , {
" :authorize_token " ,
" :playlist_ajax " ,
" :signout " ,
" :subscription_ajax " ,
" :token_ajax " ,
" :watch_ajax " ,
} , HMAC_KEY , PG_DB , 1 . week )
2018-11-09 00:42:25 +01:00
2019-03-11 18:44:25 +01:00
preferences = user . preferences
2020-10-16 12:23:18 +02:00
env . set " preferences " , preferences
2019-03-11 18:44:25 +01:00
2018-08-15 19:40:42 +02:00
env . set " sid " , sid
2019-04-18 23:23:50 +02:00
env . set " csrf_token " , csrf_token
2019-04-16 06:23:40 +02:00
env . set " user " , user
2018-07-18 21:26:02 +02:00
end
else
2019-04-16 06:23:40 +02:00
headers = HTTP :: Headers . new
headers [ " Cookie " ] = env . request . headers [ " Cookie " ]
2018-07-18 21:26:02 +02:00
begin
2021-01-04 16:51:06 +01:00
user , sid = get_user ( sid , headers , PG_DB , false )
2019-08-06 01:49:13 +02:00
csrf_token = generate_response ( sid , {
" :authorize_token " ,
" :playlist_ajax " ,
" :signout " ,
" :subscription_ajax " ,
" :token_ajax " ,
" :watch_ajax " ,
} , HMAC_KEY , PG_DB , 1 . week )
2018-11-16 03:23:17 +01:00
2019-03-11 18:44:25 +01:00
preferences = user . preferences
2020-10-16 12:23:18 +02:00
env . set " preferences " , preferences
2019-03-11 18:44:25 +01:00
2018-08-15 19:40:42 +02:00
env . set " sid " , sid
2019-04-18 23:23:50 +02:00
env . set " csrf_token " , csrf_token
2019-04-16 06:23:40 +02:00
env . set " user " , user
2018-07-18 21:26:02 +02:00
rescue ex
end
2018-07-16 19:50:41 +02:00
end
2018-04-14 04:32:14 +02:00
end
2018-08-17 17:19:20 +02:00
2019-08-15 18:29:55 +02:00
dark_mode = convert_theme ( env . params . query [ " dark_mode " ]? ) || preferences . dark_mode . to_s
2019-03-11 18:44:25 +01:00
thin_mode = env . params . query [ " thin_mode " ]? || preferences . thin_mode . to_s
thin_mode = thin_mode == " true "
locale = env . params . query [ " hl " ]? || preferences . locale
preferences . dark_mode = dark_mode
preferences . thin_mode = thin_mode
preferences . locale = locale
2021-03-17 20:07:38 +01:00
env . set " preferences " , preferences
2018-12-20 22:32:09 +01:00
2018-08-17 17:19:20 +02:00
current_page = env . request . path
if env . request . query
query = HTTP :: Params . parse ( env . request . query . not_nil! )
if query [ " referer " ]?
query [ " referer " ] = get_referer ( env , " / " )
end
current_page += " ? #{ query } "
end
2019-09-24 19:31:33 +02:00
env . set " current_page " , URI . encode_www_form ( current_page )
2018-03-22 18:44:36 +01:00
end
2021-02-02 06:18:16 +01:00
Invidious :: Routing . get " / " , Invidious :: Routes :: Misc , :home
Invidious :: Routing . get " /privacy " , Invidious :: Routes :: Misc , :privacy
Invidious :: Routing . get " /licenses " , Invidious :: Routes :: Misc , :licenses
2021-04-07 05:07:19 +02:00
Invidious :: Routing . get " /watch " , Invidious :: Routes :: Watch , :handle
Invidious :: Routing . get " /watch/:id " , Invidious :: Routes :: Watch , :redirect
Invidious :: Routing . get " /shorts/:id " , Invidious :: Routes :: Watch , :redirect
Invidious :: Routing . get " /w/:id " , Invidious :: Routes :: Watch , :redirect
Invidious :: Routing . get " /v/:id " , Invidious :: Routes :: Watch , :redirect
Invidious :: Routing . get " /e/:id " , Invidious :: Routes :: Watch , :redirect
2021-03-27 02:36:40 +01:00
Invidious :: Routing . get " /redirect " , Invidious :: Routes :: Misc , :cross_instance_redirect
2021-04-07 05:07:19 +02:00
2021-02-02 06:18:16 +01:00
Invidious :: Routing . get " /embed/ " , Invidious :: Routes :: Embed , :redirect
Invidious :: Routing . get " /embed/:id " , Invidious :: Routes :: Embed , :show
2021-04-07 05:07:19 +02:00
2020-11-20 02:21:48 +01:00
Invidious :: Routing . get " /view_all_playlists " , Invidious :: Routes :: Playlists , :index
Invidious :: Routing . get " /create_playlist " , Invidious :: Routes :: Playlists , :new
Invidious :: Routing . post " /create_playlist " , Invidious :: Routes :: Playlists , :create
Invidious :: Routing . get " /subscribe_playlist " , Invidious :: Routes :: Playlists , :subscribe
Invidious :: Routing . get " /delete_playlist " , Invidious :: Routes :: Playlists , :delete_page
Invidious :: Routing . post " /delete_playlist " , Invidious :: Routes :: Playlists , :delete
Invidious :: Routing . get " /edit_playlist " , Invidious :: Routes :: Playlists , :edit
Invidious :: Routing . post " /edit_playlist " , Invidious :: Routes :: Playlists , :update
Invidious :: Routing . get " /add_playlist_items " , Invidious :: Routes :: Playlists , :add_playlist_items_page
Invidious :: Routing . post " /playlist_ajax " , Invidious :: Routes :: Playlists , :playlist_ajax
Invidious :: Routing . get " /playlist " , Invidious :: Routes :: Playlists , :show
Invidious :: Routing . get " /mix " , Invidious :: Routes :: Playlists , :mix
2021-04-07 05:07:19 +02:00
2020-12-02 01:02:39 +01:00
Invidious :: Routing . get " /opensearch.xml " , Invidious :: Routes :: Search , :opensearch
Invidious :: Routing . get " /results " , Invidious :: Routes :: Search , :results
Invidious :: Routing . get " /search " , Invidious :: Routes :: Search , :search
2021-04-07 05:07:19 +02:00
2020-12-09 03:50:39 +01:00
Invidious :: Routing . get " /login " , Invidious :: Routes :: Login , :login_page
Invidious :: Routing . post " /login " , Invidious :: Routes :: Login , :login
Invidious :: Routing . post " /signout " , Invidious :: Routes :: Login , :signout
2021-04-07 05:07:19 +02:00
2021-02-02 06:18:16 +01:00
Invidious :: Routing . get " /preferences " , Invidious :: Routes :: PreferencesRoute , :show
Invidious :: Routing . post " /preferences " , Invidious :: Routes :: PreferencesRoute , :update
Invidious :: Routing . get " /toggle_theme " , Invidious :: Routes :: PreferencesRoute , :toggle_theme
2018-07-22 03:56:11 +02:00
2018-08-04 22:30:44 +02:00
# Users
2018-07-22 03:56:11 +02:00
2019-04-16 06:23:40 +02:00
post " /watch_ajax " do | env |
2019-03-11 18:44:25 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 22:32:09 +01:00
2018-11-20 05:06:59 +01:00
user = env . get? " user "
2019-04-16 06:23:40 +02:00
sid = env . get? " sid "
2018-11-20 05:06:59 +01:00
referer = get_referer ( env , " /feed/subscriptions " )
redirect = env . params . query [ " redirect " ]?
2019-04-16 06:23:40 +02:00
redirect || = " true "
2018-11-20 05:06:59 +01:00
redirect = redirect == " true "
2019-04-16 06:23:40 +02:00
if ! user
2019-04-18 23:23:50 +02:00
if redirect
next env . redirect referer
else
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " No such user " )
2019-04-18 23:23:50 +02:00
end
2018-11-20 05:06:59 +01:00
end
2018-12-20 22:32:09 +01:00
2019-04-16 06:23:40 +02:00
user = user . as ( User )
sid = sid . as ( String )
2019-04-18 23:23:50 +02:00
token = env . params . body [ " csrf_token " ]?
2018-11-20 05:06:59 +01:00
id = env . params . query [ " id " ]?
if ! id
2019-03-23 16:24:30 +01:00
env . response . status_code = 400
next
2018-11-20 05:06:59 +01:00
end
2019-04-16 06:23:40 +02:00
begin
2019-04-18 23:23:50 +02:00
validate_request ( token , sid , env . request , HMAC_KEY , PG_DB , locale )
2019-04-16 06:23:40 +02:00
rescue ex
if redirect
2020-11-30 10:59:21 +01:00
next error_template ( 400 , ex )
2019-04-16 06:23:40 +02:00
else
2020-11-30 10:59:21 +01:00
next error_json ( 400 , ex )
2019-04-16 06:23:40 +02:00
end
end
if env . params . query [ " action_mark_watched " ]?
action = " action_mark_watched "
elsif env . params . query [ " action_mark_unwatched " ]?
action = " action_mark_unwatched "
else
next env . redirect referer
end
case action
when " action_mark_watched "
if ! user . watched . includes? id
2020-02-28 17:46:24 +01:00
PG_DB . exec ( " UPDATE users SET watched = array_append(watched, $1) WHERE email = $2 " , id , user . email )
2019-04-16 06:23:40 +02:00
end
when " action_mark_unwatched "
2018-11-22 00:12:13 +01:00
PG_DB . exec ( " UPDATE users SET watched = array_remove(watched, $1) WHERE email = $2 " , id , user . email )
2020-04-09 19:18:09 +02:00
else
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Unsupported action #{ action } " )
2018-11-20 05:06:59 +01:00
end
if redirect
env . redirect referer
else
env . response . content_type = " application/json "
" {} "
end
end
2018-08-05 06:07:38 +02:00
# /modify_notifications
# will "ding" all subscriptions.
2018-08-04 22:30:44 +02:00
# /modify_notifications?receive_all_updates=false&receive_no_updates=false
# will "unding" all subscriptions.
get " /modify_notifications " do | env |
2019-03-11 18:44:25 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 22:32:09 +01:00
2018-08-04 22:30:44 +02:00
user = env . get? " user "
2019-04-07 19:59:12 +02:00
sid = env . get? " sid "
referer = get_referer ( env , " / " )
2018-07-14 15:36:31 +02:00
2019-04-07 19:59:12 +02:00
redirect = env . params . query [ " redirect " ]?
redirect || = " false "
redirect = redirect == " true "
2018-07-30 04:05:40 +02:00
2019-04-18 23:23:50 +02:00
if ! user
if redirect
next env . redirect referer
else
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " No such user " )
2019-04-18 23:23:50 +02:00
end
2019-04-07 19:59:12 +02:00
end
user = user . as ( User )
if ! user . password
2018-08-04 22:30:44 +02:00
channel_req = { } of String = > String
2018-02-27 01:59:02 +01:00
2018-08-04 22:30:44 +02:00
channel_req [ " receive_all_updates " ] = env . params . query [ " receive_all_updates " ]? || " true "
channel_req [ " receive_no_updates " ] = env . params . query [ " receive_no_updates " ]? || " "
channel_req [ " receive_post_updates " ] = env . params . query [ " receive_post_updates " ]? || " true "
2018-01-07 18:42:24 +01:00
2018-08-04 22:30:44 +02:00
channel_req . reject! { | k , v | v != " true " && v != " false " }
2018-01-07 03:39:24 +01:00
2018-08-04 22:30:44 +02:00
headers = HTTP :: Headers . new
headers [ " Cookie " ] = env . request . headers [ " Cookie " ]
2017-12-30 22:21:43 +01:00
2019-10-25 18:58:16 +02:00
html = YT_POOL . client & . get ( " /subscription_manager?disable_polymer=1 " , headers )
2019-04-07 19:59:12 +02:00
2021-05-24 15:45:50 +02:00
cookies = HTTP :: Cookies . from_client_headers ( headers )
2019-04-07 19:59:12 +02:00
html . cookies . each do | cookie |
if { " VISITOR_INFO1_LIVE " , " YSC " , " SIDCC " } . includes? cookie . name
if cookies [ cookie . name ]?
cookies [ cookie . name ] = cookie
else
cookies << cookie
end
end
end
headers = cookies . add_request_headers ( headers )
2020-06-16 00:33:23 +02:00
if match = html . body . match ( / 'XSRF_TOKEN': "(?<session_token>[^"]+)" / )
2018-08-04 22:30:44 +02:00
session_token = match [ " session_token " ]
else
next env . redirect referer
end
2018-07-18 21:26:02 +02:00
2019-04-07 19:59:12 +02:00
headers [ " content-type " ] = " application/x-www-form-urlencoded "
2018-08-04 22:30:44 +02:00
channel_req [ " session_token " ] = session_token
2018-04-08 04:36:09 +02:00
2019-04-07 19:59:12 +02:00
subs = XML . parse_html ( html . body )
2018-08-04 22:30:44 +02:00
subs . xpath_nodes ( % q ( / / a [ @class = " subscription-title yt-uix-sessionlink " ] / @href ) ) . each do | channel |
channel_id = channel . content . lstrip ( " /channel/ " ) . not_nil!
channel_req [ " channel_id " ] = channel_id
2019-10-25 18:58:16 +02:00
YT_POOL . client & . post ( " /subscription_ajax?action_update_subscription_preferences=1 " , headers , form : channel_req )
2018-08-04 22:30:44 +02:00
end
2018-07-18 21:26:02 +02:00
end
2019-04-07 19:59:12 +02:00
if redirect
env . redirect referer
else
env . response . content_type = " application/json "
" {} "
end
end
2019-04-16 06:23:40 +02:00
post " /subscription_ajax " do | env |
2019-04-07 19:59:12 +02:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
user = env . get? " user "
sid = env . get? " sid "
referer = get_referer ( env , " / " )
redirect = env . params . query [ " redirect " ]?
2019-04-16 06:23:40 +02:00
redirect || = " true "
2019-04-07 19:59:12 +02:00
redirect = redirect == " true "
2019-04-16 06:23:40 +02:00
if ! user
2019-04-18 23:23:50 +02:00
if redirect
next env . redirect referer
else
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " No such user " )
2019-04-18 23:23:50 +02:00
end
2019-04-07 19:59:12 +02:00
end
user = user . as ( User )
2019-04-16 06:23:40 +02:00
sid = sid . as ( String )
2019-04-18 23:23:50 +02:00
token = env . params . body [ " csrf_token " ]?
2019-04-16 06:23:40 +02:00
begin
2019-04-18 23:23:50 +02:00
validate_request ( token , sid , env . request , HMAC_KEY , PG_DB , locale )
2019-04-16 06:23:40 +02:00
rescue ex
if redirect
2020-11-30 10:59:21 +01:00
next error_template ( 400 , ex )
2019-04-16 06:23:40 +02:00
else
2020-11-30 10:59:21 +01:00
next error_json ( 400 , ex )
2019-04-16 06:23:40 +02:00
end
end
2019-04-07 19:59:12 +02:00
2019-06-08 02:56:41 +02:00
if env . params . query [ " action_create_subscription_to_channel " ]? . try & . to_i? . try & . == 1
2019-04-07 19:59:12 +02:00
action = " action_create_subscription_to_channel "
2019-06-08 02:56:41 +02:00
elsif env . params . query [ " action_remove_subscriptions " ]? . try & . to_i? . try & . == 1
2019-04-07 19:59:12 +02:00
action = " action_remove_subscriptions "
else
next env . redirect referer
end
channel_id = env . params . query [ " c " ]?
channel_id || = " "
if ! user . password
2019-04-15 01:08:00 +02:00
# Sync subscriptions with YouTube
2019-05-15 19:26:29 +02:00
subscribe_ajax ( channel_id , action , env . request . headers )
2019-04-07 19:59:12 +02:00
end
2019-05-15 19:26:29 +02:00
email = user . email
2019-04-07 19:59:12 +02:00
case action
2019-06-08 02:56:41 +02:00
when " action_create_subscription_to_channel "
2019-04-07 19:59:12 +02:00
if ! user . subscriptions . includes? channel_id
2021-01-04 16:51:06 +01:00
get_channel ( channel_id , PG_DB , false , false )
2019-06-01 17:19:18 +02:00
PG_DB . exec ( " UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2 " , channel_id , email )
2019-04-07 19:59:12 +02:00
end
2019-06-08 02:56:41 +02:00
when " action_remove_subscriptions "
2019-06-01 17:19:18 +02:00
PG_DB . exec ( " UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2 " , channel_id , email )
2020-04-09 19:18:09 +02:00
else
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Unsupported action #{ action } " )
2019-04-07 19:59:12 +02:00
end
if redirect
env . redirect referer
else
env . response . content_type = " application/json "
" {} "
end
2018-08-04 22:30:44 +02:00
end
2018-04-29 16:40:33 +02:00
2018-08-04 22:30:44 +02:00
get " /subscription_manager " do | env |
2019-03-11 18:44:25 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 22:32:09 +01:00
2018-08-04 22:30:44 +02:00
user = env . get? " user "
2019-02-10 19:33:29 +01:00
sid = env . get? " sid "
2019-04-18 23:23:50 +02:00
referer = get_referer ( env )
2018-08-09 03:26:02 +02:00
2019-04-18 23:23:50 +02:00
if ! user
2018-08-09 03:26:02 +02:00
next env . redirect referer
2018-04-28 16:27:05 +02:00
end
2018-08-04 22:30:44 +02:00
user = user . as ( User )
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
if ! user . password
# Refresh account
headers = HTTP :: Headers . new
headers [ " Cookie " ] = env . request . headers [ " Cookie " ]
2018-04-08 04:36:09 +02:00
2021-01-04 16:51:06 +01:00
user , sid = get_user ( sid , headers , PG_DB )
2018-08-04 22:30:44 +02:00
end
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
action_takeout = env . params . query [ " action_takeout " ]? . try & . to_i?
action_takeout || = 0
action_takeout = action_takeout == 1
2018-07-18 21:26:02 +02:00
2018-08-04 22:30:44 +02:00
format = env . params . query [ " format " ]?
format || = " rss "
2018-07-18 21:26:02 +02:00
2019-04-22 17:40:29 +02:00
if user . subscriptions . empty?
values = " '{}' "
else
values = " VALUES #{ user . subscriptions . map { | id | %( ( ' #{ id } ' ) ) } . join ( " , " ) } "
end
subscriptions = PG_DB . query_all ( " SELECT * FROM channels WHERE id = ANY( #{ values } ) " , as : InvidiousChannel )
2018-08-04 22:30:44 +02:00
subscriptions . sort_by! { | channel | channel . author . downcase }
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
if action_takeout
if format == " json "
env . response . content_type = " application/json "
env . response . headers [ " content-disposition " ] = " attachment "
2020-07-25 19:30:28 +02:00
playlists = PG_DB . query_all ( " SELECT * FROM playlists WHERE author = $1 AND id LIKE 'IV%' ORDER BY created " , user . email , as : InvidiousPlaylist )
next JSON . build do | json |
json . object do
json . field " subscriptions " , user . subscriptions
json . field " watch_history " , user . watched
json . field " preferences " , user . preferences
json . field " playlists " do
json . array do
playlists . each do | playlist |
json . object do
json . field " title " , playlist . title
json . field " description " , html_to_content ( playlist . description_html )
json . field " privacy " , playlist . privacy . to_s
json . field " videos " do
json . array do
2020-07-28 23:21:39 +02:00
PG_DB . query_all ( " SELECT id FROM playlist_videos WHERE plid = $1 ORDER BY array_position($2, index) LIMIT 500 " , playlist . id , playlist . index , as : String ) . each do | video_id |
json . string video_id
2020-07-25 19:30:28 +02:00
end
end
end
end
end
end
end
end
end
2018-08-04 22:30:44 +02:00
else
env . response . content_type = " application/xml "
env . response . headers [ " content-disposition " ] = " attachment "
export = XML . build do | xml |
xml . element ( " opml " , version : " 1.1 " ) do
xml . element ( " body " ) do
if format == " newpipe "
title = " YouTube Subscriptions "
else
title = " Invidious Subscriptions "
end
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
xml . element ( " outline " , text : title , title : title ) do
subscriptions . each do | channel |
if format == " newpipe "
xmlUrl = " https://www.youtube.com/feeds/videos.xml?channel_id= #{ channel . id } "
else
2020-06-16 00:10:30 +02:00
xmlUrl = " #{ HOST_URL } /feed/channel/ #{ channel . id } "
2018-08-04 22:30:44 +02:00
end
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
xml . element ( " outline " , text : channel . author , title : channel . author ,
" type " : " rss " , xmlUrl : xmlUrl )
end
end
end
2018-07-18 21:26:02 +02:00
end
2018-03-16 17:40:29 +01:00
end
2018-08-04 22:30:44 +02:00
next export . gsub ( %( <?xml version="1.0"?> \n ) , " " )
end
end
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
templated " subscription_manager "
end
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
# GET /data_control
# Renders the data import/export page for a logged-in user.
# Anonymous visitors are redirected back to the referer.
get "/data_control" do |env|
  # NOTE: `locale` and `user` look unused here, but the `templated` macro
  # renders an ECR template that references these locals by name.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)

  templated "data_control"
end
2018-03-16 17:40:29 +01:00
2018-08-04 22:30:44 +02:00
# POST /data_control
# Imports user data (subscriptions, watch history, preferences, playlists)
# from several formats: Invidious JSON, YouTube OPML/JSON takeout, FreeTube,
# NewPipe subscription JSON, and full NewPipe zip exports.
# Each multipart form field is dispatched on its field name.
# Always redirects back to the referer when done (silently no-ops if not
# logged in).
post "/data_control" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  referer = get_referer(env)

  if user
    user = user.as(User)

    # TODO: Find a way to prevent browser timeout on very large imports

    HTTP::FormData.parse(env.request) do |part|
      body = part.body.gets_to_end
      next if body.empty?

      # TODO: Unify into single import based on content-type
      case part.name
      when "import_invidious"
        body = JSON.parse(body)

        if body["subscriptions"]?
          user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
          user.subscriptions.uniq!
          # Validates/normalizes the channel IDs before persisting
          user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

          PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
        end

        if body["watch_history"]?
          user.watched += body["watch_history"].as_a.map { |a| a.as_s }
          user.watched.uniq!
          PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
        end

        if body["preferences"]?
          user.preferences = Preferences.from_json(body["preferences"].to_json)
          PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", user.preferences.to_json, user.email)
        end

        if playlists = body["playlists"]?.try &.as_a?
          playlists.each do |item|
            # "<>" is stripped from titles, "\r" from descriptions
            title = item["title"]?.try &.as_s?.try &.delete("<>")
            description = item["description"]?.try &.as_s?.try &.delete("\r")
            privacy = item["privacy"]?.try &.as_s?.try { |privacy| PlaylistPrivacy.parse? privacy }

            next if !title
            next if !description
            next if !privacy

            playlist = create_playlist(PG_DB, title, privacy, user)
            PG_DB.exec("UPDATE playlists SET description = $1 WHERE id = $2", description, playlist.id)

            item["videos"]?.try &.as_a?.try &.each_with_index do |video_id, idx|
              # idx is 0-based, so the 500-video cap is reached at idx == 499;
              # `idx >= 500` (was `idx > 500`) prevents a 501st insertion.
              raise InfoException.new("Playlist cannot have more than 500 videos") if idx >= 500

              video_id = video_id.try &.as_s?
              next if !video_id

              begin
                video = get_video(video_id, PG_DB)
              rescue ex
                # Skip videos that can't be resolved instead of aborting the import
                next
              end

              playlist_video = PlaylistVideo.new({
                title:          video.title,
                id:             video.id,
                author:         video.author,
                ucid:           video.ucid,
                length_seconds: video.length_seconds,
                published:      video.published,
                plid:           playlist.id,
                live_now:       video.live_now,
                index:          Random::Secure.rand(0_i64..Int64::MAX),
              })

              video_array = playlist_video.to_a
              args = arg_array(video_array)

              PG_DB.exec("INSERT INTO playlist_videos VALUES (#{args})", args: video_array)
              PG_DB.exec("UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3", playlist_video.index, Time.utc, playlist.id)
            end
          end
        end
      when "import_youtube"
        # Takeout can be either OPML (XML) or JSON; sniff the prefix
        if body[0..4] == "<opml"
          subscriptions = XML.parse(body)
          user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
            channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
          end
        else
          subscriptions = JSON.parse(body)
          user.subscriptions += subscriptions.as_a.compact_map do |entry|
            entry["snippet"]["resourceId"]["channelId"].as_s
          end
        end
        user.subscriptions.uniq!
        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_freetube"
        # FreeTube exports contain raw JSON lines; scrape channel IDs by regex
        user.subscriptions += body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/).map do |md|
          md["channel_id"]
        end
        user.subscriptions.uniq!
        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_newpipe_subscriptions"
        body = JSON.parse(body)
        user.subscriptions += body["subscriptions"].as_a.compact_map do |channel|
          if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
            next match["channel"]
          elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
            # Legacy /user/ URLs: resolve the canonical channel ID via YouTube
            response = YT_POOL.client &.get("/user/#{match["user"]}?disable_polymer=1&hl=en&gl=US")
            html = XML.parse_html(response.body)
            ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
            next ucid if ucid
          end

          nil
        end
        user.subscriptions.uniq!
        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_newpipe"
        # Full NewPipe export: a zip containing a SQLite database
        Compress::Zip::Reader.open(IO::Memory.new(body)) do |file|
          file.each_entry do |entry|
            if entry.filename == "newpipe.db"
              # Extract to a tempfile so the sqlite3 driver can open it
              tempfile = File.tempfile(".db")
              File.write(tempfile.path, entry.io.gets_to_end)
              db = DB.open("sqlite3://" + tempfile.path)

              user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
              user.watched.uniq!

              PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)

              user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
              user.subscriptions.uniq!
              user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

              PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)

              db.close
              tempfile.delete
            end
          end
        end
      else nil # Unknown form field: ignore
      end
    end
  end

  env.redirect referer
end
2018-07-18 21:26:02 +02:00
2019-04-22 17:18:17 +02:00
# GET /change_password
# Renders the change-password form with a fresh CSRF token scoped to
# ":change_password". Anonymous visitors are redirected to the referer.
get "/change_password" do |env|
  # Locals below are referenced by the ECR template via `templated`.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":change_password"}, HMAC_KEY, PG_DB)

  templated "change_password"
end
# POST /change_password
# Validates the CSRF token, the current password, and the repeated new
# password fields, then stores a new bcrypt hash for the user.
post "/change_password" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  # We don't store passwords for Google accounts
  if !user.password
    next error_template(400, "Cannot change password for Google accounts")
  end

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  password = env.params.body["password"]?
  if !password
    # NOTE(review): 401 here vs 400 for other validation errors — looks
    # inconsistent, but callers may rely on it; confirm before changing.
    next error_template(401, "Password is a required field")
  end

  # The form submits the new password twice as new_password[0], new_password[1]
  new_passwords = env.params.body.select { |k, v| k.match(/^new_password\[\d+\]$/) }.map { |k, v| v }

  # Require at least two fields and that all of them are identical
  if new_passwords.size <= 1 || new_passwords.uniq.size != 1
    next error_template(400, "New passwords must match")
  end

  new_password = new_passwords.uniq[0]
  if new_password.empty?
    next error_template(401, "Password cannot be empty")
  end

  # 55-byte cap keeps the plaintext within bcrypt's input limit
  if new_password.bytesize > 55
    next error_template(400, "Password cannot be longer than 55 characters")
  end

  # Verify the current password (truncated to the same 55-byte bcrypt window)
  if !Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
    next error_template(401, "Incorrect password")
  end

  new_password = Crypto::Bcrypt::Password.create(new_password, cost: 10)
  PG_DB.exec("UPDATE users SET password = $1 WHERE email = $2", new_password.to_s, user.email)

  env.redirect referer
end
2018-11-08 07:12:14 +01:00
# GET /delete_account
# Renders the account-deletion confirmation page with a CSRF token scoped
# to ":delete_account". Anonymous visitors are redirected to the referer.
get "/delete_account" do |env|
  # Locals below are referenced by the ECR template via `templated`.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":delete_account"}, HMAC_KEY, PG_DB)

  templated "delete_account"
end
# POST /delete_account
# After CSRF validation, removes the user row, all their sessions, and the
# per-user subscriptions materialized view, then expires every cookie.
post "/delete_account" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  # View name is derived from sha256(email) (hex), so the interpolation into
  # DROP MATERIALIZED VIEW cannot carry user-controlled SQL.
  view_name = "subscriptions_#{sha256(user.email)}"
  PG_DB.exec("DELETE FROM users * WHERE email = $1", user.email)
  PG_DB.exec("DELETE FROM session_ids * WHERE email = $1", user.email)
  PG_DB.exec("DROP MATERIALIZED VIEW #{view_name}")

  # Log the browser out by expiring every cookie it sent
  env.request.cookies.each do |cookie|
    cookie.expires = Time.utc(1990, 1, 1)
    env.response.cookies << cookie
  end

  env.redirect referer
end
2018-08-04 22:30:44 +02:00
# GET /clear_watch_history
# Renders the clear-watch-history confirmation page with a CSRF token
# scoped to ":clear_watch_history".
get "/clear_watch_history" do |env|
  # Locals below are referenced by the ECR template via `templated`.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":clear_watch_history"}, HMAC_KEY, PG_DB)

  templated "clear_watch_history"
end
# POST /clear_watch_history
# After CSRF validation, empties the user's watched-video list
# ('{}' is an empty Postgres array literal).
post "/clear_watch_history" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)

  env.redirect referer
end
2019-05-15 19:26:29 +02:00
# GET /authorize_token
# Renders the token-authorization page. Requested scopes come from the
# comma-separated "scopes" query parameter; an optional callback_url and
# expiry are passed through to the template.
get "/authorize_token" do |env|
  # Locals below (locale, user, csrf_token, scopes, callback_url, expire)
  # are referenced by the ECR template via `templated`.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB)

  scopes = env.params.query["scopes"]?.try &.split(",")
  scopes ||= [] of String

  callback_url = env.params.query["callback_url"]?
  if callback_url
    callback_url = URI.parse(callback_url)
  end

  expire = env.params.query["expire"]?.try &.to_i?

  templated "authorize_token"
end
2019-04-18 23:23:50 +02:00
# POST /authorize_token
# After CSRF validation, mints an access token for the requested scopes.
# If a callbackUrl was supplied, the token is appended to its query string
# and the client is redirected there; otherwise the token is shown inline
# on the authorize_token template.
post "/authorize_token" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  # Reuse the value fetched above (was `env.get("user").as(User)`), matching
  # every sibling handler; behavior is identical.
  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  # Scopes arrive as scopes[0], scopes[1], ... form fields
  scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
  callback_url = env.params.body["callbackUrl"]?
  expire = env.params.body["expire"]?.try &.to_i?

  access_token = generate_token(user.email, scopes, expire, HMAC_KEY, PG_DB)

  if callback_url
    access_token = URI.encode_www_form(access_token)
    url = URI.parse(callback_url)

    # Preserve any query string already present on the callback URL
    if url.query
      query = HTTP::Params.parse(url.query.not_nil!)
    else
      query = HTTP::Params.new
    end

    query["token"] = access_token
    url.query = query.to_s

    env.redirect url.to_s
  else
    # Template renders the freshly-minted token; no further CSRF needed
    csrf_token = ""
    env.set "access_token", access_token
    templated "authorize_token"
  end
end
# GET /token_manager
# Lists the user's active session tokens, newest first.
get "/token_manager" do |env|
  # `locale` and `tokens` are referenced by the ECR template via `templated`.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env, "/subscription_manager")

  if !user
    next env.redirect referer
  end

  user = user.as(User)

  tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1 ORDER BY issued DESC", user.email, as: {session: String, issued: Time})

  templated "token_manager"
end
# POST /token_ajax
# Revokes a session token. Supports both redirect-style (HTML form) and
# AJAX-style responses, selected by the "redirect" query parameter
# (defaults to redirect behavior).
post "/token_ajax" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  redirect = env.params.query["redirect"]?
  redirect ||= "true"
  redirect = redirect == "true"

  if !user
    if redirect
      next env.redirect referer
    else
      next error_json(403, "No such user")
    end
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    if redirect
      next error_template(400, ex)
    else
      next error_json(400, ex)
    end
  end

  # Only one action exists today; anything else bounces to the referer
  if env.params.query["action_revoke_token"]?
    action = "action_revoke_token"
  else
    next env.redirect referer
  end

  session = env.params.query["session"]?
  session ||= ""

  case action
  when .starts_with? "action_revoke_token"
    PG_DB.exec("DELETE FROM session_ids * WHERE id = $1 AND email = $2", session, user.email)
  else
    next error_json(400, "Unsupported action #{action}")
  end

  if redirect
    env.redirect referer
  else
    env.response.content_type = "application/json"
    "{}"
  end
end
2018-08-04 22:30:44 +02:00
# Feeds
2020-11-20 02:21:48 +01:00
# GET /feed/playlists — legacy alias; redirects to the playlists overview.
get "/feed/playlists" do |env|
  env.redirect "/view_all_playlists"
end
2018-11-26 17:50:34 +01:00
# GET /feed/top — the Top feed no longer exists; shows a removal notice.
get "/feed/top" do |env|
  # `locale` and `message` are referenced by the "message" ECR template.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  message = translate(locale, "The Top feed has been removed from Invidious.")
  templated "message"
end
# GET /feed/popular
# Shows the Popular feed if the admin enabled it, otherwise a notice.
get "/feed/popular" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  if CONFIG.popular_enabled
    templated "popular"
  else
    # `message` is referenced by the "message" ECR template
    message = translate(locale, "The Popular feed has been disabled by the administrator.")
    templated "message"
  end
end
2018-11-20 18:18:12 +01:00
# GET /feed/trending
# Renders YouTube's trending feed for an optional category ("type") and
# region (both default to YouTube's defaults: "Default" / "US").
get "/feed/trending" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  trending_type = env.params.query["type"]?
  trending_type ||= "Default"

  region = env.params.query["region"]?
  region ||= "US"

  begin
    # `trending` and `plid` are referenced by the "trending" ECR template
    trending, plid = fetch_trending(trending_type, region, locale)
  rescue ex
    next error_template(500, ex)
  end

  templated "trending"
end
2018-08-04 22:30:44 +02:00
# GET /feed/subscriptions
# Renders the logged-in user's subscription feed, then clears their pending
# notifications and bumps `updated` so already-seen videos are not
# re-notified.
get "/feed/subscriptions" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = user.token

  if user.preferences.unseen_only
    env.set "show_watched", true
  end

  # Refresh account
  headers = HTTP::Headers.new
  headers["Cookie"] = env.request.headers["Cookie"]

  # No stored password implies a Google-backed account: re-fetch the user
  # from upstream using the browser's cookies.
  if !user.password
    user, sid = get_user(sid, headers, PG_DB)
  end

  # Page size: query param (clamped), then user preference, then site default
  max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
  max_results ||= user.preferences.max_results
  max_results ||= CONFIG.default_user_preferences.max_results

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)

  # "updated" here is used for delivering new notifications, so if
  # we know a user has looked at their feed e.g. in the past 10 minutes,
  # they've already seen a video posted 20 minutes ago, and don't need
  # to be notified.
  PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.utc,
    user.email)
  user.notifications = [] of String
  env.set "user", user

  templated "subscriptions"
end
2018-03-16 17:40:29 +01:00
2018-11-20 05:06:59 +01:00
# GET /feed/history
# Renders a paginated view of the user's watch history, most recent first.
get "/feed/history" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  referer = get_referer(env)

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  if !user
    next env.redirect referer
  end

  user = user.as(User)

  # Page size: query param (clamped), then user preference, then site default
  max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
  max_results ||= user.preferences.max_results
  max_results ||= CONFIG.default_user_preferences.max_results

  # Only slice when the requested page actually exists; `watched` is stored
  # oldest-first, so reverse before slicing to show newest entries first.
  if user.watched[(page - 1) * max_results]?
    watched = user.watched.reverse[(page - 1) * max_results, max_results]
  end
  watched ||= [] of String

  templated "history"
end
2018-08-04 22:30:44 +02:00
# Serves an Atom feed for a channel, proxying YouTube's own feed and
# rewriting each entry through SearchVideo#to_xml.
get "/feed/channel/:ucid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/atom+xml"

  ucid = env.params.url["ucid"]

  params = HTTP::Params.parse(env.params.query["params"]? || "")

  # Resolve channel metadata, following channel-ID redirects.
  begin
    channel = get_about_info(ucid, locale)
  rescue ex : ChannelRedirect
    next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
  rescue ex
    next error_atom(500, ex)
  end

  response = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{channel.ucid}")
  # parse_html lowercases node names, hence "videoid"/"channelid" in the xpaths below.
  rss = XML.parse_html(response.body)

  videos = rss.xpath_nodes("//feed/entry").map do |entry|
    video_id = entry.xpath_node("videoid").not_nil!.content
    title = entry.xpath_node("title").not_nil!.content

    published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
    updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)

    author = entry.xpath_node("author/name").not_nil!.content
    ucid = entry.xpath_node("channelid").not_nil!.content
    description_html = entry.xpath_node("group/description").not_nil!.to_s
    views = entry.xpath_node("group/community/statistics").not_nil!.["views"].to_i64

    SearchVideo.new({
      title:              title,
      id:                 video_id,
      author:             author,
      ucid:               ucid,
      published:          published,
      views:              views,
      description_html:   description_html,
      length_seconds:     0,
      live_now:           false,
      paid:               false,
      premium:            false,
      premiere_timestamp: nil,
    })
  end

  XML.build(indent: "  ", encoding: "UTF-8") do |xml|
    xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
      "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
      "xml:lang": "en-US") do
      xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
      xml.element("id") { xml.text "yt:channel:#{channel.ucid}" }
      xml.element("yt:channelId") { xml.text channel.ucid }
      xml.element("icon") { xml.text channel.author_thumbnail }
      xml.element("title") { xml.text channel.author }
      xml.element("link", rel: "alternate", href: "#{HOST_URL}/channel/#{channel.ucid}")

      xml.element("author") do
        xml.element("name") { xml.text channel.author }
        xml.element("uri") { xml.text "#{HOST_URL}/channel/#{channel.ucid}" }
      end

      videos.each do |video|
        video.to_xml(channel.auto_generated, params, xml)
      end
    end
  end
end
2018-07-16 18:24:24 +02:00
2018-08-04 22:30:44 +02:00
# Token-authenticated Atom feed of a user's subscriptions (for feed readers).
get "/feed/private" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/atom+xml"

  token = env.params.query["token"]?

  unless token
    env.response.status_code = 403
    next
  end

  user = PG_DB.query_one?("SELECT * FROM users WHERE token = $1", token.strip, as: User)
  unless user
    env.response.status_code = 403
    next
  end

  max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
  max_results ||= user.preferences.max_results
  max_results ||= CONFIG.default_user_preferences.max_results

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  params = HTTP::Params.parse(env.params.query["params"]? || "")

  videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)

  XML.build(indent: "  ", encoding: "UTF-8") do |xml|
    xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
      "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
      "xml:lang": "en-US") do
      xml.element("link", "type": "text/html", rel: "alternate", href: "#{HOST_URL}/feed/subscriptions")
      xml.element("link", "type": "application/atom+xml", rel: "self",
        href: "#{HOST_URL}#{env.request.resource}")
      xml.element("title") { xml.text translate(locale, "Invidious Private Feed for `x`", user.email) }

      # Unseen notifications come first, then the rest of the feed.
      (notifications + videos).each do |video|
        video.to_xml(locale, params, xml)
      end
    end
  end
end
2018-03-25 05:38:35 +02:00
2018-09-18 01:13:24 +02:00
# Atom feed for a playlist. Invidious-native playlists ("IV…") are built from
# the local database; YouTube playlists are proxied with their URLs rewritten
# to point at this instance.
get "/feed/playlist/:plid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/atom+xml"

  plid = env.params.url["plid"]

  params = HTTP::Params.parse(env.params.query["params"]? || "")
  path = env.request.path

  if plid.starts_with? "IV"
    if playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
      videos = get_playlist_videos(PG_DB, playlist, offset: 0, locale: locale)

      next XML.build(indent: "  ", encoding: "UTF-8") do |xml|
        xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
          "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
          "xml:lang": "en-US") do
          xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
          xml.element("id") { xml.text "iv:playlist:#{plid}" }
          xml.element("iv:playlistId") { xml.text plid }
          xml.element("title") { xml.text playlist.title }
          xml.element("link", rel: "alternate", href: "#{HOST_URL}/playlist?list=#{plid}")

          xml.element("author") do
            xml.element("name") { xml.text playlist.author }
          end

          videos.each do |video|
            video.to_xml(false, xml)
          end
        end
      end
    else
      env.response.status_code = 404
      next
    end
  end

  response = YT_POOL.client &.get("/feeds/videos.xml?playlist_id=#{plid}")
  document = XML.parse(response.body)

  # Rewrite every href/url attribute to point at this instance, carrying the
  # extra player params through on /watch links.
  document.xpath_nodes(%q(//*[@href]|//*[@url])).each do |node|
    node.attributes.each do |attribute|
      case attribute.name
      when "url", "href"
        request_target = URI.parse(node[attribute.name]).request_target
        query_string_opt = request_target.starts_with?("/watch?v=") ? "&#{params}" : ""
        node[attribute.name] = "#{HOST_URL}#{request_target}#{query_string_opt}"
      else nil # Skip
      end
    end
  end

  document = document.to_xml(options: XML::SaveOptions::NO_DECL)

  # <uri> elements carry URLs as text content, so patch those separately.
  document.scan(/<uri>(?<url>[^<]+)<\/uri>/).each do |match|
    content = "#{HOST_URL}#{URI.parse(match["url"]).request_target}"
    document = document.gsub(match[0], "<uri>#{content}</uri>")
  end

  document
end
2019-03-29 21:50:18 +01:00
# Compatibility shim for YouTube's native feed URL shape; forwards to the
# matching Invidious feed route.
get "/feeds/videos.xml" do |env|
  if ucid = env.params.query["channel_id"]?
    env.redirect "/feed/channel/#{ucid}"
  elsif user = env.params.query["user"]?
    env.redirect "/feed/channel/#{user}"
  elsif plid = env.params.query["playlist_id"]?
    env.redirect "/feed/playlist/#{plid}"
  end
end
2019-03-04 17:46:58 +01:00
# Support push notifications via PubSubHubbub
2019-03-04 02:18:23 +01:00
2019-03-04 03:40:24 +01:00
# PubSubHubbub subscription verification endpoint. The hub calls this with
# hub.mode/hub.topic/hub.challenge; we validate our HMAC-signed token and echo
# the challenge back to confirm the (un)subscription.
get "/feed/webhook/:token" do |env|
  verify_token = env.params.url["token"]

  mode = env.params.query["hub.mode"]?
  topic = env.params.query["hub.topic"]?
  challenge = env.params.query["hub.challenge"]?

  if !mode || !topic || !challenge
    env.response.status_code = 400
    next
  else
    mode = mode.not_nil!
    topic = topic.not_nil!
    challenge = challenge.not_nil!
  end

  # Token formats: "v1:<time>:<nonce>:<sig>" or "v2:<time>:<sig>"; the signed
  # payload differs per version.
  case verify_token
  when .starts_with? "v1"
    _, time, nonce, signature = verify_token.split(":")
    data = "#{time}:#{nonce}"
  when .starts_with? "v2"
    time, signature = verify_token.split(":")
    data = "#{time}"
  else
    env.response.status_code = 400
    next
  end

  # The hub will sometimes check if we're still subscribed after delivery errors,
  # so we reply with a 200 as long as the request hasn't expired (5 days).
  if Time.utc.to_unix - time.to_i > 432000
    env.response.status_code = 400
    next
  end

  if OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, data) != signature
    env.response.status_code = 400
    next
  end

  if ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]?
    PG_DB.exec("UPDATE channels SET subscribed = $1 WHERE id = $2", Time.utc, ucid)
  elsif plid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["playlist_id"]?
    # BUGFIX: this branch previously passed `ucid` (nil here) instead of `plid`,
    # so playlist subscription confirmations were never recorded.
    PG_DB.exec("UPDATE playlists SET subscribed = $1 WHERE id = $2", Time.utc, plid)
  else
    env.response.status_code = 400
    next
  end

  env.response.status_code = 200
  challenge
end
2019-03-04 03:40:24 +01:00
# PubSubHubbub content delivery endpoint: verifies the hub's HMAC signature,
# then asynchronously upserts each pushed video and fans out notifications.
post "/feed/webhook/:token" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  token = env.params.url["token"]
  body = env.request.body.not_nil!.gets_to_end
  signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")

  if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
    LOGGER.error("/feed/webhook/#{token} : Invalid signature")
    # Still 200: a 4xx/5xx would make the hub retry or drop the subscription.
    env.response.status_code = 200
    next
  end

  spawn do
    rss = XML.parse_html(body)
    rss.xpath_nodes("//feed/entry").each do |entry|
      id = entry.xpath_node("videoid").not_nil!.content
      author = entry.xpath_node("author/name").not_nil!.content
      published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
      updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)

      video = get_video(id, PG_DB, force_refresh: true)

      # Deliver notifications to `/api/v1/auth/notifications`
      payload = {
        "topic"     => video.ucid,
        "videoId"   => video.id,
        "published" => published.to_unix,
      }.to_json
      PG_DB.exec("NOTIFY notifications, E'#{payload}'")

      video = ChannelVideo.new({
        id:                 id,
        title:              video.title,
        published:          published,
        updated:            updated,
        ucid:               video.ucid,
        author:             author,
        length_seconds:     video.length_seconds,
        live_now:           video.live_now,
        premiere_timestamp: video.premiere_timestamp,
        views:              video.views,
      })

      # xmax = 0 distinguishes a fresh INSERT from a conflict-update.
      was_insert = PG_DB.query_one("INSERT INTO channel_videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
        ON CONFLICT (id) DO UPDATE SET title = $2, published = $3,
        updated = $4, ucid = $5, author = $6, length_seconds = $7,
        live_now = $8, premiere_timestamp = $9, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)

      PG_DB.exec("UPDATE users SET notifications = array_append(notifications, $1),
        feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid) if was_insert
    end
  end

  env.response.status_code = 200
  next
end
2018-08-04 22:30:44 +02:00
# Channels
2019-04-28 18:47:16 +02:00
# Redirects a channel's "/live" page to the watch page of its current
# livestream (or back to the channel if none is live).
{"/channel/:ucid/live", "/user/:user/live", "/c/:user/live"}.each do |route|
  get route do |env|
    locale = LOCALES[env.get("preferences").as(Preferences).locale]?

    # Appears to be a bug in routing, having several routes configured
    # as `/a/:a`, `/b/:a`, `/c/:a` results in 404
    value = env.request.resource.split("/")[2]

    # Try every identifier flavor; keep whichever page actually resolves.
    body = ""
    {"channel", "user", "c"}.each do |type|
      response = YT_POOL.client &.get("/#{type}/#{value}/live?disable_polymer=1")
      body = response.body if response.status_code == 200
    end

    video_id = body.match(/'VIDEO_ID': "(?<id>[a-zA-Z0-9_-]{11})"/).try &.["id"]?
    if video_id
      # Preserve the caller's query string on the redirect.
      params = [] of String
      env.params.query.each do |k, v|
        params << "#{k}=#{v}"
      end
      params = params.join("&")

      url = "/watch?v=#{video_id}"
      url += "&#{params}" unless params.empty?

      env.redirect url
    else
      env.redirect "/channel/#{value}"
    end
  end
end
2018-09-04 16:13:58 +02:00
# YouTube appears to let users set a "brand" URL that
# is different from their username, so we convert that here
# Resolves a channel "brand" URL (/c/<name>) to its canonical UCID by reading
# the canonical link from the upstream page, then redirects there.
get "/c/:user" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.params.url["user"]

  response = YT_POOL.client &.get("/c/#{user}")
  html = XML.parse_html(response.body)

  ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
  next env.redirect "/" if !ucid

  env.redirect "/channel/#{ucid}"
end
2019-01-24 06:12:48 +01:00
# Legacy endpoint for /user/:username
# Legacy alias: /profile?user=<name> → /user/<name> (or home when absent).
get "/profile" do |env|
  if user = env.params.query["user"]?
    env.redirect "/user/#{user}"
  else
    env.redirect "/"
  end
end
2019-06-08 18:13:00 +02:00
# Unwraps YouTube attribution links: redirect to the URL carried in `u`,
# falling back to the home page.
get "/attribution_link" do |env|
  query = env.params.query["u"]?
  url = query ? URI.parse(query).request_target : "/"
  env.redirect url
end
2019-05-26 20:49:35 +02:00
# Page used by YouTube to provide captioning widget, since we
# don't support it we redirect to '/'
# Captioning-widget page we don't support; send the visitor home.
get "/timedtext_video" do |env|
  env.redirect "/"
end
2018-08-04 22:30:44 +02:00
# Legacy username URLs map straight onto the channel route.
get "/user/:user" do |env|
  env.redirect "/channel/#{env.params.url["user"]}"
end
2018-09-06 06:12:11 +02:00
# Legacy username videos tab → channel videos tab.
get "/user/:user/videos" do |env|
  env.redirect "/channel/#{env.params.url["user"]}/videos"
end
2019-07-09 16:31:04 +02:00
# Legacy username about tab → channel page.
get "/user/:user/about" do |env|
  env.redirect "/channel/#{env.params.url["user"]}"
end
2019-08-17 03:06:21 +02:00
# Channel about tab is not rendered separately; show the main channel page.
get "/channel/:ucid/about" do |env|
  env.redirect "/channel/#{env.params.url["ucid"]}"
end
2018-08-04 22:30:44 +02:00
# Main channel page. Auto-generated (topic) channels list playlists; regular
# channels list uploaded videos. Locals feed the "channel" template.
get "/channel/:ucid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  if user
    user = user.as(User)
    subscriptions = user.subscriptions
  end
  subscriptions ||= [] of String

  ucid = env.params.url["ucid"]

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  continuation = env.params.query["continuation"]?

  sort_by = env.params.query["sort_by"]?.try &.downcase

  begin
    channel = get_about_info(ucid, locale)
  rescue ex : ChannelRedirect
    next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
  rescue ex
    next error_template(500, ex)
  end

  if channel.auto_generated
    sort_options = {"last", "oldest", "newest"}
    sort_by ||= "last"

    items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)

    # De-duplicate by title (playlists) or author (channels).
    items.uniq! do |item|
      if item.responds_to?(:title)
        item.title
      elsif item.responds_to?(:author)
        item.author
      end
    end
    items = items.select { |item| item.is_a?(SearchPlaylist) }.map { |item| item.as(SearchPlaylist) }
    items.each { |item| item.author = "" }
  else
    sort_options = {"newest", "oldest", "popular"}
    sort_by ||= "newest"

    count, items = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
    items.reject! &.paid

    env.set "search", "channel:#{channel.ucid}"
  end

  templated "channel"
end
# The videos tab is the default channel view; redirect there, preserving the
# query string.
get "/channel/:ucid/videos" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  ucid = env.params.url["ucid"]

  params = env.request.query
  params = (!params || params.empty?) ? "" : "?#{params}"

  env.redirect "/channel/#{ucid}#{params}"
end
2018-07-28 15:24:53 +02:00
2019-03-03 17:54:23 +01:00
# Channel playlists tab. Auto-generated channels already show playlists on
# their main page, so those are redirected there instead.
get "/channel/:ucid/playlists" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  if user
    user = user.as(User)
    subscriptions = user.subscriptions
  end
  subscriptions ||= [] of String

  ucid = env.params.url["ucid"]

  continuation = env.params.query["continuation"]?

  sort_by = env.params.query["sort_by"]?.try &.downcase
  sort_by ||= "last"

  begin
    channel = get_about_info(ucid, locale)
  rescue ex : ChannelRedirect
    next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
  rescue ex
    next error_template(500, ex)
  end

  next env.redirect "/channel/#{channel.ucid}" if channel.auto_generated

  items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
  items = items.select(&.is_a?(SearchPlaylist)).map(&.as(SearchPlaylist))
  items.each { |item| item.author = "" }

  env.set "search", "channel:#{channel.ucid}"

  templated "playlists"
end
2019-07-09 16:31:04 +02:00
# Channel community tab; fetches community posts as JSON for the template.
get "/channel/:ucid/community" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  if user
    user = user.as(User)
    subscriptions = user.subscriptions
  end
  subscriptions ||= [] of String

  ucid = env.params.url["ucid"]

  # NOTE(review): when the query param is absent this compares a Bool
  # preference against "true", which is always false — confirm intended.
  thin_mode = env.params.query["thin_mode"]? || env.get("preferences").as(Preferences).thin_mode
  thin_mode = thin_mode == "true"

  continuation = env.params.query["continuation"]?
  # sort_by = env.params.query["sort_by"]?.try &.downcase

  begin
    channel = get_about_info(ucid, locale)
  rescue ex : ChannelRedirect
    next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
  rescue ex
    next error_template(500, ex)
  end

  if !channel.tabs.includes? "community"
    next env.redirect "/channel/#{channel.ucid}"
  end

  begin
    items = JSON.parse(fetch_channel_community(ucid, continuation, locale, "json", thin_mode))
  rescue ex : InfoException
    env.response.status_code = 500
    error_message = ex.message
  rescue ex
    next error_template(500, ex)
  end

  env.set "search", "channel:#{channel.ucid}"

  templated "community"
end
2018-08-04 22:30:44 +02:00
# API Endpoints
2018-07-28 15:24:53 +02:00
2019-03-02 02:25:16 +01:00
# Instance statistics endpoint; serves the cached stats gathered by the
# StatisticsRefreshJob, or a 400 when the instance has statistics disabled.
get "/api/v1/stats" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  env.response.content_type = "application/json"

  next error_json(400, "Statistics are not enabled.") unless CONFIG.statistics_enabled

  Invidious::Jobs::StatisticsRefreshJob::STATISTICS.to_json
end
2019-03-02 02:25:16 +01:00
2019-05-21 03:22:01 +02:00
# YouTube provides "storyboards", which are sprites containing x * y
2019-05-02 21:20:19 +02:00
# preview thumbnails for individual scenes in a video.
# See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
# Storyboard endpoint: without width/height it lists available storyboards as
# JSON; with either, it emits a WebVTT file whose cues reference sprite tiles.
get "/api/v1/storyboards/:id" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  id = env.params.url["id"]
  region = env.params.query["region"]?

  begin
    video = get_video(id, PG_DB, region: region)
  rescue ex : VideoRedirect
    env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
    next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
  rescue ex
    env.response.status_code = 500
    next
  end

  storyboards = video.storyboards
  width = env.params.query["width"]?
  height = env.params.query["height"]?

  if !width && !height
    response = JSON.build do |json|
      json.object do
        json.field "storyboards" do
          generate_storyboards(json, id, storyboards)
        end
      end
    end

    next response
  end

  env.response.content_type = "text/vtt"

  storyboard = storyboards.select { |storyboard| width == "#{storyboard[:width]}" || height == "#{storyboard[:height]}" }

  if storyboard.empty?
    env.response.status_code = 404
    next
  else
    storyboard = storyboard[0]
  end

  String.build do |str|
    str << <<-END_VTT
    WEBVTT

    END_VTT

    start_time = 0.milliseconds
    end_time = storyboard[:interval].milliseconds

    storyboard[:storyboard_count].times do |i|
      # Rewrite the sprite URL through this instance's /sb/ proxy.
      url = storyboard[:url]
      authority = /(i\d?).ytimg.com/.match(url).not_nil![1]?
      url = url.gsub("$M", i).gsub(%r(https://i\d?.ytimg.com/sb/), "")
      url = "#{HOST_URL}/sb/#{authority}/#{url}"

      # One cue per tile, row-major across the sprite sheet.
      storyboard[:storyboard_height].times do |j|
        storyboard[:storyboard_width].times do |k|
          str << <<-END_CUE
          #{start_time}.000 --> #{end_time}.000
          #{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width] - 2},#{storyboard[:height]}

          END_CUE

          start_time += storyboard[:interval].milliseconds
          end_time += storyboard[:interval].milliseconds
        end
      end
    end
  end
end
2018-08-04 22:30:44 +02:00
# Caption endpoint: with no label/lang it lists available tracks as JSON;
# otherwise it serves the selected track as WebVTT, reconstructing cues for
# auto-generated tracks.
get "/api/v1/captions/:id" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  id = env.params.url["id"]
  region = env.params.query["region"]?

  # See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
  # It is possible to use `/api/timedtext?type=list&v=#{id}` and
  # `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
  # but this does not provide links for auto-generated captions.
  #
  # In future this should be investigated as an alternative, since it does not require
  # getting video info.

  begin
    video = get_video(id, PG_DB, region: region)
  rescue ex : VideoRedirect
    env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
    next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
  rescue ex
    env.response.status_code = 500
    next
  end

  captions = video.captions

  label = env.params.query["label"]?
  lang = env.params.query["lang"]?
  tlang = env.params.query["tlang"]?

  # No selector given: return the list of available caption tracks.
  if !label && !lang
    response = JSON.build do |json|
      json.object do
        json.field "captions" do
          json.array do
            captions.each do |caption|
              json.object do
                json.field "label", caption.name
                json.field "languageCode", caption.languageCode
                json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name)}"
              end
            end
          end
        end
      end
    end

    next response
  end

  env.response.content_type = "text/vtt; charset=UTF-8"

  # Select by language code when given, otherwise by human-readable label.
  if lang
    caption = captions.select { |caption| caption.languageCode == lang }
  else
    caption = captions.select { |caption| caption.name == label }
  end

  if caption.empty?
    env.response.status_code = 404
    next
  else
    caption = caption[0]
  end

  url = URI.parse("#{caption.baseUrl}&tlang=#{tlang}").request_target

  # Auto-generated captions often have cues that aren't aligned properly with the video,
  # as well as some other markup that makes it cumbersome, so we try to fix that here
  if caption.name.includes? "auto-generated"
    caption_xml = YT_POOL.client &.get(url).body
    caption_xml = XML.parse(caption_xml)

    webvtt = String.build do |str|
      str << <<-END_VTT
      WEBVTT
      Kind: captions
      Language: #{tlang || caption.languageCode}


      END_VTT

      caption_nodes = caption_xml.xpath_nodes("//transcript/text")
      caption_nodes.each_with_index do |node, i|
        start_time = node["start"].to_f.seconds
        duration = node["dur"]?.try &.to_f.seconds
        duration ||= start_time

        # A cue ends where the next one starts, so overlapping upstream
        # durations don't produce overlapping cues.
        if caption_nodes.size > i + 1
          end_time = caption_nodes[i + 1]["start"].to_f.seconds
        else
          end_time = start_time + duration
        end

        start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
        end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"

        # Strip color markup; convert "Speaker : text" into a WebVTT voice span.
        text = HTML.unescape(node.content)
        text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
        text = text.gsub(/<\/font>/, "")
        if md = text.match(/(?<name>.*) : (?<text>.*)/)
          text = "<v #{md["name"]}>#{md["text"]}</v>"
        end

        str << <<-END_CUE
        #{start_time} --> #{end_time}
        #{text}


        END_CUE
      end
    end
  else
    # Non-auto tracks can be fetched as VTT directly.
    webvtt = YT_POOL.client &.get("#{url}&format=vtt").body
  end

  if title = env.params.query["title"]?
    # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
    env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.encode_www_form(title)}\"; filename*=UTF-8''#{URI.encode_www_form(title)}"
  end

  webvtt
end
2018-08-04 22:30:44 +02:00
# Returns comments for a video, either from YouTube (default) or from the
# top reddit thread discussing it (`?source=reddit`).
# Query params: source, format (json|html), thin_mode, action, continuation,
# sort_by, region.
get "/api/v1/comments/:id" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  region = env.params.query["region"]?

  env.response.content_type = "application/json"

  id = env.params.url["id"]

  source = env.params.query["source"]?
  source ||= "youtube"

  # thin_mode is only truthy for the exact string "true"
  thin_mode = env.params.query["thin_mode"]?
  thin_mode = thin_mode == "true"

  format = env.params.query["format"]?
  format ||= "json"

  action = env.params.query["action"]?
  action ||= "action_get_comments"

  continuation = env.params.query["continuation"]?
  sort_by = env.params.query["sort_by"]?.try &.downcase

  if source == "youtube"
    sort_by ||= "top"
    begin
      comments = fetch_youtube_comments(id, PG_DB, continuation, format, locale, thin_mode, region, sort_by: sort_by, action: action)
    rescue ex
      next error_json(500, ex)
    end

    # fetch_youtube_comments already returns a serialized body in the
    # requested format, so it is sent through as-is.
    next comments
  elsif source == "reddit"
    sort_by ||= "confidence"
    begin
      comments, reddit_thread = fetch_reddit_comments(id, sort_by: sort_by)
      content_html = template_reddit_comments(comments, locale)

      content_html = fill_links(content_html, "https", "www.reddit.com")
      content_html = replace_links(content_html)
    rescue ex
      # Best-effort: any failure fetching/templating reddit comments is
      # treated as "no thread found" and surfaces as a 404 below.
      comments = nil
      reddit_thread = nil
      content_html = ""
    end

    if !reddit_thread || !comments
      env.response.status_code = 404
      next
    end

    if format == "json"
      # Re-parse so the comments can be embedded into the thread object.
      reddit_thread = JSON.parse(reddit_thread.to_json).as_h
      reddit_thread["comments"] = JSON.parse(comments.to_json)

      next reddit_thread.to_json
    else
      response = {
        "title"       => reddit_thread.title,
        "permalink"   => reddit_thread.permalink,
        "contentHtml" => content_html,
      }

      next response.to_json
    end
  end
end
2018-03-31 03:52:10 +02:00
2018-09-18 03:08:26 +02:00
# Former video-analytics endpoint. YouTube removed the underlying public
# data, so this always answers 410 Gone.
# (The unused `locale` lookup was removed — the error string is not localized.)
get "/api/v1/insights/:id" do |env|
  next error_json(410, "YouTube has removed publicly available analytics.")
end
2018-09-18 03:08:26 +02:00
2019-04-01 05:07:06 +02:00
# Serves a video's annotation XML, from the archive.org annotation dump
# (default, with optional DB caching) or live from YouTube (`?source=youtube`).
# Responses are served with an ETag so clients can revalidate cheaply.
get "/api/v1/annotations/:id" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "text/xml"

  id = env.params.url["id"]
  source = env.params.query["source"]?
  source ||= "archive"

  # NOTE(review): unanchored — any string *containing* 11 valid chars passes.
  # Kept as-is to avoid rejecting ids callers currently send; consider \A…\z.
  if !id.match(/[a-zA-Z0-9_-]{11}/)
    env.response.status_code = 400
    next
  end

  annotations = ""

  case source
  when "archive"
    if CONFIG.cache_annotations && (cached_annotation = PG_DB.query_one?("SELECT * FROM annotations WHERE id = $1", id, as: Annotation))
      annotations = cached_annotation.annotations
    else
      # Archive items are sharded into tarballs by the first character of the id.
      index = CHARS_SAFE.index(id[0]).not_nil!.to_s.rjust(2, '0')

      # IA doesn't handle leading hyphens,
      # so we use https://archive.org/details/youtubeannotations_64
      if index == "62"
        index = "64"
        id = id.sub(/^-/, 'A')
      end

      file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")

      location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))

      if !location.headers["Location"]?
        env.response.status_code = location.status_code
        # FIX: previously fell through and raised KeyError on the missing
        # "Location" header below; stop handling the request here instead.
        next
      end

      response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))

      if response.body.empty?
        env.response.status_code = 404
        next
      end

      if response.status_code != 200
        env.response.status_code = response.status_code
        next
      end

      annotations = response.body
      cache_annotation(PG_DB, id, annotations)
    end
  else # "youtube"
    response = YT_POOL.client &.get("/annotations_invideo?video_id=#{id}")

    if response.status_code != 200
      env.response.status_code = response.status_code
      next
    end

    annotations = response.body
  end

  # Cheap conditional-GET support: ETag is a truncated SHA-256 of the body.
  etag = sha256(annotations)[0, 16]
  if env.request.headers["If-None-Match"]?.try &.== etag
    env.response.status_code = 304
  else
    env.response.headers["ETag"] = etag
    annotations
  end
end
2018-08-04 22:30:44 +02:00
# Returns full metadata for a single video as JSON.
# On a video redirect (e.g. region-blocked duplicate) answers 302 with a
# Location header pointing at the canonical id.
get "/api/v1/videos/:id" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  id = env.params.url["id"]
  region = env.params.query["region"]?

  begin
    video = get_video(id, PG_DB, region: region)
  rescue ex : VideoRedirect
    # Rewrite the current request path with the redirected video id.
    env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
    next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
  rescue ex
    next error_json(500, ex)
  end

  video.to_json(locale)
end
2018-07-30 19:34:57 +02:00
2018-08-04 22:30:44 +02:00
# Returns YouTube's trending feed as a JSON array of videos.
# Query params: region (country code), type (trending category).
get "/api/v1/trending" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  region = env.params.query["region"]?
  trending_type = env.params.query["type"]?

  begin
    # plid (the trending playlist id) is unused by this endpoint.
    trending, plid = fetch_trending(trending_type, region, locale)
  rescue ex
    next error_json(500, ex)
  end

  videos = JSON.build do |json|
    json.array do
      trending.each do |video|
        video.to_json(locale, json)
      end
    end
  end

  videos
end
2018-08-04 22:30:44 +02:00
2018-11-26 01:13:11 +01:00
# Returns this instance's locally computed "popular" feed as a JSON array.
# Answers 400 when the administrator has disabled the feature.
get "/api/v1/popular" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  unless CONFIG.popular_enabled
    env.response.status_code = 400
    next {"error" => "Administrator has disabled this endpoint."}.to_json
  end

  JSON.build do |json|
    json.array do
      popular_videos.each { |video| video.to_json(locale, json) }
    end
  end
end
2018-11-26 01:13:11 +01:00
2018-08-04 22:30:44 +02:00
# Former "Top" feed endpoint; the feature was removed upstream, so this
# always answers 400 with a static error body.
# (The unused `locale` lookup was removed — the error string is not localized.)
get "/api/v1/top" do |env|
  env.response.content_type = "application/json"
  env.response.status_code = 400
  {"error" => "The Top feed has been removed from Invidious."}.to_json
end
2018-07-30 19:34:57 +02:00
2018-08-04 22:30:44 +02:00
# Returns a channel's "about" metadata plus its first page of latest videos
# and related channels, hand-built as one JSON object.
get "/api/v1/channels/:ucid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  ucid = env.params.url["ucid"]
  sort_by = env.params.query["sort_by"]?.try &.downcase
  sort_by ||= "newest"

  begin
    channel = get_about_info(ucid, locale)
  rescue ex : ChannelRedirect
    # Channel moved: point the client at the canonical channel id.
    env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
    next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
  rescue ex
    next error_json(500, ex)
  end

  page = 1
  if channel.auto_generated
    # Auto-generated (topic/system) channels have no regular uploads tab.
    videos = [] of SearchVideo
    count = 0
  else
    begin
      count, videos = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
    rescue ex
      next error_json(500, ex)
    end
  end

  JSON.build do |json|
    # TODO: Refactor into `to_json` for InvidiousChannel
    json.object do
      json.field "author", channel.author
      json.field "authorId", channel.ucid
      json.field "authorUrl", channel.author_url

      json.field "authorBanners" do
        json.array do
          if channel.banner
            # Derive fixed banner sizes by rewriting the "=w1060-" size
            # component of the upstream image URL.
            qualities = {
              {width: 2560, height: 424},
              {width: 2120, height: 351},
              {width: 1060, height: 175},
            }
            qualities.each do |quality|
              json.object do
                json.field "url", channel.banner.not_nil!.gsub("=w1060-", "=w#{quality[:width]}-")
                json.field "width", quality[:width]
                json.field "height", quality[:height]
              end
            end

            # Base URL (size suffix stripped) as a 512x288 fallback entry.
            json.object do
              json.field "url", channel.banner.not_nil!.split("=w1060-")[0]
              json.field "width", 512
              json.field "height", 288
            end
          end
        end
      end

      json.field "authorThumbnails" do
        json.array do
          qualities = {32, 48, 76, 100, 176, 512}

          qualities.each do |quality|
            json.object do
              # Avatar URLs carry an "=sNNN" size component we can rewrite.
              json.field "url", channel.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
              json.field "width", quality
              json.field "height", quality
            end
          end
        end
      end

      json.field "subCount", channel.sub_count
      json.field "totalViews", channel.total_views
      json.field "joined", channel.joined.to_unix
      json.field "paid", channel.paid

      json.field "autoGenerated", channel.auto_generated
      json.field "isFamilyFriendly", channel.is_family_friendly
      json.field "description", html_to_content(channel.description_html)
      json.field "descriptionHtml", channel.description_html

      json.field "allowedRegions", channel.allowed_regions

      json.field "latestVideos" do
        json.array do
          videos.each do |video|
            video.to_json(locale, json)
          end
        end
      end

      json.field "relatedChannels" do
        json.array do
          channel.related_channels.each do |related_channel|
            json.object do
              json.field "author", related_channel.author
              json.field "authorId", related_channel.ucid
              json.field "authorUrl", related_channel.author_url

              json.field "authorThumbnails" do
                json.array do
                  qualities = {32, 48, 76, 100, 176, 512}

                  qualities.each do |quality|
                    json.object do
                      # NOTE(review): pattern here is /=\d+/ (no "s"), unlike
                      # the channel's own thumbnails above — confirm intended.
                      json.field "url", related_channel.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
                      json.field "width", quality
                      json.field "height", quality
                    end
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end
2018-07-16 15:18:59 +02:00
2019-04-28 18:47:16 +02:00
# Paginated channel uploads as a JSON array. Registered under both path
# orders for backwards compatibility.
{"/api/v1/channels/:ucid/videos", "/api/v1/channels/videos/:ucid"}.each do |route|
  get route do |env|
    locale = LOCALES[env.get("preferences").as(Preferences).locale]?

    env.response.content_type = "application/json"

    ucid = env.params.url["ucid"]
    page = env.params.query["page"]?.try &.to_i?
    page ||= 1

    # "sort" takes precedence over the legacy "sort_by" parameter.
    sort_by = env.params.query["sort"]?.try &.downcase
    sort_by ||= env.params.query["sort_by"]?.try &.downcase
    sort_by ||= "newest"

    begin
      channel = get_about_info(ucid, locale)
    rescue ex : ChannelRedirect
      env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
      next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
    rescue ex
      next error_json(500, ex)
    end

    begin
      # count is unused here; only the video page itself is returned.
      count, videos = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
    rescue ex
      next error_json(500, ex)
    end

    JSON.build do |json|
      json.array do
        videos.each do |video|
          video.to_json(locale, json)
        end
      end
    end
  end
end
2018-07-16 15:18:59 +02:00
2019-04-28 18:47:16 +02:00
# Most recent uploads of a channel as a JSON array (no pagination).
# Registered under both path orders for backwards compatibility.
{"/api/v1/channels/:ucid/latest", "/api/v1/channels/latest/:ucid"}.each do |route|
  get route do |env|
    locale = LOCALES[env.get("preferences").as(Preferences).locale]?

    env.response.content_type = "application/json"

    ucid = env.params.url["ucid"]

    begin
      videos = get_latest_videos(ucid)
    rescue ex
      next error_json(500, ex)
    end

    JSON.build do |json|
      json.array do
        videos.each do |video|
          video.to_json(locale, json)
        end
      end
    end
  end
end
2019-02-20 00:00:06 +01:00
2019-04-28 18:47:16 +02:00
# Lists a channel's playlists with a continuation token for paging.
# Registered under both path orders for backwards compatibility.
{"/api/v1/channels/:ucid/playlists", "/api/v1/channels/playlists/:ucid"}.each do |route|
  get route do |env|
    locale = LOCALES[env.get("preferences").as(Preferences).locale]?

    env.response.content_type = "application/json"

    ucid = env.params.url["ucid"]
    continuation = env.params.query["continuation"]?
    # "sort" takes precedence over the legacy "sort_by" parameter.
    sort_by = env.params.query["sort"]?.try &.downcase ||
              env.params.query["sort_by"]?.try &.downcase ||
              "last"

    begin
      channel = get_about_info(ucid, locale)
    rescue ex : ChannelRedirect
      env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
      next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
    rescue ex
      next error_json(500, ex)
    end

    items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)

    JSON.build do |json|
      json.object do
        json.field "playlists" do
          json.array do
            items.each do |item|
              # Non-playlist results (if any) are silently skipped.
              item.to_json(locale, json) if item.is_a?(SearchPlaylist)
            end
          end
        end

        json.field "continuation", continuation
      end
    end
  end
end
2019-02-20 00:05:27 +01:00
2019-07-03 01:53:19 +02:00
# Returns a channel's community-tab posts. Registered under both path orders
# for backwards compatibility.
{"/api/v1/channels/:ucid/comments", "/api/v1/channels/comments/:ucid"}.each do |route|
  get route do |env|
    locale = LOCALES[env.get("preferences").as(Preferences).locale]?

    env.response.content_type = "application/json"

    ucid = env.params.url["ucid"]

    thin_mode = env.params.query["thin_mode"]?
    thin_mode = thin_mode == "true"

    format = env.params.query["format"]?
    format ||= "json"
    continuation = env.params.query["continuation"]?
    # sort_by = env.params.query["sort_by"]?.try &.downcase

    begin
      # fetch_channel_community returns the serialized response body directly.
      fetch_channel_community(ucid, continuation, locale, format, thin_mode)
    rescue ex
      next error_json(500, ex)
    end
  end
end
2018-09-22 17:49:42 +02:00
# Searches within a single channel's content; returns a JSON array of results.
# Query params: q (search terms), page.
get "/api/v1/channels/search/:ucid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  ucid = env.params.url["ucid"]

  query = env.params.query["q"]?
  query ||= ""

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  # The result count is not exposed by this endpoint; discard it.
  _, search_results = channel_search(query, page, ucid)

  JSON.build do |json|
    json.array do
      search_results.each do |item|
        item.to_json(locale, json)
      end
    end
  end
end
2018-09-22 17:49:42 +02:00
2018-08-04 22:30:44 +02:00
# Site-wide search returning a JSON array of mixed result items
# (videos/channels/playlists). Filter params are packed into YouTube's
# search-filter protobuf by produce_search_params.
get "/api/v1/search" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  region = env.params.query["region"]?

  env.response.content_type = "application/json"

  query = env.params.query["q"]?
  query ||= ""

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  sort_by = env.params.query["sort_by"]?.try &.downcase
  sort_by ||= "relevance"

  date = env.params.query["date"]?.try &.downcase
  date ||= ""

  duration = env.params.query["duration"]?.try &.downcase
  duration ||= ""

  features = env.params.query["features"]?.try &.split(",").map { |feature| feature.downcase }
  features ||= [] of String

  content_type = env.params.query["type"]?.try &.downcase
  content_type ||= "video"

  begin
    # Raises on invalid filter combinations -> reported as a client error.
    search_params = produce_search_params(page, sort_by, date, content_type, duration, features)
  rescue ex
    next error_json(400, ex)
  end

  # count is unused; only the items are serialized.
  count, search_results = search(query, search_params, region).as(Tuple)

  JSON.build do |json|
    json.array do
      search_results.each do |item|
        item.to_json(locale, json)
      end
    end
  end
end
2018-08-03 00:18:57 +02:00
2019-05-21 14:15:15 +02:00
# Proxies YouTube's search auto-complete and returns
# {"query": ..., "suggestions": [...]} as JSON.
get "/api/v1/search/suggestions" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  region = env.params.query["region"]?

  env.response.content_type = "application/json"

  query = env.params.query["q"]?
  query ||= ""

  begin
    # The suggest service lives on a different host than the pooled client's
    # default, hence the explicit :authority pseudo-header.
    headers = HTTP::Headers{":authority" => "suggestqueries.google.com"}
    response = YT_POOL.client &.get("/complete/search?hl=en&gl=#{region}&client=youtube&ds=yt&q=#{URI.encode_www_form(query)}&callback=suggestCallback", headers).body

    # Strip the JSONP wrapper. NOTE(review): assumes a fixed 35-character
    # "window.google.ac.h(" style prefix and a trailing ")" — brittle if
    # Google changes the callback wrapper.
    body = response[35..-2]
    body = JSON.parse(body).as_a
    suggestions = body[1].as_a[0..-2]

    JSON.build do |json|
      json.object do
        json.field "query", body[0].as_s
        json.field "suggestions" do
          json.array do
            suggestions.each do |suggestion|
              json.string suggestion[0].as_s
            end
          end
        end
      end
    end
  rescue ex
    next error_json(500, ex)
  end
end
2019-08-06 01:49:13 +02:00
# Returns a playlist (public, or the requesting user's private one) as JSON,
# or as pre-rendered HTML with next-video hints when `?format=html`.
# Also mounted under /auth/ so private playlists can be fetched with a token.
{"/api/v1/playlists/:plid", "/api/v1/auth/playlists/:plid"}.each do |route|
  get route do |env|
    locale = LOCALES[env.get("preferences").as(Preferences).locale]?

    env.response.content_type = "application/json"

    plid = env.params.url["plid"]

    # Offset into the playlist: explicit index, or legacy page param
    # (100 videos per page), defaulting to the start.
    offset = env.params.query["index"]?.try &.to_i?
    offset ||= env.params.query["page"]?.try &.to_i?.try { |page| (page - 1) * 100 }
    offset ||= 0

    continuation = env.params.query["continuation"]?

    format = env.params.query["format"]?
    format ||= "json"

    # "RD"-prefixed ids are mixes, handled by a dedicated endpoint.
    if plid.starts_with? "RD"
      next env.redirect "/api/v1/mixes/#{plid}"
    end

    begin
      playlist = get_playlist(PG_DB, plid, locale)
    rescue ex : InfoException
      next error_json(404, ex)
    rescue ex
      next error_json(404, "Playlist does not exist.")
    end

    user = env.get?("user").try &.as(User)
    # Private playlists are only visible to their owner; report them as
    # missing rather than forbidden to avoid leaking their existence.
    if !playlist || playlist.privacy.private? && playlist.author != user.try &.email
      next error_json(404, "Playlist does not exist.")
    end

    response = playlist.to_json(offset, locale, continuation: continuation)

    if format == "html"
      response = JSON.parse(response)
      playlist_html = template_playlist(response)
      # First playable entry after the current one (skipping deleted videos,
      # which have an empty author) supplies the autoplay hint.
      index, next_video = response["videos"].as_a.skip(1).select { |video| !video["author"].as_s.empty? }[0]?.try { |v| {v["index"], v["videoId"]} } || {nil, nil}

      response = {
        "playlistHtml" => playlist_html,
        "index"        => index,
        "nextVideo"    => next_video,
      }.to_json
    end

    response
  end
end
2018-08-15 17:22:36 +02:00
2018-09-29 06:12:35 +02:00
# Returns a YouTube mix (auto-generated "RD…" playlist) as JSON, or as
# pre-rendered HTML with a next-video hint when `?format=html`.
get "/api/v1/mixes/:rdid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/json"

  rdid = env.params.url["rdid"]

  # The continuation defaults to the seed video id embedded in the mix id.
  continuation = env.params.query["continuation"]?
  continuation ||= rdid.lchop("RD")[0, 11]

  format = env.params.query["format"]?
  format ||= "json"

  begin
    mix = fetch_mix(rdid, continuation, locale: locale)

    if !rdid.ends_with? continuation
      # Re-fetch anchored on the second video so the requested continuation
      # appears in the window, then locate it.
      mix = fetch_mix(rdid, mix.videos[1].id)
      index = mix.videos.index(mix.videos.select { |video| video.id == continuation }[0]?)
    end
    # FIX: `index` was left unset when rdid ends with the continuation (and
    # Array#index can return nil); default to the start of the mix so the
    # slice below is always well-defined.
    index ||= 0

    mix.videos = mix.videos[index..-1]
  rescue ex
    next error_json(500, ex)
  end

  response = JSON.build do |json|
    json.object do
      json.field "title", mix.title
      json.field "mixId", mix.id

      json.field "videos" do
        json.array do
          mix.videos.each do |video|
            json.object do
              json.field "title", video.title
              json.field "videoId", video.id
              json.field "author", video.author

              json.field "authorId", video.ucid
              json.field "authorUrl", "/channel/#{video.ucid}"

              json.field "videoThumbnails" do
                json.array do
                  generate_thumbnails(json, video.id)
                end
              end

              json.field "index", video.index
              json.field "lengthSeconds", video.length_seconds
            end
          end
        end
      end
    end
  end

  if format == "html"
    response = JSON.parse(response)
    playlist_html = template_mix(response)
    # First playable entry (deleted videos have an empty author) supplies
    # the autoplay hint.
    next_video = response["videos"].as_a.select { |video| !video["author"].as_s.empty? }[0]?.try &.["videoId"]

    response = {
      "playlistHtml" => playlist_html,
      "nextVideo"    => next_video,
    }.to_json
  end

  response
end
2019-06-07 19:39:12 +02:00
# Authenticated endpoints
2019-04-11 00:58:42 +02:00
# Server-sent-events stream of new-video notifications for up to 1000
# requested channel topics. `connection_channel` is the process-wide
# channel-of-connections defined earlier in this file.
get "/api/v1/auth/notifications" do |env|
  env.response.content_type = "text/event-stream"

  topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000)
  topics ||= [] of String

  create_notification_stream(env, topics, connection_channel)
end
2019-04-11 00:58:42 +02:00
2019-05-21 16:01:17 +02:00
# POST variant of the notification stream: identical behavior, but topics
# arrive in the form body (useful when the topic list exceeds URL limits).
post "/api/v1/auth/notifications" do |env|
  env.response.content_type = "text/event-stream"

  topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
  topics ||= [] of String

  create_notification_stream(env, topics, connection_channel)
end
2019-05-01 04:01:57 +02:00
# Returns the authenticated user's preferences as JSON.
get "/api/v1/auth/preferences" do |env|
  env.response.content_type = "application/json"
  user = env.get("user").as(User)
  user.preferences.to_json
end
2019-04-18 23:23:50 +02:00
2019-05-01 04:01:57 +02:00
# Replaces the authenticated user's preferences with the JSON request body.
# A malformed body silently keeps the existing preferences; answers 204.
post "/api/v1/auth/preferences" do |env|
  env.response.content_type = "application/json"
  user = env.get("user").as(User)

  begin
    preferences = Preferences.from_json(env.request.body || "{}")
  rescue
    # Deliberate best-effort: bad input falls back to current preferences.
    preferences = user.preferences
  end

  PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)

  env.response.status_code = 204
end
2019-04-18 23:23:50 +02:00
2019-06-07 19:39:12 +02:00
# Returns the authenticated user's subscription feed:
# {"notifications": [...], "videos": [...]} with pagination.
get "/api/v1/auth/feed" do |env|
  env.response.content_type = "application/json"

  user = env.get("user").as(User)
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  # Page size: query param, then user preference, then instance default.
  max_results = env.params.query["max_results"]?.try &.to_i?
  max_results ||= user.preferences.max_results
  max_results ||= CONFIG.default_user_preferences.max_results

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)

  JSON.build do |json|
    json.object do
      json.field "notifications" do
        json.array do
          notifications.each do |video|
            video.to_json(locale, json)
          end
        end
      end

      json.field "videos" do
        json.array do
          videos.each do |video|
            video.to_json(locale, json)
          end
        end
      end
    end
  end
end
2019-04-22 17:40:29 +02:00
get " /api/v1/auth/subscriptions " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
2019-04-18 23:23:50 +02:00
2019-04-22 17:40:29 +02:00
if user . subscriptions . empty?
values = " '{}' "
else
values = " VALUES #{ user . subscriptions . map { | id | %( ( ' #{ id } ' ) ) } . join ( " , " ) } "
end
2019-04-18 23:23:50 +02:00
2019-04-22 17:40:29 +02:00
subscriptions = PG_DB . query_all ( " SELECT * FROM channels WHERE id = ANY( #{ values } ) " , as : InvidiousChannel )
JSON . build do | json |
json . array do
subscriptions . each do | subscription |
json . object do
json . field " author " , subscription . author
json . field " authorId " , subscription . id
end
end
end
end
end
post " /api/v1/auth/subscriptions/:ucid " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
ucid = env . params . url [ " ucid " ]
if ! user . subscriptions . includes? ucid
2021-01-04 16:51:06 +01:00
get_channel ( ucid , PG_DB , false , false )
2019-06-01 17:19:18 +02:00
PG_DB . exec ( " UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2 " , ucid , user . email )
2019-04-22 17:40:29 +02:00
end
2019-05-15 19:26:29 +02:00
# For Google accounts, access tokens don't have enough information to
# make a request on the user's behalf, which is why we don't sync with
# YouTube.
2019-04-22 17:40:29 +02:00
env . response . status_code = 204
end
delete " /api/v1/auth/subscriptions/:ucid " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
ucid = env . params . url [ " ucid " ]
2019-06-01 17:19:18 +02:00
PG_DB . exec ( " UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2 " , ucid , user . email )
2019-04-22 17:40:29 +02:00
env . response . status_code = 204
end
2019-04-18 23:23:50 +02:00
2019-08-06 01:49:13 +02:00
get " /api/v1/auth/playlists " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
playlists = PG_DB . query_all ( " SELECT * FROM playlists WHERE author = $1 " , user . email , as : InvidiousPlaylist )
JSON . build do | json |
json . array do
playlists . each do | playlist |
2020-06-16 00:10:30 +02:00
playlist . to_json ( 0 , locale , json )
2019-08-06 01:49:13 +02:00
end
end
end
end
post " /api/v1/auth/playlists " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
title = env . params . json [ " title " ]? . try & . as ( String ) . delete ( " <> " ) . byte_slice ( 0 , 150 )
if ! title
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Invalid title. " )
2019-08-06 01:49:13 +02:00
end
privacy = env . params . json [ " privacy " ]? . try { | privacy | PlaylistPrivacy . parse ( privacy . as ( String ) . downcase ) }
if ! privacy
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Invalid privacy setting. " )
2019-08-06 01:49:13 +02:00
end
if PG_DB . query_one ( " SELECT count(*) FROM playlists WHERE author = $1 " , user . email , as : Int64 ) >= 100
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " User cannot have more than 100 playlists. " )
2019-08-06 01:49:13 +02:00
end
playlist = create_playlist ( PG_DB , title , privacy , user )
2020-06-16 00:10:30 +02:00
env . response . headers [ " Location " ] = " #{ HOST_URL } /api/v1/auth/playlists/ #{ playlist . id } "
2019-08-06 01:49:13 +02:00
env . response . status_code = 201
{
" title " = > title ,
" playlistId " = > playlist . id ,
} . to_json
end
patch " /api/v1/auth/playlists/:plid " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 14:21:26 +02:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 10:59:21 +01:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-06 01:49:13 +02:00
end
if playlist . author != user . email
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " Invalid user " )
2019-08-06 01:49:13 +02:00
end
title = env . params . json [ " title " ] . try & . as ( String ) . delete ( " <> " ) . byte_slice ( 0 , 150 ) || playlist . title
privacy = env . params . json [ " privacy " ]? . try { | privacy | PlaylistPrivacy . parse ( privacy . as ( String ) . downcase ) } || playlist . privacy
description = env . params . json [ " description " ]? . try & . as ( String ) . delete ( " \ r " ) || playlist . description
if title != playlist . title ||
privacy != playlist . privacy ||
description != playlist . description
updated = Time . utc
else
updated = playlist . updated
end
PG_DB . exec ( " UPDATE playlists SET title = $1, privacy = $2, description = $3, updated = $4 WHERE id = $5 " , title , privacy , description , updated , plid )
env . response . status_code = 204
end
delete " /api/v1/auth/playlists/:plid " do | env |
2020-11-30 10:59:21 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-08-06 01:49:13 +02:00
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 14:21:26 +02:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 10:59:21 +01:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-06 01:49:13 +02:00
end
if playlist . author != user . email
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " Invalid user " )
2019-08-06 01:49:13 +02:00
end
PG_DB . exec ( " DELETE FROM playlist_videos * WHERE plid = $1 " , plid )
PG_DB . exec ( " DELETE FROM playlists * WHERE id = $1 " , plid )
env . response . status_code = 204
end
post " /api/v1/auth/playlists/:plid/videos " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 14:21:26 +02:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 10:59:21 +01:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-06 01:49:13 +02:00
end
if playlist . author != user . email
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " Invalid user " )
2019-08-06 01:49:13 +02:00
end
if playlist . index . size >= 500
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Playlist cannot have more than 500 videos " )
2019-08-06 01:49:13 +02:00
end
video_id = env . params . json [ " videoId " ] . try & . as ( String )
if ! video_id
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " Invalid videoId " )
2019-08-06 01:49:13 +02:00
end
begin
video = get_video ( video_id , PG_DB )
rescue ex
2020-11-30 10:59:21 +01:00
next error_json ( 500 , ex )
2019-08-06 01:49:13 +02:00
end
2020-07-26 16:58:50 +02:00
playlist_video = PlaylistVideo . new ( {
title : video . title ,
id : video . id ,
author : video . author ,
ucid : video . ucid ,
2019-08-06 01:49:13 +02:00
length_seconds : video . length_seconds ,
2020-07-26 16:58:50 +02:00
published : video . published ,
plid : plid ,
live_now : video . live_now ,
index : Random :: Secure . rand ( 0 _i64 .. Int64 :: MAX ) ,
} )
2019-08-06 01:49:13 +02:00
video_array = playlist_video . to_a
args = arg_array ( video_array )
PG_DB . exec ( " INSERT INTO playlist_videos VALUES ( #{ args } ) " , args : video_array )
2020-11-01 05:14:46 +01:00
PG_DB . exec ( " UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3 " , playlist_video . index , Time . utc , plid )
2019-08-06 01:49:13 +02:00
2020-06-16 00:10:30 +02:00
env . response . headers [ " Location " ] = " #{ HOST_URL } /api/v1/auth/playlists/ #{ plid } /videos/ #{ playlist_video . index . to_u64 . to_s ( 16 ) . upcase } "
2019-08-06 01:49:13 +02:00
env . response . status_code = 201
2020-06-16 00:10:30 +02:00
playlist_video . to_json ( locale , index : playlist . index . size )
2019-08-06 01:49:13 +02:00
end
delete " /api/v1/auth/playlists/:plid/videos/:index " do | env |
2020-11-30 10:59:21 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-08-06 01:49:13 +02:00
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
index = env . params . url [ " index " ] . to_i64 ( 16 )
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 14:21:26 +02:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 10:59:21 +01:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-06 01:49:13 +02:00
end
if playlist . author != user . email
2020-11-30 10:59:21 +01:00
next error_json ( 403 , " Invalid user " )
2019-08-06 01:49:13 +02:00
end
if ! playlist . index . includes? index
2020-11-30 10:59:21 +01:00
next error_json ( 404 , " Playlist does not contain index " )
2019-08-06 01:49:13 +02:00
end
PG_DB . exec ( " DELETE FROM playlist_videos * WHERE index = $1 " , index )
2020-11-01 05:14:46 +01:00
PG_DB . exec ( " UPDATE playlists SET index = array_remove(index, $1), video_count = cardinality(index) - 1, updated = $2 WHERE id = $3 " , index , Time . utc , plid )
2019-08-06 01:49:13 +02:00
env . response . status_code = 204
end
# patch "/api/v1/auth/playlists/:plid/videos/:index" do |env|
# TODO: Playlist stub
# end
2019-04-18 23:23:50 +02:00
get " /api/v1/auth/tokens " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
scopes = env . get ( " scopes " ) . as ( Array ( String ) )
tokens = PG_DB . query_all ( " SELECT id, issued FROM session_ids WHERE email = $1 " , user . email , as : { session : String , issued : Time } )
JSON . build do | json |
json . array do
tokens . each do | token |
json . object do
json . field " session " , token [ :session ]
json . field " issued " , token [ :issued ] . to_unix
end
end
end
end
end
post " /api/v1/auth/tokens/register " do | env |
user = env . get ( " user " ) . as ( User )
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
case env . request . headers [ " Content-Type " ]?
when " application/x-www-form-urlencoded "
scopes = env . params . body . select { | k , v | k . match ( / ^scopes \ [ \ d+ \ ]$ / ) } . map { | k , v | v }
callback_url = env . params . body [ " callbackUrl " ]?
expire = env . params . body [ " expire " ]? . try & . to_i?
when " application/json "
scopes = env . params . json [ " scopes " ] . as ( Array ) . map { | v | v . as_s }
callback_url = env . params . json [ " callbackUrl " ]? . try & . as ( String )
expire = env . params . json [ " expire " ]? . try & . as ( Int64 )
else
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Invalid or missing header 'Content-Type' " )
2019-04-18 23:23:50 +02:00
end
if callback_url && callback_url . empty?
callback_url = nil
end
if callback_url
callback_url = URI . parse ( callback_url )
end
if sid = env . get? ( " sid " ) . try & . as ( String )
env . response . content_type = " text/html "
csrf_token = generate_response ( sid , { " :authorize_token " } , HMAC_KEY , PG_DB , use_nonce : true )
next templated " authorize_token "
else
env . response . content_type = " application/json "
superset_scopes = env . get ( " scopes " ) . as ( Array ( String ) )
authorized_scopes = [ ] of String
scopes . each do | scope |
if scopes_include_scope ( superset_scopes , scope )
authorized_scopes << scope
end
end
access_token = generate_token ( user . email , authorized_scopes , expire , HMAC_KEY , PG_DB )
if callback_url
2019-09-24 19:31:33 +02:00
access_token = URI . encode_www_form ( access_token )
2019-04-18 23:23:50 +02:00
if query = callback_url . query
query = HTTP :: Params . parse ( query . not_nil! )
else
query = HTTP :: Params . new
end
query [ " token " ] = access_token
callback_url . query = query . to_s
env . redirect callback_url . to_s
else
access_token
end
end
end
post " /api/v1/auth/tokens/unregister " do | env |
2020-11-30 10:59:21 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-04-18 23:23:50 +02:00
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
scopes = env . get ( " scopes " ) . as ( Array ( String ) )
session = env . params . json [ " session " ]? . try & . as ( String )
session || = env . get ( " session " ) . as ( String )
# Allow tokens to revoke other tokens with correct scope
if session == env . get ( " session " ) . as ( String )
PG_DB . exec ( " DELETE FROM session_ids * WHERE id = $1 " , session )
elsif scopes_include_scope ( scopes , " GET:tokens " )
PG_DB . exec ( " DELETE FROM session_ids * WHERE id = $1 " , session )
else
2020-11-30 10:59:21 +01:00
next error_json ( 400 , " Cannot revoke session #{ session } " )
2019-04-18 23:23:50 +02:00
end
env . response . status_code = 204
end
2018-08-07 20:10:52 +02:00
get " /api/manifest/dash/id/videoplayback " do | env |
2019-04-12 18:08:33 +02:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 20:23:27 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-07 20:10:52 +02:00
env . redirect " /videoplayback? #{ env . params . query } "
end
get " /api/manifest/dash/id/videoplayback/* " do | env |
2019-04-12 18:08:33 +02:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 20:23:27 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-07 20:10:52 +02:00
env . redirect env . request . path . lchop ( " /api/manifest/dash/id " )
end
2018-07-16 15:18:59 +02:00
get " /api/manifest/dash/id/:id " do | env |
env . response . headers . add ( " Access-Control-Allow-Origin " , " * " )
env . response . content_type = " application/dash+xml "
local = env . params . query [ " local " ]? . try & . == " true "
id = env . params . url [ " id " ]
2018-11-18 00:37:57 +01:00
region = env . params . query [ " region " ]?
2018-07-16 15:18:59 +02:00
2019-06-05 03:54:38 +02:00
# Since some implementations create playlists based on resolution regardless of different codecs,
2019-06-07 04:32:39 +02:00
# we can opt to only add a source to a representation if it has a unique height within that representation
2020-01-09 02:27:21 +01:00
unique_res = env . params . query [ " unique_res " ]? . try { | q | ( q == " true " || q == " 1 " ) . to_unsafe }
2019-06-05 03:54:38 +02:00
2018-07-16 15:18:59 +02:00
begin
2019-06-29 04:17:56 +02:00
video = get_video ( id , PG_DB , region : region )
2018-10-07 05:22:22 +02:00
rescue ex : VideoRedirect
2019-09-08 18:08:59 +02:00
next env . redirect env . request . resource . gsub ( id , ex . video_id )
2018-07-16 15:18:59 +02:00
rescue ex
2019-03-23 16:24:30 +01:00
env . response . status_code = 403
next
2018-07-16 15:18:59 +02:00
end
2020-06-16 00:33:23 +02:00
if dashmpd = video . dash_manifest_url
2021-01-31 19:52:32 +01:00
manifest = YT_POOL . client & . get ( URI . parse ( dashmpd ) . request_target ) . body
2018-07-16 15:18:59 +02:00
manifest = manifest . gsub ( / <BaseURL>[^<]+< \/ BaseURL> / ) do | baseurl |
url = baseurl . lchop ( " <BaseURL> " )
url = url . rchop ( " </BaseURL> " )
if local
2021-01-11 00:00:45 +01:00
uri = URI . parse ( url )
2021-01-31 19:52:32 +01:00
url = " #{ uri . request_target } host/ #{ uri . host } / "
2018-07-16 15:18:59 +02:00
end
" <BaseURL> #{ url } </BaseURL> "
end
next manifest
end
2020-06-16 00:33:23 +02:00
adaptive_fmts = video . adaptive_fmts
2018-07-16 15:18:59 +02:00
if local
adaptive_fmts . each do | fmt |
2021-01-31 19:52:32 +01:00
fmt [ " url " ] = JSON :: Any . new ( URI . parse ( fmt [ " url " ] . as_s ) . request_target )
2018-07-16 15:18:59 +02:00
end
end
2020-06-16 00:33:23 +02:00
audio_streams = video . audio_streams
video_streams = video . video_streams . sort_by { | stream | { stream [ " width " ] . as_i , stream [ " fps " ] . as_i } } . reverse
2018-08-05 06:07:38 +02:00
2019-04-12 18:08:33 +02:00
XML . build ( indent : " " , encoding : " UTF-8 " ) do | xml |
2018-08-11 22:01:22 +02:00
xml . element ( " MPD " , " xmlns " : " urn:mpeg:dash:schema:mpd:2011 " ,
2019-06-05 03:54:38 +02:00
" profiles " : " urn:mpeg:dash:profile:full:2011 " , minBufferTime : " PT1.5S " , type : " static " ,
2019-07-30 02:41:45 +02:00
mediaPresentationDuration : " PT #{ video . length_seconds } S " ) do
2018-07-16 15:18:59 +02:00
xml . element ( " Period " ) do
2019-04-12 18:19:54 +02:00
i = 0
2019-04-12 15:04:59 +02:00
{ " audio/mp4 " , " audio/webm " } . each do | mime_type |
2020-06-16 00:33:23 +02:00
mime_streams = audio_streams . select { | stream | stream [ " mimeType " ] . as_s . starts_with? mime_type }
next if mime_streams . empty?
2019-07-09 17:08:00 +02:00
2019-04-12 18:19:54 +02:00
xml . element ( " AdaptationSet " , id : i , mimeType : mime_type , startWithSAP : 1 , subsegmentAlignment : true ) do
2019-07-09 17:08:00 +02:00
mime_streams . each do | fmt |
2020-06-16 00:33:23 +02:00
codecs = fmt [ " mimeType " ] . as_s . split ( " codecs= " ) [ 1 ] . strip ( '"' )
bandwidth = fmt [ " bitrate " ] . as_i
itag = fmt [ " itag " ] . as_i
url = fmt [ " url " ] . as_s
2019-04-12 15:04:59 +02:00
xml . element ( " Representation " , id : fmt [ " itag " ] , codecs : codecs , bandwidth : bandwidth ) do
xml . element ( " AudioChannelConfiguration " , schemeIdUri : " urn:mpeg:dash:23003:3:audio_channel_configuration:2011 " ,
value : " 2 " )
xml . element ( " BaseURL " ) { xml . text url }
2020-06-16 00:33:23 +02:00
xml . element ( " SegmentBase " , indexRange : " #{ fmt [ " indexRange " ] [ " start " ] } - #{ fmt [ " indexRange " ] [ " end " ] } " ) do
xml . element ( " Initialization " , range : " #{ fmt [ " initRange " ] [ " start " ] } - #{ fmt [ " initRange " ] [ " end " ] } " )
2019-04-12 15:04:59 +02:00
end
2018-07-16 15:18:59 +02:00
end
end
end
2019-04-12 18:19:54 +02:00
i += 1
2018-07-16 15:18:59 +02:00
end
2020-06-16 00:33:23 +02:00
potential_heights = { 4320 , 2160 , 1440 , 1080 , 720 , 480 , 360 , 240 , 144 }
2019-04-12 15:04:59 +02:00
{ " video/mp4 " , " video/webm " } . each do | mime_type |
2020-06-16 00:33:23 +02:00
mime_streams = video_streams . select { | stream | stream [ " mimeType " ] . as_s . starts_with? mime_type }
2020-01-09 02:27:21 +01:00
next if mime_streams . empty?
2019-07-09 17:08:00 +02:00
2019-06-07 04:32:39 +02:00
heights = [ ] of Int32
2019-04-12 18:19:54 +02:00
xml . element ( " AdaptationSet " , id : i , mimeType : mime_type , startWithSAP : 1 , subsegmentAlignment : true , scanType : " progressive " ) do
2019-07-09 17:08:00 +02:00
mime_streams . each do | fmt |
2020-06-16 00:33:23 +02:00
codecs = fmt [ " mimeType " ] . as_s . split ( " codecs= " ) [ 1 ] . strip ( '"' )
bandwidth = fmt [ " bitrate " ] . as_i
itag = fmt [ " itag " ] . as_i
url = fmt [ " url " ] . as_s
width = fmt [ " width " ] . as_i
height = fmt [ " height " ] . as_i
2019-05-30 22:39:02 +02:00
# Resolutions reported by YouTube player (may not accurately reflect source)
2020-06-16 00:33:23 +02:00
height = potential_heights . min_by { | i | ( height - i ) . abs }
2019-06-05 03:54:38 +02:00
next if unique_res && heights . includes? height
heights << height
2019-04-12 15:04:59 +02:00
xml . element ( " Representation " , id : itag , codecs : codecs , width : width , height : height ,
startWithSAP : " 1 " , maxPlayoutRate : " 1 " ,
bandwidth : bandwidth , frameRate : fmt [ " fps " ] ) do
xml . element ( " BaseURL " ) { xml . text url }
2020-06-16 00:33:23 +02:00
xml . element ( " SegmentBase " , indexRange : " #{ fmt [ " indexRange " ] [ " start " ] } - #{ fmt [ " indexRange " ] [ " end " ] } " ) do
xml . element ( " Initialization " , range : " #{ fmt [ " initRange " ] [ " start " ] } - #{ fmt [ " initRange " ] [ " end " ] } " )
2019-04-12 15:04:59 +02:00
end
2018-07-16 15:18:59 +02:00
end
end
end
2019-04-12 18:19:54 +02:00
i += 1
2018-07-16 15:18:59 +02:00
end
end
end
end
end
2018-07-28 01:25:58 +02:00
get " /api/manifest/hls_variant/* " do | env |
2020-06-16 00:33:23 +02:00
response = YT_POOL . client & . get ( env . request . path )
2018-07-28 01:25:58 +02:00
2020-06-16 00:33:23 +02:00
if response . status_code != 200
env . response . status_code = response . status_code
2019-03-23 16:24:30 +01:00
next
2018-07-28 01:25:58 +02:00
end
2019-04-25 19:41:35 +02:00
local = env . params . query [ " local " ]? . try & . == " true "
2018-07-28 01:25:58 +02:00
env . response . content_type = " application/x-mpegURL "
env . response . headers . add ( " Access-Control-Allow-Origin " , " * " )
2018-08-05 06:07:38 +02:00
2020-06-16 00:10:30 +02:00
manifest = response . body
2019-04-25 19:41:35 +02:00
if local
2020-06-16 00:10:30 +02:00
manifest = manifest . gsub ( " https://www.youtube.com " , HOST_URL )
2019-04-25 19:41:35 +02:00
manifest = manifest . gsub ( " index.m3u8 " , " index.m3u8?local=true " )
end
manifest
2018-07-28 01:25:58 +02:00
end
get " /api/manifest/hls_playlist/* " do | env |
2020-06-16 00:33:23 +02:00
response = YT_POOL . client & . get ( env . request . path )
2018-07-28 01:25:58 +02:00
2020-06-16 00:33:23 +02:00
if response . status_code != 200
env . response . status_code = response . status_code
2019-03-23 16:24:30 +01:00
next
2018-07-28 01:25:58 +02:00
end
2019-04-25 19:41:35 +02:00
local = env . params . query [ " local " ]? . try & . == " true "
env . response . content_type = " application/x-mpegURL "
env . response . headers . add ( " Access-Control-Allow-Origin " , " * " )
2020-06-16 00:10:30 +02:00
manifest = response . body
2019-04-25 19:41:35 +02:00
if local
2019-07-05 19:08:39 +02:00
manifest = manifest . gsub ( / ^https: \/ \/ r \ d---.{11} \ .c \ .youtube \ .com[^ \ n]* /m ) do | match |
path = URI . parse ( match ) . path
path = path . lchop ( " /videoplayback/ " )
path = path . rchop ( " / " )
path = path . gsub ( / mime \/ \ w+ \/ \ w+ / ) do | mimetype |
mimetype = mimetype . split ( " / " )
mimetype [ 0 ] + " / " + mimetype [ 1 ] + " %2F " + mimetype [ 2 ]
end
path = path . split ( " / " )
raw_params = { } of String = > Array ( String )
path . each_slice ( 2 ) do | pair |
key , value = pair
2019-09-24 19:31:33 +02:00
value = URI . decode_www_form ( value )
2019-07-05 19:08:39 +02:00
if raw_params [ key ]?
raw_params [ key ] << value
else
raw_params [ key ] = [ value ]
end
end
raw_params = HTTP :: Params . new ( raw_params )
if fvip = raw_params [ " hls_chunk_host " ] . match ( / r(?<fvip> \ d+)--- / )
raw_params [ " fvip " ] = fvip [ " fvip " ]
end
raw_params [ " local " ] = " true "
2019-04-25 19:41:35 +02:00
2020-06-16 00:10:30 +02:00
" #{ HOST_URL } /videoplayback? #{ raw_params } "
2019-07-05 19:08:39 +02:00
end
end
2018-07-28 01:25:58 +02:00
manifest
end
2019-01-28 03:35:32 +01:00
# YouTube /videoplayback links expire after 6 hours, so we have a mechanism
# here to redirect to the latest version. Accepts either plain id/itag query
# params or a JSON "download_widget" blob from the download form.
get "/latest_version" do |env|
  if env.params.query["download_widget"]?
    download_widget = JSON.parse(env.params.query["download_widget"])

    id = download_widget["id"].as_s
    title = download_widget["title"].as_s

    if label = download_widget["label"]?
      # Caption downloads are handled by the captions API.
      env.redirect "/api/v1/captions/#{id}?label=#{label}&title=#{title}"
      next
    else
      itag = download_widget["itag"].as_s.to_i
      local = "true"
    end
  end

  id ||= env.params.query["id"]?
  itag ||= env.params.query["itag"]?.try &.to_i

  region = env.params.query["region"]?

  local ||= env.params.query["local"]?
  local ||= "false"
  local = local == "true"

  if !id || !itag
    env.response.status_code = 400
    next
  end

  video = get_video(id, PG_DB, region: region)

  # Look for the itag among the muxed streams first, then the adaptive ones.
  fmt = video.fmt_stream.find(nil) { |f| f["itag"].as_i == itag } || video.adaptive_fmts.find(nil) { |f| f["itag"].as_i == itag }
  url = fmt.try &.["url"]?.try &.as_s

  if !url
    env.response.status_code = 404
    next
  end

  url = URI.parse(url).request_target.not_nil! if local
  # NOTE(review): title is appended without URL-encoding — confirm titles
  # containing '&' or '#' survive the redirect as intended.
  url = "#{url}&title=#{title}" if title

  env.redirect url
end
2018-08-07 20:25:22 +02:00
options " /videoplayback " do | env |
2019-04-12 18:08:33 +02:00
env . response . headers . delete ( " Content-Type " )
2018-08-04 22:30:44 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 16:43:47 +02:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-04 22:30:44 +02:00
end
2018-08-07 18:39:56 +02:00
options " /videoplayback/* " do | env |
2019-04-12 18:08:33 +02:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 18:39:56 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 16:43:47 +02:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-07 18:39:56 +02:00
end
2018-08-07 20:18:38 +02:00
options " /api/manifest/dash/id/videoplayback " do | env |
2019-04-12 18:08:33 +02:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 20:18:38 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 16:43:47 +02:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-07 20:18:38 +02:00
end
options " /api/manifest/dash/id/videoplayback/* " do | env |
2019-04-12 18:08:33 +02:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 20:18:38 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 16:43:47 +02:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-07 20:18:38 +02:00
end
2018-08-07 18:39:56 +02:00
get " /videoplayback/* " do | env |
2018-06-07 00:55:51 +02:00
path = env . request . path
2018-08-07 18:39:56 +02:00
2018-08-07 18:49:14 +02:00
path = path . lchop ( " /videoplayback/ " )
path = path . rchop ( " / " )
2018-07-16 04:53:24 +02:00
2018-08-07 18:49:14 +02:00
path = path . gsub ( / mime \/ \ w+ \/ \ w+ / ) do | mimetype |
mimetype = mimetype . split ( " / " )
mimetype [ 0 ] + " / " + mimetype [ 1 ] + " %2F " + mimetype [ 2 ]
end
2018-07-16 04:53:24 +02:00
2018-08-07 18:49:14 +02:00
path = path . split ( " / " )
2018-06-07 00:55:51 +02:00
2018-08-07 18:49:14 +02:00
raw_params = { } of String = > Array ( String )
path . each_slice ( 2 ) do | pair |
key , value = pair
2019-09-24 19:31:33 +02:00
value = URI . decode_www_form ( value )
2018-06-07 00:55:51 +02:00
2018-08-07 18:49:14 +02:00
if raw_params [ key ]?
raw_params [ key ] << value
else
raw_params [ key ] = [ value ]
2018-06-07 00:55:51 +02:00
end
2018-08-07 18:49:14 +02:00
end
2018-06-07 00:55:51 +02:00
2018-08-07 18:49:14 +02:00
query_params = HTTP :: Params . new ( raw_params )
2018-08-07 18:39:56 +02:00
2018-08-11 21:29:51 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-07 18:39:56 +02:00
env . redirect " /videoplayback? #{ query_params } "
end
get " /videoplayback " do | env |
2020-11-30 10:59:21 +01:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-08-07 18:49:14 +02:00
query_params = env . params . query
2018-04-16 03:47:37 +02:00
2019-03-11 20:07:55 +01:00
fvip = query_params [ " fvip " ]? || " 3 "
2019-05-31 03:47:04 +02:00
mns = query_params [ " mn " ]? . try & . split ( " , " )
mns || = [ ] of String
2019-03-11 20:07:55 +01:00
2019-03-27 20:59:53 +01:00
if query_params [ " region " ]?
region = query_params [ " region " ]
query_params . delete ( " region " )
end
2019-03-11 19:14:30 +01:00
if query_params [ " host " ]? && ! query_params [ " host " ] . empty?
host = " https:// #{ query_params [ " host " ] } "
2019-03-11 19:32:46 +01:00
query_params . delete ( " host " )
2019-03-11 19:14:30 +01:00
else
2019-03-11 20:07:55 +01:00
host = " https://r #{ fvip } --- #{ mns . pop } .googlevideo.com "
2019-03-11 19:14:30 +01:00
end
2018-04-16 03:47:37 +02:00
url = " /videoplayback? #{ query_params . to_s } "
2019-03-11 17:43:48 +01:00
headers = HTTP :: Headers . new
2019-06-23 15:39:14 +02:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-03-11 17:43:48 +01:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2019-01-24 20:52:33 +01:00
2019-08-27 16:53:44 +02:00
client = make_client ( URI . parse ( host ) , region )
2019-10-26 17:43:28 +02:00
response = HTTP :: Client :: Response . new ( 500 )
2020-03-06 19:50:00 +01:00
error = " "
2019-08-27 16:53:44 +02:00
5 . times do
begin
response = client . head ( url , headers )
if response . headers [ " Location " ]?
location = URI . parse ( response . headers [ " Location " ] )
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2020-12-23 06:52:23 +01:00
new_host = " #{ location . scheme } :// #{ location . host } "
if new_host != host
host = new_host
client . close
client = make_client ( URI . parse ( new_host ) , region )
end
2019-08-27 16:53:44 +02:00
2021-01-31 19:52:32 +01:00
url = " #{ location . request_target } &host= #{ location . host } #{ region ? " ®ion= #{ region } " : " " } "
2019-08-27 16:53:44 +02:00
else
break
end
rescue Socket :: Addrinfo :: Error
if ! mns . empty?
mn = mns . pop
end
fvip = " 3 "
host = " https://r #{ fvip } --- #{ mn } .googlevideo.com "
client = make_client ( URI . parse ( host ) , region )
rescue ex
2020-03-06 19:50:00 +01:00
error = ex . message
2019-08-27 16:53:44 +02:00
end
end
if response . status_code >= 400
env . response . status_code = response . status_code
2020-03-06 19:50:00 +01:00
env . response . content_type = " text/plain "
next error
2019-08-27 16:53:44 +02:00
end
2019-07-05 18:34:22 +02:00
if url . includes? " &file=seg.ts "
2019-07-07 16:07:53 +02:00
if CONFIG . disabled? ( " livestreams " )
2020-11-30 10:59:21 +01:00
next error_template ( 403 , " Administrator has disabled this endpoint. " )
2019-07-07 16:07:53 +02:00
end
2019-07-05 18:34:22 +02:00
begin
client . get ( url , headers ) do | response |
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-07-05 18:34:22 +02:00
env . response . headers [ key ] = value
end
end
2018-04-16 03:47:37 +02:00
2019-07-05 18:34:22 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2019-07-04 22:30:00 +02:00
2019-07-05 18:34:22 +02:00
if location = response . headers [ " Location " ]?
location = URI . parse ( location )
2021-01-31 19:52:32 +01:00
location = " #{ location . request_target } &host= #{ location . host } "
2019-07-04 22:30:00 +02:00
2019-07-05 18:34:22 +02:00
if region
location += " ®ion= #{ region } "
end
2019-05-26 17:53:56 +02:00
2019-07-05 18:34:22 +02:00
next env . redirect location
end
IO . copy ( response . body_io , env . response )
end
rescue ex
end
else
2019-07-07 16:07:53 +02:00
if query_params [ " title " ]? && CONFIG . disabled? ( " downloads " ) ||
CONFIG . disabled? ( " dash " )
2020-11-30 10:59:21 +01:00
next error_template ( 403 , " Administrator has disabled this endpoint. " )
2019-07-07 16:07:53 +02:00
end
2019-07-05 18:34:22 +02:00
content_length = nil
first_chunk = true
range_start , range_end = parse_range ( env . request . headers [ " Range " ]? )
chunk_start = range_start
chunk_end = range_end
if ! chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE
chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
2019-07-04 22:30:00 +02:00
end
2019-05-19 14:12:45 +02:00
2019-07-05 18:34:22 +02:00
# TODO: Record bytes written so we can restart after a chunk fails
while true
if ! range_end && content_length
range_end = content_length
end
2019-03-27 20:59:53 +01:00
2019-07-05 18:34:22 +02:00
if range_end && chunk_start > range_end
break
end
if range_end && chunk_end > range_end
chunk_end = range_end
end
2019-03-27 20:59:53 +01:00
2019-07-05 18:34:22 +02:00
headers [ " Range " ] = " bytes= #{ chunk_start } - #{ chunk_end } "
2019-07-04 22:30:00 +02:00
2019-07-05 18:34:22 +02:00
begin
client . get ( url , headers ) do | response |
if first_chunk
if ! env . request . headers [ " Range " ]? && response . status_code == 206
env . response . status_code = 200
else
env . response . status_code = response . status_code
end
2019-07-04 22:30:00 +02:00
2019-07-05 18:34:22 +02:00
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase ) && key . downcase != " content-range "
2019-07-05 18:34:22 +02:00
env . response . headers [ key ] = value
end
2019-07-04 22:30:00 +02:00
end
2019-07-05 18:34:22 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2019-07-04 22:30:00 +02:00
2019-07-05 18:34:22 +02:00
if location = response . headers [ " Location " ]?
location = URI . parse ( location )
2021-01-31 19:52:32 +01:00
location = " #{ location . request_target } &host= #{ location . host } #{ region ? " ®ion= #{ region } " : " " } "
2019-07-05 18:34:22 +02:00
env . redirect location
break
end
if title = query_params [ " title " ]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
2019-09-24 19:31:33 +02:00
env . response . headers [ " Content-Disposition " ] = " attachment; filename= \" #{ URI . encode_www_form ( title ) } \" ; filename*=UTF-8'' #{ URI . encode_www_form ( title ) } "
2019-07-05 18:34:22 +02:00
end
if ! response . headers . includes_word? ( " Transfer-Encoding " , " chunked " )
content_length = response . headers [ " Content-Range " ] . split ( " / " ) [ - 1 ] . to_i64
if env . request . headers [ " Range " ]?
env . response . headers [ " Content-Range " ] = " bytes #{ range_start } - #{ range_end || ( content_length - 1 ) } / #{ content_length } "
env . response . content_length = ( ( range_end . try & . + 1 ) || content_length ) - range_start
else
env . response . content_length = content_length
end
2019-07-05 18:02:12 +02:00
end
2019-07-04 22:30:00 +02:00
end
2019-07-01 17:45:09 +02:00
2019-07-05 18:34:22 +02:00
proxy_file ( response , env )
end
rescue ex
if ex . message != " Error reading socket: Connection reset by peer "
break
2019-08-27 16:53:44 +02:00
else
2020-12-23 06:52:23 +01:00
client . close
2019-08-27 16:53:44 +02:00
client = make_client ( URI . parse ( host ) , region )
2019-07-05 18:34:22 +02:00
end
2019-07-01 17:45:09 +02:00
end
2019-07-05 18:02:12 +02:00
2019-07-05 18:34:22 +02:00
chunk_start = chunk_end + 1
chunk_end += HTTP_CHUNK_SIZE
first_chunk = false
2019-03-25 22:32:11 +01:00
end
2019-05-26 17:53:56 +02:00
end
2020-12-23 06:52:23 +01:00
client . close
2018-09-15 04:24:28 +02:00
end
2018-09-18 01:39:28 +02:00
get " /ggpht/* " do | env |
url = env . request . path . lchop ( " /ggpht " )
2020-05-08 16:00:53 +02:00
headers = HTTP :: Headers { " :authority " = > " yt3.ggpht.com " }
2019-06-23 15:39:14 +02:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-04-12 00:00:00 +02:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2019-05-26 16:41:12 +02:00
begin
2020-03-06 19:53:35 +01:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-07-03 20:13:40 +02:00
env . response . status_code = response . status_code
2019-05-26 17:53:56 +02:00
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-05-26 17:53:56 +02:00
env . response . headers [ key ] = value
end
2019-05-19 14:12:45 +02:00
end
2019-04-12 00:00:00 +02:00
2019-07-03 20:13:40 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300
2019-07-03 20:54:15 +02:00
env . response . headers . delete ( " Transfer-Encoding " )
2019-05-26 17:53:56 +02:00
break
end
2019-04-12 00:00:00 +02:00
2019-05-26 17:53:56 +02:00
proxy_file ( response , env )
end
2019-05-26 16:41:12 +02:00
rescue ex
end
2019-04-12 00:00:00 +02:00
end
2020-10-25 09:35:16 +01:00
options " /sb/:authority/:id/:storyboard/:index " do | env |
2019-05-02 21:20:19 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
end
2020-10-25 09:35:16 +01:00
get " /sb/:authority/:id/:storyboard/:index " do | env |
authority = env . params . url [ " authority " ]
2019-04-12 00:00:00 +02:00
id = env . params . url [ " id " ]
storyboard = env . params . url [ " storyboard " ]
index = env . params . url [ " index " ]
2020-03-06 19:53:35 +01:00
url = " /sb/ #{ id } / #{ storyboard } / #{ index } ? #{ env . params . query } "
headers = HTTP :: Headers . new
2020-10-25 09:35:16 +01:00
headers [ " :authority " ] = " #{ authority } .ytimg.com "
2019-04-12 00:00:00 +02:00
2019-06-23 15:39:14 +02:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-03-11 17:43:48 +01:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2018-09-18 01:39:28 +02:00
2019-05-26 16:41:12 +02:00
begin
2020-03-06 19:53:35 +01:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-05-26 17:53:56 +02:00
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-05-26 17:53:56 +02:00
env . response . headers [ key ] = value
end
2019-05-19 14:12:45 +02:00
end
2018-09-18 01:39:28 +02:00
2020-03-04 21:36:39 +01:00
env . response . headers [ " Connection " ] = " close "
2019-07-03 20:13:40 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300
2019-07-03 20:54:15 +02:00
env . response . headers . delete ( " Transfer-Encoding " )
2019-05-26 17:53:56 +02:00
break
end
2018-09-18 01:39:28 +02:00
2019-05-26 17:53:56 +02:00
proxy_file ( response , env )
end
2019-05-26 16:41:12 +02:00
rescue ex
end
2018-09-18 01:39:28 +02:00
end
2019-08-16 22:46:37 +02:00
get " /s_p/:id/:name " do | env |
id = env . params . url [ " id " ]
name = env . params . url [ " name " ]
url = env . request . resource
2020-05-08 16:00:53 +02:00
headers = HTTP :: Headers { " :authority " = > " i9.ytimg.com " }
2019-08-16 22:46:37 +02:00
REQUEST_HEADERS_WHITELIST . each do | header |
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
begin
2020-03-06 19:53:35 +01:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-08-16 22:46:37 +02:00
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-08-16 22:46:37 +02:00
env . response . headers [ key ] = value
end
2019-11-01 17:02:38 +01:00
end
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300 && response . status_code != 404
env . response . headers . delete ( " Transfer-Encoding " )
break
end
proxy_file ( response , env )
end
rescue ex
end
end
get " /yts/img/:name " do | env |
headers = HTTP :: Headers . new
REQUEST_HEADERS_WHITELIST . each do | header |
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
begin
YT_POOL . client & . get ( env . request . resource , headers ) do | response |
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-11-01 17:02:38 +01:00
env . response . headers [ key ] = value
end
2019-08-16 22:46:37 +02:00
end
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300 && response . status_code != 404
env . response . headers . delete ( " Transfer-Encoding " )
break
end
proxy_file ( response , env )
end
rescue ex
end
end
2018-09-15 04:24:28 +02:00
get " /vi/:id/:name " do | env |
id = env . params . url [ " id " ]
name = env . params . url [ " name " ]
2020-05-08 16:00:53 +02:00
headers = HTTP :: Headers { " :authority " = > " i.ytimg.com " }
2020-03-06 19:53:35 +01:00
2018-09-15 04:24:28 +02:00
if name == " maxres.jpg "
2020-06-16 00:10:30 +02:00
build_thumbnails ( id ) . each do | thumb |
2020-03-06 19:53:35 +01:00
if YT_POOL . client & . head ( " /vi/ #{ id } / #{ thumb [ :url ] } .jpg " , headers ) . status_code == 200
2018-09-15 04:24:28 +02:00
name = thumb [ :url ] + " .jpg "
break
end
end
end
url = " /vi/ #{ id } / #{ name } "
2019-06-23 15:39:14 +02:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-03-11 17:43:48 +01:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2018-09-15 04:24:28 +02:00
2019-05-26 16:41:12 +02:00
begin
2020-03-06 19:53:35 +01:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-05-26 17:53:56 +02:00
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 19:41:47 +01:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-05-26 17:53:56 +02:00
env . response . headers [ key ] = value
end
2019-05-19 14:12:45 +02:00
end
2018-09-15 04:24:28 +02:00
2019-07-03 20:13:40 +02:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2019-07-03 20:54:15 +02:00
if response . status_code >= 300 && response . status_code != 404
env . response . headers . delete ( " Transfer-Encoding " )
2019-05-26 17:53:56 +02:00
break
end
2018-09-15 04:24:28 +02:00
2019-05-26 17:53:56 +02:00
proxy_file ( response , env )
end
2019-05-26 16:41:12 +02:00
rescue ex
end
2018-04-16 03:47:37 +02:00
end
2019-10-27 05:19:05 +01:00
get " /Captcha " do | env |
2020-05-08 16:00:53 +02:00
headers = HTTP :: Headers { " :authority " = > " accounts.google.com " }
response = YT_POOL . client & . get ( env . request . resource , headers )
2019-10-27 05:19:05 +01:00
env . response . headers [ " Content-Type " ] = response . headers [ " Content-Type " ]
response . body
end
2019-08-22 01:23:20 +02:00
# Undocumented, creates anonymous playlist with specified 'video_ids', max 50 videos
get "/watch_videos" do |env|
  upstream = YT_POOL.client &.get(env.request.resource)

  if location = upstream.headers["Location"]?
    # Follow the redirect locally: keep path + query, drop the upstream host.
    next env.redirect URI.parse(location).request_target
  end

  env.response.status_code = upstream.status_code
end
2018-02-10 16:15:23 +01:00
# Smart 404 handler: a bare one-segment path might be a branding URL
# (youtube.com/gaming), a channel vanity name, or an 11-character video ID.
# Resolve it upstream and redirect accordingly; otherwise fall back to "/".
error 404 do |env|
  if md = env.request.path.match(/^\/(?<id>([a-zA-Z0-9_-]{11})|(\w+))$/)
    item = md["id"]

    # Check whether the item is a branding URL, e.g. https://youtube.com/gaming
    response = YT_POOL.client &.get("/#{item}")
    if response.status_code == 301
      response = YT_POOL.client &.get(URI.parse(response.headers["Location"]).request_target)
    end

    if response.body.empty?
      env.response.headers["Location"] = "/"
      halt env, status_code: 302
    end

    # The canonical <link> of a channel page carries its UCID in the last
    # path segment; redirect there when present.
    html = XML.parse_html(response.body)
    ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
    if ucid
      env.response.headers["Location"] = "/channel/#{ucid}"
      halt env, status_code: 302
    end

    # Preserve the original query string on the watch redirect.
    query_parts = [] of String
    env.params.query.each do |k, v|
      query_parts << "#{k}=#{v}"
    end
    query_string = query_parts.join("&")

    url = "/watch?v=#{item}"
    url += "&#{query_string}" unless query_string.empty?

    # Check whether the item is a valid video ID.
    if item.match(/^[a-zA-Z0-9_-]{11}$/) && YT_POOL.client &.head("/watch?v=#{item}").status_code != 404
      env.response.headers["Location"] = url
      halt env, status_code: 302
    end
  end

  env.response.headers["Location"] = "/"
  halt env, status_code: 302
end
2020-11-30 10:59:21 +01:00
# Renders the generic error page for unhandled exceptions.
error 500 do |env, ex|
  # NOTE(review): `locale` looks unused here, but `error_template` is a macro
  # elsewhere in this codebase and may capture `locale` from the surrounding
  # scope — confirm its expansion before removing this assignment.
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  error_template(500, ex)
end
2018-03-09 18:28:57 +01:00
# Let clients cache static assets; 2629800 s is one mean Gregorian month.
static_headers do |response, filepath, filestat|
  response.headers.add("Cache-Control", "max-age=2629800")
end
2017-11-23 08:48:55 +01:00
public_folder " assets "
2018-04-16 05:56:58 +02:00
2018-07-31 01:42:45 +02:00
Kemal . config . powered_by_header = false
2018-04-16 05:56:58 +02:00
add_handler FilteredCompressHandler . new
2019-02-03 05:48:47 +01:00
add_handler APIHandler . new
2019-04-18 23:23:50 +02:00
add_handler AuthHandler . new
2019-03-23 16:24:30 +01:00
add_handler DenyFrame . new
2019-04-18 23:23:50 +02:00
add_context_storage_type ( Array ( String ) )
2019-02-24 16:49:48 +01:00
add_context_storage_type ( Preferences )
2019-04-18 23:23:50 +02:00
add_context_storage_type ( User )
2017-11-23 08:48:55 +01:00
2021-01-04 16:51:06 +01:00
Kemal . config . logger = LOGGER
2019-09-23 19:05:29 +02:00
Kemal . config . host_binding = Kemal . config . host_binding != " 0.0.0.0 " ? Kemal . config . host_binding : CONFIG . host_binding
Kemal . config . port = Kemal . config . port != 3000 ? Kemal . config . port : CONFIG . port
2017-11-23 08:48:55 +01:00
Kemal . run