@@ -106,34 +106,30 @@ LOCALES = {
 
 YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)
 
-config = CONFIG
-output = STDOUT
-loglvl = LogLevel::Debug
 
 # CLI
 Kemal.config.extra_options do |parser|
   parser.banner = "Usage: invidious [arguments]"
-  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{config.channel_threads})") do |number|
+  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{CONFIG.channel_threads})") do |number|
     begin
-      config.channel_threads = number.to_i
+      CONFIG.channel_threads = number.to_i
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
-  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{config.feed_threads})") do |number|
+  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{CONFIG.feed_threads})") do |number|
     begin
-      config.feed_threads = number.to_i
+      CONFIG.feed_threads = number.to_i
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
-  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output_arg|
-    FileUtils.mkdir_p(File.dirname(output_arg))
-    output = File.open(output_arg, mode: "a")
+  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
+    CONFIG.output = output
  end
-  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{loglvl})") do |loglvl_arg|
-    loglvl = LogLevel.parse(loglvl_arg)
+  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
+    CONFIG.log_level = LogLevel.parse(log_level)
  end
  parser.on("-v", "--version", "Print version") do
    puts SOFTWARE.to_pretty_json
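For context, the CONFIG-mutating handlers above rely on plain Crystal OptionParser semantics (Kemal's extra_options just exposes the parser). A minimal standalone sketch of the same pattern, with MiniConfig as a made-up stand-in for the real YAML-backed Config:

```crystal
require "option_parser"

# Stand-in for Invidious' YAML-backed CONFIG constant.
class MiniConfig
  property channel_threads : Int32 = 1
end

CONFIG = MiniConfig.new

OptionParser.parse do |parser|
  parser.banner = "Usage: invidious [arguments]"
  parser.on("-c THREADS", "--channel-threads=THREADS",
    "Number of threads for refreshing channels (default: #{CONFIG.channel_threads})") do |number|
    begin
      CONFIG.channel_threads = number.to_i # to_i raises ArgumentError on bad input
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
end

puts CONFIG.channel_threads
```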
@@ -143,35 +139,41 @@ end
 
 Kemal::CLI.new ARGV
 
-logger = Invidious::LogHandler.new(output, loglvl)
+if CONFIG.output.upcase != "STDOUT"
+  FileUtils.mkdir_p(File.dirname(CONFIG.output))
+end
+OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
+LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
 
+config = CONFIG
+
 # Check table integrity
 if CONFIG.check_tables
-  check_enum(PG_DB, logger, "privacy", PlaylistPrivacy)
+  check_enum(PG_DB, "privacy", PlaylistPrivacy)
 
-  check_table(PG_DB, logger, "channels", InvidiousChannel)
-  check_table(PG_DB, logger, "channel_videos", ChannelVideo)
-  check_table(PG_DB, logger, "playlists", InvidiousPlaylist)
-  check_table(PG_DB, logger, "playlist_videos", PlaylistVideo)
-  check_table(PG_DB, logger, "nonces", Nonce)
-  check_table(PG_DB, logger, "session_ids", SessionId)
-  check_table(PG_DB, logger, "users", User)
-  check_table(PG_DB, logger, "videos", Video)
+  check_table(PG_DB, "channels", InvidiousChannel)
+  check_table(PG_DB, "channel_videos", ChannelVideo)
+  check_table(PG_DB, "playlists", InvidiousPlaylist)
+  check_table(PG_DB, "playlist_videos", PlaylistVideo)
+  check_table(PG_DB, "nonces", Nonce)
+  check_table(PG_DB, "session_ids", SessionId)
+  check_table(PG_DB, "users", User)
+  check_table(PG_DB, "videos", Video)
 
  if CONFIG.cache_annotations
-    check_table(PG_DB, logger, "annotations", Annotation)
+    check_table(PG_DB, "annotations", Annotation)
  end
 end
 
 # Start jobs
 
-Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, logger, config, HMAC_KEY)
+Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, config, HMAC_KEY)
 
 DECRYPT_FUNCTION = DecryptFunction.new(CONFIG.decrypt_polling)
 if config.decrypt_polling
-  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new(logger)
+  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
 end
 
 if config.statistics_enabled
@@ -183,7 +185,7 @@ if config.popular_enabled
 end
 
 if config.captcha_key
-  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(logger, config)
+  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(config)
 end
 
 connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
@@ -261,7 +263,7 @@ before_all do |env|
       headers["Cookie"] = env.request.headers["Cookie"]
 
       begin
-        user, sid = get_user(sid, headers, PG_DB, logger, false)
+        user, sid = get_user(sid, headers, PG_DB, false)
        csrf_token = generate_response(sid, {
          ":authorize_token",
          ":playlist_ajax",
@@ -531,7 +533,7 @@ post "/subscription_ajax" do |env|
   case action
   when "action_create_subscription_to_channel"
     if !user.subscriptions.includes? channel_id
-      get_channel(channel_id, PG_DB, logger, false, false)
+      get_channel(channel_id, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
    end
  when "action_remove_subscriptions"
@@ -566,7 +568,7 @@ get "/subscription_manager" do |env|
     headers = HTTP::Headers.new
     headers["Cookie"] = env.request.headers["Cookie"]
 
-    user, sid = get_user(sid, headers, PG_DB, logger)
+    user, sid = get_user(sid, headers, PG_DB)
  end
 
  action_takeout = env.params.query["action_takeout"]?.try &.to_i?
@@ -690,7 +692,7 @@ post "/data_control" do |env|
           user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
           user.subscriptions.uniq!
 
-          user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+          user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
          PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
        end
@@ -759,7 +761,7 @@ post "/data_control" do |env|
         end
         user.subscriptions.uniq!
 
-        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_freetube"
@@ -768,7 +770,7 @@ post "/data_control" do |env|
         end
         user.subscriptions.uniq!
 
-        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_newpipe_subscriptions"
@@ -787,7 +789,7 @@ post "/data_control" do |env|
         end
         user.subscriptions.uniq!
 
-        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_newpipe"
@@ -806,7 +808,7 @@ post "/data_control" do |env|
               user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
               user.subscriptions.uniq!
 
-              user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+              user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
              PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
 
@@ -1209,7 +1211,7 @@ get "/feed/subscriptions" do |env|
   headers["Cookie"] = env.request.headers["Cookie"]
 
   if !user.password
-    user, sid = get_user(sid, headers, PG_DB, logger)
+    user, sid = get_user(sid, headers, PG_DB)
  end
 
  max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
@@ -1513,7 +1515,7 @@ post "/feed/webhook/:token" do |env|
   signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
 
   if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
-    logger.error("/feed/webhook/#{token} : Invalid signature")
+    LOGGER.error("/feed/webhook/#{token} : Invalid signature")
    env.response.status_code = 200
    next
  end
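For reference, the webhook check above is a plain HMAC-SHA1 over the raw request body, compared as a lowercase hex digest. A self-contained sketch of the same verification (key and body are made-up values, not Invidious data):

```crystal
require "openssl/hmac"

hmac_key = "not-the-real-key"        # stand-in for HMAC_KEY
body     = %({"example":"payload"})  # raw POST body

# The hub sends "X-Hub-Signature: sha1=<hexdigest>".
header    = "sha1=" + OpenSSL::HMAC.hexdigest(:sha1, hmac_key, body)
signature = header.lchop("sha1=")

if signature != OpenSSL::HMAC.hexdigest(:sha1, hmac_key, body)
  puts "Invalid signature"
else
  puts "Signature ok"
end
```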
@@ -2831,7 +2833,7 @@ post "/api/v1/auth/subscriptions/:ucid" do |env|
   ucid = env.params.url["ucid"]
 
   if !user.subscriptions.includes? ucid
-    get_channel(ucid, PG_DB, logger, false, false)
+    get_channel(ucid, PG_DB, false, false)
    PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
  end
 
@@ -3925,7 +3927,7 @@ add_context_storage_type(Array(String))
 add_context_storage_type(Preferences)
 add_context_storage_type(User)
 
-Kemal.config.logger = logger
+Kemal.config.logger = LOGGER
 Kemal.config.host_binding = Kemal.config.host_binding != "0.0.0.0" ? Kemal.config.host_binding : CONFIG.host_binding
 Kemal.config.port = Kemal.config.port != 3000 ? Kemal.config.port : CONFIG.port
 Kemal.run

@@ -144,7 +144,7 @@ class ChannelRedirect < Exception
   end
 end
 
-def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos = true, max_threads = 10)
+def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
  finished_channel = Channel(String | Nil).new
 
  spawn do
@@ -160,7 +160,7 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos = true, max_threads = 10)
       active_threads += 1
       spawn do
         begin
-          get_channel(ucid, db, logger, refresh, pull_all_videos)
+          get_channel(ucid, db, refresh, pull_all_videos)
          finished_channel.send(ucid)
        rescue ex
          finished_channel.send(nil)
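For context, get_batch_channels caps concurrency by receiving one completion from the channel before spawning past the limit, and fibers report failure as nil instead of raising. A simplified, self-contained sketch of that fan-out/fan-in shape (batch_process and its no-op work are illustrative, not Invidious code):

```crystal
def batch_process(ids : Array(String), max_fibers = 10) : Array(String)
  finished  = Channel(String | Nil).new
  successes = [] of String
  active    = 0

  ids.each do |id|
    # Block until one fiber finishes before exceeding the limit.
    if active >= max_fibers
      if done = finished.receive
        successes << done
      end
      active -= 1
    end

    active += 1
    spawn do
      begin
        # Stand-in for the real per-id work (get_channel in Invidious).
        finished.send(id)
      rescue ex
        finished.send(nil) # Report failure without killing the batch.
      end
    end
  end

  # Drain the fibers still in flight.
  active.times do
    if done = finished.receive
      successes << done
    end
  end

  successes
end

puts batch_process(("a".."e").to_a).size # => 5
```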
@@ -181,10 +181,10 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos = true, max_threads = 10)
   return final
 end
 
-def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
+def get_channel(id, db, refresh = true, pull_all_videos = true)
  if channel = db.query_one?("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
    if refresh && Time.utc - channel.updated > 10.minutes
-      channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+      channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
      channel_array = channel.to_a
      args = arg_array(channel_array)
 
@@ -192,7 +192,7 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
         ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", args: channel_array)
     end
   else
-    channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+    channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
    channel_array = channel.to_a
    args = arg_array(channel_array)
 
@@ -202,12 +202,13 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
   return channel
 end
 
-def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
-  logger.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
+def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+  LOGGER.debug("fetch_channel: #{ucid}")
+  LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
 
-  logger.trace("fetch_channel: #{ucid} : Downloading RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
  rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
-  logger.trace("fetch_channel: #{ucid} : Parsing RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
  rss = XML.parse_html(rss)
 
  author = rss.xpath_node(%q(//feed/title))
@@ -223,11 +224,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
     auto_generated = true
   end
 
-  logger.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+  LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
 
  page = 1
 
-  logger.trace("fetch_channel: #{ucid} : Downloading channel videos page")
+  LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
  response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
 
  videos = [] of SearchVideo
@@ -235,7 +236,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
     initial_data = JSON.parse(response.body).as_a.find &.["response"]?
     raise InfoException.new("Could not extract channel JSON") if !initial_data
 
-    logger.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
+    LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
    videos = extract_videos(initial_data.as_h, author, ucid)
  rescue ex
    if response.body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
@@ -245,7 +246,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
     raise ex
   end
 
-  logger.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
  rss.xpath_nodes("//feed/entry").each do |entry|
    video_id = entry.xpath_node("videoid").not_nil!.content
    title = entry.xpath_node("title").not_nil!.content
@@ -279,7 +280,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
       views:              views,
     })
 
-    logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+    LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
 
    # We don't include the 'premiere_timestamp' here because channel pages don't include them,
    # meaning the above timestamp is always null
@@ -289,11 +290,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
       live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
 
     if was_insert
-      logger.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+      LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
      db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
        feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid)
    else
-      logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+      LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
    end
  end
 
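An aside on the `returning (xmax=0) as was_insert` clause above: it works because PostgreSQL's xmax system column is 0 for a freshly inserted row and non-zero when ON CONFLICT ... DO UPDATE rewrote an existing one. Reduced to a hypothetical two-column table, the same upsert looks like:

```crystal
require "db"

# Returns true when the row was newly inserted, false when the
# upsert updated an existing row (xmax = 0 only holds for inserts).
def upsert_video(db : DB::Database, id : String, title : String) : Bool
  db.query_one(
    "INSERT INTO channel_videos (id, title) VALUES ($1, $2) \
     ON CONFLICT (id) DO UPDATE SET title = $2 \
     RETURNING (xmax = 0) AS was_insert",
    id, title, as: Bool)
end
```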

@@ -64,11 +64,13 @@ end
 class Config
   include YAML::Serializable
 
-  property channel_threads : Int32                 # Number of threads to use for crawling videos from channels (for updating subscriptions)
-  property feed_threads : Int32                    # Number of threads to use for updating feeds
+  property channel_threads : Int32 = 1             # Number of threads to use for crawling videos from channels (for updating subscriptions)
+  property feed_threads : Int32 = 1                # Number of threads to use for updating feeds
+  property output : String = "STDOUT"              # Log file path or STDOUT
+  property log_level : LogLevel = LogLevel::Info   # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
  property db : DBConfig                           # Database configuration
  property decrypt_polling : Bool = true           # Use polling to keep decryption function up to date
-  property full_refresh : Bool                     # Used for crawling channels: threads should check all videos uploaded by a channel
+  property full_refresh : Bool = false             # Used for crawling channels: threads should check all videos uploaded by a channel
  property https_only : Bool?                      # Used to tell Invidious it is behind a proxy, so links to resources should be https://
  property hmac_key : String?                      # HMAC signing key for CSRF tokens and verifying pubsub subscriptions
  property domain : String?                        # Domain to be used for links to resources on the site where an absolute URL is required
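Because the new properties carry defaults, YAML::Serializable treats them as optional keys, so a minimal config file keeps working. A sketch with a cut-down stand-in for the real Config:

```crystal
require "yaml"

class ExampleConfig
  include YAML::Serializable

  property channel_threads : Int32 = 1
  property feed_threads : Int32 = 1
  property output : String = "STDOUT"
  property full_refresh : Bool = false
end

# Keys omitted from the YAML fall back to the property defaults.
config = ExampleConfig.from_yaml("channel_threads: 4")
puts config.channel_threads # => 4
puts config.output          # => STDOUT
```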
@@ -334,11 +336,11 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
   items
 end
 
-def check_enum(db, logger, enum_name, struct_type = nil)
+def check_enum(db, enum_name, struct_type = nil)
  return # TODO
 
  if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
-    logger.info("check_enum: CREATE TYPE #{enum_name}")
+    LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
 
    db.using_connection do |conn|
      conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -346,12 +348,12 @@ def check_enum(db, logger, enum_name, struct_type = nil)
   end
 end
 
-def check_table(db, logger, table_name, struct_type = nil)
+def check_table(db, table_name, struct_type = nil)
  # Create table if it doesn't exist
  begin
    db.exec("SELECT * FROM #{table_name} LIMIT 0")
  rescue ex
-    logger.info("check_table: check_table: CREATE TABLE #{table_name}")
+    LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
 
    db.using_connection do |conn|
      conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -371,7 +373,7 @@ def check_table(db, logger, table_name, struct_type = nil)
     if name != column_array[i]?
       if !column_array[i]?
         new_column = column_types.select { |line| line.starts_with? name }[0]
-        logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
        next
      end
@@ -389,29 +391,29 @@ def check_table(db, logger, table_name, struct_type = nil)
 
           # There's a column we didn't expect
           if !new_column
-            logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+            LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
            db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 
            column_array = get_column_array(db, table_name)
            next
          end
 
-          logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
          db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 
-          logger.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+          LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
          db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
 
-          logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
          db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 
-          logger.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
          db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
 
          column_array = get_column_array(db, table_name)
        end
      else
-        logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
        db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
      end
    end
@@ -421,7 +423,7 @@ def check_table(db, logger, table_name, struct_type = nil)
 
   column_array.each do |column|
     if !struct_array.includes? column
-      logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+      LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
      db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
    end
  end
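Taken together, check_table migrates a column whose definition changed via an add/copy/drop/rename dance, so existing data survives the change. For a hypothetical out-of-date title column on videos, the issued statements reduce to:

```crystal
require "db"

# Hypothetical single-column migration mirroring check_table's sequence.
def migrate_title_column(db : DB::Database)
  db.exec("ALTER TABLE videos ADD COLUMN title_new text")
  db.exec("UPDATE videos SET title_new=title")
  db.exec("ALTER TABLE videos DROP COLUMN title CASCADE")
  db.exec("ALTER TABLE videos RENAME COLUMN title_new TO title")
end
```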

@@ -1,14 +1,14 @@
 require "logger"
 
 enum LogLevel
-  All
-  Trace
-  Debug
-  Info
-  Warn
-  Error
-  Fatal
-  Off
+  All   = 0
+  Trace = 1
+  Debug = 2
+  Info  = 3
+  Warn  = 4
+  Error = 5
+  Fatal = 6
+  Off   = 7
 end
 
 class Invidious::LogHandler < Kemal::BaseLogHandler
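Pinning explicit integer values keeps the ordering stable and lets the config accept either names or numbers for log_level. Crystal enums include Comparable over the underlying value, which is what threshold filtering relies on; a minimal sketch (the emit helper is illustrative, not the real LogHandler API):

```crystal
enum LogLevel
  All   = 0
  Trace = 1
  Debug = 2
  Info  = 3
  Warn  = 4
  Error = 5
  Fatal = 6
  Off   = 7
end

# Only write messages at or above the configured threshold.
def emit(threshold : LogLevel, level : LogLevel, message : String)
  puts "[#{level}] #{message}" if level >= threshold
end

emit(LogLevel::Info, LogLevel::Debug, "dropped") # below threshold: no output
emit(LogLevel::Info, LogLevel::Error, "printed") # at or above: printed
```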

@@ -1,8 +1,7 @@
 class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
-  private getter logger : Invidious::LogHandler
   private getter config : Config
 
-  def initialize(@logger, @config)
+  def initialize(@config)
  end
 
  def begin
@@ -127,7 +126,7 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
           end
         end
       rescue ex
-        logger.error("BypassCaptchaJob: #{ex.message}")
+        LOGGER.error("BypassCaptchaJob: #{ex.message}")
      ensure
        sleep 1.minute
        Fiber.yield

@@ -1,9 +1,8 @@
 class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
   private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
   private getter config : Config
 
-  def initialize(@db, @logger, @config)
+  def initialize(@db, @config)
  end
 
  def begin
@@ -14,37 +13,37 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
     backoff = 1.seconds
 
     loop do
-      logger.debug("RefreshChannelsJob: Refreshing all channels")
+      LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
      db.query("SELECT id FROM channels ORDER BY updated") do |rs|
        rs.each do
          id = rs.read(String)
 
          if active_fibers >= lim_fibers
-            logger.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
+            LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
            if active_channel.receive
-              logger.trace("RefreshChannelsJob: Fiber limit ok, continuing")
+              LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing")
              active_fibers -= 1
            end
          end
 
-          logger.trace("RefreshChannelsJob: #{id} : Spawning fiber")
+          LOGGER.debug("RefreshChannelsJob: #{id} : Spawning fiber")
          active_fibers += 1
          spawn do
            begin
-              logger.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
-              channel = fetch_channel(id, db, logger, config.full_refresh)
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
+              channel = fetch_channel(id, db, config.full_refresh)
 
              lim_fibers = max_fibers
 
-              logger.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
              db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
            rescue ex
-              logger.error("RefreshChannelsJob: #{id} : #{ex.message}")
+              LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
              if ex.message == "Deleted or invalid channel"
                db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
              else
                lim_fibers = 1
-                logger.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
+                LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
                sleep backoff
                if backoff < 1.days
                  backoff += backoff
@@ -53,14 +52,14 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
                 end
               end
             ensure
-              logger.trace("RefreshChannelsJob: #{id} fiber : Done")
+              LOGGER.debug("RefreshChannelsJob: #{id} fiber : Done")
              active_channel.send(true)
            end
          end
        end
      end
 
-      logger.debug("RefreshChannelsJob: Done, sleeping for one minute")
+      LOGGER.debug("RefreshChannelsJob: Done, sleeping for one minute")
      sleep 1.minute
      Fiber.yield
    end

@@ -1,9 +1,8 @@
 class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
   private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
   private getter config : Config
 
-  def initialize(@db, @logger, @config)
+  def initialize(@db, @config)
  end
 
  def begin
@@ -30,14 +29,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
               column_array = get_column_array(db, view_name)
               ChannelVideo.type_array.each_with_index do |name, i|
                 if name != column_array[i]?
-                  logger.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
+                  LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
                  db.exec("DROP MATERIALIZED VIEW #{view_name}")
                  raise "view does not exist"
                end
              end
 
              if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
-                logger.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
+                LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
                db.exec("DROP MATERIALIZED VIEW #{view_name}")
              end
 
@@ -49,18 +48,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
                 legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
 
                 db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
-                logger.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
+                LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
                db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
              rescue ex
                begin
                  # While iterating through, we may have an email stored from a deleted account
                  if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
-                    logger.info("RefreshFeedsJob: CREATE #{view_name}")
+                    LOGGER.info("RefreshFeedsJob: CREATE #{view_name}")
                    db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
                    db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
                  end
                rescue ex
-                  logger.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
+                  LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
                end
              end
            end

@@ -1,10 +1,9 @@
 class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
   private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
   private getter hmac_key : String
   private getter config : Config
 
-  def initialize(@db, @logger, @config, @hmac_key)
+  def initialize(@db, @config, @hmac_key)
  end
 
  def begin
@@ -34,10 +33,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
               response = subscribe_pubsub(ucid, hmac_key, config)
 
               if response.status_code >= 400
-                logger.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
+                LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
              end
            rescue ex
-              logger.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
+              LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
            end
 
            active_channel.send(true)

@@ -1,15 +1,10 @@
 class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
-  private getter logger : Invidious::LogHandler
-
-  def initialize(@logger)
-  end
-
  def begin
    loop do
      begin
        DECRYPT_FUNCTION.update_decrypt_function
      rescue ex
-        logger.error("UpdateDecryptFunctionJob : #{ex.message}")
+        LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
      ensure
        sleep 1.minute
        Fiber.yield

@@ -1,7 +1,6 @@
 abstract class Invidious::Routes::BaseRoute
   private getter config : Config
-  private getter logger : Invidious::LogHandler
 
-  def initialize(@config, @logger)
+  def initialize(@config)
  end
 end

@@ -267,7 +267,7 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
           raise "Couldn't get SID."
         end
 
-        user, sid = get_user(sid, headers, PG_DB, logger)
+        user, sid = get_user(sid, headers, PG_DB)
 
        # We are now logged in
        traceback << "done.<br/>"

@@ -62,7 +62,7 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
     rescue ex : VideoRedirect
       return env.redirect env.request.resource.gsub(id, ex.video_id)
     rescue ex
-      logger.error("get_video: #{id} : #{ex.message}")
+      LOGGER.error("get_video: #{id} : #{ex.message}")
      return error_template(500, ex)
    end
 

@@ -1,14 +1,14 @@
 module Invidious::Routing
   macro get(path, controller, method = :handle)
     get {{ path }} do |env|
-      controller_instance = {{ controller }}.new(config, logger)
+      controller_instance = {{ controller }}.new(config)
      controller_instance.{{ method.id }}(env)
    end
  end
 
  macro post(path, controller, method = :handle)
    post {{ path }} do |env|
-      controller_instance = {{ controller }}.new(config, logger)
+      controller_instance = {{ controller }}.new(config)
      controller_instance.{{ method.id }}(env)
    end
  end
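Expanded by hand, a registration such as Invidious::Routing.get "/watch", Invidious::Routes::Watch now pastes into a Kemal block roughly like the following, with config being the local alias for CONFIG in scope at the expansion site:

```crystal
get "/watch" do |env|
  controller_instance = Invidious::Routes::Watch.new(config)
  controller_instance.handle(env)
end
```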

@@ -269,12 +269,12 @@ struct Preferences
   end
 end
 
-def get_user(sid, headers, db, logger, refresh = true)
+def get_user(sid, headers, db, refresh = true)
  if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
    user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
 
    if refresh && Time.utc - user.updated > 1.minute
-      user, sid = fetch_user(sid, headers, db, logger)
+      user, sid = fetch_user(sid, headers, db)
      user_array = user.to_a
      user_array[4] = user_array[4].to_json # User preferences
      args = arg_array(user_array)
@@ -292,7 +292,7 @@ def get_user(sid, headers, db, logger, refresh = true)
       end
     end
   else
-    user, sid = fetch_user(sid, headers, db, logger)
+    user, sid = fetch_user(sid, headers, db)
    user_array = user.to_a
    user_array[4] = user_array[4].to_json # User preferences
    args = arg_array(user.to_a)
@@ -313,7 +313,7 @@ def get_user(sid, headers, db, logger, refresh = true)
   return user, sid
 end
 
-def fetch_user(sid, headers, db, logger)
+def fetch_user(sid, headers, db)
  feed = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
  feed = XML.parse_html(feed.body)
 
@@ -326,7 +326,7 @@ def fetch_user(sid, headers, db, logger)
     end
   end
 
-  channels = get_batch_channels(channels, db, logger, false, false)
+  channels = get_batch_channels(channels, db, false, false)
 
  email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
  if email