Mirror of https://github.com/iv-org/invidious.git, synced 2025-10-31 04:32:02 +00:00
	Minor refactor
 src/invidious.cr | 186
@@ -148,26 +148,32 @@ statistics = {
}
if config.statistics_enabled
  spawn do
    loop do
      statistics = {
        "version"           => "2.0",
        "software"          => SOFTWARE,
        "openRegistrations" => config.registration_enabled,
        "usage"             => {
          "users" => {
            "total"          => PG_DB.query_one("SELECT count(*) FROM users", as: Int64),
            "activeHalfyear" => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '6 months'", as: Int64),
            "activeMonth"    => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '1 month'", as: Int64),
          },
    statistics = {
      "version"           => "2.0",
      "software"          => SOFTWARE,
      "openRegistrations" => config.registration_enabled,
      "usage"             => {
        "users" => {
          "total"          => PG_DB.query_one("SELECT count(*) FROM users", as: Int64),
          "activeHalfyear" => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '6 months'", as: Int64),
          "activeMonth"    => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '1 month'", as: Int64),
        },
        "metadata" => {
          "updatedAt"              => Time.utc.to_unix,
          "lastChannelRefreshedAt" => PG_DB.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0,
        },
      }
      },
      "metadata" => {
        "updatedAt"              => Time.utc.to_unix,
        "lastChannelRefreshedAt" => PG_DB.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0_i64,
      },
    }

    loop do
      sleep 1.minute
      Fiber.yield

      statistics["usage"].as(Hash)["users"].as(Hash)["total"] = PG_DB.query_one("SELECT count(*) FROM users", as: Int64)
      statistics["usage"].as(Hash)["users"].as(Hash)["activeHalfyear"] = PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '6 months'", as: Int64)
      statistics["usage"].as(Hash)["users"].as(Hash)["activeMonth"] = PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '1 month'", as: Int64)
      statistics["metadata"].as(Hash(String, Int64))["updatedAt"] = Time.utc.to_unix
      statistics["metadata"].as(Hash(String, Int64))["lastChannelRefreshedAt"] = PG_DB.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0_i64
    end
  end
end
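The refactored version builds the statistics hash once and then mutates it in place from a background fiber instead of reallocating it every minute. Because the nested values are union-typed, each level has to be narrowed with .as(Hash) before assignment. A minimal standalone sketch of that pattern, with a hypothetical count_users stub in place of the real PG_DB queries:

# Sketch only: count_users is a stand-in for the PG_DB queries used in Invidious.
def count_users : Int64
  42_i64
end

statistics = {
  "version"  => "2.0",
  "usage"    => {
    "users" => {"total" => count_users},
  },
  "metadata" => {"updatedAt" => Time.utc.to_unix},
}

spawn do
  loop do
    sleep 1.minute

    # The hash values are a union type, so narrow each level before writing.
    statistics["usage"].as(Hash)["users"].as(Hash)["total"] = count_users
    statistics["metadata"].as(Hash(String, Int64))["updatedAt"] = Time.utc.to_unix
  end
end
# (In Invidious the web server keeps the main fiber alive, so this loop keeps running.)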
@@ -3223,35 +3229,35 @@ get "/api/v1/storyboards/:id" do |env|
    storyboard = storyboard[0]
  end

  webvtt = <<-END_VTT
  WEBVTT
  String.build do |str|
    str << <<-END_VTT
    WEBVTT


  END_VTT
    END_VTT

  start_time = 0.milliseconds
  end_time = storyboard[:interval].milliseconds
    start_time = 0.milliseconds
    end_time = storyboard[:interval].milliseconds

  storyboard[:storyboard_count].times do |i|
    host_url = make_host_url(config, Kemal.config)
    url = storyboard[:url].gsub("$M", i).gsub("https://i9.ytimg.com", host_url)
    storyboard[:storyboard_count].times do |i|
      host_url = make_host_url(config, Kemal.config)
      url = storyboard[:url].gsub("$M", i).gsub("https://i9.ytimg.com", host_url)

    storyboard[:storyboard_height].times do |j|
      storyboard[:storyboard_width].times do |k|
        webvtt += <<-END_CUE
        #{start_time}.000 --> #{end_time}.000
        #{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width]},#{storyboard[:height]}
      storyboard[:storyboard_height].times do |j|
        storyboard[:storyboard_width].times do |k|
          str << <<-END_CUE
          #{start_time}.000 --> #{end_time}.000
          #{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width]},#{storyboard[:height]}


        END_CUE
          END_CUE

        start_time += storyboard[:interval].milliseconds
        end_time += storyboard[:interval].milliseconds
          start_time += storyboard[:interval].milliseconds
          end_time += storyboard[:interval].milliseconds
        end
      end
    end
  end

  webvtt
end

get "/api/v1/captions/:id" do |env|
@@ -3321,7 +3327,7 @@ get "/api/v1/captions/:id" do |env|
    caption = caption[0]
  end

  url = caption.baseUrl + "&tlang=#{tlang}"
  url = "#{caption.baseUrl}&tlang=#{tlang}"

  # Auto-generated captions often have cues that aren't aligned properly with the video,
  # as well as some other markup that makes it cumbersome, so we try to fix that here
@@ -3329,46 +3335,47 @@ get "/api/v1/captions/:id" do |env|
    caption_xml = client.get(url).body
    caption_xml = XML.parse(caption_xml)

    webvtt = <<-END_VTT
    WEBVTT
    Kind: captions
    Language: #{tlang || caption.languageCode}
    webvtt = String.build do |str|
      str << <<-END_VTT
      WEBVTT
      Kind: captions
      Language: #{tlang || caption.languageCode}


    END_VTT
      END_VTT

    caption_nodes = caption_xml.xpath_nodes("//transcript/text")
    caption_nodes.each_with_index do |node, i|
      start_time = node["start"].to_f.seconds
      duration = node["dur"]?.try &.to_f.seconds
      duration ||= start_time
      caption_nodes = caption_xml.xpath_nodes("//transcript/text")
      caption_nodes.each_with_index do |node, i|
        start_time = node["start"].to_f.seconds
        duration = node["dur"]?.try &.to_f.seconds
        duration ||= start_time

      if caption_nodes.size > i + 1
        end_time = caption_nodes[i + 1]["start"].to_f.seconds
      else
        end_time = start_time + duration
        if caption_nodes.size > i + 1
          end_time = caption_nodes[i + 1]["start"].to_f.seconds
        else
          end_time = start_time + duration
        end

        start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
        end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"

        text = HTML.unescape(node.content)
        text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
        text = text.gsub(/<\/font>/, "")
        if md = text.match(/(?<name>.*) : (?<text>.*)/)
          text = "<v #{md["name"]}>#{md["text"]}</v>"
        end

        str << <<-END_CUE
        #{start_time} --> #{end_time}
        #{text}


        END_CUE
      end

      start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
      end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"

      text = HTML.unescape(node.content)
      text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
      text = text.gsub(/<\/font>/, "")
      if md = text.match(/(?<name>.*) : (?<text>.*)/)
        text = "<v #{md["name"]}>#{md["text"]}</v>"
      end

      webvtt += <<-END_CUE
    #{start_time} --> #{end_time}
    #{text}


    END_CUE
    end
  else
    url += "&format=vtt"
    webvtt = client.get(url).body
    webvtt = client.get("#{url}&format=vtt").body
  end

  if title = env.params.query["title"]?
@@ -4833,43 +4840,6 @@ get "/videoplayback" do |env|
    end
  end

  response = HTTP::Client::Response.new(403)
  5.times do
    begin
      client = make_client(URI.parse(host), region)
      response = client.head(url, headers)
      break
    rescue Socket::Addrinfo::Error
      if !mns.empty?
        mn = mns.pop
      end
      fvip = "3"

      host = "https://r#{fvip}---#{mn}.googlevideo.com"
    rescue ex
    end
  end

  if response.headers["Location"]?
    url = URI.parse(response.headers["Location"])
    host = url.host
    env.response.headers["Access-Control-Allow-Origin"] = "*"

    url = url.full_path
    url += "&host=#{host}"

    if region
      url += "&region=#{region}"
    end

    next env.redirect url
  end

  if response.status_code >= 400
    env.response.status_code = response.status_code
    next
  end

  if url.includes? "&file=seg.ts"
    if CONFIG.disabled?("livestreams")
      env.response.status_code = 403
@@ -4957,11 +4927,7 @@ get "/videoplayback" do |env|

            if location = response.headers["Location"]?
              location = URI.parse(location)
              location = "#{location.full_path}&host=#{location.host}"

              if region
                location += "&region=#{region}"
              end
              location = "#{location.full_path}&host=#{location.host}#{region ? "&region=#{region}" : ""}"

              env.redirect location
              break

@@ -159,10 +159,9 @@ class APIHandler < Kemal::Handler
      call_next env

      env.response.output.rewind
      response = env.response.output.gets_to_end

      if env.response.headers["Content-Type"]?.try &.== "application/json"
        response = JSON.parse(response)
      if env.response.headers.includes_word?("Content-Type", "application/json")
        response = JSON.parse(env.response.output)

        if fields_text = env.params.query["fields"]?
          begin
@@ -178,6 +177,8 @@ class APIHandler < Kemal::Handler
        else
          response = response.to_json
        end
      else
        response = env.response.output.gets_to_end
      end
    rescue ex
    ensure

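The APIHandler change swaps an exact Content-Type string comparison for HTTP::Headers#includes_word?, which matches the media type even when the header carries extra parameters such as a charset. A small illustration of the difference; the header value is chosen for the example:

require "http"

# Example only: a Content-Type with a charset parameter, as real responses often carry.
headers = HTTP::Headers.new
headers["Content-Type"] = "application/json; charset=utf-8"

p headers["Content-Type"]?.try &.== "application/json"        # exact comparison misses the parameter
p headers.includes_word?("Content-Type", "application/json")  # word match still finds the media type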
@@ -295,8 +295,7 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)

    args = arg_array(notifications)

    notifications = db.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
    ORDER BY published DESC", notifications, as: ChannelVideo)
    notifications = db.query_all("SELECT * FROM channel_videos WHERE id IN (#{args}) ORDER BY published DESC", notifications, as: ChannelVideo)
    videos = [] of ChannelVideo

    notifications.sort_by! { |video| video.published }.reverse!
@@ -322,14 +321,11 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
        else
          values = "VALUES #{user.watched.map { |id| %(('#{id}')) }.join(",")}"
        end
        videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} WHERE \
        NOT id = ANY (#{values}) \
        ORDER BY ucid, published DESC", as: ChannelVideo)
        videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} WHERE NOT id = ANY (#{values}) ORDER BY ucid, published DESC", as: ChannelVideo)
      else
        # Show latest video from each channel

        videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} \
        ORDER BY ucid, published DESC", as: ChannelVideo)
        videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} ORDER BY ucid, published DESC", as: ChannelVideo)
      end

      videos.sort_by! { |video| video.published }.reverse!
@@ -342,14 +338,11 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
        else
          values = "VALUES #{user.watched.map { |id| %(('#{id}')) }.join(",")}"
        end
        videos = PG_DB.query_all("SELECT * FROM #{view_name} WHERE \
        NOT id = ANY (#{values}) \
        ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
        videos = PG_DB.query_all("SELECT * FROM #{view_name} WHERE NOT id = ANY (#{values}) ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
      else
        # Sort subscriptions as normal

        videos = PG_DB.query_all("SELECT * FROM #{view_name} \
        ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
        videos = PG_DB.query_all("SELECT * FROM #{view_name} ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
      end
    end

@@ -366,16 +359,11 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
      videos.sort_by! { |video| video.author }.reverse!
    end

    notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
      as: Array(String))
    notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email, as: Array(String))

    notifications = videos.select { |v| notifications.includes? v.id }
    videos = videos - notifications
  end

  if !limit
    videos = videos[0..max_results]
  end

  return videos, notifications
end

Omar Roth