mirror of https://github.com/iv-org/invidious.git

Automatically migrate database
Omar Roth
@@ -105,10 +105,16 @@ end
 
 Kemal::CLI.new ARGV
 
+# Check table integrity
+analyze_table(PG_DB, logger, "channel_videos", ChannelVideo)
+analyze_table(PG_DB, logger, "nonces", Nonce)
+analyze_table(PG_DB, logger, "session_ids", SessionId)
+analyze_table(PG_DB, logger, "users", User)
+analyze_table(PG_DB, logger, "videos", Video)
+
 # Start jobs
 refresh_channels(PG_DB, logger, config.channel_threads, config.full_refresh)
 
 refresh_feeds(PG_DB, logger, config.feed_threads)
 
 subscribe_to_feeds(PG_DB, logger, HMAC_KEY, config)
 
 statistics = {
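Each analyze_table call above is new in this commit: at startup it creates the named table from config/sql/<table>.sql if it is missing, then reconciles the live columns against the fields of the struct passed as the fourth argument (the helper itself is added in the hunks below). A minimal sketch of wiring up a hypothetical extra table, assuming a config/sql/playlists.sql definition exists and Playlist is declared with db_mapping:

    # Hypothetical example: a struct declared with db_mapping gains
    # to_type_tuple, which analyze_table uses to diff columns at startup.
    struct Playlist
      db_mapping({
        id:    String,
        title: String,
      })
    end

    # Creates "playlists" from config/sql/playlists.sql if absent, then
    # adds, rotates, or drops columns to match Playlist's field order.
    analyze_table(PG_DB, logger, "playlists", Playlist)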
@@ -1,5 +1,20 @@
 require "./macros"
 
+struct Nonce
+  db_mapping({
+    nonce:  String,
+    expire: Time,
+  })
+end
+
+struct SessionId
+  db_mapping({
+    id:     String,
+    email:  String,
+    issued: String,
+  })
+end
+
 struct ConfigPreferences
   module StringToArray
     def self.to_yaml(value : Array(String), yaml : YAML::Nodes::Builder)
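The new Nonce and SessionId structs exist so the startup checks above can pass a struct type for the nonces and session_ids tables; their fields are presumably meant to mirror the column definitions in config/sql/nonces.sql and config/sql/session_ids.sql, which analyze_table reads when creating or altering those tables.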
@@ -483,3 +498,92 @@ def extract_shelf_items(nodeset, ucid = nil, author_name = nil)
 
   return items
 end
+
+def analyze_table(db, logger, table_name, struct_type = nil)
+  # Create table if it doesn't exist
+  if !db.query_one?("SELECT true FROM information_schema.tables WHERE table_name = $1", table_name, as: Bool)
+    db.using_connection do |conn|
+      conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
+    end
+
+    logger.write("CREATE TABLE #{table_name}\n")
+  end
+
+  if !struct_type
+    return
+  end
+
+  struct_array = struct_type.to_type_tuple
+  column_array = get_column_array(db, table_name)
+  column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
+    .try &.["types"].split(",").map { |line| line.strip }
+
+  if !column_types
+    return
+  end
+
+  struct_array.each_with_index do |name, i|
+    if name != column_array[i]?
+      if !column_array[i]?
+        new_column = column_types.select { |line| line.starts_with? name }[0]
+        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        logger.write("ALTER TABLE #{table_name} ADD COLUMN #{new_column}\n")
+        next
+      end
+
+      # Column doesn't exist
+      if !column_array.includes? name
+        new_column = column_types.select { |line| line.starts_with? name }[0]
+        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+      end
+
+      # Column exists but in the wrong position, rotate
+      if struct_array.includes? column_array[i]
+        until name == column_array[i]
+          new_column = column_types.select { |line| line.starts_with? column_array[i] }[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
+
+          # There's a column we didn't expect
+          if !new_column
+            db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+            logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}\n")
+
+            column_array = get_column_array(db, table_name)
+            next
+          end
+
+          db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          logger.write("ALTER TABLE #{table_name} ADD COLUMN #{new_column}\n")
+          db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+          logger.write("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}\n")
+          db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+          logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE\n")
+          db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+          logger.write("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}\n")
+
+          column_array = get_column_array(db, table_name)
+        end
+      else
+        db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE\n")
+      end
+    end
+  end
+end
+
+class PG::ResultSet
+  def field(index = @column_index)
+    @fields.not_nil![index]
+  end
+end
+
+def get_column_array(db, table_name)
+  column_array = [] of String
+  db.query("SELECT * FROM #{table_name} LIMIT 0") do |rs|
+    rs.column_count.times do |i|
+      column = rs.as(PG::ResultSet).field(i)
+      column_array << column.name
+    end
+  end
+
+  return column_array
+end
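To see how the rotation loop in analyze_table converges, consider a session_ids table whose live column order has drifted from the struct's order. The following standalone sketch (hypothetical data, no database required) simulates one pass and prints the statements the real function would execute:

    # Order the struct expects vs. order currently in the table.
    struct_array = {"id", "email", "issued"}
    column_array = ["id", "issued", "email"]

    struct_array.each_with_index do |name, i|
      next if name == column_array[i]?
      # Rotate the out-of-place column to the end of the table: copy it
      # into a fresh column, drop the original, then rename it back.
      until name == column_array[i]
        current = column_array[i]
        puts "ALTER TABLE session_ids ADD COLUMN #{current}_new ..."
        puts "UPDATE session_ids SET #{current}_new=#{current}"
        puts "ALTER TABLE session_ids DROP COLUMN #{current} CASCADE"
        puts "ALTER TABLE session_ids RENAME COLUMN #{current}_new TO #{current}"
        column_array = column_array[0...i] + column_array[i + 1..] + [current]
      end
    end
    # One rotation moves "issued" behind "email", leaving (id, email, issued).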
@@ -66,7 +66,7 @@ def refresh_feeds(db, logger, max_threads = 1)
           spawn do
             begin
               db.query("SELECT * FROM #{view_name} LIMIT 1") do |rs|
-                # View doesn't contain same number of rows as ChannelVideo
+                # Drop view that doesn't contain same number of columns as ChannelVideo
                 if ChannelVideo.from_rs(rs)[0]?.try &.to_a.size.try &.!= rs.column_count
                   db.exec("DROP MATERIALIZED VIEW #{view_name}")
                   raise "valid schema does not exist"
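The comment fix above matches what the check actually guards: ChannelVideo.from_rs materializes a row using the struct's current field list, so once the struct gains a field, to_a.size no longer equals the materialized view's column_count and the stale view is dropped (the raise is presumably rescued further up so the view can be recreated with the current schema).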
@@ -3,10 +3,14 @@ macro db_mapping(mapping)
     end
 
     def to_a
-        return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
+        return [ {{*mapping.keys.map { |id| "@#{id}".id }}} ]
     end
 
-    DB.mapping({{mapping}})
+    def self.to_type_tuple
+        return { {{*mapping.keys.map { |id| "#{id}" }}} }
+    end
+
+    DB.mapping( {{mapping}} )
 end
 
 macro json_mapping(mapping)
@@ -14,11 +18,11 @@ macro json_mapping(mapping)
     end
 
     def to_a
-        return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
+        return [ {{*mapping.keys.map { |id| "@#{id}".id }}} ]
     end
 
-    JSON.mapping({{mapping}})
-    YAML.mapping({{mapping}})
+    JSON.mapping( {{mapping}} )
+    YAML.mapping( {{mapping}} )
 end
 
 macro yaml_mapping(mapping)
@@ -26,7 +30,7 @@ macro yaml_mapping(mapping)
     end
 
     def to_a
-        return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
+        return [ {{*mapping.keys.map { |id| "@#{id}".id }}} ]
     end
 
     def to_tuple
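For reference, here is roughly what the revised db_mapping expands to for the Nonce struct added above (an illustrative sketch, not the literal macro output). The new spacing matters in the to_type_tuple case: without it, the tuple's opening { would run into {{ and be read as the start of a macro expression; the added spaces in the other calls look like a consistency change.

    struct Nonce
      def to_a
        return [ @nonce, @expire ]
      end

      # New in this commit: the column names as a tuple of strings,
      # consumed by analyze_table when diffing a table's schema.
      def self.to_type_tuple
        return { "nonce", "expire" }
      end

      DB.mapping({
        nonce:  String,
        expire: Time,
      })
    end

    Nonce.to_type_tuple # => {"nonce", "expire"}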