#!/usr/bin/env ruby
# frozen_string_literal: true

=begin
Migrate TT-RSS data from MySQL to Postgres
Author: Bryce Chidester
Provided as-is, no warranty. Make backups!

This script may work in the reverse direction... but probably not. And seeing
as TT-RSS doesn't officially support any other database backend at this time,
there's really no point in migrating to or from any other database. Really,
this script was just the quickest and easiest way for me to "convert" my
TT-RSS instance from the legacy MySQL instance to Postgres, without losing
all the histories, entries, starred entries, tags etc. I found an old Java
migrator, which wasn't an option for many reasons, and I messed around with
pgloader for awhile but it required too much configuration and coaxing. In
the end, it was much simpler to write my own migrator/converter. This script
is largely based off sequel's --copy-database mode.

Basic Usage:
1. Create the target database, and a user. If you don't know how to do this,
   I refer you to Postgres' documentation.
2. Load the bare TT-RSS Postgres schema:
     psql -f <ttrss_schema_pgsql.sql>
3. Make sure update-daemon2.php is stopped.
4. Run this script:
     ttrss-migrator.rb <source_url> <target_url>
   Ex: 'mysql2://localhost/tt-rss?user=<user>&password=<pass>&encoding=utf8'
   Note: I used the mysql2 connector and explicitly specified encoding=utf8
   to avoid some encoding issues I encountered early on. If newlines appear
   as "\012" then you probably messed up this part.
   Ex: 'postgres://localhost/ttrss?user=<user>&password=<pass>&encoding=utf8'
   Note: Again, I explicitly specified encoding=utf8 to ensure everything
   is clear.
5. Update your TT-RSS config.php with the Postgres connection details.

License:
Copyright (c) 2016, Bryce Chidester
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

The views and conclusions contained in the software and documentation are
those of the authors and should not be interpreted as representing official
policies, either expressed or implied, of the FreeBSD Project.
=end

require 'logger'
require 'sequel'

# One-line synopsis printed (via abort) when too few arguments are given.
# NOTE(review): the original usage string had lost its argument placeholders
# (markup stripping); restored here to match the two ARGV values read below.
def usage
  "Usage:\n" \
  "#{$0} <source_db_url> <target_db_url>\n" \
  'See file header for more information.'
end

abort usage if ARGV.count < 2

start_time = Time.now

# Separate logger sets so Sequel's statement logging identifies which side
# (source or target) issued each query.
Srcloggers = [Logger.new($stderr)]
Srcloggers.each { |l| l.progname = 'SourceDB' }

Tgtloggers = [Logger.new($stderr)]
Tgtloggers.each { |l| l.progname = 'TargetDB' }

SourceDB = Sequel.connect(ARGV[0])
SourceDB.loggers = Srcloggers
SourceDB.test_connection  # fail fast before touching the target

TargetDB = Sequel.connect(ARGV[1])
TargetDB.loggers = Tgtloggers
TargetDB.test_connection

=begin
Plan:
1. Drop foreign_keys from TargetDB
2. Make any custom table modifications to avoid constraints
3. Copy data
4. Make any custom table modifications to reset constraints
5. Re-add foreign_keys to TargetDB
=end

Sequel.extension :migration
TargetDB.extension :schema_dumper

#index_migration = eval(TargetDB.dump_indexes_migration(:same_db=>true, :index_names=>true))
# Capture the target's FK constraints as a reversible migration so they can
# be dropped for the bulk copy and restored afterwards.
fk_migration = eval(TargetDB.dump_foreign_key_migration(:same_db => true))

SourceDB.transaction do
  TargetDB.transaction do
    puts 'Begin removing foreign key constraints'
    fk_migration.apply(TargetDB, :down)
    puts 'Finished removing foreign key constraints'

    # Temporarily allow NULL in section_name from ttrss_prefs_sections so
    # copied rows can't violate the NOT NULL constraint mid-copy.
    TargetDB.alter_table(:ttrss_prefs_sections) do
      set_column_allow_null :section_name
    end

    SourceDB.tables.each do |table|
      puts "Truncating TargetDB table: #{table}"
      TargetDB[table].truncate

      puts "Begin copying records for table: #{table}"
      time = Time.now
      to_ds = TargetDB.from(table)
      j = 0
      SourceDB.from(table).each do |record|
        # Emit a progress line at most every 5 seconds.
        if Time.now - time > 5
          puts "Status: #{j} records copied"
          time = Time.now
        end
        to_ds.insert(record)
        j += 1
      end
      puts "Finished copying #{j} records for table: #{table}"
    end
    puts 'Finished copying data'

    # Backfill the canonical section names, then restore the NOT NULL
    # constraint relaxed above.
    TargetDB[:ttrss_prefs_sections].where(:id => 1).update(:section_name => 'General')
    TargetDB[:ttrss_prefs_sections].where(:id => 2).update(:section_name => 'Interface')
    TargetDB[:ttrss_prefs_sections].where(:id => 3).update(:section_name => 'Advanced')
    TargetDB[:ttrss_prefs_sections].where(:id => 4).update(:section_name => 'Digest')
    TargetDB.alter_table(:ttrss_prefs_sections) do
      set_column_not_null :section_name
    end

    puts 'Begin adding foreign key constraints'
    fk_migration.apply(TargetDB, :up)
    puts 'Finished adding foreign key constraints'

    # Sequences are stale after explicit-id inserts; realign them.
    TargetDB.tables.each { |t| TargetDB.reset_primary_key_sequence(t) }
    puts 'Primary key sequences reset successfully'

    puts "Database copy finished in #{Time.now - start_time} seconds"
  end
end

exit

=begin
For reference (sequel's --copy-database implementation):

if copy_databases
  Sequel.extension :migration
  DB.extension :schema_dumper
  db2 = ARGV.shift
  error_proc["Error: Must specify database connection string or path to yaml file as second argument for database you want to copy to"] if db2.nil? || db2.empty?
  extra_proc.call
  start_time = Time.now
  TO_DB = connect_proc[db2]
  same_db = DB.database_type==TO_DB.database_type
  index_opts = {:same_db=>same_db}
  index_opts[:index_names] = :namespace if !DB.global_index_namespace? && TO_DB.global_index_namespace?
  puts "Databases connections successful"
  schema_migration = eval(DB.dump_schema_migration(:indexes=>false, :same_db=>same_db))
  index_migration = eval(DB.dump_indexes_migration(index_opts))
  fk_migration = eval(DB.dump_foreign_key_migration(:same_db=>same_db))
  puts "Migrations dumped successfully"

  schema_migration.apply(TO_DB, :up)
  puts "Tables created"

  puts "Begin copying data"
  DB.transaction do
    TO_DB.transaction do
      DB.tables.each do |table|
        puts "Begin copying records for table: #{table}"
        time = Time.now
        to_ds = TO_DB.from(table)
        j = 0
        DB.from(table).each do |record|
          if Time.now - time > 5
            puts "Status: #{j} records copied"
            time = Time.now
          end
          to_ds.insert(record)
          j += 1
        end
        puts "Finished copying #{j} records for table: #{table}"
      end
    end
  end
  puts "Finished copying data"

  puts "Begin creating indexes"
  index_migration.apply(TO_DB, :up)
  puts "Finished creating indexes"

  puts "Begin adding foreign key constraints"
  fk_migration.apply(TO_DB, :up)
  puts "Finished adding foreign key constraints"

  if TO_DB.database_type == :postgres
    TO_DB.tables.each{|t| TO_DB.reset_primary_key_sequence(t)}
    puts "Primary key sequences reset successfully"
  end
  puts "Database copy finished in #{Time.now - start_time} seconds"
  exit
end
=end