# File lib/taps/operation.rb, line 538
# Builds a { table_name => row_count } map for every table in the local
# database, then narrows it through the configured table filter.
def fetch_local_tables_info
  counts = db.tables.inject({}) do |acc, table|
    acc[table] = db[table.to_sym.identifier].count
    acc
  end
  apply_table_filter(counts)
end
# File lib/taps/operation.rb, line 374
# Identifies this operation as a push when building session/resume file names.
def file_prefix
  'push'
end
# File lib/taps/operation.rb, line 521
# Caches the local table/row-count map in the operation's options hash so
# repeated lookups do not re-query the database.
def local_tables_info
  opts[:local_tables_info] = fetch_local_tables_info unless opts[:local_tables_info]
  opts[:local_tables_info]
end
# File lib/taps/operation.rb, line 455
# Streams every table's rows up to the server, one table at a time, with a
# per-table progress bar.
def push_data
  puts "Sending data"
  puts "#{tables.size} tables, #{format_number(record_count)} records"
  tables.each do |name, row_count|
    data_stream = Taps::DataStream.factory(db,
      :table_name => name,
      :chunksize => default_chunksize)
    bar = ProgressBar.new(name.to_s, row_count)
    push_data_from_table(data_stream, bar)
  end
end
# File lib/taps/operation.rb, line 469 def push_data_from_table(stream, progress) loop do if exiting? store_session exit 0 end row_size = 0 chunksize = stream.state[:chunksize] chunksize = Taps::Utils.calculate_chunksize(chunksize) do |c| stream.state[:chunksize] = c encoded_data, row_size, elapsed_time = stream.fetch break if stream.complete? data = { :state => stream.to_hash, :checksum => Taps::Utils.checksum(encoded_data).to_s } begin content, content_type = Taps::Multipart.create do |r| r.attach :name => :encoded_data, :payload => encoded_data, :content_type => 'application/octet-stream' r.attach :name => :json, :payload => data.to_json, :content_type => 'application/json' end session_resource['push/table'].post(content, http_headers(:content_type => content_type)) self.stream_state = stream.to_hash rescue RestClient::RequestFailed => e # retry the same data, it got corrupted somehow. if e.http_code == 412 next end raise end elapsed_time end stream.state[:chunksize] = chunksize progress.inc(row_size) stream.increment(row_size) break if stream.complete? end progress.finish completed_tables << stream.table_name.to_s self.stream_state = {} end
# File lib/taps/operation.rb, line 408
# Dumps the local index definitions and POSTs them to the server, one request
# per index, table by table.
def push_indexes
  idxs = JSON.parse(Taps::Utils.schema_bin(:indexes_individual, database_url))
  return if idxs.empty?

  puts "Sending indexes"

  apply_table_filter(idxs).each do |table, indexes|
    next if indexes.empty?
    bar = ProgressBar.new(table, indexes.size)
    indexes.each do |idx|
      session_resource['push/indexes'].post(idx, http_headers)
      bar.inc(1)
    end
    bar.finish
  end
end
# File lib/taps/operation.rb, line 444
# Resumes an interrupted table push from the saved stream state; a no-op when
# there is no state to resume from.
def push_partial_data
  return if stream_state == {}

  name = stream_state[:table_name]
  count = tables[name.to_s]
  puts "Resuming #{name}, #{format_number(count)} records"
  bar = ProgressBar.new(name.to_s, count)
  partial_stream = Taps::DataStream.factory(db, stream_state)
  push_data_from_table(partial_stream, bar)
end
# File lib/taps/operation.rb, line 438
# Asks the server to reset auto-increment sequences once the data is loaded.
def push_reset_sequences
  puts "Resetting sequences"
  endpoint = session_resource['push/reset_sequences']
  endpoint.post('', http_headers)
end
# File lib/taps/operation.rb, line 426
# Dumps each table's schema locally and POSTs it to the server, advancing a
# progress bar per table.
def push_schema
  puts "Sending schema"
  bar = ProgressBar.new('Schema', tables.size)
  tables.each do |table, _count|
    payload = Taps::Utils.schema_bin(:dump_table, database_url, table)
    session_resource['push/schema'].post(payload, http_headers)
    bar.inc(1)
  end
  bar.finish
end
# File lib/taps/operation.rb, line 534
# Total number of records across all local tables (memoized).
def record_count
  @record_count ||= local_tables_info.values.inject(0) { |total, count| total + count }
end
# File lib/taps/operation.rb, line 382
# Drives the full push: schema, data, indexes, then sequence resets. Indexes
# go either before the data (indexes_first?) or after it. On a server-reported
# HTTP failure the session is saved so the push can be resumed later.
def run
  verify_server
  begin
    unless resuming?
      push_schema
      push_indexes if indexes_first?
    end
    setup_signal_trap
    push_partial_data if resuming?
    push_data
    push_indexes unless indexes_first?
    push_reset_sequences
    close_session
  rescue RestClient::Exception => e
    store_session
    # Without a server response there is nothing useful to report — re-raise.
    raise unless e.respond_to?(:response)
    puts "!!! Caught Server Exception"
    puts "HTTP CODE: #{e.http_code}"
    puts e.response.to_s
    exit(1)
  end
end
Generated with the Darkfish Rdoc Generator 2.