#############################################################################
# Module: BulkInsertable
# Author: Peter Philips
#
# The idea is that in a high-performing application you may want to
# insert lots and lots of rows. Doing this as individual inserts is
# a very slow process. If you group the inserts into one "bulk insert",
# the db is able to optimize the process, and you will see at least an
# order of magnitude improvement depending on how much data you have.
# This is normally easy (at least with MySQL) if you have all the data
# upfront: you can explicitly code a bulk-insert MySQL statement, similar
# to what is used when you load a MySQL dump that has data in it.
#
# The problem this module solves: what if a bunch of application code
# determines the data that you want to insert? Without this module you
# would need to pass the data in and out of methods in order to build
# your bulk-insert string. This module instead queues the inserts into a
# buffer, and when the application is ready it can call a single method
# to flush the queue and perform the bulk insert.
#
# KNOWN ISSUES:
#    Updates are not supported
#    Deletions of records that are in the queue.  
#       -   This module attempts to support this by maintaining a delete_queue but
#           currently only works for very specific models.  TODO: generalize this!
#
# Usage: 
#      - include in your AR class that you want to be able to queue inserts up on
#     class MyArClass < ActiveRecord::Base
#        include BulkInsertable
#     end
# 
#     - start queueing
#     MyArClass.start_queue #from this point on, all inserts done via this ActiveRecord model will be queued up
#    
#     - when ready, do the bulk insert
#     MyArClass.flush_queue
#
#     - utility methods
#     MyArClass.is_queued             # boolean whether this class is queueing inserts
#     MyArClass.queue                 # returns instance of queue(array)
#     MyArClass.delete_queue          # returns instance of the delete queue
#     MyArClass.delete_all_items_that_match_condition(sql_conditions, &block)
#                                     # This searches through the insert queue
#                                     # to find and delete records/objects that
#                                     # match the conditions passed in
#
###############################################################################
module BulkInsertable
  # Hook fired when this module is included into an ActiveRecord model.
  # Everything the queueing machinery needs -- state, accessors and the
  # start/flush entry points -- is injected into the including class here.
  #
  # NOTE(review): the @@-class variables below resolve against this module's
  # lexical scope, so the queues are effectively shared by every class that
  # includes BulkInsertable.  Fine for a single includer; surprising for two.
  def BulkInsertable.included(klass)

    # Open the including class so we can define static (class-level)
    # methods on it -- the only way to do so from a module's included hook.
    klass.module_eval{
      @@is_queued = false
      @@queue = []
      @@delete_queue = []

      # True while inserts/destroys are being queued instead of executed.
      def self.is_queued
        @@is_queued
      end

      # The pending-insert queue (Array of model instances).
      def self.queue
        @@queue
      end

      # The pending-delete queue (Array of model instances).
      def self.delete_queue
        @@delete_queue
      end

      # Remove matching items from BOTH the in-memory insert queue (the
      # block is called per queued object; a truthy return drops it) and
      # the database (+sql_conditions+ goes to delete_all when present).
      # We may have queued objects that now need deleting, so checking the
      # database alone is not enough -- the queue must be scrubbed too.
      def self.delete_all_items_that_match_condition(sql_conditions, &block)
        raise BbFatalError, "Must pass block to this method" unless block_given?

        # scrub queued-but-unsaved items first so they are never inserted
        LOGGER.info {"Queue size when trying to delete ssr's: "+@@queue.length.to_s}
        @@queue.delete_if{|item| yield item }
        LOGGER.info {"Deleting ssr's from db based upon: "+sql_conditions.to_s}
        self.delete_all(sql_conditions) if sql_conditions
      end

      # Start queueing save/destroy requests on the including class, e.g.
      #   MyArClass.start_queue
      # The existing before_create/before_destroy callbacks are saved so
      # flush_queue can restore them, then replaced with versions that
      # queue the object and abort the database operation.
      def self.start_queue

        # module_eval puts us back into the class context so the def's
        # below land on the including class and share its class variables.
        module_eval{

          # save the original callbacks for restoration at flush time
          @@before_create_method = self.instance_method(:before_create)
          @@before_destroy_method = self.instance_method(:before_destroy)

          @@queue = []
          @@delete_queue = []
          @@is_queued = true

          # intercept creates: queue the object instead of saving it
          def before_create
            self.clear_duplicate_keys

            # add obj to queue
            @@queue << self

            # flag the object in case a caller wants to detect this state
            self.errors.add(:is_queued, "This object's save is being deferred so that it is grouped with other objects")

            # returning false halts the callback chain and aborts the save
            return false
          end#end before_create

          # intercept destroys the same way
          def before_destroy
            @@delete_queue << self
            self.errors.add(:is_queued, "This object's before destroy is being suspended")
            return false
          end

        }#end module eval
      end#end start_queue

      # Flush both queues: issue bucketed DELETEs for the delete queue and
      # bucketed multi-row INSERTs for the insert queue, then restore the
      # original callbacks.  +start_id+ is the explicit id assigned to the
      # first inserted row; subsequent rows get consecutive ids, so the
      # caller must guarantee that id range is free.
      #
      # FIXED (was a TODO): the DELETE used to be hard-coded to the
      # slot_set_records table; it now uses the including model's table.
      # Also removed the stray SlotSetRecord.benchmark wrappers -- a
      # generic module must not reference one specific model.
      def self.flush_queue(start_id)
        bucket_size = 10  # rows per SQL statement

        # hang on to the database handle
        link = ActiveRecord::Base.connection

        #---- process the delete queue -----------------------------------
        del_hdr_sql = "DELETE FROM #{self.table_name} WHERE id IN ("
        bucket_ctr = 0
        del_val_sql_stream = StringIO.new("", "w+")
        del_obj_ctr = 1
        @@delete_queue.each do |obj|
          bucket_ctr += 1
          del_val_sql_stream << "#{obj.id}"
          del_val_sql_stream << "," unless del_obj_ctr == @@delete_queue.size or bucket_ctr == bucket_size
          del_obj_ctr += 1

          # bucket full: execute and start a fresh id list
          if bucket_ctr == bucket_size
            del_val_sql_stream << ")"
            link.execute(del_hdr_sql + del_val_sql_stream.string)
            del_val_sql_stream = StringIO.new("", "w+")
            bucket_ctr = 0
          end
        end
        del_val_sql_stream << ")"

        # leftover partial bucket still needs deleting
        if bucket_ctr > 0
          link.execute(del_hdr_sql + del_val_sql_stream.string)
        end

        @@delete_queue.clear

        #---- process the insert queue -----------------------------------
        row_counter = start_id

        # put the callbacks back to what they were before the queueing
        define_method(:before_create, @@before_create_method)
        define_method(:before_destroy, @@before_destroy_method)

        # id is assigned by us; timestamp columns are left to their defaults
        exclude_columns = [:id, :created_at, :updated_at]
        num_cols = column_names.size - exclude_columns.size

        # build the column list once: INSERT INTO table (id,col1,col2,...)
        hdr_sql_stream = StringIO.new("", "w+")
        hdr_sql_stream << "INSERT INTO #{self.table_name} (id,"
        num_col_countr = 1
        self.column_names.each do |col_name|
          next if exclude_columns.include?(col_name.to_sym)
          hdr_sql_stream << col_name
          hdr_sql_stream << "," unless num_col_countr == num_cols
          num_col_countr += 1
        end
        hdr_sql_stream << ")"#closes the column list

        num_objs = @@queue.size
        num_obj_countr = 1
        val_sql_stream = StringIO.new("", "w+")
        val_sql_stream << " VALUES "
        bucket_countr = 0
        @@queue.each do |obj|
          bucket_countr += 1
          val_sql_stream << "(#{row_counter},"
          row_counter += 1
          num_cols_countr = 1
          self.columns.each do |col|
            next if exclude_columns.include?(col.name.to_sym)

            # NOTE: nil attributes become '' (empty string), not SQL NULL
            val = obj[col.name].to_s
            # numeric and boolean columns go in bare; everything else quoted
            quote = (col.number? or col.type==:boolean) ? false : true

            if quote
              # double embedded quotes so values can't break the statement
              val_sql_stream << "'" << val.gsub("'", "''") << "'"
            else
              val_sql_stream << val
            end
            # BUGFIX: the comma used to be skipped when the value was
            # empty, producing adjacent literals and therefore invalid SQL
            val_sql_stream << "," unless num_cols_countr == num_cols
            num_cols_countr += 1
          end
          val_sql_stream << ")"
          val_sql_stream << "," unless num_obj_countr == num_objs or bucket_countr == bucket_size
          num_obj_countr += 1

          # bucket full: execute and start a fresh VALUES list
          if bucket_countr == bucket_size
            link.execute(hdr_sql_stream.string + val_sql_stream.string)
            val_sql_stream = StringIO.new("", "w+")
            val_sql_stream << " VALUES "
            bucket_countr = 0
          end
        end

        # leftover partial bucket still needs inserting
        if bucket_countr > 0
          link.execute(hdr_sql_stream.string + val_sql_stream.string)
        end

        # reset the queue state
        @@queue.clear
        @@is_queued = false
      end#end flush_queue

    }#end module eval

  end#end included
end#end BulkInsertable module
