# A Fragment is a named slice of an institution's vocabulary: it owns a set
# of Concepts (each carrying key/value Attributes) and supports bulk CSV
# import of concepts and of mappings between fragments.
class Fragment < ActiveRecord::Base
  belongs_to :institution
  belongs_to :fragment_type
  # Deleting a fragment bulk-deletes its concepts (:delete_all skips callbacks).
  has_many :concepts, :order => 'name', :dependent => :delete_all

  # Holds an uploaded CSV file object during import; not persisted.
  attr_accessor :file

  # Fragments whose name contains "core", ordered by name.
  named_scope :cores, :conditions => "name LIKE '%core%'", :order => "name"

  # Bulk-imports concepts (and their attributes) for +fragment+ from the CSV
  # file at +path+.  The first row is a header: col 0 = local_key,
  # col 1 = name, remaining columns are attribute names.  Data rows are
  # batched into multi-row INSERTs flushed every 100 rows via
  # Fragment.insert_concepts.  Routing through that helper (instead of the
  # old inline prefix-building) also means a batch with no attribute tuples
  # is skipped rather than producing a truncated, invalid INSERT.
  #
  # NOTE: concept ids are pre-computed from Concept.max_id plus the row
  # count, so this must NOT be run concurrently with other imports.
  def Fragment.create_from_csv_file(fragment, path)
    if File.exists?(path) && File.readable?(path)
      concepts_sql = ""
      attributes_sql = ""

      attribute_names = Array.new

      t1 = Time.now

      max_id = Concept.max_id

      # Count the data rows first so concept ids can be assigned up front.
      # Don't just count lines: CSV fields may span multiple lines.
      row_count = 0
      FasterCSV.foreach path do |row|
        row_count += 1
      end

      # subtract 1 from row_count because the first row is the header row
      row_count -= 1
      concept_id = max_id + row_count

      count = 0
      FasterCSV.foreach path do |row|
        # first row is attribute names
        if count == 0
          attribute_names = row[2..-1]
        else
          concepts_sql << sanitize_sql_array(["(%s, %s, '%s', '%s', utc_timestamp(), utc_timestamp()),", concept_id, fragment.id, (row[0].strip unless row[0].nil?), (row[1].strip unless row[1].nil?)])

          attr_index = 0
          row[2..-1].each do |col|
            # column attributes.value is varchar(1024), so truncate data and
            # add Suffix to indicate truncation (matches create_from_csv_string)
            if !col.blank? && col.length > 1024
              col = col[0..1020] << Suffix
            end
            # NOTE: getting rid of the select statment here looks like it can cut the total time to import in half
            attributes_sql << sanitize_sql_array(["(%s, '%s', '%s', utc_timestamp(), utc_timestamp()),", concept_id, attribute_names[attr_index], col])
            attr_index += 1
          end

          concept_id -= 1

          # flush the accumulated VALUES tuples on every 100th row
          if count % 100 == 0
            Fragment.insert_concepts concepts_sql, attributes_sql

            concepts_sql = ""
            attributes_sql = ""
          end
        end

        count += 1
      end

      # flush any remaining rows
      unless concepts_sql.blank? && attributes_sql.blank?
        Fragment.insert_concepts concepts_sql, attributes_sql
      end

      t2 = Time.now

      puts "Time to read file: " + Fragment.duration(t1, t2)
    end
  end

  # Appended to attribute values truncated to fit the varchar(1024) column.
  Suffix = "..."

  # Bulk-imports concepts (and their attributes) for +fragment+ from the CSV
  # text +str+.  Row layout matches create_from_csv_file: the first row is a
  # header (col 0 = local_key, col 1 = name, remaining columns are attribute
  # names); data rows are batched into multi-row INSERTs flushed every 100
  # rows via Fragment.insert_concepts.
  #
  # NOTE: concept ids are pre-computed from Concept.max_id plus the row
  # count, so this must NOT be run concurrently with other imports.
  def Fragment.create_from_csv_string(fragment, str)
    unless str.blank?
      concepts_sql = ""
      attributes_sql = ""

      attribute_names = Array.new

      t1 = Time.now

      max_id = Concept.max_id

      # Count the data rows first so concept ids can be assigned up front.
      # Don't just count lines: CSV fields may span multiple lines.
      row_count = 0
      FasterCSV.parse str do |row|
        row_count += 1
      end

      # subtract 1 from row_count because the first row is the header row
      row_count -= 1
      concept_id = max_id + row_count

      count = 0
      FasterCSV.parse str do |row|
        # first row is attribute names
        if count == 0
          attribute_names = row[2..-1]
        else
          concepts_sql << sanitize(["(%s, %s, '%s', '%s', utc_timestamp(), utc_timestamp()),", concept_id, fragment.id, row[0], row[1]])

          attr_index = 0
          row[2..-1].each do |col|
            # column attributes.value is varchar(1024), so truncate data and add Suffix to indicate truncation
            if !col.blank? && col.length > 1024
              col = col[0..1020] << Suffix
            end
            attributes_sql << sanitize(["(%s, '%s', '%s', utc_timestamp(), utc_timestamp()),", concept_id, attribute_names[attr_index], col])
            attr_index += 1
          end

          concept_id -= 1

          # flush the accumulated VALUES tuples on every 100th row
          if count % 100 == 0
            Fragment.insert_concepts concepts_sql, attributes_sql

            concepts_sql = ""
            attributes_sql = ""
          end
        end

        count += 1
      end

      # flush any remaining rows
      unless concepts_sql.blank? && attributes_sql.blank?
        Fragment.insert_concepts concepts_sql, attributes_sql
      end

      t2 = Time.now

      puts "Time to read file: " + Fragment.duration(t1, t2)
    end
  end

  # Multi-row INSERT prefix for bulk-importing mappings; each appended value
  # must be a parenthesized, sanitized tuple matching this column order.
  INSERT_MAP_COLS = "INSERT INTO #{Map.table_name}(from_fragment_id, from_local_key, to_fragment_id, to_local_key, user_id, mapping_type_id, comments, is_imported, updated_at, created_at) VALUES"

  # Imports mappings from this fragment to +map_to_fragment_id+ from the CSV
  # text +str+, attributed to +user+.  Row layout: col 0 = from_local_key,
  # col 1 = to_local_key, col 2 = mapping_type_id (defaults to the "synonym"
  # type), col 3 = comments.
  #
  # An existing mapping (same endpoints and mapping type) gets the new
  # comments appended to it; a missing one is inserted with
  # is_imported = true.  Inserts and updates are flushed every 100 rows.
  def import_mappings_from_csv map_to_fragment_id, str, user
    unless map_to_fragment_id.nil? || str.blank? || user.nil?      
      to_fragment = Fragment.find map_to_fragment_id
      default_mapping_type = MappingType.find_by_name "synonym"
      count = 0
      values = Array.new   # sanitized VALUES tuples for new mappings
      updates = Array.new  # sanitized UPDATE statements for existing mappings
      FasterCSV.parse str do |row|
        from_local_key = row[0]
        to_local_key = row[1]
        mapping_type_id = row[2] || default_mapping_type.id
        comments = row[3]

        # Look up the mapping for these concepts with the specified mapping type.
        # NOTE(review): one SELECT per CSV row (N+1); the commented-out line
        # below was an earlier attempt to search a preloaded collection instead.
        # existing_map = existing_maps.select {|m| m.from_fragment_id == self.id && m.from_local_key == from_local_key && m.to_fragment_id == map_to_fragment_id.to_i && m.to_local_key == to_local_key && m.mapping_type_id == mapping_type_id.to_i}.first
        existing_map = Map.find :first, :conditions => {:from_fragment_id => self.id, :from_local_key => from_local_key, :to_fragment_id => map_to_fragment_id.to_i, :to_local_key => to_local_key, :mapping_type_id => mapping_type_id.to_i}
        
        # If the mapping exists and the comments for the new mapping aren't blank, then update the existing mapping with the new comments
        # If the mapping exists and the comments for the new mapping are blank, then do nothing
        # If the mapping doesn't exist, then insert it
        if existing_map
          unless comments.blank?
            updates << build_update_statment(existing_map, comments)
          end
        else
          comments ||= "[IMPORTED_MAPPING]"
          values << Fragment.sanitize(["(%s, '%s', %s, '%s', %s, %s, '%s', true, utc_timestamp(), utc_timestamp())", self.id, from_local_key, to_fragment.id, to_local_key, user.id, mapping_type_id, comments])
        end

        # after every 100 rows, execute the inserts and updates
        if (count += 1) >= 100
          conn = Fragment.connection

          # insert the new mappings in one multi-row statement
          conn.execute(INSERT_MAP_COLS + values.join(", ")) unless values.empty?
          values = Array.new

          Fragment.transaction do
            # update the existing mappings
            updates.each do |u|
              conn.update u
            end
          end
          updates = Array.new

          count = 0
        end
      end

      # if there are any remaining inserts or updates, make sure they are saved
      # NOTE(review): unlike the batch flush above, these trailing updates run
      # outside a transaction — confirm whether that is intentional.
      if values.size > 0 || updates.size > 0
        conn = Fragment.connection
        conn.execute(INSERT_MAP_COLS + values.join(", ")) unless values.empty?
        updates.each do |u|
          conn.update u
        end
      end

    end
  end

  # Turns the accumulated VALUES tuples into complete INSERT statements and
  # executes them.  Each argument is expected to end with a trailing ','
  # (left by the accumulation loops), which is chopped off here; a blank
  # argument is passed through blank and skipped by execute_statements.
  def Fragment.insert_concepts(concepts_sql, attributes_sql)
    # drop the trailing ',' left over from tuple accumulation
    concepts_sql.chop!
    attributes_sql.chop!

    concepts_sql = "insert into #{Concept.table_name}(id, fragment_id, local_key, name, created_at, updated_at) values #{concepts_sql}" unless concepts_sql.blank?
    attributes_sql = "insert into #{Attribute.table_name}(concept_id, name, `value`, created_at, updated_at) values #{attributes_sql}" unless attributes_sql.blank?

    Fragment.execute_statements(concepts_sql, attributes_sql)
  end

  private

  # Executes each non-blank statement inside a single transaction; an error
  # aborts (and rolls back) the transaction and is then logged rather than
  # re-raised, making the import best-effort.
  #
  # NOTE: the `private` marker above only affects subsequently defined
  # instance methods; class methods declared `def Fragment.x` stay public.
  def Fragment.execute_statements(*stmts)
    Fragment.transaction do
      stmts.each do |sql|
        Fragment.connection.execute sql unless sql.blank?
      end
    end
  rescue
    logger.error "There was a problem trying to execute the sql...\n#{$!}"
  end

  # Builds a sanitized UPDATE statement that appends +additional_comments+
  # onto +existing_map+'s comments and refreshes its updated_at.
  # (Name typo "statment" kept — callers in this file reference it.)
  def build_update_statment existing_map, additional_comments = ""
    comments = existing_map.comments
    comments = "#{comments}, #{additional_comments}" unless additional_comments.length == 0
    Fragment.sanitize ["UPDATE #{Map.table_name} SET comments = '%s', updated_at = utc_timestamp() WHERE from_fragment_id = %s AND from_local_key = '%s' AND to_fragment_id = %s AND to_local_key = '%s' AND mapping_type_id = %s", comments, existing_map.from_fragment_id, existing_map.from_local_key, existing_map.to_fragment_id, existing_map.to_local_key, existing_map.mapping_type_id]
  end
  
  # Formats the elapsed wall-clock time between +t1+ and +t2+ as
  # "<minutes> minutes <seconds> seconds"; seconds keep their fraction.
  def self.duration(t1, t2)
    elapsed = t2.to_f - t1.to_f
    "#{elapsed.to_i / 60} minutes #{elapsed % 60} seconds"
  end

  # ActiveRecord::Base exposes sanitize_sql_array only to class-level code
  # (it is a protected class method), so instance methods route through this
  # public class-method wrapper instead of calling it directly.
  def self.sanitize(ary)
    sanitize_sql_array ary
  end
  
end
