  # Drives the first-normal-form transform described in first_normalize_config.csv:
  # for every root table it sets up the primary key, dumps the structure CSV,
  # and then creates/extends the configured child tables column by column.
  #
  # Fix: the original if/else on the primary-key name executed the exact same
  # query in both branches (a dead conditional that also raised NoMethodError
  # whenever primary_key was nil), so it was removed.
  # TODO(review): if the intent was to declare the matching column as the child
  # table's PRIMARY KEY, that still needs to be implemented.
  def normalize_first_form
    new_connection = create_new_connection
    get_root_tables("first_normalize_config.csv").each do |root_table|
      primary_key = get_primary_key(root_table)
      handle_primary_key(primary_key, root_table, new_connection)
      create_structure_csv_file(root_table, new_connection)
      FasterCSV.foreach("first_normalize_config.csv") do |row|
        # Only "child tables" rows belonging to the current root table matter.
        next unless row.first == "child tables" && row[1] == root_table
        (3...row.length).each do |i|
          next if row[i].nil?
          column = row[i].gsub(' ', '_')
          column_spec = get_the_col_spec_for_single_col(column, root_table, new_connection)
          if i == 3
            # The first configured column creates the child table...
            new_connection.query("CREATE TABLE #{row[2]}(#{column} #{column_spec})")
          else
            # ...every following column is appended to it.
            new_connection.query("ALTER TABLE #{row[2]} ADD #{column} #{column_spec}")
          end
        end
      end
    end
  end


  # Column data splitting is being executed in four steps.
  # This method receives a hash of key:value pairs describing the work,
  # extracts the required options, and determines the "splitting point".
  # It then calls execute_splitting, which performs the actual split of the column values.
  # Walks the nested options hash — connection => { table pattern =>
  # { column name => splitting point } } — and triggers execute_splitting for
  # every matching table/column pair.
  #
  # Fixes: removed the unused local `splitted_at`; replaced the `for` loop and
  # the opaque key/value rebinding with named block parameters.
  def split_column_data(options = {})
    options.each do |connection, tables_hash|
      tables_hash.each do |target_tables, columns_hash|
        # Resolve the table pattern against the live table list of this connection.
        required_tables = get_required_columns(connection.tables, target_tables, "", connection)
        columns_hash.each do |column_name, global_splitting_point|
          required_tables.each do |single_table|
            execute_splitting(connection, single_table, column_name, global_splitting_point)
          end
        end
      end
    end
  end



  # This method will split the actual column data based on the global_splitting_point.
  # At the end of the method, the count of values created by splitting a single value is same.
  # The nil values are replaced by " ".
  # Splits every value of +column_name+ in +single_table+ on
  # +global_splitting_point+, pads the per-row fragment arrays to a uniform
  # length, and hands them to generate_new_columns_with_data.
  #
  # Fixes: the original appended a stale (or nil) array whenever a row was nil,
  # because the modifier-if skipped the split assignment but not the push; a
  # nil/missing value now contributes an empty array so row alignment is kept.
  # Also removed the unused `all_columns` lookup.
  def execute_splitting(connection, single_table, column_name, global_splitting_point)
    uniform_column_value_array = []
    all_column_data = connection.execute("select #{column_name} from #{single_table}")
    all_column_data.each do |column_value|
      raw = column_value && column_value[0]
      uniform_column_value_array << (raw.nil? ? [] : raw.split(global_splitting_point.to_s))
    end
    uniform_column_value_array = make_array_uniform(uniform_column_value_array)
    generate_new_columns_with_data(uniform_column_value_array, single_table, connection, column_name, global_splitting_point)
  end



  # This method will make the values array uniform.
  # e.g. make_array_uniform([[1,5,6],[1,3],[5,3,2,4]]) will result in
  # [[1,5,6," "],[1,3," "," "],[5,3,2,4]]
  # Pads every inner array of +col_value_array+ (in place) with " " up to the
  # length of the longest inner array, then returns the array.
  # e.g. make_array_uniform([[1,5,6],[1,3],[5,3,2,4]])
  #   => [[1,5,6," "],[1,3," "," "],[5,3,2,4]]
  #
  # Fixes: `map(&:length).max` replaces building a length array and
  # `sort.last`; `|| 0` keeps an empty input safe; `for` loops replaced with
  # iterators and the stray indentation straightened out.
  def make_array_uniform(col_value_array)
    max_length = col_value_array.map(&:length).max || 0
    col_value_array.each do |single_array|
      (0...max_length).each do |index|
        single_array[index] = " " if single_array[index].nil?
      end
    end
    col_value_array
  end



  # This method will actually split the table's columns and will insert the values from uniform_column_value_array.
  # Adds one new VARCHAR(255) column per split fragment to +single_table+ and
  # back-fills every row's fragments into those columns.
  #
  # Expects +uniform_column_value_array+ to hold one fragment array per source
  # row, already padded to equal length by make_array_uniform.
  # NOTE(review): fragment values are interpolated into SQL unescaped — values
  # containing quotes will break the UPDATE; safe only for trusted data.
  def generate_new_columns_with_data(uniform_column_value_array,single_table,connection,column_name,global_splitting_point)
    columns_created = false
    new_column_array = []
    for single_array in uniform_column_value_array  
      # Create the destination columns exactly once, sized by the first row's
      # fragment count (all rows share that length after make_array_uniform).
      if !columns_created
        for length_counter in 0..(single_array.length-1)  
          columns_created = true
          new_column_name = Normalize::Splitter.generate_missing_column_name("#{column_name}",single_table,connection)
          connection.execute("ALTER TABLE #{single_table} ADD #{new_column_name} VARCHAR(255)")
          new_column_array << new_column_name
        end    
      end
      # Re-join the fragments to reconstruct the original cell value and use it
      # to locate the matching row(s).
      # NOTE(review): rows that were padded with " " re-join to a value with an
      # extra trailing separator+space, which may not match the original cell —
      # confirm against real data.
      column_value = single_array.join("#{global_splitting_point}")
      for i in 0..((single_array.length)-1)
        connection.execute("UPDATE #{single_table} SET #{new_column_array[i]} = '#{single_array[i]}' WHERE #{column_name} = '#{column_value}'")
      end
    end
    puts "Finished splitting #{column_name.downcase} from #{single_table}"  
  end  



  # This is the first of the two step normalization transform of multiple tables.
  # This method extracts all the required options from the block passed to it,
  #  and then passes them to the second method to actually generate child tables.
  #  parent_primary_key will be used as the primary key of the table when the table itself has none.
  # Evaluates the config block, pulls out the global parent primary key, and
  # fans out over connection => { table pattern => { destination => spec } }.
  def normalize(&b)
    config = b.call
    global_primary_key = config[:parent_primary_key]
    config.each do |source_connection, table_map|
      # Skip the :parent_primary_key entry itself.
      next if table_map == global_primary_key
      table_map.each do |table_pattern, destinations|
        source_tables = get_required_columns(source_connection.tables, table_pattern, "", source_connection)
        source_tables.uniq.each do |single_source_table|
          destinations.each do |destination_connection, child_spec|
            generate_child_table_through_normalization(child_spec, destination_connection, single_source_table, source_connection, global_primary_key)
          end
        end
      end
    end
  end
 
 
 
  # This is the second method of "normalize" transforms,
  # Based on the options passed by the normalize method, this will generate the child tables and set up their primary keys.
  # Second step of the "normalize" transform: for each child-table spec in
  # +sec_inner_value+ (child table names => column spec), resolves the source
  # columns and delegates child-table creation to Normalize.normalize.
  #
  # Fixes: removed the leftover debug `p [primary_key, source_table_name]`;
  # dropped the pointless reassignment of the loop variable from the
  # Normalize.normalize return value; hoisted the loop-invariant column-list
  # lookup (it depends only on the source connection and table).
  def generate_child_table_through_normalization(sec_inner_value,destination_connection,source_table_name,source_connection,primary_key)
    all_columns = get_column_list(source_connection, source_table_name)
    sec_inner_value.each do |child_tables, column_spec|
      required_columns = get_required_columns(all_columns, column_spec, "", source_connection)
      child_tables.each do |single_child_table|
        Normalize.normalize(destination_connection, single_child_table, required_columns, source_connection, source_table_name, primary_key)
      end
    end
    puts "#{source_table_name} is successfully normalized."
  end
 
 
 
  # This method extracts the primary key from the normalization config file.
  # Scans the "primary key" rows of first_normalize_config.csv for +tablename+
  # and returns the field immediately following it; nil when not found.
  #
  # Fixes: the original `for field_counter in 0..row.length` iterated one past
  # the last index (harmless only because Ruby returns nil out of bounds);
  # Array#index expresses the lookup directly.
  def get_primary_key_from_file(tablename)
    FasterCSV.foreach("first_normalize_config.csv") do |row|
      next unless row.first == "primary key"
      index = row.index(tablename)
      return row[index + 1] if index
    end
    nil
  end



  # Copies the required columns (plus the primary key) from +root_table+ into
  # +child_table+ when source and destination share a connection.
  #
  # Fixes: the original appended the primary key into the caller's
  # +column_names+ array (a surprising side effect) and then rebound the
  # parameter to a String; we now build the column list without mutating the
  # argument.
  # NOTE(review): identifiers are interpolated straight into SQL — safe only
  # for trusted configuration values.
  def execute_data_transfer_post_normalization(destination_connection,child_table,source_connection,root_table,column_names,primary_key)
    columns = column_names.include?(primary_key) ? column_names : column_names + [primary_key]
    column_list = columns.join(",")
    if destination_connection == source_connection
      destination_connection.execute("INSERT INTO #{child_table} SELECT #{column_list} FROM #{root_table}")
    else
      # This is when both source and destination are lying in separate databases.
    end
  end



  # This method will create the normalization config file template.
  # Writes the normalization config file template (first_normalize_config.csv)
  # into the current working directory, overwriting any existing file.
  #
  # Fixes: File.new without a block leaked the handle if any write raised;
  # the block form of File.open guarantees the file is closed.
  def create_first_normalization_config_file
    File.open("first_normalize_config.csv", "w") do |first_norm_file|
      first_norm_file.write("root table,\n")
      first_norm_file.write("primary key,\n")
      first_norm_file.write("child tables,\n")
      first_norm_file.write("child tables,\n")
      first_norm_file.write("comments,")
      first_norm_file.write("for adding more child tables copy above two child table records and provide desired values.")
    end
  end
