  require 'yaml'
  require 'rexml/document'
  
  #These methods are used in the extraction phase.
  #The method below prepopulates the config template for either the extract or the load process, depending on the parameter passed in.

 
  #Prepopulates the config template file for the given phase of the pipeline.
  #process_option:: either "extracting" or "loading"; any other value is now
  #a safe no-op (previously it crashed with NoMethodError on nil.close).
  #Writes "extract_config.csv" (empty template) or "load_config.csv"
  #(seeded with a "csv_files," header line). Returns nil.
  def write_extract_options_to_a_file(process_option)
        if process_option == "extracting"
              # Empty template: options are appended later by the extract phase.
              File.open("extract_config.csv","w").close
        elsif process_option == "loading"
              # Block form guarantees the handle is closed even if a write raises.
              File.open("load_config.csv","w") do |config_file|
                    config_file.write("csv_files,")
                    config_file.write("\n")
              end
        end
        nil
  end

  #~ def extract_column_to_xml
      #~ extract_tables(all_table_names,new_connection)	
  #~ end


  #~ def extract_column_to_yml
      #~ #user_metadata=connection.query("desc users;")
      #~ user_data=connection.query("select * from users;")
      #~ open('user_data','w'){|f| YAML.dump(user_data,f)}
      #~ #open('user_metadata','w'){|f| YAML.dump(user_metadata,f)}
  #~ end


  #~ def extract_column_to_fixed_width
  #~ end


  #~ def extract_to_column_to_spread_sheet
  #~ end  


  #Extracts the given columns of a table, writing ONE CSV LINE PER COLUMN
  #(each line is that column's values, double-quoted and comma-separated).
  #column_array:: column names to pull
  #table_name::   source table
  #connection::   open DB handle responding to select_all
  #status::       tag embedded in the output file name
  #Returns the generated file name ("<table>_selected_<status>_columns.csv").
  #NOTE(review): column/table names are interpolated straight into SQL —
  #callers must only pass trusted identifiers.
  def extract_column(column_array,table_name,connection,status)
        generate_header_file(table_name,connection,column_array)
        file_name = "#{table_name}_selected_#{status}_columns.csv"
        # Block form guarantees the file is closed even if a query raises.
        File.open(file_name,"w") do |file_handler|
              column_array.each do |column|
                    column_data = connection.select_all("select #{column} from #{table_name}")
                    file_handler.write("\"")
                    file_handler.write(column_data.join("\",\""))
                    file_handler.write("\"")
                    file_handler.write("\n")
              end
        end
        file_name
  end


 #This method reads the config file and extracts all the options. These options include the source database and other connection parameters.
 #This method will be merged with the generic extract_connection_options method.
  #Reads config/extract_config.csv and concatenates every field into a single
  #string: fields within a row are comma-separated; rows are concatenated
  #with no separator (preserves the original behavior).
  #Returns the accumulated options string.
  def read_extract_file
        extract_options = String.new
        FasterCSV.foreach("config/extract_config.csv") do |row|
              # Fixed off-by-one: the old "0..row.length" range ran one index
              # past the end, appending a spurious trailing comma per row.
              row.each_with_index do |field, field_counter|
                    if field_counter == 0
                          extract_options = extract_options + "#{field}"
                    else
                          extract_options = extract_options + ",#{field}"
                    end
              end
        end
        return extract_options
  end




  #This method is the central clearing house of the extraction process. This one calls both the connection and extraction methods.
  #Central clearing house of the extraction process: opens a connection,
  #lists every table, and extracts them all, recording wall-clock timestamps
  #in @start_time_database / @end_time_database.
  def get_table_data_to_csv
        db_connection = create_new_connection
        table_list = db_connection.query('show tables')
        @start_time_database = Time.new
        extract_tables(table_list, db_connection)
        @end_time_database = Time.new
  end



  # Stub: spreadsheet extraction is not implemented yet.
  # TODO: implement (see extract_tables_to_fixed_width for the expected shape).
  def extract_tables_to_spread_sheet(all_tables,connection)
        
  end


  #Converts every table to a crude fixed-width-style text file: first extracts
  #each table to CSV via extract_tables, then rewrites each CSV row into
  #"<table>.txt" as double-quoted fields separated by four spaces.
  #Nil cells are written as empty quoted strings.
  def extract_tables_to_fixed_width(all_tables,connection)
      extract_tables(all_tables,connection)
      all_tables.each do |table_name|
          txt_handle = File.new("#{table_name}.txt","w")
          FasterCSV.foreach("#{table_name}.csv") do |record|
              record.each do |cell|
                  cell = "" if cell.nil?
                  txt_handle.write("\"#{cell}\""+"    ")
              end
              txt_handle.write("\n")
          end
          txt_handle.close
      end
  end


  # Note: extracting to XML is the slowest of all the supported output formats.
  #Extracts every table to CSV first, then rewrites each CSV as a simple XML
  #document: <table><table_record><col datatype=".." size="..">value</col>...
  #Column datatype/size specs are row-invariant, so they are now looked up
  #ONCE per column instead of once per field per row — the old version issued
  #the same metadata query for every single record.
  #NOTE(review): field values are not XML-escaped; data containing '<', '>' or
  #'&' produces invalid XML, which valid_xml? will then report as a failure.
  def extract_tables_to_xml(all_tables,connection)
      extract_tables(all_tables,connection)
      all_tables.each{|table_name|
          xml_hdlr = File.new("#{table_name}.xml","w")
          xml_hdlr.write("<#{table_name}>")
          xml_hdlr.write("\n")
          all_columns = get_column_list(connection,table_name)
          # Hoisted loop-invariant work: one spec lookup per column.
          col_datatypes = []
          col_sizes = []
          all_columns.each_index{|col_index|
              col_specs = get_the_col_spec_for_single_col(all_columns[col_index],table_name,connection)
              col_datatypes[col_index] = col_specs.split("(")[0]
              size_part = col_specs.split("(")[1]
              col_sizes[col_index] = size_part.nil? ? "" : size_part.split(")")[0]
          }
          FasterCSV.foreach("#{table_name}.csv")do|row|
                xml_hdlr.write("    <table_record>")
                row.each_index{|field_index|
                    xml_hdlr.write("\n")
                    xml_hdlr.write("      <#{all_columns[field_index]} datatype=\"#{col_datatypes[field_index]}\" size=\"#{col_sizes[field_index]}\">")
                    xml_hdlr.write("\n")
                    xml_hdlr.write("            #{row[field_index]}")
                    xml_hdlr.write("\n")
                    xml_hdlr.write("      </#{all_columns[field_index]}>")
                    xml_hdlr.write("\n")
                }
                xml_hdlr.write("    </table_record>")
                xml_hdlr.write("\n")
          end
          xml_hdlr.write("</#{table_name}>")
          xml_hdlr.close
          # Fixed typo in the user-facing messages ("succesfully").
          if !valid_xml?("#{table_name}.xml").nil?
              puts "#{table_name} was successfully extracted to XML"
          else
              puts "#{table_name} could not be extracted to XML"
          end
      }
  end




  #Dumps every table's full contents to "<table>_data.yml" as a YAML document.
  #all_tables:: enumerable of table names
  #connection:: DB handle responding to execute; whatever execute returns is
  #             serialized verbatim via YAML.dump.
  def extract_tables_to_yml(all_tables,connection)
      all_tables.each{|table_name|
          table_rows = connection.execute("select * from #{table_name};")
          open("#{table_name}_data.yml",'w'){|f| YAML.dump(table_rows,f)}
          # Fixed typo in user-facing message ("succesfully").
          puts "#{table_name} was successfully extracted to yml files."
      }
  end


  #Core extraction loop: for each table, writes its structure and header files,
  #then dumps every row to "<table>.csv" with double-quoted, comma-separated
  #fields (NULLs become a single space), and records per-table timing stats
  #via generate_extraction_info_file_table_level.
  #Changes from the original: idiomatic each/map instead of for/==nil, the
  #dead table_counter local removed, and the CSV handle is closed before the
  #statistics file is generated rather than after.
  def extract_tables(all_table_names,new_connection)
        all_table_names.each do |table_name|
              create_structure_csv_file(table_name,new_connection)
              generate_header_file(table_name,new_connection)
              @start_time = Time.now
              file_handler = File.new("#{table_name}.csv",'w')
              @data_from_each_row = new_connection.execute("select * from #{table_name}")
              @data_from_each_row.each do |data_record|
                    # Quote every field; NULL fields are rendered as a single space.
                    quoted = data_record.map{|field| "\"#{field.nil? ? ' ' : field}\""}
                    file_handler.write(quoted.join(","))
                    file_handler.write("\n")
              end
              file_handler.close
              @end_time = Time.now
              generate_extraction_info_file_table_level(new_connection,"#{table_name}", @start_time,@end_time)
        end
  end


 #This method generates a separate statistics file for each table. Currently these cover timing stats; more advanced features, such as benchmarking a table's
 #extraction speed, are in the pipeline. It also reports extraction time per record.
 #Writes per-table extraction statistics (table name, elapsed time, row count)
 #to "extraction statistics <names>.txt".
 #connection:: DB handle responding to execute
 #names::      table name (also used in the output file name)
 #start_time/end_time:: Time objects bracketing the extraction
 #NOTE(review): the row count is obtained by re-fetching every row; a
 #"select count(*)" would be cheaper, but the result shape of execute is not
 #known here, so the original counting strategy is kept.
 def generate_extraction_info_file_table_level(connection,names,start_time,end_time)
        time_required_for_this_table = end_time - start_time
        row_counter = 0
        connection.execute("select * from #{names}").each do |_row|
              row_counter = row_counter + 1
        end
        # Block form guarantees the handle is closed even if a write raises.
        File.open("extraction statistics #{names}.txt",'w') do |file_handler|
              file_handler.write('The table name is ')
              file_handler.write(names)
              file_handler.write("\n")
              file_handler.write('The extraction time required for this table ')
              file_handler.write(time_required_for_this_table)
              file_handler.write("\n")
              file_handler.write('The total number of records extracted ')
              file_handler.write(row_counter)
              file_handler.write("\n")
        end
 end


 #This method generates a single statistics file for the entire database. This includes number of tables extracted and time taken for it.	
 #Writes database-level extraction statistics (total table count and total
 #elapsed time) to "extraction statistics <database>.txt".
 def generate_extraction_info_file_database_level(database,total_tables,total_time_for_extraction)
        # Assemble the whole report first, then write it in one shot.
        report = 'Total number of tables extracted '
        report << total_tables.to_s << "\n"
        report << 'The total time required '
        report << total_time_for_extraction.to_s << "\n"
        stats_file = File.new("extraction statistics #{database}.txt",'w')
        stats_file.write(report)
        stats_file.close
 end
						