# This file is the implementation of the loading DSL.
# It provides two kinds of load:
#   1. CSV (file) to database load
#   2. database to database load

class Loading
  # Constraint is the column-filtering half of the DSL: it selects the
  # columns of each required table that carry a given database constraint
  # and loads only those columns into the destination.
  class Constraint
    # Catch-all DSL entry point. The invoked method name is expected to be
    # of the form "<verb>_<constraint_name>_<noun>" (e.g.
    # :load_primary_key_columns): the first and last words are stripped and
    # the middle becomes the constraint name.
    #
    # options - hash of
    #           {destination_connection => {source_connection => table spec}}.
    def self.method_missing(method_name, options = {})
      name_parts = method_name.to_s.split("_")
      constraint_name = name_parts[1..(name_parts.length - 2)].join("_")
      options.each do |destination_connection, sources|
        sources.each do |source_connection, table_spec|
          all_tables = source_connection.tables
          required_tables = get_required_columns(all_tables, table_spec, "", source_connection)
          required_tables.each do |table_name|
            columns_list = get_columns_by_constraint(constraint_name, table_name, source_connection)
            if columns_list.empty?
              puts "There is no column under #{table_name} with #{constraint_name} constraint"
            else
              # BUG FIX: the destination was previously stored in a misspelled
              # local ("deatination_connection"), so this call raised NameError
              # whenever matching columns were found.
              execute_partial_column_loading(columns_list, table_name, source_connection, destination_connection)
            end
          end
        end
      end
    end

    # Companion to method_missing above: every message is handled dynamically.
    def self.respond_to_missing?(_method_name, _include_private = false)
      true
    end

    # Translates a DSL constraint name into the label used by the connected
    # database; e.g. "primary_key" becomes "PRI".
    # NOTE(review): this is an instance method while everything else in the
    # class is class-level, so it is unreachable from method_missing above —
    # confirm external callers before changing its receiver.
    def get_constraint_name(constraint_name)
      case constraint_name
      when "not_null"    then "NOT NULL"
      when "primary_key" then "PRI"
      when "unique"      then "UNIQUE"
      else "auto_increment"
      end
    end
  end

  # Catch-all DSL entry point for file-based loading. Every key of options
  # that is not a reserved connection entry names a file type (:csv, :xml,
  # ...); its value may carry :connection (target connections), :folders and
  # :files describing what to load.
  def self.method_missing(method_name, options = {})
    parent_connection = options[:destination_database]
    options.each do |key, value|
      # Skip the reserved connection entries; every other key is a file type.
      next if ["destination_connection", "destination_database"].include?(key.to_s.downcase)
      target_connections = value[:connection]
      folder_paths = value[:folders]
      # A value that cannot be split is treated as a single folder entry.
      folder_paths = [folder_paths] unless folder_paths.respond_to?("split")
      file_list = value[:files]
      if file_list.nil?
        file_names = ["", ""]
      else
        file_names = file_list.map do |single_file|
          [single_file, ".#{single_file.split(".").last}", "from_file"]
        end
      end
      table_hash = { :from_file => true, :table_names => folder_paths }
      extension = ".#{key.to_s.downcase}"
      if target_connections.nil? || target_connections.empty?
        Loading.load_to_database(parent_connection, '', file_names, extension, table_hash)
      else
        target_connections.each do |destination_connection|
          Loading.load_to_database(destination_connection, '', file_names, extension, table_hash)
        end
      end
    end
  end

  # Companion to method_missing above: every message is handled dynamically.
  def self.respond_to_missing?(_method_name, _include_private = false)
    true
  end

  # Loads tables from multiple source databases into multiple destination
  # databases. options maps each destination connection to a hash of
  # {source_connection => table-selection spec}; the spec narrows which of
  # the source's tables are transferred.
  def self.load_multiple_databases(options = {})
    options.each do |destination_connection, sources|
      sources.each do |source_connection, table_spec|
        all_tables = source_connection.tables
        required_table_names = get_required_columns(all_tables, table_spec, "", source_connection)
        Loading.db_to_db_load(source_connection, destination_connection, { :table_names => required_table_names })
      end
    end
  end

  # Loads only selected columns of one or more tables.
  # options maps {table_name => [source_connection, column, column, ...]}
  # hashes to a destination connection.
  def self.load_selected_columns(options = {})
    options.each do |tables, destination_connection|
      tables.each do |table_name, spec|
        source_connection = spec[0]
        # BUG FIX: previously `spec - [spec[0]]`, which removes EVERY element
        # equal to the first one; drop(1) removes only the leading connection.
        column_list = spec.drop(1)
        execute_partial_column_loading(column_list, table_name.to_s, source_connection, destination_connection, false)
      end
    end
  end

  # First step of the `Loading.load(sel).from(src).to(dst)` chain. Stashes
  # the selection in a global for the chained #from call and returns an
  # instance so the chain can continue.
  def self.load(load_string)
    $load_string = load_string
    Loading.new
  end

  # Second step of the chain: resolves the stashed selection against the
  # source connection's tables. An array of numbers selects tables by
  # number; an array of strings (or any non-array value) by position.
  def from(source_connection)
    all_tables = source_connection.tables
    if $load_string.respond_to?("join")          # an Array selection...
      if $load_string[0].respond_to?("abs")      # ...of numbers
        $all_tables = get_columns_from_numbers($load_string, all_tables)
      elsif $load_string[0].respond_to?("split") # ...of strings
        $all_tables = get_columns_from_positions($load_string, all_tables)
      end
    else
      $all_tables = get_columns_from_positions($load_string, all_tables)
    end
    $source_connection = source_connection
    Loading.new
  end

  # Final step of the chain: performs the database-to-database load using
  # the connection and tables stashed by #load and #from.
  def to(destination_connection)
    Loading.db_to_db_load($source_connection, destination_connection, { :table_names => $all_tables })
  end

  # Loads a set of files into the destination database.
  #
  # destination_connection - connection the data is loaded into.
  # source_connection      - optional; only used to derive file names when
  #                          table_hash carries no file/table information.
  # direct_files_list      - explicit [name, extension, origin] entries, or
  #                          [""] when none were given.
  # extension_req          - only files of this requested extension are
  #                          loaded (nil means any).
  # table_hash             - :from_file / :from_table flags plus :table_names.
  def self.load_to_database(destination_connection, source_connection, direct_files_list, extension_req, table_hash)
    file_names = collect_file_names(source_connection, table_hash)
    file_names += direct_files_list if direct_files_list.first != ""
    file_names.uniq.each do |file_name|
      load_single_file(file_name, extension_req, destination_connection)
    end
  end

  # Gathers the [name, extension, ...] entries described by table_hash.
  def self.collect_file_names(source_connection, table_hash)
    file_names = []
    if table_hash[:from_file]
      # :table_names holds lists of folder paths; expand every folder.
      table_hash[:table_names].each do |folder_list|
        next if folder_list.nil? || folder_list.empty?
        folder_list.each { |folder| file_names += Loading.all_files_from(folder) }
      end
    elsif table_hash[:from_table]
      # Tables were extracted into CSV files named after the table.
      table_hash[:table_names].each { |table_name| file_names << [(table_name + ".csv"), ".csv"] }
    else
      file_names = Loading.get_all_file_names(source_connection, file_names)
    end
    file_names
  end
  private_class_method :collect_file_names

  # Loads one file entry, skipping auxiliary header/structure/statistics
  # files and honouring the requested extension. XML and fixed-width text
  # files are converted to CSV before loading.
  def self.load_single_file(file_name, extension_req, destination_connection)
    full_name = file_name[0].to_s
    if full_name.include?("header")
      puts "#{file_name[0]} is likely to be a header file."
    elsif full_name.include?("structure")
      puts "#{file_name[0]} is likely to be a structure file."
    elsif full_name.include?("statistics")
      # BUG FIX: this message previously said "structure file".
      puts "#{file_name[0]} is likely to be a statistics file."
    else
      only_file_name = full_name.split(/\\/).last
      # NOTE(review): gsub removes every occurrence of the basename, not just
      # the trailing one — confirm paths never repeat the file name.
      full_file_path = full_name.gsub("#{only_file_name}", "")
      case file_name[1]
      when ".xml"
        if extension_req.nil? || extension_req == ".xml"
          convert_xml_to_csv(file_name[0])
          execute_loading(full_file_path, only_file_name, file_name[1].to_s, destination_connection)
        end
      when ".txt"
        # .txt files are treated as fixed-width exports.
        if extension_req.nil? || extension_req == ".fixed_width"
          convert_fixed_width_to_csv(file_name[0])
          execute_loading(full_file_path, only_file_name, file_name[1].to_s, destination_connection)
        end
      when ".csv"
        if extension_req.nil? || extension_req == ".csv"
          execute_loading(full_file_path, only_file_name, file_name[1].to_s, destination_connection)
        end
      end
    end
  end
  private_class_method :load_single_file

  # Builds "<name>.csv" file names. Passing :all derives the names from the
  # tables of source_connection; otherwise each given name is used as-is.
  def self.get_all_file_names(source_connection, file_name_array)
    names = file_name_array == :all ? source_connection.tables : file_name_array
    names.map { |name| name.to_s + ".csv" }
  end

  # Database-to-database load: extracts the selected tables from the source
  # into CSV files and loads those CSVs into the destination. A no-op when
  # source and destination are the same connection.
  def self.db_to_db_load(source_connection, destination_connection, table_hash)
    return if source_connection == destination_connection
    extract_tables(table_hash[:table_names], source_connection)
    table_hash[:from_table] = true
    Loading.load_to_database(destination_connection, source_connection, [""], ".csv", table_hash)
  end

  # Returns [path, extension, "from_folder"] entries for every loadable file
  # (.csv, .xml or .txt) directly inside the given directory.
  def self.all_files_from(pathname)
    loadable_extensions = [".csv", ".xml", ".txt"]
    children = Pathname.new(pathname.to_s).children
    entries = children.select { |file| loadable_extensions.include?(file.extname.to_s) }
    $from_folder = true
    entries.map { |file| [file, file.extname.to_s, "from_folder"] }
  end
end