require 'rextral'
# active_profiler implements the profiling stage, which is executed before ETL so as to avoid in-process surprises during ETL.
# active_profiler is divided into these main parts: column/table/database-level profiling, a rules generator, and a rule validator.
  
  class ActiveProfiler
      # First half of the two-step dependency check: resolves the actual
      # column names for both sides of the check from the options hash and
      # delegates the statistics generation to generate_dependency_profile.
      #
      # options:
      #   :connection    - database connection hosting 'this' table
      #   :dependency_of - spec of the columns OF which dependency is checked
      #   :dependency_on - spec of the columns ON which dependency is checked
      def self.check_dependency(options={})
          table_name = self.to_s.downcase
          connection = options[:connection]
          column_array = get_column_list(connection,table_name)
          dependent = options[:dependency_of]
          dependee = options[:dependency_on]
          parent_column_names = (get_required_columns(column_array,dependee," ",connection)).uniq
          child_column_names = (get_required_columns(column_array,dependent," ",connection)).uniq
          # A column trivially "depends" on itself, so drop the overlap.
          child_column_names = child_column_names - parent_column_names
          ActiveProfiler.generate_dependency_profile(parent_column_names,child_column_names,connection,table_name)
      end



      # Second half of the dependency check: generates the statistics from the
      # column names resolved by check_dependency.
      # parent_column_names are the names of the columns ON which dependency is to be checked.
      # child_column_names are the names of the columns OF which dependency is to be checked.
      # A child column is reported as independent of a parent column when two
      # rows share the same child value but carry different parent values.
      # Returns 0 when no independent pair is found; otherwise prints and
      # returns the unique list of independent [child, parent] pairs.
      def self.generate_dependency_profile(parent_column_names,child_column_names,connection,table_name)
          independent_columns = []
          parent_column_names.each{|parent_column|
              # Each result row comes back as a one-element array; concatenating
              # flattens the result set into a flat list of values.
              parent_values_array = []
              connection.execute("select #{parent_column} from #{table_name}").each{|parent_value|
                  parent_values_array = parent_values_array + parent_value
              }
              child_column_names.each{|child_column|
                  child_values_array = []
                  connection.execute("select #{child_column} from #{table_name}").each{|child_value|
                      child_values_array = child_values_array + child_value
                  }
                  parent_values_array.each_index{|row|
                      current_parent_value = parent_values_array[row]
                      current_child_value = child_values_array[row]
                      child_values_array.each_index{|other|
                          if child_values_array[other] == current_child_value &&
                             other != row && parent_values_array[other] != current_parent_value
                              independent_columns << [child_column,parent_column]
                              break
                          end
                      }
                  }
              }
          }
          if independent_columns.empty?
              return 0
          end
          puts "Under #{table_name}"
          independent_columns.uniq.each{|independent_pair|
              puts "#{independent_pair[0]} column does not depend on #{independent_pair[1]}"
          }
      end



      # Checks the primary-key dependency of the non-key attributes of 'this'
      # table. connection is the database connection where the table lives.
      def self.check_primary_key_dependency(connection)
          table_name = self.to_s.downcase
          Profile.generate_primary_key_dep_profile(table_name,connection)
      end



      # Dynamic profiling entry point: a call such as profile_<constraint>_columns
      # lands here — the tokens between the first and last underscore-separated
      # words form the constraint name, and every column of 'this' table
      # carrying that constraint is profiled.
      # NOTE(review): assumes the name pattern above — confirm against callers.
      def self.method_missing(method_name,connection)
          method_array = method_name.to_s.split("_")
          table_name = self.to_s.downcase
          constraint_name = method_array[1..(method_array.length-2)].join("_")
          column_list = get_columns_by_constraint(constraint_name,table_name,connection)
          if column_list.empty?
              # grammar fix: was "There no columns"
              puts "There are no columns in #{table_name} following #{constraint_name}"
          else
              column_list.each{|column_name|
                  generate_column_profile(column_name,table_name,connection)
              }
          end
      end



      # Extracts the column specs from args_val (a string is a position spec,
      # a number is a single position, anything else is taken as a literal
      # column name) and profiles each resolved column one by one.
      def self.profile_columns(args_val,connection)
          table_name = self.to_s.downcase
          all_col_names = []
          # "desc <table>" lists one row per column; the first field is the name.
          connection.execute("desc #{table_name}").each{|col_details|
              all_col_names << col_details[0]
          }
          all_col_names = all_col_names.uniq
          col_names = []
          args_val.each{|value|
              if value.respond_to? "split"      # string position spec
                  col_names = col_names + get_columns_from_positions(value,all_col_names)
              elsif value.respond_to? "abs"     # numeric column position
                  col_names = col_names + get_columns_from_numbers([value],all_col_names)
              else                              # symbol etc.: literal column name
                  col_names = col_names + [value.to_s]
              end
          }
          col_names.uniq.each{|column_name|
              Profile::Rule.profile_single_column(column_name,table_name,connection)
          }
      end
  end


      # Generates redundancy graphs based on data-repetition profiling.
      # The block must return a hash whose keys and values are arrays of the
      # form [connection, table_spec, ...]: each key describes the "key" side,
      # each value the "value" side. Every table of interest is dumped to
      # "<table>.csv"; then, for each key-side table, a bar chart of per-column
      # repetition counts (<table>_level1.png) and a pie chart grouping columns
      # by whether their data is repeated at all (..._partial_redundency.png)
      # are written.
      def overlap_profile(&b)
          options = b.call
          options.each{|key,value|
              key_connection = key[0]
              # BUG FIX: the value-side connection lives at value[0] (was key[0]).
              value_connection = value[0]
              key_table_list = key - [key[0]]
              value_table_list = value - [value[0]]
              all_key_connection_table_names = []
              key_connection.execute("show tables").each{|table_name|
                  all_key_connection_table_names = all_key_connection_table_names + table_name
              }
              all_value_connection_table_names = []
              value_connection.execute("show tables").each{|table_name|
                  all_value_connection_table_names = all_value_connection_table_names + table_name
              }
              all_key_connection_tables = get_required_columns(all_key_connection_table_names,key_table_list,"",key_connection)
              all_value_connection_tables = get_required_columns(all_value_connection_table_names,value_table_list,"",value_connection)
              value_final_tables = all_value_connection_tables - all_key_connection_tables
              # Dump each table of interest into "<table>.csv", one quoted row per line.
              all_key_connection_tables.each{|table_name|
                  csv_file = File.new("#{table_name}.csv","w")
                  key_connection.execute("select * from #{table_name}").each{|single_record|
                      single_record.each{|single_field|
                          csv_file.write("\"#{single_field}\"")
                          # NOTE(review): comparing by value rather than position also
                          # drops the comma after any field merely equal to the last one.
                          csv_file.write(",") if single_field != single_record.last
                      }
                      # BUG FIX: key-side rows previously ran together on one line
                      # (the value-side loop below already wrote this newline).
                      csv_file.write("\n")
                  }
                  csv_file.close
              }
              all_value_connection_tables.each{|table_name|
                  csv_file = File.new("#{table_name}.csv","w")
                  value_connection.execute("select * from #{table_name}").each{|single_record|
                      single_record.each{|single_field|
                          csv_file.write("\"#{single_field}\"")
                          csv_file.write(",") if single_field != single_record.last
                      }
                      csv_file.write("\n")
                  }
                  csv_file.close
              }
              all_key_connection_tables.each{|key_table_name|
                  g_bar = Gruff::Bar.new
                  g_partial = Gruff::Pie.new(1000)
                  # field_match_array[i] accumulates, over all value tables, how often
                  # the i-th field of any key row reappears anywhere in a value row.
                  field_match_array = []
                  value_final_tables.each{|value_table_name|
                      FasterCSV.foreach("#{key_table_name}.csv") do |key_row|
                          key_row.each_index{|field_index|
                              field_match = 0
                              FasterCSV.foreach("#{value_table_name}.csv") do |value_row|
                                  value_row.each{|value_field|
                                      if !key_row[field_index].nil? && !key_row[field_index].to_s.empty? && key_row[field_index] == value_field
                                          field_match = field_match + 1
                                      end
                                  }
                              end
                              field_match_array[field_index] = (field_match_array[field_index] || 0) + field_match
                          }
                      end
                  }
                  column_list = get_column_list(key_connection,key_table_name)
                  not_repeated = 0
                  field_match_array.each_index{|field_counter|
                      no_of_times_repeated = field_match_array[field_counter]
                      if no_of_times_repeated == 0
                          not_repeated = not_repeated + 1
                      else
                          g_bar.data("#{column_list[field_counter]}",no_of_times_repeated)
                      end
                  }
                  g_bar.write("#{key_table_name}_level1.png")
                  g_partial.data("Data never repeated",not_repeated)
                  g_partial.data("Repeated atleast once",(column_list.size-not_repeated))
                  # NOTE(review): to_s[15..10] has start > end and always yields ""/nil;
                  # presumably meant to embed a connection identifier — confirm intent.
                  g_partial.write("#{key_table_name}_#{key_connection.to_s[15..10]}_partial_redundency.png")
              }
          }
      end




  # Rule validator: checks whether the values of two given sets of columns are
  # similar. Handy for quickly checking data redundancy of two columns without
  # going through the full graphical redundancy-profiling analysis.
  # options: :source_connection and :source_columns describe the source side;
  # every other (non-connection, non-array) value is a destination hash with
  # :connection, :table_name and :columns.
  def validates_similar_values(options={})
      all_columns = get_column_list(options[:source_connection],self.to_s.downcase)
      # BUG FIX: was options[:source_connections] (misspelt key, always nil).
      required_source_columns = get_required_columns(all_columns,options[:source_columns],"",options[:source_connection])
      source_file_name = extract_column(required_source_columns,self.to_s.downcase,options[:source_connection],"source")
      options.each{|key,value|
          # Skip the source-connection entry and the array-valued source column
          # spec; destination entries are hashes. && avoids `and` precedence traps.
          if value != options[:source_connection] && !value.respond_to?("join")
              current_connection = value[:connection]
              all_tables = current_connection.tables
              value[:table_name] = [value[:table_name]] if !value[:table_name].respond_to?("join")
              required_tables = get_required_columns(all_tables,value[:table_name],"",current_connection)
              required_tables.each{|table_name|
                  value[:columns] = [value[:columns]] if !value[:columns].respond_to?("join")
                  all_columns = get_column_list(current_connection,table_name)
                  required_columns = get_required_columns(all_columns,value[:columns],"",current_connection)
                  destination_file_name = extract_column(required_columns,table_name,current_connection,"destination")
                  if compare_csv_files(source_file_name,destination_file_name)
                      puts "data from #{required_source_columns.join(",")} of #{self.to_s.downcase} matches with data from #{required_columns.join(",")} of #{table_name}"
                  else
                      puts "data from #{required_source_columns.join(",")} of #{self.to_s.downcase} does not match with data from #{required_columns.join(",")} of #{table_name}"
                  end
              }
          end
      }
  end


  
  
  # Rule validator: checks whether columns from a given set stay inside a value
  # domain, specified either as a Range or as an Array.
  # options: :connection plus column-spec => domain pairs.
  def validates_range_of(options={})
      connection = options[:connection]
      puts "The statistical information"
      column_array = get_column_list(connection,self.to_s.downcase)

      #~ What if the datasize is more than buffer memory can handle?
      all_records = connection.select_all("select * from #{self.to_s.downcase}").size
      options.each{|key,value|
          # BUG FIX: skip the :connection entry itself, as every other validator
          # in this file already does — previously it was fed to the profiler.
          if value != connection
              all_columns = get_required_columns(column_array,key,value,connection)
              generate_range_compact_info(all_columns,connection,self.to_s.downcase,all_records,value)
          end
      }
  end




  # Performs the actual range-compatibility check.
  # all_columns - column names under 'this' table (nil means nothing to check).
  # value       - the value domain (anything responding to include?).
  # all_records - total number of rows, used for the out-of-bounds percentage.
  def generate_range_compact_info(all_columns,connection,table_name,all_records,value)
      if !all_columns.nil?
          all_columns.each{|column|
              out_of_bound = 0
              connection.select_all("select #{column} from #{table_name}").each{|row|
                  # A nil row is skipped entirely; otherwise its first field is
                  # tested against the domain.
                  out_of_bound += 1 if !row.nil? && !value.include?(row[0])
              }
              if out_of_bound == 0
                  puts "for #{column}, all rows have values WITHIN bounds"
              else
                  puts "for #{column} approximately #{(((out_of_bound.to_f)/all_records)*100).round} % rows have values out of bounds"
              end
          }
      end
  end




  # Rule validator: checks whether columns from a given set hold a given kind
  # of data — numeric, alphabetic or alphanumeric — rather than a literal SQL
  # type such as CHAR, INT or FLOAT.
  # options: :connection plus column-spec => kind pairs.
  def validates_type_of(options={})
      connection = options[:connection]
      # NOTE(review): wrapping the result set in an array makes this .size
      # always 1, so the companion percentage is always 0% or 100% — the check
      # downstream is per declared column type, so this may be intentional.
      $all_records = [connection.execute("select count(*) from #{self.to_s.downcase}")].size
      column_array = get_column_list(connection,self.to_s.downcase)
      puts "The statistical information"
      options.each{|key,value|
          next if value == connection   # skip the :connection entry itself
          required_columns = get_required_columns(column_array,key,value,connection)
          generate_type_compact_info(required_columns,value,self.to_s.downcase,connection)
      }
  end




  # Performs the actual kind-of-data mapping.
  # value        - the kind of data to check ("numeric" or "alphabetic").
  # column_array - column names in table_name.
  # The declared SQL type of each column decides the verdict, so a column
  # either matches for all rows or for none; $all_records is populated by
  # validates_type_of.
  def generate_type_compact_info(column_array,value,table_name,connection)
      column_array.each{|column_name|
          col_specs = get_the_col_spec_for_single_col(column_name,table_name,connection)
          # Strip the size suffix, e.g. "varchar(255)" -> "varchar".
          data_type = col_specs.split("(")[0].downcase
          numeric_mismatch = value == "numeric" && !(["int","float","bigint","smallint"].include?(data_type))
          alpha_mismatch = value == "alphabetic" && !(["char","text","varchar"].include?(data_type))
          if numeric_mismatch || alpha_mismatch
              unmatched_type = 1
              puts "for #{column_name}, values in #{(unmatched_type/$all_records)*100}% rows are NOT #{value}"
          else
              puts "for #{column_name}, values in all the rows are #{value}"
          end
      }
  end



  # Checks whether 'this' table depends on any table from the given set, by
  # looking for foreign-key-style columns referencing each table's primary key.
  # options: :connection plus connection => table-list pairs.
  def validates_dependency_on(options={})
      connection = options[:connection]
      column_list = get_column_list(connection,self.to_s.downcase)
      options.each{|key,value|
          next if value == connection   # skip the :connection entry itself
          present_connection = key
          table_names = get_required_columns(present_connection.tables,value,"",present_connection)
          table_names.each{|table_name|
              primary_key = get_primary_key(table_name,key)
              if primary_key.nil?
                  puts "#{table_name} does not have a primary key. So dependency cannot be determined reliably."
              else
                  # More primary key heuristics can be added.
                  candidates = [
                      table_name + "_" + primary_key,
                      primary_key,
                      table_name + "_" + primary_key.downcase,
                      table_name.downcase + "_" + primary_key,
                      table_name.downcase + "_" + primary_key.downcase
                  ]
                  if candidates.any?{|candidate| column_list.include?(candidate)}
                      puts
                      puts "#{self.to_s.downcase} is related with #{table_name} via #{primary_key} column in #{table_name}"
                  end
              end
          }
      }
  end




  # Rule validator: checks whether columns from a given set follow certain
  # constraints. options: :connection plus column-spec => constraint pairs;
  # a constraint value may be a single name or an array of names.
  def validates_constraints_on(options={})
      connection = options[:connection]
      # "select count(*)" yields exactly one row; keep just that first row.
      # (Previously done by reassigning the loop variable of a for-loop that
      # iterated the very collection being replaced, next to an unused
      # rec_counter local.)
      all_records = nil
      connection.execute("select count(*) from #{self.to_s.downcase}").each{|record|
          all_records = record
          break
      }
      column_array = get_column_list(connection,self.to_s.downcase)
      puts "The statistical information"
      options.each{|key,value|
          if value != connection
              all_columns = get_required_columns(column_array,key,value,connection)
              value = [value] if !value.respond_to?("join")
              value.each{|constraint|
                  generate_constraint_compact_info(all_columns,connection,self.to_s.downcase,all_records,constraint)
              }
          end
      }
  end





  # The exact opposite of validates_dependency_on: checks whether any table
  # from the given set is dependent on this table, via this table's primary
  # key column. options: :connection plus connection => table-list pairs.
  def validates_relation_with(options={})
      connection = options[:connection]
      curr_table_name = self.to_s.downcase
      primary_key = get_primary_key(curr_table_name,connection)
      if primary_key.nil?
          # grammar fix: was "does not a primary key"
          puts "#{curr_table_name} does not have a primary key. So its relationship with others cannot be tracked reliably."
      else
          options.each{|key,value|
              if value != connection
                  value.each{|table_name|
                      column_list = get_column_list(key,table_name)
                      if column_list.include?(curr_table_name+"_"+primary_key)
                          puts
                          puts "#{table_name} is related with #{curr_table_name} via #{primary_key} in #{curr_table_name}"
                      end
                  }
              end
          }
      end
  end



  # Result verification/printing for the constraint rule validator: reports,
  # for each requested column, whether it carries the given constraint.
  def generate_constraint_compact_info(all_columns,connection,table_name,all_records,constraint)
      constrained = get_columns_by_constraint(constraint,table_name,connection)
      all_columns.each{|column_name|
          status = constrained.include?(column_name) ? "is" : "is NOT"
          puts "#{column_name} #{status} following #{constraint} constraint"
      }
  end




  




 

  

  
