# BrokenCompass
# Directory (relative to the working directory) holding the marshalled
# per-model index files (<model>.index). Frozen: constants should not be
# mutated in place.
INDEX_DIRECTORY = "broken_compass".freeze
# Weight given to an indexed column when the caller supplies no explicit
# :column_weight entry for it.
DEFAULT_COLUMN_WEIGHT = 10
module ActiveRecord #:nodoc:
  class Base
    
    # Searches the marshalled BrokenCompass index of this model for
    # +searchTerm+ and returns a hash describing the weighted results.
    #
    # options keys (all optional):
    #   :offset        - first result to return (default 0)
    #   :limit         - maximum number of results (default 20)
    #   :eager_unload  - drop the loaded index from memory afterwards (default false)
    #   :extended_mode - enable 'term'@column syntax with & / | conjunctions (default false)
    #   :plural_forms  - also match singular/plural forms of each term (default true)
    #   :column_weight - hash of column name => integer weight (default DEFAULT_COLUMN_WEIGHT)
    #
    # Returns nil (after printing an error) when the index file is missing or
    # cannot be deserialized. Otherwise returns a hash with keys
    # :weighted_records, :total_record_count, :offset, :limit, :time_elapsed,
    # :fields, :plural_forms, :extended_mode, :eager_unload, :column_weight.
    def self.look_in_brokencompass(searchTerm = nil, options = {})
      t1 = Time.now
      brokencompass_option = options || {}

      #Entity finding regex for extended mode: matches 'term'@column tokens.
      searchterm_entity_regex = /\'[a-z0-9 \-_]*\'@{1}[a-zA-Z0-9\-_]+[^&| ]/
      @@bc_logger.debug("searchterm_entity_regex       : #{searchterm_entity_regex.to_s}")

      #Rechecking variables and assigning defaults
      @@bc_logger.debug("brokencompass_option          : #{brokencompass_option.inspect}")
      default_offset  = brokencompass_option[:offset]        || 0
      default_limit   = brokencompass_option[:limit]         || 20
      eager_unload    = brokencompass_option[:eager_unload]  || false
      extended_mode   = brokencompass_option[:extended_mode] || false
      column_weight   = brokencompass_option[:column_weight]
      #plural_forms defaults to true unless explicitly passed as false
      plural_forms    = (brokencompass_option[:plural_forms] == false) ? false : true

      index_file            = self.base_class.to_s.downcase + ".index"
      index_file_with_path  = File.join(INDEX_DIRECTORY, index_file)
      container_index_key   = self.base_class.to_s.downcase.to_sym

      # BUGFIX: the original `puts(...) and return unless ...` never returned,
      # because puts returns nil and `and` short-circuits, so execution fell
      # through to a doomed index load. Guard explicitly instead.
      unless File.exist?(index_file_with_path)
        puts "Error: Index file #{index_file_with_path} not found."
        return
      end

      #Initializing index container if not already initialized
      @@broken_compass_index ||= {}
      begin
        if @@broken_compass_index[container_index_key].nil?
          #Loading from index file
          @@bc_logger.debug("Loading from index file       : #{container_index_key.inspect}")
          # NOTE(review): Marshal.load is unsafe on untrusted input; the index
          # file is assumed to be generated locally by this plugin - confirm.
          File.open(index_file_with_path) { |f| @@broken_compass_index[container_index_key] = Marshal.load(f) }
        end
      rescue
        puts "Error occurred while loading index from #{index_file_with_path}.\n Please redo #{index_file}"
        return
      end

      #initializing column weights: every indexed column receives an integer
      #weight, falling back to DEFAULT_COLUMN_WEIGHT when not supplied.
      column_weight ||= {}
      @@broken_compass_index[container_index_key].keys.each do |key|
        if column_weight.key?(key.to_sym)
          column_weight[key.to_sym] = column_weight[key.to_sym].to_i
        else
          column_weight[key.to_sym] = DEFAULT_COLUMN_WEIGHT
        end
      end

      result_objectid_array = []
      result_objectid_with_weight_hash = {}
      #When extended mode is true, advanced features such as column level search are activated
      if extended_mode
        search_entities      = searchTerm.to_s.scan(searchterm_entity_regex)
        #Conjunction operators between entities; anything that is not "&" counts as "|".
        search_entities_conj = searchTerm.to_s.split(searchterm_entity_regex).collect{|x| (x.to_s.strip == "&") ? "&" : "|"}
        search_entities_conj[0] = "|"
        #Split each 'term'@column token into a [column, term] pair.
        simplified_search_entities = search_entities.collect{|x|
          [
            x.scan(/@{1}[a-zA-Z0-9\-_]+[^&| ]/)[0].gsub("@",""),
            x.scan(/\'[a-z0-9 \-_]*\'/)[0].gsub("'","")
          ] }
        @@bc_logger.debug("simplified_search_entities    : #{simplified_search_entities.inspect}")

        #Build a human-readable description of the query for the log.
        log_stmnt =  "Searching in extended mode for nothing"
        unless simplified_search_entities.empty?
          search_entities_conj.each_with_index do |operator,index|
            unless simplified_search_entities[index].nil?
              if @@broken_compass_index[container_index_key].keys.include?(simplified_search_entities[index][0])
                if operator == "|"
                  log_stmnt = log_stmnt + " or '#{simplified_search_entities[index][1]}' in #{simplified_search_entities[index][0]}"
                elsif  operator == "&"
                  log_stmnt = log_stmnt + " and '#{simplified_search_entities[index][1]}' in #{simplified_search_entities[index][0]}"
                end
              end
            end
          end
        end
        @@bc_logger.debug("simple search statement       : #{log_stmnt.to_s}")

        #Stores object_id w.r.t search-entities and its sequence in result_objectid_wrt_srchentities
        result_objectid_wrt_srchentities = {}
        @@broken_compass_index[container_index_key].keys.each do |key|
          simplified_search_entities.each_with_index do |sim_srch_entity,index|
            if sim_srch_entity[0].to_s == key.to_s
              # Storing results with weightage
              object_ids = @@broken_compass_index[container_index_key][key][sim_srch_entity[1].to_s]
              unless object_ids.nil?
                object_ids.each do |object_id|
                  result_objectid_with_weight_hash[object_id] ||= 0
                  result_objectid_with_weight_hash[object_id] = (result_objectid_with_weight_hash[object_id].to_i + column_weight[key.to_sym])
                end
              end

              #sorting results with weightage (descending weight)
              result_objectid_wrt_srchentities[index] = result_objectid_with_weight_hash.sort{|a,b| b[1]<=>a[1]}
            end
          end
        end

        #Working out the search-entities conjunction operators:
        #"|" unions the accumulated set with the entity's results,
        #"&" intersects them (keeping entries present on both sides).
        lhs_array = []
        search_entities_conj.each_with_index do |operator,index|
          rhs_array = result_objectid_wrt_srchentities[index] || []
          if operator == "|"
            lhs_array = (lhs_array + rhs_array)
          elsif  operator == "&"
             temp1 = lhs_array.collect{|x| x if (rhs_array.include?(x))}.compact
             temp2 = rhs_array.collect{|x| x if (lhs_array.include?(x))}.compact
             lhs_array = temp1 + temp2
          end
        end
        result_objectid_array = lhs_array

      else #When extended mode is false, all column search is made

        splitted_searchTerm_array = searchTerm.to_s.split(" ").compact.uniq
        if plural_forms
          #Adjusting for singular/plural forms of search term
          splitted_searchTerm_array = splitted_searchTerm_array.collect{|x| [x.to_s.singularize, x.to_s.pluralize]}.flatten.uniq.compact
        end
        @@bc_logger.info("Searching for term            : '#{splitted_searchTerm_array.join(",")}'")

        @@broken_compass_index[container_index_key].keys.each do |key|
          splitted_searchTerm_array.each do |splitted_searchTerm|
            # Storing results with weightage
            object_ids = @@broken_compass_index[container_index_key][key][splitted_searchTerm]
            unless object_ids.nil?
              object_ids.each do |object_id|
                result_objectid_with_weight_hash[object_id] ||= 0
                result_objectid_with_weight_hash[object_id] = (result_objectid_with_weight_hash[object_id].to_i + column_weight[key.to_sym])
              end
            end
          end
        end

        #sorting results with weightage (descending weight)
        result_objectid_array = result_objectid_with_weight_hash.sort{|a,b| b[1]<=>a[1]}
      end

      #Calculating subarray, depending upon offset and limit.
      #BUGFIX: Array#[] yields nil when the offset is past the end; normalize
      #to [] so callers can always iterate :weighted_records.
      result_objectid_subarray = result_objectid_array[default_offset.to_i..(default_offset.to_i + default_limit.to_i - 1)] || []

      #Remember the indexed column names, then drop the index from memory
      #when the eager_unload flag is set.
      @fields = @@broken_compass_index[container_index_key].keys
      @@broken_compass_index[container_index_key] = nil if eager_unload

      t2 = Time.now

      # Returning hash with consolidated inference
      {
        :weighted_records     => result_objectid_subarray ,
        :total_record_count   => result_objectid_array.length ,
        :offset               => default_offset,
        :limit                => default_limit,
        :time_elapsed         => (t2.to_f - t1.to_f),
        :fields               => @fields,
        :plural_forms         => plural_forms,
        :extended_mode        => extended_mode,
        :eager_unload         => eager_unload,
        :column_weight        => column_weight
      }

    end

    # Finds the records matching +searchTerm+ through the BrokenCompass index,
    # preserving the weight-based ordering computed by look_in_brokencompass.
    #
    # BrokenCompass options go under options[:brokencompass]; all remaining
    # options are forwarded to ActiveRecord's find.
    def self.find_with_brokencompass(searchTerm = nil, options ={})
      libk_inference_hash = look_in_brokencompass(searchTerm, options.delete(:brokencompass))

      # BUGFIX: look_in_brokencompass returns nil when the index file is
      # missing or corrupt (the original then crashed with NoMethodError),
      # and an empty id list produced invalid SQL "field(id, )". Treat both
      # as "no matches". Collect the ids once instead of twice.
      weighted_ids = libk_inference_hash.nil? ? [] : libk_inference_hash[:weighted_records].to_a.collect{|x| x[0]}
      return [] if weighted_ids.empty?

      # Merging the order field with weighted record ids,
      # as order is lost while fetching data from the 'IN' clause
      options.merge!({:order => "field(#{primary_key}, #{weighted_ids.join(',')})"})

      # calling find method to get all objects with id in the weighted list
      find(weighted_ids, options)
    end

    #Unloads this model's index from memory.
    def self.unload_brokencompass_index
      #Make sure the shared container exists before touching it
      @@broken_compass_index ||= {}
      index_key = self.base_class.to_s.downcase.to_sym
      @@bc_logger.info("Unloading compass index       : #{index_key.to_s}")
      #Only clear an entry that is actually loaded
      unless @@broken_compass_index[index_key].nil?
        @@broken_compass_index[index_key] = nil
      end
    end

  end
end