require "json"

require_relative "twikit_tokenizer.rb"
require_relative "twikit_file_reader.rb"

# Searches a prebuilt on-disk index of tokenized documents for the entries
# closest to a query file, ranked by Hamming distance between "XTP" maps.
class TwikitSearch
  # Builds the reader/tokenizer collaborators and loads the persisted
  # index from disk immediately on construction.
  def initialize
    @file_reader = TwikitFileReader.new
    @tokenizer = TwikitTokenizer.new
    @index = {}
    @query = nil
    @query_xtp = nil
    @query_injections = nil
    load_index
  end

  # @return [Hash] filename => parsed index entry
  def get_index
    @index
  end

  # Reads every file in the doc-store directory and parses each one as
  # JSON into @index, keyed by filename.
  #
  # @return [true]
  def load_index
    @file_reader.read_directory('twikit_tokenizer_doc_store').each_pair do |filename, content|
      @index[filename] = JSON.parse(content)
    end
    true
  end

  # Loads and tokenizes a query file, applies any embedded intersection
  # directive, and computes the query's XTP map.
  #
  # @param query_file [String] path/name understood by the tokenizer
  # @return [String, nil] the raw query text (also cached in @query)
  def load_query(query_file)
    @tokenizer.load_unit_tests([query_file])
    @query = @tokenizer.get_unit_test(query_file)
    # A TWIKIT::SOURCE::INTERSECTIONS::{a,b,...} directive inside the query
    # names other sources whose intersections should be injected.
    if (directive = @query.match(/TWIKIT::SOURCE::INTERSECTIONS::{(.*)}/))
      sources = directive[1].split(',')
      @query_injections = @tokenizer.inject_intersections_for_unit_test(query_file, sources)
    end
    @query_xtp = @tokenizer.get_XTP_map_for_unit_test(query_file, true)
    warn "XTP map for this query '#{query_file}' is nil." if @query_xtp.nil?
    @query
  end

  # Ranks every indexed document by Hamming distance to the current (or
  # freshly loaded) query, printing coarse progress to stdout.
  #
  # @param query_file [String, nil] when given, (re)loads the query first
  # @return [Array<Hash>, false] ascending-distance results
  #   ({'file'=>..., 'distance'=>...}, entries with distance -1 dropped),
  #   or false when no query / no query XTP map is available
  def query_index(query_file = nil)
    @query = load_query(query_file) unless query_file.nil?
    if @query.nil?
      warn "Cannot query index, Query is nil"
      return false
    end
    # Guard: without an XTP map for the query no distance can be computed
    # (previously this crashed with NoMethodError on nil).
    if @query_xtp.nil?
      warn "Cannot query index, query XTP map is nil"
      return false
    end
    print_and_flush "\nSearching.."
    total = @index.length.to_f
    milestones = 0.0
    seen = 0.0
    results = @index.map do |_filename, content|
      seen += 1
      # Print a dot roughly every 10% of the index; announce the halfway mark.
      if (seen / total) * 10.0 > milestones
        print_and_flush "."
        milestones += 1
        print_and_flush "50%" if milestones == 5
      end
      {
        'file' => content.keys[0],
        'distance' => @query_xtp['xtp'].hamming_distance(content.values[0]['xtp'])
      }
    end
    # Drop sentinel distances first, then sort — cheaper than sort-then-select.
    results = results.select { |r| r['distance'] != -1 }.sort_by { |r| r['distance'] }
    print_and_flush("100%")
    results
  end

  # Prints str and flushes stdout so progress output appears immediately.
  def print_and_flush(str)
    print str
    $stdout.flush
  end
end