require 'open-uri'
require 'pp'
require 'time' # Time.parse (used on revision timestamps) lives here, not in core

require 'hpricot'

# Record for one wiki page pulled from the XML dump.
# hotness is the age of the latest revision in seconds; keywords is filled
# in later by Correlator#build.
Page = Struct.new( :id, :title, :url, :hotness, :text, :keywords )

# Number of top-frequency keywords to keep for each page.
KEYWORDS_PER_PAGE = 30
# Size of the site-wide "too common to be interesting" word list.
NUMBER_OF_COMMON_WORDS = 150

class Correlator
  # Parses a MediaWiki XML export, strips markup from each page, computes
  # per-page keyword lists, and finally drops into a tiny eval-based REPL
  # so the results can be explored interactively.
  #
  # input_filename  - path to the MediaWiki XML dump to read.
  # output_filename - reserved for a future export step (currently unused).
  def build(input_filename, output_filename)
    doc = Hpricot(open(input_filename))

    # TODO Scrape from (doc/'mediawiki/siteinfo/base')== http://uncyclopedia.org/wiki/Main_Page
    wiki_base_url = (doc/:mediawiki/:siteinfo/:base).inner_text.gsub('Main_Page', '')
    sitename = (doc/:mediawiki/:siteinfo/:sitename).inner_text

    puts "Scanning XML for wiki \"#{sitename}\""
    pages = []

    (doc/'mediawiki/page').each do |page_tag|
      page = Page.new

      page.id      = (page_tag/:id).inner_text
      page.title   = (page_tag/:title).inner_text
      page.url     = wiki_base_url + page.title
      page.hotness = Time.now - Time.parse((page_tag/:revision/:timestamp).inner_text)
      page.text    = (page_tag/:revision/:text).inner_text

      page.text.gsub!(/\<.*?\>/, ' ')     # remove <html tags>
      page.text.gsub!(/\{\{.*?\}\}/, ' ') # remove {{tags}}
      # BUGFIX: must be non-greedy like the two above -- the old /\[\[.*\]\]/
      # deleted everything between the FIRST "[[" and the LAST "]]" on a page.
      page.text.gsub!(/\[\[.*?\]\]/, ' ') # remove [[wiki tags]]
      page.text.gsub!(/[\n]/, ' ')        # flatten newlines
      page.text.gsub!('*', '')
      page.text.gsub!('"', '')

      # Don't store meta pages
      next if page.title.start_with?('MediaWiki:', "#{sitename}:")

      pages << page
    end
    puts "Parsed #{pages.length} pages."

    # We don't really want to correlate pages based on common words ("I",
    # "you", "is", "are", etc.) so identify them now, to be rejected later.
    all_text = pages.inject('') { |text_so_far, page| text_so_far + ' ' + page.text }
    $common_words = identify_keywords(all_text, NUMBER_OF_COMMON_WORDS, false)
    puts "Identified the top #{NUMBER_OF_COMMON_WORDS} common words across the wiki to reject"

    # Keywords per page
    pages.each do |page|
      page.keywords = identify_keywords(page.text, KEYWORDS_PER_PAGE, true)
    end
    puts "Identified the top #{KEYWORDS_PER_PAGE} keywords per page."

    # i can has REPL?
    # NOTE(review): eval of raw console input is a deliberate debugging aid;
    # never feed this from untrusted input.
    loop do
      print '> '
      line = $stdin.gets
      break if line.nil? # EOF: the old code eval'd nil forever via retry
      begin
        p eval(line)
      rescue StandardError => e
        # Was `rescue Exception ... retry`, which also swallowed Interrupt and
        # SystemExit (so Ctrl+C could not quit) and silently re-ran the input.
        puts "error: #{e.message}"
      end
    end
  end

  # Returns the how_many_words most frequent words in text, most frequent
  # first. Words are downcased and whitespace-split. When
  # reject_common_words is true, anything in the site-wide $common_words
  # list (populated by #build) is excluded before counting.
  def identify_keywords(text, how_many_words, reject_common_words)
    words = text.downcase.split(' ')
    # Kick out common words, then count frequencies.
    # ($common_words || []) guards the case where build has not run yet.
    words.reject! { |word| ($common_words || []).include?(word) } if reject_common_words

    freqs = Hash.new(0)
    words.each { |word| freqs[word] += 1 }

    freqs.sort_by { |_word, count| -count }.first(how_many_words).map { |word, _count| word }
  end
end

if __FILE__ == $0
  # Optional CLI overrides: ruby correlator.rb [input.xml [output.xml]]
  # ARGV.shift (rather than ARGV[0]) empties ARGV so that the REPL's
  # Kernel#gets reads stdin instead of treating the args as ARGF files.
  input_filename  = ARGV.shift || "pages_current_partial_short.xml"
  output_filename = ARGV.shift || "uncyc-links.xml"

  puts Time.now # timestamp the run so the two prints bracket the elapsed time
  Correlator.new.build(input_filename, output_filename)
  puts Time.now
end
  