# link_collector.rb


require 'open-uri'
require 'hpricot'
require 'fatspider'

include GC

# Caches shared by the whole crawl:
#   domcache: url => Hpricot document (parsed DOM)
#   urlcache: url => true once fetched and parsed, false while pending
domcache = {}
urlcache = {}

# Seed the crawl frontier from the 'roots' file next to this script,
# one URL per line, each marked as not-yet-parsed.
open(File.dirname(__FILE__) + '/roots') {|f|
  f.each {|line|
    # Use chomp, not chomp!: chomp! returns nil when the line has no
    # trailing newline (e.g. the last line of the file), which would
    # register a bogus nil key in urlcache.
    url = line.chomp
    # Skip blank lines — an empty URL would crash open('') later.
    urlcache[url] = false unless url.empty?
  }  
}

# Fetch and parse every seeded root URL that is still pending, storing
# the resulting DOM in domcache and flagging the URL as parsed.
# Progress is reported (and flushed) on stdout after each URL.
urlcache.each_pair do |url, already_parsed|
  unless already_parsed
    domcache[url] = Hpricot(open(url))
  end
  urlcache[url] = true
  puts url + ' is cached'
  STDOUT.flush
end

# One crawl pass: harvest links from every cached DOM, then fetch and
# parse each newly discovered URL.
# NOTE(review): this begin/end runs exactly once — if a repeating crawl
# loop was intended, a `while`/`until` modifier is missing here; confirm.
begin
  # Phase 1: scan every parsed document for <a href> links and add any
  # trackable, not-yet-seen URL to the frontier (marked unparsed).
  domcache.each_pair do |url, dom|
    # Loop-invariant: the host depends only on the page URL, so hoist it
    # out of the per-anchor loop.
    host = FatUtil.host_of url
    (dom/'a').each do |anchor|
      href = anchor['href']
      next unless href
      # Resolve root-relative links against the page's host.
      href = host + href if href[0] == ?/
      urlcache[href] = false if FatUtil.trackable?(href) and not urlcache.include?(href)
    end
  end
  
  # Phase 2: fetch and parse every URL still marked unparsed.
  urlcache.select {|url, parsed| not parsed }.each do |pair|
    url = pair[0]
    # Was a bare `host_of(url)` call, which is undefined at top level and
    # raised NameError (outside the rescue below) on any root-relative
    # URL; use FatUtil.host_of as in phase 1.
    url = FatUtil.host_of(url) + url if url[0] == ?/
    begin
      body = ''
      puts 'Parse request: ' + url
      open(url) do |f|
        body = f.read  
      end
      # Skip ASP.NET pages carrying a __VIEWSTATE blob.
      # NOTE(review): the URL stays marked unparsed, so it would be
      # re-fetched on a subsequent pass — confirm that is intended.
      next if url =~ /\.aspx/i and body.include? '__VIEWSTATE'    
      puts 'Ready to parse: ' + url
      domcache[url] = Hpricot(body)
      urlcache[url] = true
      puts url + ' is cached'      
      puts '>>>>>> Use parsed dom, and destroy it'
      STDOUT.flush
      # Drop the DOM immediately to cap memory, then force a collection
      # (garbage_collect is available via the top-level `include GC`).
      domcache[url] = nil
      garbage_collect
    rescue => e
      # Best-effort crawl: a failed fetch/parse must not abort the run,
      # but the previous bare rescue swallowed every error silently.
      STDERR.puts 'Failed to process ' + url + ': ' + e.message
    end
  end
end








