require 'hpricot'   # gem
require 'open-uri'  # ruby stdlib

module SitemapSpider

  class Sitemap

    private

    # Drains @pages_to_crawl, downloading each record's URL and parsing it with
    # Hpricot to confirm the page is available. Successful records are appended
    # to @pages_crawled; failures go to @pages_failed_download (or re-raise when
    # @opts[:errors_are_fatal] is set). Sleeps @opts[:sleep_between_requests]
    # seconds between successful requests to throttle the crawl.
    def go_through_links
      until @pages_to_crawl.empty?
        smrec = @pages_to_crawl.shift
        begin
          # URI.open (not Kernel#open): Kernel#open lost URL support in Ruby 3.0
          # and treats a leading "|" as a shell command — a security hazard for
          # crawled, externally-supplied locations.
          Hpricot(URI.open(smrec.loc))
          @pages_crawled << smrec
          # NOTE(review): assumes @opts[:sleep_between_requests] is always set
          # to a numeric value — sleep(nil) would raise TypeError; confirm the
          # options are defaulted at construction time.
          sleep @opts[:sleep_between_requests]
        rescue => err
          report_download_error(smrec, err)
          # Bare raise re-raises the current exception with its original backtrace.
          raise if @opts[:errors_are_fatal]
          @pages_failed_download << smrec
        end
      end
    end

    # Downloads +smrec+ and uses Hpricot to extract its <a href> links.
    # Adds the record itself to @pages_crawled (before the download unless
    # @opts[:add_only_valid], after a successful download otherwise). Each newly
    # discovered URL that get_url accepts is queued on @pages_to_crawl as a new
    # SitemapRecord, unless it is already known or fails the max-URL-length check.
    def crawl_for_links(smrec)
      @pages_crawled << smrec unless @opts[:add_only_valid]
      begin
        # See go_through_links: URI.open avoids Kernel#open's URL deprecation
        # and pipe-command hazard.
        doc = Hpricot(URI.open(smrec.loc))
        @pages_crawled << smrec if @opts[:add_only_valid]
      rescue => err
        report_download_error(smrec, err, with_reference: @opts[:report_a_reference])
        raise if @opts[:errors_are_fatal]
        @pages_failed_download << smrec
        return
      end
      (doc/"a").each do |a|
        href = a['href']
        next unless href
        next unless new_url = get_url(smrec.url, href)
        new_smrec = SitemapRecord.new(new_url)
        next if known_record?(new_smrec) || url_too_long?(new_smrec)
        @pages_to_crawl << new_smrec
      end
    end

    # Writes a diagnostic line to $stderr when @opts[:debug] is set; no-op
    # otherwise. With +with_reference+ true, also reports where the failing URL
    # was found and the original href string.
    def report_download_error(smrec, err, with_reference: false)
      return unless @opts[:debug]
      if with_reference
        $stderr.printf("ERROR downloading url: %s :: %s, referenced from: %s :: as: %s\n",
          smrec.loc, err.inspect, smrec.url.referenced_from, smrec.url.orig_href_string)
      else
        $stderr.printf("ERROR downloading url: %s :: %s\n", smrec.loc, err.inspect)
      end
      $stderr.flush
    end

    # True when +rec+ has already been crawled, is already queued, or has
    # already failed to download.
    def known_record?(rec)
      @pages_crawled.include?(rec) ||
        @pages_to_crawl.include?(rec) ||
        @pages_failed_download.include?(rec)
    end

    # True when max-URL-length checking is enabled and +rec+'s location
    # exceeds @opts[:max_url_length].
    def url_too_long?(rec)
      @opts[:check_max_url_length] && rec.loc.length > @opts[:max_url_length]
    end
  end
end
