class Crawler::FetchJob < Crawler::Job
  # Fetches from the single most-overdue enabled source (if any is past its
  # deadline), parses its items into storage under a time budget, and then
  # reschedules this job to run again in one second.
  def run
    # Pick the enabled source with the earliest (most overdue) deadline.
    # min_by is O(n) vs. sorting the whole list just to take the head.
    next_src = @crawler.sources.select(&:enabled).min_by(&:deadline)

    # if no sources, do nothing
    if next_src.nil?
      log_info("No enabled sources to fetch.")

    # if deadline is in past, run its job; otherwise do nothing
    elsif next_src.deadline <= Time.now
      log_info("Fetching from source: #{next_src.name}")
      # Push the deadline far into the future so this source is not selected
      # again while we work on it.
      # NOTE(review): presumably something downstream resets the deadline to a
      # real value after the crawl — confirm against the source's fetch/parse.
      next_src.deadline = Time.now + 1.year

      # Fetch raw data from the source and time the operation.
      start_fetch = Time.now
      fetched = next_src.fetch
      finish_fetch = Time.now

      # Parse/insert items one at a time until the source is exhausted or the
      # configured time budget is spent.
      start_insert = Time.now
      inserted = 0
      while next_src.parse_one
        inserted += 1
        # check if we've exceeded the parsing time limit
        if Time.now - start_insert > CONF.source_parse_time_limit
          # add a warning and stop parsing the source. we will resume next crawl
          log_warning("Ran out of time when parsing #{next_src.name}. Limit is #{CONF.source_parse_time_limit} seconds.")
          break
        end
      end
      finish_insert = Time.now

      fetch_time = (finish_fetch - start_fetch).to_f.round(1)
      insert_time = (finish_insert - start_insert).to_f.round(1)
      log_info("Fetching from source: #{next_src.name} complete. Fetched #{fetched} (#{fetch_time} s), " \
        "inserted #{inserted} (#{insert_time} s), #{next_src.queue_size} in queue.")
    end

    # Schedule this job to run again in one second.
    @deadline = Time.now + 1.second
  end
end