# A background processor that retrieves reports, runs searches, and performs other housekeeping.
class Crawler::Crawler < ActiveRecord::Base
  # Callbacks enforce a singleton: at most one crawler row exists, and its
  # process PID is published to a file for inter-process communication.
  before_create :remove_others
  after_create :write_pid

  # Job sources injected by the caller (not persisted).
  attr_accessor :sources

  # Logging verbosity threshold: 1 (low), 2, or 3 (high).
  VERBOSITY = 1

  # Returns the current crawler record if its process is still alive,
  # otherwise nil.
  def self.running
    # `find(:first)` was removed in Rails 4; `first` is equivalent here.
    crawler = first
    return nil unless crawler

    begin
      # Signal 0 performs an existence check without sending a signal; it
      # raises (Errno::ESRCH, TypeError on a nil pid, etc.) when the
      # process is gone, which we treat as "not running".
      Process.kill(0, pid)
      crawler
    rescue StandardError
      nil
    end
  end

  # Creates a new crawler instance (row + PID file via callbacks), as long
  # as one isn't already running.
  #
  # @raise [RuntimeError] if a crawler process is already alive
  def self.generate
    raise "Crawler is already running." if running
    create(:backlog => 0)
  end

  # Returns the PID of the current crawler as recorded in the PID file,
  # or nil if the file is missing/unreadable.
  def self.pid
    File.read(CONF.crawler_pid_file).to_i
  rescue StandardError
    nil
  end

  # Returns the last +lines+ lines of the crawler log, or nil when the log
  # is missing or +lines+ is not an integer.
  def self.log(lines)
    log_path = File.join(Rails.root, "log/crawler.log")
    # Integer replaces Fixnum and File.exist? replaces File.exists?
    # (both removed in Ruby 3.2). The integer check also keeps the shell
    # interpolation below safe.
    return nil unless lines.is_a?(Integer) && File.exist?(log_path)
    `tail -#{lines} #{log_path}`
  end

  # Main crawler loop: repeatedly runs whichever job's deadline is due,
  # otherwise sleeps until the next deadline. On error, emails the admin
  # in production; re-raises elsewhere.
  def go
    jobs = Crawler::Job.create_all(self)

    # Guard: with no jobs the loop below would crash on nil.deadline and
    # spuriously page the admin.
    if jobs.empty?
      log_error("No jobs to run; exiting")
      return
    end

    loop do
      # The job with the earliest deadline runs next; min_by is O(n) per
      # iteration versus the O(n log n) of re-sorting the whole list.
      oldest = jobs.min_by(&:deadline)

      if oldest.deadline <= Time.now
        log_info("Starting #{oldest.class}", :verbosity => 2)

        # Push the old deadline far into the future; the job is expected
        # to set a fresh deadline when it runs.
        oldest.deadline = Time.now + 1.year
        oldest.run
      else
        # Sleep until just after the next deadline. Re-check the diff in
        # case the deadline passed while we were computing it.
        diff = oldest.deadline - Time.now
        if diff > 0
          log_info("Sleeping #{diff}s", :verbosity => 2)
          sleep(diff)
        end
      end
    end
  rescue StandardError
    if Rails.env.production?
      # Email the admin rather than dying silently in production.
      AdminMailer.error($!).deliver
    else
      # A bare raise re-raises the current exception.
      raise
    end
  end

  # Shortcuts for logging at each severity level.
  def log_info(*args) log_event("info", *args) end
  def log_warning(*args) log_event("warning", *args) end
  def log_error(*args) log_event("error", *args) end

  # Writes a timestamped message to stdout, unless the message's
  # :verbosity option exceeds the class-wide VERBOSITY threshold.
  def log_event(level, msg, options = {})
    if !options[:verbosity] || options[:verbosity] <= VERBOSITY
      puts "#{Time.now.strftime('%c')} [#{level.capitalize}] #{msg}"
    end
  end

  private
    # before_create: enforce the singleton by deleting any stale rows.
    def remove_others
      self.class.delete_all
    end

    # after_create: record our PID so other processes can find/signal us.
    def write_pid
      File.open(CONF.crawler_pid_file, 'w') { |f| f.write(Process.pid.to_s) }
    end
end
