require 'yaml'
require 'forwardable'
require 'http'
require 'proxy_pump/const'
require 'proxy_pump/proxy'
require 'proxy_pump/user_agents'
require 'proxy_pump/sources'
require 'proxy_pump/crawler'
require 'proxy_pump/fresh_keeper'
require 'proxy_pump/thread_pool'
require 'proxy_pump/daemonize'
module ProxyPump
  # Pump drives the proxy life cycle: it crawls proxy lists from the
  # configured source sites, verifies the crawled proxies, keeps a cache
  # of live proxies, and deletes stale records — each task running on its
  # own recurring interval (options[:crawl_interval], etc.).
  #
  class Pump

    extend Forwardable
    include ProxyPump::Daemonize

    # UTC timestamps of the most recent run of each scheduled task.
    # The private next_*? predicates compare these against the matching
    # interval option to decide when a task is due again.
    attr_reader :last_crawled_at, :last_cached_at
    attr_reader :last_deleted_at, :last_verified_at

    # Boot the pump and run the scheduling loop forever.
    #
    # Prints a startup banner, optionally daemonizes, writes the pidfile,
    # installs signal handlers, redirects IO, ensures the proxies table
    # exists, then ticks the four schedulers in an endless loop.
    # Any StandardError aborts the loop and is reported on stdout.
    def start
      if daemonize?
        $stdout.puts <<~HEADER
          => Booting ProxyPump Pump
          => ProxyPump Pump starting
          * Daemonizing...
        HEADER
        daemonize
      else
        $stdout.puts <<~HEADER
          => Booting ProxyPump Pump
          => ProxyPump Pump starting
          * Use Ctrl-C to shutdown pump
        HEADER
      end

      write_pidfile
      trap_signals
      # NOTE(review): redirect_io also runs in foreground (non-daemon)
      # mode — confirm that losing console output there is intentional.
      redirect_io
      Proxy.create_table
      loop do
        schedule_cache
        schedule_verify
        schedule_crawl
        schedule_delete
        # Pause between ticks so the loop does not busy-spin at 100% CPU
        # while none of the next_*? predicates is true yet.
        sleep 1
      end
    rescue => e
      # Include the exception class for parity with the logger format
      # used by the schedulers below.
      puts "#{e.message} (#{e.class})"
    end

    # Crawl every source (in random order) once the crawl interval has
    # elapsed. Each source is crawled on the thread pool; every valid
    # proxy found is immediately scheduled for verification. Per-source
    # failures are logged and do not abort the other sources.
    def schedule_crawl
      return unless next_crawl?

      ProxyPump.logger.debug '* Crawl proxies'
      sources.to_a.shuffle.each do |source|
        schedule do
          begin
            ProxyPump.logger.debug "* Crawl site #{source.url}"
            crawler.crawl(source) do |proxies|
              proxies.each do |attrs|
                proxy = Proxy.new attrs
                schedule { verify_proxy(proxy) } if proxy.valid?
              end
            end
          rescue => e
            ProxyPump.logger.error "#{e.message} (#{e.class})"
          end
        end # schedule
      end
      @last_crawled_at = Time.now.utc
      ProxyPump.logger.debug "* Next crawl at #{@last_crawled_at + options[:crawl_interval]}"
    end

    # Re-verify stored proxies whose last verification is older than the
    # verify interval; each proxy is verified on its own pool thread.
    def schedule_verify
      return unless next_verify?

      ProxyPump.logger.debug '* Verify proxies'
      schedule do
        Proxy.to_verify(options[:verify_interval]) { |proxy| schedule { verify_proxy proxy } }
      end # schedule
      @last_verified_at = Time.now.utc
      ProxyPump.logger.debug "* Next verify at #{@last_verified_at + options[:verify_interval]}"
    end

    # Delete proxy rows not seen within the delete interval, using the
    # raw SQL in Const::DELETE_PROXIES_SQL against the cutoff datetime.
    def schedule_delete
      return unless next_delete?

      ProxyPump.logger.debug '* Delete proxies'
      schedule do
        datetime = ProxyPump::Helpers.sqlite_ftime(Time.now.utc - options[:delete_interval])
        database.execute Const::DELETE_PROXIES_SQL, datetime
      end # schedule
      @last_deleted_at = Time.now.utc
      ProxyPump.logger.debug "* Next delete at #{@last_deleted_at + options[:delete_interval]}"
    end

    # Rebuild the in-memory cache from recently verified proxies once the
    # cache interval has elapsed.
    def schedule_cache
      return unless next_cache?

      ProxyPump.logger.debug '* Cache proxies'
      schedule do
        cache.clear
        Proxy.to_cache(options[:cache_interval]) { |proxy| cache.add proxy }
      end
      @last_cached_at = Time.now.utc
      ProxyPump.logger.debug "* Next cache at #{@last_cached_at + options[:cache_interval]}"
    end

    # Ping a single proxy and persist the outcome.
    #
    # On a successful ping the proxy is scored by speed (3 fast, 2 medium,
    # 1 slow), enriched with IP info, timestamped, and inserted or updated
    # in the database and cache. On a failed ping it is deleted from both.
    def verify_proxy(proxy)
      ProxyPump.logger.debug "* Verify proxy #{proxy.host}:#{proxy.port}"
      fresh_keeper.ping(proxy.host, proxy.port, timeout: options[:proxy_timeout]) do |elapsed|
        proxy.elapsed = elapsed
        if proxy.ping?
          proxy.score =
            if proxy.fast?
              3
            elsif proxy.slow?
              1
            else
              2
            end
          fresh_keeper.ipinfo(proxy.host) { |ipinfo| proxy.ipinfo = ipinfo }
          datetime = ProxyPump::Helpers.sqlite_ftime Time.now.utc
          existed_proxy = Proxy.find proxy.host, proxy.port
          if existed_proxy
            # Preserve the original creation time when refreshing a known proxy.
            proxy.created_at       = existed_proxy.created_at
            proxy.updated_at       = datetime
            proxy.last_verified_at = datetime
            proxy.update do
              # Remove before re-adding so the cache reflects the new score.
              cache.remove proxy
              cache.add proxy
            end
          else
            proxy.created_at       = datetime
            proxy.updated_at       = datetime
            proxy.last_verified_at = datetime
            proxy.save { cache.add proxy }
          end
        else
          proxy.delete { cache.remove proxy }
        end
      end
    end

    # Get a random agent header
    #
    def user_agent
      user_agents.random
    end

    # HTTP client preconfigured with a random User-Agent header.
    def http
      HTTP.headers('User-Agent' => user_agent)
    end

    # Application-wide logger.
    def logger
      ProxyPump.logger
    end

    # Lazily built collaborators, each constructed on first use.
    def crawler
      @crawler ||= Crawler.new(self)
    end

    def fresh_keeper
      @fresh_keeper ||= FreshKeeper.new(self)
    end

    def user_agents
      @user_agents ||= UserAgents.new(options[:user_agents])
    end

    def sources
      @sources ||= Sources.new(options[:sources])
    end

    def cache
      @cache ||= ProxyPump.cache
    end

    def database
      @database ||= ProxyPump.database
    end

    private

      # Shared worker pool bounded by the min/max thread options.
      def thread_pool
        @thread_pool ||= ThreadPool.new(options[:min_threads], options[:max_threads])
      end

      # Run a block asynchronously on the thread pool.
      def schedule(*args, &block)
        thread_pool.schedule(*args, &block)
      end

      # Each predicate is true on first run (timestamp still nil) or once
      # the corresponding interval has elapsed since the last run.
      def next_crawl?
        last_crawled_at.nil? || Time.now.utc - last_crawled_at > options[:crawl_interval]
      end

      def next_cache?
        last_cached_at.nil? || Time.now.utc - last_cached_at > options[:cache_interval]
      end

      def next_verify?
        last_verified_at.nil? || Time.now.utc - last_verified_at > options[:verify_interval]
      end

      def next_delete?
        last_deleted_at.nil? || Time.now.utc - last_deleted_at > options[:delete_interval]
      end

  end # Pump
end # ProxyPump
