module Spider
  # Single-threaded web crawler. Starting from :start_page it drains a URL
  # queue, fetches each page with Net::HTTP, optionally extracts data via a
  # regexp, and persists results through a user callback or an in-memory list.
  class Agent
    attr_accessor :start_page, :continue_method, :extract_method, :extract_regexp,
                  :multi_line, :named_captures, :save_method, :headers, :encoding,
                  :need_parse, :model

    # options:
    #   :start_page      - first URL to crawl
    #   :continue_method - proc(url, body) used to discover follow-up URLs
    #                      (typically calls #push_url_queue)
    #   :extract_regexp  - regexp used to pull data out of a page body
    #   :extract_method  - flag; when set, regexp extraction is skipped.
    #                      NOTE(review): the original comments contradict each
    #                      other about this flag's polarity; the original
    #                      runtime behavior (`unless @extract_method`) is kept.
    #   :multi_line      - nil => one Hash per page; truthy => Array of Hashes
    #   :named_captures  - positional names for the regexp capture groups
    #   :save_method     - proc(result, url) used to persist results
    #   :headers         - request headers passed to Net::HTTP
    #   :encoding        - source page encoding; body is transcoded to UTF-8
    #   :model           - key used when storing results without :save_method
    #                      (was read as @model but never assigned — fixed)
    #   :logger          - a Logger instance, or true for Logger.new(STDOUT)
    def initialize(options = {})
      @start_page      = options[:start_page]
      @continue_method = options[:continue_method]
      @extract_regexp  = options[:extract_regexp]
      @extract_method  = options[:extract_method]
      @multi_line      = options[:multi_line]
      @named_captures  = options[:named_captures]
      @save_method     = options[:save_method]
      @headers         = options[:headers]
      @encoding        = options[:encoding]
      @need_parse      = options[:need_parse]
      @model           = options[:model]
      @logger          = options[:logger] == true ? Logger.new(STDOUT) : options[:logger]
      @url_queue       = [@start_page].compact
      @captured_queue  = []
      @results         = []
      @running         = false
    end

    # Drains the URL queue, crawling each page in turn.
    # Returns the accumulated results array.
    def start
      # If a crawl is already in progress, newly pushed URLs will be picked
      # up by the running loop — re-entering would wipe @results mid-crawl.
      return @results if @running

      @running = true
      begin
        until @url_queue.empty?
          url = @url_queue.shift
          @captured_queue << url
          parse_page(url)
        end
      ensure
        @running = false
      end
      @results
    end

    # Enqueues one or more URLs, skipping any already queued or already
    # crawled (the original compared the splat ARRAY against string entries,
    # so the dedup never matched and the array itself was enqueued).
    # Resumes the single-threaded crawl if one is not already running.
    def push_url_queue(*urls)
      urls.each do |url|
        next if @url_queue.include?(url) || @captured_queue.include?(url)
        @logger.debug "push_url_queue #{url}" if @logger
        @url_queue << url
      end
      start
    end

    private

    # Fetches a single page and hands the response off for parsing.
    def parse_page(url)
      @logger.debug "crawling page: #{url}" if @logger
      response = Net::HTTP.get_response(URI.parse(url.to_s), @headers)
      @logger.debug "response code: #{response.code}" if @logger
      parse_response(response, url)
    end

    def continue_uri
    end

    # Dispatches on the HTTP status: successful pages are parsed/extracted,
    # redirects and failures are only logged.
    def parse_response(response, url)
      # Iconv was removed from the stdlib in Ruby 2.0; String#encode is the
      # supported replacement (invalid/undefined bytes are dropped, matching
      # the original //IGNORE semantics).
      body = if @encoding.nil?
               response.body
             else
               response.body.encode("UTF-8", @encoding,
                                    invalid: :replace, undef: :replace, replace: "")
             end

      if response.is_a? Net::HTTPSuccess
        @logger.debug "crawling success: #{url}" if @logger
        @continue_method.call(url, body) if @continue_method # discover next URLs
        results = extract_by_regexp(url, body) unless @extract_method
        save_results(url, results) if results # don't persist empty extractions
      elsif response.is_a? Net::HTTPRedirection
        @logger.debug "crawling redirect: #{response['location']}" if @logger
      else
        # was `uri.to_s` — an undefined local that raised NameError here
        @logger.debug "crawling nothing: #{url}" if @logger
      end
    end

    # Scans +response_body+ with @extract_regexp and names the capture groups
    # positionally from @named_captures.
    # Returns a Hash (flat mode, @multi_line nil) built from the first match's
    # flattened groups, an Array of Hashes (one per match) in multi-line mode,
    # or nil when nothing matched.
    def extract_by_regexp(url, response_body)
      matches = response_body.scan(@extract_regexp)
      return nil if matches.empty?

      @logger.debug "response body extract" if @logger
      if @multi_line.nil?
        # Flat mode: a single hash from the flattened capture groups.
        flat = matches.flatten
        flat.each_index.each_with_object({}) do |i, hash|
          hash[named_captures[i].to_sym] = flat[i]
        end
      else
        # One hash per match. The original indexed `reg_ret[j]` (the j-th
        # MATCH) instead of the j-th group of the current match — fixed.
        matches.map do |groups|
          groups.each_index.each_with_object({}) do |j, hash|
            hash[named_captures[j].to_sym] = groups[j]
          end
        end
      end
    end

    # Persists one page's extraction result, either through the user-supplied
    # save proc or by appending to the in-memory @results list.
    def save_results(url, result)
      if @save_method
        @save_method.call(result, url.to_s)
      else
        # @model was read but never assigned in the original (NoMethodError on
        # nil); it is now taken from options[:model] with a safe fallback.
        key = (@model || "result").to_s.downcase.to_sym
        @results << { key => result, :page => url.to_s }
      end
    end
  end
end
