#!/usr/bin/python
# coding: utf-8

import sys
sys.path.append("..")
import urlparse
import urllib2
import random
import time
from datetime import datetime, timedelta
import socket
from Config.Config import DEFAULT_AGENT, DEFAULT_DELAY, DEFAULT_RETRIES, DEFAULT_TIMEOUT, TimeSleep, COOKIES

class SoftInfoDownloader(object):
    def __init__(self, delay=DEFAULT_DELAY, user_agent=DEFAULT_AGENT, proxies=None, num_retries=DEFAULT_RETRIES,
                 timeout=DEFAULT_TIMEOUT, opener=None, cache=None):
        socket.setdefaulttimeout(timeout)
        self.throttle = Throttle(delay)
        self.user_agent = user_agent
        self.proxies = proxies
        self.num_retries = num_retries
        self.opener = opener
        self.cache = cache

    def download(self, url, headers, proxy, num_retries, data=None):
        # print 'Downloading:', url
        headers = {"User-Agent" : headers}
        request = urllib2.Request(url, data, headers or {})
        opener = self.opener or urllib2.build_opener()
        # 如果使用代理的话直接把这个注销给修改过来即可
        # if proxy:
        #     proxy_params = {urlparse.urlparse(url).scheme: proxy}
        #     opener.add_handler(urllib2.ProxyHandler(proxy_params))
        try:
            response = opener.open(request)
            html = response.read()
            code = response.code
        except Exception as e:
            # time.sleep(TimeSleep)
            print u'下载出错，错误原因如下:', str(e)
            html = None
            if hasattr(e, 'code'):
                code = e.code
                if num_retries > 0 and 500 <= code < 600:
                    # retry 5XX HTTP errors
                    # 此时是服务器出错，则继续进行下载即可
                    print u"尝试进行第%d此下载" % (num_retries - 1)
                    return self.download(url, headers, proxy, num_retries - 1, data)
            else:
                code = None
        return html


class Throttle:
    """Rate-limit requests so that the same domain is never hit more
    often than once every `delay` seconds.
    """

    def __init__(self, delay):
        # Minimum number of seconds between two hits on one domain.
        self.delay = delay
        # Maps domain -> datetime of the most recent access.
        self.domains = {}

    def wait(self, url):
        """Sleep just long enough to honour the delay for url's domain."""
        domain = urlparse.urlsplit(url).netloc
        previous = self.domains.get(domain)
        if previous is not None and self.delay > 0:
            elapsed = (datetime.now() - previous).seconds
            remaining = self.delay - elapsed
            if remaining > 0:
                time.sleep(remaining)
        self.domains[domain] = datetime.now()
