#!/usr/bin/env python
#coding=utf-8
import socket
import urllib2
import urllib
import cookielib
import sys
import time
import logging

class DefaultErrorHandler(urllib2.HTTPDefaultErrorHandler):
    def http_error_default(self, req, fp, code, msg, headers):
        result = urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
        result.status = code
        return result

class WebError(Exception):
    """Fatal network failure: raised once all retry attempts are exhausted."""
    pass


class WebFinishException(Exception):
    """Raised to abort fetching early when the stop flag (isSet) is raised."""
    pass

class Web(object):
    headers = {
        'User-Agent'        : 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; CIBA; MAXTHON 2.0)',
        'Accept'            : '*/*',
        'Accept-Language'   : 'zh-cn',
        'Content-Type'      : 'application/x-www-form-urlencoded',
        'UA-CPU'            : 'x86',
        'Cache-Control'     : 'no-cache',
        'Accept-Encoding'   : '*/*',
    }
    isSet = False#停止标志
    cookies = None

    def __init__(self, log = logging, proxy = None, timeout = 30, interval_time = 180, try_times = 3):
        self.log = log
        self.proxy = proxy
        self.interval_time = interval_time
        self.try_times = try_times

        self.set_timeout(timeout)
        self.cj = cookielib.CookieJar()
        self._build_opener()

    def set_timeout(self, timeout):
        socket.setdefaulttimeout(timeout)

    def _build_opener(self):
        handlers = [urllib2.HTTPCookieProcessor(self.cj), 
                    DefaultErrorHandler()]
        if self.proxy:
            handlers.append(urllib2.ProxyHandler({'http':self.proxy}))
        self.__opener = urllib2.build_opener(*handlers)

    def fetch(self, url, data = None,referer = ''):
        self.log.debug('\n[url]%s\n[data]%s' % (url, data))
        if referer:
            self.headers['referer'] = referer

        if isinstance(data,dict):
            data = urllib.urlencode(data)

        request = urllib2.Request(url=url, data=data, headers=self.headers)
        try:
            return self._open(request)
        except WebError, e:
            self.log.error(unicode(e))
            sys.exit()
        except WebFinishException, e:  
            self.log.info(unicode(e))


    def sleepWithSet(self, seconds):
        """在sleep的时候每隔1s检查set标志位
        """
        self.log.info(u'等待%s秒' % self.interval_time)
        for i in range(seconds):
            if self.isSet: 
                break
            else:
                time.sleep(1)

    def _process_not_200(self, response):
        if response.code != 200:
            self.log.warning(u'远程服务器未正常返回，响应码：%s' % response.code)
            return True

    def _processError(self, e):
        if hasattr(e, 'reason'):
            self.log.warning(u'访问远程服务器失败，原因：%s' % e.reason)         
        elif hasattr(e, 'code'):
            self.log.warning(u'请求不能满足远程服务器，错误代码：%s' % e.code)

    def _open(self, request): 
        def __process_set():
            if self.isSet:
                raise WebFinishException(u'isSet为True，程序中断') 

        def __try(times):
            times -= 1
            if times > 0:
                self.sleepWithSet(self.interval_time)
                return times
            else:
                raise WebError(u'网络出问题，程序即将关闭')

        times = self.try_times
        while times > 0:
            __process_set()
            try:
                response = self.__opener.open(request)
                if self._process_not_200(response):
                    times = __try(times)
                    continue
                data = response.read()
            except IOError, e:
                self._processError(e)
                times = __try(times)
            else:
                # 记录cookie
                self.cookies = self.cj.make_cookies(response, request)
                return data  

if __name__ == '__main__':
    import parse
    def try_many_times():
        web = Web(timeout = 1, interval_time = 1)
        f = web.fetch('http://www.iip138.com/ip2city.asp')
        print f.read()

    def not_try():
        web = Web(timeout = 1, try_times = 1)
        f = web.fetch('http://www.iip138.com/ip2city.asp')
        print f.read()

    import threading
    def try_but_set():
        def _web_set(web):
            time.sleep(5)
            web.isSet = True

        def _thread_set(web):
            t = threading.Thread(target = _web_set, args = (web, ))
            t.setDaemon(True)
            t.start()

        web = Web(timeout = 1, interval_time = 100)
        _thread_set(web)
        f = web.fetch('http://www.iip138.com/ip2city.asp')
        print f

    def cookie():
        web = Web()
        f = web.fetch('http://www.baidu.com')
        print f
        print web.cookies
        print web.cookies[0].name, web.cookies[0].value

    #try_many_times()
    #not_try()
    #try_but_set()
    cookie()
