# -*- coding: utf-8 -*-
__author__ = 'yanshi'
import urllib2
import re
import pycurl
import StringIO
import MySQLdb
import chardet
import copy

from com.umessage.config import setting


class BaseProxy(object):
    """Base class for proxy-list scrapers.

    Fetches a page over HTTP, normalizes it to UTF-8 and extracts matches
    using the regex templates configured in ``setting``.  Subclasses are
    expected to set ``self.url`` and ``self.template`` before calling
    ``load()`` or ``getPageNum()``.
    """

    # Strips plain-text runs (no tags, digits or entities) that sit directly
    # before a closing '<' -- hoisted so the pattern is compiled only once.
    _STRIP_TEXT_RE = re.compile(r'([^<>\d&]+)(?=<)')

    def _getTemplate(self):
        # Look up this instance's extraction regex in the global settings.
        return setting.template[self.template]

    def _getHtml(self):
        # Cached page body; only valid after _fetchHtml() has run.
        return self.html

    def _getPageTemplate(self):
        # Subclasses override this to supply a page-number regex; None
        # means the site has no pagination.
        return None

    def _fetchHtml(self):
        """Download ``self.url``, re-encode it as UTF-8 (encoding detected
        with chardet) and cache the text-stripped result in ``self.html``."""
        print(self.url)
        res = urllib2.urlopen(urllib2.Request(self.url))
        try:
            raw = res.read()
        finally:
            # Close even when read() raises -- the original leaked the socket.
            res.close()
        detected = chardet.detect(raw)
        decoded = unicode(raw, detected['encoding'], 'ignore').encode("utf8")
        self.html = self._STRIP_TEXT_RE.sub(r'', decoded)

    def getPageNum(self):
        """Fetch the page and return the highest page number matched by the
        subclass's page template, or 0 when nothing matches."""
        self._fetchHtml()
        pageNums = re.findall(self._getPageTemplate(), self._getHtml(), re.I)
        return 0 if len(pageNums) <= 0 else max([int(i) for i in pageNums])

    def load(self):
        """Fetch the page and return an iterator of regex match objects
        produced by the main extraction template."""
        self._fetchHtml()
        return re.finditer(self._getTemplate(), self._getHtml())


class ProxySite(BaseProxy):

    def __init__(self, id, url, template):
        self.id = id
        self.url = url
        self.template = template

    def _getTemplate(self):
        return super(ProxySite, self)._getTemplate()['template']

    def _getPageTemplate(self):
        if not super(ProxySite, self)._getTemplate().has_key('pagenum'):
            return None
        return super(ProxySite, self)._getTemplate()['pagenum']

    def generatorPage(self, url, pageNums):
        urls = []
        if pageNums == 0:
            return urls

        page_url_temp = setting.page_url_temp[self.template]
        for num in xrange(2, pageNums + 1):
            urls.append((url + page_url_temp % (num), 1021))

        return urls

    def loadPage(self):
        urls = [(match.group('url'), match.group('time')) for match in super(ProxySite, self).load()]

        if len(urls) == 0:
            return urls

        test_url = urls[0][0]
        is_http_start = re.match('^http://', test_url)
        if is_http_start:
            return urls

        domain = self.url
        is_domain = re.match('^(/|[^h])', test_url)
        if is_domain:
            m = re.match('^(http://.*?)/', domain)
            if m:
                domain = m.groups()[0] + '/'

        result_urls = []
        for url in urls:
            try:
                result_urls.append((domain + url[0], url[1]))
            except Exception, e:
                print url, e.message

        #抓取详情页下的分页页面
        marge_urls = copy.deepcopy(result_urls)
        if self._getPageTemplate():
            for url in marge_urls:
                result_urls.extend(
                    self.generatorPage(url[0], ProxySite(id=0, url=url[0], template=self.template).getPageNum()))
        return result_urls


    def __str__(self):
        return 'id=%s ,url=%s, enable=%s, template=%s' % (self.id, self.url, self.enable, self.template)


class ProxyPage(BaseProxy):
    """A single proxy-listing page whose template yields ip/port pairs."""

    def __init__(self, id, url, template, updateTime, createTime):
        self.id = id                  # identifier of the page record
        self.url = url                # absolute URL of the page
        self.template = template      # key into setting.template
        self.updateTime = updateTime  # last-seen update time for the page
        self.createTime = createTime  # when the page record was created

    def _getTemplate(self):
        # Pages use the configured template as-is (no sub-key lookup).
        return super(ProxyPage, self)._getTemplate()

    def loadProxyInfo(self):
        """Fetch the page and return a list of (ip, port) string pairs."""
        matches = super(ProxyPage, self).load()
        return [(m.group('ip'), m.group('port')) for m in matches]

    def __str__(self):
        return 'id=%s ,url=%s, template=%s' % (self.id, self.url, self.template)


class ProxyInfo(object):
    """An ip:port proxy candidate with a measured response time."""

    def __init__(self, ip, port, ms=0):
        self.ip = ip
        self.port = int(port)  # port may arrive as a string from the regex
        self.ms = ms           # last measured latency; 0 means untested

    def getIp(self):
        return self.ip

    def getPort(self):
        return self.port

    def getMS(self):
        return self.ms

    def test(self):
        """Try to fetch the configured test site through this proxy.

        Returns True when the request completes with HTTP 200 within the
        3-second timeout, recording the total request time in ``self.ms``;
        returns False on any curl error (timeout, connection refused, ...).
        """
        url = setting.default_test_site['site']
        c = pycurl.Curl()
        sink = StringIO.StringIO()  # body is discarded; curl needs a writer
        c.setopt(c.URL, url)
        c.setopt(pycurl.PROXY, str('http://%s:%s' % (self.ip, self.port)))
        c.setopt(c.WRITEFUNCTION, sink.write)
        c.setopt(c.FOLLOWLOCATION, 1)
        c.setopt(c.HEADER, True)
        c.setopt(pycurl.CONNECTTIMEOUT, 3)
        c.setopt(pycurl.TIMEOUT, 3)
        try:
            try:
                c.perform()
            except Exception:
                # Unusable proxy; the bound name `e` was never used and the
                # `except E, e` form is obsolete.
                return False
            http_code = c.getinfo(pycurl.HTTP_CODE)
            http_total_time = c.getinfo(pycurl.TOTAL_TIME)
            # NOTE(review): TOTAL_TIME is in seconds, so *100000 is neither
            # ms nor us -- kept as-is since consumers may depend on this
            # scale; confirm the intended unit.
            self.ms = http_total_time * 100000
            return http_code == 200
        finally:
            c.close()  # the original leaked the curl handle




