import scrapy
from scrapyProject.spiders.DataBase import databaseCore
import MySQLdb
import time
import uuid


class urlManage(databaseCore):
    """Bookkeeping for crawled URLs in the ``urlmanage`` table.

    Tracks which URLs still need requesting, when each was last requested,
    and a hash of the content last seen, so unchanged pages can be skipped
    on later crawls.  A row's ``id`` is ``getID(url + spiderName)``; the
    content hash is stored in ``dataID``.
    """

    def InsertUnRequestUrl(self, url, spliderName):
        """Register *url* for spider *spliderName* as not-yet-requested.

        Does nothing when the URL is already known for that spider.
        """
        if not self.isHasUrl(url, spliderName):
            self.InsertOrUpdateData(
                tableName="urlmanage",
                valueForSet=[self.getID(url + spliderName), url, spliderName],
                colForSet=["id", "urlName", "spiderName"],
            )

    def InsertRequestUrl(self, url, spliderName):
        """Insert or refresh the row for *url*, stamping ``lastRequestTime``
        with the current wall-clock time."""
        idData = self.getID(url + spliderName)
        self.InsertOrUpdateData(
            tableName="urlmanage",
            valueForCompare=[idData], colForCompare=["id"], compareRule=["="],
            valueForSet=[idData, url, time.time(), spliderName],
            colForSet=["id", "urlName", "lastRequestTime", "spiderName"],
        )

    def UpdateUrlTime(self, url, spliderName, content):
        """If *content* differs from what was stored for *url*, record the
        new content hash in ``dataID`` and update ``lastUpdataTime``."""
        if self.IsNeedUpdate(url, spliderName, content):
            # Values are bound by the driver (parameterized query), never
            # interpolated into the SQL string.
            sql = 'update urlmanage set lastUpdataTime=%s,dataID=%s where id = %s '
            data = (time.time(), self.getID(content, code=""), self.getID(url + spliderName))
            cursor = self.m_conn.cursor()
            try:
                cursor.execute(sql, data)
                self.m_conn.commit()
            finally:
                # Release the cursor even when execute/commit raises.
                cursor.close()

    def IsNeedUpdate(self, url, spliderName, content):
        """Return True when no stored row matches both this URL's id and the
        hash of *content* -- i.e. the page is new or its content changed."""
        iCount = self.GetDataCount(
            tableName="urlmanage",
            compareValue=[self.getID(url + spliderName), self.getID(content, code="")],
            colName=["id", "dataID"], compareRule=["=", "="],
        )
        return iCount <= 0

    def isHasUrl(self, url, spliderName):
        """Return True when a row keyed by ``url + spliderName`` exists."""
        return self.IsHasData(tableName="urlmanage", compareValue=url + spliderName)

    def GetUnrequestUrl(self, spliderName):
        """Yield every URL of *spliderName* that was never requested or never
        produced an update (``lastRequestTime`` / ``lastUpdataTime`` NULL).

        Bug fix: the original interpolated *spliderName* directly into the
        SQL string (``... spiderName='%s'" % spliderName``) -- an SQL
        injection vector that also breaks on names containing quotes.  The
        value is now passed as a driver-bound parameter.
        """
        sql = ("select urlName from urlmanage "
               "where (lastRequestTime is NULL or lastUpdataTime is NULL) "
               "and spiderName=%s")
        cursor = self.m_conn.cursor()
        try:
            cursor.execute(sql, (spliderName,))
            for row in cursor.fetchall():
                yield row[0]
        finally:
            cursor.close()


class spiderBase(scrapy.Spider):
    """Base spider that records every URL it touches via ``urlManage`` and
    skips re-parsing pages whose content has not changed since the last
    crawl.

    Subclasses schedule requests through :meth:`RequestUrl` /
    :meth:`FromRequestUrl`; the callback they supply is invoked (through
    :meth:`parseData`) only when the fetched body is new or changed, unless
    ``isMustParse`` is True.
    """

    # Free-form category tags for subclasses.  NOTE(review): this is a
    # class-level *mutable* list shared by all subclasses that do not assign
    # their own -- subclasses should rebind it, not append to it.
    type = []

    def __init__(self, *args, **kwargs):
        # Bug fix: the original never called scrapy.Spider.__init__, so
        # scrapy's own initialization (name handling, kwargs binding) was
        # skipped.  Forwarding *args/**kwargs keeps the previous
        # zero-argument construction working.
        super().__init__(*args, **kwargs)
        self.urlManage = urlManage()
        # Maps the per-request uuid carried in request meta to the user
        # callback registered for that request.
        self.listCallBack = {}

    def getUnrequestUrl(self):
        """Yield this spider's URLs that still await a successful request."""
        yield from self.urlManage.GetUnrequestUrl(self.name)

    def AddNeedrequestUrl(self, url):
        """Record *url* as pending for this spider (no request is issued)."""
        self.urlManage.InsertUnRequestUrl(url, self.name)

    def _registerCallback(self, meta, callback, isMustParse):
        """Stamp *meta* with routing info and register *callback* under a
        fresh uuid; returns the (possibly newly created) meta dict.

        Shared by FromRequestUrl and RequestUrl, which previously duplicated
        this logic.
        """
        if meta is None:
            meta = {}
        meta["spiderCallBackID"] = uuid.uuid1()
        meta["spiderName"] = self.name
        meta["isMustParse"] = isMustParse
        self.listCallBack[meta["spiderCallBackID"]] = callback
        return meta

    def FromRequestUrl(self, url, isMustParse=False, method='POST',  # GET or POST
                       formdata=None,  # form payload to submit
                       callback=None,
                       headers=None, body=None,
                       cookies=None, meta=None, encoding='utf-8', priority=0,
                       dont_filter=False, errback=None, flags=None):
        """Build a :class:`scrapy.FormRequest` routed through parseData.

        ``lastRequestTime`` for *url* is stamped immediately, before the
        request is actually scheduled.
        """
        self.urlManage.InsertRequestUrl(url, self.name)
        meta = self._registerCallback(meta, callback, isMustParse)
        return scrapy.FormRequest(url=url, formdata=formdata, callback=self.parseData,
                                  method=method, headers=headers, body=body,
                                  cookies=cookies, meta=meta, encoding=encoding,
                                  priority=priority, dont_filter=dont_filter,
                                  errback=errback, flags=flags)

    def RequestUrl(self, url, isMustParse=False, callback=None, method='GET',
                   headers=None, body=None,
                   cookies=None, meta=None, encoding='utf-8', priority=0,
                   dont_filter=False, errback=None, flags=None):
        """Build a plain :class:`scrapy.Request` routed through parseData.

        ``lastRequestTime`` for *url* is stamped immediately.
        """
        self.urlManage.InsertRequestUrl(url, self.name)
        meta = self._registerCallback(meta, callback, isMustParse)
        return scrapy.Request(url=url, callback=self.parseData, method=method,
                              headers=headers, body=body,
                              cookies=cookies, meta=meta, encoding=encoding,
                              priority=priority, dont_filter=dont_filter,
                              errback=errback, flags=flags)

    def parseData(self, response):
        """Dispatch *response* to the registered user callback when its body
        is new/changed (or ``isMustParse`` was set in the request meta)."""
        # Bug fix: always remove the registered callback.  The original only
        # deleted the entry on the "needs update" branch, so callbacks for
        # unchanged pages accumulated in listCallBack forever (memory leak).
        callBack = self.listCallBack.pop(response.meta["spiderCallBackID"], None)
        if (self.urlManage.IsNeedUpdate(url=response.url, spliderName=self.name,
                                        content=response.body)
                or response.meta["isMustParse"]):
            self.urlManage.UpdateUrlTime(response.url, response.meta["spiderName"],
                                         response.body)
            # Bug fix: callback defaults to None in RequestUrl/FromRequestUrl;
            # the original crashed here with "'NoneType' is not callable".
            if callBack is not None:
                yield from callBack(response)

    def constructUrl(self, response, urlIn, headType="https://"):
        """Resolve *urlIn* against *response* and force the scheme prefix to
        *headType* (default ``https://``)."""
        joined = response.urljoin(urlIn)
        return headType + joined[joined.find("//") + len("//"):]