#coding:utf-8
import re
import mysql.connector
from spider.conf import conf
from lxml import etree
from Opener import Opener

class baseSpider:
    """Base class for web spiders that persist results to a MySQL table.

    Lifecycle: start(url) -> run(url) -> opener fetch -> extract(html)
    -> insert(record).  Subclasses override extract() (and optionally
    start() / existUrl() / debug()).
    """
    # One shared HTTP opener for every spider instance.
    opener = Opener()

    def __init__(self, table="spider"):
        """Open the DB connection and ensure the target table exists.

        table: name of the MySQL table records are written to.  It is
        interpolated into SQL as an identifier (identifiers cannot be
        bound as parameters), so it must never come from untrusted input.
        """
        self.conn = mysql.connector.connect(user=conf.USERNAME,
                                            password=conf.PASSWORD,
                                            database=conf.DATABASE,
                                            use_unicode=True)
        self.cursor = self.conn.cursor()
        self.table = table
        self.createTable()

    def createTable(self):
        """Create the result table if it does not already exist."""
        # NOTE(review): self.table is concatenated into the statement; this
        # is only safe because table names come from trusted code, never
        # from user input.
        self.cursor.execute("""create table if not exists """ + self.table + """ (
        `id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY,
        `url` varchar(1024) NOT NULL,
        `site` varchar(512) NOT NULL,
        `pubtime` varchar(20) NOT NULL DEFAULT '',
        `fetchtime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
        `title` varchar(1024) NOT NULL,
        `content` blob NOT NULL,
        `searchword` varchar(128) NOT NULL,
        `md5` varchar(32) NOT NULL
        )""")
        self.conn.commit()

    def __del__(self):
        # Best-effort cleanup: attributes may be missing when __init__
        # failed part-way, and raising inside __del__ only produces noise.
        try:
            self.cursor.close()
            self.conn.close()
        except Exception:
            pass

    def run(self, url):
        """Fetch url, extract records and insert them into the table."""
        print("visit-----" + url)
        code = self.opener.openurl(url)
        self.debug(code)
        if not code:
            return None
        html = etree.HTML(code)
        data = self.extract(html)
        # Guard: the base extract() returns None; iterating it would raise.
        if not data:
            return None
        for res in data:
            self.insert(res)

    def insert(self, res):
        """Insert one record (dict, or list already in column order).

        Returns True on success, False when res is unusable or violates a
        uniqueness constraint (duplicate md5/url).
        """
        if isinstance(res, dict):
            params = [
                res.get("url", ""),
                res.get("site", self.getSiteByUrl(res.get("url", ""))),
                res.get("pubtime", ""),
                res.get("title", ""),
                res.get("content", ""),
                res.get("searchword", ""),
                res.get("md5", ""),
            ]
        elif isinstance(res, list):
            params = res
        else:
            return False
        if (not params):  # or self.existUrl(params[0]):
            return False
        try:
            self.cursor.execute(
                'insert into ' + self.table +
                ' (url,site,pubtime,title,content,searchword,md5)'
                ' values(%s,%s,%s,%s,%s,%s,%s)', params)
            self.conn.commit()
        except mysql.connector.errors.IntegrityError:
            # Duplicate key -- silently skip the record.
            return False
        return True

    def debug(self, code):
        # Hook for subclasses: inspect/dump the raw page source here.
        pass

    def getInnerText(self, lable):
        """Return the text content of an lxml element with tags stripped."""
        # encoding="unicode" yields str directly; unlike the old GBK
        # round-trip it cannot mangle characters outside GBK into
        # character references.
        text = etree.tostring(lable, encoding="unicode")
        return re.subn('<[^>]+>', '', text)[0]

    def getSiteByUrl(self, url):
        """Return the host part of an http(s) URL, or "" when absent."""
        # Capture group instead of lookbehind: re lookbehinds must be
        # fixed-width, which cannot express the optional "s" of https.
        site = re.findall(r'https?://([^/]+)', url)
        if site:
            return site[0]
        return ""

    def getQueryByUrl(self, url):
        """Return the path-and-query part of an http(s) URL, default "/"."""
        query = re.findall(r'https?://([^/]+)(.+)', url)
        if query and query[0] and query[0][1]:
            query = query[0][1]
            if query[0] != '/':
                # Defensive only: the regex guarantees a leading '/'.
                query = "/"
            return query
        return "/"

    def start(self, url):
        # Override this to build a custom request sequence before run().
        self.run(url)

    def extract(self, html):
        """Override in subclasses: parse an lxml document and return an
        iterable of records for insert(), each covering the columns
        url, site, pubtime, title, content, searchword, md5."""
        pass

    def existUrl(self, url):
        # Override for de-duplication; return True when url is NOT a
        # duplicate.
        return True