import json
from pymysql.cursors import Cursor
from requests.sessions import RequestsCookieJar
from selenium import webdriver
import requests
from time import sleep
import pymysql
from selenium.webdriver.ie.webdriver import DEFAULT_SERVICE_LOG_PATH
from lxml import etree
from datetime import datetime
import urllib3
from retrying import retry
import random
import re
urllib3.disable_warnings()
def retry_error(exception):
    """Retry predicate for the `retrying` decorator.

    Returns True when the given value is a NameError instance, signalling
    that the decorated call should be retried.
    """
    if isinstance(exception, NameError):
        return True
    return False

# Pool of User-Agent headers; one is chosen at random per request to make
# the scraper's traffic look less uniform. (Some entries intentionally keep
# their original leading space / truncated form.)
header = [
    {"User-Agent": ua}
    for ua in (
        "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
        "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;",
        "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
        " Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)",
        " Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv",
        "Mozilla/5.0 (Windows NT 6.1; rv",
        "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
        "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
        " Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)",
        " Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
    )
]

# Mapping from dianping's obfuscated digit glyphs (private-use Unicode code
# points rendered via a custom web font) to the real digits they display.
dianpingNumber={'\ue155':0,'1':1,'\ue4dd':2,'\ueef6':3,'\ued6f':4,'\uedc0':5,'\ue6f4':6,'\ue4ae':7,'\uf3df':8,'\ue13b':9}

def getReplace(txt):
    """Decode obfuscated digit glyphs in *txt* and strip whitespace noise.

    Every glyph found in ``dianpingNumber`` is replaced by its digit, then
    all newlines and spaces are removed.

    Fixes over the original:
    - the per-character ``re.sub`` loop (quadratic, and treated each glyph
      as a regex pattern) is replaced by plain ``str.replace``;
    - newline/space removal now happens unconditionally — previously it
      only ran when at least one mapped glyph was present, so otherwise
      identical strings were normalized inconsistently.

    :param txt: raw text extracted from a listing page
    :return: decoded text with '\\n' and ' ' removed
    """
    for glyph, digit in dianpingNumber.items():
        txt = txt.replace(glyph, str(digit))
    return txt.replace('\n', '').replace(' ', '')

class publicTitle:
    """Scraper for dianping.com beauty-shop listings.

    Workflow: launch Chrome via Selenium so the user can log in manually,
    dump the resulting cookies to ``cookies.txt``, then drive a
    ``requests.Session`` through the paginated listing
    (``/chengdu/ch85/g183p<n>``) and store parsed rows in MySQL.
    """

    # MySQL connection settings.
    # NOTE(review): credentials are hard-coded in source — move to
    # config/environment variables before any wider use.
    host = '192.168.0.113'
    port = 3306
    user = 'coname'
    passwd = 'xZS6p4LDcKCwn4Yb'
    charset = 'utf8'
    db = 'co'
    num = 0      # running counter, combined with a timestamp to build ids
    page = 1     # current listing page (1..41)
    flag = True  # set False once page 41 has been scraped; stops recursion

    def __init__(self):
        options = webdriver.ChromeOptions()
        options.add_experimental_option("excludeSwitches", ["enable-logging"])
        self.bro = webdriver.Chrome(options=options)
        self.bro.maximize_window()
        self.start_url = 'https://www.dianping.com/'
        st_code = self.selenium_get()
        # Bug fix: the original called retry_error(self.selenium_get()) in
        # both branches of an identical if/else — that performed an extra
        # page load whose result was discarded. A single fetch + print of
        # the status keeps the observable outcome.
        print("最初状态" + str(st_code))
        sleep(1)
        # Click the login link; execute_script avoids element-interception
        # problems that a plain .click() can hit on this page.
        login = self.bro.find_element_by_xpath('//*[@id="top-nav"]/div/div[2]/span[1]/a[1]')
        self.bro.execute_script('arguments[0].click();', login)
        sleep(1)
        # Give the user time to complete the login by hand in the browser.
        sleep(10)
        # Persist the logged-in cookies so the requests session can reuse them.
        cookies = self.bro.get_cookies()
        with open('cookies.txt', 'w') as f:
            json.dump(cookies, f)
        self.req = requests.Session()
        self.bro.close()

    @retry(retry_on_result=retry_error)
    def selenium_get(self):
        """Load the start page in the browser; return the status code seen
        by a plain (cookie-less) requests GET of the same URL.

        NOTE(review): ``retry_on_result`` is passed the *return value* (an
        int), so ``retry_error`` — which tests for NameError instances —
        never triggers a retry here. ``retry_on_exception`` was probably
        intended; left as-is to preserve behavior.
        """
        self.bro.get(self.start_url)
        return requests.get(self.start_url, verify=False).status_code

    def open_mysql(self):
        """Open a MySQL connection; return (connection, cursor).

        On failure prints the error and implicitly returns None (callers
        would then fail unpacking — pre-existing behavior, kept).
        """
        try:
            self.conn = pymysql.connect(host=self.host, port=self.port, user=self.user,
                                        passwd=self.passwd, db=self.db, charset=self.charset)
            self.cursor = self.conn.cursor()
            print("连接数据库成功!")
            return self.conn, self.cursor
        except Exception as e:
            print("连接数据库失败>>>", e)

    def close_mysql(self, conn, cursor):
        """Close the cursor, then the connection; errors are printed only."""
        try:
            cursor.close()
            sleep(1)
            conn.close()
            print("关闭数据库成功!")
        except Exception as e:
            print("关闭数据失败！>>>", e)

    def input_mysql(self, datas):
        """Insert one scraped record unless its srname already exists.

        Security fix: the original interpolated scraped (untrusted) text
        straight into the SQL string — now uses parameterized queries.

        NOTE(review): spider() currently builds ``datas`` with only two
        elements, so ``datas[2..5]`` raises IndexError which is swallowed
        by the except below — confirm the intended column mapping.
        """
        conn, cursor = self.open_mysql()
        try:
            if not cursor.execute('select srname from co_beautify where srname=%s', (datas[2],)):
                cursor.execute(
                    'insert into co_beautify (keywordid,sr_from,srname,fileurl,sr_comment,sr_adrl)'
                    ' values(%s,%s,%s,%s,%s,%s)',
                    (datas[0], datas[1], datas[2], datas[3], datas[4], datas[5]))
                conn.commit()
                print("插入数据成功!")
        except Exception as e:
            print("插入数据失败!>>>", e)
        self.close_mysql(conn, cursor)

    def update_mysql(self, datas):
        """Blank out address/phone and set sr_adrl for the row named datas[1].

        Security fix: parameterized query instead of string formatting.
        """
        conn, cursor = self.open_mysql()
        cursor.execute('update co_beautify set sr_address=%s,sr_phone=%s,sr_adrl=%s where srname = %s',
                       ("", "", datas[0], datas[1]))
        conn.commit()
        self.close_mysql(conn, cursor)

    def output_mysql(self):
        """Return all (srname, fileurl) rows from co_beautify."""
        conn, cursor = self.open_mysql()
        cursor.execute('select srname,fileurl from co_beautify')
        datas = cursor.fetchall()
        self.close_mysql(conn, cursor)
        return datas

    @retry(retry_on_result=retry_error)
    def getHTML(self, i):
        """Fetch listing page *i*; return (html_text, status_code).

        Bug fix: the original issued the same GET twice — once for ``.text``
        and once for ``.status_code`` — doubling traffic and risking the two
        responses disagreeing. One response provides both.
        """
        resp = self.req.get("https://www.dianping.com/chengdu/ch85/g183p" + str(i),
                            verify=False, headers=random.choice(header))
        return resp.text, resp.status_code

    def spider(self, i):
        """Scrape listing page *i*, store each shop row, then recurse to the
        next page until page 41 clears ``self.flag``.
        """
        if not self.flag:
            return
        # Load the cookies captured during the Selenium login into the session.
        with open('cookies.txt', 'r') as f:
            for cookie in json.load(f):
                self.req.cookies.set(cookie["name"], cookie["value"])
        tamp = '{0:%y%m%d%H%M%S}'.format(datetime.now())
        html, s_code = self.getHTML(i)
        # Bug fix: the original called retry_error(self.getHTML(i)) in both
        # branches of an identical if/else — an extra network round-trip
        # whose result was discarded. Just report the status.
        print(s_code)
        print(html)
        st = etree.HTML(html)
        li_list = st.xpath('//*[@id="shop-all-list"]/ul/li')
        for lis in li_list:
            datas = []
            self.num = self.num + 1
            keywordid = str(int(tamp) + self.num)
            fromer = 'dianping'
            srname = lis.xpath('.//div[@class="txt"]/div[@class="tit"]/a[1]/@title')[0]
            fileurl = lis.xpath('.//div[@class="txt"]/div[@class="tit"]/a[1]/@href')[0]
            # Robustness fix: initialize both fields so a failed xpath lookup
            # no longer leaves them unbound (NameError) or carrying the value
            # from the previous iteration.
            comment = ''
            commentList = ''
            try:
                comment = lis.xpath('.//div[@class="txt"]/div[@class="comment"]')[0].xpath('string(.)').strip()
                comment = getReplace(comment)
            except Exception as e:
                print("没有>>>", e)
            try:
                commentList = lis.xpath('.//div[@class="txt"]/span[@class="comment-list"]')[0].xpath('string(.)').strip()
                commentList = getReplace(commentList)
            except Exception as e:
                print("没有>>>", e)
            # NOTE(review): keywordid/fromer/comment/fileurl are computed but
            # not included here, while input_mysql expects six columns —
            # verify which fields were meant to be stored.
            datas.extend([commentList, srname])
            print("--------------------")
            self.input_mysql(datas)
        # Advance to the next page; stop after page 41.
        self.page = self.page + 1
        if self.page > 41:
            self.flag = False
        self.spider(self.page)

    def run(self):
        """Entry point: start scraping from the current page."""
        self.spider(self.page)
if __name__ == '__main__':
    # Build the scraper (launches Chrome for login) and start crawling.
    scraper = publicTitle()
    scraper.run()