import json
from requests.sessions import RequestsCookieJar
from selenium import webdriver
import requests
from time import sleep
import pymysql
from selenium.webdriver.ie.webdriver import DEFAULT_SERVICE_LOG_PATH
from lxml import etree
from datetime import datetime
import urllib3
from retrying import retry
import random
import re
urllib3.disable_warnings()
def retry_error(exception):
    """Return True when *exception* is a NameError instance.

    Passed to the `retrying` library's hooks elsewhere in this module as a
    retry predicate.
    """
    is_name_error = isinstance(exception, NameError)
    return is_name_error

# Pool of User-Agent headers consumed by random.choice() in getHTML().
# All entries except the single MSIE 8.0 one are commented out — presumably
# disabled deliberately at some point; TODO confirm before re-enabling.
header=[#{"User-Agent":"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"},
#{"User-Agent":"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"},
# {"User-Agent":"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;"},
{"User-Agent":"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)"},
# {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv"},
# {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; rv"},
# {"User-Agent":"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11"},
# {"User-Agent":"Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11"},
# {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)"},
# {"User-Agent":"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)"}
]

# Maps dianping's obfuscated private-use-area glyphs to the digit each one
# renders ('1' maps to itself; the site shows that digit in plain text).
dianpingNumber={'\ue155':0,'1':1,'\ue4dd':2,'\ueef6':3,'\ued6f':4,'\uedc0':5,'\ue6f4':6,'\ue4ae':7,'\uf3df':8,'\ue13b':9}

# Translation table built once from the mapping above (hoisted out of the
# per-call, per-character re.sub loop the original ran).
_dianping_table = {ord(glyph): str(digit) for glyph, digit in dianpingNumber.items()}

def getReplace(txt):
    """Decode dianping's obfuscated digit glyphs in *txt* and strip newlines.

    Fixes vs. the original: one C-level str.translate pass replaces the
    per-character re.sub loop, and '\\n' is removed unconditionally —
    previously it was only stripped when at least one mapped character
    (including a literal '1') happened to be present in the text.

    :param txt: string scraped from the page, possibly containing glyphs.
    :return: string with glyphs replaced by ASCII digits and newlines removed.
    """
    return txt.translate(_dianping_table).replace('\n', '')

class publicDetail:
    """Crawl dianping.com beauty-shop detail pages into MySQL.

    Flow: open Chrome so a human can log in interactively, dump the session
    cookies to cookies.txt, then fetch each pending shop URL stored in the
    co_beautify table with `requests` and write the parsed address / phone /
    opening-info back to the same table.
    """
    # NOTE(review): hard-coded DB credentials — move to config / env vars.
    host='192.168.0.113'
    port=3306
    user='coname'
    passwd='xZS6p4LDcKCwn4Yb'
    charset='utf8'
    db='co'
    num=0        # not used by any visible method
    tamp=None    # per-run timestamp counter, initialised in spider()
    flag=True    # not used by any visible method

    def __init__(self):
        """Launch Chrome, wait for a manual login, persist cookies, quit the browser."""
        options = webdriver.ChromeOptions()
        options.add_experimental_option("excludeSwitches",["enable-logging"])
        self.bro = webdriver.Chrome(options=options)
        self.bro.maximize_window()
        self.start_url='https://www.dianping.com/'
        st_code = self.selenium_get()
        # The original if/else had two byte-identical branches; collapsed.
        # NOTE(review): retry_error's return value is discarded — this call
        # has no effect beyond re-loading the page inside selenium_get();
        # it looks like a misunderstanding of the `retrying` API.
        retry_error(self.selenium_get())
        print("最初状态"+str(st_code))
        sleep(1)
        # Click the login link; the 10-second sleep below gives a human time
        # to complete the login in the opened browser window.
        login = self.bro.find_element_by_xpath('//*[@id="top-nav"]/div/div[2]/span[1]/a[1]')
        self.bro.execute_script('arguments[0].click();',login)
        sleep(1)
        sleep(10)
        # Persist the logged-in session cookies for the requests session.
        cookies = self.bro.get_cookies()
        with open('cookies.txt','w') as f:
            json.dump(cookies,f)
        self.req = requests.Session()
        self.bro.close()

    @retry(retry_on_result=retry_error)
    def selenium_get(self):
        """Load start_url in the browser and probe it with a plain GET.

        :return: HTTP status code of the requests GET against start_url.

        NOTE(review): the retry predicate tests the *result* for being a
        NameError instance, which an int status code never is, so this
        decorator never actually retries — confirm intent against the
        `retrying` docs (retry_on_exception is probably what was meant).
        """
        self.bro.get(self.start_url)
        return requests.get(self.start_url,verify=False).status_code

    def open_mysql(self):
        """Open a pymysql connection.

        :return: (connection, cursor) tuple.
        :raises Exception: re-raised on connect failure — the original
            swallowed the error and returned None, which made callers crash
            on tuple-unpacking with a misleading TypeError instead.
        """
        try:
            self.conn = pymysql.connect(host=self.host,port=self.port,user=self.user,passwd=self.passwd,db=self.db,charset=self.charset)
            self.cursor=self.conn.cursor()
            print("连接数据库成功!")
            return self.conn,self.cursor
        except Exception as e:
            print("连接数据库失败>>>",e)
            raise

    def close_mysql(self,conn,cursor):
        """Close cursor then connection; failures are logged, not raised."""
        try:
            cursor.close()
            conn.close()
            print("关闭数据库成功!")
        except Exception as e:
            print("关闭数据失败！>>>",e)

    def input_mysql(self,datas):
        """Insert one scraped row into co_beautify, skipping duplicates by srname.

        :param datas: sequence [keywordid, sr_from, srname, fileurl,
            sr_comment, sr_adrl].
        """
        conn,cursor=self.open_mysql()
        try:
            # Parameterized queries — the original str.format-interpolated
            # scraped text straight into the SQL, which is injection-prone
            # and breaks on embedded quotes.
            if not cursor.execute('select srname from co_beautify where srname=%s',(datas[2],)):
                cursor.execute('insert into co_beautify (keywordid,sr_from,srname,fileurl,sr_comment,sr_adrl) values(%s,%s,%s,%s,%s,%s)',(datas[0],datas[1],datas[2],datas[3],datas[4],datas[5]))
                conn.commit()
                print("插入数据成功!")
        except Exception as e:
            print("插入数据失败!>>>",e)
        self.close_mysql(conn,cursor)

    def update_mysql_source(self,datas):
        """Write address / phone / opening-info back to the source row.

        :param datas: sequence [sr_address, sr_phone, sr_adrl, srname];
            srname selects the row to update.
        """
        conn,cursor=self.open_mysql()
        try:
            # Parameterized for the same injection/quoting reasons as
            # input_mysql.
            cursor.execute('update co_beautify set sr_address=%s,sr_phone=%s,sr_adrl=%s where srname=%s',(datas[0],datas[1],datas[2],datas[3]))
            conn.commit()
            print("更改源数据成功!")
        except Exception as e:
            print('更改数据失败',e)

        self.close_mysql(conn,cursor)

    def output_mysql(self):
        """Fetch dianping rows that still lack an address.

        :return: tuple of (keywordid, srname, fileurl) rows.
        """
        conn,cursor = self.open_mysql()
        cursor.execute('select keywordid,srname,fileurl from co_beautify where sr_from = "dianping" and sr_address is null')
        datas=cursor.fetchall()
        self.close_mysql(conn,cursor)
        return datas

    @retry(retry_on_result=retry_error)
    def getHTML(self,url):
        """Fetch *url* with the saved login cookies and a random User-Agent.

        :return: (html_text, status_code).

        Fixes vs. the original: a single GET is issued per call (it fired
        two identical requests, one for .text and one for .status_code),
        and the recursive retry result is now returned — previously the
        recursion's value was dropped, so the method returned None and the
        caller crashed on tuple-unpacking.
        """
        headers = random.choice(header)
        headers['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
        headers['Connection'] = 'keep-alive'
        try:
            # Refresh session cookies from the file written during login.
            with open('cookies.txt','r') as f:
                for cookie in json.load(f):
                    self.req.cookies.set(cookie["name"],cookie["value"])
            resp = self.req.get(url,verify=False,headers=headers)
            return resp.text,resp.status_code
        except Exception as e:
            print("进入子网页失败>>>",e)
            return self.getHTML(url)

    def spider(self):
        """Fetch every pending shop page and write parsed details back to MySQL."""
        self.tamp=int("{0:%y%m%d%H%M%S}".format(datetime.now()))
        datas = self.output_mysql()
        for data in datas:
            datas1=[]          # values handed to update_mysql_source
            item_strs=''       # '|'-joined opening-info items
            srname=data[1]     # shop name (row key for the update)
            url = data[2]      # detail-page URL; data[0] is keywordid, unused here
            print(url)
            self.tamp=self.tamp+1
            html,s_code=self.getHTML(url)
            if not s_code == 200:
                # NOTE(review): retry_error's result is discarded — the
                # original logged the bad status but took no corrective
                # action either.
                retry_error(not s_code == 200)
                print(s_code)
            try:
                st =  etree.HTML(html)
                # Opening-hours / info items under the basic-info block.
                list_item = st.xpath('//*[@id="basic-info"]/div[1]/span[@class="item"]')
                for item in list_item:
                    item_strs = item_strs+"|"+item.xpath('.//text()')[0]
                print(list_item)

                ad = st.xpath('//*[@id="basic-info"]/div[2]')
                addres = ad[0].xpath("string(.)")
                addres=re.sub("\n","",addres)
                # Raw string: the original "\s" raised a DeprecationWarning.
                address=re.sub(r"\s","",addres)

                ph = st.xpath('//*[@id="basic-info"]/p[1]')
                phon = ph[0].xpath("string(.)")
                phon = re.sub("\n","",phon)
                phone = re.sub(" ","",phon)

                datas1.extend([address,phone,item_strs,srname])
                print(datas1)
                self.update_mysql_source(datas1)

            except Exception as e:
                # Usually an IndexError from an empty xpath result; the
                # original swallowed the exception object entirely.
                print("下标越界>>>",e)

    def run(self):
        """Entry point: run the crawl."""
        self.spider()
if __name__=='__main__':
    # Script entry point: construct the scraper (performs the interactive
    # browser login) and start the crawl.
    crawler = publicDetail()
    crawler.run()