from os import name
import re
from requests.models import Response
import urllib3
import requests
from selenium import webdriver
from time import sleep
import pymysql
from lxml import etree
# from fuzzywuzzy import fuzz
from datetime import datetime
import random
import pandas as pd

class training:
    """Scraper for training-agency listings on ubaike.cn.

    Logs in through the site's browser UI with Selenium, searches for
    "四川人力资源", walks the paginated result pages collecting each
    entry's title and detail-page URL, and writes the pairs into an
    Excel workbook.  A MySQL connection helper pair
    (``open_datas``/``close_datas``) is available for database
    persistence.
    """

    # Runs once at class-definition time: silences the
    # InsecureRequestWarning triggered by the verify=False requests below.
    urllib3.disable_warnings()

    userr = 'verus'             # site login account
    passwdd = 'a15938381591'    # site login password
    start_url = 'https://www.ubaike.cn/'             # site entry URL
    login_url = 'https://www.ubaike.cn/index.html'   # login page URL
    login_xpath = '/html/body/div[1]/div/div/nav[2]/span/a[1]'  # login entry link
    login_xpath2 = '//*[@id="login-show1"]/h6'       # secondary login entry (if present)
    login_user_xpath = '//*[@id="xm-login-user-name"]'       # username input
    login_passwd_xpath = '//*[@id="xm-login-user-password"]'  # password input
    login_button_xpath = '//*[@id="login_submit"]'   # login submit button
    # Page fetched once the session cookies have been collected.
    cookie_url = 'https://www.ubaike.cn/'
    # MySQL connection settings (used by open_datas).
    host = '192.168.0.113'
    port = 3306
    user = 'coname'
    passwd = 'xZS6p4LDcKCwn4Yb'
    charset = 'utf8'
    db = 'co'
    # Search input box and its submit button.
    find_xpath = '//*[@id="searchkey"]'
    find_button = '//*[@id="indexSearchForm"]/div/span/input'
    # List items holding the agency names on a results page.
    trains_dir = '/html/body/div[4]/div/div[2]/div/div[2]/ul[2]/li'

    def __init__(self):
        """Start a maximised Chrome instance and reset the scrape state."""
        options = webdriver.ChromeOptions()
        # Suppress the noisy "DevTools listening ..." console output on Windows.
        options.add_experimental_option("excludeSwitches", ["enable-logging"])
        self.bro = webdriver.Chrome(options=options)
        self.bro.maximize_window()
        self.resus = []    # unused accumulator, kept for compatibility
        self.names = []    # collected agency titles
        self.urls = []     # collected detail-page URLs
        self.isgo = True   # pagination flag: set False after the last page
        # Pool of User-Agent headers; one is chosen at random per request
        # to vary the request fingerprint.
        self.headers = [
            {"User-Agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"},
            {"User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"},
            {"User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)"},
            {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv"},
            {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv"},
            {"User-Agent": "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11"},
            {"User-Agent": "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11"},
            {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)"},
            {"User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)"},
        ]
        sleep(1)

    def train_login(self):
        """Log in through the browser UI and return the session cookies.

        Clicks use execute_script rather than element.click() so hidden or
        overlay-covered elements can still be activated.
        """
        # Open the page that hosts the login entry.
        self.bro.get(self.login_url)
        sleep(1)
        # Click the login entry link.
        login = self.bro.find_element_by_xpath(self.login_xpath)
        self.bro.execute_script("arguments[0].click();", login)
        sleep(1)
        # Click the secondary login entry (present on some page variants).
        login2 = self.bro.find_element_by_xpath(self.login_xpath2)
        self.bro.execute_script('arguments[0].click();', login2)
        sleep(1)
        # Fill in the account name.
        self.bro.find_element_by_xpath(self.login_user_xpath).send_keys(self.userr)
        sleep(1)
        # Fill in the password.
        self.bro.find_element_by_xpath(self.login_passwd_xpath).send_keys(self.passwdd)
        sleep(1)
        # Submit the form.
        button = self.bro.find_element_by_xpath(self.login_button_xpath)
        self.bro.execute_script("arguments[0].click();", button)
        sleep(1)
        return self.bro.get_cookies()

    def train_get_cookie(self):
        """Log in, copy the browser cookies into a requests session, and
        fetch cookie_url with that session.  Returns the Response."""
        cookies = self.train_login()
        req = requests.Session()
        for cookie in cookies:
            req.cookies.set(cookie['name'], cookie['value'])
        # verify=False: the class-level urllib3.disable_warnings() silences
        # the resulting InsecureRequestWarning.
        return req.get(self.cookie_url, verify=False)

    def atach(self):
        """Search for Sichuan HR training agencies and scrape all result
        pages, then save everything to Excel.

        NOTE(review): the method name looks like a typo of "attach"; it is
        kept unchanged because external callers may rely on it.
        """
        # Log in first so the browser session is authenticated.
        self.train_get_cookie()
        # Type the query and submit the search form.
        self.bro.find_element_by_xpath(self.find_xpath).send_keys("四川人力资源")
        find_button = self.bro.find_element_by_xpath(self.find_button)
        self.bro.execute_script('arguments[0].click();', find_button)
        sleep(1)
        # Click "more" to reach the full, paginated result listing.
        train_more = self.bro.find_element_by_xpath('//*[@id="getmorebtn"]/a')
        self.bro.execute_script('arguments[0].click();', train_more)
        sleep(1)
        # URL-encoded prefix of the paginated search results
        # ("四川人力资源"); pages are url_s + '<n>.html'.
        url_s = 'https://www.ubaike.cn/topic/search/%E5%9B%9B%E5%B7%9D%E4%BA%BA%E5%8A%9B%E8%B5%84%E6%BA%90'
        i = 1
        self.title(url_s, url_s + "1.html", i)
        sleep(1)
        self.save_mysql()

    def title(self, url_s, url, i):
        """Scrape title/URL pairs from result page *i* onwards.

        Walks pages url_s + '<n>.html' up to and including page 426,
        appending each entry's title to self.names and its href to
        self.urls.  Iterative rewrite of the original tail recursion,
        which grew the Python stack by one frame per page.

        :param url_s: URL prefix shared by all result pages.
        :param url:   full URL of the page to start from.
        :param i:     1-based index of that starting page.
        """
        while self.isgo:
            # Reuse the authenticated browser cookies for the raw HTTP fetch.
            cookies = self.bro.get_cookies()
            req = requests.Session()
            for cookie in cookies:
                req.cookies.set(cookie['name'], cookie['value'])
            reso = req.get(url, headers=random.choice(self.headers), verify=False)
            resp = etree.HTML(reso.text)
            list_titles = resp.xpath("/html/body/div[4]/div/div[2]/div/ul/li")
            for titles in list_titles:
                title = ''.join(titles.xpath(".//div/a//text()"))
                url_t = titles.xpath(".//div/a//@href")[0]
                print(title)
                print(url_t)
                sleep(1)  # throttle so the site isn't hammered
                self.names.append(title)
                self.urls.append(url_t)
            # Advance to the next page; stop after page 426 has been scraped.
            i = i + 1
            if i > 426:
                self.isgo = False
            url = url_s + str(i) + '.html'

    def save_mysql(self):
        """Write the collected (keywordid, srname, fileurl) rows to Excel.

        Reads human.xlsx as a template and writes the populated frame to
        human1.xlsx.  Despite the name, the MySQL path was disabled in the
        original code; use open_datas/close_datas to re-enable it.
        """
        sleep(1)
        # Timestamp prefix used to build a unique keywordid per row.
        detetmp = '{0:%y%m%d%H%M%S}'.format(datetime.now())
        db = pd.read_excel(r'D:\wky\spider\humansource\humansource\human.xlsx', sheet_name='Sheet1')
        # FIX: the original loop ran range(1, len(self.names)) and silently
        # dropped the last collected entry; iterating up to len(self.names)
        # inclusive writes every pair.  The per-row sleep(2) served no
        # purpose (pure in-memory writes) and was removed.
        for i in range(1, len(self.names) + 1):
            db.loc[i + 1, "srname"] = self.names[i - 1]
            db.loc[i + 1, "fileurl"] = self.urls[i - 1]
            db.loc[i + 1, "keywordid"] = detetmp + str(i)
        # FIX: dropped encoding='utf-8' — the kwarg was deprecated in
        # pandas 1.2 and removed in 2.0, so passing it crashes there.
        db.to_excel(excel_writer=r'D:\wky\spider\humansource\humansource\human1.xlsx', sheet_name="Sheet1", index=False, header=True)

    def open_datas(self):
        """Open the MySQL connection configured on the class.

        Returns (connection, cursor) on success; returns None (implicitly)
        and prints the error on failure.
        """
        try:
            self.conn = pymysql.connect(host=self.host, port=self.port, user=self.user,
                                        passwd=self.passwd, db=self.db, charset=self.charset)
            self.cursor = self.conn.cursor()
            print("打开数据库成功!")
            return self.conn, self.cursor
        except Exception as e:
            print("打开数据库失败>>>", e)

    def close_datas(self, conn, cursor):
        """Close the given cursor and connection, reporting any failure."""
        try:
            cursor.close()
            conn.close()
            print("关闭数据成功!")
        except Exception as e:
            print("关闭数据库失败>>>", e)

    def train_run(self):
        """Entry point: run the full login → search → scrape → save flow."""
        self.atach()


if __name__ == "__main__":
    # FIX: the original bound the instance to the name `training`,
    # shadowing the class itself; use a distinct name for the instance.
    scraper = training()
    scraper.train_run()
        