import requests
import pymysql
import re
from multiprocessing.dummy import Pool as ThreadPool


class XLY(object):
    """Scrape 智联招聘 (Zhaopin) job-posting pages whose links are stored in
    the local MySQL table ``gly`` and (optionally) persist the parsed fields
    into table ``xly``.
    """

    def __init__(self):
        # MySQL connection settings for the local `app_mark` database.
        self.host = '127.0.0.1'
        self.db = 'app_mark'
        self.user = 'root'
        self.passwd = '123456'
        self.charset = 'utf8mb4'
        # Desktop Chrome UA so the site serves the normal HTML page.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
        }

    def get_links(self):
        """Return the list of unprocessed (tag = "0") Zhaopin links from `gly`.

        Returns an empty list when the query fails or yields no rows.
        """
        links = []
        con = pymysql.connect(host=self.host, db=self.db, user=self.user, passwd=self.passwd, charset=self.charset)
        try:
            cur = con.cursor()
            sql = 'select link from gly where tag = "0" and sitename="智联招聘"'
            # update_tag = 'update gly set tag = "1" where tag = "0" and sitename="智联招聘"'
            try:
                cur.execute(sql)
                results = cur.fetchall()
                # cur.execute(update_tag)
            except Exception:
                con.rollback()
                # Use an empty tuple (not None) so the loop below is a no-op
                # instead of raising TypeError on a failed query.
                results = ()
            else:
                con.commit()
            cur.close()
        finally:
            # Close the connection even if cursor work raised unexpectedly.
            con.close()
        for result in results:
            links.append(result[0])
        return links

    def parse_data(self, link):
        """Fetch one job-posting page and extract its fields via regexes.

        Returns the parsed ``data`` dict, or None when any field is missing
        (page layout changed, posting removed, anti-bot page, ...).
        """
        data = {}
        response = requests.get(link, headers=self.headers)
        text = response.text
        try:
            # 标题 (title)
            title = re.findall(r'<h1 class="l info-h3">(.*?)</h1>', text, re.S)[0]
            # 薪资 (salary)
            money = re.findall(r'<li class="info-money">.*?<strong>(.*?)</strong>.*?</li>', text, re.S)[0]
            # 公司 (company)
            company = re.findall(r'<a rel="nofollow" href="http://company.zhaopin.com/.*?>(.*?)</a>', text, re.S)[0]
            # 地点经验要求 (location / experience requirements)
            info = re.findall(r'<div class="info-three l">(.*?)</div>', text, re.S)[0]
            info = re.sub(r'<.*?>', '', info)
            info = info.strip()
            # 职位亮点 (job highlights)
            welfare = re.findall(r"var JobWelfareTab = '(.*?)';", text, re.S)[0]
            # 职位信息 (job description)
            job_info = re.findall(r'<div class="pos-ul">(.*?)</div>', text, re.S)[0]
            job_info = re.sub(r'<[\s\S]*?>', '', job_info)
            # 工资简介 (company introduction)
            company_info = re.findall(r'<div class="jianjie">[\s\S]*?</div>', text)[0]
            company_info = re.sub(r'<[\s\S]*?>|\n', '', company_info).strip()
            data['title'] = title
            data['money'] = money
            data['company'] = company
            data['info'] = info
            data['welfare'] = welfare
            data['job_info'] = job_info
            # data['company_info'] = company_info
            print(data)
        except IndexError:
            # A findall(...)[0] found nothing -- page did not match the
            # expected layout; signal failure with None.
            data = None
        if data:
            # self.save_data(data)
            pass
        # Return the parsed dict (or None) so callers can use the result;
        # previously nothing was returned although the commented driver
        # loop expected `data = xly.parse_data(link)`.
        return data

    def save_data(self, data):
        """Insert one parsed posting into table `xly`.

        NOTE: expects data['company_info'] to be present, which parse_data
        currently leaves commented out.
        """
        # BUG FIX: was `user=self.db` (database name passed as the MySQL
        # user); must be `user=self.user` as in get_links().
        con = pymysql.connect(host=self.host, db=self.db, user=self.user, passwd=self.passwd, charset=self.charset)
        try:
            cur = con.cursor()
            list_data = [data['title'], data['money'], data['company'], data['info'], data['welfare'], data['job_info'], data['company_info']]
            # Parameterized query -- values are escaped by the driver.
            sql = 'insert into xly(title, money, company, info, welfare, job_info, company_info) values (%s, %s, %s, %s, %s, %s, %s)'
            try:
                cur.execute(sql, list_data)
                print('insert success')
            except Exception as e:
                con.rollback()
                print('insert error ', e)
            else:
                # BUG FIX: was `con.commit` (attribute access, never called),
                # so successful inserts were discarded on connection close.
                con.commit()
            cur.close()
        finally:
            con.close()


if __name__ == '__main__':
    xly = XLY()
    links = xly.get_links()
    # Only create the pool when there is work: previously ThreadPool(10)
    # was constructed unconditionally but close()/join() ran only when
    # `links` was truthy, leaking idle worker threads on the empty case.
    if links:
        pool = ThreadPool(10)
        try:
            # Fetch and parse all pending links concurrently (I/O-bound,
            # so dummy/thread pool is appropriate despite the GIL).
            pool.map(xly.parse_data, links)
        finally:
            pool.close()
            pool.join()
    # for link in links:
    #     link = link[0]
    #     data = xly.parse_data(link)