# -*- coding: utf-8 -*-
import requests
from lxml import etree
import time,re
import pymysql
from requests.adapters import HTTPAdapter
from threading import Thread

# try:
#     db = pymysql.connect(host="127.0.0.1", user="root", database='pp', password="123456", port=3306, charset="utf8",
#                          cursorclass=pymysql.cursors.DictCursor)
#     cursor = db.cursor()
#     print('连接成功')
# except:
#     print('连接失败')

def getpage():
    """Scrape job listings from job.dajie.com and store them in MySQL.

    Walks the paginated search results (pages 1..13999), extracts per-job
    fields (title, salary, location, experience, education, company,
    job type, headcount) and inserts one row per job into ``dajie_copy1``.

    Side effects: HTTP requests, MySQL inserts, console printing.
    Returns None.
    """
    # Configure retries once, *before* any request is issued (the original
    # set this after each request, where it had no effect).
    requests.adapters.DEFAULT_RETRIES = 10

    headers = {
        # Fixed header name: the original key "user - agent" is not a valid
        # User-Agent header, so servers ignored it.
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 "
                      "(KHTML, like Gecko) Chrome/74.0.3702.0 Safari/537.36",
    }
    # Proxy values must be well-formed URLs; the original contained stray
    # spaces and a trailing " /" and could not be parsed.
    proxy = {
        'http': 'http://111.72.155.77:9999',
    }

    # Open ONE database connection for the whole run instead of reconnecting
    # for every scraped row; abort early if it fails (the original swallowed
    # the error and then crashed on an undefined ``cursor``).
    try:
        db = pymysql.connect(host="127.0.0.1", user="root", database='pp',
                             password="123456", port=3306, charset="utf8",
                             cursorclass=pymysql.cursors.DictCursor)
        cursor = db.cursor()
        print('连接成功')
    except pymysql.MySQLError as exc:
        print('连接失败', exc)
        return

    row_id = 0
    try:
        for page in range(1, 14000):
            base_url = 'https://job.dajie.com/qz1-p%d/' % page
            response = requests.get(url=base_url, headers=headers, proxies=proxy)
            html = etree.HTML(response.text)

            for item in html.xpath('//div[@class="jobList"]/ul/li'):
                # Every field is optional in the markup; collect them in
                # order and skip the row if any one is missing.
                # (xpath, post-processor) pairs; the post-processor strips
                # whitespace from the job title only, as the original did.
                specs = (
                    ('.//p/a[@class="jobName"]/text()', str.strip),
                    ('./div/p/span[@class="money"]/text()', None),
                    ('./div/p/span[@class="ads"]/text()', None),
                    ('./div/p/span[@class="suffer"]/text()', None),
                    ('./div/p/span[@class="edu"]/text()', None),
                    ('./div/p/a[@class="companyName"]/text()', None),
                    ('./div/p/span[@class="type"]/text()', None),
                    ('./div/p/span[@class="personNum"]/text()', None),
                )
                fields = []
                for xp, post in specs:
                    values = item.xpath(xp)
                    if not values:
                        break
                    value = post(values[0]) if post else values[0]
                    print(value)
                    fields.append(value)
                if len(fields) != len(specs):
                    continue  # at least one field was missing

                print('----------')
                row_id += 1
                # Parameterized INSERT: the driver quotes values, closing the
                # SQL-injection / broken-quote hole in the original
                # string-formatted statement.
                sql = ('insert into dajie_copy1 values '
                       '(%s,%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sql, [row_id] + fields)
                db.commit()
    finally:
        cursor.close()
        db.close()

def Fangtianxia():
    """Scrape rental listings from zu.fang.com for every city into MySQL.

    Reads the national city index, then every pagination page of each
    city's rental list, extracts per-listing fields and inserts one row
    per listing into ``fangtianxia_copy1_copy1``.

    Side effects: HTTP requests, MySQL inserts, console printing.
    Returns None.
    """
    # Configure retries before any request is issued (originally set after
    # the first request, where it had no effect on it).
    requests.adapters.DEFAULT_RETRIES = 10

    headers = {
        # Fixed header name: "User - Agent" (with spaces) is not a valid
        # User-Agent header and was ignored by servers.
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 "
                      "(KHTML, like Gecko) Chrome/74.0.3702.0 Safari/537.36",
        "Connection": "close",
    }
    # Proxy values must be full URLs; the original value lacked a scheme
    # and carried a trailing " /".
    proxy = {
        'http': 'http://111.75.193.24:48449',
    }

    # One database connection for the whole run instead of one per row;
    # abort early on failure (the original swallowed the error and later
    # used an undefined ``cursor``).
    try:
        db = pymysql.connect(host="127.0.0.1", user="root", database='pp',
                             password="123456", port=3306, charset="utf8",
                             cursorclass=pymysql.cursors.DictCursor)
        cursor = db.cursor()
        print('连接成功')
    except pymysql.MySQLError as exc:
        print('连接失败', exc)
        return

    row_id = 0
    try:
        response = requests.get(url='https://tj.zu.fang.com/cities.aspx',
                                headers=headers, proxies=proxy)
        response.encoding = 'utf-8'
        index_html = etree.HTML(response.text)
        # City links are protocol-relative ("//xx.zu.fang.com/").
        city_urls = ['https:' + href for href in
                     index_html.xpath('//div[@class="onCont"]/ul/li/a/@href')]

        for city_url in city_urls:
            city_html = etree.HTML(requests.get(url=city_url).text)
            page_info = city_html.xpath('//div[@class="fanye"]/span/text()')
            if not page_info:
                continue
            print('----------------------------')
            # First number in the pager text ("共N页"-style) is the page count.
            page_numbers = re.findall(r'\d+', page_info[0])
            if not page_numbers:
                continue
            page_count = int(page_numbers[0])
            print(page_count)

            for page in range(1, page_count + 1):
                listing_url = city_url + 'house/i3%s/' % page
                page_html = etree.HTML(requests.get(url=listing_url).text)
                listings = page_html.xpath('//dl[@class="list hiddenMap rel"]/dd')
                print(listings)
                for item in listings:
                    # NOTE: absolute XPath — this reads the page-level
                    # breadcrumb, not a node inside ``item``; we need its
                    # second entry, so guard the length (the original
                    # indexed [1] after only a non-empty check).
                    breadcrumb = item.xpath('//div[@class="guide rel"]/a/text()')
                    if len(breadcrumb) < 2:
                        continue
                    city_name = breadcrumb[1]
                    print(city_name)

                    title = item.xpath('./p[@class="title"]/a/text()')
                    if not title:
                        continue
                    title_text = title[0]
                    print(title_text)

                    # Rent, layout and area all come from the same detail
                    # line; evaluate the XPath once (the original evaluated
                    # it three times) and guard the [1]/[2] indexing.
                    details = item.xpath('./p[@class="font15 mt12 bold"]/text()')
                    if len(details) < 3:
                        continue
                    rent_text = details[0].strip()
                    print(rent_text)
                    layout_text = details[1]
                    print(layout_text)
                    area_digits = re.findall(r'\d+', details[2])
                    if not area_digits:
                        continue
                    area_text = area_digits[0] + 'm*m'
                    print(area_text)

                    address = item.xpath('./p[@class="gray6 mt12"]/a/span/text()')
                    if len(address) < 3:
                        continue
                    address_text = address[0] + '-' + address[1] + '-' + address[-1]
                    print(address_text)

                    price_span = item.xpath('./div/p[@class="mt5 alingC"]/span/text()')
                    price_tail = item.xpath('./div/p[@class="mt5 alingC"]/text()')
                    if not price_span or not price_tail:
                        continue
                    price_text = price_span[0] + price_tail[0]
                    print(price_text)

                    print('++++++++++++++++')
                    row_id += 1
                    # Parameterized INSERT: driver-side quoting closes the
                    # injection / broken-quote hole of the original
                    # string-formatted statement.
                    cursor.execute(
                        'insert into fangtianxia_copy1_copy1 values '
                        '(%s,%s,%s,%s,%s,%s,%s,%s)',
                        (row_id, city_name, title_text, rent_text,
                         layout_text, area_text, address_text, price_text))
                    db.commit()
    finally:
        cursor.close()
        db.close()
if __name__ == '__main__':
    # Launch both scrapers concurrently, one worker thread each.
    workers = [Thread(target=job) for job in (getpage, Fangtianxia)]
    for worker in workers:
        worker.start()

