import  time,pymysql
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from datetime import datetime
from crawlab import save_item

# MySQL connection used to persist scraped second-hand-housing listings.
# NOTE(review): credentials are hard-coded — consider moving to env/config.
conn = pymysql.connect(
    host="172.16.10.201",
    port=3306,
    user="zy001",
    password="zy@123",
    database="crawl",
    charset='utf8',
)
cursor = conn.cursor()

# Parameterized INSERT for one listing row (executed in batches per page).
sql = (
    "insert into bieke_sale(url_list,url,title,area,community,other,"
    "followers,pub_time,total_price,unit_price,create_time) "
    "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
)

# Headless Chrome configuration: disable image loading (content setting 2 =
# block) to speed up page loads, and hide the automation banner/switches.
option = ChromeOptions()
option.headless = True
option.add_experimental_option(
    'prefs',
    {'profile.default_content_setting_values': {'images': 2}},
)
option.add_experimental_option('excludeSwitches', ['enable-automation'])
option.add_argument('--ignore-certificate-errors')

print('浏览器加载成功')

# District name -> ke.com second-hand listing URL for Kunming.
# All URLs share one prefix; only the district slug differs.
_BASE = 'https://km.ke.com/ershoufang/'
area_url = {
    name: _BASE + slug + '/'
    for name, slug in (
        ('五华', 'wuhua'),
        ('盘龙', 'panlong'),
        ('官渡', 'guandu'),
        ('西山', 'xishan23'),
        ('呈贡', 'chenggong'),
        ('晋宁', 'jinning'),
        ('嵩明', 'songming'),
        ('东川', 'dongchuan'),
        ('富民', 'fumin'),
        ('宜良', 'yiliang'),
        ('石林', 'shilin'),
        ('寻甸', 'xundian'),
        ('禄劝', 'luquan1'),
        ('安宁', 'anning'),
    )
}

if __name__ == '__main__':
    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
    # For every district -> every room-count filter (l1..l6) -> every result
    # page: scrape each listing card, batch-insert into MySQL and push to
    # Crawlab via save_item.
    for area_name, list_url in area_url.items():
        print('选择房屋')
        for room_type in range(1, 7):
            # Pre-bind so the except block can reference it even when the
            # failure happens before the page loop starts.
            page_no = 0
            try:
                attr = 'l' + str(room_type) + '/'
                # `url` always holds the URL we last navigated to; the except
                # block compares it against driver.current_url to detect an
                # anti-crawler redirect. (The original code clobbered it with
                # each listing's href, breaking that check.)
                url = list_url + attr
                driver.get(url)
                time.sleep(3)
                # The last pager link is either the page count or '下一页';
                # in the latter case the count is the second-to-last link.
                page = driver.find_element_by_xpath(
                    '//*[@id="beike"]/div[1]/div[4]/div[1]/div[5]/div[2]/div/a[last()]').text
                if '下一页' in page:
                    page = driver.find_element_by_xpath(
                        '//*[@id="beike"]/div[1]/div[4]/div[1]/div[5]/div[2]/div/a[last()-1]').text
                page = int(page)
                for page_no in range(1, page + 1):
                    url = list_url + 'pg' + str(page_no) + attr
                    driver.get(url)
                    cards = driver.find_elements_by_xpath('//li[@class="clear"]/div')
                    ls = []  # rows for this page, inserted in one executemany
                    for card in cards:
                        title = card.find_element_by_xpath('./div[@class="title"]/a').text
                        href = card.find_element_by_xpath('./div[@class="title"]/a').get_attribute('href')
                        community = card.find_element_by_xpath(
                            './div[@class="address"]/div[@class="flood"]/div/a').text
                        other = card.find_element_by_xpath(
                            './div[@class="address"]/div[@class="houseInfo"]').text
                        # followInfo text looks like '<n>人关注 / <t>发布';
                        # find_element raises if missing, so no truthiness
                        # check is needed (the old `if` was dead code).
                        follow_text = card.find_element_by_xpath(
                            './div[@class="address"]/div[@class="followInfo"]').text
                        follow_parts = follow_text.split('/')
                        followers = follow_parts[0]
                        pub_time = follow_parts[1]
                        total_price = card.find_element_by_xpath(
                            './div[@class="address"]/div[@class="priceInfo"]/div[1]/span').text
                        unit_price = card.find_element_by_xpath(
                            './div[@class="address"]/div[@class="priceInfo"]/div[2]/span').text
                        # Single timestamp so the DB row and the Crawlab item
                        # agree (previously datetime.now() was called twice).
                        now = datetime.now()
                        ls.append((list_url, href, title, area_name, community, other,
                                   followers, pub_time, total_price, unit_price, now))
                        save_item({'url_list': list_url, 'url': href, 'title': title,
                                   'area': area_name, 'community': community, 'other': other,
                                   'followers': followers, 'pub_time': pub_time,
                                   'total_price': total_price, 'unit_price': unit_price,
                                   'create_time': now})
                    # Fixed: the page number is printed here (the original
                    # printed the last WebElement due to loop-var shadowing).
                    print('存储' + str(area_name) + '区域面积区间第' + str(room_type)
                          + '面积类型' + '第' + str(page_no) + '页' + '成功')
                    cursor.executemany(sql, ls)
                    conn.commit()
            except Exception as err:
                if driver.current_url != url:
                    # Landed somewhere other than the page we requested:
                    # treat as an anti-crawler redirect and restart Chrome.
                    # quit() (not close()) fully releases the old session.
                    driver.quit()
                    print('被重定向了')
                    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
                else:
                    # A listing card is missing an expected element; log the
                    # error and undo any partial work for this page.
                    print('数据元素缺失，在' + str(area_name) + '区域面积区间第' + str(room_type)
                          + '面积类型' + '第' + str(page_no) + '页' + '报错：' + repr(err))
                    conn.rollback()
            



# Release DB resources. The cursor must be closed BEFORE its parent
# connection (the original closed the connection first, so cursor.close()
# ran against an already-closed connection).
cursor.close()
conn.close()