import  time,pymysql,pandas,re
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from datetime import datetime
from  urllib.parse import quote_plus
# from crawlab import save_item
settings = { 'host':'172.16.10.201','user':'zy001','port':3306,'password':'zy@123','database':'crawl-bk','charset' : 'utf8'}
conn = pymysql.connect(**settings)
print('##################################################数据库连接成功##################################################')
cursor = conn.cursor()
print('##################################################获取游标成功##################################################')

sql = "insert into 58_rent_all(url,list_url,region,types,create_time) values(%s,%s,%s,%s,%s)"

option = ChromeOptions()
prefs = {
        'profile.default_content_setting_values': {
            'images': 2
        }
    }
# option.headless = True
option.add_experimental_option('prefs', prefs)
option.add_experimental_option('excludeSwitches', ['enable-automation'])
option.add_argument('--ignore-certificate-errors')
option.add_argument("--proxy-server=http://121.37.254.208:8888")
print('##################################################浏览器加载成功##################################################')

# Region name -> listing start URL for 58.com "zhaozu" (rental) searches,
# covering Kunming districts and Dali counties.
area_url = {
    '五华': 'https://km.58.com/wuhua/zhaozu/pve_1092_0/?from=zf&PGTID=0d30000d-0021-d849-ca7b-f186a75cab6e&ClickID=103',
    '盘龙': 'https://km.58.com/panlong/zhaozu/pve_1092_0/?from=zf&PGTID=0d30000d-0021-d17b-388b-267cfe2427d6&ClickID=87',
    '官渡': 'https://km.58.com/guandu/zhaozu/pve_1092_0/?from=zf&PGTID=0d30000d-0021-dfa3-2bff-e85f90ab1475&ClickID=9',
    '西山': 'https://km.58.com/xishan/zhaozu/pve_1092_0/?from=zf&PGTID=0d30000d-0021-d83b-e746-c96d50b2f177&ClickID=5',
    '呈贡': 'https://km.58.com/chenggong/zhaozu/pve_1092_0/?from=zf&PGTID=0d30000d-0021-dcd2-c89f-082df9b8df0f&ClickID=3',
    '大理市': 'https://dali.58.com/zhaozu/?PGTID=0d30000d-0021-d023-8095-7d9c22036be2&ClickID=8',
    '祥云县': 'https://dali.58.com/xiangyunxian/zhaozu/?PGTID=0d30000d-0021-d8f8-1006-a57f4f4ff8c7&ClickID=38',
    '宾川县': 'https://dali.58.com/binchuanxian/zhaozu/?PGTID=0d30000d-0021-d84d-422f-86956d97a3bf&ClickID=2',
    '鹤庆县': 'https://dali.58.com/heqingxian/zhaozu/?PGTID=0d30000d-0021-d919-2582-1d8d08254d11&ClickID=2',
    '洱源县': 'https://dali.58.com/eryuanxian/zhaozu/?PGTID=0d30000d-0021-d3da-eb14-a525b39b571c&ClickID=3',
}

# Sample anti-crawl redirect URL (kept for reference; not used by the code
# below — the redirect is detected by comparing driver.current_url instead).
redirect  = '''https://km.58.com/pinpaigongyu/42056184631314x.shtml?adtype=1&ClickID=2&slotid=1000856&productid=10006&tid=f265c81a-c26f-46cb-b8c0-fe859027fb20&extParam=%7B%22ppgy_stats%22%3A%7B%22pageSource%22%3A%22%22%2C%22resource%22%3A%2258%22%2C%22abVersion%22%3A%22%22%2C%22launchid%22%3A%22%22%7D%7D&bizresource=0&PGTID=0d3090a7-0021-f553-48cc-e58a3f6d84de'''


# 获取列表页链接
def get_list_url(area_url, types):
    """Crawl every region's listing pages and insert each item URL.

    Args:
        area_url: dict mapping region name -> listing start URL.
        types: category label stored with every row (e.g. '写字楼').

    Returns:
        A status message naming the last region processed.

    Side effects:
        Inserts rows through the module-level ``cursor``/``conn`` using the
        module-level ``sql`` statement; drives a Chrome browser configured
        by the module-level ``option``.
    """
    print('##################################################获取列表页链接开始##################################################')
    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
    region = ''  # avoid NameError in the final message if area_url is empty

    for region, start_url in area_url.items():
        driver.get(start_url)
        time.sleep(3)

        # Determine the number of result pages for this region. If the page
        # has no pager (a single page of results) find_element raises and we
        # fall back to 1. The original retry-while loop was dead code:
        # find_element_by_xpath raises rather than returning a falsy value.
        try:
            page_total = driver.find_element_by_xpath('//div[@class="pager"]/a[last()]').text
            if '下一页' in page_total:
                # The last anchor is the "next page" button; the page count
                # is the anchor just before it.
                page_total = driver.find_element_by_xpath('//div[@class="pager"]/a[last()-1]').text
        except Exception:
            page_total = 1

        # Fallback target for the recovery path below, so list_url is always
        # bound even when the failure happens before scraping starts.
        list_url = start_url
        for _ in range(int(page_total)):
            try:
                list_url = driver.current_url
                anchors = driver.find_elements_by_xpath('//*[@id="house-list-wrap"]/li/a')
                rows = [
                    (a.get_attribute('href'), list_url, region, types, datetime.now())
                    for a in anchors
                ]
                cursor.executemany(sql, rows)
                conn.commit()
                # Advance to the next results page.
                driver.find_element_by_xpath('//div[@class="pager"]/a[last()]').click()
            except Exception as e:
                print(e)
                # An anti-crawl redirect changes the URL; restart the browser
                # and return to the page we were scraping.
                if list_url != driver.current_url:
                    driver.close()
                    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
                    driver.get(list_url)
                continue
    driver.close()
    print('##################################################获取列表页链接结束##################################################')
    return '##################################################{region}页面连接抓取成功##################################################'.format(region=region)


# 数据去重并生成新表
def remove_duplicate(s_table, a_table):
    """De-duplicate *s_table* by ``url`` and append the result to *a_table*.

    Args:
        s_table: source table name (read through the module-level ``conn``).
        a_table: destination table name (written via SQLAlchemy URL below).

    Returns:
        The status message that is also printed.
    """
    print('##################################################去重并生成新表##################################################')
    # NOTE(review): the table name is interpolated into the SQL string —
    # only pass trusted identifiers.
    query = "select * from  " + s_table
    frame = pandas.read_sql(query, conn)
    deduped = frame.drop_duplicates(subset=["url"], keep="first")
    # Use the public DataFrame.to_sql API instead of the internal
    # pandas.io.sql.to_sql helper.
    # NOTE(review): this writes to database 'crawl' while the module
    # connection uses 'crawl-bk' — confirm that is intentional.
    deduped.to_sql(
        a_table,
        con='mysql+pymysql://zy001:' + quote_plus("zy@123") + '@172.16.10.201:3306/crawl?charset=utf8mb4',
        index=False,
        if_exists='append',
    )
    info = '##################################################{s_table}表数据已经去重并生成新表:{a_table}##################################################'.format(s_table=s_table,a_table=a_table )
    print(info)
    return info
    

def save_datail_date(url, driver, table):
    """Parse the detail page currently loaded in *driver* and persist the
    extracted fields into *table* for the row whose url matches *url*.

    On any parsing/DB error the exception text is recorded in the
    ``exception`` column of 58_rent_all instead of being raised.

    Args:
        url: listing detail URL identifying the row to update.
        driver: Selenium WebDriver already navigated to the detail page.
        table: destination table name (trusted identifier).
    """
    # Map the Chinese field labels shown on the page to our column names.
    field_map = {'楼盘': 'houses', '地址': 'location', '类型': 'office_build_level',
                 '楼层': 'layer_info', '装修': 'fitment',
                 '面积': 'area', '月租': 'rental_expenses'}
    raw = {}
    data = {}
    try:
        for item in driver.find_elements_by_xpath('//ul[@class="general-item-wrap"]/li'):
            label = item.find_element_by_xpath('./span[1]').text
            value = item.find_element_by_xpath('./span[2]').text
            raw[label] = value
        for label, value in raw.items():
            for key, column in field_map.items():
                if key in label:
                    data[column] = value

        # Raises TypeError (caught below and logged per-row) when the page
        # had no 面积 (area) field — same net effect as the original code.
        data['area'] = re.findall(r'\d+', data.get('area'))[0]
        data['unit_of_area'] = '㎡'

        rent = data.get('rental_expenses')
        if rent != 'null':
            # Split "3000元/月"-style strings into amount and unit.
            # The original split on the *last digit character*, which breaks
            # whenever that digit occurs earlier in the number (e.g. "3000"
            # -> amount '30', unit ''); a regex anchored on the leading
            # digit run is unambiguous.
            match = re.match(r'(\d+)(.*)', rent)
            data['rental_expenses'] = match.group(1)
            data['unit_of_expenses'] = match.group(2)

        # Parameterized UPDATE: scraped text containing quotes can no longer
        # break the statement (the old string-formatted SQL could), and
        # missing fields are stored as NULL instead of the string "None".
        # Only the table name (a trusted identifier) is interpolated.
        update_sql = (
            'UPDATE {table} set houses =%s,location = %s,'
            'office_build_level = %s,layer_info = %s,fitment = %s, area=%s,'
            'unit_of_area = %s,rental_expenses=%s,unit_of_expenses=%s'
            ' where url =%s'
        ).format(table=table)
        params = (
            data.get('houses'),
            data.get('location'),
            data.get('office_build_level'),
            data.get('layer_info'),
            data.get('fitment'),
            data.get('area'),
            data.get('unit_of_area'),
            data.get('rental_expenses'),
            data.get('unit_of_expenses'),
            url,
        )
        print('开始存储')
        print(update_sql)
        cursor.execute(update_sql, params)
        conn.commit()
        print('存储成功')
    except Exception as e:
        # Record the failure next to the row so it can be inspected/retried.
        # Parameterized so quotes in the message cannot break the SQL (the
        # original tried to sanitize with a no-op str.replace('"', '"')).
        err = str(e)
        update_s = 'UPDATE 58_rent_all  set exception =%s where url =%s'
        cursor.execute(update_s, (err, url))
        conn.commit()
        print(err)



def get_datail_date(table):
    """Fetch detail data for every '写字楼' row in *table* that has no area
    yet, and store the parsed fields via save_datail_date().

    Args:
        table: table name to read pending URLs from and write results to
            (trusted identifier — it is interpolated into the SQL).

    Returns:
        The completion message that is also printed.
    """
    print('##################################################开始获取并存储详细信息##################################################')
    # 'options=' replaces the deprecated 'chrome_options=' keyword and
    # matches every other webdriver.Chrome call in this module.
    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
    sql  = 'select url from '+table +' where types="写字楼" and area is null ORDER BY id DESC'
    cursor.execute(sql)
    data = cursor.fetchall()
    for i in data:
        try:
            url = i[0]
            driver.get(url)
            time.sleep(1)
            # A differing current_url means we were redirected (captcha /
            # anti-crawl page); keep clicking its submit button until the
            # click fails or we land back on the target URL.
            while(url != driver.current_url):
                try:
                    driver.find_element_by_xpath('//*[@id="btnSubmit"]').click()
                    print('点击了')
                except Exception as e:
                    print(e)
                    break
            save_datail_date(url, driver,table)
        except Exception as e:
            print(e)
            # Redirect detected: close the browser, back off for 300 s, then
            # retry this URL once with a fresh browser instance.
            if driver.current_url != url:
                driver.close()
                time.sleep(300)
                driver = webdriver.Chrome(executable_path='chromedriver', options=option)
                driver.get(url)
                time.sleep(3)
                save_datail_date(url, driver,table)
    info= '##################################################{table}表详情数据已经完成获取并完成存储##################################################'.format(table=table )
    print(info)
    driver.close()
    return info



        



if __name__ == '__main__':
    # Pipeline stages — run one at a time by (un)commenting:
    #   1) collect listing URLs, 2) de-duplicate into a new table,
    #   3) fetch and store detail data for each listing.
    # get_list_url(area_url, types='写字楼')
    # remove_duplicate(s_table='58_rent_all',a_table = '58_rent_all_total_c')
    get_datail_date(table='58_rent_all')
    # conn.close()
    # cursor.close()