import  time,pymysql,pandas,re
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from datetime import datetime
from  urllib.parse import quote_plus
# from crawlab import save_item
# from utils.login import login_58
from utils.login import login_58

# MySQL connection settings for the crawl database.
# NOTE(review): credentials and host are hard-coded here — consider env vars.
settings = { 'host':'172.16.10.201','user':'zy001','port':3306,'password':'zy@123','database':'crawl-bk','charset' : 'utf8'}
conn = pymysql.connect(**settings)
print('##################################################数据库连接成功##################################################')
cursor = conn.cursor()
print('##################################################获取游标成功##################################################')

# Insert template for listing URLs harvested by get_list_url().
sql = "insert into 58_rent_all(url,list_url,region,types,create_time) values(%s,%s,%s,%s,%s)"

option = ChromeOptions()


# Chrome content preference: 2 == block image loading (faster page loads).
prefs = {
        'profile.default_content_setting_values': {
            'images': 2
        }
    }
# option.headless = True
option.add_experimental_option('prefs', prefs)
# Hide the "controlled by automated software" banner / automation switch,
# which 58.com's anti-bot checks look for.
option.add_experimental_option('excludeSwitches', ['enable-automation'])
option.add_argument('--ignore-certificate-errors')
# All traffic goes through this HTTP proxy — TODO confirm it is still alive.
option.add_argument("--proxy-server=http://121.37.240.200:8888")
print('##################################################浏览器加载成功##################################################')

# Start URLs of the commercial-shop rental ("shangpucz") listing pages,
# keyed by district/county name (Kunming districts first, then Dali counties).
area_url = {
    '五华': 'https://km.58.com/wuhua/shangpucz/?from=zf&PGTID=0d306b35-0021-dbbb-6412-7b88523ada48&ClickID=111',
    '盘龙': 'https://km.58.com/panlong/shangpucz/?from=zf&PGTID=0d306b35-0021-df7b-a710-0e64e9dc6235&ClickID=4',
    '官渡': 'https://km.58.com/guandu/shangpucz/?from=zf&PGTID=0d306b35-0021-daca-6ec2-951c59ac10e2&ClickID=4',
    '西山': 'https://km.58.com/xishan/shangpucz/?from=zf&PGTID=0d306b35-0021-d981-e419-c2705a9d2d96&ClickID=3',
    '呈贡': 'https://km.58.com/chenggong/shangpucz/?from=zf&PGTID=0d306b35-0021-dfa5-d6b1-ee0678ce6876&ClickID=4',
    '大理市': 'https://dali.58.com/dalishi/shangpucz/?from=zf&PGTID=0d306b35-0021-dca8-3ce2-7110c7f77780&ClickID=4',
    '祥云县': 'https://dali.58.com/xiangyunxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d0c2-10ab-906e385c1c18&ClickID=6',
    '弥渡': 'https://dali.58.com/miduxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d9b8-106d-bcf084251cd7&ClickID=37',
    '宾川县': 'https://dali.58.com/binchuanxian/shangpucz/?from=zf&PGTID=0d306b35-0021-ddef-beac-53794ce3f169&ClickID=3',
    '巍山县': 'https://dali.58.com/weishanzizhi/shangpucz/?from=zf&PGTID=0d306b35-0021-d30c-9b33-363fa69995b6&ClickID=3',
    '鹤庆县': 'https://dali.58.com/heqingxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d9f9-2541-4bcd4139424d&ClickID=3',
    '南涧县': 'https://dali.58.com/nanjianzizhi/shangpucz/?from=zf&PGTID=0d306b35-0021-db0a-5cdf-54c173a5d69f&ClickID=3',
    '剑川县': 'https://dali.58.com/jianchuanxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d3c5-6872-21f003d27937&ClickID=3',
    '漾濞县': 'https://dali.58.com/yangxianzizhi/shangpucz/?from=zf&PGTID=0d306b35-0021-d08d-ff5e-50c3519b206a&ClickID=3',
    '永平县': 'https://dali.58.com/yongpingxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d312-c286-25ee2defec3a&ClickID=5',
    '云龙县': 'https://dali.58.com/yunlongxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d397-38a8-07698257df2a&ClickID=3',
    '洱源县': 'https://dali.58.com/eryuanxian/shangpucz/?from=zf&PGTID=0d306b35-0021-d2b4-623f-534f7f60b43c&ClickID=9'
}

# Redirect route: sample of the ad/apartment-brand detail URL that 58.com
# sometimes redirects listings to.  NOTE(review): not referenced anywhere in
# the visible code — presumably kept for manual testing; confirm before removal.
redirect  = '''https://km.58.com/pinpaigongyu/42056184631314x.shtml?adtype=1&ClickID=2&slotid=1000856&productid=10006&tid=f265c81a-c26f-46cb-b8c0-fe859027fb20&extParam=%7B%22ppgy_stats%22%3A%7B%22pageSource%22%3A%22%22%2C%22resource%22%3A%2258%22%2C%22abVersion%22%3A%22%22%2C%22launchid%22%3A%22%22%7D%7D&bizresource=0&PGTID=0d3090a7-0021-f553-48cc-e58a3f6d84de'''


# Collect listing-page links for every area and store them in 58_rent_all.
def get_list_url(area_url, types):
    """Paginate through each area's listing pages and insert every detail URL.

    Fixes over the previous version: the page-count loop variable is no
    longer shadowed by the inner anchor loop; the dead ``while not
    page_total`` retry was removed (``find_element_by_xpath`` raises rather
    than returning a falsy value); ``list_url`` is initialised per area so
    the crash-recovery handler can never hit a NameError; the bare
    ``except`` clauses are narrowed to ``except Exception``.

    Args:
        area_url: dict mapping region name -> listing start URL.
        types:    category label stored with each row (e.g. '商铺').

    Returns:
        Status banner string naming the last region processed.
    """
    print('##################################################获取列表页链接开始##################################################')
    # driver = webdriver.Chrome(executable_path='/Users/lidong/Desktop/zhouyu/项目/crawl-lidong/crawlab_env/bin/chromedriver', options=option)
    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
    driver = login_58(driver)

    region = ''  # keeps the return statement valid even for an empty dict
    for region, start_url in area_url.items():
        driver.get(start_url)
        time.sleep(3)
        list_url = start_url  # recovery target until the first page is read

        # Read the total page count from the pager; a missing pager means a
        # single page of results.
        try:
            page_total = driver.find_element_by_xpath('//div[@class="pager"]/a[last()]').text
            if '下一页' in page_total:
                # Last anchor is the "next page" button; the real count is the
                # anchor just before it.
                page_total = driver.find_element_by_xpath('//div[@class="pager"]/a[last()-1]').text
        except Exception:
            page_total = 1

        for _page in range(int(page_total)):
            try:
                anchors = driver.find_elements_by_xpath('//*[@id="house-list-wrap"]/li/a')
                list_url = driver.current_url
                rows = [
                    (a.get_attribute('href'), list_url, region, types, datetime.now())
                    for a in anchors
                ]
                cursor.executemany(sql, rows)
                conn.commit()
                # Advance to the next result page.
                driver.find_element_by_xpath('//div[@class="pager"]/a[last()]').click()
            except Exception as e:
                time.sleep(5)
                print(e)
                # Probably an anti-bot redirect: restart the browser and go
                # back to the listing page we were scraping.
                if list_url != driver.current_url:
                    driver.close()
                    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
                    driver.get(list_url)
                continue
    driver.close()
    print('##################################################获取列表页链接结束##################################################')
    return '##################################################{region}页面连接抓取成功##################################################'.format(region=region)


# Deduplicate a table and write the result into a new one.
def remove_duplicate(s_table, a_table):
    """Copy *s_table* into *a_table*, dropping rows with a duplicate ``url``.

    Reads the whole source table through the module-level ``conn``, keeps the
    first occurrence of each url, and appends the result to *a_table* via
    SQLAlchemy. Uses the public ``DataFrame.to_sql`` API instead of the
    private/deprecated ``pandas.io.sql.to_sql``.

    Args:
        s_table: source table name (trusted caller input — it is interpolated
                 into SQL, so never pass user-supplied text).
        a_table: destination table name.

    Returns:
        Status banner string describing the operation.
    """
    print('##################################################去重并生成新表##################################################')
    query = "select * from  " + s_table
    data = pandas.read_sql(query, conn)
    deduped = data.drop_duplicates(subset=["url"], keep="first")
    # NOTE(review): destination is the `crawl` schema, not `crawl-bk` that
    # the module-level conn uses — presumably intentional; confirm.
    deduped.to_sql(
        a_table,
        con='mysql+pymysql://zy001:' + quote_plus("zy@123") + '@172.16.10.201:3306/crawl?charset=utf8mb4',
        index=False,
        if_exists='append',
    )
    info = '##################################################{s_table}表数据已经去重并生成新表:{a_table}##################################################'.format(s_table=s_table,a_table=a_table )
    print(info)
    return info
    

def save_datail_date(url, driver, table):
    """Scrape one detail page already loaded in *driver* and update its row.

    Extracts house type, location, area, management type and rent from the
    page and UPDATEs the row identified by *url* in *table*.  On any scraping
    failure the exception text is stored in 58_rent_all.exception instead.

    All values are now bound with pymysql ``%s`` placeholders rather than
    string-formatted into the SQL, so quotes in scraped page text can no
    longer break the statement or inject SQL (the table name is still
    interpolated — it must come from trusted code).

    Args:
        url:    detail-page URL, primary lookup key of the row.
        driver: Selenium WebDriver with the detail page already loaded.
        table:  name of the table to update (trusted input).
    """
    try:
        # Page fields are "label：value"; keep the value part after the
        # full-width colon.
        houses = driver.find_element_by_xpath('//div[@class="house_basic_title_info_2"]/p[1]').text.split('：')[1]
        location = driver.find_element_by_xpath('//div[@class="house_basic_title_info_2"]/p[2]').text.split('：')[1]
        area = driver.find_element_by_xpath('//p[@class="house_basic_title_info"]/span[1]').text
        area = re.findall(r'\d+', area)[0]  # keep the leading number only
        unit_of_area = '㎡'
        manage_type = driver.find_element_by_xpath('//p[@class="house_basic_title_info"]/span[2]').text
        rental_expenses = driver.find_element_by_xpath('//p[@class="house_basic_title_money"]/span[1]').text
        unit_of_expenses = driver.find_element_by_xpath('//p[@class="house_basic_title_money"]/span[2]').text

        update_sql = (
            'UPDATE {table} set houses =%s,location = %s,'
            'area = %s,unit_of_area = %s,manage_type = %s, rental_expenses=%s,'
            'unit_of_expenses=%s where url=%s'
        ).format(table=table)
        cursor.execute(update_sql, (
            houses, location, area, unit_of_area,
            manage_type, rental_expenses, unit_of_expenses, url,
        ))
        conn.commit()
    except Exception as e:
        e = str(e)
        e = e.replace('"', "'")  # kept for parity with previously stored rows
        update_s = 'UPDATE 58_rent_all  set exception =%s where url =%s'
        print(update_s)
        cursor.execute(update_s, (e, url))
        conn.commit()
        print(e)

def get_datail_date(table):
    """Visit every shop listing in *table* that still lacks detail data.

    Selects all rows of type '商铺' whose ``houses`` column is NULL, loads
    each URL in a Chrome instance and delegates extraction/storage to
    :func:`save_datail_date`.

    Fixes over the previous version: the *table* argument is now actually
    used in the SELECT (it was silently ignored and ``58_rent_all`` was
    hard-coded — behavior is identical for the current caller), and the
    browser is shut down with ``driver.quit()`` in a ``finally`` block so
    the chromedriver process cannot leak if the loop dies.

    Args:
        table: table to read URLs from and write details to (trusted input —
               interpolated into SQL).

    Returns:
        Status banner string naming the processed table.
    """
    print('##################################################开始获取并存储详细信息##################################################')
    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
    query = 'select url from ' + table + '  where types="商铺" and  houses is null ORDER by id desc'
    cursor.execute(query)
    data = cursor.fetchall()
    try:
        for row in data:
            try:
                url = row[0]
                driver.get(url)
                time.sleep(1)  # let the page render before scraping
                save_datail_date(url, driver, table)
            except Exception as e:
                print(e)
                # Long back-off: presumably waiting out rate limiting / a ban.
                time.sleep(300)
    finally:
        # quit() also terminates the chromedriver process (close() does not).
        driver.quit()
    info = '##################################################{table}表详情数据已经完成获取并完成存储##################################################'.format(table=table )
    print(info)
    return info



        



if __name__ == '__main__':
    # Alternative pipeline stages, run manually as needed:
    # get_list_url(area_url, types='商铺')
    # remove_duplicate(s_table='58_rent_all',a_table = '58_rent_all_bak')
    try:
        get_datail_date(table='58_rent_all')
    finally:
        # Always release the module-level DB resources, even on a crash
        # (previously these were commented out and the connection leaked).
        cursor.close()
        conn.close()
