#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
@author: caijj
@version: V1.0
@file: zg_sz_new_house_info.py
@time: 2022/02/11
"""
import sys

sys.path.append('E:\python_project')
import csv
import multiprocessing
import os
import random
import re
import uuid
from time import sleep
import pandas as pd
import requests
# from fake_useragent import UserAgent
from lxml import etree
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from sqlalchemy import create_engine
from sqlalchemy.exc import ResourceClosedError
from common.globalvar import GlobalVar
from config.conf import cm
from tools import timeutil, image_orc
from tools.database_tool import DataBaseTool
from tools.decorator import update_flag
from tools.DingTalk_Base import DingTalk_Disaster
from pyDes import *
from jsonpath import jsonpath
from tools.timeutil import timestamp
import base64

# DingTalk notifier used by every scraper below to alert on update failures.
ding = DingTalk_Disaster()
# Per-city templates for the target DB table name and the local CSV file name;
# callers fill in {city} before use.
xf_table_root_directory = 'mega_{city}_xf_timely_deal_info'
xf_file_root_directory = 'mega_{city}_xf_timely_deal_info.csv'


class NewHouseTimelyDealInfo():

    @staticmethod
    def firefox_enter_target_page(url='https://www.creprice.cn/urban/su.html'):
        """Open *url* in a headless Firefox, maximize it, and return the driver."""
        options = Options()
        options.headless = True
        driver = webdriver.Firefox(options=options)
        driver.maximize_window()
        driver.get(url)
        return driver

    @staticmethod
    def chrome_enter_target_page(url):
        """Open *url* in a headless Chrome (GPU disabled), maximize it, and return the driver."""
        opts = webdriver.ChromeOptions()
        for flag in ('--headless', '--disable-gpu'):
            opts.add_argument(flag)
        driver = webdriver.Chrome(options=opts)
        driver.maximize_window()
        driver.get(url)
        return driver

    @staticmethod
    def quit(web_driver):  # 退出
        if web_driver is not None:
            web_driver.quit()
            sleep(5)

    @staticmethod
    def get_table_content(tree, table_id, index=0):
        """Return the *index*-th node matching XPath *table_id* serialized as an HTML str."""
        matches = tree.xpath(table_id)
        # etree.tostring yields bytes; decode to str so pd.read_html can consume it
        return etree.tostring(matches[index], encoding='utf8').decode()

    """
    上海-实时成交数据
    """

    def sh_get_timely_deal_info(self, create_time, file_name, table_name,
                                url='http://www.fangdi.com.cn/new_house/new_house_district_bargain.html',
                                count=0):
        """Scrape Shanghai district-level real-time deal data.

        Appends one row per district to *file_name* (CSV, no header) and then
        loads the file into *table_name*.  On failure the browser is closed
        and the scrape is retried up to 3 times before a DingTalk alert.

        :param create_time: stat date / creation timestamp written to each row
        :param file_name: local CSV file the rows are appended to
        :param table_name: destination DB table
        :param url: source page (JS-rendered district table)
        :param count: internal retry counter; callers leave at 0
        """
        web_driver = None
        try:
            web_driver = self.firefox_enter_target_page(url)
            sleep(15)  # the district table is rendered by JS; give it time to load
            tree = etree.HTML(web_driver.page_source)
            ul_base = "//div[@class='today_trade_content']/div[@id='districtBargain']/ul"
            data_parent_ele_list = tree.xpath(ul_base)
            timely_deal_info = []
            for index in range(len(data_parent_ele_list)):
                row = ul_base + "[" + str(index + 1) + "]"
                city_region = tree.xpath(row + "//a")[0].text
                # li[3] carries the residence figures, li[4] the overall totals;
                # each <li> holds (count, area) in its two <i> children
                residence_deal_count = tree.xpath(row + "/li[3]//i")[0].text
                residence_deal_area_count = tree.xpath(row + "/li[3]//i")[1].text
                total_deal_count = tree.xpath(row + "/li[4]//i")[0].text
                total_deal_area_count = tree.xpath(row + "/li[4]//i")[1].text
                timely_deal_info.append((uuid.uuid4(), create_time, city_region, total_deal_count,
                                         float(total_deal_area_count),
                                         residence_deal_count, float(residence_deal_area_count), create_time))
            df = pd.DataFrame(data=timely_deal_info,
                              columns=['id', 'stat_date', 'city_region', 'total_deal_count', 'total_deal_area_count',
                                       'residence_deal_count', 'residence_deal_area_count',
                                       'create_time'])
            self.check_file_is_exists(file_name)
            # append without header: the CSV keeps accumulating daily snapshots
            df.to_csv(file_name, mode="a",
                      encoding="utf-8",
                      header=False,
                      index=False)
            self.insert_timely_deal_info(file_name, table_name)
            self.quit(web_driver)  # fix: the driver used to leak on the success path
        except Exception as e:
            count += 1
            print(e)
            self.quit(web_driver)
            if count < 3:
                return self.sh_get_timely_deal_info(create_time, file_name, table_name, url, count)
            else:
                ding.send_msg('上海实时交易数据更新失败')

    """
    常州-实时成交数据
    """

    def changzhou_get_timely_deal_info(self, create_time, file_name=None, table_name=None,
                                       url='http://gs.czfdc.com.cn/newxgs/index.aspx', index=0,
                                       residence_deal_count_list=None,
                                       residence_deal_area_count_list=None,
                                       count=0):
        """Scrape Changzhou real-time residential deal data per district.

        Each table row is one project; its district is read from a detail
        popup window.  Counts/areas are accumulated per district.  *index*
        and the accumulator lists are threaded through the recursive retry so
        a retry resumes where the previous attempt stopped.  Retries at most
        3 times (new, backward-compatible *count* parameter), then alerts via
        DingTalk.
        """
        # fix: the accumulators used to be mutable default arguments ([]),
        # so state leaked between independent top-level calls
        if residence_deal_count_list is None:
            residence_deal_count_list = []
        if residence_deal_area_count_list is None:
            residence_deal_area_count_list = []
        web_driver = self.chrome_enter_target_page(url)
        sleep(8)
        tree = etree.HTML(web_driver.page_source)
        tr_list = tree.xpath("//div[@class='a_date_left_02']//table[@class='a_date_left_01_mid_02']//tr")
        if len(tr_list) == 0:  # no data published yet
            self.quit(web_driver)  # fix: the driver used to leak on this early return
            return
        current_window = web_driver.current_window_handle  # handle of the listing window
        city_region_list = tree.xpath("//select[@id='area']/option/text()")[1:]
        if len(residence_deal_count_list) == 0:
            residence_deal_count_list = [0 for _ in city_region_list]
            residence_deal_area_count_list = [0 for _ in city_region_list]
        try:
            row_base = "//div[@class='a_date_left_02']//table[@class='a_date_left_01_mid_02']//tr["
            for tr in tr_list[index:]:
                row = row_base + str(tr_list.index(tr) + 1) + "]"
                residence_deal = web_driver.find_element('xpath', row + "/td[1]").text
                if residence_deal.strip() == '':  # blank project name marks the end of data
                    break
                residence_deal_count = web_driver.find_element('xpath', row + "/td[4]").text
                residence_deal_area_count = web_driver.find_element('xpath', row + "/td[5]").text
                # open the project's detail popup to read which district it belongs to
                web_driver.find_element('xpath', row + "/td[1]/span").click()
                sleep(10)
                for window in web_driver.window_handles:
                    if window != current_window:
                        web_driver.switch_to.window(window)
                        sleep(5)
                        city_region = \
                            web_driver.find_element('xpath', "//td[contains(text(),'所属行政区：')]").text.split('：')[1]
                        web_driver.close()
                        web_driver.switch_to.window(current_window)
                region_index = city_region_list.index(city_region)
                residence_deal_count_list[region_index] += int(residence_deal_count)
                residence_deal_area_count_list[region_index] += float(residence_deal_area_count)
                index += 1
            df = pd.DataFrame(columns=['id', 'stat_date', 'city_region', 'total_deal_count', 'total_deal_area_count',
                                       'residence_deal_count', 'residence_deal_area_count',
                                       'create_time'])
            df['city_region'] = city_region_list
            # only residential data is published, so totals equal residence figures
            df['total_deal_count'] = residence_deal_count_list
            df['total_deal_area_count'] = residence_deal_area_count_list
            df['residence_deal_count'] = residence_deal_count_list
            df['residence_deal_area_count'] = residence_deal_area_count_list
            df['create_time'] = create_time
            df['id'] = df.apply(lambda _: uuid.uuid4(), axis=1)
            df['stat_date'] = create_time
            self.check_file_is_exists(file_name)
            df.to_csv(file_name, mode="a",
                      encoding="utf-8",
                      header=False,
                      index=False)
            self.insert_timely_deal_info(file_name, table_name)
            self.quit(web_driver)
        except Exception as e:
            count += 1
            print(e)
            self.quit(web_driver)
            if count < 3:
                # fix: the retry used to recurse without bound; resume from the
                # row we stopped at, keeping the accumulated totals
                return self.changzhou_get_timely_deal_info(create_time, file_name, table_name, url, index,
                                                           residence_deal_count_list,
                                                           residence_deal_area_count_list, count)
            ding.send_msg('常州实时交易数据更新失败')

    """
    杭州-实时成交数据
    """

    def hz_get_timely_deal_info(self, create_time, file_name, table_name,
                                url='http://fgj.hangzhou.gov.cn/col/col1229440802/index.html'):
        """Scrape Hangzhou daily deal totals (new homes + second-hand) per district.

        New-home (con1) and second-hand (con3) figures are read from two
        parallel row lists inside an iframe and summed element-wise.  Rows
        are appended to *file_name* and loaded into *table_name*; failures
        alert via DingTalk.
        """
        web_driver = None
        try:
            web_driver = self.chrome_enter_target_page(url)
            sleep(5)
            # the statistics live inside an iframe; switch into it first.
            # fix: find_element_by_xpath was removed in Selenium 4 — use the
            # find_element('xpath', ...) form used elsewhere in this module
            iframe = web_driver.find_element('xpath', "//iframe[@id='mainContent']")
            web_driver.switch_to.frame(iframe)
            tree = etree.HTML(web_driver.page_source)
            xf_base = ("//span[contains(text(),'今日商品房累计成交信息')]/parent::div[@class='title']"
                       "/following-sibling::div//div[@id='con1']")
            es_base = ("//span[contains(text(),'今日二手房累计成交信息')]/parent::div[@class='title']"
                       "/following-sibling::div//div[@id='con3']")
            xf_data_parent_ele_list = tree.xpath(xf_base + "/div")
            timely_deal_info = []
            for index in range(len(xf_data_parent_ele_list) - 1):  # the last <div> is not a data row
                xf_row = xf_base + "/div[@class='list-item hehe'][" + str(index + 1) + "]"
                es_row = es_base + "/div[@class='list-item hehe'][" + str(index + 1) + "]"
                city_region = tree.xpath(xf_row + "/div[1]")[0].text
                # div[2]/div[4] end with a unit character (drop the last char);
                # div[3]/div[5] read like "123.4m2" (take the part before 'm')
                xf_total_deal_count = tree.xpath(xf_row + "/div[2]")[0].text[:-1]
                xf_total_deal_area_count = tree.xpath(xf_row + "/div[3]")[0].text.split('m')[0]
                xf_residence_deal_count = tree.xpath(xf_row + "/div[4]")[0].text[:-1]
                xf_residence_deal_area_count = tree.xpath(xf_row + "/div[5]")[0].text.split('m')[0]
                es_total_deal_count = tree.xpath(es_row + "/div[2]")[0].text[:-1]
                es_total_deal_area_count = tree.xpath(es_row + "/div[3]")[0].text.split('m')[0]
                es_residence_deal_count = tree.xpath(es_row + "/div[4]")[0].text[:-1]
                es_residence_deal_area_count = tree.xpath(es_row + "/div[5]")[0].text.split('m')[0]
                total_deal_count = int(xf_total_deal_count) + int(es_total_deal_count)
                total_deal_area_count = float(xf_total_deal_area_count) + float(es_total_deal_area_count)
                residence_deal_count = int(xf_residence_deal_count) + int(es_residence_deal_count)
                residence_deal_area_count = float(xf_residence_deal_area_count) + float(es_residence_deal_area_count)
                timely_deal_info.append((uuid.uuid4(), create_time, city_region, total_deal_count,
                                         float(total_deal_area_count),
                                         residence_deal_count, float(residence_deal_area_count), create_time))
            df = pd.DataFrame(data=timely_deal_info,
                              columns=['id', 'stat_date', 'city_region', 'total_deal_count', 'total_deal_area_count',
                                       'residence_deal_count', 'residence_deal_area_count',
                                       'create_time'])
            df.replace('合计', '杭州', inplace=True)  # normalise the "grand total" label to the city name
            self.check_file_is_exists(file_name)
            df.to_csv(file_name, mode="a",
                      encoding="utf-8",
                      header=False,
                      index=False)
            self.insert_timely_deal_info(file_name, table_name)
            self.quit(web_driver)  # fix: the driver used to leak on the success path
        except Exception as e:
            print(e)
            self.quit(web_driver)
            print('杭州实时交易更新失败')
            ding.send_msg('杭州实时交易数据更新失败')

    """
    南京-实时成交数据
    """

    def nj_get_timely_deal_info(self, create_time, file_name, table_name,
                                url='https://www.njhouse.com.cn/data/index'):
        """Scrape Nanjing daily residential deal data per district.

        Only the new-home "住宅类" table is used at present (the second-hand
        table was deliberately disabled — see the commented-out lines in
        history), so total figures equal residence figures.  Rows are
        appended to *file_name* and loaded into *table_name*; failures alert
        via DingTalk.
        """
        web_driver = None
        try:
            web_driver = self.chrome_enter_target_page(url)
            sleep(2)
            tree = etree.HTML(web_driver.page_source)
            # fix: the second-hand table (es_tb) was extracted but never used;
            # the dead extraction has been removed
            xf_tb = self.get_table_content(tree,
                                           "//div[@class='datas_main']/div[contains(@class,'datas_block')][1]//h2["
                                           "contains(text(),'住宅类')]/parent::div["
                                           "@class='b_title']/following-sibling::table")
            xf_df = pd.read_html(xf_tb, encoding='utf-8', header=1)[0].fillna(0)
            deal_info_df = pd.DataFrame(
                columns=['id', 'stat_date', 'city_region', 'total_deal_count', 'total_deal_area_count',
                         'residence_deal_count', 'residence_deal_area_count',
                         'create_time'])
            total_deal_count_list = xf_df['成交套数.1'].values.tolist()
            total_deal_area_count = xf_df['成交面积(m2).1'].values.tolist()
            deal_info_df['city_region'] = xf_df['区属'].values.tolist()
            deal_info_df['total_deal_count'] = total_deal_count_list
            deal_info_df['total_deal_area_count'] = total_deal_area_count
            deal_info_df['residence_deal_count'] = total_deal_count_list
            deal_info_df['residence_deal_area_count'] = total_deal_area_count
            deal_info_df['create_time'] = create_time
            deal_info_df['id'] = deal_info_df.apply(lambda _: uuid.uuid4(), axis=1)
            deal_info_df['stat_date'] = create_time
            deal_info_df.replace('全市', '南京', inplace=True)  # normalise the "whole city" label
            self.check_file_is_exists(file_name)
            deal_info_df.to_csv(file_name, mode="a",
                                encoding="utf-8",
                                header=False,
                                index=False)
            self.insert_timely_deal_info(file_name, table_name)
            self.quit(web_driver)  # fix: the driver used to leak on the success path
        except Exception as e:
            print(e)
            self.quit(web_driver)
            ding.send_msg('南京实时交易数据更新失败')

    """
    青岛-实时成交数据
    """

    def qd_get_timely_deal_info(self, create_time, file_name, table_name,
                                url='https://www.qdfd.com.cn/qdweb/realweb/indexnew.jsp', count=0):
        """Scrape Qingdao daily deal data per district.

        New-home and second-hand tables are parsed separately and summed
        element-wise into the overall totals and the residence figures
        (rows are aligned by district).  Rows are appended to *file_name*
        (CSV) and loaded into *table_name*.  Retries up to 3 times on
        failure, then alerts via DingTalk.
        """
        web_driver = None
        try:
            web_driver = self.chrome_enter_target_page(url)
            sleep(5)
            tree = etree.HTML(web_driver.page_source)
            xf_tb = self.get_table_content(tree,
                                           "//div[contains(text(),'一手房今日成交')]/following-sibling::div[@class='con2lx "
                                           "mg2 "
                                           "xi12']/table")
            es_tb = self.get_table_content(tree,
                                           "//div[contains(text(),'二手房今日成交')]/following-sibling::div[@class='con2lx "
                                           "mg2 "
                                           "xi12']/table")
            xf_df = pd.read_html(xf_tb, encoding='utf-8', header=0)[0].fillna(0)
            es_df = pd.read_html(es_tb, encoding='utf-8', header=0)[0].fillna(0)
            deal_info_df = pd.DataFrame(
                columns=['id', 'stat_date', 'city_region', 'total_deal_count', 'total_deal_area_count',
                         'residence_deal_count', 'residence_deal_area_count',
                         'create_time'])
            # element-wise sums of the two sources
            total_deal_count_list = [int(x) + int(y) for x, y in
                                     zip(xf_df['总成交套数'].values.tolist(), es_df['总成交套数'].values.tolist())]
            total_deal_area_count = [float(x) + float(y) for x, y in
                                     zip(xf_df['总成交面积（㎡）'].values.tolist(), es_df['总成交面积（㎡）'].values.tolist())]
            residence_deal_count_list = [int(x) + int(y) for x, y in
                                         zip(xf_df['住宅套数'].values.tolist(), es_df['住宅套数'].values.tolist())]
            residence_deal_area_count_list = [float(x) + float(y) for x, y in
                                              zip(xf_df['住宅面积（㎡）'].values.tolist(), es_df['住宅面积（㎡）'].values.tolist())]
            deal_info_df['city_region'] = xf_df['区县'].values.tolist()
            deal_info_df['total_deal_count'] = total_deal_count_list
            deal_info_df['total_deal_area_count'] = total_deal_area_count
            deal_info_df['residence_deal_count'] = residence_deal_count_list
            deal_info_df['residence_deal_area_count'] = residence_deal_area_count_list
            deal_info_df['create_time'] = create_time
            deal_info_df['id'] = deal_info_df.apply(lambda _: uuid.uuid4(), axis=1)
            deal_info_df['stat_date'] = create_time
            deal_info_df.replace('全市', '青岛', inplace=True)  # normalise the "whole city" label
            self.check_file_is_exists(file_name)
            deal_info_df.to_csv(file_name, mode="a",
                                encoding="utf-8",
                                header=False,
                                index=False)
            self.insert_timely_deal_info(file_name, table_name)
            self.quit(web_driver)  # fix: the driver used to leak on the success path
        except Exception as e:
            count += 1
            print(e)
            self.quit(web_driver)
            if count < 3:
                sleep(1)
                return self.qd_get_timely_deal_info(create_time, file_name, table_name, url, count)
            else:
                ding.send_msg('青岛实时交易数据更新失败')

    """
    湖州-实时成交数据
    """

    # def huzhou_get_timely_deal_info(self, create_time, file_name, table_name,
    #                                 url='http://hufdc.jsj.huzhou.gov.cn/'):
    #     web_driver = self.chrome_enter_target_page(url)
    #     sleep(2)
    #     html_source = web_driver.page_source  # 该属性可以获取当前浏览器的当前页的源码（html）
    #     tree = etree.HTML(html_source)
    #     try:
    #         xf_tb = self.get_table_content(tree,
    #                                        "//div[contains(text(),'新房交易信息')]/following-sibling::div["
    #                                        "@class='box15']//table",
    #                                        1)
    #         es_tb = self.get_table_content(tree,
    #                                        "//div[contains(text(),'二手房信息')]/following-sibling::div["
    #                                        "@class='box15']//table",
    #                                        1)
    #         if '暂无最新数据' in xf_tb and '暂无最新数据' in es_tb:
    #             pass
    #         elif '暂无最新数据' in xf_tb and '暂无最新数据' not in es_tb:
    #             es_df = pd.read_html(es_tb, encoding='utf-8', header=0)[0].fillna(0)
    #         elif '暂无最新数据' in es_tb and '暂无最新数据' not in xf_tb:
    #             xf_df = pd.read_html(xf_tb, encoding='utf-8', header=0)[0].fillna(0)
    #         else:
    #             xf_df = pd.read_html(xf_tb, encoding='utf-8', header=0)[0].fillna(0)
    #             es_df = pd.read_html(es_tb, encoding='utf-8', header=0)[0].fillna(0)
    #         deal_info_df = pd.DataFrame(
    #             columns=['id', 'stat_date', 'city_region', 'total_deal_count', 'total_deal_area_count',
    #                      'residence_deal_count', 'residence_deal_area_count',
    #                      'create_time'])
    #         xf_city_region = xf_df['区 域'].values.tolist()[:-1]
    #         xf_total_deal_count = xf_df['已售套数'].values.tolist()[:-1]
    #         xf_total_deal_count = [count[:-1] for count in xf_total_deal_count]
    #         xf_total_deal_area_count = xf_df['已售面积'].values.tolist()[:-1]
    #         xf_total_deal_area_count = [area.split('㎡')[0] for area in xf_total_deal_area_count]
    #         xf_deal_count_dict = dict(zip(xf_city_region, xf_total_deal_count))
    #         xf_deal_area_dict = dict(zip(xf_city_region, xf_total_deal_area_count))
    #         es_city_region = es_df['城区'].values.tolist()[:-1]
    #         es_total_deal_count = es_df['套数'].values.tolist()[:-1]
    #         es_total_deal_count = [count[:-1] for count in es_total_deal_count]
    #         es_total_deal_area_count = es_df['面积'].values.tolist()[:-1]
    #         es_total_deal_area_count = [area.split('㎡')[0] for area in es_total_deal_area_count]
    #         es_deal_count_dict = dict(zip(es_city_region, es_total_deal_count))
    #         es_deal_area_dict = dict(zip(es_city_region, es_total_deal_area_count))
    #
    #         new_es_city_region = [es_city for es_city in es_city_region if es_city not in xf_city_region]
    #         city_region = xf_city_region + new_es_city_region
    #         residence_deal_count_list, residence_deal_area_count_list = [], []
    #         for region in city_region:
    #             if region in xf_city_region and region in es_city_region:
    #                 xf_deal_count = xf_deal_count_dict[region]
    #                 xf_deal_area = xf_deal_area_dict[region].replace(',', '')
    #                 es_deal_count = es_deal_count_dict[region]
    #                 es_deal_area = es_deal_area_dict[region].replace(',', '')
    #             elif region not in xf_city_region:
    #                 xf_deal_count, xf_deal_area = 0, 0
    #                 es_deal_count = es_deal_count_dict[region]
    #                 es_deal_area = es_deal_area_dict[region].replace(',', '')
    #             else:
    #                 xf_deal_count = xf_deal_count_dict[region]
    #                 xf_deal_area = xf_deal_area_dict[region].replace(',', '')
    #                 es_deal_count, es_deal_area = 0, 0
    #             residence_deal_count_list.append(int(xf_deal_count) + int(es_deal_count))
    #             residence_deal_area_count_list.append(float(xf_deal_area) + float(es_deal_area))
    #
    #         deal_info_df['city_region'] = city_region
    #         deal_info_df['total_deal_count'] = residence_deal_count_list
    #         deal_info_df['total_deal_area_count'] = residence_deal_area_count_list
    #         deal_info_df['residence_deal_count'] = residence_deal_count_list
    #         deal_info_df['residence_deal_area_count'] = residence_deal_area_count_list
    #         deal_info_df['create_time'] = create_time
    #         deal_info_df['id'] = deal_info_df.apply(lambda _: uuid.uuid4(), axis=1)
    #         deal_info_df['stat_date'] = create_time
    #         deal_info_df.replace('全市', '青岛', inplace=True)
    #         self.check_file_is_exists(file_name)
    #         # 保存为csv格式
    #         deal_info_df.to_csv(file_name, mode="a",
    #                             encoding="utf-8",
    #                             header=False,
    #                             index=False)
    #         self.insert_timely_deal_info(file_name, table_name)
    #     except Exception as e:
    #         print(e)
    #         self.quit(web_driver)

    """
    宁波-实时成交数据
    """

    def nb_get_timely_deal_info(self, create_time, file_name, table_name,
                                url='https://newhouse.cnnbfdc.com/?_tsduk=2'):
        """Scrape Ningbo deal data, which is published only as an image.

        The ranking image is downloaded, re-hosted, then run through Aliyun's
        public OCR demo page; the recognised text (expected: 80 tokens,
        5 per row) is parsed into per-district counts and areas.  Since only
        one figure per district is available, totals equal residence figures.
        """
        web_driver = None
        try:
            web_driver = self.chrome_enter_target_page(url)
            tree = etree.HTML(web_driver.page_source)
            # fix: find_element_by_xpath was removed in Selenium 4 — use the
            # find_element('xpath', ...) form used elsewhere in this module
            ele = tree.xpath("//a[contains(text(),'如果没有跳转，请手动点击这里')]")
            if len(ele) == 1:  # interstitial redirect page — click through it
                web_driver.find_element('xpath', "//a[contains(text(),'如果没有跳转，请手动点击这里')]").click()
            tree = etree.HTML(web_driver.page_source)
            img_url = tree.xpath("//div[@id='cnnbfdc-ranking-list']//img/@src")[2]
            self.quit(web_driver)
            img_path = cm.tmp_file_path('nb.png')
            image_orc.convert_image(img_url, img_path)
            local_img_url = self.upload_img_request(img_path)
            # drive Aliyun's OCR demo page to recognise the re-hosted image
            web_driver = self.chrome_enter_target_page('https://duguang.aliyun.com/experience')
            web_driver.find_element('xpath', "//input[@placeholder='请输入图片URL查看效果']").send_keys(local_img_url)
            sleep(1)
            web_driver.find_element('xpath', "//div[contains(text(),'检 测') and @class='ocr-button invert ']").click()
            sleep(3)
            result = web_driver.find_element('xpath', "//div[@class='result-container']/div").text
            word_list = result.split(' ')
            if len(word_list) != 80:  # 16 rows x 5 fields expected; anything else means OCR failed
                ding.send_msg('宁波实时交易图片数据识别失败')
                self.quit(web_driver)
                return
            df_data_list = []
            # group tokens into rows of 5: [district, count, area, ...]
            for i in range(0, len(word_list), 5):
                data = word_list[i:i + 5]
                deal_count_list = re.findall(r"\d+\.?\d*", str(data[1]))
                total_deal_count = int(deal_count_list[0]) if deal_count_list else 0
                deal_area_list = re.findall(r"\d+\.?\d*", str(data[2]))
                total_deal_area_count = float(deal_area_list[0]) if deal_area_list else 0
                df_data_list.append([uuid.uuid4(), create_time, data[0], total_deal_count, total_deal_area_count,
                                     total_deal_count, total_deal_area_count, create_time])
            deal_info_df = pd.DataFrame(df_data_list,
                                        columns=['id', 'stat_date', 'city_region', 'total_deal_count',
                                                 'total_deal_area_count',
                                                 'residence_deal_count', 'residence_deal_area_count',
                                                 'create_time'])
            # fix: fillna is not in-place — the previous code discarded its result
            deal_info_df = deal_info_df.fillna(0)
            self.check_file_is_exists(file_name)
            deal_info_df.to_csv(file_name, mode="a",
                                encoding="utf-8",
                                header=False,
                                index=False)
            self.insert_timely_deal_info(file_name, table_name)
            self.quit(web_driver)
        except Exception as e:
            print(e)
            self.quit(web_driver)
            print('宁波实时交易更新失败')
            # ding.send_msg('宁波实时交易数据更新失败')

    """
    扬州-实时成交数据
    """

    # def yz_get_timely_deal_info(self, create_time, file_name, table_name,
    #                             xf_url='https://www.yzfdc.cn/Default.aspx',
    #                             es_url='https://sec.yzfdc.cn/Index.aspx'):
    #     web_driver = None
    #     try:
    #         web_driver = self.chrome_enter_target_page(xf_url)
    #         html_source = web_driver.page_source  # 该属性可以获取当前浏览器的当前页的源码（html）
    #         tree = etree.HTML(html_source)
    #         xf_tb = self.get_table_content(tree, "//div[@id='dtDetailContent']/table", 0)
    #         xf_df = pd.read_html(xf_tb, encoding='utf-8', header=1)[0].fillna(0)
    #         xf_city_region = xf_df['区域'].values.tolist()[:-1]
    #         xf_total_deal_count = xf_df['住宅成交套数'].values.tolist()[:-1]
    #         xf_total_deal_area_count = xf_df['住宅成交面积(m2)'].values.tolist()[:-1]
    #         new_data_list = []
    #         deal_info_df = pd.DataFrame(new_data_list,
    #                                     columns=['id', 'stat_date', 'city_region', 'total_deal_count',
    #                                              'total_deal_area_count',
    #                                              'residence_deal_count', 'residence_deal_area_count',
    #                                              'create_time'])
    #         # 保存为csv格式
    #         deal_info_df.to_csv(file_name, mode="a",
    #                             encoding="utf-8",
    #                             header=False,
    #                             index=False)
    #         self.insert_timely_deal_info(file_name, table_name)
    #         self.quit(web_driver)
    #     except Exception as e:
    #         print(e)
    #         self.quit(web_driver)

    """
    镇江-实时成交数据
    """

    # def zj_get_timely_deal_info(self, create_time, file_name, table_name,
    #                             xf_url='http://221.6.146.72:9080/estate2/olestate/comore.action?cid.cityId=',
    #                             es_url='http://221.6.146.71:9080/shestate2/publish/jrqyList.action'):
    #     web_driver = self.chrome_enter_target_page(xf_url)
    #     sleep(2)
    #     html_source = web_driver.page_source  # 该属性可以获取当前浏览器的当前页的源码（html）
    #     tree = etree.HTML(html_source)

    @staticmethod
    def get_estate_info_from_txt(csv_name):  # 获取贝壳新房信息
        df = pd.read_csv(csv_name, sep=',', header=None)
        return df.values.tolist()

    @staticmethod
    @update_flag()
    def insert_timely_deal_info(csv_name, table_name):
        """Load a header-less CSV into the given SQL Server table.

        Column names are pulled from the live table schema via DataBaseTool,
        duplicate ``city_region`` rows are dropped (first occurrence wins),
        and the data is appended when the table already exists.

        :param csv_name: path of the CSV file produced by a crawler run
        :param table_name: destination table in the JRZF_BASE database
        :raises FileNotFoundError: if ``csv_name`` does not exist
        """
        # NOTE(review): credentials are hard-coded in source — move to config.
        engine = create_engine('mssql+pymssql://base:base@192.168.106.21:1433/JRZF_BASE?charset=utf8')
        # Guard clause instead of if/else wrapping the whole body.
        if not os.path.exists(csv_name):
            raise FileNotFoundError("配置文件%s不存在！" % csv_name)
        # Map the CSV's positional columns onto the table's real column names.
        sql_result = DataBaseTool().get_column_name_sql(table_name)
        df = pd.read_csv(csv_name, sep=',', header=None)
        df.columns = [column_item[0] for column_item in sql_result]
        df.drop_duplicates(subset=['city_region'], keep='first', inplace=True)
        try:
            # if_exists='fail' raises ValueError when the table exists;
            # fall back to appending in that case.
            df.to_sql(table_name, engine, index=False, if_exists='fail')
        except ValueError:
            df.to_sql(table_name, engine, index=False, if_exists='append')

    @staticmethod
    def del_data(table_name):
        """Remove every row from *table_name* via the shared DataBaseTool."""
        delete_sql = "delete from {}".format(table_name)
        DataBaseTool().update_sql(delete_sql)

    @staticmethod
    def check_file_is_exists(csv_name):
        if os.path.isfile(csv_name):
            os.remove(csv_name)

    def upload_img_request(self, img_path):
        """
        Upload an image to the Jingri housekeeper static-file service.

        Logs in first to refresh the shared auth token, then posts the file
        as multipart form data.

        :param img_path: local path of the image to upload
        :return: public URL of the uploaded image (from the response JSON)
        """
        self.login()  # 登录京日管家 — refreshes GlobalVar token before the upload
        url = 'https://szuat-gateway.jingrizf.com/static/net/upload/img'
        header = {'Authorization': GlobalVar.header['Authorization'], 'Connection': 'close'}
        # Fix: the file handle was previously opened and never closed (leak);
        # keep it open only for the duration of the request.
        with open(img_path, "rb") as img_file:
            files = {'file': (os.path.basename(img_path), img_file, "image/png/jpg")}
            res = requests.post(url=url, headers=header, files=files)
        return res.json()['url']

    def login(self, account='11111300000', password='Autotest1', platform='1001', scope=1):
        """Log in to the Jingri housekeeper UAT gateway and refresh the shared token.

        Two-step flow: the account/password call yields a pre-code, which is
        then exchanged for a bearer token stored in
        ``GlobalVar.header['Authorization']``.
        """
        millis = int(round(timestamp() * 1000))
        # The password is DES/ECB encrypted together with the millisecond timestamp.
        encrypted_password = str(self.des_ecb_encode(password + '-' + str(millis), '12345678'),
                                 encoding="utf-8")
        header = {'Content-Type': 'application/json', 'Connection': 'close'}  # base request header
        login_body = {
            'account': account,
            'password': encrypted_password,
            'scope': scope,
            's': millis
        }
        # Step 1: password login -> pre-code + region abbreviation.
        login_res = requests.post(url='https://uat-piblicgateway.jingrizf.com/oauth/auth/account/password',
                                  headers=header, json=login_body)
        login_json = login_res.json()
        pre_code = jsonpath(login_json, '$.data.code')[0]
        region = jsonpath(login_json, '$.data.regions[0].abbreviated')[0]
        # Step 2: exchange the pre-code for a bearer token.
        token_body = {
            'preCode': pre_code,
            'platform': platform,
            'region': region,
            'role': None
        }
        token_res = requests.post(url='https://uat-piblicgateway.jingrizf.com/oauth/auth/precode',
                                  headers=header, json=token_body)
        GlobalVar.header['Authorization'] = 'Bearer ' + jsonpath(token_res.json(), '$.data.token')[0]

    @staticmethod
    def des_ecb_encode(source, key):
        """DES/ECB-encrypt *source* with *key* (PKCS5 padding), base64-encoded bytes result."""
        cipher = des(key=key, mode=ECB, IV=None, pad=None, padmode=PAD_PKCS5)
        return base64.b64encode(cipher.encrypt(source))


if __name__ == '__main__':
    time = timeutil.dt_strptime("%Y-%m-%d %H%M%S")
    city_list = ['sh', 'changzhou', 'hz', 'nj', 'qd']
    # city_list = ['nb']
    for city in city_list:
        deal_file = cm.tmp_file_path(xf_file_root_directory.format(city=city))
        deal_table = xf_table_root_directory.format(city=city)
        crawler = NewHouseTimelyDealInfo()
        # Dispatch to <city>_get_timely_deal_info; anything unmatched falls
        # back to the qd crawler, mirroring the original else branch.
        handler = getattr(crawler, '{}_get_timely_deal_info'.format(city),
                          crawler.qd_get_timely_deal_info)
        handler(time, deal_file, deal_table)