import logging
import os
import time

import psycopg2
import psycopg2.extras
import psycopg2.sql as sql
import pymysql
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk

# Root logger setup: INFO level, timestamped single-line format, applied
# process-wide for all helpers below.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


# Custom field-name conversion strategy: thin subclass of psycopg2's AsIs
# adapter (inserts the value into SQL verbatim, without quoting).
class AsIs(psycopg2.extensions.AsIs):
    pass


# WARNING(review): registering AsIs for *every* str means any string passed as
# a psycopg2 query parameter would be interpolated verbatim — no quoting, no
# escaping — which disables SQL-injection protection process-wide. The queries
# in this script carry no parameters, so it appears unused; confirm intent
# before keeping this adapter.
psycopg2.extensions.register_adapter(str, lambda x: AsIs(x))


# 连接mysql数据库
# 连接mysql数据库
def connect_to_db_mysql():
    """Open a connection to the MySQL configuration database.

    Connection settings may be overridden through the MYSQL_HOST /
    MYSQL_USER / MYSQL_PASSWORD / MYSQL_DATABASE environment variables;
    the defaults preserve the legacy hard-coded values so existing
    deployments keep working.

    NOTE(security): credentials should not live in source control —
    move them fully into environment variables or a secret store.

    Returns:
        A live pymysql connection on success, or None on failure
        (the error is logged, never raised).
    """
    try:
        connection = pymysql.connect(
            host=os.environ.get('MYSQL_HOST', 'rds50g3807a68zwc9soo969.mysql.rds.aliyuncs.com'),
            user=os.environ.get('MYSQL_USER', 'data_browser'),
            password=os.environ.get('MYSQL_PASSWORD', 'D6a_T3_Brs'),
            database=os.environ.get('MYSQL_DATABASE', 'data_browser'),
            charset='utf8mb4',
            # DictCursor so callers can address columns by name.
            cursorclass=pymysql.cursors.DictCursor
        )
        logging.info("连接mysql数据库成功")
        return connection
    except Exception as e:
        logging.error(f"连接mysql数据库时出错: {e}")
        return None


# 连接到数据库
# 连接到数据库
def connect_to_db():
    """Open a connection to the Hologres (PostgreSQL-compatible) warehouse.

    Connection settings may be overridden through the HOLO_HOST /
    HOLO_PORT / HOLO_DBNAME / HOLO_USER / HOLO_PASSWORD environment
    variables; the defaults preserve the legacy hard-coded values so
    existing deployments keep working.

    NOTE(security): credentials should not live in source control —
    move them fully into environment variables or a secret store.

    Returns:
        A live psycopg2 connection on success, or None on failure
        (the error is logged, never raised).
    """
    try:
        connection = psycopg2.connect(
            host=os.environ.get('HOLO_HOST', 'hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com'),
            port=int(os.environ.get('HOLO_PORT', '80')),
            dbname=os.environ.get('HOLO_DBNAME', 'odpstest'),
            user=os.environ.get('HOLO_USER', 'LTAI5tQo9VtJ414iSEZrE8Vn'),
            password=os.environ.get('HOLO_PASSWORD', 'BPBKqDiQ7JKPl6o3QDIFg6kh71nakS'),
            application_name="third_edb_data",
            # NOTE(review): search_path is set to the literal 'your_schema',
            # which looks like a leftover template value — confirm.
            options='-c search_path=your_schema'
        )
        logging.info("连接到数据库成功")
        return connection
    except Exception as e:
        logging.error(f"连接到数据库时出错: {e}")
        return None


# 从 MySQL 数据库读取数据
# 从 MySQL 数据库读取数据
def read_data_from_mysql(connection, query):
    """Execute *query* and return the 'param_value' column of the first row.

    Returns None when no row matches (a warning is logged) or when any
    error occurs (the error is logged, never raised).
    """
    try:
        with connection.cursor() as cur:
            cur.execute(query)
            record = cur.fetchone()
            if not record:
                logging.warning("未找到匹配的记录")
                return None
            return record['param_value']
    except Exception as exc:
        logging.error(f"从 MySQL 数据库读取数据时出错: {exc}")
        return None


# 创建索引
def create_index(es, index_name):
    mappings = {
        "properties": {
            "stockCode": {"type": "text", "analyzer": "ik_smart"},
            "stockAbbrName": {"type": "text", "analyzer": "ik_smart"},
            "sInfoWindCode": {"type": "text", "analyzer": "ik_smart"},
            "compShortName": {"type": "text", "analyzer": "ik_smart"},
            "compFullName": {"type": "keyword"},
            "englishCompFullName": {"type": "keyword"},
            "sInfoCity": {"type": "text", "analyzer": "ik_smart"},
            "stockTypeName": {"type": "keyword"},
            "windCompCode": {"type": "keyword"},
            "englishCompShortName": {"type": "keyword"},
            "countryName": {"type": "keyword"},
            "sInfoProvince": {"type": "keyword"},
            "address": {"type": "keyword"},
            "office": {"type": "keyword"},
            "phoneNumber": {"type": "keyword"},
            "fax": {"type": "keyword"},
            "email": {"type": "keyword"},
            "sInfoWebsite": {"type": "keyword"},
            "uniformSocialCreditCode": {"type": "keyword"},
            "orgCode": {"type": "keyword"},
            "sInfoChairman": {"type": "keyword"},
            "president": {"type": "keyword"},
            "discloser": {"type": "keyword"},
            "sInfoRegCapital": {"type": "keyword"},
            "sInfoFoundDate": {"type": "keyword"},
            "compEndDate": {"type": "keyword"},
            "sInfoChineseIntroduction": {"type": "keyword"},
            "companyIndustryTypeCode": {"type": "keyword"},
            "companyIndustryTypeName": {"type": "keyword"},
            "compEconomyClassifyTypeName": {"type": "keyword"},
            "sInfoBusinessScope": {"type": "keyword"},
            "sInfoTotalEmployees": {"type": "keyword"},
            "mainBusiness": {"type": "keyword"},
            "isListed": {"type": "keyword"},
            "compTechClassifyTypeName": {"type": "keyword"},
            "compScaleClassifyTypeName": {"type": "keyword"},
            "insertTime": {"type": "keyword"},
            "companyTypeCode": {"type": "keyword"},
            "companyTypeName": {"type": "keyword"},
            "district": {"type": "keyword"},
            "isTrusteeshipFlag": {"type": "keyword"},
            "yyAdminLevel": {"type": "keyword"}
        }
    }

    request_body = {
        "mappings": mappings
    }

    if es.indices.exists(index=index_name):
        logging.info(f"索引 {index_name} 已经存在")
    else:
        try:
            response = es.indices.create(index=index_name, body=request_body)
            logging.info(f"创建索引 {index_name} 成功: {response}")
        except Elasticsearch.ElasticsearchException as e:
            logging.error(f"创建索引时出错 (ElasticsearchException): {e}")
        except Exception as e:
            logging.error(f"创建索引时出错 (未知错误): {e}")


# 连接到Elasticsearch
# 连接到Elasticsearch
def create_es_client(es_url):
    """Build an Elasticsearch client for *es_url* and verify connectivity.

    Returns the client when a ping succeeds, otherwise logs the error and
    returns None (never raises).

    NOTE(security): basic-auth password is hard-coded — should be moved
    out of source control.
    """
    try:
        client = Elasticsearch([es_url], basic_auth=('elastic', 'elaKib#1111'))
        if client.ping():
            logging.info("连接到Elasticsearch成功")
            return client
        # Unreachable node: raise so the shared handler below logs it.
        raise ValueError("Connection failed")
    except Exception as exc:
        logging.error(f"连接到Elasticsearch时出错: {exc}")
        return None


# 从数据库读取数据并转换为键值对格式
# 从数据库读取数据并转换为键值对格式
def read_data_from_db(connection, query):
    """Run *query* on the warehouse connection and return all rows as dicts.

    Uses a DictCursor so each row converts cleanly into a {column: value}
    mapping. Returns None on any error (the error is logged, never raised).
    """
    try:
        with connection.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute(sql.SQL(query))
            return [dict(record) for record in cur.fetchall()]
    except Exception as exc:
        logging.error(f"从数据库读取数据时出错: {exc}")
        return None


# 删除索引中的所有数据
# 删除索引中的所有数据
def delete_all_data_in_index(es, index_name):
    """Purge every document from *index_name* via delete-by-query.

    The index itself (and its mappings) is kept. A missing index is only
    logged; any error is logged and swallowed.
    """
    try:
        if not es.indices.exists(index=index_name):
            logging.info(f"索引 {index_name} 不存在")
            return
        result = es.delete_by_query(index=index_name, body={"query": {"match_all": {}}})
        logging.info(f"删除索引 {index_name} 中的所有数据: {result}")
    except Exception as exc:
        logging.error(f"删除索引中的数据时出错: {exc}")


# 将数据导入到Elasticsearch
# 将数据导入到Elasticsearch
def import_data_to_es(es, data, index_name, batch_size):
    """Bulk-index every record in *data* into *index_name*.

    Records are wrapped as index actions and sent in slices of
    *batch_size*; per-batch successes and any reported failures are
    logged. Errors are logged and swallowed (never raised).
    """
    try:
        docs = [{"_index": index_name, "_source": record} for record in data]

        n_batches = (len(docs) + batch_size - 1) // batch_size
        logging.info(f"总共有 {len(docs)} 条操作需要执行，每次批量导入 {batch_size} 条，共需 {n_batches} 批次")

        for batch_no in range(n_batches):
            chunk = docs[batch_no * batch_size:(batch_no + 1) * batch_size]

            ok_count, failures = bulk(es, chunk, chunk_size=batch_size)
            logging.info(f"第 {batch_no + 1} 批次: 成功导入 {ok_count} 条记录到 {index_name}")

            if failures:
                logging.error(f"导入数据时出现错误: {failures}")

    except Exception as exc:
        logging.error(f"导入数据到Elasticsearch时出错: {exc}")


# 主函数
# 主函数
def main():
    """Full sync: read ES URL from MySQL config, pull the latest stock/company
    base info from the warehouse, wipe and re-import the ES index.

    Fixes over the previous version: both database connections are now
    always closed (the MySQL one was leaked before, and both leaked on
    early returns), and the sync aborts early when no ES URL is found
    instead of handing None to the ES client.
    """
    db_connection = connect_to_db()
    db_mysql = connect_to_db_mysql()
    # If either connection failed, close the one that succeeded and bail out.
    if db_connection is None or db_mysql is None:
        if db_connection is not None:
            db_connection.close()
        if db_mysql is not None:
            db_mysql.close()
        return

    try:
        # 从数据库读取 ES_URL_DEV 的值
        query = (
            "select param_value from service_config where param_code = 'ES_URL_DEV'"
        )
        es_url = read_data_from_mysql(db_mysql, query)

        if es_url:
            logging.info(f"获取到的 ES_URL_DEV 值是: {es_url}")
        else:
            logging.info("未获取到 ES_URL_DEV 的值")
            return  # no URL -> nothing to connect to

        es = create_es_client(es_url)
        if es is None:
            return

        index_name = "edb_company_library"

        # 创建索引
        create_index(es, index_name)

        # 从数据库读取数据
        query = (
            "SELECT "
            "  t1.stock_code AS \"stockCode\",  "
            "  t1.stock_abbr_name AS \"stockAbbrName\", "
            "  t1.wind_stock_code AS \"sInfoWindCode\", "
            "  t1.stock_type_name AS \"stockTypeName\","
            "  t2.wind_comp_code AS \"windCompCode\","
            "  t2.comp_short_name AS \"compShortName\",  "
            "  t2.comp_full_name AS \"compFullName\","
            "  t2.english_comp_full_name AS \"englishCompFullName\","
            "  t2.english_comp_short_name AS \"englishCompShortName\","
            "  t2.country_name AS \"countryName\","
            "  t2.province_name AS \"sInfoProvince\","
            "  t2.city_name AS \"sInfoCity\","
            "  t2.address AS \"address\","
            "  t2.office AS \"office\","
            "  t2.phone_number AS \"phoneNumber\","
            "  t2.fax AS \"fax\","
            "  t2.email AS \"email\","
            "  t2.website AS \"sInfoWebsite\","
            "  t2.uniform_social_credit_code AS \"uniformSocialCreditCode\","
            "  t2.org_code AS \"orgCode\","
            "  t2.chairman AS \"sInfoChairman\","
            "  t2.president AS \"president\","
            "  t2.discloser AS \"discloser\","
            "  t2.register_capital AS \"sInfoRegCapital\","
            "  t2.comp_found_date AS \"sInfoFoundDate\","
            "  t2.comp_end_date AS \"compEndDate\","
            "  t2.briefing AS \"sInfoChineseIntroduction\","
            "  t2.company_industry_type_code AS \"companyIndustryTypeCode\","
            "  t2.company_industry_type_name AS \"companyIndustryTypeName\","
            "  t2.comp_economy_classify_type_name AS \"compEconomyClassifyTypeName\","
            "  t2.business_scope AS \"sInfoBusinessScope\","
            "  t2.total_employee_cnt AS \"sInfoTotalEmployees\","
            "  t2.main_business AS \"mainBusiness\","
            "  t2.is_listed AS \"isListed\","
            "  t2.comp_tech_classify_type_name AS \"compTechClassifyTypeName\","
            "  t2.comp_scale_classify_type_name AS \"compScaleClassifyTypeName\","
            "  t2.insert_time AS \"insertTime\","
            "  t2.company_type_code AS \"companyTypeCode\","
            "  t2.company_type_name AS \"companyTypeName\","
            "  t2.district AS \"district\","
            "  t2.is_trusteeship_flag AS \"isTrusteeshipFlag\","
            "  t2.yy_admin_level AS \"yyAdminLevel\""
            " FROM"
            " ("
            "     SELECT * FROM public.tmp_dim_product_stock_base_info_d"
            "     WHERE p_nature_date=MAX_PT('public.tmp_dim_product_stock_base_info_d')"
            "     AND stock_type_name IN ('A股','普通股')"
            " ) t1"
            " LEFT JOIN "
            " (SELECT * FROM public.test_dim_product_comp_global_base_info_d WHERE p_nature_date=MAX_PT('public.test_dim_product_comp_global_base_info_d')) t2"
            " ON t1.publish_company_code=t2.wind_comp_code"
        )
        data = read_data_from_db(db_connection, query)

        if data is None:
            return

        logging.debug(f"前5条数据: {data[:5]}")

        # 删除索引中的所有数据
        delete_all_data_in_index(es, index_name)

        # 将数据导入到Elasticsearch
        import_data_to_es(es, data, index_name, batch_size=1000)
    finally:
        # 关闭数据库连接 — always, even on early return or error.
        db_connection.close()
        db_mysql.close()


# Script entry point: run the full sync only when executed directly.
if __name__ == "__main__":
    main()
