import os

import pandas as pd
from elasticsearch import Elasticsearch, TransportError
from hdfs.client import Client
from pyhive import hive

# Per-environment connection settings.
#   es_host / es_port : Elasticsearch coordinating node
#   index / alias     : ES index name and its search alias
#   hive_host         : HiveServer2 host
#   nn1 / nn2         : HDFS NameNode HA pair (pro only — the 'test'
#                       environment defines no NameNode hosts)
# NOTE(review): removed a commented-out duplicate "pro" entry that pointed
# at the test ES host; dead config blocks invite accidental re-activation.
para_data = {
    "pro": {
        "es_host": '10.33.42.34',
        'es_port': 9200,
        "index": 'pro_address_library_new',
        "alias": 'pro_address_library_search',
        "hive_host": '10.33.48.6',
        'nn1': '10.33.48.4',
        'nn2': '10.33.48.5'
    },
    "test": {
        "es_host": '192.168.12.201',
        'es_port': 9200,
        "index": 'test_address_library_new',
        "hive_host": '192.168.12.136'
    }
}


def jms_thrcodes_esaddr_new(dt, env: str):
    """Propagate second-segment code changes from Hive into the ES address index.

    Reads the day's rows from ``jms_dim.dim_cn_three_codes_change_dt``, runs an
    ES ``update_by_query`` per row against the search alias to rewrite
    ``second_code``, collects rows whose ES update raised a TransportError into
    an HDFS file, and finally registers that file's directory as a partition of
    the Hive error table.

    Args:
        dt:  Partition date string (e.g. '2023-01-01'); used for the Hive
             query filter, the HDFS error path, and the error-table partition.
        env: Environment key into ``para_data`` ('pro' or 'test',
             case-insensitive).
    """
    print(env)
    # Resolve environment config once instead of re-looking it up per setting.
    cfg = para_data.get(env.lower())

    # Hive connection for reading the three-codes change table.
    # NOTE(review): credentials are hard-coded in source; move them to a
    # config file or secret store.
    conn = hive.Connection(host=cfg.get('hive_host'), port=10000, username='hive', password='8qn6p5sLhv',
                           auth='LDAP', database='jms_dim')

    # Ask the cluster which NameNode is active so WebHDFS targets the right host.
    status = os.popen("hdfs haadmin -getServiceState nn1").read()
    print(status)
    if "active" in status:
        defaultFS = cfg.get("nn1")
    else:
        defaultFS = cfg.get("nn2")
    # NOTE(review): the 'test' env defines no 'nn1'/'nn2' keys, so defaultFS
    # would be None there — confirm this function only runs with env='pro'.
    # WebHDFS client; 50070 is the default NameNode HTTP port.
    client = Client("http://{defaultFS}:50070".format(defaultFS=defaultFS))
    hdfs_path = "/dw/hive/jms_dim.db/external/dim_cn_second_codes_change_error_dt/dt={dt}".format(dt=dt)

    cursor = conn.cursor()
    try:
        sql = """
        select
        network_id,
        network_code,
        network_second_code
        from jms_dim.dim_cn_three_codes_change_dt
        where dt = '{dt}' and network_code is not null
        group by 
        network_id,
        network_code,
        network_second_code
        """.format(dt=dt)
        cursor.execute(sql)

        # ES search alias the updates are issued against.
        alias_name = cfg.get('alias')
        # ES connection.
        # NOTE(review): node list and credentials are hard-coded rather than
        # taken from para_data['es_host'] — confirm this is intentional.
        es_hosts = ["10.33.42.34", "10.33.42.35", "10.33.42.36", "10.33.42.37",
                    "10.33.42.38", "10.33.42.39", "10.33.42.40", "10.33.42.41",
                    "10.33.42.42", "10.33.42.43", "10.33.42.44", "10.33.42.45"]
        es_user = "admin"
        es_password = "YL_AdmLn0417.c1m"
        es = Elasticsearch(hosts=es_hosts, http_auth=(es_user, es_password), port="9200",
                           timeout=3600)

        processed = 0
        # Accumulated (network_code, network_second_code) pairs that failed in ES.
        network_code, network_second_code = [], []
        for address in cursor:
            # Update every doc with this network_code whose second_code is stale.
            update_body = {
                "query": {
                    "bool": {
                        "must": [
                            {
                                "term": {
                                    "network_code": {
                                        "value": address[1]
                                    }
                                }
                            }
                        ],
                        "must_not": [
                            {
                                "term": {
                                    "second_code": {
                                        "value": address[2]
                                    }
                                }
                            }
                        ]
                    }
                },
                "script": {
                    "source": "ctx._source['second_code']=params.second_code",
                    "params": {
                        "second_code": address[2]
                    }
                }
            }
            processed += 1
            print(processed)
            try:
                # Refresh so the query sees the latest docs, then fire the
                # update asynchronously (wait_for_completion=False).
                es.indices.refresh(index=alias_name)
                es.update_by_query(index=alias_name, body=update_body, wait_for_completion=False)
            except TransportError:
                # Best-effort error capture: append the failed row and rewrite
                # the whole accumulated error file so HDFS always holds the
                # full failure list for this run.
                network_code.append(address[1])
                network_second_code.append(address[2])
                df = pd.DataFrame(
                    [network_code, network_second_code]).T
                print("异常地址网点：")
                print(address)
                client.write(hdfs_path + '/data', df.to_csv(header=False, index=False, sep="\001"), encoding='utf-8',
                             overwrite=True)
                continue

        # Register the error directory as a Hive partition for this dt.
        # NOTE(review): dt is interpolated unquoted into the partition spec and
        # the shell command — confirm dt values are always shell/HQL-safe.
        os.popen(
            f'hive -e "ALTER TABLE jms_dim.dim_cn_second_codes_change_error_dt ADD IF NOT EXISTS PARTITION (dt={dt}) LOCATION \'/dw/hive/jms_dim.db/external/dim_cn_second_codes_change_error_dt/dt={dt}\';"')
    finally:
        # Always release the Hive cursor and connection (the original leaked
        # the connection and skipped cursor.close() on error).
        cursor.close()
        conn.close()