#!/usr/local/python3.6.5/bin/python3
# coding=utf-8
import datetime
import os
import pandas as pd
from elasticsearch import Elasticsearch
from hdfs.client import Client
# Per-environment connection settings: Elasticsearch endpoint (host/port)
# and the active/standby HDFS namenode addresses (nn1/nn2).
para_data = {
    "pro": {
        "host": "10.33.40.71",
        "port": 9200,
        "nn1": "10.33.48.4",
        "nn2": "10.33.48.5",
    },
    "test": {
        "host": "192.168.12.201",
        "port": 9200,
        "nn1": "192.168.12.136",
        "nn2": "192.168.12.137",
    },
}

# 1. Find the address-library records modified on the given day
def jms_find_address(env, startdt, enddt):
    """Export the day's modified address-library docs from ES to an HDFS/Hive partition.

    Queries Elasticsearch for address records whose ``operation_time`` falls in
    ``[startdt, enddt)`` (excluding deletes, operation_type 3, and records with
    address_library_source 2), writes them as a Ctrl-A-delimited file into the
    day's HDFS partition directory, and registers the Hive partition.

    Args:
        env: environment key, "pro" or "test" (case-insensitive); selects the
             ES endpoint and the HDFS namenode pair from ``para_data``.
        startdt: partition day, "YYYYMMDD" — inclusive lower bound.
        enddt: "YYYYMMDD" — exclusive upper bound.
    """
    # index_name = "test_address_library_new"
    index_name = "pro_address_library_new"
    env_conf = para_data.get(env.lower())
    es = Elasticsearch([{'host': env_conf.get('host'), 'port': env_conf.get('port')}])
    # Detect which HDFS node currently holds the active namenode role.
    status = os.popen("hdfs haadmin -getServiceState nn1").read()
    print(status)
    if "active" in status:
        defaultFS = env_conf.get("nn1")
    else:
        defaultFS = env_conf.get("nn2")
    # WebHDFS client (50070: Hadoop default namenode HTTP port)
    client = Client("http://{defaultFS}:50070".format(defaultFS=defaultFS))
    hdfs_path = "/dw/hive/jms_dwd.db/external/dwd_cn_address_warehouse_es_dt/dt={startdt}".format(startdt=startdt)
    # Output column order must match the Hive table definition — do NOT reorder.
    # The ES document _id is appended as the final (md5_id) column.
    fields = [
        "area_name", "last_sign_time", "third_code", "lng", "operation_type",
        "full_address", "area_id", "errcodeenum", "province_name", "city_name",
        "network_id", "second_code", "operation_time", "province_id", "town_id",
        "street", "details", "first_code", "lat", "township", "city_id",
        "operator", "operation_network_code", "operation_network_name",
        "address_library_source", "address_library_checked",
    ]
    query_json = {
        "size": 100000,
        "query": {
            "bool": {
                "must": [
                    {
                        # Filter records by modification time window.
                        "range": {
                            "operation_time": {
                                "gte": datetime.datetime.strptime(startdt, "%Y%m%d").strftime("%Y-%m-%d %H:%M:%S"),
                                "lt": datetime.datetime.strptime(enddt, "%Y%m%d").strftime("%Y-%m-%d %H:%M:%S")
                            }
                        }
                    }
                ],
                "must_not": [
                    {"term": {"operation_type" : 3}}
                    ,{"term": {"address_library_source" : 2}}
                ]
            }
        }
    }
    result = es.search(
        index=index_name,
        body=query_json)
    # One row per hit: the named fields in table order, then the ES doc _id.
    # Missing fields default to '' so every row has the full column count.
    rows = [
        [hit["_source"].get(field, '') for field in fields] + [hit["_id"]]
        for hit in result["hits"]["hits"]
    ]
    df = pd.DataFrame(rows)
    print(df)
    # \001 (Ctrl-A) is Hive's default field delimiter.
    client.write(hdfs_path+'/data', df.to_csv(header=False, index=False, sep="\001"), encoding='utf-8', overwrite=True)

    # .read() blocks until hive exits, ensuring the partition is registered
    # before this function returns (bare os.popen would not wait).
    os.popen(f'hive -e "ALTER TABLE jms_dwd.dwd_cn_address_warehouse_es_dt ADD IF NOT EXISTS PARTITION (dt={startdt}) LOCATION \'/dw/hive/jms_dwd.db/external/dwd_cn_address_warehouse_es_dt/dt={startdt}\';"').read()


# Refresh (wipe and recreate) an HDFS directory via shell commands
def flush_hdfs(hdfs_path):
    """Recreate *hdfs_path* from scratch with an empty ``data`` file inside.

    Each command's pipe is read to completion so the shell process finishes
    before the next command starts — a bare ``os.popen`` returns immediately,
    which let rm / mkdir / touchz race each other in the original version.

    Args:
        hdfs_path: absolute HDFS directory path to wipe and recreate.
    """
    os.popen("hdfs dfs -rm -r {hdfs_path}".format(hdfs_path=hdfs_path)).read()
    os.popen("hdfs dfs -mkdir {hdfs_path}".format(hdfs_path=hdfs_path)).read()
    os.popen("hdfs dfs -touchz {hdfs_path}/data".format(hdfs_path=hdfs_path)).read()



