"""
-------------------------------------------------
   File Name：     create_sample_file
   Description :
   Author :       willis
   date：          2019/5/6
-------------------------------------------------
   Change Activity:
                   2019/5/6:

-------------------------------------------------

"""
__author__ = 'willis'

import logging.handlers
import logging
import sys
import os
import time
import psutil, json


# ES hosts (comma-separated host:port list)
es_host = '172.22.56.34:9200,172.22.56.31:9200'
# Index name
es_index = 'sampleindex'
# Number of documents per bulk request
es_batch_size = 6000
# Total number of documents to import (cumulative maximum)
es_add_total = 6000000
# Number of ES primary shards
es_pshards_total = 4
# Number of ES replicas
es_rshards_total = 1
# Number of worker processes/threads
es_proc_total = 4
# Index document type
es_doc_type = "type_doc_test"
# ES refresh interval (-1 disables periodic refresh)
es_refresh_time = -1
# Number of data disks per node
es_disk_num = 1
# Source files to import into ES
source_dir = 'input_file'
source_file = 'samplesource.txt'
source_file_count = 100
source_start_id = 0
# JSON-format source template file
source_json = 'package.json'
# Report file, appended to across runs
report_file = 'samplereportup.txt'
# Timestamp of this run, reused as the per-row create time
create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())

def Log_Out():
    '''
    Configure the root logger with a rotating file handler and a console
    handler, both at DEBUG level, and return it.

    The log file is "<script path>.log", rotated at 10 MB with up to 20
    backup files kept.

    :return: the configured root logger.
    '''

    log_file = os.path.realpath(sys.argv[0]) + '.log'

    # Root logger (name defaults to 'root').
    logs = logging.getLogger()
    logs.setLevel(logging.DEBUG)

    # BUGFIX: only attach handlers once — calling Log_Out() repeatedly used
    # to stack duplicate handlers and emit every record multiple times.
    if not logs.handlers:
        # Rolling file handler: 10 MB per file, 20 backups.
        logrota = logging.handlers.RotatingFileHandler(
            log_file, maxBytes=10 * 1024 * 1024, backupCount=20)
        logrota.setLevel(logging.DEBUG)

        # Mirror everything to the console as well.
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)

        # Pipe-separated record format shared by both handlers.
        # (Renamed from `format`, which shadowed the builtin.)
        log_format = ('%(asctime)s|%(name)s|%(levelname)s|'
                      '%(pathname)s|%(lineno)d|%(thread)s|%(process)s|%(message)s')
        formatter = logging.Formatter(log_format)

        logrota.setFormatter(formatter)
        console.setFormatter(formatter)

        logs.addHandler(logrota)
        logs.addHandler(console)

    logs.info('Loging start ...')
    return logs

def TimeCal(start=None, end=None):
    '''
    Simple two-phase stopwatch helper.

    Call with a truthy ``start`` and no ``end`` to obtain a start
    timestamp (``time.time()``); call again with ``start=<timestamp>`` and
    a truthy ``end`` to obtain the elapsed time.

    :param start: truthy flag to begin timing, or the previously returned
        timestamp when stopping.
    :param end: truthy flag to stop timing and compute the delta.
    :return: a float timestamp, a ``(seconds, minutes)`` tuple (each
        rounded to one decimal), or None for any other argument combination.
    '''
    # BUGFIX: compare to None with `is`, not `==` (PEP 8; also avoids
    # surprises with objects that overload __eq__).
    if start and end is None:
        return time.time()
    if start and end:
        sec = time.time() - start
        mint = sec / 60
        return round(sec, 1), round(mint, 1)
    # Any other combination is unsupported and falls through to None.

def CreateFileThread():
    '''
    Generate pipe-delimited sample source files under ``source_dir``.

    ``es_add_total`` rows are split evenly across ``source_file_count``
    files (the division remainder goes into the last file), with
    sequential ids that are unique across all files.

    :return: None (writes files and logs progress).
    '''
    start_time = TimeCal(start=True)
    logs.info("开始生成数据：")

    # Reset the output directory: remove stale files, or create it.
    if os.path.exists(source_dir):
        for f in os.listdir(source_dir):
            logs.info("清理历史数据[{}]".format(os.path.join(source_dir, f)))
            os.remove(os.path.join(source_dir, f))
    else:
        logs.info("创建源数据目录[{}]".format(source_dir))
        os.mkdir(source_dir)

    # Rows per regular file, and rows for the last file (+ remainder).
    input_file_total = es_add_total // source_file_count
    input_file_add = es_add_total % source_file_count
    input_list = [input_file_total, input_file_total + input_file_add]
    logs.info('文件信息：{}'.format(input_list))

    # Fixed template fields for every generated row.
    name = 'index'
    ip = '192.168.1.1'
    port = 0
    dep_path = '/var/bh/dep'
    put_path = '/var/bh/put'
    bak_path = '/var/bh/bak'
    data_create_time = create_time
    data_update_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())

    input_id = 0
    for f in range(1, source_file_count + 1):
        # The last file absorbs the division remainder.
        if f == source_file_count:
            input_max_line = input_list[1]
        else:
            input_max_line = input_list[0]

        logs.debug('当前文件行数：{}'.format(input_max_line))

        file = os.path.join(source_dir, source_file.replace('.txt', str(f) + '.txt'))
        logs.info('生成文件[{}],行数:[{}]'.format(file, input_max_line))
        with open(file, 'w') as f_show:
            # BUGFIX: continue ids from the previous file instead of
            # restarting at 1 in every file — the old range ignored
            # input_id, producing duplicate ids across files (contrast
            # with CreateFileFromJson, which advances the id correctly).
            for line in range(input_id + 1, input_id + int(input_max_line) + 1):
                data_line = [
                    line,
                    name + str(line),
                    ip,
                    port + line,
                    dep_path,
                    put_path,
                    bak_path,
                    data_create_time,
                    data_update_time,
                ]
                f_show.write('|'.join(str(d) for d in data_line))
                f_show.write('\n')
        input_id += int(input_max_line)

    use_sec, use_min = TimeCal(start=start_time, end=True)
    logs.info("生成数据[{}]完成,用时[{}]秒,计[{}]分".format(es_add_total, use_sec, use_min))

def GetSysLoad():
    """Unimplemented stub.

    Currently only re-imports psutil (already imported at module level)
    and returns None.  NOTE(review): presumably intended to collect
    system load metrics — confirm before relying on it.
    """
    import psutil

def Report_Out(msg):
    '''
    Format and emit a run report for one import batch.

    :param msg: result list produced by the import worker.  ``msg[0]`` is
        a status code (0 = success); the remaining positions (timestamp,
        process info, batch number, counts, disk info, replica count,
        timings, file/shards) follow the layout documented in the worker
        that builds the list.
    :return: the formatted report string.
    '''
    logs.info('输出报告 ...')
    logs.info('msg:{}'.format(msg))

    if msg[0] == 0:
        result = '{0}, 进程:[{1}-{2}-{3}], 批次:{4}, 文件:[{5}], 增量:{6}, ES总量:{7}, 批量:{8}, 磁盘:[{9}]{10}, ES副本:{11}, 耗时:{12}s计{13}m'.format(
            msg[1], msg[2], msg[3], msg[4], msg[5], msg[14], msg[6], msg[7],
            msg[8], msg[9], msg[10], msg[11], msg[12], msg[13])
    else:
        result = '{0}, 进程:[{1}-{2}-{3}], 批次状态:{4}-run{5}, 增量:{6}, ES总量:{7}, 批量:{8}, 磁盘:[{9}]{10}, ES副本:{11}, 分片:{12}, 耗时:{13}s计{14}m'.format(
            msg[1], msg[2], msg[3], msg[4], msg[5], msg[6], msg[7], msg[8],
            msg[9], msg[10], msg[11], msg[12], msg[13], msg[14], msg[15])

    # BUGFIX: the success branch built `result` but never printed or
    # returned it, so successful batches produced no report output.
    # Emit (and return) the report for both branches.
    print(result)
    return result

def ReadJson():
    """Load the JSON template file named by ``source_json``.

    :return: the parsed JSON object (typically a dict).
    """
    with open(source_json, 'r') as template_file:
        # json.load reads and parses in one step, equivalent to
        # json.loads(template_file.read()).
        return json.load(template_file)

def CreateFileFromJson():
    '''
    Generate newline-delimited JSON source files under ``source_dir``.

    Each line is the ``package.json`` template (via ReadJson) with its
    ``num`` field overwritten by a sequential id that is unique across
    all generated files.  Existing files are kept (no cleanup).

    :return: None (writes files and logs progress).
    '''
    start_time = TimeCal(start=True)
    logs.info("开始生成数据：")

    # Make sure the output directory exists; existing files are kept.
    if not os.path.exists(source_dir):
        logs.info("创建源数据目录[{}]".format(source_dir))
        os.mkdir(source_dir)

    # region rows per file: even split, remainder goes to the last file
    input_file_total = es_add_total // source_file_count
    input_file_add = es_add_total % source_file_count
    input_list = [input_file_total, input_file_total + input_file_add]
    logs.info('文件信息：{}'.format(input_list))
    # endregion

    # PERF: the JSON template does not change between files, so load it
    # once instead of re-reading package.json on every loop iteration.
    json_data = ReadJson()

    input_id = 0
    for f in range(1, source_file_count + 1):
        # region the last file absorbs the division remainder
        if f == source_file_count:
            input_max_line = input_list[1]
        else:
            input_max_line = input_list[0]
        # endregion

        # region first file starts at id 0; later files continue the sequence
        if f == 1:
            input_id = 0
            max_id = input_list[0]
        else:
            input_id = max_id
            max_id = max_id + int(input_max_line)
        # endregion

        logs.debug('当前文件行数：{}'.format(input_max_line))
        file = os.path.join(source_dir, source_file.replace('.txt', str(f) + '.txt'))
        logs.info('生成文件[{}],行数:[{}]'.format(file, input_max_line))

        with open(file, 'w') as f_show:
            for line in range(input_id + 1, int(input_max_line) + input_id + 1):
                # Stamp the sequential id; all other template fields stay
                # as loaded from package.json.
                json_data['num'] = line
                json.dump(json_data, f_show)
                f_show.write('\n')

    use_sec, use_min = TimeCal(start=start_time, end=True)
    logs.info("生成数据完成,用时[{}]秒,计[{}]分".format(use_sec, use_min))

def GetDiskNum(cfg='/home/willislong/app/elasticsearch-6.2.4/cfg/config-node-1/elasticsearch.yml'):
    '''
    Parse an elasticsearch.yml and return its configured data disks.

    Reads the ``path.data`` entry (last occurrence wins) and returns the
    comma-separated paths, whitespace-stripped and with a single trailing
    slash removed.

    :param cfg: path to the elasticsearch.yml file.  Defaults to the
        original hard-coded node-1 config location, so existing callers
        are unaffected; pass a path to generalize.
    :return: list of disk path strings; empty if ``path.data`` is absent.
    '''
    disk_list_tmp = []
    with open(cfg, 'r') as c_f:
        for line in c_f:
            if line.split(':')[0] == 'path.data':
                # BUGFIX: maxsplit=1 keeps any ':' inside the value intact
                # (the old split(':')[1] silently truncated it).
                disk_list_tmp = line.split(':', 1)[1].split(',')

    disk_list_result = []
    for d in disk_list_tmp:
        entry = d.strip()
        if not entry:
            # BUGFIX: a trailing comma / blank entry used to crash on
            # entry[-1]; skip empties instead.
            continue
        # Normalize away a single trailing slash.
        disk_list_result.append(entry[:-1] if entry.endswith('/') else entry)
    return disk_list_result

def disk_io_dela():
    '''
    Snapshot per-device cumulative write-byte counters for the ES data disks.

    Resolves each mount point reported by GetDiskNum() to its block device
    via psutil.disk_partitions(), then reads write_bytes from
    psutil.disk_io_counters(perdisk=True).  Call twice and subtract to
    measure write throughput (see __main__).

    :return: dict mapping device short name (e.g. 'sda1') -> bytes written.
    '''
    disk_io = psutil.disk_io_counters(perdisk=True)

    partitions = psutil.disk_partitions()
    logs.info(partitions)

    disk_list = GetDiskNum()

    # Map configured data paths to /dev entries; removed the unused
    # disk_list_new copy and the stray debug print()s of the original.
    io_device_list = []
    for device in partitions:
        if device[1] in disk_list:  # device[1] is the mountpoint
            io_device_list.append(device[0].split('/')[-1])

    io_result = {}
    for device_for in io_device_list:
        # Index 3 of the per-disk counters tuple is write_bytes.
        io_result[device_for] = disk_io[device_for][3]

    return io_result

if __name__ == '__main__':
    # Module-level binding: every function above reads `logs` as a global.
    # (The original `global logs` statement was a no-op at module scope.)
    logs = Log_Out()

    # Capacity/usage snapshot for the ES data mount.
    # NOTE(review): '/es1-data1' must exist on this host — TODO parameterize.
    disk_use = psutil.disk_usage('/es1-data1')
    used_mb = disk_use.used / 1024 / 1024
    total_gb = disk_use.total / 1024 / 1024 / 1024
    logs.info("disk msg:{}".format(disk_use))
    logs.info("disk total:{}".format(total_gb))
    logs.info("disk use:{}".format(used_mb))

    # Sample the cumulative write counters one second apart to estimate
    # write throughput per device.
    start = disk_io_dela()
    print("start:{}".format(start))
    time.sleep(1)
    end = disk_io_dela()
    print("end:{}".format(end))

    io_device_delta = {}
    for device_name, write_bytes in start.items():
        delta = end[device_name] - write_bytes
        io_device_delta[device_name] = round(delta / 1024 / 1024, 2)

    for device_name, written_mb in io_device_delta.items():
        print("分区{}写入数据：{}m".format(device_name, written_mb))

    # Manual smoke tests, kept for reference:
    # Report_Out([1, '2019-05-08 18:21:10', 4, 'ForkPoolWorker-1', 27959, 1, 1, 55555, 0, 1555, 1, ['size:2758m,/es1-data1/'], 1, 4, 143.9, 2.4])
    # CreateFileFromJson()

