#!/usr/bin/env python3
#coding:utf-8
#version:20211219

import re
import os
import sys
from influxdb import InfluxDBClient
from datetime import datetime
import threading


influxdb_DB_Host = "localhost"
influxdb_DB_Name = "mydb"
influxdb_DB_User = "test"
influxdb_DB_Passwd = "test"
influxdb_DB_Port = 8086


def add_job_influx_db_metrics(json_body, db_name, user, passwd, host, port):
    """Write a batch of measurement points to InfluxDB.

    Args:
        json_body: list of point dicts ({"measurement", "tags", "time", "fields"}).
        db_name: target InfluxDB database name.
        user, passwd: InfluxDB credentials.
        host, port: InfluxDB server address.
    """
    client = InfluxDBClient(host, port, user, passwd, db_name)
    try:
        # time_precision='s' because point timestamps are second-resolution strings
        response = client.write_points(json_body, time_precision='s')
        # threading.current_thread() replaces the deprecated currentThread()
        if response:
            print("INFO:Threading:%s write_operation response Success."%(threading.current_thread().name))
        else:
            print("ERROR:Threading:%s write_operation response Failed."%(threading.current_thread().name))
    finally:
        # Release the client's HTTP session; the original leaked one per call.
        client.close()

    print(".", end='')



def readVirtualData(file):
    """Parse one VIRTUAL_VOLUMES_PERPETUAL_MONITOR log file into data rows.

    The file must start with a fixed metric-header line.  Subsequent lines
    are either a 'YYYY-MM-DD HH:MM:SS' timestamp or a CSV data row starting
    with '<volume>,VPD83T3:<id>,'.  Each data row is tagged with the most
    recently seen timestamp; rows seen before any timestamp are dropped.

    Args:
        file: path to the log file.

    Returns:
        Sorted list of rows, each
        [timestamp, volume, vpd_id, read_kbs, ops, write_lat_us,
         write_kbs, read_lat_us] — all values kept as strings.

    Exits with status 1 when the header line does not match (or the file
    is empty).
    """
    headPattern = re.compile( r"^Virtual Volume,VPD Id,fe-lu read \(KB\/s\),fe-lu ops \(counts\/s\),fe-lu write-lat recent-average \(us\),fe-lu write \(KB\/s\),fe-lu read-lat recent-average \(us\)")
    datePattern = re.compile( r"^\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}$")
    contentPattern = re.compile(r"^\w+\,VPD83T3:\w+,")

    rows = []          # all parsed data rows for this file
    current_date = ""  # timestamp tagging the rows that follow it

    with open(file, 'r') as f:
        lines = f.readlines()
        # The first line must be the expected metric header; refuse to
        # guess column meanings for any other layout.  The `lines and`
        # guard also handles an empty file (original raised IndexError).
        if lines and headPattern.findall(lines[0]):
            print("INFO:文件对比指标一至....")
        else:
            print("ERROR:文件对比指标不一至....退出程序")
            sys.exit(1)  # was exit(0): a failed check must not exit successfully

        for line in lines:
            found_date = datePattern.findall(line)
            if found_date:
                current_date = found_date[0]
            # Only accept data rows once a timestamp has been seen.
            if current_date and contentPattern.findall(line):
                fields = line.strip().replace('\n', '').replace('\r', '').split(",")
                rows.append([current_date] + fields)
        rows.sort()
    print("INFO:File:%s total device line:%s" % (file, len(rows)))

    return rows


def threading_add(perf_path, file, n, l):
    """Load one monitor log file and push its rows to InfluxDB in batches.

    Runs as a worker-thread target.  Args:
        perf_path: directory containing the log files.
        file: file name within perf_path.
        n: 1-based index of this file (progress reporting only).
        l: total file count (progress reporting only).
    """
    batch_num = 8000  # points per write_points call

    # os.path.join works whether or not perf_path has a trailing slash
    # (the original `perf_path + file` required one).
    total_perf_data = readVirtualData(os.path.join(perf_path, file))
    measurement = 'VIRTUAL_VOLUMES_PERPETUAL_MONITOR'

    # The director name is the file-name prefix before the fixed suffix;
    # compute it once instead of per row.
    director = file.split("_VIRTUAL_VOLUMES_PERPETUAL_MONITOR")[0]

    for start in range(0, len(total_perf_data), batch_num):
        batch_body = []
        for row in total_perf_data[start:start + batch_num]:
            # row layout from readVirtualData:
            # [timestamp, volume, vpd_id, read_kbs, ops, write_lat, write_kbs, read_lat]
            batch_body.append({
                "measurement": measurement,
                "tags": {
                    "Virtual_Volume": row[1],
                    "director": director,
                },
                "time": row[0],  # 'YYYY-MM-DD HH:MM:SS', written with second precision
                "fields": {
                    "fe_lu_read_KB_for_s": float(row[3]),
                    "fe_lu_ops_counts_for_s": float(row[4]),
                    "fe_lu_write_lat_recent_average_us": float(row[5]),
                    "fe_lu_write_KB_for_s": float(row[6]),
                    "fe_lu_read_lat_recent_average_us": float(row[7]),
                },
            })

        add_job_influx_db_metrics(
            batch_body,
            influxdb_DB_Name,
            influxdb_DB_User,
            influxdb_DB_Passwd,
            influxdb_DB_Host,
            influxdb_DB_Port
            )
    print("INFO:Do File :%s....[%s/%s]....commit:%s counts..Done,"%(file,n,l,len(total_perf_data)))
     



def main():
    """Scan the directory given as argv[1] and load every
    *VIRTUAL_VOLUMES_PERPETUAL_MONITOR.log file in its own worker thread,
    then wait for all of them to finish.
    """
    if len(sys.argv) == 2:
        perf_path = sys.argv[1]
    else:
        print("ERROR:Please input VIRTUAL_VOLUMES_PERPETUAL_MONITOR file path ")
        sys.exit(1)  # was sys.exit(0): a missing argument is an error

    # List the directory once; the original re-listed it twice per file.
    entries = os.listdir(perf_path)
    total = len(entries)

    threads = []
    n = 0
    for file in entries:
        # os.path.join works with or without a trailing slash on perf_path.
        full_path = os.path.join(perf_path, file)
        if os.path.isfile(full_path):
            if "VIRTUAL_VOLUMES_PERPETUAL_MONITOR.log" in file:
                n = n + 1
                print("INFO:Do File :%s....[%s/%s]"%(file,n,total))
                worker = threading.Thread(target=threading_add, args=(perf_path, file, n, total))
                threads.append(worker)
                worker.start()
        else:
            print("ERROR:File %s isn't exist..." % full_path)

    # Block until every loader thread has committed its data.
    for t in threads:
        t.join()
    print("INFO:所有任务完成")




if __name__ == '__main__':
    # Log wall-clock start/end so the total load duration is visible.
    print("INFO:Begin-%s"%(datetime.now().strftime('%Y-%m-%d %H:%M:%S')))  # fixed 'Beging' typo
    main()
    print("INFO:End-%s"%(datetime.now().strftime('%Y-%m-%d %H:%M:%S')))


