#!/usr/bin/env python
# -*- coding:utf-8 -*-

__author__ = 'shouke'

import configparser
import threading
import time
import  re

from influxdb import InfluxDBClient
from collections import deque
from log import logger

from myadb import MyAdb



class Mornitor:
    """Android device/app performance monitor.

    Reads the monitoring configuration files, resolves the devices and
    application packages to watch, connects to InfluxDB (creating one
    database per monitored device, named ``db_<serial>``) and exposes
    ``run()`` to sample metrics over adb and store the parsed results.
    """

    # Shared FIFO between sampler threads (producers) and parser threads
    # (consumers); deque.append/popleft are atomic, so no extra locking.
    data_queue = deque()

    def __init__(self, interval=1, count=1):
        """interval: seconds between two samples; count: samples per target."""
        try:
            self.adb = MyAdb()

            self.interval = interval
            self.count = count

            self.cpu_data_lasttime = {}       # previous system CPU snapshot, per device
            self.proc_cpu_data_lasttime = {}  # previous per-process CPU snapshot, per device+package
            self.net_data_lasttime = {}       # previous system network-traffic snapshot, per device
            self.proc_net_data_lasttime = {}  # previous per-process network-traffic snapshot

            logger.info('正在读取监控范围配置')
            config = configparser.ConfigParser()
            config.read('./conf/monitor_scope.conf', encoding='utf-8-sig')
            device_choice = config['DEVICE']['choice']
            # Read for validation only (missing key must still abort init);
            # the value itself is currently unused. Was the misspelled and
            # unused local `package_choide`.
            _package_choice = config['PACKAGE']['choice']

            logger.info('正在读取待监控设备和应用包配置')
            config.clear()
            config.read('./conf/device_with_package.conf', encoding='utf-8-sig')
            self.device_list = [item[0] for item in self.adb.get_devices()]
            if not self.device_list:
                logger.warn('获取设备列表失败，退出程序')
                exit()
            if device_choice == '1':
                logger.info('监控所有设备')
            else:
                # Only monitor the devices named in the config, and require
                # every one of them to be connected.
                logger.info('监控指定设备')
                temp_list = []
                configured_devices = [config['DEVICE'][item] for item in config.options('DEVICE')]
                for item in configured_devices:
                    if item in self.device_list:
                        temp_list.append(item)
                    else:
                        logger.warn('待监控设备(序列号：%s)未连接，退出程序' % item)
                        exit()
                self.device_list = temp_list
            logger.info('待监控设备列表有：%s' % self.device_list)

            # Map device serial -> list of packages to monitor on it.
            self.package_dict = {}
            sections = config.sections()
            for device in self.device_list:
                if device in sections:
                    self.package_dict[device] = [config[device][item] for item in config.options(device)]
            logger.info('待监控应用包有：%s' % self.package_dict)

            logger.info('正在读取采样类型配置')
            config.clear()
            config.read('./conf/sample_type.conf', encoding='utf-8-sig')
            # Comma-separated values; tolerate full-width '，' and a trailing comma.
            self.sample_type = [item.strip().lower() for item in config['SAMPLETYPE']['sample_type'].rstrip(',').replace('，', ',').split(',')]
            # Renamed from `filter`, which shadowed the builtin.
            filter_list = [item.strip().lower() for item in config['SAMPLETYPE']['filter'].rstrip(',').replace('，', ',').split(',')]
            self.sample_type = set(self.sample_type) - set(filter_list)
            logger.info('需要采样类型有：%s' % self.sample_type)

            self.target_list = []  # monitoring targets, one dict per (type, target)
            # Device-level sample types: everything except the per-package ones.
            temp_set = self.sample_type - set(['proc_cpu', 'proc_mem', 'proc_net', 'gfx'])
            self.target_list = self.target_list + [{sample_type: device} for sample_type in temp_set for device in self.device_list]

            # Package-level sample types: one target per (device, package) pair.
            temp_set = self.sample_type & set(['proc_cpu', 'proc_mem', 'proc_net', 'gfx'])
            self.target_list = self.target_list + [{sample_type: [device, package]} for sample_type in temp_set for device in self.package_dict for package in self.package_dict[device]]
            logger.info('待监控对象有：%s' % self.target_list)

            logger.info('正在读取influxdb数据库初始化配置')
            config = configparser.ConfigParser()
            config.read('./conf/influxDB.conf', encoding='utf-8')
            influxdb_host = config['INFLUXDB']['influxdb_host']
            influxdb_port = int(config['INFLUXDB']['influxdb_port'])

            self.influx_db_client = InfluxDBClient(influxdb_host, influxdb_port, timeout=10)
            self.database_list = [dic_item['name'] for dic_item in self.influx_db_client.get_list_database()]
            logger.info('获取到的初始数据库列表：%s' % self.database_list)

            # Ensure one database per monitored device: db_<serial>.
            for device in self.device_list:
                db = 'db_%s' % device
                if db not in self.database_list:
                    logger.info('influxdb数据库 %s 不存在，正在创建数据库' % db)
                    self.influx_db_client.create_database(db)
        except Exception as e:
            logger.error('mornitor初始化失败：%s' % e)
            exit()

    def _loop_samples(self, count, interval, sample_once):
        """Run sample_once() `count` times, pacing iterations to `interval` seconds.

        sample_once must return False when the sample failed — that iteration
        then ends immediately without the pacing sleep, mirroring the original
        `continue`-on-failure behaviour. Any other return value (including
        None) counts as success.
        """
        for _ in range(count):
            started_at = time.mktime(time.localtime())  # wall clock, seconds
            if sample_once() is False:
                continue  # failed sample: retry at once, no pacing sleep
            elapsed = time.mktime(time.localtime()) - started_at
            if elapsed < interval:
                time.sleep(interval - elapsed)

    def sample_data(self, target, interval, count):
        """Collect `count` samples for one monitoring target and queue them.

        target   -- single-entry dict mapping a sample type to either a device
                    serial number ('cpu', 'mem', 'battery', 'net') or a
                    [serial_number, package] pair ('proc_cpu', 'proc_mem',
                    'proc_net', 'gfx').
        interval -- seconds between two consecutive samples.
        count    -- number of samples to take.

        Each successful sample is appended to the shared class-level
        data_queue for parse_data() to consume. Delta-based types (cpu,
        proc_cpu, net, proc_net) queue nothing on the first sample — it only
        establishes a baseline snapshot — so they yield count-1 records.

        The original repeated the timing/sleep scaffolding in all eight
        branches; it is now factored into _loop_samples. For the proc_*
        branches the pid lookup now falls inside the paced window (previously
        it ran before the timer), so pacing covers the whole iteration.
        """
        if 'cpu' in target:
            serial_number = target['cpu']

            def sample_once():
                result = self.adb.get_cpu_data(serial_number)
                if not result[0]:
                    return False
                if serial_number in self.cpu_data_lasttime:
                    # We already have a baseline: queue the (previous, current) pair.
                    self.data_queue.append({
                        'db': 'db_%s' % serial_number,
                        'sample_type': 'cpu',
                        'table': 'cpu',
                        'sample_data': [self.cpu_data_lasttime[serial_number], result[1]],
                    })
                # First sample only establishes the baseline.
                self.cpu_data_lasttime[serial_number] = result[1]

            self._loop_samples(count, interval, sample_once)
        elif 'proc_cpu' in target:
            serial_number, package = target['proc_cpu']
            key = '%s_%s' % (serial_number, package)

            # Logical CPU count is sampled once, up front.
            result = self.adb.get_cpu_processor_num(serial_number)
            if result[0]:
                cpu_processor_num = int(result[1])
            else:
                logger.error('采集设备%s 系统逻辑CPU数失败' % serial_number)
                cpu_processor_num = 1  # fall back to single-core accounting

            def sample_once():
                result = self.adb.get_package_pid(serial_number, package)
                if not result[0]:
                    logger.error('采集设备%s 应用包%s进程cpu快照信息失败：%s' % (serial_number, package, '获取进程id失败'))
                    return False
                pid = result[1]
                result = self.adb.get_proc_cpu_data(serial_number, pid, package)
                if not result[0]:
                    return False
                if key in self.proc_cpu_data_lasttime:
                    self.data_queue.append({
                        'db': 'db_%s' % serial_number,
                        'table': 'proc_cpu',
                        'package': package,
                        'sample_type': 'proc_cpu',
                        'sample_data': [self.proc_cpu_data_lasttime[key], result[1], cpu_processor_num],
                    })
                self.proc_cpu_data_lasttime[key] = result[1]

            self._loop_samples(count, interval, sample_once)
        elif 'mem' in target:
            serial_number = target['mem']

            def sample_once():
                result = self.adb.get_mem_data(serial_number)
                if not result[0]:
                    return False
                self.data_queue.append({
                    'db': 'db_%s' % serial_number,
                    'table': 'mem',
                    'sample_type': 'mem',
                    'sample_data': result[1],
                })

            self._loop_samples(count, interval, sample_once)
        elif 'proc_mem' in target:
            serial_number, package = target['proc_mem']
            # Total device memory is captured once; without it there is no
            # denominator for the usage percentage, so give up entirely.
            result = self.adb.get_mem_data(serial_number)
            if result[0]:
                # First number of the second output section — presumably the
                # MemTotal figure; confirm against MyAdb.get_mem_data's format.
                mem_info_list = result[1]['data'].strip().split('\n\n')
                mem_total = int(re.findall('\d+', mem_info_list[1])[0])
            else:
                logger.error('获取内存信息失败，结束采集软件包(%s)进程占用内存信息' % package)
                return

            def sample_once():
                result = self.adb.get_package_pid(serial_number, package)
                if not result[0]:
                    return False
                pid = result[1]
                result = self.adb.get_proc_mem_data(serial_number, pid, package)
                if not result[0]:
                    return False
                self.data_queue.append({
                    'db': 'db_%s' % serial_number,
                    'table': 'proc_mem',
                    'package': package,
                    'sample_type': 'proc_mem',
                    'sample_data': [mem_total, result[1]],
                })

            self._loop_samples(count, interval, sample_once)
        elif 'battery' in target:
            serial_number = target['battery']

            def sample_once():
                result = self.adb.get_battery_data(serial_number)
                if not result[0]:
                    return False
                self.data_queue.append({
                    'db': 'db_%s' % serial_number,
                    'table': 'battery',
                    'sample_type': 'battery',
                    'sample_data': result[1],
                })

            self._loop_samples(count, interval, sample_once)
        elif 'net' in target:
            serial_number = target['net']

            def sample_once():
                result = self.adb.get_net_dev_flow_data(serial_number)
                if not result[0]:
                    return False
                if serial_number in self.net_data_lasttime:
                    self.data_queue.append({
                        'db': 'db_%s' % serial_number,
                        'table': 'net',
                        'sample_type': 'net',
                        'sample_data': [self.net_data_lasttime[serial_number], result[1]],
                    })
                # First sample only establishes the baseline.
                self.net_data_lasttime[serial_number] = result[1]

            self._loop_samples(count, interval, sample_once)
        elif 'proc_net' in target:
            serial_number, package = target['proc_net']
            key = '%s_%s' % (serial_number, package)

            def sample_once():
                result = self.adb.get_package_pid(serial_number, package)
                if not result[0]:
                    # NOTE(review): message says 'cpu快照' — copy-pasted from the
                    # proc_cpu branch; kept byte-identical to preserve log output.
                    logger.error('采集设备%s 应用包%s进程cpu快照信息失败：%s' % (serial_number, package, '获取进程id失败'))
                    return False
                pid = result[1]
                result = self.adb.get_proc_net_dev_flow_data(serial_number, pid, package)
                if not result[0]:
                    return False
                if key in self.proc_net_data_lasttime:
                    self.data_queue.append({
                        'db': 'db_%s' % serial_number,
                        'table': 'proc_net',
                        'package': package,
                        'sample_type': 'proc_net',
                        'sample_data': [self.proc_net_data_lasttime[key], result[1]],
                    })
                self.proc_net_data_lasttime[key] = result[1]

            self._loop_samples(count, interval, sample_once)
        elif 'gfx' in target:
            serial_number, package = target['gfx']

            def sample_once():
                result = self.adb.get_gfx_data(serial_number, package)
                if not result[0]:
                    return False
                self.data_queue.append({
                    'db': 'db_%s' % serial_number,
                    'table': 'gfx',
                    'package': package,
                    'sample_type': 'gfx',
                    'sample_data': result[1],
                })

            self._loop_samples(count, interval, sample_once)

    def parse_data(self, running_batch_no):
        """Drain data_queue, converting each queued sample into InfluxDB
        point(s) and writing them to the per-device database named in the item.

        running_batch_no -- tag value identifying the current monitoring run;
        attached to every point as tag 'batchNo'.

        Several parser threads may run this concurrently; deque.popleft() is
        atomic, so each queued item is handled by exactly one thread. The loop
        ends as soon as the queue is observed empty.
        """
        # Field names for the /proc/stat CPU counter columns, in order.
        cpu_headers_list = ['user', 'nice', 'system', 'idle', 'iowait', 'irq', 'softirq', 'stealstolen', 'guest', 'guest_nice']
        proc_cpu_headers_list = ['user', 'system']
        while self.data_queue:
            item = self.data_queue.popleft()
            # NOTE(review): `item` is reused as a loop variable in several
            # branches below, so item['...'] must be read before those loops run.
            database = item['db']
            table = item['table']
            sample_type = item['sample_type']
            sample_data = item['sample_data']

            if sample_type == 'cpu':
                # Previous snapshot's raw counters.
                cpu_data_lasttime = sample_data[0]
                data_lasttime_list = re.findall('\d+', cpu_data_lasttime['data'])
                data_lasttime_list = [int(item) for item in data_lasttime_list]

                # Current snapshot's raw counters.
                cpu_data_thistime = sample_data[1]
                data_thistime_list = re.findall('\d+', cpu_data_thistime['data'])
                data_thistime_list = [int(item) for item in data_thistime_list]

                total_lasttime = 0 # sum of all CPU counters at the previous sample
                for item in data_lasttime_list:
                    total_lasttime += item

                total_thistime = 0 # sum of all CPU counters at the current sample
                for item in data_thistime_list:
                    total_thistime += item

                data_list = [data_lasttime_list, data_thistime_list]
                index = 0
                fields_dict = {} # field name -> percentage over the interval
                total = total_thistime - total_lasttime
                for item in zip(*data_list):
                    # NOTE(review): the `- 1` stops one column early, so the last
                    # header ('guest_nice') never becomes a field — presumably
                    # deliberate, but worth confirming.
                    if index < len(cpu_headers_list) - 1:
                        fields_dict[cpu_headers_list[index]] = int((item[1] - item[0]) / total * 100)
                        index += 1

                # Overall usage = non-idle share of total time; column 3 is 'idle'.
                idle = data_thistime_list[3] - data_lasttime_list[3]
                fields_dict['cpu_usage'] = int((total - idle) / total * 100)
                sample_time = time.strptime(cpu_data_thistime['sample_time'], '%Y-%m-%d %H:%M:%S')
                json_records_list = [{
                    "measurement": table,
                    "tags": {
                        "batchNo": running_batch_no
                    },
                    "fields":fields_dict
                }]

            elif sample_type == 'proc_cpu':
                package = item['package']
                # Previous snapshot: section 0 is the /proc/<pid>/stat line,
                # section 1 the aggregate 'cpu' line from /proc/stat.
                cpu_data_lasttime = sample_data[0]
                data_lasttime_list = cpu_data_lasttime['data'].strip().split('\n\n')
                # Columns [13:17] — presumably utime/stime/cutime/cstime; confirm
                # against the adb output format.
                proc_cpu_data_lasttime_list = data_lasttime_list[0].strip().split(' ')[13:17] # process CPU time columns
                cpu_data_lasttime_list = data_lasttime_list[1].strip().lstrip('cpu').strip().split(' ') # whole-system CPU counters

                # Current snapshot (same layout); sample_data[2] carries the
                # logical CPU count captured by the sampler.
                cpu_processor_num = sample_data[2]
                cpu_data_thistime = sample_data[1]
                data_thistime_list = cpu_data_thistime['data'].strip().split('\n\n')
                proc_cpu_data_thistime_list = data_thistime_list[0].strip().split(' ')[13:17]
                cpu_data_thistime_list = data_thistime_list[1].strip().lstrip('cpu').strip().split(' ')

                total_lasttime = 0 # previous whole-system CPU time total
                for item in cpu_data_lasttime_list:
                    total_lasttime += int(item)

                total_thistime = 0 # current whole-system CPU time total
                for item in cpu_data_thistime_list:
                    total_thistime += int(item)

                proc_total_lasttime = 0 # previous process CPU time total
                for item in proc_cpu_data_lasttime_list:
                    proc_total_lasttime += int(item)

                proc_total_thistime = 0 # current process CPU time total
                for item in proc_cpu_data_thistime_list:
                    proc_total_thistime += int(item)

                # Only the first two columns become 'user'/'system' fields.
                data_list = [proc_cpu_data_lasttime_list[0:2], proc_cpu_data_thistime_list[0:2]]

                index = 0
                fields_dict = {} # field name -> percentage over the interval
                total = total_thistime - total_lasttime
                for item in zip(*data_list):
                    fields_dict[proc_cpu_headers_list[index]] = int((int(item[1]) - int(item[0])) / total * 100)
                    index += 1

                # Process usage scaled by the logical CPU count.
                fields_dict['cpu_usage'] = int((proc_total_thistime - proc_total_lasttime) / total * 100 * cpu_processor_num)
                fields_dict['processor_num'] = cpu_processor_num

                sample_time = time.strptime(cpu_data_thistime['sample_time'], '%Y-%m-%d %H:%M:%S')

                json_records_list = [{
                    "measurement": table,
                    "tags": {
                        "batchNo": running_batch_no,
                        "package": package
                    },
                    "fields":fields_dict
                }]
            elif sample_type == 'mem':
                # Output sections: 0 = header line, 1 = system memory numbers,
                # 2 = app memory numbers, 3 = swap numbers — TODO confirm
                # against MyAdb.get_mem_data's actual output format.
                mem_info_list = sample_data['data'].strip().split('\n\n')
                mem_headers_list = re.findall('\w+', mem_info_list[0])

                mem_data_list = re.findall('\d+', mem_info_list[1])
                mem_data_list = [int(item) for item in mem_data_list]

                app_mem_data_list =  re.findall('\d+', mem_info_list[2])
                app_mem_data_list = [int(item) for item in app_mem_data_list]

                swap_data_list = re.findall('\d+', mem_info_list[3])
                swap_data_list = [int(item) for item in swap_data_list]


                mem_usage = int(mem_data_list[1] / mem_data_list[0] * 100)
                app_mem_usage = int(app_mem_data_list[0] / mem_data_list[0] * 100)
                if swap_data_list[0]:
                    swap_usage = int(swap_data_list[1]/swap_data_list[0] * 100)
                else:
                    swap_usage = 0 # no swap configured: avoid division by zero

                fields_dict = {} # field name -> value
                for item in zip(*[mem_headers_list, mem_data_list]):
                    fields_dict[item[0]] = item[1]

                # App memory columns reuse the system headers, offset by one.
                for item in zip(*[mem_headers_list[1:len(app_mem_data_list) + 1], app_mem_data_list]):
                    fields_dict['app_%s' % item[0]] = item[1]

                for item in zip(*[mem_headers_list[0:len(swap_data_list)], swap_data_list]):
                    fields_dict['swap_%s' % item[0]] = item[1]

                fields_dict['mem_usage'] = mem_usage
                fields_dict['app_mem_usage'] = app_mem_usage
                fields_dict['swap_usage'] = swap_usage

                sample_time = time.strptime(sample_data['sample_time'], '%Y-%m-%d %H:%M:%S')

                json_records_list = [{
                    "measurement": table,
                    "tags": {
                        "batchNo": running_batch_no
                    },
                    "fields":fields_dict
                }]
            elif sample_type == 'proc_mem':
                package = item['package']
                mem_total = sample_data[0]  # total device memory, captured once by the sampler
                proc_mem_info = sample_data[1]['data']
                # Parse '<Key>:   <value>' pairs (a /proc/<pid>/status-like dump).
                proc_mem_data_list = re.findall('(\w+):\s+(\d+)', proc_mem_info)

                fields_dict = {} # field name -> value
                for item in proc_mem_data_list:
                    fields_dict[item[0]] = int(item[1])

                # Resident set size as a share of total device memory.
                fields_dict['proc_mem_usage'] = int(fields_dict['VmRSS'] / mem_total * 100)

                sample_time = time.strptime(sample_data[1]['sample_time'], '%Y-%m-%d %H:%M:%S')

                json_records_list = [{
                    "measurement": table,
                    "tags": {
                        "batchNo": running_batch_no,
                        "package": package
                    },
                    "fields":fields_dict
                }]
            elif sample_type == 'battery':
                # NOTE(review): variable names say 'proc_mem' but this parses
                # battery 'key: value' output — copy-paste naming, logic is fine.
                proc_mem_info = sample_data['data']
                proc_mem_data_list = re.findall('(\w+):\s+(\d+)', proc_mem_info)

                fields_dict = {} # field name -> value
                for item in proc_mem_data_list:
                    fields_dict[item[0]] = int(item[1])

                sample_time = time.strptime(sample_data['sample_time'], '%Y-%m-%d %H:%M:%S')

                json_records_list = [{
                    "measurement": table,
                    "tags": {
                        "batchNo": running_batch_no
                    },
                    "fields":fields_dict
                }]
            elif sample_type == 'net':
                # Previous snapshot.
                net_dev_flow_data_lasttime = sample_data[0]
                # Current snapshot.
                net_dev_flow_data_thistime = sample_data[1]

                # Timestamp of the previous snapshot (epoch seconds).
                sample_time = time.strptime(net_dev_flow_data_lasttime['sample_time'], '%Y-%m-%d %H:%M:%S')
                sample_time_lasttime = time.mktime(sample_time)

                # Timestamp of the current snapshot (epoch seconds).
                sample_time = time.strptime(net_dev_flow_data_thistime['sample_time'], '%Y-%m-%d %H:%M:%S')
                sample_time_thistime = time.mktime(sample_time)

                # Per-second rates need a non-zero interval between snapshots.
                time_difference = sample_time_thistime - sample_time_lasttime
                if time_difference == 0:
                    logger.warn('数据采集时间差太小，无法计算每秒网络数据传输信息,结束本次网络数据解析')
                    continue

                data_lasttime_list = net_dev_flow_data_lasttime['data'].strip().split('\n\n')[1:]

                # First remaining line looks like the /proc/net/dev header:
                # 'Inter-| Receive ... | Transmit ...'.
                net_dev, rx_headers, tx_headers = data_lasttime_list[0].split('|')
                rx_headers_list = re.findall('\w+', rx_headers)
                rx_headers_list = ['rx_%s' % item for item in rx_headers_list]
                tx_headers_list = re.findall('\w+', tx_headers)
                tx_headers_list = ['tx_%s' % item for item in tx_headers_list]
                headers_list = rx_headers_list + tx_headers_list

                # interface name -> counter list at the previous snapshot
                data_lasttime_dict = {}
                for item in data_lasttime_list[1:]:
                    net_dev =  re.findall('([^\s]+):', item.strip())[0]
                    net_data_list = re.findall('\s+(\d+)', item)
                    net_data_list = [int(item) for item in net_data_list]
                    data_lasttime_dict[net_dev] = net_data_list

                data_thistime_list = net_dev_flow_data_thistime['data'].strip().split('\n\n')[1:]
                data_thistime_dict = {}
                for item in data_thistime_list[1:]:
                    # NOTE(review): .strip() placement differs from the
                    # last-time loop above ('\s+(\d+)' needs whitespace before
                    # the first number) — confirm both variants capture the
                    # same columns for every interface line.
                    net_dev =  re.findall('([^\s]+):', item)[0]
                    net_data_list = re.findall('\s+(\d+)', item.strip())
                    net_data_list = [int(item) for item in net_data_list]
                    data_thistime_dict[net_dev] = net_data_list

                # Field names become 'rx_bytes/s', 'tx_packets/s', ...
                headers_list = ['%s/s' % item for item in headers_list]

                json_records_list = [] # one InfluxDB point per interface
                for net_dev, net_data_list in data_thistime_dict.items():
                    temp_data_list = []
                    fields_dict = {} # field name -> per-second rate
                    for item in zip(*[net_data_list, data_lasttime_dict[net_dev]]):
                        temp_data_list.append(int((item[0] - item[1]) / time_difference))
                    for item in zip(*[headers_list, temp_data_list]):
                        fields_dict[item[0]] = item[1]
                    json_record = {
                        "measurement": table,
                        "tags": {
                            "batchNo": running_batch_no,
                            "interface": net_dev
                        },
                        "fields":fields_dict
                    }
                    json_records_list.append(json_record)
            elif sample_type == 'proc_net':
                # Same shape as the 'net' branch, but per package (extra tag).
                package = item['package']
                # Previous snapshot.
                net_dev_flow_data_lasttime = sample_data[0]
                # Current snapshot.
                net_dev_flow_data_thistime = sample_data[1]

                # Timestamp of the previous snapshot (epoch seconds).
                sample_time = time.strptime(net_dev_flow_data_lasttime['sample_time'], '%Y-%m-%d %H:%M:%S')
                sample_time_lasttime = time.mktime(sample_time)

                # Timestamp of the current snapshot (epoch seconds).
                sample_time = time.strptime(net_dev_flow_data_thistime['sample_time'], '%Y-%m-%d %H:%M:%S')
                sample_time_thistime = time.mktime(sample_time)

                time_difference = sample_time_thistime - sample_time_lasttime
                if time_difference == 0:
                    logger.warn('数据采集时间差太小，无法计算每秒网络数据传输信息,结束本次网络数据解析')
                    continue

                data_lasttime_list = net_dev_flow_data_lasttime['data'].strip().split('\n\n')[1:]

                # Header line: interface column plus Receive/Transmit groups.
                net_dev, rx_headers, tx_headers = data_lasttime_list[0].split('|')
                rx_headers_list = re.findall('\w+', rx_headers)
                rx_headers_list = ['rx_%s' % item for item in rx_headers_list]
                tx_headers_list = re.findall('\w+', tx_headers)
                tx_headers_list = ['tx_%s' % item for item in tx_headers_list]
                headers_list = rx_headers_list + tx_headers_list

                # interface name -> counter list at the previous snapshot
                data_lasttime_dict = {}
                for item in data_lasttime_list[1:]:
                    net_dev =  re.findall('([^\s]+):', item)[0]
                    net_data_list = re.findall('\s+(\d+)', item.strip())
                    net_data_list = [int(item) for item in net_data_list]
                    data_lasttime_dict[net_dev] = net_data_list

                data_thistime_list = net_dev_flow_data_thistime['data'].strip().split('\n\n')[1:]

                # interface name -> counter list at the current snapshot
                data_thistime_dict = {}
                for item in data_thistime_list[1:]:
                    net_dev =  re.findall('([^\s]+):', item)[0]
                    net_data_list = re.findall('\s+(\d+)', item.strip())
                    net_data_list = [int(item) for item in net_data_list]
                    data_thistime_dict[net_dev] = net_data_list

                # Field names become 'rx_bytes/s', 'tx_packets/s', ...
                headers_list = ['%s/s' % item for item in headers_list]

                json_records_list = [] # one InfluxDB point per interface
                for net_dev, net_data_list in data_thistime_dict.items():
                    temp_data_list = []
                    fields_dict = {} # field name -> per-second rate
                    for item in zip(*[net_data_list, data_lasttime_dict[net_dev]]):
                        temp_data_list.append(int((item[0] - item[1]) / time_difference))
                    for item in zip(*[headers_list, temp_data_list]):
                        fields_dict[item[0]] = item[1]

                    json_record = {
                        "measurement": table,
                        "tags": {
                            "batchNo": running_batch_no,
                            "package": package,
                            "interface": net_dev
                        },
                        "fields":fields_dict
                    }
                    json_records_list.append(json_record)
            elif sample_type == 'gfx':
                package = item['package']
                render_time = 0  # cumulative frame render time (ms)
                jank_count = 0   # dropped-frame count
                vsync_overtime = 0  # missed vsync windows
                fps = 0 # frames per second
                gfxinfo = sample_data['data']
                # Extract the per-frame profile table from 'dumpsys gfxinfo'-style
                # output, e.g. '...\tDraw\tPrepare\tProcess\tExecute\n\n\t125.29\t0.23\t22.77\t87.98\n\n\t...'
                gfxinfo = re.findall('Profile data in ms:(.+)View hierarchy', gfxinfo, re.DOTALL)
                if gfxinfo:
                    gfxinfo =  gfxinfo[0].strip()
                    gfxinfo = re.findall('\n\n\t(.+)', gfxinfo, re.DOTALL) # header row + one row of timings per frame
                    if gfxinfo:
                        gfxinfo = gfxinfo[0].strip()
                        gfxinfo = gfxinfo.split('\n\n\t')
                        if gfxinfo:
                            gfxinfo_headers = gfxinfo[0].split('\t') + ['jank', 'fps']  # NOTE(review): built but never used

                            frame_count = len(gfxinfo[1:])
                            for item in gfxinfo[1:]:
                                # NOTE(review): frame_count was already set to the
                                # number of frames above; this increment doubles it.
                                frame_count += 1
                                value_list = item.split('\t')
                                value_list = [float(item) for item in value_list]
                                for value in value_list:
                                    # NOTE(review): render_time is never reset per
                                    # frame, so it accumulates across all frames —
                                    # the jank/vsync figures below look inflated.
                                    render_time += value

                                # A frame over 16.67 ms misses at least one vsync (60 Hz).
                                if render_time > 16.67:
                                    jank_count += 1
                                    if render_time % 16.67 == 0:
                                        vsync_overtime += int(render_time / 16.67) - 1
                                    else:
                                        vsync_overtime += int(render_time / 16.67)
                            temp = frame_count + vsync_overtime
                            if temp != 0:
                                fps = int(frame_count * 60 / (frame_count + vsync_overtime))
                            else:
                                fps = 0
                else:
                    continue
                # NOTE(review): 'junk' is presumably a typo for 'jank'; renaming
                # it would change the stored field name, so it is kept as-is.
                fields_dict = {'fps':fps, 'junk':jank_count}
                sample_time = time.strptime(sample_data['sample_time'], '%Y-%m-%d %H:%M:%S')
                json_records_list = [{
                    "measurement": table,
                    "tags": {
                        "batchNo": running_batch_no,
                        "package": package
                    },
                    "fields":fields_dict
                }]
            else:
                continue

            # Convert the local sample timestamp to an RFC3339 'Z' string for
            # InfluxDB. NOTE(review): the hard-coded 8*3600 assumes the host
            # clock is UTC+8 (CST); in any other timezone the stored timestamp
            # will be wrong — confirm deployment assumptions.
            second_for_sampletime_utc = int(time.mktime(sample_time)) - 8 * 3600
            timetuple = time.localtime(second_for_sampletime_utc)
            date_for_data = time.strftime('%Y-%m-%d', timetuple)
            time_for_data = time.strftime('%H:%M:%S', timetuple)
            datetime_for_data = '%sT%sZ' % (date_for_data, time_for_data)
            for json_record in json_records_list:
                json_record['time'] = datetime_for_data

            if json_records_list:
                result = self.influx_db_client.write_points(json_records_list, database=database)
                if not result:
                    logger.error('采集性能数据失败-往InfluxDB：%s 写入数据失败' % database)
                else:
                    logger.info('往数据库:%s 成功写入数据' % database)
            else:
                logger.error('获取influxdb json数据为空')

    def run(self):
        """Run one monitoring pass.

        Spawns one sampler thread per target, keeps roughly one parser thread
        per live sampler while sampling is in progress, drains the remaining
        queue afterwards, and finally closes the InfluxDB connection.
        """
        TIME_CONSTANT = 99999999999999  # used to build a descending batch number
        timetuple = time.localtime()
        second_for_localtime = int(time.mktime(timetuple))
        sample_time = time.strftime('%Y-%m-%d %H:%M:%S', timetuple)
        # Newer runs get numerically smaller prefixes, so batch numbers sort
        # latest-first lexicographically.
        running_batch_no = '%s-%s' % (str(TIME_CONSTANT - second_for_localtime), sample_time)

        # One sampler thread per monitoring target.
        sampler_threads = []
        for target in self.target_list:
            thread = threading.Thread(target=self.sample_data, args=(target, self.interval, self.count))
            thread.start()
            sampler_threads.append(thread)

        parser_threads = []  # live parse_data threads
        # While samplers are alive, top the parser pool up to one parser per
        # live sampler (parse_data returns as soon as the queue is empty).
        while True:
            live_samplers = [t for t in sampler_threads if t.is_alive()]
            if not live_samplers:
                break
            parser_threads = [t for t in parser_threads if t.is_alive()]
            for _ in range(len(live_samplers) - len(parser_threads)):
                thread = threading.Thread(target=self.parse_data, args=(running_batch_no,))
                thread.start()
                parser_threads.append(thread)
            # The original busy-waited here, respawning parser threads as fast
            # as parse_data could return; a short pause removes the CPU churn.
            time.sleep(0.1)

        # Samplers finished: keep spawning parsers until the queue is drained.
        while self.data_queue:
            parser_threads = [t for t in parser_threads if t.is_alive()]
            for _ in range(len(sampler_threads) - len(parser_threads)):
                thread = threading.Thread(target=self.parse_data, args=(running_batch_no,))
                thread.start()
                parser_threads.append(thread)
            time.sleep(0.1)
        self.influx_db_client.close()



