# -*- coding: utf-8 -*-
import requests
import datetime
import json
import time
import multiprocessing
import MySQLdb
import urllib2
import ssl
import gevent
import redis
from gevent import monkey

from elasticsearch import Elasticsearch, helpers

# Disable TLS certificate verification process-wide -- presumably because
# the CMDB API uses a self-signed certificate (TODO confirm); note this
# weakens security for EVERY https request made by this process.
ssl._create_default_https_context = ssl._create_unverified_context

# Connect to local Redis (used as a cache for the CMDB API bearer token).
r = redis.Redis(host='127.0.0.1', port=6379, decode_responses=True)

# Elasticsearch client used by post_data(); sniffing keeps the node list fresh.
es = Elasticsearch(['10.201.241.41', '10.201.241.42', '10.201.241.43'], sniff_on_start=True, sniff_on_connection_fail=True, max_retries=3, retry_on_timeout=True)


# Client for the CMDB API ("api3"): caches the OAuth bearer token in Redis
# and posts authenticated JSON requests.
class ReauestApi3(object):

    def __init__(self):
        # Production base URL.
        self.url = 'https://cmdbapi.bl.com'
        # Development base URL.
        #self.url = 'http://127.0.0.1:7008'

    def uptoken(self):
        """Fetch a fresh OAuth token, cache it in Redis for 30 minutes and
        return it formatted as an Authorization header value."""
        url = '%s/auth/token/' % self.url
        # SECURITY NOTE(review): credentials are hard-coded in source;
        # move them to environment variables / a secrets store.
        post_data = {
            "client_id":"tauy8TddnzsrHZUFdvHXWw7jWI8ipUCOaV7QolNj",
            "client_secret":"PXTtRuCa4P3Josrav0jiKzWBQsfrRprITMR2w7fMQUXSmKbKqOBIXdKmdZpjPkJFaE0nGJ0cuLNQP5c2dYVOnwXg5qTN09Ten1226UVAAtscAS6XRoYGg38TWyNCUq7W",
            "grant_type":"password",
            "username":"huizhe.wang@bl.com",
            "password":"wang+123"
        }
        url_request = urllib2.Request(url, data=json.dumps(post_data), headers={"Content-Type":"application/json"})
        url_open = urllib2.urlopen(url_request)
        # FIX: parse the JSON response instead of eval()-ing it -- eval
        # executes arbitrary expressions received from the network.
        url_info = json.loads(url_open.read())
        token = 'Bearer %s' % str(url_info['access_token'])
        r.set('cmdb_token', token)
        r.expire('cmdb_token', 1800)
        return token

    def put(self, url_suffix, data):
        """POST `data` as JSON to <base>/<url_suffix> with a bearer token.

        Returns {'put': <parsed JSON or error string>, 'status': 1|0}.
        """
        token = r.get('cmdb_token')
        if not token:
            # Cache miss / expired token: fetch a fresh one first.
            token = self.uptoken()
        # BUG FIX: the request below used to be indented inside the
        # "if not token" branch, so the method silently returned None
        # whenever a cached token existed.
        url = '%s/%s' % (self.url, url_suffix)
        url_request = urllib2.Request(url, data=json.dumps(data), headers={"Authorization":"%s" % str(token), "Content-Type":"application/json"})
        try:
            url_open = urllib2.urlopen(url_request)
            url_info = json.loads(url_open.read())
            status = 1
        except Exception as e:
            url_info = str(e)
            status = 0
        return {'put':url_info, 'status':status}



# Collects CPU / memory utilisation for all nodes of one application from
# Prometheus, over a 5-minute window ending at `times` (epoch seconds).
class GetCpuMem:

    def __init__(self, env, appname, appid, times):
        self.env = env          # environment label; keys geturl()'s url_dict
        self.appname = appname  # application name (informational)
        self.appid = appid      # app_basis id used to look up node IPs
        self.times = times      # window end, epoch seconds
        self.ips = self.getips()
        self.start_time, self.end_time = self.get_time()

    # Get every node IP belonging to this application.
    def getips(self):
        """Return the list of active (db_status=1) node IPs for this app."""
        sql = "select bus_ip from app_node where db_status=1 and app_basis_an_id=%s" % self.appid
        data = exec_sql(sql)
        return [row[0] for row in data] if data else []

    # Build the query_range URL for the given PromQL expression.
    def geturl(self, urlstr):
        """Return the full Prometheus query_range URL for this env/window."""
        url_dict = {
            '生产环境': 'http://prometheus3.blops.com:9090/api/v1/query_range?query=',
            '仿真环境': 'http://prometheus2.blops.com:9090/api/v1/query_range?query='
        }
        return "%s%s&start=%s&end=%s&step=1m" % (url_dict[str(self.env)], urlstr, self.start_time, self.end_time)

    # Compute the window's start / end timestamps.
    def get_time(self):
        """Return (start, end) timestamp strings for the 5-minute window.

        NOTE(review): values are formatted from *local* time but suffixed
        with a UTC-style '.000Z' marker -- confirm the Prometheus servers
        actually expect this convention.
        """
        end_time = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(self.times)) + '.000Z'
        start_time = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(self.times - 300)) + '.000Z'
        return start_time, end_time

    # Build the PromQL query strings for one node and one metric.
    def get_query_str(self, ip, field):
        """Return (max_query, avg_query) PromQL strings for 'cpu' or 'mem';
        both empty strings for any other field."""
        avg_str, max_str = '', ''
        if field == 'cpu':
            # CPU usage = 100 - idle; min idle over the range gives max usage.
            avg_str = '100 - avg_over_time(cpu_usage_idle{instance="%s", cpu="cpu-total"}[5s])' % ip
            max_str = '100 - min_over_time(cpu_usage_idle{instance="%s", cpu="cpu-total"}[5s])' % ip
        elif field == 'mem':
            max_str = 'max_over_time(mem_used_percent{instance="%s"}[5s])' % ip
            avg_str = 'avg_over_time(mem_used_percent{instance="%s"}[5s])' % ip
        return max_str, avg_str

    # Fetch a query result and average its sample values.
    def get_data(self, url):
        """Fetch one query_range result and return the mean of its values.

        Retries forever when the request raises (assumed rate limiting);
        returns 0.0 when the result set is empty.
        FIX: the original body mixed tabs and spaces for indentation.
        """
        total = 0.0
        count = 0
        res = ''
        while res == '':
            try:
                res = requests.get(url)
                if res.status_code == 200:
                    data = res.json()
                    if 'status' in data.keys() and data['status'] == 'success':
                        if data['data']['result'] and data['data']['result'][0]['values']:
                            for i in data['data']['result'][0]['values']:
                                count = count + 1
                                total = total + float(i[1])
            except Exception:
                # Most likely throttled by the server: back off and retry.
                print('访问太多需要等一会儿')
                print('sleep 3')
                time.sleep(3)
                continue
        if count == 0:
            return total
        return total / count

    # Get (max, avg) values for one node.
    def get_prometheus_data(self, ip, field):
        """Return (max_value, avg_value) for one node IP and metric field."""
        max_str, avg_str = self.get_query_str(ip, field)
        value_max = self.get_data(self.geturl(max_str))
        value_avg = self.get_data(self.geturl(avg_str))
        return value_max, value_avg

    # Aggregate a metric over all of the app's nodes.
    def getfielddata(self, field):
        """Return (max, avg) for `field` averaged over all node IPs,
        rounded to 3 decimals; (0, 0) when there are no nodes."""
        max_value, avg_value = 0, 0
        maxlist = []
        avglist = []
        for ip in self.ips:
            amax, aavg = self.get_prometheus_data(ip, field)
            maxlist.append(amax)
            avglist.append(aavg)
        if maxlist:
            max_value = round(sum(maxlist) / len(maxlist), 3)
        if avglist:
            avg_value = round(sum(avglist) / len(avglist), 3)
        return max_value, avg_value


# Computes TPS (successful requests, response < 400) and QPS (all requests)
# for one application over a 5-minute window ending at `times`.
class GetTps:

    def __init__(self, appname, times):
        self.appname = appname  # application name used in ES queries
        self.times = times      # window end, epoch seconds
        self.requestapi3 = ReauestApi3()

    def gen_dates(self, b_date, days):
        """Yield `days` consecutive dates starting at b_date."""
        day = datetime.timedelta(days=1)
        for i in range(days):
            yield b_date + day * i

    def get_time(self):
        """Return (start, end) '%Y-%m-%d %H:%M:%S' strings for the window."""
        end_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.times))
        start_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.times - 300))
        return start_time, end_time

    def _build_query(self, st, et, query):
        """Build one esquery payload bucketing hits into 5-second intervals.
        `query` is the list of filter conditions ([] means no filter)."""
        return {
            "app_name": self.appname,
            "conditions": {"query": query, "start_time": st, "end_time": et, },
            "size": 0,
            "aggs_field": "response",
            "query_type": [],
            "query_tag": "ngx_tag",
            'aggs_dict': {
                'data': {
                    "date_histogram": {
                        "field": "timestamp",
                        "interval": "5s",
                        "time_zone": "Asia/Shanghai",
                        "min_doc_count": 1,
                    }
                }
            },
        }

    @staticmethod
    def _bucket_counts(res):
        """Extract per-bucket doc counts from an esquery response dict.
        Returns [] for missing / failed / empty responses."""
        if res and res['status'] == 1 and res['put']['status'] == 1:
            param = res['put'].get('param')
            if param and param.get('aggregations'):
                data = param['aggregations'].get('data')
                if data and data.get('buckets'):
                    return [n['doc_count'] for n in data['buckets']]
        return []

    def formatdata(self):
        """Query the esquery API and return (tpsmax, tpsavg, qpsmax, qpsavg),
        each rounded to 3 decimals; zeros when no data came back."""
        # Each histogram bucket spans 5 seconds; a float divisor FIXES the
        # Python-2 integer-division truncation of the original int `times=5`.
        bucket_seconds = 5.0
        st, et = self.get_time()
        tpsmax, tpsavg, qpsmax, qpsavg = 0, 0, 0, 0
        # TPS counts only successful requests (response code < 400).
        tps_filter = [{"operator": "AND", "field": "response", "request_conditions": "<400"}]
        res = self.requestapi3.put('monitoring/esquery/', self._build_query(st, et, tps_filter))
        qps_res = self.requestapi3.put('monitoring/esquery/', self._build_query(st, et, []))
        templist = self._bucket_counts(res)
        qps_templist = self._bucket_counts(qps_res)
        if templist:
            tpsmax = round(max(templist) / bucket_seconds, 3)
            tpsavg = round(sum(templist) / float(len(templist)) / bucket_seconds, 3)
        if qps_templist:
            qpsmax = round(max(qps_templist) / bucket_seconds, 3)
            qpsavg = round(sum(qps_templist) / float(len(qps_templist)) / bucket_seconds, 3)
        return tpsmax, tpsavg, qpsmax, qpsavg


def get_prefive_time(times):
    """Return epoch timestamps for every 5-minute slot of the given day.

    `times` is a 'YYYY-MM-DD' string; the result covers 00:00:00 up to
    23:59:59 (local time) in 300-second steps.
    """
    fmt = '%Y-%m-%d %H:%M:%S'
    day_start = int(time.mktime(time.strptime(times + " 00:00:00", fmt)))
    day_end = int(time.mktime(time.strptime(times + " 23:59:59", fmt)))
    return range(day_start, day_end, 300)


# Probe the Elasticsearch nodes and return the first healthy one.
def clusterhealth():
    """Return the base URL of the first ES node reporting a 'green' or
    'yellow' cluster status, or None when no node responds healthily."""
    nodes = [
        'http://10.201.241.41:9200',
        'http://10.201.241.42:9200',
        'http://10.201.241.43:9200'
    ]
    for node in nodes:
        try:
            reply = requests.get(node + '/_cluster/health')
            # Both green and yellow are acceptable for writing.
            if reply.json()['status'] in ('green', 'yellow'):
                return node
        except Exception as e:
            print(e)
            continue


# Write one document into Elasticsearch.
def post_data(data, dates=None):
    """Index a single document into the blbasedata-<date> ES index.

    Args:
        data: document body (dict).
        dates: optional 'YYYY-MM-DD' string overriding today's index date.

    Returns:
        True on success, False when the bulk insert raised.
        (False == 0, so existing truthiness checks keep working.)
    """
    index_date = dates if dates is not None else format(datetime.datetime.now(), '%Y-%m-%d')
    actions = [{"_index": 'blbasedata-' + index_date, "_type": "message", "_source": data}]
    try:
        helpers.bulk(es, actions)
        return True
    except Exception as e:
        # FIX: surface the actual failure instead of the old uninformative
        # "try again" message (which did not actually retry anything).
        print(e)
        return False


def write_data(appname, appid, times):
    """Collect cpu/mem/tps/qps metrics for one app and index them into ES.

    Returns whatever post_data() returns (truthy on success).
    """
    metrics = GetCpuMem('生产环境', appname, appid, times)
    cpumax, cpuavg = metrics.getfielddata('cpu')
    memorymax, memoryavg = metrics.getfielddata('mem')
    tpsmax, tpsavg, qpsmax, qpsavg = GetTps(appname, times).formatdata()
    # '@timestamp' is shifted back 8 hours -- presumably converting local
    # Asia/Shanghai time to UTC for Elasticsearch; confirm server timezone.
    doc = {
        'appname': appname,
        'tpsmax': tpsmax,
        'tpsavg': tpsavg,
        'cpumax': cpumax,
        'cpuavg': cpuavg,
        'memorymax': memorymax,
        'memoryavg': memoryavg,
        'nodenum': len(metrics.ips),
        'qpsmax': qpsmax,
        'qpsavg': qpsavg,
        '@timestamp': time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(times - (8 * 3600))) + '.000'
    }
    day = time.strftime('%Y-%m-%d', time.localtime(times))
    return post_data(doc, dates=day)


# Collect and index data for every application in the list, sequentially.
def get_appname_data(applist, times):
    """Run write_data() for every (appid, appname) pair.

    Returns the list of write_data() results, in input order.
    FIX: the original mixed tab and space indentation (L356) and carried
    dead, commented-out multiprocessing-pool code; both removed.
    """
    results = []
    for n, (appid, appname) in enumerate(applist, start=1):
        print(u'----工程--%s %s' % (n, appname))
        results.append(write_data(appname, appid, times))
    return results

# Execute one SQL statement against the CMDB database.
def exec_sql(sql):
    """Run `sql` against the CMDB MySQL DB and return all rows as a list.

    Returns None when execution fails; [] when the query matched nothing.

    SECURITY NOTE(review): callers interpolate values into `sql` with '%'
    string formatting -- prefer parameterized queries. Credentials are
    hard-coded in source and should live in a secrets store.
    """
    db = MySQLdb.connect('10.201.241.17', 'blcmdb', 'B|9l8C7m6D5|b', 'blcmdb', charset='utf8')
    try:
        cursor = db.cursor()
        cursor.execute(sql)
        return list(cursor.fetchall())
    except Exception as e:
        print(e)
        return None
    finally:
        # BUG FIX: the old code closed the connection only on success,
        # leaking one connection per failed query.
        db.close()

# Fetch the list of applications to process.
def get_appid_appname_list():
    """Return [(id, app_name), ...] for active production apps (db error -> None)."""
    query = "select id, app_name from app_basis where db_status=1 and environment='生产环境'"
    return exec_sql(query)


if __name__ == '__main__':
    results = []
    # Backfill window: 100 consecutive dates starting 2017-12-04 through
    # 2018-03-13. FIX: generated programmatically instead of the original
    # hand-typed 100-element literal list (identical contents).
    first_day = datetime.date(2017, 12, 4)
    a = [(first_day + datetime.timedelta(days=i)).strftime('%Y-%m-%d') for i in range(100)]
    applist = get_appid_appname_list()

    # Process a 20-day slice of the window, every 5-minute slot of each day.
    for d in a[60:80]:
        print(u'开始------%s------' % d)
        for slot in get_prefive_time(d):
            results.append(get_appname_data(applist, slot))
    print('--------------%s end--------------' % d)
    print('====================')