import redis
import sys
import time
from elasticsearch import Elasticsearch
import logging

# Redis connection pool. decode_responses=True means GET returns str, not bytes.
# NOTE(review): host/port are hardcoded — consider moving to config/env vars.
r = redis.ConnectionPool(host='192.168.0.67', port=6379, db=0, decode_responses=True)

# Elasticsearch connection.
# NOTE(review): credentials are hardcoded in source — move to env vars/secret store.
_es = Elasticsearch(["192.168.1.78:9200"],
                    http_auth=('elastic', '1CHYC5IO6TrgZM66kyOw'),
                    # sniff before doing anything
                    sniff_on_start=True,
                    # refresh nodes after a node fails to respond
                    sniff_on_connection_fail=True,
                    # and also every 60 seconds
                    sniffer_timeout=60)


def es_query_sum(start_ms=1675180800000, end_ms=1677599999999,
                 index='otosaas_qlllog-*'):
    """Run an average-request-time aggregation against Elasticsearch.

    Aggregates the average ``request_time`` per ``api_url`` (top 100,
    ordered by that average, descending), and within each api_url the
    average per ``httphost`` (top 100).

    Args:
        start_ms: Lower bound (inclusive) of the ``timestamp`` range,
            in epoch milliseconds. Default preserves the original
            hard-coded window start.
        end_ms: Upper bound (inclusive) of the ``timestamp`` range,
            in epoch milliseconds. Default preserves the original
            hard-coded window end.
        index: Index pattern to search.

    Returns:
        dict: the raw Elasticsearch search response.
    """
    es_search_body = {
        "aggs": {
            "2": {
                "terms": {
                    "field": "api_url.keyword",
                    "size": 100,
                    # order each bucket by sub-aggregation "1" (avg request_time)
                    "order": {"1": "desc"}
                },
                "aggs": {
                    "1": {"avg": {"field": "request_time"}},
                    "3": {
                        "terms": {
                            "field": "httphost.keyword",
                            "size": 100,
                            "order": {"1": "desc"}
                        },
                        "aggs": {
                            "1": {"avg": {"field": "request_time"}}
                        }
                    }
                }
            }
        },
        # size 0: we only want aggregations, no hit documents
        "size": 0,
        "_source": {"excludes": []},
        "stored_fields": ["*"],
        "script_fields": {},
        "docvalue_fields": [
            {"field": "@timestamp", "format": "date_time"},
            {"field": "timestamp", "format": "date_time"}
        ],
        "query": {
            "bool": {
                "must": [
                    {"match_all": {}},
                    {
                        "range": {
                            "timestamp": {
                                "gte": start_ms,
                                "lte": end_ms,
                                "format": "epoch_millis"
                            }
                        }
                    }
                ],
                "filter": [],
                "should": [],
                "must_not": []
            }
        }
    }
    # Execute the ES query.
    data = _es.search(index=index, body=es_search_body)
    print(data)
    return data


def get_fetch_slow_from_redis(app_code, status_level):
    '''
        Fetch the cached request-status counter for the given appCode and
        status level ('3xx' / '4xx' / '5xx') from redis.

        Returns the stored value (a str, since the pool uses
        decode_responses=True) or 0 when the key is missing or the
        status_level is unrecognized.

        2020-07-13
    '''
    # Bug fix: the original had unreachable code after `return` that
    # referenced an undefined `self` — removed.
    if status_level not in ('3xx', '4xx', '5xx'):
        return 0
    conn = redis.Redis(connection_pool=r)
    return conn.get('ELKRequestStatus-{}-{}'.format(app_code, status_level)) or 0


if __name__ == "__main__":
    # Manual smoke test: run the aggregation query, then read one cached
    # counter back from redis and print it.
    app_code = 'cxa'
    status_level = '4xx'
    es_query_sum()
    count = get_fetch_slow_from_redis(app_code, status_level)
    print(count)
