#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
import time
import json
import os
import csv
import shutil
from CacheRankUtils import CacheRankUtils
import datetime

# wiki： http://sla.kibana.intra.weibo.cn/goto/69488a96a4d15b4f6d6734a52a38495e

CACHES_RANK = 20  # Android: number of top-ranked cache paths to keep
TIME_INTERVAL = 3  # intended pause between consecutive requests, seconds
LOG_INTERVAL = 1800  # strategy: one data window every half hour, seconds
DETAIL_SIZE = 1000  # number of log entries pulled per request
# Total log entries per day = windows-per-day * entries-per-window.
# Integer division keeps this an int (plain "/" yielded a float for a count).
LOG_COUNT = (3600 * 24 // LOG_INTERVAL * DETAIL_SIZE)

Android_RESULT_DIR_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "android_result")
Android_CSV_FILE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "android_result.csv")
# Dynamic variant (start of today; first window spans LOG_INTERVAL seconds):
# FIRST_REQUEST_START_TIME = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).strftime("%Y-%m-%d-%H-%M")
# FIRST_REQUEST_END_TIME = time.strftime('%Y-%m-%d-%H-%M', time.localtime(time.mktime(time.strptime(FIRST_REQUEST_START_TIME, "%Y-%m-%d-%H-%M")) + LOG_INTERVAL))

FIRST_REQUEST_START_TIME = "2023-09-04-00-00"  # hard-coded first window start
FIRST_REQUEST_END_TIME = "2023-09-04-00-30"

# Current request window; advanced by LOG_INTERVAL after each successful fetch.
Timestamp = f"{FIRST_REQUEST_START_TIME}~{FIRST_REQUEST_END_TIME}"
# Summary line produced by CacheRankAndroid.parsefiles(); empty until then.
MailTitle = ""


class CacheRankAndroid:
    """Fetch Android cache-size-rank logs from the ES gateway, aggregate
    per-path cache sizes across all logs, and write a top-N ranking CSV.

    Uses the module-level globals ``Timestamp`` (current request window,
    advanced after every successful fetch) and ``MailTitle`` (summary line
    filled in by :meth:`parsefiles`).
    """

    def make_request(self):
        """Fetch one window of log entries and persist the raw JSON response.

        On success the response body is dumped into ``Android_RESULT_DIR_PATH``
        under a file named after the current ``Timestamp`` window, and the
        global ``Timestamp`` is advanced by ``LOG_INTERVAL``. Request failures
        are printed and swallowed (best effort); the window is NOT advanced on
        failure, so the same window is retried on the next call.
        """
        global Timestamp
        global MailTitle

        url = "http://esrest.sinaops.intra.weibo.cn/maiops/getAggData.do"
        headers = {
            "Content-Type": "application/json"
        }
        requestparams = {
            "detailSize": DETAIL_SIZE,
            "indexName": "logstash-webview-",
            "agg_key": "from",
            "timestamp": Timestamp,
            "timeFormat": "day",
            "isDetail": "yes",
            "fields": {
                "programname": "mweibo_client_cache_analyse",
                "subtype": "cache_size_rank",
                "query_string": "size_total_weibo:>1000000000 AND size_total_weibo:<1500000000",
                "agent_os_type": "android"
            },
            "fields_or": {
                "query_string": ["agent_weibo_version:>13.8.2"]
            },
        }
        try:
            response = requests.post(url, headers=headers, json=requestparams)
            response.raise_for_status()

            json_data = response.json()
            os.makedirs(Android_RESULT_DIR_PATH, exist_ok=True)
            # json.dump emits ASCII by default; explicit encoding keeps the
            # file readable regardless of the platform's locale default.
            filepath = os.path.join(Android_RESULT_DIR_PATH, Timestamp)
            with open(filepath, "w", encoding="utf-8") as f:
                json.dump(json_data, f)
            print(f"拉取了{DETAIL_SIZE}条数据 保存在:", filepath)
            # Advance the global window for the next call. (A previous
            # version also rewrote requestparams["timestamp"], but that local
            # dict is discarded right here, so the assignment was dead code.)
            Timestamp = CacheRankAndroid.add_one_hour_to_timestamp()
        except requests.exceptions.RequestException as e:
            print("Error making request:", e)

    @staticmethod
    def add_one_hour_to_timestamp():
        """Return the global ``Timestamp`` window shifted by ``LOG_INTERVAL``.

        NOTE: despite the legacy name, the shift is LOG_INTERVAL seconds
        (currently 30 minutes), not one hour.
        """
        fmt = "%Y-%m-%d-%H-%M"
        start_str, end_str = Timestamp.split("~")
        new_start = time.mktime(time.strptime(start_str, fmt)) + LOG_INTERVAL
        new_end = time.mktime(time.strptime(end_str, fmt)) + LOG_INTERVAL
        return (f"{time.strftime(fmt, time.localtime(new_start))}"
                f"~{time.strftime(fmt, time.localtime(new_end))}")

    @staticmethod
    def _load_result_files(directory):
        """Load every regular file in *directory* as JSON, keyed by filename."""
        json_dict = {}
        for filename in os.listdir(directory):
            file_path = os.path.join(directory, filename)
            # Only regular files (each one is a dumped response window).
            if os.path.isfile(file_path):
                with open(file_path, 'r', encoding='utf-8') as file:
                    json_dict[filename] = json.load(file)
        return json_dict

    def parsefiles(self, directory):
        """Aggregate all result files in *directory* and emit the ranking CSV.

        Sums cache sizes per path across every valid log entry, writes the top
        ``CACHES_RANK`` paths (by average size per valid log) to
        ``Android_CSV_FILE_PATH``, and stores a human-readable summary in the
        global ``MailTitle``. Returns early (CSV untouched, ``MailTitle``
        unchanged) when no valid data is found.
        """
        global MailTitle

        json_dict = self._load_result_files(directory)

        ok = 0          # logs with a usable info_system_after payload
        nook = 0        # malformed / incomplete entries
        totalsize = 0   # sum of size_total_weibo over valid logs
        allcaches = {}  # path -> accumulated size over all logs
        for filename, json_data in json_dict.items():
            print("正在处理-Android数据-:", filename)
            for onejson in json_data["hits"]["hits"]:
                info_system_after = onejson["_source"]["info_system_after"]
                # Historical payloads may be a JSON-encoded string instead of
                # a list (client versions changed the format over time).
                if isinstance(info_system_after, str):
                    info_system_after = json.loads(info_system_after)

                if info_system_after is not None and isinstance(info_system_after, list):
                    ok += 1
                else:
                    print("Android获取数据失败：info_system_after未空或者数据类型错误 应该为有值字符串")
                    nook += 1
                    continue
                for oneline in info_system_after:
                    path = oneline["path"]
                    if not isinstance(path, str):
                        nook += 1
                        continue
                    allcaches[path] = allcaches.get(path, 0) + oneline["size"]

                _source = onejson["_source"]
                # The total-size field name also changed over time ("_" vs ".").
                if "size_total_weibo" in _source:
                    size_total_weibo = int(_source["size_total_weibo"])
                elif "size_total.weibo" in _source:
                    size_total_weibo = int(_source["size_total.weibo"])
                else:
                    print("数据错误 - 没有微博占用大小字段")
                    nook += 1
                    continue
                if size_total_weibo != 0:
                    totalsize += size_total_weibo

        if ok == 0:
            print("===========没有有效数据================")
            return
        averageweibosize = totalsize / ok
        if averageweibosize == 0:
            # Guard: every valid log reported total size 0; computing a ratio
            # below would divide by zero.
            print("===========没有有效数据================")
            return
        averageweibosizefloat = round(averageweibosize / 1000000, 2)

        ranknum = 1          # rank about to be written (1-based)
        ranktotalsize = 0    # sum of average sizes of the ranked paths
        rankratio = 0.00     # sum of their percentages of the weibo average
        # Descending by accumulated size.
        sorted_items = sorted(allcaches.items(), key=lambda x: x[1], reverse=True)
        with open(Android_CSV_FILE_PATH, 'w', newline='', encoding='utf-8') as csvfile:
            writer = csv.writer(csvfile)
            # Header row.
            writer.writerow(["路径", "大小:MB", "占比:%", "业务"])

            for key, value in sorted_items:
                # BUG FIX: the old pre-write check (CACHES_RANK == ranknum)
                # stopped after CACHES_RANK - 1 rows; now exactly CACHES_RANK
                # rows are emitted.
                if ranknum > CACHES_RANK:
                    break
                if CacheRankAndroid.ignorpath(key):
                    continue
                if "-FILES" in key:
                    continue
                averagevalue = value / ok
                size = round(averagevalue / 1000000, 2)
                ratio = round(averagevalue / averageweibosize * 100, 2)
                if ratio == 0:
                    continue
                ranknum += 1
                ranktotalsize += averagevalue
                rankratio += ratio
                writer.writerow([key, size, ratio, CacheRankUtils.androidbussines(key)])
                print(f"{key} {size}MB {ratio}%")
        print("CSV文件已生成:", Android_CSV_FILE_PATH)
        rankratio = round(rankratio, 2)
        ranktotalsizefloat = round(ranktotalsize / 1000000, 2)
        MailTitle = f"一共处理了{ok + nook}份日志 有效日志{ok}份 无效日志{nook}份 排名文件总大小:{ranktotalsizefloat}MB 微博占用均值:{averageweibosizefloat}MB 排名文件/微博占用:{rankratio}%"
        print(MailTitle)

    @staticmethod
    def ignorpath(key):
        """Hook for excluding paths from the ranking; currently excludes none."""
        return False

    def fetch(self, isrequest=True):
        """Run the full pipeline and return a summary dict.

        :param isrequest: when True (default), wipe previous results, download
                          every window of the day, then parse; when False,
                          only parse what is already on disk.
        :returns: dict with mail title, interval/count settings, the covered
                  duration, and the CSV / result-directory paths.
        """
        if isrequest:
            # Start from a clean slate so stale windows don't skew the stats.
            if os.path.exists(Android_RESULT_DIR_PATH):
                shutil.rmtree(Android_RESULT_DIR_PATH)
            if os.path.exists(Android_CSV_FILE_PATH):
                os.remove(Android_CSV_FILE_PATH)

            # BUG FIX: the request loop is now guarded by isrequest; it used
            # to run unconditionally, making parse-only mode impossible.
            total_requests = int(LOG_COUNT // DETAIL_SIZE)
            times = 0
            while times < total_requests:
                self.make_request()
                times += 1
                # NOTE(review): TIME_INTERVAL (3s) exists for this purpose but
                # the original hard-coded 5s — confirm which pause is intended.
                time.sleep(5)
            print(f"android端获取数据结束 一共请求了{times}次")

        self.parsefiles(Android_RESULT_DIR_PATH)
        start_time, end_time = Timestamp.split("~")
        duration = f"{FIRST_REQUEST_START_TIME}~{end_time}"
        info = {"mail_title": MailTitle, "log_interval": LOG_INTERVAL, "log_count": LOG_COUNT, "duration": duration, "csv": Android_CSV_FILE_PATH, "result": Android_RESULT_DIR_PATH}
        return info


if __name__ == "__main__":
    # Re-parse previously downloaded results without re-fetching anything.
    result_dir = Android_RESULT_DIR_PATH
    if os.path.exists(result_dir):
        analyzer = CacheRankAndroid()
        analyzer.parsefiles(result_dir)

