import requests
import json
import time
import datetime

# Shared HTTP headers for every SkyWalking GraphQL request in this script.
# NOTE(review): Origin/Referer are hard-coded to one demo deployment —
# presumably only needed if the server validates them; confirm before reuse.
headers = {
    'Accept': 'application/json, text/plain, */*',
    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
    'Content-Type': 'application/json;charset=UTF-8',
    'Origin': 'http://139.198.164.54:30005',
    'Referer': 'http://139.198.164.54:30005/trace',
    'Connection': 'keep-alive',
    # 'Proxy-Connection': 'keep-alive',  # pitfall! (deliberately left disabled)
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.46'
}

def save_json(file_name, data, save_dir='../dataset/'):
    """Serialize *data* as JSON into ``save_dir``/``file_name``.

    :param file_name: target file name, e.g. ``traces.json``
    :param data: any JSON-serializable object
    :param save_dir: directory prefix to write into; defaults to the
        original hard-coded ``../dataset/`` for backward compatibility
    """
    # Plain concatenation kept (not os.path.join) to match the original
    # semantics, which expected a trailing slash on the directory.
    save_path = save_dir + file_name

    # Explicit UTF-8 so output does not depend on the platform's locale
    # encoding (e.g. cp936 on a Chinese-locale Windows machine).
    with open(save_path, 'w', encoding='utf-8') as file:
        json.dump(data, file)
    print("保存成功！文件名为：" + file_name)


def load_json(file_path):
    """Load and return the parsed contents of a ``.json`` file.

    :param file_path: path whose extension must be ``json``
    :return: the deserialized object
    :raises AssertionError: if the extension is not ``json`` (kept as an
        assert to preserve the original exception type; note asserts are
        stripped under ``python -O``)
    """
    assert file_path.split('.')[-1] == 'json'
    # Explicit UTF-8: the files are written by save_json in UTF-8, so reading
    # must not fall back to the platform default encoding.
    with open(file_path, 'r', encoding='utf-8') as file:
        data = json.load(file)
    return data


def trace_filter(trace):
    """Return False for database/connection-pool traces, True otherwise.

    Only the first endpoint name is inspected: a trace whose first endpoint
    mentions 'Mysql' or 'HikariCP' is considered infrastructure noise.
    """
    first_endpoint = trace['endpointNames'][0]
    return not any(marker in first_endpoint for marker in ('Mysql', 'HikariCP'))

def remove_seconds(timestamp):
    """Truncate a local-time Unix timestamp to whole-minute precision.

    :param timestamp: seconds since the epoch (may carry seconds/microseconds)
    :return: timestamp (float) of the same local minute with seconds zeroed
    """
    # datetime.replace() truncates directly — avoids the original
    # strftime/strptime string round-trip while producing the same value.
    dt = datetime.datetime.fromtimestamp(timestamp)
    return dt.replace(second=0, microsecond=0).timestamp()


def print_and_get_trace_id_set(host, timestamp_start, timestamp_end, timestep=3):
    """Collect unique, filtered trace IDs between two timestamps.

    Walks the (UTC-converted) time range in windows of *timestep* minutes,
    queries basic traces for each window from the SkyWalking GraphQL
    endpoint, and keeps the IDs accepted by ``trace_filter``.  Per-window
    progress is printed as it goes.

    :param host: "ip:port" of the SkyWalking UI backend
    :param timestamp_start: Unix timestamp (local time, minute precision)
    :param timestamp_end: Unix timestamp (local time, minute precision)
    :param timestep: window size in minutes; keep small enough that one
        window never exceeds the pageSize (10000) used below
    :return: set of unique trace IDs that passed ``trace_filter``
    """

    trace_id_set = set()
    total_trace_num = 0

    url = "http://" + host + "/graphql"
    queryStr = '''query queryTraces($condition: TraceQueryCondition) {
          data: queryBasicTraces(condition: $condition) {
            traces {
              key: segmentId
              endpointNames
              duration
              start
              isError
              traceIds
            }
            total
          }}'''

    # The backend is queried with UTC-formatted time strings.
    time_start_utc = datetime.datetime.utcfromtimestamp(timestamp_start)
    time_end_utc = datetime.datetime.utcfromtimestamp(timestamp_end)
    t = time_start_utc
    delta = datetime.timedelta(minutes=timestep)

    while t < time_end_utc:
        tend = t + delta
        if tend > time_end_utc:
            # Clamp the final window to the end of the requested range.
            tend = time_end_utc

        # Build the GraphQL request parameters for this window.
        time_start = t.strftime('%Y-%m-%d %H%M')
        time_end = tend.strftime('%Y-%m-%d %H%M')
        data = {
            "query": queryStr,
            "variables": {
                "condition": {
                    "queryDuration": {
                        "start": time_start,
                        "end": time_end,
                        "step": "MINUTE"
                    },
                    "traceState": "ALL",
                    "paging": {
                        "pageNum": 1,
                        "pageSize": 10000,
                        "needTotal": True
                    },
                    "queryOrder": "BY_START_TIME"
                }
            }
        }
        res = requests.request('POST', url, headers=headers, data=json.dumps(data))
        trace_num = res.json()['data']['data']['total']

        # Filter the traces: dedupe on the first traceId and drop the ones
        # rejected by trace_filter (MySQL / HikariCP infrastructure spans).
        traces = res.json()['data']['data']['traces']
        num = 0
        for trace in traces:
            if trace['traceIds'][0] not in trace_id_set and trace_filter(trace):
                trace_id_set.add(trace['traceIds'][0])
                num += 1

        print("时间范围：", time_start, "-", time_end, "内，共有{}条trace，合适的有{}条".format(trace_num, num))
        total_trace_num += trace_num

        # Advance one minute past the window end — presumably because the
        # MINUTE-step query range is inclusive at both ends, so this avoids
        # re-fetching the boundary minute; confirm against the backend.
        t = tend + datetime.timedelta(minutes=1)

    # Summary line uses local time (underscored format) rather than UTC.
    time_start = datetime.datetime.fromtimestamp(timestamp_start).strftime('%Y-%m-%d_%H%M')
    time_end = datetime.datetime.fromtimestamp(timestamp_end).strftime('%Y-%m-%d_%H%M')
    print("总：时间范围：", time_start, "-", time_end, "内，共有{}条trace，合适的有{}条".format(total_trace_num, len(trace_id_set)))

    return trace_id_set


def collect_traces_all(host, timestamp_start, timestamp_end):
    '''
    Collect every suitable trace inside [timestamp_start, timestamp_end] and
    save them as a JSON file.  Timestamps are handled at minute precision;
    the seconds component is discarded.
    :param host: ip:port
    :param timestamp_start: start Unix timestamp (local time, not UTC)
    :param timestamp_end: end Unix timestamp (local time, not UTC)
    :return:
    '''

    collect_time_start = time.time()

    # Truncate both endpoints to whole minutes.
    timestamp_start = remove_seconds(timestamp_start)
    timestamp_end = remove_seconds(timestamp_end)

    # Elasticsearch returns at most the most recent 10000 hits, so querying
    # the whole range at once could silently drop data; query in small
    # windows instead.  Tune the window to the trace production rate: larger
    # when traces are sparse, smaller when they are frequent.
    # NOTE(review): the original comment described a 3-minute step, but the
    # call below passes 1 — one minute is the effective window size.
    trace_id_set = print_and_get_trace_id_set(host, timestamp_start, timestamp_end, 1)

    # Record human-readable time strings (UTC and local) for the output file.
    time_start_utc = datetime.datetime.utcfromtimestamp(timestamp_start).strftime('%Y-%m-%d %H%M')
    time_end_utc = datetime.datetime.utcfromtimestamp(timestamp_end).strftime('%Y-%m-%d %H%M')
    time_start = datetime.datetime.fromtimestamp(timestamp_start).strftime('%Y-%m-%d %H%M')
    time_end = datetime.datetime.fromtimestamp(timestamp_end).strftime('%Y-%m-%d %H%M')

    # Prepare the JSON output structure.
    traces = {
        'traces': [],
        'time_start_utc': time_start_utc,
        'time_end_utc': time_end_utc,
        'time_start': time_start,
        'time_end': time_end,
        'num': len(trace_id_set)
    }

    # Fetch the full span list for each collected trace ID, one by one.
    idx = 0
    for trace_id in trace_id_set:
        idx += 1
        if idx % 100 == 0:
            print("已经采集了{}条".format(idx))

        # print("正在采集{}".format(trace_id))
        spans = collect_trace(host, trace_id)
        trace = {
            'traceId': trace_id,
            'spans': spans
        }
        traces['traces'].append(trace)

    collect_time_end = time.time()
    print("采集完毕，一共{}条trace".format(len(trace_id_set)))
    print("采集一共花费了{}秒".format(int(collect_time_end - collect_time_start)))

    # Save as JSON; file name encodes the time range and the collection time.
    time_start = datetime.datetime.fromtimestamp(timestamp_start).strftime('%Y-%m-%d_%H%M')
    time_end = datetime.datetime.fromtimestamp(timestamp_end).strftime('%Y-%m-%d_%H%M')
    time_now = datetime.datetime.fromtimestamp(collect_time_end).strftime('%Y-%m-%d_%H%M')
    file_name = 'traces' + '__' + time_start + '__' + time_end + '__' + time_now + '.json'
    save_json(file_name, traces)


def collect_traces(host, trace_id, timestamp_start, timestamp_end, pageNum=1, pageSize=5000):
    """Query basic trace summaries for *trace_id* within a time range.

    :param host: "ip:port" of the SkyWalking UI backend
    :param trace_id: trace ID used as the GraphQL ``traceId`` condition
    :param timestamp_start: Unix timestamp, formatted as UTC for the query
    :param timestamp_end: Unix timestamp, formatted as UTC for the query
    :param pageNum: result page number (camelCase kept for caller compat)
    :param pageSize: number of results per page
    :return: the parsed JSON body of the GraphQL response
    """
    url = "http://" + host + "/graphql"

    # Build the GraphQL query and its variables.
    queryStr = '''query queryTraces($condition: TraceQueryCondition) {
  data: queryBasicTraces(condition: $condition) {
    traces {
      key: segmentId
      endpointNames
      duration
      start
      isError
      traceIds
    }
    total
  }}'''
    time_start = datetime.datetime.utcfromtimestamp(timestamp_start).strftime('%Y-%m-%d %H%M')
    time_end = datetime.datetime.utcfromtimestamp(timestamp_end).strftime('%Y-%m-%d %H%M')

    data = {
        "query": queryStr,
        "variables": {
            "condition": {
                "queryDuration": {
                    "start": time_start,
                    "end": time_end,
                    "step": "MINUTE"
                },
                "traceState": "ALL",
                "paging": {
                    "pageNum": pageNum,
                    "pageSize": pageSize,
                    "needTotal": True
                },
                "queryOrder": "BY_START_TIME",
                "traceId": trace_id
            }
        }
    }
    res = requests.request('POST', url, headers=headers, data=json.dumps(data))
    print("response返回结果：", res)
    # Bug fix: the original discarded the response body entirely (it only
    # printed the Response repr).  Return the parsed JSON so callers can
    # actually consume the query result; previous callers ignoring the
    # None return are unaffected.
    return res.json()


def collect_trace(host, trace_id):
    """Fetch every span belonging to the trace identified by *trace_id*.

    :param host: "ip:port" of the SkyWalking UI backend
    :param trace_id: the trace whose spans should be retrieved
    :return: the list of span dicts from the GraphQL response
    """
    endpoint = "http://" + host + "/graphql"

    # GraphQL query selecting the full span structure, including segment
    # references, tags and logs.
    query = '''query queryTrace($traceId: ID!) {
  trace: queryTrace(traceId: $traceId) {
    spans {
      traceId
      segmentId
      spanId
      parentSpanId
      refs {
        traceId
        parentSegmentId
        parentSpanId
        type
      }
      serviceCode
      serviceInstanceName
      startTime
      endTime
      endpointName
      type
      peer
      component
      isError
      layer
      tags {
        key
        value
      }
      logs {
        time
        data {
          key
          value
        }
      }
    }
  }
  }'''

    payload = {
        "query": query,
        "variables": {"traceId": trace_id}
    }
    response = requests.post(endpoint, headers=headers, data=json.dumps(payload))
    body = response.json()
    return body['data']['trace']['spans']


if __name__ == "__main__":

    # Example: collect all suitable traces from the demo deployment within a
    # 15-minute window.
    host = "139.198.164.54:30005"
    timestamp_start = datetime.datetime.strptime('2023-05-03 0225', '%Y-%m-%d %H%M').timestamp() # local time; minute precision is enough
    timestamp_end = datetime.datetime.strptime('2023-05-03 0240', '%Y-%m-%d %H%M').timestamp()
    collect_traces_all(host, timestamp_start, timestamp_end)

