import os
from prometheus_api_client import PrometheusConnect
import pandas as pd
from datetime import datetime, timedelta
import logging
from concurrent.futures import ThreadPoolExecutor, as_completed



# Prometheus server (NodePort endpoint of the cluster, TLS disabled).
prom = PrometheusConnect(url="http://192.168.49.2:30003", disable_ssl=True)

# Train-Ticket microservices whose metrics are scraped.
service = [
    "ts-auth-service",
    "ts-assurance-service",
    "ts-basic-service",
    "ts-config-service",
    "ts-contacts-service",
    "ts-food-map-service",
    "ts-food-service",
    "ts-inside-payment-service",
    "ts-notification-service",
    "ts-order-other-service",
    "ts-order-service",
    "ts-payment-service",
    "ts-preserve-service",
    "ts-price-service",
    "ts-route-service",
    "ts-route-plan-service",
    "ts-seat-service",
    "ts-security-service",
    "ts-ticketinfo-service",
    "ts-train-service",
    "ts-travel-service",
    "ts-travel-plan-service",
    "ts-travel2-service",
    "ts-user-service",
    "ts-cancel-service",
    "ts-station-service",
    "ts-verification-code-service",
]

# Pod -> host-side network interface mapping.
# Obtained by exec'ing into the Cilium agent pod:
#   kubectl exec -it cilium-7kgps -n kube-system -- /bin/bash
# and listing endpoints:
#   cilium endpoint list -o json | jq -r '.[] | select(.status["external-identifiers"]["k8s-namespace"] == "tt" and (.status["external-identifiers"]["k8s-pod-name"] // "" | test("^ts-.*-service-.*$"))) | ( .status["external-identifiers"]["k8s-pod-name"] // "unknown" | split("-service-")[0] + "-service" ) + " \(.status["networking"]["interface-name"] // "unknown")"'
interface_text='''
ts-preserve-service lxc33b062ee3131
ts-voucher-service lxc85ea109385f1
ts-admin-route-service lxcd535181eb6c1
ts-food-map-service lxcb277480fb1e5
ts-admin-user-service lxcfcb9ba110269
ts-config-service lxc4f4f1264654a
ts-route-service lxca8351ca99c11
ts-execute-service lxc6978270cf089
ts-food-service lxcf393ed25512e
ts-auth-service lxca6372df853a5
ts-inside-payment-service lxc8b3324c334ad
ts-assurance-service lxccedd86cd5596
ts-user-service lxceac65ce605be
ts-order-other-service lxcb3002dbb80b2
ts-rebook-service lxc111e3a0ec8f4
ts-admin-travel-service lxc80586d93261d
ts-route-plan-service lxc2d8137c211b5
ts-seat-service lxc40662822a2b0
ts-travel-plan-service lxc88bf88e54795
ts-ticketinfo-service lxced49aa5c5697
ts-admin-order-service lxcce4946465d30
ts-order-service lxc514b40f49278
ts-consign-price-service lxcd9780dc6b419
ts-preserve-other-service lxc47cafeb4e37c
ts-train-service lxc741201a3f35f
ts-cancel-service lxc0e93a0c78b2c
ts-notification-service lxc9372a1a4740b
ts-security-service lxcaff3d14a075e
ts-payment-service lxc067434af43a8
ts-station-service lxc7803485cb163
ts-travel2-service lxc829a4dfbf38b
ts-news-service lxcc4c6b52ebe46
ts-basic-service lxc36aeb405f2f2
ts-ticket-office-service lxcbb2d67f40aa9
ts-verification-code-service lxcad11bb76ecb1
ts-price-service lxc624d48b4530a
ts-travel-service lxcc7e614254875
ts-basic-info-service lxcfb1690275d0e
ts-contacts-service lxcf19527f4d03d
ts-consign-service lxc5ab2d1907943

'''

# Parse the table above into {pod_base_name: interface_name}; malformed or
# empty lines (fewer than two space-separated fields) are ignored.
interface_map = {
    fields[0]: fields[1]
    for fields in (entry.split(' ', 1) for entry in interface_text.strip().splitlines())
    if len(fields) == 2
}


def get_metric_data(start_time, end_time, out_path, temptime_start, temptime_end):
    """Collect metrics for every service in `service` into CSV files.

    Fans out one `process_service` call per service over a thread pool and
    writes results under ``<out_path>/metrics``.

    Args:
        start_time / end_time: datetime range passed to the Prometheus query.
        out_path: base output directory; a ``metrics`` subdirectory is created.
        temptime_start / temptime_end: epoch-second bounds used by
            `process_service` to build the full per-second timestamp index.
    """
    metrics_dir = os.path.join(out_path, "metrics")
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(metrics_dir, exist_ok=True)

    max_workers = min(8, len(service))  # adjust thread count as needed
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_to_service = {
            executor.submit(
                process_service, s, start_time, end_time,
                metrics_dir, temptime_start, temptime_end
            ): s
            for s in service
        }
        for future in as_completed(future_to_service):
            # Surface worker exceptions instead of silently discarding them;
            # .result() re-raises anything raised inside process_service.
            try:
                future.result()
            except Exception as e:
                svc = future_to_service[future]
                print(f"Worker failed for {svc}: {e}")
                logging.error(f"Worker failed for {svc}: {e}")





# # 日志文件路径
# log_file_path = "metrics.log"
# # 设置日志文件
# import logging
# logging.basicConfig(
#     filename=log_file_path,
#     level=logging.INFO,
#     format='%(asctime)s - %(levelname)s - %(message)s'
# )

# # 创建 metrics 目录
# os.makedirs("metrics", exist_ok=True)



def process_service(service_name, start_time=None, end_time=None, out_path="metrics", temptime_start=None, temptime_end=None):
    """Query Prometheus for one service's CPU/memory/network metrics and save a CSV.

    Pulls seven container metrics for the pod(s) matching ``<service_name>-.*``
    in namespace ``tt`` (network counters are selected by the pod's veth
    interface from `interface_map`), aligns them on a per-second timestamp
    index spanning [temptime_start, temptime_end], forward-fills gaps, converts
    cumulative counters to per-second deltas, and writes
    ``<out_path>/<service_name>.csv``.

    Args:
        service_name: base service name, e.g. "ts-order-service".
        start_time / end_time: datetime range for the Prometheus query.
        out_path: directory the CSV is written to (created if missing).
        temptime_start / temptime_end: epoch-second bounds (int-convertible)
            of the timestamp index written to the CSV.
    """
    try:
        interface = interface_map.get(service_name, "")
        if not interface:
            print(f"No interface found for {service_name}, skipping.")
            return
        print(f"Processing {service_name} with interface {interface}")
    except Exception as e:
        print(f"Error processing {service_name}: {e}")
        logging.error(f"Error processing {service_name}: {e}")
        return

    # Cumulative counters (cpu_*, *_bytes) are diffed later; memory gauges are not.
    queries = {
        "cpu_usage_system": f'container_cpu_system_seconds_total{{namespace="tt", pod=~"{service_name}-.*"}}',
        "cpu_usage_total": f'container_cpu_usage_seconds_total{{namespace="tt", pod=~"{service_name}-.*"}}',
        "cpu_usage_user": f'container_cpu_user_seconds_total{{namespace="tt", pod=~"{service_name}-.*"}}',
        "memory_usage": f'container_memory_usage_bytes{{namespace="tt", pod=~"{service_name}-.*"}}',
        "memory_working_set": f'container_memory_working_set_bytes{{namespace="tt", pod=~"{service_name}-.*"}}',
        "rx_bytes": f'container_network_receive_bytes_total{{interface="{interface}"}}',
        "tx_bytes": f'container_network_transmit_bytes_total{{interface="{interface}"}}'
    }

    df_final = None

    for metric_name, promql in queries.items():
        print(f"[{service_name}] {promql}")
        metric_data = prom.get_metric_range_data(
            promql,
            start_time=start_time,
            end_time=end_time,
            chunk_size=timedelta(seconds=1),
        )
        if not metric_data:
            print(f"No data for {service_name} {metric_name}")
            logging.error(f"No data for {service_name} {metric_name}")
            # Merge an empty frame so the column still exists downstream.
            df = pd.DataFrame([], columns=["timestamp", metric_name])
            if df_final is None:
                df_final = df
            else:
                df_final = pd.merge(df_final, df, on=["timestamp"], how="outer")
            continue

        df_list = []
        for result in metric_data:
            values = result["values"]
            df = pd.DataFrame(values, columns=["timestamp", metric_name])
            # Prometheus returns float epoch seconds; truncate to int for alignment.
            df["timestamp"] = df["timestamp"].astype(float).astype(int)
            if metric_name in ["memory_usage", "memory_working_set"]:
                # bytes -> GiB
                df[metric_name] = df[metric_name].astype(float) / (1024 * 1024 * 1024)
            df_list.append(df)
        if df_list:
            df_metric = pd.concat(df_list, ignore_index=True)
            if df_final is None:
                df_final = df_metric
            else:
                df_final = pd.merge(df_final, df_metric, on=["timestamp"], how="outer")

    if df_final is None:
        print(f"No data for {service_name}, skip saving.")
        logging.error(f"No data for {service_name}, skip saving.")
        return

    df_final = df_final.sort_values("timestamp").fillna(0)
    df_final['timestamp'] = df_final['timestamp'].astype(int)
    # Reindex onto a dense per-second range so every second in the capture
    # window has a row, even if Prometheus returned no sample for it.
    start_ts = int(temptime_start)
    end_ts = int(temptime_end)
    full_range = pd.Series(range(start_ts, end_ts + 1), name='timestamp')
    df_final = pd.merge(full_range, df_final, on='timestamp', how='left')
    df_final = df_final.fillna(0)

    columns_to_fill = ["cpu_usage_system", "cpu_usage_total", "cpu_usage_user", "memory_usage", "memory_working_set", "tx_bytes", "rx_bytes"]
    # Treat 0 as "missing sample" and carry the last real value forward.
    for col in columns_to_fill:
        df_final[col] = df_final[col].replace(0, pd.NA).ffill().fillna(0)
    try:
        # Convert cumulative counters to per-second deltas (memory gauges excluded).
        for col in columns_to_fill:
            if col != "memory_usage" and col != "memory_working_set":
                df_final[col] = pd.to_numeric(df_final[col], errors='coerce').fillna(0)
                df_final[col] = df_final[col].diff().fillna(0)
        # Re-apply forward fill to cpu_usage_total after differencing.
        columns_to_fill2 = ["cpu_usage_total"]
        for col in columns_to_fill2:
            df_final[col] = df_final[col].replace(0, pd.NA).ffill().fillna(0)
    except Exception as e:
        print(f"Error processing {service_name} for diff: {e}")
        logging.error(f"Error processing {service_name} for diff: {e}")
        # Bug fix: write the diagnostic dump under out_path (was hard-coded
        # to "metrics/", which ignored the caller's output directory).
        os.makedirs(out_path, exist_ok=True)
        df_final.to_csv(os.path.join(out_path, f"{service_name}_error.csv"), index=False)
        return

    # The first non-zero network delta is the diff against the pre-window
    # counter value; zero it out so it does not appear as a traffic spike.
    for col in ["tx_bytes", "rx_bytes"]:
        non_zero_idx = df_final.index[df_final[col] != 0]
        if len(non_zero_idx) > 0:
            first_idx = non_zero_idx[0]
            df_final.at[first_idx, col] = 0

    # Ensure the output directory exists even when called directly (the
    # __main__ path passes a directory nothing else has created).
    os.makedirs(out_path, exist_ok=True)
    out_file = os.path.join(out_path, f"{service_name}.csv")
    df_final.to_csv(out_file, index=False)
    # Bug fix: report the actual file written, not a hard-coded "metrics/" path.
    print(f"Saved {out_file}")

if __name__ == "__main__":
    # Ad-hoc single-service run over a fixed capture window (Beijing time).
    window_start = datetime.strptime("2025-05-27 12:00:00", "%Y-%m-%d %H:%M:%S")
    window_end = datetime.strptime("2025-05-27 12:13:59", "%Y-%m-%d %H:%M:%S")
    process_service(
        "ts-basic-service",
        start_time=window_start,
        end_time=window_end,
        out_path="./data/TT.2025-05-27T120000D2025-05-27T121359/metrics",
    )
