import os
import subprocess
import sys

from util import *
from CallGraph_clean_and_sample import *
from CallGraph_statistic_base import *
from CallGraph_request_number import *

def down_load_dataset(dataset_path, fetch_start_date=None, fetch_end_date=None):
    """Download the dataset into ``dataset_path`` unless it already exists.

    Args:
        dataset_path (str): Target directory the dataset should live in.
        fetch_start_date (str, optional): Start of the date range handed to
            ``fetchData.sh``. Defaults to the module-level ``start_date``.
        fetch_end_date (str, optional): End of the date range. Defaults to
            the module-level ``end_date``.

    Side effects:
        Runs ``bash fetchData.sh`` and terminates the process via
        ``sys.exit(1)`` when the download script reports failure.
    """
    if os.path.exists(dataset_path):
        print(f"数据集已存在！（{dataset_path}）")
        return

    # Backward-compatible: with no explicit range, fall back to the module
    # globals the original code read implicitly.
    if fetch_start_date is None:
        fetch_start_date = start_date
    if fetch_end_date is None:
        fetch_end_date = end_date

    print(f"开始下载数据集...")
    # Argument list instead of a shell-interpolated string (os.system):
    # no quoting/injection pitfalls, and the return code is explicit.
    result = subprocess.run(
        [
            "bash",
            "fetchData.sh",
            f"start_date={fetch_start_date}",
            f"end_date={fetch_end_date}",
        ]
    )
    if result.returncode == 0:
        print(f"数据集下载完毕。")
    else:
        print("下载出错！")
        sys.exit(1)  # was exit(-1); sys.exit works outside interactive sessions


def get_time_str():
    """Return the current local time as an ``MM-DD-HH-MM-SS`` string."""
    from datetime import datetime

    return datetime.now().strftime("%m-%d-%H-%M-%S")


def ensure_directory_exists(path, folder_name):
    """Ensure ``path/folder_name`` exists, creating it when missing.

    Args:
        path (str): Base directory.
        folder_name (str): Name of the folder to look for / create.

    Returns:
        str: The joined folder path on success, or ``None`` when the
        directory could not be created.

    Example:
        >>> ensure_directory_exists("/home/user", "data")
        '/home/user/data'
    """
    full_path = os.path.join(path, folder_name)

    # Guard clause: nothing to do when the path is already present.
    if os.path.exists(full_path):
        print(f"文件夹已存在: {full_path}")
        return full_path

    try:
        # exist_ok=True also covers a concurrent creation between the
        # exists() check above and this call.
        os.makedirs(full_path, exist_ok=True)
    except OSError as e:
        print(f"创建文件夹失败: {e}")
        return None

    print(f"文件夹已创建: {full_path}")
    return full_path


        

# ---------------------------------------------------------------------------
# Pipeline driver (runs at import/execution time).
# NOTE(review): `time`, `pd` (pandas), `get_cur_dir`, and
# `clean_and_sample_dataset_multi_processing` are assumed to come from the
# star imports at the top of the file — confirm against util / CallGraph_*.
# ---------------------------------------------------------------------------
start_time=time.time()
parallel_num=32       # worker count for multiprocessing; "120G->32" suggests 120 GB RAM supports 32 workers
# current_time_str=get_time_str()
# current_time_str="07-30-09-01-43"
# current_time_str="common"
file_number=0
cur_dir=get_cur_dir()
dataset_dir="/root/autodl-tmp"
step_list=[ "sift_validate_data"]# enabled steps; full set: "download_data", "get_top_service", "single_service_a_file", "sift_validate_data"

# Step 1: fetch the raw dataset for the configured date range (when enabled).
start_date="0d11"
end_date="0d12"
dataset_dir_name=f"dataset_{start_date}_{end_date}_new"
dataset_path=dataset_dir+"/"+dataset_dir_name
if "download_data" in step_list:
    down_load_dataset(dataset_path)

# Step 2: pick the top-N services by request volume from the history stats CSV.
high_frequency_N=10
start_date="0d11"
end_date="0d12"
history_dataset_dir_name=f"dataset_{start_date}_{end_date}_new"
top_n_file_path=f"dealed_data/statistic_pkl_sift_{history_dataset_dir_name}.csv"

input_file_path_all_service_call=f"dealed_data/statistic_pkl_sift_{history_dataset_dir_name}.csv"
out_file_path_topcall=input_file_path_all_service_call
spec=""
if "get_top_service" in step_list:
    spec=f"top{high_frequency_N}_"
    out_file_path_topcall=f"dealed_data/statistic_pkl_sift_{history_dataset_dir_name}_Call_top{high_frequency_N}.csv"
    pd_all=pd.read_csv(input_file_path_all_service_call)
    # NOTE(review): column name " all request num" carries a leading space —
    # presumably it matches the CSV header exactly; confirm before changing.
    new_pd=pd_all.sort_values(by=" all request num", ascending=False).head(high_frequency_N)
    new_pd.to_csv(out_file_path_topcall, index=False)
    top_n_file_path=out_file_path_topcall


# Step 3: produce the validation data, either one output file per top service
# (the "single_service_a_file" branch) or 10 seeded 10%-sample history files.
if "single_service_a_file" in step_list:
    # Per-service mode requires the top-N file produced by step 2.
    # NOTE(review): `assert` is stripped under `python -O`.
    assert "get_top_service" in step_list
    seed=0
    percent=1
    pd_all=pd.read_csv(out_file_path_topcall)
    for service_name in pd_all["service"].tolist():
        
        out_file_name_clean_and_sample=f"CallGraph_{dataset_dir_name}_{service_name}_cleaned_validate.csv"
        out_file_path_vali=cur_dir+"/dealed_data/"+out_file_name_clean_and_sample

        if "sift_validate_data" in step_list:
            clean_and_sample_dataset_multi_processing(seed, percent, dataset_path,  out_file_path_vali, top_n_file_path=out_file_path_topcall, file_number=file_number,parallel_num=parallel_num, service_name=service_name)
    
else:
    # Sampled-history mode: 10 independent random seeds, 10% sample each.
    for seed in range(10):
        # seed=9
        percent=0.1

        out_file_name_clean_and_sample=f"CallGraph_{dataset_dir_name}_{spec}cleaned_{percent}_s{seed}_history.csv"
        out_file_path_vali=cur_dir+"/dealed_data/"+out_file_name_clean_and_sample

        if "sift_validate_data" in step_list:
            clean_and_sample_dataset_multi_processing(seed, percent, dataset_path,  out_file_path_vali, top_n_file_path=out_file_path_topcall, file_number=file_number,parallel_num=parallel_num)
        
print(f"持续时间：{time.time()-start_time}")