import os
import time
import subprocess
from mmdet.apis import init_detector, inference_detector
import sys
import matplotlib
matplotlib.use('Agg')  # 设置使用Agg后端
import matplotlib.pyplot as plt
import argparse

def _gpu_memory_used_gb():
    """Return GPU memory currently in use, in GB.

    Shells out to ``nvidia-smi``. NOTE(review): assumes a single GPU —
    multi-GPU systems emit one line per device, which would break the
    ``int()`` parse; confirm against the deployment host.
    """
    output = subprocess.check_output(
        ['nvidia-smi', '--query-gpu=memory.used', '--format=csv,noheader,nounits'],
        encoding='utf-8')
    return int(output.strip()) / 1024

def process_images(image_dir, model):
    """Run inference on every file in *image_dir*, sampling GPU memory.

    Args:
        image_dir: directory containing image files (non-files are skipped).
        model: an initialized mmdet detector (from ``init_detector``).

    Returns:
        Tuple ``(before_list, after_list, total_inference_time, image_count)``:
        GPU memory (GB) sampled before/after each inference, total wall-clock
        inference time in seconds, and the number of images processed.
    """
    before_inference_list = []
    after_inference_list = []
    image_count = 0
    total_inference_time = 0.0

    # sorted() makes iteration (and hence the plot's x-axis) deterministic;
    # os.listdir order is filesystem-dependent.
    for image_file in sorted(os.listdir(image_dir)):
        image_path = os.path.join(image_dir, image_file)
        if not os.path.isfile(image_path):
            continue

        before_inference_list.append(_gpu_memory_used_gb())

        start_time = time.time()
        inference_detector(model, image_path)
        end_time = time.time()

        after_inference_list.append(_gpu_memory_used_gb())

        total_inference_time += end_time - start_time
        image_count += 1

    return before_inference_list, after_inference_list, total_inference_time, image_count

def plot_memory_usage(before_list, after_list, image_indices, image_dir,
                      output_dir='/home/jacy/GDW/data_KSDD_100e_2try/yolo_v5_rack_v1'):
    """Plot per-image GPU memory before/after inference and save a PNG.

    Args:
        before_list: memory usage (GB) sampled before each inference.
        after_list: memory usage (GB) sampled after each inference.
        image_indices: x-axis values (1-based image indices).
        image_dir: source image directory; its basename is embedded in the
            output file name.
        output_dir: directory the PNG is written to. Defaults to the original
            hard-coded project path so existing callers are unchanged.
    """
    plt.figure(figsize=(10, 6))
    plt.plot(image_indices, before_list, label='Before Inference', marker='o')
    plt.plot(image_indices, after_list, label='After Inference', marker='*')
    plt.xlabel('Image Index')
    plt.ylabel('Memory Used (GB)')
    plt.title('Memory Usage Before and After Inference')
    plt.legend()
    plt.tight_layout()
    plt.savefig(os.path.join(
        output_dir, f'memory_usage_plot_{os.path.basename(image_dir)}.png'))
    # Close the figure explicitly: with the Agg backend, figures are never
    # shown and would otherwise accumulate in memory across calls.
    plt.close()

def main(args):
    """Entry point: parse CLI args, load the detector, profile memory/time.

    Note: *args* (``sys.argv`` from the ``__main__`` guard) is ignored —
    argparse reads ``sys.argv`` itself. The parameter is kept so the
    existing ``main(sys.argv)`` call site keeps working.
    """
    parser = argparse.ArgumentParser(description='Process images and plot memory usage.')
    parser.add_argument('image_dir', type=str, help='Path to the image directory')
    args = parser.parse_args()

    config = r'/home/jacy/GDW/data_KSDD_100e_2try/yolo_v5_rack_v1/yolov5_s-v61_syncbn_8xb16-300e_coco_Rack.py'
    checkpoint = r'/home/jacy/GDW/data_KSDD_100e_2try/yolo_v5_rack_v1/best_coco_bbox_mAP_epoch_290.pth'

    # Sample GPU memory before and after model load to show the model's
    # own footprint. strip() drops nvidia-smi's trailing newline so the
    # two readings print on one line.
    mem_before_load = subprocess.check_output(
        ['nvidia-smi', '--query-gpu=memory.used', '--format=csv,noheader,nounits'],
        encoding='utf-8').strip()
    model = init_detector(config, checkpoint)
    mem_after_load = subprocess.check_output(
        ['nvidia-smi', '--query-gpu=memory.used', '--format=csv,noheader,nounits'],
        encoding='utf-8').strip()
    print(f"{mem_before_load} {mem_after_load}")

    before_inference_list, after_inference_list, total_inference_time, image_count = process_images(args.image_dir, model)
    image_indices = list(range(1, len(before_inference_list) + 1))
    plot_memory_usage(before_inference_list, after_inference_list, image_indices, args.image_dir)

    # Report the summed before/after memory delta across all images.
    print(f"Memory Usage Difference: {sum(after_inference_list) - sum(before_inference_list):.2f} GB")

    # Guard against an empty directory (the original divided by zero here).
    if image_count:
        print(f"Average Inference Time: {total_inference_time / image_count:.5f} s")
    else:
        print("No images processed; average inference time unavailable.")

if __name__ == '__main__':
    # argv is passed through, but main() re-parses sys.argv via argparse,
    # so the argument is effectively unused.
    main(sys.argv)