import torch
import json
from parse_config import Config

def restore_graph(compute_graph_path, meta_json_path):
    """Load a serialized traced model and re-attach per-node metadata.

    Parameters
    ----------
    compute_graph_path : str
        Path to a ``torch.save``-d traced module (e.g. a ``torch.fx.GraphModule``).
    meta_json_path : str
        Path to a JSON file holding a list with one metadata dict per graph
        node, in graph iteration order.

    Returns
    -------
    The loaded traced module with each node's ``meta`` dict updated in place.
    """
    # weights_only=False: the file stores a full pickled traced module, which
    # torch>=2.6 refuses to load under the new weights_only=True default.
    # NOTE(review): requires torch>=1.13 (where the kwarg was added) — confirm
    # the project's minimum torch version.
    loaded_traced = torch.load(compute_graph_path, weights_only=False)
    with open(meta_json_path, 'r') as f:
        meta_data = json.load(f)
    # Pair nodes with their saved metadata positionally; assumes the JSON list
    # was produced from the same graph in the same iteration order.
    for node, meta in zip(loaded_traced.graph.nodes, meta_data):
        node.meta.update(meta)
    return loaded_traced

def load_valid_nodes(valid_nodes_path):
    """Read and return the collection of valid node names from a JSON file."""
    with open(valid_nodes_path, 'r') as handle:
        return json.load(handle)

def find_good_seperate(model_traced, valid_nodes):
    """Accumulate per-segment resource stats between separator nodes.

    Walks the graph in order, tracking the running peak memory and summed
    elapsed time. Whenever a node named in ``valid_nodes`` is reached, the
    running totals (including that node's own cost) are recorded under its
    name along with its output memory, and the accumulators are reset.

    NOTE(review): a separator node's peak/time is counted both in its own
    segment and again at the start of the next one (the unconditional update
    below runs for every node) — presumably intentional for split-point
    estimation; confirm with the author.

    Returns a dict mapping separator node name -> {'peak_memory',
    'time_cost', 'output_memory'}.
    """
    separate_stats = {}
    running_peak = 0
    running_time = 0
    for node in model_traced.graph.nodes:
        if node.name in valid_nodes:
            separate_stats[node.name] = {
                'peak_memory': max(running_peak, node.meta['peak_memory']),
                'time_cost': running_time + node.meta['time_elapsed'],
                'output_memory': node.meta['output_memory'],
            }
            running_peak = 0
            running_time = 0
        # Every node (separators included) contributes to the next segment.
        running_peak = max(running_peak, node.meta['peak_memory'])
        running_time += node.meta['time_elapsed']
    return separate_stats
        
def main():
    """Restore the traced graph, score separator nodes, and dump the result."""
    # All paths come from the local JSON config file.
    cfg = Config('./config.json').config
    traced = restore_graph(cfg['formal_model_save_path'], cfg['meta_result_save_path'])
    separators = load_valid_nodes(cfg['valid_nodes_save_path'])
    segment_stats = find_good_seperate(traced, separators)
    with open(cfg['valid_resourse_cost_save_path'], 'w') as out_file:
        json.dump(segment_stats, out_file, indent=4)
    
# Script entry point: run only when executed directly, not when imported.
if __name__ == '__main__':
    main()