from functools import wraps
from geesibling.adapters.pytorch.megatronModelToGeesiblingGraph import get_global_plan_param

def set_args_by_plan(args, plan):
    """Copy the parallelism layout from *plan* onto the Megatron *args* namespace.

    Mutates ``args`` in place: pipeline, tensor (rank-dependent), data and
    expert parallel sizes are taken from ``plan``. Returns ``None``.
    """
    # Tensor-parallel size depends on this process's rank; resolve it first.
    rank = args.rank
    tp_size = plan.get_tp_size_by_rank(rank)

    args.pipeline_model_parallel_size = plan.pipeline_model_parallel_size
    args.tensor_model_parallel_size = tp_size
    args.data_parallel_size = plan.data_parallel_size
    args.expert_model_parallel_size = plan.expert_parallel_size

    # Debug trace of the per-rank layout (kept verbatim from the original).
    print(f"########### my rank is {args.rank} &&&&&&&&&&&&pipeline_model_parallel_size = {args.pipeline_model_parallel_size} &&&&&&& tensor_model_parallel_size = {args.tensor_model_parallel_size}")

def initialize_megatron_wrapper(fn):
    """Decorator for Megatron initialization entry points.

    Fetches the global parallelism plan before delegating to the wrapped
    function. The wrapper is transparent: it forwards all arguments and
    propagates the wrapped function's return value.
    """
    @wraps(fn)
    def wrapper(self, *args, **kwargs):
        # NOTE(review): `plan` is fetched but not used here — presumably a
        # placeholder for plan-driven setup (e.g. set_args_by_plan). The call
        # is kept in case get_global_plan_param has required side effects;
        # confirm intent with the author. TODO: wire `plan` in or drop it.
        plan = get_global_plan_param()

        # Bug fix: the original discarded fn's return value; a transparent
        # decorator must return it so callers see the real result.
        return fn(self, *args, **kwargs)

    return wrapper