from .p2p_communication import *
from .training import forward_backward_pipelining_without_interleaving
from .training2 import forward_backward_pipelining_without_interleaving2,forward_backward_pipelining_without_interleaving2_remove_sgp

from .training2 import forward_backward_pipelining_with_interleaving,forward_backward_pipelining_with_interleaving_overlap_comm


def adjust_batch(train_loader, args, len_dataset, micro_batch_size):
    """Shrink ``args.micro_batch`` when the loader does not split evenly.

    If ``len(train_loader)`` is not an exact multiple of ``args.micro_batch``,
    only the remainder-many micro-batches are kept; and if the dataset length
    is not an exact multiple of ``micro_batch_size`` (i.e. the final
    micro-batch is itself partial — presumably to avoid training on it), one
    more micro-batch is dropped.

    Mutates ``args.micro_batch`` in place and returns the adjusted value.

    Args:
        train_loader: sized iterable of batches (only ``len()`` is used).
        args: namespace carrying the mutable ``micro_batch`` count.
        len_dataset: total number of samples in the dataset.
        micro_batch_size: number of samples per micro-batch.

    Returns:
        The (possibly reduced) ``args.micro_batch``.
    """
    remainder = len(train_loader) % args.micro_batch
    if remainder == 0:
        # Loader splits evenly into micro-batch groups; nothing to adjust.
        return args.micro_batch

    # Keep only the leftover micro-batches of the incomplete tail group.
    # If the very last micro-batch is partial, drop it entirely as well.
    # NOTE(review): remainder == 1 with a partial tail yields 0 — assumed
    # to be handled by the caller; confirm.
    tail_is_partial = len_dataset % micro_batch_size != 0
    args.micro_batch = remainder - 1 if tail_is_partial else remainder
    return args.micro_batch