import torch.nn as nn


def activation_factory(name, inplace=True):
    """Build an activation ``nn.Module`` from its lowercase name.

    Args:
        name: One of ``"relu"``, ``"relu6"``, ``"leakyrelu"``, ``"tanh"``,
            ``"linear"``, or ``None``. ``"linear"``/``None`` yield
            ``nn.Identity`` (a no-op).
        inplace: Forwarded to the ReLU-family activations; ignored by
            ``Tanh``/``Identity``, which have no in-place variant.

    Returns:
        The instantiated activation module.

    Raises:
        ValueError: If ``name`` is not one of the supported activations.
    """
    if name == "relu":
        return nn.ReLU(inplace=inplace)
    elif name == "relu6":  # ReLU6 support
        return nn.ReLU6(inplace=inplace)
    elif name == "leakyrelu":
        # Fixed negative slope of 0.2 (common GAN-style default).
        return nn.LeakyReLU(0.2, inplace=inplace)
    elif name == "tanh":
        return nn.Tanh()
    elif name == "linear" or name is None:
        return nn.Identity()
    else:
        # Single formatted message: the original passed two args to
        # ValueError, which renders as a tuple in the traceback.
        raise ValueError(f"Not supported activation: {name}")
