from transformers import PretrainedConfig


class DeepFakeConfig(PretrainedConfig):
    """Configuration class for the pulse2pulse-2 model."""

    model_type = "pulse2pulse-2"

    def __init__(self, architectures="AutoModel", **kwargs):
        # Example validation for model-specific options (currently unused):
        # if block_type not in ["basic", "bottleneck"]:
        #     raise ValueError(f"`block_type` must be 'basic' or 'bottleneck', got {block_type}.")
        # if stem_type not in ["", "deep", "deep-tiered"]:
        #     raise ValueError(f"`stem_type` must be '', 'deep' or 'deep-tiered', got {stem_type}.")
        self.architectures = architectures
        super().__init__(**kwargs)
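
As a quick sanity check, this config can be exercised like any other `PretrainedConfig`. The snippet below is a minimal sketch, assuming the `DeepFakeConfig` class above is in scope and using an illustrative local directory name; it registers the custom model type with `AutoConfig` and does a save/load round trip.

from transformers import AutoConfig

# Register the custom config so AutoConfig can resolve the "pulse2pulse-2" model type.
AutoConfig.register("pulse2pulse-2", DeepFakeConfig)

# Create, save, and reload the config (the directory name is illustrative only).
config = DeepFakeConfig(architectures="AutoModel")
config.save_pretrained("./pulse2pulse-2")
reloaded = AutoConfig.from_pretrained("./pulse2pulse-2")
print(reloaded.model_type)  # -> "pulse2pulse-2"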