from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import Dict, List
from ..common.base_simulator_builder import GeesTransformerLayerSimulatorBuilder

# The base architecture here is GPT.
class GeesGptSimulatorBuilder(GeesTransformerLayerSimulatorBuilder):
    """Simulator builder for GPT-style transformer models.

    Registers a dispatch table from a layer's class name to the ordered
    list of builder callbacks (presumably provided by the base class —
    none are defined here) that may handle that layer.
    """

    def __init__(self):
        super().__init__()

        # Build the dispatch table incrementally; insertion order is
        # preserved by dict and mirrors the original declaration order.
        mapping = {}

        # --- embedding builders ---
        mapping["LanguageModelEmbedding"] = [self.build_language_embeddings]
        mapping["VocabParallelEmbedding"] = [self.build_word_embeddings]
        mapping["Embedding"] = [
            self.build_position_embeddings,
            self.build_tokentype_embeddings,
        ]

        # --- transformer-layer builders ---
        # When a key maps to several candidates, the relative order of
        # occurrence decides which builder is invoked.
        mapping["FusedLayerNorm"] = [
            self.build_input_layernorm,
            self.build_pre_mlp_layernorm,
        ]
        mapping["SelfAttention"] = [self.build_self_attention]
        mapping["DotProductAttention"] = [self.build_core_attention]
        mapping["FusedScaleMaskSoftmax"] = [self.build_scale_mask_softmax]
        mapping["Dropout"] = [
            self.build_embedding_dropout,
            self.build_attention_dropout,
        ]
        mapping["ColumnParallelLinear"] = [
            self.build_linear_qkv,
            self.build_linear_fc1,
            self.build_output_layer,
        ]
        mapping["RowParallelLinear"] = [
            self.build_linear_proj,
            self.build_linear_fc2,
        ]
        mapping["IdentityOp"] = [
            self.build_q_layernorm,
            self.build_k_layernorm,
            self.build_pre_cross_attn_layernorm,
            self.build_cross_attention,
        ]
        mapping["IdentityFuncOp"] = [self.build_self_attn_bda]
        # For MoE models, hitting "MLP" does not mean build_mlp but
        # something like mlp.export, because the preceding MoE layer
        # should already have called build_mlp; a plain MLP can be seen
        # as an MoE model with a single expert.
        mapping["MLP"] = [self.build_mlp]

        # --- post-layer-spec normalization ---
        mapping["LayerNorm"] = [self.build_layer_norm]

        # NOTE(review): "methond" is a typo, but the attribute name is
        # part of the external interface and is kept for compatibility.
        self.TransformerLayer_methond_mapping = mapping