from transformers import AutoModel, AutoConfig, Qwen3Config, Qwen3ForCausalLM

from ai_lab import AiLabForCausalLM, AiLabConfig, AiLabModel, AiLabForSequenceClassification, AiLabForQuestionAnswering, \
    AiLabPreTrainedModel, AiLabForTokenClassification
from utils.param_util import cal_model_params


def test_qwen3_config():
    """Fetch the Qwen3-0.6B config via ModelScope and save it locally.

    HuggingFace is not directly reachable from mainland China, so the
    model's configuration file is downloaded through ModelScope instead.
    """
    import modelscope

    qwen_config = modelscope.AutoConfig.from_pretrained("Qwen/Qwen3-0.6B")
    qwen_config.save_pretrained("Qwen3_config")

def test_qwen3_tokenizer():
    """Fetch the Qwen3-0.6B tokenizer via ModelScope and save it locally."""
    import modelscope

    qwen_tokenizer = modelscope.AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B")
    qwen_tokenizer.save_pretrained("qwen3_token")

def test_ailab_config():
    """Persist a default AiLab configuration to the 'ai-lab-data' directory."""
    AiLabConfig().save_pretrained('ai-lab-data')



def test2_ailab_model():
    """Build a causal-LM model from the default AiLab config and persist it."""
    default_config = AiLabConfig()
    causal_lm = AiLabForCausalLM(default_config)
    causal_lm.save_pretrained('ai-lab-data')


def test_init_ailab_model():
    """Initialize the default (0.6B) AiLab model and save it, together with
    the Qwen3 tokenizer, under data/AiLab-0.6B."""
    import modelscope

    target_dir = f"data/{'AiLab-0.6B'}"
    # Register the AiLab classes so the saved artifacts are auto-class loadable.
    register_ai_lab_to_autoclass()
    AiLabForCausalLM(AiLabConfig()).save_pretrained(target_dir)
    qwen_tokenizer = modelscope.AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B")
    qwen_tokenizer.save_pretrained(target_dir)

def register_ai_lab_to_autoclass():
    """Register the AiLab config and model classes with the transformers
    auto-class machinery (AutoModel, AutoModelForCausalLM, ...)."""
    AiLabConfig.register_for_auto_class()
    # Dict preserves insertion order, so registration order matches the
    # original call sequence.
    auto_class_map = {
        "AutoModel": AiLabModel,
        "AutoModelForPreTraining": AiLabPreTrainedModel,
        "AutoModelForCausalLM": AiLabForCausalLM,
        "AutoModelForSequenceClassification": AiLabForSequenceClassification,
        "AutoModelForQuestionAnswering": AiLabForQuestionAnswering,
        "AutoModelForTokenClassification": AiLabForTokenClassification,
    }
    for auto_name, model_cls in auto_class_map.items():
        model_cls.register_for_auto_class(auto_name)

def test_gen_0__3B():
    """Generate a ~0.3B-parameter AiLab model and save it (plus the Qwen3
    tokenizer) under data/AiLab-0.3B."""
    import modelscope

    register_ai_lab_to_autoclass()
    out_dir = f"data/{'AiLab-0.3B'}"
    # Downsized architecture relative to the 0.6B default.
    cfg_0_3b = AiLabConfig(
        hidden_size=768,
        intermediate_size=2304,
        num_hidden_layers=16,
        num_attention_heads=16,
        num_key_value_heads=8,
        head_dim=64,
    )
    AiLabForCausalLM(cfg_0_3b).save_pretrained(out_dir)
    modelscope.AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B").save_pretrained(out_dir)

def test_gen_0__1B():
    """Generate a ~0.1B-parameter AiLab model and save it (plus the Qwen3
    tokenizer) under data/AiLab-0.1B."""
    import modelscope

    register_ai_lab_to_autoclass()
    out_dir = f"data/{'AiLab-0.1B'}"
    # Smallest variant: narrower hidden/FFN dims and fewer attention heads.
    cfg_0_1b = AiLabConfig(
        hidden_size=384,
        intermediate_size=1024,
        num_hidden_layers=16,
        num_attention_heads=8,
        num_key_value_heads=8,
        head_dim=48,
    )
    AiLabForCausalLM(cfg_0_1b).save_pretrained(out_dir)
    modelscope.AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B").save_pretrained(out_dir)

def test_gen_qwen3_0__1B():
    """Generate a ~0.1B-parameter Qwen3 baseline model with the same
    architecture as the AiLab 0.1B variant, saved under data/Qwen3-0.1B."""
    import modelscope

    out_dir = f"data/{'Qwen3-0.1B'}"
    baseline_cfg = Qwen3Config(
        hidden_size=384,
        intermediate_size=1024,
        num_hidden_layers=16,
        num_attention_heads=8,
        num_key_value_heads=8,
        head_dim=48,
    )
    Qwen3ForCausalLM(baseline_cfg).save_pretrained(out_dir)
    modelscope.AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B").save_pretrained(out_dir)

def test_params():
    """Print parameter counts for the three AiLab size variants."""
    # Default config: ~663.71M parameters (the 0.6B variant).
    cal_model_params(AiLabConfig())

    # ~327.84M parameters (the 0.3B variant).
    cal_model_params(AiLabConfig(
        hidden_size=768,
        intermediate_size=2304,
        num_hidden_layers=16,
        num_attention_heads=16,
        num_key_value_heads=8,
        head_dim=64,
    ))

    # ~138.75M parameters (the 0.1B variant).
    cal_model_params(AiLabConfig(
        hidden_size=384,
        intermediate_size=1024,
        num_hidden_layers=16,
        num_attention_heads=8,
        num_key_value_heads=8,
        head_dim=48,
    ))
