{ "base_model_name_or_path": "pythainlp/wangchanglm-7.5B-sft-en-sharded", "inference_mode": true, "num_attention_heads": 24, "num_layers": 32, "num_transformer_submodules": 1, "num_virtual_tokens": 10, "peft_type": "PROMPT_TUNING", "prompt_tuning_init": "TEXT", "prompt_tuning_init_text": "\u0e08\u0e07\u0e41\u0e15\u0e48\u0e07\u0e19\u0e34\u0e22\u0e32\u0e22\u0e15\u0e48\u0e2d\u0e08\u0e32\u0e01\u0e40\u0e23\u0e37\u0e48\u0e2d\u0e07\u0e22\u0e48\u0e2d\u0e15\u0e48\u0e2d\u0e44\u0e1b\u0e19\u0e35\u0e49:", "task_type": "CAUSAL_LM", "token_dim": 4096, "tokenizer_name_or_path": "pythainlp/wangchanglm-7.5B-sft-en-sharded" }