{ "_name_or_path": "FacebookAI/xlm-roberta-large", "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "Th\u1eddi Trang Nam", "1": "Th\u1eddi Trang N\u1eef", "2": "Th\u1eddi Trang Tr\u1ebb Em", "3": "Gi\u00e0y D\u00e9p N\u1eef", "4": "Gi\u00e0y D\u00e9p Nam", "5": "T\u00fai V\u00ed N\u1eef", "6": "Balo & T\u00fai V\u00ed Nam", "7": "Ph\u1ee5 Ki\u1ec7n & Trang S\u1ee9c N\u1eef", "8": "\u0110i\u1ec7n Tho\u1ea1i & Ph\u1ee5 Ki\u1ec7n", "9": "Thi\u1ebft B\u1ecb \u0110i\u1ec7n T\u1eed", "10": "M\u00e1y T\u00ednh & Laptop", "11": "M\u00e1y \u1ea2nh & M\u00e1y Quay Phim", "12": "Thi\u1ebft B\u1ecb \u0110i\u1ec7n Gia D\u1ee5ng", "13": "Nh\u00e0 C\u1eeda & \u0110\u1eddi S\u1ed1ng", "14": "Gi\u1eb7t Gi\u0169 & Ch\u0103m S\u00f3c Nh\u00e0 C\u1eeda", "15": "D\u1ee5ng C\u1ee5 & Thi\u1ebft B\u1ecb Ti\u1ec7n \u00cdch", "16": "S\u1eafc \u0110\u1eb9p", "17": "S\u1ee9c Kh\u1ecfe", "18": "M\u1eb9 & B\u00e9", "19": "\u0110\u1ed3 Ch\u01a1i", "20": "Th\u1ec3 Thao & Du L\u1ecbch", "21": "\u00d4 T\u00f4", "22": "Xe M\u00e1y", "23": "Xe \u0110\u1ea1p", "24": "B\u00e1ch H\u00f3a Online", "25": "Voucher & D\u1ecbch V\u1ee5", "26": "Nh\u00e0 S\u00e1ch Online", "27": "Ch\u0103m S\u00f3c Th\u00fa C\u01b0ng" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "Balo & T\u00fai V\u00ed Nam": 6, "B\u00e1ch H\u00f3a Online": 24, "Ch\u0103m S\u00f3c Th\u00fa C\u01b0ng": 27, "D\u1ee5ng C\u1ee5 & Thi\u1ebft B\u1ecb Ti\u1ec7n \u00cdch": 15, "Gi\u00e0y D\u00e9p Nam": 4, "Gi\u00e0y D\u00e9p N\u1eef": 3, "Gi\u1eb7t Gi\u0169 & Ch\u0103m S\u00f3c Nh\u00e0 C\u1eeda": 14, "M\u00e1y T\u00ednh & Laptop": 10, "M\u00e1y \u1ea2nh & M\u00e1y Quay Phim": 11, "M\u1eb9 & B\u00e9": 18, "Nh\u00e0 C\u1eeda & \u0110\u1eddi S\u1ed1ng": 13, "Nh\u00e0 S\u00e1ch Online": 26, "Ph\u1ee5 Ki\u1ec7n & Trang S\u1ee9c N\u1eef": 7, "S\u1eafc \u0110\u1eb9p": 16, "S\u1ee9c Kh\u1ecfe": 17, "Thi\u1ebft B\u1ecb \u0110i\u1ec7n Gia D\u1ee5ng": 12, "Thi\u1ebft B\u1ecb \u0110i\u1ec7n T\u1eed": 9, "Th\u1ec3 Thao & Du L\u1ecbch": 20, "Th\u1eddi Trang Nam": 0, "Th\u1eddi Trang N\u1eef": 1, "Th\u1eddi Trang Tr\u1ebb Em": 2, "T\u00fai V\u00ed N\u1eef": 5, "Voucher & D\u1ecbch V\u1ee5": 25, "Xe M\u00e1y": 22, "Xe \u0110\u1ea1p": 23, "\u00d4 T\u00f4": 21, "\u0110i\u1ec7n Tho\u1ea1i & Ph\u1ee5 Ki\u1ec7n": 8, "\u0110\u1ed3 Ch\u01a1i": 19 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.48.0", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }