```json
{
  "_name_or_path": "NousResearch/Llama-2-7b-chat-hf",
  "architectures": [
    "LlamaForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "id2label": {
    "0": "DISAPPOINTED",
    "1": "SENTIMENTAL",
    "2": "PREPARED",
    "3": "AFRAID",
    "4": "GUILTY",
    "5": "ANGRY",
    "6": "IMPRESSED",
    "7": "CONFIDENT",
    "8": "PROUD",
    "9": "HOPEFUL",
    "10": "SAD",
    "11": "LONELY",
    "12": "TRUSTING",
    "13": "SURPRISED",
    "14": "EXCITED",
    "15": "ANNOYED",
    "16": "DISGUSTED",
    "17": "JEALOUS",
    "18": "ANXIOUS",
    "19": "CARING",
    "20": "CONTENT",
    "21": "GRATEFUL",
    "22": "JOYFUL",
    "23": "EMBARRASSED",
    "24": "ANTICIPATING",
    "25": "DEVASTATED",
    "26": "FAITHFUL"
  },
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "label2id": {
    "AFRAID": 3,
    "ANGRY": 5,
    "ANNOYED": 15,
    "ANTICIPATING": 24,
    "ANXIOUS": 18,
    "CARING": 19,
    "CONFIDENT": 7,
    "CONTENT": 20,
    "DEVASTATED": 25,
    "DISAPPOINTED": 0,
    "DISGUSTED": 16,
    "EMBARRASSED": 23,
    "EXCITED": 14,
    "FAITHFUL": 26,
    "GRATEFUL": 21,
    "GUILTY": 4,
    "HOPEFUL": 9,
    "IMPRESSED": 6,
    "JEALOUS": 17,
    "JOYFUL": 22,
    "LONELY": 11,
    "PREPARED": 2,
    "PROUD": 8,
    "SAD": 10,
    "SENTIMENTAL": 1,
    "SURPRISED": 13,
    "TRUSTING": 12
  },
  "max_position_embeddings": 4096,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pad_token_id": 0,
  "pretraining_tp": 1,
  "problem_type": "single_label_classification",
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "float16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "nf4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.40.1",
  "use_cache": false,
  "vocab_size": 32028
}
```
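
For context, here is a minimal sketch of how a checkpoint carrying this config might be loaded and queried with `transformers` and `bitsandbytes`. The `./emotion-llama` path is a placeholder for wherever the fine-tuned weights and this `config.json` actually live, and the sample input and predicted label are purely illustrative; the quantization settings mirror the `quantization_config` block above (4-bit NF4 weights, `float16` compute, no double quantization).

```python
import torch
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    BitsAndBytesConfig,
)

# Mirror the quantization_config block above: 4-bit NF4 weights,
# float16 compute dtype, double quantization disabled.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=False,
)

# Placeholder path: point this at the directory holding the
# fine-tuned weights and the config.json shown above.
checkpoint = "./emotion-llama"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(
    checkpoint,
    quantization_config=bnb_config,
    device_map="auto",  # requires the accelerate package
)

# Single-label classification over the 27 emotions in id2label.
text = "I can't believe they forgot my birthday again."
inputs = tokenizer(text, return_tensors="pt").to(model.device)
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(dim=-1).item()])  # e.g. "DISAPPOINTED"
```

Note that the config sets `pad_token_id` to 0, since the base Llama-2 tokenizer ships without a pad token; padding only matters here if you batch multiple sequences. The `architectures` entry (`LlamaForSequenceClassification`) is what lets `AutoModelForSequenceClassification` resolve the right head, and `problem_type: "single_label_classification"` selects a cross-entropy loss during training.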
|
|