```json
{
  "alora_invocation_tokens": null,
  "alpha_pattern": {},
  "arrow_config": null,
  "auto_mapping": {
    "base_model_class": "LlavaForConditionalGeneration",
    "parent_library": "transformers.models.llava.modeling_llava",
    "unsloth_fixed": true
  },
  "base_model_name_or_path": "/root/.cache/modelscope/hub/models/unsloth/pixtral-12b-2409-bnb-4bit",
  "bias": "none",
  "corda_config": null,
  "ensure_weight_tying": false,
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 128,
  "lora_bias": false,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "peft_version": "0.18.1",
  "qalora_group_size": 16,
  "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "(?:.*?(?:language|text).*?(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense).*?(?:gate_proj|up_proj|down_proj|k_proj|v_proj|q_proj|o_proj).*?)|(?:\\bmodel\\.layers\\.[\\d]{1,}\\.(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense)\\.(?:(?:gate_proj|up_proj|down_proj|k_proj|v_proj|q_proj|o_proj)))",
  "target_parameters": null,
  "task_type": "CAUSAL_LM",
  "trainable_token_indices": null,
  "use_dora": false,
  "use_qalora": false,
  "use_rslora": false
}
```
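
This is the PEFT `adapter_config.json` for a LoRA adapter trained on top of the 4-bit Unsloth Pixtral-12B checkpoint. The `target_modules` regex restricts the adapter to the language tower's attention and MLP projections (`q_proj`/`k_proj`/`v_proj`/`o_proj` and `gate_proj`/`up_proj`/`down_proj`), leaving the vision encoder untouched. Since `use_rslora` is false, the LoRA update is applied with the standard scaling `lora_alpha / r = 128 / 64 = 2.0`. Below is a minimal sketch of loading this adapter for inference with `peft` and `transformers`; the adapter directory path is a hypothetical placeholder, and note that the stored `base_model_name_or_path` is the trainer's local ModelScope cache, so on another machine it would need to resolve to a reachable copy of `unsloth/pixtral-12b-2409-bnb-4bit`:

```python
from peft import PeftConfig, PeftModel
from transformers import AutoProcessor, LlavaForConditionalGeneration

# Hypothetical path to the directory holding this adapter_config.json
# and the adapter weights (adapter_model.safetensors).
adapter_dir = "./pixtral-12b-lora"

# Read the config shown above; base_model_name_or_path points at the
# 4-bit Unsloth Pixtral base checkpoint used during training.
peft_config = PeftConfig.from_pretrained(adapter_dir)

base_model = LlavaForConditionalGeneration.from_pretrained(
    peft_config.base_model_name_or_path,
    device_map="auto",
)
processor = AutoProcessor.from_pretrained(peft_config.base_model_name_or_path)

# Attach the LoRA weights; inference_mode is already true in the config,
# so the adapter loads with its parameters frozen, ready for generation.
model = PeftModel.from_pretrained(base_model, adapter_dir)
model.eval()
```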