X-LoRA_xlora_config.json
{"base_model_id":"HuggingFaceH4/zephyr-7b-beta", adapters={
"adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/",
"adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/",
"adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/",
"adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/",
"adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/",
"adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/",
"adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/",
"adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/",
"adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/",
}, "hidden_size": 4096, "enable_softmax": true, "enable_softmax_topk": false, "layerwise_scalings": true, "xlora_depth": 1, "xlora_size": 2048, "enable_relu_and_dropout": true, "use_bias": true, "xlora_dropout_p": 0.2, "stop_token_id": null, "use_trainable_adapters": false, "softmax_temperature": 1.0, "top_k_lora": null, "scaling_pass_value": 0.0}