ahmedabdo committed on
Commit
2482723
1 Parent(s): d3e78be

Upload model

Browse files
Files changed (2) hide show
  1. adapter_config.json +4 -3
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,14 +1,15 @@
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": {
4
- "base_model_class": "Blip2ForConditionalGeneration",
5
- "parent_library": "transformers.models.blip_2.modeling_blip_2"
6
  },
7
- "base_model_name_or_path": "ybelkada/blip2-opt-2.7b-fp16-sharded",
8
  "bias": "none",
9
  "fan_in_fan_out": false,
10
  "inference_mode": true,
11
  "init_lora_weights": true,
 
12
  "layers_pattern": null,
13
  "layers_to_transform": null,
14
  "loftq_config": {},
 
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": {
4
+ "base_model_class": "PeftModel",
5
+ "parent_library": "peft.peft_model"
6
  },
7
+ "base_model_name_or_path": null,
8
  "bias": "none",
9
  "fan_in_fan_out": false,
10
  "inference_mode": true,
11
  "init_lora_weights": true,
12
+ "layer_replication": null,
13
  "layers_pattern": null,
14
  "layers_to_transform": null,
15
  "loftq_config": {},
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:64fb3b01064070244f6aa1a7b5b6dcb12c45cbbdfd0e07f3468c39b348f8c8bc
3
- size 20991608
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6506481cdcfa9490ab92df4558a203400af81dab3285edcf2ea2ef618f85c00d
3
+ size 20993784