teowu committed
Commit c6db862 (1 parent: b60180c)

Update config.json

Files changed (1): config.json (+9, -4)
config.json CHANGED

```diff
@@ -1,10 +1,15 @@
 {
-  "_name_or_path": "MAGAer13/mplug-owl2-llama2-7b",
+  "_name_or_path": "q-future/co-instruct-preview",
+  "model_type": "mplug_owl2",
   "architectures": [
     "MPLUGOwl2LlamaForCausalLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_mplug_owl2.MPLUGOwl2Config",
+    "AutoModel": "modeling_mplug_owl2.MPLUGOwl2LlamaForCausalLM",
+    "AutoModelForCausalLM": "modeling_mplug_owl2.MPLUGOwl2LlamaForCausalLM"
+  },
   "attention_bias": false,
-  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "freeze_vision_model": false,
@@ -25,9 +30,9 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.37.0.dev0",
+  "transformers_version": "4.31.0",
   "tune_visual_abstractor": true,
-  "use_cache": false,
+  "use_cache": true,
   "visual_abstractor_lr": null,
   "visual_config": {
     "visual_abstractor": {
```