myownskyW7 committed on
Commit
b06eb0c
1 Parent(s): 0f9f66c

Upload modeling_InternLM_XComposer.py

Browse files
Files changed (1) hide show
  1. modeling_InternLM_XComposer.py +2 -6
modeling_InternLM_XComposer.py CHANGED
@@ -119,17 +119,13 @@ class InternLMXComposerForCausalLM(PreTrainedModel):
119
  vision_width,
120
  cross_attention_freq=2,
121
  pretrain=True):
122
- encoder_config = BertConfig.from_pretrained("bert-base-uncased")
123
  encoder_config.encoder_width = vision_width
124
  # insert cross-attention layer every other block
125
  encoder_config.add_cross_attention = True
126
  encoder_config.cross_attention_freq = cross_attention_freq
127
  encoder_config.query_length = num_query_token
128
- if pretrain:
129
- Qformer = BertLMHeadModel.from_pretrained("bert-base-uncased",
130
- config=encoder_config)
131
- else:
132
- Qformer = BertLMHeadModel(config=encoder_config)
133
  query_tokens = nn.Parameter(
134
  torch.zeros(1, num_query_token, encoder_config.hidden_size))
135
  query_tokens.data.normal_(mean=0.0,
 
119
  vision_width,
120
  cross_attention_freq=2,
121
  pretrain=True):
122
+ encoder_config = BertConfig()
123
  encoder_config.encoder_width = vision_width
124
  # insert cross-attention layer every other block
125
  encoder_config.add_cross_attention = True
126
  encoder_config.cross_attention_freq = cross_attention_freq
127
  encoder_config.query_length = num_query_token
128
+ Qformer = BertLMHeadModel(config=encoder_config)
 
 
 
 
129
  query_tokens = nn.Parameter(
130
  torch.zeros(1, num_query_token, encoder_config.hidden_size))
131
  query_tokens.data.normal_(mean=0.0,