import torch
import torch.nn as nn
import re
class IdentityMap(nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, x, *args, **kwargs):
        return x

    @property
    def config(self):
        return {"mm_projector_type": 'identity'}
class SimpleResBlock(nn.Module):
    def __init__(self, channels):
        super().__init__()
        self.pre_norm = nn.LayerNorm(channels)
        self.proj = nn.Sequential(
            nn.Linear(channels, channels),
            nn.GELU(),
            nn.Linear(channels, channels)
        )

    def forward(self, x):
        x = self.pre_norm(x)
        return x + self.proj(x)
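# Quick shape check for SimpleResBlock (illustrative sketch only; the sizes
# below are arbitrary assumptions, not taken from any real model config).
def _demo_simple_res_block():
    block = SimpleResBlock(channels=1024)
    tokens = torch.randn(2, 576, 1024)       # (batch, num_tokens, channels)
    out = block(tokens)                       # residual connection preserves shape
    assert out.shape == tokens.shape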
def build_vision_projector(config, delay_load=False, **kwargs):
    projector_type = getattr(config, 'mm_projector_type', 'linear')
    if projector_type == 'linear':
        return nn.Linear(config.mm_hidden_size, config.hidden_size)
    mlp_gelu_match = re.match(r'^mlp(\d+)x_gelu$', projector_type)
    if mlp_gelu_match:
        mlp_depth = int(mlp_gelu_match.group(1))
        modules = [nn.Linear(config.mm_hidden_size, config.hidden_size)]
        for _ in range(1, mlp_depth):
            modules.append(nn.GELU())
            modules.append(nn.Linear(config.hidden_size, config.hidden_size))
        return nn.Sequential(*modules)
    if projector_type == 'identity':
        return IdentityMap()
    raise ValueError(f'Unknown projector type: {projector_type}')
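# Minimal usage sketch for build_vision_projector. The config here is a
# stand-in namespace with assumed attribute values; real callers pass a model
# config exposing mm_projector_type, mm_hidden_size, and hidden_size.
def _demo_build_vision_projector():
    from types import SimpleNamespace
    cfg = SimpleNamespace(mm_projector_type='mlp2x_gelu',
                          mm_hidden_size=1024, hidden_size=4096)
    projector = build_vision_projector(cfg)   # Linear -> GELU -> Linear
    image_features = torch.randn(2, 576, 1024)
    assert projector(image_features).shape == (2, 576, 4096)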
class DenseConnector(nn.Module):
    def __init__(self, config):
        super().__init__()
        mm_hidden_size = config.mm_hidden_size
        self.dense_connector_type = getattr(config, 'dense_connector_type', 'channel_cat')
        if self.dense_connector_type == 'token_cat':
            # Downsample the extra tokens 8x before concatenating along the token dim.
            self.avg_pooling_k8 = nn.AvgPool1d(kernel_size=8, stride=8)
        elif self.dense_connector_type == 'channel_cat':
            pass
        elif self.dense_connector_type == 'channel_sum_cat':
            self.dc_ln = nn.LayerNorm(mm_hidden_size)
            self.dc_linear = nn.Linear(mm_hidden_size, mm_hidden_size)
        else:
            raise ValueError(f'Unknown dense connector type: {self.dense_connector_type}')

    def forward(self, image_forward_outs, selected_features):
        # Sketch of the fusion step (the original body was incomplete). Assumes
        # image_forward_outs.hidden_states holds per-layer vision features of shape
        # (batch, tokens, mm_hidden_size) and selected_features is the final feature map.
        hidden_states = image_forward_outs.hidden_states
        extra = torch.stack(hidden_states[:len(hidden_states) // 2], dim=0).mean(dim=0)
        if self.dense_connector_type == 'token_cat':
            extra = self.avg_pooling_k8(extra.permute(0, 2, 1)).permute(0, 2, 1)
            return torch.cat([selected_features, extra], dim=1)
        if self.dense_connector_type == 'channel_cat':
            return torch.cat([selected_features, extra], dim=-1)
        # channel_sum_cat: normalize and project the averaged features first.
        return torch.cat([selected_features, self.dc_linear(self.dc_ln(extra))], dim=-1)
def build_dense_connector(config, delay_load=False, **kwargs):
    projector_type = getattr(config, 'mm_projector_type', 'linear')
    if projector_type == 'linear':
        return nn.Linear(config.mm_hidden_size, config.hidden_size)
    mlp_gelu_match = re.match(r'^mlp(\d+)x_gelu$', projector_type)
    if mlp_gelu_match:
        # Build the dense connector alongside the MLP projector; the caller is
        # assumed to run the vision-tower outputs through the connector first.
        dense_connector = DenseConnector(config)
        mlp_depth = int(mlp_gelu_match.group(1))
        # Channel-wise concatenation doubles the feature width the projector sees.
        in_dim = config.mm_hidden_size
        if getattr(config, 'dense_connector_type', 'channel_cat') != 'token_cat':
            in_dim *= 2
        modules = [nn.Linear(in_dim, config.hidden_size)]
        for _ in range(1, mlp_depth):
            modules.append(nn.GELU())
            modules.append(nn.Linear(config.hidden_size, config.hidden_size))
        return dense_connector, nn.Sequential(*modules)
    if projector_type == 'identity':
        return IdentityMap()
    raise ValueError(f'Unknown projector type: {projector_type}')
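# End-to-end sketch of the dense-connector path, using a stand-in config and a
# fake vision-tower output. All sizes and the hidden_states layout are assumptions;
# 'token_cat' is chosen so the projector input width stays at mm_hidden_size.
def _demo_build_dense_connector():
    from types import SimpleNamespace
    cfg = SimpleNamespace(mm_projector_type='mlp2x_gelu', mm_hidden_size=1024,
                          hidden_size=4096, dense_connector_type='token_cat')
    connector, projector = build_dense_connector(cfg)
    # Fake CLIP-style output: a tuple of per-layer hidden states (batch, tokens, dim).
    hidden_states = tuple(torch.randn(2, 576, 1024) for _ in range(24))
    image_forward_outs = SimpleNamespace(hidden_states=hidden_states)
    selected_features = hidden_states[-2]
    fused = connector(image_forward_outs, selected_features)   # pooled extra tokens appended
    assert fused.shape == (2, 576 + 576 // 8, 1024)
    assert projector(fused).shape == (2, 576 + 576 // 8, 4096)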