Update modeling_mplug_owl2.py
modeling_mplug_owl2.py  CHANGED  (+0 -2)
@@ -360,11 +360,9 @@ class MPLUGOwl2LlamaForCausalLM(LlamaForCausalLM, MPLUGOwl2MetaForCausalLM):
         with torch.inference_mode():
             output_logits = self(input_ids, images=image_tensor)["logits"][:, -1, self.preferential_ids_]
             output_logits = output_logits.cpu().detach().numpy() / 100
-            print(output_logits)
             probabilities.append(np.dot(softmax(output_logits), self.weight_tensor))
         updated_matrix = update_matrix(self.anchor_matrix, np.squeeze(np.array(probabilities)), self.anchor_indices)
         score = optimize_score_map_pytorch_cuda(updated_matrix, seed=0, original_seed=20020, num_iterations=100)
-        print(score)
         return score

     def forward(
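For context, the retained lines turn the logits of the preferential tokens into a softmax-weighted probability before the anchor-matrix update. A minimal, self-contained sketch of that step is shown below; it is illustrative only, assuming scipy's softmax and a made-up weight vector and function name rather than the model's self.weight_tensor.

# Illustrative sketch of the probability step kept in the diff above; the
# weight values and the function name are assumptions, not code from this repo.
import numpy as np
from scipy.special import softmax

def preference_probability(output_logits: np.ndarray, weight_tensor: np.ndarray) -> float:
    # The diff scales the logits by 1/100 before the softmax-weighted sum.
    scaled = output_logits / 100
    return float(np.dot(softmax(scaled, axis=-1), weight_tensor))

# Hypothetical logits over five preference levels and an assumed weight vector.
logits = np.array([120.0, 80.0, 10.0, -30.0, -100.0])
weights = np.array([1.0, 0.75, 0.5, 0.25, 0.0])
print(preference_probability(logits, weights))  # scalar preference probability

In the actual method, np.dot(softmax(output_logits), self.weight_tensor) is appended per image, and the resulting vector feeds update_matrix and optimize_score_map_pytorch_cuda as shown in the diff.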