printing on deformable self attention
models/GroundingDINO/transformer.py
CHANGED
@@ -847,6 +847,7 @@ class DeformableTransformerEncoderLayer(nn.Module):
     ):
         # self attention
         # import ipdb; ipdb.set_trace()
+        print("deformable self-attention")
         src2 = self.self_attn(
             query=self.with_pos_embed(src, pos),
             reference_points=reference_points,
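For context, a minimal sketch of where this print lands in the encoder layer's forward pass. Only the self_attn call signature (query built via with_pos_embed, reference_points) and the added print come from the diff; the stub attention, the dropout1/norm1 names, and the residual-plus-norm wiring are assumptions based on the standard Deformable DETR-style encoder layer, not this repository's exact code.

import torch
import torch.nn as nn


class DeformableTransformerEncoderLayerSketch(nn.Module):
    """Sketch of the encoder layer's self-attention step with the debug print."""

    def __init__(self, d_model=256):
        super().__init__()
        # Stand-in for the multi-scale deformable attention module; the real one
        # also takes spatial_shapes / level_start_index arguments.
        self.self_attn = lambda query, reference_points, **kw: query
        self.dropout1 = nn.Dropout(0.1)
        self.norm1 = nn.LayerNorm(d_model)

    @staticmethod
    def with_pos_embed(tensor, pos):
        # Add the positional encoding to the flattened feature tokens.
        return tensor if pos is None else tensor + pos

    def forward(self, src, pos, reference_points):
        # self attention
        print("deformable self-attention")  # the line added by this commit
        src2 = self.self_attn(
            query=self.with_pos_embed(src, pos),
            reference_points=reference_points,
        )
        # Residual connection and normalization around the attention output.
        src = src + self.dropout1(src2)
        src = self.norm1(src)
        return src


# Smoke test: one batch, 4 tokens, d_model=256, a single feature level.
layer = DeformableTransformerEncoderLayerSketch()
src = torch.randn(1, 4, 256)
out = layer(src, pos=torch.zeros_like(src), reference_points=torch.rand(1, 4, 1, 2))
print(out.shape)  # torch.Size([1, 4, 256])

With this change, the message is printed once per encoder layer on every forward pass, which confirms the deformable self-attention branch is actually being executed.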