Commit
•
0083e2c
1
Parent(s):
058d8fb
Upload modeling_vit.py
Browse files — modeling_vit.py (+4, −2)
modeling_vit.py
CHANGED
@@ -23,7 +23,7 @@ import torch
|
|
23 |
import torch.utils.checkpoint
|
24 |
from torch import nn
|
25 |
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
26 |
-
|
27 |
from transformers.activations import ACT2FN
|
28 |
from transformers.modeling_outputs import (
|
29 |
BaseModelOutput,
|
@@ -91,7 +91,9 @@ def softmax_n_shifted_zeros(input: torch.Tensor, n: int, dim=-1) -> torch.Tensor
|
|
91 |
denominator = torch.add(original_denominator,
|
92 |
torch.multiply(torch.exp(shifted_zeros), n))
|
93 |
return torch.divide(numerator, denominator)
|
94 |
-
|
|
|
|
|
95 |
|
96 |
def softmax_1(input: torch.Tensor, dim=-1) -> torch.Tensor:
|
97 |
"""
|
|
|
23 |
import torch.utils.checkpoint
|
24 |
from torch import nn
|
25 |
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
26 |
+
import numpy as np
|
27 |
from transformers.activations import ACT2FN
|
28 |
from transformers.modeling_outputs import (
|
29 |
BaseModelOutput,
|
|
|
91 |
denominator = torch.add(original_denominator,
|
92 |
torch.multiply(torch.exp(shifted_zeros), n))
|
93 |
return torch.divide(numerator, denominator)
|
94 |
+
def logit(p, eps=1e-16):
    """Return the log-odds (inverse sigmoid) of probability *p*.

    *p* is clipped into the open interval (eps, 1 - eps) before the
    transform so that probabilities of exactly 0 or 1 do not produce
    infinities. Works element-wise on scalars or array-likes via NumPy.
    """
    # Keep p strictly inside (0, 1) to avoid log(0) / division by zero.
    clipped = np.clip(p, eps, 1 - eps)
    # logit(p) = log(p / (1 - p)) = -log(1/p - 1); same operation order
    # as the reciprocal form so float results are bit-identical.
    inverse_odds = 1 / clipped - 1
    return -np.log(inverse_odds)
|
97 |
|
98 |
def softmax_1(input: torch.Tensor, dim=-1) -> torch.Tensor:
|
99 |
"""
|