import numpy as np
import torch
import torch.nn as nn


def ba_activation(x, weights, a, epsilon):
    # Ensure x is a float32 torch tensor
    x = torch.as_tensor(x, dtype=torch.float32)

    # Modulate the inputs with the per-unit weights
    x = weights * x

    # Apply the Ba-inspired operation: clamp x to [-1, 1] so that the
    # fractional-power term |x|^x stays well-behaved, then combine it
    # with a cosine-of-logarithm term.
    x_clamped = torch.clamp(x, -1, 1)
    fractional_inspired = torch.pow(torch.abs(x_clamped), x_clamped)
    # The 1e-7 offset guards the logarithm against log(0)
    activation_result = epsilon * torch.cos(
        np.pi * a * fractional_inspired
        * torch.log(torch.abs(fractional_inspired) + 1e-7)
    )
    
    # Squash through tanh so the output stays bounded
    activation_result = torch.tanh(activation_result)
    
    return activation_result
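
# Note on the output range (an observation, not part of the original file):
# cos(...) lies in [-1, 1], so epsilon * cos(...) lies in [-epsilon, epsilon],
# and the final tanh keeps the activation within [-tanh(epsilon), tanh(epsilon)].
# With epsilon = 0.1 as used below, outputs stay roughly in [-0.0997, 0.0997].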

# Define a custom model using the Ba-inspired activation function
class CustomModel(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        self.linear1 = nn.Linear(input_size, hidden_size)
        self.linear2 = nn.Linear(hidden_size, output_size)
        # Learnable per-unit weights used inside the Ba-inspired activation
        self.weights = nn.Parameter(torch.randn(hidden_size))
        self.a = 0.5  # Frequency-like parameter for the Ba-inspired activation
        self.epsilon = 0.1  # Amplitude parameter for the Ba-inspired activation

    def forward(self, x):
        x = self.linear1(x)
        x = ba_activation(x, self.weights, self.a, self.epsilon)  # Use Ba-inspired activation
        x = self.linear2(x)
        return x
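
# Minimal usage sketch (the sizes and random batch below are illustrative
# assumptions, not part of the original file): instantiate the model and run
# a forward pass to confirm the shapes flow through the custom activation.
if __name__ == "__main__":
    torch.manual_seed(0)
    model = CustomModel(input_size=4, hidden_size=8, output_size=2)
    batch = torch.randn(16, 4)  # 16 samples, 4 features each
    output = model(batch)
    print(output.shape)  # torch.Size([16, 2])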