import torch
from torch.autograd import Variable  # Variable module of torch (NOTE: deprecated since PyTorch 0.4 — tensors carry autograd themselves; import kept in case other code relies on it)

from torchfm.layer import FactorizationMachine, FeaturesEmbedding, FeaturesLinear
input_dim = 230
class myFactorizationMachine(torch.nn.Module):
    """Degree-2 Factorization Machine (Rendle, 2010) with a sigmoid output.

    Computes ``sigmoid(b + x @ w + sum_{i<j} <v_i, v_j> x_i x_j)`` where
    ``w`` is the first-order weight vector and ``v_i`` is the k-dim latent
    factor of feature i.

    :param num_features: dimensionality of the input feature vector.
        Defaults to 230 (the module-level ``input_dim``) so existing
        callers that construct this with no arguments keep working.
    :param k: latent-factor dimension of the pairwise-interaction matrix.
    """

    def __init__(self, num_features=230, k=6):
        super().__init__()
        self.num_features = num_features
        # Global bias b, initialized to zero.  The original code built the
        # parameters with torch.FloatTensor(...), which allocates
        # *uninitialized* memory (arbitrary garbage, possibly NaN).
        self.bias = torch.nn.Parameter(torch.zeros(1))
        # First-order weights w, shape (num_features, 1), zero-initialized.
        self.weight = torch.nn.Parameter(torch.zeros(num_features, 1))
        # Factor matrix V, shape (num_features, k).  Small random init so
        # the pairwise term does not start at an exactly-zero gradient.
        self.f_mat_nk = torch.nn.Parameter(0.01 * torch.randn(num_features, k))
        # Unused by forward(); kept so existing checkpoints / code that
        # reference model.embedding keep working.
        self.embedding = torch.nn.Embedding(num_features, k)

    def forward(self, x):
        """Score a batch of samples.

        :param x: tensor of shape ``(batch_size, num_features)``; cast to
            float internally.
        :return: tensor of shape ``(batch_size,)`` of sigmoid scores.
        """
        x = x.float()
        # First-order term: x @ w + b, shape (B, 1).
        linear = torch.matmul(x, self.weight) + self.bias
        # Pairwise term via Rendle's O(n*k) identity, replacing the original
        # O(n^2) Python double loop (identical result up to float rounding):
        #   sum_{i<j} <v_i, v_j> x_i x_j
        #     = 0.5 * sum_k [ (sum_i V_ik x_i)^2 - sum_i V_ik^2 x_i^2 ]
        xv = torch.matmul(x, self.f_mat_nk)                       # (B, k)
        x2v2 = torch.matmul(x * x, self.f_mat_nk * self.f_mat_nk)  # (B, k)
        pairwise = 0.5 * (xv * xv - x2v2).sum(dim=1, keepdim=True)  # (B, 1)
        return torch.sigmoid((linear + pairwise).squeeze(1))
class FactorizationMachineModel(torch.nn.Module):
    """
    A pytorch implementation of Factorization Machine.

    Combines a linear (first-order) term with torchfm's pairwise
    FactorizationMachine term over per-field embeddings.

    Reference:
        S Rendle, Factorization Machines, 2010.
    """

    def __init__(self, field_dims, embed_dim):
        """
        :param field_dims: list of vocabulary sizes, one per categorical field
        :param embed_dim: dimension of each field's latent embedding
        """
        super().__init__()
        self.embedding = FeaturesEmbedding(field_dims, embed_dim)
        # FeaturesLinear is a single-output embedding over all field values,
        # i.e. the first-order w_i x_i term of the FM.
        self.linear = FeaturesLinear(field_dims)
        # reduce_sum=True collapses the pairwise term to shape (batch, 1),
        # matching the linear term so the two can be added directly.
        self.fm = FactorizationMachine(reduce_sum=True)

    def forward(self, x):
        """
        :param x: Long tensor of size ``(batch_size, num_fields)``
        :return: Float tensor of size ``(batch_size,)`` of sigmoid scores
        """
        x = self.linear(x) + self.fm(self.embedding(x))
        return torch.sigmoid(x.squeeze(1))
