"""
处理输入
"""


import numpy
import torch
from torch import nn

class InputLayer(nn.Module):
    """Build permutation-equivariant input features from electron coordinates.

    Given stacked spin-up / spin-down electron positions, produces the
    one-electron stream  h^α_i     = concat(r^α_i, |r^α_i|)  and the
    two-electron stream h^{αβ}_{ij} = concat(r^α_i − r^β_j, |r^α_i − r^β_j|).
    """

    def __init__(self, nup, ndn):
        """
        Args:
            nup: number of spin-up electrons.
            ndn: number of spin-down electrons.
        """
        super().__init__()
        self.nup = nup
        self.ndn = ndn
        self.nel = self.nup + self.ndn

    def forward(self, rupdn: torch.Tensor):
        """Compute the input feature streams.

        Args:
            rupdn: (nel, d) electron coordinates, spin-up rows first
                   (presumably d == 3 — the original code hard-coded it).

        Returns:
            rupdn:  the input coordinates, passed through unchanged.
            h_ai:   (nel, d + 1) one-electron features  (r_i, |r_i|).
            h_aibj: (nel, nel, d + 1) two-electron features
                    (r_i − r_j, |r_i − r_j|).
        """
        # One-electron stream: h^α_i = concat(r_i, |r_i|).
        self.h_ai = torch.cat(
            [rupdn, torch.linalg.vector_norm(rupdn, dim=1, keepdim=True)],
            dim=1,
        )

        # Pairwise differences diff[i, j] = r_i − r_j via broadcasting,
        # replacing the original O(nel^2) Python double loop. This also
        # inherits rupdn's dtype and device (the old code forced a CPU
        # float32 buffer, silently downcasting float64 input).
        diff = rupdn.unsqueeze(1) - rupdn.unsqueeze(0)

        # The diagonal (i == j) is exactly zero; overwrite it with a tiny
        # constant so the norm stays differentiable there (stabilizes
        # Hessian computations, as in the original).
        eye = torch.eye(self.nel, dtype=torch.bool, device=rupdn.device)
        diff = diff.masked_fill(eye.unsqueeze(-1), 1e-16)

        # Two-electron stream: h^{αβ}_{ij} = concat(r_i − r_j, |r_i − r_j|).
        self.h_aibj = torch.cat(
            [diff, torch.linalg.vector_norm(diff, dim=2, keepdim=True)],
            dim=2,
        )
        return rupdn, self.h_ai, self.h_aibj
