#include <torch/torch.h>

// at::Tensor softmax(const at::Tensor & self, int64_t dim, 
//                   c10::optional<at::ScalarType> dtype=c10::nullopt);
// Forward pass: softmax over dimension 0 of `input`.
torch::Tensor softmax_forward(torch::Tensor input) {
    return input.softmax(0);
}

//! Backward pass for softmax — only 1-D inputs are supported.
//!
//! For y = softmax(x) the Jacobian is J_ij = y_i * (delta_ij - y_j), so the
//! vector-Jacobian product collapses to the closed form
//!     grad_in_i = y_i * (grad_out_i - <grad_out, y>).
//! This replaces the previous element-by-element O(m^2) loop that
//! materialized the full m x m Jacobian with a hard-coded float32 dtype
//! (which broke for double/half/CUDA tensors); the closed form is O(m),
//! allocates no Jacobian, and inherits dtype/device from the inputs.
//!
//! @param grad_out gradient w.r.t. the softmax output (1-D tensor)
//! @param output   the softmax output y itself (1-D tensor, same shape)
//! @return gradient w.r.t. the softmax input (1-D tensor)
torch::Tensor softmax_backward(torch::Tensor grad_out, torch::Tensor output){
    TORCH_CHECK(output.dim() == 1,
                "softmax_backward: only 1-D tensors are supported");
    TORCH_CHECK(grad_out.sizes() == output.sizes(),
                "softmax_backward: grad_out and output must have the same shape");
    // <grad_out, y> accounts for all off-diagonal Jacobian contributions.
    const auto inner = torch::dot(grad_out, output);
    return output * (grad_out - inner);
}


// Python bindings: exposes the forward/backward pair to Python.
// TORCH_EXTENSION_NAME is defined by PyTorch's extension build system and
// becomes the importable module name.
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
    m.def("forward",  &softmax_forward,  "softmax forward"); 
    m.def("backward", &softmax_backward, "softmax backward");
}