#include <vector>
#include <cmath>
#include <cfloat>
#include <iostream>

using namespace std;


// S = [N, D] * [D, N] -> [N, N]
// P = softmax(S)
// O = P * [N, D] -> [N, D]


// S = [N, D] * [D, N] -> [N, N]
// S = Q * K^T, scaled by 1/sqrt(D): S = [N, D] * [D, N] -> [N, N].
// Q and K are row-major [N, D] (row j of K is column j of K^T);
// S is row-major [N, N]. Accumulation is done in float.
template <typename T,
    int N,
    int D
> void naive_attn_qkt(const std::vector<T>& Q,
    const std::vector<T>& K,
    std::vector<T>& S
) {
    // Hoist the loop-invariant scale: one sqrt total instead of a
    // double-precision sqrt + divide per output element.
    const float inv_sqrt_d = 1.0f / std::sqrt(static_cast<float>(D));
    for (int i = 0; i < N; i++) {
        for (int j = 0; j < N; j++) {
            // Dot product of row i of Q with row j of K.
            float sum = 0.0f;
            for (int d = 0; d < D; d++) {
                sum += Q[i * D + d] * K[j * D + d];
            }
            S[i * N + j] = sum * inv_sqrt_d;
        }
    }
}

// P = softmax(S)
// pass1: get max
// pass2: get exp & sum
// pass3: get softmax
// P = row-wise softmax(S), numerically stabilized.
// pass 1: find the row maximum (subtracted so exp() cannot overflow)
// pass 2: exponentials and their running sum
// pass 3: normalize into P
// Writing P over S in place is safe: row i of S is fully consumed into
// exp_tmp before row i of P is written.
template <typename T,
    int N
> void naive_attn_softmax(const std::vector<T>& S,
    std::vector<T>& P
) {
    // Scratch buffer hoisted out of the row loop: one allocation total
    // instead of one heap allocation per row.
    std::vector<float> exp_tmp(N, 0.0f);
    for (int i = 0; i < N; i++) {
        // `row_max` (not `max`) avoids shadowing std::max under the
        // file's `using namespace std;`.
        float row_max = -FLT_MAX;
        for (int j = 0; j < N; j++) {
            row_max = std::max(row_max, static_cast<float>(S[i * N + j]));
        }
        float sum = 0.0f;
        for (int j = 0; j < N; j++) {
            exp_tmp[j] = std::exp(S[i * N + j] - row_max);
            sum += exp_tmp[j];
        }
        for (int j = 0; j < N; j++) {
            P[i * N + j] = exp_tmp[j] / sum;
        }
    }
}

// O = P * [N, D] -> [N, D]
// O = P * V: [N, N] * [N, D] -> [N, D], all matrices row-major.
// Row r of O is the weighted sum of the rows of V, with weights taken
// from row r of P. Accumulation is done in float.
template <typename T,
    int N,
    int D
> void naive_attn_pv(const std::vector<T>& P,
    const std::vector<T>& V,
    std::vector<T>& O
) {
    for (int row = 0; row < N; row++) {
        // Per-row float accumulator; O is written once per element.
        std::vector<float> acc(D, 0.0f);
        for (int k = 0; k < N; k++) {
            const T weight = P[row * N + k];
            for (int col = 0; col < D; col++) {
                acc[col] += weight * V[k * D + col];
            }
        }
        for (int col = 0; col < D; col++) {
            O[row * D + col] = acc[col];
        }
    }
}

// Full naive attention: O = softmax(Q * K^T / sqrt(D)) * V.
// Q, K, V are row-major [N, D]; O is row-major [N, D].
template <typename T,
    int N,
    int D
> void naive_attn(const std::vector<T>& Q,
    const std::vector<T>& K,
    const std::vector<T>& V,
    std::vector<T>& O
) {
    // Single [N, N] scratch buffer reused for both scores and
    // probabilities: the softmax overwrites it in place, which is safe
    // because each score row is fully read before its probability row
    // is written.
    std::vector<T> scores(N * N, 0.0);
    naive_attn_qkt<T, N, D>(Q, K, scores);
    naive_attn_softmax<T, N>(scores, scores);
    naive_attn_pv<T, N, D>(scores, V, O);
}
