package LibDL.models;

import LibDL.core.*;
import LibDL.core.nn.*;
import LibDL.core.nn.Module;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class Caser extends Module {
    // Dataset cardinalities.
    int num_users;
    int num_items;
    // Flattened output widths of the vertical / horizontal convolution
    // branches; together they size the input of fc1.
    int fc_dim_v, fc_dim_h;
    public Conv2d conv_v;
    List<Conv2d> conv_h;
    public Embedding user_embeddings, item_embeddings, W2, b2;
    Linear fc1;
    Dropout dropout;


    /**
     * Fluent hyper-parameter holder for {@link Caser}.
     * Every setter stores its value and returns {@code this} for chaining.
     */
    public static class CaserArgs {
        // Sequence length, embedding size, #horizontal filters, #vertical filters.
        private int L_, dims_, n_h_, n_v_;
        // Dropout probability applied to the fused convolution features.
        private double drop_ratio_;
        // Activation names: "iden", "relu", "tanh" or "sigm".
        private String ac_fc_, ac_conv_;
        // Record whether n_v/n_h were explicitly supplied
        // (currently never consulted by the model — see forward()).
        private boolean has_nv_, has_nh_;

        public CaserArgs() {
            this.has_nv_ = false;
            this.has_nh_ = false;
        }

        public CaserArgs L(int value) {
            this.L_ = value;
            return this;
        }

        public CaserArgs dims(int value) {
            this.dims_ = value;
            return this;
        }

        public CaserArgs n_h(int value) {
            this.has_nh_ = true;
            this.n_h_ = value;
            return this;
        }

        public CaserArgs n_v(int value) {
            this.has_nv_ = true;
            this.n_v_ = value;
            return this;
        }

        public CaserArgs drop_ratio(double value) {
            this.drop_ratio_ = value;
            return this;
        }

        public CaserArgs ac_fc(String value) {
            this.ac_fc_ = value;
            return this;
        }

        public CaserArgs ac_conv(String value) {
            this.ac_conv_ = value;
            return this;
        }
    }

    CaserArgs args;

    /** Creates an empty hyper-parameter builder. */
    public static CaserArgs args() {
        return new CaserArgs();
    }

    /**
     * Builds the network. Layer construction order is kept stable on purpose:
     * the embedding/convolution constructors presumably draw initial weights
     * from the library's RNG stream — TODO confirm.
     *
     * @param num_users number of distinct users (rows of the user embedding table)
     * @param num_items number of distinct items (rows of the item embedding table)
     * @param args      hyper-parameters collected via {@link #args()}
     */
    public Caser(int num_users, int num_items, CaserArgs args) {
        this.num_users = num_users;
        this.num_items = num_items;
        this.args = args;

        // Embedding tables for users and items.
        this.user_embeddings = new Embedding(num_users, args.dims_);
        this.item_embeddings = new Embedding(num_items, args.dims_);

        // One vertical convolution spanning the whole sequence (kernel L x 1).
        this.conv_v = new Conv2d(1, args.n_v_,
                new StdVectorLong(new long[]{args.L_, 1}));

        // Horizontal convolutions, one per window height h = 1..L (kernel h x dims).
        this.conv_h = new ArrayList<>();
        for (int h = 0; h < args.L_; h++) {
            this.conv_h.add(new Conv2d(1, args.n_h_,
                    new StdVectorLong(new long[]{h + 1, args.dims_})));
        }

        // Flattened widths of both branches feed the fully-connected layer.
        this.fc_dim_v = args.n_v_ * args.dims_;
        this.fc_dim_h = args.n_h_ * args.L_;
        int fc_dim_in = fc_dim_v + fc_dim_h;
        this.fc1 = new Linear(fc_dim_in, args.dims_);

        // Per-item output weight/bias tables; width 2*dims because the fc
        // output is concatenated with the user embedding before scoring.
        this.W2 = new Embedding(num_items, 2 * args.dims_);
        this.b2 = new Embedding(num_items, 1);
        this.dropout = new Dropout(args.drop_ratio_);

        register_modules("conv_h", this.conv_h);
        this.register_modules(new String[]{"user_embeddings", "item_embeddings", "conv_v", "fc1", "W2", "b2", "dropout"},
                user_embeddings, item_embeddings, conv_v, fc1, W2, b2, dropout);

        // Weight initialisation, kept out of the autograd graph.
        try (NoGrad noGrad = new NoGrad()) {
            this.user_embeddings.getWeight().normal_(0, 1.0 / args.dims_);
            this.item_embeddings.getWeight().normal_(0, 1.0 / args.dims_);
            this.W2.getWeight().normal_(0, 1.0 / (2 * args.dims_));
            this.b2.getWeight().zero_();
        }
    }

    /**
     * Applies the activation selected by {@code s}. "iden" and any
     * unrecognised name both fall back to the identity.
     */
    private Tensor ac(Tensor input, String s) {
        switch (s) {
            case "relu":
                return functional.relu(input);
            case "tanh":
                return functional.tanh(input);
            case "sigm":
                return functional.sigmoid(input);
            case "iden":
            default:
                return input;
        }
    }

    /** Activation applied after the horizontal convolutions. */
    private Tensor ac_conv(Tensor in) {
        return ac(in, args.ac_conv_);
    }

    /** Activation applied after the fully-connected layer. */
    private Tensor ac_fc(Tensor in) {
        return ac(in, args.ac_fc_);
    }

    /**
     * Scores the given items for a batch of (item sequence, user) pairs.
     *
     * @param seq_var  item-id sequences fed through the item embedding table
     * @param user_var user ids fed through the user embedding table
     * @param item_var item ids to score against W2/b2
     * @param for_pred when true, squeeze the per-item weights and return a
     *                 plain dot-product score; otherwise use a batched
     *                 matrix-multiply (baddbmm) over the batch dimension
     * @return the item scores as a Tensor
     */
    public Tensor forward(Tensor seq_var, Tensor user_var, Tensor item_var, boolean for_pred) {
        // Treat the embedded sequence as a 1-channel "image";
        // presumably (batch, 1, L, dims) after unsqueeze — TODO confirm.
        Tensor seq_embs = item_embeddings.forward(seq_var).unsqueeze(1);
        Tensor u_emb = user_embeddings.forward(user_var).squeeze(1);

        // Vertical branch: one convolution over the full sequence, flattened.
        // NOTE(review): the builder's has_nv_/has_nh_ flags are never consulted,
        // so both branches always run regardless of how args was populated.
        Tensor v_flat = this.conv_v.forward(seq_embs);
        v_flat = v_flat.view(new StdVectorLong(new long[]{-1, fc_dim_v}));

        // Horizontal branch: convolve with every window height, max-pool each
        // activation map over the remaining time axis, then concatenate.
        StdVectorTensor pooled = new StdVectorTensor();
        for (Conv2d conv : conv_h) {
            Tensor activated = ac_conv(conv.forward(seq_embs).squeeze(3));
            Tensor maxed = functional.max_pool1d(activated, activated.size(2), activated.size(2)).squeeze(2);
            pooled.add(maxed);
        }
        Tensor h_flat = functional.cat(pooled, 1);

        // Fuse both branches, regularise, and project down to dims.
        StdVectorTensor branches = new StdVectorTensor(new Tensor[]{v_flat, h_flat});
        Tensor fused = dropout.forward(functional.cat(branches, 1));
        Tensor z = ac_fc(fc1.forward(fused));

        // Append the user embedding -> 2*dims features per row.
        Tensor x = functional.cat(new StdVectorTensor(new Tensor[]{z, u_emb}), 1);

        // Per-item affine score: x . W2[item] + b2[item].
        // Locals renamed so they no longer shadow the W2/b2 fields.
        Tensor w2_emb = this.W2.forward(item_var);
        Tensor b2_emb = this.b2.forward(item_var);
        if (for_pred) {
            w2_emb = w2_emb.squeeze();
            b2_emb = b2_emb.squeeze();
            return (x.mul(w2_emb)).sum(1).add(b2_emb);
        } else {
            return functional.baddbmm(b2_emb, w2_emb, x.unsqueeze(2)).squeeze();
        }
    }
}
