package LibDL.recommender.sequential;

import LibDL.core.*;
import LibDL.core.optim.Adam;
import LibDL.recommender.data.Sequence;
import LibDL.recommender.data.utils.NumUtils;
import LibDL.models.Caser;
import LibDL.recommender.LibDLRecommender;
import LibDL.recommender.utils.RecommenderUtils;
import LibDL.recommender.SequencialRecommender;
import LibDL.recommender.data.model.TextSequenceDataModel;
import LibDL.supporting.DataConverter;
import net.librec.common.LibrecException;
import net.librec.eval.EvalContext;
import net.librec.eval.ranking.PrecisionEvaluator;
import net.librec.eval.ranking.RecallEvaluator;
import net.librec.math.structure.DenseMatrix;
import net.librec.math.structure.DenseVector;
import net.librec.math.structure.SequentialAccessSparseMatrix;
import net.librec.recommender.item.RecommendedList;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.List;

/**
 * Sequential recommender wrapping the {@link Caser} convolutional sequence
 * embedding model. Training follows the original Caser objective: for each
 * user sequence, positive targets and sampled negatives are scored jointly and
 * optimized with a binary cross-entropy loss under Adam.
 */
public class CaserRecommender extends SequencialRecommender {

    // Caser-specific hyper-parameters
    private int nv = 4;               // number of vertical convolutional filters
    private int nh = 16;              // number of horizontal convolutional filters
    private float drop = 0.5f;        // dropout ratio
    private String ac_conv = "relu";  // activation of the convolutional layers
    private String ac_fc = "relu";    // activation of the fully-connected layer
    private int _L;                   // input sequence length (mirrors the parent's L)
    private int dim;                  // latent embedding dimension

    /**
     * Reads the Caser configuration, then instantiates the {@link Caser}
     * module and its Adam optimizer.
     *
     * @throws LibrecException if the parent setup fails
     */
    @Override
    protected void setup() throws LibrecException {
        super.setup();
        dim = conf.getInt("libdl.caser.dim", 50);
        _L = this.L;
        int num_items = ((TextSequenceDataModel) getDataModel()).getTrainSequence().num_items;
        int num_users = ((TextSequenceDataModel) getDataModel()).getTrainSequence().num_users;
        Caser.CaserArgs model_config = new Caser.CaserArgs()
                .ac_conv(ac_conv)
                .ac_fc(ac_fc)
                .dims(dim)
                .drop_ratio(drop)
                .n_h(nh)
                .n_v(nv)
                .L(_L);
        this.module = new Caser(num_users, num_items, model_config);
        this.optimizer = new Adam(this.module.parameters(), this.learning_rate, 0.9, 0.999, this.l2);
    }

    /**
     * Runs the training loop: per epoch, shuffles the training instances,
     * draws fresh negative samples, optimizes the joint positive/negative
     * sigmoid cross-entropy loss per minibatch, and logs the mean epoch loss
     * (plus ranking metrics every {@code eval_per_epoch} epochs when verbose).
     *
     * @throws LibrecException if evaluation of the intermediate model fails
     */
    @Override
    protected void trainModel() throws LibrecException {
        TextSequenceDataModel dataModel = (TextSequenceDataModel) context.getDataModel();
        Sequence train_seq = dataModel.getTrainSequence();

        DenseMatrix sequences_np = train_seq.sequences;
        DenseMatrix targets_np = train_seq.targets;
        // user ids as a column vector — the equivalent of numpy reshape(-1, 1)
        DenseMatrix users_np = train_seq.user_ids.transpose();

        int n_train = sequences_np.rowSize();
        LOG.info("total training instances: " + n_train);

        generate_candidate();

        int start_epoch = 0;

        for (int epoch_num = start_epoch; epoch_num < epoch; epoch_num++) {

            long t1 = System.currentTimeMillis();

            // set model to training mode (enables dropout)
            this.module.train();

            // shuffle the training instances once per epoch
            Tensor indices = functional.randperm(users_np.rowSize(), new TensorOptions(Dtype.INT64));
            ArrayList<Integer> shuffle_indices = new ArrayList<>(indices.tolist_long());
            users_np = NumUtils.row_shuffle(users_np, shuffle_indices);
            sequences_np = NumUtils.row_shuffle(sequences_np, shuffle_indices);
            targets_np = NumUtils.row_shuffle(targets_np, shuffle_indices);

            // draw fresh negative samples for this epoch
            DenseMatrix negatives_np = this.generate_negative_samples(users_np, neg_samples);

            // convert matrices to tensors and move them to the configured device
            Tensor users = DataConverter.matToTensor(users_np).to(Dtype.INT64);
            Tensor sequences = DataConverter.matToTensor(sequences_np).to(Dtype.INT64);
            Tensor targets = DataConverter.matToTensor(targets_np).to(Dtype.INT64);
            Tensor negatives = DataConverter.matToTensor(negatives_np).to(Dtype.INT64);
            if (this.device == LibDLRecommender.USE_CUDA) {
                users = users.cuda();
                sequences = sequences.cuda();
                targets = targets.cuda();
                negatives = negatives.cuda();
            } else {
                users = users.cpu();
                sequences = sequences.cpu();
                targets = targets.cpu();
                negatives = negatives.cpu();
            }

            double epoch_loss = 0.0;
            int minibatch_num = 0;
            for (ArrayList<Tensor> tensors : RecommenderUtils.minibatch(
                    new Tensor[]{users, sequences, targets, negatives}, batch_size)) {

                functional.enable_grad();

                Tensor batch_users = tensors.get(0);
                Tensor batch_sequences = tensors.get(1);
                Tensor batch_targets = tensors.get(2);
                Tensor batch_negatives = tensors.get(3);

                // score positives and negatives in a single forward pass
                Tensor items_to_predict = functional.cat(
                        new StdVectorTensor(new Tensor[]{batch_targets, batch_negatives}), 1);
                Tensor items_prediction = ((Caser) this.module)
                        .forward(batch_sequences, batch_users, items_to_predict, false);

                // split the joint prediction back into positive / negative scores
                StdVectorTensor splits = functional.split_with_sizes(items_prediction, new StdVectorLong(
                        new long[]{batch_targets.size(1), batch_negatives.size(1)}), 1);
                Tensor targets_prediction = splits.get(0);
                Tensor negatives_prediction = splits.get(1);

                // binary cross-entropy: -mean(log sigmoid(pos)) - mean(log(1 - sigmoid(neg)))
                this.optimizer.zero_grad();
                Tensor positive_loss = functional.mean(functional.log(functional.sigmoid(targets_prediction))).mul(-1);
                Tensor negative_loss = functional.mean(functional.log(functional.sigmoid(negatives_prediction).mul(-1).add(1))).mul(-1);
                Tensor loss = positive_loss.add(negative_loss);
                epoch_loss += loss.item().to_double();
                loss.backward();
                this.optimizer.step();

                minibatch_num++;
                if (minibatch_num % 50 == 0) {
                    // console-only progress indicator; "\r" keeps it on one line
                    System.out.print("minibatch: " + minibatch_num + "\r");
                }
            }
            System.out.println();
            // guard against an empty epoch so the reported loss cannot become NaN
            if (minibatch_num > 0) {
                epoch_loss /= minibatch_num;
            }
            long t2 = System.currentTimeMillis();

            if (verbose && (epoch_num + 1) % this.eval_per_epoch == 0) {
                // metrics: {prec@1, prec@5, prec@10, recall@1, recall@5, recall@10}
                double[] metrics = evaluateRanking();
                String outputStr = String.format("Epoch %d [%.1f s]\tloss=%.4f, " +
                                "prec@1=%.4f, prec@5=%.4f, prec@10=%.4f, " +
                                "recall@1=%.4f, recall@5=%.4f, recall@10=%.4f, [%.1f s]",
                        epoch_num + 1,
                        (float) (t2 - t1) / 1000,
                        epoch_loss,
                        metrics[0],
                        metrics[1],
                        metrics[2],
                        metrics[3],
                        metrics[4],
                        metrics[5],
                        (float) (System.currentTimeMillis() - t2) / 1000
                );
                LOG.info(outputStr);
            } else {
                String outputStr = String.format("Epoch %d [%.1f s]\tloss=%.4f [%.1f s]",
                        epoch_num + 1,
                        (float) (t2 - t1) / 1000,
                        epoch_loss,
                        (float) (System.currentTimeMillis() - t2) / 1000
                );
                LOG.info(outputStr);
            }
        }
    }

    /**
     * Evaluates the current model's top-N ranking quality on the test set.
     *
     * @return {@code {prec@1, prec@5, prec@10, recall@1, recall@5, recall@10}}
     * @throws LibrecException if ranking recommendation fails
     */
    private double[] evaluateRanking() throws LibrecException {
        RecommendedList recommendedList = this.recommendRank();
        RecommendedList groundTruthList = EvalContext.getGroundTruthListFromSparseMatrix(
                (SequentialAccessSparseMatrix) this.getDataModel().getTestDataSet());

        PrecisionEvaluator precisionEvaluator = new PrecisionEvaluator();
        RecallEvaluator recallEvaluator = new RecallEvaluator();

        int[] topNs = {1, 5, 10};
        double[] metrics = new double[6];
        for (int i = 0; i < topNs.length; i++) {
            precisionEvaluator.setTopN(topNs[i]);
            metrics[i] = precisionEvaluator.evaluate(groundTruthList, recommendedList);
            recallEvaluator.setTopN(topNs[i]);
            metrics[3 + i] = recallEvaluator.evaluate(groundTruthList, recommendedList);
        }
        return metrics;
    }

    /**
     * Scores candidate items for one user based on the user's test sequence.
     *
     * @param user_id  index of the user to score
     * @param item_ids candidate item ids; when {@code null}, all items are scored
     * @return one predicted score per candidate item, or {@code null} when no
     *         test sequences are available
     */
    @Override
    public List<Float> predict(int user_id, @Nullable List<Integer> item_ids) {
        Sequence test_seq = ((TextSequenceDataModel) this.getDataModel()).getTestSequence();
        if (test_seq == null || test_seq.sequences == null) {
            LOG.error("Missing test sequences, cannot make predictions");
            return null;
        }
        // inference mode: disable dropout and gradient tracking
        this.module.eval();
        functional.no_grad();

        DenseVector sequences_ = test_seq.sequences.row(user_id);

        if (item_ids == null) {
            item_ids = NumUtils.arange(0, num_items, 1); // score every known item
        }

        Tensor user = functional.as_tensor(user_id, new TensorOptions(Dtype.INT64))
                .reshape(new StdVectorLong(new long[]{-1, 1}));
        Tensor sequences = DataConverter.vector2Tensor(sequences_, new TensorOptions(Dtype.INT64));
        Tensor items = functional.as_tensor(new StdVectorLong(item_ids), new TensorOptions(Dtype.INT64));

        if (device == LibDLRecommender.USE_CUDA) {
            user = user.cuda();
            sequences = sequences.cuda();
            items = items.cuda();
        } else {
            user = user.cpu();
            sequences = sequences.cpu();
            items = items.cpu();
        }

        // add a batch dimension of 1 for the single-user forward pass
        sequences = sequences.reshape(new StdVectorLong(new long[]{1, -1}));
        items = items.reshape(new StdVectorLong(new long[]{1, -1}));

        Tensor out = ((Caser) this.module).forward(sequences, user, items, true);
        return new ArrayList<>(out.flatten().tolist_float());
    }
}
