package LibDL.recommender;

import LibDL.core.*;
import LibDL.core.nn.Module;
import LibDL.core.optim.Optimizer;
import LibDL.core.optim.SGD;
import LibDL.recommender.data.utils.NumUtils;
import LibDL.models.BPR;
import net.librec.common.LibrecException;
import net.librec.data.structure.AbstractBaseDataEntry;
import net.librec.data.structure.LibrecDataList;
import net.librec.math.algorithm.Randoms;
import net.librec.math.structure.DataSet;
import net.librec.math.structure.SequentialAccessSparseMatrix;
import net.librec.recommender.RecommenderContext;
import net.librec.recommender.item.RecommendedList;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.stream.IntStream;

@Deprecated
public class _BprRecommender extends AbstractLibDLRecommender {
    // Data dimensions, derived from the training matrix in initModule().
    public int num_items;
    public int num_users;
    // Latent-factor dimensionality of the BPR embeddings.
    public int dim;
    // n_iter: epochs; batch_size: number of batches per epoch;
    // sample_size: (user, pos, neg) triples per batch.
    public int n_iter,batch_size,sample_size;
    public float learning_rate,regulation_rate;
    // "cuda" or "cpu"; decided once from the builder's use_cuda flag.
    private String device;
    public boolean earlyStop;
    public Optimizer optimizer;
    // lastloss holds the previous epoch's mean loss for the convergence delta.
    private double lastloss=0.0d,loss =0.0d;
    private SequentialAccessSparseMatrix train_matrix,test_matrix;

    /**
     * Copies all hyper-parameters from the builder; no model is created here —
     * the module and optimizer are built lazily in {@link #initModule()}.
     */
    public _BprRecommender(Builder builder){
        super(builder);
        this.dim = builder.dim;
        this.n_iter=builder.n_iter;
        this.batch_size = builder.batch_size;
        this.sample_size = builder.sample_size;
        this.learning_rate = builder.learning_rate;
        this.regulation_rate = builder.regulation_rate;
        this.device = builder.use_cuda ? "cuda" : "cpu";
        this.earlyStop = builder.earlyStop;
    }

    /**
     * Builds, for every user index, a tensor of the item indices that user has
     * interacted with (the non-zero columns of the user's row). Used by the
     * training loop for positive/negative sampling.
     *
     * @param sparseMatrix user-item interaction matrix (rows = users)
     * @return map from user index to a 1-D tensor of positive item indices
     */
    private Map<Integer,Tensor> getUserItems(SequentialAccessSparseMatrix sparseMatrix) {
        Map<Integer,Tensor> tempUserItems = new HashMap<>();
        for (int userIdx = 0; userIdx < num_users; ++userIdx) {
            StdVectorInt pos = new StdVectorInt(sparseMatrix.row(userIdx).getIndices());
            Tensor positems = functional.tensor(pos);
            tempUserItems.put(userIdx,positems);
        }
        return tempUserItems;
    }

    /**
     * Reports whether training has converged: the absolute change in mean loss
     * since the previous epoch is below 1e-5. Also warns (without aborting) when
     * the loss has become NaN/Infinite, which indicates unsuitable settings.
     *
     * @param iter    current epoch index (used only for logging)
     * @param verbose when true, prints the loss and delta for this epoch
     */
    protected boolean isConverged(int iter,boolean verbose){
        float delta_loss = (float) (lastloss - loss);
        if (verbose) {
            String recName = getClass().getSimpleName();
            String info = recName + " iter " + iter + ": loss = " + loss + ", delta_loss = " + delta_loss;
            System.out.println(info);
        }
        if (Double.isNaN(loss) || Double.isInfinite(loss)) {
            System.err.println("Loss = NaN or Infinity: current settings does not fit the recommender!");
        }
        return Math.abs(delta_loss) < 1e-5;
    }

    /**
     * Predicts preference scores of one user for the given items.
     *
     * @param user_id  user index to score
     * @param item_ids item indices to score; when null, all items [0, num_items)
     * @return one score per requested item, flattened to a list
     */
    @Override
    public List<Float> predict(int user_id, @Nullable List<Integer> item_ids) {
        if (this.test_matrix == null)
            System.out.println("Missing test sequences, cannot make predictions");
        this.module.eval();

        // NOTE(review): assumes this call disables gradient tracking globally
        // for the binding — confirm against the LibDL functional API.
        functional.no_grad();

        if (item_ids == null)
            item_ids = NumUtils.arange(0, num_items, 1);

        Tensor items = functional.as_tensor(new StdVectorLong(item_ids), new TensorOptions(Dtype.INT64));

        Tensor auc = ((BPR)module).predict(user_id,items);

        return new ArrayList<>(auc.flatten().tolist_float());
    }

    /**
     * Batch variant of {@link #predict(int, List)}: scores each user in
     * {@code user_ids} against the given items.
     *
     * @param user_ids user indices to score
     * @param item_ids item indices to score; when null, all items [0, num_items)
     * @return flattened scores as produced by {@code BPR.predict(users, items)}
     */
    public List<Float> predict(List<Integer> user_ids, @Nullable List<Integer> item_ids) {
        if (this.test_matrix == null)
            System.out.println("Missing test sequences, cannot make predictions");
        this.module.eval();

        functional.no_grad();

        if (item_ids == null)
            item_ids = NumUtils.arange(0, num_items, 1);

        Tensor items = functional.as_tensor(new StdVectorLong(item_ids), new TensorOptions(Dtype.INT64));
        Tensor users = functional.as_tensor(new StdVectorLong(user_ids), new TensorOptions(Dtype.INT64));

        Tensor auc = ((BPR)module).predict(users,items);

        return new ArrayList<>(auc.flatten().tolist_float());
    }

    @Override
    public Module getModule() {
        return module;
    }

    /**
     * Pulls the train/test matrices from the data model, sizes the BPR model
     * from the training matrix, and creates a fresh SGD optimizer over its
     * parameters. Must run before training or prediction.
     */
    @Override
    public void initModule() {
        this.train_matrix = (SequentialAccessSparseMatrix) getDataModel().getTrainDataSet();
        this.test_matrix = (SequentialAccessSparseMatrix) getDataModel().getTestDataSet();
        this.num_users = train_matrix.rowSize();
        this.num_items = train_matrix.columnSize();
        this.module = new BPR(num_users,num_items,dim);
        this.optimizer = new SGD(this.module.parameters(), this.learning_rate);
    }

    /**
     * Trains the BPR model with mini-batch SGD. Each epoch runs
     * {@code batch_size} batches of {@code sample_size} (user, positive item,
     * negative item) triples sampled uniformly; the loss is the BPR objective
     * plus an L2 penalty scaled by {@code regulation_rate}. Stops early when
     * {@code earlyStop} is set and {@link #isConverged(int, boolean)} holds.
     *
     * @param verbose when true, per-epoch loss deltas are printed
     */
    @Override
    public void trainModel(boolean verbose) {
        initModule();
        int start_epoch = 0;
        Map<Integer,Tensor> userItems = getUserItems(train_matrix);
        for (int epoch_num = start_epoch; epoch_num < this.n_iter; epoch_num++) {
            long t1 = System.currentTimeMillis();
            this.module.train();
            loss = 0.0d;
            for (int batchCount = 0; batchCount < batch_size; batchCount++) {
                functional.enable_grad();
                int[] user_indexes=new int[sample_size],pos_item_indexes=new int[sample_size],neg_item_indexes=new int[sample_size];
                int user_index, pos_item_index, neg_item_index;
                for(int sampleCount = 0; sampleCount < sample_size; sampleCount++) {
                    // Rejection-sample a triple: skip users with no positives
                    // (nothing to learn) or all-positive users (no negative exists).
                    while (true) {
                        user_index = Randoms.uniform(num_users);
                        Tensor positems = userItems.get(user_index);
                        if (positems.size(0) == 0 || positems.size(0) == num_items)
                            continue;
                        pos_item_index = (int) positems.get(Randoms.uniform(positems.size(0))).item().to_double();
                        // Resample negatives until one outside the positive set is found.
                        do {
                            neg_item_index = Randoms.uniform(num_items);
                        } while (positems.contains(new Scalar(neg_item_index)));
                        break;
                    }
                    user_indexes[sampleCount] = user_index;
                    pos_item_indexes[sampleCount] = pos_item_index;
                    neg_item_indexes[sampleCount] = neg_item_index;
                }
                Tensor users = functional.as_tensor(new StdVectorLong(user_indexes),new TensorOptions(Dtype.INT64));
                Tensor pos_items = functional.as_tensor(new StdVectorLong(pos_item_indexes),new TensorOptions(Dtype.INT64));
                Tensor neg_items = functional.as_tensor(new StdVectorLong(neg_item_indexes),new TensorOptions(Dtype.INT64));
                this.optimizer.zero_grad();
                // loss = BPR ranking loss + regulation_rate * L2 norm of the involved embeddings.
                Tensor loss_tensor = ((BPR)module).forward(users,pos_items,neg_items).add( ((BPR)module).l2_norm(users,pos_items,neg_items).mul(new Scalar(regulation_rate)));
                if (this.device.equals("cuda")) {
                    loss_tensor = loss_tensor.cuda();
                } else {
                    loss_tensor = loss_tensor.cpu();
                }
                loss+=loss_tensor.mean().item().to_double();
                loss_tensor.backward();
                this.optimizer.step();
                System.out.print("batchCount: " + batchCount + "\r");
            }
            // Mean loss over the epoch's batches, compared against lastloss below.
            loss /= batch_size;
            long t2 = System.currentTimeMillis();
            String output_str = String.format("Epoch %d [%.1f s]\tloss=%.4f",
                    epoch_num + 1,
                    (float) (t2 - t1) / 1000,
                    loss
            );
            System.out.println(output_str);
            if (isConverged(epoch_num,verbose) && earlyStop) {
                break;
            }
            lastloss = loss;
        }
    }

    /** Not supported by this recommender; training goes through {@link #trainModel(boolean)}. */
    @Override
    public void train(RecommenderContext context) throws LibrecException {

    }

    /** Rating prediction is not supported; always returns null. */
    @Override
    public RecommendedList recommendRating(DataSet predictDataSet) throws LibrecException {
        return null;
    }

    @Override
    public RecommendedList recommendRating(LibrecDataList<AbstractBaseDataEntry> dataList) throws LibrecException {
        return null;
    }

    /** Ranks items for every known user. */
    @Override
    public RecommendedList recommendRank() {
        // Fix: the previous raw "List" wrapped an Object[] (boxed().toArray()),
        // silently producing a List<Object>. Build a properly typed list instead.
        List<Integer> users = Arrays.asList(IntStream.range(0, num_users).boxed().toArray(Integer[]::new));
        return recommendRank(users);
    }

    @Override
    public RecommendedList recommendRank(LibrecDataList<AbstractBaseDataEntry> dataList) throws LibrecException {
        return null;
    }

    /** Persistence is not implemented for this deprecated recommender. */
    @Override
    public void loadModel(String filePath) {

    }

    @Override
    public void saveModel(String filePath) {

    }

    @Override
    public void setContext(RecommenderContext context) {

    }

    /**
     * Fluent builder for {@link _BprRecommender}. All hyper-parameters have
     * working defaults; call {@link #build()} to create the recommender.
     */
    public static class Builder extends AbstractLibDLRecommender.Builder {
        private int dim = 10;
        private int n_iter = 4;
        private int batch_size = 10000;
        private int sample_size = 12;
        private float learning_rate = 0.01f;
        private float regulation_rate = 0.0001f;
        private boolean earlyStop = false;
        public Builder(String ... data_input_path) {
            super(data_input_path);
        }
        /** Number of training epochs. */
        public _BprRecommender.Builder n_iter(int n_iter) {
            this.n_iter = n_iter;
            return this;
        }
        /** Number of batches per epoch. */
        public _BprRecommender.Builder batch_size(int batch_size) {
            this.batch_size = batch_size;
            return this;
        }
        /** Number of sampled triples per batch. */
        public _BprRecommender.Builder sample_size(int sample_size) {
            this.sample_size = sample_size;
            return this;
        }
        /** SGD learning rate. */
        public _BprRecommender.Builder learning_rate(float learning_rate) {
            this.learning_rate = learning_rate;
            return this;
        }
        /** L2 regularization coefficient. */
        public _BprRecommender.Builder regulation_rate(float regulation_rate) {
            this.regulation_rate = regulation_rate;
            return this;
        }
        /** Stop training once the loss delta falls below the convergence threshold. */
        public _BprRecommender.Builder earlyStop(boolean earlyStop) {
            this.earlyStop = earlyStop;
            return this;
        }
        @Override
        public Recommender build() throws LibrecException {
            return new _BprRecommender(this);
        }
    }
}