package LibDL.recommender;

import LibDL.core.StdVectorInt;
import LibDL.core.Tensor;
import LibDL.core.functional;
import net.librec.common.LibrecException;
import net.librec.math.structure.SequentialAccessSparseMatrix;

import java.util.HashMap;
import java.util.Map;

/**
 * Base class for general (user-item interaction) recommenders built on LibDL.
 *
 * <p>Adds a configurable sample size on top of {@link LibDLRecommender} and
 * provides a helper for collecting each user's interacted items as a tensor.
 */
public abstract class GeneralRecommender extends LibDLRecommender {

    /** Sample size read from config key {@code libdl.general.sample.size} (defaults to 10). */
    protected int sample_size;

    /** Training and testing interaction matrices (populated by subclasses / framework). */
    protected SequentialAccessSparseMatrix train_matrix, test_matrix;

    /**
     * Initializes the recommender: runs the superclass setup, then reads the
     * sample size from the configuration.
     *
     * @throws LibrecException if the superclass setup fails
     */
    @Override
    protected void setup() throws LibrecException {
        super.setup();
        sample_size = conf.getInt("libdl.general.sample.size", 10);
    }

    /**
     * Collects, for every user index in {@code [0, num_users)}, the item
     * indices present in that user's row of {@code sparseMatrix} as a tensor.
     *
     * @param sparseMatrix user-item interaction matrix (rows indexed by user)
     * @return map from user index to a tensor of that user's item indices
     */
    protected Map<Integer, Tensor> getUserItems(SequentialAccessSparseMatrix sparseMatrix) {
        Map<Integer, Tensor> userToItems = new HashMap<>(num_users);
        for (int u = 0; u < num_users; u++) {
            StdVectorInt itemIndices = new StdVectorInt(sparseMatrix.row(u).getIndices());
            userToItems.put(u, functional.tensor(itemIndices));
        }
        return userToItems;
    }

    /**
     * Reports whether training has converged, i.e. the absolute change in loss
     * since the previous iteration is below {@code 1e-5}.
     *
     * @param iter    current iteration number (used only for logging)
     * @param verbose whether to log the loss and its delta at INFO level
     * @return {@code true} when |lastLoss - loss| &lt; 1e-5
     */
    protected boolean isConverged(int iter, boolean verbose) {
        float lossDelta = (float) (lastLoss - loss);
        if (verbose) {
            LOG.info(getClass().getSimpleName() + " iter " + iter
                    + ": loss = " + loss + ", delta_loss = " + lossDelta);
        }
        if (Double.isNaN(loss) || Double.isInfinite(loss)) {
            // NOTE(review): a divergent loss is only logged — training continues and
            // this still returns based on the delta; confirm callers expect that.
            LOG.error("Loss = NaN or Infinity: current settings does not fit the recommender!");
        }
        return Math.abs(lossDelta) < 1e-5;
    }
}
