package LibDL.recommender.data.model;

import LibDL.recommender.data.Sequence;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import net.librec.common.LibrecException;
import net.librec.conf.Configured;
import net.librec.data.DataContext;
import net.librec.data.convertor.TextDataConvertor;
import net.librec.math.structure.DataFrame;
import org.apache.commons.lang3.StringUtils;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;

/**
 * Data model that reads text rating data and converts it into {@link Sequence}
 * objects for sequential recommendation models. Item index 0 is reserved as a
 * padding placeholder so that {@code Sequence} can pad with a constant zero.
 */
public class TextSequenceDataModel extends LibDLAbstractDataModel {

    // Training sequences; built lazily on the first call to nextFold().
    private Sequence train_sequence;

    // Test/target sequences; built together with the training sequences.
    private Sequence test_sequence;

    // Item/user counts captured right after conversion, before splitting.
    private int train_num_items, train_num_users;

    // Sequence length (L) and number of prediction targets (T), read from configuration.
    private int L, T;

    /**
     * Reads the sequence hyper-parameters from the configuration.
     * Invoked at the start of {@link #buildDataModel()}.
     */
    public void setup() {
        L = this.conf.getInt("libdl.sequence.length");
        T = this.conf.getInt("libdl.sequence.target");
    }

    /**
     * Builds the full data model: converts the raw text input (once per run),
     * forces the {@code testset} splitter, splits into train/test sets, and
     * optionally attaches appender features.
     *
     * @throws LibrecException if splitting or feature building fails
     */
    @Override
    public void buildDataModel() throws LibrecException {
        setup();
        context = new DataContext(conf);
        // Convert raw input only once; the flag guards against re-conversion.
        if (!conf.getBoolean("data.convert.read.ready")) {
            buildConvert();
            LOG.info("Transform data to Convertor successfully!");
            conf.setBoolean("data.convert.read.ready", true);
        }

        // This model always relies on a pre-defined test set for splitting.
        conf.set("data.model.splitter", "testset");
        LOG.info("data.model.splitter is set to the default value 'testset'!");

        buildSplitter();
        if (StringUtils.isNotBlank(conf.get("data.appender.class")) && !conf.getBoolean("data.appender.read.ready")) {
            buildFeature();
            LOG.info("Transform data to Feature successfully!");
            conf.setBoolean("data.appender.read.ready", true);
        }
        LOG.info("Split data to train Set and test Set successfully!");
        if (trainDataSet != null && trainDataSet.size() > 0 && testDataSet != null && testDataSet.size() > 0) {
            LOG.info("Data cardinality of training is " + trainDataSet.size());
            LOG.info("Data cardinality of testing is " + testDataSet.size());
        }
    }

    /**
     * Converts the configured text input files, then shifts every item index
     * up by one so that index 0 can serve as the padding placeholder.
     */
    @Override
    public void buildConvert() {
        String[] inputDataPath = conf.get(Configured.CONF_DATA_INPUT_PATH).trim().split(":");
        for (int i = 0; i < inputDataPath.length; i++) {
            inputDataPath[i] = conf.get(Configured.CONF_DFS_DATA_DIR) + "/" + inputDataPath[i];
        }
        String dataColumnFormat = conf.get(Configured.CONF_DATA_COLUMN_FORMAT, "UIR");
        dataConvertor = new TextDataConvertor(dataColumnFormat, inputDataPath, conf.get("data.convert.sep", "[\t;, ]"));
        try {
            dataConvertor.processData();
        } catch (IOException e) {
            // Use the framework logger (with cause) instead of printStackTrace;
            // downstream steps will surface the missing data if this fails.
            LOG.error("Failed to process input data", e);
        }

        // Hold index 0 for the padding constant zero used in `Sequence`:
        // insert a placeholder at 0 and shift every existing item id by +1.
        BiMap<String, Integer> itemMapping = getItemMappingData();
        if (!itemMapping.containsKey("libdl_placeholder")) {
            BiMap<String, Integer> originalMapping = HashBiMap.create(itemMapping);
            itemMapping.clear();
            itemMapping.put("libdl_placeholder", 0);
            originalMapping.forEach((item, index) -> itemMapping.put(item, index + 1));
            LOG.info("ItemFeatureMap = ItemFeatureMap + 1 !!");
        }

        train_num_items = getItemMappingData().size();
        train_num_users = getUserMappingData().size();
    }

    /**
     * Builds the train/test {@link Sequence} pair from parallel user/item lists
     * and stores them in {@code train_sequence}/{@code test_sequence}.
     *
     * @param users           user ids, aligned element-wise with {@code items}
     * @param items           item ids, aligned element-wise with {@code users}
     * @param isTrain         whether the data comes from the training split
     * @param train_num_users number of users seen during training
     * @param train_num_items number of items seen during training
     * @param num_users       total number of users across train and test
     * @param num_items       total number of items across train and test
     */
    private void convertDataToSequences(List users, List items, boolean isTrain,
                                        int train_num_users, int train_num_items,
                                        int num_users, int num_items) {
        // Bug fix: the original hard-coded `true` here, silently ignoring isTrain.
        Sequence[] sequences = Sequence.to_sequence(users, items, isTrain,
                train_num_users, train_num_items, num_users, num_items, L, T);
        train_sequence = sequences[0];
        test_sequence = sequences[1];
    }

    /**
     * Advances to the next fold; lazily builds the sequences the first time
     * it is called (columns 0 and 1 of the matrix are users and items).
     */
    @Override
    public void nextFold() {
        super.nextFold();
        if (train_sequence == null) {
            convertDataToSequences(
                    dataConvertor.getMatrix().getData().get(0), dataConvertor.getMatrix().getData().get(1), true,
                    train_num_users, train_num_items,
                    DataFrame.getInnerMapping("user").size(), DataFrame.getInnerMapping("item").size()
            );
        }
    }

    /** @return the training sequences, or {@code null} before {@link #nextFold()} has run */
    public Sequence getTrainSequence() {
        return train_sequence;
    }

    /** @return the test sequences, or {@code null} before {@link #nextFold()} has run */
    public Sequence getTestSequence() {
        return test_sequence;
    }

    /** @return the number of distinct items after conversion (placeholder included) */
    public int getTrainNumItems() {
        return train_num_items;
    }
}
