/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package aiaudio.processing.split;

import aiaudio.processing.split.reducers.CalculateNeededUserArtistReducer;
import aiaudio.processing.split.reducers.DataSetSplitReducer;
import aiaudio.processing.split.reducers.DataSetArtistListReducer;
import aiaudio.processing.split.reducers.ArtistCountCheckerReducer;
import aiaudio.processing.split.reducers.AddNecessaryUsersReducer;
import aiaudio.processing.base.mapreduce.CoordinateDoubleValueKey;
import aiaudio.processing.base.mapreduce.standard.mappers.ArtistToUserValueMatrixMapper;
import aiaudio.processing.split.mappers.AddNecessaryUsersMapper;
import aiaudio.processing.split.mappers.CalculateNeededUserArtistMapper;
import aiaudio.processing.split.mappers.DataSetArtistListMapper;
import aiaudio.processing.prediction.recset.RecomendationSetCreationAlgorithm;
import aiaudio.lastfm.hbase.CannotCreateTableException;
import aiaudio.Application;
import aiaudio.database.tables.RatingsMatrixTable;
import aiaudio.processing.MainTableGroup;
import aiaudio.processing.base.AlgorithmParameters;
import aiaudio.processing.base.TimeMeasuredAlgorithm;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.mapreduce.Job;

/**
 * Splits the normalized ratings data set into a training set by running a
 * four-stage chain of MapReduce jobs over HBase tables, then copies the
 * default values from the source ratings matrix into the split result matrix.
 *
 * @author nastya
 */
public class DataSetSplittingAlgorithm extends TimeMeasuredAlgorithm<DataSetSplittingAlgorithmParameters> {

    /** Logical name of the scratch table used to pass intermediate user/artist data between jobs. */
    private String temporaryTableName;

    /**
     * Creates the splitting algorithm operating on the given table group.
     *
     * @param mainTableGroup provides the logical names of all input/output tables
     */
    public DataSetSplittingAlgorithm(MainTableGroup mainTableGroup) {
        super(mainTableGroup, "data set split");
    }

    @Override
    protected void removeTemporaryTables() {
        // NOTE(review): createTemporaryTables() allocates a temporary table, but nothing
        // drops it here, so the scratch table leaks after every run. Consider removing it
        // via the database facade — TODO confirm the appropriate deletion API.
    }

    /** Allocates the scratch table shared by the collector/analyzer jobs. */
    @Override
    protected void createTemporaryTables() throws IOException {
        temporaryTableName = Application.database().createTemporaryTable();
    }

    /**
     * Runs the pipeline: split the ratings matrix, collect per-user artist lists,
     * compute which users/artists must be added to satisfy the splitting coefficient,
     * and verify/patch artist counts. Finally copies default values from the
     * normalized matrix into the split result matrix.
     *
     * @throws IOException if a job fails or table access fails
     */
    @Override
    protected void process() throws IOException, CannotCreateTableException, InterruptedException, ClassNotFoundException {
        // Fail fast: a failed stage must not feed corrupt data to the next one.
        runJob(createSplitterJob(), "data set splitter");
        runJob(createUsersCollectorJob(), "users collector");
        runJob(createUsersToAddAnalyzerJob(), "users-to-add analyzer");
        runJob(createCheckerJob(), "artist count checker");

        copyDefaultValues(getNormalizedMatrix(), getSplitResultMatrix());
    }

    /**
     * Waits for the job and aborts the pipeline if it did not succeed.
     * The original code ignored waitForCompletion's return value, silently
     * continuing after a failed stage.
     */
    private void runJob(Job job, String description) throws IOException, InterruptedException, ClassNotFoundException {
        if (!job.waitForCompletion(true)) {
            throw new IOException("MapReduce job failed: " + description);
        }
    }

    @Override
    protected void measureAll() throws IOException {
        // No measurements are collected for this algorithm.
    }

    @Override
    protected void writeParameters(DataSetSplittingAlgorithmParameters parameters) throws IOException {
        // Parameters are not persisted for this algorithm.
    }

    /** Copies the default-value column data from one ratings matrix table to another. */
    private void copyDefaultValues(String fromTable, String toTable) throws IOException {
        HTable fromHTable = Application.database().getByName(fromTable).getTable();
        HTable toHTable = Application.database().getByName(toTable).getTable();
        RatingsMatrixTable.copyDefaultValues(fromHTable, toHTable);
    }

    /** Resolves a logical table name to its physical database table name. */
    private String databaseNameOf(String logicalTableName) throws IOException {
        return Application.database().getByName(logicalTableName).getDatabaseName();
    }

    /**
     * Stage 1: maps the normalized ratings matrix by artist and splits it into the
     * training result matrix according to the splitting coefficient.
     */
    private Job createSplitterJob() throws IOException {
        Job job = new Job();

        job.getConfiguration().set(DataSetSplitReducer.ARTISTS_TABLE_NAME_PARAMETER, getArtistTable());
        job.getConfiguration().setFloat(DataSetSplitReducer.SPLITTING_COEFFICIENT_PARAM, parameters.getSplittingCoefficient());

        Scan scan = new Scan();

        TableMapReduceUtil.initTableMapperJob(databaseNameOf(getNormalizedMatrix()), scan,
                ArtistToUserValueMatrixMapper.class, ImmutableBytesWritable.class,
                CoordinateDoubleValueKey.class, job);

        TableMapReduceUtil.initTableReducerJob(databaseNameOf(getSplitResultMatrix()),
                DataSetSplitReducer.class, job);

        return job;
    }

    /**
     * Stage 2: collects the artist list per user from the users table into the
     * temporary table.
     */
    private Job createUsersCollectorJob() throws IOException {
        Job job = new Job();

        Scan scan = new Scan();

        TableMapReduceUtil.initTableMapperJob(databaseNameOf(getUsersTable()), scan,
                DataSetArtistListMapper.class, ImmutableBytesWritable.class,
                BooleanWritable.class, job);

        TableMapReduceUtil.initTableReducerJob(databaseNameOf(temporaryTableName),
                DataSetArtistListReducer.class, job);

        return job;
    }

    /**
     * Stage 3: determines which user/artist entries are still needed to reach the
     * splitting coefficient, writing results back into the temporary table.
     */
    private Job createUsersToAddAnalyzerJob() throws IOException {
        Job job = new Job();

        job.getConfiguration().set(CalculateNeededUserArtistMapper.TABLE_TO_CHECK_NAME_PARAM, temporaryTableName);
        job.getConfiguration().setFloat(CalculateNeededUserArtistReducer.SPLITTING_COEFFICIENT_PARAM, parameters.getSplittingCoefficient());
        job.getConfiguration().set(CalculateNeededUserArtistReducer.USERS_TABLE_NAME_PARAMETER, getUsersTable());

        Scan scan = new Scan();

        TableMapReduceUtil.initTableMapperJob(databaseNameOf(getUsersTable()), scan,
                CalculateNeededUserArtistMapper.class, ImmutableBytesWritable.class,
                BooleanWritable.class, job);

        TableMapReduceUtil.initTableReducerJob(databaseNameOf(temporaryTableName),
                CalculateNeededUserArtistReducer.class, job);

        return job;
    }

    /**
     * Stage 4: re-scans the normalized matrix and adds the necessary users
     * (recorded in the temporary table) to the split result matrix.
     */
    private Job createCheckerJob() throws IOException {
        Job job = new Job();

        job.getConfiguration().set(AddNecessaryUsersMapper.TEMPORAY_TABLE_WITH_NEEDED_USERS_NAME, temporaryTableName);
        job.getConfiguration().set(ArtistCountCheckerReducer.INITIAL_RATING_TABLE_NAME_PARAM, getNormalizedMatrix());
        job.getConfiguration().setFloat(ArtistCountCheckerReducer.SPLITTING_COEFFICIENT_PARAM, parameters.getSplittingCoefficient());
        job.getConfiguration().set(ArtistCountCheckerReducer.USERS_TABLE_NAME_PARAMETER, getUsersTable());

        Scan scan = new Scan();

        TableMapReduceUtil.initTableMapperJob(databaseNameOf(getNormalizedMatrix()), scan,
                AddNecessaryUsersMapper.class, ImmutableBytesWritable.class,
                CoordinateDoubleValueKey.class, job);

        TableMapReduceUtil.initTableReducerJob(databaseNameOf(getSplitResultMatrix()),
                AddNecessaryUsersReducer.class, job);

        return job;
    }

    /** Logical name of the training ratings matrix produced by the split. */
    private String getSplitResultMatrix() {
        return mainTableGroup.getSplitDataSetAlgTableGroup().getTrainingRatingMatrix();
    }

    /** Logical name of the normalized source ratings matrix. (Upstream getter keeps the original spelling.) */
    private String getNormalizedMatrix() {
        return mainTableGroup.getPrepareRatingsAlgTableGroup().getNormilizedRatingMatrix();
    }

    /** Logical name of the users table. */
    private String getUsersTable() {
        return mainTableGroup.getInitialDataTableGroup().getUsersTable();
    }

    /** Logical name of the artists table. */
    private String getArtistTable() {
        return mainTableGroup.getInitialDataTableGroup().getArtistsTable();
    }
}
