package com.chee.gene;

import com.chee.gene.constant.KmeansConstants;
import com.chee.gene.mapper.BiCclassifyMapper;
import com.chee.gene.mapper.KmeansMapper;
import com.chee.gene.partitioner.KMeansPartitioner;
import com.chee.gene.reduce.KeyPrintlnReducer;
import com.chee.gene.reduce.KmeansReducer;
import com.chee.gene.utils.HanminDistanceComputer;
import com.chee.gene.utils.HdfsFileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.log4j.Logger;

import java.util.Collections;
import java.util.List;

/**
 * KMeans main driver: runs the iterative clustering jobs and the final
 * binary classification job.
 * Created by chenguoqing on 17/3/11.
 */
public class KMeans{

    private static final Logger logger = Logger.getLogger(KMeans.class);

    /**
     * Program entry point: parses generic Hadoop options and starts the
     * iterative KMeans driver.
     *
     * @param args command line arguments; generic Hadoop options are consumed
     *             and the remainder is forwarded to {@link #run}
     * @throws Exception if job submission or HDFS access fails
     */
    public static void main(String[] args) throws Exception{

        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        run(otherArgs, conf);

    }

    /**
     * Runs the KMeans iteration loop followed by the final binary
     * classification job.
     *
     * Each iteration launches one MapReduce job, merges its output into the
     * new-centers file, and compares the new centers with the previous ones.
     * The loop stops when the summed Hamming distance is small enough or the
     * maximum iteration count is reached. This method terminates the JVM via
     * {@code System.exit} on both failure and completion.
     *
     * @param args remaining command line arguments (currently unused)
     * @param conf Hadoop configuration shared by all jobs
     * @throws Exception if HDFS access or job execution fails
     */
    public static void run(String[] args, Configuration conf) throws Exception{

        boolean isDone = false;
        int iteration = 0;
        FileSystem fs = FileSystem.get(conf);
        String lastOutputPath = "";
        // Iterate until convergence or the iteration cap is reached.
        while(!isDone && iteration < KmeansConstants.KM_MAX_ITERATE_CNT){
            lastOutputPath = KmeansConstants.OUTPUT_PATH + System.nanoTime();
            Path outputPath = new Path(lastOutputPath);

            if(!lunchKMeansIteJob(conf, iteration, outputPath, KmeansConstants.PRE_CENTER_NAME)){
                // Fail fast: continuing with a stale or missing centers file would
                // make the comparison below (and the final job) meaningless.
                logger.error("run job fail on " + iteration + " iteration!!");
                System.exit(-1);
            }
            // Merge the job output into the new-centers file (deleteSource = true
            // removes the per-iteration output directory).
            HdfsFileUtils.copyMerge(fs, outputPath, fs, new Path(KmeansConstants.NEW_CENTER_NAME), true, conf, "");

            iteration += 1;

            // Compare previous and new centroids; stop iterating when they are close.
            // NOTE(review): PRE_CENTER_NAME is never overwritten with the new centers
            // in this loop, so every iteration appears to run against the same initial
            // centroids — verify whether the update happens elsewhere (e.g. in the
            // reducer or HdfsFileUtils).
            List<String> preCenters = HdfsFileUtils.readFileAsStrings(fs, new Path(KmeansConstants.PRE_CENTER_NAME));
            List<String> newCenters = HdfsFileUtils.readFileAsStrings(fs, new Path(KmeansConstants.NEW_CENTER_NAME));

            // Sort both lists so centers are paired in a stable order.
            Collections.sort(preCenters);
            Collections.sort(newCenters);

            if(preCenters.size() != newCenters.size()){
                logger.error("centers are illegal as pre size = " + preCenters.size() + ", new size = " + newCenters.size());
                System.exit(-1);
            }

            // Converged when the total Hamming distance is at most one per center.
            if(getCentersDistance(preCenters, newCenters) <= preCenters.size()){
                isDone = true;
            }

        }
        // After the loop, merge the last iteration's output into the new-centers file.
        // NOTE(review): the in-loop copyMerge above already deleted lastOutputPath
        // (deleteSource = true), so this merge and the cache file handed to the
        // classification job may point at a missing path — confirm against
        // HdfsFileUtils.copyMerge semantics.
        HdfsFileUtils.copyMerge(fs, new Path(lastOutputPath), fs, new Path(KmeansConstants.NEW_CENTER_NAME), true, conf, "");

        String biClfPath = KmeansConstants.BINARY_CLASSFICATION_PATH + System.nanoTime();
        logger.info("output binary classfic result : " + biClfPath);
        int status = lunchBiClassfyJob(conf, lastOutputPath, biClfPath) ? 0 : 1;
        System.exit(status);
    }

    /**
     * Computes the total Hamming distance between two centroid lists, pairing
     * centers by index. Callers are expected to sort both lists beforehand so
     * that the pairwise comparison is stable.
     *
     * @param preCenters centers from the previous iteration
     * @param newCenters centers from the current iteration
     * @return the sum of per-pair Hamming distances
     * @throws IllegalArgumentException if the lists differ in size
     */
    public static int getCentersDistance(List<String> preCenters, List<String> newCenters){
        // Guard against unequal lists instead of failing later with
        // IndexOutOfBoundsException inside the loop.
        if(preCenters.size() != newCenters.size()){
            throw new IllegalArgumentException("center lists differ in size: "
                    + preCenters.size() + " vs " + newCenters.size());
        }
        HanminDistanceComputer hdc = new HanminDistanceComputer();
        int sumDistance = 0;
        for(int i = 0; i < preCenters.size(); i++){
            sumDistance += hdc.getHMDistance(preCenters.get(i), newCenters.get(i));
        }
        return sumDistance;
    }


    /**
     * Launches the binary classification job that assigns each input record to
     * a center.
     *
     * @param conf           Hadoop configuration
     * @param centerFilePath HDFS path of the centers file, distributed to
     *                       mappers via the job cache
     * @param outputPath     HDFS output directory for the classification result
     * @return true if the job completed successfully
     * @throws Exception if job setup or execution fails
     */
    public static boolean lunchBiClassfyJob(Configuration conf, String centerFilePath, String outputPath) throws Exception{
        Job job = Job.getInstance(conf, "Binary Classfication Job");
        job.addCacheFile(new Path(centerFilePath).toUri());
        job.setJarByClass(KMeans.class);
        job.setMapperClass(BiCclassifyMapper.class);
        job.setReducerClass(KeyPrintlnReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(KmeansConstants.INPUT_PATH + KmeansConstants.DATA_FILE_NAME));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
        return job.waitForCompletion(true);
    }

    /**
     * Launches one KMeans iteration job.
     *
     * @param conf                Hadoop configuration
     * @param iteration           current iteration number (used in the job name)
     * @param outputPath          job output directory
     * @param distributedFilePath HDFS path of the centroids file, distributed
     *                            to mappers via the job cache
     * @return true if the job completed successfully
     * @throws Exception if job setup or execution fails
     */
    public static boolean lunchKMeansIteJob(Configuration conf, int iteration, Path outputPath, String distributedFilePath) throws Exception{
        Job job = Job.getInstance(conf, KmeansConstants.JOB_NAME + iteration);
        job.addCacheFile(new Path(distributedFilePath).toUri());
        job.setJarByClass(KMeans.class);
        job.setMapperClass(KmeansMapper.class);
        job.setPartitionerClass(KMeansPartitioner.class);
        job.setReducerClass(KmeansReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(KmeansConstants.INPUT_PATH + KmeansConstants.DATA_FILE_NAME));
        FileOutputFormat.setOutputPath(job, outputPath);
        return job.waitForCompletion(true);
    }
}
