package cas.ihep.hmss.hdfsin;

/**
 * Created by chocolate on 2017/11/24.
 */

import cas.ihep.hmss.JobSubmit;

import com.google.common.io.Closer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

public class HdfsIn extends Configured implements JobSubmit,Tool {

    private Path[] getInputPath(Configuration conf,int n,List<String> fList) throws IOException {
        FileSystem hdfs= FileSystem.get(conf);
        int count=fList.size();
        ArrayList<String> inputfiles=new ArrayList<>();
        int d=count/n;
        int m = count%n;
        if(m!=0){
            d++;
        }
        String tmppath;
        for (int i =0;i<n;i++){
            tmppath="/tmp/filelist/"+"task"+i+ UUID.randomUUID().toString();
            OutputStream out=hdfs.create(new Path(tmppath));
            for (int j=0;j<d;j++){
                if(i*d+j>=fList.size()){
                    break;
                }
                String s= fList.get(i*d+j);
                if (s!=null){
                    out.write(s.getBytes());
                    out.write("\r\n".getBytes());
                }

            }
            out.close();
            inputfiles.add(tmppath);
        }
        Path[] paths=new Path[inputfiles.size()];
        for(int i=0;i<inputfiles.size();i++){
            paths[i]=new Path(inputfiles.get(i));
        }
        return paths;
    }

    private Path[] getInputPath(Configuration conf, String optionFile) throws IOException, InterruptedException {
        /*读取optionfile,并包装成一个hdfs的临时文件*/
        try(Closer closer= Closer.create()){
            BufferedReader br = closer.register(new BufferedReader(closer.register(new FileReader(optionFile))));
            int index = -1;
            FileSystem hdfs= FileSystem.get(conf);
            ArrayList<String> inputfiles=new ArrayList<>();
            //for(String s= br.readLine(); s != null; s = br.readLine()){
            // String tmppath="/tmp/filelist/"+UUID.randomUUID().toString();
            // /*为optionfile每一行创建一个临时文件*/
            // try(OutputStream out=hdfs.create(new Path(tmppath))) {
            //   out.write(s.getBytes());/*把optionfile每一行写入临时文件*/
            // }
            // }
            String l = "";
            int count = 0;
            ArrayList<String> fList=new ArrayList<>();
            while ((l = br.readLine())!=null){
                count ++;
                fList.add(l);
            }
            //int n = count/d;
            int n=conf.getInt("task.cores",0);
            int d=count/n;
            int m = count%n;
            if(m!=0){
                d++;
            }
            String tmppath;
            for (int i =0;i<n;i++){
                tmppath="/tmp/filelist/"+"task"+i+ UUID.randomUUID().toString();
                OutputStream out=hdfs.create(new Path(tmppath));
                for (int j=0;j<d;j++){
                    if(i*d+j>=fList.size()){
                        break;
                    }
                    String s= fList.get(i*d+j);
                    if (s!=null){
                        out.write(s.getBytes());
                        out.write("\r\n".getBytes());
                    }

                }
                out.close();
                inputfiles.add(tmppath);
            }
//            if (m>0){
//                tmppath="/tmp/filelist/"+"task"+n+UUID.randomUUID().toString();
//                OutputStream out=hdfs.create(new Path(tmppath));
//                for (int t=d*n;t<count;t++){
//                    String s= fList.get(t);
//                    if (s!=null){
//                        out.write(s.getBytes());
//                        out.write("\r\n".getBytes());
//                    }
//                }
//                out.close();
//                inputfiles.add(tmppath);
//            }

            Path[] paths=new Path[inputfiles.size()];
            for(int i=0;i<inputfiles.size();i++){

                paths[i]=new Path(inputfiles.get(i));
            }
            return paths;
        }

    }

    private void setInputPaths(Configuration conf,Path inputPaths[])throws IOException{
        Path path = inputPaths[0].getFileSystem(conf).makeQualified(inputPaths[0]);
        StringBuilder str = new StringBuilder(StringUtils.escapeString(path.toString()));
        for(int i = 1; i < inputPaths.length; ++i) {
            str.append(",");
            path = inputPaths[i].getFileSystem(conf).makeQualified(inputPaths[i]);
            str.append(StringUtils.escapeString(path.toString()));
        }
        conf.set("mapreduce.input.fileinputformat.inputdir", str.toString());
    }

    public org.apache.hadoop.mapreduce.Job submit(JobClient jclient, int cores, List<String> inputfiles, String destpath) throws IOException,
            ClassNotFoundException, InterruptedException {
        System.out.println("Submitting job ...");
        JobConf jobConf=new JobConf(jclient.getConf(),HdfsIn.class);
        jobConf.set("target.directory",destpath);
        jobConf.setQueueName("ybj");
        jobConf.setJarByClass(HdfsIn.class);
        jobConf.setNumReduceTasks(0);
        jobConf.setJobName("HmsIn");
        Job job=Job.getInstance(jobConf);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(NullOutputFormat.class);
        job.setMapperClass(ApiMapper.class);
        job.setReducerClass(InReducer.class);
        Path[] inputPaths = getInputPath(jobConf,cores,inputfiles);
        if (inputPaths==null || inputPaths.length<=0){
            throw  new FileNotFoundException("InputFile does not exist.");
        }
        FileInputFormat.setInputPaths(job,inputPaths);
        job.submit();
        return job;
    }

    public int run(String[] args) throws Exception {
        JobConf jobconf = new JobConf(HdfsIn.class);
        //String opfilePath = optionFile + "#OptionFile.txt";
        jobconf.setQueueName("ybj");
        jobconf.set("target.directory",args[1]);
        jobconf.set("task.cores",args[2]);
        Job job = Job.getInstance(jobconf);
        job.setJobName("Hms");
        job.setJarByClass(HdfsIn.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(NullOutputFormat.class);
        job.setMapperClass(ApiMapper.class);
        job.setReducerClass(InReducer.class);
        job.setNumReduceTasks(0);
        //FileSystem hdfs=FileSystem.get(jobconf);

        Path[] inputPaths = getInputPath(jobconf,args[0]);
        if (inputPaths==null || inputPaths.length<=0){
            throw  new FileNotFoundException("InputFile does not exist.");
        }
        //FileSystem fs = FileSystem.get(getConf());
        //Path[] inputPaths = Utils.getRecursivePaths(fs, inputPath);/*循环获取inputpath中的path对象 */
        FileInputFormat.setInputPaths(job, inputPaths);
        job.submit();
        return 0;
    }
}
