package cn.ekgc.mapjoin;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Map-side join of an employee file with a department lookup file.
 *
 * <p>The department file (args[0], optionally in {@code hdfsPath#alias} form) is shipped to every
 * mapper via the distributed cache and loaded into an in-memory map in {@link MyMapper#setup}.
 * Each employee record (args[1]) is then joined against that map without a shuffle of the
 * department data; the reducer only deduplicates identical output lines.
 *
 * <p>Usage: {@code EmpJoinDept <deptFile[#alias]> <inputPath> <outputPath>}
 */
public class EmpJoinDept extends Configured implements Tool {

    /** Config key: mapper-local path ("./name") of the cached department file. */
    private static final String CACHE_PATH_KEY = "myCachePath";
    /** Config key: base file name of the department file, used to pick it out of the cache list. */
    private static final String FILE_NAME_KEY = "myFileName";
    /** Department name emitted when an employee references an id missing from the lookup file. */
    private static final String UNKNOWN_DEPT = "UNKNOWN";

    @Override
    public int run(String[] args) throws Exception {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 3) {
            System.err.println("Usage: EmpJoinDept <deptFile[#alias]> <inputPath> <outputPath>");
            return 2;
        }
        Configuration configuration = getConf();
        String relativePath;
        String fileName;
        // Decide whether args[0] is a URI ("hdfsPath#alias") or a plain path.
        int hashIdx = args[0].lastIndexOf('#');
        if (hashIdx > 0) { // URI form: the alias after '#' is the local symlink name in the task dir
            relativePath = "./" + args[0].substring(hashIdx + 1);
            fileName = args[0].substring(args[0].lastIndexOf('/') + 1, hashIdx);
        } else { // plain path: the cache entry keeps the same base name as on HDFS
            relativePath = "./" + args[0].substring(args[0].lastIndexOf('/') + 1);
            fileName = args[0].substring(args[0].lastIndexOf('/') + 1);
        }
        // Share the cache-file location with the mappers through the job configuration.
        configuration.set(CACHE_PATH_KEY, relativePath);
        configuration.set(FILE_NAME_KEY, fileName);
        setConf(configuration);

        Job job = Job.getInstance(configuration, "emp-join-dept");
        // Register the department file in the distributed cache.
        if (hashIdx > 0) {
            job.addCacheFile(new URI(args[0])); // hdfsPath#alias
        } else {
            job.addCacheFile(new Path(args[0]).toUri()); // cached under the original file name
        }

        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReduce.class);

        // Both map and reduce emit Text keys with NullWritable values.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[1]));
        // Remove a pre-existing output directory so the job does not fail on resubmission.
        Path out = new Path(args[2]);
        FileSystem fileSystem = FileSystem.get(getConf());
        if (fileSystem.exists(out)) {
            fileSystem.delete(out, true);
        }
        FileOutputFormat.setOutputPath(job, out);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Mapper that loads the cached department file into memory and joins each employee line
     * ({@code id name salary deptId}, whitespace-separated) to its department name, emitting
     * {@code id,name,salary,deptName} as the key with a NullWritable value.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        /** Lookup table: department id -> department name, built once per task in setup(). */
        private Map<String, String> dept = new HashMap<>();
        private Text outKey = new Text();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();
            String relativePath = conf.get(CACHE_PATH_KEY);
            String fileName = conf.get(FILE_NAME_KEY);
            URI[] uris = context.getCacheFiles();
            if (uris == null) {
                return; // no cache files registered; the map will fall back to UNKNOWN_DEPT
            }
            for (URI uri : uris) {
                // Find our department file among the cached entries by its base name.
                if (uri.getPath().endsWith(fileName)) {
                    // Read explicitly as UTF-8; the no-charset constructor would use the
                    // platform default and could mangle non-ASCII department names.
                    try (BufferedReader in = new BufferedReader(new InputStreamReader(
                            new FileInputStream(relativePath), StandardCharsets.UTF_8))) {
                        in.lines().forEach(line -> {
                            String[] ss = line.split("\\s+");
                            // Skip blank/malformed lines instead of throwing on ss[1].
                            if (ss.length >= 2) {
                                dept.put(ss[0], ss[1]);
                            }
                        });
                    }
                    break; // department file found and loaded; stop scanning
                }
            }
        }

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            if (line.length() > 1) {
                String[] ss = line.split("\\s+");
                // Only well-formed records: id, name, salary, deptId.
                if (ss.length == 4) {
                    // getOrDefault avoids emitting the literal string "null" for unknown ids.
                    String deptName = dept.getOrDefault(ss[3], UNKNOWN_DEPT);
                    String newLine = String.format("%s,%s,%s,%s", ss[0], ss[1], ss[2], deptName);
                    this.outKey.set(newLine);
                    context.write(outKey, NullWritable.get());
                }
            }
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // Release the lookup table; the task JVM may be reused.
            dept.clear();
            dept = null;
        }
    }

    /**
     * Identity reducer: identical joined lines shuffle to the same key, so writing the key
     * once per group deduplicates the output.
     */
    public static class MyReduce extends Reducer<Text, NullWritable, Text, NullWritable> {

        @Override
        protected void reduce(Text key, Iterable<NullWritable> values, Context context)
                throws IOException, InterruptedException {
            context.write(key, NullWritable.get());
        }
    }

    /**
     * CLI entry point. Propagates the job status via System.exit so schedulers and shell
     * scripts observe a non-zero code on failure (previously the status was only printed).
     */
    public static void main(String[] args) throws Exception {
        int n = ToolRunner.run(new EmpJoinDept(), args);
        System.out.println("exit with status " + n);
        System.exit(n);
    }
}
