package com.shujia.mr.kqzl2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/*
    Reduce-side join of two inputs, keyed by monitoring-station id:

    Air-quality data (part-r-00000):
        20180418-1048   58   ->  <1048, #20180418-58>

    City/station metadata (city.csv):
        1048,东污水处理厂,邯郸  ->  <1048, $邯郸东污水处理厂>
 */
/**
 * Map side of the reduce-side join. Each record is routed by the name of the
 * file it came from and tagged so the reducer can tell the two sides apart:
 * "#" marks a PM2.5 measurement, "$" marks city/station metadata.
 *
 *   part-r-00000 line "20180418-1048\t58"    -> <1048, "#20180418-58">
 *   city.csv line   "1048,东污水处理厂,邯郸"  -> <1048, "$邯郸东污水处理厂">
 */
class KQZLCityMapper extends Mapper<LongWritable, Text, Text, Text> {
    // Reusable output objects — avoids allocating two Text instances per record.
    private final Text outKey = new Text();
    private final Text outValue = new Text();

    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context) throws IOException, InterruptedException {
        // Identify the source file of this split to decide how to parse the line.
        FileSplit inputSplit = (FileSplit) context.getInputSplit();
        String fileName = inputSplit.getPath().getName();

        String line = value.toString();
        if (fileName.startsWith("part")) {
            // Air-quality record, e.g. "20180418-1048\t58"
            String[] infos = line.split("\t");
            if (infos.length < 2) {
                return; // skip malformed lines instead of crashing the task
            }
            String[] dateAndId = infos[0].split("-"); // split once, use both parts
            if (dateAndId.length < 2) {
                return;
            }
            String date = dateAndId[0];    // 20180418
            String id = dateAndId[1];      // 1048 (monitoring-station id)
            String avgPm25 = infos[1];
            outKey.set(id);
            outValue.set("#" + date + "-" + avgPm25);
            context.write(outKey, outValue); // <1048, #20180418-58>
        } else if (fileName.startsWith("city")) {
            // Station metadata record, e.g. "1048,东污水处理厂,邯郸"
            String[] infos = line.split(",");
            if (infos.length < 3) {
                return;
            }
            outKey.set(infos[0]);
            outValue.set("$" + infos[2] + infos[1]); // city name + station name
            context.write(outKey, outValue); // <1048, $邯郸东污水处理厂>
        }
    }
}

/**
 * Reduce side of the join. For one monitoring-station id the values contain
 * zero or more "#date-avgPm25" measurements (one per day) and at most one
 * "$cityAndStation" metadata record. Emits one joined output line per
 * measurement; measurements without a matching city record are labeled
 * "未知地点" (unknown location).
 */
class KQZLCityReducer extends Reducer<Text, Text, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, Text, Text>.Context context) throws IOException, InterruptedException {
        // A station can appear once per day, so there may be MANY "#" records
        // for one key. Collect them all — overwriting a single variable (as the
        // naive version does) would silently drop every measurement but the last.
        List<String> measurements = new ArrayList<>();
        String city = null;

        // e.g. <1048, [#20180418-58, $邯郸东污水处理厂]>
        for (Text value : values) {
            String info = value.toString();
            if (info.startsWith("#")) {
                measurements.add(info.substring(1)); // "20180418-58"
            } else if (info.startsWith("$")) {
                city = info.substring(1);            // "邯郸东污水处理厂"
            }
        }

        String cityLabel = (city == null) ? "未知地点" : city;
        for (String measurement : measurements) {
            String[] parts = measurement.split("-"); // [date, avgPm25]
            String date = parts[0];
            String avgPm25 = parts[1];
            context.write(key, new Text("城市:" + cityLabel + ", 平均pm2.5值:" + avgPm25 + ", 日期:" + date));
        }
    }
}


/**
 * Driver: configures and submits the join job.
 *
 * Usage: KQZLWithCity &lt;input path&gt; &lt;output path&gt;
 *   args[0] — input directory (must contain both the part-* and city.csv files)
 *   args[1] — output directory (must not already exist on HDFS)
 */
public class KQZLWithCity {
    public static void main(String[] args) throws Exception {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: KQZLWithCity <input path> <output path>");
            System.exit(2);
        }

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");

        Job job = Job.getInstance(conf);
        // job.setNumReduceTasks(4); // number of reducers; the default is 1

        job.setJarByClass(KQZLWithCity.class);
        job.setJobName("监测点日平均PM2.5值关联城市信息");

        job.setMapperClass(KQZLCityMapper.class);
        job.setReducerClass(KQZLCityReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        boolean b = job.waitForCompletion(true);
        if (b) {
            System.out.println("32期 监测点日平均PM2.5值关联城市信息mapreduce实现执行成功！>_-");
        } else {
            System.out.println("32期 监测点日平均PM2.5值关联城市信息mapreduce实现执行失败！T_T");
        }
        // Propagate the job status to the shell so scripts can detect failure.
        System.exit(b ? 0 : 1);
    }
}
