package com.shujia.wyh.kqzldemo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapTask;
import org.apache.hadoop.mapred.ReduceTask;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.LinkedList;



//reduce: <"2604",["$20180718-22","#临沧-市气象局"]>
// Reduce-side join: for one monitoring-station key, values carry two tagged record kinds:
//   "$<date>-<pm25Avg>"  — a PM2.5 measurement, e.g. "$20180718-22"
//   "#<cityName>"        — station/city info,   e.g. "#临沧-市气象局"
// Emits the cross product of measurements × cities for the key.
class PM25CityReducer extends Reducer<Text, Text, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, Text, Text>.Context context) throws IOException, InterruptedException {
        // Buffer both sides of the join; value arrival order is not guaranteed.
        LinkedList<String> pm25Datas = new LinkedList<>();
        LinkedList<String> citys = new LinkedList<>();

        // Sort each value into its bucket by tag prefix; untagged records are ignored.
        for (Text value : values) {
            String info = value.toString();
            if (info.startsWith("$")) {
                pm25Datas.add(info.substring(1)); // strip "$" tag
            } else if (info.startsWith("#")) {
                citys.add(info.substring(1));     // strip "#" tag
            }
        }

        // Join: one output row per (measurement, city) pair.
        for (String pm25Data : pm25Datas) {
            // Split once with limit 2: date is before the first '-', average is the rest.
            String[] parts = pm25Data.split("-", 2);
            if (parts.length < 2) {
                // Malformed measurement record — skip instead of crashing the task
                // with an ArrayIndexOutOfBoundsException.
                continue;
            }
            String date = parts[0];
            String pm25Avg = parts[1];
            for (String city : citys) {
                context.write(new Text("日期：" + date + ",检测点：" + city), new Text("PM2.5的平均值：" + pm25Avg));
            }
        }
    }
}

/**
 * Driver for the PM2.5 / city reduce-side join job.
 *
 * <p>Usage: {@code PM25CityDemo <inputPath> <outputPath>} — the input path may be a
 * file or a directory (all files inside a directory are read); the output path must
 * not already exist.
 */
public class PM25CityDemo {
    public static void main(String[] args) throws Exception {
        // Load the Hadoop cluster configuration (core-site.xml etc. from the classpath).
        Configuration conf = new Configuration();
//        conf.set("mapreduce.input.fileinputformat.split.minsize","23");
//        conf.set("mapreduce.job.reduces","2");
        // Map-side sort buffer size in megabytes. The property expects a plain integer;
        // the previous value "200m" would fail to parse and crash the tasks.
        conf.set("mapreduce.task.io.sort.mb", "200");

        // Create the job.
        Job job = Job.getInstance(conf);

        // Human-readable job name shown in the ResourceManager UI.
        job.setJobName("检测编号与城市编号进行连接");

        // Single reducer so all records for a station key meet in one reduce call.
        job.setNumReduceTasks(1);

        // Jar locator — Hadoop ships the jar containing this class to the cluster.
        job.setJarByClass(PM25CityDemo.class);

        // Mapper class and its output key/value types.
        // NOTE(review): PM25CityMapper is defined elsewhere in this package — not visible here.
        job.setMapperClass(PM25CityMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Reducer class and the job's final output key/value types.
        job.setReducerClass(PM25CityReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Input source: a single file or a directory (all contained files are read).
        FileInputFormat.addInputPath(job, new Path(args[0]));
        // Output directory for the reduce results; must not already exist.
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Submit to YARN, wait for completion, and propagate success/failure
        // via the process exit code (standard Hadoop driver idiom).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
