package com.galeno.join;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Reduce-side join of order and user data sets on user id.
 *
 * @author galeno
 * @date 2021/7/27 9:27
 */
public class Join {

    /**
     * Job driver: configures and runs a reduce-side join of user and order
     * records keyed on user id.
     *
     * @param args optional: args[0] = input directory, args[1] = output
     *             directory; defaults to the original hard-coded local paths.
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // Run against the local file system (standalone mode).
        conf.set("fs.defaultFS", "file:///");
        Job job = Job.getInstance(conf, "orderuser");
        // Required so the framework can locate the job jar when the job
        // is submitted to a cluster rather than run locally.
        job.setJarByClass(Join.class);
        job.setMapperClass(JoinMapper.class);
        job.setReducerClass(JoinReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        job.setNumReduceTasks(2);
        // Paths may be overridden on the command line; fall back to the
        // original defaults for backward compatibility.
        String input = args.length > 0 ? args[0] : "D:\\mrdata\\join\\input";
        String output = args.length > 1 ? args[1] : "D:\\mrdata\\join\\output";
        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));
        // Propagate job success/failure as the process exit status instead
        // of silently discarding it.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * Emits (uid, rawLine) for every input record. Files whose name starts
     * with "user" hold comma-delimited user records with the uid in column 0;
     * every other file is treated as space-delimited order data with the uid
     * in column 1.
     */
    static class JoinMapper extends Mapper<LongWritable, Text, Text, Text> {
        // Name of the file backing the current split; decides how each
        // line is parsed (user vs. order record).
        private String fileName;

        private final Text k = new Text();
        private final Text v = new Text();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            FileSplit inputSplit = (FileSplit) context.getInputSplit();
            fileName = inputSplit.getPath().getName();
        }

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            String uid;
            if (fileName.startsWith("user")) {
                // User record: "uid,..." — join key is the first field.
                uid = line.split(",")[0];
            } else {
                // Order record: "orderId uid ..." — join key is the second field.
                uid = line.split(" ")[1];
            }
            k.set(uid);
            v.set(line);
            context.write(k, v);
        }
    }

    /**
     * Receives all records sharing a uid and performs the join: emits
     * "orderId:userLine" for every order of that user. Order lines are told
     * apart from the single user line by containing a space (orders are
     * space-delimited, user records comma-delimited).
     */
    static class JoinReducer extends Reducer<Text, Text, Text, NullWritable> {
        private final Text k = new Text();

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            List<String> orders = new ArrayList<>();
            String user = null;
            for (Text value : values) {
                String line = value.toString();
                // NOTE(review): relies on order lines being space-delimited
                // and user lines containing no space — confirm input format.
                if (line.contains(" ")) {
                    orders.add(line);
                } else {
                    user = line;
                }
            }
            // Inner-join semantics: emit nothing unless both sides are
            // present for this uid. (orders is always non-null, so the
            // original null check was redundant.)
            if (user != null) {
                for (String order : orders) {
                    k.set(order.split("\\s+")[0] + ":" + user);
                    context.write(k, NullWritable.get());
                }
            }
        }
    }
}
