package com.csw.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;

public class alltest {
    // --- Reference implementation 1 (commented out): combiner pre-aggregation.
    // Counts the number of students per class, using a combiner for map-side
    // pre-aggregation before the shuffle.
//    public static class combineMap extends Mapper<LongWritable,Text,Text,IntWritable>{
//        @Override
//        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
//            String clazz = value.toString().split(",")[4];
//            context.write(new Text(clazz),new IntWritable(1));
//        }
//    }
//    public static class combine extends Reducer<Text,IntWritable,Text,IntWritable>{
//        @Override
//        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
//            int sum=0;
//            for (IntWritable value : values) {
//                int i = value.get();
//                sum+=i;
//            }
//            String clazz = key.toString();
//            context.write(new Text(clazz),new IntWritable(sum));
//        }
//    }
//    public static class combineReduce extends Reducer<Text,IntWritable,Text,IntWritable>{
//        @Override
//        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
//            int sum=0;
//            for (IntWritable value : values) {
//                int i = value.get();
//                sum+=i;
//            }
//            String clazz = key.toString();
//            context.write(new Text(clazz),new IntWritable(sum));
//        }
//    }
//    public static void main(String[] args) throws Exception{
//        Job job = Job.getInstance();
//        job.setJobName("a");
//        job.setJarByClass(alltest.class);
//
//
//        job.setMapperClass(combineMap.class);
//        job.setMapOutputKeyClass(Text.class);
//        job.setMapOutputValueClass(IntWritable.class);
//
//        // Set the combiner for map-side pre-aggregation
//        job.setCombinerClass(combine.class);
//
//        job.setReducerClass(combineReduce.class);
//        job.setOutputKeyClass(Text.class);
//        job.setOutputValueClass(IntWritable.class);
//
//        FileInputFormat.addInputPath(job,new Path("/data/students.txt"));
//        Path path = new Path("/data/a");
//        FileSystem fs = FileSystem.get(new Configuration());
//        if (fs.exists(path)){
//            fs.delete(path,true);
//        }
//        FileOutputFormat.setOutputPath(job,path);
//
//        job.waitForCompletion(true);
//    }



    // --- Reference implementation 2 (commented out): reduce-side join of the
    // student table with the score table, tagging records by source file.
//    public static class joinMap extends Mapper<LongWritable,Text,Text,Text>{
//        @Override
//        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
//            FileSplit fs = (FileSplit) context.getInputSplit();
//            String name = fs.getPath().getName();
//            if (name.contains("students.txt")){
//                String id = value.toString().split(",")[0];
//                String line ="stu"+value.toString();
//                context.write(new Text(id),new Text(line));
//            }else{
//                String id = value.toString().split(",")[0];
//                String line = "sco"+value.toString();
//                context.write(new Text(id),new Text(line));
//            }
//        }
//    }
//    public static class joinReduce extends Reducer<Text,Text,Text,NullWritable>{
//        @Override
//        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
//            String students="";
//            ArrayList<String> scores = new ArrayList<String>();
//            if (values.toString().startsWith("stu")){
//                students=values.toString().substring(3);
//            }else{
//                String score = values.toString().substring(19);
//                scores.add(score);
//            }
//            for (String score : scores) {
//                students=students+score;
//            }
//            context.write(new Text(students),NullWritable.get());
//        }
//    }
//
//    public static void main(String[] args) throws Exception{
//        Job job = Job.getInstance();
//        job.setJobName("a");
//        job.setJarByClass(alltest.class);
//
//        job.setMapperClass(joinMap.class);
//        job.setMapOutputKeyClass(Text.class);
//        job.setMapOutputValueClass(Text.class);
//
//        job.setReducerClass(joinReduce.class);
//        job.setMapOutputKeyClass(Text.class);
//        job.setOutputValueClass(NullWritable.class);
//
//        FileInputFormat.addInputPath(job,new Path("/data/students.txt"));
//        FileInputFormat.addInputPath(job,new Path("/data/score.txt"));
//
//        Path path = new Path("/data/alltest");
//        FileSystem fs = FileSystem.get(new Configuration());
//        if (fs.exists(path)){
//            fs.delete(path,true);
//        }
//        FileOutputFormat.setOutputPath(job,path);
//
//        job.waitForCompletion(true);
//    }


    /**
     * Map-side join: joins the big score table against the small student
     * table. The student table is small enough to be loaded fully into
     * memory, so the join happens entirely in the mapper (no reduce phase).
     * Emits one Text line per matched score record, with a NullWritable value.
     */
    public static class MapJoin extends Mapper<LongWritable, Text, Text, NullWritable> {
        // Small-table lookup: student id -> full student record line.
        private final HashMap<String, String> students = new HashMap<String, String>();

        /**
         * Runs once per map task before any map() call. Loads the small
         * student table from HDFS into the in-memory {@link #students} map.
         *
         * @throws IOException if the student file cannot be opened or read
         */
        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Use the job's configuration (not a fresh Configuration) so
            // settings such as fs.defaultFS carry over to the FileSystem.
            FileSystem fs = FileSystem.get(context.getConfiguration());
            // try-with-resources: the original leaked the stream/reader.
            // Explicit UTF-8 avoids depending on the platform default charset.
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(fs.open(new Path("/data/students.txt")), "UTF-8"))) {
                String line;
                while ((line = br.readLine()) != null) {
                    String id = line.split(",")[0];
                    students.put(id, line);
                }
            }
        }

        /**
         * Joins one score record with its student record and writes the
         * concatenated line as the output key.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String row = value.toString();
            // NOTE(review): substring(19) assumes score.txt rows have a
            // fixed-width 19-char prefix before the score fields — confirm
            // against the actual file format.
            if (row.length() <= 19) {
                // Malformed/short line: skip instead of throwing
                // StringIndexOutOfBoundsException.
                return;
            }
            String score = row.substring(19);
            String id = row.split(",")[0];
            // Look up the student record for this score's id.
            String student = students.get(id);
            if (student == null) {
                // No matching student: skip instead of emitting "null"+score.
                return;
            }
            context.write(new Text(student + score), NullWritable.get());
        }
    }

    /**
     * Job driver: configures and submits the map-only join job.
     * Exits non-zero if the job fails so callers/scripts can detect failure.
     */
    public static void main(String[] args) throws Exception {
        // Share one Configuration between the Job and the FileSystem so both
        // see the same settings.
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJobName("mapComdine");
        job.setJarByClass(alltest.class);

        // Map-only job: zero reduce tasks, mapper output goes straight to HDFS.
        job.setNumReduceTasks(0);

        job.setMapperClass(MapJoin.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);

        FileInputFormat.addInputPath(job, new Path("/data/score.txt"));

        // Remove a pre-existing output directory, otherwise the job fails.
        Path path = new Path("/data/alltest");
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(path)) {
            fs.delete(path, true);
        }
        FileOutputFormat.setOutputPath(job, path);

        // Propagate the job result as the process exit code (the original
        // ignored it, so failed jobs still exited with status 0).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
