package com.mapreduce;

import com.bean.People;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;

/**
 * MapReduce job that loads CSV records ("id,name,age") from HDFS and writes
 * them into the MySQL table {@code people(id, name, age)} via
 * {@link DBOutputFormat}.
 *
 * <p>Run via {@link ToolRunner}; the first command-line argument, if present,
 * overrides the default HDFS input path.
 */
public class WriteDBMapReduce extends Configured implements Tool {

    /**
     * Parses one CSV line into a {@link People} bean.
     *
     * <pre>
     * K1/V1 (TextInputFormat):  byte offset -> "3,wangwu,3000"
     * K2/V2:                    NullWritable -> People(3, "wangwu", 3000)
     * </pre>
     *
     * Blank or malformed lines (wrong column count, non-numeric id/age) are
     * skipped and tallied in the {@code WriteDB:MALFORMED_RECORDS} counter
     * instead of failing the task with an uncaught NumberFormatException.
     */
    static class WriteDBMapper extends
            Mapper<LongWritable, Text, NullWritable, People> {
        @Override
        protected void map(LongWritable key, Text value,
                           Mapper<LongWritable, Text, NullWritable, People>.Context context)
                throws IOException, InterruptedException {
            String line = value.toString().trim();
            // Skip empty lines rather than crashing on Integer.parseInt("").
            if (line.isEmpty()) {
                return;
            }
            String[] fields = line.split(",");
            if (fields.length < 3) {
                context.getCounter("WriteDB", "MALFORMED_RECORDS").increment(1L);
                return;
            }
            try {
                int id = Integer.parseInt(fields[0].trim());
                String name = fields[1].trim();
                int age = Integer.parseInt(fields[2].trim());
                // A People bean is required here: DBOutputFormat persists the
                // reducer's key objects (which implement DBWritable) to the DB.
                context.write(NullWritable.get(), new People(id, name, age));
            } catch (NumberFormatException e) {
                // Bad numeric field — record it and move on; one dirty row
                // should not kill the whole job.
                context.getCounter("WriteDB", "MALFORMED_RECORDS").increment(1L);
            }
        }
    }

    /**
     * Identity-style reducer that swaps key and value: each {@link People}
     * bean becomes the output KEY, which is what DBOutputFormat writes to the
     * database table.
     */
    static class WriteDBReducer extends
            Reducer<NullWritable, People, People, NullWritable> {
        @Override
        protected void reduce(NullWritable key, Iterable<People> values,
                              Reducer<NullWritable, People, People, NullWritable>.Context context)
                throws IOException, InterruptedException {
            for (People person : values) {
                context.write(person, NullWritable.get());
            }
        }
    }

    /**
     * Configures and submits the job.
     *
     * @param args optional; {@code args[0]} overrides the HDFS input path
     *             (previously the path was hard-coded and CLI args were
     *             silently ignored, defeating the purpose of ToolRunner)
     * @return 0 on success, -1 on failure
     * @throws Exception if job submission or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // Generalized: take the input path from the command line when given;
        // the original hard-coded path remains the default for compatibility.
        Path input = new Path(args.length > 0
                ? args[0]
                : "hdfs://192.168.10.11:9000/dbdata");
        // NOTE(review): JDBC credentials are hard-coded; for anything beyond a
        // demo, pass them via -D generic options into the Configuration.
        DBConfiguration.configureDB(conf,
                "com.mysql.cj.jdbc.Driver",
                "jdbc:mysql://192.168.10.11:3306/briup",
                "root",
                "root");

        Job job = Job.getInstance(conf);
        job.setJobName("writedb");
        job.setJarByClass(this.getClass());

        job.setMapperClass(WriteDBMapper.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(People.class);

        job.setReducerClass(WriteDBReducer.class);
        job.setOutputKeyClass(People.class);
        job.setOutputValueClass(NullWritable.class);

        TextInputFormat.addInputPath(job, input);

        job.setOutputFormatClass(DBOutputFormat.class);
        // Table name and column order must match People's DBWritable.write().
        DBOutputFormat.setOutput(job, "people",
                "id", "name", "age");
        return job.waitForCompletion(true) ? 0 : -1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new WriteDBMapReduce(), args));
    }
}
