package com.nd.mr.wordcount.homework;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.StringTokenizer;

public class WriteSQL {
    public static class StaMapper extends Mapper<LongWritable, Text, statistics, NullWritable>{
        // Reused output key to avoid one allocation per record (standard Hadoop pattern).
        private statistics k = new statistics();

        /**
         * Parses one CSV line of epidemic statistics into a {@code statistics}
         * record and emits it as the map output key (value is NullWritable).
         * Expected column layout: date,region,location,increase,cured,death.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Decode the raw line bytes as GBK — assumes the input CSV is
            // GBK-encoded (value.toString() would decode as UTF-8 and mangle
            // Chinese text). NOTE(review): confirm the source file encoding.
            String line = new String(value.getBytes(), 0, value.getLength(), "GBK");
            String[] fields = line.split(",", -1);

            // Skip malformed/short lines instead of failing the whole task
            // with ArrayIndexOutOfBoundsException.
            if (fields.length < 6) {
                return;
            }

            k.setId();
            k.setDate(fields[0]);
            k.setRegion(fields[1]);
            k.setLocation(fields[2]);
            k.setIncrease(fields[3]);
            k.setCured(fields[4]);
            k.setDeath(fields[5]);
            context.write(k, NullWritable.get());
        }
    }

    public static class StaReducer extends Reducer<statistics, NullWritable, statistics, NullWritable>{
        /**
         * Identity reducer: writes each distinct {@code statistics} key exactly
         * once and discards the grouped NullWritable values — effectively
         * deduplicating identical records before output.
         */
        @Override
        protected void reduce(statistics key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
            final NullWritable nothing = NullWritable.get();
            context.write(key, nothing);
        }
    }

    /**
     * Job driver: reads a local CSV of epidemic statistics, maps each line to a
     * {@code statistics} record, deduplicates in the reducer, and writes the
     * result to the local file system (a commented-out alternative writes to
     * MySQL via DBOutputFormat). Exits 0 on success, 1 on failure.
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();

        // BUG FIX: DB settings must go into the Configuration BEFORE
        // Job.getInstance(conf) — the Job copies the Configuration at creation
        // time, so anything configured afterwards never reaches the job.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://localhost:3306/mr?characterEncoding=utf-8",
                "root", "root");

        Job job = Job.getInstance(conf);
        job.setJarByClass(WriteSQL.class);

        job.setMapperClass(StaMapper.class);
        job.setReducerClass(StaReducer.class);

        job.setMapOutputKeyClass(statistics.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setOutputKeyClass(statistics.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path("D:\\training\\data\\input\\data.csv"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\training\\data\\output\\yq"));

        // To write directly to MySQL instead of the file system, replace the
        // FileOutputFormat.setOutputPath call above with:
        //   String[] fields = {"Id","date","region","location","increase","cured","death"};
        //   job.setOutputFormatClass(DBOutputFormat.class);
        //   DBOutputFormat.setOutput(job, "tb_YiqingSta", fields);

        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}

