package cn.itcast.mapreduce.db.read;

import cn.itcast.mapreduce.db.bean.GoodsBean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


/**
 * Map-only MapReduce driver that reads goods records from a MySQL table
 * ({@code itheima_goods}) via {@link DBInputFormat} and writes the mapper's
 * {@code LongWritable}/{@code Text} output to a filesystem directory.
 */
public class ReadDBDriver {

    public static void main(String[] args) throws Exception {
        // Configuration object for this job
        Configuration conf = new Configuration();

        // Configure the JDBC connection info that DBInputFormat will use.
        // NOTE(review): driver/url/credentials are hard-coded; consider
        // externalizing them (args or a properties file) for real deployments.
        DBConfiguration.configureDB(
                conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://localhost:3306/mr",
                "root",
                "root"
        );

        // Create the job instance for this MR program
        Job job = Job.getInstance(conf, ReadDBDriver.class.getSimpleName());

        // Set the driver class so Hadoop can locate the containing jar
        job.setJarByClass(ReadDBDriver.class);

        // Set the mapper class
        job.setMapperClass(ReadDBMapper.class);

        // Set the final output key/value types (map output, since there is no reducer)
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        // Delete the output directory if it already exists; otherwise
        // FileOutputFormat rejects the job with FileAlreadyExistsException
        // on any rerun.
        Path outputPath = new Path("C://mysqlout");
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }
        FileOutputFormat.setOutputPath(job, outputPath);

        // This job needs no reduce phase, so set the number of reduce tasks to 0
        // (mapper output is written directly).
        job.setNumReduceTasks(0);

        // Use DBInputFormat as the input component
        job.setInputFormatClass(DBInputFormat.class);

        // Configure the record query and the row-count query DBInputFormat
        // uses to compute input splits.
        DBInputFormat.setInput(
                job,
                GoodsBean.class,
                "SELECT goodsId,goodsSn,goodsName,marketPrice,shopPrice,saleNum FROM itheima_goods ",
                "SELECT count(goodsId) FROM itheima_goods"
        );

        // Submit the job, wait for completion, and reflect success in the exit code
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }

}
