package hadoop.mr07;


import hadoop.unit.GlobalConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;



public class DbDriver {
    private static Configuration conf = GlobalConfiguration.getCon();

    /**
     * Job entry point: reads rows from a MySQL table through {@code DBInputFormat}
     * and dumps them to the local filesystem as text. This is a map-only job —
     * no reducer is configured.
     */
    public static void main(String[] args) throws Exception {
        // JDBC connection settings used by DBInputFormat for this job.
        // NOTE(review): driver class, URL and credentials are hard-coded here —
        // consider moving them to external configuration.
        DBConfiguration.configureDB(
                conf,
                "com.mysql.jdbc.Driver",
                "jdbc:mysql://192.168.150.101:3306/use_hadoop?useSSL=false",
                "root",
                "123456"
        );

        Job job = Job.getInstance(conf, "DdRead_Job");
        job.setJarByClass(DbDriver.class);
        job.setMapperClass(DbMapper.class);

        // Reading from the DB and saving locally needs no reduce phase,
        // so the reducer count is set to zero (map-only job).
        job.setNumReduceTasks(0);

        // With zero reducers, the mapper output IS the job's final output,
        // so the output key/value classes describe the map output.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // Pull input records through JDBC rather than from files.
        job.setInputFormatClass(DBInputFormat.class);
        DBInputFormat.setInput(
                job,
                stuTable.class,                   // value class implementing DBWritable
                "select * from stu_01",           // row query
                "select count(*) from stu_01"     // row-count query (used for input splits)
        );

        // Write the results to the local filesystem.
        FileOutputFormat.setOutputPath(job, new Path("file:///home/local/db_data"));

        // Exit status: 0 on success, 2 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 2);
    }
}

