package com.gedi.data.InputFormate.DBInputFormate;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;


/**
 * @Author:RenPu
 * @Date: 2020/1/13 14:51
 * @Version: 1.0
 * @description: MapReduce driver that reads order records from the MySQL
 * table {@code t_order} via DBInputFormat and writes aggregated results
 * as text to a local output directory.
 */
public class OrderCompution {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {


        //创建配置类对象
        Configuration configuration = new Configuration();

        //设置数据的配置信息
        configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,"com.mysql.jdbc.Driver");
        configuration.set(DBConfiguration.URL_PROPERTY,"jdbc:mysql://localhost:3306/hadoop");
        configuration.set(DBConfiguration.USERNAME_PROPERTY,"root");
        configuration.set(DBConfiguration.PASSWORD_PROPERTY,"root");
        //初始化job对象以及设置任务名称
        Job job =  Job.getInstance(configuration, "order_task");

        job.setJarByClass(OrderCompution.class);

        //设置文件的输入格式以及输出格式
       job.setInputFormatClass(DBInputFormat.class);
       job.setOutputFormatClass(TextOutputFormat.class);

       //设置从数据库获取要展示的字段以及设置查询的表
       DBInputFormat.setInput(job,OrderWritable.class,"t_order",null,null,"order_id","total_money","create_time","user_id");
       TextOutputFormat.setOutputPath(job,new Path("file:///d:\\DBInput"));

       //设置mapper阶段和reduce阶段的输出的类对象的对象
        job.setMapperClass(OrderMapper.class);
        job.setReducerClass(OrderReduce.class);


        //设置输出k以及v的数据类型
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(DoubleWritable.class);

        job.waitForCompletion(true);








    }

}
