package main.java.sales;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * SalesMain
 *
 * @author zhangyimin
 * @version 1.0
 * @date 2018-11-07 上午10:08
 */
public class SalesMain {

    /**
     * Driver entry point: configures and submits the sales MapReduce job
     * (Text key / DoubleWritable value for both map and reduce output).
     *
     * @param args optional: {@code args[0]} = input path, {@code args[1]} = output path;
     *             falls back to the original hard-coded HDFS paths when absent
     * @throws Exception if job configuration or submission fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the job; setJarByClass lets Hadoop locate the jar to ship.
        Job job = Job.getInstance(new Configuration());
        job.setJarByClass(SalesMain.class);

        // 2. Mapper and its intermediate key/value types.
        job.setMapperClass(SalesMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(DoubleWritable.class);

        // 3. Reducer and the final output key/value types.
        job.setReducerClass(SalesReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);

        // 4. Input/output paths: prefer command-line args; keep the original
        //    hard-coded HDFS locations as defaults for backward compatibility.
        String input = args.length > 0 ? args[0]
                : "hdfs://10.16.7.36:9000/data/input/sales/sales.txt";
        String output = args.length > 1 ? args[1]
                // Random suffix avoids "output directory already exists" on reruns.
                : "hdfs://10.16.7.36:9000/data/output/sales/out_" + Math.random() * 1000;
        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        // 5. Submit and wait; propagate job success/failure as the process
        //    exit code so shell scripts and schedulers can detect failures.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
