package demo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Map-only MapReduce driver that runs {@code DataProcessingMapper} over an
 * HDFS input file and writes the mapper output (Text key, no value) to an
 * HDFS output directory.
 *
 * <p>Expected arguments: {@code args[0]} = input path, {@code args[1]} =
 * output path (deleted first if it exists), {@code args[2]} = field
 * separator, exposed to the mapper via the {@code "SPLITTER"} conf key.
 */
public class DataProcessing extends Configured implements Tool {

    /**
     * Configures and submits the map-only job, blocking until completion.
     *
     * @param args input path, output path, and splitter string (in that order)
     * @return 0 if the job succeeded, 1 if it failed (standard Tool exit-code
     *         convention: non-zero means failure)
     * @throws Exception if job submission or HDFS access fails
     */
    @Override
    public int run(String[] args) throws Exception {
        // Fail fast with a usage message instead of an opaque
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args == null || args.length < 3) {
            System.err.println("Usage: DataProcessing <input path> <output path> <splitter>");
            return 1;
        }

        Configuration conf = getMyConfiguration();
        // The mapper reads this key to split input lines into fields.
        conf.set("SPLITTER", args[2]);
        Job job = Job.getInstance(conf, "dataprocessing");

        job.setJarByClass(DataProcessing.class);

        // Map-only job: no reducers, so mapper output types are the job's
        // final output types.
        job.setMapperClass(DataProcessingMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setNumReduceTasks(0);

        FileInputFormat.addInputPath(job, new Path(args[0]));

        // The job fails if the output directory already exists, so remove it
        // up front (recursive delete; a no-op if the path is absent).
        FileSystem.get(conf).delete(new Path(args[1]), true);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Fix: previously returned -1 on success and 1 on failure, which
        // reported every successful run as a failure to ToolRunner/shell.
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) {
        String[] myArgs = {
                "/movie/gender_genre/part-r-00000",
                "/movie/processing_out",
                ","
        };
        try {
            int exitCode = ToolRunner.run(getMyConfiguration(), new DataProcessing(), myArgs);
            // Propagate the job result so callers (shell scripts, schedulers)
            // can detect failure; previously the JVM always exited 0.
            System.exit(exitCode);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Builds the shared client-side {@link Configuration} used both to submit
     * the job and to talk to HDFS.
     *
     * @return a fresh Configuration pointing at the cluster's NameNode
     */
    public static Configuration getMyConfiguration() {
        Configuration conf = new Configuration();
        // Disable the FileSystem cache so each FileSystem.get() returns a new
        // instance (avoids sharing a possibly-closed cached handle).
        conf.setBoolean("fs.hdfs.impl.disable.cache", true);

        // Required when submitting from Windows to a Linux cluster.
        conf.setBoolean("mapreduce.app-submission.cross-platform", true);
        // DFS client socket timeout: 10 minutes (in milliseconds).
        conf.set("dfs.client.socket-timeout", String.valueOf(10 * 60 * 1000));
        conf.set("fs.defaultFS", "hdfs://192.40.10.130:9000");

        // **mapred-site.xml**
        // Must NOT be set when testing from a Windows dev environment
        // (leave the framework as "local" instead of "yarn").
//        conf.set("mapreduce.framework.name","yarn");

//        conf.set("mapreduce.job.jar", JarUtil.jar(DataProcessing.class));
        return conf;
    }
}
