// NOTE(review): this whole file is commented-out dead code. Prefer deleting it and
// relying on version-control history instead of keeping a disabled copy in the tree.
//package com.cn.hadoop;
//
//import org.apache.hadoop.conf.Configuration;
//import org.apache.hadoop.fs.Path;
//import org.apache.hadoop.io.IntWritable;
//import org.apache.hadoop.io.Text;
//import org.apache.hadoop.mapreduce.Job;
//import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
//import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
//
//import java.io.IOException;
//
//public class CountWord {
//    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
//        Configuration conf = new Configuration(true);
//// NOTE(review): Job.getInstance(conf) copies the Configuration at creation time,
//// so the addResource(...) calls below are applied too late to affect this job.
//// If this code is ever restored, call conf.addResource(...) BEFORE Job.getInstance.
//        Job job = Job.getInstance(conf);
//        conf.addResource("hdfs-site.xml");
//        conf.addResource("core-site.xml");
////        conf.set("mapreduce.app-submission.cross-platform", "true");
////        conf.set("mapreduce.framework.name", "local");
////        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
//        job.setJarByClass(CountWord.class);
//        job.setMapOutputKeyClass(Text.class);
//        job.setMapOutputValueClass(IntWritable.class);
//        job.setJobName("测试啊");
//        job.setMapperClass(MyMapper.class);
//        job.setReducerClass(MyReducer.class);
//        Path path = new Path("/user/hadooptest.txt");
//        Path path1 = new Path("/user/wordcount");
//        if (path1.getFileSystem(conf).exists(path1)) {
//            path1.getFileSystem(conf).delete(path1, true);
//        }
//        FileInputFormat.addInputPath(job, path);
//        FileOutputFormat.setOutputPath(job, path1);
//        job.waitForCompletion(true);
//    }
//}
