package PageRankTest;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class DriTest {
    /**
     * Driver for an iterative PageRank computation on Hadoop MapReduce.
     *
     * <p>Each iteration submits one job. Iteration 1 reads the seed input
     * directory; iteration N&gt;1 reads the output of iteration N-1 and writes
     * to {@code outputN}. The loop stops once the reducer-side convergence
     * counter {@code Count.count} reaches 4, or aborts if a job fails.
     *
     * @param args unused command-line arguments
     * @throws Exception if job setup or filesystem access fails
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        int runCount = 0;
        while (true) {
            runCount++;
            // Expose the current iteration number to the mapper/reducer via the job config.
            conf.setInt("runCount", runCount);
            Job job = Job.getInstance(conf);
            job.setJarByClass(DriTest.class);
            job.setMapperClass(MapTest.class);
            job.setReducerClass(RedTest.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            // KeyValueTextInputFormat splits each input line on the first tab:
            // text before the tab becomes the key, everything after it the value.
            job.setInputFormatClass(KeyValueTextInputFormat.class);
            // First iteration reads the seed data; later iterations chain off
            // the previous iteration's output directory.
            Path in = (runCount == 1)
                    ? new Path("D:\\MP\\PageRank\\input")
                    : new Path("D:\\MP\\PageRank\\output" + (runCount - 1));
            FileInputFormat.setInputPaths(job, in);
            Path out = new Path("D:\\MP\\PageRank\\output" + runCount);
            // Delete a stale output directory so the job does not fail on resubmission
            // (Hadoop refuses to write into an existing output path).
            if (out.getFileSystem(conf).exists(out)) {
                out.getFileSystem(conf).delete(out, true);
            }
            FileOutputFormat.setOutputPath(job, out);
            boolean succeeded = job.waitForCompletion(true);
            if (!succeeded) {
                // BUG FIX: the original looped forever on failure, resubmitting the
                // job endlessly. Abort with a non-zero exit code instead.
                System.err.println("PageRank iteration " + runCount + " failed; aborting.");
                System.exit(1);
            }
            // NOTE(review): Count.count appears to be a static field incremented by the
            // reducer; that only works in local (single-JVM) mode. On a real cluster the
            // reducer runs in a separate JVM — use Hadoop job counters instead. Verify.
            if (Count.count >= 4) {
                break;
            }
        }
    }
}
