package cn.lsh.main;

import cn.lsh.conts.MyCounter;
import cn.lsh.mapper.PageRankMapper;
import cn.lsh.reducer.PageRankReducer;
import cn.lsh.writable.NetPage;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

/**
 * 网站pr计算公式：pr = (1-d)/n + d*sum(tr)
 * d：阻尼系数，为0.85
 * n：网站总数，这里即是记录条数
 * tr：网站得到其他网站的投票权值
 */
public class PageRankDriver {
	/** Convergence threshold: stop iterating once the average per-page PR delta drops below this. */
	private static final double LIMIT = 0.001;
	/** Damping factor d in pr = (1-d)/n + d*sum(tr). */
	private static final double D = 0.85;
	/**
	 * Divisor applied to the raw counter sum before comparing against {@link #LIMIT}.
	 * NOTE(review): presumably the reducer accumulates each page's PR delta scaled by
	 * this factor (Hadoop counters are longs, so fractional deltas must be scaled) —
	 * confirm against PageRankReducer before changing.
	 */
	private static final double COUNTER_SCALE = 4000.0;

	/**
	 * Iteratively runs the PageRank MapReduce job until the total PR delta converges
	 * (average delta below {@link #LIMIT}) or the expected input file is missing.
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration(true);
		// Cross-platform submission: required when the client submits from Windows; defaults to false.
		conf.set("mapreduce.app-submission.cross-platform", "true");
		// Execution platform: "local" for local runs, "yarn" for distributed (distributed runs require a jar).
		conf.set("mapreduce.framework.name", "local");

		// Original raw page data.
		String inputFile = "/test/pagerank/input/page_data.txt";
		FileSystem fs = FileSystem.get(conf);
		// Total number of pages (n in the formula) = number of lines in the input file.
		conf.setInt("pageNum", readFileLineNum(fs, inputFile));
		// Damping factor, read by the mapper/reducer.
		conf.setDouble("zuniNumber", D);

		int i = 0;
		while (true) {
			i++;
			try {
				// Custom config entry read inside the MapReduce code — effectively a job parameter.
				conf.setInt("runCount", i);

				// Key/value separator for the input format: the seed file uses a space,
				// subsequent iteration outputs use a tab (the MR output default).
				String splitStr = " ";
				if (i != 1) {
					inputFile = "/test/pagerank/output" + (i - 1) + "/part-r-00000";
					splitStr = "\t";
				}
				Path input = new Path(inputFile);
				if (!fs.exists(input)) {
					System.out.println("文件：" + inputFile + "不存在");
					break;
				}

				// KeyValueTextInputFormat splits each line at the FIRST occurrence of this
				// separator; default is tab.
				conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", splitStr);

				Job job = Job.getInstance(conf);
				job.setJobName("pagerank-" + i);
				job.setJarByClass(PageRankDriver.class);
				// To submit from local to a cluster, also configure yarn above and set
				// the full path of the built jar here:
				// job.setJar("/a/c/d.jar");

				// Input format: KeyValueTextInputFormat splits each line into key/value
				// at the configured separator.
				job.setInputFormatClass(KeyValueTextInputFormat.class);

				job.setMapperClass(PageRankMapper.class);
				job.setMapOutputKeyClass(Text.class);
				job.setMapOutputValueClass(NetPage.class);

				job.setReducerClass(PageRankReducer.class);

				FileInputFormat.addInputPath(job, input);
				Path output = new Path("/test/pagerank/output" + i);
				FileOutputFormat.setOutputPath(job, output);

				boolean flag = job.waitForCompletion(true);
				if (flag) {
					System.out.println("success.");
					// Read back the job counter: the (scaled) sum of all per-page PR deltas.
					long sum = job.getCounters().findCounter(MyCounter.MY).getValue();
					System.out.println("pr差值总和：" + sum);
					double avgd = sum / COUNTER_SCALE;
					if (avgd < LIMIT) {
						break;
					}
				}
			} catch (Exception e) {
				// Best effort: log and retry; if the job truly failed, the next iteration's
				// input file will not exist and the loop terminates via the exists() check.
				e.printStackTrace();
			}
		}
	}

	/**
	 * Counts the number of lines in an HDFS file.
	 *
	 * <p>Fixes two defects of the previous version: the stream/reader were never
	 * closed (resource leak), and EOF was detected via {@code reader.ready()},
	 * which only reports whether the next read is guaranteed not to block — not
	 * whether more data exists — so it could miscount. The reliable idiom is
	 * looping until {@code readLine()} returns {@code null}. The charset is now
	 * pinned to UTF-8 instead of the platform default.
	 *
	 * @param fs   file system to read from
	 * @param file path of the file whose lines are counted
	 * @return number of lines in the file
	 * @throws Exception if the file cannot be opened or read
	 */
	public static int readFileLineNum(FileSystem fs, String file) throws Exception {
		try (FSDataInputStream in = fs.open(new Path(file));
		     BufferedReader reader = new BufferedReader(
		             new InputStreamReader(in, StandardCharsets.UTF_8))) {
			int lines = 0;
			while (reader.readLine() != null) {
				lines++;
			}
			return lines;
		}
	}
}
