package org.weishe.pagerank.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class PageRankHbase {

	/**
	 * Exports the HBase table "t_people" into HDFS (output0) and computes the
	 * total record count via the mapper/reducer's "PeopleRank:sum" counter.
	 *
	 * @return total number of records, read from the "sum" counter of the
	 *         "PeopleRank" counter group
	 * @throws Exception if the job cannot be submitted, or if it completes
	 *             unsuccessfully (counters of a failed job are meaningless)
	 */
	private static long preRun() throws Exception {
		String outputPath = "hdfs://192.168.15.11:9000/pagerank2/output0";
		Configuration config = HBaseConfiguration.create();
		// Ship the job jar explicitly (driver is run from an IDE / local machine).
		config.set("mapred.jar", "/Users/bill/Desktop/peoplerank.jar");

		String zk_list = "hadoop,hadoop11,hadoop12";
		config.set("hbase.zookeeper.quorum", zk_list);

		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(config, "PageRankHbase pre run");
		job.setJarByClass(PageRankHbase.class);

		Scan scan = new Scan();
		scan.setCaching(500); // default of 1 means one RPC per row — far too slow for MR scans
		scan.setCacheBlocks(false); // don't pollute the region server block cache in MR jobs

		TableMapReduceUtil.initTableMapperJob("t_people", scan, PreMapper.class, Text.class, Text.class, job);
		job.setReducerClass(PreReducer.class);

		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		boolean f = job.waitForCompletion(true);
		if (!f) {
			// Fail fast: returning a counter value from a failed job would yield 0
			// and silently corrupt the whole computation downstream.
			throw new IllegalStateException("PageRankHbase pre job failed");
		}
		System.out.println("pre job 成功执行");

		Counters l = job.getCounters();
		CounterGroup cg = l.getGroup("PeopleRank");
		Counter c = cg.findCounter("sum");
		return c.getValue();
	}

	/**
	 * Runs one PageRank iteration over the HDFS output of the previous step,
	 * writing its result to the next output directory.
	 *
	 * @param step current iteration number; reads "output{step}" and writes
	 *            "output{step + 1}"
	 * @param count total number of records (passed to tasks via the
	 *            "sumcount" configuration key)
	 * @return number of records that have already converged, read from the
	 *         "PeopleRank:finish" counter
	 * @throws Exception if the job cannot be submitted, or if it completes
	 *             unsuccessfully (counters of a failed job are meaningless)
	 */
	private static long run(int step, long count) throws Exception {

		String inputPath = "hdfs://192.168.15.11:9000/pagerank2/output" + (step);
		String outputPath = "hdfs://192.168.15.11:9000/pagerank2/output" + (step + 1);

		// 1. Configuration: ship the job jar and expose the total count to the tasks.
		Configuration conf = new Configuration();
		conf.set("mapred.jar", "/Users/bill/Desktop/peoplerank.jar");
		conf.set("sumcount", count + "");

		// 2. Job setup.
		Job job = Job.getInstance(conf);
		job.setJarByClass(PageRankHbase.class);
		job.setJobName("PeopleRank step:" + step);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);

		job.setMapperClass(PRMapper.class);
		job.setReducerClass(PRReducer.class);

		// Previous step's output is "key<TAB>value" text, so KeyValueTextInputFormat.
		job.setInputFormatClass(KeyValueTextInputFormat.class);

		FileInputFormat.setInputPaths(job, new Path(inputPath));
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		boolean f = job.waitForCompletion(true);
		if (!f) {
			// Fail fast: a failed iteration would otherwise report 0 converged
			// records and the driver loop would keep iterating on bad data.
			throw new IllegalStateException("PeopleRank step:" + step + " job failed");
		}

		Counters l = job.getCounters();
		CounterGroup cg = l.getGroup("PeopleRank");
		Counter c = cg.findCounter("finish");
		long v = c.getValue();
		System.out.println("step:" + step + " job 成功执行  Counter:" + v);
		return v;
	}

	/**
	 * Loads the final iteration's HDFS output into the HBase table
	 * "t_people_rank".
	 *
	 * @param inputPath HDFS directory produced by the last completed iteration
	 * @throws Exception if the job cannot be submitted, or if it completes
	 *             unsuccessfully
	 */
	private static void moveDataIntoHbase(String inputPath) throws Exception {

		Configuration config = HBaseConfiguration.create();

		String zk_list = "hadoop,hadoop11,hadoop12";
		config.set("hbase.zookeeper.quorum", zk_list);

		// Ship the job jar explicitly (driver is run from an IDE / local machine).
		config.set("mapred.jar", "/Users/bill/Desktop/peoplerank.jar");

		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(config, "PageRankHbase moveDataIntoHbase");
		job.setJarByClass(PageRankHbase.class);

		job.setMapOutputKeyClass(DoubleWritable.class);
		job.setMapOutputValueClass(Text.class);
		job.setMapperClass(EndMapper.class);

		// Reducer writes Puts into the target HBase table.
		TableMapReduceUtil.initTableReducerJob("t_people_rank", // output table
				EndReducer.class, // reducer class
				job);

		// Iteration output is "key<TAB>value" text, so KeyValueTextInputFormat.
		job.setInputFormatClass(KeyValueTextInputFormat.class);

		FileInputFormat.setInputPaths(job, new Path(inputPath));

		boolean f = job.waitForCompletion(true);
		if (!f) {
			// Previously a failure here was silently ignored, leaving the HBase
			// table partially populated with no indication of the problem.
			throw new IllegalStateException("moveDataIntoHbase job failed");
		}
		System.out.println("end   job 成功执行 ");
	}

	/**
	 * Driver: export HBase data to HDFS, iterate PageRank until every record
	 * converges (or the 300-iteration cap is hit), then load the final result
	 * back into HBase.
	 */
	public static void main(String[] args) throws Exception {
		// Export HBase data to HDFS in the expected format and compute the total count.
		long count = preRun();

		System.out.println("count:" + count);

		int step = 0;
		for (int i = 0; i < 300; i++) {
			// Bug fix: track the last executed iteration unconditionally. The old
			// code only set step inside the convergence branch, so if the loop
			// exhausted all 300 iterations without converging, step stayed 0 and
			// the final load read output1 instead of the last produced directory.
			step = i;

			long v = run(i, count);

			// Converged once every record meets the iteration criterion.
			if (v == count) {
				break;
			}
		}
		// Iteration `step` wrote its result to output{step + 1}.
		String inputPath = "hdfs://192.168.15.11:9000/pagerank2/output" + (step + 1);

		// Load the final iteration's data into HBase.
		moveDataIntoHbase(inputPath);
	}
}
