package com;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.NavigableMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class PageRank {

	// Previous iteration's PageRank score per URL, used for the convergence check.
	// NOTE(review): static state is only visible within one JVM — this works in
	// local mode, but in a distributed run reducer tasks would not share these
	// statics with the driver. TODO confirm intended deployment.
	private static HashMap<String, Float> map = new HashMap<String, Float>();
	// Set true by the reducer when some score moved more than the threshold,
	// i.e. another iteration is required.
	private static boolean next = true;
	// True until the reducer has a previous score to compare against.
	private static boolean first = true;
	// Shared HBase configuration, created once in the static initializer below.
	private static Configuration config = null;
	/**
	 * Initializes the HBase configuration.
	 */
	static {
		config = HBaseConfiguration.create();
	}

	/**
	 * Iteratively runs the PageRank MapReduce job over HBase tables until the
	 * scores converge (i.e. the reducer no longer sets {@code next}).
	 *
	 * Iteration {@code i} reads from table {@code "crawldb_webpage"} when
	 * {@code i == 0}, otherwise from {@code i + "crawldb_webpage"}, and writes
	 * to {@code (i + 1) + "crawldb_webpage"}, which is (re)created first.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {

		try {
			// first || next: keep iterating until a full pass produces no
			// score change above the threshold (see MyTableReducer).
			for (int i = 0; first || next; i++) {
				next = false;
				// Input table for this pass: the previous pass's output.
				String tableName = i == 0 ? "crawldb_webpage" : i + "crawldb_webpage";
				String[] familys = { "ol", "score" };
				// (Re)create the output table before the job runs.
				createTable((i + 1) + "crawldb_webpage", familys);

				Job job = Job.getInstance(config, "pageRank");
				job.setJarByClass(PageRank.class); // jar containing mapper and reducer

				Scan scan = new Scan();
				scan.setCaching(500); // the default of 1 is bad for MapReduce jobs
				scan.setCacheBlocks(false); // don't set to true for MR jobs
				scan.setBatch(1000);

				TableMapReduceUtil.initTableMapperJob(
						tableName, // input table
						scan, // Scan instance to control CF and attribute selection
						MyMapper.class, // mapper class
						Text.class, // mapper output key
						Text.class, // mapper output value
						job);
				TableMapReduceUtil.initTableReducerJob(
						(i + 1) + "crawldb_webpage", // output table
						MyTableReducer.class, // reducer class
						job);
				job.setNumReduceTasks(1); // single reducer so the convergence state sees every key

				// Fail loudly instead of silently continuing after a broken pass.
				if (!job.waitForCompletion(true)) {
					throw new IOException("error with job!");
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Drops (if it exists) and recreates an HBase table with the given column
	 * families.
	 *
	 * @param tableName name of the table to (re)create
	 * @param familys   column family names to add to the new table
	 * @throws Exception if any HBase admin operation fails
	 */
	public static void createTable(String tableName, String familys[])
			throws Exception {

		HBaseAdmin admin = new HBaseAdmin(config); // HBase client admin utility
		try {
			// An existing table must be disabled before it can be deleted.
			if (admin.tableExists(tableName)) {
				admin.disableTable(tableName);
				admin.deleteTable(tableName);
				System.out.println("delete table " + tableName + " ok.");
			}
			HTableDescriptor tableDesc = new HTableDescriptor(
					TableName.valueOf(tableName));
			for (String family : familys) {
				tableDesc.addFamily(new HColumnDescriptor(family));
			}
			admin.createTable(tableDesc);
			System.out.println("create table " + tableName + " ok.");
		} finally {
			// Close the admin connection even when an operation above throws.
			admin.close();
		}
	}

	/**
	 * Mapper: distributes each page's current PageRank evenly over its
	 * outlinks.
	 *
	 * Emits two kinds of records, which the reducer distinguishes by whether
	 * the value contains a "/":
	 *   (sourceUrl, targetUrl) — preserves the link structure, and
	 *   (targetUrl, share)     — a PageRank contribution for the target.
	 */
	public static class MyMapper extends TableMapper<Text, Text> {

		public void map(ImmutableBytesWritable row, Result value,
				Context context) throws IOException, InterruptedException {
			// Outlinks of this page: one "ol" qualifier per target URL.
			NavigableMap<byte[], byte[]> ol = value.getFamilyMap(Bytes
					.toBytes("ol"));
			int olCount = ol.entrySet().size();
			if (olCount == 0)
				return; // dangling page: nothing to distribute
			NavigableMap<byte[], byte[]> score = value.getFamilyMap(Bytes
					.toBytes("score"));
			// Current PageRank of this page. Local (not a field) so a row
			// with no stored score falls back to 1 instead of silently
			// reusing the previous row's value.
			float pr = 1;
			for (Entry<byte[], byte[]> i : score.entrySet()) {
				pr = Float.parseFloat(Bytes.toString(i.getValue()));
			}
			float average_pr = pr / olCount;
			String url = Bytes.toString(row.get());
			if (url.indexOf("http") != 0) {
				// Row key is in host-reversed form ("com.example:http/path");
				// rebuild the plain URL ("http://example.com/path").
				String hostZA = url.substring(0, url.indexOf(":"));
				url = url.substring(url.indexOf(":") + 1);
				// Drop a trailing "/" if present.
				int end = url.lastIndexOf("/") == url.length() - 1 ? url
						.length() - 1 : url.length();
				String path = url.substring(url.indexOf("/"), end);
				url = url.substring(0, url.indexOf("/")) + "://";
				String[] hosts = hostZA.split("\\.");
				for (int i = hosts.length - 1; i > 0; i--) {
					url += (hosts[i] + ".");
				}
				url += (hosts[0] + path);
			}
			for (Entry<byte[], byte[]> i : ol.entrySet()) {
				String url2 = Bytes.toString(i.getKey());
				// Normalize the target by dropping a trailing "/".
				int end = url2.lastIndexOf("/") == url2.length() - 1 ? url2
						.length() - 1 : url2.length();
				url2 = url2.substring(0, end);
				context.write(new Text(url), new Text(url2)); // link edge
				context.write(new Text(url2),
						new Text(String.valueOf(average_pr))); // rank share
			}
		}
	}

	/**
	 * Reducer: sums the incoming PageRank shares for a URL, applies the
	 * damping factor, and writes the new score plus the preserved outlinks
	 * to the output table.
	 *
	 * Incoming values are either a numeric PageRank share or an outlink URL;
	 * they are told apart by the presence of "/" (see MyMapper).
	 */
	public static class MyTableReducer extends
			TableReducer<Text, Text, ImmutableBytesWritable> {
		public void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			String url = key.toString();
			float sum = 0;
			Put put = new Put(Bytes.toBytes(url));
			for (Text val : values) {
				String dw = val.toString();
				if (!dw.contains("/")) {
					// A numeric PageRank share: accumulate it.
					sum += Float.parseFloat(dw);
				} else {
					// An outlink: copy the edge into the output table so the
					// next iteration's mapper can read it from the qualifier.
					put.add(Bytes.toBytes("ol"), Bytes.toBytes(dw),
							Bytes.toBytes(0));
				}
			}
			// Standard PageRank damping: PR = 0.15 + 0.85 * sum(shares).
			sum = sum * 0.85f + 0.15f;
			put.add(Bytes.toBytes("score"), Bytes.toBytes(0),
					Bytes.toBytes(Float.toString(sum)));
			context.write(null, put);

			// Convergence bookkeeping. NOTE(review): relies on JVM-local
			// statics — only valid when driver and reducer share one JVM.
			Float previous = map.get(url);
			if (previous != null) {
				first = false;
				// Request another pass while any score still moves by > 10.
				if (Math.abs(previous - sum) > 10) {
					next = true;
					System.out.println("go on!");
				}
			}
			map.put(url, sum);
		}
	}
}