package com.hdaccp.cn.card;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;


/**
 * 查找缺失的扑克牌
 * 
 * @author hduser
 * 
 */
/**
 * Finds the missing playing card: a MapReduce job counts how many times each
 * card appears in the input, writes the per-card totals into an HBase table,
 * then scans that table for the card whose count is 99 — the card missing
 * from one of the 100 decks.
 */
public class SearchCard {

	/**
	 * Emits {@code (suit+rank, 1)} for every card line of the input.
	 * Lines with fewer than two tokens are skipped instead of failing the task.
	 */
	private static class CardMapper extends
			Mapper<LongWritable, Text, Text, IntWritable> {

		// Reuse writables across records — standard Hadoop idiom to avoid
		// one allocation per input line.
		private static final IntWritable ONE = new IntWritable(1);
		private final Text card = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			StringTokenizer st = new StringTokenizer(value.toString());
			if (st.countTokens() < 2) {
				// Malformed line: skip it rather than throw NoSuchElementException.
				return;
			}
			card.set(st.nextToken() + "+" + st.nextToken());
			context.write(card, ONE);
		}
	}

	/**
	 * Sums the counts for each card and stores the total in HBase:
	 * row key = card name, column {@code result:count} = occurrences.
	 */
	private static class IntNumReducer extends
			TableReducer<Text, IntWritable, Text> {

		@Override
		protected void reduce(Text key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			int count = 0;
			for (IntWritable n : values) {
				count += n.get();
			}
			// Write the total into the table.
			Put put = new Put(Bytes.toBytes(key.toString()));
			put.addColumn(Bytes.toBytes("result"), Bytes.toBytes("count"),
					Bytes.toBytes(count));
			context.write(key, put);
		}
	}

	static Configuration conf = HBaseConfiguration.create();

	/**
	 * (Re)creates an HBase table: drops it if it already exists, then creates
	 * it with the given column families.
	 *
	 * @param tableName   name of the table to create
	 * @param familyNames column families to add to the table
	 * @throws IOException on any HBase communication failure
	 */
	public static void create(String tableName, String... familyNames)
			throws IOException {
		// try-with-resources guarantees Connection and Admin are closed even
		// when an operation throws (the original leaked both on error).
		try (Connection conn = ConnectionFactory.createConnection(conf);
				Admin admin = conn.getAdmin()) {
			// TableName wraps the table name as a POJO.
			TableName tn = TableName.valueOf(tableName);
			if (admin.tableExists(tn)) { // drop an existing table first
				admin.disableTable(tn); // must be disabled before deletion
				admin.deleteTable(tn);
			}
			// HTableDescriptor holds the table name and its column families.
			HTableDescriptor htd = new HTableDescriptor(tn);
			for (String family : familyNames) {
				htd.addFamily(new HColumnDescriptor(family));
			}
			admin.createTable(htd);
		}
		System.out.println("create success!");
	}

	/**
	 * Prints the rows whose {@code result:count} value equals 99
	 * (i.e. the missing card).
	 *
	 * @param tableName table to scan
	 * @throws IOException on any HBase communication failure
	 */
	static void get(String tableName) throws IOException {
		// Connection, Table and ResultScanner are all AutoCloseable; the
		// original never closed the scanner or table and leaked the
		// connection on error.
		try (Connection conn = ConnectionFactory.createConnection(conf);
				Table tb = conn.getTable(TableName.valueOf(tableName))) {
			Scan scan = new Scan();
			// Only keep rows where result:count == 99.
			scan.setFilter(new SingleColumnValueFilter(
					Bytes.toBytes("result"),
					Bytes.toBytes("count"),
					CompareOp.EQUAL,
					Bytes.toBytes(99)));
			try (ResultScanner rs = tb.getScanner(scan)) {
				for (Result row : rs) {
					System.out.println(String.format(
							"result.value=%s,result.toString():%s",
							Bytes.toInt(row.value()), row));
				}
			}
		}
	}

	public static void main(String[] args) throws IOException,
			ClassNotFoundException, InterruptedException {
		// The input path may be overridden on the command line; the original
		// hard-coded default is preserved for backward compatibility.
		String inputPath = args.length > 0 ? args[0] : "/input4/ch06";
		String tbname = "cards";
		create(tbname, "result"); // (re)create the output table

		Job job = Job.getInstance(conf, "search card");
		// Basic MapReduce job configuration.
		job.setJarByClass(SearchCard.class);
		job.setNumReduceTasks(1);
		job.setMapperClass(CardMapper.class);

		// The map output key/value classes must be set explicitly because
		// initTableReducerJob adjusts the job's output types.
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(IntWritable.class);

		FileInputFormat.addInputPath(job, new Path(inputPath));

		TableMapReduceUtil.initTableReducerJob(tbname, IntNumReducer.class,
				job, null, null, null, null, false);
		// The counts from the 100 decks are written into the "cards" table.
		if (job.waitForCompletion(true)) {
			// Report the missing card (count == 99).
			get(tbname);
		}
	}
}
