package com.ipinyou.mr2hbase;

import java.text.SimpleDateFormat;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class m2hbase extends Configured implements Tool {

	/**
	 * CLI entry point: delegates to {@link ToolRunner} so that generic Hadoop
	 * options (-D, -conf, -fs, ...) are parsed before {@link #run(String[])}.
	 *
	 * @param args command-line arguments; args[0] is the HDFS input path
	 * @throws Exception propagated from job setup/submission
	 */
	public static void main(String[] args) throws Exception {
		int res = ToolRunner.run(new m2hbase(), args);
		// Propagate the tool's status: the original discarded res, so the JVM
		// always exited 0 and schedulers could not detect a failed import.
		System.exit(res);
	}

	/**
	 * Writes each shuffled (rowkey, value) pair into HBase: the reduce key
	 * becomes the row key, and the text before the first tab of each value is
	 * stored under column family "f1", qualifier "w".
	 */
	static class BatchImportReducer extends TableReducer<Text, Text, NullWritable> {

		@Override
		protected void reduce(Text key, java.lang.Iterable<Text> values, Context context)
				throws java.io.IOException, InterruptedException {
			byte[] rowKey = Bytes.toBytes(key.toString());
			for (Text value : values) {
				// Keep only the portion of the value up to the first tab
				// (the whole value when it contains no tab).
				String line = value.toString();
				int tab = line.indexOf('\t');
				String firstField = (tab < 0) ? line : line.substring(0, tab);

				Put put = new Put(rowKey);
				put.add(Bytes.toBytes("f1"), Bytes.toBytes("w"), Bytes.toBytes(firstField));
				context.write(NullWritable.get(), put);
			}
		}
	}

	/**
	 * Parses one tab-separated input line and emits
	 * key = "PP|field1|pyid" (PP = zero-padded hash bucket of pyid in [00, 09])
	 * and value = field3. Malformed lines are counted and skipped.
	 */
	static class BatchImportMapper extends Mapper<LongWritable, Text, Text, Text> {
		// NOTE(review): unused field kept for compatibility in case it is
		// referenced elsewhere; SimpleDateFormat is also not thread-safe, so
		// replace with a DateTimeFormatter if it is ever put to use.
		SimpleDateFormat dateformat1 = new SimpleDateFormat("yyyyMMddHHmmss");
		// Reused output holders to avoid per-record allocation.
		Text v1 = new Text();
		Text v2 = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws java.io.IOException, InterruptedException {
			String[] splited = value.toString().split("\t");
			try {
				// Bucket rows by pyid hash into 10 partitions used as a rowkey prefix.
				String pyid = splited[0];
				// Math.floorMod keeps the bucket in [0, 10) for negative hash codes too.
				// The original py_hash % 10 could yield -9..-1, which %02d formats as
				// e.g. "-3", silently spreading rows across 19 distinct prefixes.
				int py_partition = Math.floorMod(pyid.hashCode(), 10);
				String py_par = String.format("%02d", py_partition);

				v1.set(py_par + "|" + splited[1] + "|" + splited[0]);
				v2.set(splited[3]);

				context.write(v1, v2);
			} catch (NumberFormatException | ArrayIndexOutOfBoundsException e) {
				// A line with fewer than 4 tab-separated fields previously threw an
				// uncaught ArrayIndexOutOfBoundsException and failed the whole task;
				// count it and move on instead, as the counter's name suggests.
				Counter counter = context.getCounter("BatchImport", "ErrorFormat");
				counter.increment(1L);
				System.out.println("出错了" + splited[0] + " " + e.getMessage());
			}
		}
	}

	/**
	 * Configures and submits the import job: text input from arg0[0], map to
	 * (rowkey, value), reduce into the HBase table "mprofile" via
	 * {@link TableOutputFormat}.
	 *
	 * @param arg0 arg0[0] is the HDFS input path (file or directory)
	 * @return 0 on success, 1 if the job failed, 2 on missing argument
	 * @throws Exception propagated from job configuration/submission
	 */
	@Override
	public int run(String[] arg0) throws Exception {
		// Guard against a missing argument; the original crashed with an
		// ArrayIndexOutOfBoundsException here.
		if (arg0.length < 1) {
			System.err.println("Usage: m2hbase <input-path>");
			return 2;
		}
		String input = arg0[0];

		Configuration configuration = getConf();

		configuration.set("mapreduce.job.queuename", "normal");
		configuration.set("mapreduce.job.reduces", "50");
		// Target HBase table name.
		configuration.set(TableOutputFormat.OUTPUT_TABLE, "mprofile");
		// Raise the DFS socket timeout so HBase writes do not time out.
		configuration.set("dfs.socket.timeout", "180000");

		Job job = Job.getInstance(configuration);
		TableMapReduceUtil.addDependencyJars(job);
		job.setJarByClass(m2hbase.class);

		job.setMapperClass(BatchImportMapper.class);
		job.setReducerClass(BatchImportReducer.class);
		// Only the map output types are set; TableOutputFormat handles the
		// reduce output (NullWritable/Put).
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);

		job.setInputFormatClass(TextInputFormat.class);
		// No output path: output goes to HBase through TableOutputFormat.
		job.setOutputFormatClass(TableOutputFormat.class);

		FileInputFormat.setInputPaths(job, input);

		// Propagate job failure to the caller: the original ignored this
		// boolean and always returned 0, so failed imports looked successful.
		return job.waitForCompletion(true) ? 0 : 1;
	}

}
