package com.bj58.data.hadoop.info.job;

import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.hsqldb.Result;

import com.bj58.data.hadoop.info.dimension.DimensionParser;
import com.bj58.data.hadoop.info.dimension.IDimension;
import com.bj58.data.hadoop.info.dimension.param;
import com.bj58.data.hadoop.info.entry.InfoEntry;
import com.bj58.data.hadoop.info.tools.InfoString;

// MapReduce job that aggregates posting ("info") records read from HDFS by the
// dimension policies declared in stat_config.conf, emitting per-row-key record
// counts ("icount") and distinct-user counts ("ucount") as text lines.
public class ZP_InfoJobHDFS {

	// NOTE(review): ONE appears unused in this HDFS variant (the mapper emits
	// textOne instead) — confirm before removing.
	public static final IntWritable ONE = new IntWritable(1);
	// Shared constant "1" value written by the mapper for every counted record.
	public static final Text textOne = new Text("1");
	private static final int REDUCE_TASK_NUM = 60;
	private static final String JOB_NAME = "ZP_InfoJob";
	// Row-key prefix for the global rollup (the mapper hardcodes "A_" — keep in sync).
	public static final String ALL = "A";
	// NOTE(review): the HBase constants below (table / column family / qualifiers)
	// are unused here — presumably retained from an HBase sibling of this job
	// (see the HBase imports at the top of the file); verify before deleting.
	private static final String TABLE_NAME = "FourthInfo";
	private static final String FC_INFOCOUNT = "cf";
	private static final String INFOCOUNT = "InfoCount";
	private static final String USERCOUNT = "UserCount";
	
	/**
	 * Maps one raw info record to ("I" + rowKey, "1") and ("U" + rowKey, userID)
	 * pairs for every dimension policy the record matches, plus a global "A_"
	 * rollup pair. The leading I/U marker tells {@code InfoReducer} whether to
	 * count records or distinct users for that row key.
	 */
	public static class InfoMapper extends Mapper<LongWritable, Text, Text, Text> {
		// Dimension policies, loaded once per JVM from the job's config file.
		public static List<List<IDimension>> allCareList = null;
		static
		{
			try {
				allCareList = DimensionParser.readConfig("stat_config.conf");
			} catch (Exception e) {
				// Load failure leaves allCareList null; map() then throws and each
				// record is skipped by the catch below. Logged so the task output
				// shows the root cause.
				e.printStackTrace();
			}
		}
		
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			try
			{
				String[] info = InfoString.getInfoArray(value);
				InfoEntry infoEntry = new InfoEntry(info);
				for (List<IDimension> onePolicy : allCareList) {
					// Row-key expansions contributed by a "param" dimension, if any.
					Set<String> paramKeys = null;
					// Explicit match flag instead of the old "" sentinel rowKey.
					boolean matched = true;
					StringBuilder rowKeyBuilder = new StringBuilder();
					for (IDimension dimOne : onePolicy) {
						if (!dimOne.inCareValues(infoEntry)) {
							matched = false;
							break;
						}
						// instanceof instead of comparing fully-qualified class-name strings.
						if (dimOne instanceof param) {
							paramKeys = ((param) dimOne).getRowKeys(infoEntry);
						}
						rowKeyBuilder.append(dimOne.getRowKey(infoEntry)).append('_');
					}
					if (matched && rowKeyBuilder.length() > 1) {
						// Row key ends with the record's insert date.
						String rowKey = rowKeyBuilder.append(infoEntry.getInsertDate()).toString();
						if (paramKeys != null) {
							// Emit one I/U pair per expansion of the {param} placeholder.
							for (String paramKey : paramKeys) {
								String expanded = rowKey.replace("{param}", paramKey);
								context.write(new Text("I" + expanded), textOne);
								context.write(new Text("U" + expanded), new Text(infoEntry.getUserID()));
							}
						} else {
							context.write(new Text("I" + rowKey), textOne);
							context.write(new Text("U" + rowKey), new Text(infoEntry.getUserID()));
						}
					}
				}
				
				// Global rollup across all policies, keyed only by insert date.
				String allRowKey = "A_" + infoEntry.getInsertDate();
				context.write(new Text("I" + allRowKey), textOne);
				context.write(new Text("U" + allRowKey), new Text(infoEntry.getUserID()));
			}
			catch (Exception e) {
				// Best-effort: a malformed record is logged and skipped rather than
				// failing the whole map task.
				e.printStackTrace();
			}
		}
	}
	
	/**
	 * Emits one text line per row key: a record count for keys the mapper marked
	 * with "I" ("rowKey_icount:N") and a distinct-user count for keys marked
	 * with "U" ("rowKey_ucount:N"). The output key is always NullWritable; the
	 * entire result lives in the Text value.
	 */
	public static class InfoReducer extends Reducer<Text, Text, NullWritable,Text> {
		public void reduce(Text key, Iterable<Text> values,Context context) throws IOException, InterruptedException {
			// Materialize the key string once instead of three toString() calls.
			String fullKey = key.toString();
			// First character is the I/U marker written by the mapper.
			char stateType = fullKey.charAt(0);
			String outPutKey = fullKey.substring(1);
			if (stateType == 'I') {
				// Plain record count; long (not boxed Integer) avoids autoboxing on
				// every increment and overflow on very hot keys.
				long count = 0;
				for (Text ignored : values) {
					count++;
				}
				context.write(NullWritable.get(), new Text(outPutKey + "_icount:" + count));
			} else if (stateType == 'U') {
				// Distinct user count via an in-memory set.
				// NOTE(review): assumes per-key user cardinality fits in reducer heap.
				Set<String> userSet = new HashSet<String>();
				for (Text value : values) {
					userSet.add(value.toString());
				}
				context.write(NullWritable.get(), new Text(outPutKey + "_ucount:" + userSet.size()));
			}
		}
	}
	
	
	/**
	 * Configures and submits the job: text input from args[0], text output to
	 * args[1]. Exits 0 on success, 1 on job failure, 2 on bad usage.
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
		Configuration configuration = new Configuration();
		String[] otherArgs = new GenericOptionsParser(configuration, args).getRemainingArgs();
		// Guard before indexing otherArgs[0]/[1] below.
		if (otherArgs.length < 2) {
			System.err.println("Usage: " + JOB_NAME + " <input path> <output path>");
			System.exit(2);
		}
		
		System.out.println("===================配置信息开始=========================");
		
		// Allow very long input lines before the line record reader gives up.
		configuration.setInt("mapred.linerecordreader.maxlength", 81920);
		// Bigger sort buffer and merge factor for the shuffle phase.
		configuration.setInt("io.sort.mb", 500);
		configuration.setInt("io.sort.factor", 100);
		// Disable speculative execution of reduce tasks.
		configuration.setBoolean("mapred.reduce.tasks.speculative.execution", false);
		
		System.out.println("===================配置信息结束=========================");
		Job job = new Job(configuration, JOB_NAME);
		job.setJarByClass(ZP_InfoJobHDFS.class);
		job.setNumReduceTasks(REDUCE_TASK_NUM);
		job.setMapperClass(InfoMapper.class);
		job.setReducerClass(InfoReducer.class);
		// Must match InfoReducer's declared output types <NullWritable, Text>;
		// the previous Text / org.hsqldb.Result pair was a stray-import bug.
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);

		// Cap split size at 16 MB to increase map-task parallelism.
		FileInputFormat.setMaxInputSplitSize(job, 16 *1024 * 1024);
		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
	
}
