package main;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import net.sf.json.JSONArray;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class MCityCateSearchWordTop50_V2 {
/**
  `stat_date` date DEFAULT NULL COMMENT '日期',
  `word` text COMMENT '搜索词',
  `city_list_name` varchar(20) DEFAULT NULL COMMENT '地域英文名称',
  `cate_list_name` varchar(20) DEFAULT NULL COMMENT '类别英文名称',
  `disp_city_id` int(10) DEFAULT NULL,
  `disp_cate_id` int(10) DEFAULT NULL,
  `disp_city1_name` varchar(50) DEFAULT NULL COMMENT '展现一级城市',
  `disp_city2_name` varchar(50) DEFAULT NULL COMMENT '展现二级城市',
  `disp_city3_name` varchar(50) DEFAULT NULL COMMENT '展现三级城市',
  `disp_city4_name` varchar(50) DEFAULT NULL COMMENT '展现四级城市',
  `disp_cate1_name` varchar(50) DEFAULT NULL COMMENT '展现一级类别',
  `disp_cate2_name` varchar(50) DEFAULT NULL COMMENT '展现二级类别',
  `disp_cate3_name` varchar(50) DEFAULT NULL COMMENT '展现三级类别',
  `disp_cate4_name` varchar(50) DEFAULT NULL COMMENT '展现四级类别',
  `disp_cate5_name` varchar(50) DEFAULT NULL COMMENT '展现五级类别',
  `page_num` varchar(10) DEFAULT NULL COMMENT '页码',
  `search_count` int(10) DEFAULT '0' COMMENT '搜索次数'	
 * @author zhaoxiang
 */
	//Mapper
	public static class MyMapper extends Mapper<LongWritable, Text, Text, IntWritable>
	{
		IntWritable counter = new IntWritable();
		public  Map<String,String> businessInfo=null;
		
		protected void setup(Context context) throws IOException,InterruptedException 
		{
			try
			{
				String statDate = context.getConfiguration().get("statDate");
				businessInfo = OpHDFSTools.getMapFormHDFSFileForChanel("/dsap/resultdata/ecdataconfig/DispBusinessTypeInfo/"+ statDate.replace("-", "") + "/part-r-00000",	"\t");
			} 
			catch (Exception e) 
			{
			}
		}
		
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
		{
			String line=value.toString();
			try 
			{
				JSONArray recod = JSONArray.fromObject(line.split("\\t"));
				
				if(recod.size() < 17){
					return;
				}
				
				//日期，业务线，一级城市，二级类，搜索词，次数
				String stat_date = recod.getString(0);
				String word = recod.getString(1);
				String cateId = recod.getString(5);
				String business_name = businessInfo.get(cateId);
				String disp_city1_name = recod.getString(6);
				String disp_cate1_name = recod.getString(10);
				String disp_cate2_name = recod.getString(11);
				int search_count = recod.getInt(16);
				counter.set(search_count);
				
				//搜索词既要统计在城市类别里，又要统计在上级汇总里面
				String [] cityCateArr1={disp_city1_name,business_name,disp_cate1_name,disp_cate2_name,stat_date,word};
				String [] cityCateArr2={disp_city1_name,business_name,disp_cate1_name,"全部",stat_date,word};
				String [] cityCateArr3={disp_city1_name,business_name,"全部","全部",stat_date,word};
				String [] cityCateArr4={disp_city1_name,"全部","全部","全部",stat_date,word};
				
				String [] cityCateArr5={"全部","全部","全部","全部",stat_date,word};
				String [] cityCateArr6={"全部",business_name,"全部","全部",stat_date,word};
				String [] cityCateArr7={"全部",business_name,disp_cate1_name,"全部",stat_date,word};
				String [] cityCateArr8={"全部",business_name,disp_cate1_name,disp_cate2_name,stat_date,word};
				
				context.write(new Text(StringUtils.join(cityCateArr1, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr2, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr3, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr4, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr5, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr6, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr7, "_")), counter);
				context.write(new Text(StringUtils.join(cityCateArr8, "_")), counter);
			} 
			catch (Exception e) 
			{
				e.printStackTrace();
			}
		}
	}
	
	//Reducer
	public static class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> 
	{
		private IntWritable counter = new IntWritable();
		
		public void reduce(Text key, Iterable<IntWritable> values,Context context) throws IOException, InterruptedException 
		{	
			int searchCount=0;
			for (IntWritable v:values) 
			{
				try
				{
					searchCount = searchCount + v.get();
				}
				catch(Exception e)
				{
					e.printStackTrace();
					return;
				}
			}
			//少于5次的仍掉
			if(searchCount < 5)
				return;
			counter.set(searchCount);
			context.write(key, counter);
		}
	}
	
	
	//Mapreduce2  取top 50
	public static class MyMapper2 extends Mapper<Text, Text, Text, Text>
	{
		private Text k = new Text(); 
		private Text v = new Text(); 
		
		public void map(Text key,Text value, Context context) throws IOException, InterruptedException 
		{
			String line=key.toString();
			try 
			{
				int index = line.lastIndexOf("_");
				
				if(index < 0)
					return;
				String rowKey = line.substring(0,index);
				String word = line.substring(0,index+1);
				k.set(rowKey);
				v.set(word+":"+value);
				context.write(k, v);
			} 
			catch (Exception e) 
			{
				e.printStackTrace();
			}
		}
	}
	
	//Reducer2
	public static class MyReducer2 extends Reducer<Text, Text, ImmutableBytesWritable, KeyValue>
	{
		// Maximum number of words kept per dimension key.
		private static final int TOP_N = 50;

		// A (search word, search count) pair ordered by ascending count.
		class Pair implements Comparable<Pair> {
			private String word;
			private int count;
			public Pair(String word, int count) {
				this.word = word;
				this.count = count;
			}
			public String getWord() {
				return word;
			}
			public void setWord(String word) {
				this.word = word;
			}
			public int getCount() {
				return count;
			}
			public void setCount(int count) {
				this.count = count;
			}

			@Override
			public int compareTo(Pair o) {
				// Integer.compare avoids the overflow risk of "this.count - o.count".
				return Integer.compare(this.count, o.count);
			}
		}

		/**
		 * Maintains topN as the TOP_N highest-count pairs. Once full, the list is
		 * kept sorted ascending so index 0 is always the current minimum.
		 */
		public void add(List<Pair> topN, Pair p) {
			if (topN.size() < TOP_N - 1) {
				topN.add(p);
			} else if (topN.size() == TOP_N - 1) {
				topN.add(p);
				Collections.sort(topN);
			} else if (p.getCount() > topN.get(0).getCount()) {
				// BUGFIX: only evict the current minimum when the new pair beats it;
				// the old code replaced it unconditionally, so smaller counts could
				// push out larger ones and corrupt the top-N result.
				topN.set(0, p);
				Collections.sort(topN);
			}
		}

		/**
		 * Keeps the top-N words for one dimension key and writes them as HBase KeyValues.
		 * @param key disp_city1_name, business_name, disp_cate1_name, disp_cate2_name, stat_date
		 * @param values ["word1:count1", "word2:count2", ...]
		 */
		@Override
		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException
		{
			List<Pair> topN = new ArrayList<Pair>(TOP_N + 1);
			for (Text t : values) {
				try {
					String line = t.toString();  // "word:count"
					int index = line.lastIndexOf(":");
					String word = line.substring(0, index);
					// BUGFIX: the count is the suffix AFTER the colon; the old code
					// took substring(0, index + 1) -- "word:" -- so Integer.parseInt
					// always threw and topN stayed empty.
					String count = line.substring(index + 1);
					add(topN, new Pair(word, Integer.parseInt(count)));
				} catch (Exception e) {
					// Malformed values are logged and skipped.
					e.printStackTrace();
				}
			}
			// 创建HBase中的RowKey -- one HBase row per dimension key.
			byte[] rowKey = Bytes.toBytes(key.toString());
			ImmutableBytesWritable rowKeyWritable = new ImmutableBytesWritable(rowKey);
			// Column family is constant; hoisted out of the loop.
			byte[] family = Bytes.toBytes("cf");
			for (Pair p : topN) {
				// KeyValue layout: rowKey / family "cf" / qualifier = word / value = count.
				byte[] qualifier = Bytes.toBytes(p.getWord());
				byte[] hbaseValue = Bytes.toBytes(p.getCount());
				KeyValue keyValue = new KeyValue(rowKey, family, qualifier, hbaseValue);
				context.write(rowKeyWritable, keyValue);
			}
		}
	}
	
	/**
	 * Returns the number of command-line arguments, treating a null array as empty.
	 *
	 * @param args the remaining arguments (may be null)
	 * @return args.length, or 0 when args is null or empty
	 */
	public static int getArgsParamsNum(String[] args){
		return (args == null) ? 0 : args.length;
	}
	
	@SuppressWarnings("deprecation")
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException
	{
		// job执行参数设置 -- job-wide settings.
		String JOB_NAME = "MCityCateSearchWordTop50";
		int REDUCE_TASK_NUM = 10;

		// The literal "yyyyMMdd" segment is replaced with the concrete stat date below.
		String inPath = "/dsap/resultdata/ysa/userdata/MSearchCountJob/yyyyMMdd";
		// Plain literals: the old String.format(path, JOB_NAME) had no %s placeholder,
		// so the JOB_NAME argument was silently ignored.
		String OPATH_1 = "/dsap/resultdata/zhaoxiang/SearchWordJob/yyyyMMdd";
		String OPATH_2 = "/dsap/resultdata/zhaoxiang/SearchWordTopNJob/yyyyMMdd";

		List<String> dateList = null;
		Configuration configuration = new Configuration();
		String[] otherArgs = null;
		try
		{
			otherArgs = new GenericOptionsParser(configuration, args).getRemainingArgs();
		}
		catch (IOException e)
		{
			e.printStackTrace();
			return;
		}

		int argsNum = getArgsParamsNum(otherArgs);

		// No args: yesterday only. One arg: that single day. Two args: inclusive range.
		String yestoday = DateUtil.getYestoday("yyyyMMdd");
		if (argsNum == 0) {
			dateList = DateUtil.getDayList(yestoday, yestoday);
		} else if (argsNum == 1) {
			dateList = DateUtil.getDayList(otherArgs[0], otherArgs[0]);
		} else if (argsNum == 2) {
			dateList = DateUtil.getDayList(otherArgs[0], otherArgs[1]);
		} else {
			// BUGFIX: more than two args used to leave dateList null and NPE below.
			System.err.println("Usage: MCityCateSearchWordTop50_V2 [startDate [endDate]] (yyyyMMdd)");
			System.exit(1);
		}

		int result = 0;
		for (String statDate : dateList)
		{
			System.out.println(statDate);
			try
			{
				configuration.set("statDate", statDate);

				// Job1: sum search counts per (city, business, cate1, cate2, date, word).
				Job job1 = new Job(configuration, JOB_NAME);
				job1.setJarByClass(MCityCateSearchWordTop50_V2.class);
				job1.setNumReduceTasks(REDUCE_TASK_NUM);
				job1.setMapperClass(MyMapper.class);
				job1.setReducerClass(MyReducer.class);
				job1.setMapOutputKeyClass(Text.class);
				job1.setMapOutputValueClass(IntWritable.class);
				job1.setOutputKeyClass(Text.class);
				job1.setOutputValueClass(IntWritable.class);

				FileSystem hdfs = FileSystem.get(configuration);
				Path out1 = new Path(OPATH_1.replace("yyyyMMdd", statDate));
				if (hdfs.exists(out1))
				{
					hdfs.delete(out1, true);
				}
				FileInputFormat.addInputPath(job1, new Path(inPath.replace("yyyyMMdd", statDate)));
				FileOutputFormat.setOutputPath(job1, out1);
				System.out.println(job1.waitForCompletion(true) ? 0 : 1);

				/** 定义job2，通过bulk-load写入到hbase中 -- job2: top-50 per key, written as HFiles. **/
				Job job2 = new Job(configuration, JOB_NAME + "_2");
				job2.setJarByClass(MCityCateSearchWordTop50_V2.class);
				job2.setMapperClass(MCityCateSearchWordTop50_V2.MyMapper2.class);
				job2.setReducerClass(MCityCateSearchWordTop50_V2.MyReducer2.class);
				job2.setInputFormatClass(KeyValueTextInputFormat.class);
				job2.setMapOutputKeyClass(Text.class);
				job2.setMapOutputValueClass(Text.class);
				job2.setOutputKeyClass(ImmutableBytesWritable.class);
				job2.setOutputValueClass(KeyValue.class);

				// 以Job1的输出做为输入 -- job1's output is job2's input.
				FileInputFormat.addInputPath(job2, new Path(OPATH_1.replace("yyyyMMdd", statDate)));
				// BUGFIX: delete stale job2 output first; otherwise any rerun (and every
				// date after the first in a range) fails with "output directory exists".
				// NOTE(review): OPATH_2 keeps the literal "yyyyMMdd" segment (no per-date
				// replace), as in the original -- confirm whether that is intentional.
				Path out2 = new Path(OPATH_2);
				if (hdfs.exists(out2))
				{
					hdfs.delete(out2, true);
				}
				FileOutputFormat.setOutputPath(job2, out2);

				// 创建HBase的配置对象/目标表对象 -- configureIncrementalLoad wires total-order
				// partitioning so the produced HFiles line up with the table's regions.
				Configuration hbaseConfiguration = HBaseConfiguration.create();
				HTable wordCountTable = new HTable(hbaseConfiguration, "SearchWordCount");
				try
				{
					HFileOutputFormat.configureIncrementalLoad(job2, wordCountTable);
					result = job2.waitForCompletion(true) ? 0 : 1;
				}
				finally
				{
					// BUGFIX: release the table connection (was never closed).
					wordCountTable.close();
				}

				// job2结束之后，调用BulkLoad方式来将MR结果批量入库
//				LoadIncrementalHFiles loader = new LoadIncrementalHFiles(hbaseConfiguration);
//				loader.doBulkLoad(new Path(OPATH_2), wordCountTable);
			}
			catch (Exception e)
			{
				e.printStackTrace();
				return;
			}
		}

		System.exit(result);
	}
	
}
