package main;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;



/**
 * 分析搜索日志，对相同city1、cate1||city1、cate2||city1、cate3的搜索词进行合并统计
 * key为city1#cate1#searchword,value为搜索词的频率
 */
@SuppressWarnings("deprecation")
public class SearchWordDownloadJob {
	private static final int REDUCE_TASK_NUM = 10;
	private static final String JOB_NAME = "SearchWordDownloadJob";
	private static final String COLUMN_NAME = "currentTime";
	public static Map<String,String> cateListInfo=new HashMap<String,String>();
	public static Map<String,String> cateInfo=new HashMap<String,String>();
	public static Map<String,String> chanelinfo=new HashMap<String,String>();
	public static Map<String,String> uniqtype=new HashMap<String,String>();
	public static Map<String,String> uniqtype1=new HashMap<String,String>();
	public static Map<String,String> uniqtype2=new HashMap<String,String>();
	public static Map<String,String> uniqcity=new HashMap<String,String>();
	
	static {
		try {
			String statDate=OpHDFSTools.getStringDate(OpHDFSTools.getDateBefore(new Date(), 1),"yyyy-MM-dd");//"+ statDate.replace("-", "") + "
			//cateListInfo为所有cate键值与value对应，cateinfo为value与一级类别，二级类别对应关系
			cateListInfo = OpHDFSTools.getMapFormHDFSFile("/dsap/resultdata/ecdataconfig/DispCateListNameInfo/"+ statDate.replace("-", "") + "/part-r-00000","\t");
			cateInfo = OpHDFSTools.getFormatMapFormHDFSFile("/dsap/resultdata/ecdataconfig/DispCateInfo/"+ statDate.replace("-", "") + "/part-r-00000","\t",5);
			chanelinfo=OpHDFSTools.getMapFormHDFSFileForChanel("/dsap/resultdata/ecdataconfig/DispBusinessTypeInfo/"+ statDate.replace("-", "") + "/part-r-00000","\t");			
		    uniqtype.put("1", "业务线全部");
		    uniqtype.put("2", "业务线黄页");
		    uniqtype.put("3", "业务线二手");
		    uniqtype.put("4", "业务线二手车");
		    uniqtype.put("5", "业务线房产");
		    uniqtype.put("6", "房产信息");
		    uniqtype.put("7", "业务线招聘");
		    uniqtype.put("8", "全职招聘");
		    uniqtype1.put("1", "业务线全部");
		    uniqtype1.put("2", "业务线黄页");
		    uniqtype1.put("3", "业务线二手");
		    uniqtype1.put("4", "业务线二手车");
		    uniqtype1.put("5", "业务线房产");
		    uniqtype1.put("6", "房产信息");
		    uniqtype1.put("7", "业务线招聘");
		    uniqtype1.put("8", "全职招聘");
		    uniqtype1.put("9", "宠物");
		    uniqtype1.put("10", "兼职招聘");
		    uniqtype1.put("11", "求职信息");
		    uniqtype1.put("12", "租房");
		    uniqtype1.put("13", "客服");
		    uniqtype1.put("14", "餐饮");
		    uniqtype1.put("15", "家政保洁/安保");
		    uniqtype2.put("1", "业务线全部");
		    uniqtype2.put("2", "业务线黄页");
		    uniqtype2.put("3", "业务线二手");
		    uniqtype2.put("4", "业务线二手车");
		    uniqtype2.put("5", "业务线房产");
		    uniqtype2.put("6", "房产信息");
		    uniqtype2.put("7", "业务线招聘");
		    uniqtype2.put("8", "全职招聘");
		    uniqtype2.put("9", "宠物");
		    uniqtype2.put("10", "兼职招聘");
		    uniqtype2.put("11", "求职信息");
		    uniqcity.put("1", "深圳");
		    uniqcity.put("2", "上海");
		    uniqcity.put("3", "广州");
		    uniqcity.put("4", "成都");
		    uniqcity.put("5", "重庆");
		    uniqcity.put("6", "苏州");
		    uniqcity.put("7", "天津");
		    uniqcity.put("8", "郑州");
		    uniqcity.put("9", "武汉");
		    uniqcity.put("10", "杭州");
		    uniqcity.put("11", "西安");
		    uniqcity.put("12", "东莞");
		    uniqcity.put("13", "长沙");
		    uniqcity.put("14", "哈尔滨");
		    uniqcity.put("15", "沈阳"); 
		    uniqcity.put("16", "南京");
		    uniqcity.put("17", "青岛");
		    uniqcity.put("18", "石家庄");
		    uniqcity.put("19", "济南");
		    uniqcity.put("20", "长春");
		    uniqcity.put("21", "佛山");
		    uniqcity.put("22", "南宁");
		    uniqcity.put("23", "合肥");
		    uniqcity.put("24", "福州");
		    uniqcity.put("25", "宁波");
		    uniqcity.put("26", "大连");
		    uniqcity.put("27", "太原");
		    uniqcity.put("28", "无锡");
		    uniqcity.put("29", "昆明");
		    uniqcity.put("30", "南阳");
		    uniqcity.put("31", "洛阳");
		    uniqcity.put("32", "海口");
		    uniqcity.put("33", "乌鲁木齐");
		    uniqcity.put("34", "呼和浩特");
		    uniqcity.put("35", "西宁");
		    uniqcity.put("36", "银川 ");
		    uniqcity.put("37", "贵阳");
		    uniqcity.put("38", "南昌");
		    uniqcity.put("39", "全部");
		    
		} catch (Exception e) {
			// TODO Auto-generated catch block
		}
		
	}
	/**
	 * Splits each raw search-log line (comma-separated) and emits one
	 * (city#category#searchword, 1) pair per applicable bucket,
	 * e.g. "北京#二手房#出租二手房	1".
	 *
	 * Expected input fields (assumed from index usage — confirm against the
	 * upstream log format): [2]=search word, [9]=city, [14]=category path
	 * separated by '/'.
	 */
	public static class M1 extends	Mapper<Object,Text, Text, IntWritable> {

		// Every emitted record carries a count of 1; reuse one instance
		// instead of allocating a new IntWritable per write.
		private static final IntWritable ONE = new IntWritable(1);

		@Override
		public void map(Object key, Text value, Context context)throws IOException, InterruptedException {
			try {
				String line = value.toString().trim();
				if ("".equals(line)) {
					return;
				}
				String[] lineParams = line.split(",");
				// Explicit bounds check: fields 2, 9 and 14 must all exist.
				// (The original relied on ArrayIndexOutOfBoundsException being
				// swallowed, which could emit a partial record first.)
				if (lineParams.length <= 14) {
					return;
				}
				String cityName = lineParams[9];
				String wordkey = lineParams[2].trim();
				if (wordkey.contains(" ")) {
					return; // multi-word queries are excluded from the stats
				}
				// Always count toward the city's overall (cate=全部) bucket.
				context.write(new Text(cityName + "#业务线全部#" + wordkey), ONE);

				// Resolve the first recognised segment of the category path.
				String catekeyString = "";
				String catelistName = "";
				for (String str : lineParams[14].split("/")) {
					if (!("".equals(str)) && cateListInfo.containsKey(str)) {
						catekeyString = cateListInfo.get(str);
						if (chanelinfo.containsKey(catekeyString)) {
							// Per-business-line bucket.
							context.write(new Text(cityName + "#业务线" + chanelinfo.get(catekeyString) + "#" + wordkey), ONE);
						}
						if (cateInfo.containsKey(catekeyString)) {
							catelistName = cateInfo.get(catekeyString);
						}
						// Only the first matched segment is used, whether or
						// not it has level-1/level-2 names.
						break;
					}
				}
				// catelistName holds tab-separated category names; emit one
				// city-level and one nationwide ("全部") record per name.
				for (String cate : catelistName.split("\t")) {
					if ("车辆买卖与服务".equals(cate) && chanelinfo.containsKey(catekeyString)) {
						context.write(new Text(cityName + "#业务线" + chanelinfo.get(catekeyString) + cate + "#" + wordkey), ONE);
						context.write(new Text("全部#业务线" + chanelinfo.get(catekeyString) + cate + "#" + wordkey), ONE);
					} else if (!("".equals(cate)) && !("其他".equals(cate))) {
						context.write(new Text(cityName + "#" + cate + "#" + wordkey), ONE);
						context.write(new Text("全部#" + cate + "#" + wordkey), ONE);
					}
				}
			} catch (Exception e) {
				// Best-effort parsing: malformed log lines are skipped.
			}
		}
	}
	
	
	/**
	 * Sums the per-record counts for one city#cate#searchword key and emits
	 * the total, e.g. ("city1#cate1#key", 5).
	 */
	public static class R1 extends Reducer<Text, IntWritable, Text, IntWritable> {

		// Reused output value; reducers run single-threaded per task.
		public IntWritable intValue = new IntWritable();

		@Override
		protected void reduce(Text key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			int total = 0;
			for (IntWritable count : values) {
				total += count.get();
			}
			// Only emit keys that were actually observed.
			if (total > 0) {
				intValue.set(total);
				context.write(key, intValue);
			}
		}
	}
	/**
	 * Regroups the job-1 counts by date, city and category.
	 * Input:  "city#cate#word \t count"
	 * Output: key "city_cate_yyyyMMdd", value "word:count"
	 * (e.g. key "北京_二手房_20150302", value "霍营二手房:5").
	 *
	 * Low-frequency words are filtered so the per-group value lists stay small
	 * enough to sort downstream: the nationwide ("全部") and 北京 buckets use
	 * higher thresholds than the remaining cities.
	 */
	public static class M2 extends	Mapper<Object,Text, Text, Text> {
		@Override
		public void map(Object key, Text value, Context context)throws IOException, InterruptedException {
			try {
				String[] readline = value.toString().trim().split("\t");
				if (readline.length > 1 && readline[1].matches("[0-9]+")) {
					String[] arr = readline[0].split("#");
					if (arr.length > 2) {
						// Stat date injected into the job configuration by main().
						String keyTime = context.getConfiguration().get(COLUMN_NAME);
						// Parse once; the original re-parsed the count up to
						// three times per record.
						int count = Integer.parseInt(readline[1]);
						Text outKey = new Text(arr[0] + "_" + arr[1] + "_" + keyTime);
						Text outVal = new Text(arr[2] + ":" + readline[1]);

						if ("全部".equals(arr[0])) {
							// Nationwide bucket: require >=5 overall, and >=30
							// for the major business lines in uniqtype1.
							if (count >= 5 && !(uniqtype1.containsValue(arr[1]) && count < 30)) {
								context.write(outKey, outVal);
							}
						} else if ("北京".equals(arr[0])) {
							// Beijing: require >=2 overall, >=10 for uniqtype2 lines.
							if (count >= 2 && !(uniqtype2.containsValue(arr[1]) && count < 10)) {
								context.write(outKey, outVal);
							}
						} else {
							// Other cities: only filter counts <5 for the
							// whitelisted city/business-line combinations.
							if (!(uniqcity.containsValue(arr[0]) && uniqtype.containsValue(arr[1]) && count < 5)) {
								context.write(outKey, outVal);
							}
						}
					}
				}
			} catch (Exception e) {
				// Best-effort parsing: malformed records are skipped.
			}
		}
	}
	/**
	 * Joins all "word:count" values of one (city, category, date) key into a
	 * single comma-separated line, e.g.
	 * key "20150302_北京_二手房", value "霍营二手房:5,...,北京二手房:98,".
	 * The joined value keeps the original trailing comma and reversed
	 * iteration order (downstream M3 re-sorts, so order is not significant,
	 * but the output stays byte-identical to the previous implementation).
	 */
	public static class R2 extends Reducer<Text, Text, Text, Text> {
				String valtextString="";
				// Reused output value object.
				public Text valuetext=new Text();
				@Override
				protected void reduce(Text key, Iterable<Text> values,Context context)
						throws IOException, InterruptedException {
					// Collect first, then build back-to-front with a
					// StringBuilder: the original prepended with String '+'
					// inside the loop, which is O(n^2) in total value length.
					List<String> vals = new ArrayList<String>();
					for (Text val : values) {
						vals.add(val.toString());
					}
					StringBuilder sb = new StringBuilder();
					for (int i = vals.size() - 1; i >= 0; i--) {
						sb.append(vals.get(i)).append(',');
					}
					valtextString = sb.toString();
					valuetext.set(valtextString);
					context.write(key, valuetext);
				}
		}
	/**
	 * Sorts each group's "word:count" entries by count (descending) and keeps
	 * at most the top 50, re-emitting them as one comma-separated value.
	 */
	public static class M3 extends	Mapper<Object,Text, Text, Text> {
		
		private Text keytext =new Text();
		public Text valuetext=new Text();
		// Scratch map, cleared per record; mappers run single-threaded per task.
		Map<String, Integer> wp=new HashMap<String, Integer>();

		@Override
		public void map(Object key, Text value, Context context)throws IOException, InterruptedException {
			try {
				String[] readline = value.toString().trim().split("\t");
				if (readline.length <= 1) {
					// BUGFIX: the original fell through to context.write()
					// here, re-emitting the PREVIOUS record's stale key/value.
					return;
				}
				wp.clear();
				keytext.set(readline[0]);
				// Parse "word:count" entries, skipping the empty fragments the
				// trailing comma from R2 produces.
				for (String entry : readline[1].split(",")) {
					if (!("".equals(entry))) {
						String[] timep = entry.split(":");
						wp.put(timep[0], Integer.parseInt(timep[1]));
					}
				}
				List<Map.Entry<String, Integer>> infoIds =
						new ArrayList<Map.Entry<String, Integer>>(wp.entrySet());
				// Descending by count.
				Collections.sort(infoIds, new Comparator<Map.Entry<String, Integer>>() {
					@Override
					public int compare(Map.Entry<String, Integer> firstMapEntry,
							Map.Entry<String, Integer> secondMapEntry) {
						return secondMapEntry.getValue().compareTo(firstMapEntry.getValue());
					}
				});
				// Keep at most the top 50; build with StringBuilder instead of
				// repeated String concatenation.
				StringBuilder out = new StringBuilder();
				for (int i = 0; i < infoIds.size() && i < 50; i++) {
					if (i > 0) {
						out.append(',');
					}
					out.append(infoIds.get(i).getKey()).append(':').append(infoIds.get(i).getValue());
				}
				valuetext.set(out.toString());
				context.write(keytext, valuetext);
			} catch (Exception e) {
				// Best-effort parsing: malformed records are skipped.
			}
		}
	}
	public static class R3 extends Reducer<Text, Text, Text, Text> {// 前两个输入：例：（hello，1），后两个输出（hello，2）
		   /***
			 *排序后的输出
			 * 
			 */
				protected void reduce(Text key, Text valuetext,Context context)
						throws IOException, InterruptedException {
					
					context.write(key, valuetext);
					
				}
			
			
		}
	/**
	 * Converts the sorted job-3 output into HBase KeyValues for bulk loading.
	 * Row key = the group key ("city_cate_date"); one cell per "word:count"
	 * entry, under column family "cf" with the word as qualifier and the
	 * count string as the cell value.
	 */
	public static class M4 extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue>
	{
		// The column family is the same for every cell; build it once instead
		// of per loop iteration.
		private static final byte[] FAMILY = Bytes.toBytes("cf");

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] wordCountArray = value.toString().split("\t");
			if (wordCountArray.length < 2) {
				// Lines with no value column (job 3 can emit an empty value)
				// previously crashed the task with an unchecked
				// ArrayIndexOutOfBoundsException.
				return;
			}
			byte[] rowKey = Bytes.toBytes(wordCountArray[0]);
			ImmutableBytesWritable rowKeyWritable = new ImmutableBytesWritable(rowKey);
			for (String entry : wordCountArray[1].split(",")) {
				String[] keycount = entry.split(":");
				if (keycount.length < 2) {
					continue; // skip malformed "word:count" fragments
				}
				KeyValue keyValue = new KeyValue(rowKey, FAMILY,
						Bytes.toBytes(keycount[0]), Bytes.toBytes(keycount[1]));
				context.write(rowKeyWritable, keyValue);
			}
		}
	}
	/**
	 * Chains the four jobs: (1) split/count search words, (2) regroup by
	 * date/city/category, (3) sort and take top 50, (4) write HFiles and
	 * bulk-load them into the HBase table "SearchWordCount".
	 *
	 * @param args optional stat date (yyyyMMdd) as the first remaining
	 *             argument; defaults to yesterday when absent.
	 */
	public static void main(String[] args) throws Exception{
		Configuration conf = new Configuration();
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		String currentTime = otherArgs.length != 0 ? otherArgs[0] : "";
		if (currentTime == null || currentTime.equals("")) {
			// Default to yesterday at midnight, formatted yyyyMMdd.
			Calendar calcurrent = Calendar.getInstance();
			calcurrent.add(Calendar.DATE, -1);
			calcurrent.set(Calendar.SECOND, 0);
			calcurrent.set(Calendar.MINUTE, 0);
			calcurrent.set(Calendar.HOUR_OF_DAY, 0);
			Date dataTime = calcurrent.getTime();
			SimpleDateFormat formatterYYYYMMdd = new SimpleDateFormat("yyyyMMdd");
			currentTime = formatterYYYYMMdd.format(dataTime);
		}
		// Make the stat date visible to M2 via the job configuration.
		conf.set(COLUMN_NAME, currentTime);

		String path1in="/dsap/resultdata/track.58.com/lessnoresult/SourceAnalysis/"+currentTime;
		String temp="/dsap/resultdata/hzx/userdata/SearchWordDownLoadJob/"+currentTime;
		String path1out=temp+"/SearchWordDataJob1_Result";
		String path2out=temp+"/SearchWordDataJob2_Result";
		String path3out=temp+"/SearchWordDataJob3_Result";
		String path4out=temp+"/SearchWordDataJob4_Result";

		/** Job 1: split the search log and count words per city/category. */
		Job job1 = new Job(conf, JOB_NAME+"_1");
		job1.setJarByClass(SearchWordDownloadJob.class);
		job1.setNumReduceTasks(REDUCE_TASK_NUM);
		job1.setMapperClass(M1.class);
		job1.setReducerClass(R1.class);
		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(IntWritable.class);

		// Reuse one FileSystem handle (previously fetched twice) and clear
		// any stale output from an earlier run of the same day.
		FileSystem fs = FileSystem.get(conf);
		if (fs.exists(new Path(temp))) {
			fs.delete(new Path(temp), true);
		}
		FileInputFormat.addInputPath(job1, new Path(path1in));
		FileOutputFormat.setOutputPath(job1, new Path(path1out));
		// BUGFIX: abort the chain when a stage fails — the original ignored
		// every waitForCompletion() result and ran later jobs against
		// missing or partial input.
		if (!job1.waitForCompletion(true)) {
			System.exit(1);
		}

		/** Job 2: regroup counts into one line per (date, city, category). */
		Job job2 = new Job(conf, JOB_NAME+"_2");
		job2.setJarByClass(SearchWordDownloadJob.class);
		job2.setNumReduceTasks(20);
		job2.setMapperClass(M2.class);
		job2.setReducerClass(R2.class);
		job2.setOutputKeyClass(Text.class);
		job2.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job2, new Path(path1out));
		FileOutputFormat.setOutputPath(job2, new Path(path2out));
		if (!job2.waitForCompletion(true)) {
			System.exit(1);
		}

		/** Job 3: sort each group's words by count and keep the top 50. */
		Job job3 = new Job(conf, JOB_NAME+"_3");
		job3.setJarByClass(SearchWordDownloadJob.class);
		job3.setNumReduceTasks(REDUCE_TASK_NUM);
		job3.setMapperClass(M3.class);
		job3.setReducerClass(R3.class);
		job3.setOutputKeyClass(Text.class);
		job3.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job3, new Path(path2out));
		FileOutputFormat.setOutputPath(job3, new Path(path3out));
		if (!job3.waitForCompletion(true)) {
			System.exit(1);
		}

		/** Job 4: write HFiles and bulk-load them into HBase. */
		Job job4 = new Job(conf, JOB_NAME+"_4");
		job4.setJarByClass(SearchWordDownloadJob.class);
		job4.setMapperClass(M4.class);
		job4.setReducerClass(KeyValueSortReducer.class);
		job4.setMapOutputKeyClass(ImmutableBytesWritable.class);
		job4.setMapOutputValueClass(KeyValue.class);
		// Job 3's output is job 4's input.
		FileInputFormat.addInputPath(job4, new Path(path3out));
		FileOutputFormat.setOutputPath(job4, new Path(path4out));

		Configuration hbaseConfiguration = HBaseConfiguration.create();
		HTable wordCountTable = new HTable(hbaseConfiguration, "SearchWordCount");
		try {
			HFileOutputFormat.configureIncrementalLoad(job4, wordCountTable);
			int job4Result = job4.waitForCompletion(true) ? 0 : 1;
			if (job4Result == 0) {
				// Bulk-load the generated HFiles only when job 4 succeeded;
				// the original attempted the load even after a failed job.
				LoadIncrementalHFiles loader = new LoadIncrementalHFiles(hbaseConfiguration);
				loader.doBulkLoad(new Path(path4out), wordCountTable);
			}
			System.exit(job4Result);
		} finally {
			// Release the HTable's connection resources (previously leaked).
			wordCountTable.close();
		}
	}
}
