package com.qyer.log.job.sum;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import com.qyer.util.log.util.CateUtil;
import com.qyer.util.log.util.MyDateUtil;
import com.qyer.util.log.util.StatisticsUtils;
import com.qyer.util.log.util.TrackSourceType;

public class UserTracksSummary {
    
	 public static  class MapClass extends MapReduceBase implements Mapper<LongWritable, BytesRefArrayWritable, Text, Text> {

		// referer host -> "type@name" (only rows whose type column is "3" = off-site
		// source), loaded from the DistributedCache file sourcetype.txt.
		static HashMap<String, String> sourceMap = new HashMap<String, String>();
		// "col1,col2" -> business-line id, loaded from cate_business.txt.
		static HashMap<String, String> cateMap = new HashMap<String, String>();

		/**
		 * Loads the two dictionary files that the driver registered in the
		 * DistributedCache. An IO problem is logged and leaves the maps
		 * partially filled; the job then runs with whatever was loaded.
		 */
		@Override
		public void configure(JobConf job) {
			try {
				// Local copies of the files cached for this job.
				Path[] paths = DistributedCache.getLocalCacheFiles(job);
				System.out.println("paths====" + paths);
				if (paths == null) {
					return;
				}
				for (Path path : paths) {
					System.out.println("path====" + path.toString());
					if (path.toString().contains("sourcetype.txt")) {
						loadSourceTypeDict(path.toString());
					} else if (path.toString().contains("cate_business.txt")) {
						loadCateDict(path.toString());
					}
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
		}

		// Parses sourcetype.txt (tab separated): host (col 4) -> "type@name",
		// keeping only type "3" rows.
		private static void loadSourceTypeDict(String file) throws IOException {
			// BUGFIX: the original reused one reader variable for both dictionary
			// files and leaked the first reader when both were present; each file
			// now gets its own reader closed in a finally block.
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(file), "utf-8"));
			try {
				String line;
				while (null != (line = in.readLine())) {
					String[] strArr = line.split("\t", -1);
					// BUGFIX: strArr[4] is read below, so at least 5 columns are
					// required; the original checked < 4 and could throw
					// ArrayIndexOutOfBoundsException on 4-column rows.
					if (strArr.length < 5) {
						continue;
					}
					if ("3".equals(strArr[0])) {
						sourceMap.put(strArr[4], strArr[0] + "@" + strArr[1]);
					}
				}
			} finally {
				in.close();
			}
		}

		// Parses cate_business.txt (tab separated): "col1,col2" -> col3.
		private static void loadCateDict(String file) throws IOException {
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(file), "utf-8"));
			try {
				String line;
				while (null != (line = in.readLine())) {
					String[] strArr = line.split("\t", -1);
					if (strArr.length < 4) {
						continue;
					}
					cateMap.put(strArr[1] + "," + strArr[2], strArr[3]);
				}
			} finally {
				in.close();
			}
		}

		/**
		 * Reads column {@code index} of an RCFile row as a string, or returns
		 * null when the index is out of range.
		 */
		public String getRCValue(BytesRefArrayWritable value, int index) throws IOException {
			if (index >= 0 && index < value.size()) {
				BytesRefWritable brw = value.get(index);
				Text txt = new Text();
				txt.set(brw.getData(), brw.getStart(), brw.getLength());
				return txt.toString();
			}
			return null;
		}

		private Text val = new Text();
		private Text keyVal = new Text();

		final static String FS = "\t";

		/**
		 * For every page-view beacon (column 13 == "1") emits one record per
		 * (platform, category, source) dimension combination. Key layout:
		 * platform FS cate FS sourceDims; value: the visitor's unique cookie id.
		 */
		@Override
		public void map(LongWritable key, BytesRefArrayWritable value,
				OutputCollector<Text, Text> output, Reporter reporter) throws IOException {

			// Rows with fewer than 21 columns are not complete track records.
			if (value.size() < 21) {
				return;
			}

			String becon_type = getRCValue(value, 13); // beacon type column
			if (!"1".equals(becon_type)) {
				return; // only page-view beacons are counted
			}

			String url = getRCValue(value, 11); // landing url
			// Mobile-web pages live under m.qyer.com; everything else is desktop web.
			// (The original's follow-up "web or m" check was always true and was dropped.)
			String platform = url.startsWith("m.qyer.com") ? "m" : "web";

			String unique_id = getRCValue(value, 1); // visitor cookie id
			String refer = getRCValue(value, 12); // http referer

			String sourcetype = TrackSourceType.getSourceType(refer, url, sourceMap, cateMap);
			String cate = CateUtil.getCate(url, cateMap);

			// "A" is the roll-up ("all") bucket for the platform and category dimensions.
			for (String p : new String[] { "A", platform }) {
				for (String c : new String[] { "A", cate }) {
					for (String sou : StatisticsUtils.getDimListTSource(sourcetype)) {
						sou = sou.replace("@", FS);
						keyVal.set(p + FS + c + FS + sou);
						val.set(unique_id);
						output.collect(keyVal, val);
					}
				}
			}
		}
	 }
	 
	 
	 public static class Combiner extends MapReduceBase implements Reducer<Text, Text, Text, Text> {

		  private Text val = new Text();
		  private Text keyVal = new Text();

		  final static String FS = "\t";

	     /**
	      * Partial aggregation per key: counts PV and collects the distinct
	      * cookie ids. Output value format: {@code "<pv>\t<id1>,<id2>,..."},
	      * which the Reduce step parses.
	      */
	     @Override
	     public void reduce(Text key, Iterator<Text> value, OutputCollector<Text, Text> output, Reporter reporter)
	               throws IOException {
	    	 	String[] keyStrs = key.toString().split(FS);
	    	 	if (keyStrs.length < 4) {
	    	 		return;
	    	 	}

	    	 	// Drop in-site-source rows (source_1 == "2") whose landing category
	    	 	// equals the source category, and the all-business-line ("A")
	    	 	// in-site-source rows.
	    	 	if (keyStrs[2].equals("2")) {
	    	 		if (keyStrs[1].equals(keyStrs[3])) {
	    	 			return;
	    	 		}
	    	 		if ("A".equals(keyStrs[1])) {
	    	 			return;
	    	 		}
	    	 	}

				Set<String> cookieIdSet = new HashSet<String>();
				long pv = 0l;

				while (value.hasNext()) {
					String v = value.next().toString();
					// Hadoop may apply the combiner to its own output; accept both
					// the raw mapper value (a bare cookie id, no FS) and an
					// already-combined "<pv>\t<ids>" value.
					int fsIdx = v.indexOf(FS);
					if (fsIdx > -1) {
						pv += Long.parseLong(v.substring(0, fsIdx));
						for (String id : v.substring(fsIdx + 1).split(",")) {
							if (id.length() > 0) {
								cookieIdSet.add(id);
							}
						}
					} else {
						pv++;
						cookieIdSet.add(v);
					}
				}

				// BUGFIX: the original emitted cookieIdSet.toString() ("[a, b]");
				// the brackets and ", " separators leaked into the ids parsed by
				// Reduce and inflated the distinct-cookie (UV) count. Emit a clean
				// comma join instead (Reduce already parses this form correctly).
				StringBuilder ids = new StringBuilder();
				for (String id : cookieIdSet) {
					if (ids.length() > 0) {
						ids.append(',');
					}
					ids.append(id);
				}

				keyVal.set(key);
				val.set(pv + FS + ids.toString());
				output.collect(keyVal, val);
	     }
	 }

	 
     public static  class Reduce extends MapReduceBase implements Reducer<Text, Text, Text, Text> {

    	  private Text val = new Text();
 		  private Text keyVal = new Text();

 		  final static String FS = "\t";

 		  // Period start timestamp (epoch seconds) injected by the driver via
 		  // the "DATE" configuration key; becomes the `fromtime` output column.
 		  static String DATE = "";

 		  // Not thread-safe, but each reduce task is single-threaded; hoisted
 		  // here so it is not rebuilt on every reduce() call.
 		  private final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");

 		  @Override
 		  public void configure(JobConf job) {
 			 if (DATE == null || "".equals(DATE)) {
 				DATE = job.get("DATE");
 			 }
 		  }

          /**
           * Final aggregation: merges the combiner's "<pv>\t<ids>" partials,
           * counts the distinct cookies (UV) and emits one summary row:
           * key = addtime FS fromtime FS uv FS len, value = dimension key.
           * (pv is accumulated but, as before, intentionally not emitted.)
           */
          @Override
          public void reduce(Text key, Iterator<Text> value, OutputCollector<Text, Text> output, Reporter reporter)
                    throws IOException {

  			Set<String> cookieIdSet = new HashSet<String>();
  			long uv = 0l;
  			long pv = 0l;

  			while (value.hasNext()) {
				String v = value.next().toString();
				int fsIdx = v.indexOf(FS);
				if (fsIdx < 0) {
					// Combiners are optional in Hadoop: a value without FS is a raw
					// cookie id coming straight from the mapper (the original threw
					// ArrayIndexOutOfBoundsException here).
					pv++;
					cookieIdSet.add(v);
					continue;
				}

				pv = pv + Long.parseLong(v.substring(0, fsIdx));

				String uvStr = v.substring(fsIdx + 1);
				// BUGFIX: the combiner historically serialized the set via
				// toString() ("[a, b]"); strip BOTH bracket styles and trim each
				// id, otherwise "[a", " a" and "a" were counted as three distinct
				// cookies and the UV count was inflated. Also compatible with the
				// clean "a,b" form the fixed combiner emits.
				uvStr = uvStr.replace("{", "").replace("}", "");
				uvStr = uvStr.replace("[", "").replace("]", "");
				for (String uvS : uvStr.split(",")) {
					uvS = uvS.trim();
					if (uvS.length() > 0) {
						cookieIdSet.add(uvS);
					}
				}
  			}

  			uv = cookieIdSet.size();

  			// addtime column: today's date truncated to midnight, in epoch seconds.
  			Long curTimes = 0L;
			try {
				String d = sdf.format(new Date());
				curTimes = sdf.parse(d).getTime();
			} catch (ParseException e) {
				e.printStackTrace();
			}
			curTimes = curTimes / 1000;

//  			CREATE TABLE `traffic_source_summary` (
//  				  `id` int(11) NOT NULL AUTO_INCREMENT,
//  				  `addtime` int(11) NOT NULL COMMENT '添加时间',
//  				  `fromtime` int(11) NOT NULL COMMENT '周期时间',
//  				  `uv` int(11) NOT NULL DEFAULT '0',
//  				  `len` int(11) DEFAULT '1',
//  				  `platform` varchar(10) DEFAULT 'A' COMMENT '平台 A,web,m,app',
//  				  `cate_1` varchar(5) DEFAULT 'A' COMMENT '落地频道 1全站|2论坛|3目的地|4相册|5发现|6问答|7锦囊|8专题|9行程助手|10折扣|11首页|12个人中心|13搜索页|14微游记|15应用|16酒店|17机票|18签证|19特价酒店|20华人旅馆|21邮轮|22租|23其他|24unkown|25游记|26周边|27微锦囊|28fanshop|29booking_goto|30结伴',
//  				  `source_1` varchar(5) DEFAULT 'A' COMMENT '来源频道 1全部，2站内，3站外',
//  				  `source_2` varchar(5) DEFAULT 'A' COMMENT '来源频道 全部：1全站；\n                                                      站内：2论坛|3目的地|4相册|5发现|6问答|7锦囊|8专题|9行程助手|10折扣|11首页|12个人中心|13搜索页|14微游记|15应用|16酒店|17机票|18签证|19特价酒店|20华人旅馆|21邮轮|22租|23其他|24unkown|25游记|26周边|27微锦囊|28fanshop|29booking_goto|30结伴\n                                                      站外：2直接|3seo|4SEM|5EDM|5weibo|45weixin',
//		  			  `uv_rate` float(10,2) DEFAULT '0.00' COMMENT '占比',
//  				  PRIMARY KEY (`id`),
//  				  KEY `allIdx` (`fromtime`,`platform`,`cate_1`,`source_1`,`source_2`,`source_3`) USING BTREE
//  				) ENGINE=MyISAM DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC;
  			keyVal.set(curTimes + FS + DATE + FS + uv + FS + "1");
  			val.set(key);
  			output.collect(keyVal, val);
          }

     }
     /**
     * Job driver.
     * args[0]: dateList, e.g. "20130101,20130106" (start,end) or a single date;
     * args[1]: cycle — "day", "week" or "mouth" (historical spelling; "month"
     * is also accepted).
     * @param args command-line arguments as described above
     * @throws URISyntaxException
     * @throws ParseException
     */
     public static void main(String[] args) throws IOException, URISyntaxException, ParseException{
        JobConf baseConf = new JobConf(UserTracksSummary.class);
        baseConf.set("mapred.job.queue.name", "regular"); // default,regular,realtime
	    String[] otherArgs = new GenericOptionsParser(baseConf, args).getRemainingArgs();
	    int exitCode = 127;

		if (otherArgs.length < 2) {
			System.out.println("************************************************************");
			System.out.println("************************************************************");
			System.out.println("Usage: please input 1 params, for example: file.jar args[0] args[1]");
			System.out.println("args[0] is dateList: 20130101,20130106   or 20130106");
			System.out.println("args[1] is day or week or  mouth ");
			System.out.println("************************************************************");
			System.out.println("************************************************************");
			System.exit(exitCode);
		}

		String startDate = otherArgs[0].split(",")[0];
		String endDate = otherArgs[0].split(",").length == 2 ? otherArgs[0].split(",")[1] : startDate;

		String timeCycle = otherArgs[1];

		boolean isWeek = "week".equalsIgnoreCase(timeCycle);
		// "mouth" is the historical (misspelled) token; accept "month" too.
		boolean isMonth = "mouth".equalsIgnoreCase(timeCycle) || "month".equalsIgnoreCase(timeCycle);

		// week/month cycles always start at the beginning of the current period.
		if (isWeek) {
			startDate = MyDateUtil.getMondayOfThisWeek();
		} else if (isMonth) {
			startDate = MyDateUtil.getFirstDayOfThisMouth();
		}

		String baseWeb = "/hive/warehouse/qyer.db/user_tracks/platform=web/logdate=";
		String baseMobile = "/hive/warehouse/qyer.db/user_tracks/platform=mobile/logdate=";
		String baseOutPath = "/flume/summary/UserTracksSummary/" + timeCycle;

		String cateMapPath = "/flume/dict/cate_business/20151020";
		String sourcetypePath = "/flume/dict/sourcetype/20151020";

		SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");

		// Single-day mode: one independent job per date.
		if ("day".equalsIgnoreCase(timeCycle)) {

			for (String runDate : MyDateUtil.getDateList(startDate, endDate, 0)) {
				// BUGFIX: use a fresh JobConf per day. The original reused one
				// conf across iterations, so FileInputFormat.addInputPath and
				// DistributedCache.addCacheFile accumulated and every later job
				// reprocessed all earlier days' inputs.
				JobConf conf = new JobConf(baseConf);

				// fromtime = start of the day, epoch seconds.
				conf.set("DATE", String.valueOf(sdf.parse(runDate).getTime() / 1000));
				configureJob(conf);
				conf.setNumReduceTasks(3);
				conf.setMemoryForReduceTask(2048);

				addDictToCache(conf, sourcetypePath, "sourcetype.txt");
				addDictToCache(conf, cateMapPath, "cate_business.txt");

				FileInputFormat.addInputPath(conf, new Path(baseWeb + runDate + "/*/*"));
				FileInputFormat.addInputPath(conf, new Path(baseMobile + runDate + "/*/*"));

				String outPath = baseOutPath + "/" + runDate;
				FileSystem.get(conf).delete(new Path(outPath), true);
				FileOutputFormat.setOutputPath(conf, new Path(outPath));

				JobClient.runJob(conf);
				System.out.println("--------------------------------------------END1" + runDate);
			}

		// Current week / current month: one job over the whole date range.
		} else if (isWeek || isMonth) {
			JobConf conf = baseConf;

			// fromtime = start of the period, epoch seconds.
			conf.set("DATE", String.valueOf(sdf.parse(startDate).getTime() / 1000));
			configureJob(conf);
			conf.setNumReduceTasks(1);
			conf.setMemoryForMapTask(4096);

			addDictToCache(conf, sourcetypePath, "sourcetype.txt");
			addDictToCache(conf, cateMapPath, "cate_business.txt");

			for (String runDate : MyDateUtil.getDateList(startDate, endDate, 0)) {
				System.out.println(" add input path :" + runDate);
				FileInputFormat.addInputPath(conf, new Path(baseWeb + runDate + "/*/*"));
				FileInputFormat.addInputPath(conf, new Path(baseMobile + runDate + "/*/*"));
			}

			String outPath = baseOutPath + "/" + endDate;
			FileSystem.get(conf).delete(new Path(outPath), true);
			FileOutputFormat.setOutputPath(conf, new Path(outPath));

			JobClient.runJob(conf);
		}

		System.out.println("--------------------------------------------END1");
     }

     // Wires up the classes and formats shared by both job variants.
     private static void configureJob(JobConf conf) {
		conf.setJobName("UserTracksSummary");
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);
		conf.setMapperClass(MapClass.class);
		conf.setCombinerClass(Combiner.class);
		conf.setReducerClass(Reduce.class);
		conf.setInputFormat(RCFileInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);
     }

     /**
      * Registers every file named {@code fileName} under HDFS directory
      * {@code dirPath} in the DistributedCache, with a "#fileName<i>" symlink
      * suffix so mappers can locate it by name.
      */
     private static void addDictToCache(JobConf conf, String dirPath, String fileName)
			throws IOException, URISyntaxException {
		FileSystem hdfs = FileSystem.get(conf);
		FileStatus[] fileList = hdfs.listStatus(new Path(dirPath));
		for (int i = 0; i < fileList.length; i++) {
			String name = fileList[i].getPath().getName();
			if (name.contains(fileName)) {
				System.out.println("----------------->>> " + dirPath + "/" + name);
				String inPath2Link = new Path(dirPath + "/" + name).toUri().toString() + "#" + fileName + i;
				DistributedCache.addCacheFile(new URI(inPath2Link), conf);
			}
		}
     }
}
