package  com.qyer.log.job.sum;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import com.qyer.util.log.common.Constants;
import com.qyer.util.log.entity.TrackLog;
import com.qyer.util.log.service.LogService;
import com.qyer.util.log.service.impl.LogServiceImpl;
import com.qyer.util.log.util.CateUtil;
import com.qyer.util.log.util.LogFatory;
import com.qyer.util.log.util.MyDateUtil;
import com.qyer.util.log.util.StatisticsUtils;
import com.qyer.util.log.util.TrackInputFormat;
import com.qyer.util.log.util.TrackSourceType;
import com.qyer.util.log.util.TrackSplitUtils;

import com.hadoop.mapreduce.LzoTextInputFormat;

public class TrackSummary_Lzo {

	// Traffic summary (volume metrics) job.

	/**
	 * Dimensions:
	 *   platform / channel  - platform
	 *   device / os         - device
	 *   cate1               - content channel (category)
	 *   source1, source2    - traffic source
	 *   daytime             - date
	 *   ugctype             - page (UGC) type
	 *
	 * Metrics:
	 *   `pv` int(11) DEFAULT NULL COMMENT 'page views',
	 *   `uv` int(11) DEFAULT NULL COMMENT 'unique visitors',
	 *   `visit` int(11) DEFAULT NULL COMMENT 'visit (session) count',
	 *   `avgpage` float DEFAULT NULL COMMENT 'average pages per visit',
	 *   `avgtime` float DEFAULT NULL COMMENT 'average time on site',  // TODO(review): confirm unit (seconds?)
	 *   `outrate` float DEFAULT NULL COMMENT 'bounce rate',
	 *   `ips`  distinct IP count
	 */
	public static class M1 extends Mapper<LongWritable, Text, Text, Text> {
		private Text oKey = new Text();
		private Text oVal = new Text();

		// Dictionaries loaded from the DistributedCache in setup(); shared per JVM.
		static HashMap<String, String> sourceMap = new HashMap<String, String>();
		static HashMap<String, String> cateMap = new HashMap<String, String>();
		static HashMap<String, String> platformMap = new HashMap<String, String>();

		final static String FS = "\t";

		/**
		 * Loads the sourcetype / cate_business / platform dictionaries from the
		 * files main() placed in the DistributedCache.
		 */
		@Override
		protected void setup(Context context) throws IOException, InterruptedException {
			try {
				Path[] paths = DistributedCache.getLocalCacheFiles(context.getConfiguration());
				System.out.println("paths====" + paths);
				if (paths == null) {
					return;
				}
				for (Path path : paths) {
					String p = path.toString();
					System.out.println("path====" + p);
					if (p.contains("sourcetype.txt")) {
						loadSourceMap(p);
					} else if (p.contains("cate_business.txt")) {
						loadDict(p, cateMap);
					} else if (p.contains("platform.txt")) {
						loadDict(p, platformMap);
					}
				}
			} catch (IOException e) {
				// Best-effort, as before: a broken dictionary leaves the maps
				// partially filled instead of failing the whole task.
				e.printStackTrace();
			}
		}

		// Loads type-"3" rows of sourcetype.txt: key = col[4], value = col[0]@col[1].
		// Each file gets its own reader, closed in finally (the old code leaked
		// every reader but the last one).
		private static void loadSourceMap(String file) throws IOException {
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(file), "utf-8"));
			try {
				String line = null;
				while (null != (line = in.readLine())) {
					String[] strArr = line.split("\t", -1);
					// BUGFIX: strArr[4] is read below, so at least 5 columns are
					// required; the old "< 4" check allowed an
					// ArrayIndexOutOfBoundsException on 4-column rows.
					if (strArr.length < 5) {
						continue;
					}
					if ("3".equals(strArr[0])) {
						sourceMap.put(strArr[4], strArr[0] + "@" + strArr[1]);
					}
				}
			} finally {
				in.close();
			}
		}

		// Loads a tab-separated dictionary file: key = col[1],col[2], value = col[3].
		private static void loadDict(String file, HashMap<String, String> map) throws IOException {
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(file), "utf-8"));
			try {
				String line = null;
				while (null != (line = in.readLine())) {
					String[] strArr = line.split("\t", -1);
					if (strArr.length < 4) {
						continue;
					}
					map.put(strArr[1] + "," + strArr[2], strArr[3]);
				}
			} finally {
				in.close();
			}
		}

		// Log-cleaning service: parses a raw log line into a TrackLog.
		LogService lli = (LogService) LogFatory.getInstance(LogServiceImpl.class.getName());

		TrackLog tracklog = null;

		/**
		 * For every page-view log line, emits one record per dimension
		 * combination (platform x cate x ugctype x source), including the "A"
		 * roll-up ("all") bucket of each dimension.
		 *
		 * Key:   platform \t cate \t ugctype \t source1 \t source2
		 * Value: visit|cookieId("E"=none)|pv|bounce|newVisit|timeOnSite|jumpIn|ip
		 */
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			tracklog = lli.getTrackFormat(value.toString());

			// Only page-view records contribute to these metrics.
			if (!Constants.LOG_TYPE_PV.equals(tracklog.getType())) {
				return;
			}

			String url = tracklog.getCurUrl();
			String refer = tracklog.getRefUrl();

			String source = TrackSourceType.getSourceType(refer, url, sourceMap, cateMap, tracklog.getUserAgent());
			String cate = CateUtil.getCate(url, cateMap);
			String visits = tracklog.getSession_timestamp();

			for (String ugctype : new String[] { "A", tracklog.getUgc_type() }) {
				for (String platform : new String[] { "A", tracklog.getPlatform() }) {
					for (String c : new String[] { "A", cate }) {
						for (String s : StatisticsUtils.getDimListTSource(source)) {
							s = s.replace("@", FS);
							String outKey = platform + FS + c + FS + ugctype + FS + s;
							// visit | uv-cookie(E=none) | pv | bounce | newVisit | timeOnSite | jumpIn | ip
							// (bounce/newVisit/timeOnSite/jumpIn are always 0 here; the
							// reducer derives its metrics from visit/pv/ip.)
							String outValue = visits + "|E|1|0|0|0|0|" + tracklog.getUserIp();
							oKey.set(outKey);
							oVal.set(outValue);
							context.write(oKey, oVal);
						}
					}
				}
			}
		}
	}

	
	public static class R1 extends Reducer<Text, Text, Text, Text> {
		Text oKey = new Text();
		Text oVal = new Text();
		// Run date (yyyyMMdd) from the job configuration; cached on first reduce call.
		String DATE;
		final String FS = "\t";

		/**
		 * Aggregates mapper values of the form
		 * visit|cookieId|pv|bounce|newVisit|timeOnSite|jumpIn|ip
		 * into: pv, uv, visit, bounceRate, avgTime, avgPage, ips.
		 *
		 * Output key:   DATE \t mapper-key
		 * Output value: pv \t uv \t visit \t bounceRate \t avgTime \t avgPage \t ips
		 */
		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException,
				InterruptedException {

			if (DATE == null) {
				DATE = context.getConfiguration().get("DATE");
			}
			Set<String> cookieIdSet = new HashSet<String>();
			Set<String> visitSet = new HashSet<String>();
			Set<String> ipSet = new HashSet<String>();
			long pv = 0l;
			long bounce = 0l;
			long timeOnSite = 0l;

			for (Text val : values) {
				String[] valArr = val.toString().split("\\|");
				visitSet.add(valArr[0]);
				// "E" marks a record with no cookie id; it does not count toward uv.
				if (!"E".equalsIgnoreCase(valArr[1])) {
					cookieIdSet.add(valArr[1]);
				}
				pv += Integer.parseInt(valArr[2]);
				bounce += Integer.parseInt(valArr[3]);
				timeOnSite += Long.parseLong(valArr[5]);
				// ip is the 8th field; guard older records that lack it.
				if (valArr.length > 7) {
					ipSet.add(valArr[7]);
				}
			}

			// Distinct counts, floored at 1 so the divisions below cannot hit zero.
			long uv = cookieIdSet.size() == 0 ? 1 : cookieIdSet.size();
			long visit = visitSet.size() == 0 ? 1 : visitSet.size();
			long ips = ipSet.size() == 0 ? 1 : ipSet.size();

			String bounceRate = String.format("%.2f", ((float) bounce / visit));
			String avgPage = String.format("%.2f", ((float) pv / visit));
			// timeOnSite is accumulated in milliseconds — TODO(review) confirm upstream unit.
			String avgTime = String.format("%.3f", ((float) timeOnSite / 1000 / visit));

			oKey.set(DATE + FS + key);
			oVal.set(pv + FS + uv + FS + visit + FS + bounceRate + FS + avgTime + FS + avgPage + FS + ips);
			context.write(oKey, oVal);
		}
	}

	/**
	 * Adds every file under {@code dirPath} whose name contains {@code fileName}
	 * to the job's DistributedCache, symlinked as fileName + index (the names the
	 * mapper's setup() looks for).
	 */
	private static void addDictToCache(Configuration conf, Job job, String dirPath, String fileName)
			throws IOException, URISyntaxException {
		FileSystem hdfs = FileSystem.get(conf);
		FileStatus[] fileList = hdfs.listStatus(new Path(dirPath));
		for (int i = 0; i < fileList.length; i++) {
			String name = fileList[i].getPath().getName();
			if (name.contains(fileName)) {
				System.out.println("----------------->>> " + dirPath + "/" + name);
				String inPathLink = new Path(dirPath + "/" + name).toUri().toString() + "#" + fileName + i;
				DistributedCache.addCacheFile(new URI(inPathLink), job.getConfiguration());
			}
		}
	}

	/**
	 * Entry point. args[0] is a date list: "20130101,20130106" (start,end) or a
	 * single date "20130106". Runs one summary job per day over
	 * /flume/clean/track/&lt;date&gt;, writing to /flume/summary/track/&lt;date&gt;.
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException,
			URISyntaxException {

		Configuration conf = new Configuration();
//		conf.set("mapred.job.queue.name", "regular"); // default,regular,realtime
//		conf.set("mapreduce.reduce.memory.mb" ,"2048");
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		int exitCode = 127;

		if (otherArgs.length != 1) {
			System.out.println("************************************************************");
			System.out.println("************************************************************");
			System.out.println("Usage: please input 1 params, for example: file.jar args[0]");
			System.out.println("args[0] is dateList: 20130101,20130106 or 20130106");
			System.out.println("************************************************************");
			System.out.println("************************************************************");
			System.exit(exitCode);
		}

		String[] dates = otherArgs[0].split(",");
		String startDate = dates[0];
		String endDate = dates.length == 2 ? dates[1] : startDate;

		String baseInPath01 = "/flume/clean/track";
		String baseOutPath = "/flume/summary/track";
		String cateMapPath = "/flume/dict/cate_business/20151020";
		String sourcetypePath = "/flume/dict/sourcetype/20151020";
		String platformPath = "/flume/dict/platform/20151217";

		// Scheduled run: typically yesterday's date.
		for (String runDate : MyDateUtil.getDateList(startDate, endDate, 0)) {
			conf.set("DATE", runDate);
			Job job = new Job(conf, "TrackSummary_Lzo");
			job.setInputFormatClass(LzoTextInputFormat.class);
			job.setJarByClass(TrackSummary_Lzo.class);
			job.setMapperClass(M1.class);
			job.setReducerClass(R1.class);
			job.setNumReduceTasks(3);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(Text.class);
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Text.class);

			// Distribute the three dictionaries the mapper loads in setup().
			addDictToCache(conf, job, sourcetypePath, "sourcetype.txt");
			addDictToCache(conf, job, cateMapPath, "cate_business.txt");
			addDictToCache(conf, job, platformPath, "platform.txt");

			FileInputFormat.addInputPath(job, new Path(baseInPath01 + "/" + runDate));

			// Remove any previous output for this day before re-running.
			String outPath = baseOutPath + "/" + runDate;
			FileSystem.get(conf).delete(new Path(outPath), true);
			FileOutputFormat.setOutputPath(job, new Path(outPath));
			exitCode = job.waitForCompletion(true) ? 0 : 1;
			System.out.println("--------------------------------------------END1");

		}
		System.exit(exitCode);

	}

}
