package com._58city.spark.app.mr;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.DaoUtil;
import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.TrackDto;
import com._58city.spark.app.dao.TrackValue;
import com._58city.spark.app.ext.JsonUtil;
import com._58city.spark.app.ext.YURLDecode;
import com._58city.spark.app.ext.dto.DispCate;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TrackModel;
import com._58city.spark.app.redis.TrackSearchWord;
import com._58city.spark.app.redis.TransferDimensionUtil;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

	/**
	 * Spark streaming computation for track (traffic) logs（流量的Spark运算类）:
	 * maps raw kafka payloads to per-dimension (key, TrackValue) pairs, reduces
	 * them per batch, and ships the aggregated pv/listPv/vpPv/uv counters to redis.
	 * @author lihao
	 */
public class MrKafkaTrackAPP implements MrKafkaInterface<String, TrackValue>{

		// Serialization id: instances are captured by Spark closures and shipped to executors.
		private static final long serialVersionUID = 5276176426349314034L;
		// One kafka payload may batch several log lines joined with '\n'; see mapPair().
		private static Splitter splitter = Splitter.on("\n");
		// Filter rule DSL: ';' separates OR-groups, '&' separates substrings that must
		// ALL appear in a line for it to be computed (parsed by configure(),
		// evaluated by shouldCompute()).
		private static String filterRule = "\"pagetype\":\"list\"&\"actiontype\":\"enter\";\"pagetype\":\"list\"&\"actiontype\":\"nextpage\";\"pagetype\":\"detail\"&\"actiontype\":\"show\";\"pagetype\":\"myjob\";\"pagetype\":\"preferred\";\"pagetype\":\"schoolrecruit\"";
		private Logger log = LoggerFactory.getLogger(MrKafkaTrackAPP.class);
		
		// Broadcast category dictionary (cateid -> DispCate), injected via setBc_cate_map();
		// used by toDto() to resolve a record's business line.
	    private Broadcast<Map<Long, DispCate>> bc_cate_map;
		
		// Dimension field names used to assemble the per-record map key (SparkUtil.getMapKeyNotTime).
		private String[] keys;
		
		// Streaming batch interval; used to bucket timestamps in foreachRDD()
		// via SparkUtil.makeFormateDate. Units presumably seconds — confirm against caller.
		private int batchInterval;
		
	    private int platform_type; //PC(-1), M(0), APP(2)
	    // Parse filterRule into groupList once, when the class loads.
	    static{
	    	configure();
	    }
		/**
		 * Builds the computation with its grouping dimensions, batch window and platform.
		 *
		 * @param keys          dimension field names used to build the per-record map key
		 * @param batchInterval streaming batch interval used to bucket timestamps
		 * @param platform_type platform flag: PC(-1), M(0), APP(2)
		 */
		public MrKafkaTrackAPP(String[] keys, int batchInterval, int platform_type) {
			this.keys = keys;
			this.batchInterval = batchInterval;
			this.platform_type = platform_type;
		}

		// Injects the broadcast category dictionary (cateid -> DispCate) that toDto()
		// reads to resolve a record's business line. Must be set before streams start.
		public void setBc_cate_map(Broadcast<Map<Long, DispCate>> bc_cate_map) {
			this.bc_cate_map = bc_cate_map;
		}
		
		/**
		 * Extracts the search keyword from a request URL.
		 *
		 * <p>Matches the first occurrence of {@code /key_<word>}, {@code ?key=<word>}/
		 * {@code &key=<word>} or {@code /jh_<word>} and URL-decodes it via
		 * {@link YURLDecode#searchWordDecode(String)}.
		 *
		 * @param url request url taken from the track log
		 * @return the decoded search word, or {@code ""} when the url carries none
		 */
		private String get_search_word(String url){
			// BUGFIX: the class was written as [^?^/^&] — inside a character class a
			// non-leading '^' is a literal, so keywords containing '^' were truncated.
			// Only '?', '/' and '&' are meant to terminate the keyword.
			String keyWordReStr = "(?:/key_|[&?]key=|/jh_)([^?/&]+)";
			// NOTE(review): the pattern is recompiled on every call; hoisting it to a
			// static final Pattern would avoid that if this path is hot.
			Pattern keyWordRePatten = Pattern.compile(keyWordReStr);
			Matcher matc = keyWordRePatten.matcher(url);
			if (!matc.find()) {
				return "";
			}
			return YURLDecode.searchWordDecode(matc.group(1));
		}
		
		/**
		 * Parses one raw log line into a {@link TrackDto}.
		 *
		 * <p>Resolves the dto's business line from the broadcast category dictionary
		 * (unknown categories are bucketed as "other"), stamps the platform for
		 * non-APP sources, and — when the url carries a search keyword — pushes a
		 * {@link TrackSearchWord} json onto redis asynchronously (fire-and-forget).
		 *
		 * @param content a single log line
		 * @return the parsed dto, or {@code null} when the line is blank or unparseable
		 */
		private TrackDto toDto(String content){
			if(StringUtils.isBlank(content))
				return null;
			TrackDto dto = DaoUtil.track_app(content);
			if(null == dto)
				return null;
			long cateid = dto.getLastCateId();
			Map<Long, DispCate> cate_map = bc_cate_map.value();
			DispCate dispCate = cate_map.get(cateid);
			// Unknown categories fall back to the "other" business line.
			dto.setBusiLine(dispCate != null ? dispCate.getBusiness_type_biz() : "other");
			// APP(2) dtos already carry their platform; PC(-1)/M(0) must be stamped here.
			if(platform_type != 2) dto.setPlatform(platform_type);
			
			if(StringUtils.isNotBlank(dto.getUrl())){
				String searchWord = this.get_search_word(dto.getUrl());
				if(StringUtils.isNotBlank(searchWord)){
					TrackSearchWord tsw = new TrackSearchWord(dto.getPlatform(), dto.getBusiLine(),
							searchWord);
					final String json_searchWord = JsonUtil.toJson(tsw);
					try {
						// BUGFIX: the original created a new cached thread pool per record
						// and never shut it down, leaking pools and threads. Shutting the
						// executor down right after submit lets the queued push finish and
						// then releases its thread.
						// NOTE(review): a shared static executor would avoid the per-record
						// thread creation entirely.
						ExecutorService es = Executors.newSingleThreadExecutor();
						es.submit(new Runnable() {
							public void run() {
								RedisUtil.cache.lpushString(Constant.SEARCH_WORD_TRACK, json_searchWord);
							}
						});
						es.shutdown();
					} catch (Exception e) {
						// Best effort: losing one search-word sample must not fail the record.
						log.warn("search word redis push failed", e);
					}
				}
			}
			return dto;
		}


		/**
		 * Maps each kafka stream of raw payloads into (dimensionKey, TrackValue) pairs.
		 *
		 * <p>A payload may batch several '\n'-separated log lines; each surviving line
		 * (one that matches a filter rule and parses into a dto with a usable key)
		 * yields one pair carrying pv=1, its cookie, and a listPv/vpPv flag derived
		 * from the page type.
		 *
		 * @param kafkaStreams raw (topic-key, payload) streams from kafka
		 * @return one (key, TrackValue) stream per input stream
		 * @see com._58city.spark.app.mr.MrKafkaInterface#mapPair(java.util.List)
		 */
		public List<JavaPairDStream<String, TrackValue>> mapPair(
				List<JavaPairDStream<String, String>> kafkaStreams) {
			List<JavaPairDStream<String, TrackValue>> list = 
					new ArrayList<JavaPairDStream<String,TrackValue>>();
			for(JavaPairDStream<String, String> stream : kafkaStreams){
				JavaPairDStream<String, TrackValue> mapStream = 
					stream.flatMapToPair(new PairFlatMapFunction<Tuple2<String,String>, String, TrackValue>() {
						private static final long serialVersionUID = 1L;

						public Iterable<Tuple2<String, TrackValue>> call(Tuple2<String, String> event) throws Exception {
							List<Tuple2<String, TrackValue>> flatedList= Lists.newArrayList();
							try {
								// One kafka payload can batch several log lines.
								String content = event._2;
								for(String oneLine : splitter.split(content)){
									if(!shouldCompute(oneLine)){ // matches no filter rule: drop the line
										continue;
									}
									TrackDto dto = toDto(oneLine);
									if(null != dto){
										// A null key means the dto lacks the configured dimensions.
										String key = SparkUtil.getMapKeyNotTime(dto, keys);
										if(null != key){
											long pv = 1; // every surviving line counts one page view
											String cookie = dto.getCookie();
											Set<String> cookies = new HashSet<String>();
											cookies.add(cookie);
											// Classify the view: detail pages feed vpPv, list pages feed listPv.
											String pageType = dto.getPageType();
											long listPv = 0;
											long vpPv = 0;
											if(StringUtils.isNotBlank(pageType)){
												if(pageType.contains(TrackDto.PAGE_TYPE_DETAIL)) vpPv = 1;
												else if(pageType.contains(TrackDto.PAGE_TYPE_LIST)) listPv = 1;
												else{
													log.warn("pagetype is:"+pageType);
												}
											}else{
												log.error("empty pagetype!");
											}
											TrackValue value = new TrackValue(pv, listPv, vpPv, cookies);
											flatedList.add(new Tuple2<String, TrackValue>(key, value));
										}
									}
								}
							} catch (Exception e) {
								// Contain per-event failures so one bad payload cannot kill the stream.
								log.error("Track map ERROR!", e);
							}
							return flatedList;
						}
				});
				
				list.add(mapStream);
			}
			return list;
		}

//		public List<JavaPairDStream<String, TrackValue>> mapPair(
//				List<JavaPairDStream<String, String>> kafkaStreams) {
//			List<JavaPairDStream<String, TrackValue>> list = 
//					new ArrayList<JavaPairDStream<String,TrackValue>>();
//			for(JavaPairDStream<String, String> stream : kafkaStreams){
//				JavaPairDStream<String, TrackValue> mapStream = stream.
//						mapToPair(new PairFunction<Tuple2<String,String>, String, TrackValue>() {
//							private static final long serialVersionUID = 3742490077169282334L;
//
//							public Tuple2<String, TrackValue> call(
//									Tuple2<String, String> event) throws Exception {
//								try {
//									String content = event._2;
//									log.info(content);
//									TrackDto dto = toDto(content.split("\n")[0]);
//									
//									if(null != dto){
//										String key = SparkUtil.getMapKeyNotTime(dto, keys);
//										if(null != key){
//											long pv = 1;
//											String cookie = dto.getCookie();
//											Set<String> cookies = new HashSet<String>();
//											cookies.add(cookie);
//											String pageType = dto.getPageType();
//											long listPv = 0;
//											long vpPv = 0;
//											if(StringUtils.isNotBlank(pageType)){
//												if(pageType.contains(TrackDto.PAGE_TYPE_DETAIL)) vpPv = 1;
//												else if(pageType.contains(TrackDto.PAGE_TYPE_LIST)) listPv = 1;
//											}
//											TrackValue value = new TrackValue(pv, listPv, vpPv, cookies);
//											return new Tuple2<String, TrackValue>(key, value);
//										}
//									}
//								} catch (Exception e) {
//									log.error("Track map ERROR!", e);
//								}
//								return new Tuple2<String, TrackValue>(SparkUtil.ERROR, new TrackValue());
//							}
//				});
//				list.add(mapStream);
//			}
//			return list;
//		}
		
		
		/**
		 * Merges per-key {@link TrackValue}s within a batch: pv/listPv/vpPv are summed
		 * and cookie sets are unioned (the union's size later becomes uv).
		 * A failed merge is logged and replaced by an empty TrackValue so the stream
		 * keeps running.
		 *
		 * @param mapStream per-record (key, TrackValue) pairs from mapPair()
		 * @return one aggregated TrackValue per key per batch
		 * @see com._58city.spark.app.mr.MrKafkaInterface#reducePair(org.apache.spark.streaming.api.java.JavaPairDStream)
		 */
		public JavaPairDStream<String, TrackValue> reducePair(
				JavaPairDStream<String, TrackValue> mapStream) {
			return mapStream.reduceByKey(new Function2<TrackValue, TrackValue, TrackValue>() {
				private static final long serialVersionUID = 6264099295078802382L;

				public TrackValue call(TrackValue left, TrackValue right) throws Exception {
					try {
						Set<String> mergedCookies = new HashSet<String>(left.getCookies());
						mergedCookies.addAll(right.getCookies());
						long pvSum = left.getPv() + right.getPv();
						long listPvSum = left.getListPv() + right.getListPv();
						long vpPvSum = left.getVpPv() + right.getVpPv();
						return new TrackValue(pvSum, listPvSum, vpPvSum, mergedCookies);
					} catch (Exception e) {
						log.error("Track reduce ERROR!",e);
					}
					return new TrackValue();
				}
			});
		}

		/* (non-Javadoc)
		 * @see com._58city.spark.app.mr.MrKafkaInterface#foreachRDD(org.apache.spark.streaming.api.java.JavaPairDStream)
		 */
//		public void foreachRDD(JavaPairDStream<String, TrackValue> reduceStream) {
//			reduceStream.foreachRDD(new Function<JavaPairRDD<String,TrackValue>, Void>() {
//				private static final long serialVersionUID = -8199089522425453338L;
	//
//				public Void call(JavaPairRDD<String, TrackValue> v)
//						throws Exception {
//					long now = new Date().getTime();
//				    final long current_time = SparkUtil.makeFormateDate(now, batchInterval);
//					
//					v.foreach(new VoidFunction<Tuple2<String,TrackValue>>() {
//						private static final long serialVersionUID = 5929818265457879114L;
	//
//						public void call(Tuple2<String, TrackValue> tuple) throws Exception {
//							try {
//								if(null != tuple){
//									String keyStr = tuple._1;
//									if(!SparkUtil.ERROR.equals(keyStr)){
//										TrackValue value = tuple._2;
//										String[] key_values = keyStr.split(",");
//										
//										long pv = value.getPv();
//										long listPv = value.getListPv();
//										long vpPv = value.getVpPv();
//										int uv = value.getCookies().size();	
//										log.info("Track ========== key : " + keyStr 
//													+ ",time : " + current_time
//													+ ",pv : " + pv + ",listPv : " + listPv
//													+ ",vpPv : " + vpPv
//													+ ",uv : " + uv);
//										TrackModel model = new TrackModel();
//										model.setTime(current_time);
//										TransferDimensionUtil.dimension(model, keys, Integer.parseInt(key_values[0]),
//												key_values[1]);
//										model.setPv(pv);
//										model.setListpv(listPv);
//										model.setVppv(vpPv);
//										model.setUv(uv);
//										try {
//											RedisUtil.cache.lpushString(Constant.TRACK_REDIS_QUEUE_KEY,
//													JsonUtil.toJson(model));
//										} catch (Exception e) {
//											log.error("track redis insert error!");
//										}
//										}
//									}
//							} catch (Exception e) {
//								log.error("Track foreachRDD ERROR!",e);
//							}
//						}
//					});
//					return null;
//				}
//			});
//			
//		}
		
		/**
		 * Persists each reduced batch. Every partition converts its (key, TrackValue)
		 * tuples into {@link TrackModel}s stamped with the batch-aligned timestamp and
		 * ships the whole partition's list to redis asynchronously.
		 *
		 * @param reduceStream aggregated (key, TrackValue) pairs from reducePair()
		 */
		public void foreachRDD(JavaPairDStream<String, TrackValue> reduceStream) {
			reduceStream.foreachRDD(new Function<JavaPairRDD<String,TrackValue>, Void>() {
				private static final long serialVersionUID = -8199089522425453338L;

				public Void call(JavaPairRDD<String, TrackValue> v)
						throws Exception {
					// Align "now" to the batch interval so every record in this batch
					// shares the same time bucket.
					long now = new Date().getTime();
					final long current_time = SparkUtil.makeFormateDate(now, batchInterval);
					
					v.foreachPartition(new VoidFunction<Iterator<Tuple2<String,TrackValue>>>() {
						private static final long serialVersionUID = 3040955589442973044L;

						public void call(Iterator<Tuple2<String, TrackValue>> iter)
								throws Exception {
							if(null == iter){
								return;
							}
							final List<TrackModel> list = new ArrayList<TrackModel>();
							while(iter.hasNext()){
								Tuple2<String, TrackValue> tuple = iter.next();
								String keyStr = tuple._1;
								// Skip sentinel keys produced by failed map records.
								if(SparkUtil.ERROR.equals(keyStr)){
									continue;
								}
								TrackValue value = tuple._2;
								String[] key_values = keyStr.split(",");
								
								long pv = value.getPv();
								long listPv = value.getListPv();
								long vpPv = value.getVpPv();
								int uv = value.getCookies().size(); // uv = distinct cookies
								log.info("Track ========== key : " + keyStr 
											+ ",time : " + current_time
											+ ",pv : " + pv + ",listPv : " + listPv
											+ ",vpPv : " + vpPv
											+ ",uv : " + uv);
								TrackModel model = new TrackModel();
								model.setTime(current_time);
								// Presumably key_values[0] is the platform and key_values[1]
								// the remaining dimension string (layout set by
								// SparkUtil.getMapKeyNotTime) — TODO confirm.
								TransferDimensionUtil.dimension(model, keys, Integer.parseInt(key_values[0]),
										key_values[1]);
								model.setPv(pv);
								model.setListpv(listPv);
								model.setVppv(vpPv);
								model.setUv(uv);
								list.add(model);
							}
							
							if(!list.isEmpty()){
								// BUGFIX: the original created a new cached thread pool per
								// partition and never shut it down, leaking threads. Shutting
								// the executor down right after submit lets the queued push
								// finish, then releases its worker thread.
								ExecutorService es = Executors.newSingleThreadExecutor();
								es.submit(new Runnable() {
									public void run() {
										RedisUtil.send(Constant.TRACK_REDIS_QUEUE_KEY, list);
									}
								});
								es.shutdown();
							}
						}
						
					});
					return null;
				}
			});
			
		}

		// Parsed form of filterRule: each element is one AND-group of substrings; a log
		// line is computed when every substring of at least one group appears in it.
		private static List<Set<String>> groupList;

		/**
		 * Parses {@code filterRule} into {@code groupList}: groups are ';'-separated,
		 * substrings within a group are '&'-separated. Invoked from the static initializer.
		 */
		public static void configure() {
			List<Set<String>> parsed = Lists.newArrayList();
			for (String group : filterRule.split(";")) {
				parsed.add(Sets.newHashSet(group.split("&")));
			}
			groupList = parsed;
		}

		/**
		 * Decides whether a raw log line should enter the computation: true when the
		 * line contains every substring of at least one filter group.
		 *
		 * @param line one raw log line
		 * @return true when the line matches any filter group
		 */
		public static boolean shouldCompute(String line){
			boolean matched = false;
			for (Set<String> rule : groupList) {
				if (contains(line, rule)) {
					matched = true;
					break;
				}
			}
			return matched;
		}
		
		/**
		 * Reports whether {@code body} contains every substring in {@code group}.
		 *
		 * @param body  the log line to inspect
		 * @param group the substrings that must all be present
		 * @return true only when no substring is missing
		 */
		private static boolean contains(String body, Set<String> group) {
			for (String needle : group) {
				if (body.indexOf(needle) < 0) {
					return false;
				}
			}
			return true;
		}		
	}

