package com._58city.spark.app.mr;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.DaoUtil;
import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.TrackDto;
import com._58city.spark.app.dao.TrackValue;
import com._58city.spark.app.ext.JsonUtil;
import com._58city.spark.app.ext.YURLDecode;
import com._58city.spark.app.ext.dto.DispCate;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TrackModel;
import com._58city.spark.app.redis.TrackSearchWord;
import com._58city.spark.app.redis.TransferDimensionUtil;
import com._58city.spark.consumer.kafka.MessageAndMetadata;

public class MrKafkaTrack2 implements MrKafkaInterface2<String, TrackValue>{
    
	private static final long serialVersionUID = 7435731666723915534L;

	private Logger log = LoggerFactory.getLogger(MrKafkaTrack2.class);
	
    private Broadcast<Map<Long, DispCate>> bc_cate_map;
	
	private String[] keys;
	
	private int batchInterval;
	
    private int platform_type; //PC(-1), M(0), APP(2)
	
	public MrKafkaTrack2(String[] keys, int batchInterval, int platform_type) {
		super();
		this.keys = keys;
		this.batchInterval = batchInterval;
		this.platform_type = platform_type;
	}

	public void setBc_cate_map(Broadcast<Map<Long, DispCate>> bc_cate_map) {
		this.bc_cate_map = bc_cate_map;
	}

	/**
	 * 从日志中找到搜索词
	 * @param url
	 * @return
	 */
	private String get_search_word(String url){
		String  keyWordReStr = "(?:/key_|[&?]key=|/jh_)([^?^/^&]+)";
		Pattern keyWordRePatten = Pattern.compile(keyWordReStr) ;
		Matcher matc = keyWordRePatten.matcher(url);
		if (!matc.find()) {
			return "";
		}
		return YURLDecode.searchWordDecode(matc.group(1));
	}
	
	/**
	 * 将日志转化为一个流量的Bean对象
	 * @param content
	 * @return
	 */
	private TrackDto toDto(MessageAndMetadata event){
		if(null != event && null != event.getPayload()){
			String content = new String(event.getPayload());
			if(StringUtils.isNotBlank(content)){
				TrackDto dto = null;
				if(platform_type == 2){
					dto = DaoUtil.track_app(content);
				}else{
					dto = DaoUtil.track(content);
				}
				if(null == dto) return null;
				long cateid = dto.getLastCateId();
				Map<Long, DispCate> cate_map = bc_cate_map.value();
				DispCate dispCate = cate_map.get(cateid);
//				if(null == dispCate){
//					try {
//						dispCate = CacheUtil.getCateDict(cateid);
//					} catch (Exception e) {
//						log.info("Track toDto cate error");
//					}
//				}
				dto.setBusiLine(dispCate != null ? dispCate.getBusiness_type_biz() : "other");
				if(platform_type != 2) dto.setPlatform(platform_type);
				
				if(StringUtils.isNotBlank(dto.getUrl())){
					String searchWord = this.get_search_word(dto.getUrl());
					if(StringUtils.isNotBlank(searchWord)){
						TrackSearchWord tsw = new TrackSearchWord(dto.getPlatform(), dto.getBusiLine(),
								searchWord);
						final String json_searchWord = JsonUtil.toJson(tsw);
						try {
							Executors.newCachedThreadPool().submit(new Runnable() {
								public void run() {
									RedisUtil.cache.lpushString(Constant.SEARCH_WORD_TRACK, json_searchWord);
								}
							});
						} catch (Exception e) {
						}
					}
				}
				return dto;
			}
		}
		return null;
	} 
	
	public List<JavaPairDStream<String, TrackValue>> mapPair(
			List<JavaDStream<MessageAndMetadata>> kafkaStreams) {
		List<JavaPairDStream<String, TrackValue>> list = 
				new ArrayList<JavaPairDStream<String,TrackValue>>();
		for(JavaDStream<MessageAndMetadata> stream : kafkaStreams){
			JavaPairDStream<String, TrackValue> mapStream = stream.
					mapToPair(new PairFunction<MessageAndMetadata, String, TrackValue>() {
						private static final long serialVersionUID = 7177440903958488351L;

						public Tuple2<String, TrackValue> call(
								MessageAndMetadata event) throws Exception {
							try {
								TrackDto dto = toDto(event);
								if(null != dto){
									String key = SparkUtil.getMapKeyNotTime(dto, keys);
									if(null != key){
										long pv = 1;
										String cookie = dto.getCookie();
										Set<String> cookies = new HashSet<String>();
										cookies.add(cookie);
										String pageType = dto.getPageType();
										long listPv = 0;
										long vpPv = 0;
										if(StringUtils.isNotBlank(pageType)){
											if(pageType.contains(TrackDto.PAGE_TYPE_DETAIL)) vpPv = 1;
											else if(pageType.contains(TrackDto.PAGE_TYPE_LIST)) listPv = 1;
										}
										TrackValue value = new TrackValue(pv, listPv, vpPv, cookies);
										return new Tuple2<String, TrackValue>(key, value);
									}
								}
							} catch (Exception e) {
								log.error("Track map ERROR!", e);
							}
							return new Tuple2<String, TrackValue>(SparkUtil.ERROR, new TrackValue());
						}
			});
			list.add(mapStream);
		}
		return list;
	}

	public JavaPairDStream<String, TrackValue> reducePair(
			JavaPairDStream<String, TrackValue> mapStream) {
		JavaPairDStream<String, TrackValue> reducePair = mapStream
				.reduceByKey(new Function2<TrackValue, TrackValue, TrackValue>() {
					private static final long serialVersionUID = -8890383475914996712L;

					public TrackValue call(TrackValue v1, TrackValue v2)
							throws Exception {
						try {
							long pv = v1.getPv() + v2.getPv();
							long listPv = v1.getListPv() + v2.getListPv();
							long vpPv = v1.getVpPv() + v2.getVpPv();
							Set<String> cookies = new HashSet<String>();
							cookies.addAll(v1.getCookies());
							cookies.addAll(v2.getCookies());
							return new TrackValue(pv, listPv, vpPv, cookies);
						} catch (Exception e) {
							log.error("Track reduce ERROR!",e);
						}
						return new TrackValue();
					}
				});
		return reducePair;
	}

//	public void foreachRDD(JavaPairDStream<String, TrackValue> reduceStream) {
//		reduceStream.foreachRDD(new Function<JavaPairRDD<String,TrackValue>, Void>() {
//			private static final long serialVersionUID = 2960491317310220185L;
//
//			public Void call(JavaPairRDD<String, TrackValue> v)
//					throws Exception {
//				long now = new Date().getTime();
//			    final long current_time = SparkUtil.makeFormateDate(now, batchInterval);
//				
//				v.foreach(new VoidFunction<Tuple2<String,TrackValue>>() {
//					private static final long serialVersionUID = 2783708008520734379L;
//
//					public void call(Tuple2<String, TrackValue> tuple) throws Exception {
//						try {
//							if(null != tuple){
//								String keyStr = tuple._1;
//								if(!SparkUtil.ERROR.equals(keyStr)){
//									TrackValue value = tuple._2;
//									String[] key_values = keyStr.split(",");
//									
//									long pv = value.getPv();
//									long listPv = value.getListPv();
//									long vpPv = value.getVpPv();
//									int uv = value.getCookies().size();	
//									log.info("Track ========== key : " + keyStr 
//												+ ",time : " + current_time
//												+ ",pv : " + pv + ",listPv : " + listPv
//												+ ",vpPv : " + vpPv
//												+ ",uv : " + uv);
//									TrackModel model = new TrackModel();
//									model.setTime(current_time);
//									TransferDimensionUtil.dimension(model, keys, Integer.parseInt(key_values[0]),
//											key_values[1]);
//									model.setPv(pv);
//									model.setListpv(listPv);
//									model.setVppv(vpPv);
//									model.setUv(uv);
////									try {
////										RedisUtil.cache.lpushString(Constant.TRACK_REDIS_QUEUE_KEY,
////												JsonUtil.toJson(model));
////									} catch (Exception e) {
////										log.error("track redis insert error!");
////									}
//									}
//								}
//						} catch (Exception e) {
//							log.error("Track foreachRDD ERROR!",e);
//						}
//					}
//				});
//				return null;
//			}
//		});
//		
//	}
	
	public void foreachRDD(JavaPairDStream<String, TrackValue> reduceStream) {
		reduceStream.foreachRDD(new Function<JavaPairRDD<String,TrackValue>, Void>() {
			private static final long serialVersionUID = -8199089522425453338L;

			public Void call(JavaPairRDD<String, TrackValue> v)
					throws Exception {
				long now = new Date().getTime();
			    final long current_time = SparkUtil.makeFormateDate(now, batchInterval);
				
				v.foreachPartition(new VoidFunction<Iterator<Tuple2<String,TrackValue>>>() {
					private static final long serialVersionUID = 3040955589442973044L;

					public void call(Iterator<Tuple2<String, TrackValue>> iter)
							throws Exception {
						if(null != iter){
							final List<TrackModel> list = new ArrayList<TrackModel>();
						    while(iter.hasNext()){
								Tuple2<String, TrackValue> tuple = iter.next();
								String keyStr = tuple._1;
								if(!SparkUtil.ERROR.equals(keyStr)){
									TrackValue value = tuple._2;
									String[] key_values = keyStr.split(",");
									
									long pv = value.getPv();
									long listPv = value.getListPv();
									long vpPv = value.getVpPv();
									int uv = value.getCookies().size();	
									log.info("Track ========== key : " + keyStr 
												+ ",time : " + current_time
												+ ",pv : " + pv + ",listPv : " + listPv
												+ ",vpPv : " + vpPv
												+ ",uv : " + uv);
									TrackModel model = new TrackModel();
									model.setTime(current_time);
									TransferDimensionUtil.dimension(model, keys, Integer.parseInt(key_values[0]),
											key_values[1]);
									model.setPv(pv);
									model.setListpv(listPv);
									model.setVppv(vpPv);
									model.setUv(uv);
									list.add(model);
								}
							}
						    
						    if(list.size() > 0){
						    	Executors.newCachedThreadPool().submit(new Runnable() {
									public void run() {
                                        RedisUtil.send(Constant.TRACK_REDIS_QUEUE_KEY, list);										
									}
								});
						    }
						}
					}
					
				});
				return null;
			}
		});
		
	}

}
