package com._58city.spark.app.mr;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.DaoUtil;
import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.TrackDto;
import com._58city.spark.app.dao.TrackValueTemp;
import com._58city.spark.app.ext.dto.DispCate;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TrackModelTemp;
import com._58city.spark.app.redis.TransferDimensionUtil;
import com.google.common.collect.Sets;

/**
 * Spark Streaming MR job that parses raw track logs from Kafka, tags each
 * record with the machine room that served it ("o" = old / "n" = new),
 * aggregates PV counters per dimension key, and pushes the per-batch results
 * to a Redis queue for old-vs-new machine-room comparison.
 *
 * <p>Instances are serialized into Spark closures, so all non-serializable
 * collaborators (logger, executor pool) are kept {@code static}.
 */
public class MrKafkaTrack4Compare implements MrKafkaInterface<String, TrackValueTemp> {

	private static final long serialVersionUID = 7724385507060552900L;

	// static so the (non-serializable) logger is never captured into the
	// closure shipped to Spark executors; each executor JVM gets its own
	private static final Logger log = LoggerFactory.getLogger(MrKafkaTrack4Compare.class);

	// Shared pool for the asynchronous Redis pushes. The previous code created
	// a fresh cached thread pool per partition per batch and never shut it
	// down, leaking threads on long-running executors; one static pool per
	// executor JVM fixes that while keeping the push off the batch thread.
	private static final ExecutorService REDIS_EXECUTOR = Executors.newCachedThreadPool();

	/** cateId -> category dictionary, broadcast to all executors. */
	private Broadcast<Map<Long, DispCate>> bc_cate_map;

	/** Whitelist of monitored city sub-domains (lower-case), broadcast. */
	private Broadcast<Set<String>> bc_city_set;

	public void setBc_city_set(Broadcast<Set<String>> bc_city_set) {
		this.bc_city_set = bc_city_set;
	}

	/** Dimension field names used to build the aggregation key. */
	private String[] keys;

	/** Streaming batch interval, used to bucket the batch timestamp. */
	private int batchInterval;

	/** Platform of this stream: PC(-1), M(0), APP(2). */
	private int platform_type;

	/** Legal machine-room tags: "o" = old room, "n" = new room. */
	private final HashSet<String> tagSet = Sets.newHashSet("o", "n");

	/**
	 * @param keys          dimension field names for the aggregation key
	 * @param batchInterval streaming batch interval (passed to
	 *                      {@code SparkUtil.makeFormateDate})
	 * @param platform_type PC(-1), M(0) or APP(2)
	 */
	public MrKafkaTrack4Compare(String[] keys, int batchInterval, int platform_type) {
		super();
		this.keys = keys;
		this.batchInterval = batchInterval;
		this.platform_type = platform_type;
	}

	public void setBc_cate_map(Broadcast<Map<Long, DispCate>> bc_cate_map) {
		this.bc_cate_map = bc_cate_map;
	}

	/**
	 * Parses one raw log line into a {@link TrackDto} and enriches it with the
	 * business line, platform and machine-room tag.
	 *
	 * @param content one raw Kafka log line
	 * @return the populated dto, or {@code null} when the line is blank,
	 *         unparsable, or its URL is not from a monitored city domain
	 */
	private TrackDto toDto(String content) {
		if (StringUtils.isBlank(content)) {
			return null;
		}
		// APP logs have their own format/parser
		TrackDto dto = (platform_type == 2) ? DaoUtil.track_app(content) : DaoUtil.track(content);
		if (null == dto) {
			return null;
		}

		// resolve the business line from the broadcast category dictionary;
		// unknown categories fall back to "other"
		DispCate dispCate = bc_cate_map.value().get(dto.getLastCateId());
		dto.setBusiLine(dispCate != null ? dispCate.getBusiness_type_biz() : "other");
		// APP logs (platform_type == 2) already carry their own platform value
		if (platform_type != 2) {
			dto.setPlatform(platform_type);
		}

		// keep only URLs from the monitored domains ---
		// [city].58.com, .p.m.58.com, post.58.com, passport.58.com
		String cityFromURL = getCityFromUrl(dto.getUrl());
		Set<String> citySet = bc_city_set.value();
		if (StringUtils.isBlank(cityFromURL) || !citySet.contains(cityFromURL.toLowerCase())) {
			return null;
		}

		dto.setMachineRoom(resolveMachineRoom(dto, content));
		return dto;
	}

	/**
	 * Decides which machine room served the request from the "tag_tmp" track
	 * URL parameter: "o"/"n" pass through, a blank tag falls back to "o", and
	 * a missing tag yields "default". Unexpected values are logged but kept.
	 */
	private String resolveMachineRoom(TrackDto dto, String content) {
		Map<String, String> trackurl = dto.getTrackURL();
		if (trackurl == null || !trackurl.containsKey("tag_tmp")) {
			log.error("no trackURL or tag_tmp:{}  url is:{}", content, dto.getUrl());
			return "default";
		}
		String machineRoom = trackurl.get("tag_tmp");
		if (StringUtils.isBlank(machineRoom)) {
			// blank tag: treat as the old machine room
			log.warn("empty tag_tmp value:  url:{}  trackURL:{}", dto.getUrl(), trackurl);
			return "o";
		}
		if (!tagSet.contains(machineRoom)) {
			log.error("error tag_tmp value:{}  url:{}  trackURL:{}", machineRoom, dto.getUrl(), trackurl);
		}
		return machineRoom;
	}

	/**
	 * Extracts the city sub-domain from a URL of the form
	 * {@code http://<city>.58.com/...}.
	 *
	 * <p>NOTE(review): only plain "http://" is matched — an https URL returns
	 * {@code null} and the record is filtered out; confirm this is intended.
	 *
	 * @return the sub-domain between "http://" and ".58.com", or {@code null}
	 */
	private String getCityFromUrl(String url) {
		if (StringUtils.isBlank(url)) {
			return null;
		}
		return StringUtils.substringBetween(url, "http://", ".58.com");
	}

	/**
	 * Maps each raw Kafka record to a ({@code dimensionKey}, counter) pair.
	 * Records that fail parsing or the filters in {@link #toDto(String)} are
	 * emitted under the {@code SparkUtil.ERROR} sentinel key with an empty
	 * counter so they can be dropped downstream.
	 *
	 * @see com._58city.spark.app.mr.MrKafkaInterface#mapPair(java.util.List)
	 */
	public List<JavaPairDStream<String, TrackValueTemp>> mapPair(List<JavaPairDStream<String, String>> kafkaStreams) {
		List<JavaPairDStream<String, TrackValueTemp>> list =
				new ArrayList<JavaPairDStream<String, TrackValueTemp>>(kafkaStreams.size());
		for (JavaPairDStream<String, String> stream : kafkaStreams) {
			list.add(stream.mapToPair(new PairFunction<Tuple2<String, String>, String, TrackValueTemp>() {
				private static final long serialVersionUID = 3742490077169282336L;

				public Tuple2<String, TrackValueTemp> call(Tuple2<String, String> event) throws Exception {
					try {
						TrackDto dto = toDto(event._2);
						if (null != dto) {
							String key = SparkUtil.getMapKeyNotTime(dto, keys);
							if (null != key) {
								return new Tuple2<String, TrackValueTemp>(key, toCounter(dto));
							}
						}
					} catch (Exception e) {
						log.error("Track map ERROR!", e);
					}
					// parse/filter failures are funneled to a sentinel key
					return new Tuple2<String, TrackValueTemp>(SparkUtil.ERROR, new TrackValueTemp());
				}
			}));
		}
		return list;
	}

	/**
	 * Builds the per-record counter: total PV of 1, plus one of
	 * list/detail/post PV depending on the record's page type.
	 */
	private TrackValueTemp toCounter(TrackDto dto) {
		long listPv = 0;
		long vpPv = 0;
		long postPv = 0;
		String pageType = dto.getPageType();
		if (StringUtils.isNotBlank(pageType)) {
			if (pageType.contains(TrackDto.PAGE_TYPE_DETAIL)) {
				vpPv = 1;
			} else if (pageType.contains(TrackDto.PAGE_TYPE_LIST)) {
				listPv = 1;
			} else if (pageType.contains("post")) {
				postPv = 1;
			}
		}
		return new TrackValueTemp(1L, listPv, vpPv, postPv);
	}

	/**
	 * Sums the counters of all records sharing the same dimension key.
	 *
	 * @see com._58city.spark.app.mr.MrKafkaInterface#reducePair(org.apache.spark.streaming.api.java.JavaPairDStream)
	 */
	public JavaPairDStream<String, TrackValueTemp> reducePair(JavaPairDStream<String, TrackValueTemp> mapStream) {
		return mapStream.reduceByKey(new Function2<TrackValueTemp, TrackValueTemp, TrackValueTemp>() {
			private static final long serialVersionUID = 6264099295078802382L;

			public TrackValueTemp call(TrackValueTemp v1, TrackValueTemp v2) throws Exception {
				try {
					return new TrackValueTemp(
							v1.getPv() + v2.getPv(),
							v1.getListPv() + v2.getListPv(),
							v1.getVpPv() + v2.getVpPv(),
							v1.getPostPv() + v2.getPostPv());
				} catch (Exception e) {
					log.error("Track reduce ERROR!", e);
				}
				// never fail the reduce: fall back to an empty counter
				return new TrackValueTemp();
			}
		});
	}

	/**
	 * Terminal action: converts each aggregated (key, counter) pair into a
	 * {@link TrackModelTemp} stamped with the interval-aligned batch time and
	 * pushes the partition's models to the Redis compare queue asynchronously.
	 *
	 * @see com._58city.spark.app.mr.MrKafkaInterface#foreachRDD(org.apache.spark.streaming.api.java.JavaPairDStream)
	 */
	public void foreachRDD(JavaPairDStream<String, TrackValueTemp> reduceStream) {
		reduceStream.foreachRDD(new Function<JavaPairRDD<String, TrackValueTemp>, Void>() {
			private static final long serialVersionUID = -8199089522425453338L;

			public Void call(JavaPairRDD<String, TrackValueTemp> v) throws Exception {
				// align the batch timestamp to the streaming interval
				final long current_time = SparkUtil.makeFormateDate(System.currentTimeMillis(), batchInterval);

				v.foreachPartition(new VoidFunction<Iterator<Tuple2<String, TrackValueTemp>>>() {
					private static final long serialVersionUID = 3040955589442973044L;

					public void call(Iterator<Tuple2<String, TrackValueTemp>> iter) throws Exception {
						if (null == iter) {
							return;
						}
						final List<TrackModelTemp> list = new ArrayList<TrackModelTemp>();
						while (iter.hasNext()) {
							Tuple2<String, TrackValueTemp> tuple = iter.next();
							String keyStr = tuple._1;
							// drop the sentinel key emitted for bad records
							if (SparkUtil.ERROR.equals(keyStr)) {
								continue;
							}
							TrackValueTemp value = tuple._2;
							String[] key_values = keyStr.split(",");

							long pv = value.getPv();
							long listPv = value.getListPv();
							long vpPv = value.getVpPv();
							long postPv = value.getPostPv();
							log.info("Track ========== key : {},time : {},pv : {},listPv : {},vpPv : {},postPv : {}",
									keyStr, current_time, pv, listPv, vpPv, postPv);
							TrackModelTemp model = new TrackModelTemp();
							model.setTime(current_time);
							// key layout: platform,<dim1>,<dim2> per getMapKeyNotTime
							TransferDimensionUtil.dimension(model, keys, Integer.parseInt(key_values[0]),
									key_values[1], key_values[2]);
							model.setPv(pv);
							model.setListpv(listPv);
							model.setVppv(vpPv);
							model.setPostPv(postPv);
							list.add(model);
						}

						if (!list.isEmpty()) {
							// push off-thread so Redis latency does not block
							// the batch; uses the shared static pool instead of
							// leaking a new pool per partition
							REDIS_EXECUTOR.submit(new Runnable() {
								public void run() {
									RedisUtil.send(Constant.TRACK_REDIS_QUEUE_KEY_TMP, list);
								}
							});
						}
					}

				});
				return null;
			}
		});

	}
}
