package com._58city.spark.app.mr;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.DaoUtil;
import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.CPCDisplayDto;
import com._58city.spark.app.dao.CpcDisplayValue;
import com._58city.spark.app.ext.JsonUtil;
import com._58city.spark.app.ext.dto.DispCate;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.CpcDisplayModel;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TransferDimensionUtil;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * Spark Streaming computation for CPC (cost-per-click) display logs:
 * maps Kafka log lines to per-dimension counters, reduces them by key,
 * and pushes the aggregated results into a Redis queue.
 *
 * @author lihao
 */
public class MrKafkaCpcDisplay implements
		MrKafkaInterface<String, CpcDisplayValue> {

	private static final long serialVersionUID = 2622525258343948119L;
	private static Splitter splitter = Splitter.on("\n");
	private Logger log = LoggerFactory.getLogger(MrKafkaCpcDisplay.class);

	private Broadcast<Map<Long, DispCate>> bc_cate_map;

	private String[] keys;
	private final Set<String> excludeIPSet;

	private int batchInterval;

	public MrKafkaCpcDisplay(String[] keys, int batchInterval,String excludeIPString) {
		super();
		this.keys = keys;
		this.batchInterval = batchInterval;
		if(!StringUtils.isBlank(excludeIPString)){
			excludeIPSet = Sets.newHashSet(excludeIPString.split(","));
		}else{
			excludeIPSet = Sets.newHashSet();
		}
	}

	public void setBc_cate_map(Broadcast<Map<Long, DispCate>> bc_cate_map) {
		this.bc_cate_map = bc_cate_map;
	}


	/**
	 * 将日志转化为一个CPC Display的Bean对象
	 * @param content
	 * @return
	 */
	private CPCDisplayDto toDto(String content) {
		if (StringUtils.isNotBlank(content)) {
			CPCDisplayDto dto = DaoUtil.cpc_display(content);
			if (null == dto)
				return null;
			Map<Long, DispCate> cate_map = bc_cate_map.value();
			List<DispCate> cate_list = new ArrayList<DispCate>();
			for (String one : dto.getDisplayTypes()) {
				long cateid = Long.parseLong(one);
				DispCate dispCate = cate_map.get(cateid);
//				if (null == dispCate){
//					try {
//						dispCate = CacheUtil.getCateDict(cateid);
//					} catch (Exception e) {
//						log.info("CPC Display toDto cate error");
//					}
//				}
				if (null != dispCate)
					cate_list.add(dispCate);
			}
			dto.setBusiLine(cate_list.size() > 0 ? cate_list.get(0)
					.getBusiness_type_biz() : "other");
			return dto;
		} else
			return null;
	}


	/* (non-Javadoc)
	 * @see com._58city.spark.app.mr.MrKafkaInterface#mapPair(java.util.List)
	 */
	public List<JavaPairDStream<String, CpcDisplayValue>> mapPair(
			List<JavaPairDStream<String, String>> kafkaStreams) {
		List<JavaPairDStream<String, CpcDisplayValue>> list = new ArrayList<JavaPairDStream<String, CpcDisplayValue>>();
		for (JavaPairDStream<String, String> stream : kafkaStreams) {
			JavaPairDStream<String, CpcDisplayValue> mapStream = stream
					.flatMapToPair(new PairFlatMapFunction<Tuple2<String, String>, String, CpcDisplayValue>() {
						private static final long serialVersionUID = 6684450563218472963L;

						public Iterable<Tuple2<String, CpcDisplayValue>> call(
								Tuple2<String, String> event) throws Exception {
							List<Tuple2<String, CpcDisplayValue>> flatedList= Lists.newArrayList();
							try {
								String content = event._2; //Kafka拉取一个String字符串
								//平台那边将多条日志合成一条消息了，此处将日志按"\n"分割还原
								for(String oneLine : splitter.split(content)){
									CPCDisplayDto dto = toDto(oneLine); 
									if (null != dto && !excludeIPSet.contains(dto.getClientIp())) {
										String key = SparkUtil.getMapKeyNotTime(dto, keys); //通过反射获取一个key
										if (null != key) {
											long pv = 1;
											long adNum = dto.getAdNum();
											long adPv = 0;
											if (adNum > 0)
												adPv = 1;
											CpcDisplayValue value = new CpcDisplayValue( //统计需要的值
													pv, adPv, adNum);
											flatedList.add(new Tuple2<String, CpcDisplayValue>(key, value));
										}
									}
								}
							} catch (Exception e) {
								log.error("MrCpcDisplay map ERROR!", e);
							}
							return flatedList;
						}
					});
			list.add(mapStream);
		}
		return list;
	}


	/* (non-Javadoc)
	 * @see com._58city.spark.app.mr.MrKafkaInterface#reducePair(org.apache.spark.streaming.api.java.JavaPairDStream)
	 */
	public JavaPairDStream<String, CpcDisplayValue> reducePair(
			JavaPairDStream<String, CpcDisplayValue> mapStream) {
		JavaPairDStream<String, CpcDisplayValue> reducePair = mapStream
				.reduceByKey(new Function2<CpcDisplayValue, CpcDisplayValue, CpcDisplayValue>() {
					private static final long serialVersionUID = 8840128583661160560L;

					/*
					 * 将相同Key的V值，相加
					 */
					public CpcDisplayValue call(CpcDisplayValue v1,
							CpcDisplayValue v2) throws Exception {
						try {
							long pv = v1.getPv() + v2.getPv();
							long adNum = v1.getAdNum() + v2.getAdNum();
							long adPv = v1.getAdPv() + v2.getAdPv();
							return new CpcDisplayValue(pv, adPv, adNum);
						} catch (Exception e) {
							log.error("MrCpcDisplay reduce ERROR!", e);
						}
						return new CpcDisplayValue();
					}
				});
		return reducePair;
	}


	/* (non-Javadoc)
	 * @see com._58city.spark.app.mr.MrKafkaInterface#foreachRDD(org.apache.spark.streaming.api.java.JavaPairDStream)
	 */
	public void foreachRDD(JavaPairDStream<String, CpcDisplayValue> reduceStream) {
		reduceStream
				.foreachRDD(new Function<JavaPairRDD<String, CpcDisplayValue>, Void>() {
					private static final long serialVersionUID = -5051969198294837336L;

					public Void call(JavaPairRDD<String, CpcDisplayValue> v)
							throws Exception {
						long now = new Date().getTime();
						final long current_time = SparkUtil.makeFormateDate(
								now, batchInterval);

						v.foreach(new VoidFunction<Tuple2<String, CpcDisplayValue>>() {
							private static final long serialVersionUID = -1742941788650707936L;

							public void call(
									Tuple2<String, CpcDisplayValue> tuple)
									throws Exception {
								try {
									if (null != tuple) {
										String keyStr = tuple._1;
										if (!SparkUtil.ERROR.equals(keyStr)) {
											CpcDisplayValue value = tuple._2;
											String[] key_values = keyStr
													.split(",");

											long pv = value.getPv();
											long adNum = value.getAdNum();
											long adPv = value.getAdPv();

//											log.info("CPC Display ========== key : "
//													+ keyStr
//													+ ",time : "
//													+ current_time
//													+ ",pv : "
//													+ pv
//													+ ",adPv : "
//													+ adPv
//													+ ",adNum : " + adNum);

											CpcDisplayModel cdm = new CpcDisplayModel();
											cdm.setTime(current_time);
											TransferDimensionUtil.dimension(
													cdm,
													keys,
													Integer.parseInt(key_values[0]),
													key_values[1]);
											cdm.setPv(pv);
											cdm.setAdPv(adPv);
											cdm.setAdNum(adNum);
											try {
												RedisUtil.cache
														.lpushString(
																Constant.CPC_DISPLAY_REDIS_QUEUE_KEY,
																JsonUtil.toJson(cdm)); 
											} catch (Exception e) {
												log.error("display redis insert error!");
											}
										}
									}
								} catch (Exception e) {
									log.error("MrCpcDisplay foreachRDD ERROR!",
											e);
								}
							}
						});
						return null;
					}
				});
	}

}
