package com._58city.spark.app.mr;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.DaoUtil;
import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.IMCInfoAddDto;
import com._58city.spark.app.dao.IMCInfoAddValue;
import com._58city.spark.app.ext.JsonUtil;
import com._58city.spark.app.ext.dto.BelongCate;
import com._58city.spark.app.ext.dto.ImcInfoSource;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.ImcInfoAddModel;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TransferDimensionUtil;
import com.google.common.base.Splitter;

/**
 * Spark Streaming job for IMC "info add" log data.
 *
 * <p>Pipeline: parse raw Kafka log lines into {@link IMCInfoAddDto}s
 * ({@link #mapPair}), sum publish counts per dimension key
 * ({@link #reducePair}), then push the aggregated results and daily totals
 * into Redis ({@link #foreachRDD}).
 *
 * @author huangliang
 */
public class MrKafkaIMCInfoAdd implements MrKafkaInterface<String, IMCInfoAddValue> {

	private static final long serialVersionUID = 3633432580588998886L;

	// static so the (non-serializable) SLF4J logger is never captured in the
	// serialized Spark closure; a plain instance field here risked a
	// NotSerializableException when tasks are shipped to executors.
	private static final Logger log = LoggerFactory.getLogger(MrKafkaIMCInfoAdd.class);

	// Field names used (via reflection in SparkUtil.getMapKeyNotTime) to build
	// the per-record aggregation key.
	private final String[] keys;

	// Streaming batch interval (seconds), used to normalize result timestamps.
	private final int batchInterval;

	// cate id -> business line / category names, broadcast to the cluster
	private Broadcast<Map<Long, BelongCate>> bcBelongCateMap;

	// IMC source id -> platform dictionary, broadcast to the cluster
	private Broadcast<Map<String, ImcInfoSource>> bcImcSourceMap;

	public MrKafkaIMCInfoAdd(String[] keys, int batchInterval) {
		this.keys = keys;
		this.batchInterval = batchInterval;
	}

	/**
	 * Parses one raw log line into an {@link IMCInfoAddDto} and enriches it
	 * with business line, category names and platform. Also records the
	 * posting user id into a per-day Redis set (for distinct-poster counts).
	 *
	 * @param content one raw log line
	 * @return the enriched DTO, or {@code null} when the line is blank,
	 *         unparseable, or source-id extraction from "para" fails
	 */
	private IMCInfoAddDto toDto(String content) {
		if (StringUtils.isBlank(content)) {
			return null;
		}
		IMCInfoAddDto dto = DaoUtil.imc_add(content);
		if (null == dto) {
			return null;
		}

		fillBelongCate(dto);

		String source = resolveSource(dto);
		if (null == source) {
			// "para" JSON parsing failed; error already logged.
			return null;
		}

		// Map the resolved source id to a platform via the broadcast dictionary.
		ImcInfoSource dictSource = bcImcSourceMap.value().get(source);
		if (null != dictSource) {
			dto.setPlatform(dictSource.getPlatform());
		} else {
			dto.setPlatform("other");
		}

		recordDailyPoster(dto);

		return dto;
	}

	/**
	 * Fills busiLine / belong_cate1 / belong_cate2 from the broadcast category
	 * dictionary, keyed by the lowest-level cate id (last '|'-separated
	 * segment); all three default to "other" when the id is absent or unknown.
	 */
	private void fillBelongCate(IMCInfoAddDto dto) {
		Map<Long, BelongCate> belongCateMap = bcBelongCateMap.value();
		String[] cateIds = StringUtils.split(dto.getCateID(), '|');

		BelongCate dict = null;
		if (cateIds.length > 0) {
			// The last segment is the lowest-level category id.
			long cateId = Long.parseLong(cateIds[cateIds.length - 1]);
			dict = belongCateMap.get(cateId);
		}
		if (null != dict) {
			dto.setBusiLine(dict.getBusiness_type());
			dto.setBelong_cate1(dict.getCate1_name());
			dto.setBelong_cate2(dict.getCate2_name());
		} else {
			dto.setBusiLine("other");
			dto.setBelong_cate1("other");
			dto.setBelong_cate2("other");
		}
	}

	/**
	 * Resolves the effective source id. Priority (see getPostSourceNew):
	 * source in {3, 7} wins outright; otherwise the "para" entry with I=10589,
	 * then I=8992 (both trimmed via getPostSourceId); finally the raw source.
	 *
	 * @return the resolved source id, or {@code null} when the "para" JSON
	 *         could not be processed
	 */
	private String resolveSource(IMCInfoAddDto dto) {
		String source = dto.getSource();
		if ("3".equals(source) || "7".equals(source)) {
			return source;
		}
		try {
			List<?> paraList = JsonUtil.fromJson(dto.getPara(), List.class);
			if (null != paraList) {
				String source10589 = "";
				String source8992 = "";
				for (Object one : paraList) {
					if (one instanceof Map) {
						Map<?, ?> map = (Map<?, ?>) one;
						String key = String.valueOf(map.get("I"));
						if (StringUtils.isBlank(key)) {
							continue;
						}
						// The "I" field may arrive as e.g. "10589.0"; keep only
						// the integer part before the dot.
						if (key.indexOf(".") != -1) {
							key = key.substring(0, key.indexOf("."));
						}
						if ("10589".equals(key)) {
							source10589 = map.get("V").toString();
						}
						if ("8992".equals(key)) {
							source8992 = map.get("V").toString();
						}
					}
				}
				source = getPostSourceNew(source, source8992, source10589);
			}
		} catch (Exception e) {
			log.error("MrKafkaIMCInfoAdd get sourceId from para error ", e);
			return null;
		}
		return source;
	}

	/**
	 * Adds the posting user id to a per-day Redis set so the distinct number
	 * of posters per day can be derived. Best-effort: failures are logged and
	 * never abort processing of the record.
	 */
	private void recordDailyPoster(IMCInfoAddDto dto) {
		try {
			String userId = dto.getUserID();
			if (StringUtils.isBlank(userId)) {
				log.error("MrKafkaIMCInfoAdd get userid is null ");
			} else {
				// SimpleDateFormat is not thread-safe, so a fresh local
				// instance is created per call on purpose.
				SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
				String currentDate = format.format(new Date());
				String daySumKey = currentDate + Constant.INFO_CUST_SUM;
				RedisUtil.cache.saddString(daySumKey, userId);
				RedisUtil.cache.expire(daySumKey, Constant.redis_2d);
			}
		} catch (Exception e) {
			log.error("MrKafkaIMCInfoAdd deal day sum cus count error! ", e);
		}
	}

	// One Kafka message may carry several log lines separated by '\n'.
	private static final Splitter splitter = Splitter.on("\n");

	/**
	 * Map phase: splits each Kafka message into individual log lines, parses
	 * each line into a DTO, and emits (dimensionKey, count=1) pairs. Any line
	 * that cannot be turned into a keyed record is emitted under
	 * {@code SparkUtil.ERROR} so failures remain countable downstream.
	 */
	public List<JavaPairDStream<String, IMCInfoAddValue>> mapPair(List<JavaPairDStream<String, String>> kafkaStreams) {

		List<JavaPairDStream<String, IMCInfoAddValue>> list = new ArrayList<JavaPairDStream<String, IMCInfoAddValue>>();

		for (JavaPairDStream<String, String> stream : kafkaStreams) {

			JavaPairDStream<String, IMCInfoAddValue> mapStream = stream
					.flatMapToPair(new PairFlatMapFunction<Tuple2<String, String>, String, IMCInfoAddValue>() {
						private static final long serialVersionUID = -2181768485661965900L;

						public Iterable<Tuple2<String, IMCInfoAddValue>> call(Tuple2<String, String> event)
								throws Exception {

							List<Tuple2<String, IMCInfoAddValue>> result = new ArrayList<Tuple2<String, IMCInfoAddValue>>();

							// Kafka messages are stored without a key, so the
							// payload is in _2.
							String content = event._2;
							for (String oneLine : splitter.split(content)) {
								try {
									IMCInfoAddDto dto = toDto(oneLine);
									if (null != dto) {
										// Build the aggregation key via reflection.
										String key = SparkUtil.getMapKeyNotTime(dto, keys);
										if (null != key) {
											// One published record per line.
											int infoNum = 1;
											IMCInfoAddValue value = new IMCInfoAddValue(infoNum);
											result.add(new Tuple2<String, IMCInfoAddValue>(key, value));
											continue;
										} else {
											log.error(" MrKafkaIMCInfoAdd get key is null !");
										}
									} else {
										log.error(" MrKafkaIMCInfoAdd get dto is null !");
									}
								} catch (Exception e) {
									log.error("MrKafkaIMCInfoAdd map ERROR!", e);
								}
								// Failed line: emit under the ERROR key.
								result.add(new Tuple2<String, IMCInfoAddValue>(SparkUtil.ERROR, new IMCInfoAddValue()));
							}

							return result;
						}

					});

			list.add(mapStream);
		}
		return list;
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see
	 * com._58city.spark.app.mr.MrKafkaInterface#reducePair(org.apache.spark
	 * .streaming.api.java.JavaPairDStream)
	 */
	public JavaPairDStream<String, IMCInfoAddValue> reducePair(JavaPairDStream<String, IMCInfoAddValue> mapStream) {
		JavaPairDStream<String, IMCInfoAddValue> reducePair = mapStream
				.reduceByKey(new Function2<IMCInfoAddValue, IMCInfoAddValue, IMCInfoAddValue>() {

					private static final long serialVersionUID = 3099844879342864020L;

					/*
					 * Reduce: sum the publish counts of values sharing a key.
					 */
					public IMCInfoAddValue call(IMCInfoAddValue v1, IMCInfoAddValue v2) throws Exception {
						try {
							long infoNum = v1.getInfoNum() + v2.getInfoNum();
							return new IMCInfoAddValue(infoNum);
						} catch (Exception e) {
							log.error("MrKafkaIMCInfoAdd reduce ERROR!", e);
						}
						return new IMCInfoAddValue();
					}
				});
		return reducePair;
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see
	 * com._58city.spark.app.mr.MrKafkaInterface#foreachRDD(org.apache.spark
	 * .streaming.api.java.JavaPairDStream)
	 */
	public void foreachRDD(JavaPairDStream<String, IMCInfoAddValue> reduceStream) {
		reduceStream.foreachRDD(new Function<JavaPairRDD<String, IMCInfoAddValue>, Void>() {
			private static final long serialVersionUID = 8402873490238594541L;

			public Void call(JavaPairRDD<String, IMCInfoAddValue> v) throws Exception {
				long now = new Date().getTime();
				// Align the result timestamp to the batch interval boundary.
				final long current_time = SparkUtil.makeFormateDate(now, batchInterval);

				v.foreach(new VoidFunction<Tuple2<String, IMCInfoAddValue>>() {
					private static final long serialVersionUID = -4492986986364984593L;

					public void call(Tuple2<String, IMCInfoAddValue> tuple) throws Exception {
						// Push each aggregated (key, count) into Redis.
						try {
							if (null != tuple) {
								String key = tuple._1;
								IMCInfoAddValue value = tuple._2;
								long infoNum = value.getInfoNum();

								log.info("Imc info add ========== : key = " + key + " ; infoNum = " + infoNum);

								if (!SparkUtil.ERROR.equals(key)) {
									// NOTE(review): assumes the key encodes
									// exactly 4 comma-separated dimensions
									// (mirrors the `keys` array) — an
									// ArrayIndexOutOfBoundsException here is
									// swallowed by the outer catch.
									String[] key_values = key.split(",");

									ImcInfoAddModel iad = new ImcInfoAddModel();
									iad.setTime(current_time);
									TransferDimensionUtil.dimension(iad, keys, key_values[0], key_values[1],
											key_values[2], key_values[3]);

									iad.setInfoNum(infoNum);

									// Detailed per-dimension data -> Redis queue.
									try {
										RedisUtil.cache.lpushString(Constant.IMC_ADD_REDIS_QUEUE_KEY,
												JsonUtil.toJson(iad));

									} catch (Exception e) {
										log.error("imc_add detail redis insert error!", e);
									}

									// Running daily total, kept for cross-checking.
									try {
										SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
										String currentDate = format.format(new Date());

										String daySumKey = currentDate + "_daysum";
										long daySum = RedisUtil.cache.incrBy(daySumKey, infoNum);
										RedisUtil.cache.expire(daySumKey, Constant.redis_8d);
										log.info("Imc day sum is :" + daySum);
									} catch (Exception e) {
										log.error("imc_add infoNum day sum insert redis error!", e);
									}

								}
							} else {
								log.error("Imc info add  tuple  is null ");
							}
						} catch (Exception e) {
							log.error("MrKafkaIMCInfoAdd foreachRDD ERROR!", e);
						}
					}
				});
				return null;
			}
		});
	}

	public void setBcBelongCateMap(Broadcast<Map<Long, BelongCate>> bcBelongCateMap) {
		this.bcBelongCateMap = bcBelongCateMap;
	}

	public void setBcImcSourceMap(Broadcast<Map<String, ImcInfoSource>> bcImcSourceMap) {
		this.bcImcSourceMap = bcImcSourceMap;
	}

	/**
	 * Picks the unique source id from the raw source and the two
	 * post-source-id parameters (I=8992 and I=10589).
	 *
	 * @param source            raw source value from the log record
	 * @param postSourceId8992  value of para entry I=8992 (may be empty)
	 * @param postSourceId10589 value of para entry I=10589 (may be empty)
	 * @return the resolved source id
	 */
	public String getPostSourceNew(String source, String postSourceId8992, String postSourceId10589) {

		// Hard rule: when source is 3 or 7 it is authoritative.
		if ("3".equals(source) || "7".equals(source)) {
			return source;
		}

		// 10589 is the newer post-source-id; when both old and new are
		// present, the new one wins.
		if (StringUtils.isNotEmpty(postSourceId10589) && postSourceId10589.length() >= 2) {
			return getPostSourceId(postSourceId10589);
		}
		// Otherwise fall back to the legacy 8992 value.
		else if (StringUtils.isNotEmpty(postSourceId8992) && postSourceId8992.length() >= 2) {
			return getPostSourceId(postSourceId8992);
		}

		// Default: the raw source (e.g. when both para values are absent).
		return source;
	}

	/**
	 * Trims a post-source-id down to its leading digits: the first two
	 * characters, with a leading '0' dropped.
	 *
	 * @param postSourceId raw post-source-id; must be at least 2 chars long
	 * @return the trimmed id, or "-" when the input is too short or invalid
	 */
	public String getPostSourceId(String postSourceId) {
		try {
			if (StringUtils.isNotEmpty(postSourceId) && postSourceId.length() >= 2) {
				String res1 = postSourceId.substring(0, 1); // first character
				String res2 = postSourceId.substring(1, 2); // second character

				// A leading '0' is dropped: only the second character counts.
				if ("0".equals(res1)) {
					return res2;
				} else {
					return res1 + res2;
				}
			}
			// Anything else is treated as invalid.
			return "-";
		} catch (Exception e) {
			log.error("MrKafkaIMCInfoAdd getPostSourceId error!", e);
			return "-";
		}

	}

}
