package com._58city.spark.app.mr;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.ResumeAddDto;
import com._58city.spark.app.ext.JsonUtil;
import com._58city.spark.app.ext.dto.BelongCate;
import com._58city.spark.app.ext.dto.DispCity;
import com._58city.spark.app.ext.dto.ResumeSource;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TransferDimensionUtil;

/**
 * Map/Reduce pipeline for the "resume add" Kafka topic.
 *
 * <p>Each Kafka message is a JSON-encoded {@link ResumeAddDto}. The pipeline maps every
 * message to a dimension key (built reflectively from {@code keys}), counts occurrences
 * per key via {@code reduceByKey}, enriches each aggregated record with broadcast lookup
 * tables (category / city / source), and pushes the result onto a Redis queue.
 *
 * <p>Instances are serialized into Spark task closures, hence the {@code serialVersionUID}
 * and the requirement that all captured state be serializable.
 */
public class MrKafkaResumeAdd implements MrKafkaInterface<String, Integer> {

	private static final long serialVersionUID = 6512221650190338764L;

	// static + final: a non-static logger would be captured into every Spark task
	// closure and SLF4J loggers are not serializable (standard SLF4J idiom).
	private static final Logger log = LoggerFactory.getLogger(MrKafkaResumeAdd.class);

	// ResumeAddDto field names used (via reflection in SparkUtil) to build the map key
	private final String[] keys;

	// batch interval used to align the result timestamp to the batch boundary
	private final int batchInterval;

	// dimension lookup tables broadcast to the cluster

	// category id -> category names (level 1 / level 2)
	private Broadcast<Map<Long, BelongCate>> bcBelongCateMap;

	// city id -> display city
	private Broadcast<Map<Long, DispCity>> bc_city_map;

	// source id -> resume source (platform)
	private Broadcast<Map<Integer, ResumeSource>> bc_resume_map;

	public void setBc_resume_map(Broadcast<Map<Integer, ResumeSource>> bc_resume_map) {
		this.bc_resume_map = bc_resume_map;
	}

	public void setBc_city_map(Broadcast<Map<Long, DispCity>> bc_city_map) {
		this.bc_city_map = bc_city_map;
	}

	public void setBcBelongCateMap(Broadcast<Map<Long, BelongCate>> bcBelongCateMap) {
		this.bcBelongCateMap = bcBelongCateMap;
	}

	/**
	 * @param keys
	 *            DTO field names used to compose the aggregation key
	 * @param batchInterval
	 *            batch interval used to compute the batch timestamp
	 */
	public MrKafkaResumeAdd(String[] keys, int batchInterval) {
		super();
		this.keys = keys;
		this.batchInterval = batchInterval;
	}

	/**
	 * Maps each raw Kafka message to a {@code (dimensionKey, 1)} pair.
	 *
	 * <p>Messages that cannot be parsed or keyed are emitted under
	 * {@link SparkUtil#ERROR} so they are counted but excluded downstream.
	 *
	 * @param kafkaStreams raw {@code (kafkaKey, jsonValue)} streams
	 * @return one {@code (dimensionKey, 1)} stream per input stream
	 */
	public List<JavaPairDStream<String, Integer>> mapPair(List<JavaPairDStream<String, String>> kafkaStreams) {

		List<JavaPairDStream<String, Integer>> list = new ArrayList<JavaPairDStream<String, Integer>>();

		for (JavaPairDStream<String, String> stream : kafkaStreams) {
			JavaPairDStream<String, Integer> mapStream = stream
					.mapToPair(new PairFunction<Tuple2<String, String>, String, Integer>() {

						private static final long serialVersionUID = 5856892611643133352L;

						public Tuple2<String, Integer> call(Tuple2<String, String> event) throws Exception {
							try {
								// Kafka messages are stored key-less; the payload is in _2
								String content = event._2;
								ResumeAddDto dto = toDto(content);
								if (null != dto) {
									// reflective key built from the configured DTO fields
									String key = SparkUtil.getMapKeyNotTime(dto, keys);
									if (null != key) {
										// (key, 1) pair ready for reduceByKey
										return new Tuple2<String, Integer>(key, 1);
									} else {
										log.info("key is null !");
									}
								} else {
									log.info("dto is null !");
								}
							} catch (Exception e) {
								// never let a bad record kill the batch; count it as ERROR below
								log.error(" map ERROR!", e);
							}

							// failed records are funneled into a single ERROR bucket
							return new Tuple2<String, Integer>(SparkUtil.ERROR, 1);
						}

					});
			list.add(mapStream);
		}
		return list;
	}

	/**
	 * Enriches an aggregated record with human-readable dimension names from the
	 * broadcast lookup tables (called from foreachRDD). Unknown ids map to "other".
	 *
	 * @param o record to enrich; passed through unchanged if {@code null}
	 * @return the same (mutated) record, or {@code null} if the input was {@code null}
	 */
	public ResumeAddDto addInfo(ResumeAddDto o) {
		if (o == null) {
			return null;
		}

		Map<Long, BelongCate> belongCateMap = bcBelongCateMap.value();

		// NumberUtils.toLong never throws (unlike Long.parseLong), so a non-numeric
		// category id falls into the "other" bucket instead of failing the task;
		// also consistent with the city-id lookup below
		long cateId = NumberUtils.toLong(o.getTargetCateID(), -1L);
		BelongCate dict = belongCateMap.get(cateId);
		if (dict != null) {
			o.setTargetCateName1(dict.getCate1_name());
			o.setTargetCateName2(dict.getCate2_name());
		} else {
			o.setTargetCateName1("other");
			o.setTargetCateName2("other");
		}

		Map<Long, DispCity> cityMap = bc_city_map.value();
		long cityid = NumberUtils.toLong(o.getTargetAreaID(), 0L);
		DispCity city = cityMap.get(cityid);
		if (city != null) {
			o.setTargetAreaName(city.getCity1_name());
		} else {
			o.setTargetAreaName("other");
		}

		return o;
	}

	/**
	 * Parses one Kafka message (already a JSON string) into a {@link ResumeAddDto},
	 * normalizes comma-separated id lists to their first element, resolves the
	 * platform from the broadcast source table, and (best-effort) records the
	 * user id in a per-day Redis set for daily unique-customer counting.
	 *
	 * @param content raw JSON payload
	 * @return the parsed DTO, or {@code null} for blank/unparsable input
	 */
	protected ResumeAddDto toDto(String content) {
		if (StringUtils.isBlank(content)) {
			return null;
		}
		// the message payload is already a JSON string
		ResumeAddDto o = JsonUtil.fromJson(content, ResumeAddDto.class);
		if (o == null) {
			// malformed JSON: caller logs "dto is null !" and counts it as ERROR
			return null;
		}
		// ids may arrive as comma-separated lists; keep only the first element.
		// Guard against null fields to avoid an NPE on sparse messages.
		if (o.getTargetCateID() != null) {
			o.setTargetCateID(o.getTargetCateID().split(",")[0]);
		}
		if (o.getTargetAreaID() != null) {
			o.setTargetAreaID(o.getTargetAreaID().split(",")[0]);
		}
		ResumeSource sou = bc_resume_map.value().get(o.getSource());
		if (sou != null) {
			o.setPlatform(sou.getPlatform());
		} else {
			o.setPlatform("other");
		}

		// best-effort daily unique-user tracking; a Redis failure must not drop the record
		try {
			String userId = o.getUserid();
			if (StringUtils.isBlank(userId)) {
				log.info(" userid is null ");
			} else {
				// SimpleDateFormat is not thread-safe, so it stays method-local
				SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
				String currentDate = format.format(new Date());
				String daySumKey = currentDate + Constant.INFO_CUST_SUM;
				RedisUtil.cache.saddString(daySumKey, userId);
				// set expires after two days so stale day-keys do not accumulate
				RedisUtil.cache.expire(daySumKey, Constant.redis_2d);
			}
		} catch (Exception e) {
			log.error(" deal day sum cus count error! ", e);
		}
		return o;
	}

	/**
	 * Sums the 1-valued counts per dimension key.
	 *
	 * @param mapStream {@code (key, 1)} stream from {@link #mapPair}
	 * @return {@code (key, count)} stream
	 */
	public JavaPairDStream<String, Integer> reducePair(JavaPairDStream<String, Integer> mapStream) {
		JavaPairDStream<String, Integer> reducePair = mapStream.reduceByKey(new Function2<Integer, Integer, Integer>() {

			private static final long serialVersionUID = 8154588228999340664L;

			/*
			 * Sum the values of identical keys. Integer addition cannot fail here
			 * (Spark never feeds null into reduceByKey), so no catch-and-return-0:
			 * that would have silently zeroed a partial count.
			 */
			public Integer call(Integer v1, Integer v2) throws Exception {
				return v1 + v2;
			}
		});
		return reducePair;
	}

	/**
	 * Terminal action: for every aggregated {@code (key, count)} pair, rebuilds the
	 * dimension DTO, stamps it with the batch-aligned timestamp, enriches it via
	 * {@link #addInfo}, and pushes it as JSON onto the Redis result queue.
	 * ERROR-bucket keys are logged but not published.
	 *
	 * @param reduceStream {@code (key, count)} stream from {@link #reducePair}
	 */
	public void foreachRDD(JavaPairDStream<String, Integer> reduceStream) {
		reduceStream.foreachRDD(new Function<JavaPairRDD<String, Integer>, Void>() {

			private static final long serialVersionUID = 5992085027467804110L;

			public Void call(JavaPairRDD<String, Integer> v) throws Exception {
				// align the timestamp to the batch boundary so all records of a
				// batch share the same time value
				long now = new Date().getTime();
				final long current_time = SparkUtil.makeFormateDate(now, batchInterval);

				v.foreach(new VoidFunction<Tuple2<String, Integer>>() {

					private static final long serialVersionUID = -8652411630252507466L;

					public void call(Tuple2<String, Integer> tuple) throws Exception {
						try {
							if (null != tuple) {
								String key = tuple._1;
								Integer value = tuple._2;

								log.info("resume add ========== : key = " + key + " ; infoNum = " + value);

								// ERROR-bucket records are counted but never published
								if (!SparkUtil.ERROR.equals(key)) {
									String[] key_values = key.split(",");

									ResumeAddDto iad = new ResumeAddDto();
									iad.setTime(current_time);
									// map the key segments back onto DTO dimension fields
									TransferDimensionUtil.dimension(iad, keys, key_values[0], key_values[1],
											key_values[2]);

									iad.setNum(value);
									addInfo(iad);

									// publish the detailed record; a Redis failure only
									// loses this one record, not the whole partition
									try {
										RedisUtil.cache.lpushString(Constant.RESUME_ADD_REDIS_QUEUE_KEY,
												JsonUtil.toJson(iad));
										log.info(JsonUtil.toJson(iad));

									} catch (Exception e) {
										log.error(" detail redis insert error!", e);
									}

								}
							} else {
								log.info("resume add  tuple  is null ");
							}
						} catch (Exception e) {
							log.error(" foreachRDD ERROR!", e);
						}
					}
				});
				return null;
			}
		});
	}

}
