package com._58city.spark.app.mr;

import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.DaoUtil;
import com._58city.spark.app.SparkUtil;
import com._58city.spark.app.dao.CPCClickDto;
import com._58city.spark.app.dao.CpcClickValue;
import com._58city.spark.app.dao.KeyInfo;
import com._58city.spark.app.ext.JsonUtil;
import com._58city.spark.app.ext.dto.DispCate;
import com._58city.spark.app.ext.dto.DispCity;
import com._58city.spark.app.redis.CPCClickDetails;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.CpcClickModel;
import com._58city.spark.app.redis.RedisUtil;
import com._58city.spark.app.redis.TransferDimensionUtil;
import com.google.common.base.Splitter;
import com.google.common.math.DoubleMath;

/**
 * Spark computation for CPC click logs: parses raw Kafka messages into
 * {@link CPCClickDto}s, aggregates pv / cash / cost / distinct customers per
 * configured dimension key, and pushes the per-window results to Redis.
 *
 * @author lihao
 */
public class MrKafkaCpcClick implements MrKafkaInterface<String, CpcClickValue>{

	private static final long serialVersionUID = -5103521594515138651L;

	/** Upstream packs several log lines into one Kafka message, joined with "\n". */
	private static final Splitter splitter = Splitter.on("\n");

	/** Shared pool for fire-and-forget Redis writes so they never block a Spark task. */
	private static final ExecutorService threadPool = Executors.newCachedThreadPool();

	/** static final so the logger is not dragged into Spark closure serialization. */
	private static final Logger log = LoggerFactory.getLogger(MrKafkaCpcClick.class);

	/** Key prefix of the dimension combination this job outputs. */
	public static final String type_new = "new";

	private Broadcast<Map<Long, DispCity>> bc_city_map;
	private Broadcast<Set<Long>> bc_direct_sales_city;
	private Broadcast<Map<Long, DispCate>> bc_cate_map;

	private List<KeyInfo> keyInfos = new ArrayList<KeyInfo>(); //dimension-key definitions to aggregate by

	private int batchInterval; //streaming window interval

	/**
	 * @param keyInfos      dimension-key definitions to aggregate by
	 * @param batchInterval streaming window interval, forwarded to {@code SparkUtil.makeFormateDate}
	 */
	public MrKafkaCpcClick(List<KeyInfo> keyInfos, int batchInterval) {
		super();
		this.keyInfos = keyInfos;
		this.batchInterval = batchInterval;
	}

	public void setBc_city_map(Broadcast<Map<Long, DispCity>> bc_city_map) {
		this.bc_city_map = bc_city_map;
	}

	public void setBc_cate_map(Broadcast<Map<Long, DispCate>> bc_cate_map) {
		this.bc_cate_map = bc_cate_map;
	}

	public void setBc_direct_sales_city(Broadcast<Set<Long>> bc_direct_sales_city) {
		this.bc_direct_sales_city = bc_direct_sales_city;
	}

	/**
	 * Parses one raw log line into a CPC click bean and enriches it with the
	 * category / city dimensions from the broadcast maps. For fully resolved
	 * clicks a detail record is additionally pushed (asynchronously) to a Redis
	 * sorted set scored by click time, for the map-view rolling display.
	 *
	 * @param content one raw log line
	 * @return the enriched dto, or {@code null} if the line is blank or unparsable
	 */
	private CPCClickDto toDto(String content){
		if(StringUtils.isBlank(content)){
			return null;
		}
		CPCClickDto dto = DaoUtil.cpc_click(content);
		if(null == dto) return null;

		//a click may carry several display categories; keep the ones we know
		Map<Long, DispCate> cate_map = bc_cate_map.value();
		List<DispCate> cate_list = new ArrayList<DispCate>();
		for(String one : dto.getDisplayTypes()){
			long cateid = Long.parseLong(one);
			DispCate dispCate = cate_map.get(cateid);
			if(null != dispCate) cate_list.add(dispCate);
		}
		//business line is taken from the first matched category
		dto.setBusiLine(cate_list.isEmpty() ? "other" : cate_list.get(0).getBusiness_type_biz());

		if(cate_list.isEmpty()){
			dto.setCate2Id("0");
			dto.setCate2Name("null");
		}else{
			//join all level-2 category ids/names with the dto separator;
			//StringBuilder: no synchronization needed for method-local buffers
			StringBuilder sb_cate2Id = new StringBuilder();
			StringBuilder sb_cate2Name = new StringBuilder();
			for(DispCate one : cate_list){
				sb_cate2Id.append(one.getCate2_id()).append(CPCClickDto.separator);
				sb_cate2Name.append(one.getCate2_name()).append(CPCClickDto.separator);
			}
			//drop the trailing separator
			dto.setCate2Id(sb_cate2Id.substring(0, sb_cate2Id.length() - 1));
			dto.setCate2Name(sb_cate2Name.substring(0, sb_cate2Name.length() - 1));
		}

		long city_id = Long.parseLong(dto.getCityId()); //parseLong: no needless boxing
		Map<Long, DispCity> city_map = bc_city_map.value();
		Set<Long> direct_sales_city = bc_direct_sales_city.value();
		DispCity dc = city_map.get(city_id);
		if(null != dc){
			dto.setCity1Id(dc.getCity1_id());
			dto.setCity1Name(dc.getCity1_name());
		}else{
			log.info("UNKNOW ======== city : {}", city_id);
			dto.setCity1Id(-1);
			dto.setCity1Name("未知城市");
		}
		//flag whether this click belongs to one of the 27 direct-sales cities
		if(direct_sales_city.contains(dto.getCity1Id())){
			dto.setCityType(CPCClickDto.CITY_TYPE_DIRECT);
		}else{
			dto.setCityType(CPCClickDto.CITY_TYPE_CHANNEL);
		}

		//click details are kept only for fully resolved clicks, for the rolling display
		if(!"0".equals(dto.getCate2Id()) && dto.getCity1Id() != -1){
			//money is scaled to a long (x100) to avoid float arithmetic on currency;
			//parse as double directly instead of going through a lossy float
			long cash = DoubleMath.roundToLong(Double.parseDouble(dto.getCash()) * 100,
					RoundingMode.HALF_UP);
			final CPCClickDetails ccd = new CPCClickDetails();
			ccd.setCash(cash);
			ccd.setCate2Name(dto.getCate2Name());
			ccd.setCity1Name(dto.getCity1Name());
			ccd.setClickTime(Long.parseLong(dto.getClickTime()));
			ccd.setCost(Long.parseLong(dto.getCost()));
			ccd.setCustomerId(dto.getCustomerId());
			ccd.setPlatform(dto.getPlatform());
			final String json_details = JsonUtil.toJson(ccd);
			try {
				//async push of the detail record shown below the CPC map view
				threadPool.submit(new Runnable() {
					public void run() {
						try {
							RedisUtil.cache.zaddString(Constant.DETAILS_CLICK, ccd.getClickTime(),
									json_details);
						} catch (Exception e) {
							//the Future is never inspected, so log here or the error is lost
							log.error("CPC click details redis zadd ERROR!", e);
						}
					}
				});
			} catch (Exception e) {
				//submit only fails when the pool is shut down; never swallow silently
				log.error("CPC click details submit ERROR!", e);
			}
		}
		return dto;
	}

	/**
	 * Maps each Kafka message to {@code (dimensionKey, CpcClickValue)} pairs.
	 * One message may contain several "\n"-joined log lines; each successful
	 * click emits one pair per configured dimension key.
	 *
	 * @see com._58city.spark.app.mr.MrKafkaInterface#mapPair(java.util.List)
	 */
	public List<JavaPairDStream<String, CpcClickValue>> mapPair(
			List<JavaPairDStream<String, String>> kafkaStreams) {
		List<JavaPairDStream<String, CpcClickValue>> list =
				new ArrayList<JavaPairDStream<String,CpcClickValue>>();
		for(JavaPairDStream<String, String> stream : kafkaStreams){
			JavaPairDStream<String, CpcClickValue> mapStream = stream.
					flatMapToPair(new PairFlatMapFunction<Tuple2<String,String>, String, CpcClickValue>() {
				private static final long serialVersionUID = -2571127980126965939L;

				public Iterable<Tuple2<String, CpcClickValue>> call(Tuple2<String,String> event)
						throws Exception {
					List<Tuple2<String, CpcClickValue>> list = new ArrayList<Tuple2<String, CpcClickValue>>();
					try {
						//the platform merges several log lines into one message; split them back
						for(String oneLine : splitter.split(event._2)){
							CPCClickDto dto = toDto(oneLine);
							if(null == dto) continue;
							if(!CPCClickDto.success_stats.contains(dto.getState1())) continue;
							//metrics are identical for every key: compute once, outside the loop
							long cash = DoubleMath.roundToLong(Double.parseDouble(dto.getCash()) * 100,
									RoundingMode.HALF_UP);
							long pv = 1;
							long cost = Long.parseLong(dto.getCost());
							for(KeyInfo keyInfo : keyInfos){
								String key = SparkUtil.getMapKeyNotTime(dto, keyInfo);
								if(null != key){
									//fresh set per pair: CpcClickValue owns its customers set
									Set<String> customers = new HashSet<String>();
									customers.add(dto.getCustomerId());
									list.add(new Tuple2<String, CpcClickValue>(key,
											new CpcClickValue(pv, cash, cost, customers)));
								}
							}
						}
					} catch (Exception e) {
						log.error("MrCpcClick map ERROR!", e);
					}
					return list;
				}
			});
			list.add(mapStream);
		}
		return list;
	}

	/**
	 * Merges two partial aggregates for the same dimension key by summing the
	 * counters and unioning the distinct-customer sets.
	 *
	 * @see com._58city.spark.app.mr.MrKafkaInterface#reducePair(org.apache.spark.streaming.api.java.JavaPairDStream)
	 */
	public JavaPairDStream<String, CpcClickValue> reducePair(
			JavaPairDStream<String, CpcClickValue> mapStream) {
		return mapStream.reduceByKey(new Function2<CpcClickValue, CpcClickValue, CpcClickValue>() {
			private static final long serialVersionUID = -8736176470652700715L;

			public CpcClickValue call(CpcClickValue v1, CpcClickValue v2) throws Exception {
				try {
					Set<String> customers = new HashSet<String>();
					customers.addAll(v1.getCustomers());
					customers.addAll(v2.getCustomers());
					return new CpcClickValue(v1.getPv() + v2.getPv(),
							v1.getCash() + v2.getCash(),
							v1.getCost() + v2.getCost(),
							customers);
				} catch (Exception e) {
					//NOTE(review): on failure both partial values are dropped and an empty
					//aggregate is returned — kept as-is for compatibility with downstream
					log.error("MrCpcClick reduce ERROR!", e);
				}
				return new CpcClickValue();
			}
		});
	}

	/**
	 * Looks up the configured dimension columns for the given key prefix.
	 *
	 * @param keyName key prefix, e.g. {@link #type_new}
	 * @return the dimension column names, or {@code null} if the prefix is unknown
	 */
	private String[] keys(String keyName){
		for(KeyInfo one : keyInfos){
			if(one.getKey_name().equals(keyName)){
				return one.getKey_values();
			}
		}
		return null;
	}

	/**
	 * Converts each reduced partition into {@link CpcClickModel}s stamped with
	 * the current window time and sends them to the Redis result queue.
	 *
	 * @see com._58city.spark.app.mr.MrKafkaInterface#foreachRDD(org.apache.spark.streaming.api.java.JavaPairDStream)
	 */
	public void foreachRDD(JavaPairDStream<String, CpcClickValue> reduceStream) {
		reduceStream.foreachRDD(new Function<JavaPairRDD<String,CpcClickValue>, Void>() {
			private static final long serialVersionUID = -5152845570104684454L;

			public Void call(JavaPairRDD<String, CpcClickValue> v)
					throws Exception {
				//align "now" to the window boundary once per batch
				final long current_time =
						SparkUtil.makeFormateDate(System.currentTimeMillis(), batchInterval);

				v.foreachPartition(new VoidFunction<Iterator<Tuple2<String,CpcClickValue>>>() {

					private static final long serialVersionUID = 8200704507965930162L;

					public void call(Iterator<Tuple2<String, CpcClickValue>> iter)
							throws Exception {
						if(null == iter) return;
						final List<CpcClickModel> list = new ArrayList<CpcClickModel>();
						while(iter.hasNext()){
							Tuple2<String, CpcClickValue> tuple = iter.next();
							try {
								String keyStr = tuple._1;
								if(SparkUtil.ERROR.equals(keyStr)) continue;
								//key layout: "<key_name>@<v0>,<v1>,..." — split once, reuse
								String[] parts = keyStr.split("@");
								if(!type_new.equals(parts[0])) continue;
								String[] key_values = parts[1].split(",");
								CpcClickValue value = tuple._2;
								CpcClickModel ccm = new CpcClickModel();
								ccm.setTime(current_time);
								//"${platform}","${busiLine}","${cityType}","${city1Id}","${city1Name}","${cate2Id}","${cate2Name}"
								TransferDimensionUtil.dimension(ccm, keys(type_new), Integer.parseInt(key_values[0]),
										key_values[1], Integer.parseInt(key_values[2]), Integer.parseInt(key_values[3]),
										key_values[4], key_values[5], key_values[6]);
								ccm.setCash(value.getCash());
								ccm.setCost(value.getCost());
								ccm.setCustNum(value.getCustomers().size());
								ccm.setPv(value.getPv());
								list.add(ccm);
							} catch (Exception e) {
								//a single malformed key must not fail the whole partition
								log.error("MrCpcClick foreachRDD ERROR! key : " + tuple._1, e);
							}
						}

						if(!list.isEmpty()){
							log.info("CPC Click {}: output {} records .", current_time, list.size());
							//async push so a slow Redis never stalls the streaming batch
							threadPool.submit(new Runnable() {
								public void run() {
									RedisUtil.send(Constant.CPC_CLICK_REDIS_QUEUE_KEY, list);
								}
							});
						}
					}
				});
				return null;
			}
		});
	}

}
