package com._58city.spark.app.mr;

import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import com._58city.spark.app.dao.AppStartEntity;
import com._58city.spark.app.redis.Constant;
import com._58city.spark.app.redis.RedisUtil;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * Streaming job that counts app-start UVs: maps each Kafka message to
 * (imei, "") pairs, deduplicates IMEIs per batch via reduceByKey, and pushes
 * the distinct set into a day-scoped Redis set asynchronously.
 */
public class MrKafkaAppStart implements MrKafkaInterface<String, String>{
	private static final long serialVersionUID = -1160176500661026996L;
	private static final Logger log = LoggerFactory.getLogger(MrKafkaAppStart.class);
	// Shared pool for asynchronous Redis writes issued from foreachRDD partitions.
	private static final ExecutorService threadPool = Executors.newCachedThreadPool();
	// Redis key template; formatted with the batch date (yyyyMMdd) before use.
	private static final String cookie_redis_key = Constant.COOKIE_REDIS_QUEUE_KEY_APP;
	// Placeholder pair value: only the keys (IMEIs) matter for the UV count.
	private static final String emptyString = "";
	// Upstream concatenates multiple log lines into one Kafka message, joined by '\n'.
	private static final Splitter line_splitter = Splitter.on("\n");
	private static final Random random = new Random();

	public MrKafkaAppStart(){
	}

	/**
	 * Builds a synthetic cookie of the form {@code NULL_<millis><randomInt>}.
	 * NOTE(review): not referenced inside this class — presumably used by
	 * callers elsewhere; confirm before removing.
	 *
	 * @return a freshly generated placeholder cookie string
	 */
	public static String generateCookie(){
		return "NULL_" + System.currentTimeMillis() + random.nextInt();
	}

	/**
	 * Splits each Kafka message back into individual app-start log lines and
	 * emits one {@code (imei, "")} pair per line.
	 * NOTE(review): reducePair/foreachRDD carry {@code @Override} but this
	 * method does not — confirm whether MrKafkaInterface declares it.
	 *
	 * @param kafkaStreams raw (key, message) streams from Kafka
	 * @return one flat-mapped stream per input stream, keyed by IMEI
	 */
	public List<JavaPairDStream<String, String>> mapPair(List<JavaPairDStream<String, String>> kafkaStreams) {
		List<JavaPairDStream<String, String>> list = Lists.newArrayList();
		for(JavaPairDStream<String, String> stream : kafkaStreams){
			JavaPairDStream<String, String> mapStream = stream.flatMapToPair(
					new PairFlatMapFunction<Tuple2<String,String>, String, String>() {
						private static final long serialVersionUID = 1L;

						public Iterable<Tuple2<String, String>> call(Tuple2<String, String> event) throws Exception {
							List<Tuple2<String, String>> flatedList = Lists.newArrayList();
							try {
								String content = event._2;
								// The platform bundles several log lines into one message;
								// split on '\n' to restore the individual records.
								for(String oneLine : line_splitter.split(content)){
									if(StringUtils.isBlank(oneLine))
										continue;
									AppStartEntity dto = new AppStartEntity(oneLine);
									String imei = dto.getImei();
									// Records without an IMEI all map to the empty key,
									// so together they count as a single UV.
									if(imei == null){
										log.warn("empty imei:{}", oneLine);
										imei = "";
									}
									flatedList.add(new Tuple2<String, String>(imei, emptyString));
								}
							} catch (Exception e) {
								// Best effort: one malformed message must not kill the batch.
								log.error("AppStart map ERROR!", e);
							}
							return flatedList;
						}
					}
			);
			list.add(mapStream);
		}
		return list;
	}

	/**
	 * Deduplicates IMEIs within a batch. The reducer discards both values —
	 * only the distinct keys are meaningful downstream.
	 *
	 * @param mapStream stream of (imei, "") pairs from {@link #mapPair}
	 * @return stream with one entry per distinct IMEI
	 */
	@Override
	public JavaPairDStream<String, String> reducePair(JavaPairDStream<String, String> mapStream) {
		return mapStream.reduceByKey(new Function2<String, String, String>() {
			private static final long serialVersionUID = 6264099295078802382L;

			public String call(String v1, String v2) throws Exception {
				// Values carry no payload; the original wrapped this constant
				// return in an unreachable try/catch, removed here.
				return emptyString;
			}
		});
	}

	/**
	 * Collects the distinct IMEIs of each RDD partition and pushes them to the
	 * day-scoped Redis set on a background thread, so slow Redis calls do not
	 * block the streaming batch.
	 *
	 * @param reduceStream deduplicated (imei, "") stream from {@link #reducePair}
	 */
	@Override
	public void foreachRDD(JavaPairDStream<String, String> reduceStream) {
		reduceStream.foreachRDD(new Function<JavaPairRDD<String,String>, Void>() {
			private static final long serialVersionUID = -8199089522425453338L;

			public Void call(JavaPairRDD<String, String> v) throws Exception {
				// Batch timestamp taken once on the driver so every partition
				// of this RDD writes to the same daily key.
				final long now = System.currentTimeMillis();

				v.foreachPartition(new VoidFunction<Iterator<Tuple2<String,String>>>() {
					private static final long serialVersionUID = 3040955589442973044L;

					public void call(Iterator<Tuple2<String, String>> iter) throws Exception {
						if(null != iter){
							final Set<String> imeiSet = Sets.newHashSet();
							while(iter.hasNext()){
								imeiSet.add(iter.next()._1);
							}
							if(!imeiSet.isEmpty()){
								// Offload the Redis write so the executor thread
								// is not blocked on network I/O.
								threadPool.submit(new Runnable() {
									public void run() {
										String date = DateFormatUtils.format(now, "yyyyMMdd");
										String dayCookieKey = String.format(cookie_redis_key, date);
										try {
											RedisUtil.sCookieAdd(dayCookieKey, imeiSet);
											log.info("output cookie : {}", imeiSet.size());
										} catch (Exception e) {
											log.error("sCookieAdd error:", e);
										}
									}
								});
							}
						}
					}
				});
				return null;
			}
		});
	}
}
