package com.yc.streaming.app;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.api.java.JavaStreamingContextFactory;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.codehaus.jettison.json.JSONObject;

import com.google.common.collect.Sets;
import com.yc.streaming.common.AppConfig;
import com.yc.streaming.common.AppConstants;
import com.yc.streaming.common.RedisConstant;
import com.yc.streaming.util.RedisUtils;

import kafka.serializer.StringDecoder;
import scala.Tuple2;

/**
 * Consumes dispatch-detail records from Kafka and caches the IDs of
 * surge-priced ("bid") orders in Redis, so that other jobs can compute
 * which surge-priced orders were completed.
 *
 * @author Administrator
 */
public class BidOrderToRedisAppOld {

	/**
	 * Builds the streaming context: reads '\001'-delimited dispatch-detail
	 * lines from Kafka via a direct stream, keeps only the orders whose
	 * total surge multiplier is positive, deduplicates the order IDs per
	 * batch, and writes them to Redis with a 2-hour expiry.
	 *
	 * @param checkpointDirectory directory used for Spark checkpointing
	 * @param duration            batch interval in seconds
	 * @return the configured (not yet started) streaming context
	 */
	private static JavaStreamingContext createContext(String checkpointDirectory, int duration) {
		String brokers = AppConfig.get(AppConstants.APP_KAFKA_BROKERS);
		// NOTE: local-variable typo "dispatchToipics" fixed to "dispatchTopics".
		String dispatchTopics = AppConfig.get(AppConstants.APP_KAFKA_TOPIC_DIPATH_DETAIL_INFO_ETL);

		SparkConf conf = new SparkConf()
				.set("spark.streaming.unpersist", "true")
				.set("spark.default.parallelism", "8")
				.set("spark.storage.memoryFraction", "0.5")
				.set("spark.shuffle.consolidateFiles", "true");
		JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(duration));
		ssc.checkpoint(checkpointDirectory);

		// The topic property may hold several comma-separated topic names.
		HashSet<String> dispatchTopicSet = new HashSet<String>(Arrays.asList(dispatchTopics.split(",")));
		HashMap<String, String> kafkaParams = new HashMap<String, String>();
		kafkaParams.put("metadata.broker.list", brokers);

		JavaPairInputDStream<String, String> dispatchMessages = KafkaUtils.createDirectStream(ssc,
				String.class,
				String.class,
				StringDecoder.class,
				StringDecoder.class,
				kafkaParams,
				dispatchTopicSet);

		dispatchMessages.mapToPair(new PairFunction<Tuple2<String, String>, String, Long>() {
			private static final long serialVersionUID = 1L;

			/**
			 * Maps a raw dispatch line to (orderId, 1L) when the order carries a
			 * positive surge multiplier, or (orderId, null) otherwise; the null
			 * markers are removed by the filter stage below.
			 */
			@Override
			public Tuple2<String, Long> call(Tuple2<String, String> t) throws Exception {
				String line = StringUtils.trim(t._2());
				String[] items = line.split("\001");
				double bidRate = 0;
				try {
					// Column 25 is expected to hold a JSON blob whose
					// "total_magnification" field is the surge multiplier.
					String addPrice = items[25];
					JSONObject priceJson = new JSONObject(addPrice);
					bidRate = Double.parseDouble(priceJson.getString("total_magnification"));
				} catch (Exception ignored) {
					// Best-effort parse: short or malformed records keep bidRate
					// at 0 and are filtered out downstream, never crash the job.
				}
				if (bidRate > 0) {
					return new Tuple2<String, Long>(items[2], 1L); // items[2] = order ID
				} else {
					return new Tuple2<String, Long>(items[2], null);
				}
			}
		}).filter(new Function<Tuple2<String, Long>, Boolean>() {
			private static final long serialVersionUID = 1L;

			// Drop the non-surge orders (marked with a null value above).
			@Override
			public Boolean call(Tuple2<String, Long> t) throws Exception {
				return t._2() != null;
			}
		}).reduceByKey(new Function2<Long, Long, Long>() {
			private static final long serialVersionUID = 1L;

			// Deduplicate order IDs within a batch; the value itself is unused.
			@Override
			public Long call(Long arg0, Long arg1) throws Exception {
				return arg1;
			}
		}).repartition(1).foreachRDD(new VoidFunction2<JavaPairRDD<String, Long>, Time>() {
			private static final long serialVersionUID = 1L;

			@Override
			public void call(JavaPairRDD<String, Long> rdd, Time time) throws Exception {
				rdd.foreachPartition(new VoidFunction<Iterator<Tuple2<String, Long>>>() {
					private static final long serialVersionUID = 1L;

					// Collect this partition's surge-order IDs and cache them in
					// Redis with a 2-hour expiry under the bid-order key prefix.
					@Override
					public void call(Iterator<Tuple2<String, Long>> iter) throws Exception {
						Set<String> orderSet = Sets.newHashSet();
						while (iter.hasNext()) {
							Tuple2<String, Long> entry = iter.next();
							orderSet.add(entry._1);
						}
						RedisUtils.setAllWithEx(RedisConstant.BID_ORDER_DB_INDEX, orderSet, RedisConstant.redis_2h, RedisConstant.BID_ORDER_PREFIX);
					}
				});
			}
		});
		return ssc;
	}

	/**
	 * Entry point. A single optional argument overrides the configured batch
	 * interval (seconds). Recovers the streaming context from the checkpoint
	 * directory when one exists; otherwise builds a fresh one.
	 *
	 * @param args optional: [0] = batch interval in seconds
	 */
	public static void main(String[] args) throws ClassNotFoundException, IOException, InterruptedException {
		int d = Integer.parseInt(AppConfig.get(AppConstants.APP_DURATION));
		if (args != null && 1 == args.length) {
			d = Integer.parseInt(args[0]);
		}
		// Must be (effectively) final to be captured by the factory below.
		final int duration = d;

		final String checkpointDirectory = AppConfig.get(AppConstants.CHECKPOINT_DIRECTORY) + "/BidOrderToRedisApp";
		JavaStreamingContextFactory factory = new JavaStreamingContextFactory() {
			@Override
			public JavaStreamingContext create() {
				return createContext(checkpointDirectory, duration);
			}
		};
		JavaStreamingContext context = JavaStreamingContext.getOrCreate(checkpointDirectory, factory);

		context.start();
		context.awaitTermination();
		context.stop();
	}
}
