package com.yongche;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

import kafka.serializer.StringDecoder;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import scala.Tuple2;

import com.yongche.bigdata.common.AppConfig;
import com.yongche.bigdata.common.AppConstants;
import com.yongche.bigdata.dao.OriginAmountVoteDao;
import com.yongche.bigdata.model.metric.OriginAmountMetric;
import com.yongche.bigdata.model.vote.OriginAmountVote;
import com.yongche.bigdata.util.TimeUtil;

/**
 * Created by jarry on 16/11/22.
 */
/**
 * Spark Streaming job: consumes \001-delimited order records from Kafka,
 * keeps settled orders (status 7, flag bit 0x400000 set, zero pay amount,
 * not a DELETE change), sums (originSharingAmount - originAmount) per city,
 * adds a nationwide ("quanguo") total via Spark SQL, and writes every batch
 * to storage through {@link OriginAmountVoteDao}.
 */
public class OriginAmountMetricApp_Test {
    public static void main(String[] args) {
        String brokers = AppConfig.get(AppConstants.APP_KAFKA_BROKERS);
        String orderTopics = AppConfig.get(AppConstants.APP_KAFKA_TOPIC_ORDER);
        int duration = Integer.parseInt(AppConfig.get(AppConstants.APP_DURATION));
        // An optional single CLI argument overrides the configured batch duration (seconds).
        if (args != null && args.length == 1) {
            duration = Integer.parseInt(args[0]);
        }

        SparkConf conf = new SparkConf()
                .set("spark.streaming.unpersist", "true")   // let Spark decide which RDDs to unpersist (helps GC)
                .set("spark.default.parallelism", "8")      // default thread count for reduceByKey etc.
                .set("spark.storage.memoryFraction", "0.5")
                .set("spark.shuffle.consolidateFiles", "true");

        JavaStreamingContext context = new JavaStreamingContext(conf, Durations.seconds(duration));
        final SQLContext sqlcontext = new SQLContext(context.sparkContext());
        HashSet<String> orderTopicSet = new HashSet<String>(Arrays.asList(orderTopics.split(",")));
        HashMap<String, String> kafkaParams = new HashMap<String, String>();
        kafkaParams.put("metadata.broker.list", brokers);

        // Create direct kafka stream with brokers and topics
        JavaPairInputDStream<String, String> messages = KafkaUtils.createDirectStream(context,
                String.class,
                String.class,
                StringDecoder.class,
                StringDecoder.class,
                kafkaParams,
                orderTopicSet);

        // Parse each order line into (orderId, metric). Records that do not
        // satisfy the filter conditions map to a null metric and are dropped
        // by the subsequent filter().
        JavaPairDStream<String, OriginAmountMetric> orderLines = messages.mapToPair(new PairFunction<Tuple2<String, String>, String, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, OriginAmountMetric> call(
                    Tuple2<String, String> t) throws Exception {
                String line = t._2();
                // NOTE(review): field positions (0=id, 18=city, 6=status, 10=flag,
                // 73/74=amounts, 71=pay_amount, 86=op_type) come from the upstream
                // order-record layout — confirm against the producer schema.
                String[] items = line.split("\001");
                String id = items[0];
                String city = items[18];
                int status = Integer.parseInt(items[6]);
                long flag = Long.parseLong(items[10]);
                long f = flag & 0x400000;
                double origin_amount = Double.parseDouble(items[73]);
                double origin_sharing_amount = Double.parseDouble(items[74]);
                int pay_amount = (int) Double.parseDouble(items[71]);
                String op_type = items[86];
                // Keep only status-7 orders with the 0x400000 flag bit set,
                // zero pay amount, and not a DELETE change record.
                if (status == 7 && f == 0x400000 && pay_amount == 0 && !"DELETE".equals(op_type)) {
                    return new Tuple2<String, OriginAmountMetric>(id, new OriginAmountMetric(id, status, origin_sharing_amount, origin_amount, flag, city));
                }
                return new Tuple2<String, OriginAmountMetric>(id, null);
            }
        }).filter(new Function<Tuple2<String, OriginAmountMetric>, Boolean>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Boolean call(Tuple2<String, OriginAmountMetric> t) throws Exception {
                return t._2() != null;
            }
        });

        // Deduplicate by order id within a batch, keeping the last record seen.
        JavaPairDStream<String, OriginAmountMetric> orders = orderLines.reduceByKey(new Function2<OriginAmountMetric, OriginAmountMetric, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public OriginAmountMetric call(OriginAmountMetric v1,
                    OriginAmountMetric v2) throws Exception {
                return v2;
            }
        });

        // Re-key by city for the per-city aggregation below.
        JavaPairDStream<String, OriginAmountMetric> orderPairs = orders.mapToPair(new PairFunction<Tuple2<String, OriginAmountMetric>, String, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, OriginAmountMetric> call(
                    Tuple2<String, OriginAmountMetric> t) throws Exception {
                return new Tuple2<String, OriginAmountMetric>(t._2.getCity(), t._2);
            }
        });

        // Per-city vote: sum of (originSharingAmount - originAmount),
        // rounded to three decimal places.
        JavaDStream<OriginAmountVote> cs = orderPairs.groupByKey().map(new Function<Tuple2<String, Iterable<OriginAmountMetric>>, OriginAmountVote>() {
            private static final long serialVersionUID = 1L;
            @Override
            public OriginAmountVote call(Tuple2<String, Iterable<OriginAmountMetric>> t) throws Exception {
                String city = t._1();
                Iterator<OriginAmountMetric> it = t._2().iterator();
                double amount = 0.0;
                while (it.hasNext()) {
                    OriginAmountMetric m = it.next();
                    amount += m.getOriginSharingAmount() - m.getOriginAmount();
                }
                amount = (double) Math.round(amount * 1000) / 1000;
                return new OriginAmountVote(city, amount);
            }
        });

        // Nationwide total ("quanguo") computed over the per-city votes via
        // Spark SQL. Persisted because the stream is reused in the union below.
        JavaDStream<OriginAmountVote> qs = cs.transform(new Function<JavaRDD<OriginAmountVote>, JavaRDD<OriginAmountVote>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public JavaRDD<OriginAmountVote> call(JavaRDD<OriginAmountVote> r) throws Exception {
                DataFrame schema = sqlcontext.createDataFrame(r, OriginAmountVote.class);
                schema.registerTempTable("t_origin_amount_vote");
                DataFrame df = sqlcontext.sql("select 'quanguo', sum(amount) from t_origin_amount_vote");

                JavaRDD<OriginAmountVote> rs = df.javaRDD().map(new Function<Row, OriginAmountVote>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public OriginAmountVote call(Row row) throws Exception {
                        String city = row.getString(0);
                        double amount = row.getDouble(1);
                        amount = (double) Math.round(amount * 1000) / 1000;
                        return new OriginAmountVote(city, amount);
                    }
                });
                return rs;
            }
        }).persist(StorageLevel.MEMORY_AND_DISK_SER());

        // Per-city votes plus the nationwide total.
        JavaDStream<OriginAmountVote> vs = cs.union(qs);

        // Persist each batch, stamping every vote with the batch time.
        vs.repartition(1).foreachRDD(new VoidFunction2<JavaRDD<OriginAmountVote>, Time>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(JavaRDD<OriginAmountVote> r, final Time t) throws Exception {
                r.foreachPartition(new VoidFunction<Iterator<OriginAmountVote>>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public void call(Iterator<OriginAmountVote> it) throws Exception {
                        // DAO is created per partition so it lives on the executor,
                        // avoiding serialization of a driver-side instance.
                        OriginAmountVoteDao dao = new OriginAmountVoteDao();
                        while (it.hasNext()) {
                            OriginAmountVote v = it.next();
                            v.setTime(TimeUtil.getTime(t.milliseconds()));
                            dao.add(v);
                        }
                    }
                });
            }
        });

        vs.print(100);

        context.start();
        context.awaitTermination();
        context.stop();
    }
}
