package com.yd.spark.demo;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import kafka.common.TopicAndPartition;
import kafka.message.MessageAndMetadata;
import kafka.serializer.StringDecoder;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import scala.Tuple2;

import com.yongche.bigdata.common.AppConfig;
import com.yongche.bigdata.common.AppConstants;
import com.yongche.bigdata.dao.OriginAmountVoteDao;
import com.yongche.bigdata.model.metric.OriginAmountMetric;
import com.yongche.bigdata.model.vote.OriginAmountVote;
import com.yongche.bigdata.util.TimeUtil;

/**
 * Created by jarry on 16/11/22.
 */
/**
 * Spark Streaming job: consumes \001-delimited service-order records from Kafka,
 * keeps completed orders (status 7) that carry the 0x400000 flag bit with a zero
 * pay amount, aggregates (origin_sharing_amount - origin_amount) per city plus a
 * nationwide ("quanguo") total, and persists each {@link OriginAmountVote} via
 * {@link OriginAmountVoteDao}.
 *
 * <p>Created by jarry on 16/11/22.
 */
public class OriginAmountMetricApp_Test {

    /**
     * Entry point.
     *
     * @param args optional; a single argument overrides the configured batch
     *             duration (in seconds)
     */
    public static void main(String[] args) {
        String brokers = AppConfig.get(AppConstants.APP_KAFKA_BROKERS);
        String orderTopics = AppConfig.get(AppConstants.APP_KAFKA_TOPIC_ORDER);
        int duration = Integer.parseInt(AppConfig.get(AppConstants.APP_DURATION));
        if (args != null && args.length == 1) {
            duration = Integer.parseInt(args[0]);
        }

        SparkConf conf = new SparkConf()
                .set("spark.streaming.unpersist", "true")       // let Spark unpersist stale RDDs (helps GC)
                .set("spark.default.parallelism", "8")          // threads for reduceByKey etc. (default 8)
                .set("spark.storage.memoryFraction", "0.5")
                .set("spark.shuffle.consolidateFiles", "true")
                .set("spark.streaming.kafka.maxRatePerPartition", "200"); // throttle per-partition ingest

        JavaStreamingContext context = new JavaStreamingContext(conf, Durations.seconds(duration));
        final SQLContext sqlcontext = new SQLContext(context.sparkContext());

        Map<String, String> kafkaParams = new HashMap<String, String>();
        kafkaParams.put("metadata.broker.list", brokers);
        kafkaParams.put("auto.offset.reset", "smallest"); // start from the earliest available offset

        // Start every partition from offset 0.
        // NOTE(review): the partition count (18) is hard-coded — confirm it matches the topic.
        Map<TopicAndPartition, Long> offset = new HashMap<TopicAndPartition, Long>();
        for (int i = 0; i < 18; i++) {
            offset.put(new TopicAndPartition(orderTopics, i), 0L);
        }

        // Direct Kafka stream; each record is reduced to its raw message payload.
        JavaInputDStream<String> messages = KafkaUtils.<String, String, StringDecoder, StringDecoder, String>createDirectStream(
                context,
                String.class,
                String.class,
                StringDecoder.class,
                StringDecoder.class,
                String.class,
                kafkaParams,
                offset,
                new Function<MessageAndMetadata<String, String>, String>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public String call(MessageAndMetadata<String, String> v1) throws Exception {
                        return v1.message();
                    }
                });

        // Parse each order line; keep only completed (status 7) orders with the
        // 0x400000 flag bit set, zero pay amount, and which are not DELETE events.
        JavaPairDStream<String, OriginAmountMetric> orderLines = messages.mapToPair(new PairFunction<String, String, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, OriginAmountMetric> call(String t) throws Exception {
                // limit -1 keeps trailing empty fields so the fixed column indexes stay valid
                String[] items = t.split("\001", -1);
                String id = items[0];
                String city = items[18];
                int status = Integer.parseInt(items[6]);
                long flag = Long.parseLong(items[10]);
                long f = flag & 0x400000; // isolate the "origin amount" flag bit
                double origin_amount = Double.parseDouble(items[73]);
                double origin_sharing_amount = Double.parseDouble(items[74]);
                int pay_amount = (int) Double.parseDouble(items[71]);
                // op_type column may be absent on older records
                String op_type = items.length >= 87 ? items[86] : "";
                if (status == 7 && f == 0x400000 && pay_amount == 0 && !"DELETE".equals(op_type)) {
                    return new Tuple2<String, OriginAmountMetric>(id,
                            new OriginAmountMetric(id, status, origin_sharing_amount, origin_amount, flag, city));
                }
                // Non-matching lines get a null value and are dropped by the filter below.
                return new Tuple2<String, OriginAmountMetric>(id, null);
            }
        }).filter(new Function<Tuple2<String, OriginAmountMetric>, Boolean>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Boolean call(Tuple2<String, OriginAmountMetric> t) throws Exception {
                return t._2() != null;
            }
        });

        // Same filter but including DELETE events. Currently unused downstream
        // (no action is registered on it), retained for diagnostics/comparison.
        // FIX: split now uses limit -1 so trailing empty fields no longer cause
        // ArrayIndexOutOfBoundsException on the high column indexes.
        JavaPairDStream<String, OriginAmountMetric> orderLines_WithDelete = messages.mapToPair(new PairFunction<String, String, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, OriginAmountMetric> call(String t) throws Exception {
                String[] items = t.split("\001", -1);
                String id = items[0];
                String city = items[18];
                int status = Integer.parseInt(items[6]);
                long flag = Long.parseLong(items[10]);
                long f = flag & 0x400000;
                double origin_amount = Double.parseDouble(items[73]);
                double origin_sharing_amount = Double.parseDouble(items[74]);
                int pay_amount = (int) Double.parseDouble(items[71]);
                if (status == 7 && f == 0x400000 && pay_amount == 0) {
                    return new Tuple2<String, OriginAmountMetric>(id,
                            new OriginAmountMetric(id, status, origin_sharing_amount, origin_amount, flag, city));
                }
                return new Tuple2<String, OriginAmountMetric>(id, null);
            }
        }).filter(new Function<Tuple2<String, OriginAmountMetric>, Boolean>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Boolean call(Tuple2<String, OriginAmountMetric> t) throws Exception {
                return t._2() != null;
            }
        });

        // Deduplicate by order id within the batch, keeping the latest record seen.
        JavaPairDStream<String, OriginAmountMetric> orders = orderLines.reduceByKey(new Function2<OriginAmountMetric, OriginAmountMetric, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public OriginAmountMetric call(OriginAmountMetric v1, OriginAmountMetric v2) throws Exception {
                return v2;
            }
        });

        // Re-key by city for the per-city aggregation.
        JavaPairDStream<String, OriginAmountMetric> orderPairs = orders.mapToPair(new PairFunction<Tuple2<String, OriginAmountMetric>, String, OriginAmountMetric>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, OriginAmountMetric> call(Tuple2<String, OriginAmountMetric> t) throws Exception {
                return new Tuple2<String, OriginAmountMetric>(t._2.getCity(), t._2);
            }
        });

        // Per-city vote: sum of (sharing - origin) amounts, rounded to 3 decimals.
        // Persisted because the stream is consumed twice (union + SQL transform below).
        JavaDStream<OriginAmountVote> cs = orderPairs.groupByKey().map(new Function<Tuple2<String, Iterable<OriginAmountMetric>>, OriginAmountVote>() {
            private static final long serialVersionUID = 1L;
            @Override
            public OriginAmountVote call(Tuple2<String, Iterable<OriginAmountMetric>> t) throws Exception {
                String city = t._1();
                Iterator<OriginAmountMetric> it = t._2().iterator();
                double amount = 0.0;
                while (it.hasNext()) {
                    OriginAmountMetric m = it.next();
                    amount += m.getOriginSharingAmount() - m.getOriginAmount();
                }
                amount = (double) Math.round(amount * 1000) / 1000; // round to 3 decimal places
                return new OriginAmountVote(city, amount);
            }
        }).persist(StorageLevel.MEMORY_AND_DISK_SER());

        // Nationwide total ("quanguo") computed via Spark SQL over the per-city votes.
        JavaDStream<OriginAmountVote> qs = cs.transform(new Function<JavaRDD<OriginAmountVote>, JavaRDD<OriginAmountVote>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public JavaRDD<OriginAmountVote> call(JavaRDD<OriginAmountVote> r) throws Exception {
                DataFrame schema = sqlcontext.createDataFrame(r, OriginAmountVote.class);
                schema.registerTempTable("t_origin_amount_vote");
                DataFrame df = sqlcontext.sql("select 'quanguo', sum(amount) from t_origin_amount_vote");

                return df.javaRDD().map(new Function<Row, OriginAmountVote>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public OriginAmountVote call(Row row) throws Exception {
                        String city = row.getString(0);
                        double amount = row.getDouble(1);
                        amount = (double) Math.round(amount * 1000) / 1000;
                        return new OriginAmountVote(city, amount);
                    }
                });
            }
        });

        // Per-city votes plus the nationwide total.
        JavaDStream<OriginAmountVote> vs = cs.union(qs);

        // Single partition so one DAO instance per batch writes all votes,
        // each stamped with the batch time.
        vs.repartition(1).foreachRDD(new VoidFunction2<JavaRDD<OriginAmountVote>, Time>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(JavaRDD<OriginAmountVote> r, final Time t) throws Exception {
                r.foreachPartition(new VoidFunction<Iterator<OriginAmountVote>>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public void call(Iterator<OriginAmountVote> it) throws Exception {
                        // DAO is created inside the partition so it is instantiated
                        // on the executor, not serialized from the driver.
                        OriginAmountVoteDao dao = new OriginAmountVoteDao();
                        while (it.hasNext()) {
                            OriginAmountVote v = it.next();
                            v.setTime(TimeUtil.getTime(t.milliseconds()));
                            dao.add(v);
                        }
                    }
                });
            }
        });
        vs.print(500);

        context.start();
        context.awaitTermination();
        context.stop();
        context.close();
    }
}
