package streaming;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import kafka.serializer.StringDecoder;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.*;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.*;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import producer.KafkaRedisConfig;
import redis.clients.jedis.Jedis;
import scala.Tuple2;
import util.JavaRedisClient;

import java.util.*;

import static org.bouncycastle.asn1.x500.style.RFC4519Style.uid;

/**
 * Created by chs on 8/27/18.
 */
/**
 * Streaming job: consumes order events from Kafka, joins them with the user
 * dimension table (loaded from HDFS) to attach an age, aggregates order money
 * per age group for each micro-batch, and increments the running totals in a
 * Redis hash ("age::money").
 *
 * <p>Incoming Kafka value format: {"uid":"55792,1.4898961024,0"} — a single
 * comma-separated field holding uid, gross amount and discount; the record
 * carries no age, hence the join against t_user.
 */
public class OrderStreamingAnalysis {
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setAppName("OrderStreamingAnalysis");

        // Only one SparkContext may exist per JVM, so the SQLContext is built
        // from the streaming context's underlying SparkContext.
        JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(2));
        final SQLContext sqlcontext = new SQLContext(ssc.sparkContext());

        ssc.sparkContext().setLogLevel("WARN");

        // Load the user dimension table and re-apply an explicit schema
        // (CSV columns come in untyped as _c0, _c1, ...).
        final Dataset<Row> userDs = sqlcontext.read().csv("hdfs://master:9000/warehouse/t_user");
        StructType userSchema = new StructType()
                .add("uid", "string", false)
                .add("age", "string", false)
                .add("six", "string", true)
                .add("active_date", "string", false)
                .add("limit", "string", false);
        final Dataset<Row> userDf = sqlcontext.createDataFrame(userDs.toJavaRDD(), userSchema);

        // Kafka configuration.
        String[] topics = KafkaRedisConfig.KAFKA_ORDER_TOPIC.split("\\,");
        System.out.println("Topics: " + Arrays.toString(topics));
        String brokers = KafkaRedisConfig.KAFKA_ADDR;

        Set<String> topicsSet = new HashSet<String>(Arrays.asList(topics));
        Map<String, Object> kafkaParams = new HashMap<String, Object>();
        kafkaParams.put("bootstrap.servers", brokers);
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "use_a_separate_group_id_for_each_stream");
        kafkaParams.put("auto.offset.reset", "latest");
        kafkaParams.put("enable.auto.commit", false);

        // Direct stream via spark-streaming-kafka-0-10 KafkaUtils.
        JavaInputDStream<ConsumerRecord<String, String>> kafkaStream =
                KafkaUtils.createDirectStream(
                        ssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.<String, String>Subscribe(topicsSet, kafkaParams)
                );

        // Parse each raw Kafka value into a JSON object.
        JavaDStream<JSONObject> events = kafkaStream.map(new Function<ConsumerRecord<String, String>, JSONObject>() {
            public JSONObject call(ConsumerRecord<String, String> line) throws Exception {
                System.out.println("line:" + line.value());
                return JSON.parseObject(line.value());
            }
        });

        // Extract (uid, net amount) where net amount = gross - discount.
        JavaPairDStream<String, Double> orderDs = events.mapToPair(new PairFunction<JSONObject, String, Double>() {
            public Tuple2<String, Double> call(JSONObject json) throws Exception {
                String[] strs = json.getString("uid").split(",");
                return new Tuple2<String, Double>(strs[0], Double.parseDouble(strs[1]) - Double.parseDouble(strs[2]));
            }
        });

        orderDs.foreachRDD(new VoidFunction<JavaPairRDD<String, Double>>() {
            public void call(JavaPairRDD<String, Double> rdd) throws Exception {
                // Convert the batch's (uid, money) pairs to Rows for SQL.
                JavaRDD<Row> mapRow = rdd.map(new Function<Tuple2<String, Double>, Row>() {
                    public Row call(Tuple2<String, Double> v1) throws Exception {
                        String uid = v1._1();
                        Double money = v1._2();
                        return RowFactory.create(uid, money);
                    }
                });
                StructType orderSchema = new StructType()
                        .add("uid", "string", false)
                        .add("money", "double", false);
                Dataset<Row> orderDf = sqlcontext.createDataFrame(mapRow, orderSchema);

                final String moneyHashKey = "age::money";

                // Join orders with users on uid, then sum money per age group.
                Dataset<Row> count = orderDf.join(userDf, orderDf.col("uid").equalTo(userDf.col("uid")))
                        .select("age", "money")
                        .groupBy("age")
                        .sum("money");

                count.printSchema();
                count.repartition(3).foreachPartition(new ForeachPartitionFunction<Row>() {
                    public void call(Iterator<Row> t) throws Exception {
                        Jedis jedis = JavaRedisClient.get().getResource();
                        try {
                            // Drain the whole partition — an `if` here would
                            // silently drop every row after the first one.
                            while (t.hasNext()) {
                                Row row = t.next();
                                try {
                                    String age = row.getString(0);
                                    Double money = row.getDouble(1);
                                    System.out.println(age + "::::" + money);
                                    jedis.hincrByFloat(moneyHashKey, age, money);
                                } catch (Exception e) {
                                    // Best-effort: log the bad row and keep going.
                                    System.out.println("error" + e);
                                }
                            }
                        } finally {
                            // Always return the pooled connection.
                            jedis.close();
                        }
                    }
                });
            }
        });

        ssc.start();
        ssc.awaitTermination();
    }
}
