package com.zq.learn.spark.springspark;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

/**
 * SparkService
 *
 * @author ZhangQi
 * @date 2020/2/12
 */
@Service
public class SparkService {
  @Autowired
  private JavaSparkContext sparkContext;
  /** Kafka broker list, e.g. "host1:9092,host2:9092". */
  @Value("${brokers}")
  private String brokers;
  /** Kafka consumer group id. */
  @Value("${groupId}")
  private String groupId;

  /**
   * Consumes order events from the "orderTopic" Kafka topic in 5-second micro-batches
   * and maintains two running per-day totals in MongoDB:
   * successful-order count ("perCount_&lt;day&gt;") and successful-order revenue
   * ("perSumPrice_&lt;day&gt;").
   *
   * <p>Blocks until the streaming context terminates; on interruption the interrupt
   * flag is restored and the context is closed.
   */
  public void statisticOrder() {

    JavaStreamingContext jssc = new JavaStreamingContext(sparkContext, Durations.seconds(5));

    // Kafka consumer configuration.
    Map<String, Object> kafkaParams = new HashMap<>();
    // Broker address list the consumer connects to.
    kafkaParams.put("bootstrap.servers", brokers);
    // Keys and values arrive as UTF-8 strings.
    kafkaParams.put("key.deserializer", StringDeserializer.class);
    kafkaParams.put("value.deserializer", StringDeserializer.class);
    kafkaParams.put("group.id", groupId);
    // Start from the newest offset when there is no committed position
    // (this broker version uses "latest", not the older "largest").
    kafkaParams.put("auto.offset.reset", "latest");
    // Do not auto-commit offsets back to Kafka.
    kafkaParams.put("enable.auto.commit", false);

    // Topic set; comma-separated to allow multiple topics.
    String topics = "orderTopic";
    Collection<String> topicsSet = new HashSet<>(Arrays.asList(topics.split(",")));

    // Direct (receiver-less) Kafka stream.
    JavaInputDStream<ConsumerRecord<String, String>> lines =
        KafkaUtils.createDirectStream(jssc, LocationStrategies.PreferConsistent(),
            ConsumerStrategies.Subscribe(topicsSet, kafkaParams));

    lines.foreachRDD(rdd -> {
      JavaRDD<JSONObject> order = rdd.map(x -> JSON.parseObject(x.value()));

      // Per-day successful order count. "status" appears to be 0/1, so summing it
      // counts successes — TODO confirm against the producer's schema.
      JavaPairRDD<String, Integer> perDayCount = order.mapToPair(x -> {
        Integer status = x.getInteger("status");
        // First 8 chars of "timestamp" taken as the day key (presumably yyyyMMdd — verify).
        String day = x.getString("timestamp").substring(0, 8);
        return new Tuple2<>(day, status);
      });
      perDayCount.reduceByKey(Integer::sum)
          .foreachPartition(partition ->
              partition.forEachRemaining(x -> upsertStatistic("perCount", x)));

      // Per-day revenue: only orders with status == 1 contribute their price.
      JavaPairRDD<String, Integer> perDaySum = order.mapToPair(x -> {
        Integer price = x.getInteger("price");
        Integer status = x.getInteger("status");
        String day = x.getString("timestamp").substring(0, 8);
        return new Tuple2<>(day, status.equals(1) ? price : 0);
      });
      perDaySum.reduceByKey(Integer::sum)
          .foreachPartition(partition ->
              partition.forEachRemaining(x -> upsertStatistic("perSumPrice", x)));
    });

    jssc.start();
    try {
      jssc.awaitTermination();
    } catch (InterruptedException e) {
      // Restore the interrupt flag instead of swallowing it so callers can observe
      // the interruption (the original printStackTrace() lost it).
      Thread.currentThread().interrupt();
    } finally {
      jssc.close();
    }
  }

  /**
   * Adds a batch aggregate onto the total already stored in MongoDB and writes it back.
   *
   * <p>Runs on Spark executors, so it must not capture SparkService instance state —
   * it is {@code static} and reaches Mongo through the static {@code MongoUtil.mongoTemplate}
   * (driver-side beans are not serializable to workers).
   *
   * @param type statistic name, used both as document type and as the id prefix
   *             ("perCount" or "perSumPrice")
   * @param x    (day, batchValue) pair produced by reduceByKey
   */
  private static void upsertStatistic(String type, Tuple2<String, Integer> x) {
    String id = type + "_" + x._1;
    StatisticsResult source = MongoUtil.mongoTemplate.findById(id, StatisticsResult.class);
    Integer sumValue = x._2;
    if (source != null) {
      // Accumulate across batches: new total = stored total + this batch's value.
      sumValue = sumValue + source.getValue();
    }
    MongoUtil.mongoTemplate.save(new StatisticsResult(id, x._1, type, sumValue));
  }
}
