package com.apexsoft.util;

import com.alibaba.fastjson.JSONObject;
import com.apexsoft.pojo.FirstCut;

import org.apache.commons.codec.StringDecoder;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.*;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.parser.SqlBaseParser;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.*;
import scala.Tuple2;

import java.io.Serializable;
import java.util.*;
import java.util.concurrent.LinkedBlockingDeque;


/**
 * Spark Streaming job that consumes two Kafka topics:
 * <ul>
 *   <li>{@code "test"}  — data records (JSON with {@code name}, {@code jstime}, {@code after});</li>
 *   <li>{@code "test1"} — control messages that add/replace per-table query configs
 *       ({@code name}, {@code fields}, {@code sql}).</li>
 * </ul>
 * Each 10-second batch registers the data rows as a temp view and runs every
 * configured SQL against it, then commits Kafka offsets manually.
 */
public class SparkDStreamSQL_2 {
    // Default SQL; currently unused by the job itself — the queries actually run
    // come from the per-table config objects in `list` inside main().
    static String sql="select * from temp";
    // Buffer for control messages read inside the filter closure; drained once per
    // batch under a class lock. NOTE(review): sharing a static queue between the
    // closure and the driver only works with master "local[*]" (single JVM) — on a
    // real cluster the filter runs on executors and the driver would never see
    // these messages. Confirm deployment target.
    static LinkedBlockingDeque<JSONObject> queue=new LinkedBlockingDeque<>();

    public static void main(String[] args) throws Exception {
        // Kerberos/SASL variant of the setup, kept for reference:
        /*System.setProperty("java.security.auth.login.config", "/home/ubuntu/apexsoft/jaas.conf");
        SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");
        conf.setAppName("SparkDStreamSQL");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(10));
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "cmserver.apex.com:9092,datanode01.apex.com:9092,datanode02.apex.com:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "g1");
        kafkaParams.put("auto.offset.reset", "earliest");
        kafkaParams.put("enable.auto.commit", true);
        kafkaParams.put("security.protocol", "SASL_PLAINTEXT");
        kafkaParams.put("sasl.kerberos.service.name", "kafka");*/

        SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");
        conf.setAppName("SparkDStreamSQL");
        // 10-second micro-batches.
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(10));

        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "pro1:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "g1");
        kafkaParams.put("auto.offset.reset", "earliest");
        // Offsets are committed manually via commitAsync after each batch (below).
        kafkaParams.put("enable.auto.commit", false);

        Collection<String> topics = Arrays.asList("test", "test1");

        // Per-partition start offsets, for consuming from a fixed position.
        // Legacy/planned feature: this map is currently NOT passed to the consumer
        // strategy, so it has no effect on the subscription below.
        Map<TopicPartition,Long> topicsAndOffset = new HashMap<>();
        topicsAndOffset.put(new TopicPartition("test",0),0L);
        topicsAndOffset.put(new TopicPartition("test",1),10L);
        topicsAndOffset.put(new TopicPartition("test",2),330L);

        JavaInputDStream<ConsumerRecord<String, String>> stream =
                KafkaUtils.createDirectStream(
                        jssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams)
                );

        // Initial per-table query configs: name, comma-separated field list, and
        // the SQL to run against the temp view built from that table's rows.
        List<JSONObject> list=new ArrayList<>();
            JSONObject j=new JSONObject();
            j.put("name","TZQGL");
            j.put("fields","GDH,JYS,KHH,ZQDM,ZQMC,MRWTSL");
            j.put("sql","select * from temp order by  khh limit 2 ");
        list.add(j);
        j=new JSONObject();
            j.put("name","TSSCJ");
            j.put("fields","CJBH,CJJE,CJJG,CJSL,GDH,JYS,KHH");
            j.put("sql","select khh,count(1) from temp group by khh");
        list.add(j);
        j=new JSONObject();
            j.put("name","TZJDJMX");
            j.put("fields","DJLB,FSJE,KHH,KHXM,LSH,RQ,ZY");
            j.put("sql","select khh,sum(FSJE)fsje from temp group by khh order by fsje");
        list.add(j);

        stream.foreachRDD(new VoidFunction<JavaRDD<ConsumerRecord<String, String>>>() {
            @Override
            public void call(JavaRDD<ConsumerRecord<String, String>> mapJavaRDD) throws Exception {
                SparkSession spark = JavaSparkSessionSingleton_2.getInstance(mapJavaRDD.context().getConf());

                // Split the batch: control messages ("test1") are buffered into the
                // static queue and dropped; data records pass through, are keyed by
                // jstime for ordering, then reduced to (table-name, payload) pairs.
                JavaRDD<Tuple2<String, JSONObject>> filter = mapJavaRDD.filter(new Function<ConsumerRecord<String, String>, Boolean>() {
                    @Override
                    public Boolean call(ConsumerRecord<String, String> stringStringConsumerRecord) throws Exception {
                        String topic = stringStringConsumerRecord.topic();
                        if ("test1".equals(topic)) {
                            try {
                                JSONObject value = (JSONObject) JSONObject.parse(stringStringConsumerRecord.value());
                                queue.add(value);
                            }catch (Exception e){
                                // Malformed control message; log and drop.
                                System.err.println("消息格式不对");
                            }
                            return false;
                        }
                        return true;
                    }
                }).flatMapToPair(new PairFlatMapFunction<ConsumerRecord<String, String>, String, Tuple2<String,JSONObject>>() {
                    @Override
                    public Iterator<Tuple2<String, Tuple2<String,JSONObject>>> call(ConsumerRecord<String, String> stringStringConsumerRecord) throws Exception {
                        // Key by "jstime" so sortByKey orders records by event time
                        // (string ordering — assumes a sortable timestamp format).
                        JSONObject parse = (JSONObject) JSONObject.parse(stringStringConsumerRecord.value());
                        List<Tuple2<String,Tuple2<String,JSONObject>>> list=new ArrayList<Tuple2<String, Tuple2<String,JSONObject>>>();
                        list.add(new Tuple2(parse.getString("jstime"),new Tuple2<>(parse.getString("name"),JSONObject.parse(parse.get("after")+""))));
                        return list.iterator();
                    }
                }).sortByKey().map(new Function<Tuple2<String, Tuple2<String, JSONObject>>, Tuple2<String,JSONObject>>() {
                    @Override
                    public Tuple2<String,JSONObject> call(Tuple2<String, Tuple2<String, JSONObject>> stringTuple2Tuple2) throws Exception {
                        // Drop the ordering key, keep (table-name, row payload).
                        return stringTuple2Tuple2._2();
                    }
                });

                // Merge pending config updates into the config list before running
                // this batch's queries.
                // Fix: the original populated the replacement object inside the scan
                // loop, so when the matching entry was the FIRST element (or the
                // list was empty) an empty JSONObject was appended, which made a
                // later getString("name") return null and NPE on .equals(). Build
                // the replacement once, then replace-by-name.
                synchronized (SparkDStreamSQL_2.class){
                    while(!queue.isEmpty()){
                        JSONObject value = queue.poll();
                        JSONObject result=new JSONObject();
                        result.put("name",value.getString("name"));
                        result.put("fields",value.getString("fields"));
                        result.put("sql",value.getString("sql"));
                        // Remove the old config with the same name, if present.
                        Iterator<JSONObject> it = list.iterator();
                        while (it.hasNext()) {
                            if (it.next().getString("name").equals(value.getString("name"))) {
                                it.remove();
                                break;
                            }
                        }
                        list.add(result);
                        System.out.println("list--->"+list);
                    }
                }

                // For each configured table: build an all-String schema from its
                // field list, select its rows, and run its SQL against view "temp".
                for (int i=0;i<list.size();i++){
                    JSONObject jsonObject=list.get(i);
                    ArrayList<StructField> structType = new ArrayList<StructField>();
                    StructField field = null;
                    String[] fieldses = (jsonObject.get("fields") + "").split(",");
                    for(String f:fieldses){
                        field = DataTypes.createStructField(f, DataTypes.StringType, true);
                        structType.add(field);
                    }
                    StructType schema = DataTypes.createStructType(structType);

                    JavaRDD<Row> map1 =filter.filter(new Function<Tuple2<String,JSONObject>, Boolean>() {
                        @Override
                        public Boolean call(Tuple2<String,JSONObject> stringStringConsumerRecord) throws Exception {
                            // Keep only the rows belonging to this table.
                            String name=stringStringConsumerRecord._1();
                            if(jsonObject.get("name").equals(name)){
                                return true;
                            }
                            return false;
                        }
                    }).map(new Function<Tuple2<String,JSONObject>, Row>() {
                        @Override
                        public Row call(Tuple2<String,JSONObject> ob) throws Exception {
                            // Project the payload onto the configured columns; a
                            // missing key becomes the string "null" (get(s)+"").
                            JSONObject parse1 =ob._2();
                            List<String> list=new ArrayList<String>();
                            for(String s:fieldses){
                                list.add(parse1.get(s)+"");
                            }
                            String[] strings=list.toArray(new String[list.size()]);
                            return RowFactory.create(strings);
                        }
                    });
                    Dataset<Row> df = spark.createDataFrame(map1, schema);
                    String tableName="temp";
                    // Each iteration replaces the view, so every config's SQL sees
                    // only its own table's rows.
                    df.createOrReplaceTempView(tableName);
                    Dataset<Row> sql = spark.sql(jsonObject.getString("sql"));
                    sql.show();
                }
                System.out.println("====================================");
            }
        });

        // Manually commit the consumed offset ranges back to Kafka once per batch
        // (enable.auto.commit is false above).
        stream.foreachRDD(rdd -> {
                    HasOffsetRanges hasOffsetRanges = (HasOffsetRanges) (rdd.rdd());
                    StringBuilder sb = new StringBuilder();
                    for (OffsetRange of : hasOffsetRanges.offsetRanges()) {
                        sb.append(of.topic()).append("-").append(of.partition()).append("=").append(of.untilOffset()).append("\n");
                    }
            //System.out.println("---->"+sb);
            ((CanCommitOffsets) stream.dstream()).commitAsync(hasOffsetRanges.offsetRanges());
        });

        jssc.start();
        jssc.awaitTermination();
    }
}

/**
 * Process-wide lazily-initialized {@link SparkSession} holder, for use inside
 * {@code foreachRDD} closures where a session must be (re)obtained per batch.
 */
class JavaSparkSessionSingleton_2 implements Serializable {
    // NOTE(review): `transient` has no effect on a static field — static fields
    // are never serialized anyway; kept for source compatibility.
    private static transient SparkSession instance = null;

    /**
     * Returns the shared session, creating it on first use.
     *
     * Fix: the lazy initialization was unsynchronized, so concurrent callers
     * could race on the null-check and assignment; the method is now
     * {@code synchronized} (cheap — called once per batch, not per record).
     *
     * @param sparkConf configuration applied only on first creation
     * @return the singleton SparkSession
     */
    public static synchronized SparkSession getInstance(SparkConf sparkConf) {
        if (instance == null) {
            instance = SparkSession
                    .builder()
                    .config(sparkConf)
                    .getOrCreate();
        }
        return instance;
    }
}
