package com.apexsoft.service;

import java.io.Serializable;
import java.util.*;

import com.alibaba.fastjson.JSONObject;

import com.apexsoft.pojo.FirstCut;
import com.apexsoft.pojo.KafkaMessage;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.*;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.*;
import org.apache.spark.streaming.kafka010.*;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;


/**
 * Consumes JSON messages from Kafka topics "test" and "test1" via the Spark
 * direct stream, reshapes the "test" payloads into {@link FirstCut} beans,
 * and runs Spark SQL over each micro-batch.
 *
 * <p>Fixes over the previous revision: the per-batch SQL string was malformed
 * ("select *from ..."), each batch registered a new uniquely-named temp view
 * (leaking one view per batch, and {@code createTempView} throws on a name
 * collision), and large blocks of dead commented-out code were removed.
 */
public class SparkDStreamSQL {

    /** Name of the single, replaceable temp view reused by every batch. */
    private static final String VIEW_NAME = "first_cut";

    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("SparkDStreamSQL");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(10));

        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "pro1:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "g1");
        kafkaParams.put("auto.offset.reset", "earliest");
        kafkaParams.put("enable.auto.commit", true);

        Collection<String> topics = Arrays.asList("test", "test1");

        JavaInputDStream<ConsumerRecord<String, String>> stream =
                KafkaUtils.createDirectStream(
                        jssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams));

        // One Map per partition, keyed by topic name.
        // NOTE(review): within a partition, a later record overwrites an earlier
        // one for the same topic, so only the last record per topic survives —
        // this mirrors the original behavior and is kept as-is.
        JavaDStream<Map<String, String>> javaDStream = stream.mapPartitions(
                (FlatMapFunction<Iterator<ConsumerRecord<String, String>>, Map<String, String>>)
                records -> {
                    Map<String, String> byTopic = new HashMap<>();
                    while (records.hasNext()) {
                        ConsumerRecord<String, String> record = records.next();
                        String topic = record.topic();
                        String value = record.value();
                        if ("test1".equals(topic)) {
                            // Raw pass-through payload.
                            byTopic.put(topic, value);
                        } else if ("test".equals(topic)) {
                            // Project out only the fields downstream SQL uses.
                            JSONObject source = JSONObject.parseObject(value);
                            JSONObject projected = new JSONObject();
                            projected.put("name", source.getString("name"));
                            projected.put("jstime", source.getString("jstime"));
                            projected.put("after", source.getString("after"));
                            byTopic.put(topic, projected.toJSONString());
                        }
                    }
                    return Collections.singletonList(byTopic).iterator();
                });

        javaDStream
                // Drop empty batches before touching Spark SQL.
                .filter(byTopic -> byTopic != null && !byTopic.isEmpty())
                .foreachRDD(mapJavaRDD -> {
                    SparkSession spark =
                            JavaSparkSessionSingleton.getInstance(mapJavaRDD.context().getConf());
                    System.out.println(mapJavaRDD.count());

                    // Re-parse the projected "test" JSON into FirstCut beans so
                    // Spark can derive a schema from the bean class.
                    JavaRDD<FirstCut> beans = mapJavaRDD.map(byTopic -> {
                        FirstCut bean = new FirstCut();
                        @SuppressWarnings("unchecked")
                        Map<String, String> fields =
                                (Map<String, String>) JSONObject.parse(byTopic.get("test"));
                        if (fields != null) {
                            bean.setName(fields.get("name"));
                            bean.setJstime(fields.get("jstime"));
                            bean.setAfter(fields.get("after"));
                        }
                        return bean;
                    });

                    Dataset<Row> df = spark.createDataFrame(beans, FirstCut.class);
                    // FIX: createTempView("TEMP"+timestamp) leaked one view per
                    // batch and throws if the name already exists; reuse a single
                    // replaceable view instead.
                    df.createOrReplaceTempView(VIEW_NAME);
                    // FIX: original statement was "select *from <tbl>" (missing space).
                    spark.sql("select * from " + VIEW_NAME).show();
                });

        jssc.start();
        jssc.awaitTermination();
    }
}

/**
 * Process-wide lazy holder for a single {@link SparkSession}, shared by the
 * foreachRDD callbacks above.
 *
 * <p>FIX: the lazy check-then-create was not thread-safe — two concurrent
 * callers could both observe {@code instance == null} and build separate
 * sessions; {@code getInstance} is now {@code synchronized}. The former
 * {@code transient} modifier on the static field was removed (static fields
 * are never serialized, so it had no effect), and a private constructor was
 * added since this is a static-only holder.
 */
class JavaSparkSessionSingleton implements Serializable {
    private static SparkSession instance = null;

    private JavaSparkSessionSingleton() {
        // Static-only holder; never instantiated.
    }

    /**
     * Returns the shared SparkSession, building it from {@code sparkConf} on
     * first use. Subsequent calls ignore the argument and return the cached
     * session.
     *
     * @param sparkConf configuration applied only when the session is first created
     * @return the singleton SparkSession
     */
    public static synchronized SparkSession getInstance(SparkConf sparkConf) {
        if (instance == null) {
            instance = SparkSession
                    .builder()
                    .config(sparkConf)
                    .getOrCreate();
        }
        return instance;
    }
}
