package com.apexsoft.service;

import com.alibaba.fastjson.JSONObject;
import com.apexsoft.pojo.FirstCut;
import com.apexsoft.util.DStreamUtil;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.sql.*;
import org.apache.spark.sql.catalyst.plans.logical.Window;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.apache.spark.sql.streaming.Trigger;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.util.*;

/**
 * Spark Structured Streaming consumer: reads JSON records from a Kafka topic,
 * parses them into {@code FirstCut} beans, runs the SQL held in
 * {@code DStreamUtil.sql} over them, and prints each micro-batch to stdout.
 *
 * Created by caigx on 2020/9/16.
 */
public class newconsumer {
    // NOTE(review): only referenced by previously commented-out sorting logic;
    // kept public/static for backward compatibility — confirm unused and remove.
    static List<JSONObject> list = new ArrayList<>();
    public static String id = "";

    /**
     * Entry point: consumes JSON records from the Kafka topic "test", parses
     * each record into a {@link FirstCut}, registers the parsed stream as the
     * temp view "updates", executes the SQL in {@code DStreamUtil.sql} against
     * it, and prints every row of each micro-batch to stdout.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // For a Kerberos-secured cluster, set before building the session:
        //   java.security.auth.login.config, java.security.krb5.conf,
        //   javax.security.auth.useSubjectCredsOnly, sun.security.jgss.debug
        SparkSession spark = SparkSession.builder()
                .appName("apex_ids_real_cal")
                .master("local[*]") // local run; drop/override for cluster deployment
                .getOrCreate();

        // Kafka source: keep only the message value, cast to a String.
        Dataset<String> kafka = spark.readStream()
                .format("kafka")
                .option("kafka.bootstrap.servers", "pro1:9092")
                // Secured-cluster alternative:
                // .option("kafka.security.protocol", "SASL_PLAINTEXT")
                // .option("kafka.sasl.kerberos.service.name", "kafka")
                .option("subscribe", "test")
                .option("includeTimestamp", true) // include the record timestamp in the output
                .load()
                .selectExpr("CAST(value AS STRING)")
                .as(Encoders.STRING());

        // Parse each raw JSON string into a FirstCut bean. Records that fail
        // to parse are logged and skipped — previously a parse failure was
        // silently swallowed and a half-initialized bean (all-null fields)
        // was still emitted downstream.
        Dataset<FirstCut> jsonObjectDataset = kafka.flatMap(new FlatMapFunction<String, FirstCut>() {
            @Override
            public Iterator<FirstCut> call(String row) {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(row);
                    FirstCut result = new FirstCut();
                    // NOTE(review): get(..) + "" yields the literal "null"
                    // when the key is absent; kept for parity with downstream SQL.
                    result.setAfter(jsonObject.get("after") + "");
                    result.setJstime(jsonObject.getString("jstime"));
                    result.setName(jsonObject.getString("name"));
                    return Collections.singletonList(result).iterator();
                } catch (Exception e) {
                    System.err.println("Skipping unparseable record: " + row);
                    e.printStackTrace();
                    return Collections.emptyIterator();
                }
            }
        }, Encoders.bean(FirstCut.class));

        jsonObjectDataset.createOrReplaceTempView("updates");
        Dataset<Row> dataset2 = spark.sql(DStreamUtil.sql);

        // Debug sink: print each row of every micro-batch. ("truncate" is a
        // console-sink option and has no effect with foreachBatch, so it is
        // not set here.)
        StreamingQuery console = dataset2.writeStream()
                .foreachBatch(new VoidFunction2<Dataset<Row>, Long>() {
                    @Override
                    public void call(Dataset<Row> rowDataset, Long batchId) {
                        rowDataset.foreach(f -> {
                            System.out.println(f);
                            System.out.println(new DStreamUtil().getI());
                            System.out.println("===>" + id);
                            if ("test".equals(DStreamUtil.sql)) {
                                System.out.println("==改变了==");
                            }
                        });
                    }
                })
                .outputMode("update")
                .start();

        try {
            console.awaitTermination();
        } catch (StreamingQueryException e) {
            // Surfacing the failure is enough for this debug driver.
            e.printStackTrace();
        }
    }
}
