package com.apexsoft.service;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

/**
 * Created by caigx
 * Date: 2020/9/16
 * Time: 14:51
 * Description: Spark Structured Streaming job that consumes the "test" Kafka
 * topic and continuously prints each record's value and timestamp to the console.
 */
public class newconsumer_1 {

    /**
     * Entry point: reads the "test" Kafka topic as a structured stream, casts
     * each record's value and timestamp to strings, registers the stream as a
     * temporary SQL view, and continuously prints its rows to the console in
     * "update" output mode until the query terminates.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Local-mode session so the job runs without a cluster; appName shows
        // up in the Spark UI.
        SparkSession spark = SparkSession.builder()
                .appName("apex_ids_real_cal")
                .master("local[*]")
                .getOrCreate();

        // Source: Kafka topic "test" on broker pro1:9092. Kafka delivers value
        // as bytes, so cast it to a readable string; the record timestamp is
        // kept alongside it.
        Dataset<Tuple2<String, String>> kafka = spark.readStream()
                .format("kafka")
                .option("kafka.bootstrap.servers", "pro1:9092")
                .option("subscribe", "test")
                .option("includeTimestamp", true) // include the record timestamp in the output
                .load()
                .selectExpr("CAST(value AS STRING)", "CAST(timestamp AS timestamp)")
                .as(Encoders.tuple(Encoders.STRING(), Encoders.STRING()));

        // Expose the stream to Spark SQL and select everything back out so it
        // can be fed to the console sink.
        kafka.createOrReplaceTempView("updates");
        Dataset<Row> dataset2 = spark.sql("select * from updates ");

        // Sink: print each micro-batch to stdout; truncate=false keeps wide
        // columns fully visible, "update" mode emits only changed rows.
        StreamingQuery console = dataset2.writeStream()
                .format("console")
                .option("truncate", "false")
                .outputMode("update")
                .start();

        try {
            console.awaitTermination();
        } catch (StreamingQueryException e) {
            // A streaming main has no caller to recover for it: surface the
            // failure (with its cause preserved) instead of swallowing it with
            // printStackTrace() and exiting "successfully".
            throw new RuntimeException("Streaming query terminated with an error", e);
        }
    }
}
