package com.apexsoft.yuanjian;

import com.alibaba.fastjson.JSONObject;
//import com.apex.obj.KafkaMessage;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;

import java.util.Collections;
import java.util.Iterator;

/**
 * spark2-submit --class com.apexsoft.yuanjian.SparkStructured --driver-java-options "-Djava.security.auth.login.config=/home/ubuntu/apexsoft/jaas.conf" --conf spark.executor.extraJavaOptions=-Djava.security.auth.login.config=/home/ubuntu/apexsoft/jaas.conf --master yarn --deploy-mode client spark-1.0-SNAPSHOT-shaded.jar
 */

public class SparkStructured {

    /**
     * Streams JSON log records from Kafka with Spark Structured Streaming,
     * extracts (timestamp, host name, file path, message) from each record,
     * and prints the resulting rows to the console.
     */
    public static void main(String[] args) {
        try {
            // The JAAS config must be in place before the first Kerberos/SASL
            // lookup, so set it BEFORE the SparkSession (and its Kafka source)
            // is created — the original set it after, which is too late.
            System.setProperty("java.security.auth.login.config", "/home/ubuntu/apexsoft/jaas.conf");

            SparkSession spark = SparkSession
                    .builder()
                    .master("local[*]")
                    .appName("SparkStructured")
                    .getOrCreate();
            spark.conf().set("spark.sql.streaming.metricsEnabled", "true");

            // Kafka source: the record value is the raw JSON payload.
            Dataset<String> df = spark
                    .readStream()
                    .format("kafka")
                    .option("kafka.bootstrap.servers", "datanode01.apex.com:9092,datanode02.apex.com:9092")
                    .option("kafka.security.protocol", "SASL_PLAINTEXT")
                    .option("kafka.sasl.kerberos.service.name", "kafka")
                    .option("subscribe", "apex_logkit_micro,apex_logkit_nginx")
                    .load()
                    .selectExpr("CAST(value AS STRING)")
                    .as(Encoders.STRING());

            // Normalize each raw JSON record into one CSV line:
            // timestamp,hostName,filePath,message
            Dataset<String> kafkaMessageDataset = df.map(new MapFunction<String, String>() {
                @Override
                public String call(String value) throws Exception {
                    JSONObject object = JSONObject.parseObject(value);
                    // Nested path: log -> file -> path (each level is a JSON string).
                    String filePath = JSONObject.parseObject(JSONObject.parseObject(object
                            .getString("log"))
                            .getString("file"))
                            .getString("path");
                    String message = object.getString("message");
                    String timestamp = object.getString("@timestamp");
                    String hostName = JSONObject.parseObject(object.getString("host")).getString("name");
                    return timestamp + "," + hostName + "," + filePath + "," + message;
                }
            }, Encoders.STRING());

            // Parse the CSV lines back into typed KafkaMessage beans.
            Dataset<KafkaMessage> dataset = kafkaMessageDataset.flatMap(new FlatMapFunction<String, KafkaMessage>() {
                @Override
                public Iterator<KafkaMessage> call(String s) throws Exception {
                    // Split into at most 4 fields so commas INSIDE the message
                    // body stay intact; the previous unbounded split(",")
                    // silently truncated any message containing a comma.
                    String[] records = s.split(",", 4);
                    if (records.length < 4) {
                        // Malformed line: skip it rather than crash the whole
                        // streaming query with ArrayIndexOutOfBoundsException.
                        return Collections.emptyIterator();
                    }
                    KafkaMessage kafkaMessage = new KafkaMessage();
                    kafkaMessage.setTimestamp(records[0]);
                    kafkaMessage.setHostName(records[1]);
                    kafkaMessage.setFilePath(records[2]);
                    kafkaMessage.setMessage(records[3]);
                    return Collections.singletonList(kafkaMessage).iterator();
                }
            }, Encoders.bean(KafkaMessage.class));

            // Expose the typed stream through SQL and sink it to the console.
            dataset.createOrReplaceTempView("updates");
            Dataset<Row> dataset2 = spark.sql("select * from updates");
            StreamingQuery query = dataset2.writeStream()
                    .outputMode(OutputMode.Update())
                    .format("console")
                    .start();

            // Block the driver until the query terminates or fails.
            query.awaitTermination();

        } catch (Exception e) {
            // Application boundary: surface any startup/streaming failure.
            e.printStackTrace();
        }
    }
}
