package org.databandtech.sparkstreaming;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.StructType;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class KafkaApp {

	/**
	 * Entry point. Consumes JSON set-top-box log events from the Kafka topic
	 * "Hello-Kafka", parses them against a fixed all-string schema, and runs two
	 * console-sink streaming queries:
	 * <ol>
	 *   <li>the raw parsed rows plus the Kafka ingestion timestamp (update mode)</li>
	 *   <li>per-minute event counts grouped by sys_id and action_type, ordered by
	 *       window (complete mode — sorting a streaming aggregation is only legal
	 *       in complete output mode)</li>
	 * </ol>
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {

		System.out.println( "kafka source start..." );

		SparkSession spark = SparkSession
				.builder()
				.appName("kafkaconsumer")
				.master("local[*]")
				.getOrCreate();

		// JSON payload schema — every field is kept as a string; downstream
		// consumers cast as needed (e.g. log_time is an epoch string).
		StructType dataSchema = new StructType()
				.add("action_type","string")
				.add("sys_id","string")
				.add("user_id","string")
				.add("user_group_id","string")
				.add("epg_group_id","string")
				.add("stb_ip","string")
				.add("stb_id","string")
				.add("stb_type","string")
				.add("stb_mac","string")
				.add("terminal_type","string")
				.add("log_time","string")
				.add("mediacode","string")
				.add("definition","string")
				.add("bitrate","string")
				.add("start_time","string")
				.add("currentplaytime","string")
				.add("refer_type","string")
				.add("refer_page_id","string")
				.add("area_code","string");

		// Kafka source. Note: the Kafka source does not accept a user schema via
		// .schema(...) — the value column is binary and must be parsed explicitly.
		Dataset<Row> df = spark
				.readStream()
				.format("kafka")
				.option("kafka.bootstrap.servers", "hadoop001:9092")
				.option("subscribe", "Hello-Kafka")
				//.option("subscribePattern", "topic.*")
				.option("startingOffsets", "earliest")
				//.option("endingOffsets", "latest")
				.load();

		// Approach 1: parse the Kafka value column directly with from_json.
		String nestTimestampFormat = "yyyy-MM-dd HH:mm:ss";
		Map<String,String> jsonOptions = new HashMap<String,String>();
		jsonOptions.put("timestampFormat", nestTimestampFormat);
		Dataset<Row> parsed = df.select(
				functions.from_json(df.col("value").cast("string"), dataSchema, jsonOptions).alias("parsed_value"),
				df.col("timestamp"));

		// Flatten the parsed struct and keep the Kafka ingestion timestamp.
		// (To switch to event time instead, derive a timestamp column from
		// parsed_value.log_time, e.g.:
		// .withColumn("dtime",functions.to_timestamp(functions.from_unixtime(parsed.col("parsed_value.log_time").substr(0,10),"yyyy-MM-dd HH:mm:ss"),"yyyy-MM-dd HH:mm:ss")) )
		Dataset<Row> selected = parsed.select("parsed_value.*","timestamp");

		/* Approach 2 (alternative, unused): map each row through Gson into a bean.
		// Configure Gson
		GsonBuilder gsonBuilder = new GsonBuilder();
		Gson gson = gsonBuilder.create();
		// Convert via a map operation
		Dataset<EpgVod> parsed = df
				.selectExpr("CAST(value AS STRING)")
				.map((MapFunction<Row, EpgVod>) row -> {
					EpgVod model = gson.fromJson(row.getString(0), EpgVod.class);
					return model;
				}, Encoders.bean(EpgVod.class));
		*/

		// Aggregate by operator (sys_id) and action type over 1-minute windows.
		// NOTE(review): no watermark is set, so aggregation state grows unbounded;
		// acceptable for a local demo, but add withWatermark(...) for production.
		Dataset<Row> onlineCountsBySysid = selected.groupBy(
				functions.window(selected.col("timestamp"), "1 minutes"),
				selected.col("sys_id"), selected.col("action_type")
				).count().orderBy("window");

		try {
			// Raw parsed rows to the console; update mode emits only changed rows.
			StreamingQuery querySelected = selected.writeStream()
					.outputMode("update")
					.format("console")
					.start();

			// BUGFIX: was outputMode("update"). Sorting (orderBy) on a streaming
			// aggregation is only supported in complete output mode; update mode
			// made start() fail with an AnalysisException.
			StreamingQuery query = onlineCountsBySysid.writeStream()
					.outputMode("complete")
					.format("console")
					.start();

			// Block until either query terminates (both share the session, so
			// awaiting the aggregation query keeps the raw query running too).
			query.awaitTermination();
			querySelected.awaitTermination();
		} catch (TimeoutException | StreamingQueryException e) {
			// Fail fast with the original cause instead of swallowing the error.
			throw new RuntimeException("streaming query failed", e);
		}
	}

}
