/*
 * FileName: SparkUtil.java
 * Author:   zzw
 * Date:     2018-05-24
 * Description: Spark Streaming job that consumes trace messages from Kafka
 *              and persists them to HBase.
 */
package com.chezhibao.util;

import com.chezhibao.configuration.HbaseProperties;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import kafka.serializer.StringDecoder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import scala.Tuple2;

import java.util.*;

/**
 * Spark utility class: consumes '|'-delimited trace messages from Kafka via
 * Spark Streaming and writes them to HBase as JSON-encoded column values.
 *
 * @author zzw
 */
@Component
public class SparkUtil {

    private static final Logger logger = LoggerFactory.getLogger(SparkUtil.class);

    // Populated through the Spring setter below. NOTE: a field-level
    // @Autowired on a static field is silently skipped by Spring, so the
    // annotation must live on the instance setter, not here.
    private static HbaseProperties hbaseProperties;

    // NOTE(review): the constants below are currently unused — handleHBase()
    // hard-codes "local[2]" and a 5000 ms batch interval. Kept for reference
    // until the submit configuration is actually wired up.
    private static final String SPARK_MASTER = "yarn";            // use yarn as the resource framework
    private static final String SPARK_DEPLOY_MODE = "client";
    private static final String SPARK_APP_NAME = "sparkStreaming";
    private static final long SPARK_BATCH_SIZE_M = 10000;         // batch interval: 10 s
    private static final long SPARK_WIN_LEN = 120000;             // window size: 2 min (for demo)
    private static final long SPARK_WIN_SLID = 10000;             // window slide interval: 10 s

    /**
     * Starts a Spark Streaming job that consumes '|'-delimited trace messages
     * from Kafka and batches them into HBase. Blocks until the streaming
     * context terminates.
     *
     * @param objectMapper Jackson mapper used to serialize each parsed record;
     *                     captured by the executor closures, so it must be
     *                     serializable — TODO confirm for cluster deployment
     */
    public static void handleHBase(ObjectMapper objectMapper){
        logger.info(hbaseProperties.toString());
        SparkConf sparkConf = new SparkConf().setAppName("spark").setMaster("local[2]");
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        jsc.setLogLevel("INFO");
        JavaStreamingContext jssc = new JavaStreamingContext(jsc, new Duration(5000));

        Map<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", hbaseProperties.getKafkaServer());
        kafkaParams.put("group.id", hbaseProperties.getGroupId());
        kafkaParams.put("auto.offset.reset", hbaseProperties.getAutoOffsetReset());

        Set<String> topics = new HashSet<>();
        topics.add(hbaseProperties.getTopic());

        JavaPairInputDStream<String, String> stream = KafkaUtils.createDirectStream(
                jssc, String.class, String.class, StringDecoder.class, StringDecoder.class,
                kafkaParams, topics);

        // Keep only the message payload; the Kafka key is not used.
        JavaDStream<String> jds = stream.map(new Function<Tuple2<String, String>, String>() {
            @Override
            public String call(Tuple2<String, String> v1) throws Exception {
                return v1._2();
            }
        });

        jds.foreachRDD(new VoidFunction<JavaRDD<String>>() {
            @Override
            public void call(JavaRDD<String> rdd) throws Exception {
                try {
                    rdd.foreachPartition(new VoidFunction<Iterator<String>>() {
                        @Override
                        public void call(Iterator<String> records) throws Exception {
                            // NOTE(review): this closure reads the static
                            // hbaseProperties field, which only works because
                            // the job runs in local mode — on a real cluster
                            // the executors would see it as null. Confirm
                            // before changing the master away from local[2].
                            List<Put> listPut = new ArrayList<>();
                            while (records.hasNext()) {
                                String one = records.next();
                                String[] fields = one.split("\\|");
                                // BUGFIX: the original tested `length == 0` and
                                // then built a Put from the EMPTY row key, which
                                // throws IllegalArgumentException ("Row length
                                // must be > 0"). A Put is only valid when the
                                // trace id (fields[0]) is non-empty.
                                if (Bytes.toBytes(fields[0]).length > 0) {
                                    Put p = new Put(Bytes.toBytes(fields[0]));
                                    putData(p, fields, objectMapper);
                                    listPut.add(p);
                                } else {
                                    // Empty row key: just log the raw payload.
                                    logger.info("message:{}", String.join("", fields));
                                }
                            }
                            if (!listPut.isEmpty()) {
                                logger.info("list size:{}", listPut.size());
                                Configuration conf = HBaseConfiguration.create();
                                conf.set("hbase.zookeeper.quorum", hbaseProperties.getQuorum());
                                HTable table = new HTable(conf, hbaseProperties.getTable());
                                try {
                                    table.put(listPut);
                                } finally {
                                    table.close(); // release the connection even if put() fails
                                }
                            }
                        }
                    });
                } catch (Exception e) {
                    // Keep the full stack trace instead of just the message.
                    logger.error("Failed to write partition to HBase", e);
                }
            }
        });

        jssc.start();
        try {
            jssc.awaitTermination();
        } catch (Exception e) {
            logger.error("Streaming context terminated abnormally", e);
        }
    }

    /**
     * Fills {@code put} with a single column under family {@code "rpc"} whose
     * value is the JSON serialization of the parsed trace fields.
     *
     * Field layout depends on rpcType ({@code datas[2]}):
     * <ul>
     *   <li>"0": 8 mandatory fields, optional userData at index 8;
     *       qualifier = rpcType</li>
     *   <li>anything else (including the formerly special-cased "1"): 12
     *       mandatory fields, optional userData at index 12;
     *       qualifier = rpcId + "-" + rpcType</li>
     * </ul>
     *
     * @param put          the HBase mutation to populate
     * @param datas        '|'-split message fields; must contain enough
     *                     elements for the detected rpcType — TODO confirm
     *                     upstream guarantees this
     * @param objectMapper Jackson mapper used to serialize the field map
     * @return the same {@code put} instance, for chaining
     */
    public static Put putData(Put put, String[] datas, ObjectMapper objectMapper){
        Map<String, String> maps = new HashMap<>();
        // Fields shared by every rpcType (deduplicated from the per-type branches).
        maps.put("traceId", datas[0]);
        maps.put("startTime", datas[1]);
        maps.put("rpcType", datas[2]);
        maps.put("appname", datas[3]);
        maps.put("rpcId", datas[4]);

        String qualifier;
        if ("0".equals(datas[2])) {
            maps.put("span", datas[5]);
            maps.put("traceName", datas[6]);
            maps.put("resultCode", datas[7]);
            if (datas.length == 9) {
                maps.put("userData", datas[8]);
            }
            qualifier = datas[2];
        } else {
            maps.put("serviceName", datas[5]);
            maps.put("methodName", datas[6]);
            maps.put("remoteIp", datas[7]);
            maps.put("span", datas[8]);
            maps.put("resultCode", datas[9]);
            maps.put("requestSize", datas[10]);
            maps.put("responseSize", datas[11]);
            if (datas.length == 13) {
                maps.put("userData", datas[12]);
            }
            qualifier = datas[4] + "-" + datas[2];
        }

        try {
            put.addColumn(Bytes.toBytes("rpc"), Bytes.toBytes(qualifier),
                    Bytes.toBytes(objectMapper.writeValueAsString(maps)));
        } catch (JsonProcessingException e) {
            // Log with the full stack trace instead of printStackTrace().
            logger.error("Failed to serialize trace record to JSON", e);
        }
        return put;
    }

    /**
     * Spring setter injection bridging the container-managed bean into the
     * static field consumed by the static entry points above.
     */
    @Autowired
    public void setHbaseProperties(HbaseProperties hbaseProperties) {
        SparkUtil.hbaseProperties = hbaseProperties;
    }
}
