/*
 * FileName: SparkUtil.java
 * Author:   zzw
 * Date:     2018年05月24日
 * Description: Spark Streaming utilities for consuming pipe-delimited trace
 *              messages from Kafka and persisting them into HBase.
 */
package util;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import scala.Tuple2;
import java.util.*;

/**
 * Spark Streaming utility class.<br>
 * Consumes trace messages from the Kafka topic {@code clotho_rpc} and writes them
 * into the HBase tables {@code trace} (RPC rows) and {@code tracesql} (SQL rows).
 *
 * @author zzw
 */
public class SparkUtil {

    private static final Logger logger = Logger.getLogger(SparkUtil.class);

    /** ZooKeeper host shared by the Kafka receiver and the HBase client. */
    private static final String ZK_HOST = "hadoop90";

    // NOTE(review): the settings below are currently unused — handleHBase hard-codes its
    // own master/app-name/batch values. Kept for reference; confirm before removing.
    private static final String SPARK_MASTER = "yarn";             // use yarn as the resource framework
    private static final String SPARK_DEPLOY_MODE = "client";
    private static final String SPARK_APP_NAME = "sparkStreaming";
    private static final long SPARK_BATCH_SIZE_M = 10000;          // 10s batch interval
    private static final long SPARK_WIN_LEN = 120000;              // 2min window (for demo purposes)
    private static final long SPARK_WIN_SLID = 10000;              // 10s window slide

    /**
     * Starts a Spark Streaming job that consumes trace messages from the Kafka topic
     * {@code clotho_rpc} and writes them to the HBase tables {@code trace} and
     * {@code tracesql}. Blocks until the streaming context terminates.
     *
     * @param objectMapper JSON serializer used to encode each row's column value;
     *                     captured by the executor-side closures
     */
    public static void handleHBase(final ObjectMapper objectMapper) {
        SparkConf sparkConf = new SparkConf().setAppName("spark").setMaster("yarn-cluster");
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        // One-minute micro-batches.
        JavaStreamingContext jssc = new JavaStreamingContext(jsc, new Duration(60000));

        // Topic -> number of receiver threads.
        Map<String, Integer> topics = new HashMap<>();
        topics.put("clotho_rpc", 3);

        // Receiver-based Kafka stream; key is the Kafka message key, value the raw payload.
        JavaPairInputDStream<String, String> stream = KafkaUtils.createStream(
                jssc, ZK_HOST + ":2181", "trace", topics, StorageLevel.MEMORY_AND_DISK_SER());

        // Keep only the message payload.
        JavaDStream<String> messages = stream.map(new Function<Tuple2<String, String>, String>() {
            @Override
            public String call(Tuple2<String, String> kv) throws Exception {
                return kv._2();
            }
        });

        messages.foreachRDD(new VoidFunction<JavaRDD<String>>() {
            @Override
            public void call(JavaRDD<String> rdd) throws Exception {
                try {
                    rdd.foreachPartition(new VoidFunction<Iterator<String>>() {
                        @Override
                        public void call(Iterator<String> it) throws Exception {
                            List<Put> listPut = new ArrayList<>();
                            List<Put> listSql = new ArrayList<>();
                            while (it.hasNext()) {
                                String one = it.next();
                                String[] fields = one.split("\\|");
                                // Messages with an empty rowkey cannot be stored.
                                if (!fields[0].isEmpty()) {
                                    putData(fields, objectMapper, one, listPut, listSql);
                                } else {
                                    logger.error("msg error:" + one);
                                }
                            }
                            // One batched write per table per partition.
                            writePuts("trace", listPut);
                            writePuts("tracesql", listSql);
                        }
                    });
                } catch (Exception e) {
                    logger.error(e);
                }
            }
        });

        jssc.start();
        try {
            jssc.awaitTermination();
        } catch (Exception e) {
            logger.error(e);
        }
    }

    /**
     * Flushes the accumulated puts to the named HBase table. No-op for an empty batch.
     * The table handle is closed even when the write fails, avoiding connection leaks.
     *
     * @param tableName target HBase table
     * @param puts      batch to write; may be empty
     * @throws Exception if connecting to or writing HBase fails (propagates to the caller)
     */
    private static void writePuts(String tableName, List<Put> puts) throws Exception {
        if (puts.isEmpty()) {
            return;
        }
        logger.info(tableName + " batch size:" + puts.size());
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", ZK_HOST);
        HTable table = new HTable(conf, tableName);
        try {
            table.put(puts);
        } finally {
            table.close(); // close even when put() throws
        }
    }

    /*
     * Message type codes carried in datas[2]:
     *   0 = trace, 1 = CSF client, 2 = CSF server, 3 = MySQL, 4 = Redis,
     *   5 = HTTP, 6 = MQ, 9 = index/SQL.
     */
    /**
     * Converts one pipe-split trace message into an HBase {@link Put} and appends it to
     * {@code listPut} (trace rows, family "rpc") or {@code listSql} (SQL rows, family
     * "sql"). Malformed messages are logged and skipped — no exception escapes.
     *
     * @param datas        pipe-split message fields; datas[0] is the rowkey, datas[2] the type code
     * @param objectMapper JSON serializer for the column value
     * @param msg          original raw message, used for logging only
     * @param listPut      output batch for the "trace" table
     * @param listSql      output batch for the "tracesql" table
     */
    public static void putData(String[] datas, ObjectMapper objectMapper, String msg,
                               List<Put> listPut, List<Put> listSql) {
        if (datas.length <= 1) {
            return;
        }
        Put put = null;
        Map<String, String> maps = new HashMap<>();
        String qualifier = null;
        try {
            String type = datas[2];
            // Special index/SQL message: fixed sender 54007, exactly 5 fields, type 9.
            // Built and added locally so a serialization failure here can never leak the
            // row into the "rpc" family below.
            if (datas.length == 5 && "54007".equals(datas[0]) && "9".equals(type)) {
                Put sqlPut = new Put(Bytes.toBytes(datas[3]));
                maps.put("startTime", datas[1]);
                maps.put("sql", datas[4].substring(1)); // drop the leading marker character
                sqlPut.addColumn(Bytes.toBytes("sql"), Bytes.toBytes(datas[3]),
                        Bytes.toBytes(objectMapper.writeValueAsString(maps)));
                listSql.add(sqlPut);
                logger.info("sql message:" + msg);
                return;
            }
            put = new Put(Bytes.toBytes(datas[0]));
            convertMap(maps, datas, type);
            // Optional trailing userData field: index 8 for plain traces (type 0),
            // index 12 for the 12-field RPC-style layouts.
            int userDataIdx = "0".equals(type) ? 8 : 12;
            if (datas.length == userDataIdx + 1) {
                maps.put("userData", datas[userDataIdx]);
            }
            if ("0".equals(type) || "3".equals(type) || "4".equals(type) || "6".equals(type)) {
                qualifier = datas[4];
            } else {
                // Other types get the type code appended to keep qualifiers distinct.
                qualifier = datas[4] + "-" + type;
            }
        } catch (Exception e) {
            logger.error("message:" + msg, e);
        }
        try {
            if (qualifier == null) {
                // Parsing failed before a qualifier was chosen; nothing to write.
                logger.error("message:" + msg);
                logger.error("hbase qualifier:" + qualifier);
            } else {
                put.addColumn(Bytes.toBytes("rpc"), Bytes.toBytes(qualifier),
                        Bytes.toBytes(objectMapper.writeValueAsString(maps)));
                listPut.add(put);
            }
        } catch (JsonProcessingException e) {
            logger.error(e);
        }
    }

    /**
     * Populates {@code maps} with named fields from the raw message. Type "0" (plain
     * trace) messages carry 8 positional fields; every other type uses the 12-field
     * RPC layout. The caller is responsible for array length — an out-of-range message
     * surfaces as an exception handled in {@link #putData}.
     *
     * @param maps  destination map, mutated in place
     * @param datas pipe-split message fields
     * @param type  message type code (datas[2])
     */
    public static void convertMap(Map<String, String> maps, String[] datas, String type) {
        final String[] names;
        if ("0".equals(type)) {
            names = new String[]{"traceId", "startTime", "rpcType", "appname",
                    "rpcId", "span", "traceName", "resultCode"};
        } else {
            names = new String[]{"traceId", "startTime", "rpcType", "appname", "rpcId",
                    "serviceName", "methodName", "remoteIp", "span", "resultCode",
                    "requestSize", "responseSize"};
        }
        for (int i = 0; i < names.length; i++) {
            maps.put(names[i], datas[i]);
        }
    }
}
