/*
 * FileName: RpcJob.java
 * Author:   zzw
 * Date:     2019年01月23日
 * Description:
 */
package com.chezhibao.flink.job;

import com.chezhibao.flink.output.HBaseOutputFormat;
import com.chezhibao.flink.sink.ClickHouseSink;
import com.chezhibao.flink.splitter.MessageSplitter;
import com.chezhibao.flink.util.MessageUtil;
import com.chezhibao.flink.vo.RpcSqlVo;
import com.virtusai.clickhouseclient.ClickHouseClient;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple13;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;

/**
 * Flink streaming job that consumes RPC trace messages from the {@code clotho_rpc}
 * Kafka topic, aggregates them in 5-second tumbling windows keyed by the first
 * tuple field, and emits the aggregated rows to ClickHouse (SQL trace records
 * are additionally prepared as HBase {@code Put}s).
 *
 * @author zzw
 */
public class RpcJob {

    private static final Logger logger = LoggerFactory.getLogger(RpcJob.class);

    // NOTE(review): endpoint and credentials are hard-coded in source; move them to
    // external configuration / secret storage.
    private static ClickHouseClient client = new ClickHouseClient("http://172.16.12.129:8123", "default", "123456");

    // Shared HBase table handle for SQL trace writes.
    // NOTE(review): HTable is not thread-safe — confirm it is only used from one thread.
    private static HTable table;

    static {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "escnode0");
        // Fixed: the HBase client reads the ZooKeeper port from
        // "hbase.zookeeper.property.clientPort"; the old key "hbase.zookeeper.port"
        // is not a recognized property (2181 happened to be the default anyway).
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        try {
            table = new HTable(conf, "tracesql");
        } catch (IOException e) {
            logger.error("init hbase error", e);
        }
    }

    // ObjectMapper is thread-safe and expensive to build; cached as a constant.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * Job entry point.
     *
     * <p>Pipeline: Kafka source ({@code clotho_rpc}) → {@link MessageSplitter}
     * flatMap into Tuple13 records → keyBy(field 0) → 5s tumbling processing-time
     * window → per-window conversion into a {@link RpcSqlVo} (ClickHouse rows plus
     * HBase puts) → {@link ClickHouseSink}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        String topic = "clotho_rpc";
        Properties props = new Properties();
        // NOTE(review): the two *serializer* entries are producer-side settings and
        // are ignored by the consumer; kept for compatibility with existing deploys.
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("bootstrap.servers", "escnode0:9092");
        props.put("group.id", "g2");

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        FlinkKafkaConsumer011<String> kafkaSource = new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), props);
        env.addSource(kafkaSource)
                .flatMap(new MessageSplitter())
                .keyBy(0)
                .timeWindow(Time.seconds(5))
                .apply(new WindowFunction<Tuple13<String, String, String, String, String, String, String, String, String, String, String, String, String>, RpcSqlVo, Tuple, TimeWindow>() {

                    @Override
                    public void apply(Tuple tuple, TimeWindow timeWindow, Iterable<Tuple13<String, String, String, String, String, String, String, String, String, String, String, String, String>> iterable, Collector<RpcSqlVo> collector) throws Exception {
                        // Accumulate one window's worth of ClickHouse rows and HBase puts.
                        List<Object[]> rowsRpc = new ArrayList<>();
                        List<Put> sqls = new ArrayList<>();
                        for (Tuple13<String, String, String, String, String, String, String, String, String, String, String, String, String> record : iterable) {
                            handleData(rowsRpc, sqls, record);
                        }
                        collector.collect(new RpcSqlVo(rowsRpc, sqls));
                    }
                })
                .addSink(new ClickHouseSink())
                //.writeUsingOutputFormat(new HBaseOutputFormat())
                .setParallelism(2);

        env.execute("RpcJob");
    }

    /**
     * Bulk-inserts rows into ClickHouse. On failure, every row is logged so the
     * data can be recovered from the logs.
     *
     * <p>NOTE(review): this method is not referenced anywhere in this class —
     * confirm it is still needed before removing.
     *
     * @param rows rows matching the {@code default.clotho_trace} column layout
     */
    private static void insert(List<Object[]> rows) {
        try {
            client.post("INSERT INTO default.clotho_trace", rows);
        } catch (Exception e) {
            logger.error("clickhouse insert error:", e);
            // Fixed: the builder used to be re-created inside the inner loop, so only
            // the LAST column of each row was ever logged. Build one line per row.
            for (Object[] objects : rows) {
                StringBuilder stringBuilder = new StringBuilder();
                for (Object o : objects) {
                    stringBuilder.append(o).append(' ');
                }
                logger.info(stringBuilder.toString());
            }
        }
    }

    /**
     * Routes one record either to the HBase put list (SQL trace records: app id
     * {@code 54007} with rpcType {@code 9}) or to the ClickHouse row list (all
     * other rpc types, converted via {@link MessageUtil#convertMap}).
     *
     * <p>Tuple13 layout as read here: f0=app id, f1=start time, f2=rpc type,
     * f3=row key / column qualifier, f4=sql text — TODO confirm against
     * {@link MessageSplitter}.
     *
     * @param rowsRpc accumulator for ClickHouse rows (mutated)
     * @param listSql accumulator for HBase puts (mutated)
     * @param record  one split trace record
     */
    private static void handleData(List<Object[]> rowsRpc, List<Put> listSql, Tuple13<String, String, String, String, String, String, String, String, String, String, String, String, String> record) {
        String rpcType = record.getField(2);
        if ("54007".equals(record.getField(0)) && "9".equals(rpcType)) {
            String sql = record.getField(4);
            String rowKey = record.getField(3);
            Put put = new Put(Bytes.toBytes(rowKey));
            Map<String, String> maps = new HashMap<>();
            maps.put("startTime", record.getField(1));
            // Fixed: substring(1) threw StringIndexOutOfBoundsException on an empty
            // payload (and NPE on null); the leading character is still stripped for
            // well-formed payloads.
            maps.put("sql", (sql == null || sql.isEmpty()) ? "" : sql.substring(1));
            try {
                put.addColumn(Bytes.toBytes("sql"), Bytes.toBytes(rowKey), Bytes.toBytes(objectMapper.writeValueAsString(maps)));
            } catch (IOException e) {
                // Fixed: pass the exception as a throwable so the stack trace is kept,
                // instead of concatenating it into the message string.
                logger.error("objectMapper write error", e);
            }
            listSql.add(put);
        } else {
            // Row layout is defined by MessageUtil.convertMap; 16 columns expected.
            Object[] objects = new Object[16];
            MessageUtil.convertMap(objects, record, Integer.parseInt(rpcType));
            rowsRpc.add(objects);
        }
    }
}
