package com.etc;

import com.etc.common.Constants;
import com.etc.util.HbaseUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @Author kalista
 * @Description
 * @Date 2021/5/24  10:40
 **/
public class HaiKouOrderInfoConsumer implements Runnable {

    /** Kept public static (original visibility) in case external code references it. */
    public static final Logger log = LoggerFactory.getLogger(HaiKouOrderInfoConsumer.class);

    /** A valid order line must split into exactly this many space-separated fields. */
    private static final int EXPECTED_FIELD_COUNT = 26;

    /** Matches a yyyy-MM-dd date anywhere in the line; used to recognize real data lines. */
    private static final Pattern DATE_PATTERN = Pattern.compile("[0-9]{4}-[0-9]{2}-[0-9]{2}");

    // Key and value are both plain strings on the wire (StringDeserializer is configured
    // for both below), so the consumer is typed <String, String> — the original
    // <Integer, String> typing contradicted the configured key deserializer.
    private final KafkaConsumer<String, String> consumer;
    private final String topic;

    /**
     * Creates a Kafka consumer for the given topic and consumer group.
     *
     * @param topic   Kafka topic carrying raw order lines
     * @param groupId Kafka consumer group id
     */
    public HaiKouOrderInfoConsumer(String topic, String groupId) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "hdp-01:9092,hdp-02:9092,hdp-03:9092");
        props.put("group.id", groupId);
        props.put("enable.auto.commit", "true");
        // earliest: when the group has no committed offset, consume from the beginning;
        // latest: with no committed offset, consume only new records;
        // none: throw if any subscribed partition lacks a committed offset.
        props.put("auto.offset.reset", "earliest");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer = new KafkaConsumer<>(props);
        this.topic = topic;
    }

    /**
     * Polls forever, logging (not printing) any per-batch failure and continuing.
     */
    @Override
    public void run() {
        // Subscribe once up front; re-subscribing on every poll iteration is wasteful.
        consumer.subscribe(Collections.singletonList(topic));
        while (true) {
            try {
                word();
            } catch (Exception e) {
                // Keep the stack trace and keep polling; printStackTrace() bypassed the logger.
                log.error("error while consuming topic {}", topic, e);
            }
        }
    }

    /**
     * Polls one batch of records, filters out non-data/malformed lines, and writes
     * the remaining orders into the HBase table (created on first use).
     *
     * <p>Row key design: orderId + "_" + departure timestamp compacted to digits,
     * e.g. "2017-05-19 01:05:19" becomes "20170519010519".
     *
     * @throws Exception if the HBase interaction fails
     */
    public void word() throws Exception {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
        log.info("消费到消息数:{}", records.count());
        if (records.count() > 0) {

            // Create the table on demand.
            if (!HbaseUtil.tableExists(Constants.HTAB_HAIKOU_ORDER)) {
                HbaseUtil.createTable(HbaseUtil.getConnection(), Constants.HTAB_HAIKOU_ORDER,
                        Constants.DEFAULT_FAMILY);
            }

            List<Put> puts = new ArrayList<>(records.count());

            for (ConsumerRecord<String, String> record : records) {
                String value = record.value();

                // Skip anything that does not look like a data line (no yyyy-MM-dd date in it).
                if (!isDataLine(value)) {
                    continue;
                }

                // Split only after the cheap regex filter; drop malformed rows.
                String[] fields = value.split(" ");
                if (fields.length != EXPECTED_FIELD_COUNT) {
                    continue;
                }

                // Row key: order id + "_" + departure date/time with separators stripped.
                String rowkey = fields[0] + "_" + fields[13].replaceAll("-", "")
                        + fields[14].replaceAll(":", "");

                // Fresh map per record so every Put owns its own data — the original reused
                // one field-level map, which breaks if createPut retains the reference.
                puts.add(HbaseUtil.createPut(rowkey,
                        Constants.DEFAULT_FAMILY.getBytes(StandardCharsets.UTF_8),
                        toOrderMap(fields)));
            }

            if (!puts.isEmpty()) {
                // try-with-resources closes the Table even when the batch put fails
                // (the original leaked the Table handle).
                try (Table table = HbaseUtil.getTable(Constants.HTAB_HAIKOU_ORDER)) {
                    table.put(puts);
                }
            }
        }
        log.info("************** 正常结束 **********");
    }

    /**
     * Maps the 26 raw fields onto their HBase column names. ARRIVE_TIME and
     * DEPARTURE_TIME re-join the date+time field pairs split by the space delimiter.
     *
     * @param fields exactly {@link #EXPECTED_FIELD_COUNT} order fields
     * @return a fresh, mutable column-name-to-value map
     */
    private static Map<String, String> toOrderMap(String[] fields) {
        Map<String, String> orderMap = new HashMap<>();
        orderMap.put("ORDER_ID", fields[0]);
        orderMap.put("PRODUCT_ID", fields[1]);
        orderMap.put("CITY_ID", fields[2]);
        orderMap.put("DISTRICT", fields[3]);
        orderMap.put("COUNTY", fields[4]);
        orderMap.put("TYPE", fields[5]);
        orderMap.put("COMBO_TYPE", fields[6]);
        orderMap.put("TRAFFIC_TYPE", fields[7]);
        orderMap.put("PASSENGER_COUNT", fields[8]);
        orderMap.put("DRIVER_PRODUCT_ID", fields[9]);
        orderMap.put("START_DEST_DISTANCE", fields[10]);
        orderMap.put("ARRIVE_TIME", fields[11] + " " + fields[12]);
        orderMap.put("DEPARTURE_TIME", fields[13] + " " + fields[14]);
        orderMap.put("PRE_TOTAL_FEE", fields[15]);
        orderMap.put("NORMAL_TIME", fields[16]);
        orderMap.put("BUBBLE_TRACE_ID", fields[17]);
        orderMap.put("PRODUCT_1LEVEL", fields[18]);
        orderMap.put("DEST_LNG", fields[19]);
        orderMap.put("DEST_LAT", fields[20]);
        orderMap.put("STARTING_LNG", fields[21]);
        orderMap.put("STARTING_LAT", fields[22]);
        orderMap.put("YEAR", fields[23]);
        orderMap.put("MONTH", fields[24]);
        orderMap.put("DAY", fields[25]);
        return orderMap;
    }

    /**
     * Returns {@code true} when the line is non-empty and contains a yyyy-MM-dd date,
     * which is how raw data lines are distinguished from headers/noise.
     *
     * @param line a raw line from the topic; may be {@code null} or empty
     * @return whether the line should be treated as order data
     */
    public static boolean isDataLine(String line) {
        if (StringUtils.isEmpty(line)) {
            return false;
        }
        return DATE_PATTERN.matcher(line).find();
    }

    /** Starts a single consumer thread for the Haikou order topic. */
    public static void main(String[] args) {
        HaiKouOrderInfoConsumer haiKouOrderInfoConsumer =
                new HaiKouOrderInfoConsumer("hai_kou_order_topic", "hai_kou_order_g_00w5");
        new Thread(haiKouOrderInfoConsumer).start();
    }
}
