package com.itheima.momo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Date;
import java.util.Properties;

/**
 * @author lilulu
 * @date 2023/2/27 0:02
 */
// Consumes chat messages from the MOMO_MSG Kafka topic and writes each
// message as one row into the HBase table MOMO_CHAT:MOMO_MSG.
public class MOMO_KAFKA_HBase {

    /** Field separator used by the upstream producer (ASCII SOH, \001). */
    private static final String FIELD_SEPARATOR = "\001";

    /** Format of the msg_time field, e.g. "2023-02-27 00:02:00"; thread-safe, unlike SimpleDateFormat. */
    private static final DateTimeFormatter MSG_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /** HBase column family all message fields are written to. */
    private static final byte[] FAMILY = "C1".getBytes(StandardCharsets.UTF_8);

    /** Column qualifiers in family C1, index-aligned with the split message fields. */
    private static final String[] COLUMNS = {
            "msg_time", "sender_nickyname", "sender_account", "sender_sex",
            "sender_ip", "sender_os", "sender_phone_type", "sender_network",
            "sender_gps", "receiver_nickyname", "receiver_ip", "receiver_account",
            "receiver_os", "receiver_phone_type", "receiver_network", "receiver_gps",
            "receiver_sex", "msg_type", "distance", "message"
    };

    public static void main(String[] args) throws Exception {
        // Kafka consumer configuration.
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "node1:9092,node2:9092,node3:9092");
        props.setProperty("group.id", "momo_g1");
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("auto.commit.interval.ms", "1000");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // HBase connection configuration (ZooKeeper quorum).
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node1:2181,node2:2181,node3:2181");

        // try-with-resources closes consumer, connection and table even when the
        // poll loop dies with an exception (the original leaked all three).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
             Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("MOMO_CHAT:MOMO_MSG"))) {

            // Topics this consumer listens on.
            consumer.subscribe(Arrays.asList("MOMO_MSG"));

            // Poll forever; each consumed message becomes one HBase row.
            while (true) {
                // poll(Duration) replaces the deprecated poll(long) overload.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    String msg = record.value();
                    System.out.println(msg);

                    String[] fields = msg.split(FIELD_SEPARATOR);
                    if (fields.length < COLUMNS.length) {
                        // Skip malformed messages instead of killing the whole
                        // consumer loop with an ArrayIndexOutOfBoundsException.
                        System.err.println("Skipping malformed message (" + fields.length + " fields): " + msg);
                        continue;
                    }

                    // One Put per message; column order mirrors COLUMNS.
                    Put put = new Put(getRowKey(msg));
                    for (int i = 0; i < COLUMNS.length; i++) {
                        put.addColumn(FAMILY,
                                COLUMNS[i].getBytes(StandardCharsets.UTF_8),
                                fields[i].getBytes(StandardCharsets.UTF_8));
                    }
                    table.put(put);
                }
            }
        }
    }

    /**
     * Builds the row key for a message:
     * {@code md5(sender + "_" + receiver)[0,8) + "_" + sender + "_" + receiver + "_" + epochMillis}.
     * The MD5 prefix salts the key so rows of one conversation spread across regions.
     *
     * @param msg raw \001-separated message line
     * @return UTF-8 encoded row key bytes
     */
    private static byte[] getRowKey(String msg) {
        String[] fields = msg.split(FIELD_SEPARATOR);
        String msgTime = fields[0];
        String senderAccount = fields[2];
        String receiverAccount = fields[11];

        // Parse "yyyy-MM-dd HH:mm:ss" in the system time zone to epoch millis,
        // matching the original SimpleDateFormat behavior.
        long time = LocalDateTime.parse(msgTime, MSG_TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();

        // First 8 hex chars of the MD5 of "sender_receiver".
        String md5Prefix = MD5Hash.getMD5AsHex(
                (senderAccount + "_" + receiverAccount).getBytes(StandardCharsets.UTF_8)).substring(0, 8);

        return (md5Prefix + "_" + senderAccount + "_" + receiverAccount + "_" + time)
                .getBytes(StandardCharsets.UTF_8);
    }
}
