package com.yyx.up.consumer.bean;

import com.yyx.up.common.bean.Consumer;
import com.yyx.up.common.constant.Names;
import com.yyx.up.consumer.dao.HBaseDao;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Properties;

/**
 * Description: 观影日志的消费者对象
 * @Author: yyx
 * @Create: 2023/5/10 14:43
 * @Version: 8
 */
public class WatchLogConsumer implements Consumer {

    /**
     * Consumes watch-log records from Kafka and writes each record value into HBase.
     * <p>
     * Loads consumer settings from {@code consumer.properties} on the classpath,
     * subscribes to the topic named by {@link Names#TOPIC}, then polls forever,
     * inserting every record value via {@link HBaseDao#insertData}.
     * <p>
     * Runs until an exception breaks the poll loop; the Kafka consumer is closed
     * in that case so its network resources are released.
     */
    @Override
    public void consume() {
        KafkaConsumer<String, String> consumer = null;
        try {
            // Load Kafka consumer configuration from the classpath via the
            // current thread's context class loader.
            InputStream config = Thread.currentThread().getContextClassLoader()
                    .getResourceAsStream("consumer.properties");
            if (config == null) {
                // Fail fast with a clear message instead of an NPE inside Properties.load.
                throw new IllegalStateException("consumer.properties not found on classpath");
            }
            Properties properties = new Properties();
            properties.load(config);

            // Kafka consumer that receives the data collected by Flume.
            consumer = new KafkaConsumer<>(properties);
            // Subscribe to the watch-log topic.
            consumer.subscribe(Arrays.asList(Names.TOPIC.getValue()));

            // HBase data-access object; init() opens the underlying connection.
            HBaseDao hBaseDao = new HBaseDao();
            hBaseDao.init();

            // Poll forever, persisting every record value into HBase.
            while (true) {
                // NOTE(review): poll(long) is deprecated in Kafka clients >= 2.0;
                // switch to poll(Duration) once the client version is confirmed.
                ConsumerRecords<String, String> consumerRecords = consumer.poll(100);
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    System.out.println(consumerRecord.value());
                    hBaseDao.insertData(consumerRecord.value());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // The loop only exits via exception; release Kafka network resources.
            if (consumer != null) {
                consumer.close();
            }
        }
    }

    /**
     * Releases resources held by this consumer.
     * <p>
     * Currently a no-op: {@link #consume()} owns and closes its own resources.
     *
     * @throws IOException never thrown by this implementation; declared by the interface
     */
    @Override
    public void close() throws IOException {

    }
}
