package com.shujia.kafka;

import com.aliyun.datahub.client.DatahubClient;
import com.aliyun.datahub.client.DatahubClientBuilder;
import com.aliyun.datahub.client.auth.AliyunAccount;
import com.aliyun.datahub.client.common.DatahubConfig;
import com.aliyun.datahub.client.exception.DatahubClientException;
import com.aliyun.datahub.client.http.HttpConfig;
import com.aliyun.datahub.client.model.PutRecordsResult;
import com.aliyun.datahub.client.model.RecordEntry;
import com.aliyun.datahub.client.model.RecordSchema;
import com.aliyun.datahub.client.model.TupleRecordData;
import com.google.gson.Gson;
import com.shujia.datahub.DatahubUtil;
import com.shujia.kafkaTopicModel.WeiboArticle;
import com.shujia.kafkaTopicModel.WeiboUser;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

public class KafkaConsumerWeibo {
    // 写入Tuple型数据
    // 写入Tuple型数据
    /**
     * Writes a batch of tuple records to the given DataHub project/topic.
     * Records that DataHub reports as failed are handed to {@link #retry} for
     * up to {@code retryTimes} further attempts. DataHub client errors are
     * logged (requestId + message) rather than propagated.
     */
    public static void tupleExample(DatahubClient datahubClient, String project, String topic, int retryTimes, List<RecordEntry> recordEntries) {
        try {
            // Push the prepared batch to DataHub.
            PutRecordsResult putResult = datahubClient.putRecords(project, topic, recordEntries);
            // Only the rejected subset (if any) is re-sent.
            if (putResult.getFailedRecordCount() > 0) {
                retry(datahubClient, putResult.getFailedRecords(), retryTimes, project, topic);
            }
        } catch (DatahubClientException e) {
            System.out.println("requestId:" + e.getRequestId() + "\tmessage:" + e.getErrorMessage());
        }
    }

    //重试机制
    //重试机制
    /**
     * Retries writing failed records to DataHub up to {@code retryTimes} times.
     * Each attempt re-sends only the records the previous attempt failed to
     * write. Prints "retryFailure" if records still remain after all attempts.
     *
     * NOTE(review): the original implementation mixed a {@code while} loop that
     * always broke after one iteration with recursion for the actual retries;
     * this version is a single iterative loop with the same outcome and no
     * recursion-depth risk.
     */
    public static void retry(DatahubClient client, List<RecordEntry> records, int retryTimes, String project, String topic) {
        List<RecordEntry> remaining = records;
        for (int attempt = 0; attempt < retryTimes; attempt++) {
            PutRecordsResult recordsResult = client.putRecords(project, topic, remaining);
            if (recordsResult.getFailedRecordCount() == 0) {
                // Every record written successfully — done.
                return;
            }
            // Narrow the batch to just the records that still failed.
            remaining = recordsResult.getFailedRecords();
        }
        // Retries exhausted (or retryTimes <= 0) with records still unwritten.
        System.out.println("retryFailure");
    }

    /**
     * Entry point: consumes JSON weibo-article messages from the Kafka topic
     * "ods_kafka_weibo" and forwards each polled batch to the DataHub topic
     * "weibo_article" as tuple records. Runs until an exception escapes the
     * poll loop, then prints the total consumed count and closes the consumer.
     */
    public static void main(String[] args) {
        // 指定kafka配置
        Properties props = new Properties();
        // kafka broker地址
        props.put("bootstrap.servers", "master:9092,node1:9092,node2:9092");
        // 消费者组id
        props.put("group.id", "consumer12");
        // 自动偏移量置为最早的
        props.put("auto.offset.reset", "earliest");
        // 一次最大消费数量
        props.put("max.poll.records", "1000");
        // 指定key的序列化方式
        props.put("key.deserializer", StringDeserializer.class.getName());
        // 指定value的序列化方式
        props.put("value.deserializer", StringDeserializer.class.getName());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

        String topic = "ods_kafka_weibo";
        consumer.subscribe(Arrays.asList(topic));

        Gson gson = new Gson();
        String project = "sentiment";
        String dataHubTopic = "weibo_article";
        DatahubClient datahubClient = new DatahubUtil(project, dataHubTopic).datahubClient;
        // 获取schema — fetched once and reused for every record.
        RecordSchema recordSchema = datahubClient.getTopic(project, dataHubTopic).getRecordSchema();
        int count = 0;
        try {
            while (true) {
                ConsumerRecords<String, String> msgList = consumer.poll(1000);
                List<RecordEntry> recordEntries = new ArrayList<>();
                System.out.println("recordEntries is " + recordEntries.size());

                for (ConsumerRecord<String, String> record : msgList) {
                    recordEntries.add(toRecordEntry(gson, recordSchema, record.value()));
                    count++;
                }
                System.out.println(recordEntries.size());
                if (recordEntries.size() > 0) {
                    System.out.println("往DataHub写入了" + recordEntries.size() + "条数据");
                    KafkaConsumerWeibo.tupleExample(datahubClient, project, dataHubTopic, 3, recordEntries);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            System.out.println("一共消费了" + count + "条数据");
            consumer.close();
        }
    }

    /**
     * Converts one JSON message into a DataHub tuple RecordEntry using the
     * given schema. Numeric string fields that are null become 0.
     */
    private static RecordEntry toRecordEntry(Gson gson, RecordSchema recordSchema, String json) {
        WeiboArticle weibo = gson.fromJson(json, WeiboArticle.class);
        // 对每条数据设置额外属性，例如ip 机器名等。可以不设置额外属性，不影响数据写入
        TupleRecordData data = new TupleRecordData(recordSchema);
        data.setField("id", weibo.id);
        data.setField("comments_count", parseLongOrZero(weibo.comments_count));
        data.setField("created_at", weibo.created_at);
        data.setField("source", weibo.source);
        data.setField("reposts_count", parseLongOrZero(weibo.reposts_count));
        data.setField("attitudes_count", parseLongOrZero(weibo.attitudes_count));
        data.setField("text", weibo.text);
        data.setField("user_id", parseLongOrZero(weibo.user_id));
        RecordEntry recordEntry = new RecordEntry();
        recordEntry.setRecordData(data);
        return recordEntry;
    }

    /**
     * Parses a numeric string as a long, mapping null to 0 — the same
     * null-check/parse ternary the original repeated for each count field.
     * NOTE(review): a non-numeric, non-null string still throws
     * NumberFormatException, exactly as before.
     */
    private static long parseLongOrZero(String value) {
        return value != null ? Long.parseLong(value) : 0L;
    }


}
