package com.sure0000.basicKafka.example;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.storage.Converter;

import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * @author xuyouchang
 * @date 2021/1/13.
 * <p>
 * 依赖包
 * <dependency>
 * <groupId>org.apache.kafka</groupId>
 * <artifactId>connect-json</artifactId>
 * <version>0.11.0.3</version>
 * </dependency>
 *
 * <dependency>
 * <groupId>org.apache.kafka</groupId>
 * <artifactId>kafka-clients</artifactId>
 * <version>0.11.0.3</version>
 * </dependency>
 *
 * <dependency>
 * <groupId>com.alibaba</groupId>
 * <artifactId>fastjson</artifactId>
 * <version>1.2.62</version>
 * </dependency>
 */
public class KafkaSchemaJsonProducer {

    private static final String SCHEMAS_ENABLE_CONFIG = "schemas.enable";


    public static void main(String[] args) {
        // 测试数据
        List<JSONObject> values = new ArrayList<>();

        for (int i = 0; i < 10000; i++) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("id", i);
            jsonObject.put("name", "xyc" + i);
            values.add(jsonObject);
        }

        Map<String, String> valueSchema = new HashMap<>();
        valueSchema.put("id", "int");
        valueSchema.put("name", "string");

        // kafka 连接信息
        String topic = "20210927_xyc_3";
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.17.1.105:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props);

        for (JSONObject valueO : values) {
            // 构造 record
            SourceRecord sourceRecord = extractRecord(topic, valueO.toJSONString(), valueSchema);

            // json 序列化转换器构造
            Map<String, Object> config = new HashMap<>();
            config.put(SCHEMAS_ENABLE_CONFIG, true);

            Converter keyConverter = new JsonConverter();
            keyConverter.configure(config, true);

            Converter valueConverter = new JsonConverter();
            valueConverter.configure(config, false);

            // 序列化
            byte[] key = keyConverter.fromConnectData(topic, sourceRecord.keySchema(), sourceRecord.key());
            byte[] value = valueConverter.fromConnectData(topic, sourceRecord.valueSchema(), sourceRecord.value());

            String ket = new String(key);
            System.out.println("key:" + ket);

            // producer
            ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(sourceRecord.topic(), null, value);

            producer.send(record, new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    System.out.println(String.format("offset:%s,partition:%s", metadata.offset(), metadata.partition()));
                }
            });
        }


        producer.close();

    }

    /**
     * 将获取数据转换为 source record
     *
     * @param topic       topic
     * @param resource    json 数据
     * @param valueSchema 数据结构
     */
    private static SourceRecord extractRecord(String topic, String resource, Map<String, String> valueSchema) {
        final Map<String, String> sourcePartition = Collections.singletonMap("http", "http");
        Schema schema = convertValueSchema(valueSchema, topic);
        return new SourceRecord(sourcePartition, null, topic, schema, convertStructValue(schema, resource));

    }

    /**
     * 将数据转换为 Strut 类型
     */
    private static Struct convertStructValue(Schema schema, String value) {
        Struct struct = new Struct(schema);
        try {
            JSONObject.parseObject(value).forEach(struct::put);
        } catch (Exception e) {
//            throw new DataException("the data with value.schema must be a json string", e);
        }
        return struct;
    }

    /**
     * 构建数据的 schema
     */
    private static Schema convertValueSchema(Map<String, String> valueSchema, String topic) {
        try {
            SchemaBuilder builder = SchemaBuilder.struct().name(topic);
            valueSchema.forEach((k, v) -> {
                builder.field(k, convertFieldType(v));
            });
            return builder.build();
        } catch (Exception e) {
            throw new RuntimeException("value schema must be json string");
        }
    }

    /**
     * 用户输入的字段类型转换为对应的 connect schema 类型
     *
     * @param fieldType 字段类型
     * @return 字段对应 schema
     */
    private static Schema convertFieldType(String fieldType) {
//        String fieldTypeLowerCase = fieldType.toLowerCase();
        // array 类型
//        if (fieldTypeLowerCase.startsWith(ARRAY_TYPE_FLAG)) {
//            String[] fieldArray = fieldTypeLowerCase.split(SPLIT_FLAG);
//            return SchemaBuilder.array(convertFieldType(fieldArray[1])).build();
//        }

        // map 类型
//        if (fieldTypeLowerCase.startsWith(MAP_TYPE_FLAG)) {
//            String[] fieldArray = fieldTypeLowerCase.split(SPLIT_FLAG);
//            return SchemaBuilder.map(convertFieldType(fieldArray[1]), convertFieldType(fieldArray[2])).build();
//        }

        switch (fieldType.toLowerCase()) {
            case "byte":
                return Schema.INT8_SCHEMA;
            case "short":
                return Schema.INT16_SCHEMA;
            case "int":
                return Schema.INT32_SCHEMA;
            case "long":
                return Schema.INT64_SCHEMA;
            case "float":
                return Schema.FLOAT32_SCHEMA;
            case "double":
                return Schema.FLOAT64_SCHEMA;
            case "boolean":
                return Schema.BOOLEAN_SCHEMA;
            case "string":
                return Schema.STRING_SCHEMA;
            default:
                throw new IllegalArgumentException(fieldType + " is not the right field type");
        }
    }
}
