package flink.deserialize;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;

/** @Description: 通用的反序列化类 把 json 字符串转成 JSONObject 对象 */
/**
 * Generic Kafka deserialization schema that parses each record value (a JSON
 * string) into a fastjson {@link JSONObject}.
 *
 * <p>When {@code includeMetadata} is enabled, the source topic name is attached
 * to the parsed object under the key {@code "topic"}.
 *
 * <p>Records that are null, carry a null value (tombstones), or contain
 * malformed JSON are mapped to {@code null}; Flink's Kafka source skips null
 * elements, so bad records are dropped after being logged.
 */
public class JSONObjectDeserialization implements KafkaDeserializationSchema<JSONObject> {
    private static final Logger LOG = LoggerFactory.getLogger(JSONObjectDeserialization.class);
    private final boolean includeMetadata;

    /**
     * @param includeMetadata if true, add the Kafka topic name to each parsed object
     */
    public JSONObjectDeserialization(boolean includeMetadata) {
        this.includeMetadata = includeMetadata;
    }

    /** The Kafka stream is unbounded; this schema never signals end-of-stream. */
    @Override
    public boolean isEndOfStream(JSONObject nextElement) {
        return false;
    }

    /**
     * Decodes the record value as UTF-8 and parses it into a {@link JSONObject}.
     *
     * @param record the consumed Kafka record; may be null or a tombstone (null value)
     * @return the parsed object (with the topic name attached when metadata is
     *     enabled), or {@code null} if the record is absent, is a tombstone, or
     *     its value is not valid JSON
     */
    @Override
    public JSONObject deserialize(ConsumerRecord<byte[], byte[]> record) {
        // Guard against tombstone records: value() is null for deletions and
        // would make new String(...) throw a NullPointerException.
        if (record == null || record.value() == null) {
            return null;
        }
        // StandardCharsets.UTF_8 avoids the per-call charset-name lookup and the
        // checked UnsupportedEncodingException of the String-name overload.
        String value = new String(record.value(), StandardCharsets.UTF_8);
        try {
            JSONObject jsonObject = JSONObject.parseObject(value);
            if (includeMetadata) {
                jsonObject.put("topic", record.topic());
            }
            return jsonObject;
        } catch (Exception e) {
            // Pass the throwable itself so SLF4J logs the full stack trace;
            // logging only e.getMessage() loses the failure context.
            LOG.error("failed to parse json, original data: {}", value, e);
            return null;
        }
    }

    @Override
    public TypeInformation<JSONObject> getProducedType() {
        return TypeInformation.of(JSONObject.class);
    }
}
