/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */



package com.sui.bigdata.flink.sql.source.kafka;


import com.alibaba.fastjson.JSONObject;
import com.sui.bigdata.flink.sql.core.source.AbsDeserialization;
import com.sui.bigdata.flink.sql.source.kafka.metric.KafkaTopicPartitionLagMetric;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.calcite.shaded.com.google.common.base.Strings;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.shaded.guava18.com.google.common.collect.Maps;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.flink.streaming.connectors.kafka.internal.KafkaConsumerThread;
import org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher;
import org.apache.flink.types.Row;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.internals.SubscriptionState;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.sui.bigdata.flink.sql.core.metric.MetricConstant.SUI_PARTITION_GROUP;
import static com.sui.bigdata.flink.sql.core.metric.MetricConstant.SUI_TOPIC_GROUP;
import static com.sui.bigdata.flink.sql.core.metric.MetricConstant.SUI_TOPIC_PARTITION_LAG_GAUGE;

/**
 * Custom deserialization schema that parses a JSON-encoded Kafka message into a Flink {@link org.apache.flink.types.Row},
 * supporting nested field access via dot/bracket path expressions (e.g. {@code a.b[0].c}).
 * Date: 2018/09/18
 * Company: www.dtstack.com
 * @author sishu.yss
 */

public class CustomerJsonDeserialization extends AbsDeserialization<Row> {

    private static final Logger LOG = LoggerFactory.getLogger(CustomerJsonDeserialization.class);

    private static final long serialVersionUID = 2385115520960444192L;

    /**
     * Matches a bracketed array index such as {@code [0]} inside a path segment.
     * Hoisted to a constant: the old code recompiled this pattern twice per
     * segment per field per record.
     */
    private static final Pattern ARRAY_INDEX_PATTERN = Pattern.compile("(\\[[^\\]]*\\])");

    /** Timestamp layout accepted/produced for {@code java.sql.Timestamp} fields. */
    private static final String TIMESTAMP_PATTERN = "yyyy-MM-dd HH:mm:ss";

    // NOTE(review): never used by this class (fastjson does the parsing); kept so the
    // serialized form of existing checkpointed instances stays compatible.
    private final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * Type information describing the result type.
     */
    private final TypeInformation<Row> typeInfo;

    /**
     * Field names to parse. Indices match fieldTypes indices.
     */
    private final String[] fieldNames;

    /**
     * Types to parse fields as. Indices match fieldNames indices.
     */
    private final TypeInformation<?>[] fieldTypes;

    /**
     * Flag indicating whether to fail on a missing field.
     */
    private boolean failOnMissingField;

    private AbstractFetcher<Row, ?> fetcher;

    private boolean firstMsg = true;

    // NOTE(review): written by nobody, only cleared in deserialize()'s finally block;
    // retained (not removed) to preserve the serialized form.
    private Map<String, JsonNode> nodeAndJsonNodeMapping = Maps.newHashMap();

    /** Maps a row field name to the JSON path expression it is read from. */
    private Map<String, String> rowAndFieldMapping;

    // Parsed JSON of the record currently being deserialized; consulted by getValue().
    private JSONObject jsonObject;


    /**
     * @param typeInfo           row type (must be a {@link RowTypeInfo}) describing field names and types
     * @param rowAndFieldMapping row-field-name -> JSON-path-expression mapping; fields absent
     *                           from the map are looked up by their own name
     */
    public CustomerJsonDeserialization(TypeInformation<Row> typeInfo, Map<String, String> rowAndFieldMapping) {
        this.typeInfo = typeInfo;

        this.fieldNames = ((RowTypeInfo) typeInfo).getFieldNames();

        this.fieldTypes = ((RowTypeInfo) typeInfo).getFieldTypes();

        this.rowAndFieldMapping = rowAndFieldMapping;
    }

    /**
     * Deserializes one Kafka message into a {@link Row}.
     *
     * <p>Any record that cannot be parsed or converted is counted via
     * {@code dirtyDataCounter} and {@code null} is returned so the caller skips it.
     *
     * @param message raw message bytes (UTF-8 encoded JSON); may be {@code null}
     * @return the populated row, or {@code null} for unparseable/dirty records
     */
    @Override
    public Row deserialize(byte[] message) throws IOException {

        try {
            numInRecord.inc();
            if (message == null) {
                // FIX: the old code NPE'd on a null payload (both here and again while
                // logging inside the catch block). Treat it as dirty data instead.
                dirtyDataCounter.inc();
                return null;
            }
            numInBytes.inc(message.length);

            // FIX: decode explicitly as UTF-8 so behavior does not depend on the
            // JVM's platform default charset.
            String msg = new String(message, StandardCharsets.UTF_8);

            this.jsonObject = JSONObject.parseObject(msg);

            Row row = new Row(fieldNames.length);

            for (int i = 0; i < fieldNames.length; i++) {

                // Resolve the configured JSON path for this field, falling back to the
                // field name itself when no mapping was supplied.
                String nodeMappingKey = rowAndFieldMapping.getOrDefault(fieldNames[i], fieldNames[i]);

                String node = getValue(nodeMappingKey);

                if (node == null) {
                    if (failOnMissingField) {
                        throw new IllegalStateException("Failed to find field with name '"
                                + fieldNames[i] + "'.");
                    } else {
                        row.setField(i, null);
                    }
                } else {
                    // Read the value as the declared field type.
                    Object value;
                    if (fieldTypes[i].getTypeClass().getTypeName().equalsIgnoreCase("java.sql.Timestamp")) {
                        // Parse then re-format to normalize loosely formatted values
                        // (e.g. "2020-1-2 3:4:5") into the strict layout that
                        // Timestamp.valueOf requires. SimpleDateFormat is not
                        // thread-safe, so a fresh instance is created per use.
                        SimpleDateFormat format = new SimpleDateFormat(TIMESTAMP_PATTERN);
                        java.util.Date parsed = format.parse(node.replace("\"", ""));
                        value = Timestamp.valueOf(format.format(parsed));
                    } else {
                        value = getObject(node, fieldTypes[i].getTypeClass().getTypeName());
                    }
                    row.setField(i, value);
                }
            }

            numInResolveRecord.inc();
            return row;
        } catch (Exception e) {
            // add metric of dirty data
            dirtyDataCounter.inc();
            LOG.error(" Exception data : {}", new String(message, StandardCharsets.UTF_8), e);
            return null;
        } finally {
            nodeAndJsonNodeMapping.clear();
        }
    }


    // NOTE(review): unused by this class; kept for compatibility.
    private String getNodeKey(String prefix, String nodeName) {
        if (Strings.isNullOrEmpty(prefix)) {
            return nodeName;
        }

        return prefix + "." + nodeName;
    }

    // NOTE(review): unused by this class; kept for compatibility.
    private String getNodeKey(String prefix, int i) {
        if (Strings.isNullOrEmpty(prefix)) {
            return "[" + i + "]";
        }

        return prefix + "[" + i + "]";
    }

    public void setFetcher(AbstractFetcher<Row, ?> fetcher) {
        this.fetcher = fetcher;
    }


    /**
     * Registers a per-topic-partition lag gauge for every partition currently assigned
     * to the underlying Kafka consumer.
     *
     * <p>Reaches into Flink's fetcher/consumer internals via reflection because the
     * consumer's {@code SubscriptionState} is not exposed publicly.
     *
     * @throws Exception if reflection fails or partitions have not been assigned yet
     */
    protected void registerPtMetric(AbstractFetcher<Row, ?> fetcher) throws Exception {

        Field consumerThreadField = fetcher.getClass().getSuperclass().getDeclaredField("consumerThread");
        consumerThreadField.setAccessible(true);
        KafkaConsumerThread consumerThread = (KafkaConsumerThread) consumerThreadField.get(fetcher);

        Field hasAssignedPartitionsField = consumerThread.getClass().getDeclaredField("hasAssignedPartitions");
        hasAssignedPartitionsField.setAccessible(true);

        // wait until assignedPartitions

        boolean hasAssignedPartitions = (boolean) hasAssignedPartitionsField.get(consumerThread);

        if (!hasAssignedPartitions) {
            throw new RuntimeException("wait 50 secs, but not assignedPartitions");
        }

        Field consumerField = consumerThread.getClass().getDeclaredField("consumer");
        consumerField.setAccessible(true);

        KafkaConsumer kafkaConsumer = (KafkaConsumer) consumerField.get(consumerThread);
        Field subscriptionStateField = kafkaConsumer.getClass().getDeclaredField("subscriptions");
        subscriptionStateField.setAccessible(true);

        // topic partitions lag
        SubscriptionState subscriptionState = (SubscriptionState) subscriptionStateField.get(kafkaConsumer);
        Set<TopicPartition> assignedPartitions = subscriptionState.assignedPartitions();
        for (TopicPartition topicPartition : assignedPartitions) {
            MetricGroup metricGroup = getRuntimeContext().getMetricGroup().addGroup(SUI_TOPIC_GROUP, topicPartition.topic())
                    .addGroup(SUI_PARTITION_GROUP, topicPartition.partition() + "");
            metricGroup.gauge(SUI_TOPIC_PARTITION_LAG_GAUGE, new KafkaTopicPartitionLagMetric(subscriptionState, topicPartition));
        }

    }

    // NOTE(review): unused by this class; kept for compatibility.
    private static String partitionLagMetricName(TopicPartition tp) {
        return tp + ".records-lag";
    }


    /**
     * Resolves a dot-separated JSON path expression (with optional bracketed array
     * indexing, e.g. {@code a.b[0].c}) against the JSON object of the record
     * currently being deserialized.
     *
     * @param exp the path expression
     * @return the string value at the path, or {@code null} when any segment is
     *         missing or the expression cannot be evaluated
     */
    public String getValue(String exp) {
        try {
            Object object = this.jsonObject;
            String[] expArr = exp.split("\\.");
            for (int i = 0; i < expArr.length; i++) {
                Matcher m = ARRAY_INDEX_PATTERN.matcher(expArr[i]);
                boolean indexed = m.find();
                if (i == expArr.length - 1) {
                    // Last segment: extract the value as a string.
                    if (indexed) {
                        String index = m.group(0).substring(1, m.group().length() - 1);
                        String fieldName = expArr[i].split("\\[")[0];
                        return ((JSONObject) object).getJSONArray(fieldName).getString(Integer.parseInt(index));
                    }
                    return ((JSONObject) object).getString(expArr[i]);
                }
                // Intermediate segment: descend one level into the JSON tree.
                Object tempObject;
                if (indexed) {
                    String index = m.group(0).substring(1, m.group().length() - 1);
                    String fieldName = expArr[i].split("\\[")[0];
                    tempObject = ((JSONObject) object).getJSONArray(fieldName).get(Integer.parseInt(index));
                } else {
                    tempObject = ((JSONObject) object).getJSONObject(expArr[i]);
                }
                if (null == tempObject) {
                    return null;
                }
                object = tempObject;
            }
        } catch (Exception e) {
            // Malformed expression or unexpected JSON structure: treat as missing.
            return null;
        }
        return null;
    }

    /**
     * Converts the raw string value extracted from the JSON record to the Java type
     * named by {@code type}. Unknown type names fall through to the raw string.
     *
     * @param node raw string value
     * @param type fully qualified name of the target Java type
     * @return the converted value
     * @throws NumberFormatException if a numeric conversion fails (caught upstream
     *                               and counted as dirty data)
     */
    public Object getObject(String node, String type) {

        switch (type) {
            case "java.lang.String":
                return node;
            case "java.lang.Integer":
                return Integer.valueOf(node);
            case "java.lang.Long":
                return Long.valueOf(node);

            case "java.lang.Double":
                return Double.valueOf(node);

            case "java.lang.Float":
                return Float.valueOf(node);

            case "java.lang.Byte":
                return Byte.valueOf(node);

            case "java.lang.Short":
                return Short.valueOf(node);

            case "java.math.BigDecimal":
                // FIX: use the String constructor so fractional values such as "1.25"
                // are preserved; the old BigDecimal.valueOf(Long.valueOf(node)) threw
                // NumberFormatException on any non-integer input.
                return new BigDecimal(node);

            default:
                return node;
        }
    }


}