import java.io.ByteArrayOutputStream;
import java.io.File;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.luben.zstd.Zstd;

import io.github.cdimascio.dotenv.Dotenv;

import lib.avro.Avro;

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericContainer;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.ExtendedJsonDecoder;

import org.apache.commons.io.FileUtils;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * StreamingGate merges two buffered Kafka input topics — one carrying
 * zstd-compressed Avro payloads, one carrying JSON payloads — into a single
 * output topic whose messages use the Confluent schema-registry wire format
 * (magic byte + 4-byte big-endian schema id + Avro binary body).
 *
 * <p>The process only runs when one of the host IPs listed in
 * {@code EXCHANGE_GATE_PUSH_STREAMING_HOSTS} matches a local interface
 * address; otherwise it exits immediately with status 0.
 */
public class StreamingGate {
    private static final String MODULE = "push-gate-java:streaming";
    private static final Logger LOGGER = LogManager.getLogger(LogManager.ROOT_LOGGER_NAME);

    // Compiled once: _resolveEnv recurses, so per-call compilation is wasteful.
    private static final Pattern ENV_PLACEHOLDER = Pattern.compile("\\$\\{([^}]*)\\}");

    // Confluent wire-format prefix for schema-registry encoded messages.
    private static final byte MAGIC_BYTE = 0x0;

    /** Logs an info-level message tagged with this component's name. */
    private static void info(String message) {
        LOGGER.info("[component:{}] {}", MODULE, message);
    }

    /** Logs a warn-level message tagged with this component's name. */
    private static void warn(String message) {
        LOGGER.warn("[component:{}] {}", MODULE, message);
    }

    /** Logs an error-level message tagged with this component's name. */
    private static void error(String message) {
        LOGGER.error("[component:{}] {}", MODULE, message);
    }

    /**
     * Resolves {@code name} from the .env file, recursively expanding every
     * {@code ${OTHER_VAR}} placeholder found in its value.
     *
     * @return the fully expanded value, or {@code null} when the variable (or
     *         any variable it references) is undefined
     */
    private static String _resolveEnv(Dotenv dotenv, String name) {
        String value = dotenv.get(name);
        if (value == null) {
            return null;
        }
        String resolved = value;
        Matcher matcher = ENV_PLACEHOLDER.matcher(value);
        while (matcher.find()) {
            String expansion = _resolveEnv(dotenv, matcher.group(1));
            if (expansion == null) {
                return null;
            }
            // BUGFIX: accumulate on `resolved`. The previous code replaced into
            // the raw value each iteration, so when a value held several
            // ${...} placeholders only the last expansion survived.
            resolved = resolved.replace(matcher.group(0), expansion);
        }
        return resolved;
    }

    /**
     * Like {@link #_resolveEnv(Dotenv, String)} but falls back to
     * {@code defaultValue} when the variable cannot be resolved.
     */
    public static String resolveEnv(Dotenv dotenv, String name, String defaultValue) {
        String resolved = _resolveEnv(dotenv, name);
        return resolved != null ? resolved : defaultValue;
    }

    /**
     * Returns the first local interface address that appears in {@code nodes}
     * (the configured host list), or {@code null} when none matches.
     *
     * <p>NOTE(review): this is a substring check, so "10.0.0.1" also matches a
     * nodes list containing "10.0.0.10" — confirm the host-list format rules
     * this out before tightening it.
     */
    public static String resolveMyIp(String nodes) throws Exception {
        for (NetworkInterface ni : Collections.list(NetworkInterface.getNetworkInterfaces())) {
            for (InetAddress addr : Collections.list(ni.getInetAddresses())) {
                String candidate = addr.getHostAddress();
                if (nodes.contains(candidate)) {
                    return candidate;
                }
            }
        }
        return null;
    }

    /**
     * Registers {@code schemaStr} with the schema registry at {@code url} and
     * returns the registry-assigned schema id.
     *
     * @throws IllegalStateException when the response carries no numeric "id"
     *         (e.g. an error payload) — fail fast with context instead of the
     *         bare NullPointerException the old unchecked (int) cast produced
     */
    private static int registerSchema(OkHttpClient http, String url, String schemaStr) throws Exception {
        // The registry expects {"schema":"<escaped schema json>"}; all
        // whitespace is stripped from the submitted body, as before.
        String content = String.format("{\"schema\":\"%s\"}", schemaStr.replace("\"", "\\\"")).replaceAll("\\s+", "");
        MediaType mediaType = MediaType.get("application/vnd.schemaregistry.v1+json");
        Request request = new Request.Builder()
                .url(url)
                .post(RequestBody.create(mediaType, content))
                .build();
        // try-with-resources releases the OkHttp response body.
        try (Response response = http.newCall(request).execute()) {
            String body = response.body().string();
            info("response:" + body);
            Map<String, Object> json = new ObjectMapper().readValue(body, new TypeReference<Map<String, Object>>() {});
            Object id = json.get("id");
            if (!(id instanceof Number)) {
                throw new IllegalStateException("schema registry returned no id: " + body);
            }
            return ((Number) id).intValue();
        }
    }

    /** Builds the byte-array consumer for the two buffered input topics. */
    private static KafkaConsumer<byte[], byte[]> newConsumer(String bootstrapServers) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("auto.offset.reset", "earliest");
        props.put("group.id", "exchange-gate-buffer-java");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        return new KafkaConsumer<>(props);
    }

    /** Builds the snappy-compressing byte-array producer for the merged topic. */
    private static KafkaProducer<byte[], byte[]> newProducer(String bootstrapServers) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("compression.type", "snappy");
        // props.put("transactional.id", myIp);  // kept from original: toggle for exactly-once mode
        props.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        return new KafkaProducer<>(props);
    }

    /**
     * Serializes {@code avro} with {@code writer} and frames it in the
     * Confluent wire format: MAGIC_BYTE, big-endian 4-byte schema id, payload.
     */
    private static byte[] toWireFormat(GenericDatumWriter<Object> writer, GenericContainer avro, int schemaId)
            throws Exception {
        ByteArrayOutputStream avroOut = new ByteArrayOutputStream();
        Encoder encoder = EncoderFactory.get().binaryEncoder(avroOut, null);
        // The writer takes Object, so the old GenericData.Array cast was a no-op.
        writer.write(avro, encoder);
        encoder.flush();

        ByteArrayOutputStream framed = new ByteArrayOutputStream();
        framed.write(MAGIC_BYTE);
        framed.write(ByteBuffer.allocate(4).putInt(schemaId).array()); // ByteBuffer defaults to big-endian
        framed.write(avroOut.toByteArray());
        return framed.toByteArray();
    }

    /**
     * Entry point: resolves configuration, registers the Avro schema, then
     * loops forever consuming the JSON and Avro buffer topics and
     * re-publishing each record to the merged wire-format topic.
     * Undecodable records are logged and skipped (at-least-once delivery:
     * offsets are committed only after a batch is handed to the producer).
     */
    public static void main(String[] args) throws Exception {
        Dotenv dotenv = Dotenv.configure().directory("meta-lib-static/env/.env").load();

        String nodes = resolveEnv(dotenv, "EXCHANGE_GATE_PUSH_STREAMING_HOSTS", "127.0.0.1");
        info("nodes:" + nodes);
        String myIp = resolveMyIp(nodes);
        if (myIp == null) {
            // Not one of the designated streaming hosts: exit quietly.
            error("EXCHANGE_GATE_PUSH_STREAMING_HOSTS not match! skip");
            System.exit(0);
        }
        info("myIp:" + myIp);

        String bufferPrefix = resolveEnv(dotenv, "EXCHANGE_GATE_PUSH_STREAMING_PREFIX", "data_buffer_dev");
        String inKafkas = resolveEnv(dotenv, "EXCHANGE_GATE_PUSH_STREAMING_IN_KAFKAS", "http://127.0.0.1:9092");
        String outKafkas = resolveEnv(dotenv, "EXCHANGE_GATE_PUSH_STREAMING_OUT_KAFKAS", "http://127.0.0.1:9092");
        String outKafkaSrs = resolveEnv(dotenv, "EXCHANGE_GATE_PUSH_STREAMING_OUT_KAFKA_SRS",
                "http://127.0.0.1:8081");

        String avroTopic = bufferPrefix + "_dc_sdk_push_avro";
        String jsonTopic = bufferPrefix + "_dc_sdk_push_json";
        String mergeTopic = bufferPrefix + "_dc_sdk_push";

        String schemaStr = FileUtils.readFileToString(
                new File("meta-lib-static/avsc/dcSdkPlatformBulk.avsc"), StandardCharsets.UTF_8);

        // Register against the first schema-registry host only.
        String url = String.format("%s/subjects/%s_%s-value/versions",
                outKafkaSrs.split(",")[0], bufferPrefix, "dc_sdk_push");
        info(url);
        int schemaId = registerSchema(new OkHttpClient(), url, schemaStr);

        Schema schema = new Schema.Parser().parse(schemaStr);
        GenericDatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
        ObjectMapper objectMapper = new ObjectMapper();

        // try-with-resources so both clients flush/close if the loop ever dies.
        try (KafkaConsumer<byte[], byte[]> consumer = newConsumer(inKafkas);
             KafkaProducer<byte[], byte[]> producer = newProducer(outKafkas)) {
            consumer.subscribe(Arrays.asList(avroTopic, jsonTopic));
            // producer.initTransactions();  // kept from original: pairs with the transactional.id toggle
            while (true) {
                // producer.beginTransaction();
                for (ConsumerRecord<byte[], byte[]> record : consumer.poll(Duration.ofMillis(Long.MAX_VALUE))) {
                    try {
                        byte[] payload = record.value();
                        GenericContainer avro;
                        if (record.topic().equals(jsonTopic)) {
                            // JSON path: patch known type mismatches before Avro conversion.
                            List<Map<String, Object>> dataList = objectMapper.readValue(
                                    payload, new TypeReference<List<Map<String, Object>>>() {});
                            dataList.forEach(StreamingGate::fieldCastType);
                            avro = Avro.fromJson(schema, objectMapper.writeValueAsString(dataList));
                        } else {
                            // Only the two subscribed topics arrive here, so anything
                            // non-JSON is the zstd-compressed Avro topic.
                            avro = Avro.fromCompressedBin(schema, payload);
                        }
                        producer.send(new ProducerRecord<byte[], byte[]>(
                                mergeTopic, null, toWireFormat(writer, avro, schemaId)));
                    } catch (Exception e) {
                        // Drop the poison record but keep the stream alive.
                        if (record.topic().equals(jsonTopic)) {
                            warn("unvalid warn:" + new String(record.value(), StandardCharsets.UTF_8));
                        } else {
                            warn("unvalid bin-base64:" + Base64.getEncoder().encodeToString(record.value()));
                        }
                        // Log the stack trace through the logger instead of printStackTrace().
                        LOGGER.warn("[component:{}] record dropped", MODULE, e);
                    }
                }
                // producer.commitTransaction();
                // At-least-once: commit only after the whole polled batch was handled.
                consumer.commitSync();
            }
        }
    }

    /**
     * Patch: normalizes records whose field types disagree with the Avro
     * schema definition. Applied recursively to nested maps and to map
     * elements inside lists.
     *
     * <ul>
     *   <li>{@code user_id} is coerced to its string form</li>
     *   <li>{@code visible_window_height} / {@code page_height} arriving as
     *       JSON doubles are truncated to longs</li>
     * </ul>
     */
    public static void fieldCastType(Map<String, Object> map) {
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> nested = (Map<String, Object>) value;
                fieldCastType(nested);
            } else if (value instanceof List) {
                for (Object element : (List<?>) value) {
                    // Robustness: scalar list elements used to raise a
                    // ClassCastException that dropped the whole record.
                    if (element instanceof Map) {
                        @SuppressWarnings("unchecked")
                        Map<String, Object> nestedElement = (Map<String, Object>) element;
                        fieldCastType(nestedElement);
                    }
                }
            } else if (Objects.nonNull(value)) {
                String key = entry.getKey();
                if (key.equals("user_id")) {
                    entry.setValue(value.toString());
                } else if (key.equals("visible_window_height") || key.equals("page_height")) {
                    if (value instanceof Double) {
                        // Truncate toward zero, matching the original (long) cast.
                        entry.setValue(((Double) value).longValue());
                    }
                }
            }
        }
    }
}
