package dwd ;

import java.io.*;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// import java.sql.Timestamp;

import javax.annotation.Nullable;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.luben.zstd.Zstd;
import com.google.protobuf.Descriptors.Descriptor;

import extension.ParseDcEventClick;
import extension.ParseDcEventColdStart;
import extension.ParseDcEventCustom;
import extension.ParseDcEventHotStart;
import extension.ParseDcEventUseEnd;
import extension.ParseDcEventViewPage;

import lib.ApiClient;
import lib.ExtendedConfluentRegistryAvroDeserializationSchema;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.avro.shaded.org.apache.avro.Schema;
import org.apache.flink.avro.shaded.org.apache.avro.generic.GenericData;
import org.apache.flink.avro.shaded.org.apache.avro.generic.GenericRecord;
import org.apache.flink.formats.avro.AvroRowDataSerializationSchema;
import org.apache.flink.formats.avro.RowDataToAvroConverters;
import org.apache.flink.formats.avro.registry.confluent.ConfluentRegistryAvroSerializationSchema;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.apache.zookeeper.ZooKeeper;
import protobuf.Lib;

public class DcEvent {
    /**
     * Translates a DC SDK schema name into its short platform identifier.
     *
     * @param schemaName SDK schema name, e.g. "DcSdkAndroid"
     * @return platform identifier ("android", "ios", "web", "wechat", "h5"),
     *         or "unknown" for an unrecognized schema name
     */
    public static String extractPlatformName(String schemaName) {
        switch (schemaName) {
            case "DcSdkAndroid":
                return "android";
            case "DcSdkIos":
                return "ios";
            case "DcSdkWeb":
                return "web";
            case "DcSdkWechat":
                return "wechat";
            case "DcSdkH5":
                return "h5";
            default:
                return "unknown";
        }
    }
    /**
     * Derives the platform from whichever "is_&lt;platform&gt;" marker field is
     * non-null on the SDK record, checked in a fixed priority order.
     *
     * @param record SDK push record carrying optional platform marker fields
     * @return platform identifier, or "unknown" when no marker is present
     */
    public static String extractPlatformName(GenericRecord record) {
        final String[][] flagToPlatform = {
            { "is_android", "android" },
            { "is_ios", "ios" },
            { "is_web", "web" },
            { "is_wechat", "wechat" },
            { "is_h5", "h5" },
        };
        for (String[] pair : flagToPlatform) {
            if (record.get(pair[0]) != null) {
                return pair[1];
            }
        }
        return "unknown";
    }


    /**
     * Maps an SDK event schema name (e.g. "EventClickButton") to the
     * snake_case event identifier used for topic routing.
     *
     * <p>Fix: the fallback was previously misspelled "unkown"; it now matches
     * the "unknown" default returned by {@code extractPlatformName}.
     *
     * @param schemaName Avro schema name of the event record
     * @return event identifier, or "unknown" when no known prefix matches
     */
    public static String extractEventName(String schemaName) {
        // Prefix table preserves the original matching order.
        final String[][] prefixToEvent = {
            { "EventColdStart", "cold_start" },
            { "EventHotStart", "hot_start" },
            { "EventUseEnd", "use_end" },
            { "EventViewPage", "view_page" },
            { "EventClick", "click" },
            { "EventCustom", "custom" },
        };
        for (String[] entry : prefixToEvent) {
            if (schemaName.startsWith(entry[0])) {
                return entry[1];
            }
        }
        return "unknown";
    }
    /**
     * Drains the stream fully into memory, copying in 16 KiB chunks.
     * The stream is not closed here; that is the caller's responsibility.
     */
    private static byte[] toByteArray(InputStream in) throws Exception {
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[16384];
        for (int count = in.read(chunk, 0, chunk.length);
                count != -1;
                count = in.read(chunk, 0, chunk.length)) {
            collected.write(chunk, 0, count);
        }
        return collected.toByteArray();
    }
    /**
     * Decodes the stream's full contents as a UTF-8 string.
     *
     * <p>Fix: {@code new String(byte[])} used the platform default charset,
     * making schema parsing machine-dependent; UTF-8 is now explicit.
     */
    private static String toText(InputStream in) throws Exception {
        return new String(toByteArray(in), StandardCharsets.UTF_8) ;
    }

    /**
     * Reads the entire file at {@code schemaPath} into a string.
     *
     * <p>Fix: the original leaked the {@code FileInputStream}; the stream is
     * now closed via try-with-resources even when reading fails.
     *
     * @param schemaPath filesystem path of the schema file to read
     * @return the file's full contents as text
     * @throws Exception if the file cannot be opened or read
     */
    public static String slurpFile(String schemaPath) throws Exception {
        try (InputStream in = new FileInputStream(schemaPath)) {
            return toText(in);
        }
    }


    /**
     * Formats an epoch-milliseconds timestamp string as a yyyyMMdd date in
     * the Asia/Shanghai (China) time zone.
     *
     * <p>Rewritten on java.time: the legacy {@code SimpleDateFormat} is not
     * thread-safe and was re-created on every call; {@code DateTimeFormatter}
     * is immutable and thread-safe.
     *
     * @param s epoch timestamp in milliseconds, as a decimal string
     * @return the corresponding China-local date, e.g. "20210101"
     * @throws NumberFormatException if {@code s} is not a valid long
     */
    public static String parseChinaDt(String s) {
        long epochMillis = Long.parseLong(s);
        return Instant.ofEpochMilli(epochMillis)
                .atZone(ZoneId.of("Asia/Shanghai"))
                .format(DateTimeFormatter.ofPattern("yyyyMMdd"));
    }


    public static void main(String[] args) throws Exception {
        // Flink job wiring: consume batched DC-SDK pushes from a Kafka buffer
        // topic, explode each batch into typed per-event rows, and write every
        // event type to its own Avro-encoded Kafka topic.

        // NOTE(review): reads args[1], not args[0] -- presumably args[0] is
        // consumed elsewhere by the launcher; confirm the argument layout.
        String metaServerNodes = args[1] ;
        Map<String, String> dotenv= ApiClient.getMetaEnv(metaServerNodes) ;

        // Topic-name prefixes and endpoints from the meta service, with dev defaults.
        String buffer_prefix = dotenv.getOrDefault("PIPELINE_DC_EVENT_BUFFER_PREFIX", "data_buffer_dev");
        String pipeline_prefix = dotenv.getOrDefault("PIPELINE_DC_EVENT_PIPELINE_PREFIX", "data_pipeline_dev");
        String kafkas_s = dotenv.getOrDefault("PIPELINE_DC_EVENT_KAFKAS", "127.0.0.1:9092");
        
        // Confluent schema-registry endpoint(s).
        String srs_s = dotenv.getOrDefault("PIPELINE_DC_EVENT_KAFKA_SRS", "127.0.0.1:8081");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Avro reader schema for the batched SDK payload, fetched from the meta service.
        String schema_s = ApiClient.getMetaCommonSchema(metaServerNodes) ;
        Schema reader_schema = new Schema.Parser().parse(schema_s);

        Properties consumer_properties = new Properties();
        consumer_properties.setProperty("bootstrap.servers", kafkas_s);
        consumer_properties.setProperty("group.id", "data_pipeline__dc_event");

        // Source: "<buffer_prefix>_dc_sdk_push" topic, deserialized as
        // GenericData.Array batches; always rewinds to the earliest offset.
        SourceFunction<GenericData.Array> kafka_source = new FlinkKafkaConsumer<>(
                String.format("%s_%s", buffer_prefix, "dc_sdk_push"),
                ExtendedConfluentRegistryAvroDeserializationSchema.forGenericArray(reader_schema, srs_s),
                consumer_properties).setStartFromEarliest();


        // DataStream
        // Flatten: one (event_name, parsed_row) pair per SDK event in each batch.
        DataStream<Tuple2<String, RowData>> ds = env.addSource(kafka_source).flatMap(new FlatMapFunction<GenericData.Array, Tuple2<String, RowData>>() {
            @Override
            public void flatMap(GenericData.Array rec, Collector<Tuple2<String, RowData>> out) throws Exception {
                GenericData.Array<GenericRecord> bulk = rec;
                for (GenericRecord sdkRecord : bulk) {
                    String platformName = extractPlatformName(sdkRecord) ;
                    // String platformName = extractPlatformName(sdkRecord.getSchema().getName()) ;
                    GenericRecord avroHeader = (GenericRecord) sdkRecord.get("common");

                    // Partition date (yyyyMMdd, Asia/Shanghai) from the upload time.
                    GenericRecord basicInfo = (GenericRecord) avroHeader.get("basic") ;
                    String dt = parseChinaDt(basicInfo.get("upload_time").toString());
                    
                    RowData header = ParseDcEventColdStart.parseHeader(avroHeader) ; // same for every event
                    for (GenericRecord sdkEvent : (GenericData.Array<GenericRecord>) sdkRecord.get("events")) {
                        String schemaName = extractEventName(sdkEvent.getSchema().getName()) ;
                       
                        RowData headerEvent = null ;
                          
                        // Dispatch to the per-event parser; an unrecognized
                        // schema leaves headerEvent null and emits nothing.
                        switch (schemaName) {
                            case "cold_start": 
                                headerEvent = ParseDcEventColdStart.parseEvent(sdkEvent, platformName, dt, header);
	                        break ;
                            case "hot_start": 
                                headerEvent = ParseDcEventHotStart.parseEvent(sdkEvent, platformName, dt, header);
	                        break ;
                            case "use_end": 
                                headerEvent = ParseDcEventUseEnd.parseEvent(sdkEvent, platformName, dt, header);
	                        break ;
                            case "click": 
                                headerEvent = ParseDcEventClick.parseEvent(sdkEvent, platformName, dt, header);
	                        break ;
                            case "view_page": 
                                headerEvent = ParseDcEventViewPage.parseEvent(sdkEvent, platformName, dt, header);
	                        break ;
                            case "custom": 
                                headerEvent = ParseDcEventCustom.parseEvent(sdkEvent, platformName, dt, header);
	                        break ;
                        }
                        if (headerEvent != null) out.collect(new Tuple2(schemaName, headerEvent));
                    }

                }
            }
        });

        String[] eventNames = { "cold_start", "hot_start", "use_end", "click", "view_page", "custom" };
        
        // One sink per event type: filter the shared stream by event name and
        // serialize with that event's Avro row type, registering the schema
        // under the "<topic>-value" subject in the Confluent registry.
        for (String eventName : eventNames) {
            RowType eventRowType = null ;
            
            String event_topic = String.format("%s_dc_event_%s", pipeline_prefix, eventName);
            switch (eventName) {
                case "cold_start": 
                    eventRowType = (RowType)ParseDcEventColdStart.getDataType().getLogicalType();
                    break ;
                case "hot_start":
                    eventRowType = (RowType)ParseDcEventHotStart.getDataType().getLogicalType();
                    break ;
                case "use_end":
                    eventRowType = (RowType)ParseDcEventUseEnd.getDataType().getLogicalType();
                    break ;
                case "click":
                    eventRowType = (RowType)ParseDcEventClick.getDataType().getLogicalType();
                    break ;
                case "view_page":
                    eventRowType = (RowType)ParseDcEventViewPage.getDataType().getLogicalType();
                    break ;
                case "custom":
                    eventRowType = (RowType)ParseDcEventCustom.getDataType().getLogicalType();
                    break ;
            }
            
            ds.flatMap(new FlatMapFunction<Tuple2<String, RowData>, RowData>() {
                @Override
                public void flatMap(Tuple2<String, RowData> in, Collector<RowData> out) {
                    if(in.f0.equals(eventName)) {
                        out.collect(in.f1)  ;
                    }
                }
            }).addSink(
                // new PrintSinkFunction()
                new FlinkKafkaProducer<RowData>(kafkas_s, event_topic, 
                    new AvroRowDataSerializationSchema(
                        eventRowType
                      , ConfluentRegistryAvroSerializationSchema.forGeneric(
                            event_topic + "-value"
                          , AvroSchemaConverter.convertToSchema(eventRowType) 
                          , srs_s
                        )
                      , RowDataToAvroConverters.createConverter(eventRowType)
                    )
                )
            ) ;
        }


        // Blocking call: submits the job graph and runs until cancelled or failed.
        env.execute();
    }
}

