package com.chenzhiling.study;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import io.delta.flink.sink.DeltaSink;
import org.apache.flink.core.fs.Path;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;

import java.util.Arrays;
import java.util.Properties;

/**
 * Author: CHEN ZHI LING
 * Date: 2022/11/17
 * Description: Static helpers for streaming JSON records (MySQL CDC events and
 * Kafka messages) into Delta Lake tables through the Flink/Delta connector.
 */
public final class FlinkDeltaUtil {

    /** Converts external {@link Row}s with the MySQL layout to internal {@link RowData}. */
    public static final DataFormatConverters.DataFormatConverter<RowData, Row> MYSQL_CONVERTER =
            DataFormatConverters.getConverterForDataType(
                    TypeConversions.fromLogicalToDataType(getMysqlRowType())
            );

    /** Converts external {@link Row}s with the Kafka layout to internal {@link RowData}. */
    public static final DataFormatConverters.DataFormatConverter<RowData, Row> CONVERTER =
            DataFormatConverters.getConverterForDataType(
                    TypeConversions.fromLogicalToDataType(getKafkaRowType())
            );

    /** Utility class: all members are static, so instantiation is forbidden. */
    private FlinkDeltaUtil() {
        throw new AssertionError("FlinkDeltaUtil is a static utility class");
    }

    /**
     * Logical row type of the MySQL source table:
     * {@code (id BIGINT, name VARCHAR, dept_id INT)}.
     */
    public static RowType getMysqlRowType() {
        return new RowType(Arrays.asList(
                new RowType.RowField("id", new BigIntType()),
                new RowType.RowField("name", new VarCharType(VarCharType.MAX_LENGTH)),
                new RowType.RowField("dept_id", new IntType())));
    }

    /**
     * Hadoop configuration used by the Delta sink; enables Snappy compression
     * for the Parquet data files it writes.
     */
    public static org.apache.hadoop.conf.Configuration getHadoopConf() {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("parquet.compression", "SNAPPY");
        return conf;
    }

    /**
     * Builds a Delta sink that writes {@link RowData} with the given schema to
     * the Delta table at {@code deltaTablePath}.
     *
     * @param deltaTablePath root path of the target Delta table
     * @param rowType        logical schema of the rows being written
     * @return a ready-to-use {@link DeltaSink}
     */
    public static DeltaSink<RowData> createDeltaSink(String deltaTablePath, RowType rowType) {
        return DeltaSink
                .forRowData(new Path(deltaTablePath), getHadoopConf(), rowType)
                .build();
    }

    /**
     * Converts one MySQL CDC JSON event to internal {@link RowData} with the
     * {@link #getMysqlRowType()} layout.
     *
     * <p>The event must carry the new row image under the {@code "after"} key
     * (CDC insert/update style — presumably Debezium/Canal output; verify
     * against the producer). Events without an {@code "after"} payload are
     * rejected explicitly instead of failing with an opaque
     * {@link NullPointerException}.
     *
     * @param line raw JSON event string
     * @return the converted row {@code (id, name, dept_id)}
     * @throws IllegalArgumentException if the event has no {@code "after"} payload
     */
    public static RowData mysqlJsonToRowData(String line) {
        // Parse once and reuse — the original re-parsed the payload for every field.
        JSONObject after = JSON.parseObject(line).getJSONObject("after");
        if (after == null) {
            throw new IllegalArgumentException("CDC event has no 'after' payload: " + line);
        }
        Long id = after.getLong("id");
        String name = after.getString("name");
        Integer deptId = after.getInteger("dept_id");
        return MYSQL_CONVERTER.toInternal(Row.of(id, name, deptId));
    }

    /**
     * Kafka consumer properties for plain-string key/value records.
     *
     * <p>NOTE(review): parameter names were corrected — the first argument is
     * applied as the consumer {@code group.id} and the second as
     * {@code bootstrap.servers}; the original names ({@code topic},
     * {@code consumer}) did not match that usage. Call sites are unaffected
     * because Java passes arguments positionally.
     *
     * @param groupId          consumer group id
     * @param bootstrapServers Kafka bootstrap server list, e.g. {@code host:9092}
     * @return properties suitable for a String-deserializing Kafka consumer
     */
    public static Properties getProperties(String groupId, String bootstrapServers) {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", bootstrapServers);
        properties.setProperty("group.id", groupId);
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return properties;
    }

    /**
     * Logical row type of the Kafka records:
     * {@code (userId VARCHAR, stationTime VARCHAR, score INT, localTime VARCHAR)}.
     */
    public static RowType getKafkaRowType() {
        return new RowType(Arrays.asList(
                new RowType.RowField("userId", new VarCharType(VarCharType.MAX_LENGTH)),
                new RowType.RowField("stationTime", new VarCharType(VarCharType.MAX_LENGTH)),
                new RowType.RowField("score", new IntType()),
                new RowType.RowField("localTime", new VarCharType(VarCharType.MAX_LENGTH))));
    }

    /**
     * Converts one Kafka JSON message to internal {@link RowData} with the
     * {@link #getKafkaRowType()} layout.
     *
     * @param line raw JSON message with keys
     *             {@code user_id, station_time, score, local_time}
     * @return the converted row
     */
    public static RowData kafkaJsonToRowData(String line) {
        // Parse once instead of once per field.
        JSONObject json = JSON.parseObject(line);
        Row row = Row.of(
                json.getString("user_id"),
                json.getString("station_time"),
                json.getInteger("score"),
                json.getString("local_time"));
        return CONVERTER.toInternal(row);
    }
}
