package cn.gwm.flink.streaming.task;


import cn.gwm.flink.streaming.demo.ods.OdsSignal;
import cn.gwm.flink.streaming.demo.ods.OdsSignalVectorizer;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.core.fs.Path;
import org.apache.flink.orc.writer.OrcBulkWriterFactory;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.BasePathBucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Objects;
import java.util.Properties;

/**
 * soh估算任务
 *
 * @author GW00283474
 */
public class DemoTask extends BaseTask {
    private static final Logger logger = LoggerFactory.getLogger(DemoTask.class);

    public static void main(String[] args) {
        ConfigLoader.init(args);
        try {
            //log4j2日志打印demo
            logDemoExecute(args);
            //odsSinkHdfsWithOrcDemo(args);
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("任务异常");
        }
    }

    /**
     * log4j2使用示例demo
     *
     * @param args
     * @throws Exception
     */
    static void logDemoExecute(String[] args) throws Exception {
        StreamExecutionEnvironment env = getEnv("logDemoTask");
        env.enableCheckpointing(120 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(300 * 1000L);
        env.setParallelism(3);
        FlinkKafkaConsumer<String> kafkaConsumer = decorateKafkaConsumer("logDemoTask", "test-lxy");
        SingleOutputStreamOperator<String> strToBean = env.addSource(kafkaConsumer)
                .map(new MapFunction<String, String>() {
                    @Override
                    public String map(String value) throws Exception {
                        logger.debug("日志系统打印debug：{}", value);
                        logger.info("日志系统打印info：{}", value);
                        logger.warn("日志系统打印warn：{}", value);
                        logger.error("日志系统打印error：{}", value);
                        return value;
                    }
                }).returns(Types.STRING);

        env.execute();
    }

    /**
     * ods层数据以orc+snappy的格式存储到hdfs的示例demo
     *
     * @param args
     * @throws Exception
     */
    static void odsSinkHdfsWithOrcDemo(String[] args) throws Exception {
        StreamExecutionEnvironment env = getEnv("odsSinkHdfsWithOrcDemo");
        env.enableCheckpointing(120 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(300 * 1000L);
        FlinkKafkaConsumer kafkaConsumer = decorateKafkaConsumer("odsSinkHdfsWithOrcDemo", "lxy-test-orc");
        SingleOutputStreamOperator<JSONObject> operator = env.addSource(kafkaConsumer).map(JSONUtil::parseObj).returns(JSONObject.class);
        SingleOutputStreamOperator<OdsSignal> streamOperator = operator.map(new MapFunction<JSONObject, OdsSignal>() {
            @Override
            public OdsSignal map(JSONObject value) throws Exception {
                OdsSignal odsSignal = new OdsSignal();
                //车架号和事件时间是必备属性
                //车架号
                Object deviceId = value.getObj("deviceId");
                //事件时间
                Object itemTime = value.get("item_time");
                if (Objects.isNull(deviceId)) {
                    //继续寻找车架号
                    Object vin = value.get("vin");
                    if (Objects.isNull(vin)) {
                        logger.error("数据源数据异常，无车架号");
                    } else {
                        odsSignal.setDeviceId(vin.toString());
                    }
                } else {
                    odsSignal.setDeviceId(deviceId.toString());
                }

                if (Objects.isNull(itemTime)) {
                    //继续寻找事件时间
                    Object tid = value.get("tid");
                    if (Objects.isNull(tid)) {
                        logger.error("数据源数据异常，事件时间");
                    } else {
                        odsSignal.setItemTime(tid.toString());
                    }
                } else {
                    odsSignal.setItemTime(itemTime.toString());
                }
                //事件时间 年月日
                SimpleDateFormat s = new SimpleDateFormat("yyyy-MM-dd");
                Date date = new Date();
                date.setTime(Long.valueOf(odsSignal.getItemTime()));
                odsSignal.setItemDate(s.format(date));

                //摄取时间
                Date iDate = new Date();
                odsSignal.setIngestionTime(String.valueOf(iDate.getTime()));
                odsSignal.setIngestionDate(s.format(iDate));
                //源信号
                odsSignal.setContent(JSONUtil.toJsonStr(value));
                logger.info("源数据：{}", JSONUtil.toJsonStr(odsSignal));
                return odsSignal;
            }
        }).returns(TypeInformation.of(OdsSignal.class));
        String schema = "struct<deviceId:string,itemTime:string,itemDate:string,ingestionTime:string,ingestionDate:string,content:string>";
        Properties writerProperties = new Properties();
        writerProperties.setProperty("orc.compress", "SNAPPY");
        OrcBulkWriterFactory<OdsSignal> writerFactory = new OrcBulkWriterFactory<OdsSignal>(new OdsSignalVectorizer(schema), writerProperties, new Configuration());
        //配置sink输出配置信息（这部分可按需修改，提取出公共部分作为baseTask内容的一部分）
        //配置文件前、后缀
        OutputFileConfig config = OutputFileConfig
                .builder()
                .withPartPrefix("ods")
                .withPartSuffix(".orc")
                .build();
        StreamingFileSink<OdsSignal> sink = StreamingFileSink
                //配置文件输出位置（可按需修改）
                .forBulkFormat(new Path(ConfigLoader.get("hdfsUri") + "/external/data/" + "odsTest"), writerFactory)
                //配置分桶器（可按需修改）
                .withBucketAssigner(new BasePathBucketAssigner<>())
                //无需修改
                .withRollingPolicy(OnCheckpointRollingPolicy.build())
                //无需修改
                .withOutputFileConfig(config)
                .build();
        streamOperator.addSink(sink);

        env.execute();
    }

    /**
     * ods层数据以orc+snappy的格式存储到hdfs的示例demo
     *
     * @param args
     * @throws Exception
     */
    static void odsSinkHdfsWithPerquDemo(String[] args) throws Exception {
        StreamExecutionEnvironment env = getEnv("odsSinkHdfsWithOrcDemo");
        env.enableCheckpointing(120 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(300 * 1000L);
        FlinkKafkaConsumer kafkaConsumer = decorateKafkaConsumer("odsSinkHdfsWithOrcDemo", "lxy-test-orc");
        SingleOutputStreamOperator<JSONObject> operator = env.addSource(kafkaConsumer).map(JSONUtil::parseObj).returns(JSONObject.class);
        SingleOutputStreamOperator<OdsSignal> streamOperator = operator.map(new MapFunction<JSONObject, OdsSignal>() {
            @Override
            public OdsSignal map(JSONObject value) throws Exception {
                OdsSignal odsSignal = new OdsSignal();
                //车架号和事件时间是必备属性
                //车架号
                Object deviceId = value.getObj("deviceId");
                //事件时间
                Object itemTime = value.get("item_time");
                if (Objects.isNull(deviceId)) {
                    //继续寻找车架号
                    Object vin = value.get("vin");
                    if (Objects.isNull(vin)) {
                        logger.error("数据源数据异常，无车架号");
                    } else {
                        odsSignal.setDeviceId(vin.toString());
                    }
                } else {
                    odsSignal.setDeviceId(deviceId.toString());
                }

                if (Objects.isNull(itemTime)) {
                    //继续寻找事件时间
                    Object tid = value.get("tid");
                    if (Objects.isNull(tid)) {
                        logger.error("数据源数据异常，事件时间");
                    } else {
                        odsSignal.setItemTime(tid.toString());
                    }
                } else {
                    odsSignal.setItemTime(itemTime.toString());
                }
                //事件时间 年月日
                SimpleDateFormat s = new SimpleDateFormat("yyyy-MM-dd");
                Date date = new Date();
                date.setTime(Long.valueOf(odsSignal.getItemTime()));
                odsSignal.setItemDate(s.format(date));

                //摄取时间
                Date iDate = new Date();
                odsSignal.setIngestionTime(String.valueOf(iDate.getTime()));
                odsSignal.setIngestionDate(s.format(iDate));
                //源信号
                odsSignal.setContent(JSONUtil.toJsonStr(value));
                logger.info("源数据：{}", JSONUtil.toJsonStr(odsSignal));
                return odsSignal;
            }
        }).returns(TypeInformation.of(OdsSignal.class));
        String schema = "struct<deviceId:string,itemTime:string,itemDate:string,ingestionTime:string,ingestionDate:string,content:string>";
        Properties writerProperties = new Properties();
        writerProperties.setProperty("orc.compress", "SNAPPY");
        OrcBulkWriterFactory<OdsSignal> writerFactory = new OrcBulkWriterFactory<OdsSignal>(new OdsSignalVectorizer(schema), writerProperties, new Configuration());
        //配置sink输出配置信息（这部分可按需修改，提取出公共部分作为baseTask内容的一部分）
        //配置文件前、后缀
        OutputFileConfig config = OutputFileConfig
                .builder()
                .withPartPrefix("ods")
                .withPartSuffix(".orc")
                .build();
        StreamingFileSink<OdsSignal> sink = StreamingFileSink
                //配置文件输出位置（可按需修改）
                .forBulkFormat(new Path(ConfigLoader.get("hdfsUri") + "/external/data/" + "odsTest"), writerFactory)
                //配置分桶器（可按需修改）
                .withBucketAssigner(new BasePathBucketAssigner<>())
                //无需修改
                .withRollingPolicy(OnCheckpointRollingPolicy.build())
                //无需修改
                .withOutputFileConfig(config)
                .build();
        streamOperator.addSink(sink);

        env.execute();
    }
}
