package cn.itcast.streaming.task;

import cn.itcast.streaming.entity.ItcastDataObj;
import cn.itcast.streaming.sink.SrcDataToHBaseOptimizerSink;
import cn.itcast.streaming.sink.SrcDetailToHBaseOptimizerSink;
import cn.itcast.streaming.utils.JsonParseUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;

/**
 * Author itcast
 * Date 2021/9/20 11:39
 *
 * Consumes vehicle JSON messages from a Kafka topic, parses each message into
 * an {@link ItcastDataObj}, and splits the stream by the {@code errorData}
 * field: records with an empty {@code errorData} are considered valid and are
 * written to HBase; records with a non-empty {@code errorData} are considered
 * erroneous. The HDFS/Hive sinks that previously persisted both streams are
 * currently disabled.
 */
public class KafkaSourceDataTask extends BaseTask {
    public static void main(String[] args) throws Exception {

        // Build the stream execution environment (common settings such as
        // checkpointing are presumably configured in BaseTask.getEnv — confirm there).
        StreamExecutionEnvironment env = getEnv(KafkaSourceDataTask.class.getSimpleName());

        // Source: raw vehicle JSON strings from the Kafka topic.
        DataStreamSource<String> source = getKafkaStream(
                env,
                "__vehicle_consumer_",
                SimpleStringSchema.class
        );
        // Debug output of the raw messages; consider replacing with a logger
        // (or removing) for production runs.
        source.printToErr();

        // Parse each JSON string into an ItcastDataObj.
        DataStream<ItcastDataObj> vehicleDataStream = source
                .map(JsonParseUtil::parseJsonToObject);

        // Valid records: parsing produced no error payload.
        DataStream<ItcastDataObj> srcDataStream = vehicleDataStream
                .filter(obj -> StringUtils.isEmpty(obj.getErrorData()));

        // Error records: parsing recorded a problem in errorData.
        // NOTE(review): no sink is attached to this stream, so error records are
        // effectively dropped (Flink only executes operators reachable from a
        // sink). Re-attach an HDFS/Hive sink if error data must be retained.
        DataStream<ItcastDataObj> errorDataStream = vehicleDataStream
                .filter(obj -> StringUtils.isNotEmpty(obj.getErrorData()));

        // Persist valid records to HBase.
        srcDataStream.addSink(new SrcDataToHBaseOptimizerSink("itcast_src"));

        // Run the job. Let failures propagate (instead of catching and calling
        // printStackTrace) so the cluster marks the job as failed and the real
        // cause is preserved.
        env.execute(KafkaSourceDataTask.class.getSimpleName());
    }
}
