package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.estimateSoh.EstimateSohEvDws;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * soh估算任务
 *
 * @author GW00283474
 */
public class EstimateSohEvDwsHiveTask extends BaseTask {

    /**
     * Entry point: builds and executes the streaming job.
     *
     * <p>Pipeline: Kafka (topic {@code kafka.sohyuce.topic}) → unwrap the JSON
     * envelope {@code body} → {@code source} → {@code hive_data} → bind to
     * {@link EstimateSohEvDws} → tab-separated line → HDFS
     * ({@code dws/estimate_soh_yuce_dws}).
     *
     * @param args command-line arguments forwarded to {@link ConfigLoader#init}
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        ConfigLoader.init(args);
        String jobName = "estimateSohDwsHiveTask";
        StreamExecutionEnvironment env = getEnv(jobName);
        env.setParallelism(1);
        // Tolerate up to 10 failures per 15-minute window, restarting after 10s each.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.minutes(15), Time.seconds(10)));
        String consumerTopic = ConfigLoader.get("kafka.sohyuce.topic");
        FlinkKafkaConsumer<String> kafkaConsumer = decorateKafkaConsumer("consumer-estimate-soh-yc", consumerTopic);

        // Unwrap and convert in a single operator. The previous version chained four
        // map operators, each re-serializing the JSONObject to a String (toString())
        // and re-parsing it with JSONUtil.parseObj — same result, wasted work and
        // four returns(Types.GENERIC(...)) hints.
        // NOTE(review): assumes every message carries body.source.hive_data; a
        // malformed message throws NPE and triggers the restart strategy, matching
        // the original behavior — confirm this is intended.
        SingleOutputStreamOperator<String> rows = env.addSource(kafkaConsumer).name("kafkaSource")
                .map((MapFunction<String, String>) raw -> {
                    JSONObject message = JSONUtil.parseObj(raw);
                    JSONObject source = JSONUtil.parseObj(message.get("body").toString());
                    JSONObject hiveData = JSONUtil.parseObj(
                            JSONUtil.parseObj(source.get("source").toString()).get("hive_data").toString());
                    EstimateSohEvDws record = BeanUtil.toBean(hiveData, EstimateSohEvDws.class);
                    // Render as a tab-separated row for the Hive external table.
                    return record.getPropertyInfo("\t");
                })
                .returns(Types.STRING)
                .name("parseAndFormat");

        StreamingFileSink<String> hdfsSink = hdfsSink("estimate_soh_yuce_dws", ".txt", "dws/estimate_soh_yuce_dws");
        rows.addSink(hdfsSink).name("hdfsSink");
        env.execute(jobName);
    }
}