package cn.gwm.flink.streaming.ods;

import cn.gwm.flink.streaming.beans.BeanSource;
import cn.gwm.flink.streaming.client.RedisClient;
import cn.gwm.flink.streaming.constant.BaseFields;
import cn.gwm.flink.streaming.function.map.OriginalAllDataMapFunction;
import cn.gwm.flink.streaming.function.process.OdsConnectDataProcessFunction;
import cn.gwm.flink.streaming.ods.model.StandardFieldConf;
import cn.gwm.flink.streaming.ods.model.StandardModelConf;
import cn.gwm.flink.streaming.sink.hbase.HbaseBean;
import cn.gwm.flink.streaming.sink.hbase.HbaseSinkClient;
import cn.gwm.flink.streaming.sink.hbase.HbaseSinkFunction;
import cn.gwm.flink.streaming.sink.kafka.FlinkKafkaUtil;
import cn.gwm.flink.streaming.task.BaseTask;
import cn.gwm.utils.ConfigLoader;
import cn.gwm.utils.StringUtil;
import cn.hutool.db.Db;
import cn.hutool.db.ds.DSFactory;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Builds the ODS-layer stream pipeline for one standard vehicle model: expands the model's
 * raw side-output records into standardized fields and, per configuration flags, wires the
 * resulting streams to Kafka, Hive (ORC/text) and Redis sinks.
 *
 * @author GW00256253
 */
public class OdsModelHandle implements Serializable {

    private static final Logger logger = LoggerFactory.getLogger(OdsModelHandle.class);

    /** Base output path prefix for ODS-layer Hive (ORC/text) sinks. */
    private static final String HIVE_PATH_ORC_G = "ods/";

    /** TTL in seconds for the per-vehicle "last GPS" Redis entry (3 days). */
    private static final int REDIS_GPS_TTL_SECONDS = 259200;

    /** Redis key prefix for the latest GPS position per vehicle. */
    private static final String REDIS_KEY_LAST_GPS = "l_gps:";

    /** Static model configuration (topic names, flags, parallelism, paths). */
    private final StandardModelConf conf;

    // ODS-layer ingested field names (semaphores), ordered by numeric config id.
    // Package-private visibility kept for compatibility with existing callers.
    List<String> list;

    // Lookup of sub-code -> semaphore built from the same configuration rows.
    Map<String,String> codeMap;

    public OdsModelHandle(StandardModelConf conf){
        this.conf = conf;
        initList();
    }

    /**
     * Loads the ODS field configuration from MySQL table {@code standard_field_conf},
     * keeping only active ODS rows matching this model's field version/vehicle type
     * (or the shared "BASE" rows), ordered by numeric id.
     *
     * @throws RuntimeException wrapping the {@link SQLException} if the read fails
     */
    private void initList() {
        this.list = new ArrayList<>();
        this.codeMap = new ConcurrentHashMap<>(16);
        try {
            Db.use(DSFactory.get(ConfigLoader.get("mysql.group.db.config")))
                    .find(StandardFieldConf.getEntityWhere(), StandardFieldConf.class).stream()
                    .filter(data -> ModelHandle.BIZ_TYPE_ODS.equals(data.getBizType())
                            && Integer.parseInt(data.getState()) > 0
                            && ((conf.getFieldVersion().equals(data.getVersion())
                                && conf.getFieldVehicleType().equals(data.getVehicletype()))
                            || "BASE".equals(data.getVehicletype())))
                    // Overflow-safe numeric ordering (was parseInt(o1) - parseInt(o2)).
                    .sorted(Comparator.comparingInt((StandardFieldConf d) -> Integer.parseInt(d.getId())))
                    .forEach(data -> {
                        list.add(data.getSemaphore());
                        codeMap.put(data.getSubCode(), data.getSemaphore());
                    });
            logger.info("读取mysql 表standard_field_conf 成功");
        } catch (SQLException e) {
            // Log the cause so the failure is diagnosable from the log, then fail fast.
            logger.error("读取mysql 表standard_field_conf 失败", e);
            throw new RuntimeException(e);
        }
    }

    public List<String> getList(){
        return list;
    }

    /**
     * Resolves the operator parallelism: the configured value when present and
     * non-empty, otherwise the supplied fallback (the upstream operator's parallelism).
     *
     * @param fallback parallelism to use when none is configured
     * @return effective parallelism for this model's operators
     */
    private int resolveParallelism(int fallback) {
        String p = conf.getParallelism();
        return p != null && !p.isEmpty() ? Integer.parseInt(p) : fallback;
    }

    /**
     * Builds the full ODS pipeline for this model: reads the model's side output,
     * expands raw records into standard fields, splits GPS/request data onto side
     * outputs, and wires Kafka / Hive / Redis sinks according to the config flags.
     *
     * @param process   upstream operator carrying per-model side outputs
     * @param canTagMap model name -> side-output tag for raw CAN records
     * @return the main CAN stream after GPS merge, for further downstream use
     */
    public SingleOutputStreamOperator<JSONObject> connectStream(SingleOutputStreamOperator<String> process, Map<String, OutputTag<String>> canTagMap){
        OutputTag<JSONObject> gpsTag = new OutputTag<JSONObject>("gpsTag"){};
        OutputTag<JSONObject> gpsHbaseTag = new OutputTag<JSONObject>("gpsHbaseTag"){};
        OutputTag<JSONObject> requestTag = new OutputTag<JSONObject>("requestTag"){};
        // Read this model's side output, filtering long-condition and domestic GPS data.
        // NOTE(review): the first .name("flatMap"...) is immediately overridden by
        // .name("coProcess"...) on the same operator; kept as-is to preserve behavior.
        SingleOutputStreamOperator<JSONObject> stream = process.getSideOutput(canTagMap.get(conf.getModelName()))
                .flatMap(new OriginalAllDataMapFunction(conf.getVehicleType(), list, codeMap))
                .name("flatMap"+conf.getVehicleType()+conf.getTopicName())
                .returns(JSONObject.class).name("coProcess"+conf.getVehicleType()+conf.getTopicName()).uid("ods_coProcess_id"+conf.getVehicleType()+conf.getTopicName());

        // Effective parallelism, computed once instead of per-sink (was seven copies
        // of the same ternary expression).
        final int parallelism = resolveParallelism(stream.getParallelism());

        // GPS goes to side outputs; GPS data is merged back into the main CAN stream.
        SingleOutputStreamOperator<JSONObject> canStream = stream.keyBy(item->item.getStr(BaseFields.vin))
        .process(new OdsConnectDataProcessFunction(gpsTag,gpsHbaseTag,requestTag)).name("process"+conf.getVehicleType())
        .uid("ods_process_id"+conf.getVehicleType()+conf.getTopicName())
        .setParallelism(parallelism);
        if("1".equals(conf.getKafkaFlag())){
            canStream.addSink(FlinkKafkaUtil.toKafka(conf.getTopicName())).name("toKafkaDefault"+conf.getVehicleType()+conf.getTopicName())
                    .uid("ods_toKafkaDefault_id"+conf.getVehicleType()+conf.getTopicName())
                    .setParallelism(parallelism);
        }
        if("1".equals(conf.getHiveFlag())){
            canStream.addSink(BaseTask.getOrcTxtSink(conf.getCanPrefix(),HIVE_PATH_ORC_G+conf.getHiveCanPath(), list))
                    .name("toCanHive"+conf.getVehicleType()+conf.getTopicName()).uid("ods_toCanHive_id"+conf.getVehicleType()+conf.getTopicName())
                    .setParallelism(parallelism);
            canStream.getSideOutput(requestTag).addSink(BaseTask.getOrcTxtSink(conf.getRequestPrefix(),HIVE_PATH_ORC_G+conf.getHiveRequestPath(),
                            BeanSource.SourceEnum.remoteRequest)).name("toRequestHive"+conf.getVehicleType()+conf.getTopicName())
                    .uid("ods_toRequestHive_id"+conf.getVehicleType()+conf.getTopicName())
                    .setParallelism(parallelism);
        }
        if("1".equals(conf.getHbaseFlag())){
            DataStream<JSONObject> gpsStream = canStream.getSideOutput(gpsTag);
            gpsStream.addSink(BaseTask.getOrcTxtSink(conf.getGpsPrefix(),HIVE_PATH_ORC_G+conf.getHiveGpsPath(),
                            BeanSource.SourceEnum.gpsIntranet)).name("toGPSHive"+conf.getVehicleType()+conf.getTopicName())
                    .uid("ods_toGpsHive_id"+conf.getVehicleType()+conf.getTopicName())
                    .setParallelism(parallelism);
            DataStream<JSONObject> gpsHbaseStream = canStream.getSideOutput(gpsHbaseTag);
            // Project tid/longitude/latitude/vin and cache the latest GPS fix in Redis
            // with a 3-day TTL, keyed by vehicle type and VIN.
            gpsHbaseStream.flatMap(new FlatMapFunction<JSONObject, Object>() {
                @Override
                public void flatMap(JSONObject entries, Collector<Object> collector) throws Exception {
                    JSONObject object = JSONUtil.createObj();
                    object.set(BaseFields.tid,entries.getStr(BaseFields.tid));
                    object.set(BaseFields.LONGITUDE, entries.getStr(BaseFields.LONGITUDE));
                    object.set(BaseFields.LATITUDE, entries.getStr(BaseFields.LATITUDE));
                    object.set(BaseFields.vin, entries.getStr(BaseFields.vin));
                    collector.collect(object);
                    RedisClient.setData(getKey(entries),object.toString(),REDIS_GPS_TTL_SECONDS);
                }
                private String getKey(JSONObject entries){
                    return REDIS_KEY_LAST_GPS+conf.getVehicleType()+":"+entries.getStr(BaseFields.vin);
                }
            }).name("toRedis"+conf.getVehicleType()+conf.getTopicName()).uid("ods_toRedis_id"+conf.getVehicleType()+conf.getTopicName())
                    .setParallelism(parallelism);
        }
        return canStream;
    }

    /**
     * Simplified variant: expands the model's side output, keeps only records whose
     * command equals {@link BaseFields#COMMAND_STATUS}, and optionally sinks them
     * to Kafka. No keying, GPS splitting, Hive or Redis output.
     *
     * @param process   upstream operator carrying per-model side outputs
     * @param canTagMap model name -> side-output tag for raw CAN records
     * @return the filtered status-command stream
     */
    public SingleOutputStreamOperator<JSONObject> connectStream2(SingleOutputStreamOperator<String> process, Map<String, OutputTag<String>> canTagMap){
        // Read this model's side output, filtering long-condition and domestic GPS data.
        SingleOutputStreamOperator<JSONObject> stream = process.getSideOutput(canTagMap.get(conf.getModelName()))
                .flatMap(new OriginalAllDataMapFunction(conf.getVehicleType(), list, codeMap))
                .name("flatMap"+conf.getVehicleType()+conf.getTopicName())
                .returns(JSONObject.class).name("coProcess"+conf.getVehicleType()+conf.getTopicName()).uid("ods_coProcess_id"+conf.getVehicleType()+conf.getTopicName());

        // Keep only status-command records on the main stream.
        SingleOutputStreamOperator<JSONObject> canStream = stream.flatMap(new FlatMapFunction<JSONObject, JSONObject>() {
            @Override
            public void flatMap(JSONObject object, Collector<JSONObject> collector) throws Exception {
                if(BaseFields.COMMAND_STATUS.equals(object.getStr(BaseFields.command))){
                    collector.collect(object);
                }
            }
        });
        if("1".equals(conf.getKafkaFlag())){
            canStream.addSink(FlinkKafkaUtil.toKafka(conf.getTopicName())).name("toKafkaDefault"+conf.getVehicleType()+conf.getTopicName())
                    .uid("ods_toKafkaDefault_id"+conf.getVehicleType()+conf.getTopicName())
                    .setParallelism(resolveParallelism(stream.getParallelism()));
        }
        return canStream;
    }
}
