package com.asiainfo.www.function;

import com.asiainfo.www.pojo.MapFile;
import com.asiainfo.www.pojo.StatFile;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;
import scala.Tuple4;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

/**
 * @author loso
 */
public class MapStatRichCoFlatMapFunction extends RichCoFlatMapFunction<MapFile, StatFile, Tuple4<String, MapFile, StatFile, StatFile>> {

    /** Pairing state per map file, keyed by the downstream (dn) file name. */
    private HashMap<String, MapPairStat> mapCache;
    /** Insertion timestamps for {@link #mapCache} entries, used for stale-entry eviction. */
    private HashMap<String, Long> mapSetTime;
    /** Downstream stat files cached on this task, keyed by destination file name. */
    private HashMap<String, StatFile> dnCache;
    /** Insertion timestamps for {@link #dnCache} entries, used for stale-entry eviction. */
    private HashMap<String, Long> statSetTime;

    /** Last time the map-side caches were swept for expired entries. */
    private long lastMapRefreshTime = 0;
    /**
     * Last time the stat-side caches were swept. Previously this was a local variable in
     * {@link #flatMap2} stuck at 0, so the sweep ran on every element and the map-side
     * timestamp field was updated by mistake.
     */
    private long lastStatRefreshTime = 0;

    /** Minimum interval between cache sweeps: 2 hours in milliseconds. */
    private static final long REFRESH_TIME = 7200000;
    /** Entries older than this are evicted: 24 hours in milliseconds. */
    private static final long CLEAR_TIME = 24 * 60 * 60 * 1000;

    /** Connection to the shared Redis instance that stores upstream (up) stat files. */
    Jedis jedis;

    /**
     * Initializes the Redis connection and the local caches. Using the Rich variant so that
     * this one-time setup runs before any elements arrive.
     *
     * @param parameters configuration passed in by the Flink runtime
     * @throws Exception if initialization fails
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        // TODO(review): Redis host is hard-coded; consider reading it from `parameters`.
        jedis = new Jedis("192.168.61.131");
        mapCache = new HashMap<>();
        mapSetTime = new HashMap<>();
        dnCache = new HashMap<>();
        statSetTime = new HashMap<>();
    }

    /**
     * Releases the Redis connection when the task shuts down. The original code never
     * closed it, leaking the connection on every task restart.
     *
     * @throws Exception if teardown fails
     */
    @Override
    public void close() throws Exception {
        if (jedis != null) {
            jedis.close();
        }
        super.close();
    }

    /**
     * Pairing state of a map file against the upstream/downstream province stat files.
     */
    public enum MapPairStat {
        /** Not yet matched to any stat file. */
        UNMATCH,
        /** Matched the upstream (up) province stat file only. */
        MATCH_UP_STAT,
        /** Matched the downstream (dn) province stat file only. */
        MATCH_DN_STAT,
        /** Matched both upstream and downstream province stat files. */
        MATCH
    }

    /**
     * Handles a map file: advances its pairing state, emits a {@code "MATCH"} tuple with both
     * stat files once fully paired, or an {@code "UNMATCH"} retry tuple otherwise. Periodically
     * evicts entries older than {@link #CLEAR_TIME}.
     *
     * @param mapFile   the incoming map file
     * @param collector downstream collector
     * @throws Exception propagated from Redis access
     */
    @Override
    public void flatMap1(MapFile mapFile, Collector<Tuple4<String, MapFile, StatFile, StatFile>> collector) throws Exception {
        long time = System.currentTimeMillis();
        String dnName = mapFile.getDnName();

        // Look up the current pairing state; first sight of this file starts at UNMATCH.
        MapPairStat thisStat = mapCache.get(dnName);
        if (thisStat == null) {
            thisStat = MapPairStat.UNMATCH;
            mapCache.put(dnName, thisStat);
            mapSetTime.put(dnName, time);
        }

        MapPairStat updateStat = pairStatFile(mapFile, thisStat);
        if (updateStat == MapPairStat.MATCH && thisStat != updateStat) {
            // Fully paired: fetch both stat files and emit the match.
            StatFile upStatFile = getUpStat(mapFile.getUpName());
            StatFile dnStatFile = getDnStat(dnName);
            collector.collect(new Tuple4<>("MATCH", mapFile, upStatFile, dnStatFile));
            // Clean up both stat caches...
            cleanDnStat(dnName);
            cleanUpStat(mapFile.getUpName());
            // ...and drop the pairing entry too. The original kept it, leaving stale
            // partial state around until the 24h sweep.
            mapCache.remove(dnName);
            mapSetTime.remove(dnName);
        } else {
            if (thisStat != updateStat) {
                // Partial progress: remember the new state for the next attempt.
                mapCache.put(dnName, updateStat);
            }
            // Not fully paired yet — emit the map file for retry.
            collector.collect(new Tuple4<>("UNMATCH", mapFile, null, null));
        }

        if (time - lastMapRefreshTime > REFRESH_TIME) {
            lastMapRefreshTime = time;
            evictExpired(mapSetTime, mapCache, time);
        }
    }

    /**
     * Handles a downstream stat file: caches it locally so later map files can pair against
     * it. Periodically evicts entries older than {@link #CLEAR_TIME}; after two sweep
     * intervals an unpaired entry is guaranteed to be gone.
     *
     * @param statFile  the incoming downstream stat file
     * @param collector downstream collector (unused; this side only feeds the cache)
     * @throws Exception never thrown here, required by the interface
     */
    @Override
    public void flatMap2(StatFile statFile, Collector<Tuple4<String, MapFile, StatFile, StatFile>> collector) throws Exception {
        long time = System.currentTimeMillis();
        // Bug fix: the throttle timestamp is now a field; the original used a local
        // initialized to 0, so the sweep ran on every single element.
        if (time - lastStatRefreshTime > REFRESH_TIME) {
            lastStatRefreshTime = time;
            evictExpired(statSetTime, dnCache, time);
        }
        dnCache.put(statFile.getDestFileName(), statFile);
        statSetTime.put(statFile.getDestFileName(), time);
    }

    /**
     * Removes every entry older than {@link #CLEAR_TIME} from the timestamp map and the
     * cache it guards. Uses an explicit iterator with {@code remove()} — the original
     * removed keys while streaming over {@code keySet()}, which risks a
     * {@link java.util.ConcurrentModificationException}.
     *
     * @param setTimes timestamp map (key -> insertion time)
     * @param cache    cache keyed identically to {@code setTimes}
     * @param now      current wall-clock time in milliseconds
     */
    private void evictExpired(HashMap<String, Long> setTimes, HashMap<String, ?> cache, long now) {
        Iterator<Map.Entry<String, Long>> it = setTimes.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, Long> entry = it.next();
            if (entry.getValue() < now - CLEAR_TIME) {
                it.remove();
                cache.remove(entry.getKey());
            }
        }
    }

    /**
     * Computes the next pairing state for a map file given its current state, probing the
     * local dn cache and Redis for the matching stat files.
     *
     * @param mapFile  the map file being paired
     * @param pairStat the current pairing state
     * @return the advanced state, or {@code pairStat} unchanged if nothing new matched
     */
    private MapPairStat pairStatFile(MapFile mapFile, MapPairStat pairStat) {
        switch (pairStat) {
            case UNMATCH:
                if (pairDnStat(mapFile.getDnName())) {
                    // dn side matched — recurse to immediately try the up side too.
                    return pairStatFile(mapFile, MapPairStat.MATCH_DN_STAT);
                } else {
                    if (pairUpStat(mapFile.getUpName())) {
                        return MapPairStat.MATCH_UP_STAT;
                    }
                }
                break;
            case MATCH_UP_STAT:
                if (pairDnStat(mapFile.getDnName())) {
                    return MapPairStat.MATCH;
                }
                break;
            case MATCH_DN_STAT:
                if (pairUpStat(mapFile.getUpName())) {
                    return MapPairStat.MATCH;
                }
                break;
            default:
                break;
        }
        return pairStat;
    }

    /** @return whether the downstream stat file for {@code key} is in the local cache. */
    private boolean pairDnStat(String key) {
        return dnCache.containsKey(key);
    }

    /** @return whether the upstream stat file for {@code key} exists in Redis. */
    private boolean pairUpStat(String key) {
        return jedis.exists(key);
    }

    /** Removes the downstream stat file for {@code key} from the local caches. */
    private void cleanDnStat(String key) {
        dnCache.remove(key);
        statSetTime.remove(key);
    }

    /** Deletes the upstream stat file for {@code key} from Redis. */
    private void cleanUpStat(String key) {
        jedis.del(key);
    }

    /** @return the cached downstream stat file for {@code key}, or {@code null} if absent. */
    private StatFile getDnStat(String key) {
        return dnCache.get(key);
    }

    /**
     * Fetches and parses the upstream stat file for {@code key} from Redis.
     *
     * @param key Redis key of the serialized stat file (comma-separated fields)
     * @return the parsed stat file
     */
    private StatFile getUpStat(String key) {
        String statString = jedis.get(key);
        // NOTE(review): if the key expires between the exists() check in pairUpStat and
        // this get(), statString is null and split() throws NPE — consider handling
        // that race explicitly.
        String[] statStrings = statString.split(",");
        return new StatFile(statStrings);
    }
}
