package com.ry.flink.source;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisException;
import java.util.HashMap;
import java.util.Map;

/**
 * Flink source that periodically reads the {@code dictionary_areas} hash from Redis
 * and emits an inverted province → area mapping.
 *
 * <p>Redis layout (hash {@code dictionary_areas}); the data changes over time:
 * <pre>
 *   field        value
 *   AREA_CHINA   TW,HK,BJ,SH,SZ
 *   AREA_US      US,SJ,AK
 * </pre>
 *
 * <p>Example:
 * <pre>
 *   HGETALL dictionary_areas
 *   1) "AREA_US"      2) "US"
 *   3) "AREA_CHINA"   4) "TW,HK,BJ,SH,SZ"
 * </pre>
 *
 * Each emitted record maps a single province code to its area key,
 * e.g. {@code "TW" -> "AREA_CHINA"}.
 */
public class RedisSource implements SourceFunction<HashMap<String, String>> {

    /** Interval between Redis polls, in milliseconds. */
    private static final long REFRESH_INTERVAL_MS = 200_000L;

    private final String host;
    private final int port;

    // transient: SourceFunction is Serializable, but a live connection must not
    // be serialized with the job graph — it is (re)created inside run().
    private transient Jedis jedis;

    // volatile: cancel() is invoked from a different thread than run(), so the
    // flag write must be visible to the polling loop.
    private volatile boolean isRunning = true;

    /** Creates a source against the default Redis location ({@code hadoop5:6379}). */
    public RedisSource() {
        this("hadoop5", 6379);
    }

    /**
     * Creates a source against an arbitrary Redis instance.
     *
     * @param host Redis host name
     * @param port Redis port
     */
    public RedisSource(String host, int port) {
        this.host = host;
        this.port = port;
    }

    /**
     * Polls the {@code dictionary_areas} hash every {@link #REFRESH_INTERVAL_MS}
     * milliseconds, inverts each comma-separated province list into a
     * province → area map, and emits it downstream.
     *
     * @param sourceContext Flink context used to emit records
     */
    @Override
    public void run(SourceContext<HashMap<String, String>> sourceContext) {
        this.jedis = new Jedis(host, port);
        try {
            while (isRunning) {
                try {
                    // Build a fresh map each round: downstream operators may keep a
                    // reference to the emitted record, so re-using and clearing one
                    // shared map would mutate records that were already collected.
                    HashMap<String, String> resultMap = new HashMap<>();
                    // Fetch all area entries, e.g. AREA_CHINA -> "TW,HK,BJ,SH,SZ".
                    Map<String, String> dictionaryAreas = jedis.hgetAll("dictionary_areas");
                    for (Map.Entry<String, String> entry : dictionaryAreas.entrySet()) {
                        String areaKey = entry.getKey();
                        // Comma-separated province codes, e.g. TW,HK,BJ,SH,SZ
                        String provinces = entry.getValue();
                        for (String province : provinces.split(",")) {
                            resultMap.put(province, areaKey);
                        }
                    }
                    // Skip empty snapshots (hash missing or empty).
                    if (!resultMap.isEmpty()) {
                        sourceContext.collect(resultMap);
                    }
                    // Wait before refreshing the area data again.
                    Thread.sleep(REFRESH_INTERVAL_MS);
                } catch (InterruptedException e) {
                    // Flink interrupts the source thread on cancellation: restore
                    // the interrupt flag and leave the loop instead of swallowing it.
                    Thread.currentThread().interrupt();
                    break;
                } catch (JedisException e) {
                    // Transient Redis failure: drop the broken connection and
                    // reconnect on the next iteration rather than retrying on it.
                    e.printStackTrace();
                    closeQuietly();
                    this.jedis = new Jedis(host, port);
                }
            }
        } finally {
            // Always release the connection, whatever ended the loop.
            closeQuietly();
        }
    }

    /** Stops the polling loop; also closes the connection to unblock a pending Redis call. */
    @Override
    public void cancel() {
        isRunning = false;
        closeQuietly();
    }

    /** Closes the current Jedis connection, ignoring secondary errors during cleanup. */
    private void closeQuietly() {
        if (jedis != null) {
            try {
                jedis.close();
            } catch (JedisException ignored) {
                // best-effort cleanup only
            }
        }
    }
}