package realtime.topology;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * Created by luoqifei on 17-5-24.
 */
/**
 * Storm bolt that persists per-point field values into Redis, using a sorted
 * set as a time index ({@code idx::fg::compoundid}) and a hash per minute
 * slot ({@code dat::...::slot}) written through a Jedis pipeline.
 *
 * <p>Not thread-safe; Storm guarantees a single executor thread per bolt
 * instance, so the unsynchronized {@code jedis}/{@code pipeline}/{@code counter}
 * state is safe under that model only.
 */
public class SaveByPointsBolt extends BaseRichBolt {
    private Pipeline pipeline;
    private Jedis jedis;
    private OutputCollector _collector;
    private final static String nullStr = "";
    // Wall-clock start of this bolt instance, used only for progress logging.
    private Date startTime = new Date();

    // --- test fixtures: synthetic field payload and a fixed base timestamp ---
    private static String fieldSample = "23.7612%s%s,";
    private static long timestamp = 1495019278000L;
    public static int counter = 0;
    public static long retention = 600000; // 10 minutes, in milliseconds

    // 10 GiB. Must be computed in long arithmetic: the previous expression
    // (1204*1204*1024*10) overflowed int before widening, and "1204" was a
    // typo for 1024.
    public static long maxMemoryUse = 10L * 1024 * 1024 * 1024;

    public SaveByPointsBolt() {
    }

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        _collector = collector;
        // Integer.MAX_VALUE effectively disables the client socket timeout.
        jedis = new Jedis("127.0.0.1", 6379, Integer.MAX_VALUE);
        pipeline = jedis.pipelined();
    }

    @Override
    public void cleanup() {
        // Release the Redis connection on topology shutdown; the original
        // code leaked it. (Storm does not guarantee cleanup() on kill -9,
        // but it should still be attempted.)
        if (jedis != null) {
            jedis.close();
        }
    }

    /**
     * Truncates an epoch-millisecond timestamp to the start of its minute.
     *
     * @param unixTime epoch time in milliseconds
     * @return the minute boundary in epoch <em>seconds</em>
     */
    private long getTimeMinute(long unixTime) {
        // Plain arithmetic replaces the original SimpleDateFormat
        // format/parse round trip: every UTC offset is a whole number of
        // minutes, so truncating to the minute is timezone-independent, and
        // the swallowed-ParseException/return-0 path disappears entirely.
        return Math.floorDiv(unixTime, 60000L) * 60L;
    }

    /** Builds the per-minute-slot hash key for a compound id. */
    private String hashKey(String compoundId, long timeSlot) {
        // NOTE(review): this key has two consecutive connectors and omits
        // fgId, unlike the "dat::fg::compoundid::slot" layout described on
        // saveToRedisByPoint. Kept byte-compatible with the original; verify
        // against whatever reads these keys before changing it.
        return Constants.H_KEY_PREFIX + compoundId + Constants.REALTIME_DATA_CONNECTOR
                + Constants.REALTIME_DATA_CONNECTOR + timeSlot;
    }

    /** Builds the time-index sorted-set key for an fg/compound pair. */
    private String indexKey(String fgId, String compoundId) {
        return Constants.Z_KEY_PREFIX + fgId + Constants.REALTIME_DATA_CONNECTOR + compoundId;
    }

    /**
     * Extracts the value of {@code field} from a Redis INFO section
     * ("field:value" lines), or returns {@code null} if absent.
     */
    private static String infoField(String info, String field) {
        String prefix = field + ":";
        // Split on \r?\n — Redis INFO replies use CRLF line endings, which
        // the original index-based split left as trailing \r on every value.
        for (String line : info.split("\r?\n")) {
            if (line.startsWith(prefix)) {
                return line.substring(prefix.length()).trim();
            }
        }
        return null;
    }

    /**
     * solution one: zset idx::fg::compoundid::timeslotBoundary,
     * hmset dat::fg::compoundid::timeslotBoundary — jedis hmset, pipelined.
     *
     * @param times       point timestamp, epoch milliseconds
     * @param fieldValues field name -> value payload for this point
     * @param fgId        field-group id (first key component)
     * @param compoundId  compound/asset id (second key component)
     * @return true when the point was queued and flushed to Redis
     */
    public boolean saveToRedisByPoint(long times, Map<String, String> fieldValues, String fgId, String compoundId) {
        // asset = fgId:compoundId
        long time = getTimeMinute(times);
        String hKey = hashKey(compoundId, time);
        String zKey = indexKey(fgId, compoundId);

        // Guard against unbounded growth: once either structure exceeds 10M
        // entries, register the slot hash for retention-based cleanup.
        long hashLength = jedis.hlen(hKey);
        long indexLength = jedis.zcard(zKey);
        if (hashLength > 10000000 || indexLength > 10000000) {
            jedis.hset("cleanlist::rep1", hKey, (time + retention) + "");
        }

        // Parse memory stats by field name rather than hard-coded line
        // indices (arg[1]/arg[14]), which break across Redis versions.
        String memoryInfo = jedis.info("Memory");
        String usedMemory = infoField(memoryInfo, "used_memory");
        String fragRatio = infoField(memoryInfo, "mem_fragmentation_ratio");
        long memoryUse = usedMemory == null ? 0L : Long.parseLong(usedMemory);
        double ratio = fragRatio == null ? 1.0 : Double.parseDouble(fragRatio);
        if (memoryUse > maxMemoryUse) {
            // Threshold check intentionally disabled in the original:
            // //System.out.println("the redis serser max memory use is more than 10G.");
            // //return false;
        }
        if (ratio < 1) {
            // Likewise disabled: fragmentation ratio < 1 means Redis is swapping.
            // //return false;
        }

        // Index entry: score and member are both the raw point timestamp.
        pipeline.zadd(zKey, times, nullStr + times);

        // Hash fields are suffixed with the raw point timestamp so multiple
        // points within the same minute slot do not collide.
        Map<String, String> fields = new HashMap<String, String>();
        for (Map.Entry<String, String> entry : fieldValues.entrySet()) {
            fields.put(entry.getKey() + Constants.REALTIME_DATA_CONNECTOR + times, entry.getValue());
        }
        pipeline.hmset(hKey, fields);
        pipeline.sync();
        return true;
    }

    /** @return true when {@code timeStamp} is more than 10 minutes in the past. */
    private boolean isExpired(long timeStamp) {
        return (System.currentTimeMillis() - timeStamp) > 600000;
    }

    @Override
    public void execute(Tuple input) {
        try {
            // getInteger returns a boxed Integer; guard the null case the
            // original auto-unboxed into an NPE (caught as a generic fail).
            Integer seq = input.getInteger(0);
            if (seq == null) {
                _collector.fail(input);
                return;
            }
            int str = seq;
            if (str % 10000 == 0) {
                System.out.println("save record num is :" + str + ", start time is " + startTime + ",end time is" + new Date());
            }
            int fieldNum = 47;
            counter++;
            isExpired(timestamp); // do nothing, exercised for test only
            HashMap<String, String> map = new HashMap<String, String>();
            for (int i = 0; i < fieldNum; i++) {
                map.put("fieldName_" + i, String.format(fieldSample, String.valueOf(i + 1), String.valueOf(i + 1)));
            }
            // 5L avoids int overflow in counter*5 on very long runs.
            if (saveToRedisByPoint(timestamp + 5L * counter, map, "fgid1", "assetid1")) {
                _collector.ack(input);
            } else {
                _collector.fail(input);
            }

        } catch (Exception e) {
            e.printStackTrace();
            _collector.fail(input);
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // Terminal bolt: emits nothing downstream.
    }
}
