package realtime.topology;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;


/**
 * Created by luoqifei on 17-5-3.
 */
/**
 * Storm bolt that persists incoming (synthetic, for now) field/value maps into
 * Redis using a pipelined Jedis connection.
 *
 * <p>Per tuple it fabricates 47 field values from {@code fieldSample}, then
 * writes one ZSET entry (time index) plus one HSET entry per field via
 * {@link #saveToRedis2}. Tuples are acked on success and failed on any
 * exception.
 *
 * <p>Thread-safety: a Storm bolt instance is driven by a single executor
 * thread, so the unsynchronized {@code counter}/{@code startTime} mutation is
 * safe under the default one-executor configuration.
 */
public class RedisBolt extends BaseRichBolt {
    private Pipeline pipeline;
    private Jedis jedis;
    private OutputCollector _collector;
    private final static String nullStr = "";
    // Wall-clock start of the current measurement window; reset on tuple #1.
    private Date startTime = new Date();

    // --- test fixtures: synthetic payload template and fixed base timestamp ---
    private static String fieldSample = "23.7612%s%s,";
    private static long timestamp = 1495019278000L;
    public static int counter = 0;

    public RedisBolt() {
    }

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        _collector = collector;
        // NOTE(review): host/port are hard-coded and the socket timeout is
        // effectively infinite — consider wiring these through stormConf.
        jedis = new Jedis("127.0.0.1", 6379, Integer.MAX_VALUE);
        pipeline = jedis.pipelined();
    }

    /**
     * Truncates an epoch-millisecond timestamp to the start of its minute and
     * returns the result in epoch <em>seconds</em>.
     *
     * <p>Plain arithmetic replaces the previous {@code SimpleDateFormat}
     * format-then-parse round trip: it is allocation-free, thread-safe, and
     * cannot throw. (Equivalent to the old code for every time zone whose UTC
     * offset is a whole number of minutes, i.e. all modern zones.)
     *
     * @param unixTime epoch milliseconds
     * @return epoch seconds, rounded down to the minute boundary
     */
    private long getTimeMinute(long unixTime) {
        return unixTime / 60000L * 60L;
    }

    /**
     * Writes one record into Redis through the shared pipeline.
     *
     * <p>Key layout (asset = fgId:compoundId):
     * <ul>
     *   <li>ZSET {@code Z_KEY_PREFIX + fgId + "::" + compoundId}: score and
     *       member are the raw millisecond timestamp (time index).</li>
     *   <li>HASH {@code H_KEY_PREFIX + compoundId + "::" + field + "::" +
     *       minuteBucket}: field {@code fgId + "::" + times} → value.</li>
     * </ul>
     *
     * @param times       record timestamp in epoch milliseconds
     * @param fieldValues field name → value map to persist
     * @param fgId        field-group id, part of the hash field name
     * @param compoundId  asset/compound id, part of every key
     * @return always {@code true} (errors surface as exceptions to the caller)
     */
    public boolean saveToRedis2(long times, Map<String, String> fieldValues, String fgId, String compoundId) {
        // Minute bucket (epoch seconds) groups all samples of one minute
        // into a single hash key.
        long minuteBucket = getTimeMinute(times);
        pipeline.zadd(Constants.Z_KEY_PREFIX + fgId + Constants.REALTIME_DATA_CONNECTOR + compoundId,
                times, nullStr + times);
        // entrySet() avoids a second hash lookup per field vs. keySet()+get().
        for (Map.Entry<String, String> entry : fieldValues.entrySet()) {
            pipeline.hset(Constants.H_KEY_PREFIX + compoundId + Constants.REALTIME_DATA_CONNECTOR + entry.getKey() +
                    Constants.REALTIME_DATA_CONNECTOR + minuteBucket, fgId +
                    Constants.REALTIME_DATA_CONNECTOR + times, entry.getValue());
        }
        // Flush the whole batch in one network round trip.
        pipeline.sync();
        return true;
    }

    /**
     * Returns whether {@code timeStamp} (epoch millis) lies more than ten
     * minutes in the past.
     */
    private boolean isExpired(long timeStamp) {
        return (System.currentTimeMillis() - timeStamp) > 600000L;
    }

    /**
     * Generates a synthetic 47-field record per tuple and persists it; acks on
     * success, fails the tuple on any exception so Storm can replay it.
     * Prints simple throughput checkpoints every 1000 tuples.
     */
    @Override
    public void execute(Tuple input) {
        try {
            int seq = input.getInteger(0);
            if (seq == 1) {
                startTime = new Date();
            }
            if (seq % 1000 == 0) {
                System.out.println("save record num is :" + seq + ", start time is " + startTime + ",end time is" + new Date());
            }
            int fieldNum = 47;
            counter++;
            // Diamond operator instead of the raw-typed `new HashMap()`.
            Map<String, String> map = new HashMap<>(fieldNum * 2);
            for (int i = 0; i < fieldNum; i++) {
                map.put("fieldName_" + i, String.format(fieldSample, String.valueOf(i + 1), String.valueOf(i + 1)));
            }
            // Advance the fixed base timestamp by 5 ms per processed tuple.
            saveToRedis2(timestamp + counter * 5, map, "fgid1", "assetid1");
            _collector.ack(input);
        } catch (Exception e) {
            // NOTE(review): prefer a logger over printStackTrace in production.
            e.printStackTrace();
            _collector.fail(input);
        }
    }

    @Override
    public void cleanup() {
        // Release the Redis connection on topology shutdown; the original
        // leaked it (cleanup() was never overridden).
        if (jedis != null) {
            jedis.close();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // Terminal bolt: emits nothing downstream.
    }

}