package com.duowan.realtime.scheduled.batch.writer;

import com.duowan.common.util.Profiler;
import com.duowan.realtime.cache.KpiCodeDimStrTableNameCache;
import com.duowan.realtime.dao.RealTimeDataDao;
import com.duowan.realtime.model.RedisKey;
import com.google.common.base.Joiner;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Writes Redis key/value pairs to MySQL. Each entry's key (e.g.
 * "20141113sum_mid_new_user_ucntproductcaishe") is parsed into a KPI code plus
 * dimensions, turned into a named-parameter insert statement, and the rows are
 * batch-inserted in parallel, grouped by target SQL.
 *
 * @author
 */
public class RedisKeyValueToMySqlWriter implements ItemWriter<Map>, InitializingBean {

    private static final Logger logger = LoggerFactory.getLogger(RedisKeyValueToMySqlWriter.class);

    /** Maximum number of rows handed to a single batch insert job. */
    private static final int DEFAULT_BATCH_SIZE = 2000;

    private RealTimeDataDao realTimeDataDao;

    /** Business date written to the "tdate" column; skipped when blank. */
    private String tdate;

    /** Measurement time written to the "mtime" column; skipped when blank. */
    private String mtime;

    private int batchSize = DEFAULT_BATCH_SIZE;

    @Autowired
    private KpiCodeDimStrTableNameCache kpiCodeDimStrTableNameCache;

    @Override
    public void afterPropertiesSet() throws Exception {
        // No eager initialization needed; collaborators are injected or set via setters.
    }

    public void setTdate(String tdate) {
        this.tdate = tdate;
    }

    public void setMtime(String mtime) {
        this.mtime = mtime;
    }

    /**
     * Converts each key/value map read from Redis into insert jobs grouped by SQL
     * statement and persists them in parallel. A failure on one map is logged and
     * must not abort the remaining maps.
     *
     * @param items chunks of Redis key -> value(String) maps
     */
    @Override
    public void write(List<? extends Map> items) throws Exception {
        for (Map kvMap : items) {
            try {
                ArrayListMultimap<String, Map<String, Object>> sqlToDataMaps = getSqlAndDataMultimap(kvMap, tdate, mtime);
                List<DataJob> dataJobList = generateDataJobList(sqlToDataMaps);
                parallelInsert(dataJobList);
            } catch (Exception e) {
                logger.error("insert into db error" + e.getMessage(), e);
            }
        }
    }

    /**
     * Runs all jobs on a dedicated thread pool and blocks until every job has
     * counted down the shared latch. Profiler timings are logged on success.
     */
    private void parallelInsert(List<DataJob> dataJobList) {
        if (CollectionUtils.isEmpty(dataJobList)) return;
        // A fresh pool per call keeps the lifecycle simple: jobs are short-lived
        // batch inserts and the pool is shut down as soon as they are submitted.
        ExecutorService executorService = Executors.newCachedThreadPool();
        CountDownLatch latch = new CountDownLatch(dataJobList.size());
        Profiler.start();
        Profiler.enter("insert into db parallel");
        for (DataJob job : dataJobList) {
            job.latch = latch;
            executorService.execute(job);
        }
        try {
            latch.await();
            Profiler.release();
            Profiler.release();
            logger.info(Profiler.dump());
        } catch (InterruptedException e) {
            // Restore the interrupt status before propagating, per concurrency convention.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e);
        } finally {
            executorService.shutdown();
        }
    }

    /**
     * 将要插入的数据和sql按照 sql-data 分组，形成一个list，如果一个sql对应的data数据量超过了
     * DEFAULT_BATCH_SIZE，则把data切分成多个，形成多个DataJob
     * (Groups rows by SQL; a group larger than batchSize is split into several jobs.)
     *
     * @param sqlToDataMaps rows keyed by their insert statement
     * @return one DataJob per SQL statement, or several when a group exceeds batchSize
     */
    private List<DataJob> generateDataJobList(ArrayListMultimap<String, Map<String, Object>> sqlToDataMaps) {
        List<DataJob> result = Lists.newLinkedList();
        for (String sql : sqlToDataMaps.asMap().keySet()) {
            List<Map<String, Object>> dataToBeInsert = sqlToDataMaps.get(sql);
            if (dataToBeInsert.size() <= batchSize) {
                result.add(new DataJob(sql, dataToBeInsert));
            } else {
                List<List<Map<String, Object>>> tempList = Lists.partition(dataToBeInsert, batchSize);
                for (List<Map<String, Object>> tempData : tempList) {
                    result.add(new DataJob(sql, tempData));
                }
            }
        }
        return result;
    }

    /**
     * Builds a multimap of insert SQL -> parameter rows from one Redis key/value map.
     * Entries with an unparsable value or an unresolvable target table are logged
     * and skipped rather than failing the whole chunk.
     */
    private ArrayListMultimap<String, Map<String, Object>> getSqlAndDataMultimap(Map kvMap, String tdate, String mtime) {
        ArrayListMultimap<String, Map<String, Object>> sqlToDataMaps = ArrayListMultimap.create();
        Set<Map.Entry> entries = kvMap.entrySet();
        for (Map.Entry entry : entries) {
            String redisKey = null;
            Long redisValue = null;
            try {
                redisKey = (String) entry.getKey();
                redisValue = Long.parseLong((String) entry.getValue());
                Map<String, Object> dataMapToInsert = getDataMap(redisKey, redisValue, tdate, mtime);
                String sql = generateSql(redisKey, dataMapToInsert);
                if (sql == null) {
                    // No target table resolved for this key (already logged); do not
                    // enqueue a job with a null statement.
                    continue;
                }
                sqlToDataMaps.put(sql, dataMapToInsert);
            } catch (NumberFormatException e) {
                logger.error("insert into db error, key:" + redisKey + " value:" + redisValue, e);
            }
        }
        return sqlToDataMaps;
    }

    /**
     * Assembles the full named-parameter upsert statement for one Redis key.
     *
     * @return the SQL statement, or null when no target table could be resolved
     */
    private String generateSql(String redisKey, Map<String, Object> dataMap) {
        String insertInto = generateInsert(redisKey);
        if (StringUtils.isBlank(insertInto)) {
            logger.error("fail to insert, no table resolved for key:" + redisKey);
            return null;
        }
        String columnsAndValues = getColumnsAndValues(dataMap.keySet());
        // MySQL's upsert clause uses "=" (":=" is only valid for user variables), and a
        // leading space is required because columnsAndValues ends with ")".
        return insertInto + columnsAndValues + " ON DUPLICATE KEY update kpi_value = values(kpi_value)";
    }

    /**
     * Resolves the target table for the KPI code + dimension set encoded in the
     * Redis key and returns the "insert into <table>" prefix, or null on failure.
     */
    private String generateInsert(String redisKeyStr) {
        RedisKey redisKey = RedisKey.factory(redisKeyStr);
        String kpiCode = redisKey.getKpiName();
        Map<String, String> dimMap = redisKey.getOrderKVMap();
        String tableName = null;
        try {
            tableName = kpiCodeDimStrTableNameCache.getTableName(kpiCode, dimMap.keySet());
        } catch (ExecutionException e) {
            logger.error("fail to load cache,redisKey:" + redisKeyStr, e);
            return null;
        }
        if (StringUtils.isNotBlank(tableName)) {
            return "insert into " + tableName;
        } else {
            return null;
        }
    }

    /**
     * Renders "( c1, c2 ) values(:c1,:c2)" for a named-parameter batch insert;
     * column order must match between the column list and the placeholders.
     */
    private String getColumnsAndValues(Iterable<String> keys) {
        String columns = Joiner.on(", ").join(keys);
        String values = Joiner.on(",:").join(keys);
        return "( " + columns + " ) values(:" + values + ")";
    }

    /**
     * Builds the parameter row for one Redis entry: the key's ordered dimensions
     * plus mtime/tdate (when configured), kpi_code and kpi_value.
     */
    private Map<String, Object> getDataMap(String redisKey, Long redisValue, String tdate, String mtime) {
        RedisKey key = RedisKey.factory(redisKey);
        TreeMap<String, Object> tempMap = (TreeMap) key.getOrderKVMap();
        if (StringUtils.isNotBlank(mtime)) {
            tempMap.put("mtime", mtime);
        }
        if (StringUtils.isNotBlank(tdate)) {
            tempMap.put("tdate", tdate);
        }
        tempMap.put("kpi_code", key.getKpiName());
        tempMap.put("kpi_value", String.valueOf(redisValue));
        return tempMap;
    }

    public void setRealTimeDataDao(RealTimeDataDao realTimeDataDao) {
        this.realTimeDataDao = realTimeDataDao;
    }

    /**
     * One batch insert of up to batchSize rows for a single SQL statement.
     * Exceptions are logged so one failed batch never blocks the latch.
     */
    private class DataJob implements Runnable {
        private String sql;
        private List<Map<String, Object>> data;
        private CountDownLatch latch;

        DataJob(String sql, List<Map<String, Object>> data) {
            this.sql = sql;
            this.data = data;
        }

        @Override
        public void run() {
            try {
                realTimeDataDao.batchInsert(sql, data);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            } finally {
                // Always count down, even on failure, or parallelInsert would hang.
                latch.countDown();
            }
        }
    }
}
