package com.dmp.storm.bolt.file;

import com.dmp.common.date.DateUtil;
import com.dmp.common.string.MsgResetUtil;
import com.dmp.common.string.StringUtils;
import com.dmp.storm.bolt.BaseMapBolt;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.*;
import java.util.concurrent.*;

/**
 * Base bolt that buffers incoming records and flushes them to files.
 *
 * <p>Records accumulated via {@link #process(Map)} are flushed when either
 * {@code recordSize} records have been buffered or {@code timeOut} seconds have
 * elapsed (a scheduled task raises the flush flag). Files are written first
 * with a {@code .tmp} suffix and renamed to {@code .txt} once complete.
 *
 * <p>NOTE(review): the buffer list is touched by both the Storm executor
 * thread and the scheduler thread — confirm the intended synchronization.
 *
 * @author suzg
 * @date 2015-11-05
 */
public abstract class BaseFileBolt extends BaseMapBolt {

    private static Logger log = LoggerFactory.getLogger(BaseFileBolt.class);
    private static final long serialVersionUID = 1L;
    // configuration loaded from greenet.properties in prepare()
    private static Properties props = new Properties();
    // single-threaded scheduler driving time-based flushes
    private ScheduledExecutorService scheduExec;
    // records buffered since the last flush
    private List<Map<String, Object>> userRecodeList;
    // local cache of dictionary lookups (lower-cased "table_id_value" -> value)
    private Map<String, String> redisMap;
    // raised when a flush is due (by size or by timer)
    private volatile boolean flag = false;
    @SuppressWarnings("rawtypes")
    public ScheduledFuture future;
    protected int count = 0;// number of records buffered so far
    protected int fileIndex = 1;// sequence number used to de-duplicate file names
    protected Random random = new Random();
    // suffix of a file still being written
    protected static final String FILE_TEMP_SUFFIX = ".tmp";
    // suffix of a completed, published file
    protected static final String FILE_SUFFIX = ".txt";
    // output directory, from the file_dir property
    protected static String fileDir;
    protected static int timeOut = 60; // flush interval in SECONDS (file_timeOut property; default 60s, not 5 min)
    protected static int recordSize = 100;// flush after this many records (file_recordSize property)

    // per-bolt configuration
    protected String filePreffix;// file name prefix
    protected String extraKeys;// extra fields the subclass appends
    protected String inputKeys; // when set, the kafka message is rebuilt from these keys

    /**
     * Schedules the periodic time-based flush task.
     *
     * @return the future of the scheduled task, so it can be cancelled and
     *         re-armed after a size-triggered flush
     */
    @SuppressWarnings("rawtypes")
    private ScheduledFuture startTask() {
        // FIX: multiply as long — "timeOut * 1000" was an int multiply that
        // could overflow for very large timeouts. timeOut is in seconds.
        final long periodMillis = timeOut * 1000L;
        return scheduExec.scheduleWithFixedDelay(new Runnable() {
            @Override
            public void run() {
                // mark a flush as due and perform it on the scheduler thread
                flag = true;
                core();
            }
        }, periodMillis, periodMillis, TimeUnit.MILLISECONDS);
    }

    /**
     * Loads the file-output settings from {@code greenet.properties}, then
     * initializes the caches and starts the time-based flush task.
     * Built-in defaults are kept when the configuration cannot be read.
     */
    @Override
    public void prepare() {
        // FIX: try-with-resources — the classloader stream was never closed
        try (InputStream in = BaseFileBolt.class.getClassLoader().getResourceAsStream("greenet.properties")) {
            if (in == null) {
                log.error("greenet.properties not found on the classpath!");
            } else {
                props.load(in);
                timeOut = Integer.parseInt(props.getProperty("file_timeOut"));
                recordSize = Integer.parseInt(props.getProperty("file_recordSize"));
                fileDir = props.getProperty("file_dir");
                log.info("timeOut = {}", timeOut);
                log.info("recordSize = {}", recordSize);
                log.info("fileDir = {}", fileDir);
            }
        } catch (IOException | NumberFormatException e) {
            // FIX: keep the exception in the log instead of printStackTrace();
            // also survive malformed numeric properties
            log.error("加载配置文件出错！", e);
        }
        redisMap = new ConcurrentHashMap<String, String>();
        userRecodeList = new ArrayList<Map<String, Object>>();
        scheduExec = Executors.newSingleThreadScheduledExecutor();
        future = startTask();
    }

    /**
     * Buffers one record and flushes all buffered records to a file once the
     * configured record count is reached or a timer flush is pending.
     *
     * <p>FIX: synchronized — the buffer is also mutated by the scheduler
     * thread via {@code core()}; the plain ArrayList is not thread-safe.
     *
     * @param map one parsed record, buffered as-is
     * @return the same map, unchanged, for downstream bolts
     */
    @Override
    public synchronized Map<String, Object> process(Map<String, Object> map) {
        userRecodeList.add(map);
        count++;
        if (count >= recordSize) {
            this.flag = true;
            // FIX: logger instead of System.err debug print
            log.debug("recodeSize = {}", count);
        }
        if (flag) {
            // flush the buffered records to a file
            core();
        }
        return map;
    }

    /**
     * Flushes the buffered records: writes them to a new file, resets the
     * counters and re-arms the timer task so the next interval starts at zero.
     *
     * <p>FIX: synchronized — this is invoked from both the executor thread
     * (size-triggered) and the scheduler thread (time-triggered), and it
     * replaces the non-thread-safe buffer list.
     */
    private synchronized void core() {
        this.count = 0;
        this.flag = false;
        if (CollectionUtils.isNotEmpty(userRecodeList)) {
            // write all buffered records into a new file
            createFileByList(userRecodeList);

            // start a fresh buffer and restart the flush timer from zero
            userRecodeList = new ArrayList<Map<String, Object>>();
            future.cancel(true);
            future = startTask();
        }
    }

    /**
     * Writes the given records to a new file using NIO channels.
     *
     * <p>Steps: create a temp (.tmp) file, write one line per record through a
     * {@link FileChannel}, close the channel, then rename the file to its
     * final .txt name. On failure the .tmp file is left behind (not renamed).
     *
     * @param list records to write, one line each
     */
    protected void createFileByList2(List<Map<String, Object>> list) {
        File file = null;
        FileOutputStream fos = null;
        FileChannel fc_out = null;
        try {
            file = createTempFile();
            // FIX: createTempFile() returns null on failure — the original
            // would then NPE on setWritable/new FileOutputStream
            if (file == null) {
                log.error("temp file creation failed; {} records not written", list.size());
                return;
            }
            file.setWritable(true);
            fos = new FileOutputStream(file);
            fc_out = fos.getChannel();

            log.info("new file:{}=======recodeSize:{}", file.getName(), list.size());
            // write each record as one line
            for (Map<String, Object> map : list) {
                writeFileByMap(fc_out, map);
            }
            fc_out.close();
            // publish: rename .tmp to .txt only after a clean write + close
            updateFileName(file);
        } catch (IOException e) {
            // FIX: keep the exception in the log instead of printStackTrace()
            log.error(file.getName() + ":文件写数据出错！", e);
        } finally {
            // the second close after a successful try block is a harmless no-op
            if (null != fc_out) {
                try {
                    fc_out.close();
                } catch (IOException e) {
                    log.error("close channel failed", e);
                }
            }
            if (null != fos) {
                try {
                    fos.close();
                } catch (IOException e) {
                    log.error("close stream failed", e);
                }
            }
        }
    }

    /**
     * Formats one record and writes it to the channel as a single
     * pipe-delimited, CRLF-terminated line.
     *
     * @param fc_out open channel of the temp file being written
     * @param map    record to write; the raw kafka message is under "result"
     */
    private void writeFileByMap(FileChannel fc_out, Map<String, Object> map) {
        try {
            String kafkamsg = (String) map.get("result");
            // FIX: a missing "result" used to NPE outside the try block and
            // abort the whole flush; skip just this record instead
            if (kafkamsg == null) {
                log.error("record without 'result' field skipped");
                return;
            }
            StringBuffer sb = new StringBuffer(kafkamsg);
            if (!kafkamsg.endsWith("|")) {
                sb.append("|");
            }
            // when inputKeys is configured the kafka message is rebuilt from it
            if (StringUtils.isNotBlank(inputKeys)) {
                sb = MsgResetUtil.createMsgStringBuffer(inputKeys, map);
            }
            // let the subclass append its extra fields
            dealWithExtraKeys(sb, map);
            // drop the trailing "|" and terminate the line
            sb.deleteCharAt(sb.length() - 1).append("\r\n");
            String oneMessage = sb.toString();
            // FIX: wrap() already yields a buffer positioned for writing; the
            // original put()+flip() re-wrote the same bytes redundantly
            fc_out.write(ByteBuffer.wrap(oneMessage.getBytes()));
        } catch (Exception e) {
            // FIX: keep the exception in the log instead of printStackTrace()
            log.error("写文件异常！==================", e);
        }
    }

    /**
     * Writes the given records to a new file with a {@link FileWriter}.
     *
     * <p>Steps: create a temp (.tmp) file, write one line per record, close the
     * writer, then rename the file to its final .txt name. On failure the
     * .tmp file is left behind (not renamed).
     *
     * @param list records to write, one line each
     */
    public void createFileByList(List<Map<String, Object>> list) {
        // step 1: create the temporary file
        File file = null;
        FileWriter writer = null;
        try {
            file = createTempFile();
            log.info("new file:{}=======recodeSize:{}", file.getName(), list.size());
            writer = new FileWriter(file, true);
            // step 2: write each record as one line
            for (Map<String, Object> map : list) {
                writeFileByMap(writer, map);
            }
            writer.close();
            // step 3: publish by renaming .tmp to .txt
            updateFileName(file);
        } catch (Exception e) {
            // FIX: guard the log line — file is null when createTempFile
            // failed, so file.getName() itself used to NPE inside the catch
            String name = (file == null) ? "<no file>" : file.getName();
            log.error(name + ":文件写数据出错！", e);
        } finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    log.error("close writer failed", e);
                }
            }
        }
    }

    /**
     * Creates a new, empty temp file named
     * {@code <filePreffix><yyyyMMddHHmmss>_<rand>.tmp} in {@code fileDir},
     * creating the directory first when needed.
     *
     * <p>On a name collision a sequence number ({@code fileIndex}) plus a
     * fresh random part is appended until an unused name is found.
     *
     * @return the created file, or {@code null} when creation failed
     */
    public File createTempFile() {
        String currentTime = DateUtil.date2String(new Date(), DateUtil.yyyyMMddHHmmss);
        int rand = random.nextInt(1000000);
        String tempFileName = filePreffix + currentTime + "_" + rand + FILE_TEMP_SUFFIX;
        String realFileName = filePreffix + currentTime + "_" + rand + FILE_SUFFIX;
        File file = null;
        try {
            // make sure the output directory exists
            File dir = new File(fileDir);
            if (!dir.exists()) {
                log.info("dir is creating..................");
                dir.mkdirs();
            }
            file = new File(fileDir, tempFileName);
            File realFile = new File(fileDir, realFileName);
            if (!file.exists() && !realFile.exists()) {
                fileIndex = 1;
                file.createNewFile();
            } else {
                // FIX: the original retried a collision exactly once and
                // ignored createNewFile()'s return value, so a second
                // collision silently reused an existing file. Loop until an
                // unused name is actually created (createNewFile is atomic).
                boolean created = false;
                while (!created) {
                    rand = random.nextInt(1000000);
                    fileIndex++;
                    tempFileName = filePreffix + currentTime + "_" + fileIndex + "_" + rand + FILE_TEMP_SUFFIX;
                    file = new File(fileDir, tempFileName);
                    created = file.createNewFile();
                }
            }
        } catch (Exception e) {
            // FIX: keep the exception in the log instead of printStackTrace()
            log.error("create file failed: " + fileDir + File.separator + tempFileName, e);
            System.err.println("创建文件失败:" + fileDir + File.separator + tempFileName);
        }
        return file;
    }

    /**
     * Ad-hoc manual check of the eNodeB / cell-id bit split. Not used by the
     * topology; kept for reference.
     *
     * <p>FIX: removed the try/catch that silently swallowed every exception —
     * nothing in this body throws, and swallowing hid any real failure.
     */
    public static void main(String[] args) {
        String ss = "4601187192625";
        System.out.println(ss.substring(5));
        long s = 87192625;
        long enodeId = s >>> 8; // upper bits: eNodeB id
        long ci = s & 0xff;// lowest 8 bits: cell id
        System.out.println("enodeId:" + enodeId + ",ci:" + ci);
    }

    /**
     * Formats one record and writes it to the writer as a single
     * pipe-delimited, CRLF-terminated line, flushing after each record.
     *
     * @param writer open writer on the temp file being written
     * @param map    record to write
     */
    public void writeFileByMap(FileWriter writer, Map<String, Object> map) {
        StringBuffer sb = new StringBuffer();
        try {
            // when inputKeys is configured the kafka message is rebuilt from it
            if (StringUtils.isNotBlank(inputKeys)) {
                sb = MsgResetUtil.createMsgStringBuffer(inputKeys, map);
            }
            // let the subclass append its extra fields
            dealWithExtraKeys(sb, map);
            // FIX: deleteCharAt(sb.length()-1) threw StringIndexOutOfBounds
            // when nothing was appended (blank inputKeys, no extra fields),
            // silently dropping the record via the catch below
            if (sb.length() == 0) {
                log.error("empty record skipped");
                return;
            }
            // drop the trailing "|" and terminate the line
            sb.deleteCharAt(sb.length() - 1).append("\r\n");
            String oneMessage = sb.toString();
            writer.write(oneMessage);
            writer.flush();
        } catch (Exception e) {
            // FIX: keep the exception in the log instead of printStackTrace()
            log.error("写文件异常！==================", e);
        }
    }

    /**
     * Publishes a finished temp file by renaming it: the extension is replaced
     * with {@code .txt}.
     *
     * @param file the .tmp file to rename
     */
    public void updateFileName(File file) {
        String oldFileName = file.getName();
        // FIX: use lastIndexOf so a '.' inside filePreffix cannot truncate the
        // name, and tolerate a name without any dot
        int dot = oldFileName.lastIndexOf('.');
        String base = (dot < 0) ? oldFileName : oldFileName.substring(0, dot);
        File newFile = new File(fileDir, base + FILE_SUFFIX);
        // FIX: renameTo reports failure via its return value, which was ignored
        if (!file.renameTo(newFile)) {
            log.error("rename failed: {} -> {}", oldFileName, newFile.getName());
        }
    }

    /**
     * Appends the subclass-specific extra fields to the line being built.
     * By the callers' contract each appended value must be followed by a "|"
     * separator — the caller strips the final one before writing the line.
     *
     * @param sb  the pipe-delimited line under construction
     * @param map the record currently being written
     * @throws Exception when a field cannot be resolved
     */
    protected abstract void dealWithExtraKeys(StringBuffer sb, Map<String, Object> map) throws Exception;

    /**
     * Appends the cached dictionary value for {@code map.get(key)} to the line.
     *
     * <p>The value is looked up in the local cache under
     * {@code tableName_ehcacheId_value} (lower-cased). On a cache miss the
     * dictionary lookup is currently disabled (commented out), so the value
     * falls back to {@code defaultVal} when given, otherwise to "1", and the
     * fallback is cached.
     *
     * @param sb         line under construction; the value plus "|" is appended
     * @param key        map key whose value identifies the dictionary entry
     * @param map        the record currently being written
     * @param tableName  dictionary table name
     * @param ehcacheId  dictionary cache id
     * @param defaultVal value used when the lookup yields nothing (may be null)
     * @return the same {@code sb}, for chaining
     */
    protected StringBuffer getEhcacheValueByKey(StringBuffer sb, String key, Map<String, Object> map, String tableName,
                                                String ehcacheId, String defaultVal) throws Exception {
        Object value = map.get(key.trim());
        String valueStr = value == null ? "" : value.toString();

        String redisKey = (tableName + "_" + ehcacheId + "_" + valueStr).toLowerCase();
        String result = redisMap.get(redisKey);

        if (StringUtils.isBlank(result)) {
            //result = DictUtils.getValueByKey(tableName, ehcacheId, valueStr);
            if (StringUtils.isBlank(result)) {
                // FIX: defaultVal was dead code — the original forced result
                // to "1" (and cached it) before ever consulting the default
                result = StringUtils.isNotBlank(defaultVal) ? defaultVal : "1";
            }
            redisMap.put(redisKey, result);
        }
        sb.append(result).append("|");
        return sb;
    }

    /**
     * Convenience overload of
     * {@link #getEhcacheValueByKey(StringBuffer, String, Map, String, String, String)}
     * that performs the lookup without a default value.
     *
     * @param sb        line under construction; the value plus "|" is appended
     * @param key       map key whose value identifies the dictionary entry
     * @param map       the record currently being written
     * @param tableName dictionary table name
     * @param ehcacheId dictionary cache id
     * @return the same {@code sb}, for chaining
     */
    protected StringBuffer getEhcacheValueByKey(StringBuffer sb, String key, Map<String, Object> map, String tableName,
                                                String ehcacheId) throws Exception {
        // delegate with no default value
        final String noDefault = null;
        return getEhcacheValueByKey(sb, key, map, tableName, ehcacheId, noDefault);
    }

    /**
     * Appends the cached dictionary value for an already-resolved key value.
     *
     * <p>Same lookup and fallback behaviour as
     * {@link #getEhcacheValueByKey(StringBuffer, String, Map, String, String, String)}:
     * cache key {@code tableName_ehcacheId_keyValue} (lower-cased); on a miss
     * the (currently disabled) dictionary lookup falls back to
     * {@code defaultVal} when given, otherwise "1", and caches the result.
     *
     * @param sb         line under construction; the value plus "|" is appended
     * @param keyValue   dictionary entry key value
     * @param tableName  dictionary table name
     * @param ehcacheId  dictionary cache id
     * @param defaultVal value used when the lookup yields nothing (may be null)
     * @return the same {@code sb}, for chaining
     */
    protected StringBuffer getRedisValueByKey(StringBuffer sb, String keyValue, String tableName, String ehcacheId,
                                              String defaultVal) throws Exception {
        String redisKey = (tableName + "_" + ehcacheId + "_" + keyValue).toLowerCase();
        String result = redisMap.get(redisKey);
        if (StringUtils.isBlank(result)) {
            //result = DictUtils.getValueByKey(tableName, ehcacheId, keyValue);
            if (StringUtils.isBlank(result)) {
                // FIX: defaultVal was dead code — the original forced result
                // to "1" (and cached it) before ever consulting the default
                result = StringUtils.isNotBlank(defaultVal) ? defaultVal : "1";
            }
            // FIX: System.out debug print replaced with the logger
            log.debug("dictionary fallback: {} -> {}", redisKey, result);
            redisMap.put(redisKey, result);
        }
        sb.append(result).append("|");
        return sb;
    }

    /** Sets the flush interval in seconds. NOTE: the field is static, shared by all instances. */
    public void setTimeOut(int timeOut) {
        // FIX: timeOut is a static field — writing it through "this" was
        // misleading (and a compiler warning); assign via the class instead
        BaseFileBolt.timeOut = timeOut;
    }

    /** Sets the record count that triggers a flush. NOTE: the field is static, shared by all instances. */
    public void setRecordSize(int recordSize) {
        BaseFileBolt.recordSize = recordSize;
    }

    public String getExtraKeys() {
        return extraKeys;
    }

    public void setExtraKeys(String extraKeys) {
        this.extraKeys = extraKeys;
    }

    public String getFilePreffix() {
        return filePreffix;
    }

    public void setFilePreffix(String filePreffix) {
        this.filePreffix = filePreffix;
    }

    public String getInputKeys() {
        return inputKeys;
    }

    public void setInputKeys(String inputKeys) {
        this.inputKeys = inputKeys;
    }
}
