package com.lhczf.lucenedb.consumer;

import com.lhczf.lucenedb.bean.DataWrap;
import com.lhczf.lucenedb.bean.SystemConfig;
import com.lhczf.lucenedb.production.AbstractDataQueues;
import com.lhczf.lucenedb.service.LuceneDbServer;
import com.lhczf.lucenedb.util.BeanUtil;
import com.lhczf.lucenedb.util.FileUtil;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.IndexWriter;

import java.io.File;
import java.io.IOException;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;

/**
 * @author: 刘华春
 * @date: 2019/2/6
 */
@Slf4j
public abstract class AbstractDataConsumer<T> extends AbstractDataQueues implements Runnable {

    @Setter
    private int id = -1;
    /**
     * Writer for the index directory this consumer currently owns;
     * created lazily on the first batch and re-created on day rollover.
     */
    private IndexWriter indexWriter;
    /**
     * DirectoryTaxonomyWriter used by each index, keyed by index name.
     * NOTE(review): read here by every consumer thread; if entries can also be
     * registered concurrently, this should become a ConcurrentHashMap — confirm
     * against the code that populates it before changing the implementation.
     */
    static final Map<String, DirectoryTaxonomyWriter> TAXONOMY_WRITER_MAP = new HashMap<>(16);
    /**
     * Per index: consumer-thread id -> day string of the directory that thread is
     * currently writing to. Several consumer threads register and update entries
     * concurrently, so a ConcurrentHashMap (outer and inner) is required; the
     * previous plain HashMap could be corrupted by concurrent computeIfAbsent/put.
     */
    private static final Map<String, Map<Integer, String>> CURRENT_DATA_DIR_MAP = new ConcurrentHashMap<>(16);
    /** Milliseconds per second, for converting elapsed millis into a per-second rate. */
    private static final int SPEED_UNIT = 1000;
    /** Day-directory name formatter; DateTimeFormatter is immutable and thread-safe, so cache it once. */
    private static final DateTimeFormatter DAY_DIR_FORMATTER = DateTimeFormatter.ofPattern("yyyyMMdd");

    /**
     * Configuration of the index consumed by the current thread.
     */
    @Getter
    @Setter
    protected Map<String, Object> indexConfig;

    @Getter
    private WrapUncaughtExceptionHandler warpExceptionHandler = new WrapUncaughtExceptionHandler();

    /**
     * Creates a new instance of the concrete consumer, used for self-recovery.
     *
     * @return a fresh consumer of the same concrete type
     */
    abstract AbstractDataConsumer<T> createDataConsumer();

    /**
     * Whether a taxonomy writer must be configured for this consumer thread.
     *
     * @return true or false
     */
    public abstract boolean hasConfigTaxoWriter();

    /**
     * Names the index this consumer consumes.
     *
     * @return the index name
     */
    public abstract String configIndexName();

    /**
     * Takes one element from the data queue.
     *
     * @return a DataWrap instance, or null when none is available
     */
    abstract DataWrap takeDataWrap();

    /**
     * Supplies the taxonomy writer this consumer should use.
     *
     * @param taxonomyWriter the DirectoryTaxonomyWriter instance
     */
    public abstract void configTaxoIndexWriter(DirectoryTaxonomyWriter taxonomyWriter);

    /**
     * Whether the consumer restarts itself after an uncaught exception.
     *
     * @return true to enable self-recovery, false to disable it
     */
    public abstract boolean autoRecover();

    /**
     * Number of threads to run for this consumer.
     *
     * @return the runtime thread count
     */
    public abstract int runtimeThreadNum();

    /**
     * Polls one DataWrap from the given queue, waiting up to 100 ms.
     *
     * @param blockingQueue the queue to poll
     * @return the next DataWrap, or null on timeout or interruption
     */
    DataWrap getDataWrapFromQueue(BlockingQueue blockingQueue) {
        try {
            return (DataWrap) blockingQueue.poll(100, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            log.info("接受到一个中断信号。");
            // Restore the interrupt flag so the owning thread can observe it.
            Thread.currentThread().interrupt();
        }
        return null;
    }

    /**
     * Main consumer loop: takes batches from the queue, builds Lucene documents
     * (with facets applied) and writes them to the current day-based index
     * directory, switching the directory when the calendar day changes.
     */
    @Override
    public void run() {
        log.info("【{}】消费者线程已经启动。", configIndexName());
        String currentStr = LocalDateTime.now().format(SystemConfig.Y_M_D_DTF);
        LuceneDbServer server = BeanUtil.getInstance().getBean(LuceneDbServer.class);
        // Register this thread's current day so changedIndexDir() can detect rollover.
        Map<Integer, String> threadParams =
                CURRENT_DATA_DIR_MAP.computeIfAbsent(configIndexName(), k -> new ConcurrentHashMap<>(16));
        threadParams.put(id, currentStr);

        FacetsConfig facetsConfig = new FacetsConfig();
        List<Document> documents = new ArrayList<>();
        SystemConfig systemConfig = BeanUtil.getInstance().getBean(SystemConfig.class);
        DirectoryTaxonomyWriter currentTaxoWriter;

        for (; ; ) {
            DataWrap dataWrap = takeDataWrap();
            changedIndexDir();
            if (dataWrap == null || dataWrap.getData().isEmpty()) {
                continue;
            }
            if (indexConfig == null) {
                // Lazily load the mapping configuration and open the writer on the
                // first batch, because the config path travels with the data.
                String configPath = dataWrap.getConfig();
                log.info("当前线程的配置文件为：{}", configPath);
                indexConfig = server.mappingConfigInfo(configPath);
                this.indexWriter = initIndexWriter(server, systemConfig);
                String indexDataDir = server.findIndexPath(this.indexWriter.getDirectory());
                log.info("当前线程的索引目录为：{}", indexDataDir);
            }
            currentTaxoWriter = TAXONOMY_WRITER_MAP.get(configIndexName());
            int total = dataWrap.getData().size();
            LocalDateTime start = LocalDateTime.now();
            List datas = dataWrap.getData();

            // Other data types may need handling here; add further branches as needed.
            if (dataWrap.getType().equals(SystemConfig.LOCAL_FILE_DATA)) {
                dealLocalFileData(currentTaxoWriter, server, facetsConfig, documents, datas);
            }
            LocalDateTime end = LocalDateTime.now();
            boolean debug = systemConfig.isPerformanceDebug();
            if (debug) {
                Duration duration = Duration.between(start, end);
                long times = duration.toMillis();
                total = documents.size();
                long speed = getSpeed(total, times);
                log.info("创建【{}】个文档花费了【{}】毫秒。创建速率是：【{}】个每秒", total, duration.toMillis(), speed);
            }
            try {
                log.debug("正在向目录中写索引数据：{}", server.findIndexPath(indexWriter.getDirectory()));
                indexWriter.addDocuments(documents);
                indexWriter.commit();
                documents.clear();
            } catch (IOException e) {
                // NOTE(review): documents is intentionally NOT cleared on failure, so
                // the batch is retried with the next commit; this may duplicate
                // documents if addDocuments partially succeeded — confirm intent.
                log.error("向索引目录写入数据失败。", e);
            }
            if (debug) {
                LocalDateTime commit = LocalDateTime.now();
                Duration duration = Duration.between(end, commit);
                long times = duration.toMillis();
                long speed = getSpeed(total, times);
                log.info("【{}】个文档写入磁盘花费了【{}】毫秒。速率是：【{}】个每秒", total, times, speed);
            }
        }
    }

    /**
     * Switches the index directory when the calendar day has changed: closes the
     * current writer, hands its directory to the merge queue, and opens a new
     * writer under today's directory.
     */
    private void changedIndexDir() {
        String currentStr = LocalDateTime.now().format(SystemConfig.Y_M_D_DTF);
        SystemConfig systemConfig = BeanUtil.getInstance().getBean(SystemConfig.class);
        Map<Integer, String> daysString = CURRENT_DATA_DIR_MAP.get(configIndexName());
        if (!currentStr.equals(daysString.get(id))) {
            if (indexWriter == null) {
                // The day rolled over before any data arrived, so there is no writer
                // to close yet; record the new day and let the first batch open the
                // correct directory. (Previously this dereferenced a null writer.)
                daysString.put(id, currentStr);
                return;
            }
            LuceneDbServer server = BeanUtil.getInstance().getBean(LuceneDbServer.class);
            String oldDir = server.findIndexPath(indexWriter.getDirectory());
            server.closeWriter(indexWriter);
            BlockingQueue blockingQueue = DATA_QUEUE.computeIfAbsent(MERGE_DATA_QUEUE_KEY, k -> new LinkedBlockingQueue());
            // Wrap the pre-switch data directory in a DataWrap and enqueue it on the
            // MERGE_DATA_QUEUE_KEY queue for the merge consumer thread to process.
            try {
                blockingQueue.put(new DataWrap(configIndexName(), oldDir));
            } catch (InterruptedException e) {
                log.error("通知合并线程失败。", e);
                Thread.currentThread().interrupt();
            }
            indexWriter = initIndexWriter(server, systemConfig);
            daysString.put(id, currentStr);
            log.info("索引目录发生切换, 原目录：{}， 现在的目录是：{}", oldDir, server.findIndexPath(indexWriter.getDirectory()));
        }
    }

    /**
     * Queue configuration hook; this consumer needs none.
     */
    @Override
    protected void dataQueueConfig() {
        log.info("the method is empty");
    }

    /**
     * Builds today's index directory for this thread
     * ({root}/{INDEX_DAYS_DIR}/{indexName}/{yyyyMMdd}/{id}) and opens a writer on it.
     *
     * @param server       the lucene-db server facade
     * @param systemConfig global configuration holding the index data root
     * @return a writer on the freshly created directory
     */
    private IndexWriter initIndexWriter(LuceneDbServer server, SystemConfig systemConfig) {
        String todayStr = LocalDateTime.now().format(DAY_DIR_FORMATTER);

        String root = systemConfig.getIndexData();
        StringBuilder sb = new StringBuilder(root);
        sb.append(File.separator).append(SystemConfig.INDEX_DAYS_DIR)
                .append(File.separator).append(configIndexName()).append(File.separator)
                .append(todayStr).append(File.separator).append(id);
        File file = new File(sb.toString());
        FileUtil.createDir(file);
        return server.createIndexWriter(file.getAbsolutePath());
    }

    /**
     * Converts each local JSON file into Lucene documents (with facets applied)
     * and appends them to {@code documents}; the source file is deleted once read.
     *
     * @param currentTaxoWriter taxonomy writer for the current index (may be null per caller)
     * @param server            the lucene-db server facade
     * @param facetsConfig      facets configuration used to build each document
     * @param documents         output list the built documents are appended to
     * @param datas             paths of the local data files to consume
     */
    private void dealLocalFileData(DirectoryTaxonomyWriter currentTaxoWriter, LuceneDbServer server,
                                   FacetsConfig facetsConfig, List<Document> documents, List<String> datas) {
        for (String data : datas) {
            List<Document> docs = server.dealDataByJson(data, indexConfig);
            // The file is consumed at this point; remove it even if it produced no docs.
            File file = new File(data);
            FileUtil.safeDeleteFile(file);

            if (docs == null || docs.isEmpty()) {
                continue;
            }
            for (Document doc : docs) {
                Document facetsDoc = null;
                try {
                    facetsDoc = facetsConfig.build(currentTaxoWriter, doc);
                } catch (IOException e) {
                    log.error("构建Facets文档失败。", e);
                }
                if (facetsDoc != null) {
                    documents.add(facetsDoc);
                }
            }
        }
    }

    /**
     * Converts a document count and elapsed milliseconds into documents per second.
     *
     * @param total number of documents processed
     * @param times elapsed time in milliseconds
     * @return the processing rate, in documents per second
     */
    private long getSpeed(int total, long times) {
        if (times <= 0) {
            // Sub-millisecond batch: report the batch size to avoid dividing by zero.
            return total;
        }
        // Keep millisecond precision; truncating to whole seconds first overstated
        // the rate for elapsed times such as 1999 ms.
        return total * (long) SPEED_UNIT / times;
    }

    /**
     * Handles uncaught exceptions from a consumer thread: releases the index
     * writer (and its directory lock) and, when {@link #autoRecover()} is on,
     * starts a replacement consumer thread with the same id.
     */
    private class WrapUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {

        @Override
        public void uncaughtException(Thread t, Throwable e) {
            // Guard: the writer is created lazily and may not exist yet.
            if (indexWriter != null) {
                try {
                    // Close the writer so the directory lock is freed before a
                    // replacement thread tries to reopen the same directory.
                    indexWriter.close();
                } catch (IOException e1) {
                    // Log the close failure itself, not the thread's uncaught throwable.
                    log.error("关闭IndexWriter失败。", e1);
                }
            }
            log.error("线程在运行过程中产生了未捕获的异常。 ", e);
            if (autoRecover()) {
                AbstractDataConsumer<T> dataConsumer = createDataConsumer();
                if (dataConsumer == null) {
                    return;
                }
                // The replacement inherits this thread's id so it reuses the same
                // per-thread index directory slot.
                dataConsumer.setId(id);
                ThreadFactory factory = new WrapThreadFactory(t.getName());
                factory.newThread(dataConsumer).start();
            } else {
                log.info("当前线程的自我修复开关为：{}", autoRecover());
            }
        }
    }
}
