package com.lucene.test.service.impl;

import cn.hutool.core.io.FileUtil;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.lucene.test.common.Page;
import com.lucene.test.common.PageData;
import com.lucene.test.config.LuceneTemplate;
import com.lucene.test.exception.LuceneException;
import com.lucene.test.field.BaseIndexField;
import com.lucene.test.pojo.BaseIndexEntity;
import com.lucene.test.service.ILuceneSearch;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.Resource;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.io.IOException;
import java.lang.reflect.Type;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;

/**
 * Lucene search implementation.
 * Reads may run concurrently; all index mutations are serialized through
 * a single {@link ReentrantLock} (Lucene permits only one writer).
 * @author zoumy9
 * @date 2024/05/23 16:32
 **/
@Service
public class LuceneSearchImpl implements ILuceneSearch {
    private static final Logger logger = LoggerFactory.getLogger(LuceneSearchImpl.class);

    /** Serializes writes; readers do not take this lock. */
    private final Lock lock = new ReentrantLock();

    /** Ascending / descending sort on the numeric id doc-values field. */
    final static Sort sort = new Sort(new SortField(BaseIndexField.F_ID, SortField.Type.LONG));
    final static Sort sortReverse = new Sort(new SortField(BaseIndexField.F_ID, SortField.Type.LONG, true));

    @Resource
    private LuceneTemplate luceneTemplate;
    /** Optional classpath resource containing a JSON array of seed entities. */
    @Value("${index.init.docFile:}")
    private String initDocFile;

    @PostConstruct
    private void init() {
        loadInit();
    }

    /**
     * Inserts the entity or replaces an existing document with the same id.
     *
     * @param entity entity to index
     */
    @Override
    public void insertUpdateIndex(BaseIndexEntity entity) {
        lock.lock();
        try {
            Document document = convert(entity);
            luceneTemplate.updateDocument(BaseIndexField.F_ID, document);
        } finally {
            lock.unlock();
        }
    }

    /**
     * Adds all entities to the index. A {@code null} or empty collection is
     * logged and ignored; indexing failures are logged, not propagated.
     *
     * @param list entities to index
     */
    @Override
    public void insertBatch(Collection<? extends BaseIndexEntity> list) {
        if (CollectionUtils.isEmpty(list)) {
            logger.error("list empty");
            return;
        }
        lock.lock();
        try {
            List<Document> documents = list.stream().map(this::convert).collect(Collectors.toList());
            luceneTemplate.addDocuments(documents, false);
        } catch (Exception e) {
            logger.error("insertBatch failed", e);
        } finally {
            lock.unlock();
        }
    }

    /**
     * Updates all entities in the index, matching by id.
     * Mirrors {@link #insertBatch}: empty input is ignored and failures are
     * logged rather than propagated.
     *
     * @param list entities to update
     */
    @Override
    public void updateBatch(Collection<? extends BaseIndexEntity> list) {
        if (CollectionUtils.isEmpty(list)) {
            logger.error("list empty");
            return;
        }
        lock.lock();
        try {
            List<Document> documents = list.stream().map(this::convert).collect(Collectors.toList());
            luceneTemplate.updateDocuments(BaseIndexField.F_ID, documents, false);
        } catch (Exception e) {
            logger.error("updateBatch failed", e);
        } finally {
            lock.unlock();
        }
    }

    @Override
    public PageData<List<? extends BaseIndexEntity>> pageQuery(Query query, Page page) {
        return pageQuery(query, page, false);
    }

    /**
     * Runs a paged query sorted by id.
     *
     * @param query   Lucene query; {@code null} yields an empty page
     * @param page    page number and size
     * @param reverse {@code true} for descending id order
     * @return the requested page, or an empty page on failure
     */
    @Override
    public PageData<List<? extends BaseIndexEntity>> pageQuery(Query query, Page page, boolean reverse) {
        try {
            IndexSearcher searcher = luceneTemplate.getSearcher();
            TopDocs topDocs = queryScoreDocsByPerPage(page.getPages(), page.getSize(), searcher, query, reverse);
            if (topDocs == null) {
                return PageData.empty(page);
            }
            List<BaseIndexEntity> list = parseScoreDoc(searcher, topDocs);
            // The page total must count documents matching the query;
            // maxDoc() counts the whole index, including deleted docs.
            int total = searcher.count(query);
            return PageData.data(page, total, list);
        } catch (Exception e) {
            logger.error("pageQuery failed", e);
            return PageData.empty(page);
        }
    }

    @Override
    public List<? extends BaseIndexEntity> queryList(Query query) {
        return queryList(query, false);
    }

    /**
     * Returns all documents matching the query, sorted by id.
     *
     * @param query   Lucene query
     * @param reverse {@code true} for descending id order
     * @return matching entities, or an empty list on failure
     */
    @Override
    public List<? extends BaseIndexEntity> queryList(Query query, boolean reverse) {
        try {
            IndexSearcher searcher = luceneTemplate.getSearcher();
            TopDocs topDocs = getScoreDocs(searcher, query, reverse);
            return parseScoreDoc(searcher, topDocs);
        } catch (Exception e) {
            logger.error("queryList failed", e);
            return Collections.emptyList();
        }
    }

    /**
     * Paged search sorted ascending by id.
     *
     * @param page     1-based page number
     * @param perPage  page size
     * @param searcher active searcher
     * @param query    Lucene query; {@code null} returns {@code null}
     * @return hits for the requested page, or {@code null} when query is null
     * @throws IOException on index access failure
     */
    private TopDocs queryScoreDocsByPerPage(int page, int perPage, IndexSearcher searcher, Query query) throws IOException{
        return queryScoreDocsByPerPage(page, perPage, searcher, query, false);
    }

    private TopDocs queryScoreDocsByPerPage(int page, int perPage, IndexSearcher searcher, Query query, boolean reverse) throws IOException{
        if (query == null) {
            logger.error("Query is null");
            return null;
        }
        Sort order = reverse ? sortReverse : sort;
        ScoreDoc before = null;
        // page > 1 (not != 1) also guards against page <= 0, which would
        // produce a non-positive numHits and make search() throw.
        if (page > 1) {
            // The anchor handed to searchAfter must be a FieldDoc produced by
            // a search using the SAME Sort; an unsorted pre-search returns
            // plain ScoreDocs, which searchAfter(…, Sort) rejects.
            TopDocs docsBefore = searcher.search(query, (page - 1) * perPage, order);
            ScoreDoc[] scoreDocs = docsBefore.scoreDocs;
            if (scoreDocs.length > 0) {
                before = scoreDocs[scoreDocs.length - 1];
            }
        }
        return searcher.searchAfter(before, query, perPage, order);
    }


    private TopDocs getScoreDocs(IndexSearcher searcher, Query query) throws IOException{
        return getScoreDocs(searcher, query, false);
    }

    private TopDocs getScoreDocs(IndexSearcher searcher, Query query, boolean reverse) throws IOException{
        // search() requires numHits > 0; an empty index has maxDoc() == 0.
        int numHits = Math.max(1, getMaxDocId(searcher));
        return searcher.search(query, numHits, reverse ? sortReverse : sort);
    }


    /**
     * Returns the number of document slots in the index
     * (includes deleted documents).
     *
     * @param searcher active searcher
     * @return {@code maxDoc} of the underlying reader
     */
    private int getMaxDocId(IndexSearcher searcher){
        return searcher.getIndexReader().maxDoc();
    }

    /**
     * Maps an entity to a Lucene document: id is both an indexed term and a
     * doc-values field (for sorting), time is both a point (range search)
     * and a stored field (retrieval).
     *
     * @param entity source entity
     * @return the document to index
     */
    @Override
    public Document convert(BaseIndexEntity entity) {
        Document doc = new Document();
        doc.add(new StringField(BaseIndexField.F_ID, Long.toString(entity.getId()), Field.Store.YES));
        // Doc-values copy of the id enables sorting by F_ID.
        doc.add(new NumericDocValuesField(BaseIndexField.F_ID, entity.getId()));
        doc.add(new StringField(BaseIndexField.F_STATUS, entity.getStatus(), Field.Store.YES));
        doc.add(new TextField(BaseIndexField.F_TITLE, entity.getTitle(), Field.Store.YES));
        // Point field supports time-range queries but is not stored.
        doc.add(new LongPoint(BaseIndexField.F_TIME, entity.getTime()));
        // Stored copy so the value can be read back from search results.
        doc.add(new StoredField(BaseIndexField.F_TIME, entity.getTime()));
        return doc;
    }

    /**
     * Looks up a single entity by id.
     *
     * @param id entity id
     * @return the entity, or {@code null} when absent
     * @throws LuceneException on index access failure
     */
    @Override
    public BaseIndexEntity getById(Long id) {
        IndexSearcher searcher = luceneTemplate.getSearcher();
        TermQuery termQuery = new TermQuery(buildTerm(id));
        try {
            TopDocs topDocs = searcher.search(termQuery, 1);
            List<BaseIndexEntity> list = parseScoreDoc(searcher, topDocs);
            if (CollectionUtils.isEmpty(list)) {
                return null;
            }
            return list.get(0);
        } catch (IOException e) {
            throw new LuceneException(e);
        }
    }

    /**
     * Adds entities without taking the write lock; the caller is responsible
     * for serializing concurrent writes.
     *
     * @param list entities to index
     */
    @Override
    public void insertNoLock(Collection<? extends BaseIndexEntity> list) {
        List<Document> documents = list.stream().map(this::convert).collect(Collectors.toList());
        luceneTemplate.addDocuments(documents, false);
    }

    /** Rebuilds an entity from the stored fields of a document. */
    private BaseIndexEntity docToEntity(Document document) {
        BaseIndexEntity indexEntity = new BaseIndexEntity();
        indexEntity.setId(Long.parseLong(document.get(BaseIndexField.F_ID)));
        indexEntity.setTime(Long.parseLong(document.get(BaseIndexField.F_TIME)));
        indexEntity.setStatus(document.get(BaseIndexField.F_STATUS));
        indexEntity.setTitle(document.get(BaseIndexField.F_TITLE));
        return indexEntity;
    }

    /** Builds the exact-match term for a document id. */
    Term buildTerm(long id) {
        return new Term(BaseIndexField.F_ID, Long.toString(id));
    }

    /**
     * Seeds the index from the configured classpath JSON file at startup.
     * Skips silently when no file is configured or the resource is absent,
     * instead of failing application startup with an NPE.
     */
    private void loadInit() {
        if (initDocFile == null || initDocFile.isEmpty()) {
            logger.info("no init doc file configured, skip loadInit");
            return;
        }
        logger.info("start to loadInit, read file:{} to insert", initDocFile);
        java.net.URL resource = LuceneSearchImpl.class.getClassLoader().getResource(initDocFile);
        if (resource == null) {
            logger.warn("init doc file {} not found on classpath, skip loadInit", initDocFile);
            return;
        }
        String filePath = resource.getPath();
        if (FileUtil.exist(filePath)) {
            String jsonArray = FileUtil.readString(filePath, Charset.defaultCharset());
            Type type = new TypeToken<List<BaseIndexEntity>>(){}.getType();
            List<BaseIndexEntity> list = new Gson().fromJson(jsonArray, type);
            List<Document> collect = list.stream().map(this::convert).collect(Collectors.toList());
            IndexWriter indexWriter = null;
            try {
                indexWriter = luceneTemplate.getIndexWriter(true);
                luceneTemplate.addDocuments(indexWriter, collect);
            } finally {
                // Always release the writer, even if indexing fails.
                luceneTemplate.closeIndexWriter(indexWriter);
            }
        }
    }

    /**
     * Resolves each hit into an entity by fetching its stored fields.
     *
     * @param searcher active searcher
     * @param topDocs  search hits
     * @return entities in hit order
     * @throws IOException on index access failure
     */
    private List<BaseIndexEntity> parseScoreDoc(IndexSearcher searcher, TopDocs topDocs) throws IOException {
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        List<BaseIndexEntity> list = new ArrayList<>(scoreDocs.length);
        for (ScoreDoc scoreDoc : scoreDocs) {
            Document doc = searcher.doc(scoreDoc.doc);
            list.add(docToEntity(doc));
        }
        return list;
    }
}
