package com.wt.springsamples.es.lucene;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.document.Document;

import org.apache.lucene.document.Field;

import java.io.IOException;
import java.nio.file.*;

/**
 * Walk-through notes on Lucene's write path (recorded while stepping through this test):
 *
 * 1. Obtaining the Codec (Lucene87Codec):
 *    IndexWriterConfig config = new IndexWriterConfig(analyzer); — its constructor calls Codec.getDefault().
 *
 * 2. writeField:
 *    IndexWriter.doFlush -> ... -> DocumentsWriter.doFlush -> ... -> FreqProxTermsWriter.flush
 *
 *    Inside FreqProxTermsWriter.flush:
 *      -> FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state);
 *      -> consumer.write(...)  — dispatches to the write method of PerFieldPostingsFormat's inner class FieldsWriter
 *    Notes: 1. state.segmentInfo.getCodec() returns a Lucene87Codec
 *           2. state.segmentInfo.getCodec().postingsFormat() returns a Lucene84PostingsFormat
 *    Inside FieldsWriter.write:
 *      -> FieldsConsumer consumer = format.fieldsConsumer(group.state);  — consumer is a BlockTreeTermsWriter
 *      -> consumer.write(maskedFields, norms);
 */
public class LuceneWriteTest {

    /** Number of documents to index; kept from the original experiment (1 &lt;&lt; 30). */
    private static final long DOC_COUNT = 1024L * 1024 * 1024;

    /** Default on-disk index location; can be overridden via the first program argument. */
    private static final String DEFAULT_INDEX_DIR = "d:\\lucene\\index";

    /**
     * Indexes {@link #DOC_COUNT} identical documents into an {@code FSDirectory}, then prints
     * the RAM-buffer-derived value the original experiment was probing.
     *
     * @param args optional; {@code args[0]} overrides the index directory path
     * @throws IOException if the index directory cannot be opened or written
     */
    public static void main(String[] args) throws IOException {
        String indexDir = args.length > 0 ? args[0] : DEFAULT_INDEX_DIR;

        IndexWriterConfig config;
        // try-with-resources: the original leaked the analyzer, the directory, and the
        // writer (plus its write.lock) whenever addDocument threw mid-loop.
        try (Analyzer analyzer = new StandardAnalyzer();
             Directory directory = FSDirectory.open(Paths.get(indexDir))) {
            config = new IndexWriterConfig(analyzer);
            TieredMergePolicy mergePolicy = new TieredMergePolicy();
            // Allow at most 3 segments per tier before background merging kicks in.
            mergePolicy.setSegmentsPerTier(3.0);
            config.setMergePolicy(mergePolicy);

            try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
                String text = "This is the texted to be indexed";
                for (long i = 0; i < DOC_COUNT; i++) {
                    Document doc = new Document();
                    doc.add(new Field("name", text, TextField.TYPE_STORED));
                    indexWriter.addDocument(doc);
                }
            }
        }

        double maxRamMB = config.getRAMBufferSizeMB();
        // Mirrors IndexWriter's internal "RAM buffer set vs. unlimited" decision.
        System.out.println(maxRamMB != -1 ? (long) (2 * (16 * 1024 * 1024)) : Long.MAX_VALUE);
    }
}
