package com.gome.han.lucene.analyzer;

import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.util.Version;


/**
 * Demonstrates the character offsets that {@code StandardAnalyzer} assigns to
 * each token of the input "some text goes here":
 *
 * <pre>
 *     some          token start offset: 0
 *                   token end offset: 4  (exclusive)
 *     text          token start offset: 5
 *                   token end offset: 9  (exclusive)
 *     goes          token start offset: 10
 *                   token end offset: 14 (exclusive)
 *     here          token start offset: 15
 *                   token end offset: 19 (exclusive)
 * </pre>
 *
 * @author hanpeng
 * @version created 2021-02-01 16:41:47
 */
public class InvokingAnalyzer {

	/**
	 * Tokenizes a sample sentence with {@link StandardAnalyzer} and prints each
	 * token's attributes plus its start/end character offsets (end is exclusive).
	 *
	 * @param args command-line arguments (unused)
	 * @throws IOException if the token stream cannot be read
	 */
	public static void main(String[] args) throws IOException {
		// Both the analyzer and the stream are AutoCloseable; try-with-resources
		// closes them in reverse declaration order (ts first, then analyzer),
		// matching the manual finally-block it replaces.
		try (StandardAnalyzer analyzer = new StandardAnalyzer();
		     TokenStream ts = analyzer.tokenStream("myfield", new StringReader("some text goes here a"))) {
			// The Analyzer constructs the Tokenizer, TokenFilter(s), and CharFilter(s),
			// and passes the resulting Reader to the Tokenizer.
			OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);

			ts.reset(); // Resets this stream to the beginning. (Required)
			while (ts.incrementToken()) {
				// reflectAsString(true) dumps all attributes — useful for
				// token stream debugging.
				System.out.println("token: " + ts.reflectAsString(true));

				System.out.println("token start offset: " + offsetAtt.startOffset());
				System.out.println("token end offset: " + offsetAtt.endOffset());
			}
			ts.end(); // Perform end-of-stream operations, e.g. set the final offset.
		}
	}

}
