package com.bj58.wuxian.lucene.wordcount;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.junit.Test;
import org.wltea.analyzer.lucene.IKAnalyzer;

import com.bj58.wuxian.lucene.utils.RedisUtil;

/**
 * Tokenizes a local GBK-encoded text file with the IK smart analyzer and
 * increments a Redis counter for every token emitted, printing each token
 * with its character offsets along the way.
 */
public class WordCount {

	@Test
	public void wordCount() {
		// try-with-resources guarantees the analyzer, the reader chain and the
		// token stream are all closed even when tokenization throws mid-loop.
		try (Analyzer analyzer = new IKAnalyzer(true);
				InputStreamReader reader =
						new InputStreamReader(new FileInputStream("D:\\data\\data.txt"), "gbk");
				TokenStream tokenStream = analyzer.tokenStream("content", reader)) {

			CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
			OffsetAttribute offsetAttribute = tokenStream.addAttribute(OffsetAttribute.class);

			// Lucene TokenStream contract: reset() before the first
			// incrementToken(), end() after the last one, then close().
			tokenStream.reset();
			while (tokenStream.incrementToken()) {
				String name = charTermAttribute.toString();
				int startOffset = offsetAttribute.startOffset();
				int endOffset = offsetAttribute.endOffset();

				System.out.println("name:" + name + "  startOffset:" + startOffset + "  endOffset:" + endOffset);
				// Uses the token text itself as the Redis key being incremented.
				RedisUtil.incr(name);
			}
			tokenStream.end();
		} catch (IOException e) {
			// Fail the test instead of swallowing the error — printStackTrace()
			// would let a broken run pass silently.
			throw new RuntimeException("word count over D:\\data\\data.txt failed", e);
		}
	}

}
