package com.flute.framework.index;

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;

import com.flute.framework.Flute;
import com.flute.framework.LuceneDocumentBuilder;
import com.flute.framework.data.mysql.MysqlIndexData;
import com.flute.framework.exception.ConfigurationParseException;
import com.flute.framework.exception.IndexException;
import com.flute.framework.index.analysis.PayloadTokenizer;
import com.flute.framework.index.analysis.SpliterAnalyzer;
import com.flute.framework.index.fieldbuilder.ScoreField;
import com.flute.framework.search.LayerPayLoadFunction;
import com.flute.framework.search.LayerPayloadTermQuery;
import com.flute.framework.search.LayerSimilarity;
import com.flute.framework.search.sort.ScoreUtil;
import com.flute.framework.spliter.WordSpliter;
import com.flute.tools.data.DataIterator;
import com.flute.tools.data.DataRecord;
import com.flute.tools.util.PinyinUtil;
import com.flute.tools.util.StringUtil;

/**
 * Manual integration tester that bulk-loads the current MySQL data records
 * into a local Lucene index and then runs a sample payload-aware term query
 * against it, printing matches and scores to stdout.
 */
public class RawLuceneTester {

	/** Directory holding the test index; shared by indexing and search. */
	private static final String INDEX_DIR = "search/E";

	/**
	 * Progress counter for the optional multi-threaded indexing path.
	 * AtomicInteger because several runnables may increment it concurrently.
	 */
	private static final AtomicInteger count = new AtomicInteger();

	/**
	 * Rebuilds the index from scratch (create=true), then runs {@link #search()}.
	 *
	 * @throws IOException if the index directory cannot be written
	 * @throws IndexException if the configured document builder fails
	 */
	public static void main(String[] args) throws CorruptIndexException,
			LockObtainFailedException, IOException, InterruptedException,
			IndexException {
		IndexWriter writer = new IndexWriter(FSDirectory.open(new File(
				INDEX_DIR)), new SpliterAnalyzer(), true,
				IndexWriter.MaxFieldLength.LIMITED);
		try {
			// Large merge factor / doc buffer keep segment merging out of the
			// hot loop during this bulk load.
			writer.setMergeFactor(10000);
			writer.setMaxBufferedDocs(10000);

			DataIterator<DataRecord> it = new MysqlIndexData()
					.getCurrentDataRecords(true);
			long start = System.currentTimeMillis();
			int indexed = 0;
			LuceneDocumentBuilder builder = getBuilder();
			while (it.hasNext()) {
				writer.addDocument(getDocumentByBuilder(builder, it.next()));
				System.out.println(indexed++);
			}

			writer.optimize();
			System.out.println("time:" + (System.currentTimeMillis() - start));
		} finally {
			// Always release the index write lock, even if indexing fails
			// midway; otherwise the next run dies with LockObtainFailedException.
			writer.close();
		}

		search();
	}

	/**
	 * Runs a sample payload term query ("you" in field "songname_fc") against
	 * the freshly built index and prints each hit's song name and score.
	 * Errors are reported to stderr; the searcher is always closed.
	 */
	private static void search() {
		IndexSearcher searcher = null;
		try {
			searcher = new IndexSearcher(FSDirectory.open(new File(INDEX_DIR)));
			searcher.setSimilarity(new LayerSimilarity());
			TopDocs tds = searcher
					.search(new LayerPayloadTermQuery(new Term("songname_fc",
							"you"), new LayerPayLoadFunction(), "0"), 10);

			for (ScoreDoc sd : tds.scoreDocs) {
				System.out.println(searcher.doc(sd.doc).getField("song_name")
						.stringValue()
						+ " score:" + sd.score);
			}
		} catch (CorruptIndexException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (searcher != null) {
				try {
					searcher.close();
				} catch (IOException ignored) {
					// Best-effort close; nothing further to do in a tester.
				}
			}
		}
	}

	/**
	 * Kept for the multi-threaded indexing experiment: each runnable drains the
	 * shared iterator and feeds the shared writer. Requires that {@code it} be
	 * thread-safe; the shared {@link #count} is atomic.
	 */
	@SuppressWarnings("unused")
	private static Runnable getRunnable(final Iterator<DataRecord> it,
			final IndexWriter writer) {
		return new Runnable() {

			@Override
			public void run() {
				while (it.hasNext()) {
					try {
						writer.addDocument(getDocument(it.next()));
						System.out.println(count.getAndIncrement());
					} catch (CorruptIndexException e) {
						e.printStackTrace();
					} catch (IOException e) {
						e.printStackTrace();
					}
				}
			}

		};
	}

	/** Builds a Lucene document for one record via the configured builder. */
	private static Document getDocumentByBuilder(LuceneDocumentBuilder builder,
			DataRecord dr) {
		return (Document) builder.buildDocument(dr);
	}

	/**
	 * Looks up the "songIndex" indexer's document builder from the framework
	 * configuration.
	 *
	 * @throws IllegalStateException if the configuration cannot be parsed —
	 *         failing fast here beats the NPE the former null return caused at
	 *         the first addDocument call.
	 */
	private static LuceneDocumentBuilder getBuilder() {
		try {
			return (LuceneDocumentBuilder) ((LuceneIndexer) Flute.getInstance()
					.getIndexer("songIndex").getIndexer()).getBuilder();
		} catch (ConfigurationParseException e) {
			throw new IllegalStateException(
					"failed to load builder for indexer 'songIndex'", e);
		}
	}

	/**
	 * Hand-built alternative to the configured builder: normalizes the four
	 * text columns and emits the full set of payload-tokenized search fields
	 * plus the stored (unindexed) display fields.
	 */
	private static Document getDocument(DataRecord dr) {
		Document doc = new Document();
		String name = StringUtil.getStandardString(dr.getFieldValue("song_name"));
		String lyric = StringUtil.getStandardString(dr.getFieldValue("lyric"));
		String singer = StringUtil.getStandardString(dr
				.getFieldValue("singer_name"));
		String album = StringUtil.getStandardString(dr
				.getFieldValue("album_name"));

		addPayloadFields(doc, "song", name);
		addPayloadFields(doc, "singer", singer);
		addPayloadFields(doc, "album", album);

		doc.add(new Field("lyric", lyric, Field.Store.YES, Field.Index.NO));
		doc.add(new Field("song", name, Field.Store.YES, Field.Index.NO));
		doc.add(new Field("singer", singer, Field.Store.YES, Field.Index.NO));
		doc.add(new Field("album", album, Field.Store.YES, Field.Index.NO));

		return doc;
	}

	/**
	 * Adds the six payload-tokenized variants of {@code value} under
	 * {@code prefix}_same/_split/_pinyin/_header/_reverse/_charsame.
	 * NOTE(review): as in the original, "_reverse" holds the
	 * special-chars-stripped form and "_charsame" the reversed string — the
	 * names look swapped, but the mapping is preserved verbatim; confirm
	 * against the query side before renaming.
	 */
	private static void addPayloadFields(Document doc, String prefix,
			String value) {
		doc.add(new Field(prefix + "_same", getSameTokenizer(value)));
		doc.add(new Field(prefix + "_split", getSplitTokenizer(value)));
		doc.add(new Field(prefix + "_pinyin", getPinyinTokenizer(value)));
		doc.add(new Field(prefix + "_header", getHeaderTokenizer(value)));
		doc.add(new Field(prefix + "_reverse", getSameTokenizer(StringUtil
				.removeSpecialChars(value))));
		doc.add(new Field(prefix + "_charsame", getHeaderTokenizer(StringUtil
				.reverseString(value))));
	}

	/** Single-term tokenizer: the key itself with the default payload score. */
	private static PayloadTokenizer getSameTokenizer(String key) {
		ScoreField sf = new ScoreField();
		sf.addString(key);
		sf.addScore(key, ScoreField.DEFAULT_SCORE, 100);
		return new PayloadTokenizer(sf);
	}

	/** Word-split tokenizer: each split term carries the default score. */
	private static PayloadTokenizer getSplitTokenizer(String key) {
		ScoreField sf = new ScoreField();
		for (String s : WordSpliter.getInstance().split(key)) {
			sf.addString(s);
			sf.addScore(s, ScoreField.DEFAULT_SCORE, 100);
		}
		return new PayloadTokenizer(sf);
	}

	/**
	 * Pinyin tokenizer: each pinyin form is weighted by its per-word default
	 * score scaled by 5000.
	 */
	private static PayloadTokenizer getPinyinTokenizer(String key) {
		ScoreField sf = new ScoreField();
		for (String s : PinyinUtil.getPinyinStrings(key)) {
			sf.addString(s);
			sf.addScore(s, ScoreField.DEFAULT_SCORE,
					ScoreUtil.getWordDefaultScore(key, s) * 5000);
		}
		return new PayloadTokenizer(sf);
	}

	/** Pinyin-initials tokenizer: each header string gets the default score. */
	private static PayloadTokenizer getHeaderTokenizer(String key) {
		ScoreField sf = new ScoreField();
		for (String s : PinyinUtil.getPinyinHeaders(key)) {
			sf.addString(s);
			sf.addScore(s, ScoreField.DEFAULT_SCORE, 100);
		}
		return new PayloadTokenizer(sf);
	}
}
