/** Build a file-based Lucene inverted index.
 * 
 * @author Scott Sanner
 */

package search;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.snowball.SnowballAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.Version;


public class FileIndexBuilder {

	/** Analyzer used for both indexing and (by callers) query parsing. */
	public Analyzer  _analyzer; 
	/** Filesystem path of the Lucene index directory. */
	public String    _indexPath;
	/** Path to the stop-word file, one word per line. */
	public final static String IGNORE_WORDS_ABS = "./src/nlp/nicta/filters/stopwords.txt";
	
	/** Loads the stop-word file into a Lucene stop set.
	 * 
	 * Each non-blank line is trimmed and lower-cased; blank lines are
	 * skipped so they do not become empty-string "stop words".
	 * 
	 * @return the stop set for analyzer construction
	 * @throws IOException if the stop-word file cannot be read
	 */
	public CharArraySet stopWord() throws IOException {
        List<String> stopWords = new ArrayList<String>();
        BufferedReader br = new BufferedReader(new FileReader(IGNORE_WORDS_ABS));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                String word = line.trim().toLowerCase();
                if (word.length() > 0) // skip blank lines in the word list
                    stopWords.add(word);
            }
        } finally {
            // Release the file handle even if readLine() throws
            br.close();
        }
        return StopFilter.makeStopSet(Version.LUCENE_42,
                stopWords.toArray(new String[stopWords.size()]));
	}
	
	/** Creates a builder that indexes into the given directory path
	 * using an EnglishAnalyzer (Porter stemming) with the custom
	 * stop-word list from IGNORE_WORDS_ABS.
	 * 
	 * @param index_path filesystem path where the index is (or will be) stored
	 * @throws IOException if the stop-word file cannot be read
	 */
	public FileIndexBuilder(String index_path) throws IOException {
		_analyzer = new EnglishAnalyzer(Version.LUCENE_42, stopWord());
	    // Store the index path
	    _indexPath = index_path;
	}
		
	/** Main procedure for adding files to the index
	 * 
	 * @param files documents to add, one Lucene document per file
	 * @param clear_old_index set to true to create a new index, or
	 *                        false to add to a currently existing index
	 * @return true on success, false if an I/O error occurred
	 */
	public boolean addFiles(List<File> files, boolean clear_old_index) {
	
		try {
			// NOTES: With OpenMode.CREATE_OR_APPEND content can be added to
			//        an existing index (even while searchers and readers are
			//        accessing it... additional content goes into separate
			//        segments).
			//
			//        To merge can use:
			//        IndexWriter.addIndexes(IndexReader[]) and 
			//        IndexWriter.addIndexes(Directory[])
			//
			//        Index is optimized on optimize() or close()
			File indexDir = new File(_indexPath);
			Directory index = new SimpleFSDirectory(indexDir);
			try {
				IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_42, _analyzer);
				// FIX: honor clear_old_index -- previously OpenMode.CREATE was
				// hard-coded, so an existing index was always overwritten even
				// when the caller asked to append.
				config.setOpenMode(clear_old_index
						? OpenMode.CREATE
						: OpenMode.CREATE_OR_APPEND);
				IndexWriter w = new IndexWriter(index, config);
				try {
					// Add all files
					for (File f : files) {
						DocAdder.AddDoc(w, f);
					}
				} finally {
					// Close the writer even if AddDoc throws, otherwise the
					// index write lock is leaked.
					w.close();
				}
			} finally {
				// Release directory resources in all cases
				index.close();
			}
		} catch (IOException e) {
			System.err.println(e);
			return false;
		}
		
		return true;
	}
	
	/** Demo driver: (re)builds an index over data/test_lucene/*.txt
	 * and dumps its contents to stdout.
	 */
	public static void main(String[] args) throws Exception {
		
		String index_path = "lucene.index";
		FileIndexBuilder b = new FileIndexBuilder(index_path);
		b.addFiles(FileFinder.GetAllFiles("data/test_lucene", ".txt", true), 
				true /*clear_old_index = false if adding*/);
		
		IndexDisplay.Display(index_path, System.out);
	}

}
