package ar.uba.dc.webming.tp1.main;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.snowball.SnowballAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.Version;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

import ar.uba.dc.webming.tp1.scoring.SimilarityIdfModified;
import ar.uba.dc.webming.tp1.scoring.SimilarityNormCubeRoot;
import ar.uba.dc.webming.tp1.scoring.SimilarityNormOne;
import ar.uba.dc.webming.tp1.scoring.SimilarityTfCubeRoot;
import ar.uba.dc.webming.tp1.scoring.SimilarityTfLineal;

/**
 * Command-line tool that builds a Lucene index from an OHSUMED-style corpus file.
 *
 * <p>The corpus format is line-oriented: a record starts with a line containing
 * {@code .I} (the document marker) and is followed by alternating field-name /
 * field-value line pairs until the next {@code .I} line or end of file. The
 * index is written to a directory named {@code index} next to the corpus file.
 *
 * <p>Analyzer and similarity implementations are selectable via the
 * {@code -an} and {@code -sim} numeric options (see the usage text in
 * {@link #main(String[])}).
 */
public class IndexOhsuCorpus {

	/**
	 * Entry point. Parses {@code -corpus <file>}, optional {@code -sim n} and
	 * {@code -an n}, builds the configured analyzer/similarity pair, and
	 * indexes the corpus. Prints usage and exits on {@code -h}/{@code -help},
	 * on a missing {@code -corpus}, or on an option with no value.
	 *
	 * @param args command-line arguments as described in the usage text
	 */
	public static void main(String[] args) {
		// NOTE: usage previously advertised the wrong package ("webmining");
		// the actual package is ar.uba.dc.webming.tp1.main.
		String usage = "java ar.uba.dc.webming.tp1.main.IndexOhsuCorpus -corpus <corpusFile> [-sim similarityNum] [-an analyzerNum]\n";
		usage += "\tAnalyzer number description:\n";
		usage += "\tanalyzerNum = 0\t\tLucene StandardAnalizer (default)\n";
		usage += "\tanalyzerNum = 1\t\tSnowballAnalizer with porter stemmer without stopwords\n";
		usage += "\tanalyzerNum = 2\t\tSnowballAnalizer with porter stemmer with english stopwords\n";
		usage += "\tSimilarity number description:\n";
		usage += "\tsimilarityNum = 0\tLucene default similarity(default)\n";
		usage += "\tsimilarityNum = 1\tLucene default similarity with idf factor modified\n";
		usage += "\tsimilarityNum = 2\tLucene default similarity with tf factor lineal\n";
		usage += "\tsimilarityNum = 3\tLucene default similarity with tf factor cube root\n";
		usage += "\tsimilarityNum = 4\tLucene default similarity without normalization length\n";
		usage += "\tsimilarityNum = 5\tLucene default similarity with normalization length using cube root\n";
		if(args.length>=1 && ("-h".equals(args[0]) || "-help".equals(args[0]))){
			System.out.println(usage);
			return;
		}
		String corpusFileName = null;
		int similarityNum = 0;
		int analyzerNum = 0;
		int i = 0;
		while(i<args.length){
			String opc = args[i];
			// Each option consumes its value; guard against a trailing option
			// with no value (previously an ArrayIndexOutOfBoundsException).
			if("-corpus".equals(opc) || "-sim".equals(opc) || "-an".equals(opc)){
				if(i+1 >= args.length){
					System.out.println(usage);
					return;
				}
				if("-corpus".equals(opc)){
					corpusFileName = args[i+1];
				}else if("-sim".equals(opc)){
					similarityNum = Integer.parseInt(args[i+1]);
				}else{
					analyzerNum = Integer.parseInt(args[i+1]);
				}
				i++;
			}
			i++;
		}
		if(corpusFileName==null){
			System.out.println(usage);
			return;
		}
		File corpusFile = new File(corpusFileName);
		// Index directory lives next to the corpus file.
		File indexFile = new File(corpusFile.getParent() + File.separator + "index");
		Analyzer analyzer;
		Similarity similarity;

		if(analyzerNum == 1){
			analyzer = new SnowballAnalyzer("Porter");
		}else if(analyzerNum == 2){
			analyzer = new SnowballAnalyzer("Porter", StopAnalyzer.ENGLISH_STOP_WORDS);
		}else{
			analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
		}

		if(similarityNum == 1){
			similarity = new SimilarityIdfModified();
		}else if(similarityNum == 2){
			similarity = new SimilarityTfLineal();
		}else if(similarityNum == 3){
			similarity = new SimilarityTfCubeRoot();
		}else if(similarityNum == 4){
			similarity = new SimilarityNormOne();
		}else if(similarityNum == 5){
			similarity = new SimilarityNormCubeRoot();
		}else{
			similarity = new DefaultSimilarity();
		}
		IndexWriter writer = null;
		try {
			// create=true: any existing index at this location is overwritten.
			writer = new IndexWriter(
					FSDirectory.open(indexFile),
					analyzer,
					true,
					IndexWriter.MaxFieldLength.LIMITED);
			writer.setSimilarity(similarity);
			System.out.println("Indexing " + corpusFile.getName() + ". Index files at: " + indexFile.getAbsolutePath());
			BufferedReader corpusReader = new BufferedReader(new FileReader(corpusFile));
			// indexOshuCorpus closes corpusReader in all cases.
			indexOshuCorpus(writer, corpusReader);
			System.out.println("Indexing finished.");
			System.out.println("Optimizing...");
			writer.optimize();
			System.out.println("Done");
		} catch (CorruptIndexException e) {
			e.printStackTrace();
		} catch (LockObtainFailedException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// Always release the index writer (previously leaked on any
			// exception thrown after construction, leaving the write lock held).
			if (writer != null) {
				try {
					writer.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Reads the whole corpus from {@code corpusReader} and adds one Lucene
	 * document per {@code .I}-delimited record to {@code writer}. The reader
	 * is always closed, even on failure.
	 *
	 * @param writer       destination index writer (not closed by this method)
	 * @param corpusReader reader positioned at the start of the corpus
	 * @throws IOException if the corpus cannot be read, does not start with a
	 *                     {@code .I} document marker, or a document cannot be
	 *                     added to the index
	 */
	public static void indexOshuCorpus(IndexWriter writer, BufferedReader corpusReader) throws IOException{
		try {
			// Consume the leading ".I" header explicitly. The original code did
			// this inside an assert, so with assertions disabled (the JVM
			// default) the header was never consumed and every record was
			// shifted by one line — silent index corruption.
			String header = corpusReader.readLine();
			if (header == null || !header.contains(".I")) {
				throw new IOException("Corpus does not start with a '.I' document marker: " + header);
			}
			boolean moreDocs = true;
			while (moreDocs) {
				moreDocs = createOshuDoc(writer, corpusReader);
			}
		}
		finally {
			corpusReader.close();
		}
	}

	/**
	 * Reads one record's field-name / field-value line pairs up to the next
	 * {@code .I} marker (or end of file) and adds the resulting document to
	 * the index. The {@code .I} marker line itself is consumed but not stored.
	 *
	 * @param writer       destination index writer
	 * @param corpusReader reader positioned just past a {@code .I} line
	 * @return {@code true} if another record follows (a {@code .I} line was
	 *         seen), {@code false} at end of file
	 * @throws IOException on read or index-write failure
	 */
	public static boolean createOshuDoc(IndexWriter writer, BufferedReader corpusReader)throws IOException{
		String line = null;
		Document doc = new Document();
		while(( line = corpusReader.readLine()) != null && !line.contains(".I") ){
			String value = corpusReader.readLine();
			if (value == null) {
				// Truncated record: a field name with no value line at EOF.
				// Previously this passed null into Field and threw an NPE;
				// index what we have and stop.
				break;
			}
			doc.add(new Field(line, value, Field.Store.YES, Field.Index.ANALYZED));
		}
		writer.addDocument(doc);
		return line!=null;
	}

}
