package langnstats.project.languagemodel.loglinear;

///////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2001 Chieu Hai Leong and Jason Baldridge
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
//////////////////////////////////////////////////////////////////////////////   

import opennlp.maxent.*;
import opennlp.maxent.io.*;
import java.io.*;

import langnstats.project.ParserTools.StanfordParserHandel;
import langnstats.project.lib.LanguageModel;
import langnstats.project.lib.WordType;
import langnstats.project.lib.crossvalidation.CrossValidationResult;
import langnstats.project.lib.crossvalidation.CrossValidationResultItem;
import langnstats.project.lib.crossvalidation.CrossValidationTokenSet;
import langnstats.project.lib.crossvalidation.TrainTokens;
import langnstats.project.tools.Logdouble;
/**
 * Main class which calls the GIS procedure after building the EventStream
 * from the data.
 *
 * @author  Chieu Hai Leong and Jason Baldridge
 * @version $Revision: 1.5 $, $Date: 2005/10/24 12:29:20 $
 */
/**
 * Maximum-entropy (log-linear) language model.  {@link #train} builds a
 * plain-text feature file in memory from the training tokens and hands it to
 * the OpenNLP GIS trainer, persisting the resulting model to disk.
 * Prediction is delegated to the static {@code Predict} helper.
 */
public class LogLinear implements LanguageModel {

    // Smoothing options for model training.  Smoothing can improve model
    // accuracy, though training will potentially take longer and use more
    // memory, and the model file will be larger.  Initial testing indicates
    // improvements for models built on small data sets with few outcomes,
    // but performance degradation for large data sets with many outcomes.
    public static boolean USE_SMOOTHING = false;
    public static double SMOOTHING_OBSERVATION = 0.1;

    /** File name the trained GIS model is persisted to. */
    private static final String MODEL_FILE_NAME = "ModelFile";

    public LanguageModel clone() { return new LogLinear(this); }

    public LogLinear() {}

    /** Copy constructor; this model keeps no instance state to copy. */
    public LogLinear(LogLinear c) {}

    /** No preparation is required for this model. */
    public void prepare(WordType[] allWordtype) {}

    /** Delegates prediction to the static {@code Predict} helper. */
    public double[] predict(WordType wordType) { return Predict.predict(wordType); }

    public String getDescription() { return null; }

    /**
     * Trains a GIS model from the given tokens.  Each adjacent
     * (previous, current) token pair becomes one training event whose
     * space-separated features are: the parse feature of the previous word,
     * the history-length feature, the previous word, and (as the outcome)
     * the current word.  The trained model is written to
     * {@link #MODEL_FILE_NAME}.
     *
     * @param trainTokens the token sequence to train on
     */
    public void train(TrainTokens trainTokens) {
        StanfordParserHandel parserHandel = new StanfordParserHandel();
        parserHandel.initializeParser();

        WordType[] tokens = trainTokens.getTokenArray();

        // Build the event stream in memory rather than via a temp file.
        StringWriter strWriter = new StringWriter();
        PrintWriter outputStream = new PrintWriter(strWriter);

        WordType prev = null;  // previous token, i.e. the context word
        for (int i = 0; i < tokens.length; i++) {
            WordType curr = tokens[i];
            if (i == 0) {
                // The first token only seeds the context; it emits no event.
                prev = curr;
                continue;
            }

            //////////////////////////////////////////////////////////////////
            // This is where you should throw in features.
            // Please remember to add spaces between features.
            //////////////////////////////////////////////////////////////////
            StringBuilder featureStr = new StringBuilder();
            featureStr.append(parserHandel.getParseFeature(prev.getName()));
            featureStr.append(' ').append(parserHandel.getHistoryLengthFeature());
            featureStr.append(' ').append(prev.getName());
            featureStr.append(' ').append(curr.getName());
            outputStream.println(featureStr);

            prev = curr;
        }
        outputStream.close();  // flushes; closing a StringWriter is a no-op

        try {
            StringReader datafr = new StringReader(strWriter.toString());
            EventStream es =
                new BasicEventStream(new PlainTextByLineDataStream(datafr));
            GIS.SMOOTHING_OBSERVATION = SMOOTHING_OBSERVATION;
            GISModel model = GIS.trainModel(es, USE_SMOOTHING);

            // Persist the trained model for later use by Predict.
            File outputFile = new File(MODEL_FILE_NAME);
            GISModelWriter writer =
                new SuffixSensitiveGISModelWriter(model, outputFile);
            writer.persist();
        } catch (Exception e) {
            // Training/persistence failure is reported, not propagated,
            // matching the original best-effort behavior.
            System.err.print("Unable to create model due to exception: ");
            e.printStackTrace();
        }
    }

}
