
package com.ordern.classification.classifiers.branched;

/*
 Some requirements and TODOs for business rule class as well as applet:

 - the user should be able to type in a text and the system will classify
 the text.

 One way of doing this is to have two TextAreas:
 * one for spam messages
 * one for no spams


 - there should be at least two classes (e.g. spam/no spam).

 - the user should be able to define his/her own instances, including
 defining own classes. I.e. just the classes "0", "1" will not do
 (at least not in a more elaborated version). 

 - there should be a possibility to select different examples
 (probably via the <PARAMETER> tag in the applet HTML page).
 This is mainly for showing some different applications. (There are more
 applications than spam classification!)

 - I must find at least one example besides spam/no spam which is easy to do

 BUGS:
 - a boring bug is that some classifiers don't work if the test string
 contains a word not included in the original classifier. Which is not
 that strange if one thinks about it; but still.
 SOLUTION: put all words in the original model into a Hash/Set and
 for the test string just check the words contained in the original model.
 But then comes the problem of deciding which class to choose if there is
 no original word in the test string!

 Is there any way of getting the default class from a classifier?

 FIX: I just test the words (model words) that are in the modeling
 classification.

 */

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Attribute;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SelectedTag;
import weka.core.SerializationHelper;
import weka.core.SparseInstance;
import weka.core.converters.ArffSaver;
import weka.filters.unsupervised.attribute.StringToWordVector;

import com.ordern.classification.ContestDriver;
import com.ordern.classification.CustomClassifier;
import com.ordern.classification.Domain;
import com.ordern.classification.DomainType;
import com.ordern.classification.Page;
import com.ordern.classification.TestDataDriver;

/**
 * Weka-based domain classifier for the data contest: trains a text
 * classifier (SMO by default) on domain meta content, titles and bodies,
 * and predicts whether a domain is PERSONAL or NOT_PERSONAL.
 *
 * Serializable so the trained state can be persisted via
 * weka.core.SerializationHelper (see train() and mainX()).
 */
public class WekaDCClassifier implements CustomClassifier, Serializable {

	// Paths used to persist/reload the trained filter, data set header and
	// classifier (see train() and mainX()).
	private static final String STRING_VECTOR_MODEL =
			"./serialized/bodyToStringVector.model";
	private static final String BODY_INSTANCES_MODEL =
			"./serialized/bodyInstances.model";
	private static final String BODYCLASSIFIER_MODEL =
			"./serialized/bodyclassifier.model";
	// Default locations of the contest test data; overridable via main() args.
	private static final String TESTDIR =
			"/Users/manishmaheshwari/Projects/DataContest/testdata";
	// private static final String TEST_OUTPUT = "results-1-1000.txt";
	// private static final String TEST_INPUT = "input-1-1000.txt";
	private static final String TEST_INPUT = "summer2011datacontest-testdata.txt";
	private static final String TEST_OUTPUT =
			"summer2011datacontest-targetresults.txt";
	

	private static final String BOOSTER_DIR =
			"/Users/manishmaheshwari/Projects/DataContest/testdata/boosterDomains";
	// When true, train() serializes the trained models to disk and then
	// terminates the JVM (see train()).
	public static boolean SERIALIZE_AND_STOP = false;

	// private static final String BOOSTER_DIR = "";

	// private static final String TEST_OUTPUT = "results-err.txt";
	// private static final String TEST_INPUT = "input-err.txt";

	/*
	 * "weka.classifiers.bayes.NaiveBayes"; "weka.classifiers.lazy.IBk";
	 * "weka.classifiers.functions.SMO"; "weka.classifiers.trees.J48";
	 * "weka.classifiers.trees.Id3"; "weka.classifiers.trees.RandomForest"
	 */
	private static final String CLASSIFIER_ALGO =
			"weka.classifiers.functions.SMO";
	// maybe this should be settable?
	// NOTE(review): currently unused — the setDelimiters() call in
	// newStringToWordVector() is commented out.
	private String delimitersStringToWordVector = "\\s.,:'\\\"()?!";

	/**
	 * Classifier instances, will be initialized in constructor.
	 */

	Classifier bodyClassifier = null;

	// Attribute definitions created in initialize(): one string attribute
	// ("Body") and one nominal class attribute ("bodyclass").
	private Attribute bodyClassAttribute = null;
	private Attribute bodyTextAttribute = null;

	// Training data set; also reused (via stringFreeStructure()) as the
	// header for test instances in test().
	Instances bodyInstances = null;

	// Filter that converts raw text into a word-count vector; must be the
	// same filter instance for training and prediction.
	StringToWordVector bodyStringToWordVectorFilter;


	// private Instances metaDataFiltered = null;
	// private Instances titleFiltered = null;
	// private Instances bodyFiltered = null;

	/**
	 * Creates the classifier wrapper and instantiates the underlying Weka
	 * classifier by class name.
	 *
	 * @param classifierType fully-qualified Weka classifier class name, e.g.
	 *          "weka.classifiers.functions.SMO"
	 * @throws IllegalArgumentException if the classifier cannot be
	 *           instantiated
	 */
	public WekaDCClassifier ( String classifierType ) {
		System.out.println( "Initializing DCClassifier" );
		try {
			// null = no classifier-specific options.
			this.bodyClassifier = Classifier.forName( classifierType, null );
		}
		catch ( Exception e ) {
			// Fail fast: the previous version swallowed this and left
			// bodyClassifier null, which only deferred the failure to an
			// NPE inside train()/test().
			throw new IllegalArgumentException(
					"Could not instantiate classifier: " + classifierType, e );
		}
	}


	/**
	 * CustomClassifier contract: builds the body classifier from the given
	 * training domains plus the booster domains.
	 *
	 * For every domain, three kinds of labeled instances are added (meta
	 * content, title, body + child-page bodies), all sharing the single
	 * "Body" string attribute. Afterwards the string-to-word-vector filter
	 * is fitted and the classifier trained.
	 *
	 * If SERIALIZE_AND_STOP is set, the trained classifier, data set header
	 * and filter are persisted to disk and the JVM is terminated.
	 *
	 * @param boosterDomains extra labeled domains used to boost training
	 * @param domains the main labeled training domains; none may have type
	 *          NOT_DETERMINED (asserted below)
	 */
	public void train ( List<Domain> boosterDomains, List<Domain> domains ) {
		// Sanity check: every training domain must carry a real label.
		for ( Domain domain : domains ) {
			if ( domain.getDomainType().equals( DomainType.NOT_DETERMINED ) ) {
				System.err.println( "ASSERT FAIL" );
				System.exit( 1 );
			}
		}
		try {
			// Capacity hint only; instances are appended one at a time.
			initialize( domains.size() + boosterDomains.size() );
			System.out.println( "\nPopulating Training data ..." );
			for ( Domain domain : domains ) {
				System.out
						.format(
								"%nDomain Name: %s, %n    Domain Metadata: %s%n    Domain Title: %s%n    Domain Classification: %s"
										+ "%n    Domain Body: %s", domain.getName(), domain
										.getMetaContent(), domain.getTitle(), domain
										.getDomainType(), domain.getBody() );
				populateMetaInfo( domain );
				populateTitleInfo( domain );
				populateBodyInfo( domain );

			}

			System.out.println( "\nPopulating Booster data ..." );
			for ( Domain domain : boosterDomains ) {
				System.out
						.format(
								"%nBooster Domain Name: %s, %n    Domain Metadata: %s%n    Domain Title: %s%n    Domain Classification: %s"
										+ "%n    Domain Body: %s", domain.getName(), domain
										.getMetaContent(), domain.getTitle(), domain
										.getDomainType(), domain.getBody() );
				populateMetaInfo( domain );
				populateTitleInfo( domain );
				populateBodyInfo( domain );

			}

			System.out
					.println( "\nFiltering on training data, and building classifiers ..." );
			long start = System.currentTimeMillis();
			filterAndBuildBodyClassifier();
			System.out.println( "Done with Training ..." );
			if ( SERIALIZE_AND_STOP ) {
				// Persist everything needed to classify without retraining
				// (see mainX() for the reload path).
				SerializationHelper.write( BODYCLASSIFIER_MODEL, this.bodyClassifier );
				SerializationHelper.write( BODY_INSTANCES_MODEL, this.bodyInstances );
				SerializationHelper.write( STRING_VECTOR_MODEL,
						this.bodyStringToWordVectorFilter );
				long end = System.currentTimeMillis();
				System.out.println( "Building Time: " + (end - start) );
				System.out.println( "Done with Persisting ..." );

				// NOTE(review): exits with status 1 even though persisting
				// succeeded — callers/scripts checking the exit code will see
				// failure; exit(0) was probably intended. Confirm before changing.
				System.exit( 1 );
			}
		}
		catch ( Exception e ) {
			// Best-effort: training failures are logged, not propagated.
			e.printStackTrace();
		}

	}


	/**
	 * Dumps the given data set to "manish.arff" in Weka's ARFF format so the
	 * (filtered) training data can be inspected offline.
	 *
	 * @param instances the data set to persist
	 * @throws IOException if the file cannot be written
	 */
	private void saveAsArff ( Instances instances ) throws IOException {
		final ArffSaver arffSaver = new ArffSaver();
		arffSaver.setFile( new File( "manish.arff" ) );
		arffSaver.setInstances( instances );
		arffSaver.writeBatch();
	}


	/**
	 * Adds one labeled training instance built from the domain's meta
	 * content; domains with no meta content are skipped.
	 *
	 * @param domain the training domain (provides text and class label)
	 */
	private void populateMetaInfo ( Domain domain ) {
		final String metaContent = domain.getMetaContent();
		if ( metaContent == null || metaContent.length() == 0 ) {
			return;
		}
		Instance instance = new Instance( 2 );
		instance.setValue( this.bodyTextAttribute, metaContent );
		instance.setValue( this.bodyClassAttribute, domain.getDomainType().name() );
		this.bodyInstances.add( instance );
	}


	/**
	 * Adds one labeled training instance built from the domain's title;
	 * domains with no title are skipped.
	 *
	 * @param domain the training domain (provides text and class label)
	 */
	private void populateTitleInfo ( Domain domain ) {
		final String title = domain.getTitle();
		if ( title == null || title.length() == 0 ) {
			return;
		}
		Instance instance = new Instance( 2 );
		instance.setValue( this.bodyTextAttribute, title );
		instance.setValue( this.bodyClassAttribute, domain.getDomainType().name() );
		this.bodyInstances.add( instance );
	}


	/**
	 * Adds labeled training instances from the domain's own body and from
	 * the body of every child page, all tagged with the domain's class.
	 *
	 * @param domain the training domain (provides texts and class label)
	 */
	private void populateBodyInfo ( Domain domain ) {
		final String label = domain.getDomainType().name();
		addLabeledBodyText( domain.getBody(), label );
		/*
		 * Augmented body classifier training on child pages.
		 */
		for ( Page page : domain.getChildPages() ) {
			addLabeledBodyText( page.getBody(), label );
		}
	}


	/**
	 * Appends one labeled text instance to the training set; empty or null
	 * text is ignored.
	 */
	private void addLabeledBodyText ( String text, String label ) {
		if ( text == null || text.length() == 0 ) {
			return;
		}
		Instance instance = new Instance( 2 );
		instance.setValue( this.bodyTextAttribute, text );
		instance.setValue( this.bodyClassAttribute, label );
		this.bodyInstances.add( instance );
	}


	/**
	 * Fits the string-to-word-vector filter on the collected training
	 * instances, converts them to the word-vector representation, dumps the
	 * filtered data set as ARFF for inspection, and trains the classifier.
	 *
	 * @throws Exception propagated from Weka filtering/training
	 */
	private void filterAndBuildBodyClassifier () throws Exception {
		this.bodyStringToWordVectorFilter = newStringToWordVector();
		this.bodyStringToWordVectorFilter.setInputFormat( this.bodyInstances );
		final Instances filteredTrainingData =
				weka.filters.Filter.useFilter( this.bodyInstances,
						this.bodyStringToWordVectorFilter );
		saveAsArff( filteredTrainingData );
		this.bodyClassifier.buildClassifier( filteredTrainingData );
	}


	/**
	 * Factory method for the StringToWordVector filter used on the body
	 * text: lower-cased, Snowball-stemmed word counts over the first
	 * attribute, normalized per document.
	 *
	 * @return a configured (but not yet fitted) filter; the caller must
	 *         still invoke setInputFormat()
	 * @throws Exception propagated from Weka filter configuration
	 */
	private static StringToWordVector newStringToWordVector () throws Exception {
		// Effectively "keep everything": far larger than the real vocabulary.
		int wordsToKeep = 200000;
		StringToWordVector filter = new StringToWordVector( wordsToKeep );
		// we ignore this for now...
		// filter.setDelimiters(delimitersStringToWordVector);
		filter.setStemmer( new SnowballStemmerAdapter() );
		// filter.setStopwords( new File( "serialized/stopwords.txt" ) );
		// if ( !filter.getUseStoplist() ) {
		// System.out.println( "Assertion failed. Stopword file was not found." );
		// System.exit( 1 );
		// }
		// filter.setUseStoplist( true );
		filter.setOutputWordCounts( true );
		// Only attribute 1 (the "Body" string attribute) is tokenized.
		filter.setSelectedRange( "1" );
		filter.setPeriodicPruning( 0.1 );
		filter.setLowerCaseTokens( true );
		// Normalize word counts by document length for all data.
		filter
				.setNormalizeDocLength( new SelectedTag(
						StringToWordVector.FILTER_NORMALIZE_ALL,
						StringToWordVector.TAGS_FILTER ) );
		
		// filter.setIDFTransform( true );
		// filter.setInputFormat( theseInstances );
		return filter;
	}

	// Minimum whitespace-token counts required before each predictor in
	// test() will attempt a classification (see predictByMetaData/Title/Body).
	// NOTE(review): MIN_TITLE_ATTRIBUTES = 14000 tokens looks unreachably
	// high for a title+meta string, which would effectively disable
	// predictByTitle — confirm whether this is intentional tuning.
	private static final int MIN_META_ATTRIBUTES = 4000;
	private static final int MIN_TITLE_ATTRIBUTES = 14000;
	private static final int MIN_BODY_ATTRIBUTES = 4000;


	/**
	 * CustomClassifier contract: classifies each domain using a cascade of
	 * predictors — meta content first, then title, then body (augmented with
	 * child pages). The first predictor whose input passes its token
	 * threshold decides the class. Domains no predictor handles fall through
	 * to a default of NOT_PERSONAL ("ByProbability").
	 *
	 * Must be called after train(): relies on bodyInstances and the fitted
	 * bodyStringToWordVectorFilter.
	 *
	 * @param domains domains to classify; each gets its DomainType and
	 *          classifierComments updated in place
	 */
	public void test ( List<Domain> domains ) {
		try {
			// Same attribute structure as the training set but without the
			// accumulated string values — used as the header for test instances.
			Instances bodyTestInstances = this.bodyInstances.stringFreeStructure();
			System.out.println( "Classification Started..... " );
		// checkPatternRules( domains );
			
			
			
			for ( Domain domain : domains ) {
				System.out
						.format(
								"%nTest Domain Name: %s, %n    Domain Metadata: %s%n    Domain Title: %s%n    Domain Classification: %s"
										+ "%n    Domain Body: %s", domain.getName(), domain
										.getMetaContent(), domain.getTitle(), domain
										.getDomainType(), domain.getBody() );
				// 1st choice: classify on meta content alone.
				if ( null != domain.getMetaContent()
						&& domain.getMetaContent().length() > 0 ) {
					if ( predictByMetaData( bodyTestInstances,
							this.bodyStringToWordVectorFilter, domain ) ) {
						continue;
					}
				}
				// 2nd choice: title (combined with meta inside predictByTitle).
				if ( null != domain.getTitle() && domain.getTitle().length() > 0 ) {
					if ( predictByTitle( bodyTestInstances,
							this.bodyStringToWordVectorFilter, domain ) ) {
						continue;
					}

				}
				// 3rd choice: body plus as many child pages as needed.
				if ( (null != domain.getBody() && domain.getBody().length() > 0)
						|| domain.getChildPages().size() > 0 ) {
					/*
					 * If body/pages is there, use it
					 */
					if ( predictByBody( bodyTestInstances,
							this.bodyStringToWordVectorFilter, domain ) ) {
						continue;
					}
				}
				// Fallback: no predictor had enough text — default class.
				domain.setDomainType( DomainType.NOT_PERSONAL );
				domain.setClassifierComments( domain.getClassifierComments()
						+ ":ByProbability:" );
			}

		}
		catch ( Exception e ) {
			// Best-effort: classification errors are logged, not propagated.
			e.printStackTrace();
		}
	}

	// Heuristic regexes over page text, used by countContactMe()/countIm()/
	// countBlog() and the checkPatternRules() analysis. "[\\s]*" lets the
	// two words be separated by any (or no) whitespace.
	 String CONTACT_PATTERN = "contact[\\s]*me";
	String IM_PATTERN = "I[\\s]*am";
	String BLOG_PATTERN = "about[\\s]*me";
	// "biblography" "biography" "photos" "about me"

	// String CONTACT_PATTERN = "friend";
	// String IM_PATTERN = "family";
	// String BLOG_PATTERN = "picture";
	
	// Compiled once per instance; Pattern is thread-safe so the matchers
	// below can be created freely.
	Pattern contactPatt = Pattern.compile( CONTACT_PATTERN, Pattern.CASE_INSENSITIVE );
	Pattern imPatt = Pattern.compile( IM_PATTERN, Pattern.CASE_INSENSITIVE );
	Pattern blogPatt = Pattern.compile( BLOG_PATTERN, Pattern.CASE_INSENSITIVE );
	/**
	 * One-shot analysis helper (currently only referenced from a
	 * commented-out call in test()): for each regex listed in
	 * serialized/patternfile.txt, counts how often it matches the combined
	 * text of every domain (meta + title + body + all child pages), splits
	 * domains into personal/non-personal using the gold labels from
	 * TEST_OUTPUT, prints per-domain counts and per-class totals, then
	 * terminates the JVM.
	 *
	 * NOTE(review): ends with System.exit(1), so it can never be part of a
	 * normal pipeline run.
	 *
	 * @param domains the domains to analyze
	 */
	private void checkPatternRules ( List<Domain> domains ) {
		// Gold labels: names of the domains known to be personal.
		HashSet<String> personal = null;
		try {
			personal = TestDataDriver.readPersonalDomainSet( TESTDIR, TEST_OUTPUT );
			System.out.println( "Total Personal: " + personal.size() );
			System.out.println( personal.toString() );
		}
		catch ( Exception e ) {
			e.printStackTrace();
		}

		// One case-insensitive pattern per non-comment line of the file.
		File patternFile = new File( "serialized/patternfile.txt" );
		HashMap<String, Pattern> pattMap = new HashMap<String, Pattern>();
		BufferedReader reader = null;
		try {
			reader = new BufferedReader( new FileReader( patternFile ) );
			String line;
			while ( (line = reader.readLine()) != null ) {
				line = line.trim();
				// Lines starting with '#' are comments.
				if ( line.startsWith( "#" ) )
					continue;
				pattMap.put( line, Pattern.compile( line, Pattern.CASE_INSENSITIVE ) );
			}
		}
		catch ( Exception e ) {
			e.printStackTrace();
		}
		finally {
			// Close in finally: the previous version leaked the reader when
			// readLine() or Pattern.compile() threw.
			if ( reader != null ) {
				try {
					reader.close();
				}
				catch ( IOException e ) {
					e.printStackTrace();
				}
			}
		}

		List<Domain> personalList = new ArrayList<Domain>( 4000 );
		List<Domain> nonPersonalList = new ArrayList<Domain>( 4000 );
		for ( Domain domain : domains ) {
			// Concatenate every piece of text the domain offers.
			StringBuffer buffer = new StringBuffer();
			buffer.append( domain.getMetaContent() );
			buffer.append( " " );
			buffer.append( domain.getTitle() );
			buffer.append( " " );
			buffer.append( domain.getBody() );
			buffer.append( " " );
			for ( Page page : domain.getChildPages() ) {
				buffer.append( page.getMetaContent() );
				buffer.append( " " );
				buffer.append( page.getTitle() );
				buffer.append( " " );
				buffer.append( page.getBody() );
				buffer.append( " " );
			}

			// Count matches of every pattern in the combined text.
			HashMap<String, Integer> countMap = new HashMap<String, Integer>();
			for ( String key : pattMap.keySet() ) {
				Matcher matcher = pattMap.get( key ).matcher( buffer );
				int countPattern = 0;
				while ( matcher.find() ) {
					countPattern++;
				}
				countMap.put( key, countPattern );
			}
			domain.setCountMap( countMap );
			// Assign the gold label and bucket the domain accordingly.
			if ( personal.contains( domain.getName() ) ) {
				domain.setDomainType( DomainType.PERSONAL );
				personalList.add( domain );
			}
			else {
				domain.setDomainType( DomainType.NOT_PERSONAL );
				nonPersonalList.add( domain );
			}
		}

		HashMap<String, Integer> summary =
				printAndSummarize( nonPersonalList, pattMap.keySet() );
		HashMap<String, Integer> summaryP =
				printAndSummarize( personalList, pattMap.keySet() );

		System.out.println( "SUMMARY(nonPersonal): " + summary.toString() );
		System.out.println( "SUMMARY(Personal): " + summaryP.toString() );

		System.exit( 1 );
	}


	/**
	 * Prints each domain's pattern-count map and accumulates per-pattern
	 * totals across the whole list.
	 *
	 * @param list domains to report on
	 * @param keys the full pattern-key set (initializes every total to 0)
	 * @return map from pattern key to its total match count over the list
	 */
	private static HashMap<String, Integer> printAndSummarize (
			List<Domain> list, Set<String> keys ) {
		HashMap<String, Integer> summary = new HashMap<String, Integer>();
		for ( String key : keys ) {
			summary.put( key, 0 );
		}
		for ( Domain domain : list ) {
			System.out.format(
					"%nDomain Type: %s, PatternCountMap: %s, Content: %s",
					domain.getDomainType(), domain.getCountMap().toString(),
					domain.getLocation() );
			for ( String key : domain.getCountMap().keySet() ) {
				summary.put( key,
						summary.get( key ) + domain.getCountMap().get( key ) );
			}
		}
		return summary;
	}


	/**
	 * Counts occurrences of the "about me" pattern in the given text.
	 *
	 * @param buffer combined page text
	 * @return number of (case-insensitive) matches
	 */
	private int countBlog ( StringBuffer buffer ) {
		int matches = 0;
		final Matcher matcher = blogPatt.matcher( buffer );
		while ( matcher.find() ) {
			matches++;
		}
		return matches;
	}


	/**
	 * Counts occurrences of the "I am" pattern in the given text.
	 *
	 * @param buffer combined page text
	 * @return number of (case-insensitive) matches
	 */
	private int countIm ( StringBuffer buffer ) {
		int matches = 0;
		final Matcher matcher = imPatt.matcher( buffer );
		while ( matcher.find() ) {
			matches++;
		}
		return matches;
	}


	/**
	 * Counts occurrences of the "contact me" pattern in the given text.
	 *
	 * @param buffer combined page text
	 * @return number of (case-insensitive) matches
	 */
	private int countContactMe ( StringBuffer buffer ) {
		int matches = 0;
		final Matcher matcher = contactPatt.matcher( buffer );
		while ( matcher.find() ) {
			matches++;
		}
		return matches;
	}

	// Maps the classifier's numeric class index back to a DomainType. The
	// order must match the order the class values are added in initialize()
	// (PERSONAL first, NOT_PERSONAL second).
	private DomainType[] domainTypes = {
			DomainType.PERSONAL, DomainType.NOT_PERSONAL
	};


	/**
	 * Attempts to classify the domain from its meta content alone.
	 *
	 * @param metaTestInstances string-free header of the training set (all
	 *          three predictors share the single "Body" attribute)
	 * @param metaFilter the filter fitted during training
	 * @param domain domain to classify; updated in place on success
	 * @return true if a prediction was made, false if the meta content is
	 *         below the MIN_META_ATTRIBUTES token threshold
	 * @throws Exception propagated from Weka filtering/classification
	 */
	private boolean predictByMetaData ( Instances metaTestInstances,
			StringToWordVector metaFilter, Domain domain ) throws Exception {
		// Not enough text to trust a meta-only prediction.
		if ( !(countTokens( domain.getMetaContent() ) >= MIN_META_ATTRIBUTES) ) {
			return false;
		}
		Instance inst = new Instance( 2 );
		// Set value for message attribute
		Attribute metaAttribute = metaTestInstances.attribute( "Body" );
		inst.setValue( metaAttribute, metaAttribute.addStringValue( domain
				.getMetaContent() ) );

		// Give instance access to attribute information from the dataset.
		inst.setDataset( metaTestInstances );

		// Push the single instance through the fitted filter as a one-element
		// batch, then classify the filtered result.
		metaFilter.input( inst );
		metaFilter.batchFinished();
		Instance filteredInstance = metaFilter.output();
		double predicted = this.bodyClassifier.classifyInstance( filteredInstance );
		// Numeric class index -> DomainType (order fixed by initialize()).
		domain.setDomainType( domainTypes[ (int) predicted ] );
		domain.setClassifierComments( domain.getClassifierComments() + ":ByMeta:" );
		return true;
	}


	/**
	 * Attempts to classify the domain from its title concatenated with its
	 * meta content.
	 *
	 * @param titleTestInstances string-free header of the training set
	 * @param titleFilter the filter fitted during training
	 * @param domain domain to classify; updated in place on success
	 * @return true if a prediction was made, false if title+meta is below
	 *         the MIN_TITLE_ATTRIBUTES token threshold
	 * @throws Exception propagated from Weka filtering/classification
	 */
	private boolean predictByTitle ( Instances titleTestInstances,
			StringToWordVector titleFilter, Domain domain ) throws Exception {
		String content = domain.getTitle() + " " + domain.getMetaContent();
		// Not enough text to trust a title-based prediction.
		if ( !(countTokens( content ) >= MIN_TITLE_ATTRIBUTES) ) {
			return false;
		}
		Instance inst = new Instance( 2 );
		// Set value for message attribute
		Attribute titleAttribute = titleTestInstances.attribute( "Body" );

		inst.setValue( titleAttribute, titleAttribute.addStringValue( content ) );

		// Give instance access to attribute information from the dataset.
		inst.setDataset( titleTestInstances );
		// One-element batch through the fitted filter.
		titleFilter.input( inst );
		titleFilter.batchFinished();
		Instance filteredInstance = titleFilter.output();

		double predicted = this.bodyClassifier.classifyInstance( filteredInstance );
		// Numeric class index -> DomainType (order fixed by initialize()).
		domain.setDomainType( domainTypes[ (int) predicted ] );
		domain.setClassifierComments( domain.getClassifierComments() + ":ByTitle:" );
		return true;
	}


	/**
	 * Attempts to classify the domain from its body text, padding the input
	 * with child-page content until the MIN_BODY_ATTRIBUTES token threshold
	 * is reached (or the pages run out — in which case it classifies with
	 * whatever text is available).
	 *
	 * @param bodyTestInstances string-free header of the training set
	 * @param bodyFilter the filter fitted during training
	 * @param domain domain to classify; updated in place on success
	 * @return true if a prediction was made, false if an
	 *         IndexOutOfBoundsException was caught
	 * @throws Exception propagated from Weka filtering/classification
	 */
	private boolean predictByBody ( Instances bodyTestInstances,
			StringToWordVector bodyFilter, Domain domain ) throws Exception {
		/*
		 * We try to make a batch big enough so that we can predicts.
		 */

		try {
			// Start with everything the domain itself offers ...
			String content =
					domain.getBody() + " " + domain.getMetaContent() + " "
							+ domain.getTitle();
			List<Page> childPages = domain.getChildPages();
			int childPage = 0;

			// ... and append child pages until the token threshold is met.
			while ( !(countTokens( content ) >= MIN_BODY_ATTRIBUTES)
					&& childPage < childPages.size() ) {
				Page page = childPages.get( childPage );
				content +=
						" " + page.getMetaContent() + " " + page.getTitle() + " "
								+ page.getBody();
				childPage++;
			}
			int tokens = countTokens( content );
			Instance inst = new Instance( 2 );
			// Set value for message attribute
			Attribute bodyAttribute = bodyTestInstances.attribute( "Body" );
			inst.setValue( bodyAttribute, bodyAttribute.addStringValue( content ) );

			// Give instance access to attribute information from the dataset.
			inst.setDataset( bodyTestInstances );
			// One-element batch through the fitted filter.
			bodyFilter.input( inst );
			bodyFilter.batchFinished();
			Instance filteredInstance = bodyFilter.output();
			double predicted =
					this.bodyClassifier.classifyInstance( filteredInstance );
			// Numeric class index -> DomainType (order fixed by initialize()).
			domain.setDomainType( domainTypes[ (int) predicted ] );
			domain.setClassifierComments( domain.getClassifierComments()
					+ ":ByBody(Tokens:" + tokens + ":and PagesCount=" + childPage
					+ "):" );
			// Also record the class-probability distribution for diagnostics.
			domain.setClassifierComments( domain.getClassifierComments()
					+ ":"
					+ Arrays.toString( this.bodyClassifier
							.distributionForInstance( filteredInstance ) ) + ":" );
			return true;
		}
		catch ( IndexOutOfBoundsException e ) {
			/*
			 * With all child pages, we are still below the threshold tokens.
			 */
			// NOTE(review): the while-loop above already bounds childPage, so
			// this catch cannot fire from the page loop; if it triggers, the
			// index error likely comes from somewhere else (e.g. a predicted
			// class index outside domainTypes) — worth confirming.
			e.printStackTrace();
			return false;
		}
	}


	/**
	 * Counts whitespace-separated tokens in the given string.
	 *
	 * @param str text to tokenize; must not be null
	 * @return number of tokens; 0 for an empty or all-whitespace string
	 */
	public static int countTokens ( String str ) {
		// trim() first: String.split keeps a leading empty element when the
		// input starts with whitespace, and "".split(regex) yields one
		// element, so the untrimmed form over-counted blank input as 1.
		String trimmed = str.trim();
		if ( trimmed.length() == 0 ) {
			return 0;
		}
		return trimmed.split( "\\s+" ).length;
	}


	/**
	 * Creates the attribute definitions and the (initially empty) training
	 * data set for the body classifier.
	 *
	 * The class attribute has exactly two values added in this order:
	 * PERSONAL then NOT_PERSONAL — classifyInstance() therefore returns
	 * index 0 for PERSONAL and 1 for NOT_PERSONAL (see the domainTypes
	 * array).
	 *
	 * The previous version wrapped this body in try/catch(Exception) and
	 * swallowed failures via printStackTrace(), which could leave the
	 * classifier half-initialized; nothing here throws a checked exception,
	 * so any runtime failure now propagates to the caller.
	 *
	 * @param size initial capacity hint for the instance set (expected
	 *          number of training domains)
	 */
	private void initialize ( int size ) {
		// Create vector of attributes.
		FastVector bodyAttributes = new FastVector( 2 );

		// Attribute for holding body text; a null FastVector marks it as a
		// string attribute in the old Weka API.
		this.bodyTextAttribute = new Attribute( "Body", (FastVector) null );
		bodyAttributes.addElement( this.bodyTextAttribute );

		// Nominal class attribute — order matters (see javadoc).
		FastVector classValues = new FastVector( 2 );
		classValues.addElement( DomainType.PERSONAL.name() );
		classValues.addElement( DomainType.NOT_PERSONAL.name() );
		// classValues.addElement( DomainType.NOT_DETERMINED.name() );

		this.bodyClassAttribute = new Attribute( "bodyclass", classValues );
		bodyAttributes.addElement( this.bodyClassAttribute );
		bodyInstances = new Instances( "body data set", bodyAttributes, size );
		bodyInstances.setClass( bodyClassAttribute );
	}


	/**
	 * Fills an Instances set from parallel arrays of texts and class labels.
	 * When the class array is null or empty, the instances are added
	 * unlabeled (used for new/test cases).
	 *
	 * @param theseInputTexts one text per instance
	 * @param theseInputClasses parallel class labels, or null/empty for
	 *          unlabeled instances
	 * @param theseInstances the (empty) data set to populate
	 * @param classAttribute the nominal class attribute
	 * @param textAttribute the string text attribute
	 * @return the populated data set (same object as theseInstances)
	 */
	public static Instances populateInstances ( String[] theseInputTexts,
			String[] theseInputClasses, Instances theseInstances,
			Attribute classAttribute, Attribute textAttribute ) {

		final boolean labeled =
				theseInputClasses != null && theseInputClasses.length > 0;
		for ( int ix = 0; ix < theseInputTexts.length; ix++ ) {
			Instance instance = new Instance( 2 );
			instance.setValue( textAttribute, theseInputTexts[ ix ] );
			if ( labeled ) {
				instance.setValue( classAttribute, theseInputClasses[ ix ] );
			}
			theseInstances.add( instance );
		}
		return theseInstances;
	} // populateInstances


	/**
	 * Runs the classifier over instances of a data set (the full set or just
	 * the appended test cases) and builds a textual report comparing
	 * predicted against actual class values.
	 *
	 * @param theseInstances data set containing the cases to check
	 * @param thisClassifier trained classifier to query
	 * @param thisClassAttribute the class attribute (for index -> name)
	 * @param texts raw input texts, indexed parallel to the checked range
	 * @param testType "newcase" suppresses the predicted-vs-actual
	 *          comparison (new cases have no known class)
	 * @param startIx first instance index to check (skips training cases
	 *          when only test cases were appended)
	 * @return the report; on error it ends with the exception text
	 */
	public static StringBuffer checkCases ( Instances theseInstances,
			Classifier thisClassifier, Attribute thisClassAttribute, String[] texts,
			String testType, int startIx ) {

		StringBuffer result = new StringBuffer();

		try {

			result.append( "\nCHECKING ALL THE INSTANCES:\n" );

			// List the class values so the numeric indices below are readable.
			Enumeration enumClasses = thisClassAttribute.enumerateValues();
			result.append( "Class values (in order): " );
			while ( enumClasses.hasMoreElements() ) {
				String classStr = (String) enumClasses.nextElement();
				result.append( "'" + classStr + "'  " );
			}
			result.append( "\n" );

			// startIx is a fix for handling text cases
			for ( int i = startIx; i < theseInstances.numInstances(); i++ ) {

				// Wrap as a SparseInstance (word vectors are mostly zeros).
				SparseInstance sparseInst =
						new SparseInstance( theseInstances.instance( i ) );
				sparseInst.setDataset( theseInstances );

				// texts[] is indexed from 0, instances from startIx.
				result.append( "\nTesting: '" + texts[ i - startIx ] + "'\n" );
				// result.append("SparseInst: " + sparseInst + "\n");

				double correctValue = (double) sparseInst.classValue();
				double predictedValue = thisClassifier.classifyInstance( sparseInst );

				String predictString =
						thisClassAttribute.value( (int) predictedValue ) + " ("
								+ predictedValue + ")";
				result.append( "predicted: '" + predictString );
				// print comparison if not new case
				if ( !"newcase".equals( testType ) ) {
					String correctString =
							thisClassAttribute.value( (int) correctValue ) + " ("
									+ correctValue + ")";
					String testString =
							((predictedValue == correctValue) ? "OK!" : "NOT OK!") + "!";
					result.append( "' real class: '" + correctString + "' ==> "
							+ testString );
				}
				result.append( "\n" );

				/*
				 * if (thisClassifier instanceof Distribution) { double[] dist =
				 * ((Distribution)thisClassifier).distributionForInstance(sparseInst);
				 * // weight the levels into a spamValue double weightedValue = 0; //
				 * experimental result.append("probability distribution:\n");
				 * NumberFormat nf = NumberFormat.getInstance();
				 * nf.setMaximumFractionDigits(3); for (int j = 0; j < dist.length; j++)
				 * { result.append(nf.format(dist[j]) + " "); weightedValue +=
				 * 10(j+1)dist[j]; if (j < dist.length -1) { result.append(",  "); } }
				 * result.append("\nWeighted Value: " + nf.format(weightedValue) +
				 * "\n"); }
				 */

				result.append( "\n" );
				// result.append(thisClassifier.dumpDistribution());
				// result.append("\n");
			}

		}
		catch ( Exception e ) {
			e.printStackTrace();
			result.append( "\nException (sorry!):\n" + e.toString() );
		}

		return result;

	} // end checkCases


	/**
	 * Builds a textual report about a trained classifier and its evaluation:
	 * model dump, summary statistics, confusion matrix, per-class details
	 * and the cumulative margin distribution.
	 *
	 * @param thisClassifier trained classifier
	 * @param thisEvaluation completed evaluation of that classifier
	 * @return the report; on error it ends with the exception text
	 */
	public static StringBuffer printClassifierAndEvaluation (
			Classifier thisClassifier, Evaluation thisEvaluation ) {

		StringBuffer report = new StringBuffer();

		try {
			report.append( "\n\nINFORMATION ABOUT THE CLASSIFIER AND EVALUATION:\n" );
			final String modelSection =
					"\nclassifier.toString():\n" + thisClassifier.toString() + "\n";
			report.append( modelSection );
			final String summarySection =
					"\nevaluation.toSummaryString(title, false):\n"
							+ thisEvaluation.toSummaryString( "Summary", false ) + "\n";
			report.append( summarySection );
			final String matrixSection =
					"\nevaluation.toMatrixString():\n"
							+ thisEvaluation.toMatrixString() + "\n";
			report.append( matrixSection );
			final String detailsSection =
					"\nevaluation.toClassDetailsString():\n"
							+ thisEvaluation.toClassDetailsString( "Details" ) + "\n";
			report.append( detailsSection );
			final String marginSection =
					"\nevaluation.toCumulativeMarginDistribution:\n"
							+ thisEvaluation.toCumulativeMarginDistributionString() + "\n";
			report.append( marginSection );
		}
		catch ( Exception e ) {
			e.printStackTrace();
			report.append( "\nException (sorry!):\n" + e.toString() );
		}

		return report;

	} // end printClassifierAndEvaluation


	/**
	 * Hook for pre-processing a domain's meta content before classification;
	 * currently returns it unchanged.
	 *
	 * @param d the domain whose meta content is requested
	 * @return the domain's meta content as-is
	 */
	private String prepareMetaContent ( Domain d ) {
		final String metaContent = d.getMetaContent();
		return metaContent;
	}


// private static final String TESTDIR =
	// "/Users/manishmaheshwari/Projects/DataContest/testdata";
	// private static final String TEST_OUTPUT = "results-1-1000.txt";
	// private static final String TEST_INPUT = "input-1-1000.txt";
// private static final String TEST_INPUT =
// "summer2011datacontest-testdata.txt";
// private static final String TEST_OUTPUT =
// "summer2011datacontest-targetresults.txt";



	/**
	 * Entry point: trains and evaluates the classifier via TestDataDriver.
	 *
	 * Optional arguments, each overriding the built-in default:
	 * args[0] test directory, args[1] test input file, args[2] target
	 * results file, args[3] booster-domain directory.
	 *
	 * @param args see above; may be empty
	 * @throws Exception propagated from the test driver
	 */
	public static void main ( String[] args ) throws Exception {
		String testDir = TESTDIR;
		String testInput = TEST_INPUT;
		String testOutput = TEST_OUTPUT;
		String boosterDir = BOOSTER_DIR;

		// The previous version read args[1..3] whenever any argument was
		// present, throwing ArrayIndexOutOfBoundsException for 1-3 args;
		// now each argument independently overrides its default.
		if ( args.length > 0 ) {
			testDir = args[ 0 ];
		}
		if ( args.length > 1 ) {
			testInput = args[ 1 ];
		}
		if ( args.length > 2 ) {
			testOutput = args[ 2 ];
		}
		if ( args.length > 3 ) {
			boosterDir = args[ 3 ];
		}

		TestDataDriver testDriver = new TestDataDriver();
		CustomClassifier wekaclassifier = new WekaDCClassifier( CLASSIFIER_ALGO );
		System.out.println( "Starting test driver" );
		testDriver.runTest( wekaclassifier, testDir, testInput, testOutput,
				boosterDir );
	}

	// Captured at class-load time so mainX() can keep writing contest output
	// to the real console after System.out/System.err are redirected to
	// tempFile.txt.
	public static PrintStream originalOut = System.out;
	public static PrintStream originalErr = System.err;


	/**
	 * Alternative entry point that skips training: reloads the serialized
	 * classifier, data set header and filter (written by train() with
	 * SERIALIZE_AND_STOP), runs the contest input through ContestDriver with
	 * results going to the original console, then redirects stdout/stderr to
	 * tempFile.txt while computing the results.
	 *
	 * @param args unused
	 * @throws Exception propagated from deserialization or the driver
	 */
	public static void mainX ( String[] args ) throws Exception {
		// PrintStream printStream =
		// new PrintStream( new File( "tempFile.txt" ) );
		// System.setOut( printStream );
		// System.setErr( printStream );

		ContestDriver driver = new ContestDriver();
		// Without the persisted classifier there is nothing to reload.
		File f = new File( BODYCLASSIFIER_MODEL );
		if ( !f.exists() ) {
			System.out.print( "Failed on reading serialized files." );
			System.exit( 1 );
		}

		// Restore the exact training-time state: classifier, data set header
		// and the fitted string-to-word-vector filter.
		WekaDCClassifier wekaclassifier = new WekaDCClassifier( CLASSIFIER_ALGO );
		wekaclassifier.bodyClassifier =
				(Classifier) SerializationHelper.read( BODYCLASSIFIER_MODEL );
		wekaclassifier.bodyInstances =
				(Instances) SerializationHelper.read( BODY_INSTANCES_MODEL );
		wekaclassifier.bodyStringToWordVectorFilter =
				(StringToWordVector) SerializationHelper.read( STRING_VECTOR_MODEL );
		System.out.println( "Classifier is ready ...." );
		// ((WekaDCClassifier) wekaclassifier).SERIALIZE_AND_STOP = true;

		FileInputStream fis =
				new FileInputStream( new File( TESTDIR + "/" + TEST_INPUT ) );
		// Contest results go to the original console stream.
		driver.runContest( fis, originalOut, TESTDIR, wekaclassifier );

		// driver.runContest( fis, originalOut, null, wekaclassifier );

		// Divert the (verbose) result computation output to a temp file.
		PrintStream printStream = new PrintStream( new File( "tempFile.txt" ) );
		System.setOut( printStream );
		System.setErr( printStream );
		driver.getResults( TEST_OUTPUT );
		System.out.println( "Done..." );
	}

} // end class WekaDCClassifier

