package edu.gatech.doi.spamframework.filters;

import java.awt.BorderLayout;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;

import moa.classifiers.DecisionStump;

import weka.attributeSelection.CfsSubsetEval;
import weka.attributeSelection.GreedyStepwise;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.MultipleClassifiersCombiner;
import weka.classifiers.RandomizableClassifier;
import weka.classifiers.SingleClassifierEnhancer;
import weka.classifiers.bayes.AODE;
import weka.classifiers.bayes.BayesNet;
import weka.classifiers.bayes.ComplementNaiveBayes;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.bayes.NaiveBayesMultinomial;
import weka.classifiers.bayes.NaiveBayesSimple;
import weka.classifiers.evaluation.ThresholdCurve;
import weka.classifiers.functions.LeastMedSq;
import weka.classifiers.functions.LinearRegression;
import weka.classifiers.functions.Logistic;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.classifiers.functions.PaceRegression;
import weka.classifiers.functions.RBFNetwork;
import weka.classifiers.functions.SMO;
import weka.classifiers.functions.SMOreg;
import weka.classifiers.functions.SimpleLinearRegression;
import weka.classifiers.functions.SimpleLogistic;
import weka.classifiers.functions.VotedPerceptron;
import weka.classifiers.functions.Winnow;
import weka.classifiers.lazy.IB1;
import weka.classifiers.lazy.KStar;
import weka.classifiers.lazy.LBR;
import weka.classifiers.meta.AdaBoostM1;
import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.misc.HyperPipes;
import weka.classifiers.misc.VFI;
import weka.classifiers.rules.ConjunctiveRule;
import weka.classifiers.rules.DecisionTable;
import weka.classifiers.rules.JRip;
import weka.classifiers.rules.NNge;
import weka.classifiers.rules.OneR;
import weka.classifiers.rules.PART;
import weka.classifiers.rules.Prism;
import weka.classifiers.rules.Ridor;
import weka.classifiers.rules.ZeroR;
import weka.classifiers.trees.ADTree;
import weka.classifiers.trees.Id3;
import weka.classifiers.trees.J48;
import weka.classifiers.trees.LMT;
import weka.classifiers.trees.NBTree;
import weka.classifiers.trees.REPTree;
import weka.classifiers.trees.RandomForest;
import weka.classifiers.trees.RandomTree;
import weka.classifiers.trees.UserClassifier;
import weka.classifiers.trees.lmt.LogisticBase;
import weka.classifiers.trees.m5.M5Base;
import weka.classifiers.trees.m5.PreConstructedLinearModel;
import weka.classifiers.trees.m5.RuleNode;
import weka.core.Attribute;
import weka.core.AttributeStats;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.converters.ArffSaver;

import weka.core.stemmers.SnowballStemmer;
import weka.core.tokenizers.AlphabeticTokenizer;
import weka.core.tokenizers.NGramTokenizer;
import weka.core.tokenizers.Tokenizer;
import weka.core.tokenizers.WordTokenizer;
import weka.experiment.Stats;
import weka.filters.Filter;
import weka.filters.supervised.attribute.AddClassification;
import weka.filters.supervised.attribute.AttributeSelection;
import weka.filters.unsupervised.attribute.NumericToNominal;
import weka.filters.unsupervised.attribute.Remove;
import weka.filters.unsupervised.attribute.RemoveUseless;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;
import weka.filters.unsupervised.attribute.Standardize;
import weka.filters.unsupervised.attribute.StringToWordVector;

import weka.gui.treevisualizer.PlaceNode2;
import weka.gui.treevisualizer.TreeVisualizer;
import weka.gui.visualize.PlotData2D;
import weka.gui.visualize.ThresholdVisualizePanel;

public class CopyOfTrainTestFilter {
	// Path "<user.dir>/instances/" where instance files are kept.
	// NOTE(review): uses the file separator as a property-name lookup; works on
	// standard JVMs ("file.separator") — confirm no custom property overrides.
	private String instancesdirectory=System.getProperties().getProperty("user.dir")+System.getProperties().getProperty("file.separator")+"instances"+System.getProperties().getProperty("file.separator");
	// Minimum ham ratio an attribute must reach (in both data sets) to survive
	// the pruning in CountForAttribute; set via setHamThreshold.
	private double hamThreshold;
	// Minimum spam ratio, analogous to hamThreshold; set via setSpamThreshold.
	private double spamThreshold;

	
	/**
	 * Factory mapping a numeric algorithm index (1-50) to a freshly
	 * constructed WEKA classifier, logging which algorithm was selected.
	 *
	 * Indices 4, 6, 10, 21, 23, 32, 35, 40 and 43 are disabled (their
	 * constructors were commented out in the original) and return null,
	 * as does any index outside 1-50. Callers must handle a null result.
	 *
	 * @param index algorithm selector, 1-50
	 * @return a new classifier instance, or null for disabled/unknown indices
	 */
	public Classifier getClassifier(int index)
	{
		Classifier chosen = null;
		// Message suffix printed for every recognised index (including the
		// disabled ones); kept byte-identical to the original output.
		String label = null;
		switch(index)
		{
		case 1:
			label = " classifier name: ADTree";
			chosen = new ADTree();
			break;
		case 2:
			label = " classifier name: AODE";
			chosen = new AODE();
			break;
		case 3:
			label = " classifier name: BayesNet";
			chosen = new BayesNet();
			break;
		case 4:
			// ComplementNaiveBayes construction disabled in original.
			label = " classifier name:ComplementNaiveBayes";
			break;
		case 5:
			label = " classifier name: ConjunctiveRule";
			chosen = new ConjunctiveRule();
			break;
		case 6:
			// DecisionStump construction disabled in original.
			label = " classifier name:";
			break;
		case 7:
			label = " classifier name: DecisionTable";
			chosen = new DecisionTable();
			break;
		case 8:
			label = " classifier name:HyperPipes";
			chosen = new HyperPipes();
			break;
		case 9:
			label = " classifier name: IB1";
			chosen = new IB1();
			break;
		case 10:
			// IBk construction disabled in original.
			label = " classifier name:";
			break;
		case 11:
			label = " classifier name: Id3";
			chosen = new Id3();
			break;
		case 12:
			label = " classifier name: J48";
			chosen = new J48();
			break;
		case 13:
			label = " classifier name:JRip";
			chosen = new JRip();
			break;
		case 14:
			label = " classifier name: KStar";
			chosen = new KStar();
			break;
		case 15:
			label = " classifier name: LBR";
			chosen = new LBR();
			break;
		case 16:
			label = " classifier name: LeastMedSq";
			chosen = new LeastMedSq();
			break;
		case 17:
			label = " classifier name: LinearRegression";
			chosen = new LinearRegression();
			break;
		case 18:
			label = " classifier name: LMT";
			chosen = new LMT();
			break;
		case 19:
			label = " classifier name:Logistic";
			chosen = new Logistic();
			break;
		case 20:
			label = " classifier name: LogisticBase";
			chosen = new LogisticBase();
			break;
		case 21:
			// M5Base construction disabled in original.
			label = " classifier name: ";
			break;
		case 22:
			label = " classifier name:  MultilayerPerceptron";
			chosen = new MultilayerPerceptron();
			break;
		case 23:
			// MultipleClassifiersCombiner construction disabled in original.
			label = " classifier name:";
			break;
		case 24:
			label = " classifier name: NaiveBayes";
			chosen = new NaiveBayes();
			break;
		case 25:
			label = " classifier name: NaiveBayesMultinomial";
			chosen = new NaiveBayesMultinomial();
			break;
		case 26:
			label = " classifier name:NaiveBayesSimple";
			chosen = new NaiveBayesSimple();
			break;
		case 27:
			label = " classifier name: NBTree";
			chosen = new NBTree();
			break;
		case 28:
			label = " classifier name: NNge";
			chosen = new NNge();
			break;
		case 29:
			label = " classifier name: OneR";
			chosen = new OneR();
			break;
		case 30:
			label = " classifier name: PaceRegression";
			chosen = new PaceRegression();
			break;
		case 31:
			label = " classifier name: PART";
			chosen = new PART();
			break;
		case 32:
			// PreConstructedLinearModel construction disabled in original.
			label = " classifier name: ";
			break;
		case 33:
			label = " classifier name: Prism";
			chosen = new Prism();
			break;
		case 34:
			label = " classifier name: RandomForest";
			chosen = new RandomForest();
			break;
		case 35:
			// RandomizableClassifier construction disabled in original.
			label = " classifier name: ";
			break;
		case 36:
			label = " classifier name: RandomTree";
			chosen = new RandomTree();
			break;
		case 37:
			label = " classifier name: RBFNetwork";
			chosen = new RBFNetwork();
			break;
		case 38:
			label = " classifier name: REPTree";
			chosen = new REPTree();
			break;
		case 39:
			label = " classifier name:Ridor";
			chosen = new Ridor();
			break;
		case 40:
			// RuleNode construction disabled in original.
			label = " classifier name: ";
			break;
		case 41:
			label = " classifier name:SimpleLinearRegression";
			chosen = new SimpleLinearRegression();
			break;
		case 42:
			label = " classifier name:SimpleLogistic";
			chosen = new SimpleLogistic();
			break;
		case 43:
			// SingleClassifierEnhancer construction disabled in original.
			label = " classifier name: ";
			break;
		case 44:
			label = " classifier name: SMO";
			chosen = new SMO();
			break;
		case 45:
			label = " classifier name: SMOreg";
			chosen = new SMOreg();
			break;
		case 46:
			label = " classifier name: UserClassifier";
			chosen = new UserClassifier();
			break;
		case 47:
			label = " classifier name: VFI";
			chosen = new VFI();
			break;
		case 48:
			label = " classifier name: VotedPerceptron";
			chosen = new VotedPerceptron();
			break;
		case 49:
			label = " classifier name: Winnow";
			chosen = new Winnow();
			break;
		case 50:
			label = " classifier name: ZeroR";
			chosen = new ZeroR();
			break;

		}
		// The original printed one line per recognised index (even disabled
		// ones) and nothing for out-of-range indices; preserve that.
		if (label != null)
		{
			System.out.println("Index:" + index + label);
		}
		return chosen;
	}
	/**
	 * Reports whether all attribute values of the instance are zero,
	 * i.e. the message produced no token features.
	 * Attribute 0 is skipped — presumably the class attribute (TODO confirm).
	 *
	 * @param ins instance to inspect
	 * @return true when no attribute (index 1..n-1) has a positive value
	 */
	public boolean isContentEmpty(Instance ins)
	{
		int attrCount = ins.numAttributes();
		for (int idx = 1; idx < attrCount; idx++)
		{
			if (ins.value(idx) > 0.0)
			{
				return false;
			}
		}
		return true;
	}
	
	/**
	 * Reports whether the instance has at most {@code num} attributes with
	 * a positive value. Despite the name, the comparison is "less than or
	 * equal". Attribute 0 is skipped — presumably the class attribute
	 * (TODO confirm).
	 *
	 * @param ins instance to inspect
	 * @param num maximum allowed count of positive attributes
	 * @return true when the positive-attribute count is &lt;= num
	 */
	public boolean isLessThanNum(Instance ins, int num)
	{
		int positives = 0;
		for (int idx = 1; idx < ins.numAttributes(); idx++)
		{
			if (ins.value(idx) > 0.0)
			{
				positives++;
			}
		}
		return positives <= num;
	}
	/** Sets the minimum ham ratio used by the attribute-pruning logic. */
	public void setHamThreshold(double value)
	{
		hamThreshold = value;
	}

	/** Sets the minimum spam ratio used by the attribute-pruning logic. */
	public void setSpamThreshold(double value)
	{
		spamThreshold = value;
	}
	/**
	 * For every attribute, counts in how many ham vs. spam instances it
	 * appears (value &gt; 0) in data1 (training) and data2 (testing), and
	 * collects the indices of attributes to remove:
	 * - attributes that lean ham in one set but spam in the other
	 *   (inconsistent between the two sets), and
	 * - attributes that lean the same way in both sets but whose ham/spam
	 *   ratio falls below hamThreshold / spamThreshold in either set.
	 *
	 * Assumes attribute 0 is the class attribute with value 0.0 == ham —
	 * TODO confirm against the data layout built by the callers.
	 *
	 * NOTE(review): if an attribute appears in no instance of one set, the
	 * corresponding denominator is 0 and the ratios become NaN; every
	 * comparison below is then false, so the attribute is silently kept.
	 * Confirm this is intended.
	 *
	 * @param data1 training instances
	 * @param data2 testing instances
	 * @return set of attribute indices recommended for removal
	 */
	private HashSet<Integer> CountForAttribute(Instances data1,Instances data2)
	{
		HashSet<Integer> removeset=new HashSet<Integer>();
		// Per-attribute counters; reset at the bottom of the outer loop.
		int hamcount=0;      // ham occurrences in data1
		int spamcount=0;     // spam occurrences in data1
		int hamcount1=0;     // ham occurrences in data2
		int spamcount1=0;    // spam occurrences in data2
		for(int i=0;i<data1.numAttributes();i++)
		{
			// Tally occurrences of attribute i in the training set.
			for(int j=0;j<data1.numInstances();j++)
			{
				Instance tempins=data1.instance(j);
				if(tempins.value(i)>0.0)
				{
					//the attribute appear in this instance
					if(tempins.value(0)==0.0)
					{
						//this instance is ham;
						hamcount++;
					}
					else
					{
						spamcount++;
					}

				}
			}

			// Tally occurrences of attribute i in the testing set.
			for(int j=0;j<data2.numInstances();j++)
			{
				Instance tempins=data2.instance(j);
				if(tempins.value(i)>0.0)
				{
					//the attribute appear in this instance
					if(tempins.value(0)==0.0)
					{
						//this instance is ham;
						hamcount1++;
					}
					else
					{
						spamcount1++;
					}

				}
			}

			//System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);

			// Fraction of this attribute's occurrences that are ham/spam,
			// computed separately for each data set (NaN when count sum is 0).
			double hamP=(double)hamcount/(double)(hamcount+spamcount);
			double spamP=(double)spamcount/(double)(hamcount+spamcount);
			double hamP1=(double)hamcount1/(double)(hamcount1+spamcount1);
			double spamP1=(double)spamcount1/(double)(hamcount1+spamcount1);
			//System.out.println("hamP:"+hamP+" spamP:"+spamP);
			//System.out.println("hamP1:"+hamP1+" spamP1:"+spamP1);
			
			
			// Inconsistent lean: ham-leaning in train, spam-leaning in test.
			if((hamP>spamP)&&(hamP1<spamP1))
			{
//				if(hamcount1==0)
//				System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);
//  			if(spamcount1>=1000)
//					System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);
				removeset.add(i);
			}
			// Inconsistent lean the other way round.
			else if((spamP>hamP)&&(spamP1<hamP1))
			{
//				if(hamcount==0)
//					System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);
//				//System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);
				removeset.add(i);
			}
			// Consistently ham-leaning: keep only if strong in BOTH sets.
			else if((hamP>=spamP)&&(hamP1>=spamP1))
			{
				if(hamP>=this.hamThreshold&&hamP1>=this.hamThreshold)
				{
					//System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1+" hamP:"+hamP);
				}
				else
				{
					//System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);
					removeset.add(i);
				}
			}
			// Consistently spam-leaning: keep only if strong in BOTH sets.
			else if((spamP>=hamP)&&(spamP1>=hamP1))
			{
				// Diagnostic output for purely-spam attributes (live, not debug).
				if(hamcount==0&&hamcount1==0)
					System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1+" spamP:"+spamP);
				if(spamP>=this.spamThreshold&&spamP1>=this.spamThreshold)
				{
					//System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1+" spamP:"+spamP);
				}
				else
				{
					//System.out.println("index:"+i+" attribute:"+data1.attribute(i).name()+" hamcount:"+hamcount+" spamcount:"+spamcount+" hamcount1:"+hamcount1+" spamcount1:"+spamcount1);
					removeset.add(i);
				}
			}
			//both take the message as spam or ham will stay otherwise remove

			// Reset counters for the next attribute.
			hamcount=0;
			spamcount=0;
			hamcount1=0;
			spamcount1=0;
		}
		return removeset;
	}

	/**
	 * Trains and evaluates a message-text spam classifier.
	 *
	 * Pipeline: strip the instance-id attribute, convert text to TF-IDF word
	 * vectors (StringToWordVector + Snowball stemming + stop words), make the
	 * result nominal, prune rare and train/test-inconsistent attributes, then
	 * build/evaluate one classifier (or all 50 when algorithmType==51), write
	 * per-instance predictions to "statusTxtL.txt", and pop up an ROC curve.
	 *
	 * @param data1 training instances (raw, with id attribute)
	 * @param data2 testing instances (raw, with id attribute)
	 * @param algorithmType classifier index for getClassifier; 51 = run all
	 * @param tokenizerType 1=word, 2=alphabetic, 3=n-gram tokenizer
	 * @param removenum minimum occurrence count an attribute needs to survive
	 * @param wordsTK words-to-keep passed to StringToWordVector
	 * @param modeltype 1 selects class index 3, otherwise class index 0 —
	 *        presumably text model vs. another layout; TODO confirm
	 */
	public void tainingMsg(Instances data1,Instances data2,int algorithmType,int tokenizerType,int removenum,int wordsTK,int modeltype)
	{
		System.out.println("TokenizerType:"+tokenizerType+" removenum:"+removenum+" words to keep:"+wordsTK+" modeltype:"+modeltype);
		System.out.println("HamThreshold:"+this.hamThreshold+" SpamThreshold:"+this.spamThreshold);
		// NOTE(review): null inputs are only logged, not rejected; an NPE
		// follows a few lines below if either is actually null.
		if(data1==null)
		{
			System.out.println("data1 is null");

		}

		if(data2==null)
		{
			System.out.println("data2 is null");

		}

		Instances train =null;  // from somewhere
		Instances test =null;    // from somewhere
		//		 Standardize filter = new Standardize();
		//		 System.out.println(data2);



		try {


			// Stop-word list is read from "<user.dir>/stopwords.txt".
			String userdir=System.getProperties().getProperty("user.dir")+System.getProperties().getProperty("file.separator");
			String stopwordsfile=userdir+"stopwords.txt";
			File stopwordfile=new File(stopwordsfile);
			
			// Capture, per test instance, its status id (second-to-last
			// attribute) and its label (last attribute, >0 == spam) before any
			// filtering removes those attributes.
			// NOTE(review): attribute(...).value(i) indexes the attribute's
			// string values by instance position — assumes a string attribute
			// whose values are stored in instance order; TODO confirm.
			ArrayList<Integer> keyAL=new ArrayList<Integer>();
			ArrayList<String> statusidAL=new ArrayList<String>();
			for(int i=0;i<data2.numInstances();i++)
			{
				//String value=data2.instance(i).value(data2.numAttributes()-2);
				String key=data2.attribute(data2.numAttributes()-2).value(i);
			//	System.out.println("key:"+key);
				double value=data2.instance(i).value(data2.numAttributes()-1);
				//System.out.println("value:"+value);
				if(value>0.0)
				{
					keyAL.add(Integer.parseInt("1"));
					
				}
				else
				{
					keyAL.add(Integer.parseInt("0"));
				}
				statusidAL.add(key);
			}
			/**
			 * remove instanceid
			 */
			Remove remove=new Remove();
			int [] attlist=new int[1];

		
			attlist[0]=data1.numAttributes()-2;
//			attlist[1]=data1.numAttributes()-3;

			remove.setAttributeIndicesArray(attlist);

			remove.setInputFormat(data1);
			

			
			
			Instances newdata1= Filter.useFilter(data1, remove);
			//			   
			Instances newdata2=Filter.useFilter(data2, remove);
//			Instances newdata2=Filter.useFilter(data3, remove);

			train=newdata1;
			test=newdata2;
			//			   train.setClassIndex(train.numAttributes()-1);
			//			   test.setClassIndex(test.numAttributes()-1);
			// Text -> TF-IDF word vectors; constructor argument is the
			// words-to-keep budget.
			int wordstokeep=wordsTK;
			StringToWordVector filter = new StringToWordVector(wordsTK);
			//			 	filter.setOutputWordCounts(true);
			filter.setMinTermFreq(10);
			filter.setLowerCaseTokens(true);
			filter.setIDFTransform(true);
			SnowballStemmer snowball=new SnowballStemmer();
			filter.setStemmer(snowball);
			Tokenizer wt = null;
			switch(tokenizerType)
			{
			case 1:
				wt=new WordTokenizer();
				break;
			case 2:
				wt=new AlphabeticTokenizer();
				break;
			case 3:
				wt=new NGramTokenizer();
				break;

			}

			// NOTE(review): an unrecognised tokenizerType leaves wt null here.
			filter.setTokenizer(wt);
			//			 	filter.setTFTransform(true);
			filter.setInputFormat(train);
			filter.setStopwords(stopwordfile);
			Instances traindataFiltered = Filter.useFilter(train, filter);

			Instances testdataFiltered = Filter.useFilter(test, filter);

		

			// Convert the numeric word-vector attributes to nominal; the same
			// filter (initialised on train) is reused for test.
			NumericToNominal ntnfilter=new NumericToNominal();
			ntnfilter.setInputFormat(traindataFiltered);

			Instances traindata=Filter.useFilter(traindataFiltered,ntnfilter);
			// ntnfilter.setInputFormat(testdataFiltered);
			Instances testdata=Filter.useFilter(testdataFiltered,ntnfilter);

		


			System.out.println("the number of attributes in training data:"+traindata.numAttributes());
			System.out.println("the number of attributes in testing data:"+testdata.numAttributes());

			// First pruning pass: drop attributes occurring fewer than
			// removenum times in train, or missing/rare in test.
			HashMap<String,Integer> commonhm=new HashMap<String,Integer>();
			ArrayList<Integer> removeal=new ArrayList<Integer>();
			int removenum1=removenum;

			HashMap hm=new HashMap();
			for(int i=0;i<traindata.numAttributes();i++)
			{
				Attribute attr=traindata.attribute(i);
				String key=attr.name().trim();
				AttributeStats as=traindata.attributeStats(i);
				if(attr.isNominal())
				{
					// nominalCounts[1] == number of instances with the second
					// nominal value (i.e. the attribute present/non-zero).
					int[] nc=as.nominalCounts;

					if(nc.length>1)
					{
						int value=nc[1];


						if(value>=removenum1)
						{
							//	System.out.println("key:"+key);
							//	commonal.add(key);
							hm.put(key, value);

						}
						else
						{
							
							removeal.add(i);
						}
					}
					else
					{
						
						removeal.add(i);
					}
					//Integer value=Integer.valueOf(as);

				}
				else if(attr.isNumeric())
				{
					Stats nc=as.numericStats;
					int value=(int) nc.sum;
					if(value>=removenum1)
					{
						//	System.out.println("key:"+key);
						//noncommonal.add(key);
						hm.put(key, value);
					}
					else
					{
						
						removeal.add(i);
					}

					//System.out.println("key:"+key+" as:"+value);
					//Integer value=Integer.valueOf(as);


				}
			}

			// Same frequency screen on the test side; attributes absent from
			// the train-side map hm are scheduled for removal too.
			for(int j=0;j<testdata.numAttributes();j++)
			{


				String key=testdata.attribute(j).name().trim();
				if(!hm.containsKey(key))
				{
					//noncommonal.add(key);
					
					removeal.add(j);
				}
				else
				{
					Attribute attr=testdata.attribute(j);
					AttributeStats as=testdata.attributeStats(j);
					if(attr.isNominal())
					{
						int[] nc=as.nominalCounts;
						if(nc.length>1)
						{
							int value=nc[1];
							//System.out.println("key:"+key+" value:"+value);
							if(value>=removenum1)
							{
								//System.out.println("key:"+key);
								commonhm.put(key, value);
							}
							else
							{
								
								removeal.add(j);
							}
						}
						else
						{
							
							removeal.add(j);
						}

					}
					else if(attr.isNumeric())
					{
						Stats nc=as.numericStats;
						int value=(int) nc.sum;
						//System.out.println("key:"+key+" value:"+value);
						if(value>=removenum1)
						{
							//	System.out.println("key:"+key);
							commonhm.put(key, value);
						}
						else
						{
							
							removeal.add(j);
						}

					}

				}

			}
			System.out.println("common word list:"+commonhm.size());
			//			   for(String tempkey: commonhm.keySet())
			//			   {
			//				   Integer value=commonhm.get(tempkey);
			//				  System.out.println("key:"+tempkey+" as:"+value.intValue());
			//			   }



			// De-duplicate removal indices via a HashSet, then pack into an
			// int[] for the Remove filter.
			// NOTE(review): entries equal to 0 are skipped, but the array is
			// sized for all entries, so trailing slots stay 0 — which Remove
			// interprets as attribute index 0. Confirm attribute 0 is meant to
			// survive (it appears to be the class attribute).
			ArrayList<Integer> attl=new ArrayList<Integer>();
			HashSet<Integer> a = new HashSet<Integer>(removeal);
			int i=0;
			for (Integer tempB: a){
				attl.add(tempB);
				//System.out.println("index:"+tempB.intValue());
			}

			int [] attlistB=new int[attl.size()];
			for(Integer tempC:attl)
			{
				if(tempC.intValue()==0)
					continue;
				attlistB[i]=tempC.intValue();
				i++;
			}
			//System.out.println("i="+i);

			//attlistB[0]=data1.numAttributes()-2;
			//attlist[1]=3;
			Remove removeB=new Remove();
			removeB.setAttributeIndicesArray(attlistB);

			removeB.setInputFormat(traindata);
			Instances finaltraindata1= Filter.useFilter(traindata, removeB);
			//			   
			Instances finaltestdata1=Filter.useFilter(testdata, removeB);

			//			    System.out.println("the number of attributes in final train data 1:"+finaltraindata1.numAttributes());
			// Second pruning pass: drop attributes whose ham/spam lean is
			// inconsistent or weak between the two sets (see CountForAttribute).
			HashSet<Integer> C=this.CountForAttribute(finaltraindata1,finaltestdata1);
			////			    
			ArrayList<Integer> attlC=new ArrayList<Integer>();
			// HashSet<Integer> a = new HashSet<Integer>(removeal);
			i=0;
			for (Integer tempB: C){
				attlC.add(tempB);
				//System.out.println("index:"+tempB.intValue());
			}

			// Same index-0-skip / trailing-zero packing as attlistB above.
			int [] attlistC=new int[attlC.size()];
			for(Integer tempC:attlC)
			{
				if(tempC.intValue()==0)
					continue;
				attlistC[i]=tempC.intValue();
				i++;
			}
			//System.out.println("i="+i);

			//attlistB[0]=data1.numAttributes()-2;
			//attlist[1]=3;
			Remove removeC=new Remove();
			removeC.setAttributeIndicesArray(attlistC);

			removeC.setInputFormat(finaltraindata1);
			Instances finaltraindata= Filter.useFilter(finaltraindata1, removeC);
			//			   
			Instances finaltestdata=Filter.useFilter(finaltestdata1, removeC);

			

			
			System.out.println("the number of attributes in final train data:"+finaltraindata.numAttributes());
			System.out.println("the number of instances in final train data:"+finaltraindata.numInstances());
			System.out.println("the number of instances in final test data:"+finaltestdata.numInstances());
			// NOTE(review): D is never used; this call only matters for its
			// console diagnostics inside CountForAttribute.
			HashSet<Integer> D=this.CountForAttribute(finaltraindata,finaltestdata);
			
			if(modeltype==1)
			{
				
				finaltraindata.setClassIndex(3);
				finaltestdata.setClassIndex(3);

			}
			else
			{
				finaltraindata.setClassIndex(0);
				finaltestdata.setClassIndex(0);
			}
			//				RankAttributes.rankByInfoGain(traindata);
			//				RankAttributes.rankByGainRatio(traindata);

			System.out.println("Rank attributes by Chi Square:");
			
//			RankAttributes.rankByChiSquare(finaltraindata);

			//				RankAttributes.rankByInfoGain(testdata);
			//				RankAttributes.rankByGainRatio(testdata);

//			RankAttributes.rankByChiSquare(finaltestdata);



			//			 	// create new test set
			//			    FilteredClassifier fc=new FilteredClassifier();
			//			    
			//			    fc.buildClassifier(traindata);
			//			    for (int i = 0; i < testdata.numInstances(); i++) {
			//					 System.out.println(testdata.instance(i).value(3));
			//					   double pred = fc.classifyInstance(testdata.instance(i));
			//					   System.out.print("ID: " + testdata.instance(i).value(testdata.numAttributes()-2));
			//					   System.out.print(", FCactual: " + testdata.classAttribute().value((int) testdata.instance(i).classValue()));
			//					   System.out.println(", FCpredicted: " + testdata.classAttribute().value((int) pred));
			//					 }

			
			
			//  this.CountForAttribute(finaltestdata);
			System.out.println(">>>>>>>>>>>>>");
			// Build and evaluate. algorithmType==51 sweeps every classifier
			// index; anything else trains a single classifier.
			int type=algorithmType;
			Evaluation eval=null;
			String classifiername="";
			
			if(algorithmType==51)
			{
				for(i=1;i<=50;i++)
				{
					
					
					Classifier cls=this.getClassifier(i);
					if(cls==null)
						continue;
					
					cls.buildClassifier(finaltraindata);
					eval= new Evaluation(finaltraindata);
					eval.evaluateModel(cls, finaltestdata);	
					
					System.out.println(eval.toSummaryString("\nResults\n======\n", false));
					System.out.println("TPR:"+eval.truePositiveRate(0)+
					"FPR:"+eval.falsePositiveRate(0)+
					"F-measure:"+eval.fMeasure(0));
					double[][] tempdou=eval.confusionMatrix();
					for(int j=0;j<tempdou.length;j++)
					{
						System.out.println(tempdou[j][0]+" "+tempdou[j][1]);

					}
				}
			}
			else
			{
				// NOTE(review): getClassifier can return null (disabled or
				// unknown indices), which would NPE on the next call.
				Classifier cls=this.getClassifier(algorithmType);
				
				
				cls.buildClassifier(finaltraindata);
				eval= new Evaluation(finaltraindata);
				eval.evaluateModel(cls, finaltestdata);	
				
				System.out.println(eval.toSummaryString("\nResults\n======\n", false));
				System.out.println("TPR:"+eval.truePositiveRate(0)+
				"FPR:"+eval.falsePositiveRate(0)+
				"F-measure:"+eval.fMeasure(0));
				double[][] tempdou=eval.confusionMatrix();
				for(int j=0;j<tempdou.length;j++)
				{
					System.out.println(tempdou[j][0]+" "+tempdou[j][1]);

				}
				
				
				Instances predictedData = null;
				  // add predictions
			      AddClassification predictefilter = new AddClassification();
			      predictefilter.setClassifier(cls);
			      predictefilter.setOutputClassification(true);
			      predictefilter.setOutputDistribution(true);
			      predictefilter.setOutputErrorFlag(true);
			      predictefilter.setInputFormat(finaltraindata);
			      Filter.useFilter(finaltraindata, predictefilter);  // trains the classifier
			      Instances pred = Filter.useFilter(finaltestdata, predictefilter);  // perform predictions on test set
			    
			      System.out.println("==============================");
			      
			      int spamemptynum=0;
			      int hamemptynum=0;
			      
			      // Write "id 1" (predicted spam, last attribute > 0) or
			      // "id 0" (predicted ham) per test instance, overwriting any
			      // previous statusTxtL.txt in the working directory.
			      String filename="statusTxtL.txt";
			      try {
						File file=new File(filename);
						if(!file.exists())
						{
							file.createNewFile();
						}
						else
						{
							file.delete();
							file.createNewFile();
						}
						FileWriter frs=new FileWriter(filename);
						BufferedWriter br=new BufferedWriter(frs);
						String line=null;
						 
						
						 
						for (int j = 0; j < pred.numInstances(); j++)
					      {
					    	  
					    	  double value1=pred.instance(j).value(pred.numAttributes()-1);
					    	  double value2=pred.instance(j).value(0);
					    	  Integer integer=keyAL.get(j);
					    	  String id=statusidAL.get(j);
					    	  if(value1>0.0)
					    	  {
					    		
					    			  br.write(id +" 1");
					    		  
					    	  }else
					    	  {
					    		
					    			  br.write(id+" 0");
					    		
					    	  }
					    	 
								br.newLine();
								br.flush();
						
								
					      }
					
							
						br.close();
						frs.close();
						
						
					} catch (FileNotFoundException e) {
						// NOTE(review): writer is not closed on exception here.
						e.printStackTrace();
					} catch (IOException e) {
						e.printStackTrace();
					}
			      
			      
			      // Mismatch-reporting loop; the actual reporting is commented
			      // out, so this currently only reads values and does nothing.
			      for (int j = 0; j < pred.numInstances(); j++)
			      {
			    	  
			    	  double value1=pred.instance(j).value(pred.numAttributes()-1);
			    	  Integer integer=keyAL.get(j);
			    	  
			    	  
			    	//  System.out.println("value1:"+value1);
			    	 // System.out.println("integer:"+integer.intValue());
			    	  //instance j is spam
//			    	  if(value1>0.0)
//			    	  {
//			    		//  System.out.println("value1:"+value1);
//			    		 
//			    		  
//			    		  if(integer.intValue()==0)
//			    		  {
//			    			  //predict spam, actually it is ham
//			    			  if(this.isContentEmpty(pred.instance(j)))
//			    				  continue;
//			    			  System.out.println("spam:"+statusidAL.get(j));
//			    		  }
//			    	  }
//			    	  else
//			    	  {
//			    		 
//			    		  if(integer.intValue()==1)
//			    		  {
//			    			  //predict spam, actually it is ham
//			    			  if(this.isContentEmpty(pred.instance(j)))
//			    				  continue;
//			    			  System.out.println("ham:"+statusidAL.get(j));
//			    		  }
//			    	  }
						
			      }
			     
				
				
			}
			

			// evaluate classifier and print some statistics

			// generate curve
			ThresholdCurve tc = new ThresholdCurve();
			int classIndex = 0;
			Instances result = tc.getCurve(eval.predictions(), classIndex);

			// plot curve
			ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
			vmc.setROCString("(Area under ROC = " + 
					Utils.doubleToString(tc.getROCArea(result), 4) + ")");
			vmc.setName(result.relationName());
			PlotData2D tempd = new PlotData2D(result);
			tempd.setPlotName(result.relationName());
			tempd.addInstanceNumberAttribute();
			// specify which points are connected
			boolean[] cp = new boolean[result.numInstances()];
			for (int n = 1; n < cp.length; n++)
				cp[n] = true;
			tempd.setConnectPoints(cp);
			// add plot
			vmc.addPlot(tempd);

			// display curve
			String plotName = vmc.getName()+"  "+classifiername; 
			final javax.swing.JFrame jf = 
				new javax.swing.JFrame("Weka Classifier Visualize: "+plotName);
			jf.setSize(500,400);
			jf.getContentPane().setLayout(new BorderLayout());
			jf.getContentPane().add(vmc, BorderLayout.CENTER);
			jf.addWindowListener(new java.awt.event.WindowAdapter() {
				public void windowClosing(java.awt.event.WindowEvent e) {
					jf.dispose();
				}
			});
			jf.setVisible(true);




		} catch (Exception e1) {
			// Catch-all boundary: any pipeline failure is logged and swallowed.
			e1.printStackTrace();
		}  // initializing the filter once with training set

	}

	/**
	 * Reads a text file and returns its lines, trimmed, as a set.
	 * Used to load the URL black/white lists (one entry per line).
	 *
	 * Fix: the original closed the reader inside the try body, so an
	 * IOException during readLine() skipped the close and leaked the file
	 * handle; the close now happens in a finally block.
	 *
	 * @param filename path of the file to load
	 * @return set of trimmed lines; empty (never null) if the file cannot be read
	 */
	public HashSet<String> loadFile2HashSet(String filename)
	{
		HashSet<String> temphs=new HashSet<String>();
		BufferedReader br=null;
		try {
			// BufferedReader wraps the FileReader; closing br closes both.
			br=new BufferedReader(new FileReader(filename));
			String line=null;
			while((line=br.readLine())!=null)
			{
				temphs.add(line.trim());
			}
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if(br!=null)
			{
				try {
					br.close();
				} catch (IOException ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}
		return temphs;
	}
	// URLs loaded from the blacklist file via setBlackList; null until set.
	private HashSet<String> blackList;
	// URLs loaded from the whitelist file via setWhiteList; null until set.
	private HashSet<String> whiteList;
	
	/**
	 * Reports whether the URL is on the loaded blacklist.
	 * Requires setBlackList to have been called first.
	 */
	public boolean URLBlackList(String url)
	{
		return this.blackList.contains(url);
	}

	/**
	 * Reports whether the URL is on the loaded whitelist.
	 * Requires setWhiteList to have been called first.
	 */
	public boolean URLWhiteList(String url)
	{
		return this.whiteList.contains(url);
	}

	/** Loads the blacklist file (one URL per line) into memory. */
	public void setBlackList(String filename)
	{
		blackList = loadFile2HashSet(filename);
	}
	/** Loads the whitelist file (one URL per line) into memory. */
	public void setWhiteList(String filename)
	{
		whiteList = loadFile2HashSet(filename);
	}
	
	
	/**
	 * Trains a classifier on web-page text and evaluates it on a held-out set.
	 * The ids of misclassified test instances are written, one per line (with a
	 * blank line for correctly classified instances), to "webTxtL.txt" in the
	 * working directory, and an ROC curve of the last evaluation is shown in a
	 * Swing window.
	 *
	 * (The "taining" spelling is kept because external callers use this name.)
	 *
	 * @param data1         raw training instances; the id attribute is second-to-last
	 * @param data2         raw testing instances, same layout as {@code data1}
	 * @param algorithmType classifier id understood by {@code getClassifier(int)};
	 *                      51 means "run every classifier id from 1 to 50"
	 * @param tokenizerType 1 = WordTokenizer, 2 = AlphabeticTokenizer, 3 = NGramTokenizer
	 * @param removenum     only logged; kept for signature compatibility
	 * @param wordsTK       number of words the StringToWordVector filter keeps
	 * @param modeltype     1 = class attribute is index 3, otherwise the last attribute
	 */
	public void tainingWeb(Instances data1,Instances data2,int algorithmType,int tokenizerType,int removenum,int wordsTK,int modeltype)
	{
		System.out.println("TokenizerType:"+tokenizerType+" removenum:"+removenum+" words to keep:"+wordsTK+" modeltype:"+modeltype);
		System.out.println("HamThreshold:"+this.hamThreshold+" SpamThreshold:"+this.spamThreshold);
		if(data1==null)
		{
			System.out.println("data1 is null");
		}
		if(data2==null)
		{
			System.out.println("data2 is null");
		}

		String userdir=System.getProperties().getProperty("user.dir")+System.getProperties().getProperty("file.separator");

		try {
			File stopwordfile=new File(userdir+"stopwords.txt");

			// Collect each test instance's id up front so the prediction loop
			// below can report which instances were misclassified.
			ArrayList<String> statusidAL=new ArrayList<String>();
			for(int i=0;i<data2.numInstances();i++)
			{
				// NOTE(review): this reads the i-th stored *value of the attribute*,
				// not the attribute value of instance i; kept as-is to preserve
				// the original behavior — confirm against the ARFF layout.
				statusidAL.add(data2.attribute(data2.numAttributes()-2).value(i));
			}

			// Drop the 17 metadata attributes (indices 0..2 and 4..17), keeping
			// only the text attribute (index 3) and the class for vectorisation.
			Remove remove=new Remove();
			int[] attlist=new int[17];
			for(int i=0;i<17;i++)
			{
				attlist[i]=(i<3) ? i : i+1;
			}
			remove.setAttributeIndicesArray(attlist);
			remove.setInputFormat(data1);
			Instances train=Filter.useFilter(data1, remove);
			Instances test=Filter.useFilter(data2, remove);

			// Bag-of-words vectorisation: lower-cased, Snowball-stemmed, stop
			// words removed, minimum term frequency 10, at most wordsTK words.
			StringToWordVector filter=new StringToWordVector(wordsTK);
			filter.setMinTermFreq(10);
			filter.setLowerCaseTokens(true);
			filter.setStemmer(new SnowballStemmer());
			Tokenizer wt=null;
			switch(tokenizerType)
			{
			case 1:
				wt=new WordTokenizer();
				break;
			case 2:
				wt=new AlphabeticTokenizer();
				break;
			case 3:
				wt=new NGramTokenizer();
				break;
			}
			filter.setTokenizer(wt);
			filter.setInputFormat(train);  // initialise the filter once, with the training set
			filter.setStopwords(stopwordfile);
			Instances traindataFiltered=Filter.useFilter(train, filter);
			Instances testdataFiltered=Filter.useFilter(test, filter);

			// Word counts come out numeric; convert them to nominal for the
			// classifiers used here. The same initialised filter is applied to
			// both sets so their attribute layouts stay identical.
			NumericToNominal ntnfilter=new NumericToNominal();
			ntnfilter.setInputFormat(traindataFiltered);
			Instances finaltraindata=Filter.useFilter(traindataFiltered, ntnfilter);
			Instances finaltestdata=Filter.useFilter(testdataFiltered, ntnfilter);

			if(modeltype==1)
			{
				finaltraindata.setClassIndex(3);
				finaltestdata.setClassIndex(3);
			}
			else
			{
				finaltraindata.setClassIndex(finaltraindata.numAttributes()-1);
				finaltestdata.setClassIndex(finaltraindata.numAttributes()-1);
			}

			System.out.println("Rank attributes by Chi Square:");
			System.out.println(">>>>>>>>>>>>>");

			Evaluation eval=null;
			String classifiername="";

			if(algorithmType==51)
			{
				// Sweep every known classifier id; eval keeps the last run's
				// results, which is what the ROC plot below will show.
				for(int i=1;i<=50;i++)
				{
					Classifier cls=this.getClassifier(i);
					if(cls==null)
						continue;
					cls.buildClassifier(finaltraindata);
					eval=new Evaluation(finaltraindata);
					eval.evaluateModel(cls, finaltestdata);

					System.out.println(eval.toSummaryString("\nResults\n======\n", false));
					System.out.println("TPR:"+eval.truePositiveRate(0)+
					"FPR:"+eval.falsePositiveRate(0)+
					"F-measure:"+eval.fMeasure(0));
					double[][] tempdou=eval.confusionMatrix();
					for(int j=0;j<tempdou.length;j++)
					{
						System.out.println(tempdou[j][0]+" "+tempdou[j][1]);
					}
				}
			}
			else
			{
				Classifier cls=this.getClassifier(algorithmType);
				if(cls==null)
				{
					// Previously this fell through to an uncaught NullPointerException.
					System.out.println("unknown algorithmType:"+algorithmType);
					return;
				}
				cls.buildClassifier(finaltraindata);
				eval=new Evaluation(finaltraindata);
				eval.evaluateModel(cls, finaltestdata);

				System.out.println(eval.toSummaryString("\nResults\n======\n", false));
				System.out.println("TPR:"+eval.truePositiveRate(0)+
				"FPR:"+eval.falsePositiveRate(0)+
				"F-measure:"+eval.fMeasure(0));
				double[][] tempdou=eval.confusionMatrix();
				for(int j=0;j<tempdou.length;j++)
				{
					System.out.println(tempdou[j][0]+" "+tempdou[j][1]);
				}

				// Append the classifier's output to every instance; the last
				// attribute of "pred" is read below as the prediction signal.
				AddClassification predictefilter=new AddClassification();
				predictefilter.setClassifier(cls);
				predictefilter.setOutputClassification(true);
				predictefilter.setOutputDistribution(true);
				predictefilter.setOutputErrorFlag(true);
				predictefilter.setInputFormat(finaltraindata);
				Filter.useFilter(finaltraindata, predictefilter);  // trains the classifier
				Instances pred=Filter.useFilter(finaltestdata, predictefilter);  // predictions on the test set

				System.out.println("==============================");

				// Write the id of every instance whose prediction disagrees with
				// attribute 0; correctly classified instances produce an empty
				// line, matching the original output format. try-with-resources
				// guarantees the writer is closed (and flushed) even on failure —
				// the previous version leaked it when an exception was thrown
				// mid-loop, and flushed on every single line. FileWriter already
				// creates/truncates the file, so the old delete/createNewFile
				// dance was redundant and has been dropped.
				try (BufferedWriter br=new BufferedWriter(new FileWriter("webTxtL.txt"))) {
					for(int j=0;j<pred.numInstances();j++)
					{
						double value1=pred.instance(j).value(pred.numAttributes()-1);
						double value2=pred.instance(j).value(0);
						String id=statusidAL.get(j);
						if(value2>0.0)
						{
							if(value1==0.0)
								br.write(id);
						}
						else
						{
							if(value1>0.0)
								br.write(id);
						}
						br.newLine();
					}
				} catch (IOException e) {
					e.printStackTrace();
				}

				// hamnum was declared but never incremented in the original,
				// so this line always printed 0; kept for output compatibility.
				System.out.println("hamnum:"+0);
			}

			// ROC curve for class index 0 of the (last) evaluation.
			if(eval==null)
				return;  // algorithmType==51 and no classifier id produced a model
			ThresholdCurve tc=new ThresholdCurve();
			int classIndex=0;
			Instances result=tc.getCurve(eval.predictions(), classIndex);

			ThresholdVisualizePanel vmc=new ThresholdVisualizePanel();
			vmc.setROCString("(Area under ROC = "+
					Utils.doubleToString(tc.getROCArea(result), 4)+")");
			vmc.setName(result.relationName());
			PlotData2D tempd=new PlotData2D(result);
			tempd.setPlotName(result.relationName());
			tempd.addInstanceNumberAttribute();
			// connect every point except the first
			boolean[] cp=new boolean[result.numInstances()];
			for(int n=1;n<cp.length;n++)
				cp[n]=true;
			tempd.setConnectPoints(cp);
			vmc.addPlot(tempd);

			// display the curve in a simple disposable frame
			String plotName=vmc.getName()+"  "+classifiername;
			final javax.swing.JFrame jf=
				new javax.swing.JFrame("Weka Classifier Visualize: "+plotName);
			jf.setSize(500,400);
			jf.getContentPane().setLayout(new BorderLayout());
			jf.getContentPane().add(vmc, BorderLayout.CENTER);
			jf.addWindowListener(new java.awt.event.WindowAdapter() {
				public void windowClosing(java.awt.event.WindowEvent e) {
					jf.dispose();
				}
			});
			jf.setVisible(true);

		} catch (Exception e1) {
			e1.printStackTrace();
		}

	}
	
	/**
	 * TF-IDF variant of {@code tainingWeb}: removes only the instance-id
	 * attribute (keeping the other metadata), applies an IDF transform to the
	 * word vector, leaves the word counts numeric, and prints evaluation
	 * statistics only — no prediction file and no ROC window are produced.
	 *
	 * (The "taining" spelling is kept because external callers use this name.)
	 *
	 * @param data1         raw training instances; the id attribute is second-to-last
	 * @param data2         raw testing instances, same layout as {@code data1}
	 * @param algorithmType classifier id understood by {@code getClassifier(int)};
	 *                      51 means "run every classifier id from 1 to 50"
	 * @param tokenizerType 1 = WordTokenizer, 2 = AlphabeticTokenizer, 3 = NGramTokenizer
	 * @param removenum     only logged; kept for signature compatibility
	 * @param wordsTK       number of words the StringToWordVector filter keeps
	 * @param modeltype     1 = class attribute is index 3, otherwise index 0
	 */
	public void tainingWebIDF(Instances data1,Instances data2,int algorithmType,int tokenizerType,int removenum,int wordsTK,int modeltype)
	{
		System.out.println("TokenizerType:"+tokenizerType+" removenum:"+removenum+" words to keep:"+wordsTK+" modeltype:"+modeltype);
		System.out.println("HamThreshold:"+this.hamThreshold+" SpamThreshold:"+this.spamThreshold);
		if(data1==null)
		{
			System.out.println("data1 is null");
		}
		if(data2==null)
		{
			System.out.println("data2 is null");
		}

		String userdir=System.getProperties().getProperty("user.dir")+System.getProperties().getProperty("file.separator");

		try {
			File stopwordfile=new File(userdir+"stopwords.txt");

			// The original version also collected per-instance ids and labels
			// here (keyAL/statusidAL/urlAL), but their only consumer was
			// commented out, so that dead work has been removed.

			// Remove only the instance-id attribute (second-to-last); unlike
			// tainingWeb, all other metadata attributes are kept.
			Remove remove=new Remove();
			int[] attlist=new int[1];
			attlist[0]=data1.numAttributes()-2;
			remove.setAttributeIndicesArray(attlist);
			remove.setInputFormat(data1);
			Instances train=Filter.useFilter(data1, remove);
			Instances test=Filter.useFilter(data2, remove);

			// TF-IDF bag-of-words: lower-cased, Snowball-stemmed, stop words
			// removed, minimum term frequency 10, at most wordsTK words kept.
			StringToWordVector filter=new StringToWordVector(wordsTK);
			filter.setMinTermFreq(10);
			filter.setIDFTransform(true);
			filter.setLowerCaseTokens(true);
			filter.setStemmer(new SnowballStemmer());
			Tokenizer wt=null;
			switch(tokenizerType)
			{
			case 1:
				wt=new WordTokenizer();
				break;
			case 2:
				wt=new AlphabeticTokenizer();
				break;
			case 3:
				wt=new NGramTokenizer();
				break;
			}
			filter.setTokenizer(wt);
			filter.setInputFormat(train);  // initialise the filter once, with the training set
			filter.setStopwords(stopwordfile);
			// No NumericToNominal step here: IDF-weighted counts stay numeric.
			Instances finaltraindata=Filter.useFilter(train, filter);
			Instances finaltestdata=Filter.useFilter(test, filter);

			System.out.println("the number of attributes in training data:"+finaltraindata.numAttributes());
			System.out.println("the number of attributes in testing data:"+finaltestdata.numAttributes());
			System.out.println("the number of attributes in final train data:"+finaltraindata.numAttributes());
			System.out.println("the number of instances in final train data:"+finaltraindata.numInstances());
			System.out.println("the number of instances in final test data:"+finaltestdata.numInstances());

			// Called for its console output; the returned set was never used.
			this.CountForAttribute(finaltraindata, finaltestdata);

			if(modeltype==1)
			{
				finaltraindata.setClassIndex(3);
				finaltestdata.setClassIndex(3);
			}
			else
			{
				finaltraindata.setClassIndex(0);
				finaltestdata.setClassIndex(0);
			}

			RankAttributes.rankByInfoGain(finaltraindata);

			System.out.println("Rank attributes by Chi Square:");
			System.out.println(">>>>>>>>>>>>>");

			Evaluation eval=null;
			if(algorithmType==51)
			{
				// Sweep every known classifier id and print each one's results.
				for(int i=1;i<=50;i++)
				{
					Classifier cls=this.getClassifier(i);
					if(cls==null)
						continue;
					cls.buildClassifier(finaltraindata);
					eval=new Evaluation(finaltraindata);
					eval.evaluateModel(cls, finaltestdata);

					System.out.println(eval.toSummaryString("\nResults\n======\n", false));
					System.out.println("TPR:"+eval.truePositiveRate(0)+
					"FPR:"+eval.falsePositiveRate(0)+
					"F-measure:"+eval.fMeasure(0));
					double[][] tempdou=eval.confusionMatrix();
					for(int j=0;j<tempdou.length;j++)
					{
						System.out.println(tempdou[j][0]+" "+tempdou[j][1]);
					}
				}
			}
			else
			{
				Classifier cls=this.getClassifier(algorithmType);
				if(cls==null)
				{
					// Previously this fell through to an uncaught NullPointerException.
					System.out.println("unknown algorithmType:"+algorithmType);
					return;
				}
				cls.buildClassifier(finaltraindata);
				eval=new Evaluation(finaltraindata);
				eval.evaluateModel(cls, finaltestdata);

				System.out.println(eval.toSummaryString("\nResults\n======\n", false));
				System.out.println("TPR:"+eval.truePositiveRate(0)+
				"FPR:"+eval.falsePositiveRate(0)+
				"F-measure:"+eval.fMeasure(0));
				double[][] tempdou=eval.confusionMatrix();
				for(int j=0;j<tempdou.length;j++)
				{
					System.out.println(tempdou[j][0]+" "+tempdou[j][1]);
				}
			}

		} catch (Exception e1) {
			e1.printStackTrace();
		}

	}



	/**
	 * Writes the given instances to an ARFF file in the given directory.
	 *
	 * @param in            data set to save
	 * @param filedirectory target directory
	 * @param filename      name of the ARFF file to create
	 */
	public void save2arff(Instances in, String filedirectory, String filename)
	{
		ArffSaver saver = new ArffSaver();
		saver.setInstances(in);
		String separator = System.getProperties().getProperty("file.separator");
		try {
			saver.setFile(new File(filedirectory + separator + filename));
			saver.writeBatch();
		} catch (IOException e) {
			// keep the file's convention of logging and continuing
			e.printStackTrace();
		}
	}
	/**
	 * Command-line entry point.
	 *
	 * Expected arguments:
	 * <pre>
	 *   args[0] type          1 = tainingWeb with wordsTK*1..10, 2 = tainingMsg
	 *   args[1] trainfile     path of the training ARFF file
	 *   args[2] testfile      path of the testing ARFF file
	 *   args[3] algorithmType classifier id (51 = run all)
	 *   args[4] tokenizerType 1 = word, 2 = alphabetic, 3 = n-gram
	 *   args[5] removenum     passed through to the training methods
	 *   args[6] wordsTK       words to keep in the word-vector filter
	 *   args[7] hamT          ham threshold
	 *   args[8] spamT         spam threshold
	 * </pre>
	 */
	public static void main(String [] args)
	{
		// Fail fast with a usage message instead of the bare
		// ArrayIndexOutOfBoundsException the original threw.
		if(args.length<9)
		{
			System.out.println("usage: <type> <trainfile> <testfile> <algorithmType> <tokenizerType> <removenum> <wordsTK> <hamThreshold> <spamThreshold>");
			return;
		}

		int type=Integer.parseInt(args[0]);
		String trainfile=args[1];
		String testfile=args[2];
		int algorithmType=Integer.parseInt(args[3]);
		int tokenizerType=Integer.parseInt(args[4]);
		int removenum=Integer.parseInt(args[5]);
		int wordsTK=Integer.parseInt(args[6]);
		double hamT=Double.parseDouble(args[7]);
		double spamT=Double.parseDouble(args[8]);

		CopyOfTrainTestFilter ttf=new CopyOfTrainTestFilter();
		// set up the thresholds for ham and spam attributes
		ttf.setHamThreshold(hamT);
		ttf.setSpamThreshold(spamT);

		Instances data1=loadInstances(trainfile);
		Instances data2=loadInstances(testfile);

		if(type==1)
		{
			// sweep the vocabulary size from wordsTK to 10*wordsTK
			for(int i=1;i<=10;i++)
			{
				ttf.tainingWeb(data1, data2,algorithmType,tokenizerType,removenum,wordsTK*i,0);
			}
		}
		else if(type==2)
		{
			ttf.tainingMsg(data1, data2,algorithmType,tokenizerType,removenum,wordsTK,0);
		}
	}

	/**
	 * Reads an ARFF file into Instances. try-with-resources closes the reader
	 * — the original leaked a FileReader per call. Returns null (after logging)
	 * on failure, matching the original's behavior of proceeding with null data.
	 */
	private static Instances loadInstances(String path)
	{
		try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
			return new Instances(reader);
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}
}
