package crp.logic.clustering;

import java.util.HashSet;

import cc.mallet.types.FeatureVector;
import cc.mallet.types.InstanceList;

import crp.object.SymmMatrix;
import edu.stanford.nlp.math.ArrayMath;

/**
 * Key Phrase Cluster.
 *
 * Builds an NTF-IDF feature matrix for all key phrases from a MALLET
 * {@link InstanceList}, derives pairwise distance and cosine-correlation
 * matrices, and runs the NEO clustering procedure (initial center selection
 * followed by iterative center replacement) via {@link NEOClusterFactory}.
 *
 * @author ouhang
 */
public class NEOClusterKP {

	/** Per-article key-phrase frequency vectors (one per article); populated externally. */
	public FeatureVector[] feqKeyArticleM = null;
	/** Pairwise cosine correlation between key phrases; symmetric, kwNum x kwNum. */
	public SymmMatrix corrKPM = null;
	/** Pairwise Euclidean distance between key-phrase feature rows; kwNum x kwNum. */
	public SymmMatrix distMa = null;
	/** Binarized correlation matrix: corrKPM thresholded at {@link #Ts}. */
	public SymmMatrix binaryMa = null;
	/** NTF-IDF feature matrix of all key phrases: kwNum x articleNum. */
	public double[][] KPi = null;
	/** Per-key-phrase cv scores, filled by the cluster factory. */
	public double[] cvs = null;
	/** Per-key-phrase mdv scores, filled by the cluster factory. */
	public double[] mdvs = null;
	/** Per-key-phrase crf scores, filled by the cluster factory. */
	public double[] crf = null;
	/** Indices of the current cluster centers. */
	public int[] centers;

	/** Correlation threshold used to binarize {@link #corrKPM}; set in preprocessing(). */
	public double Ts = 0;
	/** Objective weight for the mdv term. */
	public double w1 = 0.5;
	/** Objective weight for the cv term. */
	public double w2 = 0.5;
	/** Number of center-replacement iterations performed by cluster(). */
	public int itr = 500;

	private NEOClusterFactory neoClusterFactory = null;
	private InstanceList instances = null;

	public NEOClusterKP(NEOClusterFactory neoClusterFactory) {
		this.neoClusterFactory = neoClusterFactory;
	}

	/**
	 * Runs the clustering: picks {@code centerNum} initial centers, computes the
	 * initial objective from the centers' mdv/cv scores, then iteratively tries
	 * to improve it by replacing centers for {@link #itr} rounds.
	 *
	 * Requires {@link #preprocessing(InstanceList)} (and the factory's score
	 * computation) to have been run first so distMa/binaryMa/cvs/mdvs/crf are set.
	 *
	 * @param centerNum number of cluster centers to select
	 * @throws Exception propagated from the cluster factory
	 */
	public void cluster(int centerNum) throws Exception {
		centers = neoClusterFactory.initialCenter(centerNum, distMa, binaryMa, cvs, mdvs, crf, w1, w2);
		double[] mdvstemp = new double[centers.length];
		double[] cvstemp = new double[centers.length];
		for (int i = 0; i < centers.length; i++) {
			mdvstemp[i] = mdvs[centers[i]];
			// BUG FIX: previously read mdvs[centers[i]] here too, so the cv term
			// of the objective was computed from the wrong score array.
			cvstemp[i] = cvs[centers[i]];
		}

		// Objective value for the initial center set.
		double objective = neoClusterFactory.computeObj(mdvstemp, cvstemp, w1, w2);

		for (int i = 0; i < itr; i++) {
			objective = neoClusterFactory.clusteringStepreplaceCenter(centers, distMa, cvs, mdvs, crf, w1, w2, objective);
		}
	}

	/**
	 * Computes the NTF-IDF feature matrix {@link #KPi}, the pairwise distance
	 * matrix {@link #distMa} and the cosine-correlation matrix {@link #corrKPM}
	 * from {@link #instances}.
	 *
	 * NOTE(review): kwNum is taken as the number of distinct feature indices
	 * observed, and the code then iterates feature indices 0..kwNum-1 — this
	 * assumes the MALLET alphabet indices are contiguous from 0. Confirm this
	 * holds for the alphabets used upstream.
	 *
	 * @throws Exception declared for interface stability (matrix helpers may throw)
	 */
	private void computeNTF_IDF_CorrM() throws Exception {
		int articleNum = this.instances.size();
		// Total feature weight over the whole corpus. BUG FIX: was an int, so the
		// compound assignment silently truncated fractional feature weights.
		double totalWeight = 0.0;
		// Total feature weight per article.
		double[] tempSum = new double[articleNum];
		HashSet<Integer> seenIndices = new HashSet<Integer>();

		for (int i = 0; i < articleNum; i++) {
			tempSum[i] = 0.0;
			FeatureVector fv = (FeatureVector) instances.get(i).getData();
			int[] indices = fv.getIndices();
			for (int j = 0; j < indices.length; j++) {
				tempSum[i] += fv.value(indices[j]);
				seenIndices.add(indices[j]);
			}
			totalWeight += tempSum[i];
		}

		int kwNum = seenIndices.size();
		seenIndices = null; // no longer needed; allow GC

		corrKPM = new SymmMatrix(kwNum); // upper triangular storage
		KPi = new double[kwNum][articleNum];

		// Article length normalization: awk[i] = totalWeight / (len(i) * articleNum).
		// Guard empty articles (tempSum == 0) to avoid an infinite weight.
		double[] awk = new double[articleNum];
		for (int i = 0; i < articleNum; i++) {
			awk[i] = tempSum[i] == 0.0 ? 0.0 : totalWeight / (tempSum[i] * articleNum);
		}

		for (int i = 0; i < kwNum; i++) {
			int dfi = 0; // number of articles containing key phrase i
			for (int j = 0; j < articleNum; j++) {
				// BUG FIX: originally tested instances.get(i)...contains(i) — both
				// the article and the feature were indexed by 'i', so the document
				// frequency never varied over articles (and could go out of bounds).
				if (((FeatureVector) instances.get(j).getData()).contains(i)) {
					dfi++;
				}
			}

			// log2 inverse document frequency; guard dfi == 0 (possible if feature
			// indices are not contiguous) to avoid log(infinity).
			double idfi = dfi == 0 ? 0.0 : Math.log((double) articleNum / (double) dfi) / Math.log(2);

			for (int k = 0; k < articleNum; k++) {
				FeatureVector fv = (FeatureVector) instances.get(k).getData();
				// BUG FIX: originally queried contains(k)/value(k) with the article
				// index instead of the key-phrase index i.
				if (fv.contains(i)) {
					KPi[i][k] = fv.value(i) * awk[k] * idfi;
				} else {
					KPi[i][k] = 0.0;
				}
			}
		}

		// Pairwise Euclidean distances between key-phrase feature rows.
		distMa = new SymmMatrix(KPi.length);
		for (int i = 0; i < distMa.dim; i++) {
			distMa.setValue(0, i, i);
			for (int j = i + 1; j < distMa.dim; j++) {
				double distance = ArrayMath.norm(ArrayMath.pairwiseSubtract(KPi[i], KPi[j]));
				distMa.setValue(distance, i, j);
			}
		}

		// Hoist the row norms: they were recomputed inside the O(kwNum^2) loop.
		double[] norms = new double[kwNum];
		for (int i = 0; i < kwNum; i++) {
			norms[i] = ArrayMath.norm(KPi[i]);
		}

		// Cosine correlation. Two all-zero rows are treated as perfectly
		// correlated (value 1, preserving the original convention); a zero row
		// against a non-zero row is treated as uncorrelated (value 0) —
		// previously this case produced 0/0 = NaN.
		for (int i = 0; i < kwNum; i++) {
			corrKPM.setValue(0, i, i);
			for (int j = i + 1; j < kwNum; j++) {
				if (norms[i] == 0 && norms[j] == 0) {
					corrKPM.setValue(1, i, j);
				} else if (norms[i] == 0 || norms[j] == 0) {
					corrKPM.setValue(0, i, j);
				} else {
					double value = ArrayMath.sum(ArrayMath.pairwiseMultiply(KPi[i], KPi[j])) / (norms[i] * norms[j]);
					corrKPM.setValue(value, i, j);
				}
			}
		}
	}

	/**
	 * Preprocessing stage: stores the instance list, builds the NTF-IDF,
	 * distance and correlation matrices, derives the binarization threshold
	 * {@link #Ts} and the binary matrix, and allocates the per-key-phrase
	 * score arrays (filled later by the cluster factory).
	 *
	 * @param instances MALLET instances whose data payloads are {@link FeatureVector}s
	 * @throws Exception propagated from matrix construction / the cluster factory
	 */
	public void preprocessing(InstanceList instances) throws Exception {
		this.instances = instances;

		computeNTF_IDF_CorrM();
		Ts = neoClusterFactory.calculateTs(corrKPM);
		binaryMa = neoClusterFactory.computeBinaryM(corrKPM, Ts);
		cvs = new double[distMa.dim];
		mdvs = new double[distMa.dim];
		crf = new double[distMa.dim];
	}
}
