
package textmining.gpanalysis;

import Jama.Matrix;
import data.KeywordVector;
import data.Post;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.collections.Bag;
import org.apache.commons.collections.bag.HashBag;

/**
 *
 * @author ibrahimsabek
 */
/**
 * Generates TF-IDF-weighted keyword vectors for a current post against a set
 * of previous posts. The TF matrix has one row per keyword (sorted order) and
 * one column per post, with the current post occupying the last column.
 *
 * @author ibrahimsabek
 */
public class TFIDFKeywordVectorGenerator extends KeywordVectorGenerator {

    private static final Logger LOG =
            Logger.getLogger(TFIDFKeywordVectorGenerator.class.getName());

    /** Previously seen posts; supplied via {@link #setPosts(Post[])} before use. */
    private Post[] prevPosts;
    /** Maps each post (previous and current) to a bag of its keyword occurrences. */
    private Map<Post, Bag> postWordFreq;
    /** Term-frequency matrix: rows = keywords, columns = posts (current post last). */
    private Matrix tfMatrix;
    /** Sorted union of all keywords across the previous posts and the current post. */
    private SortedSet<String> keywordSet;

    public TFIDFKeywordVectorGenerator(Post post) throws IOException {
        super(post);
    }

    /**
     * Builds the term-frequency matrix over all posts. Row {@code i} holds the
     * counts of the i-th keyword of {@link #keywordSet}; column {@code j} holds
     * the counts for {@code prevPosts[j]}, and the last column those of the
     * current post.
     *
     * @return the TF matrix (keywords x posts)
     */
    @Override
    public Matrix generateKeywordsContainer() {
        keywordSet = createWordSet(prevPosts);

        int numPosts = prevPosts.length + 1; // previous posts plus the current one
        int numKeywords = keywordSet.size();
        double[][] data = new double[numKeywords][numPosts];

        Bag currentBag = postWordFreq.get(post);
        int row = 0;
        for (String keyword : keywordSet) {
            LOG.fine(keyword);
            // Frequencies in each previous post.
            for (int col = 0; col < numPosts - 1; col++) {
                data[row][col] = postWordFreq.get(prevPosts[col]).getCount(keyword);
            }
            // Frequency in the current post (last column).
            data[row][numPosts - 1] = currentBag.getCount(keyword);
            row++;
        }

        tfMatrix = new Matrix(data);
        return tfMatrix;
    }

    /**
     * Applies inverse-document-frequency weighting to a TF matrix.
     *
     * @param TFMatrix the term-frequency matrix to weight
     * @return the TF-IDF matrix
     */
    public Matrix createTFIDFMatrix(Matrix TFMatrix) {
        return new IDFHandler().createIDFMatrix(TFMatrix);
    }

    /**
     * Creates one sparse keyword vector per post from the TF-IDF matrix. Only
     * strictly positive weights are stored in each vector's data map.
     *
     * @return vectors for all previous posts, followed by the current post's
     */
    public KeywordVector[] generateKeywordsVectors() {
        Matrix tfidfMatrix = createTFIDFMatrix(generateKeywordsContainer());

        int numVectors = prevPosts.length + 1; // previous posts plus the current one
        KeywordVector[] vectors = new KeywordVector[numVectors];

        // Initialize a vector per previous post, then one for the current post.
        for (int k = 0; k < numVectors - 1; k++) {
            vectors[k] = new KeywordVector();
            vectors[k].setVectorID(prevPosts[k].getpID());
        }
        vectors[numVectors - 1] = new KeywordVector();
        vectors[numVectors - 1].setVectorID(post.getpID());

        int row = 0;
        for (String key : keywordSet) {
            for (int col = 0; col < tfidfMatrix.getColumnDimension(); col++) {
                double weight = tfidfMatrix.get(row, col);
                if (weight > 0) { // keep the vectors sparse
                    vectors[col].getData().put(key, weight);
                }
            }
            row++;
        }

        return vectors;
    }

    /**
     * Builds the global keyword set over the previous posts and the current
     * post, and fills {@link #postWordFreq} with a per-post occurrence bag.
     * RemPI tokens are only included for posts with more than two para-readers.
     *
     * @param prevPosts the previous posts to index
     * @return the sorted union of all keywords
     */
    @Override
    protected SortedSet<String> createWordSet(Post[] prevPosts) {
        postWordFreq = new HashMap<Post, Bag>();
        SortedSet<String> keywords = new TreeSet<String>();

        // Previous posts: build each post's occurrence bag and grow the keyword set.
        for (Post currPost : prevPosts) {
            try {
                KeywordVectorGenerator currVector = new KeywordVectorGenerator(currPost);
                Bag postBag = new HashBag();

                accumulateTokens(currVector.getPITokenBag(), keywords, postBag);
                if (currPost.getParaReaders().length > 2) {
                    // BUG FIX: the original iterated getPITokenBag() here, double-counting
                    // PI tokens and never counting remPI tokens for previous posts.
                    accumulateTokens(currVector.getRemPITokenBag(), keywords, postBag);
                }

                postWordFreq.put(currPost, postBag);
            } catch (IOException ex) {
                // Best-effort: skip a post that cannot be read, keep processing the rest.
                LOG.log(Level.SEVERE, null, ex);
            }
        }

        // Current post: same accumulation using this generator's own token bags.
        Bag postBag = new HashBag();
        accumulateTokens(this.PITokenBag, keywords, postBag);
        if (this.post.getParaReaders().length > 2) {
            accumulateTokens(this.remPITokenBag, keywords, postBag);
        }
        postWordFreq.put(post, postBag);

        return keywords;
    }

    /**
     * Adds every unique token of {@code source} to {@code keywords} and every
     * occurrence (with multiplicity) to {@code dest}.
     */
    private void accumulateTokens(Bag source, SortedSet<String> keywords, Bag dest) {
        keywords.addAll(source.uniqueSet());
        Iterator tokens = source.iterator(); // commons-collections 3.x bags are pre-generics
        while (tokens.hasNext()) {
            dest.add((String) tokens.next());
        }
    }

    @Override
    public void setPosts(Post[] posts) {
        this.prevPosts = posts;
    }

}
