package inference;



import static com.aliasi.cluster.LatentDirichletAllocation.tokenizeDocuments;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.aliasi.cluster.LatentDirichletAllocation;
import com.aliasi.symbol.MapSymbolTable;
import com.aliasi.symbol.SymbolTable;
import com.aliasi.tokenizer.IndoEuropeanTokenizerFactory;
import com.aliasi.tokenizer.TokenizerFactory;

import data.TopicSim;
import parser.TwitterParser;
import utils.kml.Kml;

/**
 * Runs LingPipe's Latent Dirichlet Allocation over a directory of per-day
 * Twitter text files and reports topic/geo results as KML and CSV.
 *
 * <p>Pipeline: {@link #run} sets tokenizer/symbol-table parameters, builds the
 * document-word matrix from the input directory, then launches two parallel
 * Gibbs-sampling runs whose topics are matched by similarity before reporting.
 */
public class LDALingPipe {

	/*
	 * LINGPIPE UTILS
	 */
	// NOTE(review): mutable static state assigned from an instance method
	// (setParams). Kept public/static for interface compatibility, but this
	// makes concurrent use of multiple LDALingPipe instances unsafe — confirm
	// whether instance fields would be acceptable for callers.
	public static TokenizerFactory tokFact;
	public static int minCount;
	public static SymbolTable symTab;

	/**
	 * Runs the full LDA pipeline.
	 *
	 * @param path       directory containing one text file per day
	 * @param out        output path for the KML report
	 * @param outcsv     output path for the CSV report
	 * @param cellsize   grid cell size forwarded to the result handler
	 * @param seed       random seed for the first Gibbs run (second uses seed+1)
	 * @param numsamples number of Gibbs samples
	 * @param numtopics  number of LDA topics
	 * @param parser     parsed corpus metadata (document id to filename map)
	 * @throws Exception on any I/O or sampling failure
	 */
	public void run(String path, String out, String outcsv, int cellsize, long seed, int numsamples, short numtopics, TwitterParser parser) throws Exception{

		// Symmetric Dirichlet priors: alpha scaled by topic count,
		// beta scaled by the number of documents.
		double alpha = 1.0 / numtopics;
		double beta = 1.0 / parser.int2filename.size(); // number of documents
		setParams();

		// create doc-words mapping
		int[][] docwords = createDocWords(path, parser.int2filename);

		PrintWriter kml = new PrintWriter(out);

		runLDA(docwords, seed, numtopics, alpha, beta, numsamples, parser, kml, cellsize, outcsv);
	}

	/**
	 * Initializes the shared tokenization parameters used by
	 * {@link #createDocWords}: an Indo-European tokenizer, a minimum token
	 * count of 1 (keep every token), and a fresh symbol table.
	 */
	public void setParams(){
		tokFact = new IndoEuropeanTokenizerFactory();
		minCount = 1;
		symTab = new MapSymbolTable();
	}

	/**
	 * Reads every day-file under {@code path} (one line of whitespace-separated
	 * words per file, named like {@code Mon,dd-MM-yyy.txt}) and tokenizes them
	 * into a document-word id matrix.
	 *
	 * <p>Entries whose name contains "svn" are skipped (folders created by
	 * sourceforge). Requires {@link #setParams} to have been called first.
	 *
	 * @param path         directory of per-day text files
	 * @param int2filename document index to filename map, used for logging
	 * @return one row of symbol ids per document, in directory-listing order
	 * @throws Exception if a file cannot be read
	 * @throws IllegalArgumentException if {@code path} is not a listable directory
	 */
	public int [][] createDocWords(String path, Map<Integer,String> int2filename) throws Exception{
		//each file contains a day
		//the name of the file is in the form: Mon,dd-MM-yyy.txt
		File fdir = new File(path);
		File[] daysfile = fdir.listFiles();
		// listFiles() returns null when path is not a directory or cannot be read
		if (daysfile == null) {
			throw new IllegalArgumentException("Not a readable directory: " + path);
		}

		// Collect into a list so skipped svn entries leave no null slots.
		// (A fixed-size array indexed by a separate counter would keep trailing
		// nulls whenever an svn entry is skipped, crashing tokenizeDocuments.)
		List<CharSequence> texts = new ArrayList<CharSequence>(daysfile.length);

		for (File dayfile : daysfile) {
			//to avoid folders created by sourceforge
			if (dayfile.getName().contains("svn")) {
				continue;
			}
			//each file contains a single line with all the words;
			//try-with-resources closes the reader even if readLine throws
			try (BufferedReader inFile = new BufferedReader(new FileReader(dayfile))) {
				String words = inFile.readLine();
				// empty file -> readLine() is null; tokenize as an empty document
				texts.add(words == null ? "" : words);
			}
		}

		int[][] docWords = tokenizeDocuments(texts.toArray(new CharSequence[0]), tokFact, symTab, minCount);
		printSymTab(docWords, int2filename);

		return docWords;
	}

	/**
	 * Debug dump: prints the symbol table and, for each document, its filename
	 * followed by the tokens resolved back from their symbol ids.
	 *
	 * @param dw           document-word id matrix from {@link #createDocWords}
	 * @param int2filename document index to filename map
	 */
	public void printSymTab(int[][] dw, Map<Integer,String> int2filename){

		System.out.println("symbTab=" + symTab);

		for (int n = 0; n < dw.length; ++n) {
			System.out.print(int2filename.get(n)+" ");
			System.out.printf("docWords[%2d] = { ",n);
			for (int i = 0; i < dw[n].length; ++i) {
				if (i > 0) System.out.print(", ");
				System.out.print(symTab.idToSymbol(dw[n][i]));
			}
			System.out.println(" }");
		}
		System.out.println();

	}

	/**
	 * Runs two LDA Gibbs samplers in parallel (seeds {@code seed} and
	 * {@code seed+1}), matches their topics by similarity, and writes the
	 * report of the first run's sample as KML ({@code out}) and CSV
	 * ({@code outcsv}).
	 *
	 * <p>Both writers are closed on exit, including on failure.
	 *
	 * @throws Exception if sampling is interrupted or reporting fails
	 */
	public void runLDA(int[][] docwords, long seed, short numtopics, double alpha, double beta, int numsamples, TwitterParser parser, PrintWriter out, int cellsize, String outcsv) throws Exception{
		//other classes
		Kml kmlutils = new Kml();
		PrintWriter csv  = new PrintWriter(outcsv);
		try {
			csv.println("topic;day;prob;hour;lat;lon");

			//run two different lda models --> yields a list of topics ordered by
			//their similarity across the 2 runs
			LdaRunnable run1 = new LdaRunnable(docwords, seed, numtopics, alpha, beta, numsamples);
			LdaRunnable run2 = new LdaRunnable(docwords, seed+1L, numtopics, alpha, beta, numsamples);

			Thread th1 = new Thread(run1);
			Thread th2 = new Thread(run2);
			th1.start();
			th2.start();
			th1.join();
			th2.join();

			LatentDirichletAllocation lda1 = run1.mLda;
			LatentDirichletAllocation lda2 = run2.mLda;
			//only the first run's sample is reported; the second run exists to
			//assess topic stability via similarity matching
			LatentDirichletAllocation.GibbsSample sample = run1.sample;

			LingPipeLdaHandler handler = new LingPipeLdaHandler(symTab, parser, cellsize, out, csv);
			List<TopicSim> listSim = handler.similarity(lda1, lda2);

			kmlutils.printHeader(out, "topic");

			for (TopicSim sim : listSim)
				handler.report(sample, sim.mI);

			kmlutils.printFooter(out);
		} finally {
			// close both writers even if sampling/reporting fails
			out.close();
			csv.close();
		}
	}

}
