package generator.denpendency.detection;
import generator.structure.Pair;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

public class LDA implements Serializable{
	
	// Used only to report JVM memory usage in the progress logs.
	Runtime rt = Runtime.getRuntime();
	
	private int fileNum;   // number of input (training) documents
	private ArrayList<Map<String, Integer>> currentDistributionList;   // per document: key (word) -> frequency
	private ArrayList<Integer> rowsArray;   // per document: total token count
	private int generateRows;
	
	// Model dimensions.
	int K;   // number of topics
	int V;   // vocabulary size (distinct keys)
	int M;   // total documents = input + generated
	
	int[][] nw;   // nw[wordId][topic]: tokens of this word assigned to this topic
	int[][] nd;   // nd[doc][topic]: tokens of this document assigned to this topic
	
	int[] nwsum;   // nwsum[topic]: total tokens assigned to this topic
	int[] ndsum;   // ndsum[doc]: total tokens in this document
	
	int[][] z;   // z[doc][tokenIndex]: current topic assignment per token
	
	double[][] theta;   // document--topic distribution (M x K)
	double[][] phi;     // topic--word distribution (K x V)
	//double[] arguments;
	
	double[][] thetasum;   // running sums of sampled theta/phi (averaged in getTheta/getPhi)
	double[][] phisum;

	int generateDay;   // set to fileNum - 1 in the constructor; document slot used in inference mode
	
	//keys
	private String[] keysAll;   // wordId -> key (inverse of string_to_id)
	HashMap<String, Integer> string_to_id = new HashMap<String, Integer>();
	
	//arguments of gibbs
	private int ITERATIONS;
	private int BURN_IN;
	private int THIN_INTERVAL;
	private int SAMPLE_LAG;
	
	private double numstats;   // number of samples accumulated into thetasum/phisum
	
	// Symmetric Dirichlet hyperparameters.
	private double beta = 0.5;
	private double alpha = 2;
	
	private int[] generateDistribution;   // inference input; indexed by word id, value = frequency (see initialState)
	
	// Output / working file paths (set via setFile()).
	private String phiFile;
	private String thetaFile;
	private String lpFile;
	private String lpTemp;
	private String keyFile;
	
	private int flag;   // 0 = training mode, otherwise inference mode
	private int dispcol;   // progress-display column counter used by gibbs()
	
	private ArrayList<ArrayList<Double>> weightList;   // result weights from the LP in findDependency()
	
	private int currentGenerateRows;   // token count of the document initialised in inference mode
	
	private int lpDimension;
	
	private int normalize = 200000;   // NOTE(review): not referenced in this file — confirm external use
	
	/**
	 * Builds the model: stores the inputs, configures the Gibbs schedule,
	 * derives the vocabulary (V, keysAll, string_to_id) and allocates every
	 * count/statistics matrix.
	 */
	public LDA(int fileN, int generatedFileNum, 
			ArrayList<Map<String, Integer>> currentDistributionList, 
			ArrayList<Integer> rowsArray, int generateRows,
			int topicNumbers, int lpDimension) {

		this.fileNum = fileN;
		this.generateDay = fileNum - 1;
		this.currentDistributionList = currentDistributionList;
		this.rowsArray = rowsArray;
		this.generateRows = generateRows;
		this.lpDimension = lpDimension;

		System.out.println();

		K = topicNumbers;
		configure(4, 0, 1, 1);

		// Sets V and the key <-> id mapping from the input distributions.
		setVariable(currentDistributionList, rowsArray);

		M = fileN + generatedFileNum;

		// Count matrices for the sampler.
		nw = new int[V][K];
		System.out.println("V: " + V + " K: " + K + " M: " + M);

		nd = new int[M][K];
		nwsum = new int[K];
		ndsum = new int[M];
		z = new int[M][];

		// Accumulators and current estimates.
		thetasum = new double[M][K];
		phisum = new double[K][V];
		theta = new double[M][K];
		phi = new double[K][V];
	}
	
	/** Stores the Gibbs-sampler schedule parameters. */
	private void configure(int iterations, int burnIn, int thinInterval,  
		    int sampleLag) {
		this.ITERATIONS = iterations;
		this.BURN_IN = burnIn;
		this.THIN_INTERVAL = thinInterval;
		this.SAMPLE_LAG = sampleLag;
	}
	
	/**
	 * Builds the vocabulary from every distinct key across the input
	 * distributions: sets V and keysAll, and fills the string -> id lookup.
	 *
	 * Fix: the old code ignored the {@code currentDistributionList2} parameter
	 * and silently iterated the same-named field instead; the parameter is now
	 * used. {@code rowsArray} is kept for signature compatibility but is not
	 * needed to build the vocabulary.
	 */
	private void setVariable(
			ArrayList<Map<String, Integer>> currentDistributionList2,
			ArrayList<Integer> rowsArray) {

		Set<String> keys = new HashSet<String>();
		for (Map<String, Integer> hm : currentDistributionList2) {
			keys.addAll(hm.keySet());
		}

		keysAll = keys.toArray(new String[0]);
		V = keysAll.length;

		// string -> id: a key's id is its position in keysAll.
		for (int i = 0; i < keysAll.length; i++) {
			string_to_id.put(keysAll[i], i);
		}

		System.out.println("distinct:" + V + " setVariable (store the string_to_id): Used Memory = " + (rt.totalMemory() - rt.freeMemory()));
	}
	
	/**  
     * Initialisation: assigns every token a uniformly random topic and builds
     * the count matrices (nw, nd, nwsum, ndsum) to match.
     *
     * flag == 0 (training): initialises all fileNum documents from
     * currentDistributionList.
     * flag != 0 (inference): initialises only document generateDay from
     * generateDistribution (indexed by word id, value = frequency), with
     * currentGenerateRows tokens in total.
     */ 
	public void initialState() {
		 
        if(flag == 0) {
        	for (int m = 0; m < fileNum; m++) {
            	
                int N = rowsArray.get(m);  
                
                z[m] = new int[N];  
                
                Map<String, Integer> currentDistribution = currentDistributionList.get(m);
                
                Set<String> allKeysCurrent = currentDistribution.keySet();
                
                // Expand the (key -> frequency) map into N individual tokens;
                // n is the running token index within document m.
                int n = 0;
                for(Iterator<String> ite = allKeysCurrent.iterator(); ite.hasNext();) {
                	
                	String currentKey = ite.next();
                	int frequency = currentDistribution.get(currentKey);
        			int currentID = string_to_id.get(currentKey);
        			
        			for(int i = 0; i < frequency; i++) {
        				
        				// Uniform random topic in [0, K).
        				int topic = (int) (Math.random() * K);
        				z[m][n] = topic; 
        				
        				// number of instances of word i assigned to topic j  
                        nw[currentID][topic]++;  
                        // number of words in document i assigned to topic j.  
                        nd[m][topic]++;  
                        // total number of words assigned to topic j.  
                        nwsum[topic]++; 
        				
                        n++;
        			}
                	
                }
          
                // total number of words in document i
                ndsum[m] = N;  
            }
          
        	System.out.println("initial ok!");
        }
        	
        else {
        	
        	int N = currentGenerateRows; 
                
        	int distinct = generateDistribution.length;
        	
        	// NOTE(review): generateDay is fileNum - 1 (see constructor), so
        	// inference re-uses the last training document's slot in z/nd/ndsum
        	// — confirm this is intended.
        	int zID = generateDay;
            z[zID] = new int[N];  
            
            int n = 0;
            for(int i = 0; i < distinct; i++) {
            	
            	// In inference mode the word id is the array index itself.
            	int frequency = generateDistribution[i];
            	int currentID = i;
            	
            	for(int j = 0; j < frequency; j++) {
            		
            		int topic = (int) (Math.random() * K); 
            		z[zID][n] = topic;
            		// number of instances of word i assigned to topic j  
                    nw[currentID][topic]++;  
                    // number of words in document i assigned to topic j.  
                    nd[zID][topic]++;  
                    // total number of words assigned to topic j.  
                    nwsum[topic]++; 
                    
                    n++;
            	}
            	
            }
        
            // total number of words in document i  
            ndsum[zID] = N;      
        	
        	System.out.println("inference--initial ok!");
        }
        
    }
	
	/**  
     * Runs the Gibbs sampler for ITERATIONS sweeps. Each sweep resamples the
     * topic of every token (all fileNum training documents when flag == 0,
     * only document generateDay otherwise), then — once i >= BURN_IN, every
     * SAMPLE_LAG iterations — accumulates theta/phi statistics via
     * updateParams(). At the end of every iteration the current theta/phi are
     * estimated, the dependency LP is solved and all results are written to
     * disk (the i % 2 guard around that tail is commented out).
     */ 
	public void gibbs() {  
 
        numstats = 0;
 
        for (int i = 0; i < ITERATIONS; i++) {
 
        	long start = System.currentTimeMillis();
        	
            // for all z_i  
        	if(flag == 0) {
        		 for (int m = 0; m < fileNum; m++) {
                     for (int n = 0; n < z[m].length; n++) {
      
                         int topic = sampleFullConditional(m, n);  
                         z[m][n] = topic;  
                     }
                 }
        	}
        	else {
//        		for (int m = fileNum; m < M; m++) {
                    for (int n = 0; n < z[generateDay].length; n++) {
     
                        int topic = sampleFullConditional(generateDay, n);  
                        z[generateDay][n] = topic;  
                    }
//                }
        	}
           
            System.out.println("iteration: " + i);
            
            // dispcol is only a progress-display column counter (wraps at 100).
            if ((i < BURN_IN) && (i % THIN_INTERVAL == 0)) {
                dispcol++;  
            }  
            
            // display progress  
            if ((i > BURN_IN) && (i % THIN_INTERVAL == 0)) {  
                dispcol++;  
            }  
            
            // get statistics after burn-in  
            if ((i >= BURN_IN) && (SAMPLE_LAG > 0) && (i % SAMPLE_LAG == 0)) {  
            	System.out.println("updateparams");
                updateParams();  
                if (i % THIN_INTERVAL != 0)  
                    dispcol++;  
            }  
            if (dispcol >= 100) {  
                dispcol = 0;  
            }  
            
            long end = System.currentTimeMillis();
            System.out.println("distinct:" + V + " iteration: " + i + " gibbs(): Used Memory = " + (rt.totalMemory() - rt.freeMemory()) + " used time " + (end - start));
            
          //  if(i % 2 == 0 && i > 1) {
            	
            	// Re-estimate, solve the LP and persist every iteration;
            	// the detailed timings are only printed at i == 2.
            	theta = getTheta();
        		
        		phi = getPhi();
        		
        		long end1 = System.currentTimeMillis();
        		
        		if(i == 2) 
        		System.out.println("distinct:" + V + " get theta and phi: " + (end1 - end));
        		
        		findDependency();
        		
        		long dependTime = System.currentTimeMillis();
        		
        		if(i == 2)
        		System.out.println("distinct:" + V + " find dependency has used: " + (dependTime - end1));
        		
        		write(theta, phi, weightList);
        		long writeTime = System.currentTimeMillis();
        		
        		if(i == 2)
        			System.out.println("distinct:" + V + " write disk has taken " + (writeTime - dependTime));
        //    }
            
        }  
        
    }
	
	/**  
     * Samples a new topic for token n of document m from the full conditional  
     * p(z_i = j | z_-i, w) = (n_-i,j(w_i) + beta)/(n_-i,j(.) + V * beta) *  
     * (n_-i,j(d_i) + alpha)/(n_-i,.(d_i) + K * alpha).  
     *
     * Fix: in inference mode (flag != 0) the old code decremented
     * nw[generateDistribution[n]] but re-incremented nw[currentID] (an id
     * derived from the training document), corrupting the nw counts. The same
     * word id is now used for both the removal and the re-add.
     *
     * @param m document index
     * @param n token index within the document
     * @return the newly sampled topic
     */ 
    private int sampleFullConditional(int m, int n) {  

        // Resolve the vocabulary id of token n of document m.
        int wordId;
        if (flag == 0) {
            // Training: walk the (key -> frequency) map cumulatively until
            // token index n is covered.
            Map<String, Integer> currentDistribution = currentDistributionList.get(m);
            int currentID = 0;
            int num = 0;
            for (String currentKey : currentDistribution.keySet()) {
                num += currentDistribution.get(currentKey);
                if (n <= num - 1) {
                    currentID = string_to_id.get(currentKey);
                    break;
                }
            }
            wordId = currentID;
        } else {
            // NOTE(review): indexing generateDistribution by token position n
            // looks suspect — elsewhere it is indexed by word id (see
            // initialState). Behaviour of the removal step is preserved;
            // confirm the intended mapping.
            wordId = generateDistribution[n];
        }

        // Remove z_i from the count variables.
        int topic = z[m][n];  
        nw[wordId][topic]--;  
        nd[m][topic]--;  
        nwsum[topic]--;  
        ndsum[m]--;  

        // Multinomial sampling via the cumulative method.
        double[] p = new double[K];  
        for (int k = 0; k < K; k++) {  
            p[k] = (nw[wordId][k] + beta) / (nwsum[k] + V * beta)  
                * (nd[m][k] + alpha) / (ndsum[m] + K * alpha);  
        }  
        for (int k = 1; k < p.length; k++) {  
            p[k] += p[k - 1];  
        }  

        // Scaled sample because p[] is unnormalised.
        double u = Math.random() * p[K - 1];  
        for (topic = 0; topic < p.length; topic++) {  
            if (u < p[topic])  
                break;  
        }  

        // Add the newly sampled z_i back to the count variables.
        nw[wordId][topic]++;  
        nd[m][topic]++;  
        nwsum[topic]++;  
        ndsum[m]++;  

        return topic;  
    }
	
	 /**
     * Accumulates the current state's theta and phi estimates into
     * thetasum/phisum and bumps the sample counter. The document range
     * depends on the mode: training covers [0, fileNum), inference
     * covers [fileNum, M).
     */ 
    private void updateParams() {  

        int mStart = (flag == 0) ? 0 : fileNum;
        int mEnd = (flag == 0) ? fileNum : M;

        for (int m = mStart; m < mEnd; m++) {
            for (int k = 0; k < K; k++) {
                thetasum[m][k] += (nd[m][k] + alpha) / (ndsum[m] + K * alpha);
            }
        }

        for (int k = 0; k < K; k++) {
            for (int w = 0; w < V; w++) {
                phisum[k][w] += (nw[w][k] + beta) / (nwsum[k] + V * beta);
            }
        }

        numstats++;
    }
	
    /**
     * Retrieve estimated document--topic associations for documents
     * 0..generateDay. If SAMPLE_LAG > 0 the mean over all collected samples
     * is returned; otherwise the estimate from the current counts.
     *
     * @return theta multinomial mixture of document topics
     */ 
    public double[][] getTheta() {  

    	System.out.println("getTheta()----------------");

        int docs = generateDay + 1;
        double[][] theta = new double[docs][K];

        System.out.println("getTheta()--" + docs + "-th day");

        boolean averaged = SAMPLE_LAG > 0;
        for (int m = 0; m < docs; m++) {
            for (int k = 0; k < K; k++) {
                theta[m][k] = averaged
                        ? thetasum[m][k] / numstats
                        : (nd[m][k] + alpha) / (ndsum[m] + K * alpha);
            }
        }

        System.out.println("theta end");
        return theta;  
    }
    
    /**
     * Retrieve estimated topic--word associations. If SAMPLE_LAG > 0 the
     * mean over all collected samples is returned; otherwise the estimate
     * from the current counts.
     *
     * @return phi multinomial mixture of topic words (K x V)
     */ 
    public double[][] getPhi() {  

    	System.out.println("getPhi--------------");
    	System.out.println("getPhi()--" + (generateDay + 1) + "-th day");

        double[][] phi = new double[K][V];

        boolean averaged = SAMPLE_LAG > 0;
        for (int k = 0; k < K; k++) {
            for (int w = 0; w < V; w++) {
                phi[k][w] = averaged
                        ? phisum[k][w] / numstats
                        : (nw[w][k] + beta) / (nwsum[k] + V * beta);
            }
        }

        System.out.println("phi end");
        return phi;  
    }
    
    /**
     * Runs the linear program over the current theta estimates for documents
     * 0..generateDay and stores the resulting normalized weights in weightList.
     *
     * Fixes: removed the unused {@code Matrix} local; the old code caught the
     * IOException from the LinearProgramming constructor, printed it and then
     * dereferenced the still-null {@code lp} (NullPointerException) — it now
     * fails fast with the original cause attached.
     *
     * @throws IllegalStateException if the LP working file cannot be set up
     */
    public void findDependency() {

    	System.out.println("distinct " + V + " findDependency---------------------");

    	int documentLength = generateDay + 1;

        System.out.println();

        LinearProgramming lp;
		try {
			lp = new LinearProgramming(theta, documentLength, K, lpTemp, lpDimension);
		} catch (IOException e) {
			throw new IllegalStateException("failed to initialise linear programming on " + lpTemp, e);
		}

		weightList = lp.getAlphaNormalize();

        LinearProgramming.pwTemp.close();
    }
	
	/** Records the output and working file paths used by write() and the LP step. */
	public void setFile(String phiFile, String thetaFile, String lpFile, String lpTempFile, String keyFile) {
		this.phiFile = phiFile;
		this.thetaFile = thetaFile;
		this.lpFile = lpFile;
		this.lpTemp = lpTempFile;
		this.keyFile = keyFile;
	}
	
	/** @return V, the vocabulary size (number of distinct keys). */
	public int getDistinct() {
		return V;
	}
	
	/** @return generateDay, the document slot used in inference mode. */
	public int getGenerateDay() {
		return generateDay;
	}
	
	//=================================file deal=================================
	
	//write to disk
		/**
		 * Persists the current model state: theta (one space-separated row per
		 * document), phi (first line = vocabulary size V, then one row per
		 * topic), the LP weight rows, and the vocabulary keys on one line.
		 *
		 * Fixes: writers are now closed via try-with-resources even when an
		 * IOException interrupts a write (the old code leaked them); rows are
		 * built with StringBuilder instead of O(n^2) string concatenation; an
		 * empty row no longer crashes substring(). On IOException the original
		 * best-effort behaviour is kept: log and continue.
		 */
		private void write(double[][] theta, double[][] phi, ArrayList<ArrayList<Double>> weightList) {
			try {
				//--------------theta-------------------
				try (PrintWriter pw = new PrintWriter(new FileWriter(thetaFile))) {
					for (double[] row : theta) {
						pw.println(joinDoubles(row));
					}
				}

				//----------------phi: the first line is distinct numbers--------------
				try (PrintWriter pw = new PrintWriter(new FileWriter(phiFile))) {
					pw.println(V);
					for (double[] row : phi) {
						pw.println(joinDoubles(row));
					}
				}

				//--------------weight----------------
				try (PrintWriter pw = new PrintWriter(new FileWriter(lpFile))) {
					for (ArrayList<Double> al : weightList) {
						StringBuilder sb = new StringBuilder();
						for (int j = 0; j < al.size(); j++) {
							if (j > 0) sb.append(' ');
							sb.append(al.get(j));
						}
						pw.println(sb);
					}
				}

				//-------------keys------------------
				try (PrintWriter pw = new PrintWriter(new FileWriter(keyFile))) {
					System.out.println(keyFile + ":::" + keysAll[0]);
					pw.println(String.join(" ", keysAll));
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
		}

		/** One space-separated line for a row of doubles (same format as before). */
		private static String joinDoubles(double[] row) {
			StringBuilder sb = new StringBuilder();
			for (int j = 0; j < row.length; j++) {
				if (j > 0) sb.append(' ');
				sb.append(row[j]);
			}
			return sb.toString();
		}
	
	/**
	 * Persists only the LP weight list to lpFile, one space-separated row per
	 * entry.
	 *
	 * Fixes: the writer is now closed via try-with-resources even when a row
	 * fails to write (the old code leaked it on IOException); rows are built
	 * with StringBuilder instead of repeated concatenation, and an empty row
	 * no longer crashes substring(). On IOException the original best-effort
	 * behaviour is kept: log and continue.
	 */
	private void write(ArrayList<ArrayList<Double>> weightList) {
		try (PrintWriter pw = new PrintWriter(new FileWriter(lpFile))) {
			for (ArrayList<Double> al : weightList) {
				StringBuilder row = new StringBuilder();
				for (int j = 0; j < al.size(); j++) {
					if (j > 0) row.append(' ');
					row.append(al.get(j));
				}
				pw.println(row);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}	
		
	/**
	 * Loads theta back from thetaFile (space-separated doubles, one document
	 * per line) into the preallocated theta array.
	 *
	 * Fixes: the reader is now closed via try-with-resources (the old code
	 * leaked it when a line failed to parse/read); added the bounds guard
	 * that the sibling readPhiFile already had, so extra trailing lines no
	 * longer cause an ArrayIndexOutOfBoundsException.
	 */
	private void readThetaFile() {

		System.out.println("theta_length " + theta.length);
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(thetaFile)))) {

			String tempString;
			int i = 0;
			while ((tempString = br.readLine()) != null) {
				String[] temp = tempString.split(" ");
				for (int j = 0; j < temp.length; j++) {
					theta[i][j] = Double.parseDouble(temp[j]);
				}
				i++;
				if (i == theta.length) break;
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
	/**
	 * Loads phi back from phiFile into the preallocated phi array, stopping
	 * after K rows.
	 *
	 * Fixes: the reader is now closed via try-with-resources (the old code
	 * leaked it on error); the file's first line is the vocabulary size V
	 * written by write() — the old code parsed it as phi[0][0] and shifted
	 * every row by one, so it is now skipped.
	 */
	private void readPhiFile() {
		System.out.println("phi_length " + phi.length);
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(phiFile)))) {

			// Skip the V header line emitted by write().
			br.readLine();

			String tempString;
			int i = 0;
			while ((tempString = br.readLine()) != null) {
				String[] temp = tempString.split(" ");
				for (int j = 0; j < temp.length; j++) {
					phi[i][j] = Double.parseDouble(temp[j]);
				}
				i++;
				if (i == K) break;
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
	//------------------compute----------------------
	/**
	 * Full pipeline: random topic initialisation followed by Gibbs sampling
	 * (gibbs() itself re-estimates theta/phi, solves the LP and writes the
	 * results every iteration).
	 */
	public void compute() {

		long start = System.currentTimeMillis();
		initialState();
		long end = System.currentTimeMillis();

		System.out.println("distinct:" + V + " initialState() (z[]): Used Memory = " + (rt.totalMemory() - rt.freeMemory()) + " used time " + (end - start));

		gibbs();
	}	
	
	/**
	 * Skips sampling entirely: reloads theta and phi from disk, re-runs the
	 * dependency LP and writes the resulting weights.
	 */
	public void fastCompute() {

		long start = System.currentTimeMillis();
		readThetaFile();
		readPhiFile();
		long afterRead = System.currentTimeMillis();
		System.out.println("read theta and phi has used time " + (afterRead - start));

		findDependency();
		long afterDepend = System.currentTimeMillis();
		System.out.println("find dependency has used time " + (afterDepend - start));

		write(weightList);
	}
		
}
