package cn.edu.scut.hsrc.cluster;

import java.util.concurrent.CountDownLatch;

import cn.edu.scut.hsrc.bean.Doc;
import cn.edu.scut.hsrc.input.InputAdapter;
import cn.edu.scut.hsrc.output.ClusterResult;

/**
 * Facade over the whole HSRC clustering algorithm. Up to three clustering
 * strategies — semantic, site and time — are selected at construction time
 * and run concurrently (one thread each) over a document set; the results of
 * each strategy are stored and exposed through getters.
 *
 * <p>Not thread-safe: result fields are plain instance state, so a single
 * instance must not run {@link #doCluster(String, Doc[])} from several
 * threads at once.
 *
 * @author feiyu
 */
public class HSRC {
	// Switches selecting which clustering algorithms run.
	private final boolean bSemanticCluster; // semantic clustering enabled
	private final boolean bSiteCluster; // site clustering enabled
	private final boolean bTimeCluster; // time clustering enabled

	private ClusterResult[] semanticClusterResults; // semantic clustering results
	private ClusterResult[] siteClusterResults; // site clustering results
	private ClusterResult[] timeClusterResults; // time clustering results

	/**
	 * Creates the wrapper with one switch per clustering algorithm:
	 * {@code true} enables the algorithm, {@code false} disables it.
	 *
	 * @param bSemanticCluster semantic cluster tag
	 * @param bSiteCluster site cluster tag
	 * @param bTimeCluster time cluster tag
	 */
	public HSRC(boolean bSemanticCluster, boolean bSiteCluster,
			boolean bTimeCluster) {
		this.bSemanticCluster = bSemanticCluster;
		this.bSiteCluster = bSiteCluster;
		this.bTimeCluster = bTimeCluster;
		this.semanticClusterResults = null;
		this.siteClusterResults = null;
		this.timeClusterResults = null;
	}

	/**
	 * Reads {@code count} documents starting at {@code start} from the
	 * adapter and clusters them.
	 *
	 * @param query the query the documents were retrieved for
	 * @param inputAdapter source the documents are loaded from
	 * @param start index of the first document to read
	 * @param count number of documents to read
	 */
	public void doCluster(String query, InputAdapter inputAdapter, int start,
			int count) {
		// Load the input data and delegate to the array-based overload.
		Doc[] docs = inputAdapter.initData(start, count).toArray(new Doc[0]);
		doCluster(query, docs);
	}

	/**
	 * Runs every enabled clustering algorithm on its own thread, waits until
	 * all of them finish, then stores each algorithm's results. Returns
	 * immediately if no algorithm is enabled. If the waiting thread is
	 * interrupted, the interrupt status is restored and the result fields of
	 * this run are left unassigned.
	 *
	 * @param query the query the documents were retrieved for
	 * @param docs the documents to cluster
	 */
	public void doCluster(String query, Doc[] docs) {
		// Count the enabled algorithms so the latch waits for exactly that many.
		int numOfThread = 0;
		if (this.bSemanticCluster) {
			numOfThread++;
		}
		if (this.bSiteCluster) {
			numOfThread++;
		}
		if (this.bTimeCluster) {
			numOfThread++;
		}
		if (numOfThread == 0) {
			return; // nothing to do
		}

		ClusterThread semanticThread = null;
		ClusterThread siteThread = null;
		ClusterThread timeThread = null;
		System.out.println("******* start to cluster \"" + query + "\" with " + docs.length + " documents. *******");
		long costTime = System.currentTimeMillis();
		// Each worker counts the latch down once when it finishes.
		CountDownLatch countDownLatch = new CountDownLatch(numOfThread);
		if (this.bSemanticCluster) {
			System.out.println("cluster based on semantic...");
			ICluster semanticCluster = new SemanticCluster(query, docs);
			semanticThread = new ClusterThread(semanticCluster, countDownLatch);
			semanticThread.start();
		}
		if (this.bSiteCluster) {
			System.out.println("cluster based on site...");
			ICluster siteCluster = new SiteCluster(docs);
			siteThread = new ClusterThread(siteCluster, countDownLatch);
			siteThread.start();
		}
		if (this.bTimeCluster) {
			System.out.println("cluster based on time...");
			ICluster timeCluster = new TimeCluster(docs);
			timeThread = new ClusterThread(timeCluster, countDownLatch);
			timeThread.start();
		}
		try {
			// Wait for every started algorithm to complete.
			countDownLatch.await();
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers can observe it; without
			// this the interruption would be silently swallowed.
			Thread.currentThread().interrupt();
			e.printStackTrace();
			return;
		}
		costTime = System.currentTimeMillis() - costTime;
		// Publish each algorithm's results.
		if (semanticThread != null) {
			this.semanticClusterResults = semanticThread.getResults();
		}
		if (siteThread != null) {
			this.siteClusterResults = siteThread.getResults();
		}
		if (timeThread != null) {
			this.timeClusterResults = timeThread.getResults();
		}
		System.out.println("******* cluster over, cost time : " + costTime + " ms. *******");
	}

	/**
	 * Returns the semantic clustering results, or {@code null} if semantic
	 * clustering has not run.
	 *
	 * @return the semantic cluster results, possibly {@code null}
	 */
	public ClusterResult[] getSemanticClusterResults() {
		return semanticClusterResults;
	}

	/**
	 * Returns the site clustering results, or {@code null} if site
	 * clustering has not run.
	 *
	 * @return the site cluster results, possibly {@code null}
	 */
	public ClusterResult[] getSiteClusterResults() {
		return siteClusterResults;
	}

	/**
	 * Returns the time clustering results, or {@code null} if time
	 * clustering has not run.
	 *
	 * @return the time cluster results, possibly {@code null}
	 */
	public ClusterResult[] getTimeClusterResults() {
		return timeClusterResults;
	}
}
