package TempPackage;
import java.io.BufferedInputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import javax.sound.midi.MidiDevice.Info;

import Config.GlobalLog;

import com.sleepycat.je.rep.stream.Protocol.Entry;

/**
 * @deprecated
 * @author Xenophon
 * @version 2009-12-13
 * The PreProcessor processes all the crawled files; that is, it
 * deletes all the garbage files we don't want to analyze.
 *
 */

public class PreProcessor {
	// Root of the crawled BBS mirror to scan.
	private static String directory = "D:\\OfflineExplorerEnterprise\\Portable Offline Browser\\Download\\bbs.pku.edu.cn\\bbs";
	private static String logFile = "log.txt";
	private static String statFile = "stat.txt";
	// Destination directory; the crawl's directory layout is mirrored here.
	private static String postDirectory = "D:\\TestRes";
	
	/**
	 * Result of a quick scan of one crawled page: whether it is a post at
	 * all, and whether it belongs to a multi-page topic (has a preceding
	 * and/or a following page).
	 */
	private static class PagePeekInfo
	{
		public boolean isPost = true;
		public boolean hasNext = false;
		public boolean hasPre  = false;
		public String  url     = "";
	}
	
	private static class Statitics
	{
		// Title -> occurrence count. NOTE(review): nothing in this file ever
		// populates this map, so the stat file is currently written empty —
		// confirm whether the counting code was lost or lives elsewhere.
		public HashMap<String, Integer> fileTitlePool = new HashMap<String, Integer>();
	}
	
	/**
	 * When we meet the first page of a multi-page topic we remember it here
	 * as a <URL, relative output path> pair, so that later pages of the same
	 * topic can be appended to the right output file.
	 */
	private static HashMap<String, String> tmpMainPostPool = new HashMap<String, String>();
	/** Follow-up pages whose main post has not been seen yet: <URL, path>. */
	private static HashMap<String, String> tmpFollowingPagePool = new HashMap<String, String>();
	private static Statitics stat = new Statitics();
	
	/**
	 * Reads the whole file and decodes it as GB2312 text.
	 *
	 * Fixed: the old code sized the buffer with available() and issued a
	 * single read(), neither of which is guaranteed to cover the whole file,
	 * and it leaked the stream when an IOException was thrown mid-read.
	 *
	 * @param f the file to read
	 * @return the file content decoded with GB2312
	 * @throws IOException if the file cannot be opened or read
	 */
	public static String GetStringFromFile(File f)throws IOException
	{
		BufferedInputStream bis = new BufferedInputStream(
				new FileInputStream(f));
		try {
			byte[] res = new byte[(int) f.length()];
			int off = 0;
			// Loop until the buffer is full: a single read() may return early.
			while (off < res.length) {
				int n = bis.read(res, off, res.length - off);
				if (n < 0)
					break;
				off += n;
			}
			return new String(res, 0, off, "GB2312");
		} finally {
			bis.close();
		}
	}
	
	/**
	 * Extracts the text between &lt;title&gt; and &lt;/title&gt;.
	 *
	 * @param fileStr full page text
	 * @return the title, or an error message if the tags are absent
	 */
	public static String GetTitleFromString(String fileStr)
	{
		String[] strs = fileStr.split("<title>");
		if(strs.length==1)
			return "An error may occur,I can't get the title...";
		int end = strs[1].indexOf("</title>");
		// Fixed: guard against a missing closing tag, which previously threw
		// StringIndexOutOfBoundsException from substring(0, -1).
		if (end == -1)
			return "An error may occur,I can't get the title...";
		return strs[1].substring(0, end);
	}
	
	/**
	 * Extracts the post's canonical URL from the page's "本文链接" anchor.
	 *
	 * @param fileStr full page text
	 * @return the URL, or an error message if the anchor is absent
	 */
	public static String GetURLFromString(String fileStr)
	{
		String[] strs = fileStr.split("本文链接: <a href='");
		if(strs.length==1)
			return "An error may occur,I can't get the URL...";
		int end = strs[1].indexOf("'>");
		// Fixed: same missing-delimiter guard as GetTitleFromString.
		if (end == -1)
			return "An error may occur,I can't get the URL...";
		return strs[1].substring(0, end);
	}
	
	/**
	 * Decides whether the page is a topic post and whether it has
	 * preceding/following pages, by looking for the expected title prefix
	 * and the "上一页"/"下一页" pager links.
	 *
	 * @param filestr full page text
	 * @return a PagePeekInfo describing the page
	 */
	public static PagePeekInfo PeekPage(String filestr)
	{
		PagePeekInfo info = new PagePeekInfo();
		String title = GetTitleFromString(filestr);
		String[] segs = title.split(":");
		if (!segs[0].equals("北大未名站 同主题阅读")) {
			info.isPost = false;
		} else {
			// Whether some pages follow?
			if (filestr.indexOf("下一页</a>") != -1)
				info.hasNext = true;
			// Whether some pages precede?
			if (filestr.indexOf("上一页</a>") != -1)
				info.hasPre = true;
			info.url = GetURLFromString(filestr);
		}
		return info;
	}
	
	/**
	 * Recursively walks the crawl directory, mirroring its layout under
	 * postDirectory. Non-post pages are skipped; the first page of a
	 * multi-page topic is copied and remembered; later pages of the same
	 * topic are appended to that copy.
	 *
	 * Fixed: (1) a stray return made EVERY follow-up page bail out before the
	 * merge step, so multi-page topics were never assembled; (2) follow-up
	 * pages now open the writer in append mode instead of truncating the
	 * main post; (3) listFiles() null result (I/O error) no longer NPEs;
	 * (4) the output writer is closed even if write() throws.
	 *
	 * @param dir     file or directory currently being processed
	 * @param bw      log writer (currently unused here; kept for callers)
	 * @param curPath path relative to both the crawl root and postDirectory
	 * @throws IOException if reading or writing a page fails
	 */
	public static void SearchDirectory(File dir,BufferedWriter bw,String curPath) throws IOException
	{
		if(dir.isDirectory())
		{
			GlobalLog.AddLog("Processing Directory: "+dir.getName());
			File postDir = new File(postDirectory+curPath);
			postDir.mkdir();
			GlobalLog.AddLog("Create new Directory in "+postDirectory+curPath);
			File[] files = dir.listFiles();
			// listFiles() returns null on I/O error or permission problems.
			if (files == null)
				return;
			for(File file:files)
				SearchDirectory(file, bw,curPath+"\\"+file.getName());
		}else {
			GlobalLog.AddLog("Processing File: "+curPath);
			String filestr = GetStringFromFile(dir);

			PagePeekInfo pInfo = PeekPage(filestr);
			if (!pInfo.isPost)
				return;
			// This is the first page of a multi-page topic: remember it.
			if(!pInfo.hasPre&&pInfo.hasNext)
				tmpMainPostPool.put(pInfo.url,curPath);
			if(pInfo.hasPre)
			{
				// Main post not seen yet: park this page and wait. The return
				// belongs INSIDE this if — previously it ran unconditionally.
				if (!tmpMainPostPool.containsKey(pInfo.url)) {
					tmpFollowingPagePool.put(pInfo.url, curPath);
					return;
				}
			}
			
			File mainPost = pInfo.hasPre?new File(postDirectory+tmpMainPostPool.get(pInfo.url)):new File(postDirectory+"\\"+curPath);
			if (pInfo.hasPre&&!mainPost.exists())
			    GlobalLog.AddLog("Severe Error: There is no file:"+postDirectory+tmpMainPostPool.get(pInfo.url));
			if(!pInfo.hasPre)
				mainPost.createNewFile();
			// Append when merging a follow-up page; truncate for a fresh copy.
			BufferedWriter workbw = new BufferedWriter(new FileWriter(mainPost, pInfo.hasPre));
			try {
				workbw.write(filestr);
			} finally {
				workbw.close();
			}
		}
	}
	
	/**
	 * Entry point: validates the crawl root, creates the output directory,
	 * log and stat files, then processes every crawled page.
	 *
	 * @param args unused
	 * @throws IOException if walking the crawl tree fails
	 */
	public static void main(String[] args) throws IOException {
		File baseDir = new File(directory);
		if(!baseDir.isDirectory())
		{
			System.out.println("The "+ directory+" is not Directory...");
		    return;
		}
		// create result Directory...
		File desDir = new File(postDirectory);
		desDir.mkdir();
		// create log file...
		File log = new File(postDirectory + "\\" + logFile);
		try {
			log.createNewFile();
		} catch (IOException e) {
			// Fixed: this message wrongly said "statistical file".
			System.out.println("An error occured when creating log file...");
			e.printStackTrace();
		}
		// create statistical file...
		File statistics = new File(postDirectory+"\\"+statFile);
		try{
			statistics.createNewFile();
		}catch(IOException e){
			System.out.println("An error occured when creating statistical file...");
			e.printStackTrace();
		}
		// process all the files; close the log writer even on failure.
		BufferedWriter bw = new BufferedWriter(new FileWriter(log));
		try {
			SearchDirectory(baseDir, bw, "");
		} finally {
			bw.close();
		}
		
		BufferedWriter bs = new BufferedWriter(new FileWriter(statistics));
		try {
			for (Map.Entry<String, Integer> entry : stat.fileTitlePool.entrySet())
				bs.write(entry.toString() + "\n");
		} finally {
			bs.close();
		}
	}

}
