/**
 * @author Fang Lu, fanglupku@gmail.com
 * 2011-4-17
 * This class drives the Google result crawler, the backlink crawler and the data extractor.
 */
package controller;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import net.GooglePageParserRegexp;
import net.GooglePageRobustParser;
import net.PageFetcher;

import persist.GooglePageDB;
import persist.RefPageDB;

import code.extract.CodeExtractor;
import code.extract.PageClassifier;
import code.extract.SegmentItem;
import code.extract.training.KeywordList;
import crawler.BackLinkCrawlerThread;
import crawler.CrawlItemPool;
import dao.APIDao;
import dao.BaseDao;
import dao.ErrorPageRecorder;
import dao.PageDao;
import dao.QueryDao;
import dao.QueryPageDao;
import dao.SegmentDao;
import dao.entity.APIEntity;
import dao.entity.PageEntity;
import dao.entity.QueryEntity;
import dao.entity.QueryPageRelation;
import dao.entity.SegmentEntity;


public class AutomaticDataDealer {
	/** Mode flag: data must be fetched fresh from the network. */
	public static final int DATA_NOT_EXISTS = 0;
	/** Mode flag: data already exists in the local BDB stores and is replayed. */
	public static final int DATA_ALREADY_EXISTS = 1;

	// Database connection settings; populated once by doOneJob() and read by
	// crawlBacklink() when it opens one connection per crawler thread.
	static String dbURL;
	static String user;
	static String pwd;

	/**
	 * Loads the API keyword list from the configured file and inserts every
	 * keyword that is not yet present in the API table.
	 *
	 * @param conn   open database connection
	 * @param config crawler configuration (supplies the keyword-list filename)
	 * @return the loaded keyword list, reused by the later crawl stages
	 */
	public static KeywordList initAPI(Connection conn, CrawlerConfig config){
		String keywordListFilename = config.getGoogleKeywordListFilename();
		KeywordList keywordList = new KeywordList(keywordListFilename,
				null, null);
		ArrayList<String> apiNameList = keywordList.getContent();
		HashMap<String, Integer> apiNameMapID = APIDao.getNameMapID(conn);
		int maxAPIID = APIDao.getMaxID(conn);
		for (int i = 0; i < apiNameList.size(); ++i){
			String thisAPIName = apiNameList.get(i);
			if (!apiNameMapID.containsKey(thisAPIName)){
				APIEntity thisAPI = new APIEntity(maxAPIID + 1, thisAPIName);
				// Only advance the id counter when the insert actually succeeded.
				if (APIDao.insertAPI(conn, thisAPI)){
					++ maxAPIID;
				}
			}
		}
		return keywordList;
	}

	/**
	 * For every not-yet-crawled keyword, fetches (or replays from the Google
	 * BDB) the Google result pages, then records the query, the result pages
	 * and the query-page relations in the database.
	 *
	 * @param model {@link #DATA_ALREADY_EXISTS} replays pages stored in the
	 *              Google BDB; any other value fetches from the network and
	 *              stores the raw pages into the BDB.
	 */
	public static void crawlAndExtractGoogleResult(Connection conn, CrawlerConfig config,
			KeywordList keywordList, int model){
		int rnInPage = 100;	// results per Google page ("num=" parameter)
		int GooglePages = config.getGoogleResultCount() / rnInPage;
		if (config.getGoogleResultCount() % rnInPage != 0) {
			GooglePages += 1;
		}
		int maxWaitTime = config.getGoogleWaitSecondMax();
		int minWaitTime = config.getGoogleWaitSecondMin();
		Random timeRandom = new Random(System.currentTimeMillis());
		int timeDiff = maxWaitTime - minWaitTime;

		String googleBdbDirName = config.getGoogleBDBDirname();
		String googleBdbDatabaseName = config.getGoogleBDBDatabasename();
		File googleBdbDir = new File(googleBdbDirName);
		if (!googleBdbDir.exists()) {
			googleBdbDir.mkdirs();
		}
		GooglePageDB googleDB = new GooglePageDB(googleBdbDirName, 1024 * 1024, null);
		googleDB.open(googleBdbDatabaseName);
		HashMap<String, Integer> apiNameMapID = APIDao.getNameMapID(conn);

		try {
			ArrayList<String> wordList = keywordList.getUncrawledContent();
			ArrayList<QueryEntity> existQueryList = QueryDao.getAllQuery(conn);
			HashMap<String, Integer> linkMapPageID = PageDao.getLinkMapID(conn);
			HashSet<String> allRelMark = QueryPageDao.getAllRelMarkSet(conn);
			int maxPageID = PageDao.getMaxID(conn);
			// Index the existing queries by content so re-runs skip crawled queries.
			HashMap<String, QueryEntity> contentMapQuery = new HashMap<String, QueryEntity>();
			int maxQueryID = 0;
			for (int i = 0; i < existQueryList.size(); ++i){
				contentMapQuery.put(existQueryList.get(i).getContent(), existQueryList.get(i));
				if (maxQueryID < existQueryList.get(i).getId()){
					maxQueryID = existQueryList.get(i).getId();
				}
			}

			System.out.println("total keyword num:" + keywordList.getContent().size());
			System.out.println("need cralwed num:" + keywordList.getUncrawledContent().size());
			int googlePageNum = 0;
			int totalGooglePageNum = keywordList.getUncrawledContent().size() * GooglePages;
			for (int i = 0; i < wordList.size(); ++i) {
				String queryContent = "\"" + wordList.get(i) + "\"+example+OR+java";
				queryContent = URLEncoder.encode(queryContent, "utf8");
				System.out.println(i + "/" + wordList.size() + "/query:" + queryContent);
				if (contentMapQuery.containsKey(queryContent)){
					System.out.println("this query is crawled!");
					continue;
				}
				int thisAPIID = 0;
				if (apiNameMapID.containsKey(wordList.get(i))){
					thisAPIID = apiNameMapID.get(wordList.get(i));
				}
				QueryEntity thisQuery = new QueryEntity(maxQueryID + 1, thisAPIID, 0, 0, queryContent);
				int itemNum = 0;	// rank of the next result item inside this query
				boolean isFirstPage = true;
				for (int j = 0; j < GooglePages; ++j) {
					String queryURL = "http://www.google.com/search?num="
							+ rnInPage + "&q=" + queryContent + "&hl=en&start="
							+ (j * rnInPage) + "&sa=N";
					String thisKey = wordList.get(i) + GoogleCrawler.KEYWORD_INDEX_SYMBOL + j;
					++ googlePageNum;
					System.out.println(googlePageNum + "/" + totalGooglePageNum + ", get google page:" + queryURL);
					String content = "";
					if (model == DATA_ALREADY_EXISTS){
						// Replay from the local store; fall back to "" for a miss.
						content = googleDB.get(thisKey);
						if (content == null){
							content = "";
						}
					}else{
						content = PageFetcher.fetchURL(queryURL);
						try {
							googleDB.put(thisKey, content);
						} catch (Exception e) {
							System.err.println("save into googlebdb error!");
						}
					}

					if (isFirstPage){
						// The total result count only needs to be parsed once per query.
						int resultCount = GooglePageParserRegexp.getResultNumOfQuery(content);
						thisQuery.setResult_count(resultCount);
						if (QueryDao.insertQuery(conn, thisQuery)){
							++ maxQueryID;
						}
						isFirstPage = false;
					}
					ArrayList<String> backlinkList = GooglePageRobustParser.getResultLinkList(content);
					// Sanity check: a query with many results should yield a nearly
					// full page of links; otherwise the parser probably failed.
					if (thisQuery.getResult_count() > config.getGoogleResultCount() + 1000 && backlinkList.size() < rnInPage / 2){
						System.err.println("------google page parser maybe error! page content:");
						System.err.println(content);
					}
					for (String thisLink : backlinkList) {
						try{
							new URL( thisLink);	// validate URL syntax only
						}catch (Exception e) {
							System.out.println("URL:" + thisLink + " is illegal");
							continue;
						}
						if (linkMapPageID.containsKey(thisLink)){
							// Known page: only add the query-page relation (deduplicated
							// via the rel-mark set; HashSet.add is false on duplicates).
							int thisPageID = linkMapPageID.get(thisLink);
							String thisRelMark = QueryPageDao.getRelMark(thisQuery.getId(), thisPageID);
							if (allRelMark.add(thisRelMark)){
								QueryPageRelation thisRel = new QueryPageRelation(thisQuery.getId(), thisPageID, itemNum + 1);
								if (QueryPageDao.insertQuery(conn, thisRel)){
									++ itemNum;
								}
							}
						}else{
							// New page: insert it as NOT_CRAWLED, then add the relation.
							PageEntity thisPage = new PageEntity(maxPageID + 1, thisLink
									, "", "", "", "", PageClassifier.NORMAL_TYPE, PageEntity.NOT_CRAWLED);
							if (PageDao.insertPage(conn, thisPage)){
								++ maxPageID;
								String thisRelMark = QueryPageDao.getRelMark(thisQuery.getId(), thisPage.getId());
								if (allRelMark.add(thisRelMark)){
									QueryPageRelation thisRel = new QueryPageRelation(thisQuery.getId(), thisPage.getId(), itemNum + 1);
									if (QueryPageDao.insertQuery(conn, thisRel)){
										++ itemNum;
									}
								}
								linkMapPageID.put(thisLink, thisPage.getId());
							}
						}
					}

					// Politeness delay between live Google requests: a random wait
					// in [minWaitTime, maxWaitTime) seconds. Skipped when replaying
					// from the local BDB. (Random.nextInt(bound) replaces the old
					// biased ">>> 1 then %" construction.)
					int thisWaitSecond;
					if (timeDiff <= 0) {
						thisWaitSecond = minWaitTime;
					} else {
						thisWaitSecond = timeRandom.nextInt(timeDiff) + minWaitTime;
					}
					if (model != DATA_ALREADY_EXISTS){
						Thread.sleep(thisWaitSecond * 1000);
					}
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			googleDB.close();
		}
	}

	/**
	 * Crawls the backlink pages discovered by the Google stage.
	 *
	 * In {@link #DATA_ALREADY_EXISTS} mode the pages are replayed from the
	 * ref-page BDB and the MySQL page records are updated in this thread;
	 * pages with no stored copy are marked CRAWLED_ERROR. Otherwise one
	 * {@code BackLinkCrawlerThread} per configured thread is started on
	 * {@code exec}, each with its own database connection.
	 */
	public static void crawlBacklink(Connection conn, CrawlerConfig config,
			ArrayList<BackLinkCrawlerThread> threadList, ExecutorService exec,
			int THREADS, int model){
		HashMap<String, PageEntity> uncrawledLinkMapPage = PageDao.getLinkMapPage(conn, PageEntity.NOT_CRAWLED);

		RefPageDB refDB = new RefPageDB(config.getRefPageBDBDirname(),
				1024 * 1024, "utf8");
		refDB.open(config.getRefPageBDBDatabasename());
		if (model == DATA_ALREADY_EXISTS){
			if (uncrawledLinkMapPage.size() == 0){
				refDB.close();
				return;
			}
			int index = 0;
			refDB.openCursor();
			try {
				PageEntity page = refDB.getNextPage();
				while (page != null){
					System.out.println("deal with :" + index + "," + page.getLink());
					++ index;
					if (uncrawledLinkMapPage.containsKey(page.getLink())){
						// Copy the stored metadata onto the MySQL record and flag it OK.
						PageEntity thisPage = uncrawledLinkMapPage.get(page.getLink());
						thisPage.setMeta_description(page.getMeta_description());
						thisPage.setMeta_keywords(page.getMeta_keywords());
						thisPage.setState(PageEntity.CRAWLED_OK);
						thisPage.setTitle(page.getTitle());
						PageDao.updatePage(conn, thisPage);
					}
					page = refDB.getNextPage();
				}
			} catch (UnsupportedEncodingException e) {
				e.printStackTrace();
			} finally {
				// FIX: close cursor and db in finally so an unexpected runtime
				// exception in the loop cannot leak the BDB cursor.
				System.out.println("refdb will close!");
				refDB.closeCursor();
				refDB.close();
				System.out.println("refdb is closed!");
			}
			// Any page still NOT_CRAWLED has no stored copy: mark it as an error.
			index = 0;
			Iterator<String> itor = uncrawledLinkMapPage.keySet().iterator();
			while (itor.hasNext()){
				PageEntity thisPage = uncrawledLinkMapPage.get(itor.next());
				if (thisPage.getState() == PageEntity.NOT_CRAWLED){
					thisPage.setState(PageEntity.CRAWLED_ERROR);
					PageDao.updatePage(conn, thisPage);
					System.out.println("set crawl error with :" + index + "," + thisPage.getLink());
					++ index;
				}
			}
			System.out.println("set crawl error ok!");
		}else{
			CrawlItemPool.generateCrawlList(conn);
			for (int j = 0; j < THREADS; j++) {
				BackLinkCrawlerThread crawlerThread = new BackLinkCrawlerThread();
				crawlerThread.setUncrawledLinkMapPage(uncrawledLinkMapPage);
				crawlerThread.setModel(BackLinkCrawlerThread.BDB_AND_MYSQL_MODEL);
				threadList.add(crawlerThread);
				// Each crawler thread gets its own connection; JDBC connections
				// are not safe to share across threads.
				Connection thread_conn = BaseDao.getDbConnection(dbURL, user, pwd);
				crawlerThread.setJob(j, refDB, thread_conn);
				exec.execute(crawlerThread);
			}
			// NOTE(review): refDB is intentionally left open here — it is shared
			// by the crawler threads; presumably closed when they finish. Confirm.
		}
	}

	/**
	 * Waits for the backlink crawl to quiesce, then walks the ref-page BDB,
	 * classifies each crawled page and extracts code segments into the
	 * segment table. Pages yielding too many/too few segments are recorded
	 * as extraction errors.
	 */
	public static void extractCodeSegment(Connection conn, CrawlerConfig config,
			ArrayList<BackLinkCrawlerThread> threadList, ExecutorService exec,
			int model){
		int lastUncrawledCount = PageDao.getPageCount(conn, PageEntity.NOT_CRAWLED);
		int checkThreshold = threadList.size() / 2;
		int waitSecond = config.extractStateWaitSecond();
		// Poll until the uncrawled count stops shrinking and most crawler
		// threads report finished (or, in replay mode, the count is stable).
		while(true){
			try {
				int finishedNum = 0;
				for (int i = 0; i < threadList.size(); ++i){
					if (threadList.get(i).isFinished()){
						++ finishedNum;
					}
				}
				Thread.sleep(waitSecond * 1000);
				int thisUncrawledCount = PageDao.getPageCount(conn, PageEntity.NOT_CRAWLED);
				if (lastUncrawledCount == thisUncrawledCount && finishedNum > checkThreshold){
					break;
				}
				if (model == DATA_ALREADY_EXISTS && lastUncrawledCount == thisUncrawledCount){
					break;
				}
				lastUncrawledCount = thisUncrawledCount;
			} catch (InterruptedException e) {
				// FIX: restore the interrupt flag and stop polling; the original
				// swallowed the interrupt and busy-spun on an interrupted thread.
				Thread.currentThread().interrupt();
				break;
			}
		}
		RefPageDB refDB = new RefPageDB(config.getRefPageBDBDirname(), 1024*1024, "utf8");
		refDB.open(config.getRefPageBDBDatabasename());
		refDB.openCursor();
		int totalCount = Integer.MAX_VALUE;	// no cap on processed pages
		int okNum = 0;
		try {
			HashMap<String, PageEntity> crawledLinkMapPage = PageDao.getLinkMapPage(conn, PageEntity.CRAWLED_OK);
			HashMap<Integer, Integer> pageidMapQueryid = QueryPageDao.getPageIDMapQueryID(conn);
			ArrayList<QueryEntity> queryList = QueryDao.getAllQuery(conn);
			HashMap<Integer, QueryEntity> queryidMapQueryEntity = new HashMap<Integer, QueryEntity>();
			for (QueryEntity thisQuery: queryList){
				queryidMapQueryEntity.put(thisQuery.getId(), thisQuery);
			}
			int maxPageId = PageDao.getMaxID(conn);
			System.out.println("maxPageId:" + maxPageId);
			int maxSeg = config.getCodeExtractMaxseg();
			int minSeg = config.getCodeExtractMinseg();
			ErrorPageRecorder error_recorder = new ErrorPageRecorder(config.getCodeExtractErrorFilename(), "utf8");
			HashSet<String> extractedSet = PageDao.getLinkSet(conn, PageEntity.EXTRACT_CODE_OK);
			PageEntity page = refDB.getNextPage();
			while (page != null && okNum < totalCount){
				try{
					// FIX: the original dereferenced getLink() before its null check
					// and tested getLink().trim().length()==0 twice — the second
					// clause was clearly meant to be the content-emptiness check.
					if (page.getContent() == null || page.getContent().trim().length() == 0
							|| page.getLink() == null || page.getLink().trim().length() == 0){
						page = refDB.getNextPage();
						continue;
					}
					if (!extractedSet.contains(page.getLink())){
						if (crawledLinkMapPage.containsKey(page.getLink())){
							PageEntity pageInMysql = crawledLinkMapPage.get(page.getLink());
							pageInMysql.setPage_type(PageClassifier.getPageType(page));
							if (pageInMysql.getPage_type() != PageClassifier.NORMAL_TYPE){
								// Non-normal pages (per classifier) carry no extractable code.
								pageInMysql.setState(PageEntity.EXTRACT_CODE_OK);
							}else{
								ArrayList<SegmentItem> segList =
									CodeExtractor.extractCodeSegmentFromPage(page.getContent(),
											CodeExtractor.CONTINUAL_MERGE|CodeExtractor.SMALL_CODE_MERGE
											| CodeExtractor.EMPTY_MERGE);
								if (segList.size() > maxSeg || segList.size() < minSeg){
									// Segment count outside the configured bounds: treat
									// as an extraction failure and record the link.
									error_recorder.addErrorPage(pageInMysql.getLink());
									pageInMysql.setState(PageEntity.EXTRACT_CODE_ERROR);
								}else{
									for (int i = 0; i < segList.size(); ++i){
										SegmentEntity thisSeg = new SegmentEntity(segList.get(i));
										thisSeg.setPage_id(pageInMysql.getId());
										// Resolve page -> query -> API id; 0 when unknown.
										int thisAPI_ID = 0;
										if (pageidMapQueryid.containsKey(pageInMysql.getId())){
											int thisQueryID = pageidMapQueryid.get(pageInMysql.getId());
											if (queryidMapQueryEntity.containsKey(thisQueryID)){
												thisAPI_ID = queryidMapQueryEntity.get(thisQueryID).getApi_id();
											}
										}
										thisSeg.setApi_id(thisAPI_ID);
										SegmentDao.insertSegment(conn, thisSeg);
									}
									pageInMysql.setState(PageEntity.EXTRACT_CODE_OK);
								}
							}
							PageDao.updatePage(conn, pageInMysql);
							System.out.println("page_OK:" + okNum);
							++ okNum;
						}else{
							System.out.println("this url will not be extracted! url:" + page.getLink());
						}
					}
					page = refDB.getNextPage();
				}catch (Exception e) {
					// Per-page failures must not abort the whole extraction run.
					e.printStackTrace();
				}
			}
			// Last pipeline stage: the shared connection is no longer needed.
			conn.close();
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			// FIX: release the BDB cursor/handle even on unexpected errors.
			refDB.closeCursor();
			refDB.close();
		}
	}

	/**
	 * Runs the full four-stage pipeline (initAPI, Google crawl, backlink
	 * crawl, code extraction) for one configuration file and prints per-stage
	 * timings in seconds.
	 *
	 * @param googlePageModel {@link #DATA_ALREADY_EXISTS} to replay stored
	 *                        Google pages, else crawl them
	 * @param refPageModel    same choice for the backlink/extraction stages
	 */
	public static void doOneJob(String configFilename, int googlePageModel, int refPageModel){
		CrawlerConfig config = new CrawlerConfig(configFilename);
		dbURL = config.getDatabaseURL();
		user = config.getDatabaseUsername();
		pwd = config.getDatabasePwd();
		Connection conn = BaseDao.getDbConnection(dbURL,user,pwd);

		long timeBegin = System.currentTimeMillis();
		System.out.println("Job 1 start, " + new java.util.Date(timeBegin));
		long timeStage1 = System.currentTimeMillis();
		System.out.println("stage 1: initAPI, " + new java.util.Date(timeStage1));
		System.out.println(timeStage1);
		KeywordList keywordList = initAPI(conn, config);

		long timeStage2 = System.currentTimeMillis();
		System.out.println("stage 2: crawlAndExtractGoogleResult, " + new java.util.Date(timeStage2));
		System.out.println(timeStage2);
		crawlAndExtractGoogleResult(conn, config, keywordList, googlePageModel);

		long timeStage3 = System.currentTimeMillis();
		System.out.println("stage 3: crawlBacklink, " + new java.util.Date(timeStage3));
		System.out.println(timeStage3);
		ArrayList<BackLinkCrawlerThread> threadList = new ArrayList<BackLinkCrawlerThread>();
		int THREADS = config.getBacklinkCrawlerThread();
		ExecutorService exec = Executors.newFixedThreadPool(THREADS);
		crawlBacklink(conn, config, threadList, exec, THREADS, refPageModel);

		long timeStage4 = System.currentTimeMillis();
		System.out.println("stage 4: extractCodeSegment, " + new java.util.Date(timeStage4));
		System.out.println(timeStage4);
		extractCodeSegment(conn, config, threadList, exec, refPageModel);

		long timeEnd = System.currentTimeMillis();
		System.out.println("Good Job, everything is done!");
		System.out.println("total time: " + (timeEnd - timeBegin) / 1000);
		System.out.println("stage1 time: " + (timeStage2 - timeStage1) / 1000);
		System.out.println("stage2 time: " + (timeStage3 - timeStage2) / 1000);
		System.out.println("stage3 time: " + (timeStage4 - timeStage3) / 1000);
		System.out.println("stage4 time: " + (timeEnd - timeStage4) / 1000);
	}

	/**
	 * Entry point: redirects stdout/stderr to a hard-coded log file and runs
	 * one job with fresh crawling for both stages.
	 */
	public static void main(String args[]){
		// NOTE(review): log path and config name are hard-coded for this task.
		File file = new File("D:/APIExample_Crawl_Task/task_2/log.txt");
		try {
			PrintStream ps = new PrintStream(file);
			System.setOut(ps);
			System.setErr(ps);

			doOneJob("crawlerconf.task2", DATA_NOT_EXISTS, DATA_NOT_EXISTS);

			ps.flush();
			ps.close();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		}
	}
}
