/**
 * 
 */
package com.search;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;

import org.htmlparser.util.ParserException;
import org.joy.analyzer.html.Anchor;
import org.joy.analyzer.html.HTMLDocument;
import org.joy.analyzer.html.ParseException;
import org.joy.analyzer.html.Utility;

import com.Constant.Contants;
import com.crawl.MetaSearch;
import com.link.LinkDB;

/**
 * @author LvSaiHui {gurnfe@163.com}
 * @version 1.0 <br>
 *          Copyright (C), 2007-2008, ZJUT <br>
 *          This program is protected by copyright laws. <br>
 *          Program Name: AdaptiveGA.java <br>
 *          Date: Feb 10, 2009 <br>
 *          Description:
 */
public class AdaptiveGA {
	/*
	 * The default mutation probability is 0.05. mutationProbability is stored as
	 * the integer 5 because the check is performed as
	 * random.nextInt(100) < mutationProbability, i.e. as a percentage. The
	 * crossover probability is expressed the same way.
	 */
	private int mutationProbability = 5;
	// Retained for the (currently disabled) crossover selection step in
	// processExtractedUrls; not read by the active code path.
	private float crossProbability = 0.2f;

	// Single RNG for mutation decisions, hoisted out of the generation loop so
	// we do not re-seed a new Random on every generation.
	private final Random random = new Random();

	/**
	 * Runs the adaptive-GA focused crawl: for each generation, drains the
	 * unvisited-URL queue, scores every page, expands high-scoring anchors back
	 * into the queue, and occasionally injects a "mutation" URL from meta
	 * search. Accepted URLs are appended to urls_saga.txt.
	 *
	 * NOTE(review): terminates the whole JVM via System.exit(0) when done —
	 * preserved from the original contract.
	 *
	 * @throws IOException if the output file cannot be created or written
	 */
	public void search() throws IOException {
		File crawledUrls = new File(
				"D:\\eclipse_workplace\\crawler\\src\\urls_saga.txt");
		if (!crawledUrls.exists()) {
			crawledUrls.createNewFile();
		}
		FileWriter fileWriter = new FileWriter(crawledUrls);
		PrintWriter printWriter = new PrintWriter(fileWriter);
		try {
			for (int i = 0; i < Contants.generationCount
					&& LinkDB.getVisitedUrlNum() < Contants.visitedUrlCount; i++) {
				int j = 1;
				int count = LinkDB.getUnVisitedUrlSize();
				while (!LinkDB.getUnVisitedUrl().isEmpty()) {
					ArrayList<String> unVisitedUrlList = LinkDB.getUnVisitedUrl();
					String url = unVisitedUrlList.get(0);
					unVisitedUrlList.remove(0);
					try {
						expandUrl(url, printWriter);
					} catch (Exception e) {
						// Malformed or unreachable URL: log and keep crawling.
						System.out.println("1" + e);
					}
					System.out
							.print(j
									+ "/"
									+ count
									+ "##################process one url completed ########################################");
					j++;
				}

				// According to the mutation probability, bring in a new URL
				// fetched from meta search. Uses the mutationProbability field
				// (the original hard-coded the literal 5 here).
				if (random.nextInt(100) < mutationProbability) {
					String mutationUrl = MetaSearch
							.getMutationUrlBasedCorpusSearch();
					if (mutationUrl != null) {
						LinkDB.getUnVisitedUrl().add(mutationUrl);
					}
				}
			}
		} finally {
			// Closing the PrintWriter also closes the wrapped FileWriter.
			printWriter.close();
		}

		System.exit(0);
	}

	/**
	 * Fetches one page, scores it against the topic, and — when the page itself
	 * is relevant — scores each outgoing anchor; anchors whose combined
	 * expected link value exceeds 0.55 are queued for crawling, recorded in the
	 * output file, and marked visited so they are never queued twice.
	 *
	 * @param url         page to fetch and score
	 * @param printWriter sink for accepted child URLs (flushed per URL)
	 * @throws Exception on any fetch/parse/scoring failure (handled by caller)
	 */
	private void expandUrl(String url, PrintWriter printWriter) throws Exception {
		HTMLDocument doc = HTMLDocument.createHTMLDocument(url,
				Utility.getWebContent(url));
		double webPageSimilarity = Similarity.scoreOnWebPageText(url, doc);
		if (webPageSimilarity <= Contants.selectionProbability) {
			return; // page not relevant enough; do not expand its links
		}
		for (Anchor a : doc.getAnchors()) {
			if (a == null || a.getText().trim().length() == 0) {
				continue;
			}
			double pageRankScore = Similarity.scoreOnPageRank(a.getURL());
			double urlTextScore = Similarity.scoreBasedOnUrlText(a.getText());
			// Combined fitness of the link: parent page relevance plus the
			// link's own PageRank and anchor-text scores.
			double expectedLinkValue = webPageSimilarity + pageRankScore
					+ urlTextScore;
			System.out.println("The expected link value :" + expectedLinkValue
					+ "=" + "parentPageScore:" + webPageSimilarity
					+ "pageRankScore:" + pageRankScore + "urlTextScore:"
					+ urlTextScore);

			if (expectedLinkValue > 0.55
					&& !LinkDB.getVisitedUrl().contains(a.getURL())) {
				LinkDB.getUnVisitedUrl().add(a.getURL());
				System.out.println("AAAAAAA : " + a.getURL());
				printWriter.println(a.getURL());
				printWriter.flush();
				// Mark visited immediately so the same URL is never queued
				// twice, even before it has actually been fetched.
				LinkDB.getVisitedUrl().add(a.getURL());
			}
		}
	}

	/**
	 * Pushes every extracted URL whose score exceeds 0.65 onto the unvisited
	 * queue and records it in the output file.
	 *
	 * @param extractedUrls map of URL to expected link value; raw {@code Map}
	 *                      kept for caller compatibility — assumes String keys
	 *                      and Double values (TODO confirm against callers)
	 * @param printWriter   sink for accepted URLs; neither flushed nor closed
	 *                      here
	 */
	public void processExtractedUrls(Map extractedUrls, PrintWriter printWriter) {

		System.out
				.println("The original size of nextGenerationUrls before cross operation : "
						+ extractedUrls.size());

		Iterator it = extractedUrls.entrySet().iterator();
		while (it.hasNext()) {
			Map.Entry entry = (Map.Entry) it.next();
			if ((Double) entry.getValue() > 0.65) {
				String highScoreKey = (String) entry.getKey();
				LinkDB.getUnVisitedUrl().add(highScoreKey);
				System.out.println("AAAAAAAAAAAAAAAAAAAAAAA");
				printWriter.println(highScoreKey);
			}
		}
	}

	/**
	 * Entry point: runs a single adaptive-GA crawl.
	 *
	 * @param args unused
	 * @throws IOException     if the output file cannot be written
	 * @throws ParseException  declared for compatibility with callers
	 * @throws ParserException declared for compatibility with callers
	 */
	public static void main(String[] args) throws IOException, ParseException,
			ParserException {
		AdaptiveGA aga = new AdaptiveGA();
		aga.search();
	}

}
