package com.morgan.test;

// Author: morgan.liao
// NOTE(review): this comment sits between the package statement and the
// imports, where Javadoc attaches to nothing; kept as a plain line comment.
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

import org.aspectj.weaver.patterns.ParserException;
import org.htmlparser.Node;
import org.htmlparser.Parser;
import org.htmlparser.util.NodeIterator;

/**
 * A simple breadth-first web spider that crawls pages starting from a seed
 * URL, honoring robots.txt, looking for a key word.
 *
 * <p>Intended to be run on its own thread via {@link Runnable#run()}.
 * NOTE(review): several fields (search_key_words, limitsite, countsite, srb,
 * resultlist, disallowListCache) are not yet used by the visible code — they
 * appear to be reserved for the keyword-matching and robots.txt logic whose
 * helper methods below are still stubs.
 */
public class CTSpider implements Runnable {

// True once the key word has been found (not yet set by visible code).
boolean search_key_words = false;

// Per-page counter, reset at the start of each processed page.
int count = 0;

// Maximum number of sites to crawl (not yet enforced in visible code).
int limitsite = 10;

// Number of sites crawled so far.
int countsite = 1;

String keyword = "中国";  //Key word

Parser parser = new Parser();

String startsite = "";   //search start Website.

CTSearchResultBean srb;   //save search result.

List<String> resultlist = new ArrayList<String>(); //Search key  linked List.

List<String> searchedsite = new ArrayList<String>(); // has been searched   website list.

Queue<String> linklist = new LinkedList<String>(); // need to analyze   website list. 

// Cache of per-host robots.txt disallow lists (host -> disallowed paths).
HashMap<String, ArrayList<String>> disallowListCache = new HashMap<String, ArrayList<String>>();

/**
 * Creates a spider that searches for {@code keyword} starting at
 * {@code startsite}; the seed URL is queued for crawling immediately.
 *
 * @param keyword   the key word to look for in crawled pages
 * @param startsite the seed URL where the crawl begins
 */
public CTSpider(String keyword, String startsite) {
    this.keyword = keyword;
    this.startsite = startsite;
    linklist.add(startsite);
    srb = new CTSearchResultBean();
}

/** Thread entry point: drains the pending-link queue. */
public void run() {
    search(linklist);
}

/**
 * Breadth-first crawl loop: repeatedly takes the head of {@code queue},
 * skips URLs already visited, checks robots.txt permission, and parses
 * permitted pages. Errors on one URL are reported and do not stop the crawl.
 *
 * @param queue the URLs still to be analyzed; drained by this method
 */
public void search(Queue<String> queue) {

    while (!queue.isEmpty()) {
        // poll() retrieves and removes the head in one call, replacing the
        // original peek()/redundant toString()/remove() sequence.
        String url = queue.poll();

        try {
            // Fix: the original called isSearched() with an EMPTY if-body and
            // then crawled the page regardless; now already-visited URLs are
            // skipped. (isSearched is still a stub returning false, so the
            // observable behavior only changes once it is implemented.)
            if (!isSearched(searchedsite, url)) {
                //Check this link is allowed to search?
                if (isRobotAllowed(new URL(url))) {
                    processHtml(url);
                } else {
                    System.out.println("this page is disallowed to search");
                }
            }
        } catch (Exception ex) {
            // Fix: the original swallowed every exception silently; report the
            // failing URL so crawl errors are at least visible.
            System.out.println("error while searching " + url + ": " + ex);
        }
    }
}

/**
 * Fetches the page at {@code url}, records it as visited, and walks every
 * HTML node, delegating each one to {@link #parserNode(Node)}.
 *
 * <p>NOTE(review): {@code ParserException} here resolves to
 * {@code org.aspectj.weaver.patterns.ParserException} via this file's imports;
 * {@code org.htmlparser.util.ParserException} was likely intended — confirm.
 *
 * @param url the page to fetch and parse
 * @throws ParserException if declared parser setup fails
 * @throws Exception       on connection or parsing errors
 */
public void processHtml(String url) throws ParserException, Exception {
    searchedsite.add(url);

    // Reset the per-page counter before processing a new page.
    count = 0;

    System.out.println("searching ... :" + url);
    parser.setURL(url);
    parser.setEncoding("GBK"); // assumes pages are GBK-encoded — TODO confirm
    URLConnection uc = parser.getConnection();
    uc.connect();
    NodeIterator nit = parser.elements();

    while (nit.hasMoreNodes()) {
        Node node = nit.nextNode();
        parserNode(node);
    }
}

/**
 * Examines a single HTML node (keyword matching / link extraction).
 * TODO: not yet implemented.
 */
private void parserNode(Node node) {
    // TODO Auto-generated method stub
}

/**
 * Reports whether {@code url} is already in the visited list.
 * TODO: not yet implemented — always returns {@code false}, so every URL is
 * currently treated as unvisited.
 */
private boolean isSearched(List<String> searchedsite2, String url) {
    // TODO Auto-generated method stub
    return false;
}

/**
 * Reports whether robots.txt permits crawling {@code url}.
 * TODO: not yet implemented — always returns {@code false}, so no page is
 * currently crawled; presumably should consult {@code disallowListCache}.
 */
private boolean isRobotAllowed(URL url) {
    // TODO Auto-generated method stub
    return false;
}
}
