package project2;
import java.io.*;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


/**
 * A small multithreaded web crawler: starting from a home page it downloads
 * pages up to a fixed depth, saving each page to a local file and writing a
 * crawl report plus an index of every downloaded page.
 *
 * <p>Thread-safety: the URL queue and bookkeeping maps are guarded by
 * synchronized accessor methods on this instance.
 */
public class ComplexCrawl {
    /** Directory where downloaded page files are stored. */
    private String fPath = "d:\\temp\\download3\\";
    /** Work queue: URLs discovered but not yet fetched. */
    private ArrayList<String> undoArrUrls = new ArrayList<String>();
    /** Every URL seen, in discovery order, used to build the final index file. */
    private ArrayList<String> allUrlForIndex = new ArrayList<String>();
    /** URL -> index of the local file the page was saved to. */
    private Hashtable<String, Integer> allUrls = new Hashtable<String, Integer>();
    /** URL -> crawl depth (the home page is depth 1). */
    private Hashtable<String, Integer> deepUrls = new Hashtable<String, Integer>();
    /** Next page-file index; the home page takes 0, discovered pages get 1, 2, ... */
    private int intWebIndex = 0;
    /** Charset used both to decode pages and to encode the saved files. */
    private String charset = "utf-8";
    /** In-memory copy of the crawl log (StringBuilder avoids O(n^2) concatenation). */
    private StringBuilder report = new StringBuilder();
    private long startTime;
    private int webSuccessed = 0;
    private int webFailed = 0;
    /** Maximum crawl depth; links on pages at this depth are not followed. */
    private int webDepth = 2;
    /** Number of worker threads. */
    private int intThreadNum = 10;
    private String strHomePage = "";
    /** Domain extracted from the home page; only same-domain links are followed. */
    private String myDomain;

    /** Crawls {@code s} with the default depth of 2. */
    public ComplexCrawl(String s) {
        this.strHomePage = s;
    }

    /** Crawls {@code s} down to depth {@code i}. */
    public ComplexCrawl(String s, int i) {
        this.strHomePage = s;
        this.webDepth = i;
    }

    /** Counts one successfully fetched page (called from worker threads). */
    public synchronized void addWebSuccessed() {
        webSuccessed++;
    }

    /** Counts one failed fetch (called from worker threads). */
    public synchronized void addWebFailed() {
        webFailed++;
    }

    /**
     * Appends {@code s} to the in-memory report and to the log file.
     * The original rewrote the whole accumulated report on every call
     * (O(n^2) I/O); the file is now opened in append mode instead.
     */
    public synchronized void info2log(String s) {
        report.append(s);
        try (PrintWriter pwReport = new PrintWriter(
                new FileWriter("d:\\temp\\download2\\myReport.txt", true))) {
            pwReport.print(s);
            pwReport.flush();
        } catch (Exception e) {
            System.out.println("gene report file failure!");
        }
    }

    /**
     * Atomically removes and returns the next queued URL, or {@code null} when
     * the queue is empty. The empty check lives inside the lock, closing the
     * check-then-act race the workers previously had between
     * {@code isEmpty()} and {@code get(0)}.
     */
    public synchronized String getUndoUrl() {
        if (undoArrUrls.isEmpty()) {
            return null;
        }
        return undoArrUrls.remove(0);
    }

    /**
     * Atomically removes and returns the next URL of the index list, or
     * {@code null} when it is empty.
     */
    public synchronized String getUrl() {
        if (allUrlForIndex.isEmpty()) {
            return null;
        }
        return allUrlForIndex.remove(0);
    }

    /** Returns the next page-file index (1, 2, 3, ...). */
    public synchronized Integer getIntWebIndex() {
        return ++intWebIndex;
    }

    /**
     * Atomically registers a newly discovered URL in the queue and the
     * bookkeeping maps. All four structures are updated under one lock so
     * concurrent workers cannot enqueue the same URL twice.
     *
     * @return true when the URL was new and has been queued
     */
    private synchronized boolean recordNewUrl(String url, int depth) {
        if (allUrls.containsKey(url)) {
            return false;
        }
        undoArrUrls.add(url);
        allUrlForIndex.add(url);
        allUrls.put(url, getIntWebIndex());
        deepUrls.put(url, depth);
        return true;
    }

    /**
     * Main entry point of the crawl: seeds the queue with the home page,
     * fetches it on the calling thread, starts the workers, then waits for
     * completion and writes the summary and the page index.
     */
    public void crawlContent() {
        startTime = System.currentTimeMillis();
        this.myDomain = getDomain();
        if (myDomain == null) {
            System.out.println("Wrong input!");
            return;
        }
        System.out.println("main_page = " + strHomePage);
        info2log("main_page = " + strHomePage + "!\n");
        System.out.println("Domain #########################: " + myDomain);
        info2log("Domain = " + myDomain + "!\n");
        undoArrUrls.add(strHomePage);
        allUrlForIndex.add(strHomePage);
        allUrls.put(strHomePage, 0);
        deepUrls.put(strHomePage, 1);
        File fDir = new File(fPath);
        if (!fDir.exists()) {
            fDir.mkdirs(); // mkdirs: also create missing parent directories
        }
        System.out.println("Start!");
        this.info2log("Start!\n");
        // Fetch the home page synchronously so the queue is primed with its
        // outgoing links before the workers start looking for work.
        String undoUrl = getUndoUrl();
        if (undoUrl != null) {
            this.crawlContentByUrl(undoUrl, charset, allUrls.get(undoUrl) + "");
        }
        for (int i = 0; i < intThreadNum; i++) {
            new Thread(new Processer(this)).start();
        }
        // Wait until the queue is drained and all workers have exited.
        // NOTE(review): activeCount()==1 assumes this is the only remaining
        // thread in the group — fragile, but kept from the original design.
        while (!(undoArrUrls.isEmpty() && Thread.activeCount() == 1)) {
            try {
                Thread.sleep(200); // avoid the original 100%-CPU busy wait
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                break;
            }
        }
        long finishTime = System.currentTimeMillis();
        long costTime = finishTime - startTime;
        System.out.println("\n\n\n\n\nFinished!");
        info2log("\n\n\n\n\nFinished!\n");
        System.out.println("Start time = " + startTime + "   "
                + "Finish time = " + finishTime + "   "
                + "Cost time = " + costTime + "ms");
        info2log("Start time = " + startTime + "   "
                + "Finish time = " + finishTime + "   "
                + "Cost time = " + costTime + "ms" + "\n");
        System.out.println("Total url number = "
                + (webSuccessed + webFailed) + "   Successed: "
                + webSuccessed + "   Failed: " + webFailed);
        info2log("Total url number = " + (webSuccessed + webFailed)
                + "   Successed: " + webSuccessed + "   Failed: "
                + webFailed + "\n");

        // Build the index of every downloaded page: depth, file path, URL.
        StringBuilder strIndex = new StringBuilder();
        String tmpUrl;
        while ((tmpUrl = getUrl()) != null) {
            strIndex.append("Web depth:").append(deepUrls.get(tmpUrl))
                    .append("   Filepath: ").append(fPath).append("/web")
                    .append(allUrls.get(tmpUrl)).append(".htm")
                    .append("   url:").append(tmpUrl).append("\n\n");
        }
        System.out.println(strIndex);
        try (PrintWriter pwIndex = new PrintWriter(new FileOutputStream("d:/fileindex.txt"))) {
            pwIndex.println(strIndex);
        } catch (Exception e) {
            System.out.println("general index file == failure!");
        }
    }

    /**
     * Downloads one page, saves it to {@code fPath/web<fileIndex>.htm} and,
     * while the page is shallower than {@code webDepth}, scans each line for
     * new same-domain links. All streams are closed via try-with-resources
     * (the original leaked both streams when an exception was thrown mid-read).
     *
     * @param strUrl    page to fetch
     * @param charset   charset used to decode the page and encode the file
     * @param fileIndex suffix of the local file the page is written to
     */
    public void crawlContentByUrl(String strUrl, String charset, String fileIndex) {
        System.out.println("Getting web by url: " + strUrl);
        info2log("Getting web by url: " + strUrl + "\n");
        String filePath = fPath + "/web" + fileIndex + ".htm";
        try (BufferedReader bReader = new BufferedReader(
                     new InputStreamReader(new URL(strUrl).openStream(), charset));
             PrintWriter pw = new PrintWriter(
                     new OutputStreamWriter(new FileOutputStream(filePath), charset))) {
            Integer depth = deepUrls.get(strUrl);
            // Only expand links while we are above the maximum depth.
            boolean expandLinks = depth != null && depth < webDepth;
            String rLine;
            while ((rLine = bReader.readLine()) != null) {
                if (rLine.length() > 0) {
                    pw.println(rLine);
                    if (expandLinks) {
                        getUrlByString(rLine, strUrl);
                    }
                }
            }
            pw.flush();
            System.out.println("Get web successfully! " + strUrl);
            info2log("Get web successfully! " + strUrl + "\n");
            addWebSuccessed();
        } catch (Exception e) {
            System.out.println("Get web failed!       " + strUrl);
            info2log("Get web failed!       " + strUrl + "\n");
            addWebFailed();
        }
    }

    /**
     * Extracts the registrable domain (e.g. "baidu.com") from the home-page
     * URL, or returns {@code null} when it does not look like an http URL.
     */
    public String getDomain() {
        String reg = "(?<=http\\://[a-zA-Z0-9]{0,100}[.]{0,1})[^.\\s]*?\\.(com|cn|net|org|biz|info|cc|tv)";
        Pattern p = Pattern.compile(reg, Pattern.CASE_INSENSITIVE);
        Matcher m = p.matcher(strHomePage);
        return m.find() ? m.group(0) : null;
    }

    /**
     * Scans one line of page text for {@code href} links inside this crawl's
     * domain and queues every URL not seen before at depth(parent) + 1.
     *
     * @param inputArgs a single line of HTML
     * @param strUrl    the page the line came from
     */
    public void getUrlByString(String inputArgs, String strUrl) {
        // FIX: the original pattern started with "[http://]" — a character
        // class matching ONE char from {h,t,p,:,/} — instead of the literal
        // scheme; "https?://" matches the intended prefix (https links too).
        String regUrl = "(?<=(href=)[\"]?[\']?)https?://[^\\s\"\'\\?]*("
                + myDomain + ")[^\\s\"\'>]*";
        Pattern p = Pattern.compile(regUrl, Pattern.CASE_INSENSITIVE);
        Matcher m = p.matcher(inputArgs);
        Integer parentDepth = deepUrls.get(strUrl);
        // Defensive: an unknown parent counts as depth 1 (original would NPE).
        int childDepth = (parentDepth == null ? 1 : parentDepth) + 1;
        while (m.find()) {
            String found = m.group(0);
            if (recordNewUrl(found, childDepth)) {
                System.out.println("Find a new url,depth:" + childDepth + " " + found);
                info2log("Find a new url,depth:" + childDepth + " " + found + "\n");
            }
        }
    }

    /** Worker task: drains the URL queue until no work remains. */
    class Processer implements Runnable {
        ComplexCrawl crawl;

        public Processer(ComplexCrawl g) {
            this.crawl = g;
        }

        public void run() {
            // getUndoUrl() atomically checks-and-removes, so concurrent
            // workers can no longer race past isEmpty() into an empty list.
            String tmp;
            while ((tmp = getUndoUrl()) != null) {
                crawlContentByUrl(tmp, charset, allUrls.get(tmp) + "");
            }
        }
    }

    /**
     * Entry point: crawls the URL given as the single argument, or the
     * default page when none is supplied, to depth 2.
     */
    public static void main(String[] args) {
        String url = "http://www.baidu.com.cn/";
        if (args.length == 1) {
            url = args[0];
        }

        ComplexCrawl complexCrawl = new ComplexCrawl(url, 2);
        complexCrawl.crawlContent();
    }
}
