package com.meiwenhui.sishuwujing;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.htmlparser.Parser;
import org.htmlparser.filters.AndFilter;
import org.htmlparser.filters.HasAttributeFilter;
import org.htmlparser.filters.NodeClassFilter;
import org.htmlparser.tags.LinkTag;
import org.htmlparser.tags.TableColumn;
import org.htmlparser.tags.TableRow;
import org.htmlparser.tags.TableTag;
import org.htmlparser.util.NodeList;

import com.mchange.v2.c3p0.ComboPooledDataSource;

public class Catalogue {

	/** Root page of the site; sub-catalogue links on the front page are relative to it. */
	private static final String URL = "http://guoxue.shufaji.com/";

	/** Book links harvested from the front page's catalogue tables. */
	private List<LinkTag> tags = new ArrayList<LinkTag>();

	/** Shared connection pool; every crawl borrows a connection and returns it when done. */
	ComboPooledDataSource cpds = new ComboPooledDataSource();

	/**
	 * Kept for backward compatibility only. It is no longer eagerly populated:
	 * the previous code checked out a pooled connection here, never used it,
	 * and never closed it, permanently leaking one pool slot per run.
	 */
	public Connection connection = null;

	/**
	 * Downloads the front page, extracts every book link from the catalogue
	 * tables (HTML tables with {@code class="tbC"}), and starts one
	 * {@link SpiderThread} per book to crawl its chapter list.
	 *
	 * @throws Exception if the page cannot be fetched or parsed
	 */
	public void getCatalogue() throws Exception {

		URLConnection urlConnection = new URL(URL).openConnection();
		String content1 = IOUtils.toString(urlConnection.getInputStream(), "utf-8");
		// Parse the string we already downloaded; the old code passed the
		// URLConnection to the Parser, which made it fetch the page a second time.
		Parser parser = new Parser();
		parser.setInputHTML(content1);

		// Catalogue entries live in <table class="tbC"> elements.
		AndFilter tableFilter = new AndFilter(new NodeClassFilter(TableTag.class), new HasAttributeFilter("class", "tbC"));
		NodeList subCatalogues = parser.extractAllNodesThatMatch(tableFilter);
		for (int i = 0; i < subCatalogues.size(); i++) {
			TableTag element = (TableTag) subCatalogues.elementAt(i);
			for (TableRow tableRow : element.getRows()) {
				for (TableColumn tableColumn : tableRow.getColumns()) {
					// Skip filler cells whose only content is a non-breaking space.
					String txt = tableColumn.toPlainTextString();
					if (!txt.trim().equals("&nbsp;")) {
						tags.add((LinkTag) tableColumn.getFirstChild());
					}
				}
			}
		}

		for (LinkTag _t : tags) {
			// BUG FIX: reuse this instance. The old code did
			// "new Catalogue().new SpiderThread(...)", building a fresh
			// ComboPooledDataSource (a whole connection pool) per thread.
			SpiderThread thread = new SpiderThread(_t.getLinkText(), URL + _t.getLink());
			thread.start();
		}
	}

	/**
	 * Entry point: builds a {@code Catalogue} and kicks off the crawl.
	 *
	 * @param args unused
	 * @throws Exception propagated from {@link #getCatalogue()}
	 */
	public static void main(String[] args) throws Exception {
		Catalogue catalogue = new Catalogue();
		catalogue.getCatalogue();
	}

	/** One crawler thread per book: walks the book's chapter pages and stores each link. */
	class SpiderThread extends Thread {

		private String book;
		private String url;

		public SpiderThread(String book, String url) {
			this.book = book;
			this.url = url;
		}

		public void run() {
			deepCrawl(book, url);
		}

		/**
		 * Crawls one catalogue page of {@code book}, inserting every chapter
		 * link into the {@code url} table, then follows the "next page" link
		 * (if any) recursively. Exceptions are logged and swallowed so one
		 * bad page does not kill the thread.
		 *
		 * @param book book title (stored with every row)
		 * @param url  absolute URL of the page to crawl
		 */
		public void deepCrawl(String book, String url) {
			System.out.println(Thread.currentThread().getId() + "\t" + book + "\t" + url);
			Connection conn = null;
			PreparedStatement preparedStatement = null;
			// Resolved inside the try; the recursive call happens AFTER the
			// connection is returned to the pool, so a long chain of pages no
			// longer holds one open connection per recursion level.
			String nextUrl = null;
			try {
				// BUG FIX: keep a handle on the connection so it can be closed.
				// The old code called cpds.getConnection().prepareStatement(...)
				// and leaked one pooled connection per page crawled.
				conn = cpds.getConnection();
				preparedStatement = conn.prepareStatement("insert into url values(default,?,?,?,0,0)");
				URLConnection urlConnection = new URL(url).openConnection();
				String content1 = IOUtils.toString(urlConnection.getInputStream(), "utf-8");
				// A page containing "书名" (book title) is a leaf content page,
				// not a chapter listing: store it directly and stop.
				if (content1.contains("书名")) {
					preparedStatement.setString(1, book);
					preparedStatement.setString(2, book);
					preparedStatement.setString(3, url);
					preparedStatement.executeUpdate();
					return;
				}

				// Parse the already-downloaded content (no second fetch).
				Parser spiderParser = new Parser();
				spiderParser.setInputHTML(content1);

				// Pagination links ("下一页" next / "上一页" previous) are the
				// trailing entries of the link list; exclude them from the
				// chapter count.
				boolean hasNext = content1.contains("下一页");
				boolean hasPrevious = content1.contains("上一页");

				NodeList chapters = spiderParser.extractAllNodesThatMatch(new NodeClassFilter(LinkTag.class));

				int count = chapters.size();
				if (hasNext) {
					count -= 1;
				}
				if (hasPrevious) {
					count -= 1;
				}

				// Chapter links are relative to this page's directory.
				String prefix = url.substring(0, url.lastIndexOf("/") + 1);

				for (int i = 0; i < count; i++) {
					LinkTag next = (LinkTag) chapters.elementAt(i);
					preparedStatement.setString(1, book);
					preparedStatement.setString(2, next.getLinkText());
					preparedStatement.setString(3, prefix + next.getLink());
					preparedStatement.executeUpdate();
				}

				if (hasNext) {
					LinkTag next = (LinkTag) chapters.elementAt(chapters.size() - 1);
					nextUrl = prefix + next.getLink();
				}
			} catch (Exception e) {
				e.printStackTrace();
			} finally {
				// Release JDBC resources even when the crawl fails part-way.
				if (preparedStatement != null) {
					try {
						preparedStatement.close();
					} catch (Exception ignored) {
						// best-effort cleanup; nothing useful to do here
					}
				}
				if (conn != null) {
					try {
						conn.close();
					} catch (Exception ignored) {
						// best-effort cleanup; nothing useful to do here
					}
				}
			}
			if (nextUrl != null) {
				deepCrawl(book, nextUrl);
			}
		}

	}

}
