package com.google.code.crawler;

import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;

/**
 * A minimal crawler that downloads a single page over HTTP with Apache
 * HttpClient and hands the response body to a {@link Parser}.
 *
 * @author sunnyin
 */
public class Crawler {

	/**
	 * Fetches a fixed chapter page from data.book.163.com and feeds the
	 * response body to a {@link Parser}.
	 *
	 * @return the parsed {@link Page}; currently always {@code null} because
	 *         the parse result is not yet captured
	 */
	public Page crawl() {
		// DefaultHttpClient is deprecated as of HttpClient 4.3 in favor of
		// CloseableHttpClient, but is kept here to match the API used in this code.
		DefaultHttpClient httpclient = new DefaultHttpClient();
		try {
			HttpHost target = new HttpHost("data.book.163.com", 80, "http");
			HttpGet req = new HttpGet("/book/section/000BICAY/000BICAY2.html");
			HttpResponse rsp = httpclient.execute(target, req);
			HttpEntity entity = rsp.getEntity();
			Parser parser = new Parser();
			// Time the parse step. Streaming the entity avoids buffering the
			// whole body in memory, unlike the alternative
			// parser.parse(EntityUtils.toString(entity)).
			long t = System.currentTimeMillis();
			parser.parse(entity.getContent());
			System.out.println("parse time (ms): " + (System.currentTimeMillis() - t));
			// Ensure the entity is fully consumed so the connection is released.
			EntityUtils.consume(entity);
		} catch (Exception ex) {
			ex.printStackTrace();
		} finally {
			httpclient.getConnectionManager().shutdown();
		}
		// TODO: wrap the parsed content in a Page and return it; the parse
		// result is currently discarded.
		return null;
	}

	public static void main(String[] args) throws Exception {
		Crawler client = new Crawler();
		client.crawl();
	}

}
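
/*
 * The Parser and Page types referenced above are defined elsewhere in this
 * package and are not shown in this file. A minimal sketch of the contract
 * this class assumes (illustrative only; the real implementations may differ,
 * and redeclaring these classes here would conflict with the actual ones):
 *
 *     class Parser {
 *         // Parse the HTML body directly from a stream without buffering it.
 *         void parse(java.io.InputStream in) throws java.io.IOException { ... }
 *
 *         // String-based overload implied by the commented-out
 *         // EntityUtils.toString(entity) call in crawl().
 *         void parse(String html) { ... }
 *     }
 *
 *     class Page {
 *         // Holds the extracted content of one crawled page.
 *     }
 */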
