package crawl;

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;

/**
 * Runnable that fetches a single URL over HTTP and hands the response body
 * to a {@link ContentParser} for link/content extraction at the given depth.
 *
 * <p>Not thread-safe for sharing: each instance owns its own HttpClient.
 */
public class CrawlerThread implements Runnable {

    private static final Logger LOG = Logger.getLogger(CrawlerThread.class.getName());

    /** Target URL; may be null (no-arg constructor), in which case run() is a no-op. */
    private String url;

    /** Crawl depth forwarded to the ContentParser. */
    private int depth = 0;

    // NOTE(review): a per-instance DefaultHttpClient forgoes connection pooling across
    // crawler threads; a shared thread-safe client would be more efficient, but sharing
    // would change this class's lifecycle, so it is kept per-instance here.
    private HttpClient httpclient = new DefaultHttpClient();

    public CrawlerThread() {
    }

    public CrawlerThread(String url, int depth) {
        this.url = url;
        this.depth = depth;
    }

    /**
     * Fetches {@code url} and parses the body of a successful (2xx) response.
     * Does nothing when no URL was supplied. I/O failures are logged — previously
     * they were silently swallowed — and never propagate out of the thread.
     */
    @Override
    public void run() {
        doBefore();
        if (url == null) {
            return;
        }
        HttpGet get = new HttpGet(url);
        try {
            HttpResponse response = httpclient.execute(get);
            int status = response.getStatusLine().getStatusCode();
            HttpEntity entity = response.getEntity();
            if (status >= 200 && status < 300) {
                if (entity != null) {
                    parseContent(EntityUtils.toString(entity));
                }
            } else {
                // Bug fix: error pages (404/500/...) were parsed as crawlable content.
                LOG.warning("Unexpected HTTP status " + status + " for " + url);
                // Consume the body anyway so the connection can be reused cleanly.
                EntityUtils.consume(entity);
            }
        } catch (IOException e) {
            // Bug fix: the exception was silently discarded, hiding every fetch failure.
            LOG.log(Level.WARNING, "Failed to fetch " + url, e);
        } finally {
            doAfter(get);
        }
    }

    /** Delegates non-empty response bodies to a ContentParser at this thread's depth. */
    private void parseContent(String content) {
        if (content != null && !content.isEmpty()) {
            ContentParser cp = new ContentParser(depth);
            cp.doParse(content);
        }
    }

    /** Post-request cleanup: return the connection for reuse rather than aborting it. */
    private void doAfter(HttpGet get) {
        // Bug fix: abort() forcibly closes a connection that already completed normally,
        // defeating keep-alive; releaseConnection() returns it to the manager instead.
        get.releaseConnection();
    }

    /** Pre-request hook; intentionally empty (extension point). */
    private void doBefore() {
    }

}
