package com.parq.cs.crawl;

import java.net.URL;
import java.text.BreakIterator;
import java.util.Locale;
import java.util.Vector;

import org.apache.log4j.Logger;

import com.parq.cs.db.CSDBProxy;

import edu.parq.contentvision.ContentVision;

/**
 * A single crawl job: downloads one URL, extracts its readable text via
 * {@code ContentVision}, splits it into sentences, and delivers the result to
 * the owning {@link CSCrawlerController}. Intended to be run on a worker
 * thread ({@code Runnable}).
 */
public class CSCrawler implements Runnable {

	/** Identifier assigned by the controller for this crawler instance. */
	private String id;
	private static Logger log = Logger.getLogger(CSCrawler.class);
	/** Target URL to crawl; must be set via {@link #setUrl(String)} before run(). */
	private String url;
	/** Controller that receives crawl results and the job-completed callback. */
	private CSCrawlerController controller;
	/** Search id used to associate results with a search request. */
	private long searchId;

	/**
	 * Creates a crawler bound to the given controller.
	 *
	 * @param id         identifier for this crawler
	 * @param controller receiver of crawl results and completion events
	 * @throws Exception retained for source compatibility with existing callers
	 */
	public CSCrawler(String id, CSCrawlerController controller)
			throws Exception {
		this.id = id;
		this.controller = controller;
	}

	public long getSearchId() {
		return searchId;
	}

	public void setSearchId(long searchId) {
		this.searchId = searchId;
	}

	public String getId() {
		return id;
	}

	public void setId(String id) {
		this.id = id;
	}

	public String getUrl() {
		return url;
	}

	public void setUrl(String url) {
		this.url = url;
	}

	/**
	 * Downloads the page at {@link #getUrl()}, extracts the filtered text with
	 * ContentVision, and splits it into sentences using an en_US
	 * {@link BreakIterator}. Newlines inside each sentence are replaced with
	 * spaces.
	 *
	 * @return a crawl result holding this crawler's URL and its sentences
	 * @throws Exception if the URL is malformed or fetching/parsing fails
	 */
	protected CSCrawlResult fetchFromInternet() throws Exception {
		log.info("Fetching From Internet... " + url);
		ContentVision cv = new ContentVision(new URL(url), true, true, 1,
				0, 0, 1);

		// Fetch the filtered text exactly once (the original called this
		// method twice, discarding the first result).
		String contents = cv.getFilteredTextContents();

		BreakIterator iterator = BreakIterator
				.getSentenceInstance(new Locale("en", "US"));
		iterator.setText(contents);

		// Vector retained because CSCrawlResult's constructor takes it.
		Vector<String> sentences = new Vector<String>();
		int beginIndex = iterator.first();
		// Advance past the initial boundary (always 0) before slicing, so we
		// never emit the empty substring(0, 0) the original loop produced on
		// its first pass.
		for (int boundary = iterator.next(); boundary != BreakIterator.DONE;
				boundary = iterator.next()) {
			sentences.add(contents.substring(beginIndex, boundary)
					.replaceAll("\n", " "));
			beginIndex = boundary;
		}

		return new CSCrawlResult(this.getUrl(), sentences);
	}

	/**
	 * Runnable entry point: fetches the URL and hands the result to the
	 * controller. {@code jobCompleted} is invoked in a finally block so the
	 * controller is notified even when the fetch fails; previously a failed
	 * job was never reported and the controller could wait on it forever.
	 *
	 * <p>A former DB-cache lookup was removed (it had been disabled for
	 * performance reasons); results are always fetched live.
	 */
	public void run() {
		try {
			CSCrawlResult result = fetchFromInternet();
			controller.add(result);
		} catch (Exception e) {
			// Pass the throwable as a separate argument so log4j records the
			// full stack trace instead of just the exception's toString().
			log.error("Exception while fetching url " + this.getUrl(), e);
		} finally {
			controller.jobCompleted(this);
		}
	}

	/**
	 * Unused entry point; this class is driven by CSCrawlerController.
	 *
	 * @param args ignored
	 */
	public static void main(String[] args) {
		// Intentionally empty.
	}

}
