package twitter;
import java.net.URL;

import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * Worker thread that repeatedly takes page URLs from its parent
 * {@link TweetsFetcher}, downloads each page, parses the tweets on it and
 * hands the results back to the parent.
 */
class PageFetcher extends Thread {
	TweetsFetcher tweetFetcher;
	// Shared monitor object owned by the parent; used to wait for new jobs.
	Object hasJob;

	/**
	 * Creates a fetcher bound to the given parent.
	 * @param tweetFetcher TweetsFetcher to take page URLs from and to pass results to
	 */
	public PageFetcher(TweetsFetcher tweetFetcher) {
		this.tweetFetcher = tweetFetcher;
		this.hasJob = tweetFetcher.hasJob;
	}

	/**
	 * Worker loop: block until the parent has a page to load, then download
	 * and parse pages until none remain or the parent asks this worker to
	 * terminate (via {@code shouldIKillMyself}).
	 */
	@Override
	public void run() {
		boolean kill = false;
		while (!kill) {
			// Wait for a job; the timed wait doubles as a periodic re-check
			// in case a notify is missed.
			synchronized (hasJob) {
				try {
					while (!tweetFetcher.hasPageToLoad()) {
						hasJob.wait(1000);
					}
				} catch (InterruptedException e) {
					// Restore the interrupt flag so cooperating code can
					// still observe the interruption, then exit the thread.
					Thread.currentThread().interrupt();
					System.out.println("PageFetcher got killed!");
					return;
				}
			}

			// Mark this worker as busy. The matching decrease lives in the
			// finally block so the counter stays balanced even if a download
			// or parse throws an unchecked exception.
			tweetFetcher.increaseWorkingKids();
			try {
				URL url = tweetFetcher.getPageURL();
				while (url != null) {
					Downloader downloader = new Downloader(url);
					String xml = downloader.fetchAsString();

					if (xml != null) {
						// The downloader encodes failures as sentinel strings
						// beginning with "_exeption" (sic) and ending with a
						// reason suffix.
						if (xml.startsWith("_exeption")) {
							if (xml.endsWith("_blocked")) {
								tweetFetcher.clientWasBlocked();
								tweetFetcher.markNoMorePage();
							}
							else if (xml.endsWith("_protected")) {
								tweetFetcher.gotProtectedPage();
							}
							else {
								tweetFetcher.pageFetcherHasFailed();
							}
							break;
						}
						else {
							Tweet[] tweets = parse(xml);
							if (tweets != null) {
								if (tweets.length > 0) {
									tweetFetcher.handlePageResults(tweets);
								}
								else {
									// No tweets on the page: assume we ran
									// past the last page.
									tweetFetcher.markNoMorePage();
								}
							}
						}
					}

					// Terminate this worker if the pool has too many fetchers.
					if (tweetFetcher.shouldIKillMyself(this)) {
						kill = true;
						break;
					}
					else {
						url = tweetFetcher.getPageURL();
					}
				}
			} finally {
				// Always balance increaseWorkingKids(), even on unexpected errors.
				tweetFetcher.decreaseWorkingKids();
			}
		}
	}

	/**
	 * Parses a Twitter status XML page into tweets.
	 * @param xml the page XML to parse
	 * @return Tweet[] with one entry per {@code <status>} element (entries may
	 *         have {@code null} fields for missing child nodes), or
	 *         {@code null} if the XML could not be parsed
	 */
	private Tweet[] parse(String xml) {
		NodeList ids = Helper.parseXMLWithXPath(xml, "//status");
		if (ids == null) {
			return null;
		}

		Tweet[] tweets = new Tweet[ids.getLength()];
		for (int i = 0; i < ids.getLength(); i++) {
			NodeList tweetData = ids.item(i).getChildNodes();

			String created_at = null;
			String id = null;
			String text = null;
			String source = null;
			String place = null;
			for (int j = 0; j < tweetData.getLength(); j++) {
				Node currentChild = tweetData.item(j);
				String nodeName = currentChild.getNodeName();
				if (nodeName.equals("created_at")) {
					// Normalize Twitter's date format via the shared helper.
					created_at = Helper.convertDateAndTime(currentChild.getTextContent());
				}
				else if (nodeName.equals("id")) {
					id = currentChild.getTextContent();
				}
				else if (nodeName.equals("text")) {
					text = currentChild.getTextContent();
				}
				else if (nodeName.equals("source")) {
					source = currentChild.getTextContent();
				}
				else if (nodeName.equals("place")) {
					place = currentChild.getTextContent();
				}
			}

			tweets[i] = new Tweet(id, text, created_at, source, place);
		}
		return tweets;
	}
}