package twitter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Vector;

/**
 * Coordinator thread that drives a pool of {@link PageFetcher} workers which
 * download pages of a user's Twitter timeline. The coordinator hands out page
 * numbers ({@link #getPageURL()}), collects results
 * ({@link #handlePageResults(Tweet[])}), and forwards status changes
 * (blocked / protected / done) to the owning {@link Client}.
 *
 * Thread-safety: all shared state (pageFetchers, currentPage, workingKids,
 * failedDeep) is guarded by this object's monitor via synchronized methods.
 * Workers park on {@link #hasJob} until a new fetch round is announced.
 */
class TweetsFetcher extends Thread {
	/** Owning client; receives tweet results and status callbacks. */
	Client client;

	// Use this object to notify workers that there are jobs to do
	public Object hasJob = new Object();

	/** Consecutive page failures after which the current run is abandoned. */
	private static final int MAX_CONSECUTIVE_FAILURES = 10;

	/** Live worker threads; all access goes through synchronized methods. */
	private Vector<PageFetcher> pageFetchers;

	/** Number of workers currently processing a page. */
	int workingKids = 0;
	/** Last page number handed out; -1 means "no more pages to fetch". */
	int currentPage = -1;
	/** Consecutive failure counter; reset whenever a page succeeds. */
	int failedDeep	= 0;

	/**
	 * Fetch tweets of current User
	 * @param client owning client used for configuration and result callbacks
	 */
	public TweetsFetcher(Client client) {
		this.client		= client;
		pageFetchers	= new Vector<PageFetcher>();
	}

	/**
	 * Create, start, and register one new worker thread.
	 */
	synchronized public void startNewPageFetcher() {
		PageFetcher kid = new PageFetcher(this);
		kid.start();
		pageFetchers.add(kid);
	}

	/**
	 * Kill all pagefetchers at once.
	 * Synchronized so the iteration cannot race with a concurrent
	 * startNewPageFetcher() adding to the Vector mid-loop.
	 */
	synchronized public void killAllPageFetchers() {
		for(PageFetcher p:pageFetchers) {
			p.interrupt();
		}

		pageFetchers.clear();

		markNoMorePage();

		workingKids = 0;
	}

	/**
	 * Record one failed page fetch; after MAX_CONSECUTIVE_FAILURES in a row
	 * we assume the run is hopeless and stop handing out pages.
	 */
	synchronized public void pageFetcherHasFailed() {
		failedDeep++;
		if(failedDeep >= MAX_CONSECUTIVE_FAILURES) {
			markNoMorePage();
			failedDeep = 0;
		}
	}

	/**
	 * Start all PageFetcher, that should be started as default
	 */
	private void startDefaultNrOfPageFetchers() {
		for(int i = 0; i < client.getMaxPageFetcherThreads(); i++) {
			startNewPageFetcher();
		}
	}

	/**
	 * Start more PageFetcher if needed, until the pool is back at the
	 * configured size. Synchronized so the size check and the add cannot
	 * interleave with other pool mutations.
	 */
	synchronized public void tryToStartPageFetchers() {
		while(pageFetchers.size() < client.getMaxPageFetcherThreads()) {
			startNewPageFetcher();
		}
	}

	@Override
	public void run() {
		// Start all page fetchers thread
		startDefaultNrOfPageFetchers();

		while(true) {
			synchronized(this) {
				try {
					// Park until someone notify()s us to begin a fetch round.
					this.wait();
				} catch (InterruptedException e) {
					// Honor the interruption: restore the flag and stop
					// coordinating instead of silently looping forever.
					Thread.currentThread().interrupt();
					return;
				}
			}

			synchronized(hasJob) {
				System.out.println(" + Fetching pages");
				resetPageNumber();
				tryToStartPageFetchers();// Workaround > start missing threads
				hasJob.notifyAll();
			}
		}
	}

	/**
	 * Check if there are page to be loaded
	 * @return True - There are page to load; False - All pages are fetched
	 */
	synchronized public boolean hasPageToLoad() {
		return currentPage >= 0;
	}

	/**
	 * Url of page that must be downloaded next.
	 * Pages are handed out starting at 1 (currentPage is pre-incremented
	 * after resetPageNumber() set it to 0).
	 * @return URL of next page, or null when there is nothing left to fetch
	 */
	synchronized public URL getPageURL() {
		URL url = null;

		// We tell pagefetcher that there are no more page to fetch
		if(!hasPageToLoad()) {
			return url;
		}

		++currentPage;
		try {
			String sinceParam = "";
			if(client.user.lastTweet != null) {
				// Incremental fetch: only tweets newer than the last one seen.
				sinceParam = "&since_id="+client.user.lastTweet;
			}
			url = new URL("http://api.twitter.com/1/statuses/user_timeline.xml?user_id="+ client.user.id + sinceParam +"&count=100&page="+ currentPage);
		} catch (MalformedURLException e) {
			e.printStackTrace();
		}
		return url;
	}

	/**
	 * Tell pagefetcher that it should kill itself
	 * - Too many pagefetcher
	 * @param fetcher the worker asking whether it is surplus
	 * @return true if the worker was deregistered and should terminate
	 */
	synchronized public boolean shouldIKillMyself(PageFetcher fetcher) {
		// Thread equality is identity, so remove() finds exactly the caller;
		// it returns false when the fetcher is not (or no longer) registered.
		if(pageFetchers.size() > client.getMaxPageFetcherThreads()) {
			return pageFetchers.remove(fetcher);
		}
		return false;
	}

	/** Begin a new round: first getPageURL() call will hand out page 1. */
	synchronized private void resetPageNumber() {
		currentPage = 0;
	}

	/**
	 * Mark to -1, so no pagefetcher can get the nextpage
	 */
	synchronized public void markNoMorePage() {
		currentPage = -1;
	}

	/**
	 * Forward blocked status to client
	 */
	public void clientWasBlocked() {
		markNoMorePage();
		client.clientWasBlocked();
	}

	/**
	 * Forward protected status to client
	 */
	public void gotProtectedPage() {
		markNoMorePage();
		client.gotProtectedpage();
	}

	/**
	 * Pass tweets on page to client
	 * @param tweets parsed tweets from one timeline page
	 */
	synchronized public void handlePageResults(Tweet[] tweets) {
		failedDeep = 0;

		// Stop loading next pages - user has stopped the client
		if(!client.isRunning()) {
			currentPage = -1;
			return;
		}

		client.handleTweetResults(tweets);
	}

	/**
	 * We mark nr of working pageFetcher, so if we
	 * want to stop TweetsFetcher, we wait until
	 * all PageFetcher finished their job
	 */
	synchronized public void increaseWorkingKids() {
		workingKids++;
	}

	/**
	 * Decrease nr of working pageFetcher
	 */
	synchronized public void decreaseWorkingKids() {
		workingKids--;
		// Tell client that job is done
		if(allKidsAreDone() && currentPage == -1) {
			client.hasDoneFetchingTweets();
		}
	}

	/**
	 * Check if all PageFetchers are done with their jobs
	 * @return True - Done; False - Some PageFetchers still working
	 */
	private boolean allKidsAreDone() {
		// <= 0 (not == 0): killAllPageFetchers() zeroes the counter while
		// workers may still decrement afterwards, driving it negative.
		return workingKids <= 0;
	}
}
