package net.Stratus.ImageDumper.Sites;

import java.io.File;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import net.Stratus.ImageDumper.Job;
import net.Stratus.ImageDumper.Logger;
import net.Stratus.ImageDumper.Site;
import net.Stratus.ImageDumper.Dumper.Internet;

/**
 * A class for dumping a thread from 4chan
 * 
 * @author Stratus
 */
public class Chan4 extends Site {
	
	private static final boolean CAN_COUNT = true;
	private static final String IDENTIFIER = "4chan.org thread";
	// markers used to locate image URLs in the raw thread HTML;
	// the OP's image uses a different marker than reply images
	private static final String PHOTO_KEY_FIRST = "File : <a href=\"http://images.4chan.org/";
	private static final String PHOTO_KEY = "File<a href=\"http://images.4chan.org/";
	private static final String PHOTO_KEY_END = "\"";
	private static final String PHOTO_FRAGMENT = "http://images.4chan.org/";
	private static final String VALIDATION_REGEX = "http://boards.4chan.org/[A-Za-z]+/res/[0-9#]+";
	// example: http://boards.4chan.org/b/res/377582848
	// compiled once instead of on every dump() call
	private static final Pattern VALIDATION_PATTERN = Pattern.compile(VALIDATION_REGEX);
	// fallback extension when a harvested URL has no '.' in it
	private static final String DEFAULT_EXTENSION = "jpg";
	
	private final Job job;
	private String threadUrl;
	private final ArrayList<String> photoUrls;
	
	/**
	 * Creates a new 4chan dumper and registers it with the given job.
	 * 
	 * @param job job to use
	 */
	public Chan4(Job job) {
		this.job = job;
		job.setSite(this, IDENTIFIER, CAN_COUNT);
		photoUrls = new ArrayList<String>();
	}
	

	/**
	 * Validates the job's URL, harvests all image links from the thread
	 * and downloads them into the job's directory.
	 */
	@Override
	public void dump() {
		initDump(job);
		if(ioh.isLoaded()) {
			Logger.logln(job.getID(), "IO Handler loaded!");
			// extract the canonical thread url from whatever the user pasted
			String url = job.getUrl();
			Matcher m = VALIDATION_PATTERN.matcher(url);
			if(m.find()) {
				threadUrl = m.group();
				Logger.logln(job.getID(), "URL of Thread: "+threadUrl);
				harvestThread();
				harvestPhotos();
				Logger.logln(job.getID(), "Done");
			} else {
				// URL did not match the expected thread format
				Logger.logln(job.getID(), "Invalid thread URL: "+url);
			}
		}
	}
	
	/**
	 * Downloads every harvested picture, numbering files sequentially
	 * and keeping each source URL's file extension.
	 */
	private void harvestPhotos() {
		for(int i = 0; i < photoUrls.size(); i++) {
			String src = photoUrls.get(i);
			// take the extension after the last dot; fall back if absent
			int dot = src.lastIndexOf('.');
			String ext = (dot == -1) ? DEFAULT_EXTENSION : src.substring(dot+1);
			File file = new File(job.getDirectory()+i+"."+ext);
			if(Internet.urlToFile(src, file)) job.addDownloaded();
		}
	}
	
	/**
	 * Fetches the thread HTML and collects all image URLs into
	 * {@code photoUrls}, reporting the total to the job.
	 */
	private void harvestThread() {
		Logger.log(job.getID(), "Harvesting Thread... ");		
		String resp = Internet.httpRequest(threadUrl);
		int harvested = 0;
		
		if(resp == null) {
			// request failed; report an empty thread rather than crash
			job.setTotal(0);
			Logger.logln(job.getID(), "failed to fetch thread");
			return;
		}
		
		// the first (OP) image uses a different marker than the replies;
		// guard against a missing marker instead of letting substring throw
		int start = resp.indexOf(PHOTO_KEY_FIRST);
		if(start != -1) {
			start += PHOTO_KEY_FIRST.length();
			int firstEnd = resp.indexOf(PHOTO_KEY_END, start);
			if(firstEnd != -1) {
				photoUrls.add(PHOTO_FRAGMENT+resp.substring(start, firstEnd));
				harvested++;
			}
		}
		
		// reply images: scan forward from each match to the closing quote
		int last = 0;
		while((last = resp.indexOf(PHOTO_KEY, last)) != -1) {
			last += PHOTO_KEY.length();
			int end = resp.indexOf(PHOTO_KEY_END, last);
			// unterminated attribute means the markup is truncated; stop
			if(end == -1) break;
			photoUrls.add(PHOTO_FRAGMENT+resp.substring(last, end));
			harvested++;
		}
		job.setTotal(harvested);
		Logger.logln(job.getID(), harvested+" Harvested");
	}
	
	
	/**
	 * Uses validation RegEx to check if url is valid
	 * 
	 * @param url url to validate
	 * @return url validity
	 */
	public static boolean isValidURL(String url) {
		return validateURL(VALIDATION_REGEX, url);
	}
}
