/**
 * FileURLFetcher.java
 * 
 * FreeZzaph is free software; you can redistribute it
 * and/or modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 3 of
 * the License, or (at your option) any later version.
 *
 * FreeZzaph is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; see the file COPYING.
 */
package freezzaph;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import freezzaph.exceptions.URLFetchException;
import freezzaph.plugins.URLFetcher;

/**
 * Class with a method that takes a URL to a (X)HTML page and
 * returns the URLs to files that match the filter provided in
 * the constructor of the class when created.
 *   
 * @author FreeZzaph
 */
final class FileURLFetcher extends URLFetcher {
	
	private Pattern pattern;
	
	/**
	 * Creates a new FileURLFetcher with the given filter.
	 * 
	 * @param filter regular expression that lists file extensions
	 * that this class should look for. The filter is only to contain
	 * the file extensions, such as "(tar.)?(g?z|bz2?)" to fetch
	 * archive types commonly used on Un*x systems.
	 * @throws PatternSyntaxException if the filter given was an 
	 * invalid regular expression
	 */
	public FileURLFetcher(String filter) throws PatternSyntaxException {
		pattern = Pattern.compile("<a.+?href=\"?([^\"> ]+?\\.(?:" + filter + "))\"?[^>]*?>(.+?)</a>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
	}
	
	/**
	 * A method that takes a URL to a (X)HTML page and
	 * returns the URLs to files that match the filter provided in
	 * the constructor of the class when created.
	 * 
	 * @param url the URL to look for files in
	 * @return the URLs to the files found
	 * @throws URLFetchException if there is a
	 * problem fetching URLs from the given URL.
	 */
	public URL[] getURLsFrom(URL url) throws URLFetchException {
		// Download the page
		String data = fetch(url);
		ArrayList<URL> urlList = new ArrayList<URL>();
		
		// Look for matching file links
		Matcher m = pattern.matcher(data);
		while (m.find()) {
			try {
				URL dataurl = new URL(url, m.group(1));
				urlList.add(dataurl);
			} catch (MalformedURLException e) {
				// Malformed URL, ignore
			}
		}
		
		// Return the array of URLs found
		return urlList.toArray(new URL[0]);
	}

}
