/*
 * Copyright 2013 Solace Systems, Inc.
 * 
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package com.solacesystems.tools.ant;

import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;

/**
 * Ant task that makes use of the link checking crawler
 * @author swilson
 *
 */
public class CheckLinkTask extends Task {

	/**
	 * Immutable pairing of a missing file with every location that refers to it.
	 * @author swilson
	 */
	private static class Reference {
		public final URI file;
		public final Location[] references;

		public Reference(URI f, Location[] references) {
			this.file = f;
			this.references = references;
		}
	}

	// Defaults: follow links inside local documents, keep query/fragment,
	// do not leave the local file system.
	private boolean followLinks = true;
	private boolean stripQueryAndFragment = false;
	private boolean followRemote = false;

	/** Nested {@code <fileset>} elements; each contributes crawl starting points. */
	private final List<FileSet> fileSets = new ArrayList<FileSet>();

	/**
	 * Crawls every file in the nested file sets and fails the build if any
	 * referenced resource cannot be found.
	 *
	 * @throws BuildException if one or more broken links are detected
	 */
	@Override
	public void execute() throws BuildException {
		final List<Reference> missing = new ArrayList<Reference>();
		Crawler crawler = new Crawler(new Crawler.EventHandler() {
			@Override
			public void error(Location location, Exception ex) {
				String loc = location.toString();

				if (ex instanceof URISyntaxException) {
					logURISyntaxException(loc, (URISyntaxException) ex);
				} else {
					log("Unhandled Exception: " + loc, ex, Project.MSG_ERR);
				}
			}

			@Override
			public void missing(URI file, Location[] locations) {
				missing.add(new Reference(file, locations));
			}

			// FIX: previously the exception detail was discarded entirely,
			// leaving the user with no clue why the URI failed to parse.
			// private (not protected): anonymous classes cannot be subclassed.
			private void logURISyntaxException(String location, URISyntaxException ex) {
				log("Malformed URI: " + location + " (" + ex.getMessage() + ")",
						Project.MSG_WARN);
			}
		});
		crawler.setFollowLinks(getFollowLinks());
		crawler.setFollowRemote(getFollowRemote());
		crawler.setStripQueryAndFragment(getStripQueryAndFragment());

		for (FileSet set : fileSets) {
			DirectoryScanner scanner = set.getDirectoryScanner();
			scanner.scan();

			String[] files = scanner.getIncludedFiles();
			log("Checking " + files.length + " files for broken links...");
			File base = set.getDir();
			for (String path : files) {
				// Paths from the scanner are relative to the file set's base dir.
				crawler.addTarget(new File(base, path).toURI());
			}
		}

		crawler.crawl();

		if (!missing.isEmpty()) {
			throw new BuildException(formatMissing(missing));
		}
	}

	/**
	 * Builds the build-failure message: one "Not found" line per missing file,
	 * followed by an indented line for each referencing location.
	 *
	 * @param missing the broken references collected during the crawl
	 * @return the formatted message, with trailing whitespace trimmed
	 */
	private static String formatMissing(List<Reference> missing) {
		StringBuilder sb = new StringBuilder();

		for (Reference r : missing) {
			sb.append("Not found: ").append(r.file).append('\n');

			for (Location location : r.references) {
				sb.append("\tReferenced by ").append(location).append('\n');
			}
			sb.append('\n');
		}
		return sb.toString().trim();
	}

	/**
	 * Adds a nested file set whose files are used as crawl starting points.
	 * Called by Ant for each nested {@code <fileset>} element.
	 *
	 * @param fs the file set to add; must not be null
	 */
	public void addFileSet(FileSet fs) {
		if (fs == null) {
			// Fail here with a clear message rather than with an NPE later
			// inside execute().
			throw new IllegalArgumentException("fileset must not be null");
		}
		fileSets.add(fs);
	}

	/**
	 * If true the crawler will follow links found in the documents
	 * @param value Set to true to follow links in documents
	 */
	public void setFollowLinks(boolean value) {
		followLinks = value;
	}

	/**
	 * If true the crawler will follow links found in documents
	 * @return True if the crawler should follow links
	 */
	public boolean getFollowLinks() {
		return followLinks;
	}

	/**
	 * If true, the crawler will remove the query and fragment (hash, anchor)
	 * portions of URIs found in documents.
	 * @param value Set to true to strip query and fragment portions
	 */
	public void setStripQueryAndFragment(boolean value) {
		stripQueryAndFragment = value;
	}

	/**
	 * If true, the crawler will remove the query and fragment (hash, anchor)
	 * portions of URIs found in documents.
	 * @return True if the crawler will strip query and fragments from URIs
	 */
	public boolean getStripQueryAndFragment() {
		return stripQueryAndFragment;
	}

	/**
	 * If true the crawler will follow links that are on remote servers.
	 *
	 * This is really just a simple test to see if the URIs have the file schema
	 * or not.
	 * @return True if the crawler will follow remote links
	 */
	public boolean getFollowRemote() {
		return followRemote;
	}

	/**
	 * If true the crawler will follow links that are on remote servers.
	 *
	 * This is really just a simple test to see if the URIs have the file schema
	 * or not.
	 * @param followRemote Set to true to make the crawler follow remote links
	 */
	public void setFollowRemote(boolean followRemote) {
		this.followRemote = followRemote;
	}
}
