/*
 * Copyright 2013 Solace Systems, Inc.
 * 
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package com.solacesystems.tools.ant;

import static org.junit.Assert.*;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.junit.Test;
import org.xml.sax.Attributes;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;

import com.solacesystems.tools.ant.LinkFinderTests.Box;

public class CrawlerTests {

	/**
	 * Really dumb URL handler that always returns the same content
	 * @author swilson
	 *
	 */
	/**
	 * Really dumb URL handler that always returns the same content.
	 *
	 * <p>Behavior, keyed off the URL text: URLs containing "missing" fail
	 * to connect/read with an {@link IOException}; URLs containing
	 * "invalid" fail the read with a {@link RuntimeException}; URLs ending
	 * in "html" report a {@code text/html} content type, everything else
	 * reports {@code application/octet-stream}.
	 *
	 * @author swilson
	 */
	private static final class TestURLHandler extends URLStreamHandler {
		@Override
		protected URLConnection openConnection(URL u) throws IOException {
			return new URLConnection(u) {
				// Encode the canned page with an explicit charset so the
				// fixture does not depend on the platform default encoding.
				private final InputStream m_InputStream =
						new ByteArrayInputStream("<html></html>".getBytes(StandardCharsets.UTF_8));

				@Override
				public void connect() throws IOException {
					// Simulate an unreachable resource.
					if (this.url.toString().contains("missing")) {
						throw new IOException();
					}
					this.connected = true;
				}

				@Override
				public InputStream getInputStream() throws IOException {
					if (this.url.toString().contains("missing")) {
						// Simulate a missing resource.
						throw new IOException();
					} else if (this.url.toString().contains("invalid")) {
						// Simulate an unexpected failure while reading.
						throw new RuntimeException();
					}
					return m_InputStream;
				}

				@Override
				public String getContentType() {
					// Only html content is handed to the link finder.
					if (this.url.toString().endsWith("html")) {
						return "text/html";
					} else {
						return "application/octet-stream";
					}
				}
			};
		}

	}
	
	/**
	 * Stream handler factory that hands out a fresh {@link TestURLHandler}
	 * for the {@code test} protocol, and asserts that no other protocol is
	 * ever requested.
	 */
	private static final class TestStreamHandlerFactory implements URLStreamHandlerFactory {
		@Override
		public URLStreamHandler createURLStreamHandler(String protocol) {
			final String expected = "test";
			assertEquals("protocol must be `test`", expected, protocol);
			return new TestURLHandler();
		}
	}
	
	/**
	 * Mock {@link LinkFinder} that, instead of parsing real documents,
	 * replays a canned map of document system-id to outgoing links to its
	 * single registered listener.  It also asserts that the crawler only
	 * visits documents present in the map and never visits the same
	 * document twice.
	 */
	private static class MockLinkFinder extends LinkFinder {
		// The single registered listener; this mock supports at most one.
		private LinkEventListener m_Listener = null;
		// Locator of the document currently being "parsed" (null when idle).
		private Locator m_Locator = null;
		// Canned data: document system-id -> links "found" in that document.
		private final Map<String, Collection<URI>> m_Links;
		// System-ids already visited, used to detect duplicate visits.
		private final Set<String> m_Visited = new HashSet<String>();
	
		/**
		 * @param links canned map of document system-id to the links that
		 *              should be reported for that document
		 */
		public MockLinkFinder(Map<String, Collection<URI>> links) {
			m_Links = links;
		}
		
		// Registers the one listener; a second registration is an error.
		@Override
		public void addLinkEventListener(LinkEventListener listener) {
			if (m_Listener != null) {
				throw new UnsupportedOperationException("multiple listeners not supported in mock object");
			}
			m_Listener = listener;
		}

		@Override
		public void setLinkAttributeNames(String[] names) {
			throw new UnsupportedOperationException("not supported in mock object");
		}

		// Deliberately falls through to the real implementation's names.
		@Override
		public String[] getLinkAttributeNames() {
			return super.getLinkAttributeNames();
		}

		// No per-document parsing state to set up.
		@Override
		public void startDocument() throws SAXException {
			// Do nothing
		}

		// Records the locator and asserts the crawler is visiting a known,
		// not-yet-visited document (Set.add returns false on duplicates).
		@Override
		public void setDocumentLocator(Locator locator) {
			m_Locator = locator;
			
			assertTrue("visiting non-existant link:" + locator.getSystemId(), m_Links.containsKey(locator.getSystemId()));
			assertTrue("visited same link twice", m_Visited.add(locator.getSystemId()));
		}

		@Override
		public Locator getCurrentLocator() {
			return m_Locator;
		}

		@Override
		public URI getDocumentURI() throws URISyntaxException {
			return new URI(m_Locator.getSystemId());
		}

		// Fires one linkFound event per canned link for the current
		// document, with synthetic 1-based line numbers and column 1.
		// m_Links.get() cannot return null here: setDocumentLocator has
		// already asserted the current system-id is a key of m_Links.
		@Override
		public void startElement(String uri, String localName, String qName,
				Attributes attributes) throws SAXException {
			Collection<URI> links = m_Links.get(m_Locator.getSystemId());
			int line = 0;
			for (URI link : links) {
				line ++;
				m_Listener.linkFound(new Location(m_Locator.getSystemId(), line, 1), link);
			}
		}

		// Clears only the locator.  NOTE: m_Visited is not cleared here, so
		// duplicate-visit detection spans the whole crawl, across resets.
		@Override
		public void reset() {
			m_Locator = null;
		}

		// Only the currently registered listener may be removed.
		@Override
		public void removeLinkEventListener(LinkEventListener listener) {
			assertEquals("removing unknown listener", m_Listener, listener);
			m_Listener = null;
		}
	}
	
	static {
		// Register our URL handler factory so that the `test://` URLs used
		// throughout these tests resolve to TestURLHandler.  The JVM allows
		// URL.setURLStreamHandlerFactory to be called at most once per
		// process, hence the static initializer.
		URL.setURLStreamHandlerFactory(new TestStreamHandlerFactory());
	}
	
	/**
	 * The constructor arguments must be returned unchanged by the getters.
	 */
	@Test
	public void testCrawlerTarget() throws URISyntaxException {
		final URI uri = new URI(TestConst.GOOD_URI_STR);
		final Crawler.Target target = new Crawler.Target(TestConst.KNOWN_LOCATION, uri);

		assertSame(TestConst.KNOWN_LOCATION, target.getLocation());
		assertEquals(TestConst.GOOD_URI_STR, target.getURI().toString());
	}
	
	/**
	 * Targets built from the same location and URI must be equal; equality
	 * must be reflexive and symmetric.
	 */
	@Test
	public void testCrawlerTargetEquals() throws URISyntaxException {
		final URI shared = new URI(TestConst.GOOD_URI_STR);
		final Crawler.Target first = new Crawler.Target(TestConst.KNOWN_LOCATION, shared);
		final Crawler.Target second = new Crawler.Target(TestConst.KNOWN_LOCATION, shared);

		assertEquals(first, first);   // reflexive
		assertEquals(first, second);  // symmetric, checked both ways
		assertEquals(second, first);
	}
	
	/**
	 * Targets wrapping different URIs must not be equal, in either
	 * direction.
	 */
	@Test
	public void testCrawlerTargetEquals2() throws URISyntaxException {
		final Crawler.Target first =
				new Crawler.Target(TestConst.KNOWN_LOCATION, new URI(TestConst.GOOD_URI_STR));
		final Crawler.Target second =
				new Crawler.Target(TestConst.KNOWN_LOCATION, new URI(TestConst.GOOD_URI_STR2));

		assertFalse(first.equals(second));
		assertFalse(second.equals(first));
	}
	
	/**
	 * A target must not be equal to an unrelated type, nor to null, per
	 * the {@link Object#equals(Object)} contract.
	 */
	@Test
	public void testCrawlerTargetEquals3() throws URISyntaxException {
		URI uri = new URI(TestConst.GOOD_URI_STR);
		Crawler.Target a = new Crawler.Target(TestConst.KNOWN_LOCATION, uri);
		Object b = new Object();
		
		assertFalse(a.equals(b));
		assertFalse(b.equals(a));
		// The equals() contract also requires x.equals(null) == false;
		// the original test never exercised the null case.
		assertFalse(a.equals(null));
	}
	
	/**
	 * Equal targets must produce equal hash codes.
	 */
	@Test
	public void testCrawlerTargetHashCode() throws URISyntaxException {
		final URI shared = new URI(TestConst.GOOD_URI_STR);
		final Crawler.Target first = new Crawler.Target(TestConst.KNOWN_LOCATION, shared);
		final Crawler.Target second = new Crawler.Target(TestConst.KNOWN_LOCATION, shared);

		assertEquals(first.hashCode(), second.hashCode());
	}
	
	/**
	 * Target ordering must delegate to URI ordering when both targets wrap
	 * equal URIs.
	 */
	@Test
	public void testCrawlerTargetCompareTo() throws URISyntaxException {
		final URI left = new URI(TestConst.GOOD_URI_STR);
		final URI right = new URI(TestConst.GOOD_URI_STR);
		final Crawler.Target a = new Crawler.Target(TestConst.KNOWN_LOCATION, left);
		final Crawler.Target b = new Crawler.Target(TestConst.KNOWN_LOCATION, right);

		// Check delegation in both directions.
		assertEquals(a.compareTo(b), left.compareTo(right));
		assertEquals(b.compareTo(a), right.compareTo(left));
	}
	
	/**
	 * Target ordering must delegate to URI ordering for distinct URIs, in
	 * both directions.
	 */
	@Test
	public void testCrawlerTargetCompareTo2() throws URISyntaxException {
		final URI left = new URI("a");
		final URI right = new URI("b");
		final Crawler.Target a = new Crawler.Target(TestConst.KNOWN_LOCATION, left);
		final Crawler.Target b = new Crawler.Target(TestConst.KNOWN_LOCATION, right);

		assertEquals(a.compareTo(b), left.compareTo(right));
		assertEquals(b.compareTo(a), right.compareTo(left));
	}
	
	/**
	 * The no-argument constructor must produce an instance.
	 */
	@Test
	public void testCrawler() {
		assertNotNull(new Crawler());
	}

	/**
	 * An anonymous subclass of Crawler must be constructible.
	 */
	@Test
	public void testCrawlerEventHandler() {
		final Crawler subclassed = new Crawler() {};
		assertNotNull(subclassed);
	}

	/**
	 * A URI added as a target must come back unchanged from nextTarget().
	 */
	@Test
	public void testAddTargetURI() throws URISyntaxException {
		final Crawler crawler = new Crawler();
		final URI target = new URI(TestConst.GOOD_URI_STR);

		crawler.addTarget(target);
		assertEquals(target, crawler.nextTarget().getURI());
	}

	/**
	 * A target added as a string must round-trip to the same URI text.
	 */
	@Test
	public void testAddTargetString() throws URISyntaxException {
		final Crawler crawler = new Crawler();

		crawler.addTarget(TestConst.GOOD_URI_STR);
		assertEquals(TestConst.GOOD_URI_STR, crawler.nextTarget().getURI().toString());
	}

	/**
	 * Crawling with link-following enabled must traverse every reachable
	 * page exactly once (enforced by MockLinkFinder) and report exactly
	 * one missing file (missing.html), even though two pages link to it.
	 *
	 * <p>This test relies on some internal implementation details of
	 * Crawler.  Specifically, it relies on Crawler's usage of
	 * URL.openConnection().
	 *
	 * <p>We add a custom URL handler for the 'test' protocol, which
	 * always returns an empty html element.  The mock link finder
	 * will return the proper links for each page.
	 */
	@Test
	public void testCrawlFollow() {
		// Records whether missing() was invoked.  Asserted after the crawl
		// so the test fails if the broken link is silently ignored — the
		// original version set a flag inside the handler but never checked
		// it, so a crawler that reported nothing would still pass.
		final Box<Boolean> missingReported = new Box<Boolean>(Boolean.FALSE);
		Crawler.EventHandler handler = new Crawler.EventHandler() {
			@Override
			public void error(Location location, Exception ex) {
				fail("unexpected error: " + ex.toString());
			}
			
			@Override
			public void missing(URI file, Location[] locations) {
				assertFalse("too many missing files", missingReported.value.booleanValue());
				assertTrue("wrong missing file", file.toString().contains("missing.html"));
				missingReported.value = Boolean.TRUE;
			}
		};
		
		HashMap<String, Collection<URI>> links = new HashMap<String, Collection<URI>>();
		
		// A small site: first <-> second form a cycle, both reference the
		// same missing page, and image.png is a non-html resource.
		links.put("test://first.html", Arrays.asList(new URI[] {
				URI.create("test://second.html"),
				URI.create("test://third.html"),
				URI.create("test://missing.html"),
				URI.create("test://image.png"),
		}));
		
		links.put("test://second.html", Arrays.asList(new URI[] {
				URI.create("test://first.html"),
				URI.create("test://fourth.html"),
				URI.create("test://missing.html"),
		}));
		
		links.put("test://third.html", Arrays.asList(new URI[] {}));
		links.put("test://fourth.html", Arrays.asList(new URI[] {}));
		
		Crawler c = new Crawler(handler, new MockLinkFinder(links));
		c.addRemoteScheme("test");
		c.setFollowLinks(true);
		c.setFollowRemote(true);
		c.addTarget(URI.create("test://first.html"));
		c.crawl();
		
		assertTrue("missing.html was never reported", missingReported.value.booleanValue());
	}
	
	/**
	 * With link-following disabled only the explicit targets are fetched,
	 * so a broken link inside a fetched page must NOT be reported.
	 *
	 * <p>This test relies on some internal implementation details of
	 * Crawler.  Specifically, it relies on Crawler's usage of
	 * URL.openConnection().
	 *
	 * <p>We add a custom URL handler for the 'test' protocol, which
	 * always returns an empty html element.  The mock link finder
	 * will return the proper links for each page.
	 */
	@Test
	public void testCrawlNoFollow() {
		// Removed the unused `throws MalformedURLException`: nothing in
		// this body can throw it (URI.create throws unchecked exceptions).

		// Any error or missing-file report is a test failure here.
		Crawler.EventHandler handler = new Crawler.EventHandler() {
			@Override
			public void error(Location location, Exception ex) {
				fail("unexpected error: " + ex.toString());
			}
			
			@Override
			public void missing(URI file, Location[] locations) {
				fail("no missing files should be found");
			}
		};
		
		HashMap<String, Collection<URI>> links = new HashMap<String, Collection<URI>>();
		
		// first.html links to a missing page, but since links are not
		// followed the missing page is never fetched or reported.
		links.put("test://first.html", Arrays.asList(new URI[] {
				URI.create("test://missing.html"),
		}));
		
		Crawler c = new Crawler(handler, new MockLinkFinder(links));
		c.addRemoteScheme("test");
		c.setFollowLinks(false);
		c.setFollowRemote(true);
		c.addTarget(URI.create("test://first.html"));
		c.crawl();
	}
	
	/**
	 * A failure while reading a target (TestURLHandler throws a
	 * RuntimeException for URLs containing "invalid") must be surfaced
	 * through EventHandler.error().
	 *
	 * <p>This test relies on some internal implementation details of
	 * Crawler.  Specifically, it relies on Crawler's usage of
	 * URL.openConnection().
	 *
	 * <p>We add a custom URL handler for the 'test' protocol, which
	 * always returns an empty html element.  The mock link finder
	 * will return the proper links for each page.
	 */
	@Test
	public void testCrawlError() {
		// Removed the unused `throws MalformedURLException`: nothing in
		// this body can throw it (URI.create throws unchecked exceptions).

		// Set to TRUE by the handler when error() is invoked.
		final Box<Boolean> failed = new Box<Boolean>(Boolean.FALSE);
		Crawler.EventHandler handler = new Crawler.EventHandler() {
			@Override
			public void error(Location location, Exception ex) {
				failed.value = Boolean.TRUE;
			}
			
			@Override
			public void missing(URI file, Location[] locations) {
				fail("no missing files should be found");
			}
		};
		
		HashMap<String, Collection<URI>> links = new HashMap<String, Collection<URI>>();
		
		links.put("test://invalid.html", Arrays.asList(new URI[] {}));
		
		Crawler c = new Crawler(handler, new MockLinkFinder(links));
		c.addRemoteScheme("test");
		c.setFollowLinks(false);
		c.setFollowRemote(true);
		c.addTarget(URI.create("test://invalid.html"));
		c.crawl();
		
		assertTrue("error() was never invoked", failed.value.booleanValue());
	}

	/**
	 * The follow-links flag must round-trip through its setter and getter.
	 */
	@Test
	public void testSetGetFollowLinks() {
		final Crawler crawler = new Crawler();

		// Round-trip both values to rule out a stuck getter.
		crawler.setFollowLinks(true);
		assertTrue(crawler.getFollowLinks());

		crawler.setFollowLinks(false);
		assertFalse(crawler.getFollowLinks());
	}

	/**
	 * The follow-remote flag must round-trip through its setter and getter.
	 */
	@Test
	public void testSetGetFollowRemote() {
		final Crawler crawler = new Crawler();

		// Round-trip both values to rule out a stuck getter.
		crawler.setFollowRemote(true);
		assertTrue(crawler.getFollowRemote());

		crawler.setFollowRemote(false);
		assertFalse(crawler.getFollowRemote());
	}

	/**
	 * The strip-query-and-fragment flag must round-trip through its setter
	 * and getter.
	 */
	@Test
	public void testSetGetStripQueryAndFragment() {
		final Crawler crawler = new Crawler();

		// Round-trip both values to rule out a stuck getter.
		crawler.setStripQueryAndFragment(true);
		assertTrue(crawler.getStripQueryAndFragment());

		crawler.setStripQueryAndFragment(false);
		assertFalse(crawler.getStripQueryAndFragment());
	}
}
