package com.eric.utils;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.jsoup.nodes.Element;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.eric.downloader.SeoDownloader;
import com.eric.seo.SeoPage;

/**
 * Parses and caches robots.txt rules per host, and classifies page links as
 * "SEO" (crawlable) or "Disallowed" according to the wildcard ("*") agent's
 * Disallow rules.
 *
 * <p>Rules are fetched via {@link SeoDownloader#downloadRobotsRules} and cached
 * in {@code robotsRulesMap} keyed by host, so each host is downloaded once.
 * NOTE(review): the exact JSON layout produced by SeoDownloader is assumed to
 * be a per-host JSONArray of rule groups, each with "User-agent", "Disallow"
 * and optionally "Sitemap" keys — confirm against SeoDownloader.
 */
public class RobotsTxtParser {

	// Cached robots rules, keyed by host name.
	HashMap<String, JSONArray> robotsRulesMap = new HashMap<>();

	// Per-URL verdict: "Disallowed" or "SEO".
	HashMap<String, String> robotsResultMap = new HashMap<>();

	SeoDownloader sd;

	public static void main(String[] args) throws MalformedURLException, IOException {
		String s = "http://www.jmall360.com/robots.txt";
		RobotsTxtParser parser = new RobotsTxtParser();
		System.out.println(parser.siteMapUrl(s));
	}

	public RobotsTxtParser() {
	}

	/**
	 * Probes every absolute ("http...") link on the page against the robots
	 * rules, filling {@code robotsResultMap}.
	 *
	 * @param page the page whose links are checked
	 */
	public void parseAllLinks(SeoPage page) {
		for (Element e : page.linkElements()) {
			String href = e.attr("href");
			if (href.startsWith("http")) {
				probe(href, page);
			}
		}
	}

	/**
	 * Returns the Sitemap URL declared in the robots rules of the given URL's
	 * host, downloading and caching the rules on first use.
	 *
	 * @param robotsUrl any URL on the target host (typically .../robots.txt)
	 * @return the Sitemap URL, or {@code null} if the URL is malformed or no
	 *         rules/Sitemap entry are available
	 */
	public String siteMapUrl(String robotsUrl) {
		ensureDownloader();
		if (!checkRules(robotsUrl)) {
			try {
				robotsRulesMap = sd.downloadRobotsRules(robotsUrl, robotsRulesMap);
			} catch (MalformedURLException e) {
				e.printStackTrace();
				// Download failed: do not attempt extraction on missing rules.
				return null;
			}
		}
		try {
			JSONArray rules = robotsRulesMap.get(new URL(robotsUrl).getHost());
			if (rules == null || rules.size() == 0) {
				return null;
			}
			return ((JSONObject) rules.get(0)).getString("Sitemap");
		} catch (MalformedURLException e) {
			e.printStackTrace();
			return null;
		}
	}

	public HashMap<String, String> getResult() {
		return robotsResultMap;
	}

	public HashMap<String, JSONArray> getRulesMap() {
		return robotsRulesMap;
	}

	/**
	 * Classifies a single internal link against its host's robots rules and
	 * records the verdict in {@code robotsResultMap}. External links (relative
	 * to the page's base URI) are ignored.
	 *
	 * @param url  absolute URL of the link
	 * @param page the page the link was found on (provides the base URI)
	 */
	public void probe(String url, SeoPage page) {
		ensureDownloader();
		if (UrlUtils.isExternal(page.doc().baseUri(), url)) {
			return; // external links are out of scope
		}
		if (!checkRules(url)) {
			try {
				robotsRulesMap = sd.downloadRobotsRules(url, robotsRulesMap);
			} catch (MalformedURLException e) {
				e.printStackTrace();
				return;
			}
			// If the download yielded no rules for this host, stop: the
			// original unconditional self-recursion here could loop forever
			// when robots.txt was unreachable.
			if (!checkRules(url)) {
				return;
			}
		}
		robotsResultMap.put(url, isDisallowed(url) ? "Disallowed" : "SEO");
	}

	/**
	 * Returns whether robots rules for the URL's host are already cached.
	 *
	 * @param strUrl URL whose host is looked up
	 * @return {@code true} if rules are cached; {@code false} for uncached
	 *         hosts and for malformed URLs
	 */
	public boolean checkRules(String strUrl) {
		try {
			return robotsRulesMap.get(new URL(strUrl).getHost()) != null;
		} catch (MalformedURLException e) {
			e.printStackTrace();
			// A malformed URL has no cached rules. The original returned true
			// here, which made callers NPE when they then looked the host up.
			return false;
		}
	}

	/**
	 * Checks the URL's file part (path + query) against the Disallow rules of
	 * the wildcard ("*") user-agent group for that host.
	 *
	 * @param strUrl absolute URL to test
	 * @return {@code true} if some Disallow rule matches; {@code false} when
	 *         no rule matches, the host has no cached rules, or the URL is
	 *         malformed
	 */
	public boolean isDisallowed(String strUrl) {
		try {
			URL url = new URL(strUrl);
			JSONArray hostRules = robotsRulesMap.get(url.getHost());
			if (hostRules == null) {
				return false; // no cached rules for this host
			}
			JSONArray disallow = null;
			for (int i = 0; i < hostRules.size(); i++) {
				JSONObject group = (JSONObject) hostRules.get(i);
				// Constant-first equals avoids NPE when "User-agent" is absent.
				if ("*".equals(group.get("User-agent"))) {
					disallow = (JSONArray) group.get("Disallow");
				}
			}
			if (disallow == null) {
				return false;
			}
			String file = url.getFile();
			for (int i = 0; i < disallow.size(); i++) {
				// Quote the rule so regex metacharacters ('.', '+', '?', ...)
				// stay literal, then expand robots '*' wildcards. The original
				// only escaped '?', so '.' in a rule matched any character.
				String regex = Pattern.quote(disallow.get(i).toString())
						.replace("*", "\\E.*?\\Q");
				Matcher m = Pattern.compile(regex).matcher(file);
				// lookingAt(): robots Disallow rules are prefix matches from
				// the start of the path, not substring matches (RFC 9309).
				if (m.lookingAt()) {
					return true;
				}
			}
		} catch (MalformedURLException e) {
			e.printStackTrace();
		}
		return false;
	}

	// Lazily creates the downloader so repeated calls reuse one instance.
	private void ensureDownloader() {
		if (sd == null) {
			sd = new SeoDownloader();
		}
	}

}
