/*
 * Created on 2005-4-30
 *
 * Copyright: Bluejay Networks Inc. 2005
 *
 * This software is the proprietary information of Bluejay Networks Inc. 
 * 
 * Use is subject to license terms.
 * 
 */
package com.ims.iaa;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

/**
 * @author ShengMengBai
 *
 * This class fetches robots.txt from the root of each site and parses it;
 * call isAllow() to determine whether the current URL may be accessed or not.
 */
public class RobotsTXTAnalyzer {
    //per-host cache: host name -> ArrayList of Disallow path prefixes (String)
    private HashMap m_hostMap = new HashMap();
    //per-host cache of the PrintStream used to log URLs rejected by robots.txt
    private HashMap m_outputMap = new HashMap();
    //the robot name matched against "User-agent:" lines in robots.txt
    private final static String ROBOTNAME = IaaDebug.m_robotName;
    //host of the site this analyzer was created for (names the log directory)
    private String m_sHost;
    //value sent as the HTTP "User-Agent" header when fetching robots.txt
    private String m_sUserAgent;
    /**
     * Because this class must connect to the site to fetch robots.txt, the
     * call may take a long time, so no static synchronized factory method is
     * used; create a new instance for every crawling site instead.
     *
     * @param url        any URL on the site to analyze; only its host is kept
     * @param sUserAgent HTTP User-Agent header value for the robots.txt request
     */
    public RobotsTXTAnalyzer(URL url, String sUserAgent){
        m_sUserAgent = sUserAgent;
        m_sHost = url.getHost();
        //download and parse robots.txt, then cache the rules for this host
        m_hostMap.put(m_sHost, analyzeFile(url));
    }
    /**
     * Based on the cached (or freshly fetched) robots.txt rules for the
     * URL's host, judge whether IAA may access it.  Disallowed URLs are
     * appended to a per-host reject-log file.
     *
     * @param url the URL of the site about to be accessed
     * @return true if IAA may access it, false otherwise
     */
    public synchronized boolean isAllow(URL url){
        String sHost = url.getHost();
        ArrayList disallowLst = (ArrayList)m_hostMap.get(sHost);
        //first time this host is seen: fetch its robots.txt and cache the rules
        if(disallowLst == null){
            disallowLst = analyzeFile(url);
            m_hostMap.put(sHost, disallowLst);
        }
        boolean bRet = isAllow(disallowLst, url);
        if(!bRet){
            PrintStream ps = (PrintStream)m_outputMap.get(sHost);
            //lazily open one reject log per host and write its rule header once
            if(ps == null){
                ps = createFile(url);
                m_outputMap.put(sHost, ps);
                StringBuffer sOut = new StringBuffer(sHost).append(" Disallow: ");
                for(int i = 0; i < disallowLst.size(); i++){
                    sOut.append(disallowLst.get(i)).append(' ');
                }
                sOut.append("\r\nDon't accessed file:\r\n");
                ps.println(sOut.toString());
            }
            //record the rejected URL
            ps.println(url);
        }
        return bRet;
    }
    /**
     * Closes every per-host reject-log stream opened by isAllow().
     * The System.out fallback stream is left open.  Call this when the
     * analyzer is no longer needed to avoid leaking file handles.
     */
    public synchronized void close(){
        Iterator itr = m_outputMap.values().iterator();
        while(itr.hasNext()){
            PrintStream ps = (PrintStream)itr.next();
            if(ps != System.out){
                ps.close();
            }
        }
        m_outputMap.clear();
    }
    /**
     * Based on the robots.txt rules, judge whether the URL may be accessed.
     * Disallow values are path prefixes, so they are compared against the
     * URL's path+query with startsWith() instead of the old indexOf() over
     * the whole URL string, which produced false rejections (and rejected
     * everything whenever an empty rule reached the list, since
     * indexOf("") is always 0).
     *
     * @param disallowLst the non-empty Disallow path prefixes for this host
     * @param url the URL which will be accessed
     * @return true if IAA may access it, false otherwise
     */
    private static boolean isAllow(ArrayList disallowLst, URL url){
        String sPath = url.getFile();
        for(int i = 0; i < disallowLst.size(); i++){
            String sMatch = (String)disallowLst.get(i);
            if(sMatch.length() > 0 && sPath.startsWith(sMatch)){
                return false;
            }
        }
        return true;
    }

    /**
     * Downloads robots.txt from the root of the URL's site and parses it.
     * A missing or unreachable robots.txt is normal and means the site is
     * unrestricted, so those failures are deliberately silent.
     *
     * @param url any URL on the site whose robots.txt should be fetched
     * @return the list of Disallow path prefixes (empty = no restrictions)
     */
    private ArrayList analyzeFile(URL url){
        ArrayList disallowLst = new ArrayList();
        BufferedReader reader = null;
        try {
            //robots.txt always lives at the site root
            //NOTE(review): was "\\robots.txt" -- URL paths use forward
            //slashes; confirm URLUtil.getURL() semantics
            URL robotFile = URLUtil.getURL(url, "/robots.txt");
            URLConnection uc = robotFile.openConnection();
            uc.setRequestProperty("User-Agent", m_sUserAgent);
            uc.connect();
            reader = new BufferedReader(new InputStreamReader(uc.getInputStream()));
            analyzeRobots(reader, disallowLst);
        } catch (MalformedURLException e) {
            //no valid robots.txt URL could be built: treat site as unrestricted
        } catch (IOException e) {
            //robots.txt missing or unreachable (e.g. 404): treat as unrestricted
        } catch (Exception e) {
            IaaUtil.printStackTrace(e);
            IaaUtil.log("Can not get robots.txt from: " + url.toString());
        } finally {
            //always release the connection's stream, even on a parse error
            if(reader != null){
                try {
                    reader.close();
                } catch (IOException e) {
                    //best-effort close; nothing useful to do here
                }
            }
        }
        return disallowLst;
    }
    /**
     * Parses a robots.txt stream, collecting the Disallow values of every
     * record whose User-agent is "*" or our own robot name.
     *
     * @param reader the robots.txt content, line by line
     * @param disallowLst output list receiving the Disallow path prefixes
     * @throws IOException if reading the stream fails
     */
    private void analyzeRobots(BufferedReader reader, ArrayList disallowLst) throws IOException{
        String sLine = reader.readLine();
        String sCurrentAgent = null;
        while(sLine != null){
            sLine = sLine.trim();
            //skip blank lines
            if(sLine.length() == 0){
                sLine = reader.readLine();
                continue;
            }
            //skip whole-line comments
            if(sLine.charAt(0) == '#'){
                sLine = reader.readLine();
                continue;
            }
            //strip trailing comments
            int iCommentStart = sLine.indexOf(" #");
            if(iCommentStart != -1){
                sLine = sLine.substring(0, iCommentStart).trim();
            }
            //split the line at the first ':' into field and value
            int iIdx = sLine.indexOf(':');
            if(iIdx != -1){
                String sField = sLine.substring(0, iIdx).toLowerCase().trim();
                String sValue = sLine.substring(iIdx + 1).trim();
                if(sField.equals("user-agent")){
                    //if the record's user-agent is neither the wildcard nor
                    //our robot, clear sCurrentAgent so its Disallow lines are
                    //skipped; robot-name matching is case-insensitive per the
                    //robots exclusion protocol
                    if(sValue.equals("*") || sValue.equalsIgnoreCase(ROBOTNAME)){
                        sCurrentAgent = ROBOTNAME;
                    }else{
                        sCurrentAgent = null;
                    }
                }else if(sField.equals("disallow")){
                    //record the rule only when it applies to our agent; an
                    //empty "Disallow:" value means "allow everything", so
                    //never add it to the list
                    if(sCurrentAgent != null && sValue.length() > 0){
                        disallowLst.add(sValue);
                    }
                }
            }
            sLine = reader.readLine();
        }
    }

    /**
     * Opens the reject-log file
     * &lt;LogDirectory&gt;/ObeyRobotsTXT/&lt;constructor host&gt;/&lt;url host&gt;.txt,
     * creating missing directories as needed.  Falls back to System.out when
     * no log directory is configured or the file cannot be created.
     *
     * @param url the first URL rejected for this host; names the log file
     * @return a PrintStream for the reject log, never null
     */
    private PrintStream createFile(URL url) {
        //based on the user setting, build the full path name
        String sPath = IaaDebug.LogDirectory;
        PrintStream out = null;
        if (!IaaDebug.LogDirectory.equals("")) {
            try {
                //NOTE(review): directory uses the constructor's host while
                //the file name uses the rejected URL's host -- looks
                //intentional but confirm
                sPath += File.separator + "ObeyRobotsTXT" + File.separator + m_sHost;
                File path = new File(sPath);
                //mkdirs() creates all missing parent directories in one call
                if (!path.exists()) path.mkdirs();
                String sFileName = sPath + File.separator + url.getHost() + ".txt";
                out = new PrintStream(new FileOutputStream(sFileName));
            } catch (Exception e) {
                IaaUtil.log("There is a error while create a file to output obey robots.txt, error: " + e);
            }
        }
        //fall back to the console so rejected URLs are never silently dropped
        if (out == null) {
            IaaUtil.log("Warning, Can not create file to save, output to screen.");
            out = System.out;
        }
        return out;
    }
    /**
     * Renders every cached host with its Disallow rules, one host per
     * CRLF-terminated line; "(unlimited)" marks hosts without restrictions.
     *
     * @return the human-readable rule summary
     */
    public String toString(){
        StringBuffer sRet = new StringBuffer();
        Iterator itr = m_hostMap.keySet().iterator();
        while(itr.hasNext()){
            String sKey = (String)itr.next();
            List lst = (List)m_hostMap.get(sKey);
            sRet.append(sKey).append(": ");
            if(lst.size() == 0){
                sRet.append("(unlimited)");
            }else{
                for(int i = 0; i < lst.size(); i++){
                    sRet.append(lst.get(i)).append(' ');
                }
            }
            sRet.append("\r\n");
        }
        return sRet.toString();
    }
    /**
     * Manual smoke test: fetches google.com's robots.txt and checks a few
     * URLs against it.  Requires network access.
     */
    public static void main(String[] args) {
        URL url;
        try {
            url = URLUtil.getURL("http://www.google.com/catalogs/test");
            RobotsTXTAnalyzer ana = new RobotsTXTAnalyzer(url, "Mozilla/4.0 (compatible; MSIE 5.0; Windows NT 4.0)");
            url = URLUtil.getURL("http://pagead2.googlesyndication.com/pagead/iclk?sa=l&amp;ai=Bn5ZqVeNyQsDmAaGS4AG-lu29CryM9AKQ070KwI23AdD3BxACGAIg9MaHAigEQOwQSIc5sgEUd3d3LmZhY2lsaXR5Y2l0eS5jb226AQoxMjB4NjAwX2FzyAEB2gEcaHR0cDovL3d3dy5mYWNpbGl0eWNpdHkuY29tL-gBAQ&amp;num=2&amp;adurl=http://www.jrosspub.com/Engine/Shopping/affiliate/referral.asp?store=12&amp;test=test&amp;category=181&amp;key=google5&amp;client=ca-pub-6173332886728133");
            System.out.println(url);
            System.out.println(ana.isAllow(url));
            url = URLUtil.getURL("http://www.google.com/catalogs/test");
            System.out.println(url);
            System.out.println(ana.isAllow(url));
            url = URLUtil.getURL("http://www.businessfacilities.com/images/bflive_logo.gif");
            System.out.println(url);
            System.out.println(ana.isAllow(url));

            System.out.println(ana.toString());
            ana.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
