%% @author Rob Evans <rob.g.evans@gmail.com>
%% @copyright 2008 ftw

%%  Retrieves and checks robots.txt file for Url to determine "crawlability"
-module(es_robots).
-export([checkrobots_txt/1,
		 test/0]).

%% Testing/debugging macros
-define(TEST(Call), io:format("~s: ~w~n", [??Call, Call])).
-define(DEBUGMODE, false).
-define(DEBUG(Stmt), if ?DEBUGMODE==true -> io:format("~s~n",[Stmt]); true -> ok end).

%% Pair = { string(), string() }
%% User-agent = string()
%% Rule = { string(), string() }


%%  checkrobots_txt(string() | binary()) -> bool()
%%  Fetches http://Server/robots.txt for the server of FileUrl and returns
%%  true when FileUrl may be crawled, false when a robots rule forbids it.
%%  A missing robots.txt (404) and any HTTP/transport failure are both
%%  treated as permission to crawl.
checkrobots_txt(FileUrl) ->
	{_,Server,_Path,_File} = es_parse:splitUrl(FileUrl),
	%% Stash the server in the process dictionary; evalRules/2 reads it to
	%% rebuild absolute rule prefixes.
	put(robots_server,Server),

	Url = "http://"++Server++"/robots.txt",

	inets:start(),
	%% try/of/catch instead of old-style `catch`: only the request itself is
	%% protected, exceptions are not conflated with valid return values, and
	%% the {error, Reason} return (unhandled before -> case_clause crash) is
	%% now treated like any other fetch failure.
	try http:request(Url) of
		{ok,{{_,404,_},_,_Body}} ->
			%% No robots.txt on this server: everything is crawlable.
			?DEBUG("{allow, 404}"),
			true;
		{ok,{{_,_Code,_},_,Body}} ->
			parseRobots(Body, FileUrl);
		{error,_Reason} ->
			?DEBUG("Error in HTTP Request"),
			true
	catch
		_:_ ->
			?DEBUG("Error in HTTP Request"),
			true
	end.

%%  parseRobots(string(), string()) -> bool()
%%  Given the text of robots.txt, determine whether FileUrl may be crawled.
%%  Only the "*" (any agent) group is consulted; if no such group exists,
%%  crawling is allowed.
parseRobots(Body, FileUrl) ->
	Lines = string:tokens(Body, "\n"),

	%% Strip trailing "#" comments; yields [] for blank or comment-only
	%% lines so they can be filtered out below.
	RemCom = fun(Line) ->
		case string:cspan(Line, "#") of
			0 -> [];
			Len -> string:substr(Line, 1, Len)
		end
	end,
	RemainingLines = lists:map(RemCom, Lines),

	%% Drop empty lines.
	KeyValLines = [Line || Line <- RemainingLines, Line =/= []],

	%% Map each "Key: Value" line to a tuple. Only the key is lowercased:
	%% robots.txt keys are case-insensitive, but rule paths are not, and
	%% evalRules/2 compares those paths against the original-case FileUrl
	%% (the previous code lowercased the whole line, so any mixed-case
	%% path rule could never match).
	ToPair = fun(Line) ->
		case string:tokens(Line, ": ") of
			[] -> {};
			[Key | Vals] -> list_to_tuple([string:to_lower(Key) | Vals])
		end
	end,
	Pairs = lists:map(ToPair, KeyValLines),

	GrpPerms = getGroups(Pairs),

	case lists:keytake("*", 1, GrpPerms) of
		{value, {"*", Rules}, _} ->
			evalRules(Rules, FileUrl);
		_ ->
			%% No wildcard group: nothing restricts us.
			true
	end.

%%  getGroups([Pair]) -> [{User-agent, [Pair]}]
%%  Groups key-value pairs under the preceding "user-agent" key.
%%  Pairs that appear before any user-agent line are skipped: the previous
%%  version returned the bare atom 'ok' for them, which made the
%%  accumulated list improper and crashed lists:keytake/3 in the caller.
getGroups([{"user-agent",UAs} | Rest]) ->
	{Remaining, SubPerms} = getNonUALines(Rest, []),
	[{UAs, SubPerms} | getGroups(Remaining)];
getGroups([_Stray | Rest]) ->
	?DEBUG("skipping pair outside a user-agent group\n"),
	getGroups(Rest);
getGroups([]) ->
	[].
%%  getNonUALines([Pair],[Pair]) -> {[Pair],[Pair]}
%%  Accumulates the allow/disallow pairs up to (but excluding) the next
%%  "user-agent" pair; returns {RemainingPairs, Rules}. A value-less
%%  {"allow"} / {"disallow"} 1-tuple is normalised to {Key, ""} so every
%%  rule has the {Key, Path} shape downstream code (and test_getGroups/0)
%%  expects. The accumulator is built by prepending and reversed at the
%%  end instead of the previous O(n^2) Acc ++ [First].
getNonUALines([{"user-agent",_} | _] = Remaining, Acc) ->
	{Remaining, lists:reverse(Acc)};
getNonUALines([{"allow",_} = Rule | Rest], Acc) ->
	getNonUALines(Rest, [Rule | Acc]);
getNonUALines([{"disallow",_} = Rule | Rest], Acc) ->
	getNonUALines(Rest, [Rule | Acc]);
getNonUALines([{"allow"} | Rest], Acc) ->
	getNonUALines(Rest, [{"allow",""} | Acc]);
getNonUALines([{"disallow"} | Rest], Acc) ->
	getNonUALines(Rest, [{"disallow",""} | Acc]);
getNonUALines([_Other | Rest], Acc) ->
	%% Unknown keys (crawl-delay, sitemap, ...) are ignored.
	getNonUALines(Rest, Acc);
getNonUALines([], Acc) ->
	{[], lists:reverse(Acc)}.

%%  evalRules([Rule], string()) -> bool()
%%  Applies the rules to FileUrl: the matching rule with the longest
%%  (most specific) path wins; "allow" -> true, "disallow" -> false.
%%  No matching rule means crawling is allowed. Requires the robots
%%  server name in the process dictionary (set by checkrobots_txt/1).
evalRules(Rules, FileUrl) ->
	Server = get(robots_server),
	%% Keep only proper {Key, Path} rules whose absolute prefix matches
	%% FileUrl. Malformed entries (e.g. bare 1-tuples) are dropped: the
	%% previous filter let them through with `({_}) -> true`, which then
	%% crashed the sort fun and the final case with function_clause.
	Matches = lists:filter(
		fun({_,Path}) -> lists:prefix("http://"++Server++Path, FileUrl);
		   (_) -> false
		end, Rules),
	%% Longest path first; lists:sort/2 is stable, so rule order breaks ties.
	ByPathLen = fun({_,P1},{_,P2}) -> length(P1) >= length(P2) end,
	case lists:sort(ByPathLen, Matches) of
		[{"allow",Path}|_] ->
			?DEBUG("{allow, \""++Path++"\"}"),
			true;
		[{"disallow",""}|_] ->
			%% An empty "Disallow:" value disallows nothing by the
			%% robots.txt convention.
			?DEBUG("{allow, empty_disallow}"),
			true;
		[{"disallow",Path}|_] ->
			?DEBUG("{disallow, \""++Path++"\"}"),
			false;
		[] ->
			?DEBUG("{allow, no_matches}"),
			true
	end.

%%%%%%%%%%% Testing functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% Verifies that getGroups/1 builds one {UserAgent, Rules} entry per
%% "user-agent" pair and attaches the following allow/disallow pairs to it.
%% NOTE(review): the expected output turns the bare {"disallow"} input into
%% {"disallow",""} — getNonUALines/2 must normalise value-less rules to
%% 2-tuples for this assertion to hold; confirm it does.
test_getGroups() ->
	Pairs = [{"user-agent","*"},
			 {"disallow"},
			 {"disallow","/path1/"},
			 {"user-agent","googlebot"},
			 {"allow","/thispath/"}],
	[{"*", [ {"disallow",""},
			 {"disallow","/path1/"}]},
	 {"googlebot", [{"allow","/thispath/"}]}]=getGroups(Pairs),
	 
	 ok.

%% getNonUALines/2 must stop at the next "user-agent" pair (returning it
%% with the remainder), keep allow/disallow rules in order, and drop
%% unknown keys such as "ignore".
test_getNonUALines() ->
	Rules = [{"allow","path"},{"disallow","path"},{"ignore","this"}],
	Agent = {"user-agent","agent smith"},
	Expected = [{"allow","path"},{"disallow","path"}],
	{[Agent], Expected} = getNonUALines(Rules ++ [Agent], []),
	{[], Expected} = getNonUALines(Rules, []),
	ok.

%% evalRules/2 must honour the most specific (longest-path) matching rule,
%% defaulting to "allow" when nothing matches. The robots server is staged
%% in the process dictionary and restored afterwards.
test_evalRules() ->
	Previous = put(robots_server, "www.google.com"),

	Rules = [{"allow", "/path/thats/longer/"}, {"disallow","/path/"}],

	%% Only the shorter "disallow" prefix matches -> blocked.
	false = evalRules(Rules, "http://www.google.com/path/thats/fail.html"),
	%% Both prefixes match; the longer "allow" wins -> crawlable.
	true = evalRules(Rules, "http://www.google.com/path/thats/longer/win.html"),
	%% No rule matches at all -> crawlable by default.
	true = evalRules(Rules, "http://www.google.com/"),

	put(robots_server, Previous),

	ok.
	
%% Runs every unit test in this module; ?TEST prints each call's source
%% text alongside its result.
test() ->
	?TEST(test_getGroups()),
	?TEST(test_getNonUALines()),
	?TEST(test_evalRules()),
	ok.