# Class for Search engine and server response analysis

=begin
Copyright (C) 2014 Enrique Rando

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

http://www.gnu.org/licenses/gpl-2.0.html
=end


require File.expand_path(File.dirname(__FILE__) + '/My_HTTP_client.rb')
require File.expand_path(File.dirname(__FILE__) + '/Report_generator.rb')

class BlackSEO_detector
	# Object creation
	#	Parameters:
	#		search_engines	Hash of Search_engine_name => Search_engine_object         Search_engine_object s will be used for making searches
	#		user_agent		User-Agent to use in requests
	#		proxy			Proxy name or address. Use nil for no proxy.
	#						By default, Ruby 2.1 uses HTTP_PROXY environment variable to set the proxy
	#		proxy_port		Proxy port
	#		proxy_user		Proxy username, if required
	#		proxy_password	Proxy password, if required.
	#		verbose			Whether to show messages about the process
	def initialize(search_engines, user_agent="BlackSEO-detector", proxy=:ENV, proxy_port=nil, proxy_user=nil, proxy_password=nil, verbose=true)
		@verbose = verbose
		@client = My_HTTP_client.new(user_agent, proxy, proxy_port, proxy_user, proxy_password)
		# Connection parameters are kept so they can be reused for later requests
		@user_agent = user_agent
		@proxy = proxy
		@proxy_port = proxy_port
		@proxy_user = proxy_user
		@proxy_password = proxy_password
		@search_engines = search_engines
	end


	# Searches using the engines available and tests for potential problems
	# Parameters
	#	queries		queries to submit to search engines. Hash search_engine => query
	#	text		text pointing to problems. String or string array
	#	report_generator	object to generate report
	#	report_name		name for the report
	#	hard_check	keep checking even if a problem was already found
	#	max_redirects	Maximum number of allowed redirections to follow
	#	valid_domains	domains to show in report and allow redirection to
	#	ignore_files	array of regexp for files to skip
	#	max_report, max_host, max_folder 	Max number of results to investigate
	# Returns the number of problems found in the whole report
	def investigate(queries, text, report_generator, report_name='Test', hard_check=false, 
			max_redirects=5, valid_domains=[], ignore_files=[], 
			max_report=20, max_host=12, max_folder=5)
		# Retrieve results from search engines
		print "Making search engine queries\n" if @verbose
		results = request_all(queries)
		# Sorting by URL groups results of the same host/folder together,
		# which is what the report_state state machine relies on
		urls = results.keys.sort

		# Initial state
		state = report_state

		# Start generating report
		report_generator.begin_test(report_name)

		# Process results
		urls.each do |url_to_process|
			url = My_HTTP_client.repair_url(url_to_process)

			# Skip non valid domains without including them in the report
			unless url_in_valid_domain(url, valid_domains)
				print "Ignoring URL #{url}. Domain outside of report\n" if @verbose
				next
			end

			print "Processing URL #{url}\n" if @verbose

			bsd_result = results[url_to_process]

			#Update state
			state = report_state(state, url, report_generator)

			# Decide whether or not documents must be downloaded,
			# keeping the most specific reason for skipping
			download = true
			reason = nil
			if state[:problems_in_report] >= max_report
				download = false
				reason = :max_problems_report
			end

			if state[:problems_in_host] >= max_host
				download = false
				reason = :max_problems_host
			end

			if state[:problems_in_folder] >= max_folder
				download = false
				reason = :max_problems_folder
			end

			# Same file as before and already flagged: no need to download again
			if not state[:changed] and state[:current_file_detection]
				download = false
				reason = :problems_already_detected
			end

			# Skip non supported protocols
			url_scheme = URI.parse(url).scheme
			unless ["http", "https"].include?(url_scheme)
				download = false
				reason = {
					:reason => :analysis_omitted_protocol, 
					:params => {:protocol =>url_scheme}
				}
			end

			# Skip PDF, DOC, etc.
			# The first search engine that reported a document type wins
			document_type = nil
			bsd_result.each do |s_e, data_result|
				unless data_result[:document_type].nil?
					document_type = data_result[:document_type].to_s.upcase
					break
				end
			end
			unless [nil, "TXT", "TEXT"].include?(document_type)
				download = false
				reason = {
					:reason => :analysis_omitted_type, 
					:params => {:document_type =>document_type}
				}
			end



			# Skip 'to ignore' files
			ignore_file = false
			regexp = ""
			ignore_files.each do |re|
				if re.match(url)
					ignore_file = true
					regexp = re.source
					break
				end
			end
			if ignore_file
				report_generator.not_tested(url, bsd_result, "[[analysis_omitted_regexp]] #{regexp}")
				next
			end

			# Analyse result
			analysis = investigate_result(url, bsd_result, text, hard_check, valid_domains, max_redirects, download, reason)

			# Determine whether there is a problem in any of the tests performed
			problem_in_analysis = false
			analysis.each do |name, data|
				if data[:problem_found]
					problem_in_analysis = true
					break
				end
			end

			# Add info to report and keep counting
			if problem_in_analysis
				report_generator.problem(url, bsd_result, analysis, text)
				state[:problems_in_folder] += 1
				state[:problems_in_host] += 1
				state[:problems_in_report] += 1
				state[:current_file_detection] = true
			else
				report_generator.maybe_ok(url, bsd_result)
			end
		end

		# End the report
		report_generator.end_test

		print "Analysis completed\n" if @verbose

		# Return number of problems found
		return state[:problems_in_report]
	end


private
	# Checks if an URL belongs to any of a list of domains
	# Parameters
	#	url				The URL
	#	valid_domains	Array of domain names
	def url_in_valid_domain(url, valid_domains)
		valid_domains.each do |valid_domain|
			return true if url_in_domain(url, valid_domain)
		end

		return false
	end

	# Checks if a URL belongs to a domain (exact host match or subdomain)
	def url_in_domain(url, domain)
		# Get URL host
		uri = URI.parse(url.to_s)
		return false if uri.nil?
		return false if uri.host.nil?
		host = uri.host

		# Check if host is in domain
		return true if host == domain
		return true if host.end_with?(".#{domain}")

		return false
	end

	# Checks if a URL is in the same domain that another one
	def url_in_same_domain(url, compare_with)	
		# Get compare_with host
		uri = URI.parse(compare_with.to_s)
		return false if uri.nil?
		return false if uri.host.nil?
		host = uri.host

		# Try compare_with host as domain
		return true if url_in_domain(url, host)

		# Try removing first component of compare_with
		# (e.g. "www.example.com" -> "example.com"); only when at least
		# two components would remain
		return false if host.count(".") < 2
		domain = host.sub(/^.*?\./,"")
		return true if url_in_domain(url, domain)

		return false
	end

	# Manages report_state: tracks the current server/folder/file and the
	# per-scope problem counters, emitting begin/end section calls on the
	# report generator whenever the scope changes
	# Parameters
	#	state	The previous state
	#	url		The url	empty for initial state
	#	report_generator 	The report generator object
	def report_state(state=nil, url=nil, report_generator=nil)
		# With no URL, return a fresh initial state
		return {
			:server => nil, :folder => nil, :file => nil,
			:problems_in_report=>0, :problems_in_host=>0, :problems_in_folder=>0,
			:current_file_detection=>false,
			:changed=>false
		} if url.nil?

		# Parse URL
		uri = URI.parse(url)

		#URL host
		server = uri.scheme + "://" + uri.host
		if uri.port
			server += (":" + uri.port.to_s)
		end

		# URL path
		filepath = uri.path
		if filepath.nil? or filepath == ""
			filepath = "/"
		end

		# URL folder and file
		match = filepath.match(/^(?<folder>.*\/)(?<file>[^\/]*)$/)		
		folder = match[:folder]
		file = match[:file]


		# Process change of host, folder and file
		state[:changed] = false

		if server != state[:server]     #New server
			# Close host section if needed and open new one
			report_generator.end_host unless state[:server].nil?
			report_generator.begin_host(server)

			state[:server] = server
			state[:problems_in_host] = 0
			state[:folder] = nil
			state[:file] = nil

			state[:changed] = true			
		end

		if folder != state[:folder] #New folder
			report_generator.end_folder unless state[:folder].nil?
			report_generator.begin_folder(folder)

			state[:folder] = folder
			state[:problems_in_folder] = 0
			state[:file] = nil

			state[:changed] = true						
		end

		if file != state[:file]		#New file
			report_generator.end_file unless state[:file].nil?
			report_generator.begin_file(file)

			state[:file] = file
			state[:current_file_detection] = false

			state[:changed] = true
		end

		return state
	end


	# Retrieves results from all search engines
	# Parameter queries		queries to submit to search engines. Array of hashes {search_engine => query}
	# Returns a hash url => {search_engine_name => result}
	def request_all(queries)
		# Data to return
		all_results = {}

		# Process each query
		queries.each do |query_data|
			query_data.each do |s_e_name, query|
				print "   #{s_e_name}: #{query}\n" if @verbose

				# Get Search Engine for the request
				s_e = @search_engines[s_e_name]
				raise("Invalid Search Engine Name: #{s_e_name}") if s_e.nil?

				# Make the search engine work
				results = s_e.request_all(query)
				print "      #{results.count} results\n" if @verbose

				results.each do |result|
					url = result[:url]

					# Add each result to output, merging results for URLs
					# returned by more than one search engine
					if not all_results.has_key?(url)
						all_results[url] = {s_e_name => result}
					else
						all_results[url][s_e_name] = result
					end			
				end
			end
		end

		return all_results
	end



	# Investigate a result
	# Parameters
	#	url			URL to check
	#	bsd_result		BSD_result info
	#	text		text that points to problems
	#	hard_check	keep checking even if a problem was already found
	#	valid_domains	Domains to allow redirection to
	#	max_redirects	Maximum number of allowed redirections to follow
	#	download		Whether documents must be downloaded or not
	#	reason			Reason to not to download documents
	# Returns a hash test_name => test_result (see check_response for the
	# structure of each test_result)
	def investigate_result(url, bsd_result, text, hard_check=false, valid_domains=[], max_redirects=5, download=true, reason="")		
		# Test result
		problem_found = false
		output = {}
		result_description = []

		test_name = :url_index_title
		found_in_titles_summaries = false	

		# URL analysis
		url_problem_found, url_result_description = check_url(url, text)
		found_in_titles_summaries = true if url_problem_found
		result_description << prepare_for_report(:url, {}, :section)
		result_description = result_description + url_result_description

		#  Titles
		result_description << prepare_for_report("")
		result_description << prepare_for_report(:titles, {}, :section)
		already_seen = {}

		table_headers = [:search_engine, :title]
		table_data = []
		table_levels = []
		bsd_result.each do |search_engine, data|
			# Skip duplicates
			already_seen[search_engine] = [] if not already_seen.has_key?(search_engine)
			next if already_seen[search_engine].include?(data[:title].to_s)
			already_seen[search_engine] << data[:title].to_s

			# Detect text in title (case-insensitive)
			title_has_text = false
			text.each do |t|
				if data[:title].to_s.upcase.include?(t.upcase)
					found_in_titles_summaries = true
					title_has_text = true
					break
				end
			end

			table_data << [search_engine, data[:title]]
			table_levels << (title_has_text ? :alert : :info)
		end
		result_description << prepare_for_report(table_data, {}, table_levels, table_headers)


		table_headers = [:search_engine, :summary]
		table_data = []
		table_levels = []		
		#  Summaries
		result_description << prepare_for_report("")
		result_description << prepare_for_report(:summaries, {}, :section)
		already_seen = {}
		bsd_result.each do |search_engine, data|
			# Skip duplicates
			already_seen[search_engine] = [] if not already_seen.has_key?(search_engine)
			next if already_seen[search_engine].include?(data[:summary].to_s)
			already_seen[search_engine] << data[:summary].to_s

			# Detect text in summary (case-insensitive)
			summary_has_text = false
			text.each do |t|
				if data[:summary].to_s.upcase.include?(t.upcase)
					found_in_titles_summaries = true
					summary_has_text = true
					break
				end
			end

			table_data << [search_engine, data[:summary]]
			table_levels << (summary_has_text ? :alert : :info)			
		end
		result_description << prepare_for_report(table_data, {}, table_levels, table_headers)

		output[test_name] = {
			:problem_found => found_in_titles_summaries,
			:description => result_description,
			:response => nil,
			:external_redirection => false,
			:redirection_to_authorized => false,
			:text_found => found_in_titles_summaries
		}

		# Download and generate info or add message about why documents are not downloaded
		if not download
			output[test_name][:description] << prepare_for_report("")
			if reason.is_a?(Hash) 
				output[test_name][:description] << prepare_for_report("[[no_files_downloaded]]: [[#{reason[:reason].to_s}]]", reason[:params])
			else
				output[test_name][:description] << prepare_for_report("[[no_files_downloaded]]: [[#{reason}]]")			
			end
		else
			# Test URL downloaded the regular way
			test_name = "[[regular_access]]"
			print "   #{test_name}\n" if @verbose
			test_result = check_response(url, text, test_name, @user_agent, nil, true, valid_domains, max_redirects)
			problem_found = true if test_result[:problem_found]
			output[test_name] = test_result

			# Try with Search Engine User Agent
			# (cloaked servers may behave differently when they believe
			# they are being visited by a search engine bot)
			if not problem_found or hard_check
				bsd_result.each do |search_engine, data| 
					next if data[:bot_user_agent].nil?
					test_name = "[[regular_access]] [[using_user_agent_of]] #{search_engine}"
					print "   #{test_name}\n" if @verbose
					test_result = check_response(url, text, test_name, data[:bot_user_agent], nil, true, valid_domains, max_redirects)
					problem_found = true if test_result[:problem_found]
					output[test_name] = test_result
				end
			end

			# Send search engine referer
			# (some malicious pages only misbehave when the visit comes
			# from a search engine results page)
			if not problem_found or hard_check
				bsd_result.each do |search_engine, data| 
					next if data[:referer].nil?
					test_name = "[[regular_access]] [[forging_referer_of]] #{search_engine}"
					print "   #{test_name}\n" if @verbose
					test_result = check_response(url, text, test_name, @user_agent, data[:referer], true, valid_domains, max_redirects)
					problem_found = true if test_result[:problem_found]
					output[test_name] = test_result
				end
			end		

			# Try to retrieve cache
			if not problem_found or hard_check
				bsd_result.each do |search_engine, data|
					#Check for deferred URL data if needed
					if data[:cache].nil?
						cache_data = data[:s_e_object].deferred_info(:cache, data)
						next if cache_data.nil?

						# Process only first element
						cache_data = cache_data[0]
						cache_referer = cache_data[:referer].nil? ? data[:referer] : cache_data[:referer]
					else
						cache_data = data[:cache]
						cache_referer = data[:referer]
					end

					test_name = "[[cache_from]] #{search_engine}"
					print "   #{test_name}\n" if @verbose
					test_result = check_response(cache_data, text, test_name, @user_agent, cache_referer, false, valid_domains)
					problem_found = true if test_result[:problem_found]
					output[test_name] = test_result
				end

				# Try to retrieve translation too
				bsd_result.each do |search_engine, data| 
					#Check for deferred URL data if needed
					if data[:translate].nil?
						translate_data = data[:s_e_object].deferred_info(:translate, data)
						next if translate_data.nil?
					else
						translate_data = data[:translate]
					end

					test_name = "[[translate_using]] #{search_engine}"
					print "   #{test_name}\n" if @verbose

					translate_data = [translate_data] if not translate_data.is_a?(Array)

					# One test per translation candidate, numbered (1), (2), ...
					i = 0
					translate_data.each do |test|	
						i += 1
						test_result = check_response(test[:url], text, test_name + " (" + i.to_s + ")", @user_agent, test[:referer], true, valid_domains, max_redirects, @search_engines[search_engine])
						problem_found = true if test_result[:problem_found]
						output[test_name + " (" + i.to_s + ")"] = test_result
					end
				end
			end		
		end

		return output
	end



	# Retrieves a result and checks for problems in the response
	# Parameters
	#	url				url for the result
	#	text			text pointing to problems. String or string array
	#	type			string identifying the test
	#	user_agent		user agent to use
	#	referer			referer to include in http headers
	#	check_redirs	whether to check for redirections
	#	valid_domains	domains to allow redirection to
	#	max_redirects	Maximum number of allowed redirections to follow
	#	translator		Object used to translate URLs
	# Returns a hash with keys :problem_found, :description, :response,
	# :external_redirection, :redirection_to_authorized and :text_found
	def check_response(url, text, type, user_agent=@user_agent, referer=nil, check_redirs=false, valid_domains=[], max_redirects=0, translator=nil)
		# Description of results. Array of text lines		
		result_description = []
		response = nil

		result_description << prepare_for_report(type, {}, :section)
		uri = URI.parse(url)
		if uri.host.nil?
			# Nothing to download without a host
			return {
				:problem_found => false,
				:description => [prepare_for_report(:cannot_download)],
				:response => nil,
				:external_redirection => false,
				:redirection_to_authorized => false,
				:text_found => false
			}
		end

		# Get the domain for the URL (two last parts of the host name)
		# domain = uri.host.sub(/.*\.(?=[^.]+\.[^.]+$)/, "")

		# Prepare data for request(s)
		myheaders = {}
		myheaders["Referer"] = referer if not referer.nil?

		# Initial test status
		problem_found = false
		external_redirection = false
		redirection_to_authorized = false
		text_found = false

		# Get the document, following redirections if required
		# Count redirections
		redirects = 0
		location = url.gsub(" ", "%20")
		while true
			# Make the request
			@client.set_server(uri.scheme.to_sym, uri.host, uri.port)
			@client.change_user_agent(user_agent)
			response = @client.get(uri.request_uri, {}, myheaders)

			if response.nil?
				result_description << prepare_for_report(:cannot_download)
				break
			end

			# Info about the result of the test
			if type.include?("[[regular_access]]")
				result_description << prepare_for_report("[[response_code]]: #{response.code} - #{response.message}")
			end

			# Detect redirections 
			is_redir, redir_type, redir_url = My_HTTP_client.detect_redirection(response, location)
			if check_redirs and is_redir 
					# Follow redirection
					location = redir_url.gsub(" ", "%20")
					print "   #{redir_type.to_s.upcase} redirection found: #{location}\n" if @verbose
					result_description << prepare_for_report("[[redirection_found]]: #{location}")

					# URL analysis of the redirection target
					url_problem_found, url_result_description = check_url(location, text)
					result_description = result_description + url_result_description

					if url_in_valid_domain(location, valid_domains)
						if url_in_same_domain(location, url)
							result_description << prepare_for_report(:redirection_inside_domain, {}, :warning)
							result_description << prepare_for_report(:destination_page_problems, {}, :warning)						
						else
							result_description << prepare_for_report(:authorized_domain, {}, :warning)
							result_description << prepare_for_report(:destination_domain_problems, {}, :warning)						
							redirection_to_authorized = true
						end

						# Follow Redirection if needed, giving up after max_redirects hops
						uri = URI.parse(location)
						redirects += 1
						break if redirects >= max_redirects					
					else
						# Redirection outside the authorized domains: flag as problem
						result_description << prepare_for_report(:malicious_redirection, {}, :alert)
						external_redirection = true
						problem_found = true
						break										
					end
			else
				break
			end

			# When checking translations, each followed location must be
			# re-translated before the next request
			unless translator.nil?
				new_translation = translator.deferred_info(:translate, {:url=>location, :referer=>referer}, true)
				if new_translation.nil?
					result_description << prepare_for_report("[[:no_url_to_translate]] #{location}")
					break
				end

				new_location = new_translation.first[:url]
				referer = new_translation.first[:referer]

				uri = URI.parse(new_location)
			end
		end

		if not response.nil?
			# Detect malicious content: count case-insensitive occurrences
			# of each hint inside the response body
			fulltext = response.body.to_s.upcase
			mytext = text.is_a?(Array) ? text : [text]
			mytext.each do |x|
				hint = x.upcase
				number = fulltext.scan(hint).size
				if number > 0
					result_description << prepare_for_report(:unauthorized_content, {:hint => hint, :number => number}, :alert)
					text_found = true
					problem_found = true
				end
			end
		end

		result_description << prepare_for_report(:no_problems) unless problem_found

		return {
			:problem_found => problem_found,
			:description => result_description,
			:response => response,
			:external_redirection => external_redirection,
			:redirection_to_authorized => redirection_to_authorized,
			:text_found => text_found
		}
	end

	# Looks for problems in the URL itself (host, path, query string and
	# fragment are checked for the suspicious texts)
	# Parameters: url, texts
	# Returns [problem_found, output_lines]
	def check_url(url, texts)
		output = []

		# Parse URL and get components
		uri = URI.parse(url)
		host = uri.host.to_s.downcase
		path = uri.path.to_s.downcase
		query = uri.query.to_s.downcase
		hash = uri.fragment.to_s.downcase
		problem_found = false

		# Look for the text inside components
		texts.each do |t|
			text = t.downcase
			if host.include?(text)
				output << prepare_for_report(:host_name_contains_text, {:host=>host, :text=>text}, :alert)
				output << prepare_for_report(:dns_compromise, {}, :alert)
				output << prepare_for_report("")
				problem_found = true
			end

			if path.include?(text)
				output << prepare_for_report(:path_contains_text, {:path=>path, :text=>text}, :alert)
				output << prepare_for_report(:server_compromise, {}, :alert)
				output << prepare_for_report("")			
				problem_found = true
			end

			if query.include?(text)
				output << prepare_for_report(:query_string_contains_text, {:text=>text}, :warning)
				output << prepare_for_report("")
				problem_found = true
			end

			if hash.include?(text)
				output << prepare_for_report(:hash_contains_text, {}, :warning)
				output << prepare_for_report("")
				problem_found = true
			end
		end

		output << prepare_for_report(:no_problems_in_url) if not problem_found

		return [problem_found, output]
	end

	# Prepares contents for the report
	# Parameters
	#	info		Content to prepare
	#	parameters	Parameters for info translation
	#	level	:info, :ok, :warning, :alert
	#	headers	header for table data
	def prepare_for_report(info, parameters={}, level=:info, headers=nil)
		return {
			:level => level,
			:headers => headers,
			:info => info,
			:parameters => parameters
		}
	end
end


