# Class to implement a generic search engine
=begin
Copyright (C) 2014 Enrique Rando

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

http://www.gnu.org/licenses/gpl-2.0.html
=end



require File.expand_path(File.dirname(__FILE__) + '/My_HTTP_client.rb')
require 'uri'

class Search_engine
	# Bot used by the search engine when visiting pages
	attr_reader :search_engine_bot
	
	#Object creation
	#	Parameters:
	#		protocol	protocol to use: :http, :https	
	#		server		server to connect to
	#		port		port to use (:default for default port)
	#		path		path used for searches
	#		query_parameter	parameter used to give query to search engine
	#
	#		regexps		regular expressions for result parsing.
	#					Hash with these keys
	#						:begin_result
	#						:end_results
	#						:document_type
	#						:title
	#						:url
	#						:summary
	#						:cache
	#						:translate
	#						:replaces  ----> Search and replace before recognition
	#							Hash of search => replace
	#
	#		paging		Paging info
	#					Hash with these keys
	#						:parameter --------> parameter for page number
	#						:parameter_method
	#
	#						:size_parameter -----> parameter for page size
	#						:size_parameter_method
	#						:size_parameter_value
	#						:size ---------------> page size as integer
	#
	#		base_get_parameters
	#		base_post_parameters	other parameters and headers to include in all requests
	#		headers				
	#
	#		user_agent		User agent to use in requests
	#		search_engine_bot	Bot used by the search engine when visiting pages
	#
	#		max_requests	maximum number of requests
	#
	#		request_method	Method to use in requests: :get or :post 
	#		update_cookies	true if cookies are to be updated with response data
	#		query_parameter_method	Method used for query parameter
	#
	#		proxy			Proxy name or address. Use nil for no proxy.
	#						By default (:ENV), Ruby 2.1 uses HTTP_PROXY environment variable to set the proxy
	#		proxy_port		Proxy port
	#		proxy_user		Proxy username, if required
	#		proxy_password	Proxy password, if required.
	#		max_redirs		maximum number of redirections to follow
	def initialize(
					protocol, server, port=:default, path="/", query_parameter="q",
					regexps = {}, paging = {},
					base_get_parameters ={}, base_post_parameters="", headers={},
					user_agent="Search Engine Requester",
					search_engine_bot = nil,
					max_requests=5,
					request_method=:get, update_cookies=false,
					query_parameter_method=:get,					
					proxy=:ENV, proxy_port=nil, proxy_user=nil, proxy_password=nil,
					max_redirs=5)
		# Limits on outgoing traffic
		@max_redirs = max_redirs
		@max_requests = max_requests

		@search_engine_bot = search_engine_bot

		# Connection data
		@protocol = protocol
		@server = server
		@port = port
		@user_agent = user_agent
		@update_cookies = update_cookies

		# Set connection to server (this also creates @client)
		connect_through(proxy, proxy_port, proxy_user, proxy_password)

		# URL info
		@path = path
		@request_method = request_method

		# Parameter to use for queries
		@query_parameter = query_parameter
		@query_parameter_method = query_parameter_method

		# GET, POST and Headers for all requests
		@base_get_parameters = base_get_parameters
		@base_post_parameters = base_post_parameters
		@headers = headers

		# Regular expressions: these four keys are mandatory for result parsing
		[:begin_result, :end_results, :title, :url].each do |x| 
			raise "Missing Regexp Key: #{x}\n" unless regexps.has_key?(x)
		end

		@regexp_begin_result = regexps[:begin_result]
		@regexp_end_results = regexps[:end_results]
		@regexp_document_type = regexps[:document_type]
		@regexp_title = regexps[:title]
		@regexp_url = regexps[:url]
		@regexp_summary = regexps[:summary]
		@regexp_cache = regexps[:cache]
		@regexp_translate = regexps[:translate]
		@regexp_replaces = regexps[:replaces]

		# Paging: every key is mandatory
		[:parameter, :parameter_method, :size_parameter, :size_parameter_method, :size_parameter_value, :size].each do |x| 
			raise "Missing Paging Key: #{x}\n" unless paging.has_key?(x)
		end

		# Parameter used to request a given page
		@page_parameter = paging[:parameter]
		@page_parameter_method = paging[:parameter_method]
		
		# Parameters for page size, sent with the method the caller chose
		@page_size = paging[:size]
		@page_size_parameter = paging[:size_parameter]
		case paging[:size_parameter_method]
			when :get
				@base_get_parameters[paging[:size_parameter]] = paging[:size_parameter_value]
			when :post
				@base_post_parameters[paging[:size_parameter]] = paging[:size_parameter_value]
			when :cookie
				@headers['Cookie'] = paging[:size_parameter] + "=" + paging[:size_parameter_value]
			else
				raise "Unsupported method for page size: " + paging[:size_parameter_method].to_s
		end

		# No deferred search engine until set_deferred is called
		@deferred = nil
	end

	# Establish connection
	# Parameters
	#		proxy			Proxy name or address. Use nil for no proxy.
	#						By default (:ENV), Ruby 2.1 uses HTTP_PROXY environment variable to set the proxy
	#		proxy_port		Proxy port
	#		proxy_user		Proxy username, if required
	#		proxy_password	Proxy password, if required.
	def connect_through(proxy=:ENV, proxy_port=nil, proxy_user=nil, proxy_password=nil)
		# Keep the proxy settings for later reference
		@proxy = proxy
		@proxy_port = proxy_port
		@proxy_user = proxy_user
		@proxy_password = proxy_password

		# Build a fresh client and point it at the configured server
		@client = My_HTTP_client.new(@user_agent, proxy, proxy_port, proxy_user, proxy_password)
		@client.set_server(@protocol, @server, @port, @update_cookies)

		# Propagate the new proxy settings to the deferred engine, if one is set
		@deferred.connect_through(proxy, proxy_port, proxy_user, proxy_password) if @deferred
	end

		
	# Set a Deferred search engine data object
	#	that will be used in real time to retrieve URLs related to the one being processed
	#		(translations, cache, ...)
	# Parameter: deferred		The deferred data object
	def set_deferred(deferred)
		# Simply remember the object; connect_through keeps it in sync later
		@deferred = deferred
	end
	
	# Request deferred URL info
	# Parameters
	#	type	data to generate: :translate, :cache, ...
	#	result	BlackSEO result data
	#	only_one	true if only one result is needed
	# Returns whatever the deferred engine produces, or nil when none is configured
	def deferred_info(type, result, only_one=false)
		@deferred.nil? ? nil : @deferred.get_info(type, result, only_one)
	end
	
	# Request a SERP for a query
	# Parameters:
	#	query	Query to submit
	#	page	Page number (1-based)
	# Returns an array of result hashes (see process) or nil if no response arrived
	def request(query, page=1)
		# Set server
		@client.set_server(@protocol, @server, @port, @update_cookies)

		# Prepare get and post parameters (clone so the shared base hashes stay untouched)
		get_parameters = @base_get_parameters.clone
		post_parameters = @base_post_parameters.clone

		# Place the query in the parameter set chosen at construction time
		if @query_parameter_method === :post
			post_parameters[@query_parameter] = query
		else
			get_parameters[@query_parameter] = query
		end

		# Same for the paging parameter
		if @page_parameter_method === :post
			post_parameters[@page_parameter] = compute_page_start(page)
		else
			get_parameters[@page_parameter] = compute_page_start(page)
		end

		# URL that would be made for the request (used as referer in the results)
		referer = URI.parse("")
		referer.scheme = @protocol.to_s
		referer.host = @server
		referer.port = @port if @port != :default
		referer.path = @path
		referer.query = URI.encode_www_form(get_parameters)
		referer = referer.to_s


		# Make request to search engine, following redirs if required
		redirs = 0
		finished = false
		path = @path
		while not finished
			# BUGFIX: this used to test "@method", an instance variable that is never
			# assigned anywhere, so POST search engines were always queried with GET.
			response = @request_method === :post ? 
					@client.post(path, get_parameters, post_parameters, @headers)
					:
					@client.get(path, get_parameters, @headers)

			if response.is_a?(Net::HTTPRedirection)
				redirs += 1
				uri = URI.parse(response["location"])
				@client.set_server(uri.scheme.to_sym, uri.host, uri.port, @update_cookies)
				path = uri.path
				# A redirect target may carry no query string at all;
				# URI.decode_www_form(nil) would raise, so guard against it
				get_parameters = uri.query.nil? ? {} : Hash[URI.decode_www_form(uri.query)]
				finished = true if redirs > @max_redirs
			else
				finished = true
			end
		end

		return response.nil? ? nil : process(response, referer)
	end

	# Requests all data for a query
	# Parameters
	#	query	query to submit
	#	pause	make pause before each request (seconds)
	#	max_results	stop if more than this number of results are retrieved
	# Returns all results, sorted by URL and with duplicated URLs removed
	def request_all(query, pause=1, max_results=1000)
		# Accumulated output
		results = []

		# URLs seen so far, used to detect pages that bring nothing new
		seen_urls = []
		seen_count = 0

		page = 1
		loop do
			sleep(pause)

			# Fetch one SERP; a nil answer means we are done
			partial = request(query, page)
			break if partial.nil?

			# Stop as soon as a page adds no previously-unseen URL
			seen_urls.concat(partial.collect { |r| r[:url] }).uniq!
			break if seen_urls.size == seen_count
			seen_count = seen_urls.size

			# Add new results to output data
			results.concat(partial)

			# Honor the result and request limits
			break if seen_count >= max_results
			break if page >= @max_requests

			# Prepare for next page
			page += 1
		end

		# Sort results and remove duplicated URLs
		results.sort! { |a, b| a[:url] <=> b[:url] }
		results.uniq! { |r| r[:url] }

		results
	end
private

	# Gets data out of the response
	# Parameters:
	#	response	response from the search engine
	#	referer		url to include as referer
	# Returns an array of result hashes (possibly empty) with keys
	#	:referer, :document_type, :title, :url, :summary, :cache,
	#	:translate, :bot_user_agent and :s_e_object
	def process(response, referer)
		# Data to return
		output = []

		# Get response text and make replacements
		# (the optional :replaces map rewrites the raw body before any
		# recognition regexp is applied)
		text = response.body.to_s
		if @regexp_replaces.is_a?(Hash)
			@regexp_replaces.each do |x,y|
				text = text.gsub(x,y)
			end
		end

		# Find out where first result starts and delete previous text
		match = text.match(@regexp_begin_result)
		return [] if match.nil?
		text = match[0] + match.post_match

		# and remove text after results (the :end_results marker is optional)
		match = text.match(@regexp_end_results)
		if not match.nil? 
			text = match.pre_match
		end

		# Split text into results: each result spans from one match of
		# :begin_result up to (not including) the next one, or to the end of text
		while not text.empty?
			# Get offset to discard first result beginning
			match1 = text.match(@regexp_begin_result)
			return output if match1.nil?
			# ... and look for the second
			remaining_text = match1.post_match
			match2 = remaining_text.match(@regexp_begin_result)

			# Extract result from text; keep the remainder (starting at the
			# next result marker) for the following iteration
			result = match1[0] + (match2.nil? ? remaining_text : match2.pre_match)
			text = match2.nil? ? "" : match2[0] + match2.post_match

			# Extract each field with its configured regexp; optional fields
			# come back as nil when no regexp was configured or nothing matched
			output << {
				:referer => referer,
				:document_type => get_document_type(result),
				:title => get_title(result),
				:url => get_url(result),
				:summary => get_summary(result),
				:cache => get_cache(result),
				:translate => get_translate(result),
				:bot_user_agent => @search_engine_bot,
				:s_e_object => self
			}
		end

		return output
	end

	# Field extractors: each one applies the matching configured regexp
	# to a single result's text (nil when no regexp was configured)
	def get_document_type(result)
		get_result_data(result, @regexp_document_type)
	end

	def get_title(result)
		get_result_data(result, @regexp_title)
	end

	def get_url(result)
		get_result_data(result, @regexp_url)
	end

	def get_summary(result)
		get_result_data(result, @regexp_summary)
	end

	def get_cache(result)
		get_result_data(result, @regexp_cache)
	end

	# Translation info is wrapped as a one-element array of URL-info hashes
	def get_translate(result)
		[{ :url => get_result_data(result, @regexp_translate), :referer => nil }]
	end

	# Extracts data from the result
	# Parameters
	# 	result: the result text
	#	regexp: regexp to match data; it must define a named group "data"
	# Returns the captured "data" text (with binary encoding), or nil when
	# there is no regexp, no match, or no "data" capture group
	def get_result_data(result, regexp)
		return nil if regexp.nil?
		match = result.match(regexp)
		if match.nil? || !match.names.include?("data")
			nil
		else
			match[:data].force_encoding("ascii-8bit")
		end
	end

	# Offset of the first result on the given (1-based) page,
	# derived from the configured page size
	def compute_page_start(page_number)
		(page_number - 1) * @page_size
	end
end


