require 'elasticsearch'

# Very early stage of a friendly wrapper around the Elasticsearch client

class EsWrapSearch
  # Incrementally builds an ES 1.x-style "filtered" bool query: callers add
  # must / should / must_not clauses and aggregations, and the request body
  # is regenerated after every mutation.

  # host/port locate the Elasticsearch node; tflog toggles request logging
  # on the underlying client.
  def initialize(host = 'localhost', port = 9200, tflog = false)
    # Instance variable (was a class variable @@eclient, which made every
    # instance share — and clobber — a single client connection).
    @eclient = Elasticsearch::Client.new log: tflog, host: "http://#{host}:#{port}"
    @lastresult = ''
    reset
  end

  def version
    '0.0.2'
  end

  # Clear all query state back to an empty query, then rebuild the body.
  def reset
    @size = 0
    @aggs = {}
    @must_values = []
    @must_not_values = []
    @should_values = []
    update
  end

  # Rebuild @body from the current size, clause arrays and aggregations.
  # Single source of truth for the body shape (previously copy-pasted in
  # initialize and reset as well).
  def update
    @body = {
      'size' => @size,
      'query' => {
        'filtered' => {
          'filter' => {
            'bool' => {
              'must' => @must_values,
              'should' => @should_values,
              'must_not' => @must_not_values
            }
          }
        }
      },
      'aggs' => @aggs
    }
  end

  # Set amount of source documents to be returned.
  def set_size(size)
    @size = size
    update
  end

  # Get amount of source documents to be returned.
  def get_size
    @size
  end

  # What the current query body looks like.
  def get_body
    @body
  end

  # Bool filter: field must match value.
  def must_match(field, value)
    @must_values << { 'query' => { 'match' => { field => value } } }
    update
  end

  # Bool filter: field must not match value.
  def must_not_match(field, value)
    @must_not_values << { 'query' => { 'match' => { field => value } } }
    update
  end

  # Bool filter: field should match value.
  def should_match(field, value)
    @should_values << { 'query' => { 'match' => { field => value } } }
    update
  end

  # Bool filter: field must lie in [from_value, to_value] (inclusive).
  def must_range(field, from_value, to_value)
    @must_values << { 'range' => { field => { 'gte' => from_value, 'lte' => to_value } } }
    update
  end

  # Bool filter: field must not lie in [from_value, to_value] (inclusive).
  def must_not_range(field, from_value, to_value)
    @must_not_values << { 'range' => { field => { 'gte' => from_value, 'lte' => to_value } } }
    update
  end

  # Bool filter: field should lie in [from_value, to_value] (inclusive).
  def should_range(field, from_value, to_value)
    @should_values << { 'range' => { field => { 'gte' => from_value, 'lte' => to_value } } }
    update
  end

  # Execute the search; returns and caches the complete result set
  # ("ignoring type atm").
  def search(index, type = '')
    @lastresult = @eclient.search index: index, type: type, body: @body
  end

  # Returns the last result set.
  def last_result
    @lastresult
  end

  # Returns the source documents of the last result.
  def get_source
    @lastresult['hits']['hits']
  end

  # Private helper to recursively build nested aggregations.
  # Each spec hash supports "field", "type" and optionally "interval" or
  # "script"; entry i+1 becomes a sub-bucket ("aggs") of entry i.
  def build_agg(inaggs)
    spec = inaggs.first
    inner =
      if spec['interval']
        { 'field' => spec['field'], 'interval' => spec['interval'] }
      elsif spec['script']
        # BUGFIX: the original compared a hash literal to nil (always false),
        # so this scripted branch was unreachable and scripts were silently
        # dropped in favour of a plain field aggregation.
        { 'script' => spec['script'], 'lang' => 'expression' }
      else
        { 'field' => spec['field'] }
      end
    agg = { spec['type'] => inner }
    rest = inaggs.drop(1)
    agg['aggs'] = build_agg(rest) unless rest.empty?
    { spec['field'] => agg }
  end
  private :build_agg

  # Add aggregation method.
  # Takes an array of hashes with field name and the type of aggregation. Very much like a group by in SQL.
  # Example: obj.set_aggs( [ {"field" => "datatype", "type" =>  "terms" },{"field" => "totalbytes", "type" => "sum", "script" => "doc['totalbytes'].value/1024/1024/1024"} ] )
  # This array creates a bucket that will group by all different terms in the field "datatype" and then sub-bucket into a bucket for the field "totalbytes" and sum it with a script that converts it into GB.
  # If the result needs no scripting, don't include it in the aggs.
  # Example: obj.set_aggs( [ {"field" => "datatype", "type" =>  "terms" },{"field" => "totalbytes", "type" => "sum"} ] )
  def set_aggs(inaggs)
    # Delegating to build_agg fixes two defects of the previously inlined
    # version: the hash-literal-vs-nil script bug, and the missing
    # "interval" handling for a single-entry spec.
    @aggs = build_agg(inaggs)
    update
  end

  # Returns the buckets from the last query.
  def get_aggs
    @lastresult['aggregations']
  end

end