module Hashish

  # Configures the caller (typically a class) as a Hashish-backed store.
  #
  # options (keys must be Symbols; values Strings or Hashes):
  #   :key        - required; field used as the primary key for stored items
  #   :key_prefix - redis key namespace; defaults to "<redis_namespace>:<ClassName>"
  #   :indexes    - secondary index definitions (default {})
  #   :sorters    - sortable-field definitions (default {})
  #
  # Raises RuntimeError when the options are malformed, no redis connection is
  # configured, or :key is missing. Defaults are written back into the
  # caller-supplied options hash, matching the original behavior.
  def acts_as_hashish(options = {})
    symbol_keys  = options.keys.all? { |k| k.is_a?(Symbol) }
    valid_values = options.values.all? { |v| v.is_a?(String) || v.is_a?(Hash) }
    raise "Keys can only be Symbol, values can only be String or Hash" unless symbol_keys && valid_values
    raise "Cannot act as hashish without a redis connection" unless Hashish.redis_connection
    # Fail fast on the required option before computing any defaults.
    raise "Please specify a primary index via the :key option" unless options[:key]

    # Fill in defaults (mutates the passed hash, as before).
    options[:key_prefix] ||= Hashish.redis_namespace + ':' + self.to_s
    options[:indexes] ||= {}
    options[:sorters] ||= {}

    @prefix = options[:key_prefix]
    @indexes = options[:indexes]
    @sorters = options[:sorters]
    @key = options[:key]

    extend ClassMethods
    extend PrivateClassMethods
  end

  module ClassMethods

    # Rebuilds all index/sorter metadata by re-inserting every record.
    #
    # data - optional Array of item hashes to rebuild from; when nil, a full
    #        snapshot is taken via hashish_list(:page_size => 0) while holding
    #        the rebuild lock (hashish_wait_on_lock is presumably defined in
    #        PrivateClassMethods -- not visible in this chunk).
    # An optional block runs between the snapshot and the re-insertion; the
    # ensure guarantees the snapshot is re-inserted even if the block raises.
    def hashish_rebuild(data = nil)
      hashish_wait_on_lock do
        data ||= hashish_list(:page_size => 0)
        # hashish_flush! -- we know that all the indexes will be dropped and recreated due to hashish_remove_metadata being called on hashish_insert
      end
      begin
        # Caller-supplied work (e.g. config changes) before the data goes back in.
        yield if block_given?
      ensure
        # Always restore the snapshot, stamping every entry with the current time.
        hashish_insert(data, Time.now.to_i)
      end
    end

    # Deletes every redis key belonging to this store.
    # (hashish_redis_keys enumerates them; it is defined outside this chunk.)
    def hashish_flush!
      hashish_redis_keys.each do |redis_key|
        Hashish.redis_connection.del(redis_key)
      end
    end

    # Number of items in the primary "full list" sorted set.
    # The trailing '*' is a literal part of the key name, not a glob pattern.
    def hashish_length
      full_list_key = "#{@prefix}*"
      Hashish.redis_connection.zcard(full_list_key)
    end
    
    # Removes one item (and its index metadata) from the store.
    # With no +key+ given, deletes the first item returned by hashish_list.
    def hashish_delete(key = nil)
      key ||= hashish_get_key(hashish_list.first)
      item_key = "#{@prefix}@#{key}"
      # Drop the member from the primary sorted set ('*' is a literal key name).
      Hashish.redis_connection.zrem("#{@prefix}*", item_key)
      data = hashish_find(key)
      return unless data
      # Tear down secondary indexes/sorters for the item, then the payload itself.
      hashish_remove_metadata(key, JSON.parse(data))
      Hashish.redis_connection.del(item_key)
    end
    
    # Inserts a single item Hash, or an Array of item Hashes, all stamped with
    # the same insertion time +t+ (epoch seconds; defaults to now).
    # Raises RuntimeError for any other data type.
    def hashish_insert(data, t = Time.now.to_i)
      case data
      when Array
        data.each { |record| hashish_single_insert(record, t) }
      when Hash
        hashish_single_insert(data, t)
      else
        raise "Data must be of type Hash or Array of Hash"
      end
    end

    # Returns the raw serialized payload stored for +key+, or nil when absent.
    def hashish_find(key)
      item_key = "#{@prefix}@#{key}"
      Hashish.redis_connection.get(item_key)
    end

    # Paged, filtered, time-windowed listing of stored items.
    #
    # options:
    #   :page_no    - 1-based page number (default 1)
    #   :page_size  - items per page (default 10); 0 with page 1 yields offset 0
    #                 and limit 0, i.e. range 0..-1 == the entire set
    #   :from/:to   - min/max score (insertion timestamp) bounds; default unbounded
    #   :filters    - {field => value or [values]} matched against secondary index
    #                 sets; an Array value is OR-ed together via a temp union set
    #   :sort_by    - field name to SORT the result by (relies on per-item sorter
    #                 keys, presumably written at insert time -- not visible here)
    #   :sort_order - 'DESC' for descending; anything else ascending
    #
    # Returns an Array of JSON-parsed item hashes (raw string if unparsable).
    def hashish_list(options = {})
      page_no = options[:page_no] || 1
      page_size = options[:page_size] || 10
      
      max_time = options[:to] || '+inf'
      min_time = options[:from] || '-inf'
      
      filters = options[:filters] || {}
      sort_by = options[:sort_by] || nil
      sort_order = options[:sort_order] || nil
      
      # Start offset and end bound for the redis range commands below.
      offset = (page_no - 1) * page_size
      limit = offset + page_size
      
      # get the next seq no for search operations for this search instance (perhaps this entire section shld be oops based stuff but what the heck :E mayb later)
      seq = Hashish.redis_connection.incr("#{@prefix}#SEQUENCER")
      
      # search
      inter = []
      unless filters.empty?
        # inter = filters.map{|k,v| "#{@prefix}:#{k}:#{v}"}
        
        filters.each do |key, value|
          if value.is_a?(Array)
            # OR-semantics: union all per-value index sets into one temp key.
            union = []
            union_key = "#{@prefix}#UNION##{key}##{seq}"
            value.each do |v|
              union << "#{@prefix}!#{key}=#{v}"
            end
            Hashish.redis_connection.zunionstore(union_key, union.uniq)
            # NOTE(review): this TTL is the raw ttl value while the other temp
            # keys below use `ttl * 60` -- confirm which unit is intended.
            Hashish.redis_connection.expire(union_key, Hashish.redis_search_keys_ttl)
            inter << union_key
          else
            # AND-semantics: intersect directly with the single index set.
            inter << "#{@prefix}!#{key}=#{value}"
          end
        end
        
      end
      
      # Literal key name of the "all items" sorted set -- the '*' is part of
      # the key, not a glob pattern.
      full_list = "#{@prefix}*"
      
      # is the user askin for a cropped set of data (min/max date/time of nQ)
      if min_time == '-inf' and max_time == '+inf'
        inter << full_list
      else
        # copy the full list to different temp set
        all_items_key = "#{@prefix}#ALL_ITEMS##{seq}"
        Hashish.redis_connection.zunionstore(all_items_key, [full_list])
        Hashish.redis_connection.expire(all_items_key, Hashish.redis_search_keys_ttl * 60)
        # crop the set based on min/max (wish we had a 1 step zcroprangebyscore)
        # "(" makes the bound exclusive, so entries exactly at min/max survive.
        Hashish.redis_connection.zremrangebyscore(all_items_key, "-inf", "(#{min_time}") if min_time != '-inf'
        Hashish.redis_connection.zremrangebyscore(all_items_key, "(#{max_time}", "+inf") if max_time != '+inf'
        inter << all_items_key
      end
      
      # Intersect all constraint sets; 'max' keeps the newest score per member.
      result_key = "#{@prefix}#RESULT##{seq}"
      Hashish.redis_connection.zinterstore(result_key, inter, :aggregate => 'max')
      Hashish.redis_connection.expire(result_key, Hashish.redis_search_keys_ttl * 60)
      result = nil
      if sort_by
        # SORT BY "<member>$<field>" string keys; GET '*' fetches each member's
        # own payload; STORE makes the result a list we can page with LRANGE.
        custom_sort_key = "#{@prefix}#CUSTOM_SORT##{seq}"
        Hashish.redis_connection.sort(result_key, :by => "*$#{sort_by}",:get => '*', :store => custom_sort_key, :order => sort_order)
        Hashish.redis_connection.expire(custom_sort_key, Hashish.redis_search_keys_ttl * 60)
        result = Hashish.redis_connection.lrange(custom_sort_key, offset, limit -1)
      else
        # Page of member keys via zrange/zrevrange (end index is inclusive,
        # hence limit - 1), then bulk-fetch the payloads.
        res_keys = Hashish.redis_connection.send("z#{(sort_order == 'DESC' ? 'rev' : '')}range".to_sym, result_key, offset, limit - 1)
        if res_keys.empty?
          result = []
        else
          result = Hashish.redis_connection.mget(*res_keys)
        end
      end
      # Parse each payload as JSON, falling back to the raw string.
      # NOTE(review): the rescue modifier swallows any StandardError here.
      result.compact.map{|x| JSON.parse(x) rescue x}
    end
    
  end
end