#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import json
import traceback
import six
from six.moves import reload_module
# Py2 compatibility hack: site.py deletes sys.setdefaultencoding at startup,
# so reload sys to get it back, then force UTF-8 as the process-wide default
# encoding.  On Py3 the reload runs but the setdefaultencoding call is skipped.
reload_module(sys)
if six.PY2:
    sys.setdefaultencoding('utf-8')

from .es import zElasticsearch

class SearchError(Exception):
    """Raised when an ElasticSearch request fails.

    Attributes:
        search_req: the request body (dict) that was being executed.
        msg: human-readable description of the failure.
    """

    def __init__(self, search_req, msg):
        # Forward msg to Exception so str(e) and tracebacks show the message
        # (previously Exception.__init__ was never called and str(e) was '').
        super(SearchError, self).__init__(msg)
        self.search_req = search_req
        self.msg = msg

# sort and combine
def unify_rules(rules):
    op_list = ['query', 'prequery', 'same', 'count', 'stat', 'range', 'period', 'every', 'vector']
    op_dict = {}
    sorted_rules = []

    if not rules or type(rules[0]) not in [type([]), type(())]:
        return rules

    try:
        for item in rules:
            if item[1] not in op_list:
                op_list.append(item[1])

            if item[1] not in op_dict.keys():
                op_dict[item[1]] = []

            op_dict[item[1]].append(item)

        for op in op_list:
            if op not in op_dict.keys():
                continue

            # operater same can't be sorted
            if op in ['same']:
                for item in op_dict[op]:
                    sorted_rules.append(item)
                continue

            for item in sorted(op_dict[op]):
                sorted_rules.append(item)

        return sorted_rules
    except Exception as e:
        print('unify rule exception: ', e)
        return rules


def get_limited_length_string(data, max_container_print_size=20):
    """Return a compact, length-limited textual rendering of *data*.

    Lists/tuples/dicts are rendered recursively but truncated with '...'
    after max_container_print_size elements; plain strings longer than
    10 * max_container_print_size characters are cut and suffixed with
    ' ... '.  Anything else is rendered with %s.
    """
    data_type = type(data)

    if data_type in (tuple, list):
        open_mark, close_mark = (' (', ') ') if data_type is tuple else (' [', '] ')
        parts = [open_mark]
        for index, element in enumerate(data):
            if index:
                parts.append(', ')
            if index + 1 > max_container_print_size:
                parts.append('...')
                break
            parts.append(get_limited_length_string(element, max_container_print_size))
        parts.append(close_mark)
        return ''.join(parts)

    if data_type is dict:
        parts = [' {']
        for index, (key, value) in enumerate(data.items()):
            if index:
                parts.append(', ')
            if index + 1 > max_container_print_size:
                parts.append('...')
                break
            parts.append('%s : ' % key)
            parts.append(get_limited_length_string(value, max_container_print_size))
        parts.append('} ')
        return ''.join(parts)

    if data_type is str and len(data) > 10 * max_container_print_size:
        return data[:10 * max_container_print_size] + " ... "

    return '%s' % data

def get_records_in_time_range_from_elasticsearch(
        host,                  # ElasticSearch host IP address
        search_index,          # ElasticSearch index name, e.g. 'btgd_xml*'
        doc_type,              # ElasticSearch doc type name
        time_field_names,      # one field name, or [start_field, end_field]
        time_range,            # [start, end) timestamps
        term_values=None,      # optional list of {field: value} term filters
        doc_cnt=-1,            # doc_cnt less than zero means return all records
        sort_order=None,       # can be "asc", "desc", or None
        verbose=0,             # debug verbosity level
        block_doc_cnt=1000,    # internal, number of records returned per search
        fields=None,           # restrict returned fields; None/[] means all
        agg_terms=None         # fields for a nested terms aggregation
        ):
    """Convenience wrapper around get_records_in_time_range_from_elasticsearch_ext
    that returns only the records, discarding the total hit count.

    Raises SearchError on ElasticSearch failure, ValueError for a bad
    time_field_names value (propagated from the _ext implementation).
    """
    (records, total_rec_cnt) = get_records_in_time_range_from_elasticsearch_ext(
            host, search_index, doc_type, time_field_names, time_range,
            term_values=term_values, doc_cnt=doc_cnt, sort_order=sort_order,
            verbose=verbose,
            # Bug fix: block_doc_cnt used to be accepted here but silently
            # dropped instead of being forwarded to the implementation.
            block_doc_cnt=block_doc_cnt,
            # fields default changed from a shared mutable [] to None.
            fields=fields if fields is not None else [],
            agg_terms=agg_terms)
    return records

def get_records_in_time_range_from_elasticsearch_ext(
        host,                  # ElasticSearch host IP address
        search_index,          # ElasticSearch index name, e.g. 'btgd_xml*'
        doc_type,              # ElasticSearch doc type name
        time_field_names,      # one field name, or [start_field, end_field]
        time_range,            # [start, end) timestamps
        term_values=None,      # optional list of {field: value} term filters
        doc_cnt=-1,            # doc_cnt less than zero means return all records
        sort_order=None,       # can be "asc", "desc", or None
        verbose=0,             # debug verbosity level
        block_doc_cnt=1000,    # internal, number of records returned per search
        fields=None,           # restrict returned fields; None/[] means all
        agg_terms=None,        # fields for a nested terms aggregation
        cardi_term=None,       # field for a cardinality (or 'missing') aggregation
        missing=False,         # with cardi_term: count docs missing the field instead
        count_only=False       # return only the matching-document count
        ):
    """Query ElasticSearch for records whose time field(s) fall in time_range.

    Returns a tuple (records, total_count):
      * normal search:  (list of hit dicts, total hit count)
      * count_only:     ([], count)
      * agg_terms:      (aggregations dict, total hit count)
      * cardi_term:     (aggregations dict, cardinality value or missing-doc count)

    Raises SearchError when the ElasticSearch request fails, ValueError for a
    bad time_field_names value, and Exception when more than 100000 plain
    records would have to be loaded into memory.
    """
    # fields default changed from a shared mutable [] to None (same behavior).
    if fields is None:
        fields = []
    if doc_cnt >= 0 and doc_cnt < block_doc_cnt:
        block_doc_cnt = doc_cnt

    # Build a 'filtered' query skeleton (ElasticSearch 1.x style DSL).
    req = {}
    req['query'] = {}
    req['query']['filtered'] = {}
    req['query']['filtered']['filter'] = {}
    req['query']['filtered']['filter']['bool'] = {}
    req['query']['filtered']['filter']['bool']['should'] = []
    req['query']['filtered']['filter']['bool']['must_not'] = []
    req['query']['filtered']['filter']['bool']['must'] = []
    if not count_only:
        req['size'] = block_doc_cnt

    # Accept either a single time field name or [start_field, end_field].
    start_time_field = None
    end_time_field = None
    if type(time_field_names) == type([]):
        if len(time_field_names) >= 1:
            start_time_field = time_field_names[0]
        if len(time_field_names) >= 2:
            end_time_field = time_field_names[1]
    elif type(time_field_names) in (type(''), type(u'')):
        start_time_field = time_field_names

    if start_time_field is None:
        raise ValueError("Wrong value \"%s\" provided for parameter time_field_names."%time_field_names)

    if sort_order in ['asc', 'desc']:
        # Descending interval queries sort on the end-time field; everything
        # else sorts on the start-time field.
        if end_time_field is None or sort_order == 'asc':
            req['sort'] = {start_time_field: {'order': sort_order, "unmapped_type": "date"}}
        else:
            req['sort'] = {end_time_field: {'order': sort_order, "unmapped_type": "date"}}

    if len(fields) > 0:
        req['fields'] = fields

    if end_time_field is None:
        # Single time field: half-open range [start, end).
        # (Renamed from `range`, which shadowed the builtin.)
        time_filter = {'range':
                    {start_time_field:
                        {'gte': time_range[0],
                         'lt': time_range[1]
                        }
                    }
                }
        req['query']['filtered']['filter']['bool']['must'].append(time_filter)
    else:
        # Interval overlap: record end >= range start AND record start < range end.
        range1 = {'range': {end_time_field: {'gte': time_range[0], 'time_zone': '+8:00'}}}
        range2 = {'range': {start_time_field: {'lt': time_range[1], 'time_zone': '+8:00'}}}
        req['query']['filtered']['filter']['bool']['must'].append(range1)
        req['query']['filtered']['filter']['bool']['must'].append(range2)

    # Each entry of term_values is a dict of exact-match term filters.
    if term_values is not None and len(term_values) > 0:
        for one_tab in term_values:
            for key, value in six.iteritems(one_tab):
                req['query']['filtered']['filter']['bool']['must'].append({'term': {key: value}})

    agg_search = False
    cardi_search = False
    if agg_terms is not None and len(agg_terms) > 0:
        agg_search = True
        req['size'] = 0  # in aggregation mode, size constraint applies to the aggregation
        aggs_req = req
        for one_term in agg_terms:
            aggs_req['aggs'] = {}
            aggs_req['aggs'][one_term] = {}
            aggs_req['aggs'][one_term]['terms'] = {'field': one_term, 'size': block_doc_cnt}
            if sort_order in ('asc', 'desc'):
                aggs_req['aggs'][one_term]['terms']['order'] = {"_count": sort_order}
            # Descend so the next term nests inside the current aggregation.
            aggs_req = aggs_req['aggs'][one_term]
    elif cardi_term is not None:
        cardi_search = True
        req['size'] = 0  # in aggregation mode, size constraint applies to the aggregation
        aggs_req = req
        aggs_req['aggs'] = {}
        aggs_req['aggs'][cardi_term] = {}
        if not missing:
            aggs_req['aggs'][cardi_term]['cardinality'] = {'field': cardi_term, 'precision_threshold': 4000}
        else:
            aggs_req['aggs'][cardi_term]['missing'] = {'field': cardi_term}

    if verbose > 1:
        print(json.dumps(req, sort_keys=True, indent=4))

    search_count = 1
    try:
        es = zElasticsearch(host, timeout=100)
        if count_only:
            results = es.count(search_index, doc_type=doc_type, body=req)
        else:
            results = es.search(search_index, doc_type=doc_type, body=req)
    except Exception:
        if verbose > 1:
            print(str(sys.exc_info()[1]))
            print(traceback.format_exc())
        raise SearchError(req, "ElasticSearch Error Found!\n")

    if verbose > 5:
        print("Search result (#%d)"%search_count)
        print(json.dumps(results, sort_keys=True, indent=4))

    if count_only:
        return ([], results['count'])

    total_cnt = results['hits']['total']
    if agg_search:
        return (results['aggregations'], total_cnt)
    elif cardi_search:
        aggs_res = results['aggregations']
        if not missing:
            return (results['aggregations'], aggs_res[cardi_term]['value'])
        else:
            return (results['aggregations'], aggs_res[cardi_term]['doc_count'])
    else:
        # Guard against accidentally pulling a huge result set into memory.
        if total_cnt > 100000:
            raise Exception("Too many records (%d) found to be loaded in memory."%total_cnt)

        if doc_cnt < 0 or doc_cnt > total_cnt:
            doc_cnt = total_cnt

        # Page through the remaining hits with from/size until we have
        # doc_cnt records or a short (last) page is returned.
        one_res = results['hits']['hits']
        ret_res = one_res
        while len(ret_res) < doc_cnt and len(one_res) >= block_doc_cnt:
            req['from'] = len(ret_res)
            search_count += 1
            results = es.search(search_index, doc_type=doc_type, body=req)
            if verbose > 5:
                print("Search result (#%d)"%search_count)
                print(json.dumps(results, sort_keys=True, indent=4))
            one_res = results['hits']['hits']
            ret_res.extend(one_res)

    return (ret_res, total_cnt)

if __name__ == '__main__':
    # Ad-hoc smoke tests against a live ElasticSearch host.
    host = '192.168.3.93'
    #host = '192.168.1.152'
    # Optional argv[1]: JSON config file that may override 'host'.
    if len(sys.argv) > 1 :
        try :
            with open(sys.argv[1]) as input_file:
                inputs = json.load(input_file)
                host = inputs.get('host', host)
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still
        # propagate; a bad config file falls back to the default host.
        except Exception :
            print(str(sys.exc_info()[1]))
            print(traceback.format_exc())
            print("Error in loading input file %s."%sys.argv[1])

    print("===================================================================")
    search_index = "btgd_xml*"
    doc_type = "xml_zjs_req"
    # Case 1: interval query ([start_field, end_field]), ascending sort.
    time_field_names = ["START_TIME", "END_TIME"]
    time_range = ["1970-01-01T00:00:00", "2015-09-02T00:00:00"]
    print("Testing get_records_in_time_range_from_elasticsearch: host %s, time_field_names %s, time_range %s"%(host, time_field_names, time_range))
    results = get_records_in_time_range_from_elasticsearch(host, search_index, doc_type, time_field_names, time_range, verbose=2, block_doc_cnt=2, doc_cnt = 20, sort_order='asc')
    print("Final Records: %d records"%len(results))
    print(json.dumps(results, sort_keys = True, indent = 4))

    print("===================================================================")

    # Case 2: single time field, descending sort.
    time_field_names = "@timestamp"
    print("Testing get_records_in_time_range_from_elasticsearch: host %s, time_field_names %s, time_range %s"%(host, time_field_names, time_range))
    results = get_records_in_time_range_from_elasticsearch(host, search_index, doc_type, time_field_names, time_range, verbose=2, block_doc_cnt=2, doc_cnt = 20, sort_order='desc')
    print("Final Records: %d records"%len(results))
    print(json.dumps(results, sort_keys = True, indent = 4))

    print("===================================================================")

    # Case 3: nested terms aggregation instead of fetching documents.
    time_field_names = ["START_TIME", "END_TIME"]
    #time_field_names = "@timestamp"
    print("Testing get_records_in_time_range_from_elasticsearch: host %s, time_field_names %s, time_range %s"%(host, time_field_names, time_range))
    results = get_records_in_time_range_from_elasticsearch(host, search_index, doc_type, time_field_names, time_range, verbose=1, agg_terms=['USER_NAME', 'OPERATE_CONTENT.last_word'])
    print("Final Records: %d records"%len(results))
    print(json.dumps(results, sort_keys = True, indent = 4))
