#!/usr/bin/env python
#coding:utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import os 
from elasticsearch import Elasticsearch,helpers,exceptions
import traceback
import time
from util.log_helper import initRotatingLogger,initMpTimedLogger
from datetime import datetime
from constants import *
import json

__author__ = 'hongshun@staff.sina.com.cn'
__version__= '0.1'

class ElasticsearchHandler():
    '''Elasticsearch client used for article deduplication: stores per-document
    title/content fingerprints and serves the similarity lookups.'''

    # Multiprocess-safe timed rotating logger shared by every instance.
    log = initMpTimedLogger(LOG_PATH, 'per_SimHandler')

    def __init__(self, index, server=None):
        '''
        :param index:  name of the ES index holding the fingerprints
        :param server: optional host list [{'host':.., 'port':..}, ...];
                       defaults to the hard-coded legacy cluster when omitted
        '''
        self.index = index

        # Legacy cluster nodes; kept only as the default value of self.server.
        self.ip_1 = '10.13.1.154'
        self.ip_2 = '10.13.1.160'
        #self.ip_3 = '10.13.1.199'
        self.ip_4 = '10.13.1.204'
        self.port = '9200'
        self.server = [
                {'host': self.ip_1, 'port': self.port},
                {'host': self.ip_2, 'port': self.port},
                #{'host': self.ip_3, 'port': self.port},
                {'host': self.ip_4, 'port': self.port}
                ] if not server else server

        # New cluster. Both self.es and self.new_es point at the same hosts,
        # so legacy call sites using self.es transparently hit the new cluster.
        self.new_server = [
            {'host': '10.23.2.21', 'port': '9200'},  # 20220215 node was down
            {'host': '10.23.2.22', 'port': '9200'},
            {'host': '10.23.2.23', 'port': '9200'}
        ]
        self.new_es = Elasticsearch(hosts=self.new_server)
        self.es = Elasticsearch(hosts=self.new_server)

        # Ensure the index exists with the expected mapping.
        self.set_mapping()

        # Legacy document type name (pre-7.x clusters); kept for compatibility.
        self.type_ = 'simfp'

    def set_mapping(self):
        '''Create the index with its mapping if it does not already exist.

        :return: 1 when a create request was issued, 0 when the index already
                 existed or the create raised (the error is logged).
        '''
        properties = {}

        # Exact-value fields: stored as non-analyzed keywords.
        for elem in ['uuid', 'iid', 'url', 'title_fp', 'content_fp', 'dataid']:
            properties[elem] = {'type': 'keyword'}

        # Pre-tokenized fields: analyzed on whitespace only.
        for elem in ['title_split', 'keywords']:
            properties[elem] = {'type': 'text', 'analyzer': 'whitespace'}

        # POS-tag list of the split title.
        properties['pos_tags'] = {'type': 'integer_range'}
        # Original, unmodified title.
        properties['title_ori'] = {'type': 'text'}
        # Creation time (integer epoch seconds) — used by the range filters.
        properties['ctime'] = {'type': 'integer'}
        # Per-keyword weights.
        properties['weights'] = {'type': 'float_range'}

        # FIX: the original body used the misspelled keys 'setting', 'mapping',
        # '_defalut_' and 'properites', so the shard settings and the whole
        # field mapping were silently ignored by ES.  Use the correct typeless
        # (7.x-style, matching the typeless bulk actions below) layout.
        mapping = {
                'settings': {
                    'number_of_shards': 10,
                    'number_of_replicas': 0
                    },
                'mappings': {
                    '_source': {'enabled': True},
                    'properties': properties
                    }
                }
        if not self.es.indices.exists(self.index):
            try:
                self.es.indices.create(index=self.index, body=mapping, ignore=400)
                return 1
            except Exception as err:
                self.log.error(err)
        return 0

    def build(self, id, data, opType=''):
        '''Build one bulk action targeting self.index (see new_build).'''
        return self.new_build(id, data, self.index, opType)

    def bulk(self, bulkData):
        '''Bulk import/update/delete against self.es.

        :return: None on success, the formatted traceback string on failure
                 (callers treat any truthy return as an error message).
        '''
        try:
            helpers.bulk(self.es, bulkData, request_timeout=3)
        except Exception:
            return traceback.format_exc()

    def new_build(self, id, data, index, opType=''):
        '''Build one typeless bulk action dict.

        :param id:     document _id
        :param data:   document body (ignored for deletes)
        :param index:  target index name
        :param opType: '' (index), 'update' or 'delete'
        '''
        bdata = {'_index': index, '_id': id}
        if opType == 'update':
            bdata['doc'] = data
            bdata['_op_type'] = 'update'
        elif opType == 'delete':
            bdata['_op_type'] = 'delete'
        else:
            bdata['_source'] = data
        return bdata

    def new_bulk(self, bulkData):
        '''Bulk import/update/delete against the new cluster.

        :return: helpers.bulk() result tuple on success, the formatted
                 traceback string on failure.
        '''
        try:
            return helpers.bulk(self.new_es, bulkData, request_timeout=3)
        except Exception:
            return traceback.format_exc()

    def search_by_title(self, title, count, time_begin):
        '''Search by title tokens; used for title deduplication.

        :param title:      whitespace-separated title tokens
        :param count:      maximum number of hits to return
        :param time_begin: when an int, restrict to docs with ctime >= time_begin
        :return: raw ES search response
        '''
        query = {"size":count,"query":{"bool":{"must":[{"match":{
                    "title_split":{"query":title,"operator":"OR",
                    "minimum_should_match":"20%"}}}],
                    "adjust_pure_negative":True,"boost":1.0,
                    }}}
        if isinstance(time_begin, int):
            query["query"]["bool"]["filter"] = {"range":{"ctime":{"gte":time_begin}}}
        self.log.info("query: {}".format(json.dumps(query, ensure_ascii=False)))
        # Hard 1s timeout: deduplication must not block the pipeline.
        result = self.es.search(self.index, body=query, preference='primary_first', request_timeout=1)
        return result

    def search_by_keywords(self, keywords, count, time_begin):
        '''Search by content keywords; used for content deduplication.

        :param keywords:   whitespace-separated keywords
        :param count:      maximum number of hits to return
        :param time_begin: when an int, restrict to docs with ctime >= time_begin
        :return: raw ES search response
        '''
        query = {"size":count,"query":{"bool":{"must":[{"match":{
                    "keywords":{"query":keywords,"operator":"OR",
                    "minimum_should_match":"20%"}}}],
                    "adjust_pure_negative":True,"boost":1.0,
                    }},"track_total_hits":2147483647}
        if isinstance(time_begin, int):
            query["query"]["bool"]["filter"] = {"range":{"ctime":{"gte":time_begin}}}
        result = self.es.search(self.index, body=query, preference='primary_first', request_timeout=1)
        return result

    def search_by_dataid(self, dataid):
        '''Return up to 1000 raw hits whose dataid matches exactly.'''
        query = {"query":{"term":{"dataid":dataid}}, "size":1000}
        res = self.es.search(self.index, body=query, request_timeout=1)['hits']['hits']
        return res

    def search_by_ctime(self, begin, end):
        '''Return up to 500 hits with begin <= ctime <= end (used for purging).'''
        query = {'size':500,'query':{'range':{'ctime':{'gte':begin,'lte':end}}}}
        result = self.es.search(self.index, body=query)
        return result['hits']['hits']

    def search_by_id(self, _id):
        '''Return raw hits matching the given document _id.'''
        query = {"query":{"term":{"_id":_id}}}
        res = self.es.search(self.index, body=query, request_timeout=1)['hits']['hits']
        return res

    def term_search(self, field, value, sort='asc'):
        '''Exact term search on `field`; at most 10 hits sorted by ctime.'''
        query = {"size":10, "query":{"term":{field:value}}, "sort":{"ctime":{"order":sort}}}
        res = self.es.search(self.index, body=query, request_timeout=1)
        return res

    def delete_by_ctime(self, begin, end):
        '''Delete every document whose ctime lies in [begin, end].

        Repeats search + bulk-delete until no hits remain.
        :return: 1 on success, 0 on error (the error is logged).
        '''
        try:
            result = self.search_by_ctime(begin, end)
            while result:
                ids = [w['_id'] for w in result]
                bulkData = [self.build(id, None, 'delete') for id in ids]
                self.bulk(bulkData)
                result = self.search_by_ctime(begin, end)
            return 1
        except Exception as err:
            self.log.error(err)
            return 0
            

if __name__ == '__main__':
    # Smoke test: fetch a feature document from Mongo, then run a title
    # similarity search and an exact dataid lookup against the online index.
    esh = ElasticsearchHandler('new_sim_fp_online')
    from util.mongodb_handler_online import MongoHandlerOnline
    mongo = MongoHandlerOnline()
    d = mongo.findFeature('f4cf0050171c399793ea9460faaeda25')
    did = d.get('dataid')
    s = u'培育 发展 能源 商品交易 平台 股份制改造 兼并重组 有效 办法'
    cands = esh.search_by_title(s, 50, 1612396581)
    for w in cands['hits']['hits']:
        # FIX: use the print() call form — the old `print w[...]` statement is
        # Python-2-only and inconsistent with print(ret) below; with a single
        # argument the output is identical under py2 and py3.
        print(w['_source']['title_ori'])
    ret = esh.term_search('dataid', 'comos:ktzqtyu8739152')
    print(ret)
