# -*- coding: utf-8 -*-
"""
Created on Thu Jul 18 18:08:12 2019

@author: admin

"""
from elasticsearch import Elasticsearch
import time 
import market_label
import market_duplicate
import market_label_rule
import json
import os
  

#全量打标
def label_batch(es_access,es_index,doc_type,query_body=None,is_unite=True,time_stamp='1',scroll='1m',size=100):
    s = time.time()
    _es = Elasticsearch([es_access], http_auth=('dict', 'tisson1!es'), timeout=60, max_retries=3, retry_on_timeout=True)
    print(es_access+'已连接')
        
    body = {'query': {'match_all': {}}}
    body = {'query': {'term': {'is_delete': False}}}
    if query_body:
        body = query_body
    search_result = _es.search(index=es_index, doc_type=doc_type, scroll=scroll, body=body, size=size)
    total = search_result['hits']['total']
    print('数据总量：'+str(total))    
    
    filedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'market_log')
    filepath = os.path.join(filedir, 'label_result.json')
    status = {'stage':'Rule','percent':0}
    response = {'error_clean':[],'error_label':[],'时间戳':time_stamp, 'Status':status}
    with open(filepath, 'w') as f:
        json.dump(response, f)
    
    list_rule, id_text_match = market_label_rule.rule_word_transform(False)
    
    num = 0
    list_bulk = []
    page = ['1']
    while(len(page) > 0):
        #start = time.time()
        page = search_result['hits']['hits']
        for p in page:
            try:
                body_new = market_duplicate.data_clean(p)
                body_new = body_new['_source']
            except:
                response['error_clean'].append(p['_id'])
                continue
            try:
                body_new = market_label.return_label(body_new,list_rule,id_text_match)
            except:
                response['error_label'].append(p['_id'])
                continue
            list_bulk.append({'update':{'_index':es_index, '_type':doc_type, '_id':body_new['id']}})
            list_bulk.append({'doc':body_new})
        if list_bulk:
            _es.bulk(index=es_index, doc_type=doc_type, body=list_bulk)
        sid = search_result['_scroll_id']
        search_result = _es.scroll(scroll_id=sid, scroll=scroll)
        list_bulk = []
        num += 1
        if num % 10 == 0:
            percent = (num * size * 100) / total
            percent = float(format(percent,'.2f'))
            response['Status'] = {'stage':'Tag','percent':percent}
            with open(filepath, 'w') as f:
                json.dump(response, f)
#        end = time.time()
#        print('耗费时间'+str((end - start)))
#        print('已打标%d条' % (num*100))
        
    #统一标签
    if is_unite:
        response['Status'] = {'stage':'Union','percent':100}
        with open(filepath, 'w') as f:
            json.dump(response, f)        
        time.sleep(5)
        entity = ['customer','supplier']
        for e in entity:
            #防止es连接自动断开，重新连接一次
            _es = Elasticsearch([es_access], http_auth=('dict', 'tisson1!es'), timeout=60, max_retries=3, retry_on_timeout=True)
            list_modify = market_label.label_unite(_es,es_index,doc_type,e)
            if list_modify:
                market_label.label_modify(_es,es_index,doc_type,list_modify,e)
                time.sleep(5)

    response['Status'] = {'stage':'Done','percent':100}
    with open(filepath, 'w') as f:
        json.dump(response, f)     
    e = time.time()
    print('耗费总时间'+str((e - s)))
    return response
        
if __name__ == '__main__':
    # Ad-hoc run: label documents created on a single day, skipping the
    # unification pass (is_unite=False).
    target_host = 'http://47.112.138.184:9202'
    target_index = 'projecttest1'
    target_doc_type = '_doc'
    one_day_query = {
        'query': {
            'range': {
                'create_time': {
                    'gte': '21/11/2019',
                    'lte': '21/11/2019',
                    'format': 'dd/MM/yyyy',
                },
            },
        },
    }
    response = label_batch(target_host, target_index, target_doc_type,
                           one_day_query, False, '123')



    

    

    
    