# -*- encoding: utf-8 -*-
import sys

from elasticsearch import Elasticsearch, helpers
from elasticsearch.helpers import bulk

import json
import pandas as pd
from pandas.io.json import json_normalize
import pickle as pk


def match_field(startTime, endTime, index='ehlindex201811', doc_type='pass_car',
                scroll='5m'):
    """Scan all pass-car documents from Elasticsearch within a time window.

    Connects to ES and scrolls over every document whose ``timestamp``
    field falls between *startTime* and *endTime* (inclusive).

    Args:
        startTime: Window start, any string/format ``pd.Timestamp`` accepts
            (e.g. ``'2018/11/5 08:00:00'``); interpreted as Asia/Shanghai.
        endTime: Window end, same format/timezone as *startTime*.
        index: ES index to scan (default kept for backward compatibility).
        doc_type: ES document type to scan.
        scroll: Scroll-context keep-alive passed to ``helpers.scan``.

    Returns:
        A list of raw hit dicts (one per matching document).
    """
    # Localize to Asia/Shanghai so the epoch values match how the
    # documents' timestamps were indexed.
    start = pd.Timestamp(startTime).tz_localize('Asia/Shanghai')
    end = pd.Timestamp(endTime).tz_localize('Asia/Shanghai')
    es_client = Elasticsearch(['41.111.32.37'], http_auth=('admin', 'admin'), port=9200)
    # Timestamp.value is epoch nanoseconds; / 10**6 converts to the
    # epoch-millisecond values ES stores in the 'timestamp' field.
    query_json = {
        'query': {
            'bool': {
                'must': [
                    {'range': {'timestamp': {  # pass-car time window
                        'gte': int(start.value / 10 ** 6),
                        'lte': int(end.value / 10 ** 6),
                    }
                    }}
                ],
            }
        }
    }
    all_doc = helpers.scan(client=es_client, scroll=scroll, index=index,
                           doc_type=doc_type, query=query_json)
    # Materialize the generator so callers get a reusable list.
    return list(all_doc)


def save_csv(startTime, endTime):
    """Fetch documents for the given time window and pickle them to disk.

    NOTE(review): despite the name, this writes a *pickle* file named
    'ehlindex201811' in the current directory, not a CSV.

    Args:
        startTime: Window start passed through to ``match_field``.
        endTime: Window end passed through to ``match_field``.
    """
    print('开始')
    all_data = match_field(startTime, endTime)
    # BUG FIX: the original leaked the file handle (open() without close);
    # the context manager guarantees the file is flushed and closed.
    with open("ehlindex201811", "wb") as f:
        pk.dump(all_data, f)
    print('ok')


def data_save_es(path, es):
    """Load pickled ES actions from *path* and bulk-index them in batches.

    Args:
        path: Path to a pickle file containing a list of ES action dicts
            (as produced by ``save_csv`` / ``match_field``).
        es: Elasticsearch client the bulk requests are sent through.

    Returns:
        The batch size used (1000), as in the original implementation.
    """
    # BUG FIX: original leaked the file handle (open() without close).
    with open(path, "rb") as f:
        actions = pk.load(f)
    count = 1000
    # BUG FIX: ceiling division replaces int(len/count)+1, which issued an
    # extra empty bulk request whenever len(actions) was an exact multiple
    # of the batch size.
    num_batches = (len(actions) + count - 1) // count
    for batch_no in range(num_batches):
        batch = actions[batch_no * count:(batch_no + 1) * count]
        first = batch_no * count + 1       # 1-based index of first doc in batch
        last = batch_no * count + len(batch)
        try:
            success, _ = bulk(es, batch, raise_on_error=True)
            # BUG FIX: original reported the range as {v+1}..{(v+1)*count},
            # which was wrong for every batch after the first.
            print(f"成功导入第{first}至{last}条数据")
        except Exception as e:
            # BUG FIX: original string lacked the f prefix, so the literal
            # braces were printed; also surface the exception itself.
            print(f"第{first}至{last}导入失败，详细信息查看log日志: {e}")
    return count


if __name__ == '__main__':
    # Example usage (currently disabled):
    # 1) Export a time window from ES into a local pickle file:
    # startTime = '2018/11/5 08:00:00'
    # endTime = '2018/11/5 08:10:00'
    # save_csv(startTime, endTime)
    # 2) Re-import a previously exported pickle into another cluster:
    # data = pk.load(open("ehlindex201809_10","rb"))
    # es = Elasticsearch(['10.2.111.56'], http_auth=('admin', 'admin'), port=9200)
    # data_save_es(sys.argv[1], es)
    print('ok')
