from elasticsearch import *
from elasticsearch import helpers
import json

from datetime import datetime

# Default Elasticsearch index and (pre-ES7) mapping type for bulk loading;
# both can be overridden per-call via the `load` keyword arguments.
index_name = "http-demo"
type_str = 'http'

def load(es, filepath, index_name=index_name, type_str=type_str,
         batch_size=50000, max_docs=None):
    """Bulk-index a newline-delimited JSON file into Elasticsearch.

    Each line of *filepath* must be one standalone JSON document. Documents
    are sent with ``elasticsearch.helpers.bulk`` in batches of *batch_size*.

    Parameters:
        es: connected Elasticsearch client.
        filepath: path to a JSON-lines file (one JSON object per line).
        index_name: target index (defaults to the module-level constant).
        type_str: document type (defaults to the module-level constant;
            note ``_type`` is removed in Elasticsearch 7+).
        batch_size: number of actions per bulk request.
        max_docs: optional cap on documents to index; ``None`` means all.

    Returns:
        The number of documents indexed.
    """
    count = 0
    actions = []
    with open(filepath) as json_file:
        # Stream line by line instead of readlines(): the file never has to
        # fit in memory, which is the whole point of batching the bulk calls.
        for line in json_file:
            if max_docs is not None and count >= max_docs:
                break
            data = json.loads(line)
            count += 1
            actions.append({
                "_index": index_name,
                "_type": type_str,
                "_id": count,       # 1-based sequential document id
                "_source": data,
            })
            if len(actions) >= batch_size:
                helpers.bulk(es, actions)
                actions.clear()

        # Flush the final partial batch.
        if actions:
            helpers.bulk(es, actions)
    return count

if __name__ == '__main__':
    host = '127.0.0.1'

    # Plain-HTTP connection to a local single-node cluster; names come from
    # the wildcard `from elasticsearch import *` at the top of the file.
    es = Elasticsearch(
        hosts=[{'host': host, 'port': 9200}],
        use_ssl=False,
        verify_certs=False,
        connection_class=RequestsHttpConnection,
    )

    print(es.info())

    fp = '/Users/leochan/Documents/src_codes/_xipu/http.json'

    t = datetime.now()
    #load(es, fp)
    # total_seconds() keeps fractional seconds and does not wrap at day
    # boundaries, unlike the timedelta `.seconds` attribute.
    print('t:', (datetime.now() - t).total_seconds())

    print('end...')
    #3227302
    # NOTE(review): in ES 7+ `hits.total` is a dict {'value': N, ...};
    # this %d format assumes a pre-7 cluster — confirm the server version.
    res = es.search(index=index_name, body={"query": {"match_all": {}}})
    #res = es.search(index = index_name, q='url:robot*')
    print("Got %d Hits:" % res['hits']['total'])

    #curl -XDELETE 'http://localhost:9200/http-demo'
    #curl -XDELETE 'http://localhost:9200/http-demo/http/1'