#!/usr/bin/python
# coding=utf-8
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk
from   datetime import *
import argparse
import simplejson
import sys
import os
import json
import subprocess as sub
from pymongo import  MongoClient

class Esoper:
    """Synchronise documents from a MongoDB collection into an Elasticsearch index.

    field_desc is a comma-separated list of "name|type" pairs, e.g.
    "fieldA|string,fieldB|date".  Supported types are date, int and float;
    any other type tag copies the value through unchanged.
    """

    def __init__(self, field_desc, db='tiku', mgtable='', host='47.92.139.146',
                 port='9200', index='shiji21_index', doctype='shiji21',
                 cut_off=1000):
        """Store configuration, then connect to MongoDB and Elasticsearch.

        :param field_desc: "name|type,..." description of fields to sync.
        :param db:         MongoDB database name.
        :param mgtable:    MongoDB collection to read from.
        :param host:       Elasticsearch host.
        :param port:       Elasticsearch HTTP port, as a string.
        :param index:      Target Elasticsearch index.
        :param doctype:    Target Elasticsearch document type.
        :param cut_off:    Page size: documents fetched/uploaded per batch.
        """
        self.host = host
        self.port = port
        # Mongo collection to sync; self.db starts as the database *name* and
        # is replaced by the database handle inside initMongodb().
        self.table = mgtable
        self.db = db
        self.index = index
        self.doctype = doctype
        # Separator between "name|type" items in field_desc.
        self.delimeter = ','
        self.tempdir = '/tmp/synes'
        self.tmp_file = os.path.join(self.tempdir, 'mongotemp')
        self.field_desc = field_desc
        # Bulk-API action header used by the curl-based send_data() path.
        self.header = '{"index":{"_index":"%s", "_type":"%s"}}' % (self.index, self.doctype)
        self.cut_off = cut_off
        # _bulk endpoint for the curl-based send_data() path.
        self.url = 'http://%s:%s/_bulk' % (self.host, self.port)
        self.createtempdir()
        self.initMongodb()
        self.parse_field()
        self.initES()

    def initES(self):
        """Create the Elasticsearch client for host:port."""
        self.es = Elasticsearch(hosts=[self.host + ":" + self.port], timeout=5000)

    def initMongodb(self):
        """Connect to MongoDB and select the configured database/collection.

        BUG FIX: the database name passed to __init__ is now honoured instead
        of the hard-coded 'tiku'.  NOTE(review): the Mongo host and the
        credentials are still hard-coded here — move them to configuration.
        """
        self.client = MongoClient('47.92.139.146', 27017)
        # Replaces the database-name string with the database handle (kept
        # for backward compatibility with the original attribute usage).
        self.db = self.client[self.db]
        self.db.authenticate('tiku', 'tiku123')
        self.collection = self.db[self.table]

    def createtempdir(self):
        """Ensure the temp directory for bulk upload files exists (race-free)."""
        os.makedirs(self.tempdir, exist_ok=True)

    def load_fromMongodb(self):
        """Page through the whole collection and bulk-index every document.

        Fetches cut_off documents per page via skip/limit; a short page
        (fewer than cut_off rows) marks the end of the collection.
        """
        self.start = 0
        while True:
            # BUG FIX: reset the batch buffer on every page.  Previously
            # body_list was initialised once before the loop, so each page
            # re-uploaded all documents from every earlier page.
            self.body_list = []
            self.line_num = 0
            cursor = self.collection.find({}).skip(self.start).limit(self.cut_off)
            for doc in cursor:
                self.line_num += 1
                self.do_line(doc)
            if self.line_num > 0:
                self.send_data_1()
                self.pretty_print('INFO: all lines parsed.total lines=%d' % (self.line_num))
            if self.line_num < self.cut_off:
                # Short page: collection exhausted.
                self.start = 0
                break
            self.start += self.line_num

    def do_line(self, line):
        """Convert one Mongo document and queue it for the next bulk upload."""
        self.body_list.append(self.get_body(line, self.fields_list))

    def parse_field(self):
        """Parse field_desc ("name|type,...") into self.fields_list tuples."""
        fields_list = []
        for item in self.field_desc.strip().split(self.delimeter):
            parts = item.split('|')
            fields_list.append((parts[0], parts[1]))
        self.fields_list = fields_list
        self.field_len = len(fields_list)

    def send_data_1(self):
        """Upload the queued documents with the elasticsearch bulk helper.

        :return: number of successfully indexed documents.
        """
        actions = [
            {"_index": self.index, "_type": self.doctype, "_source": body}
            for body in self.body_list
        ]
        success, _ = bulk(self.es, actions, index=self.index, raise_on_error=True)
        return success

    def send_data(self):
        """Legacy upload path: POST self.bulk_content to the _bulk API via curl.

        BUG FIX: the temp file is now written inside a context manager so the
        handle is always closed (and flushed) before curl reads the file.
        """
        with open(self.tmp_file, 'w') as f:
            f.write(self.bulk_content)
        p = sub.Popen(['curl', '-s', '-XPOST', self.url, '--data-binary', "@" + self.tmp_file], stdout=sub.PIPE)
        for line in iter(p.stdout.readline, b''):
            ret_dict = json.loads(line)
            if not ret_dict['errors']:
                self.pretty_print("INFO: %6s lines parsed with no errors, total cost %d ms." % (
                    len(ret_dict['items']), ret_dict['took']))
            else:
                self.pretty_print("ERROR: %6s lines parsed with some errors, total cost %d ms." % (
                    len(ret_dict['items']), ret_dict['took']))

    def pretty_print(self, msg):
        """Print msg prefixed with the current timestamp.

        (Parameter renamed from `str`, which shadowed the builtin.)
        """
        print('%s %s' % (datetime.now(), msg))

    def get_body(self, line_v, fields_list):
        """Build an ES source document from one Mongo row.

        :param line_v:      the Mongo document (mapping of field -> value).
        :param fields_list: list of (name, type) tuples from parse_field().
        :return: dict with each listed field coerced per its declared type.
        """
        body = {}
        for name, ftype in fields_list:
            raw = line_v[name]
            if ftype == 'date':
                body[name] = self.translate_str_to_date(raw)
            elif ftype == 'int':
                body[name] = self.translate_str_to_int(raw)
            elif ftype == 'float':
                body[name] = self.translate_str_to_float(raw)
            else:
                body[name] = raw
        return body

    def translate_str_to_date(self, date_str):
        """Parse 'YYYY-MM-DD HH:MM:SS' into an ISO-8601 string; False on failure."""
        try:
            return datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S').isoformat()
        except (ValueError, TypeError):
            # Narrowed from a bare except so real errors still propagate.
            self.pretty_print("Unexpected error: %s" % (sys.exc_info()[0]))
            self.pretty_print("Failed to translate '%s' to date." % (date_str))
        return False

    def translate_str_to_int(self, num_str):
        """Convert num_str to int; returns False (sentinel) on failure."""
        try:
            return int(num_str)
        except (ValueError, TypeError):
            self.pretty_print("Failed to translate '%s' to int." % (num_str))
        return False

    def translate_str_to_float(self, num_str):
        """Convert num_str to float; returns False (sentinel) on failure.

        BUG FIX: the failure message now says 'float', not 'int'.
        """
        try:
            return float(num_str)
        except (ValueError, TypeError):
            self.pretty_print("Failed to translate '%s' to float." % (num_str))
        return False


