# -*- coding: utf-8 -*-
from __future__ import print_function
import json
import psycopg2
import re
from tag.data import es_helper
from tag.data.es_objects import MomentTag, TaggingRecord

def extract_json_file(file, match_pattern, func):
    """Split a file of concatenated JSON objects and map each through func.

    The file is assumed to hold JSON objects glued together on one line; a
    newline is inserted before every occurrence of match_pattern so that each
    object lands on its own line, then every non-blank line is parsed.

    :param file: path of the file to read
    :param match_pattern: regex marking the start of each object (e.g.
        '{"id":'); it is also used verbatim as the replacement prefix, so it
        should be a literal string with no regex replacement escapes
    :param func: callback applied to each parsed object; a None return means
        "skip this record" and is only counted
    :return: list of the non-None values produced by func
    """
    ignore_count = 0
    results = []  # renamed from `list`, which shadowed the builtin
    with open(file, 'r') as f:
        content = f.read()
        # Force one JSON object per line by prefixing every match with '\n'.
        pattern = re.compile(match_pattern)
        lines = pattern.sub('\n' + match_pattern, content).splitlines()
        print(len(lines))
        for line in lines:
            if line is None or line.lstrip() == '':
                continue
            try:
                json_obj = json.loads(line)
                value = func(json_obj)
                if value is None:
                    ignore_count += 1
                else:
                    results.append(value)
            except Exception as e:
                # Best effort: log bad records (parse errors or func failures)
                # and keep going, as the original did.
                print(e, e.__class__)
    print('ignore#', ignore_count)  # fixed typo: was 'igore#'
    return results

moment_tag_cols = ['id','weight','heat','tag_category_id','tag_min_range','tag_max_range','active','tag_lang']

def process_moment_tag(json_obj):
    """Convert one exported moment-tag JSON object into a DB row plus ES doc.

    :param json_obj: parsed JSON for one tag; must contain 'tag_name' (a
        localized dict with a 'zh-CN' entry) and 'active'
    :return: [values_tuple, MomentTag] where the tuple follows
        moment_tag_cols plus a JSON-encoded tag_name column, or None when the
        record should be skipped (no Chinese name, blacklisted name, or
        explicitly inactive)
    """
    tag_name = json_obj['tag_name']['zh-CN']
    active = json_obj['active']
    # u'体重' (weight) and u'身高' (height) are excluded from this import.
    if tag_name is None or tag_name in (u'体重', u'身高') or active == False:
        return None
    tag_obj = MomentTag(json_obj)
    values = get_json_values_in_list(json_obj, moment_tag_cols)  # was `list`, shadowed the builtin
    # tag_name is a localized dict; store it as a JSON string column.
    values.append(json.dumps(json_obj['tag_name']))
    return [tuple(values), tag_obj]



def load_moment_tags():
    """Import exported moment tags into Postgres and Elasticsearch.

    Reads data/moment_tags.json, inserts one row per accepted tag into the
    moment_tags table (committing per row) and indexes the matching document
    into ES via es_helper.
    """
    source_file = 'data/moment_tags.json'
    # The INSERT needs an extra tag_name column; its value is JSON-encoded
    # inside process_moment_tag, so it is not part of moment_tag_cols.
    cols = moment_tag_cols + ['tag_name']
    sql_insert = gen_sql('moment_tags', cols)
    # NOTE(review): credentials hard-coded in source — move to config/env.
    conn = psycopg2.connect(database="timehut_development", user="postgres",
                            password="a1s23f4g5", host="127.0.0.1", port="5432")
    try:
        cur = conn.cursor()
        data = extract_json_file(source_file, '{"id":', process_moment_tag)
        for values, tag_obj in data:
            cur.execute(sql_insert, values)
            conn.commit()
            es_helper.insert_document(tag_obj.index_name, tag_obj.index_type,
                                      tag_obj.id, tag_obj.data)
    finally:
        # Close even when an insert/index call raises; the original leaked
        # the connection on any failure.
        conn.close()

def load_tagging_records():
    """Index exported tagging records into Elasticsearch.

    Reads a saved ES search response from data/tagging_records_exports.json
    and re-inserts every hit's _source as a TaggingRecord document.
    Per-record failures are logged and skipped (best effort); progress is
    printed every 50 records.

    The unused Postgres connection and the commented-out INSERT scaffolding
    from the original were removed — every DB write was already dead code,
    and the connection was opened but never used.
    """
    source_file = 'data/tagging_records_exports.json'
    count = 0
    with open(source_file, 'r') as f:
        document = json.load(f)
        json_hits = document['hits']['hits']
        for json_hit in json_hits:
            count += 1
            tagging = json_hit['_source']
            try:
                tag_obj = TaggingRecord(tagging)
                es_helper.insert_document(tag_obj.index_name, tag_obj.index_type,
                                          tag_obj.id, tag_obj.data)
                if count % 50 == 0:
                    print(count)  # coarse progress indicator
            except Exception as e:
                # Best effort: log and continue on bad records, as before.
                print(e)
    print('total data#', count)

def get_json_values_in_tuple(json, keys):
    """Look up each of *keys* in the mapping *json*.

    :return: tuple of the values, in the same order as *keys*
    :raises KeyError: if any key is missing
    """
    return tuple(json[k] for k in keys)

def get_json_values_in_list(json, keys):
    """Look up each of *keys* in the mapping *json*, skipping missing ones.

    :return: list of the values for the keys that exist, in *keys* order
    """
    values = []
    for key in keys:
        try:
            values.append(json[key])
        except KeyError:
            # Was a bare `except:` that swallowed every error (including
            # KeyboardInterrupt); only a missing key should be skipped.
            pass
    return values


def gen_sql(table, cols):
    """Build a parameterized INSERT statement for *table* over *cols*.

    :return: "insert into <table> (<cols>) values (%s,...)" with one
        psycopg2-style %s placeholder per column
    """
    names = ','.join(cols)
    placeholders = ','.join(['%s'] * len(cols))
    return 'insert into ' + table + ' (' + names + ') values (' + placeholders + ')'



if __name__ == '__main__':
    # Guarded entry point: importing this module must not trigger the load
    # (the original ran it unconditionally at import time).
    load_tagging_records()
    # load_moment_tags()